--- a/.gitlab/merge_request_templates/Default.md Thu Jun 16 15:15:03 2022 +0200
+++ b/.gitlab/merge_request_templates/Default.md Thu Jun 16 15:28:54 2022 +0200
@@ -1,8 +1,3 @@
----
-name: Official Review
-about: Submit a series for review
----
-
/assign_reviewer @mercurial.review
Welcome to the Mercurial Merge Request creation process:
--- a/.hgignore Thu Jun 16 15:15:03 2022 +0200
+++ b/.hgignore Thu Jun 16 15:28:54 2022 +0200
@@ -22,6 +22,8 @@
tests/artifacts/cache/big-file-churn.hg
tests/.coverage*
tests/.testtimes*
+# the file is written in the CWD when run-tests is run.
+.testtimes
tests/.hypothesis
tests/hypothesis-generated
tests/annotated
@@ -33,6 +35,7 @@
contrib/chg/chg
contrib/hgsh/hgsh
contrib/vagrant/.vagrant
+contrib/merge-lists/target/
dist
packages
doc/common.txt
--- a/Makefile Thu Jun 16 15:15:03 2022 +0200
+++ b/Makefile Thu Jun 16 15:28:54 2022 +0200
@@ -151,12 +151,9 @@
$(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
-rust-tests: py_feature = $(shell $(PYTHON) -c \
- 'import sys; print(["python27-bin", "python3-bin"][sys.version_info[0] >= 3])')
rust-tests:
cd $(HGROOT)/rust/hg-cpython \
- && $(CARGO) test --quiet --all \
- --no-default-features --features "$(py_feature) $(HG_RUST_FEATURES)"
+ && $(CARGO) test --quiet --all --features "$(HG_RUST_FEATURES)"
check-code:
hg manifest | xargs python contrib/check-code.py
@@ -238,16 +235,6 @@
# Place a bogon .DS_Store file in the target dir so we can be
# sure it doesn't get included in the final package.
touch build/mercurial/.DS_Store
- # install zsh completions - this location appears to be
- # searched by default as of macOS Sierra.
- install -d build/mercurial/usr/local/share/zsh/site-functions/
- install -m 0644 contrib/zsh_completion build/mercurial/usr/local/share/zsh/site-functions/_hg
- # install bash completions - there doesn't appear to be a
- # place that's searched by default for bash, so we'll follow
- # the lead of Apple's git install and just put it in a
- # location of our own.
- install -d build/mercurial/usr/local/hg/contrib/
- install -m 0644 contrib/bash_completion build/mercurial/usr/local/hg/contrib/hg-completion.bash
make -C contrib/chg \
HGPATH=/usr/local/bin/hg \
PYTHON=/usr/bin/python2.7 \
--- a/contrib/automation/hgautomation/aws.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/automation/hgautomation/aws.py Thu Jun 16 15:28:54 2022 +0200
@@ -919,17 +919,12 @@
'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
}
- requirements2_path = (
- pathlib.Path(__file__).parent.parent / 'linux-requirements-py2.txt'
- )
requirements3_path = (
pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.txt'
)
requirements35_path = (
pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.5.txt'
)
- with requirements2_path.open('r', encoding='utf-8') as fh:
- requirements2 = fh.read()
with requirements3_path.open('r', encoding='utf-8') as fh:
requirements3 = fh.read()
with requirements35_path.open('r', encoding='utf-8') as fh:
@@ -941,7 +936,6 @@
{
'instance_config': config,
'bootstrap_script': BOOTSTRAP_DEBIAN,
- 'requirements_py2': requirements2,
'requirements_py3': requirements3,
'requirements_py35': requirements35,
}
@@ -977,10 +971,6 @@
fh.write(BOOTSTRAP_DEBIAN)
fh.chmod(0o0700)
- with sftp.open('%s/requirements-py2.txt' % home, 'wb') as fh:
- fh.write(requirements2)
- fh.chmod(0o0700)
-
with sftp.open('%s/requirements-py3.txt' % home, 'wb') as fh:
fh.write(requirements3)
fh.chmod(0o0700)
--- a/contrib/automation/hgautomation/cli.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/automation/hgautomation/cli.py Thu Jun 16 15:28:54 2022 +0200
@@ -65,7 +65,6 @@
def build_inno(
hga: HGAutomation,
aws_region,
- python_version,
arch,
revision,
version,
@@ -80,21 +79,18 @@
windows.synchronize_hg(SOURCE_ROOT, revision, instance)
- for py_version in python_version:
- for a in arch:
- windows.build_inno_installer(
- instance.winrm_client,
- py_version,
- a,
- DIST_PATH,
- version=version,
- )
+ for a in arch:
+ windows.build_inno_installer(
+ instance.winrm_client,
+ a,
+ DIST_PATH,
+ version=version,
+ )
def build_wix(
hga: HGAutomation,
aws_region,
- python_version,
arch,
revision,
version,
@@ -109,15 +105,13 @@
windows.synchronize_hg(SOURCE_ROOT, revision, instance)
- for py_version in python_version:
- for a in arch:
- windows.build_wix_installer(
- instance.winrm_client,
- py_version,
- a,
- DIST_PATH,
- version=version,
- )
+ for a in arch:
+ windows.build_wix_installer(
+ instance.winrm_client,
+ a,
+ DIST_PATH,
+ version=version,
+ )
def build_windows_wheel(
@@ -158,7 +152,7 @@
windows.synchronize_hg(SOURCE_ROOT, revision, instance)
- for py_version in ("2.7", "3.7", "3.8", "3.9", "3.10"):
+ for py_version in ("3.7", "3.8", "3.9", "3.10"):
for arch in ("x86", "x64"):
windows.purge_hg(winrm_client)
windows.build_wheel(
@@ -168,15 +162,14 @@
dest_path=DIST_PATH,
)
- for py_version in (2, 3):
- for arch in ('x86', 'x64'):
- windows.purge_hg(winrm_client)
- windows.build_inno_installer(
- winrm_client, py_version, arch, DIST_PATH, version=version
- )
- windows.build_wix_installer(
- winrm_client, py_version, arch, DIST_PATH, version=version
- )
+ for arch in ('x86', 'x64'):
+ windows.purge_hg(winrm_client)
+ windows.build_inno_installer(
+ winrm_client, arch, DIST_PATH, version=version
+ )
+ windows.build_wix_installer(
+ winrm_client, arch, DIST_PATH, version=version
+ )
def terminate_ec2_instances(hga: HGAutomation, aws_region):
@@ -340,14 +333,6 @@
help='Build Inno Setup installer(s)',
)
sp.add_argument(
- '--python-version',
- help='Which version of Python to target',
- choices={2, 3},
- type=int,
- nargs='*',
- default=[3],
- )
- sp.add_argument(
'--arch',
help='Architecture to build for',
choices={'x86', 'x64'},
@@ -377,7 +362,7 @@
sp.add_argument(
'--python-version',
help='Python version to build for',
- choices={'2.7', '3.7', '3.8', '3.9', '3.10'},
+ choices={'3.7', '3.8', '3.9', '3.10'},
nargs='*',
default=['3.8'],
)
@@ -402,14 +387,6 @@
sp = subparsers.add_parser('build-wix', help='Build WiX installer(s)')
sp.add_argument(
- '--python-version',
- help='Which version of Python to target',
- choices={2, 3},
- type=int,
- nargs='*',
- default=[3],
- )
- sp.add_argument(
'--arch',
help='Architecture to build for',
choices={'x86', 'x64'},
@@ -469,9 +446,7 @@
'--python-version',
help='Python version to use',
choices={
- 'system2',
'system3',
- '2.7',
'3.5',
'3.6',
'3.7',
@@ -480,7 +455,7 @@
'pypy3.5',
'pypy3.6',
},
- default='system2',
+ default='system3',
)
sp.add_argument(
'test_flags',
@@ -501,8 +476,8 @@
sp.add_argument(
'--python-version',
help='Python version to use',
- choices={'2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10'},
- default='2.7',
+ choices={'3.5', '3.6', '3.7', '3.8', '3.9', '3.10'},
+ default='3.9',
)
sp.add_argument(
'--arch',
--- a/contrib/automation/hgautomation/linux.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/automation/hgautomation/linux.py Thu Jun 16 15:28:54 2022 +0200
@@ -25,7 +25,6 @@
}
INSTALL_PYTHONS = r'''
-PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0"
PYENV3_VERSIONS="3.5.10 3.6.13 3.7.10 3.8.10 3.9.5 pypy3.5-7.0.0 pypy3.6-7.3.3 pypy3.7-7.3.3"
git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
@@ -46,13 +45,6 @@
wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
-for v in ${PYENV2_VERSIONS}; do
- pyenv install -v ${v}
- ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
- ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
- ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
-done
-
for v in ${PYENV3_VERSIONS}; do
pyenv install -v ${v}
${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
@@ -72,7 +64,7 @@
${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/${REQUIREMENTS}
done
-pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
+pyenv global ${PYENV3_VERSIONS} system
'''.lstrip().replace(
'\r\n', '\n'
)
@@ -274,17 +266,8 @@
netbase \
ntfs-3g \
nvme-cli \
- pyflakes \
pyflakes3 \
- pylint \
pylint3 \
- python-all-dev \
- python-dev \
- python-docutils \
- python-fuzzywuzzy \
- python-pygments \
- python-subversion \
- python-vcr \
python3-boto3 \
python3-dev \
python3-docutils \
@@ -532,7 +515,7 @@
hg_bin = source_path / 'hg'
res = subprocess.run(
- ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
+ ['python3', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
cwd=str(source_path),
env=env,
check=True,
@@ -542,7 +525,7 @@
full_revision = res.stdout.decode('ascii')
args = [
- 'python2.7',
+ 'python3',
str(hg_bin),
'--config',
'ui.ssh=ssh -F %s' % ssh_config,
@@ -595,9 +578,7 @@
print('running tests')
- if python_version == 'system2':
- python = '/usr/bin/python2'
- elif python_version == 'system3':
+ if python_version == 'system3':
python = '/usr/bin/python3'
elif python_version.startswith('pypy'):
python = '/hgdev/pyenv/shims/%s' % python_version
--- a/contrib/automation/hgautomation/windows.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/automation/hgautomation/windows.py Thu Jun 16 15:28:54 2022 +0200
@@ -19,30 +19,6 @@
from .winrm import run_powershell
-# PowerShell commands to activate a Visual Studio 2008 environment.
-# This is essentially a port of vcvarsall.bat to PowerShell.
-ACTIVATE_VC9_AMD64 = r'''
-Write-Output "activating Visual Studio 2008 environment for AMD64"
-$root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
-$Env:VCINSTALLDIR = "${root}\VC\"
-$Env:WindowsSdkDir = "${root}\WinSDK\"
-$Env:PATH = "${root}\VC\Bin\amd64;${root}\WinSDK\Bin\x64;${root}\WinSDK\Bin;$Env:PATH"
-$Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:PATH"
-$Env:LIB = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIB"
-$Env:LIBPATH = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIBPATH"
-'''.lstrip()
-
-ACTIVATE_VC9_X86 = r'''
-Write-Output "activating Visual Studio 2008 environment for x86"
-$root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
-$Env:VCINSTALLDIR = "${root}\VC\"
-$Env:WindowsSdkDir = "${root}\WinSDK\"
-$Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH"
-$Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
-$Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB"
-$Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib;$Env:LIBPATH"
-'''.lstrip()
-
HG_PURGE = r'''
$Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
Set-Location C:\hgdev\src
@@ -78,14 +54,6 @@
}}
'''
-BUILD_INNO_PYTHON2 = r'''
-Set-Location C:\hgdev\src
-$python = "C:\hgdev\python27-{arch}\python.exe"
-C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --python $python {extra_args}
-if ($LASTEXITCODE -ne 0) {{
- throw "process exited non-0: $LASTEXITCODE"
-}}
-'''.lstrip()
BUILD_WHEEL = r'''
Set-Location C:\hgdev\src
@@ -105,14 +73,6 @@
}}
'''
-BUILD_WIX_PYTHON2 = r'''
-Set-Location C:\hgdev\src
-$python = "C:\hgdev\python27-{arch}\python.exe"
-C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --python $python {extra_args}
-if ($LASTEXITCODE -ne 0) {{
- throw "process exited non-0: $LASTEXITCODE"
-}}
-'''
RUN_TESTS = r'''
C:\hgdev\MinGW\msys\1.0\bin\sh.exe --login -c "cd /c/hgdev/src/tests && /c/hgdev/{python_path}/python.exe run-tests.py {test_flags}"
@@ -121,8 +81,7 @@
}}
'''
-WHEEL_FILENAME_PYTHON27_X86 = 'mercurial-{version}-cp27-cp27m-win32.whl'
-WHEEL_FILENAME_PYTHON27_X64 = 'mercurial-{version}-cp27-cp27m-win_amd64.whl'
+
WHEEL_FILENAME_PYTHON37_X86 = 'mercurial-{version}-cp37-cp37m-win32.whl'
WHEEL_FILENAME_PYTHON37_X64 = 'mercurial-{version}-cp37-cp37m-win_amd64.whl'
WHEEL_FILENAME_PYTHON38_X86 = 'mercurial-{version}-cp38-cp38-win32.whl'
@@ -132,13 +91,9 @@
WHEEL_FILENAME_PYTHON310_X86 = 'mercurial-{version}-cp310-cp310-win32.whl'
WHEEL_FILENAME_PYTHON310_X64 = 'mercurial-{version}-cp310-cp310-win_amd64.whl'
-EXE_FILENAME_PYTHON2_X86 = 'Mercurial-{version}-x86-python2.exe'
-EXE_FILENAME_PYTHON2_X64 = 'Mercurial-{version}-x64-python2.exe'
EXE_FILENAME_PYTHON3_X86 = 'Mercurial-{version}-x86.exe'
EXE_FILENAME_PYTHON3_X64 = 'Mercurial-{version}-x64.exe'
-MSI_FILENAME_PYTHON2_X86 = 'mercurial-{version}-x86-python2.msi'
-MSI_FILENAME_PYTHON2_X64 = 'mercurial-{version}-x64-python2.msi'
MSI_FILENAME_PYTHON3_X86 = 'mercurial-{version}-x86.msi'
MSI_FILENAME_PYTHON3_X64 = 'mercurial-{version}-x64.msi'
@@ -147,14 +102,6 @@
X86_USER_AGENT_PATTERN = '.*Windows.*'
X64_USER_AGENT_PATTERN = '.*Windows.*(WOW|x)64.*'
-EXE_PYTHON2_X86_DESCRIPTION = (
- 'Mercurial {version} Inno Setup installer - x86 Windows (Python 2) '
- '- does not require admin rights'
-)
-EXE_PYTHON2_X64_DESCRIPTION = (
- 'Mercurial {version} Inno Setup installer - x64 Windows (Python 2) '
- '- does not require admin rights'
-)
# TODO remove Python version once Python 2 is dropped.
EXE_PYTHON3_X86_DESCRIPTION = (
'Mercurial {version} Inno Setup installer - x86 Windows (Python 3) '
@@ -164,14 +111,6 @@
'Mercurial {version} Inno Setup installer - x64 Windows (Python 3) '
'- does not require admin rights'
)
-MSI_PYTHON2_X86_DESCRIPTION = (
- 'Mercurial {version} MSI installer - x86 Windows (Python 2) '
- '- requires admin rights'
-)
-MSI_PYTHON2_X64_DESCRIPTION = (
- 'Mercurial {version} MSI installer - x64 Windows (Python 2) '
- '- requires admin rights'
-)
MSI_PYTHON3_X86_DESCRIPTION = (
'Mercurial {version} MSI installer - x86 Windows (Python 3) '
'- requires admin rights'
@@ -182,15 +121,6 @@
)
-def get_vc_prefix(arch):
- if arch == 'x86':
- return ACTIVATE_VC9_X86
- elif arch == 'x64':
- return ACTIVATE_VC9_AMD64
- else:
- raise ValueError('illegal arch: %s; must be x86 or x64' % arch)
-
-
def fix_authorized_keys_permissions(winrm_client, path):
commands = [
'$ErrorActionPreference = "Stop"',
@@ -261,7 +191,7 @@
hg_bin = hg_repo / 'hg'
res = subprocess.run(
- ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
+ ['python3', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
cwd=str(hg_repo),
env=env,
check=True,
@@ -271,7 +201,7 @@
full_revision = res.stdout.decode('ascii')
args = [
- 'python2.7',
+ 'python3',
hg_bin,
'--config',
'ui.ssh=ssh -F %s' % ssh_config,
@@ -334,7 +264,6 @@
def build_inno_installer(
winrm_client,
- python_version: int,
arch: str,
dest_path: pathlib.Path,
version=None,
@@ -344,37 +273,23 @@
Using a WinRM client, remote commands are executed to build
a Mercurial Inno Setup installer.
"""
- print(
- 'building Inno Setup installer for Python %d %s'
- % (python_version, arch)
- )
+ print('building Inno Setup installer for %s' % arch)
- if python_version == 3:
- # TODO fix this limitation in packaging code
- if not version:
- raise Exception(
- "version string is required when building for Python 3"
- )
+ # TODO fix this limitation in packaging code
+ if not version:
+ raise Exception("version string is required when building for Python 3")
- if arch == "x86":
- target_triple = "i686-pc-windows-msvc"
- elif arch == "x64":
- target_triple = "x86_64-pc-windows-msvc"
- else:
- raise Exception("unhandled arch: %s" % arch)
+ if arch == "x86":
+ target_triple = "i686-pc-windows-msvc"
+ elif arch == "x64":
+ target_triple = "x86_64-pc-windows-msvc"
+ else:
+ raise Exception("unhandled arch: %s" % arch)
- ps = BUILD_INNO_PYTHON3.format(
- pyoxidizer_target=target_triple,
- version=version,
- )
- else:
- extra_args = []
- if version:
- extra_args.extend(['--version', version])
-
- ps = get_vc_prefix(arch) + BUILD_INNO_PYTHON2.format(
- arch=arch, extra_args=' '.join(extra_args)
- )
+ ps = BUILD_INNO_PYTHON3.format(
+ pyoxidizer_target=target_triple,
+ version=version,
+ )
run_powershell(winrm_client, ps)
copy_latest_dist(winrm_client, '*.exe', dest_path)
@@ -394,17 +309,12 @@
python_version=python_version.replace(".", ""), arch=arch
)
- # Python 2.7 requires an activated environment.
- if python_version == "2.7":
- ps = get_vc_prefix(arch) + ps
-
run_powershell(winrm_client, ps)
copy_latest_dist(winrm_client, '*.whl', dest_path)
def build_wix_installer(
winrm_client,
- python_version: int,
arch: str,
dest_path: pathlib.Path,
version=None,
@@ -413,34 +323,23 @@
Using a WinRM client, remote commands are executed to build a WiX installer.
"""
- print('Building WiX installer for Python %d %s' % (python_version, arch))
+ print('Building WiX installer for %s' % arch)
- if python_version == 3:
- # TODO fix this limitation in packaging code
- if not version:
- raise Exception(
- "version string is required when building for Python 3"
- )
+ # TODO fix this limitation in packaging code
+ if not version:
+ raise Exception("version string is required when building for Python 3")
- if arch == "x86":
- target_triple = "i686-pc-windows-msvc"
- elif arch == "x64":
- target_triple = "x86_64-pc-windows-msvc"
- else:
- raise Exception("unhandled arch: %s" % arch)
+ if arch == "x86":
+ target_triple = "i686-pc-windows-msvc"
+ elif arch == "x64":
+ target_triple = "x86_64-pc-windows-msvc"
+ else:
+ raise Exception("unhandled arch: %s" % arch)
- ps = BUILD_WIX_PYTHON3.format(
- pyoxidizer_target=target_triple,
- version=version,
- )
- else:
- extra_args = []
- if version:
- extra_args.extend(['--version', version])
-
- ps = get_vc_prefix(arch) + BUILD_WIX_PYTHON2.format(
- arch=arch, extra_args=' '.join(extra_args)
- )
+ ps = BUILD_WIX_PYTHON3.format(
+ pyoxidizer_target=target_triple,
+ version=version,
+ )
run_powershell(winrm_client, ps)
copy_latest_dist(winrm_client, '*.msi', dest_path)
@@ -474,8 +373,6 @@
def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
return (
- dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
- dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
@@ -489,8 +386,6 @@
def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
return (
- dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
- dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
@@ -499,24 +394,16 @@
dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
dist_path / WHEEL_FILENAME_PYTHON310_X86.format(version=version),
dist_path / WHEEL_FILENAME_PYTHON310_X64.format(version=version),
- dist_path / EXE_FILENAME_PYTHON2_X86.format(version=version),
- dist_path / EXE_FILENAME_PYTHON2_X64.format(version=version),
dist_path / EXE_FILENAME_PYTHON3_X86.format(version=version),
dist_path / EXE_FILENAME_PYTHON3_X64.format(version=version),
- dist_path / MSI_FILENAME_PYTHON2_X86.format(version=version),
- dist_path / MSI_FILENAME_PYTHON2_X64.format(version=version),
dist_path / MSI_FILENAME_PYTHON3_X86.format(version=version),
dist_path / MSI_FILENAME_PYTHON3_X64.format(version=version),
)
def generate_latest_dat(version: str):
- python2_x86_exe_filename = EXE_FILENAME_PYTHON2_X86.format(version=version)
- python2_x64_exe_filename = EXE_FILENAME_PYTHON2_X64.format(version=version)
python3_x86_exe_filename = EXE_FILENAME_PYTHON3_X86.format(version=version)
python3_x64_exe_filename = EXE_FILENAME_PYTHON3_X64.format(version=version)
- python2_x86_msi_filename = MSI_FILENAME_PYTHON2_X86.format(version=version)
- python2_x64_msi_filename = MSI_FILENAME_PYTHON2_X64.format(version=version)
python3_x86_msi_filename = MSI_FILENAME_PYTHON3_X86.format(version=version)
python3_x64_msi_filename = MSI_FILENAME_PYTHON3_X64.format(version=version)
@@ -536,20 +423,6 @@
EXE_PYTHON3_X64_DESCRIPTION.format(version=version),
),
(
- '9',
- version,
- X86_USER_AGENT_PATTERN,
- '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_exe_filename),
- EXE_PYTHON2_X86_DESCRIPTION.format(version=version),
- ),
- (
- '9',
- version,
- X64_USER_AGENT_PATTERN,
- '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_exe_filename),
- EXE_PYTHON2_X64_DESCRIPTION.format(version=version),
- ),
- (
'10',
version,
X86_USER_AGENT_PATTERN,
@@ -563,20 +436,6 @@
'%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_msi_filename),
MSI_PYTHON3_X64_DESCRIPTION.format(version=version),
),
- (
- '9',
- version,
- X86_USER_AGENT_PATTERN,
- '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_msi_filename),
- MSI_PYTHON2_X86_DESCRIPTION.format(version=version),
- ),
- (
- '9',
- version,
- X64_USER_AGENT_PATTERN,
- '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_msi_filename),
- MSI_PYTHON2_X64_DESCRIPTION.format(version=version),
- ),
)
lines = ['\t'.join(e) for e in entries]
--- a/contrib/automation/linux-requirements-py2.txt Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,129 +0,0 @@
-#
-# This file is autogenerated by pip-compile
-# To update, run:
-#
-# pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py2.txt contrib/automation/linux-requirements.txt.in
-#
-astroid==1.6.6 \
- --hash=sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756 \
- --hash=sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7 \
- # via pylint
-backports.functools-lru-cache==1.6.1 \
- --hash=sha256:0bada4c2f8a43d533e4ecb7a12214d9420e66eb206d54bf2d682581ca4b80848 \
- --hash=sha256:8fde5f188da2d593bd5bc0be98d9abc46c95bb8a9dde93429570192ee6cc2d4a \
- # via astroid, isort, pylint
-bzr==2.7.0 ; python_version <= "2.7" and platform_python_implementation == "CPython" \
- --hash=sha256:c9f6bbe0a50201dadc5fddadd94ba50174193c6cf6e39e16f6dd0ad98a1df338 \
- # via -r contrib/automation/linux-requirements.txt.in
-configparser==4.0.2 \
- --hash=sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c \
- --hash=sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df \
- # via pylint
-contextlib2==0.6.0.post1 \
- --hash=sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e \
- --hash=sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b \
- # via vcrpy
-docutils==0.16 \
- --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
- --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
- # via -r contrib/automation/linux-requirements.txt.in
-enum34==1.1.10 \
- --hash=sha256:a98a201d6de3f2ab3db284e70a33b0f896fbf35f8086594e8c9e74b909058d53 \
- --hash=sha256:c3858660960c984d6ab0ebad691265180da2b43f07e061c0f8dca9ef3cffd328 \
- --hash=sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248 \
- # via astroid
-funcsigs==1.0.2 \
- --hash=sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca \
- --hash=sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50 \
- # via mock
-futures==3.3.0 \
- --hash=sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16 \
- --hash=sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794 \
- # via isort
-fuzzywuzzy==0.18.0 \
- --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8 \
- --hash=sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993 \
- # via -r contrib/automation/linux-requirements.txt.in
-isort==4.3.21 \
- --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \
- --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \
- # via pylint
-lazy-object-proxy==1.5.1 \
- --hash=sha256:00b78a97a79d0dfefa584d44dd1aba9668d3de7ec82335ba0ff51d53ef107143 \
- --hash=sha256:042b54fd71c2092e6d10e5e66fa60f65c5954f8145e809f5d9f394c9b13d32ee \
- --hash=sha256:11f87dc06eb5f376cc6d5f0c19a1b4dca202035622777c4ce8e5b72c87b035d6 \
- --hash=sha256:19ae6f6511a02008ef3554e158c41bb2a8e5c8455935b98d6da076d9f152fd7c \
- --hash=sha256:22c1935c6f8e3d6ea2e169eb03928adbdb8a2251d2890f8689368d65e70aa176 \
- --hash=sha256:30ef2068f4f94660144515380ef04b93d15add2214eab8be4cd46ebc900d681c \
- --hash=sha256:33da47ba3a581860ddd3d38c950a5fe950ca389f7123edd0d6ab0bc473499fe7 \
- --hash=sha256:3e8698dc384857413580012f4ca322d89e63ef20fc3d4635a5b606d6d4b61f6a \
- --hash=sha256:4fdd7113fc5143c72dacf415079eec42fcbe69cc9d3d291b4ca742e3a9455807 \
- --hash=sha256:63b6d9a5077d54db271fcc6772440f7380ec3fa559d0e2497dbfae2f47c2c814 \
- --hash=sha256:8133b63b05f12751cddd8e3e7f02ba39dc7cfa7d2ba99d80d7436f0ba26d6b75 \
- --hash=sha256:89b8e5780e49753e2b4cd5aab45d3df092ddcbba3de2c4d4492a029588fe1758 \
- --hash=sha256:8d82e27cbbea6edb8821751806f39f5dcfd7b46a5e23d27b98d6d8c8ec751df8 \
- --hash=sha256:92cedd6e26712505adb1c17fab64651a498cc0102a80ba562ff4a2451088f57a \
- --hash=sha256:9723364577b79ad9958a68851fe2acb94da6fd25170c595516a8289e6a129043 \
- --hash=sha256:c484020ad26973a14a7cb1e1d2e0bfe97cf6803273ae9bd154e0213cc74bad49 \
- --hash=sha256:c697bd1b333b3e6abdff04ef9f5fb4b1936633d9cc4e28d90606705c9083254c \
- --hash=sha256:d0f7e14ff3424639d33e6bc449e77e4b345e52c21bbd6f6004a1d219196e2664 \
- --hash=sha256:db2df3eff7ed3e6813638686f1bb5934d1a0662d9d3b4196b5164a86be3a1e8f \
- --hash=sha256:edbcb4c5efabd93ede05b272296a5a78a67e9b6e82ba7f51a07b8103db06ce01 \
- --hash=sha256:ef355fb3802e0fc5a71dadb65a3c317bfc9bdf567d357f8e0b1900b432ffe486 \
- --hash=sha256:fe2f61fed5817bf8db01d9a72309ed5990c478a077e9585b58740c26774bce39 \
- # via astroid
-mccabe==0.6.1 \
- --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
- --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \
- # via pylint
-mock==3.0.5 \
- --hash=sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3 \
- --hash=sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8 \
- # via vcrpy
-pyflakes==2.2.0 \
- --hash=sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92 \
- --hash=sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8 \
- # via -r contrib/automation/linux-requirements.txt.in
-pygments==2.5.2 \
- --hash=sha256:2a3fe295e54a20164a9df49c75fa58526d3be48e14aceba6d6b1e8ac0bfd6f1b \
- --hash=sha256:98c8aa5a9f778fcd1026a17361ddaf7330d1b7c62ae97c3bb0ae73e0b9b6b0fe \
- # via -r contrib/automation/linux-requirements.txt.in
-pylint==1.9.5 \
- --hash=sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42 \
- --hash=sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300 \
- # via -r contrib/automation/linux-requirements.txt.in
-python-levenshtein==0.12.0 \
- --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1 \
- # via -r contrib/automation/linux-requirements.txt.in
-pyyaml==5.3.1 \
- --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \
- --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \
- --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \
- --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \
- --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \
- --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \
- --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \
- --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \
- --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \
- --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \
- --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \
- # via vcrpy
-singledispatch==3.4.0.3 \
- --hash=sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c \
- --hash=sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8 \
- # via astroid, pylint
-six==1.15.0 \
- --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \
- --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \
- # via astroid, mock, pylint, singledispatch, vcrpy
-vcrpy==3.0.0 \
- --hash=sha256:21168d5ae14263a833d4b71acfd8278d8841114f24be1b4ab4a5719d0c7f07bc \
- --hash=sha256:a2e6b653a627f9f3d6ded4d68587e470b91e4c1444e7dae939510dfeacb65276 \
- # via -r contrib/automation/linux-requirements.txt.in
-wrapt==1.12.1 \
- --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 \
- # via astroid, vcrpy
-
-# WARNING: The following packages were not pinned, but pip requires them to be
-# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
-# setuptools
--- a/contrib/bdiff-torture.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/bdiff-torture.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# Randomized torture test generation for bdiff
-from __future__ import absolute_import, print_function
import random
import sys
--- a/contrib/benchmarks/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/benchmarks/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -31,7 +31,6 @@
$ asv --config contrib/asv.conf.json preview
'''
-from __future__ import absolute_import
import functools
import os
--- a/contrib/benchmarks/perf.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/benchmarks/perf.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from . import perfbench
--- a/contrib/benchmarks/revset.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/benchmarks/revset.py Thu Jun 16 15:28:54 2022 +0200
@@ -10,7 +10,6 @@
Each revset benchmark is parameterized with variants (first, last, sort, ...)
'''
-from __future__ import absolute_import
import os
import string
--- a/contrib/byteify-strings.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/byteify-strings.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import argparse
import contextlib
--- a/contrib/casesmash.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/casesmash.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import __builtin__
import os
from mercurial import util
--- a/contrib/catapipe.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/catapipe.py Thu Jun 16 15:28:54 2022 +0200
@@ -34,7 +34,6 @@
HGCATAPULTSERVERPIPE environment variable, which both run-tests and hg
understand. To trace *only* run-tests, use HGTESTCATAPULTSERVERPIPE instead.
"""
-from __future__ import absolute_import, print_function
import argparse
import json
--- a/contrib/check-code.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/check-code.py Thu Jun 16 15:28:54 2022 +0200
@@ -19,7 +19,6 @@
* ONLY use no--check-code for skipping entire files from external sources
"""
-from __future__ import absolute_import, print_function
import glob
import keyword
import optparse
@@ -344,16 +343,6 @@
"linebreak after :",
),
(
- r'class\s[^( \n]+:',
- "old-style class, use class foo(object)",
- r'#.*old-style',
- ),
- (
- r'class\s[^( \n]+\(\):',
- "class foo() creates old style object, use class foo(object)",
- r'#.*old-style',
- ),
- (
r'\b(%s)\('
% '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')),
"Python keyword is not a function",
@@ -431,26 +420,6 @@
"module-level @cachefunc is risky, please avoid",
),
(
- r'^import Queue',
- "don't use Queue, use pycompat.queue.Queue + "
- "pycompat.queue.Empty",
- ),
- (
- r'^import cStringIO',
- "don't use cStringIO.StringIO, use util.stringio",
- ),
- (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
- (
- r'^import SocketServer',
- "don't use SockerServer, use util.socketserver",
- ),
- (r'^import urlparse', "don't use urlparse, use util.urlreq"),
- (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"),
- (r'^import cPickle', "don't use cPickle, use util.pickle"),
- (r'^import pickle', "don't use pickle, use util.pickle"),
- (r'^import httplib', "don't use httplib, use util.httplib"),
- (r'^import BaseHTTPServer', "use util.httpserver instead"),
- (
r'^(from|import) mercurial\.(cext|pure|cffi)',
"use mercurial.policy.importmod instead",
),
@@ -789,7 +758,7 @@
preparefilters(filters)
-class norepeatlogger(object):
+class norepeatlogger:
def __init__(self):
self._lastseen = None
--- a/contrib/check-commit Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/check-commit Thu Jun 16 15:28:54 2022 +0200
@@ -15,7 +15,6 @@
#
# See also: https://mercurial-scm.org/wiki/ContributingChanges
-from __future__ import absolute_import, print_function
import os
import re
--- a/contrib/check-config.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/check-config.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import re
import sys
--- a/contrib/check-py3-compat.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/check-py3-compat.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import ast
import importlib
@@ -17,31 +16,6 @@
import warnings
-def check_compat_py2(f):
- """Check Python 3 compatibility for a file with Python 2"""
- with open(f, 'rb') as fh:
- content = fh.read()
- root = ast.parse(content)
-
- # Ignore empty files.
- if not root.body:
- return
-
- futures = set()
- haveprint = False
- for node in ast.walk(root):
- if isinstance(node, ast.ImportFrom):
- if node.module == '__future__':
- futures |= {n.name for n in node.names}
- elif isinstance(node, ast.Print):
- haveprint = True
-
- if 'absolute_import' not in futures:
- print('%s not using absolute_import' % f)
- if haveprint and 'print_function' not in futures:
- print('%s requires print_function' % f)
-
-
def check_compat_py3(f):
"""Check Python 3 compatibility of a file with Python 3."""
with open(f, 'rb') as fh:
@@ -94,23 +68,19 @@
if __name__ == '__main__':
- if sys.version_info[0] == 2:
- fn = check_compat_py2
- else:
- # check_compat_py3 will import every filename we specify as long as it
- # starts with one of a few prefixes. It does this by converting
- # specified filenames like 'mercurial/foo.py' to 'mercurial.foo' and
- # importing that. When running standalone (not as part of a test), this
- # means we actually import the installed versions, not the files we just
- # specified. When running as test-check-py3-compat.t, we technically
- # would import the correct paths, but it's cleaner to have both cases
- # use the same import logic.
- sys.path.insert(0, os.getcwd())
- fn = check_compat_py3
+ # check_compat_py3 will import every filename we specify as long as it
+ # starts with one of a few prefixes. It does this by converting
+ # specified filenames like 'mercurial/foo.py' to 'mercurial.foo' and
+ # importing that. When running standalone (not as part of a test), this
+ # means we actually import the installed versions, not the files we just
+ # specified. When running as test-check-py3-compat.t, we technically
+ # would import the correct paths, but it's cleaner to have both cases
+ # use the same import logic.
+ sys.path.insert(0, os.getcwd())
for f in sys.argv[1:]:
with warnings.catch_warnings(record=True) as warns:
- fn(f)
+ check_compat_py3(f)
for w in warns:
print(
--- a/contrib/debugcmdserver.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/debugcmdserver.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# $ ./hg serve --cmds pipe | ./contrib/debugcmdserver.py -
# o, 52 -> 'capabilities: getencoding runcommand\nencoding: UTF-8'
-from __future__ import absolute_import, print_function
import struct
import sys
--- a/contrib/debugshell.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/debugshell.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
# debugshell extension
"""a python shell with repo, changelog & manifest objects"""
-from __future__ import absolute_import
import code
import mercurial
import sys
--- a/contrib/dumprevlog Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/dumprevlog Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
# Dump revlogs as raw data stream
# $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
-from __future__ import absolute_import, print_function
import sys
from mercurial.node import hex
--- a/contrib/fuzz/dirs_corpus.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/fuzz/dirs_corpus.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import argparse
import zipfile
--- a/contrib/fuzz/dirstate_corpus.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/fuzz/dirstate_corpus.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import argparse
import os
import zipfile
--- a/contrib/fuzz/fm1readmarkers_corpus.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/fuzz/fm1readmarkers_corpus.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import argparse
import zipfile
--- a/contrib/fuzz/manifest_corpus.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/fuzz/manifest_corpus.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import argparse
import zipfile
--- a/contrib/fuzz/mpatch_corpus.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/fuzz/mpatch_corpus.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import argparse
import os
import struct
@@ -22,7 +20,7 @@
if sys.version_info[0] < 3:
- class py2reprhack(object):
+ class py2reprhack:
def __repr__(self):
"""Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__"""
return self.__bytes__()
@@ -30,7 +28,7 @@
else:
- class py2reprhack(object):
+ class py2reprhack:
"""Not needed on py3."""
--- a/contrib/fuzz/revlog_corpus.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/fuzz/revlog_corpus.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import argparse
import os
import zipfile
--- a/contrib/genosxversion.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/genosxversion.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
#!/usr/bin/env python2
-from __future__ import absolute_import, print_function
import argparse
import os
--- a/contrib/heptapod-ci.yml Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/heptapod-ci.yml Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
stages:
- tests
- - phabricator
image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG
@@ -30,31 +29,22 @@
- echo "$RUNTEST_ARGS"
- HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
-checks-py2:
- <<: *runtests
- variables:
- RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
-
-checks-py3:
+checks:
<<: *runtests
variables:
RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
PYTHON: python3
-rust-cargo-test-py3:
+rust-cargo-test:
+ <<: *all
stage: tests
script:
- echo "python used, $PYTHON"
- make rust-tests
+ variables:
+ PYTHON: python3
-test-py2:
- <<: *runtests
- variables:
- RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
- TEST_HGMODULEPOLICY: "c"
- TEST_HGTESTS_ALLOW_NETIO: "1"
-
-test-py3:
+test-c:
<<: *runtests
variables:
RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
@@ -62,20 +52,14 @@
TEST_HGMODULEPOLICY: "c"
TEST_HGTESTS_ALLOW_NETIO: "1"
-test-py2-pure:
- <<: *runtests
- variables:
- RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
- TEST_HGMODULEPOLICY: "py"
-
-test-py3-pure:
+test-pure:
<<: *runtests
variables:
RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
PYTHON: python3
TEST_HGMODULEPOLICY: "py"
-test-py3-rust:
+test-rust:
<<: *runtests
variables:
HGWITHRUSTEXT: cpython
@@ -83,7 +67,7 @@
PYTHON: python3
TEST_HGMODULEPOLICY: "rust+c"
-test-py3-rhg:
+test-rhg:
<<: *runtests
variables:
HGWITHRUSTEXT: cpython
@@ -91,20 +75,14 @@
PYTHON: python3
TEST_HGMODULEPOLICY: "rust+c"
-test-py2-chg:
- <<: *runtests
- variables:
- RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
- TEST_HGMODULEPOLICY: "c"
-
-test-py3-chg:
+test-chg:
<<: *runtests
variables:
PYTHON: python3
RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
TEST_HGMODULEPOLICY: "c"
-check-pytype-py3:
+check-pytype:
extends: .runtests_template
before_script:
- hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
@@ -142,7 +120,7 @@
- C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS'
-windows-py3:
+windows:
<<: *windows_runtests
tags:
- windows
@@ -151,7 +129,7 @@
RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt"
PYTHON: py -3
-windows-py3-pyox:
+windows-pyox:
<<: *windows_runtests
tags:
- windows
--- a/contrib/hg-ssh Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/hg-ssh Thu Jun 16 15:28:54 2022 +0200
@@ -28,7 +28,6 @@
You can also add a --read-only flag to allow read-only access to a key, e.g.:
command="hg-ssh --read-only repos/*"
"""
-from __future__ import absolute_import
import os
import re
--- a/contrib/hgclient.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/hgclient.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# A minimal client for Mercurial's command server
-from __future__ import absolute_import, print_function
import io
import os
@@ -50,7 +49,7 @@
return server
-class unixconnection(object):
+class unixconnection:
def __init__(self, sockpath):
self.sock = sock = socket.socket(socket.AF_UNIX)
sock.connect(sockpath)
@@ -63,7 +62,7 @@
self.sock.close()
-class unixserver(object):
+class unixserver:
def __init__(self, sockpath, logpath=None, repopath=None):
self.sockpath = sockpath
cmdline = [b'hg', b'serve', b'--cmdserver', b'unix', b'-a', sockpath]
--- a/contrib/import-checker.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/import-checker.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-from __future__ import absolute_import, print_function
import ast
import collections
@@ -20,10 +19,11 @@
import testparseutil
-# Whitelist of modules that symbols can be directly imported from.
+# Allow list of modules that symbols can be directly imported from.
allowsymbolimports = (
'__future__',
'breezy',
+ 'concurrent',
'hgclient',
'mercurial',
'mercurial.hgweb.common',
@@ -46,9 +46,10 @@
'mercurial.thirdparty.attr',
'mercurial.thirdparty.zope',
'mercurial.thirdparty.zope.interface',
+ 'typing',
)
-# Whitelist of symbols that can be directly imported.
+# Allow list of symbols that can be directly imported.
directsymbols = ('demandimport',)
# Modules that must be aliased because they are commonly confused with
@@ -58,21 +59,6 @@
}
-def usingabsolute(root):
- """Whether absolute imports are being used."""
- if sys.version_info[0] >= 3:
- return True
-
- for node in ast.walk(root):
- if isinstance(node, ast.ImportFrom):
- if node.module == '__future__':
- for n in node.names:
- if n.name == 'absolute_import':
- return True
-
- return False
-
-
def walklocal(root):
"""Recursively yield all descendant nodes but not in a different scope"""
todo = collections.deque(ast.iter_child_nodes(root))
@@ -402,21 +388,10 @@
def verify_import_convention(module, source, localmods):
- """Verify imports match our established coding convention.
-
- We have 2 conventions: legacy and modern. The modern convention is in
- effect when using absolute imports.
+ """Verify imports match our established coding convention."""
+ root = ast.parse(source)
- The legacy convention only looks for mixed imports. The modern convention
- is much more thorough.
- """
- root = ast.parse(source)
- absolute = usingabsolute(root)
-
- if absolute:
- return verify_modern_convention(module, root, localmods)
- else:
- return verify_stdlib_on_own_line(root)
+ return verify_modern_convention(module, root, localmods)
def verify_modern_convention(module, root, localmods, root_col_offset=0):
@@ -617,33 +592,6 @@
)
-def verify_stdlib_on_own_line(root):
- """Given some python source, verify that stdlib imports are done
- in separate statements from relative local module imports.
-
- >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo')))
- [('mixed imports\\n stdlib: sys\\n relative: foo', 1)]
- >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os')))
- []
- >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar')))
- []
- """
- for node in ast.walk(root):
- if isinstance(node, ast.Import):
- from_stdlib = {False: [], True: []}
- for n in node.names:
- from_stdlib[n.name in stdlib_modules].append(n.name)
- if from_stdlib[True] and from_stdlib[False]:
- yield (
- 'mixed imports\n stdlib: %s\n relative: %s'
- % (
- ', '.join(sorted(from_stdlib[True])),
- ', '.join(sorted(from_stdlib[False])),
- ),
- node.lineno,
- )
-
-
class CircularImport(Exception):
pass
@@ -679,7 +627,6 @@
All module names recorded in `imports` should be absolute one.
- >>> from __future__ import print_function
>>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
... 'top.bar': ['top.baz', 'sys'],
... 'top.baz': ['top.foo'],
--- a/contrib/install-windows-dependencies.ps1 Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/install-windows-dependencies.ps1 Thu Jun 16 15:28:54 2022 +0200
@@ -29,19 +29,19 @@
$PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10-amd64.exe"
$PYTHON38_x64_SHA256 = "7628244cb53408b50639d2c1287c659f4e29d3dfdb9084b11aed5870c0c6a48a"
-$PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9.exe"
-$PYTHON39_x86_SHA256 = "6646a5683adf14d35e8c53aab946895bc0f0b825f7acac3a62cc85ee7d0dc71a"
-$PYTHON39_X64_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9-amd64.exe"
-$PYTHON39_x64_SHA256 = "137d59e5c0b01a8f1bdcba08344402ae658c81c6bf03b6602bd8b4e951ad0714"
+$PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.12/python-3.9.12.exe"
+$PYTHON39_x86_SHA256 = "3d883326f30ac231c06b33f2a8ea700a185c20bf98d01da118079e9134d5fd20"
+$PYTHON39_X64_URL = "https://www.python.org/ftp/python/3.9.12/python-3.9.12-amd64.exe"
+$PYTHON39_x64_SHA256 = "2ba57ab2281094f78fc0227a27f4d47c90d94094e7cca35ce78419e616b3cb63"
-$PYTHON310_x86_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0.exe"
-$PYTHON310_x86_SHA256 = "ea896eeefb1db9e12fb89ec77a6e28c9fe52b4a162a34c85d9688be2ec2392e8"
-$PYTHON310_X64_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0-amd64.exe"
-$PYTHON310_x64_SHA256 = "cb580eb7dc55f9198e650f016645023e8b2224cf7d033857d12880b46c5c94ef"
+$PYTHON310_x86_URL = "https://www.python.org/ftp/python/3.10.4/python-3.10.4.exe"
+$PYTHON310_x86_SHA256 = "97c37c53c7a826f5b00e185754ab2a324a919f7afc469b20764b71715c80041d"
+$PYTHON310_X64_URL = "https://www.python.org/ftp/python/3.10.4/python-3.10.4-amd64.exe"
+$PYTHON310_x64_SHA256 = "a81fc4180f34e5733c3f15526c668ff55de096366f9006d8a44c0336704e50f1"
-# PIP 19.2.3.
-$PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
-$PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe"
+# PIP 22.0.4.
+$PIP_URL = "https://github.com/pypa/get-pip/raw/38e54e5de07c66e875c11a1ebbdb938854625dd8/public/get-pip.py"
+$PIP_SHA256 = "e235c437e5c7d7524fbce3880ca39b917a73dc565e0c813465b7a7a329bb279a"
$INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
$INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
@@ -90,7 +90,13 @@
$p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden
if ($p.ExitCode -ne 0) {
- throw "process exited non-0: $($p.ExitCode)"
+ # If the MSI is already installed, ignore the error
+ if ($p.ExitCode -eq 1638) {
+ Write-Output "program already installed; continuing..."
+ }
+ else {
+ throw "process exited non-0: $($p.ExitCode)"
+ }
}
}
@@ -150,7 +156,7 @@
Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip}
Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip}
Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip}
-# Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
+ Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip}
Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip}
Install-Python3 "Python 3.10 32-bit" ${prefix}\assets\python310-x86.exe ${prefix}\python310-x86 ${pip}
--- a/contrib/memory.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/memory.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,8 +11,6 @@
prints it to ``stderr`` on exit.
'''
-from __future__ import absolute_import
-
def memusage(ui):
"""Report memory usage of the current process."""
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/merge-lists/Cargo.lock Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,560 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "assert_cmd"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93ae1ddd39efd67689deb1979d80bad3bf7f2b09c6e6117c8d1f2443b5e2f83e"
+dependencies = [
+ "bstr",
+ "doc-comment",
+ "predicates",
+ "predicates-core",
+ "predicates-tree",
+ "wait-timeout",
+]
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "bstr"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"
+dependencies = [
+ "lazy_static",
+ "memchr",
+ "regex-automata",
+]
+
+[[package]]
+name = "clap"
+version = "3.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8c93436c21e4698bacadf42917db28b23017027a4deccb35dbe47a7e7840123"
+dependencies = [
+ "atty",
+ "bitflags",
+ "clap_derive",
+ "indexmap",
+ "lazy_static",
+ "os_str_bytes",
+ "strsim",
+ "termcolor",
+ "textwrap",
+]
+
+[[package]]
+name = "clap_derive"
+version = "3.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da95d038ede1a964ce99f49cbe27a7fb538d1da595e4b4f70b8c8f338d17bf16"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "console"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a28b32d32ca44b70c3e4acd7db1babf555fa026e385fb95f18028f88848b3c31"
+dependencies = [
+ "encode_unicode",
+ "libc",
+ "once_cell",
+ "terminal_size",
+ "winapi",
+]
+
+[[package]]
+name = "difflib"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
+
+[[package]]
+name = "doc-comment"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
+
+[[package]]
+name = "either"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+
+[[package]]
+name = "encode_unicode"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
+
+[[package]]
+name = "fuchsia-cprng"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
+
+[[package]]
+name = "hashbrown"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+
+[[package]]
+name = "heck"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "insta"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30a7e1911532a662f6b08b68f884080850f2fd9544963c3ab23a5af42bda1eac"
+dependencies = [
+ "console",
+ "once_cell",
+ "serde",
+ "serde_json",
+ "serde_yaml",
+ "similar",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.119"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4"
+
+[[package]]
+name = "linked-hash-map"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
+
+[[package]]
+name = "memchr"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "merge-lists"
+version = "0.1.0"
+dependencies = [
+ "assert_cmd",
+ "clap",
+ "insta",
+ "itertools",
+ "regex",
+ "similar",
+ "tempdir",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
+
+[[package]]
+name = "os_str_bytes"
+version = "6.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "predicates"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5aab5be6e4732b473071984b3164dbbfb7a3674d30ea5ff44410b6bcd960c3c"
+dependencies = [
+ "difflib",
+ "itertools",
+ "predicates-core",
+]
+
+[[package]]
+name = "predicates-core"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da1c2388b1513e1b605fcec39a95e0a9e8ef088f71443ef37099fa9ae6673fcb"
+
+[[package]]
+name = "predicates-tree"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d86de6de25020a36c6d3643a86d9a6a9f552107c0559c60ea03551b5e16c032"
+dependencies = [
+ "predicates-core",
+ "termtree",
+]
+
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293"
+dependencies = [
+ "fuchsia-cprng",
+ "libc",
+ "rand_core 0.3.1",
+ "rdrand",
+ "winapi",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
+dependencies = [
+ "rand_core 0.4.2",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
+
+[[package]]
+name = "rdrand"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
+dependencies = [
+ "rand_core 0.3.1",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+
+[[package]]
+name = "remove_dir_all"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
+
+[[package]]
+name = "serde"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_yaml"
+version = "0.8.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4a521f2940385c165a24ee286aa8599633d162077a54bdcae2a6fd5a7bfa7a0"
+dependencies = [
+ "indexmap",
+ "ryu",
+ "serde",
+ "yaml-rust",
+]
+
+[[package]]
+name = "similar"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3"
+dependencies = [
+ "bstr",
+]
+
+[[package]]
+name = "strsim"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
+
+[[package]]
+name = "syn"
+version = "1.0.87"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e59d925cf59d8151f25a3bedf97c9c157597c9df7324d32d68991cc399ed08b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "tempdir"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
+dependencies = [
+ "rand",
+ "remove_dir_all",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "terminal_size"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "termtree"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "507e9898683b6c43a9aa55b64259b721b52ba226e0f3779137e50ad114a4c90b"
+
+[[package]]
+name = "textwrap"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "wait-timeout"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "yaml-rust"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
+dependencies = [
+ "linked-hash-map",
+]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/merge-lists/Cargo.toml Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,21 @@
+# A tool that performs a 3-way merge, resolving conflicts in sorted lists and
+# leaving other conflicts unchanged. This is useful with Mercurial's support
+# for partial merge tools (configured in `[partial-merge-tools]`).
+
+[package]
+name = "merge-lists"
+version = "0.1.0"
+edition = "2021"
+# We need https://github.com/rust-lang/rust/pull/89825
+rust-version = "1.59"
+
+[dependencies]
+clap = { version = "3.1.6", features = ["derive"] }
+itertools = "0.10.3"
+regex = "1.5.5"
+similar = { version="2.1.0", features = ["bytes"] }
+
+[dev-dependencies]
+assert_cmd = "2.0.4"
+insta = "1.13.0"
+tempdir = "0.3.7"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/merge-lists/src/main.rs Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,300 @@
+use clap::{ArgGroup, Parser};
+use itertools::Itertools;
+use regex::bytes::Regex;
+use similar::ChangeTag;
+use std::cmp::{max, min, Ordering};
+use std::collections::HashSet;
+use std::ffi::OsString;
+use std::ops::Range;
+use std::path::PathBuf;
+
+fn find_unchanged_ranges(
+ old_bytes: &[u8],
+ new_bytes: &[u8],
+) -> Vec<(Range<usize>, Range<usize>)> {
+ let diff = similar::TextDiff::configure()
+ .algorithm(similar::Algorithm::Patience)
+ .diff_lines(old_bytes, new_bytes);
+ let mut new_unchanged_ranges = vec![];
+ let mut old_index = 0;
+ let mut new_index = 0;
+ for diff in diff.iter_all_changes() {
+ match diff.tag() {
+ ChangeTag::Equal => {
+ new_unchanged_ranges.push((
+ old_index..old_index + diff.value().len(),
+ new_index..new_index + diff.value().len(),
+ ));
+ old_index += diff.value().len();
+ new_index += diff.value().len();
+ }
+ ChangeTag::Delete => {
+ old_index += diff.value().len();
+ }
+ ChangeTag::Insert => {
+ new_index += diff.value().len();
+ }
+ }
+ }
+ new_unchanged_ranges
+}
+
+/// Returns a list of all the lines in the input (including trailing newlines),
+/// but only if they all match the regex and they are sorted.
+fn get_lines<'input>(
+ input: &'input [u8],
+ regex: &Regex,
+) -> Option<Vec<&'input [u8]>> {
+ let lines = input.split_inclusive(|x| *x == b'\n').collect_vec();
+ let mut previous_line = "".as_bytes();
+ for line in &lines {
+ if *line < previous_line {
+ return None;
+ }
+ if !regex.is_match(line) {
+ return None;
+ }
+ previous_line = line;
+ }
+ Some(lines)
+}
+
+fn resolve_conflict(
+ base_slice: &[u8],
+ local_slice: &[u8],
+ other_slice: &[u8],
+ regex: &Regex,
+) -> Option<Vec<u8>> {
+ let base_lines = get_lines(base_slice, regex)?;
+ let local_lines = get_lines(local_slice, regex)?;
+ let other_lines = get_lines(other_slice, regex)?;
+ let base_lines_set: HashSet<_> = base_lines.iter().copied().collect();
+ let local_lines_set: HashSet<_> = local_lines.iter().copied().collect();
+ let other_lines_set: HashSet<_> = other_lines.iter().copied().collect();
+ let mut result = local_lines_set;
+ for to_add in other_lines_set.difference(&base_lines_set) {
+ result.insert(to_add);
+ }
+ for to_remove in base_lines_set.difference(&other_lines_set) {
+ result.remove(to_remove);
+ }
+ Some(result.into_iter().sorted().collect_vec().concat())
+}
+
+fn resolve(
+ base_bytes: &[u8],
+ local_bytes: &[u8],
+ other_bytes: &[u8],
+ regex: &Regex,
+) -> (Vec<u8>, Vec<u8>, Vec<u8>) {
+ // Find unchanged ranges between the base and the two sides. We do that by
+ // initially considering the whole base unchanged. Then we compare each
+ // side with the base and intersect the unchanged ranges we find with
+ // what we had before.
+ let unchanged_ranges = vec![UnchangedRange {
+ base_range: 0..base_bytes.len(),
+ offsets: vec![],
+ }];
+ let unchanged_ranges = intersect_regions(
+ unchanged_ranges,
+ &find_unchanged_ranges(base_bytes, local_bytes),
+ );
+ let mut unchanged_ranges = intersect_regions(
+ unchanged_ranges,
+ &find_unchanged_ranges(base_bytes, other_bytes),
+ );
+ // Add an empty UnchangedRange at the end to make it easier to find change
+ // ranges. That way there's a changed range before each UnchangedRange.
+ unchanged_ranges.push(UnchangedRange {
+ base_range: base_bytes.len()..base_bytes.len(),
+ offsets: vec![
+ local_bytes.len().wrapping_sub(base_bytes.len()) as isize,
+ other_bytes.len().wrapping_sub(base_bytes.len()) as isize,
+ ],
+ });
+
+ let mut new_base_bytes: Vec<u8> = vec![];
+ let mut new_local_bytes: Vec<u8> = vec![];
+ let mut new_other_bytes: Vec<u8> = vec![];
+ let mut previous = UnchangedRange {
+ base_range: 0..0,
+ offsets: vec![0, 0],
+ };
+ for current in unchanged_ranges {
+ let base_slice =
+ &base_bytes[previous.base_range.end..current.base_range.start];
+ let local_slice = &local_bytes[previous.end(0)..current.start(0)];
+ let other_slice = &other_bytes[previous.end(1)..current.start(1)];
+ if let Some(resolution) =
+ resolve_conflict(base_slice, local_slice, other_slice, regex)
+ {
+ new_base_bytes.extend(&resolution);
+ new_local_bytes.extend(&resolution);
+ new_other_bytes.extend(&resolution);
+ } else {
+ new_base_bytes.extend(base_slice);
+ new_local_bytes.extend(local_slice);
+ new_other_bytes.extend(other_slice);
+ }
+ new_base_bytes.extend(&base_bytes[current.base_range.clone()]);
+ new_local_bytes.extend(&local_bytes[current.start(0)..current.end(0)]);
+ new_other_bytes.extend(&other_bytes[current.start(1)..current.end(1)]);
+ previous = current;
+ }
+
+ (new_base_bytes, new_local_bytes, new_other_bytes)
+}
+
+/// A tool that performs a 3-way merge, resolving conflicts in sorted lists and
+/// leaving other conflicts unchanged. This is useful with Mercurial's support
+/// for partial merge tools (configured in `[partial-merge-tools]`).
+#[derive(Parser, Debug)]
+#[clap(version, about, long_about = None)]
+#[clap(group(ArgGroup::new("match").required(true).args(&["pattern", "python-imports"])))]
+struct Args {
+ /// Path to the file's content in the "local" side
+ local: OsString,
+
+ /// Path to the file's content in the base
+ base: OsString,
+
+ /// Path to the file's content in the "other" side
+ other: OsString,
+
+ /// Regular expression to use
+ #[clap(long, short)]
+ pattern: Option<String>,
+
+ /// Use built-in regular expression for Python imports
+ #[clap(long)]
+ python_imports: bool,
+}
+
+fn get_regex(args: &Args) -> Regex {
+ let pattern = if args.python_imports {
+ r"import \w+(\.\w+)*( +#.*)?\n|from (\w+(\.\w+)* import \w+( as \w+)?(, \w+( as \w+)?)*( +#.*)?)"
+ } else if let Some(pattern) = &args.pattern {
+ pattern
+ } else {
+ ".*"
+ };
+ let pattern = format!(r"{}\r?\n?", pattern);
+ regex::bytes::Regex::new(&pattern).unwrap()
+}
+
+fn main() {
+ let args: Args = Args::parse();
+
+ let base_path = PathBuf::from(&args.base);
+ let local_path = PathBuf::from(&args.local);
+ let other_path = PathBuf::from(&args.other);
+
+ let base_bytes = std::fs::read(&base_path).unwrap();
+ let local_bytes = std::fs::read(&local_path).unwrap();
+ let other_bytes = std::fs::read(&other_path).unwrap();
+
+ let regex = get_regex(&args);
+ let (new_base_bytes, new_local_bytes, new_other_bytes) =
+        resolve(&base_bytes, &local_bytes, &other_bytes, &regex);
+
+ // Write out the result if anything changed
+ if new_base_bytes != base_bytes {
+ std::fs::write(&base_path, new_base_bytes).unwrap();
+ }
+ if new_local_bytes != local_bytes {
+ std::fs::write(&local_path, new_local_bytes).unwrap();
+ }
+ if new_other_bytes != other_bytes {
+ std::fs::write(&other_path, new_other_bytes).unwrap();
+ }
+}
+
+fn checked_add(base: usize, offset: isize) -> usize {
+ if offset < 0 {
+ base.checked_sub(offset.checked_abs().unwrap() as usize)
+ .unwrap()
+ } else {
+ base.checked_add(offset as usize).unwrap()
+ }
+}
+
+// The remainder of the file is copied from
+// https://github.com/martinvonz/jj/blob/main/lib/src/diff.rs
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct UnchangedRange {
+ base_range: Range<usize>,
+ offsets: Vec<isize>,
+}
+
+impl UnchangedRange {
+ fn start(&self, side: usize) -> usize {
+ checked_add(self.base_range.start, self.offsets[side])
+ }
+
+ fn end(&self, side: usize) -> usize {
+ checked_add(self.base_range.end, self.offsets[side])
+ }
+}
+
+impl PartialOrd for UnchangedRange {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for UnchangedRange {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.base_range
+ .start
+ .cmp(&other.base_range.start)
+ .then_with(|| self.base_range.end.cmp(&other.base_range.end))
+ }
+}
+
+/// Takes the current regions and intersects it with the new unchanged ranges
+/// from a 2-way diff. The result is a map of unchanged regions with one more
+/// offset in the map's values.
+fn intersect_regions(
+ current_ranges: Vec<UnchangedRange>,
+ new_unchanged_ranges: &[(Range<usize>, Range<usize>)],
+) -> Vec<UnchangedRange> {
+ let mut result = vec![];
+ let mut current_ranges_iter = current_ranges.into_iter().peekable();
+ for (new_base_range, other_range) in new_unchanged_ranges.iter() {
+ assert_eq!(new_base_range.len(), other_range.len());
+ while let Some(UnchangedRange {
+ base_range,
+ offsets,
+ }) = current_ranges_iter.peek()
+ {
+ // No need to look further if we're past the new range.
+ if base_range.start >= new_base_range.end {
+ break;
+ }
+ // Discard any current unchanged regions that don't match between
+ // the base and the new input.
+ if base_range.end <= new_base_range.start {
+ current_ranges_iter.next();
+ continue;
+ }
+ let new_start = max(base_range.start, new_base_range.start);
+ let new_end = min(base_range.end, new_base_range.end);
+ let mut new_offsets = offsets.clone();
+ new_offsets
+ .push(other_range.start.wrapping_sub(new_base_range.start)
+ as isize);
+ result.push(UnchangedRange {
+ base_range: new_start..new_end,
+ offsets: new_offsets,
+ });
+ if base_range.end >= new_base_range.end {
+ // Break without consuming the item; there may be other new
+ // ranges that overlap with it.
+ break;
+ }
+ current_ranges_iter.next();
+ }
+ }
+ result
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/merge-lists/tests/test-merge-lists.rs Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,204 @@
+use similar::DiffableStr;
+use std::ffi::OsStr;
+use tempdir::TempDir;
+
+fn run_test(arg: &str, input: &str) -> String {
+ let mut cmd = assert_cmd::Command::cargo_bin("merge-lists").unwrap();
+ let temp_dir = TempDir::new("test").unwrap();
+ let base_path = temp_dir.path().join("base");
+ let local_path = temp_dir.path().join("local");
+ let other_path = temp_dir.path().join("other");
+
+ let rest = input.strip_prefix("\nbase:\n").unwrap();
+ let mut split = rest.split("\nlocal:\n");
+ std::fs::write(&base_path, split.next().unwrap()).unwrap();
+ let rest = split.next().unwrap();
+ let mut split = rest.split("\nother:\n");
+ std::fs::write(&local_path, split.next().unwrap()).unwrap();
+ std::fs::write(&other_path, split.next().unwrap()).unwrap();
+ cmd.args(&[
+ OsStr::new(arg),
+ local_path.as_os_str(),
+ base_path.as_os_str(),
+ other_path.as_os_str(),
+ ])
+ .assert()
+ .success();
+
+ let new_base_bytes = std::fs::read(&base_path).unwrap();
+ let new_local_bytes = std::fs::read(&local_path).unwrap();
+ let new_other_bytes = std::fs::read(&other_path).unwrap();
+ // No newline before "base:" because of https://github.com/mitsuhiko/insta/issues/117
+ format!(
+ "base:\n{}\nlocal:\n{}\nother:\n{}",
+ new_base_bytes.as_str().unwrap(),
+ new_local_bytes.as_str().unwrap(),
+ new_other_bytes.as_str().unwrap()
+ )
+}
+
+#[test]
+fn test_merge_lists_basic() {
+ let output = run_test(
+ "--python-imports",
+ r"
+base:
+import lib1
+import lib2
+
+local:
+import lib2
+import lib3
+
+other:
+import lib3
+import lib4
+",
+ );
+ insta::assert_snapshot!(output, @r###"
+ base:
+ import lib3
+ import lib4
+
+ local:
+ import lib3
+ import lib4
+
+ other:
+ import lib3
+ import lib4
+ "###);
+}
+
+#[test]
+fn test_merge_lists_from() {
+ // Test some "from x import y" statements and some non-import conflicts
+ // (unresolvable)
+ let output = run_test(
+ "--python-imports",
+ r"
+base:
+from . import x
+
+1+1
+
+local:
+from . import x
+from a import b
+
+2+2
+
+other:
+from a import c
+
+3+3
+",
+ );
+ insta::assert_snapshot!(output, @r###"
+ base:
+ from a import b
+ from a import c
+
+ 1+1
+
+ local:
+ from a import b
+ from a import c
+
+ 2+2
+
+ other:
+ from a import b
+ from a import c
+
+ 3+3
+ "###);
+}
+
+#[test]
+fn test_merge_lists_not_sorted() {
+ // Test that nothing is done if the elements in the conflicting hunks are
+ // not sorted
+ let output = run_test(
+ "--python-imports",
+ r"
+base:
+import x
+
+1+1
+
+local:
+import a
+import x
+
+2+2
+
+other:
+import z
+import y
+
+3+3
+",
+ );
+ insta::assert_snapshot!(output, @r###"
+ base:
+ import x
+
+ 1+1
+
+ local:
+ import a
+ import x
+
+ 2+2
+
+ other:
+ import z
+ import y
+
+ 3+3
+ "###);
+}
+
+#[test]
+fn test_custom_regex() {
+ // Test merging of all lines (by matching anything)
+ let output = run_test(
+ "--pattern=.*",
+ r"
+base:
+aardvark
+baboon
+camel
+
+local:
+aardvark
+camel
+eagle
+
+other:
+aardvark
+camel
+deer
+",
+ );
+ insta::assert_snapshot!(output, @r###"
+ base:
+ aardvark
+ camel
+ deer
+ eagle
+
+ local:
+ aardvark
+ camel
+ deer
+ eagle
+
+ other:
+ aardvark
+ camel
+ deer
+ eagle
+ "###);
+}
--- a/contrib/packaging/debian/rules Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/debian/rules Thu Jun 16 15:28:54 2022 +0200
@@ -92,10 +92,8 @@
mkdir -p "$(CURDIR)"/debian/mercurial/etc/mercurial/hgrc.d/
cp contrib/packaging/debian/*.rc "$(CURDIR)"/debian/mercurial/etc/mercurial/hgrc.d/
# completions
- mkdir -p "$(CURDIR)"/debian/mercurial/usr/share/bash-completion/completions
- cp contrib/bash_completion "$(CURDIR)"/debian/mercurial/usr/share/bash-completion/completions/hg
mkdir -p "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions
- cp contrib/zsh_completion "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions/_hg
+ mv "$(CURDIR)"/debian/mercurial/usr/share/zsh/site-functions/_hg "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions/_hg
if [ "$(DEB_HG_CHG_BY_DEFAULT)" -eq 1 ]; then \
mkdir -p "$(CURDIR)"/debian/mercurial/usr/lib/mercurial; \
mv "$(CURDIR)"/debian/mercurial/usr/bin/hg "$(CURDIR)"/debian/mercurial/usr/lib/mercurial/hg; \
--- a/contrib/packaging/hgpackaging/cli.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/hgpackaging/cli.py Thu Jun 16 15:28:54 2022 +0200
@@ -20,13 +20,7 @@
SOURCE_DIR = HERE.parent.parent.parent
-def build_inno(pyoxidizer_target=None, python=None, iscc=None, version=None):
- if not pyoxidizer_target and not python:
- raise Exception("--python required unless building with PyOxidizer")
-
- if python and not os.path.isabs(python):
- raise Exception("--python arg must be an absolute path")
-
+def build_inno(pyoxidizer_target, iscc=None, version=None):
if iscc:
iscc = pathlib.Path(iscc)
else:
@@ -38,59 +32,30 @@
build_dir = SOURCE_DIR / "build"
- if pyoxidizer_target:
- inno.build_with_pyoxidizer(
- SOURCE_DIR, build_dir, pyoxidizer_target, iscc, version=version
- )
- else:
- inno.build_with_py2exe(
- SOURCE_DIR,
- build_dir,
- pathlib.Path(python),
- iscc,
- version=version,
- )
+ inno.build_with_pyoxidizer(
+ SOURCE_DIR, build_dir, pyoxidizer_target, iscc, version=version
+ )
def build_wix(
+ pyoxidizer_target,
name=None,
- pyoxidizer_target=None,
- python=None,
version=None,
sign_sn=None,
sign_cert=None,
sign_password=None,
sign_timestamp_url=None,
- extra_packages_script=None,
extra_wxs=None,
extra_features=None,
extra_pyoxidizer_vars=None,
):
- if not pyoxidizer_target and not python:
- raise Exception("--python required unless building with PyOxidizer")
-
- if python and not os.path.isabs(python):
- raise Exception("--python arg must be an absolute path")
-
kwargs = {
"source_dir": SOURCE_DIR,
"version": version,
+ "target_triple": pyoxidizer_target,
+ "extra_pyoxidizer_vars": extra_pyoxidizer_vars,
}
- if pyoxidizer_target:
- fn = wix.build_installer_pyoxidizer
- kwargs["target_triple"] = pyoxidizer_target
- kwargs["extra_pyoxidizer_vars"] = extra_pyoxidizer_vars
- else:
- fn = wix.build_installer_py2exe
- kwargs["python_exe"] = pathlib.Path(python)
-
- if extra_packages_script:
- if pyoxidizer_target:
- raise Exception(
- "pyoxidizer does not support --extra-packages-script"
- )
- kwargs["extra_packages_script"] = extra_packages_script
if extra_wxs:
kwargs["extra_wxs"] = dict(
thing.split("=") for thing in extra_wxs.split(",")
@@ -107,7 +72,7 @@
"timestamp_url": sign_timestamp_url,
}
- fn(**kwargs)
+ wix.build_installer_pyoxidizer(**kwargs)
def get_parser():
@@ -119,14 +84,14 @@
sp.add_argument(
"--pyoxidizer-target",
choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"},
+ required=True,
help="Build with PyOxidizer targeting this host triple",
)
- sp.add_argument("--python", help="path to python.exe to use")
sp.add_argument("--iscc", help="path to iscc.exe to use")
sp.add_argument(
"--version",
help="Mercurial version string to use "
- "(detected from __version__.py if not defined",
+ "(detected from __version__.py if not defined)",
)
sp.set_defaults(func=build_inno)
@@ -137,9 +102,9 @@
sp.add_argument(
"--pyoxidizer-target",
choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"},
+ required=True,
help="Build with PyOxidizer targeting this host triple",
)
- sp.add_argument("--python", help="Path to Python executable to use")
sp.add_argument(
"--sign-sn",
help="Subject name (or fragment thereof) of certificate "
@@ -155,12 +120,6 @@
)
sp.add_argument("--version", help="Version string to use")
sp.add_argument(
- "--extra-packages-script",
- help=(
- "Script to execute to include extra packages in " "py2exe binary."
- ),
- )
- sp.add_argument(
"--extra-wxs", help="CSV of path_to_wxs_file=working_dir_for_wxs_file"
)
sp.add_argument(
--- a/contrib/packaging/hgpackaging/downloads.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/hgpackaging/downloads.py Thu Jun 16 15:28:54 2022 +0200
@@ -10,6 +10,7 @@
import gzip
import hashlib
import pathlib
+import typing
import urllib.request
@@ -25,48 +26,6 @@
'size': 715086,
'sha256': '411f94974492fd2ecf52590cb05b1023530aec67e64154a88b1e4ebcd9c28588',
},
- 'py2exe': {
- 'url': 'https://versaweb.dl.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.zip',
- 'size': 149687,
- 'sha256': '6bd383312e7d33eef2e43a5f236f9445e4f3e0f6b16333c6f183ed445c44ddbd',
- 'version': '0.6.9',
- },
- # The VC9 CRT merge modules aren't readily available on most systems because
- # they are only installed as part of a full Visual Studio 2008 install.
- # While we could potentially extract them from a Visual Studio 2008
- # installer, it is easier to just fetch them from a known URL.
- 'vc9-crt-x86-msm': {
- 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/Microsoft_VC90_CRT_x86.msm',
- 'size': 615424,
- 'sha256': '837e887ef31b332feb58156f429389de345cb94504228bb9a523c25a9dd3d75e',
- },
- 'vc9-crt-x86-msm-policy': {
- 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/policy_9_0_Microsoft_VC90_CRT_x86.msm',
- 'size': 71168,
- 'sha256': '3fbcf92e3801a0757f36c5e8d304e134a68d5cafd197a6df7734ae3e8825c940',
- },
- 'vc9-crt-x64-msm': {
- 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/Microsoft_VC90_CRT_x86_x64.msm',
- 'size': 662528,
- 'sha256': '50d9639b5ad4844a2285269c7551bf5157ec636e32396ddcc6f7ec5bce487a7c',
- },
- 'vc9-crt-x64-msm-policy': {
- 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/policy_9_0_Microsoft_VC90_CRT_x86_x64.msm',
- 'size': 71168,
- 'sha256': '0550ea1929b21239134ad3a678c944ba0f05f11087117b6cf0833e7110686486',
- },
- 'virtualenv': {
- 'url': 'https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/virtualenv-16.4.3.tar.gz',
- 'size': 3713208,
- 'sha256': '984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39',
- 'version': '16.4.3',
- },
- 'wix': {
- 'url': 'https://github.com/wixtoolset/wix3/releases/download/wix3111rtm/wix311-binaries.zip',
- 'size': 34358269,
- 'sha256': '37f0a533b0978a454efb5dc3bd3598becf9660aaf4287e55bf68ca6b527d051d',
- 'version': '3.11.1',
- },
}
@@ -168,8 +127,8 @@
def download_entry(
- name: dict, dest_path: pathlib.Path, local_name=None
-) -> pathlib.Path:
+ name: str, dest_path: pathlib.Path, local_name=None
+) -> typing.Tuple[pathlib.Path, typing.Dict[str, typing.Union[str, int]]]:
entry = DOWNLOADS[name]
url = entry['url']
--- a/contrib/packaging/hgpackaging/inno.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/hgpackaging/inno.py Thu Jun 16 15:28:54 2022 +0200
@@ -14,29 +14,13 @@
import jinja2
-from .py2exe import (
- build_py2exe,
- stage_install,
-)
from .pyoxidizer import create_pyoxidizer_install_layout
from .util import (
- find_legacy_vc_runtime_files,
normalize_windows_version,
process_install_rules,
read_version_py,
)
-EXTRA_PACKAGES = {
- 'dulwich',
- 'keyring',
- 'pygments',
- 'win32ctypes',
-}
-
-EXTRA_INCLUDES = {
- '_curses',
- '_curses_panel',
-}
EXTRA_INSTALL_RULES = [
('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'),
@@ -47,80 +31,6 @@
}
-def build_with_py2exe(
- source_dir: pathlib.Path,
- build_dir: pathlib.Path,
- python_exe: pathlib.Path,
- iscc_exe: pathlib.Path,
- version=None,
-):
- """Build the Inno installer using py2exe.
-
- Build files will be placed in ``build_dir``.
-
- py2exe's setup.py doesn't use setuptools. It doesn't have modern logic
- for finding the Python 2.7 toolchain. So, we require the environment
- to already be configured with an active toolchain.
- """
- if not iscc_exe.exists():
- raise Exception('%s does not exist' % iscc_exe)
-
- vc_x64 = r'\x64' in os.environ.get('LIB', '')
- arch = 'x64' if vc_x64 else 'x86'
- inno_build_dir = build_dir / ('inno-py2exe-%s' % arch)
- staging_dir = inno_build_dir / 'stage'
-
- requirements_txt = (
- source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt'
- )
-
- inno_build_dir.mkdir(parents=True, exist_ok=True)
-
- build_py2exe(
- source_dir,
- build_dir,
- python_exe,
- 'inno',
- requirements_txt,
- extra_packages=EXTRA_PACKAGES,
- extra_includes=EXTRA_INCLUDES,
- )
-
- # Purge the staging directory for every build so packaging is
- # pristine.
- if staging_dir.exists():
- print('purging %s' % staging_dir)
- shutil.rmtree(staging_dir)
-
- # Now assemble all the packaged files into the staging directory.
- stage_install(source_dir, staging_dir)
-
- # We also install some extra files.
- process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
-
- # hg.exe depends on VC9 runtime DLLs. Copy those into place.
- for f in find_legacy_vc_runtime_files(vc_x64):
- if f.name.endswith('.manifest'):
- basename = 'Microsoft.VC90.CRT.manifest'
- else:
- basename = f.name
-
- dest_path = staging_dir / basename
-
- print('copying %s to %s' % (f, dest_path))
- shutil.copyfile(f, dest_path)
-
- build_installer(
- source_dir,
- inno_build_dir,
- staging_dir,
- iscc_exe,
- version,
- arch="x64" if vc_x64 else None,
- suffix="-python2",
- )
-
-
def build_with_pyoxidizer(
source_dir: pathlib.Path,
build_dir: pathlib.Path,
--- a/contrib/packaging/hgpackaging/py2exe.py Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,248 +0,0 @@
-# py2exe.py - Functionality for performing py2exe builds.
-#
-# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-# no-check-code because Python 3 native.
-
-import os
-import pathlib
-import subprocess
-
-from .downloads import download_entry
-from .util import (
- extract_tar_to_directory,
- extract_zip_to_directory,
- process_install_rules,
- python_exe_info,
-)
-
-
-STAGING_RULES = [
- ('contrib/bash_completion', 'contrib/'),
- ('contrib/hgk', 'contrib/hgk.tcl'),
- ('contrib/hgweb.fcgi', 'contrib/'),
- ('contrib/hgweb.wsgi', 'contrib/'),
- ('contrib/logo-droplets.svg', 'contrib/'),
- ('contrib/mercurial.el', 'contrib/'),
- ('contrib/mq.el', 'contrib/'),
- ('contrib/tcsh_completion', 'contrib/'),
- ('contrib/tcsh_completion_build.sh', 'contrib/'),
- ('contrib/vim/*', 'contrib/vim/'),
- ('contrib/win32/postinstall.txt', 'ReleaseNotes.txt'),
- ('contrib/win32/ReadMe.html', 'ReadMe.html'),
- ('contrib/xml.rnc', 'contrib/'),
- ('contrib/zsh_completion', 'contrib/'),
- ('dist/hg.exe', './'),
- ('dist/lib/*.dll', 'lib/'),
- ('dist/lib/*.pyd', 'lib/'),
- ('dist/lib/library.zip', 'lib/'),
- ('dist/Microsoft.VC*.CRT.manifest', './'),
- ('dist/msvc*.dll', './'),
- ('dist/python*.dll', './'),
- ('doc/*.html', 'doc/'),
- ('doc/style.css', 'doc/'),
- ('mercurial/helptext/**/*.txt', 'helptext/'),
- ('mercurial/defaultrc/*.rc', 'defaultrc/'),
- ('mercurial/locale/**/*', 'locale/'),
- ('mercurial/templates/**/*', 'templates/'),
- ('COPYING', 'Copying.txt'),
-]
-
-# List of paths to exclude from the staging area.
-STAGING_EXCLUDES = [
- 'doc/hg-ssh.8.html',
-]
-
-
-def build_py2exe(
- source_dir: pathlib.Path,
- build_dir: pathlib.Path,
- python_exe: pathlib.Path,
- build_name: str,
- venv_requirements_txt: pathlib.Path,
- extra_packages=None,
- extra_excludes=None,
- extra_dll_excludes=None,
- extra_packages_script=None,
- extra_includes=None,
-):
- """Build Mercurial with py2exe.
-
- Build files will be placed in ``build_dir``.
-
- py2exe's setup.py doesn't use setuptools. It doesn't have modern logic
- for finding the Python 2.7 toolchain. So, we require the environment
- to already be configured with an active toolchain.
- """
- if 'VCINSTALLDIR' not in os.environ:
- raise Exception(
- 'not running from a Visual C++ build environment; '
- 'execute the "Visual C++ <version> Command Prompt" '
- 'application shortcut or a vcsvarsall.bat file'
- )
-
- # Identity x86/x64 and validate the environment matches the Python
- # architecture.
- vc_x64 = r'\x64' in os.environ['LIB']
-
- py_info = python_exe_info(python_exe)
-
- if vc_x64:
- if py_info['arch'] != '64bit':
- raise Exception(
- 'architecture mismatch: Visual C++ environment '
- 'is configured for 64-bit but Python is 32-bit'
- )
- else:
- if py_info['arch'] != '32bit':
- raise Exception(
- 'architecture mismatch: Visual C++ environment '
- 'is configured for 32-bit but Python is 64-bit'
- )
-
- if py_info['py3']:
- raise Exception('Only Python 2 is currently supported')
-
- build_dir.mkdir(exist_ok=True)
-
- gettext_pkg, gettext_entry = download_entry('gettext', build_dir)
- gettext_dep_pkg = download_entry('gettext-dep', build_dir)[0]
- virtualenv_pkg, virtualenv_entry = download_entry('virtualenv', build_dir)
- py2exe_pkg, py2exe_entry = download_entry('py2exe', build_dir)
-
- venv_path = build_dir / (
- 'venv-%s-%s' % (build_name, 'x64' if vc_x64 else 'x86')
- )
-
- gettext_root = build_dir / ('gettext-win-%s' % gettext_entry['version'])
-
- if not gettext_root.exists():
- extract_zip_to_directory(gettext_pkg, gettext_root)
- extract_zip_to_directory(gettext_dep_pkg, gettext_root)
-
- # This assumes Python 2. We don't need virtualenv on Python 3.
- virtualenv_src_path = build_dir / (
- 'virtualenv-%s' % virtualenv_entry['version']
- )
- virtualenv_py = virtualenv_src_path / 'virtualenv.py'
-
- if not virtualenv_src_path.exists():
- extract_tar_to_directory(virtualenv_pkg, build_dir)
-
- py2exe_source_path = build_dir / ('py2exe-%s' % py2exe_entry['version'])
-
- if not py2exe_source_path.exists():
- extract_zip_to_directory(py2exe_pkg, build_dir)
-
- if not venv_path.exists():
- print('creating virtualenv with dependencies')
- subprocess.run(
- [str(python_exe), str(virtualenv_py), str(venv_path)], check=True
- )
-
- venv_python = venv_path / 'Scripts' / 'python.exe'
- venv_pip = venv_path / 'Scripts' / 'pip.exe'
-
- subprocess.run(
- [str(venv_pip), 'install', '-r', str(venv_requirements_txt)], check=True
- )
-
- # Force distutils to use VC++ settings from environment, which was
- # validated above.
- env = dict(os.environ)
- env['DISTUTILS_USE_SDK'] = '1'
- env['MSSdk'] = '1'
-
- if extra_packages_script:
- more_packages = set(
- subprocess.check_output(extra_packages_script, cwd=build_dir)
- .split(b'\0')[-1]
- .strip()
- .decode('utf-8')
- .splitlines()
- )
- if more_packages:
- if not extra_packages:
- extra_packages = more_packages
- else:
- extra_packages |= more_packages
-
- if extra_packages:
- env['HG_PY2EXE_EXTRA_PACKAGES'] = ' '.join(sorted(extra_packages))
- hgext3rd_extras = sorted(
- e for e in extra_packages if e.startswith('hgext3rd.')
- )
- if hgext3rd_extras:
- env['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'] = ' '.join(hgext3rd_extras)
- if extra_includes:
- env['HG_PY2EXE_EXTRA_INCLUDES'] = ' '.join(sorted(extra_includes))
- if extra_excludes:
- env['HG_PY2EXE_EXTRA_EXCLUDES'] = ' '.join(sorted(extra_excludes))
- if extra_dll_excludes:
- env['HG_PY2EXE_EXTRA_DLL_EXCLUDES'] = ' '.join(
- sorted(extra_dll_excludes)
- )
-
- py2exe_py_path = venv_path / 'Lib' / 'site-packages' / 'py2exe'
- if not py2exe_py_path.exists():
- print('building py2exe')
- subprocess.run(
- [str(venv_python), 'setup.py', 'install'],
- cwd=py2exe_source_path,
- env=env,
- check=True,
- )
-
- # Register location of msgfmt and other binaries.
- env['PATH'] = '%s%s%s' % (
- env['PATH'],
- os.pathsep,
- str(gettext_root / 'bin'),
- )
-
- print('building Mercurial')
- subprocess.run(
- [str(venv_python), 'setup.py', 'py2exe', 'build_doc', '--html'],
- cwd=str(source_dir),
- env=env,
- check=True,
- )
-
-
-def stage_install(
- source_dir: pathlib.Path, staging_dir: pathlib.Path, lower_case=False
-):
- """Copy all files to be installed to a directory.
-
- This allows packaging to simply walk a directory tree to find source
- files.
- """
- if lower_case:
- rules = []
- for source, dest in STAGING_RULES:
- # Only lower directory names.
- if '/' in dest:
- parent, leaf = dest.rsplit('/', 1)
- dest = '%s/%s' % (parent.lower(), leaf)
- rules.append((source, dest))
- else:
- rules = STAGING_RULES
-
- process_install_rules(rules, source_dir, staging_dir)
-
- # Write out a default editor.rc file to configure notepad as the
- # default editor.
- with (staging_dir / 'defaultrc' / 'editor.rc').open(
- 'w', encoding='utf-8'
- ) as fh:
- fh.write('[ui]\neditor = notepad\n')
-
- # Purge any files we don't want to be there.
- for f in STAGING_EXCLUDES:
- p = staging_dir / f
- if p.exists():
- print('removing %s' % p)
- p.unlink()
--- a/contrib/packaging/hgpackaging/pyoxidizer.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/hgpackaging/pyoxidizer.py Thu Jun 16 15:28:54 2022 +0200
@@ -23,7 +23,6 @@
STAGING_RULES_WINDOWS = [
- ('contrib/bash_completion', 'contrib/'),
('contrib/hgk', 'contrib/hgk.tcl'),
('contrib/hgweb.fcgi', 'contrib/'),
('contrib/hgweb.wsgi', 'contrib/'),
@@ -36,7 +35,6 @@
('contrib/win32/postinstall.txt', 'ReleaseNotes.txt'),
('contrib/win32/ReadMe.html', 'ReadMe.html'),
('contrib/xml.rnc', 'contrib/'),
- ('contrib/zsh_completion', 'contrib/'),
('doc/*.html', 'doc/'),
('doc/style.css', 'doc/'),
('COPYING', 'Copying.txt'),
--- a/contrib/packaging/hgpackaging/util.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/hgpackaging/util.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,23 +7,15 @@
# no-check-code because Python 3 native.
-import distutils.version
-import getpass
import glob
import os
import pathlib
import re
import shutil
import subprocess
-import tarfile
import zipfile
-def extract_tar_to_directory(source: pathlib.Path, dest: pathlib.Path):
- with tarfile.open(source, 'r') as tf:
- tf.extractall(dest)
-
-
def extract_zip_to_directory(source: pathlib.Path, dest: pathlib.Path):
with zipfile.ZipFile(source, 'r') as zf:
zf.extractall(dest)
@@ -81,59 +73,6 @@
raise Exception("could not find vcruntime140.dll")
-def find_legacy_vc_runtime_files(x64=False):
- """Finds Visual C++ Runtime DLLs to include in distribution."""
- winsxs = pathlib.Path(os.environ['SYSTEMROOT']) / 'WinSxS'
-
- prefix = 'amd64' if x64 else 'x86'
-
- candidates = sorted(
- p
- for p in os.listdir(winsxs)
- if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix)
- )
-
- for p in candidates:
- print('found candidate VC runtime: %s' % p)
-
- # Take the newest version.
- version = candidates[-1]
-
- d = winsxs / version
-
- return [
- d / 'msvcm90.dll',
- d / 'msvcp90.dll',
- d / 'msvcr90.dll',
- winsxs / 'Manifests' / ('%s.manifest' % version),
- ]
-
-
-def windows_10_sdk_info():
- """Resolves information about the Windows 10 SDK."""
-
- base = pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Windows Kits' / '10'
-
- if not base.is_dir():
- raise Exception('unable to find Windows 10 SDK at %s' % base)
-
- # Find the latest version.
- bin_base = base / 'bin'
-
- versions = [v for v in os.listdir(bin_base) if v.startswith('10.')]
- version = sorted(versions, reverse=True)[0]
-
- bin_version = bin_base / version
-
- return {
- 'root': base,
- 'version': version,
- 'bin_root': bin_version,
- 'bin_x86': bin_version / 'x86',
- 'bin_x64': bin_version / 'x64',
- }
-
-
def normalize_windows_version(version):
"""Normalize Mercurial version string so WiX/Inno accepts it.
@@ -194,93 +133,6 @@
return '.'.join('%d' % x for x in versions[0:4])
-def find_signtool():
- """Find signtool.exe from the Windows SDK."""
- sdk = windows_10_sdk_info()
-
- for key in ('bin_x64', 'bin_x86'):
- p = sdk[key] / 'signtool.exe'
-
- if p.exists():
- return p
-
- raise Exception('could not find signtool.exe in Windows 10 SDK')
-
-
-def sign_with_signtool(
- file_path,
- description,
- subject_name=None,
- cert_path=None,
- cert_password=None,
- timestamp_url=None,
-):
- """Digitally sign a file with signtool.exe.
-
- ``file_path`` is file to sign.
- ``description`` is text that goes in the signature.
-
- The signing certificate can be specified by ``cert_path`` or
- ``subject_name``. These correspond to the ``/f`` and ``/n`` arguments
- to signtool.exe, respectively.
-
- The certificate password can be specified via ``cert_password``. If
- not provided, you will be prompted for the password.
-
- ``timestamp_url`` is the URL of a RFC 3161 timestamp server (``/tr``
- argument to signtool.exe).
- """
- if cert_path and subject_name:
- raise ValueError('cannot specify both cert_path and subject_name')
-
- while cert_path and not cert_password:
- cert_password = getpass.getpass('password for %s: ' % cert_path)
-
- args = [
- str(find_signtool()),
- 'sign',
- '/v',
- '/fd',
- 'sha256',
- '/d',
- description,
- ]
-
- if cert_path:
- args.extend(['/f', str(cert_path), '/p', cert_password])
- elif subject_name:
- args.extend(['/n', subject_name])
-
- if timestamp_url:
- args.extend(['/tr', timestamp_url, '/td', 'sha256'])
-
- args.append(str(file_path))
-
- print('signing %s' % file_path)
- subprocess.run(args, check=True)
-
-
-PRINT_PYTHON_INFO = '''
-import platform; print("%s:%s" % (platform.architecture()[0], platform.python_version()))
-'''.strip()
-
-
-def python_exe_info(python_exe: pathlib.Path):
- """Obtain information about a Python executable."""
-
- res = subprocess.check_output([str(python_exe), '-c', PRINT_PYTHON_INFO])
-
- arch, version = res.decode('utf-8').split(':')
-
- version = distutils.version.LooseVersion(version)
-
- return {
- 'arch': arch,
- 'version': version,
- 'py3': version >= distutils.version.LooseVersion('3'),
- }
-
-
def process_install_rules(
rules: list, source_dir: pathlib.Path, dest_dir: pathlib.Path
):
--- a/contrib/packaging/hgpackaging/wix.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/hgpackaging/wix.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,376 +7,16 @@
# no-check-code because Python 3 native.
-import collections
import json
import os
import pathlib
-import re
import shutil
-import subprocess
import typing
-import uuid
-import xml.dom.minidom
-from .downloads import download_entry
-from .py2exe import (
- build_py2exe,
- stage_install,
-)
from .pyoxidizer import (
build_docs_html,
- create_pyoxidizer_install_layout,
run_pyoxidizer,
)
-from .util import (
- extract_zip_to_directory,
- normalize_windows_version,
- process_install_rules,
- sign_with_signtool,
-)
-
-
-EXTRA_PACKAGES = {
- 'dulwich',
- 'distutils',
- 'keyring',
- 'pygments',
- 'win32ctypes',
-}
-
-EXTRA_INCLUDES = {
- '_curses',
- '_curses_panel',
-}
-
-EXTRA_INSTALL_RULES = [
- ('contrib/packaging/wix/COPYING.rtf', 'COPYING.rtf'),
- ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'),
-]
-
-STAGING_REMOVE_FILES = [
- # We use the RTF variant.
- 'copying.txt',
-]
-
-SHORTCUTS = {
- # hg.1.html'
- 'hg.file.5d3e441c_28d9_5542_afd0_cdd4234f12d5': {
- 'Name': 'Mercurial Command Reference',
- },
- # hgignore.5.html
- 'hg.file.5757d8e0_f207_5e10_a2ec_3ba0a062f431': {
- 'Name': 'Mercurial Ignore Files',
- },
- # hgrc.5.html
- 'hg.file.92e605fd_1d1a_5dc6_9fc0_5d2998eb8f5e': {
- 'Name': 'Mercurial Configuration Files',
- },
-}
-
-
-def find_version(source_dir: pathlib.Path):
- version_py = source_dir / 'mercurial' / '__version__.py'
-
- with version_py.open('r', encoding='utf-8') as fh:
- source = fh.read().strip()
-
- m = re.search('version = b"(.*)"', source)
- return m.group(1)
-
-
-def ensure_vc90_merge_modules(build_dir):
- x86 = (
- download_entry(
- 'vc9-crt-x86-msm',
- build_dir,
- local_name='microsoft.vcxx.crt.x86_msm.msm',
- )[0],
- download_entry(
- 'vc9-crt-x86-msm-policy',
- build_dir,
- local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm',
- )[0],
- )
-
- x64 = (
- download_entry(
- 'vc9-crt-x64-msm',
- build_dir,
- local_name='microsoft.vcxx.crt.x64_msm.msm',
- )[0],
- download_entry(
- 'vc9-crt-x64-msm-policy',
- build_dir,
- local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm',
- )[0],
- )
- return {
- 'x86': x86,
- 'x64': x64,
- }
-
-
-def run_candle(wix, cwd, wxs, source_dir, defines=None):
- args = [
- str(wix / 'candle.exe'),
- '-nologo',
- str(wxs),
- '-dSourceDir=%s' % source_dir,
- ]
-
- if defines:
- args.extend('-d%s=%s' % define for define in sorted(defines.items()))
-
- subprocess.run(args, cwd=str(cwd), check=True)
-
-
-def make_files_xml(staging_dir: pathlib.Path, is_x64) -> str:
- """Create XML string listing every file to be installed."""
-
- # We derive GUIDs from a deterministic file path identifier.
- # We shoehorn the name into something that looks like a URL because
- # the UUID namespaces are supposed to work that way (even though
- # the input data probably is never validated).
-
- doc = xml.dom.minidom.parseString(
- '<?xml version="1.0" encoding="utf-8"?>'
- '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
- '</Wix>'
- )
-
- # Assemble the install layout by directory. This makes it easier to
- # emit XML, since each directory has separate entities.
- manifest = collections.defaultdict(dict)
-
- for root, dirs, files in os.walk(staging_dir):
- dirs.sort()
-
- root = pathlib.Path(root)
- rel_dir = root.relative_to(staging_dir)
-
- for i in range(len(rel_dir.parts)):
- parent = '/'.join(rel_dir.parts[0 : i + 1])
- manifest.setdefault(parent, {})
-
- for f in sorted(files):
- full = root / f
- manifest[str(rel_dir).replace('\\', '/')][full.name] = full
-
- component_groups = collections.defaultdict(list)
-
- # Now emit a <Fragment> for each directory.
- # Each directory is composed of a <DirectoryRef> pointing to its parent
- # and defines child <Directory>'s and a <Component> with all the files.
- for dir_name, entries in sorted(manifest.items()):
- # The directory id is derived from the path. But the root directory
- # is special.
- if dir_name == '.':
- parent_directory_id = 'INSTALLDIR'
- else:
- parent_directory_id = 'hg.dir.%s' % dir_name.replace(
- '/', '.'
- ).replace('-', '_')
-
- fragment = doc.createElement('Fragment')
- directory_ref = doc.createElement('DirectoryRef')
- directory_ref.setAttribute('Id', parent_directory_id)
-
- # Add <Directory> entries for immediate children directories.
- for possible_child in sorted(manifest.keys()):
- if (
- dir_name == '.'
- and '/' not in possible_child
- and possible_child != '.'
- ):
- child_directory_id = ('hg.dir.%s' % possible_child).replace(
- '-', '_'
- )
- name = possible_child
- else:
- if not possible_child.startswith('%s/' % dir_name):
- continue
- name = possible_child[len(dir_name) + 1 :]
- if '/' in name:
- continue
-
- child_directory_id = 'hg.dir.%s' % possible_child.replace(
- '/', '.'
- ).replace('-', '_')
-
- directory = doc.createElement('Directory')
- directory.setAttribute('Id', child_directory_id)
- directory.setAttribute('Name', name)
- directory_ref.appendChild(directory)
-
- # Add <Component>s for files in this directory.
- for rel, source_path in sorted(entries.items()):
- if dir_name == '.':
- full_rel = rel
- else:
- full_rel = '%s/%s' % (dir_name, rel)
-
- component_unique_id = (
- 'https://www.mercurial-scm.org/wix-installer/0/component/%s'
- % full_rel
- )
- component_guid = uuid.uuid5(uuid.NAMESPACE_URL, component_unique_id)
- component_id = 'hg.component.%s' % str(component_guid).replace(
- '-', '_'
- )
-
- component = doc.createElement('Component')
-
- component.setAttribute('Id', component_id)
- component.setAttribute('Guid', str(component_guid).upper())
- component.setAttribute('Win64', 'yes' if is_x64 else 'no')
-
- # Assign this component to a top-level group.
- if dir_name == '.':
- component_groups['ROOT'].append(component_id)
- elif '/' in dir_name:
- component_groups[dir_name[0 : dir_name.index('/')]].append(
- component_id
- )
- else:
- component_groups[dir_name].append(component_id)
-
- unique_id = (
- 'https://www.mercurial-scm.org/wix-installer/0/%s' % full_rel
- )
- file_guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id)
-
- # IDs have length limits. So use GUID to derive them.
- file_guid_normalized = str(file_guid).replace('-', '_')
- file_id = 'hg.file.%s' % file_guid_normalized
-
- file_element = doc.createElement('File')
- file_element.setAttribute('Id', file_id)
- file_element.setAttribute('Source', str(source_path))
- file_element.setAttribute('KeyPath', 'yes')
- file_element.setAttribute('ReadOnly', 'yes')
-
- component.appendChild(file_element)
- directory_ref.appendChild(component)
-
- fragment.appendChild(directory_ref)
- doc.documentElement.appendChild(fragment)
-
- for group, component_ids in sorted(component_groups.items()):
- fragment = doc.createElement('Fragment')
- component_group = doc.createElement('ComponentGroup')
- component_group.setAttribute('Id', 'hg.group.%s' % group)
-
- for component_id in component_ids:
- component_ref = doc.createElement('ComponentRef')
- component_ref.setAttribute('Id', component_id)
- component_group.appendChild(component_ref)
-
- fragment.appendChild(component_group)
- doc.documentElement.appendChild(fragment)
-
- # Add <Shortcut> to files that have it defined.
- for file_id, metadata in sorted(SHORTCUTS.items()):
- els = doc.getElementsByTagName('File')
- els = [el for el in els if el.getAttribute('Id') == file_id]
-
- if not els:
- raise Exception('could not find File[Id=%s]' % file_id)
-
- for el in els:
- shortcut = doc.createElement('Shortcut')
- shortcut.setAttribute('Id', 'hg.shortcut.%s' % file_id)
- shortcut.setAttribute('Directory', 'ProgramMenuDir')
- shortcut.setAttribute('Icon', 'hgIcon.ico')
- shortcut.setAttribute('IconIndex', '0')
- shortcut.setAttribute('Advertise', 'yes')
- for k, v in sorted(metadata.items()):
- shortcut.setAttribute(k, v)
-
- el.appendChild(shortcut)
-
- return doc.toprettyxml()
-
-
-def build_installer_py2exe(
- source_dir: pathlib.Path,
- python_exe: pathlib.Path,
- msi_name='mercurial',
- version=None,
- extra_packages_script=None,
- extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
- extra_features: typing.Optional[typing.List[str]] = None,
- signing_info: typing.Optional[typing.Dict[str, str]] = None,
-):
- """Build a WiX MSI installer using py2exe.
-
- ``source_dir`` is the path to the Mercurial source tree to use.
- ``arch`` is the target architecture. either ``x86`` or ``x64``.
- ``python_exe`` is the path to the Python executable to use/bundle.
- ``version`` is the Mercurial version string. If not defined,
- ``mercurial/__version__.py`` will be consulted.
- ``extra_packages_script`` is a command to be run to inject extra packages
- into the py2exe binary. It should stage packages into the virtualenv and
- print a null byte followed by a newline-separated list of packages that
- should be included in the exe.
- ``extra_wxs`` is a dict of {wxs_name: working_dir_for_wxs_build}.
- ``extra_features`` is a list of additional named Features to include in
- the build. These must match Feature names in one of the wxs scripts.
- """
- arch = 'x64' if r'\x64' in os.environ.get('LIB', '') else 'x86'
-
- hg_build_dir = source_dir / 'build'
-
- requirements_txt = (
- source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt'
- )
-
- build_py2exe(
- source_dir,
- hg_build_dir,
- python_exe,
- 'wix',
- requirements_txt,
- extra_packages=EXTRA_PACKAGES,
- extra_packages_script=extra_packages_script,
- extra_includes=EXTRA_INCLUDES,
- )
-
- build_dir = hg_build_dir / ('wix-%s' % arch)
- staging_dir = build_dir / 'stage'
-
- build_dir.mkdir(exist_ok=True)
-
- # Purge the staging directory for every build so packaging is pristine.
- if staging_dir.exists():
- print('purging %s' % staging_dir)
- shutil.rmtree(staging_dir)
-
- stage_install(source_dir, staging_dir, lower_case=True)
-
- # We also install some extra files.
- process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
-
- # And remove some files we don't want.
- for f in STAGING_REMOVE_FILES:
- p = staging_dir / f
- if p.exists():
- print('removing %s' % p)
- p.unlink()
-
- return run_wix_packaging(
- source_dir,
- build_dir,
- staging_dir,
- arch,
- version=version,
- python2=True,
- msi_name=msi_name,
- suffix="-python2",
- extra_wxs=extra_wxs,
- extra_features=extra_features,
- signing_info=signing_info,
- )
def build_installer_pyoxidizer(
@@ -454,133 +94,3 @@
return {
"msi_path": dist_path,
}
-
-
-def run_wix_packaging(
- source_dir: pathlib.Path,
- build_dir: pathlib.Path,
- staging_dir: pathlib.Path,
- arch: str,
- version: str,
- python2: bool,
- msi_name: typing.Optional[str] = "mercurial",
- suffix: str = "",
- extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
- extra_features: typing.Optional[typing.List[str]] = None,
- signing_info: typing.Optional[typing.Dict[str, str]] = None,
-):
- """Invokes WiX to package up a built Mercurial.
-
- ``signing_info`` is a dict defining properties to facilitate signing the
- installer. Recognized keys include ``name``, ``subject_name``,
- ``cert_path``, ``cert_password``, and ``timestamp_url``. If populated,
- we will sign both the hg.exe and the .msi using the signing credentials
- specified.
- """
-
- orig_version = version or find_version(source_dir)
- version = normalize_windows_version(orig_version)
- print('using version string: %s' % version)
- if version != orig_version:
- print('(normalized from: %s)' % orig_version)
-
- if signing_info:
- sign_with_signtool(
- staging_dir / "hg.exe",
- "%s %s" % (signing_info["name"], version),
- subject_name=signing_info["subject_name"],
- cert_path=signing_info["cert_path"],
- cert_password=signing_info["cert_password"],
- timestamp_url=signing_info["timestamp_url"],
- )
-
- wix_dir = source_dir / 'contrib' / 'packaging' / 'wix'
-
- wix_pkg, wix_entry = download_entry('wix', build_dir)
- wix_path = build_dir / ('wix-%s' % wix_entry['version'])
-
- if not wix_path.exists():
- extract_zip_to_directory(wix_pkg, wix_path)
-
- if python2:
- ensure_vc90_merge_modules(build_dir)
-
- source_build_rel = pathlib.Path(os.path.relpath(source_dir, build_dir))
-
- defines = {'Platform': arch}
-
- # Derive a .wxs file with the staged files.
- manifest_wxs = build_dir / 'stage.wxs'
- with manifest_wxs.open('w', encoding='utf-8') as fh:
- fh.write(make_files_xml(staging_dir, is_x64=arch == 'x64'))
-
- run_candle(wix_path, build_dir, manifest_wxs, staging_dir, defines=defines)
-
- for source, rel_path in sorted((extra_wxs or {}).items()):
- run_candle(wix_path, build_dir, source, rel_path, defines=defines)
-
- source = wix_dir / 'mercurial.wxs'
- defines['Version'] = version
- defines['Comments'] = 'Installs Mercurial version %s' % version
-
- if python2:
- defines["PythonVersion"] = "2"
- defines['VCRedistSrcDir'] = str(build_dir)
- else:
- defines["PythonVersion"] = "3"
-
- if (staging_dir / "lib").exists():
- defines["MercurialHasLib"] = "1"
-
- if extra_features:
- assert all(';' not in f for f in extra_features)
- defines['MercurialExtraFeatures'] = ';'.join(extra_features)
-
- run_candle(wix_path, build_dir, source, source_build_rel, defines=defines)
-
- msi_path = (
- source_dir
- / 'dist'
- / ('%s-%s-%s%s.msi' % (msi_name, orig_version, arch, suffix))
- )
-
- args = [
- str(wix_path / 'light.exe'),
- '-nologo',
- '-ext',
- 'WixUIExtension',
- '-sw1076',
- '-spdb',
- '-o',
- str(msi_path),
- ]
-
- for source, rel_path in sorted((extra_wxs or {}).items()):
- assert source.endswith('.wxs')
- source = os.path.basename(source)
- args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
-
- args.extend(
- [
- str(build_dir / 'stage.wixobj'),
- str(build_dir / 'mercurial.wixobj'),
- ]
- )
-
- subprocess.run(args, cwd=str(source_dir), check=True)
-
- print('%s created' % msi_path)
-
- if signing_info:
- sign_with_signtool(
- msi_path,
- "%s %s" % (signing_info["name"], version),
- subject_name=signing_info["subject_name"],
- cert_path=signing_info["cert_path"],
- cert_password=signing_info["cert_password"],
- timestamp_url=signing_info["timestamp_url"],
- )
-
- return {
- 'msi_path': msi_path,
- }
--- a/contrib/packaging/inno/readme.rst Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/inno/readme.rst Thu Jun 16 15:28:54 2022 +0200
@@ -5,52 +5,35 @@
The following system dependencies must be installed:
-* Python 2.7 (download from https://www.python.org/downloads/)
-* Microsoft Visual C++ Compiler for Python 2.7
- (https://www.microsoft.com/en-us/download/details.aspx?id=44266)
* Inno Setup (http://jrsoftware.org/isdl.php) version 5.4 or newer.
Be sure to install the optional Inno Setup Preprocessor feature,
which is required.
-* Python 3.5+ (to run the ``packaging.py`` script)
+* Python 3.6+ (to run the ``packaging.py`` script)
Building
========
-The ``packaging.py`` script automates the process of producing an
-Inno installer. It manages fetching and configuring the
-non-system dependencies (such as py2exe, gettext, and various
-Python packages).
-
-The script requires an activated ``Visual C++ 2008`` command prompt.
-A shortcut to such a prompt was installed with ``Microsoft Visual C++
-Compiler for Python 2.7``. From your Start Menu, look for
-``Microsoft Visual C++ Compiler Package for Python 2.7`` then launch
-either ``Visual C++ 2008 32-bit Command Prompt`` or
-``Visual C++ 2008 64-bit Command Prompt``.
+The ``packaging.py`` script automates the process of producing an Inno
+installer. It manages fetching and configuring non-system dependencies
+(such as gettext, and various Python packages). It can be run from a
+basic cmd.exe Window (i.e. activating the MSBuildTools environment is
+not required).
From the prompt, change to the Mercurial source directory. e.g.
``cd c:\src\hg``.
-Next, invoke ``packaging.py`` to produce an Inno installer. You will
-need to supply the path to the Python interpreter to use.::
+Next, invoke ``packaging.py`` to produce an Inno installer.::
$ py -3 contrib\packaging\packaging.py \
- inno --python c:\python27\python.exe
-
-.. note::
-
- The script validates that the Visual C++ environment is
- active and that the architecture of the specified Python
- interpreter matches the Visual C++ environment and errors
- if not.
+ inno --pyoxidizer-target x86_64-pc-windows-msvc
If everything runs as intended, dependencies will be fetched and
configured into the ``build`` sub-directory, Mercurial will be built,
-and an installer placed in the ``dist`` sub-directory. The final
-line of output should print the name of the generated installer.
+and an installer placed in the ``dist`` sub-directory. The final line
+of output should print the name of the generated installer.
-Additional options may be configured. Run
-``packaging.py inno --help`` to see a list of program flags.
+Additional options may be configured. Run ``packaging.py inno --help``
+to see a list of program flags.
MinGW
=====
--- a/contrib/packaging/mercurial.spec Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/mercurial.spec Thu Jun 16 15:28:54 2022 +0200
@@ -126,14 +126,6 @@
install -m 755 contrib/hgk $RPM_BUILD_ROOT%{_bindir}/
install -m 755 contrib/hg-ssh $RPM_BUILD_ROOT%{_bindir}/
-bash_completion_dir=$RPM_BUILD_ROOT%{_sysconfdir}/bash_completion.d
-mkdir -p $bash_completion_dir
-install -m 644 contrib/bash_completion $bash_completion_dir/mercurial.sh
-
-zsh_completion_dir=$RPM_BUILD_ROOT%{_datadir}/zsh/site-functions
-mkdir -p $zsh_completion_dir
-install -m 644 contrib/zsh_completion $zsh_completion_dir/_mercurial
-
mkdir -p $RPM_BUILD_ROOT%{emacs_lispdir}
install -m 644 contrib/mercurial.el $RPM_BUILD_ROOT%{emacs_lispdir}/
install -m 644 contrib/mq.el $RPM_BUILD_ROOT%{emacs_lispdir}/
@@ -148,9 +140,12 @@
%doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html *.cgi contrib/*.fcgi contrib/*.wsgi
%doc %attr(644,root,root) %{_mandir}/man?/hg*
%doc %attr(644,root,root) contrib/*.svg
+%dir %{_datadir}/bash-completion/
+%dir %{_datadir}/bash-completion/completions
+%{_datadir}/bash-completion/completions/hg
%dir %{_datadir}/zsh/
%dir %{_datadir}/zsh/site-functions/
-%{_datadir}/zsh/site-functions/_mercurial
+%{_datadir}/zsh/site-functions/_hg
%dir %{_datadir}/emacs/site-lisp/
%{_datadir}/emacs/site-lisp/mercurial.el
%{_datadir}/emacs/site-lisp/mq.el
@@ -158,8 +153,6 @@
%{_bindir}/chg
%{_bindir}/hgk
%{_bindir}/hg-ssh
-%dir %{_sysconfdir}/bash_completion.d/
-%config(noreplace) %{_sysconfdir}/bash_completion.d/mercurial.sh
%dir %{_sysconfdir}/mercurial
%dir %{_sysconfdir}/mercurial/hgrc.d
%if "%{?withpython}"
--- a/contrib/packaging/requirements-windows-py2.txt Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,59 +0,0 @@
-#
-# This file is autogenerated by pip-compile
-# To update, run:
-#
-# pip-compile --generate-hashes --output-file=contrib/packaging/requirements-windows-py2.txt contrib/packaging/requirements-windows.txt.in
-#
-certifi==2021.5.30 \
- --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \
- --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 \
- # via dulwich
-configparser==4.0.2 \
- --hash=sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c \
- --hash=sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df \
- # via entrypoints
-docutils==0.16 \
- --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
- --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
- # via -r contrib/packaging/requirements-windows.txt.in
-dulwich==0.19.16 ; python_version <= "2.7" \
- --hash=sha256:10699277c6268d0c16febe141a5b1c1a6e9744f3144c2d2de1706f4b1adafe63 \
- --hash=sha256:267160904e9a1cb6c248c5efc53597a35d038ecc6f60bdc4546b3053bed11982 \
- --hash=sha256:4e3aba5e4844e7c700721c1fc696987ea820ee3528a03604dc4e74eff4196826 \
- --hash=sha256:60bb2c2c92f5025c1b53a556304008f0f624c98ae36f22d870e056b2d4236c11 \
- --hash=sha256:dddae02d372fc3b5cfb0046d0f62246ef281fa0c088df7601ab5916607add94b \
- --hash=sha256:f00d132082b8fcc2eb0d722abc773d4aeb5558c1475d7edd1f0f571146c29db9 \
- --hash=sha256:f74561c448bfb6f04c07de731c1181ae4280017f759b0bb04fa5770aa84ca850 \
- # via -r contrib/packaging/requirements-windows.txt.in
-entrypoints==0.3 \
- --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \
- --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \
- # via keyring
-keyring==18.0.1 \
- --hash=sha256:67d6cc0132bd77922725fae9f18366bb314fd8f95ff4d323a4df41890a96a838 \
- --hash=sha256:7b29ebfcf8678c4da531b2478a912eea01e80007e5ddca9ee0c7038cb3489ec6 \
- # via -r contrib/packaging/requirements-windows.txt.in
-pygments==2.5.2 \
- --hash=sha256:2a3fe295e54a20164a9df49c75fa58526d3be48e14aceba6d6b1e8ac0bfd6f1b \
- --hash=sha256:98c8aa5a9f778fcd1026a17361ddaf7330d1b7c62ae97c3bb0ae73e0b9b6b0fe \
- # via -r contrib/packaging/requirements-windows.txt.in
-pywin32-ctypes==0.2.0 \
- --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
- --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 \
- # via -r contrib/packaging/requirements-windows.txt.in, keyring
-urllib3==1.25.11 \
- --hash=sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2 \
- --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e \
- # via dulwich
-windows-curses==2.1.0 \
- --hash=sha256:261fde5680d1ce4ce116908996b9a3cfb0ffb03ea68d42240f62b56a9fa6af2c \
- --hash=sha256:66034dc9a705d87308cc9ea90836f4ee60008a1d5e2c1d34ace627f60268158b \
- --hash=sha256:669caad3ae16faf2d201d7ab3b8af418a2fd074d8a39d60ca26f3acb34b6afe5 \
- --hash=sha256:73bd3eebccfda55330783f165151de115bfa238d1332f0b2e224b550d6187840 \
- --hash=sha256:89a6d973f88cfe49b41ea80164dcbec209d296e0cec34a02002578b0bf464a64 \
- --hash=sha256:8ba7c000d7ffa5452bbd0966b96e69261e4f117ebe510aeb8771a9650197b7f0 \
- --hash=sha256:97084c6b37b1534f6a28a514d521dfae402f77dcbad42b14ee32e8d5bdc13648 \
- --hash=sha256:9e474a181f96d60429a4766145628264e60b72e7715876f9135aeb2e842f9433 \
- --hash=sha256:cfe64c30807c146ef8d094412f90f2a2c81ad6aefff3ebfe8e37aabe2f801303 \
- --hash=sha256:ff8c67f74b88944d99fa9d22971c05c335bc74f149120f0a69340c2c3a595497 \
- # via -r contrib/packaging/requirements-windows.txt.in
--- a/contrib/packaging/requirements-windows.txt.in Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/requirements-windows.txt.in Thu Jun 16 15:28:54 2022 +0200
@@ -1,13 +1,11 @@
docutils
-# Pinned to an old version because 0.20 drops Python 3 compatibility.
-dulwich < 0.20 ; python_version <= '2.7'
-dulwich ; python_version >= '3'
+dulwich
# Needed by the release note tooling
fuzzywuzzy
keyring
-pygit2 ; python_version >= '3'
+pygit2
pygments
# Needed by the phabricator tests
--- a/contrib/packaging/wix/mercurial.wxs Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/wix/mercurial.wxs Thu Jun 16 15:28:54 2022 +0200
@@ -33,8 +33,8 @@
CompressionLevel='high' />
<Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" />
- <Condition Message='Mercurial MSI installers require Windows XP or higher'>
- VersionNT >= 501
+ <Condition Message='Mercurial MSI installers require Windows 8.1 or higher'>
+ VersionNT >= 603
</Condition>
<Property Id="INSTALLDIR">
@@ -79,23 +79,6 @@
</Component>
</Directory>
</Directory>
-
- <!-- Install VCRedist merge modules on Python 2. On Python 3,
- vcruntimeXXX.dll is part of the install layout and gets picked up
- as a regular file. -->
- <?if $(var.PythonVersion) = "2" ?>
- <?if $(var.Platform) = "x86" ?>
- <Merge Id='VCRuntime' DiskId='1' Language='1033'
- SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x86_msm.msm' />
- <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033'
- SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x86_msm.msm' />
- <?else?>
- <Merge Id='VCRuntime' DiskId='1' Language='1033'
- SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x64_msm.msm' />
- <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033'
- SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x64_msm.msm' />
- <?endif?>
- <?endif?>
</Directory>
<Feature Id='Complete' Title='Mercurial' Description='The complete package'
@@ -111,10 +94,6 @@
<ComponentGroupRef Id="hg.group.lib" />
<?endif?>
<ComponentGroupRef Id="hg.group.templates" />
- <?if $(var.PythonVersion) = "2" ?>
- <MergeRef Id='VCRuntime' />
- <MergeRef Id='VCRuntimePolicy' />
- <?endif?>
</Feature>
<?ifdef MercurialExtraFeatures?>
<?foreach EXTRAFEAT in $(var.MercurialExtraFeatures)?>
--- a/contrib/packaging/wix/readme.rst Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/packaging/wix/readme.rst Thu Jun 16 15:28:54 2022 +0200
@@ -12,50 +12,36 @@
Requirements
============
-Building the WiX installers requires a Windows machine. The following
-dependencies must be installed:
+Building the WiX installer requires a Windows machine.
-* Python 2.7 (download from https://www.python.org/downloads/)
-* Microsoft Visual C++ Compiler for Python 2.7
- (https://www.microsoft.com/en-us/download/details.aspx?id=44266)
-* Python 3.5+ (to run the ``packaging.py`` script)
+The following system dependencies must be installed:
+
+* Python 3.6+ (to run the ``packaging.py`` script)
Building
========
The ``packaging.py`` script automates the process of producing an MSI
installer. It manages fetching and configuring non-system dependencies
-(such as py2exe, gettext, and various Python packages).
-
-The script requires an activated ``Visual C++ 2008`` command prompt.
-A shortcut to such a prompt was installed with ``Microsoft Visual
-C++ Compiler for Python 2.7``. From your Start Menu, look for
-``Microsoft Visual C++ Compiler Package for Python 2.7`` then
-launch either ``Visual C++ 2008 32-bit Command Prompt`` or
-``Visual C++ 2008 64-bit Command Prompt``.
+(such as gettext, and various Python packages). It can be run from a
+basic cmd.exe Window (i.e. activating the MSBuildTools environment is
+not required).
From the prompt, change to the Mercurial source directory. e.g.
``cd c:\src\hg``.
-Next, invoke ``packaging.py`` to produce an MSI installer. You will need
-to supply the path to the Python interpreter to use.::
+Next, invoke ``packaging.py`` to produce an MSI installer.::
$ py -3 contrib\packaging\packaging.py \
- wix --python c:\python27\python.exe
-
-.. note::
-
- The script validates that the Visual C++ environment is active and
- that the architecture of the specified Python interpreter matches the
- Visual C++ environment. An error is raised otherwise.
+ wix --pyoxidizer-target x86_64-pc-windows-msvc
If everything runs as intended, dependencies will be fetched and
configured into the ``build`` sub-directory, Mercurial will be built,
and an installer placed in the ``dist`` sub-directory. The final line
of output should print the name of the generated installer.
-Additional options may be configured. Run ``packaging.py wix --help`` to
-see a list of program flags.
+Additional options may be configured. Run ``packaging.py wix --help``
+to see a list of program flags.
Relationship to TortoiseHG
==========================
@@ -63,7 +49,7 @@
TortoiseHG uses the WiX files in this directory.
The code for building TortoiseHG installers lives at
-https://bitbucket.org/tortoisehg/thg-winbuild and is maintained by
+https://foss.heptapod.net/mercurial/tortoisehg/thg-winbuild and is maintained by
Steve Borho (steve@borho.org).
When changing behavior of the WiX installer, be sure to notify
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/perf-utils/compare-discovery-case Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,183 @@
+#!/usr/bin/env python3
+# compare various algorithm variants for a given case
+#
+# compare-discovery-case REPO LOCAL_CASE REMOTE_CASE
+#
+# The description for the case input uses the same format as the output of
+# search-discovery-case
+
+import json
+import os
+import subprocess
+import sys
+
+this_script = os.path.abspath(sys.argv[0])
+script_name = os.path.basename(this_script)
+this_dir = os.path.dirname(this_script)
+hg_dir = os.path.join(this_dir, '..', '..')
+HG_REPO = os.path.normpath(hg_dir)
+HG_BIN = os.path.join(HG_REPO, 'hg')
+
+
+SUBSET_PATH = os.path.join(HG_REPO, 'contrib', 'perf-utils', 'subsetmaker.py')
+
+CMD_BASE = (
+ HG_BIN,
+ 'debugdiscovery',
+ '--template',
+ 'json',
+ '--config',
+ 'extensions.subset=%s' % SUBSET_PATH,
+)
+
+# --old
+# --nonheads
+#
+# devel.discovery.exchange-heads=True
+# devel.discovery.grow-sample=True
+# devel.discovery.grow-sample.dynamic=True
+
+VARIANTS = {
+ 'tree-discovery': ('--old',),
+ 'set-discovery-basic': (
+ '--config',
+ 'devel.discovery.exchange-heads=no',
+ '--config',
+ 'devel.discovery.grow-sample=no',
+ '--config',
+ 'devel.discovery.grow-sample.dynamic=no',
+ '--config',
+ 'devel.discovery.randomize=yes',
+ ),
+ 'set-discovery-heads': (
+ '--config',
+ 'devel.discovery.exchange-heads=yes',
+ '--config',
+ 'devel.discovery.grow-sample=no',
+ '--config',
+ 'devel.discovery.grow-sample.dynamic=no',
+ '--config',
+ 'devel.discovery.randomize=yes',
+ ),
+ 'set-discovery-grow-sample': (
+ '--config',
+ 'devel.discovery.exchange-heads=yes',
+ '--config',
+ 'devel.discovery.grow-sample=yes',
+ '--config',
+ 'devel.discovery.grow-sample.dynamic=no',
+ '--config',
+ 'devel.discovery.randomize=yes',
+ ),
+ 'set-discovery-dynamic-sample': (
+ '--config',
+ 'devel.discovery.exchange-heads=yes',
+ '--config',
+ 'devel.discovery.grow-sample=yes',
+ '--config',
+ 'devel.discovery.grow-sample.dynamic=yes',
+ '--config',
+ 'devel.discovery.randomize=yes',
+ ),
+ 'set-discovery-default': (
+ '--config',
+ 'devel.discovery.randomize=yes',
+ ),
+}
+
+VARIANTS_KEYS = [
+ 'tree-discovery',
+ 'set-discovery-basic',
+ 'set-discovery-heads',
+ 'set-discovery-grow-sample',
+ 'set-discovery-dynamic-sample',
+ 'set-discovery-default',
+]
+
+assert set(VARIANTS.keys()) == set(VARIANTS_KEYS)
+
+
+def format_case(case):
+ return '-'.join(str(s) for s in case)
+
+
+def to_revsets(case):
+ t = case[0]
+ if t == 'scratch':
+ return 'not scratch(all(), %d, "%d")' % (case[1], case[2])
+ elif t == 'randomantichain':
+ return '::randomantichain(all(), "%d")' % case[1]
+ elif t == 'rev':
+ return '::%d' % case[1]
+ else:
+ assert False
+
+
+def compare(repo, local_case, remote_case):
+ case = (repo, local_case, remote_case)
+ for variant in VARIANTS_KEYS:
+ res = process(case, VARIANTS[variant])
+ revs = res["nb-revs"]
+ local_heads = res["nb-head-local"]
+ common_heads = res["nb-common-heads"]
+ roundtrips = res["total-roundtrips"]
+ queries = res["total-queries"]
+ if 'tree-discovery' in variant:
+ print(
+ repo,
+ format_case(local_case),
+ format_case(remote_case),
+ variant,
+ roundtrips,
+ queries,
+ revs,
+ local_heads,
+ common_heads,
+ )
+ else:
+ undecided_common = res["nb-ini_und-common"]
+ undecided_missing = res["nb-ini_und-missing"]
+ undecided = undecided_common + undecided_missing
+ print(
+ repo,
+ format_case(local_case),
+ format_case(remote_case),
+ variant,
+ roundtrips,
+ queries,
+ revs,
+ local_heads,
+ common_heads,
+ undecided,
+ undecided_common,
+ undecided_missing,
+ )
+ return 0
+
+
+def process(case, variant):
+ (repo, left, right) = case
+ cmd = list(CMD_BASE)
+ cmd.append('-R')
+ cmd.append(repo)
+ cmd.append('--local-as-revs')
+ cmd.append(to_revsets(left))
+ cmd.append('--remote-as-revs')
+ cmd.append(to_revsets(right))
+ cmd.extend(variant)
+ s = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ out, err = s.communicate()
+ return json.loads(out)[0]
+
+
+if __name__ == '__main__':
+ if len(sys.argv) != 4:
+ usage = f'USAGE: {script_name} REPO LOCAL_CASE REMOTE_CASE'
+ print(usage, file=sys.stderr)
+ sys.exit(128)
+ repo = sys.argv[1]
+ local_case = sys.argv[2].split('-')
+ local_case = (local_case[0],) + tuple(int(x) for x in local_case[1:])
+ remote_case = sys.argv[3].split('-')
+ remote_case = (remote_case[0],) + tuple(int(x) for x in remote_case[1:])
+ sys.exit(compare(repo, local_case, remote_case))
--- a/contrib/perf-utils/perf-revlog-write-plot.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/perf-utils/perf-revlog-write-plot.py Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,6 @@
# various plot related to write performance in a revlog
#
# usage: perf-revlog-write-plot.py details.json
-from __future__ import absolute_import, print_function
import json
import re
--- a/contrib/perf-utils/search-discovery-case Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/perf-utils/search-discovery-case Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This use a subsetmaker extension (next to this script) to generate a steam of
# random discovery instance. When interesting case are discovered, information
# about them are print on the stdout.
-from __future__ import print_function
import json
import os
@@ -143,18 +142,35 @@
Ideally, we would make this configurable, but this is not a focus for now
- return None or (round-trip, undecided-common, undecided-missing)
+ return None or (
+ round-trip,
+ undecided-common,
+ undecided-missing,
+ total-revs,
+ common-revs,
+ missing-revs,
+ )
"""
roundtrips = res["total-roundtrips"]
if roundtrips <= 1:
return None
+ total_revs = res["nb-revs"]
+ common_revs = res["nb-revs-common"]
+ missing_revs = res["nb-revs-missing"]
undecided_common = res["nb-ini_und-common"]
undecided_missing = res["nb-ini_und-missing"]
if undecided_common == 0:
return None
if undecided_missing == 0:
return None
- return (roundtrips, undecided_common, undecided_missing)
+ return (
+ roundtrips,
+ undecided_common,
+ undecided_missing,
+ total_revs,
+ common_revs,
+ missing_revs,
+ )
def end(*args, **kwargs):
--- a/contrib/perf-utils/subsetmaker.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/perf-utils/subsetmaker.py Thu Jun 16 15:28:54 2022 +0200
@@ -15,6 +15,10 @@
smartset,
)
+import sortedcontainers
+
+SortedSet = sortedcontainers.SortedSet
+
revsetpredicate = registrar.revsetpredicate()
@@ -78,7 +82,7 @@
n = revsetlang.getinteger(n, _(b"scratch expects a number"))
selected = set()
- heads = set()
+ heads = SortedSet()
children_count = collections.defaultdict(lambda: 0)
parents = repo.changelog._uncheckedparentrevs
@@ -102,7 +106,7 @@
for x in range(n):
if not heads:
break
- pick = rand.choice(list(heads))
+ pick = rand.choice(heads)
heads.remove(pick)
assert pick not in selected
selected.add(pick)
@@ -155,16 +159,44 @@
else:
assert False
- selected = set()
+ cl = repo.changelog
- baseset = revset.getset(repo, smartset.fullreposet(repo), x)
- undecided = baseset
+ # We already have cheap access to the parent mapping.
+ # However, we need to build a mapping of the children mapping
+ parents = repo.changelog._uncheckedparentrevs
+ children_map = collections.defaultdict(list)
+ for r in cl:
+ p1, p2 = parents(r)
+ if p1 >= 0:
+ children_map[p1].append(r)
+ if p2 >= 0:
+ children_map[p2].append(r)
+ children = children_map.__getitem__
+
+ selected = set()
+ undecided = SortedSet(cl)
while undecided:
- pick = rand.choice(list(undecided))
+ # while there is "undecided content", we pick a random changeset X
+ # and we remove anything in `::X + X::` from undecided content
+ pick = rand.choice(undecided)
selected.add(pick)
- undecided = repo.revs(
- '%ld and not (::%ld or %ld::head())', baseset, selected, selected
- )
+ undecided.remove(pick)
+
+ ancestors = set(p for p in parents(pick) if p in undecided)
+ descendants = set(c for c in children(pick) if c in undecided)
+
+ while ancestors:
+ current = ancestors.pop()
+ undecided.remove(current)
+ for p in parents(current):
+ if p in undecided:
+ ancestors.add(p)
+ while descendants:
+ current = descendants.pop()
+ undecided.remove(current)
+ for p in children(current):
+ if p in undecided:
+ ancestors.add(p)
return smartset.baseset(selected) & subset
--- a/contrib/perf.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/perf.py Thu Jun 16 15:28:54 2022 +0200
@@ -54,7 +54,6 @@
# - make perf command for recent feature work correctly with early
# Mercurial
-from __future__ import absolute_import
import contextlib
import functools
import gc
@@ -370,7 +369,7 @@
return len
-class noop(object):
+class noop:
"""dummy context manager"""
def __enter__(self):
@@ -414,7 +413,7 @@
# available since 2.2 (or ae5f92e154d3)
from mercurial import node
- class defaultformatter(object):
+ class defaultformatter:
"""Minimized composition of baseformatter and plainformatter"""
def __init__(self, ui, topic, opts):
@@ -653,7 +652,7 @@
origvalue = getattr(obj, _sysstr(name))
- class attrutil(object):
+ class attrutil:
def set(self, newvalue):
setattr(obj, _sysstr(name), newvalue)
@@ -2943,7 +2942,7 @@
fm.end()
-class _faketr(object):
+class _faketr:
def add(s, x, y, z=None):
return None
--- a/contrib/phab-clean.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/phab-clean.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
# A small script to automatically reject idle Diffs
#
# you need to set the PHABBOT_USER and PHABBOT_TOKEN environment variable for authentication
-from __future__ import absolute_import, print_function
import datetime
import os
--- a/contrib/python-hook-examples.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/python-hook-examples.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
'''
Examples of useful python hooks for Mercurial.
'''
-from __future__ import absolute_import
from mercurial import (
patch,
util,
--- a/contrib/python-zstandard/make_cffi.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/python-zstandard/make_cffi.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
-from __future__ import absolute_import
import cffi
import distutils.ccompiler
--- a/contrib/python-zstandard/setup.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/python-zstandard/setup.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
-from __future__ import print_function
from distutils.version import LooseVersion
import os
--- a/contrib/python-zstandard/tests/test_module_attributes.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/python-zstandard/tests/test_module_attributes.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import unicode_literals
-
import unittest
import zstandard as zstd
--- a/contrib/python-zstandard/zstandard/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/python-zstandard/zstandard/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
"""Python interface to the Zstandard (zstd) compression library."""
-from __future__ import absolute_import, unicode_literals
# This module serves 2 roles:
#
--- a/contrib/python-zstandard/zstandard/cffi.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/python-zstandard/zstandard/cffi.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
"""Python interface to the Zstandard (zstd) compression library."""
-from __future__ import absolute_import, unicode_literals
# This should match what the C extension exports.
__all__ = [
--- a/contrib/python3-ratchet.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/python3-ratchet.py Thu Jun 16 15:28:54 2022 +0200
@@ -15,8 +15,6 @@
$ python3 ../contrib/python3-ratchet.py \
> --working-tests=../contrib/python3-whitelist
"""
-from __future__ import print_function
-from __future__ import absolute_import
import argparse
import json
--- a/contrib/revsetbenchmarks.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/revsetbenchmarks.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
#
# call with --help for details
-from __future__ import absolute_import, print_function
import math
import optparse # cannot use argparse, python 2.7 only
import os
--- a/contrib/showstack.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/showstack.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
r"""dump stack trace when receiving SIGQUIT (Ctrl-\) or SIGINFO (Ctrl-T on BSDs)
"""
-from __future__ import absolute_import, print_function
import signal
import sys
import traceback
--- a/contrib/simplemerge Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/simplemerge Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
-from __future__ import absolute_import
import getopt
import sys
--- a/contrib/synthrepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/synthrepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -36,7 +36,6 @@
- Symlinks and binary files are ignored
'''
-from __future__ import absolute_import
import bisect
import collections
import itertools
@@ -213,7 +212,7 @@
for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
if isbin:
continue
- added = sum(pycompat.itervalues(lineadd), 0)
+ added = sum(lineadd.values(), 0)
if mar == 'm':
if added and lineremove:
lineschanged[
--- a/contrib/testparseutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/testparseutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import abc
import re
@@ -80,7 +79,7 @@
####################
-class embeddedmatcher(object): # pytype: disable=ignored-metaclass
+class embeddedmatcher: # pytype: disable=ignored-metaclass
"""Base class to detect embedded code fragments in *.t test script"""
__metaclass__ = abc.ABCMeta
@@ -157,7 +156,7 @@
:ends: line number (1-origin), at which embedded code ends (exclusive)
:code: extracted embedded code, which is single-stringified
- >>> class ambigmatcher(object):
+ >>> class ambigmatcher:
... # mock matcher class to examine implementation of
... # "ambiguous matching" corner case
... def __init__(self, desc, matchfunc):
--- a/contrib/undumprevlog Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/undumprevlog Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
# $ hg init
# $ undumprevlog < repo.dump
-from __future__ import absolute_import, print_function
import sys
from mercurial.node import bin
--- a/contrib/win32/hgwebdir_wsgi.py Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/win32/hgwebdir_wsgi.py Thu Jun 16 15:28:54 2022 +0200
@@ -78,7 +78,6 @@
# - Restart the web server and see if things are running.
#
-from __future__ import absolute_import
# Configuration file location
hgweb_config = r'c:\your\directory\wsgi.config'
--- a/contrib/win32/mercurial.ini Thu Jun 16 15:15:03 2022 +0200
+++ b/contrib/win32/mercurial.ini Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,7 @@
; This file will be replaced by the installer on every upgrade.
; Editing this file can cause strange side effects on Vista.
;
-; http://bitbucket.org/tortoisehg/stable/issue/135
+; https://foss.heptapod.net/mercurial/tortoisehg/thg/-/issues/135
;
; To change settings you see in this file, override (or enable) them in
; your user Mercurial.ini file, where USERNAME is your Windows user name:
--- a/doc/check-seclevel.py Thu Jun 16 15:15:03 2022 +0200
+++ b/doc/check-seclevel.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
#
# checkseclevel - checking section title levels in each online help document
-from __future__ import absolute_import
import optparse
import os
--- a/doc/docchecker Thu Jun 16 15:15:03 2022 +0200
+++ b/doc/docchecker Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import os
import re
--- a/doc/gendoc.py Thu Jun 16 15:15:03 2022 +0200
+++ b/doc/gendoc.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
where DOC is the name of a document
"""
-from __future__ import absolute_import
import os
import sys
--- a/doc/hgmanpage.py Thu Jun 16 15:15:03 2022 +0200
+++ b/doc/hgmanpage.py Thu Jun 16 15:28:54 2022 +0200
@@ -41,7 +41,6 @@
by the command whatis or apropos.
"""
-from __future__ import absolute_import
__docformat__ = 'reStructuredText'
@@ -113,7 +112,7 @@
self.output = visitor.astext()
-class Table(object):
+class Table:
def __init__(self):
self._rows = []
self._options = ['center']
@@ -313,7 +312,7 @@
pass
def list_start(self, node):
- class enum_char(object):
+ class enum_char:
enum_style = {
'bullet': '\\(bu',
'emdash': '\\(em',
--- a/doc/runrst Thu Jun 16 15:15:03 2022 +0200
+++ b/doc/runrst Thu Jun 16 15:28:54 2022 +0200
@@ -12,7 +12,6 @@
where WRITER is the name of a Docutils writer such as 'html' or 'manpage'
"""
-from __future__ import absolute_import
import sys
--- a/hg Thu Jun 16 15:15:03 2022 +0200
+++ b/hg Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
import sys
@@ -44,10 +43,9 @@
with tracing.log('hg script'):
# enable importing on demand to reduce startup time
try:
- if sys.version_info[0] < 3 or sys.version_info >= (3, 6):
- import hgdemandimport
+ import hgdemandimport
- hgdemandimport.enable()
+ hgdemandimport.enable()
except ImportError:
sys.stderr.write(
"abort: couldn't find mercurial libraries in [%s]\n"
--- a/hgdemandimport/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgdemandimport/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,15 +11,11 @@
# demand loading is per-package. Keeping demandimport in the mercurial package
# would disable demand loading for any modules in mercurial.
-from __future__ import absolute_import
import os
import sys
-if sys.version_info[0] >= 3:
- from . import demandimportpy3 as demandimport
-else:
- from . import demandimportpy2 as demandimport
+from . import demandimportpy3 as demandimport
# Full module names which can't be lazy imported.
# Extensions can add to this set.
--- a/hgdemandimport/demandimportpy2.py Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,326 +0,0 @@
-# demandimport.py - global demand-loading of modules for Mercurial
-#
-# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-'''
-demandimport - automatic demandloading of modules
-
-To enable this module, do:
-
- import demandimport; demandimport.enable()
-
-Imports of the following forms will be demand-loaded:
-
- import a, b.c
- import a.b as c
- from a import b,c # a will be loaded immediately
-
-These imports will not be delayed:
-
- from a import *
- b = __import__(a)
-'''
-
-from __future__ import absolute_import
-
-import __builtin__ as builtins
-import contextlib
-import sys
-
-from . import tracing
-
-contextmanager = contextlib.contextmanager
-
-_origimport = __import__
-
-nothing = object()
-
-
-def _hgextimport(importfunc, name, globals, *args, **kwargs):
- try:
- return importfunc(name, globals, *args, **kwargs)
- except ImportError:
- if not globals:
- raise
- # extensions are loaded with "hgext_" prefix
- hgextname = 'hgext_%s' % name
- nameroot = hgextname.split('.', 1)[0]
- contextroot = globals.get('__name__', '').split('.', 1)[0]
- if nameroot != contextroot:
- raise
- # retry to import with "hgext_" prefix
- return importfunc(hgextname, globals, *args, **kwargs)
-
-
-class _demandmod(object):
- """module demand-loader and proxy
-
- Specify 1 as 'level' argument at construction, to import module
- relatively.
- """
-
- def __init__(self, name, globals, locals, level):
- if '.' in name:
- head, rest = name.split('.', 1)
- after = [rest]
- else:
- head = name
- after = []
- object.__setattr__(
- self, "_data", (head, globals, locals, after, level, set())
- )
- object.__setattr__(self, "_module", None)
-
- def _extend(self, name):
- """add to the list of submodules to load"""
- self._data[3].append(name)
-
- def _addref(self, name):
- """Record that the named module ``name`` imports this module.
-
- References to this proxy class having the name of this module will be
- replaced at module load time. We assume the symbol inside the importing
- module is identical to the "head" name of this module. We don't
- actually know if "as X" syntax is being used to change the symbol name
- because this information isn't exposed to __import__.
- """
- self._data[5].add(name)
-
- def _load(self):
- if not self._module:
- with tracing.log('demandimport %s', self._data[0]):
- head, globals, locals, after, level, modrefs = self._data
- mod = _hgextimport(
- _origimport, head, globals, locals, None, level
- )
- if mod is self:
- # In this case, _hgextimport() above should imply
- # _demandimport(). Otherwise, _hgextimport() never
- # returns _demandmod. This isn't intentional behavior,
- # in fact. (see also issue5304 for detail)
- #
- # If self._module is already bound at this point, self
- # should be already _load()-ed while _hgextimport().
- # Otherwise, there is no way to import actual module
- # as expected, because (re-)invoking _hgextimport()
- # should cause same result.
- # This is reason why _load() returns without any more
- # setup but assumes self to be already bound.
- mod = self._module
- assert mod and mod is not self, "%s, %s" % (self, mod)
- return
-
- # load submodules
- def subload(mod, p):
- h, t = p, None
- if '.' in p:
- h, t = p.split('.', 1)
- if getattr(mod, h, nothing) is nothing:
- setattr(
- mod,
- h,
- _demandmod(p, mod.__dict__, mod.__dict__, level=1),
- )
- elif t:
- subload(getattr(mod, h), t)
-
- for x in after:
- subload(mod, x)
-
- # Replace references to this proxy instance with the
- # actual module.
- if locals:
- if locals.get(head) is self:
- locals[head] = mod
- elif locals.get(head + 'mod') is self:
- locals[head + 'mod'] = mod
-
- for modname in modrefs:
- modref = sys.modules.get(modname, None)
- if modref and getattr(modref, head, None) is self:
- setattr(modref, head, mod)
-
- object.__setattr__(self, "_module", mod)
-
- def __repr__(self):
- if self._module:
- return "<proxied module '%s'>" % self._data[0]
- return "<unloaded module '%s'>" % self._data[0]
-
- def __call__(self, *args, **kwargs):
- raise TypeError("%s object is not callable" % repr(self))
-
- def __getattr__(self, attr):
- self._load()
- return getattr(self._module, attr)
-
- def __setattr__(self, attr, val):
- self._load()
- setattr(self._module, attr, val)
-
- @property
- def __dict__(self):
- self._load()
- return self._module.__dict__
-
- @property
- def __doc__(self):
- self._load()
- return self._module.__doc__
-
-
-_pypy = '__pypy__' in sys.builtin_module_names
-
-
-def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
- if locals is None or name in ignores or fromlist == ('*',):
- # these cases we can't really delay
- return _hgextimport(_origimport, name, globals, locals, fromlist, level)
- elif not fromlist:
- # import a [as b]
- if '.' in name: # a.b
- base, rest = name.split('.', 1)
- # email.__init__ loading email.mime
- if globals and globals.get('__name__', None) == base:
- return _origimport(name, globals, locals, fromlist, level)
- # if a is already demand-loaded, add b to its submodule list
- if base in locals:
- if isinstance(locals[base], _demandmod):
- locals[base]._extend(rest)
- return locals[base]
- return _demandmod(name, globals, locals, level)
- else:
- # There is a fromlist.
- # from a import b,c,d
- # from . import b,c,d
- # from .a import b,c,d
-
- # level == -1: relative and absolute attempted (Python 2 only).
- # level >= 0: absolute only (Python 2 w/ absolute_import and Python 3).
- # The modern Mercurial convention is to use absolute_import everywhere,
- # so modern Mercurial code will have level >= 0.
-
- # The name of the module the import statement is located in.
- globalname = globals.get('__name__')
-
- def processfromitem(mod, attr):
- """Process an imported symbol in the import statement.
-
- If the symbol doesn't exist in the parent module, and if the
- parent module is a package, it must be a module. We set missing
- modules up as _demandmod instances.
- """
- symbol = getattr(mod, attr, nothing)
- nonpkg = getattr(mod, '__path__', nothing) is nothing
- if symbol is nothing:
- if nonpkg:
- # do not try relative import, which would raise ValueError,
- # and leave unknown attribute as the default __import__()
- # would do. the missing attribute will be detected later
- # while processing the import statement.
- return
- mn = '%s.%s' % (mod.__name__, attr)
- if mn in ignores:
- importfunc = _origimport
- else:
- importfunc = _demandmod
- symbol = importfunc(attr, mod.__dict__, locals, level=1)
- setattr(mod, attr, symbol)
-
- # Record the importing module references this symbol so we can
- # replace the symbol with the actual module instance at load
- # time.
- if globalname and isinstance(symbol, _demandmod):
- symbol._addref(globalname)
-
- def chainmodules(rootmod, modname):
- # recurse down the module chain, and return the leaf module
- mod = rootmod
- for comp in modname.split('.')[1:]:
- obj = getattr(mod, comp, nothing)
- if obj is nothing:
- obj = _demandmod(comp, mod.__dict__, mod.__dict__, level=1)
- setattr(mod, comp, obj)
- elif mod.__name__ + '.' + comp in sys.modules:
- # prefer loaded module over attribute (issue5617)
- obj = sys.modules[mod.__name__ + '.' + comp]
- mod = obj
- return mod
-
- if level >= 0:
- if name:
- # "from a import b" or "from .a import b" style
- rootmod = _hgextimport(
- _origimport, name, globals, locals, level=level
- )
- mod = chainmodules(rootmod, name)
- elif _pypy:
- # PyPy's __import__ throws an exception if invoked
- # with an empty name and no fromlist. Recreate the
- # desired behaviour by hand.
- mn = globalname
- mod = sys.modules[mn]
- if getattr(mod, '__path__', nothing) is nothing:
- mn = mn.rsplit('.', 1)[0]
- mod = sys.modules[mn]
- if level > 1:
- mn = mn.rsplit('.', level - 1)[0]
- mod = sys.modules[mn]
- else:
- mod = _hgextimport(
- _origimport, name, globals, locals, level=level
- )
-
- for x in fromlist:
- processfromitem(mod, x)
-
- return mod
-
- # But, we still need to support lazy loading of standard library and 3rd
- # party modules. So handle level == -1.
- mod = _hgextimport(_origimport, name, globals, locals)
- mod = chainmodules(mod, name)
-
- for x in fromlist:
- processfromitem(mod, x)
-
- return mod
-
-
-ignores = set()
-
-
-def init(ignoreset):
- global ignores
- ignores = ignoreset
-
-
-def isenabled():
- return builtins.__import__ == _demandimport
-
-
-def enable():
- """enable global demand-loading of modules"""
- builtins.__import__ = _demandimport
-
-
-def disable():
- """disable global demand-loading of modules"""
- builtins.__import__ = _origimport
-
-
-@contextmanager
-def deactivated():
- """context manager for disabling demandimport in 'with' blocks"""
- demandenabled = isenabled()
- if demandenabled:
- disable()
-
- try:
- yield
- finally:
- if demandenabled:
- enable()
--- a/hgdemandimport/demandimportpy3.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgdemandimport/demandimportpy3.py Thu Jun 16 15:28:54 2022 +0200
@@ -24,7 +24,6 @@
"""
# This line is unnecessary, but it satisfies test-check-py3-compat.t.
-from __future__ import absolute_import
import contextlib
import importlib.util
@@ -34,12 +33,6 @@
_deactivated = False
-# Python 3.5's LazyLoader doesn't work for some reason.
-# https://bugs.python.org/issue26186 is a known issue with extension
-# importing. But it appears to not have a meaningful effect with
-# Mercurial.
-_supported = sys.version_info[0:2] >= (3, 6)
-
class _lazyloaderex(importlib.util.LazyLoader):
"""This is a LazyLoader except it also follows the _deactivated global and
@@ -55,7 +48,7 @@
super().exec_module(module)
-class LazyFinder(object):
+class LazyFinder:
"""A wrapper around a ``MetaPathFinder`` that makes loaders lazy.
``sys.meta_path`` finders have their ``find_spec()`` called to locate a
@@ -145,9 +138,6 @@
def enable():
- if not _supported:
- return
-
new_finders = []
for finder in sys.meta_path:
new_finders.append(
--- a/hgdemandimport/tracing.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgdemandimport/tracing.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import os
--- a/hgext/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
--- a/hgext/absorb.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/absorb.py Thu Jun 16 15:28:54 2022 +0200
@@ -31,7 +31,6 @@
# * Converge getdraftstack() with other code in core
# * move many attributes on fixupstate to be private
-from __future__ import absolute_import
import collections
@@ -84,7 +83,7 @@
defaultdict = collections.defaultdict
-class nullui(object):
+class nullui:
"""blank ui object doing nothing"""
debugflag = False
@@ -98,7 +97,7 @@
return nullfunc
-class emptyfilecontext(object):
+class emptyfilecontext:
"""minimal filecontext representing an empty file"""
def __init__(self, repo):
@@ -278,7 +277,7 @@
)
-class filefixupstate(object):
+class filefixupstate:
"""state needed to apply fixups to a single file
internally, it keeps file contents of several revisions and a linelog.
@@ -425,7 +424,7 @@
newfixups.append((fixuprev, a1, a2, b1, b2))
elif a2 - a1 == b2 - b1 or b1 == b2:
# 1:1 line mapping, or chunk was deleted
- for i in pycompat.xrange(a1, a2):
+ for i in range(a1, a2):
rev, linenum = annotated[i]
if rev > 1:
if b1 == b2: # deletion, simply remove that single line
@@ -452,7 +451,7 @@
"""
llog = linelog.linelog()
a, alines = b'', []
- for i in pycompat.xrange(len(self.contents)):
+ for i in range(len(self.contents)):
b, blines = self.contents[i], self.contentlines[i]
llrev = i * 2 + 1
chunks = self._alldiffchunks(a, b, alines, blines)
@@ -464,7 +463,7 @@
def _checkoutlinelog(self):
"""() -> [str]. check out file contents from linelog"""
contents = []
- for i in pycompat.xrange(len(self.contents)):
+ for i in range(len(self.contents)):
rev = (i + 1) * 2
self.linelog.annotate(rev)
content = b''.join(map(self._getline, self.linelog.annotateresult))
@@ -606,9 +605,9 @@
a1, a2, b1, b2 = chunk
aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
for idx, fa1, fa2, fb1, fb2 in fixups:
- for i in pycompat.xrange(fa1, fa2):
+ for i in range(fa1, fa2):
aidxs[i - a1] = (max(idx, 1) - 1) // 2
- for i in pycompat.xrange(fb1, fb2):
+ for i in range(fb1, fb2):
bidxs[i - b1] = (max(idx, 1) - 1) // 2
fm.startitem()
@@ -638,7 +637,7 @@
)
fm.data(path=self.path, linetype=linetype)
- for i in pycompat.xrange(a1, a2):
+ for i in range(a1, a2):
writeline(
aidxs[i - a1],
b'-',
@@ -646,7 +645,7 @@
b'deleted',
b'diff.deleted',
)
- for i in pycompat.xrange(b1, b2):
+ for i in range(b1, b2):
writeline(
bidxs[i - b1],
b'+',
@@ -656,7 +655,7 @@
)
-class fixupstate(object):
+class fixupstate:
"""state needed to run absorb
internally, it keeps paths and filefixupstates.
@@ -734,7 +733,7 @@
def apply(self):
"""apply fixups to individual filefixupstates"""
- for path, state in pycompat.iteritems(self.fixupmap):
+ for path, state in self.fixupmap.items():
if self.ui.debugflag:
self.ui.write(_(b'applying fixups to %s\n') % path)
state.apply()
@@ -742,10 +741,7 @@
@property
def chunkstats(self):
"""-> {path: chunkstats}. collect chunkstats from filefixupstates"""
- return {
- path: state.chunkstats
- for path, state in pycompat.iteritems(self.fixupmap)
- }
+ return {path: state.chunkstats for path, state in self.fixupmap.items()}
def commit(self):
"""commit changes. update self.finalnode, self.replacemap"""
@@ -763,7 +759,7 @@
chunkstats = self.chunkstats
if ui.verbose:
# chunkstats for each file
- for path, stat in pycompat.iteritems(chunkstats):
+ for path, stat in chunkstats.items():
if stat[0]:
ui.write(
_(b'%s: %d of %d chunk(s) applied\n')
@@ -846,7 +842,7 @@
repo = self.repo
needupdate = [
(name, self.replacemap[hsh])
- for name, hsh in pycompat.iteritems(repo._bookmarks)
+ for name, hsh in repo._bookmarks.items()
if hsh in self.replacemap
]
changes = []
@@ -909,7 +905,7 @@
# ctx changes more files (not a subset of memworkingcopy)
if not set(ctx.files()).issubset(set(memworkingcopy)):
return False
- for path, content in pycompat.iteritems(memworkingcopy):
+ for path, content in memworkingcopy.items():
if path not in pctx or path not in ctx:
return False
fctx = ctx[path]
@@ -952,7 +948,7 @@
def _cleanupoldcommits(self):
replacements = {
k: ([v] if v is not None else [])
- for k, v in pycompat.iteritems(self.replacemap)
+ for k, v in self.replacemap.items()
}
if replacements:
scmutil.cleanupnodes(
@@ -1002,7 +998,7 @@
if not path or not info:
continue
patchmap[path].append(info)
- for path, patches in pycompat.iteritems(patchmap):
+ for path, patches in patchmap.items():
if path not in ctx or not patches:
continue
patches.sort(reverse=True)
@@ -1049,6 +1045,10 @@
origchunks = patch.parsepatch(diff)
chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
targetctx = overlaydiffcontext(stack[-1], chunks)
+ if opts.get(b'edit_lines'):
+ # If we're going to open the editor, don't ask the user to confirm
+ # first
+ opts[b'apply_changes'] = True
fm = None
if opts.get(b'print_changes') or not opts.get(b'apply_changes'):
fm = ui.formatter(b'absorb', opts)
@@ -1066,7 +1066,7 @@
fm.context(ctx=ctx)
fm.data(linetype=b'changeset')
fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node')
- descfirstline = ctx.description().splitlines()[0]
+ descfirstline = stringutil.firstline(ctx.description())
fm.write(
b'descfirstline',
b'%s\n',
--- a/hgext/acl.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/acl.py Thu Jun 16 15:28:54 2022 +0200
@@ -213,14 +213,12 @@
'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
error,
extensions,
match,
- pycompat,
registrar,
util,
)
@@ -453,7 +451,7 @@
allow = buildmatch(ui, repo, user, b'acl.allow')
deny = buildmatch(ui, repo, user, b'acl.deny')
- for rev in pycompat.xrange(repo[node].rev(), len(repo)):
+ for rev in range(repo[node].rev(), len(repo)):
ctx = repo[rev]
branch = ctx.branch()
if denybranches and denybranches(branch):
--- a/hgext/amend.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/amend.py Thu Jun 16 15:28:54 2022 +0200
@@ -10,7 +10,6 @@
``commit --amend`` but does not prompt an editor.
"""
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/automv.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/automv.py Thu Jun 16 15:28:54 2022 +0200
@@ -24,7 +24,6 @@
#
# See http://markmail.org/thread/5pxnljesvufvom57 for context.
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/beautifygraph.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/beautifygraph.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,14 +11,12 @@
A terminal with UTF-8 support and monospace narrow text are required.
'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
encoding,
extensions,
graphmod,
- pycompat,
templatekw,
)
@@ -54,7 +52,7 @@
def convertedges(line):
line = b' %s ' % line
pretty = []
- for idx in pycompat.xrange(len(line) - 2):
+ for idx in range(len(line) - 2):
pretty.append(
prettyedge(
line[idx : idx + 1],
--- a/hgext/blackbox.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/blackbox.py Thu Jun 16 15:28:54 2022 +0200
@@ -42,7 +42,6 @@
"""
-from __future__ import absolute_import
import re
@@ -106,7 +105,7 @@
_lastlogger = loggingutil.proxylogger()
-class blackboxlogger(object):
+class blackboxlogger:
def __init__(self, ui, repo):
self._repo = repo
self._trackedevents = set(ui.configlist(b'blackbox', b'track'))
--- a/hgext/bookflow.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/bookflow.py Thu Jun 16 15:28:54 2022 +0200
@@ -13,7 +13,6 @@
:hg up|co NAME: switch to bookmark
:hg push -B .: push active bookmark
"""
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/bugzilla.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/bugzilla.py Thu Jun 16 15:28:54 2022 +0200
@@ -291,7 +291,6 @@
Changeset commit comment. Bug 1234.
'''
-from __future__ import absolute_import
import json
import re
@@ -435,7 +434,7 @@
)
-class bzaccess(object):
+class bzaccess:
'''Base class for access to Bugzilla.'''
def __init__(self, ui):
@@ -691,7 +690,7 @@
# Bugzilla via XMLRPC interface.
-class cookietransportrequest(object):
+class cookietransportrequest:
"""A Transport request method that retains cookies over its lifetime.
The regular xmlrpclib transports ignore cookies. Which causes
@@ -1096,7 +1095,7 @@
pass
-class bugzilla(object):
+class bugzilla:
# supported versions of bugzilla. different versions have
# different schemas.
_versions = {
--- a/hgext/censor.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/censor.py Thu Jun 16 15:28:54 2022 +0200
@@ -28,7 +28,6 @@
ignore censored data and merely report that it was encountered.
"""
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial.node import short
--- a/hgext/children.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/children.py Thu Jun 16 15:28:54 2022 +0200
@@ -14,7 +14,6 @@
"children(REV)"` instead.
'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/churn.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/churn.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
'''command to display statistics about repository history'''
-from __future__ import absolute_import, division
import datetime
import os
--- a/hgext/clonebundles.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/clonebundles.py Thu Jun 16 15:28:54 2022 +0200
@@ -202,7 +202,6 @@
Mercurial server when the bundle hosting service fails.
"""
-from __future__ import absolute_import
from mercurial import (
bundlecaches,
--- a/hgext/closehead.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/closehead.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
'''close arbitrary heads without checking them out first'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/commitextras.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/commitextras.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
'''adds a new flag extras to commit (ADVANCED)'''
-from __future__ import absolute_import
import re
--- a/hgext/convert/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
'''import revisions from foreign VCS repositories into Mercurial'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import registrar
--- a/hgext/convert/bzr.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/bzr.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
# This module is for handling Breezy imports or `brz`, but it's also compatible
# with Bazaar or `bzr`, that was formerly known as Bazaar-NG;
# it cannot access `bar` repositories, but they were never used very much.
-from __future__ import absolute_import
import os
@@ -16,7 +15,6 @@
from mercurial import (
demandimport,
error,
- pycompat,
util,
)
from . import common
@@ -210,7 +208,7 @@
if not branch.supports_tags():
return {}
tagdict = branch.tags.get_tag_dict()
- for name, rev in pycompat.iteritems(tagdict):
+ for name, rev in tagdict.items():
bytetags[self.recode(name)] = rev
return bytetags
--- a/hgext/convert/common.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/common.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,12 +4,11 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import base64
import datetime
-import errno
import os
+import pickle
import re
import shlex
import subprocess
@@ -25,7 +24,6 @@
)
from mercurial.utils import procutil
-pickle = util.pickle
propertycache = util.propertycache
@@ -35,7 +33,7 @@
return d.encode('latin1')
-class _shlexpy3proxy(object):
+class _shlexpy3proxy:
def __init__(self, l):
self._l = l
@@ -56,45 +54,25 @@
def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
if data is None:
- if pycompat.ispy3:
- data = open(filepath, b'r', encoding='latin1')
- else:
- data = open(filepath, b'r')
+ data = open(filepath, b'r', encoding='latin1')
else:
if filepath is not None:
raise error.ProgrammingError(
b'shlexer only accepts data or filepath, not both'
)
- if pycompat.ispy3:
- data = data.decode('latin1')
+ data = data.decode('latin1')
l = shlex.shlex(data, infile=filepath, posix=True)
if whitespace is not None:
l.whitespace_split = True
- if pycompat.ispy3:
- l.whitespace += whitespace.decode('latin1')
- else:
- l.whitespace += whitespace
+ l.whitespace += whitespace.decode('latin1')
if wordchars is not None:
- if pycompat.ispy3:
- l.wordchars += wordchars.decode('latin1')
- else:
- l.wordchars += wordchars
- if pycompat.ispy3:
- return _shlexpy3proxy(l)
- return l
-
-
-if pycompat.ispy3:
- base64_encodebytes = base64.encodebytes
- base64_decodebytes = base64.decodebytes
-else:
- base64_encodebytes = base64.encodestring
- base64_decodebytes = base64.decodestring
+ l.wordchars += wordchars.decode('latin1')
+ return _shlexpy3proxy(l)
def encodeargs(args):
def encodearg(s):
- lines = base64_encodebytes(s)
+ lines = base64.encodebytes(s)
lines = [l.splitlines()[0] for l in pycompat.iterbytestr(lines)]
return b''.join(lines)
@@ -103,7 +81,7 @@
def decodeargs(s):
- s = base64_decodebytes(s)
+ s = base64.decodebytes(s)
return pickle.loads(s)
@@ -128,7 +106,7 @@
SKIPREV = b'SKIP'
-class commit(object):
+class commit:
def __init__(
self,
author,
@@ -158,7 +136,7 @@
self.ctx = ctx # for hg to hg conversions
-class converter_source(object):
+class converter_source:
"""Conversion source interface"""
def __init__(self, ui, repotype, path=None, revs=None):
@@ -247,7 +225,7 @@
if not encoding:
encoding = self.encoding or b'utf-8'
- if isinstance(s, pycompat.unicode):
+ if isinstance(s, str):
return s.encode("utf-8")
try:
return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
@@ -308,7 +286,7 @@
return True
-class converter_sink(object):
+class converter_sink:
"""Conversion sink (target) interface"""
def __init__(self, ui, repotype, path):
@@ -404,7 +382,7 @@
raise NotImplementedError
-class commandline(object):
+class commandline:
def __init__(self, ui, command):
self.ui = ui
self.command = command
@@ -418,7 +396,7 @@
def _cmdline(self, cmd, *args, **kwargs):
kwargs = pycompat.byteskwargs(kwargs)
cmdline = [self.command, cmd] + list(args)
- for k, v in pycompat.iteritems(kwargs):
+ for k, v in kwargs.items():
if len(k) == 1:
cmdline.append(b'-' + k)
else:
@@ -549,11 +527,9 @@
return
try:
fp = open(self.path, b'rb')
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return
- for i, line in enumerate(util.iterfile(fp)):
+ for i, line in enumerate(fp):
line = line.splitlines()[0].rstrip()
if not line:
# Ignore blank lines
--- a/hgext/convert/convcmd.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/convcmd.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import os
@@ -87,7 +86,7 @@
def recode(s):
- if isinstance(s, pycompat.unicode):
+ if isinstance(s, str):
return s.encode(pycompat.sysstr(orig_encoding), 'replace')
else:
return s.decode('utf-8').encode(
@@ -177,7 +176,7 @@
raise error.Abort(_(b'%s: unknown repository type') % path)
-class progresssource(object):
+class progresssource:
def __init__(self, ui, source, filecount):
self.ui = ui
self.source = source
@@ -199,7 +198,7 @@
self.progress.complete()
-class converter(object):
+class converter:
def __init__(self, ui, source, dest, revmapfile, opts):
self.source = source
@@ -243,7 +242,7 @@
m = {}
try:
fp = open(path, b'rb')
- for i, line in enumerate(util.iterfile(fp)):
+ for i, line in enumerate(fp):
line = line.splitlines()[0].rstrip()
if not line:
# Ignore blank lines
@@ -585,9 +584,7 @@
# write another hash correspondence to override the
# previous one so we don't end up with extra tag heads
tagsparents = [
- e
- for e in pycompat.iteritems(self.map)
- if e[1] == tagsparent
+ e for e in self.map.items() if e[1] == tagsparent
]
if tagsparents:
self.map[tagsparents[0][0]] = nrev
--- a/hgext/convert/cvs.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/cvs.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
import os
@@ -19,7 +18,6 @@
from mercurial import (
encoding,
error,
- pycompat,
util,
)
from mercurial.utils import (
@@ -317,7 +315,7 @@
if full:
raise error.Abort(_(b"convert from cvs does not support --full"))
self._parse()
- return sorted(pycompat.iteritems(self.files[rev])), {}, set()
+ return sorted(self.files[rev].items()), {}, set()
def getcommit(self, rev):
self._parse()
--- a/hgext/convert/cvsps.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/cvsps.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,10 +4,10 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import functools
import os
+import pickle
import re
from mercurial.i18n import _
@@ -25,10 +25,8 @@
stringutil,
)
-pickle = util.pickle
-
-class logentry(object):
+class logentry:
"""Class logentry has the following attributes:
.author - author name as CVS knows it
.branch - name of branch this revision is on
@@ -468,7 +466,7 @@
# find the branches starting from this revision
branchpoints = set()
- for branch, revision in pycompat.iteritems(branchmap):
+ for branch, revision in branchmap.items():
revparts = tuple([int(i) for i in revision.split(b'.')])
if len(revparts) < 2: # bad tags
continue
@@ -579,7 +577,7 @@
return log
-class changeset(object):
+class changeset:
"""Class changeset has the following attributes:
.id - integer identifying this changeset (list index)
.author - author name as CVS knows it
@@ -834,7 +832,7 @@
# branchpoints such that it is the latest possible
# commit without any intervening, unrelated commits.
- for candidate in pycompat.xrange(i):
+ for candidate in range(i):
if c.branch not in changesets[candidate].branchpoints:
if p is not None:
break
--- a/hgext/convert/darcs.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/darcs.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,9 +4,7 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import os
import re
import shutil
@@ -114,7 +112,7 @@
shutil.rmtree(self.tmppath, ignore_errors=True)
def recode(self, s, encoding=None):
- if isinstance(s, pycompat.unicode):
+ if isinstance(s, str):
# XMLParser returns unicode objects for anything it can't
# encode into ASCII. We convert them back to str to get
# recode's normal conversion behavior.
@@ -231,10 +229,8 @@
try:
data = util.readfile(path)
mode = os.lstat(path).st_mode
- except IOError as inst:
- if inst.errno == errno.ENOENT:
- return None, None
- raise
+ except FileNotFoundError:
+ return None, None
mode = (mode & 0o111) and b'x' or b''
return data, mode
--- a/hgext/convert/filemap.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/filemap.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import posixpath
@@ -42,7 +41,7 @@
return posixpath.normpath(path)
-class filemapper(object):
+class filemapper:
"""Map and filter filenames when importing.
A name can be mapped to itself, a new name, or None (omit from new
repository)."""
@@ -126,7 +125,7 @@
repo belong to the source repo and what parts don't."""
if self.targetprefixes is None:
self.targetprefixes = set()
- for before, after in pycompat.iteritems(self.rename):
+ for before, after in self.rename.items():
self.targetprefixes.add(after)
# If "." is a target, then all target files are considered from the
--- a/hgext/convert/git.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/git.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
@@ -20,7 +19,7 @@
from . import common
-class submodule(object):
+class submodule:
def __init__(self, path, node, url):
self.path = path
self.node = node
--- a/hgext/convert/gnuarch.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/gnuarch.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
import shutil
@@ -28,7 +27,7 @@
class gnuarch_source(common.converter_source, common.commandline):
- class gnuarch_rev(object):
+ class gnuarch_rev:
def __init__(self, rev):
self.rev = rev
self.summary = b''
--- a/hgext/convert/hg.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/hg.py Thu Jun 16 15:28:54 2022 +0200
@@ -16,7 +16,6 @@
# identifier to be stored in the converted revision. This will cause
# the converted revision to have a different identity than the
# source.
-from __future__ import absolute_import
import os
import re
@@ -40,7 +39,6 @@
merge as mergemod,
mergestate,
phases,
- pycompat,
util,
)
from mercurial.utils import dateutil
@@ -139,7 +137,7 @@
if missings:
self.after()
- for pbranch, heads in sorted(pycompat.iteritems(missings)):
+ for pbranch, heads in sorted(missings.items()):
pbranchpath = os.path.join(self.path, pbranch)
prepo = hg.peer(self.ui, {}, pbranchpath)
self.ui.note(
@@ -424,7 +422,7 @@
tagparent = tagparent or self.repo.nullid
oldlines = set()
- for branch, heads in pycompat.iteritems(self.repo.branchmap()):
+ for branch, heads in self.repo.branchmap().items():
for h in heads:
if b'.hgtags' in self.repo[h]:
oldlines.update(
@@ -596,7 +594,7 @@
maappend = ma.append
rappend = r.append
d = ctx1.manifest().diff(ctx2.manifest())
- for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d):
+ for f, ((node1, flag1), (node2, flag2)) in d.items():
if node2 is None:
rappend(f)
else:
@@ -622,7 +620,7 @@
cleanp2 = set()
if len(parents) == 2:
d = parents[1].manifest().diff(ctx.manifest(), clean=True)
- for f, value in pycompat.iteritems(d):
+ for f, value in d.items():
if value is None:
cleanp2.add(f)
changes = [(f, rev) for f in files if f not in self.ignored]
--- a/hgext/convert/monotone.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/monotone.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
import re
@@ -103,7 +102,7 @@
# Prepare the command in automate stdio format
kwargs = pycompat.byteskwargs(kwargs)
command = []
- for k, v in pycompat.iteritems(kwargs):
+ for k, v in kwargs.items():
command.append(b"%d:%s" % (len(k), k))
if v:
command.append(b"%d:%s" % (len(v), v))
@@ -151,7 +150,7 @@
raise error.Abort(_(b'bad mtn packet - no end of packet size'))
lengthstr += read
try:
- length = pycompat.long(lengthstr[:-1])
+ length = int(lengthstr[:-1])
except TypeError:
raise error.Abort(
_(b'bad mtn packet - bad packet size %s') % lengthstr
--- a/hgext/convert/p4.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/p4.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import marshal
import re
--- a/hgext/convert/subversion.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/subversion.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,11 +1,11 @@
# Subversion 1.4/1.5 Python API backend
#
# Copyright(C) 2007 Daniel Holth et al
-from __future__ import absolute_import
import codecs
import locale
import os
+import pickle
import re
import xml.dom.minidom
@@ -26,7 +26,6 @@
from . import common
-pickle = util.pickle
stringio = util.stringio
propertycache = util.propertycache
urlerr = util.urlerr
@@ -181,7 +180,7 @@
return optrev
-class changedpath(object):
+class changedpath:
def __init__(self, p):
self.copyfrom_path = p.copyfrom_path
self.copyfrom_rev = p.copyfrom_rev
@@ -203,7 +202,7 @@
def receiver(orig_paths, revnum, author, date, message, pool):
paths = {}
if orig_paths is not None:
- for k, v in pycompat.iteritems(orig_paths):
+ for k, v in orig_paths.items():
paths[k] = changedpath(v)
pickle.dump((paths, revnum, author, date, message), fp, protocol)
@@ -249,7 +248,7 @@
get_log_child(ui.fout, *args)
-class logstream(object):
+class logstream:
"""Interruptible revision log iterator."""
def __init__(self, stdout):
@@ -298,7 +297,7 @@
def receiver(orig_paths, revnum, author, date, message, pool):
paths = {}
if orig_paths is not None:
- for k, v in pycompat.iteritems(orig_paths):
+ for k, v in orig_paths.items():
paths[k] = changedpath(v)
self.append((paths, revnum, author, date, message))
@@ -365,32 +364,6 @@
}
-class NonUtf8PercentEncodedBytes(Exception):
- pass
-
-
-# Subversion paths are Unicode. Since the percent-decoding is done on
-# UTF-8-encoded strings, percent-encoded bytes are interpreted as UTF-8.
-def url2pathname_like_subversion(unicodepath):
- if pycompat.ispy3:
- # On Python 3, we have to pass unicode to urlreq.url2pathname().
- # Percent-decoded bytes get decoded using UTF-8 and the 'replace' error
- # handler.
- unicodepath = urlreq.url2pathname(unicodepath)
- if u'\N{REPLACEMENT CHARACTER}' in unicodepath:
- raise NonUtf8PercentEncodedBytes
- else:
- return unicodepath
- else:
- # If we passed unicode on Python 2, it would be converted using the
- # latin-1 encoding. Therefore, we pass UTF-8-encoded bytes.
- unicodepath = urlreq.url2pathname(unicodepath.encode('utf-8'))
- try:
- return unicodepath.decode('utf-8')
- except UnicodeDecodeError:
- raise NonUtf8PercentEncodedBytes
-
-
def issvnurl(ui, url):
try:
proto, path = url.split(b'://', 1)
@@ -413,9 +386,15 @@
% pycompat.sysbytes(fsencoding)
)
return False
- try:
- unicodepath = url2pathname_like_subversion(unicodepath)
- except NonUtf8PercentEncodedBytes:
+
+ # Subversion paths are Unicode. Since it does percent-decoding on
+ # UTF-8-encoded strings, percent-encoded bytes are interpreted as
+ # UTF-8.
+ # On Python 3, we have to pass unicode to urlreq.url2pathname().
+ # Percent-decoded bytes get decoded using UTF-8 and the 'replace'
+ # error handler.
+ unicodepath = urlreq.url2pathname(unicodepath)
+ if u'\N{REPLACEMENT CHARACTER}' in unicodepath:
ui.warn(
_(
b'Subversion does not support non-UTF-8 '
@@ -423,6 +402,7 @@
)
)
return False
+
# Below, we approximate how Subversion checks the path. On Unix, we
# should therefore convert the path to bytes using `fsencoding`
# (like Subversion does). On Windows, the right thing would
@@ -730,7 +710,7 @@
)
files = [
n
- for n, e in pycompat.iteritems(entries)
+ for n, e in entries.items()
if e.kind == svn.core.svn_node_file
]
self.removed = set()
@@ -820,7 +800,7 @@
origpaths = []
copies = [
(e.copyfrom_path, e.copyfrom_rev, p)
- for p, e in pycompat.iteritems(origpaths)
+ for p, e in origpaths.items()
if e.copyfrom_path
]
# Apply moves/copies from more specific to general
@@ -851,7 +831,7 @@
# be represented in mercurial.
addeds = {
p: e.copyfrom_path
- for p, e in pycompat.iteritems(origpaths)
+ for p, e in origpaths.items()
if e.action == b'A' and e.copyfrom_path
}
badroots = set()
@@ -1140,7 +1120,7 @@
parents = []
# check whether this revision is the start of a branch or part
# of a branch renaming
- orig_paths = sorted(pycompat.iteritems(orig_paths))
+ orig_paths = sorted(orig_paths.items())
root_paths = [
(p, e) for p, e in orig_paths if self.module.startswith(p)
]
@@ -1302,7 +1282,7 @@
path += b'/'
return (
(path + p)
- for p, e in pycompat.iteritems(entries)
+ for p, e in entries.items()
if e.kind == svn.core.svn_node_file
)
--- a/hgext/convert/transport.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/convert/transport.py Thu Jun 16 15:28:54 2022 +0200
@@ -16,7 +16,6 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
-from __future__ import absolute_import
import svn.client
import svn.core
@@ -71,7 +70,7 @@
pass
-class SvnRaTransport(object):
+class SvnRaTransport:
"""
Open an ra connection to a Subversion repository.
"""
@@ -108,7 +107,7 @@
self.ra = ra
svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
- class Reporter(object):
+ class Reporter:
def __init__(self, reporter_data):
self._reporter, self._baton = reporter_data
--- a/hgext/eol.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/eol.py Thu Jun 16 15:28:54 2022 +0200
@@ -91,7 +91,6 @@
used.
"""
-from __future__ import absolute_import
import os
import re
@@ -186,7 +185,7 @@
}
-class eolfile(object):
+class eolfile:
def __init__(self, ui, root, data):
self._decode = {
b'LF': b'to-lf',
@@ -310,7 +309,7 @@
ensureenabled(ui)
files = set()
revs = set()
- for rev in pycompat.xrange(repo[node].rev(), len(repo)):
+ for rev in range(repo[node].rev(), len(repo)):
revs.add(rev)
if headsonly:
ctx = repo[rev]
@@ -379,7 +378,7 @@
if not repo.local():
return
- for name, fn in pycompat.iteritems(filters):
+ for name, fn in filters.items():
repo.adddatafilter(name, fn)
ui.setconfig(b'patch', b'eol', b'auto', b'eol')
--- a/hgext/extdiff.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/extdiff.py Thu Jun 16 15:28:54 2022 +0200
@@ -81,7 +81,6 @@
pretty fast (at least faster than having to compare the entire tree).
'''
-from __future__ import absolute_import
import os
import re
@@ -696,7 +695,7 @@
return dodiff(ui, repo, cmdline, pats, opts)
-class savedcmd(object):
+class savedcmd:
"""use external program to diff repository (or selected files)
Show differences between revisions for the specified files, using
--- a/hgext/factotum.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/factotum.py Thu Jun 16 15:28:54 2022 +0200
@@ -45,7 +45,6 @@
'''
-from __future__ import absolute_import
import os
from mercurial.i18n import _
--- a/hgext/fastannotate/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fastannotate/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -101,7 +101,6 @@
#
# * format changes to the revmap file (maybe use length-encoding
# instead of null-terminated file paths at least?)
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/fastannotate/commands.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fastannotate/commands.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
--- a/hgext/fastannotate/context.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fastannotate/context.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import contextlib
@@ -76,7 +75,7 @@
linecount = text.count(b'\n')
if text and not text.endswith(b'\n'):
linecount += 1
- return ([(fctx, i) for i in pycompat.xrange(linecount)], text)
+ return ([(fctx, i) for i in range(linecount)], text)
# extracted from mercurial.context.basefilectx.annotate. slightly modified
@@ -160,7 +159,7 @@
_defaultdiffopthash = hashdiffopts(mdiff.defaultopts)
-class annotateopts(object):
+class annotateopts:
"""like mercurial.mdiff.diffopts, but is for annotate
followrename: follow renames, like "hg annotate -f"
@@ -175,7 +174,7 @@
def __init__(self, **opts):
opts = pycompat.byteskwargs(opts)
- for k, v in pycompat.iteritems(self.defaults):
+ for k, v in self.defaults.items():
setattr(self, k, opts.get(k, v))
@util.propertycache
@@ -197,7 +196,7 @@
defaultopts = annotateopts()
-class _annotatecontext(object):
+class _annotatecontext:
"""do not use this class directly as it does not use lock to protect
writes. use "with annotatecontext(...)" instead.
"""
@@ -578,13 +577,13 @@
result = [None] * len(annotateresult)
# {(rev, linenum): [lineindex]}
key2idxs = collections.defaultdict(list)
- for i in pycompat.xrange(len(result)):
+ for i in range(len(result)):
key2idxs[(revs[i], annotateresult[i][1])].append(i)
while key2idxs:
# find an unresolved line and its linelog rev to annotate
hsh = None
try:
- for (rev, _linenum), idxs in pycompat.iteritems(key2idxs):
+ for (rev, _linenum), idxs in key2idxs.items():
if revmap.rev2flag(rev) & revmapmod.sidebranchflag:
continue
hsh = annotateresult[idxs[0]][0]
@@ -595,7 +594,7 @@
# the remaining key2idxs are not in main branch, resolving them
# using the hard way...
revlines = {}
- for (rev, linenum), idxs in pycompat.iteritems(key2idxs):
+ for (rev, linenum), idxs in key2idxs.items():
if rev not in revlines:
hsh = annotateresult[idxs[0]][0]
if self.ui.debugflag:
@@ -784,7 +783,7 @@
pass
-class pathhelper(object):
+class pathhelper:
"""helper for getting paths for lockfile, linelog and revmap"""
def __init__(self, repo, path, opts=defaultopts):
--- a/hgext/fastannotate/error.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fastannotate/error.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
class CorruptedFileError(Exception):
--- a/hgext/fastannotate/formatter.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fastannotate/formatter.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial.node import (
hex,
@@ -20,7 +19,7 @@
# imitating mercurial.commands.annotate, not using the vanilla formatter since
# the data structures are a bit different, and we have some fast paths.
-class defaultformatter(object):
+class defaultformatter:
"""the default formatter that does leftpad and support some common flags"""
def __init__(self, ui, repo, opts):
@@ -94,7 +93,7 @@
# buffered output
result = b''
- for i in pycompat.xrange(len(annotatedresult)):
+ for i in range(len(annotatedresult)):
for j, p in enumerate(pieces):
sep = self.funcmap[j][1]
padding = b' ' * (maxwidths[j] - len(p[i]))
@@ -149,7 +148,7 @@
result = b''
lasti = len(annotatedresult) - 1
- for i in pycompat.xrange(len(annotatedresult)):
+ for i in range(len(annotatedresult)):
result += b'\n {\n'
for j, p in enumerate(pieces):
k, vs = p
--- a/hgext/fastannotate/protocol.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fastannotate/protocol.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import os
@@ -15,7 +14,6 @@
error,
extensions,
hg,
- pycompat,
util,
wireprotov1peer,
wireprotov1server,
@@ -190,7 +188,7 @@
for result in results:
r = result.result()
# TODO: pconvert these paths on the server?
- r = {util.pconvert(p): v for p, v in pycompat.iteritems(r)}
+ r = {util.pconvert(p): v for p, v in r.items()}
for path in sorted(r):
# ignore malicious paths
if not path.startswith(b'fastannotate/') or b'/../' in (
--- a/hgext/fastannotate/revmap.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fastannotate/revmap.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import bisect
import io
@@ -16,7 +15,6 @@
from mercurial.pycompat import open
from mercurial import (
error as hgerror,
- pycompat,
)
from . import error
@@ -49,7 +47,7 @@
_hshlen = 20
-class revmap(object):
+class revmap:
"""trivial hg bin hash - linelog rev bidirectional map
also stores a flag (uint8) for each revision, and track renames.
@@ -166,13 +164,11 @@
if self._lastmaxrev == -1: # write the entire file
with open(self.path, b'wb') as f:
f.write(self.HEADER)
- for i in pycompat.xrange(1, len(self._rev2hsh)):
+ for i in range(1, len(self._rev2hsh)):
self._writerev(i, f)
else: # append incrementally
with open(self.path, b'ab') as f:
- for i in pycompat.xrange(
- self._lastmaxrev + 1, len(self._rev2hsh)
- ):
+ for i in range(self._lastmaxrev + 1, len(self._rev2hsh)):
self._writerev(i, f)
self._lastmaxrev = self.maxrev
--- a/hgext/fastannotate/support.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fastannotate/support.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial.pycompat import getattr
from mercurial import (
@@ -23,7 +22,7 @@
)
-class _lazyfctx(object):
+class _lazyfctx:
"""delegates to fctx but do not construct fctx when unnecessary"""
def __init__(self, repo, node, path):
--- a/hgext/fastexport.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fastexport.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# The format specification for fast-import streams can be found at
# https://git-scm.com/docs/git-fast-import#_input_format
-from __future__ import absolute_import
import re
from mercurial.i18n import _
--- a/hgext/fetch.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fetch.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
'''pull, update and merge in one command (DEPRECATED)'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial.node import short
--- a/hgext/fix.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fix.py Thu Jun 16 15:28:54 2022 +0200
@@ -122,7 +122,6 @@
file content back to stdout as documented above.
"""
-from __future__ import absolute_import
import collections
import itertools
@@ -378,9 +377,7 @@
Useful as a hook point for extending "hg fix" with output summarizing the
effects of the command, though we choose not to output anything here.
"""
- replacements = {
- prec: [succ] for prec, succ in pycompat.iteritems(replacements)
- }
+ replacements = {prec: [succ] for prec, succ in replacements.items()}
scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
@@ -693,7 +690,7 @@
"""
metadata = {}
newdata = fixctx[path].data()
- for fixername, fixer in pycompat.iteritems(fixers):
+ for fixername, fixer in fixers.items():
if fixer.affects(opts, fixctx, path):
ranges = lineranges(
opts, path, basepaths, basectxs, fixctx, newdata
@@ -771,7 +768,7 @@
Directly updates the dirstate for the affected files.
"""
- for path, data in pycompat.iteritems(filedata):
+ for path, data in filedata.items():
fctx = ctx[path]
fctx.write(data, fctx.flags())
@@ -906,7 +903,7 @@
return names
-class Fixer(object):
+class Fixer:
"""Wraps the raw config values for a fixer with methods"""
def __init__(
--- a/hgext/fsmonitor/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fsmonitor/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -107,7 +107,6 @@
# The issues related to nested repos and subrepos are probably not fundamental
# ones. Patches to fix them are welcome.
-from __future__ import absolute_import
import codecs
import os
@@ -336,7 +335,7 @@
nonnormalset = {
f
for f, e in self._map.items()
- if e.v1_state() != b"n" or e.v1_mtime() == -1
+ if e._v1_state() != b"n" or e._v1_mtime() == -1
}
copymap = self._map.copymap
@@ -502,15 +501,11 @@
visit.update(f for f in copymap if f not in results and matchfn(f))
else:
if matchalways:
- visit.update(
- f for f, st in pycompat.iteritems(dmap) if f not in results
- )
+ visit.update(f for f, st in dmap.items() if f not in results)
visit.update(f for f in copymap if f not in results)
else:
visit.update(
- f
- for f, st in pycompat.iteritems(dmap)
- if f not in results and matchfn(f)
+ f for f, st in dmap.items() if f not in results and matchfn(f)
)
visit.update(f for f in copymap if f not in results and matchfn(f))
@@ -686,7 +681,7 @@
)
-class poststatus(object):
+class poststatus:
def __init__(self, startclock):
self._startclock = pycompat.sysbytes(startclock)
@@ -761,7 +756,7 @@
pass
-class state_update(object):
+class state_update:
"""This context manager is responsible for dispatching the state-enter
and state-leave signals to the watchman service. The enter and leave
methods can be invoked manually (for scenarios where context manager
--- a/hgext/fsmonitor/pywatchman/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fsmonitor/pywatchman/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -27,7 +27,6 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# no unicode literals
-from __future__ import absolute_import, division, print_function
import inspect
import math
@@ -302,7 +301,7 @@
)
-class Transport(object):
+class Transport:
"""communication transport to the watchman server"""
buf = None
@@ -347,7 +346,7 @@
self.buf.append(b)
-class Codec(object):
+class Codec:
"""communication encoding for the watchman server"""
transport = None
@@ -860,7 +859,7 @@
self.transport.write(cmd + b"\n")
-class client(object):
+class client:
"""Handles the communication with the watchman service"""
sockpath = None
--- a/hgext/fsmonitor/pywatchman/capabilities.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fsmonitor/pywatchman/capabilities.py Thu Jun 16 15:28:54 2022 +0200
@@ -27,7 +27,6 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# no unicode literals
-from __future__ import absolute_import, division, print_function
def parse_version(vstr):
--- a/hgext/fsmonitor/pywatchman/compat.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fsmonitor/pywatchman/compat.py Thu Jun 16 15:28:54 2022 +0200
@@ -27,7 +27,6 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# no unicode literals
-from __future__ import absolute_import, division, print_function
import sys
--- a/hgext/fsmonitor/pywatchman/encoding.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fsmonitor/pywatchman/encoding.py Thu Jun 16 15:28:54 2022 +0200
@@ -27,7 +27,6 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# no unicode literals
-from __future__ import absolute_import, division, print_function
import sys
--- a/hgext/fsmonitor/pywatchman/load.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fsmonitor/pywatchman/load.py Thu Jun 16 15:28:54 2022 +0200
@@ -27,7 +27,6 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# no unicode literals
-from __future__ import absolute_import, division, print_function
import ctypes
--- a/hgext/fsmonitor/pywatchman/pybser.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fsmonitor/pywatchman/pybser.py Thu Jun 16 15:28:54 2022 +0200
@@ -27,7 +27,6 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# no unicode literals
-from __future__ import absolute_import, division, print_function
import binascii
import collections
@@ -94,7 +93,7 @@
return ret
-class _bser_buffer(object):
+class _bser_buffer:
def __init__(self, version):
self.bser_version = version
self.buf = ctypes.create_string_buffer(8192)
@@ -325,7 +324,7 @@
# This is a quack-alike with the bserObjectType in bser.c
# It provides by getattr accessors and getitem for both index
# and name.
-class _BunserDict(object):
+class _BunserDict:
__slots__ = ("_keys", "_values")
def __init__(self, keys, values):
@@ -351,7 +350,7 @@
return len(self._keys)
-class Bunser(object):
+class Bunser:
def __init__(self, mutable=True, value_encoding=None, value_errors=None):
self.mutable = mutable
self.value_encoding = value_encoding
--- a/hgext/fsmonitor/state.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fsmonitor/state.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
import os
@@ -23,7 +22,7 @@
_versionformat = b">I"
-class state(object):
+class state:
def __init__(self, repo):
self._vfs = repo.vfs
self._ui = repo.ui
@@ -138,9 +137,8 @@
def invalidate(self):
try:
os.unlink(os.path.join(self._rootdir, b'.hg', b'fsmonitor.state'))
- except OSError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
self._identity = util.filestat(None)
def setlastclock(self, clock):
--- a/hgext/fsmonitor/watchmanclient.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/fsmonitor/watchmanclient.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import getpass
@@ -44,7 +43,7 @@
super(WatchmanNoRoot, self).__init__(msg)
-class client(object):
+class client:
def __init__(self, ui, root, timeout=1.0):
err = None
if not self._user:
--- a/hgext/git/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/git/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
firstborn a la Rumpelstiltskin, etc.
"""
-from __future__ import absolute_import
import os
@@ -17,6 +16,7 @@
localrepo,
pycompat,
registrar,
+ requirements as requirementsmod,
scmutil,
store,
util,
@@ -48,7 +48,7 @@
# TODO: extract an interface for this in core
-class gitstore(object): # store.basicstore):
+class gitstore: # store.basicstore):
def __init__(self, path, vfstype):
self.vfs = vfstype(path)
self.opener = self.vfs
@@ -130,7 +130,7 @@
return orig(requirements, storebasepath, vfstype)
-class gitfilestorage(object):
+class gitfilestorage:
def file(self, path):
if path[0:1] == b'/':
path = path[1:]
@@ -162,7 +162,7 @@
_BMS_PREFIX = 'refs/heads/'
-class gitbmstore(object):
+class gitbmstore:
def __init__(self, gitrepo):
self.gitrepo = gitrepo
self._aclean = True
@@ -301,9 +301,15 @@
class gitlocalrepo(orig):
def _makedirstate(self):
+ v2_req = requirementsmod.DIRSTATE_V2_REQUIREMENT
+ use_dirstate_v2 = v2_req in self.requirements
+
# TODO narrow support here
return dirstate.gitdirstate(
- self.ui, self.vfs.base, self.store.git
+ self.ui,
+ self.vfs,
+ self.store.git,
+ use_dirstate_v2,
)
def commit(self, *args, **kwargs):
--- a/hgext/git/dirstate.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/git/dirstate.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,11 +1,9 @@
-from __future__ import absolute_import
-
import contextlib
-import errno
import os
from mercurial.node import sha1nodeconstants
from mercurial import (
+ dirstatemap,
error,
extensions,
match as matchmod,
@@ -13,6 +11,9 @@
scmutil,
util,
)
+from mercurial.dirstateutils import (
+ timestamp,
+)
from mercurial.interfaces import (
dirstate as intdirstate,
util as interfaceutil,
@@ -20,6 +21,9 @@
from . import gitutil
+
+DirstateItem = dirstatemap.DirstateItem
+propertycache = util.propertycache
pygit2 = gitutil.get_pygit2()
@@ -28,7 +32,7 @@
return orig(filepath, warn, sourceinfo=False)
result = []
warnings = []
- with open(filepath, b'rb') as fp:
+ with open(filepath, 'rb') as fp:
for l in fp:
l = l.strip()
if not l or l.startswith(b'#'):
@@ -68,14 +72,29 @@
@interfaceutil.implementer(intdirstate.idirstate)
-class gitdirstate(object):
- def __init__(self, ui, root, gitrepo):
+class gitdirstate:
+ def __init__(self, ui, vfs, gitrepo, use_dirstate_v2):
self._ui = ui
- self._root = os.path.dirname(root)
+ self._root = os.path.dirname(vfs.base)
+ self._opener = vfs
self.git = gitrepo
self._plchangecallbacks = {}
# TODO: context.poststatusfixup is bad and uses this attribute
self._dirty = False
+ self._mapcls = dirstatemap.dirstatemap
+ self._use_dirstate_v2 = use_dirstate_v2
+
+ @propertycache
+ def _map(self):
+ """Return the dirstate contents (see documentation for dirstatemap)."""
+ self._map = self._mapcls(
+ self._ui,
+ self._opener,
+ self._root,
+ sha1nodeconstants,
+ self._use_dirstate_v2,
+ )
+ return self._map
def p1(self):
try:
@@ -144,6 +163,13 @@
[],
[],
)
+
+ try:
+ mtime_boundary = timestamp.get_fs_now(self._opener)
+ except OSError:
+ # In largefiles or readonly context
+ mtime_boundary = None
+
gstatus = self.git.status()
for path, status in gstatus.items():
path = pycompat.fsencode(path)
@@ -195,6 +221,7 @@
scmutil.status(
modified, added, removed, deleted, unknown, ignored, clean
),
+ mtime_boundary,
)
def flagfunc(self, buildfallback):
@@ -207,6 +234,13 @@
os.path.dirname(pycompat.fsencode(self.git.path))
)
+ def get_entry(self, path):
+ """return a DirstateItem for the associated path"""
+ entry = self._map.get(path)
+ if entry is None:
+ return DirstateItem()
+ return entry
+
def normalize(self, path):
normed = util.normcase(path)
assert normed == path, b"TODO handling of case folding: %s != %s" % (
@@ -283,9 +317,7 @@
# TODO construct the stat info from the status object?
try:
s = os.stat(os.path.join(cwd, path))
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
continue
r[path] = s
return r
--- a/hgext/git/gitlog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/git/gitlog.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from mercurial.i18n import _
from mercurial.node import (
@@ -31,7 +29,7 @@
pygit2 = gitutil.get_pygit2()
-class baselog(object): # revlog.revlog):
+class baselog: # revlog.revlog):
"""Common implementations between changelog and manifestlog."""
def __init__(self, gr, db):
@@ -71,7 +69,7 @@
return t is not None
-class baselogindex(object):
+class baselogindex:
def __init__(self, log):
self._log = log
@@ -114,7 +112,7 @@
return False
def __iter__(self):
- return iter(pycompat.xrange(len(self)))
+ return iter(range(len(self)))
@property
def filteredrevs(self):
@@ -188,7 +186,7 @@
def shortest(self, node, minlength=1):
nodehex = hex(node)
- for attempt in pycompat.xrange(minlength, len(nodehex) + 1):
+ for attempt in range(minlength, len(nodehex) + 1):
candidate = nodehex[:attempt]
matches = int(
self._db.execute(
@@ -536,8 +534,7 @@
).fetchone()[0]
# This filelog is missing some data. Build the
# filelog, then recurse (which will always find data).
- if pycompat.ispy3:
- commit = commit.decode('ascii')
+ commit = commit.decode('ascii')
index.fill_in_filelog(self.gitrepo, self._db, commit, gp, gn)
return self.parents(node)
else:
--- a/hgext/git/gitutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/git/gitutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,9 +1,6 @@
"""utilities to assist in working with pygit2"""
-from __future__ import absolute_import
-from mercurial.node import bin, hex, sha1nodeconstants
-
-from mercurial import pycompat
+from mercurial.node import bin, sha1nodeconstants
pygit2_module = None
@@ -39,14 +36,12 @@
pygit2 and sqlite both need nodes as strings, not bytes.
"""
assert len(n) == 20
- return pycompat.sysstr(hex(n))
+ return n.hex()
def fromgitnode(n):
"""Opposite of togitnode."""
assert len(n) == 40
- if pycompat.ispy3:
- return bin(n.encode('ascii'))
return bin(n)
--- a/hgext/git/index.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/git/index.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import collections
import os
import sqlite3
--- a/hgext/git/manifest.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/git/manifest.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from mercurial import (
match as matchmod,
pathutil,
@@ -17,7 +15,7 @@
@interfaceutil.implementer(repository.imanifestdict)
-class gittreemanifest(object):
+class gittreemanifest:
"""Expose git trees (and optionally a builder's overlay) as a manifestdict.
Very similar to mercurial.manifest.treemanifest.
@@ -260,7 +258,7 @@
@interfaceutil.implementer(repository.imanifestrevisionstored)
-class gittreemanifestctx(object):
+class gittreemanifestctx:
def __init__(self, repo, gittree):
self._repo = repo
self._tree = gittree
@@ -281,7 +279,7 @@
@interfaceutil.implementer(repository.imanifestrevisionwritable)
-class memgittreemanifestctx(object):
+class memgittreemanifestctx:
def __init__(self, repo, tree):
self._repo = repo
self._tree = tree
--- a/hgext/githelp.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/githelp.py Thu Jun 16 15:28:54 2022 +0200
@@ -15,7 +15,6 @@
produced.
"""
-from __future__ import absolute_import
import getopt
import re
@@ -116,14 +115,14 @@
opts = dict(
[
(k, convert(v)) if isinstance(v, bytes) else (k, v)
- for k, v in pycompat.iteritems(opts)
+ for k, v in opts.items()
]
)
return args, opts
-class Command(object):
+class Command:
def __init__(self, name):
self.name = name
self.args = []
@@ -132,7 +131,7 @@
def __bytes__(self):
cmd = b"hg " + self.name
if self.opts:
- for k, values in sorted(pycompat.iteritems(self.opts)):
+ for k, values in sorted(self.opts.items()):
for v in values:
if v:
if isinstance(v, int):
@@ -164,7 +163,7 @@
return AndCommand(self, other)
-class AndCommand(object):
+class AndCommand:
def __init__(self, left, right):
self.left = left
self.right = right
--- a/hgext/gpg.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/gpg.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
'''commands to sign and verify changesets'''
-from __future__ import absolute_import
import binascii
import os
@@ -65,7 +64,7 @@
help.CATEGORY_NAMES[_HELP_CATEGORY] = b'Signing changes (GPG)'
-class gpg(object):
+class gpg:
def __init__(self, path, key=None):
self.path = path
self.key = (key and b" --local-user \"%s\"" % key) or b""
--- a/hgext/graphlog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/graphlog.py Thu Jun 16 15:28:54 2022 +0200
@@ -15,7 +15,6 @@
revision graph is also shown.
'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/hgk.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/hgk.py Thu Jun 16 15:28:54 2022 +0200
@@ -34,7 +34,6 @@
vdiff on hovered and selected revisions.
'''
-from __future__ import absolute_import
import os
@@ -246,7 +245,7 @@
else:
i -= chunk
- for x in pycompat.xrange(chunk):
+ for x in range(chunk):
if i + x >= count:
l[chunk - x :] = [0] * (chunk - x)
break
@@ -257,7 +256,7 @@
else:
if (i + x) in repo:
l[x] = 1
- for x in pycompat.xrange(chunk - 1, -1, -1):
+ for x in range(chunk - 1, -1, -1):
if l[x] != 0:
yield (i + x, full is not None and l[x] or None)
if i == 0:
@@ -268,7 +267,7 @@
if len(ar) == 0:
return 1
mask = 0
- for i in pycompat.xrange(len(ar)):
+ for i in range(len(ar)):
if sha in reachable[i]:
mask |= 1 << i
@@ -377,9 +376,7 @@
"""start interactive history viewer"""
opts = pycompat.byteskwargs(opts)
os.chdir(repo.root)
- optstr = b' '.join(
- [b'--%s %s' % (k, v) for k, v in pycompat.iteritems(opts) if v]
- )
+ optstr = b' '.join([b'--%s %s' % (k, v) for k, v in opts.items() if v])
if repo.filtername is None:
optstr += b'--hidden'
--- a/hgext/highlight/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/highlight/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -26,7 +26,6 @@
match (even matches with a low confidence score) will be used.
"""
-from __future__ import absolute_import
from . import highlight
from mercurial.hgweb import (
--- a/hgext/highlight/highlight.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/highlight/highlight.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
# The original module was split in an interface and an implementation
# file to defer pygments loading and speedup extension setup.
-from __future__ import absolute_import
from mercurial import demandimport
--- a/hgext/histedit.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/histedit.py Thu Jun 16 15:28:54 2022 +0200
@@ -190,7 +190,6 @@
"""
-from __future__ import absolute_import
# chistedit dependencies that are not available everywhere
try:
@@ -200,8 +199,10 @@
fcntl = None
termios = None
+import binascii
import functools
import os
+import pickle
import struct
from mercurial.i18n import _
@@ -245,7 +246,6 @@
urlutil,
)
-pickle = util.pickle
cmdtable = {}
command = registrar.command(cmdtable)
@@ -352,7 +352,7 @@
return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])
-class histeditstate(object):
+class histeditstate:
def __init__(self, repo):
self.repo = repo
self.actions = None
@@ -455,7 +455,7 @@
rules = []
rulelen = int(lines[index])
index += 1
- for i in pycompat.xrange(rulelen):
+ for i in range(rulelen):
ruleaction = lines[index]
index += 1
rule = lines[index]
@@ -466,7 +466,7 @@
replacements = []
replacementlen = int(lines[index])
index += 1
- for i in pycompat.xrange(replacementlen):
+ for i in range(replacementlen):
replacement = lines[index]
original = bin(replacement[:40])
succ = [
@@ -491,7 +491,7 @@
return self.repo.vfs.exists(b'histedit-state')
-class histeditaction(object):
+class histeditaction:
def __init__(self, state, node):
self.state = state
self.repo = state.repo
@@ -505,7 +505,7 @@
# Check for validation of rule ids and get the rulehash
try:
rev = bin(ruleid)
- except TypeError:
+ except binascii.Error:
try:
_ctx = scmutil.revsingle(state.repo, ruleid)
rulehash = _ctx.hex()
@@ -553,9 +553,7 @@
summary = cmdutil.rendertemplate(
ctx, ui.config(b'histedit', b'summary-template')
)
- # Handle the fact that `''.splitlines() => []`
- summary = summary.splitlines()[0] if summary else b''
- line = b'%s %s %s' % (self.verb, ctx, summary)
+ line = b'%s %s %s' % (self.verb, ctx, stringutil.firstline(summary))
# trim to 75 columns by default so it's not stupidly wide in my editor
# (the 5 more are left for verb)
maxlen = self.repo.ui.configint(b'histedit', b'linelen')
@@ -1143,7 +1141,7 @@
return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' '))
-class histeditrule(object):
+class histeditrule:
def __init__(self, ui, ctx, pos, action=b'pick'):
self.ui = ui
self.ctx = ctx
@@ -1193,7 +1191,7 @@
# This is split off from the prefix property so that we can
# separately make the description for 'roll' red (since it
# will get discarded).
- return self.ctx.description().splitlines()[0].strip()
+ return stringutil.firstline(self.ctx.description())
def checkconflicts(self, other):
if other.pos > self.pos and other.origpos <= self.origpos:
@@ -1243,7 +1241,7 @@
return line[: n - 2] + b' >'
-class _chistedit_state(object):
+class _chistedit_state:
def __init__(
self,
repo,
@@ -1292,7 +1290,7 @@
line = b"bookmark: %s" % b' '.join(bms)
win.addstr(3, 1, line[:length])
- line = b"summary: %s" % (ctx.description().splitlines()[0])
+ line = b"summary: %s" % stringutil.firstline(ctx.description())
win.addstr(4, 1, line[:length])
line = b"files: "
@@ -1576,7 +1574,7 @@
start = min(old_rule_pos, new_rule_pos)
end = max(old_rule_pos, new_rule_pos)
- for r in pycompat.xrange(start, end + 1):
+ for r in range(start, end + 1):
rules[new_rule_pos].checkconflicts(rules[r])
rules[old_rule_pos].checkconflicts(rules[r])
@@ -2102,7 +2100,7 @@
mapping, tmpnodes, created, ntm = processreplacement(state)
if mapping:
- for prec, succs in pycompat.iteritems(mapping):
+ for prec, succs in mapping.items():
if not succs:
ui.debug(b'histedit: %s is dropped\n' % short(prec))
else:
@@ -2140,7 +2138,7 @@
nodechanges = fd(
{
hf(oldn): fl([hf(n) for n in newn], name=b'node')
- for oldn, newn in pycompat.iteritems(mapping)
+ for oldn, newn in mapping.items()
},
key=b"oldnode",
value=b"newnodes",
@@ -2322,12 +2320,7 @@
def _getsummary(ctx):
- # a common pattern is to extract the summary but default to the empty
- # string
- summary = ctx.description() or b''
- if summary:
- summary = summary.splitlines()[0]
- return summary
+ return stringutil.firstline(ctx.description())
def bootstrapcontinue(ui, state, opts):
@@ -2388,7 +2381,7 @@
tsum = summary[len(fword) + 1 :].lstrip()
# safe but slow: reverse iterate over the actions so we
# don't clash on two commits having the same summary
- for na, l in reversed(list(pycompat.iteritems(newact))):
+ for na, l in reversed(list(newact.items())):
actx = repo[na.node]
asum = _getsummary(actx)
if asum == tsum:
@@ -2401,7 +2394,7 @@
# copy over and flatten the new list
actions = []
- for na, l in pycompat.iteritems(newact):
+ for na, l in newact.items():
actions.append(na)
actions += l
--- a/hgext/hooklib/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/hooklib/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -13,7 +13,6 @@
extension as option. The functionality itself is planned to be supported
long-term.
"""
-from __future__ import absolute_import
from . import (
changeset_obsoleted,
changeset_published,
--- a/hgext/hooklib/changeset_obsoleted.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/hooklib/changeset_obsoleted.py Thu Jun 16 15:28:54 2022 +0200
@@ -17,7 +17,6 @@
python:hgext.hooklib.changeset_obsoleted.hook
"""
-from __future__ import absolute_import
import email.errors as emailerrors
import email.utils as emailutils
@@ -115,7 +114,7 @@
msg['From'] = mail.addressencode(ui, sender, n.charsets, n.test)
msg['To'] = ', '.join(sorted(subs))
- msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
+ msgtext = msg.as_bytes()
if ui.configbool(b'notify', b'test'):
ui.write(msgtext)
if not msgtext.endswith(b'\n'):
--- a/hgext/hooklib/changeset_published.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/hooklib/changeset_published.py Thu Jun 16 15:28:54 2022 +0200
@@ -17,7 +17,6 @@
python:hgext.hooklib.changeset_published.hook
"""
-from __future__ import absolute_import
import email.errors as emailerrors
import email.utils as emailutils
@@ -114,7 +113,7 @@
msg['From'] = mail.addressencode(ui, sender, n.charsets, n.test)
msg['To'] = ', '.join(sorted(subs))
- msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
+ msgtext = msg.as_bytes()
if ui.configbool(b'notify', b'test'):
ui.write(msgtext)
if not msgtext.endswith(b'\n'):
--- a/hgext/hooklib/enforce_draft_commits.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/hooklib/enforce_draft_commits.py Thu Jun 16 15:28:54 2022 +0200
@@ -14,7 +14,6 @@
python:hgext.hooklib.enforce_draft_commits.hook
"""
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/hooklib/reject_merge_commits.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/hooklib/reject_merge_commits.py Thu Jun 16 15:28:54 2022 +0200
@@ -14,7 +14,6 @@
python:hgext.hooklib.reject_merge_commits.hook
"""
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/hooklib/reject_new_heads.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/hooklib/reject_new_heads.py Thu Jun 16 15:28:54 2022 +0200
@@ -14,7 +14,6 @@
python:hgext.hooklib.reject_new_heads.hook
"""
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/infinitepush/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/infinitepush/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -87,11 +87,9 @@
bookmarks = True
"""
-from __future__ import absolute_import
import collections
import contextlib
-import errno
import functools
import logging
import os
@@ -287,7 +285,7 @@
return remotebookmark
-class bundlestore(object):
+class bundlestore:
def __init__(self, repo):
self._repo = repo
storetype = self._repo.ui.config(b'infinitepush', b'storetype')
@@ -406,7 +404,7 @@
def wireprotolistkeyspatterns(repo, proto, namespace, patterns):
patterns = wireprototypes.decodelist(patterns)
- d = pycompat.iteritems(repo.listkeys(encoding.tolocal(namespace), patterns))
+ d = repo.listkeys(encoding.tolocal(namespace), patterns).items()
return pushkey.encodekeys(d)
@@ -420,7 +418,7 @@
if pattern.endswith(b'*'):
pattern = b're:^' + pattern[:-1] + b'.*'
kind, pat, matcher = stringutil.stringmatcher(pattern)
- for bookmark, node in pycompat.iteritems(bookmarks):
+ for bookmark, node in bookmarks.items():
if matcher(bookmark):
results[bookmark] = node
return results
@@ -543,7 +541,7 @@
if part.type == b'changegroup':
haschangegroup = True
newpart = bundle2.bundlepart(part.type, data=part.read())
- for key, value in pycompat.iteritems(part.params):
+ for key, value in part.params.items():
newpart.addparam(key, value)
parts.append(newpart)
@@ -795,7 +793,7 @@
# saveremotenames expects 20 byte binary nodes for branches
branches[rname].append(bin(hexnode))
- for bookmark, hexnode in pycompat.iteritems(newbookmarks):
+ for bookmark, hexnode in newbookmarks.items():
bookmarks[bookmark] = hexnode
remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks)
@@ -805,7 +803,7 @@
return
with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
changes = []
- for scratchbook, node in pycompat.iteritems(bookmarks):
+ for scratchbook, node in bookmarks.items():
changectx = repo[node]
changes.append((scratchbook, changectx.node()))
repo._bookmarks.applychanges(repo, tr, changes)
@@ -1046,7 +1044,7 @@
bundle2._processpart(op, part)
else:
bundlepart = bundle2.bundlepart(part.type, data=part.read())
- for key, value in pycompat.iteritems(part.params):
+ for key, value in part.params.items():
bundlepart.addparam(key, value)
# Certain parts require a response
@@ -1138,7 +1136,7 @@
# differs from previous behavior, we need to put it behind a
# config flag for incremental rollout.
bundlepart = bundle2.bundlepart(part.type, data=part.read())
- for key, value in pycompat.iteritems(part.params):
+ for key, value in part.params.items():
bundlepart.addparam(key, value)
# Certain parts require a response
@@ -1308,9 +1306,8 @@
finally:
try:
os.unlink(bundlefile)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
return 1
@@ -1324,9 +1321,7 @@
b'new': newnode,
b'old': oldnode,
}
- op.reply.newpart(
- b'pushkey', mandatoryparams=pycompat.iteritems(params)
- )
+ op.reply.newpart(b'pushkey', mandatoryparams=params.items())
def bundle2pushkey(orig, op, part):
--- a/hgext/infinitepush/bundleparts.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/infinitepush/bundleparts.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial.node import hex
@@ -13,7 +12,6 @@
changegroup,
error,
extensions,
- pycompat,
revsetlang,
util,
)
@@ -68,7 +66,7 @@
parts.append(
bundle2.bundlepart(
scratchbranchparttype.upper(),
- advisoryparams=pycompat.iteritems(params),
+ advisoryparams=params.items(),
data=cg,
)
)
@@ -103,7 +101,7 @@
return
-class copiedpart(object):
+class copiedpart:
"""a copy of unbundlepart content that can be consumed later"""
def __init__(self, part):
--- a/hgext/infinitepush/common.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/infinitepush/common.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
--- a/hgext/infinitepush/fileindexapi.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/infinitepush/fileindexapi.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,7 +11,6 @@
indexpath = PATH
"""
-from __future__ import absolute_import
import os
--- a/hgext/infinitepush/indexapi.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/infinitepush/indexapi.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,10 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
-class indexapi(object):
+class indexapi:
"""Class that manages access to infinitepush index.
This class is a context manager and all write operations (like
--- a/hgext/infinitepush/sqlindexapi.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/infinitepush/sqlindexapi.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import logging
import os
@@ -14,8 +13,6 @@
import warnings
import mysql.connector
-from mercurial import pycompat
-
from . import indexapi
@@ -180,7 +177,7 @@
self.sqlconnect()
args = []
values = []
- for bookmark, node in pycompat.iteritems(bookmarks):
+ for bookmark, node in bookmarks.items():
args.append(b'(%s, %s, %s)')
values.extend((bookmark, node, self.reponame))
args = b','.join(args)
--- a/hgext/infinitepush/store.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/infinitepush/store.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
# based on bundleheads extension by Gregory Szorc <gps@mozilla.com>
-from __future__ import absolute_import
import abc
import os
@@ -26,7 +25,7 @@
pass
-class abstractbundlestore(object): # pytype: disable=ignored-metaclass
+class abstractbundlestore: # pytype: disable=ignored-metaclass
"""Defines the interface for bundle stores.
A bundle store is an entity that stores raw bundle data. It is a simple
@@ -57,7 +56,7 @@
"""
-class filebundlestore(object):
+class filebundlestore:
"""bundle store in filesystem
meant for storing bundles somewhere on disk and on network filesystems
--- a/hgext/journal.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/journal.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,10 +11,8 @@
"""
-from __future__ import absolute_import
import collections
-import errno
import os
import weakref
@@ -129,7 +127,7 @@
repo = store._repo
if util.safehasattr(repo, 'journal'):
oldmarks = bookmarks.bmstore(repo)
- for mark, value in pycompat.iteritems(store):
+ for mark, value in store.items():
oldvalue = oldmarks.get(mark, repo.nullid)
if value != oldvalue:
repo.journal.record(bookmarktype, mark, oldvalue, value)
@@ -141,9 +139,7 @@
"""A set of shared features for this repository"""
try:
return set(repo.vfs.read(b'shared').splitlines())
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return set()
@@ -167,7 +163,7 @@
pass
while iterable_map:
- value, key, it = order(pycompat.itervalues(iterable_map))
+ value, key, it = order(iterable_map.values())
yield value
try:
iterable_map[key][0] = next(it)
@@ -283,7 +279,7 @@
__str__ = encoding.strmethod(__bytes__)
-class journalstorage(object):
+class journalstorage:
"""Storage for journal entries
Entries are divided over two files; one with entries that pertain to the
--- a/hgext/keyword.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/keyword.py Thu Jun 16 15:28:54 2022 +0200
@@ -83,8 +83,6 @@
'''
-from __future__ import absolute_import
-
import os
import re
import weakref
@@ -237,7 +235,7 @@
return modified, added
-class kwtemplater(object):
+class kwtemplater:
"""
Sets up keyword templates, corresponding keyword regex, and
provides keyword substitution functions.
@@ -515,7 +513,7 @@
kwmaps = _defaultkwmaps(ui)
if uikwmaps:
ui.status(_(b'\tdisabling current template maps\n'))
- for k, v in pycompat.iteritems(kwmaps):
+ for k, v in kwmaps.items():
ui.setconfig(b'keywordmaps', k, v, b'keyword')
else:
ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
@@ -529,7 +527,7 @@
ui.writenoi18n(b'[extensions]\nkeyword =\n')
demoitems(b'keyword', ui.configitems(b'keyword'))
demoitems(b'keywordset', ui.configitems(b'keywordset'))
- demoitems(b'keywordmaps', pycompat.iteritems(kwmaps))
+ demoitems(b'keywordmaps', kwmaps.items())
keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
repo.wvfs.write(fn, keywords)
repo[None].add([fn])
--- a/hgext/largefiles/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -104,7 +104,6 @@
explicitly do so with the --large flag passed to the :hg:`add`
command.
'''
-from __future__ import absolute_import
from mercurial import (
cmdutil,
--- a/hgext/largefiles/basestore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/basestore.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# GNU General Public License version 2 or any later version.
'''base class for store implementations and store-related utility code'''
-from __future__ import absolute_import
from mercurial.i18n import _
@@ -42,7 +41,7 @@
return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail)
-class basestore(object):
+class basestore:
def __init__(self, ui, repo, url):
self.ui = ui
self.repo = repo
--- a/hgext/largefiles/lfcommands.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/lfcommands.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,9 +7,8 @@
# GNU General Public License version 2 or any later version.
'''High-level command function for lfconvert, plus the cmdtable.'''
-from __future__ import absolute_import
-import errno
+import binascii
import os
import shutil
@@ -385,7 +384,7 @@
continue
try:
newid = bin(id)
- except TypeError:
+ except binascii.Error:
ui.warn(_(b'skipping incorrectly formatted id %s\n') % id)
continue
try:
@@ -474,10 +473,8 @@
for lfile in lfiles:
try:
expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
- except IOError as err:
- if err.errno == errno.ENOENT:
- continue # node must be None and standin wasn't found in wctx
- raise
+ except FileNotFoundError:
+ continue # node must be None and standin wasn't found in wctx
if not lfutil.findfile(repo, expectedhash):
toget.append((lfile, expectedhash))
--- a/hgext/largefiles/lfutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/lfutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# GNU General Public License version 2 or any later version.
'''largefiles utility code: must not import other modules in this package.'''
-from __future__ import absolute_import
import contextlib
import copy
@@ -757,7 +756,7 @@
return match
-class automatedcommithook(object):
+class automatedcommithook:
"""Stateful hook to update standins at the 1st commit of resuming
For efficiency, updating standins in the working directory should
--- a/hgext/largefiles/localstore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/localstore.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# GNU General Public License version 2 or any later version.
'''store class for local filesystem'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial.pycompat import open
--- a/hgext/largefiles/overrides.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/overrides.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# GNU General Public License version 2 or any later version.
'''Overridden Mercurial commands and functions for the largefiles extension'''
-from __future__ import absolute_import
import copy
import os
@@ -493,7 +492,7 @@
large = opts.pop('large', False)
if large:
- class fakerepo(object):
+ class fakerepo:
dirstate = lfutil.openlfdirstate(ui, repo)
orig(ui, fakerepo, *pats, **opts)
@@ -714,7 +713,7 @@
copies = orig(ctx1, ctx2, match=match)
updated = {}
- for k, v in pycompat.iteritems(copies):
+ for k, v in copies.items():
updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
return updated
--- a/hgext/largefiles/proto.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/proto.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
--- a/hgext/largefiles/remotestore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/remotestore.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,13 +5,11 @@
# GNU General Public License version 2 or any later version.
'''remote largefile store; the base class for wirestore'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
error,
- pycompat,
util,
)
@@ -53,9 +51,8 @@
def exists(self, hashes):
return {
h: s == 0
- for (h, s) in pycompat.iteritems(
- self._stat(hashes)
- ) # dict-from-generator
+ for (h, s) in self._stat(hashes).items()
+ # dict-from-generator
}
def sendfile(self, filename, hash):
--- a/hgext/largefiles/reposetup.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/reposetup.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# GNU General Public License version 2 or any later version.
'''setup for largefiles repositories: reposetup'''
-from __future__ import absolute_import
import copy
--- a/hgext/largefiles/storefactory.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/storefactory.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import re
--- a/hgext/largefiles/wirestore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/largefiles/wirestore.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
# GNU General Public License version 2 or any later version.
'''largefile store working over Mercurial's wire protocol'''
-from __future__ import absolute_import
from . import (
lfutil,
--- a/hgext/lfs/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/lfs/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -120,7 +120,6 @@
usercache = /path/to/global/cache
"""
-from __future__ import absolute_import
import sys
@@ -400,7 +399,7 @@
def pointer(v):
# In the file spec, version is first and the other keys are sorted.
sortkeyfunc = lambda x: (x[0] != b'version', x)
- items = sorted(pycompat.iteritems(pointers[v]), key=sortkeyfunc)
+ items = sorted(pointers[v].items(), key=sortkeyfunc)
return util.sortdict(items)
makemap = lambda v: {
--- a/hgext/lfs/blobstore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/lfs/blobstore.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import errno
@@ -109,7 +108,7 @@
return None # progress is handled by the worker client
-class local(object):
+class local:
"""Local blobstore for large file contents.
This blobstore is used both as a cache and as a staging area for large blobs
@@ -274,7 +273,7 @@
except (AttributeError, IndexError):
# it might be anything, for example a string
reason = inst.reason
- if isinstance(reason, pycompat.unicode):
+ if isinstance(reason, str):
# SSLError of Python 2.7.9 contains a unicode
reason = encoding.unitolocal(reason)
return reason
@@ -307,7 +306,7 @@
return None
-class _gitlfsremote(object):
+class _gitlfsremote:
def __init__(self, repo, url):
ui = repo.ui
self.ui = ui
@@ -407,7 +406,7 @@
)
def encodestr(x):
- if isinstance(x, pycompat.unicode):
+ if isinstance(x, str):
return x.encode('utf-8')
return x
@@ -643,7 +642,7 @@
getattr(h, "close_all", lambda: None)()
-class _dummyremote(object):
+class _dummyremote:
"""Dummy store storing blobs to temp directory."""
def __init__(self, repo, url):
@@ -662,7 +661,7 @@
tostore.download(p.oid(), fp, None)
-class _nullremote(object):
+class _nullremote:
"""Null store storing blobs to /dev/null."""
def __init__(self, repo, url):
@@ -675,7 +674,7 @@
pass
-class _promptremote(object):
+class _promptremote:
"""Prompt user to set lfs.url when accessed."""
def __init__(self, repo, url):
--- a/hgext/lfs/pointer.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/lfs/pointer.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import re
@@ -41,7 +40,7 @@
def serialize(self):
sortkeyfunc = lambda x: (x[0] != b'version', x)
- items = sorted(pycompat.iteritems(self.validate()), key=sortkeyfunc)
+ items = sorted(self.validate().items(), key=sortkeyfunc)
return b''.join(b'%s %s\n' % (k, v) for k, v in items)
def oid(self):
@@ -63,7 +62,7 @@
def validate(self):
"""raise InvalidPointer on error. return self if there is no error"""
requiredcount = 0
- for k, v in pycompat.iteritems(self):
+ for k, v in self.items():
if k in self._requiredre:
if not self._requiredre[k].match(v):
raise InvalidPointer(
--- a/hgext/lfs/wireprotolfsserver.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/lfs/wireprotolfsserver.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import datetime
import errno
--- a/hgext/lfs/wrapper.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/lfs/wrapper.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import hashlib
@@ -25,7 +24,6 @@
exchange,
exthelper,
localrepo,
- pycompat,
revlog,
scmutil,
util,
@@ -143,7 +141,7 @@
# translate hg filelog metadata to lfs metadata with "x-hg-" prefix
if hgmeta is not None:
- for k, v in pycompat.iteritems(hgmeta):
+ for k, v in hgmeta.items():
metadata[b'x-hg-%s' % k] = v
rawtext = metadata.serialize()
--- a/hgext/logtoprocess.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/logtoprocess.py Thu Jun 16 15:28:54 2022 +0200
@@ -32,7 +32,6 @@
"""
-from __future__ import absolute_import
import os
@@ -45,7 +44,7 @@
testedwith = b'ships-with-hg-core'
-class processlogger(object):
+class processlogger:
"""Map log events to external commands
Arguments are passed on as environment variables.
--- a/hgext/mq.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/mq.py Thu Jun 16 15:28:54 2022 +0200
@@ -62,9 +62,7 @@
in the strip extension.
'''
-from __future__ import absolute_import, print_function
-
-import errno
+
import os
import re
import shutil
@@ -151,7 +149,7 @@
except KeyError:
# note: load is lazy so we could avoid the try-except,
# but I (marmoute) prefer this explicit code.
- class dummyui(object):
+ class dummyui:
def debug(self, msg):
pass
@@ -184,7 +182,7 @@
normname = util.normpath
-class statusentry(object):
+class statusentry:
def __init__(self, node, name):
self.node, self.name = node, name
@@ -294,7 +292,7 @@
return lines
-class patchheader(object):
+class patchheader:
def __init__(self, pf, plainmode=False):
def eatdiff(lines):
while lines:
@@ -462,7 +460,7 @@
the field and a blank line."""
if self.message:
subj = b'subject: ' + self.message[0].lower()
- for i in pycompat.xrange(len(self.comments)):
+ for i in range(len(self.comments)):
if subj == self.comments[i].lower():
del self.comments[i]
self.message = self.message[2:]
@@ -496,7 +494,7 @@
pass
-class queue(object):
+class queue:
def __init__(self, ui, baseui, path, patchdir=None):
self.basepath = path
try:
@@ -552,19 +550,15 @@
try:
lines = self.opener.read(self.statuspath).splitlines()
return list(parselines(lines))
- except IOError as e:
- if e.errno == errno.ENOENT:
- return []
- raise
+ except FileNotFoundError:
+ return []
@util.propertycache
def fullseries(self):
try:
return self.opener.read(self.seriespath).splitlines()
- except IOError as e:
- if e.errno == errno.ENOENT:
- return []
- raise
+ except FileNotFoundError:
+ return []
@util.propertycache
def series(self):
@@ -692,9 +686,7 @@
self.activeguards = []
try:
guards = self.opener.read(self.guardspath).split()
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
guards = []
for i, guard in enumerate(guards):
bad = self.checkguard(guard)
@@ -1141,9 +1133,8 @@
for p in patches:
try:
os.unlink(self.join(p))
- except OSError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
qfinished = []
if numrevs:
@@ -2025,7 +2016,7 @@
# we can't copy a file created by the patch itself
if dst in copies:
del copies[dst]
- for src, dsts in pycompat.iteritems(copies):
+ for src, dsts in copies.items():
for dst in dsts:
repo.dirstate.copy(src, dst)
else:
@@ -2041,7 +2032,7 @@
# if the patch excludes a modified file, mark that
# file with mtime=0 so status can see it.
mm = []
- for i in pycompat.xrange(len(m) - 1, -1, -1):
+ for i in range(len(m) - 1, -1, -1):
if not match1(m[i]):
mm.append(m[i])
del m[i]
@@ -2152,8 +2143,8 @@
raise error.Abort(_(b"patch queue directory already exists"))
try:
os.mkdir(self.path)
- except OSError as inst:
- if inst.errno != errno.EEXIST or not create:
+ except FileExistsError:
+ if not create:
raise
if create:
return self.qrepo(create=True)
@@ -2166,7 +2157,7 @@
else:
start = self.series.index(patch) + 1
unapplied = []
- for i in pycompat.xrange(start, len(self.series)):
+ for i in range(start, len(self.series)):
pushable, reason = self.pushable(i)
if pushable:
unapplied.append((i, self.series[i]))
@@ -2211,7 +2202,7 @@
if not missing:
if self.ui.verbose:
idxwidth = len(b"%d" % (start + length - 1))
- for i in pycompat.xrange(start, start + length):
+ for i in range(start, start + length):
patch = self.series[i]
if patch in applied:
char, state = b'A', b'applied'
@@ -2372,7 +2363,7 @@
def nextpatch(start):
if all_patches or start >= len(self.series):
return start
- for i in pycompat.xrange(start, len(self.series)):
+ for i in range(start, len(self.series)):
p, reason = self.pushable(i)
if p:
return i
@@ -3390,7 +3381,7 @@
raise error.Abort(
_(b'cannot mix -l/--list with options or arguments')
)
- for i in pycompat.xrange(len(q.series)):
+ for i in range(len(q.series)):
status(i)
return
if not args or args[0][0:1] in b'-+':
@@ -3768,18 +3759,14 @@
pushable = lambda i: q.pushable(q.applied[i].name)[0]
if args or opts.get(b'none'):
old_unapplied = q.unapplied(repo)
- old_guarded = [
- i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
- ]
+ old_guarded = [i for i in range(len(q.applied)) if not pushable(i)]
q.setactive(args)
q.savedirty()
if not args:
ui.status(_(b'guards deactivated\n'))
if not opts.get(b'pop') and not opts.get(b'reapply'):
unapplied = q.unapplied(repo)
- guarded = [
- i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
- ]
+ guarded = [i for i in range(len(q.applied)) if not pushable(i)]
if len(unapplied) != len(old_unapplied):
ui.status(
_(
@@ -3826,7 +3813,7 @@
reapply = opts.get(b'reapply') and q.applied and q.applied[-1].name
popped = False
if opts.get(b'pop') or opts.get(b'reapply'):
- for i in pycompat.xrange(len(q.applied)):
+ for i in range(len(q.applied)):
if not pushable(i):
ui.status(_(b'popping guarded patches\n'))
popped = True
@@ -4288,7 +4275,7 @@
entry[1].extend(mqopt)
def dotable(cmdtable):
- for cmd, entry in pycompat.iteritems(cmdtable):
+ for cmd, entry in cmdtable.items():
cmd = cmdutil.parsealiases(cmd)[0]
func = entry[0]
if func.norepo:
--- a/hgext/narrow/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/narrow/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# GNU General Public License version 2 or any later version.
'''create clones which fetch history data for subset of files (EXPERIMENTAL)'''
-from __future__ import absolute_import
from mercurial import (
localrepo,
--- a/hgext/narrow/narrowbundle2.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/narrow/narrowbundle2.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
import struct
--- a/hgext/narrow/narrowcommands.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/narrow/narrowcommands.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import itertools
import os
--- a/hgext/narrow/narrowdirstate.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/narrow/narrowdirstate.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import error
--- a/hgext/narrow/narrowrepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/narrow/narrowrepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial import wireprototypes
--- a/hgext/narrow/narrowtemplates.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/narrow/narrowtemplates.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial import (
registrar,
--- a/hgext/narrow/narrowwirepeer.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/narrow/narrowwirepeer.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial import (
bundle2,
--- a/hgext/notify.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/notify.py Thu Jun 16 15:28:54 2022 +0200
@@ -154,7 +154,6 @@
references. See also ``notify.strip``.
'''
-from __future__ import absolute_import
import email.errors as emailerrors
import email.utils as emailutils
@@ -315,7 +314,7 @@
}
-class notifier(object):
+class notifier:
'''email notification class.'''
def __init__(self, ui, repo, hooktype):
@@ -466,7 +465,7 @@
# create fresh mime message from scratch
# (multipart templates must take care of this themselves)
headers = msg.items()
- payload = msg.get_payload(decode=pycompat.ispy3)
+ payload = msg.get_payload(decode=True)
# for notification prefer readability over data precision
msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
# reinstate custom headers
@@ -525,7 +524,7 @@
)
msg['To'] = ', '.join(sorted(subs))
- msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
+ msgtext = msg.as_bytes()
if self.test:
self.ui.write(msgtext)
if not msgtext.endswith(b'\n'):
--- a/hgext/pager.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/pager.py Thu Jun 16 15:28:54 2022 +0200
@@ -21,7 +21,6 @@
[pager]
attend-cat = false
'''
-from __future__ import absolute_import
from mercurial import (
cmdutil,
--- a/hgext/patchbomb.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/patchbomb.py Thu Jun 16 15:28:54 2022 +0200
@@ -71,13 +71,11 @@
You can set patchbomb to always ask for confirmation by setting
``patchbomb.confirm`` to true.
'''
-from __future__ import absolute_import
import email.encoders as emailencoders
import email.mime.base as emimebase
import email.mime.multipart as emimemultipart
import email.utils as eutil
-import errno
import os
import socket
@@ -985,9 +983,8 @@
try:
generator.flatten(m, False)
ui.write(b'\n')
- except IOError as inst:
- if inst.errno != errno.EPIPE:
- raise
+ except BrokenPipeError:
+ pass
else:
if not sendmail:
sendmail = mail.connect(ui, mbox=mbox)
--- a/hgext/phabricator.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/phabricator.py Thu Jun 16 15:28:54 2022 +0200
@@ -57,11 +57,11 @@
example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""
-from __future__ import absolute_import
import base64
import contextlib
import hashlib
+import io
import itertools
import json
import mimetypes
@@ -219,9 +219,7 @@
rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
# json.loads only returns unicode strings
arcconfig = pycompat.rapply(
- lambda x: encoding.unitolocal(x)
- if isinstance(x, pycompat.unicode)
- else x,
+ lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
pycompat.json_loads(rawparams),
)
@@ -447,9 +445,7 @@
time.sleep(retry_interval)
ui.debug(b'Conduit Response: %s\n' % body)
parsed = pycompat.rapply(
- lambda x: encoding.unitolocal(x)
- if isinstance(x, pycompat.unicode)
- else x,
+ lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
# json.loads only accepts bytes from py3.6+
pycompat.json_loads(encoding.unifromlocal(body)),
)
@@ -473,9 +469,7 @@
rawparams = encoding.unifromlocal(ui.fin.read())
# json.loads only returns unicode strings
params = pycompat.rapply(
- lambda x: encoding.unitolocal(x)
- if isinstance(x, pycompat.unicode)
- else x,
+ lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
pycompat.json_loads(rawparams),
)
# json.dumps only accepts unicode strings
@@ -674,7 +668,7 @@
return output.getvalue()
-class DiffChangeType(object):
+class DiffChangeType:
ADD = 1
CHANGE = 2
DELETE = 3
@@ -685,7 +679,7 @@
MULTICOPY = 8
-class DiffFileType(object):
+class DiffFileType:
TEXT = 1
IMAGE = 2
BINARY = 3
@@ -706,7 +700,7 @@
@attr.s
-class phabchange(object):
+class phabchange:
"""Represents a Differential change, owns Differential hunks and owned by a
Differential diff. Each one represents one file in a diff.
"""
@@ -747,7 +741,7 @@
@attr.s
-class phabdiff(object):
+class phabdiff:
"""Represents a Differential diff, owns Differential changes. Corresponds
to a commit.
"""
@@ -2200,7 +2194,7 @@
for drev, contents in patches:
ui.status(_(b'applying patch from D%s\n') % drev)
- with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
+ with patch.extract(ui, io.BytesIO(contents)) as patchdata:
msg, node, rej = cmdutil.tryimportone(
ui,
repo,
@@ -2279,7 +2273,7 @@
drevmap = getdrevmap(repo, logcmdutil.revrange(repo, [revs]))
specs = []
unknown = []
- for r, d in pycompat.iteritems(drevmap):
+ for r, d in drevmap.items():
if d is None:
unknown.append(repo[r])
else:
@@ -2364,7 +2358,7 @@
revs = repo.revs('sort(_underway(), topo)')
drevmap = getdrevmap(repo, revs)
unknownrevs, drevids, revsbydrevid = [], set(), {}
- for rev, drevid in pycompat.iteritems(drevmap):
+ for rev, drevid in drevmap.items():
if drevid is not None:
drevids.add(drevid)
revsbydrevid.setdefault(drevid, set()).add(rev)
--- a/hgext/rebase.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/rebase.py Thu Jun 16 15:28:54 2022 +0200
@@ -14,9 +14,7 @@
https://mercurial-scm.org/wiki/RebaseExtension
'''
-from __future__ import absolute_import
-import errno
import os
from mercurial.i18n import _
@@ -160,7 +158,7 @@
)
-class rebaseruntime(object):
+class rebaseruntime:
"""This class is a container for rebase runtime state"""
def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
@@ -244,7 +242,7 @@
f.write(b'%d\n' % int(self.keepbranchesf))
f.write(b'%s\n' % (self.activebookmark or b''))
destmap = self.destmap
- for d, v in pycompat.iteritems(self.state):
+ for d, v in self.state.items():
oldrev = repo[d].hex()
if v >= 0:
newrev = repo[v].hex()
@@ -506,7 +504,7 @@
# commits.
self.storestatus(tr)
- cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
+ cands = [k for k, v in self.state.items() if v == revtodo]
p = repo.ui.makeprogress(
_(b"rebasing"), unit=_(b'changesets'), total=len(cands)
)
@@ -1337,7 +1335,7 @@
# emulate the old behavior, showing "nothing to rebase" (a better
# behavior may be abort with "cannot find branching point" error)
bpbase.clear()
- for bp, bs in pycompat.iteritems(bpbase): # calculate roots
+ for bp, bs in bpbase.items(): # calculate roots
roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
rebaseset = repo.revs(b'%ld::', roots)
@@ -1941,9 +1939,7 @@
f = repo.vfs(b"last-message.txt")
collapsemsg = f.readline().strip()
f.close()
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
if isabort:
# Oh well, just abort like normal
collapsemsg = b''
@@ -2104,7 +2100,7 @@
fl = fm.formatlist
fd = fm.formatdict
changes = {}
- for oldns, newn in pycompat.iteritems(replacements):
+ for oldns, newn in replacements.items():
for oldn in oldns:
changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
@@ -2258,7 +2254,7 @@
msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
ui.write(msg)
return
- numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
+ numrebased = len([i for i in state.values() if i >= 0])
# i18n: column positioning for "hg summary"
ui.write(
_(b'rebase: %s, %s (rebase --continue)\n')
--- a/hgext/record.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/record.py Thu Jun 16 15:28:54 2022 +0200
@@ -10,7 +10,6 @@
The feature provided by this extension has been moved into core Mercurial as
:hg:`commit --interactive`.'''
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/hgext/releasenotes.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/releasenotes.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,10 +11,8 @@
process simpler by automating it.
"""
-from __future__ import absolute_import
import difflib
-import errno
import re
from mercurial.i18n import _
@@ -78,7 +76,7 @@
BULLET_SECTION = _(b'Other Changes')
-class parsedreleasenotes(object):
+class parsedreleasenotes:
def __init__(self):
self.sections = {}
@@ -171,14 +169,14 @@
self.addnontitleditem(section, paragraphs)
-class releasenotessections(object):
+class releasenotessections:
def __init__(self, ui, repo=None):
if repo:
sections = util.sortdict(DEFAULT_SECTIONS)
custom_sections = getcustomadmonitions(repo)
if custom_sections:
sections.update(custom_sections)
- self._sections = list(pycompat.iteritems(sections))
+ self._sections = list(sections.items())
else:
self._sections = list(DEFAULT_SECTIONS)
@@ -689,10 +687,7 @@
try:
with open(file_, b'rb') as fh:
notes = parsereleasenotesfile(sections, fh.read())
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
-
+ except FileNotFoundError:
notes = parsedreleasenotes()
notes.merge(ui, incoming)
--- a/hgext/relink.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/relink.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# GNU General Public License version 2 or any later version.
"""recreates hardlinks between repository clones"""
-from __future__ import absolute_import
import os
import stat
--- a/hgext/remotefilelog/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -124,7 +124,6 @@
corruption before returning metadata
"""
-from __future__ import absolute_import
import os
import time
--- a/hgext/remotefilelog/basepack.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/basepack.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import collections
import errno
import mmap
@@ -15,7 +13,6 @@
from mercurial.node import hex
from mercurial import (
policy,
- pycompat,
util,
vfs as vfsmod,
)
@@ -56,16 +53,8 @@
# loaded the pack list.
REFRESHRATE = 0.1
-if pycompat.isposix and not pycompat.ispy3:
- # With glibc 2.7+ the 'e' flag uses O_CLOEXEC when opening.
- # The 'e' flag will be ignored on older versions of glibc.
- # Python 3 can't handle the 'e' flag.
- PACKOPENMODE = b'rbe'
-else:
- PACKOPENMODE = b'rb'
-
-class _cachebackedpacks(object):
+class _cachebackedpacks:
def __init__(self, packs, cachesize):
self._packs = set(packs)
self._lrucache = util.lrucachedict(cachesize)
@@ -111,7 +100,7 @@
self._lastpack = None
-class basepackstore(object):
+class basepackstore:
# Default cache size limit for the pack files.
DEFAULTCACHESIZE = 100
@@ -177,9 +166,8 @@
)
else:
ids.add(id)
- except OSError as ex:
- if ex.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
def _getavailablepackfilessorted(self):
"""Like `_getavailablepackfiles`, but also sorts the files by mtime,
@@ -269,7 +257,7 @@
return newpacks
-class versionmixin(object):
+class versionmixin:
# Mix-in for classes with multiple supported versions
VERSION = None
SUPPORTED_VERSIONS = [2]
@@ -320,7 +308,7 @@
params = self.params
rawfanout = self._index[FANOUTSTART : FANOUTSTART + params.fanoutsize]
fanouttable = []
- for i in pycompat.xrange(0, params.fanoutcount):
+ for i in range(0, params.fanoutcount):
loc = i * 4
fanoutentry = struct.unpack(b'!I', rawfanout[loc : loc + 4])[0]
fanouttable.append(fanoutentry)
@@ -345,12 +333,12 @@
self._data.close()
# TODO: use an opener/vfs to access these paths
- with open(self.indexpath, PACKOPENMODE) as indexfp:
+ with open(self.indexpath, b'rb') as indexfp:
# memory-map the file, size 0 means whole file
self._index = mmap.mmap(
indexfp.fileno(), 0, access=mmap.ACCESS_READ
)
- with open(self.packpath, PACKOPENMODE) as datafp:
+ with open(self.packpath, b'rb') as datafp:
self._data = mmap.mmap(datafp.fileno(), 0, access=mmap.ACCESS_READ)
self._pagedin = 0
@@ -528,7 +516,7 @@
self.idxfp.write(struct.pack(b'!BB', self.VERSION, config))
-class indexparams(object):
+class indexparams:
__slots__ = (
'fanoutprefix',
'fanoutstruct',
--- a/hgext/remotefilelog/basestore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/basestore.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-
-import errno
import os
import shutil
import stat
@@ -21,7 +18,7 @@
)
-class basestore(object):
+class basestore:
def __init__(self, repo, path, reponame, shared=False):
"""Creates a remotefilelog store object for the given repo name.
@@ -148,7 +145,7 @@
filenamemap = self._resolvefilenames(existing.keys())
- for filename, sha in pycompat.iteritems(filenamemap):
+ for filename, sha in filenamemap.items():
yield (filename, existing[sha])
def _resolvefilenames(self, hashes):
@@ -173,7 +170,7 @@
# Scan the changelog until we've found every file name
cl = self.repo.unfiltered().changelog
- for rev in pycompat.xrange(len(cl) - 1, -1, -1):
+ for rev in range(len(cl) - 1, -1, -1):
if not missingfilename:
break
files = cl.readfiles(cl.node(rev))
@@ -346,10 +343,7 @@
count += 1
try:
pathstat = os.stat(path)
- except OSError as e:
- # errno.ENOENT = no such file or directory
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
msg = _(
b"warning: file %s was removed by another process\n"
)
@@ -364,10 +358,7 @@
else:
try:
shallowutil.unlinkfile(path)
- except OSError as e:
- # errno.ENOENT = no such file or directory
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
msg = _(
b"warning: file %s was removed by another "
b"process\n"
@@ -390,10 +381,7 @@
atime, oldpath, oldpathstat = queue.get()
try:
shallowutil.unlinkfile(oldpath)
- except OSError as e:
- # errno.ENOENT = no such file or directory
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
msg = _(
b"warning: file %s was removed by another process\n"
)
@@ -414,7 +402,7 @@
)
-class baseunionstore(object):
+class baseunionstore:
def __init__(self, *args, **kwargs):
# If one of the functions that iterates all of the stores is about to
# throw a KeyError, try this many times with a full refresh between
--- a/hgext/remotefilelog/connectionpool.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/connectionpool.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,11 +5,9 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial import (
hg,
- pycompat,
sshpeer,
util,
)
@@ -17,7 +15,7 @@
_sshv1peer = sshpeer.sshv1peer
-class connectionpool(object):
+class connectionpool:
def __init__(self, repo):
self._repo = repo
self._pool = dict()
@@ -61,13 +59,13 @@
return conn
def close(self):
- for pathpool in pycompat.itervalues(self._pool):
+ for pathpool in self._pool.values():
for conn in pathpool:
conn.close()
del pathpool[:]
-class connection(object):
+class connection:
def __init__(self, pool, peer):
self._pool = pool
self.peer = peer
--- a/hgext/remotefilelog/constants.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/constants.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import struct
from mercurial.i18n import _
--- a/hgext/remotefilelog/contentstore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/contentstore.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import threading
from mercurial.node import (
@@ -9,7 +7,6 @@
from mercurial.pycompat import getattr
from mercurial import (
mdiff,
- pycompat,
revlog,
)
from . import (
@@ -19,7 +16,7 @@
)
-class ChainIndicies(object):
+class ChainIndicies:
"""A static class for easy reference to the delta chain indicies."""
# The filename of this revision delta
@@ -231,7 +228,7 @@
self._threaddata.metacache = (node, meta)
-class remotecontentstore(object):
+class remotecontentstore:
def __init__(self, ui, fileservice, shared):
self._fileservice = fileservice
# type(shared) is usually remotefilelogcontentstore
@@ -276,7 +273,7 @@
pass
-class manifestrevlogstore(object):
+class manifestrevlogstore:
def __init__(self, repo):
self._store = repo.store
self._svfs = repo.svfs
@@ -368,7 +365,7 @@
rl = revlog.revlog(self._svfs, radix=b'00manifesttree')
startlinkrev = self._repackstartlinkrev
endlinkrev = self._repackendlinkrev
- for rev in pycompat.xrange(len(rl) - 1, -1, -1):
+ for rev in range(len(rl) - 1, -1, -1):
linkrev = rl.linkrev(rev)
if linkrev < startlinkrev:
break
@@ -385,7 +382,7 @@
treename = path[5 : -len(b'/00manifest')]
rl = revlog.revlog(self._svfs, indexfile=path[:-2])
- for rev in pycompat.xrange(len(rl) - 1, -1, -1):
+ for rev in range(len(rl) - 1, -1, -1):
linkrev = rl.linkrev(rev)
if linkrev < startlinkrev:
break
--- a/hgext/remotefilelog/datapack.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/datapack.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import struct
import zlib
@@ -9,7 +7,6 @@
)
from mercurial.i18n import _
from mercurial import (
- pycompat,
util,
)
from . import (
@@ -234,7 +231,7 @@
# Scan forward to find the first non-same entry, which is the upper
# bound.
- for i in pycompat.xrange(fanoutkey + 1, params.fanoutcount):
+ for i in range(fanoutkey + 1, params.fanoutcount):
end = fanout[i] + params.indexstart
if end != start:
break
@@ -455,7 +452,7 @@
def createindex(self, nodelocations, indexoffset):
entries = sorted(
- (n, db, o, s) for n, (db, o, s) in pycompat.iteritems(self.entries)
+ (n, db, o, s) for n, (db, o, s) in self.entries.items()
)
rawindex = b''
--- a/hgext/remotefilelog/debugcommands.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/debugcommands.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
import zlib
@@ -82,7 +81,7 @@
os.remove(temppath)
r = filelog.filelog(repo.svfs, b'temprevlog')
- class faket(object):
+ class faket:
def add(self, a, b, c):
pass
@@ -211,7 +210,7 @@
continue
filepath = os.path.join(root, file)
size, firstnode, mapping = parsefileblob(filepath, decompress)
- for p1, p2, linknode, copyfrom in pycompat.itervalues(mapping):
+ for p1, p2, linknode, copyfrom in mapping.values():
if linknode == sha1nodeconstants.nullid:
actualpath = os.path.relpath(root, path)
key = fileserverclient.getcachekey(
--- a/hgext/remotefilelog/fileserverclient.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/fileserverclient.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import io
import os
@@ -140,7 +139,7 @@
peer.__class__ = remotefilepeer
-class cacheconnection(object):
+class cacheconnection:
"""The connection for communicating with the remote cache. Performs
gets and sets by communicating with an external process that has the
cache-specific implementation.
@@ -303,7 +302,7 @@
pipeo.flush()
-class fileserverclient(object):
+class fileserverclient:
"""A client for requesting files from the remote file server."""
def __init__(self, repo):
@@ -518,7 +517,7 @@
# returns cache misses. This enables tests to run easily
# and may eventually allow us to be a drop in replacement
# for the largefiles extension.
- class simplecache(object):
+ class simplecache:
def __init__(self):
self.missingids = []
self.connected = True
--- a/hgext/remotefilelog/historypack.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/historypack.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import struct
from mercurial.node import (
@@ -7,7 +5,6 @@
sha1nodeconstants,
)
from mercurial import (
- pycompat,
util,
)
from mercurial.utils import hashutil
@@ -209,7 +206,7 @@
start = fanout[fanoutkey] + params.indexstart
indexend = self._indexend
- for i in pycompat.xrange(fanoutkey + 1, params.fanoutcount):
+ for i in range(fanoutkey + 1, params.fanoutcount):
end = fanout[i] + params.indexstart
if end != start:
break
@@ -325,7 +322,7 @@
)[0]
offset += ENTRYCOUNTSIZE
- for i in pycompat.xrange(revcount):
+ for i in range(revcount):
entry = struct.unpack(
PACKFORMAT, data[offset : offset + PACKENTRYLENGTH]
)
@@ -521,7 +518,7 @@
files = (
(hashutil.sha1(filename).digest(), filename, offset, size)
- for filename, (offset, size) in pycompat.iteritems(self.files)
+ for filename, (offset, size) in self.files.items()
)
files = sorted(files)
@@ -557,7 +554,7 @@
)
nodeindexoffset += constants.FILENAMESIZE + len(filename)
- for node, location in sorted(pycompat.iteritems(nodelocations)):
+ for node, location in sorted(nodelocations.items()):
nodeindexentries.append(
struct.pack(nodeindexformat, node, location)
)
--- a/hgext/remotefilelog/metadatastore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/metadatastore.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from mercurial.node import (
hex,
sha1nodeconstants,
@@ -143,7 +141,7 @@
)
-class remotemetadatastore(object):
+class remotemetadatastore:
def __init__(self, ui, fileservice, shared):
self._fileservice = fileservice
self._shared = shared
--- a/hgext/remotefilelog/remotefilectx.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/remotefilectx.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import time
--- a/hgext/remotefilelog/remotefilelog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/remotefilelog.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import os
@@ -16,7 +15,6 @@
ancestor,
error,
mdiff,
- pycompat,
revlog,
)
from mercurial.utils import storageutil
@@ -29,7 +27,7 @@
)
-class remotefilelognodemap(object):
+class remotefilelognodemap:
def __init__(self, filename, store):
self._filename = filename
self._store = store
@@ -44,7 +42,7 @@
return node
-class remotefilelog(object):
+class remotefilelog:
_generaldelta = True
_flagserrorclass = error.RevlogError
@@ -424,7 +422,7 @@
return self.repo.nullid
revmap, parentfunc = self._buildrevgraph(a, b)
- nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}
+ nodemap = {v: k for (k, v) in revmap.items()}
ancs = ancestor.ancestors(parentfunc, revmap[a], revmap[b])
if ancs:
@@ -439,7 +437,7 @@
return self.repo.nullid
revmap, parentfunc = self._buildrevgraph(a, b)
- nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}
+ nodemap = {v: k for (k, v) in revmap.items()}
ancs = ancestor.commonancestorsheads(parentfunc, revmap[a], revmap[b])
return map(nodemap.__getitem__, ancs)
@@ -455,7 +453,7 @@
parentsmap = collections.defaultdict(list)
allparents = set()
for mapping in (amap, bmap):
- for node, pdata in pycompat.iteritems(mapping):
+ for node, pdata in mapping.items():
parents = parentsmap[node]
p1, p2, linknode, copyfrom = pdata
# Don't follow renames (copyfrom).
--- a/hgext/remotefilelog/remotefilelogserver.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/remotefilelogserver.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,9 +4,7 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import os
import stat
import time
@@ -22,7 +20,6 @@
error,
extensions,
match,
- pycompat,
scmutil,
store,
streamclone,
@@ -95,7 +92,7 @@
b'x_rfl_getfile', b'file node', permission=b'pull'
)(getfile)
- class streamstate(object):
+ class streamstate:
match = None
shallowremote = False
noflatmf = False
@@ -257,9 +254,8 @@
if not os.path.exists(dirname):
try:
os.makedirs(dirname)
- except OSError as ex:
- if ex.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
f = None
try:
@@ -417,7 +413,7 @@
cachepath = repo.vfs.join(b"remotefilelogcache")
for head in heads:
mf = repo[head].manifest()
- for filename, filenode in pycompat.iteritems(mf):
+ for filename, filenode in mf.items():
filecachepath = os.path.join(cachepath, filename, hex(filenode))
neededfiles.add(filecachepath)
--- a/hgext/remotefilelog/repack.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/repack.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import os
import time
@@ -11,7 +9,6 @@
lock as lockmod,
mdiff,
policy,
- pycompat,
scmutil,
util,
vfs,
@@ -349,7 +346,7 @@
# Group the packs by generation (i.e. by size)
generations = []
- for i in pycompat.xrange(len(limits)):
+ for i in range(len(limits)):
generations.append([])
sizes = {}
@@ -489,18 +486,18 @@
if type(m) is dict:
# m is a result of diff of two manifests and is a dictionary that
# maps filename to ((newnode, newflag), (oldnode, oldflag)) tuple
- for filename, diff in pycompat.iteritems(m):
+ for filename, diff in m.items():
if diff[0][0] is not None:
keepkeys.add(keyfn(filename, diff[0][0]))
else:
# m is a manifest object
- for filename, filenode in pycompat.iteritems(m):
+ for filename, filenode in m.items():
keepkeys.add(keyfn(filename, filenode))
return keepkeys
-class repacker(object):
+class repacker:
"""Class for orchestrating the repack of data and history information into a
new format.
"""
@@ -596,7 +593,7 @@
maxchainlen = ui.configint(b'packs', b'maxchainlen', 1000)
byfile = {}
- for entry in pycompat.itervalues(ledger.entries):
+ for entry in ledger.entries.values():
if entry.datasource:
byfile.setdefault(entry.filename, {})[entry.node] = entry
@@ -604,7 +601,7 @@
repackprogress = ui.makeprogress(
_(b"repacking data"), unit=self.unit, total=len(byfile)
)
- for filename, entries in sorted(pycompat.iteritems(byfile)):
+ for filename, entries in sorted(byfile.items()):
repackprogress.update(count)
ancestors = {}
@@ -751,14 +748,14 @@
ui = self.repo.ui
byfile = {}
- for entry in pycompat.itervalues(ledger.entries):
+ for entry in ledger.entries.values():
if entry.historysource:
byfile.setdefault(entry.filename, {})[entry.node] = entry
progress = ui.makeprogress(
_(b"repacking history"), unit=self.unit, total=len(byfile)
)
- for filename, entries in sorted(pycompat.iteritems(byfile)):
+ for filename, entries in sorted(byfile.items()):
ancestors = {}
nodes = list(node for node in entries)
@@ -821,7 +818,7 @@
return sortednodes
-class repackledger(object):
+class repackledger:
"""Storage for all the bookkeeping that happens during a repack. It contains
the list of revisions being repacked, what happened to each revision, and
which source store contained which revision originally (for later cleanup).
@@ -869,7 +866,7 @@
self.created.add(value)
-class repackentry(object):
+class repackentry:
"""Simple class representing a single revision entry in the repackledger."""
__slots__ = (
--- a/hgext/remotefilelog/shallowbundle.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/shallowbundle.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial.node import bin, hex
@@ -14,7 +13,6 @@
error,
match,
mdiff,
- pycompat,
)
from . import (
constants,
@@ -44,7 +42,7 @@
nodelist.insert(0, p)
# build deltas
- for i in pycompat.xrange(len(nodelist) - 1):
+ for i in range(len(nodelist) - 1):
prev, curr = nodelist[i], nodelist[i + 1]
linknode = lookup(curr)
for c in self.nodechunk(rlog, curr, prev, linknode):
--- a/hgext/remotefilelog/shallowrepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/shallowrepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
@@ -15,7 +14,6 @@
error,
localrepo,
match,
- pycompat,
scmutil,
sparse,
util,
@@ -269,7 +267,7 @@
mfrevlog = mfl.getstorage(b'')
if base is not None:
mfdict = mfl[repo[base].manifestnode()].read()
- skip = set(pycompat.iteritems(mfdict))
+ skip = set(mfdict.items())
else:
skip = set()
@@ -299,7 +297,7 @@
else:
mfdict = mfl[mfnode].read()
- diff = pycompat.iteritems(mfdict)
+ diff = mfdict.items()
if pats:
diff = (pf for pf in diff if m(pf[0]))
if sparsematch:
--- a/hgext/remotefilelog/shallowstore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/shallowstore.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
def wrapstore(store):
--- a/hgext/remotefilelog/shallowutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/shallowutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,10 +4,8 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
-import errno
import os
import stat
import struct
@@ -103,7 +101,7 @@
"""
result = collections.defaultdict(lambda: 0)
for dict in dicts:
- for k, v in pycompat.iteritems(dict):
+ for k, v in dict.items():
result[k] += v
return result
@@ -111,7 +109,7 @@
def prefixkeys(dict, prefix):
"""Returns ``dict`` with ``prefix`` prepended to all its keys."""
result = {}
- for k, v in pycompat.iteritems(dict):
+ for k, v in dict.items():
result[prefix + k] = v
return result
@@ -160,7 +158,7 @@
length limit is exceeded
"""
metabuf = b''
- for k, v in sorted(pycompat.iteritems((metadict or {}))):
+ for k, v in sorted((metadict or {}).items()):
if len(k) != 1:
raise error.ProgrammingError(b'packmeta: illegal key: %s' % k)
if len(v) > 0xFFFE:
@@ -176,8 +174,8 @@
_metaitemtypes = {
- constants.METAKEYFLAG: (int, pycompat.long),
- constants.METAKEYSIZE: (int, pycompat.long),
+ constants.METAKEYFLAG: (int, int),
+ constants.METAKEYSIZE: (int, int),
}
@@ -188,7 +186,7 @@
and METAKEYFLAG will be dropped if its value is 0.
"""
newmeta = {}
- for k, v in pycompat.iteritems(metadict or {}):
+ for k, v in (metadict or {}).items():
expectedtype = _metaitemtypes.get(k, (bytes,))
if not isinstance(v, expectedtype):
raise error.ProgrammingError(b'packmeta: wrong type of key %s' % k)
@@ -209,7 +207,7 @@
integers.
"""
metadict = _parsepackmeta(metabuf)
- for k, v in pycompat.iteritems(metadict):
+ for k, v in metadict.items():
if k in _metaitemtypes and int in _metaitemtypes[k]:
metadict[k] = bin2int(v)
return metadict
@@ -360,9 +358,8 @@
if not os.path.exists(dirname):
try:
os.makedirs(dirname)
- except OSError as ex:
- if ex.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
fd, temp = tempfile.mkstemp(prefix=b'.%s-' % filename, dir=dirname)
os.close(fd)
@@ -455,14 +452,14 @@
def readnodelist(stream):
rawlen = readexactly(stream, constants.NODECOUNTSIZE)
nodecount = struct.unpack(constants.NODECOUNTSTRUCT, rawlen)[0]
- for i in pycompat.xrange(nodecount):
+ for i in range(nodecount):
yield readexactly(stream, constants.NODESIZE)
def readpathlist(stream):
rawlen = readexactly(stream, constants.PATHCOUNTSIZE)
pathcount = struct.unpack(constants.PATHCOUNTSTRUCT, rawlen)[0]
- for i in pycompat.xrange(pathcount):
+ for i in range(pathcount):
yield readpath(stream)
@@ -520,9 +517,8 @@
for path in reversed(missingdirs):
try:
os.mkdir(path)
- except OSError as ex:
- if ex.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
for path in missingdirs:
setstickygroupdir(path, gid, ui.warn)
--- a/hgext/remotefilelog/shallowverifier.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotefilelog/shallowverifier.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import verify
--- a/hgext/remotenames.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/remotenames.py Thu Jun 16 15:28:54 2022 +0200
@@ -24,7 +24,8 @@
namespace (default: 'default')
"""
-from __future__ import absolute_import
+
+import collections.abc
from mercurial.i18n import _
@@ -35,7 +36,6 @@
extensions,
logexchange,
namespaces,
- pycompat,
registrar,
revsetlang,
smartset,
@@ -45,15 +45,6 @@
from mercurial.utils import stringutil
-if pycompat.ispy3:
- import collections.abc
-
- mutablemapping = collections.abc.MutableMapping
-else:
- import collections
-
- mutablemapping = collections.MutableMapping
-
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -82,7 +73,7 @@
)
-class lazyremotenamedict(mutablemapping):
+class lazyremotenamedict(collections.abc.MutableMapping):
"""
Read-only dict-like Class to lazily resolve remotename entries
@@ -171,13 +162,13 @@
if not self.loaded:
self._load()
- for k, vtup in pycompat.iteritems(self.potentialentries):
+ for k, vtup in self.potentialentries.items():
yield (k, [bin(vtup[0])])
items = iteritems
-class remotenames(object):
+class remotenames:
"""
This class encapsulates all the remotenames state. It also contains
methods to access that state in convenient ways. Remotenames are lazy
@@ -208,7 +199,7 @@
if not self._nodetobmarks:
bmarktonodes = self.bmarktonodes()
self._nodetobmarks = {}
- for name, node in pycompat.iteritems(bmarktonodes):
+ for name, node in bmarktonodes.items():
self._nodetobmarks.setdefault(node[0], []).append(name)
return self._nodetobmarks
@@ -219,7 +210,7 @@
if not self._nodetobranch:
branchtonodes = self.branchtonodes()
self._nodetobranch = {}
- for name, nodes in pycompat.iteritems(branchtonodes):
+ for name, nodes in branchtonodes.items():
for node in nodes:
self._nodetobranch.setdefault(node, []).append(name)
return self._nodetobranch
@@ -229,7 +220,7 @@
marktonodes = self.bmarktonodes()
self._hoisttonodes = {}
hoist += b'/'
- for name, node in pycompat.iteritems(marktonodes):
+ for name, node in marktonodes.items():
if name.startswith(hoist):
name = name[len(hoist) :]
self._hoisttonodes[name] = node
@@ -240,7 +231,7 @@
marktonodes = self.bmarktonodes()
self._nodetohoists = {}
hoist += b'/'
- for name, node in pycompat.iteritems(marktonodes):
+ for name, node in marktonodes.items():
if name.startswith(hoist):
name = name[len(hoist) :]
self._nodetohoists.setdefault(node[0], []).append(name)
--- a/hgext/schemes.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/schemes.py Thu Jun 16 15:28:54 2022 +0200
@@ -39,7 +39,6 @@
You can override a predefined scheme by defining a new scheme with the
same name.
"""
-from __future__ import absolute_import
import os
import re
@@ -68,7 +67,7 @@
_partre = re.compile(br'{(\d+)\}')
-class ShortRepository(object):
+class ShortRepository:
def __init__(self, url, scheme, templater):
self.scheme = scheme
self.templater = templater
--- a/hgext/share.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/share.py Thu Jun 16 15:28:54 2022 +0200
@@ -65,9 +65,7 @@
and there are no untracked files, delete that share and create a new share.
'''
-from __future__ import absolute_import
-import errno
from mercurial.i18n import _
from mercurial import (
bookmarks,
@@ -178,9 +176,7 @@
return False
try:
shared = repo.vfs.read(b'shared').splitlines()
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return False
return hg.sharedbookmarks in shared
@@ -200,9 +196,8 @@
# is up-to-date.
return fp
fp.close()
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
# otherwise, we should read bookmarks from srcrepo,
# because .hg/bookmarks in srcrepo might be already
--- a/hgext/show.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/show.py Thu Jun 16 15:28:54 2022 +0200
@@ -25,7 +25,6 @@
performed.
"""
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial.node import nullrev
--- a/hgext/sparse.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/sparse.py Thu Jun 16 15:28:54 2022 +0200
@@ -71,18 +71,15 @@
tools/tests/**
"""
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial.pycompat import setattr
from mercurial import (
cmdutil,
commands,
- dirstate,
error,
extensions,
logcmdutil,
- match as matchmod,
merge as mergemod,
pycompat,
registrar,
@@ -106,7 +103,6 @@
_setupclone(ui)
_setuplog(ui)
_setupadd(ui)
- _setupdirstate(ui)
def replacefilecache(cls, propname, replacement):
@@ -209,69 +205,6 @@
extensions.wrapcommand(commands.table, b'add', _add)
-def _setupdirstate(ui):
- """Modify the dirstate to prevent stat'ing excluded files,
- and to prevent modifications to files outside the checkout.
- """
-
- def walk(orig, self, match, subrepos, unknown, ignored, full=True):
- # hack to not exclude explicitly-specified paths so that they can
- # be warned later on e.g. dirstate.add()
- em = matchmod.exact(match.files())
- sm = matchmod.unionmatcher([self._sparsematcher, em])
- match = matchmod.intersectmatchers(match, sm)
- return orig(self, match, subrepos, unknown, ignored, full)
-
- extensions.wrapfunction(dirstate.dirstate, b'walk', walk)
-
- # dirstate.rebuild should not add non-matching files
- def _rebuild(orig, self, parent, allfiles, changedfiles=None):
- matcher = self._sparsematcher
- if not matcher.always():
- allfiles = [f for f in allfiles if matcher(f)]
- if changedfiles:
- changedfiles = [f for f in changedfiles if matcher(f)]
-
- if changedfiles is not None:
- # In _rebuild, these files will be deleted from the dirstate
- # when they are not found to be in allfiles
- dirstatefilestoremove = {f for f in self if not matcher(f)}
- changedfiles = dirstatefilestoremove.union(changedfiles)
-
- return orig(self, parent, allfiles, changedfiles)
-
- extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild)
-
- # Prevent adding files that are outside the sparse checkout
- editfuncs = [
- b'set_tracked',
- b'set_untracked',
- b'copy',
- ]
- hint = _(
- b'include file with `hg debugsparse --include <pattern>` or use '
- + b'`hg add -s <file>` to include file directory while adding'
- )
- for func in editfuncs:
-
- def _wrapper(orig, self, *args, **kwargs):
- sparsematch = self._sparsematcher
- if not sparsematch.always():
- for f in args:
- if f is not None and not sparsematch(f) and f not in self:
- raise error.Abort(
- _(
- b"cannot add '%s' - it is outside "
- b"the sparse checkout"
- )
- % f,
- hint=hint,
- )
- return orig(self, *args, **kwargs)
-
- extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
-
-
@command(
b'debugsparse',
[
@@ -398,6 +331,9 @@
if count > 1:
raise error.Abort(_(b"too many flags specified"))
+ # enable sparse on repo even if the requirements is missing.
+ repo._has_sparse = True
+
if count == 0:
if repo.vfs.exists(b'sparse'):
ui.status(repo.vfs.read(b"sparse") + b"\n")
@@ -453,3 +389,5 @@
)
finally:
wlock.release()
+
+ del repo._has_sparse
--- a/hgext/split.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/split.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# GNU General Public License version 2 or any later version.
"""command to split a changeset into smaller ones (EXPERIMENTAL)"""
-from __future__ import absolute_import
from mercurial.i18n import _
--- a/hgext/sqlitestore.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/sqlitestore.py Thu Jun 16 15:28:54 2022 +0200
@@ -43,7 +43,6 @@
# --extra-config-opt extensions.sqlitestore= \
# --extra-config-opt storage.new-repo-backend=sqlite
-from __future__ import absolute_import
import sqlite3
import struct
@@ -265,7 +264,7 @@
@attr.s
-class revisionentry(object):
+class revisionentry:
rid = attr.ib()
rev = attr.ib()
node = attr.ib()
@@ -279,7 +278,7 @@
@interfaceutil.implementer(repository.irevisiondelta)
@attr.s(slots=True)
-class sqliterevisiondelta(object):
+class sqliterevisiondelta:
node = attr.ib()
p1node = attr.ib()
p2node = attr.ib()
@@ -295,14 +294,14 @@
@interfaceutil.implementer(repository.iverifyproblem)
@attr.s(frozen=True)
-class sqliteproblem(object):
+class sqliteproblem:
warning = attr.ib(default=None)
error = attr.ib(default=None)
node = attr.ib(default=None)
@interfaceutil.implementer(repository.ifilestorage)
-class sqlitefilestore(object):
+class sqlitefilestore:
"""Implements storage for an individual tracked path."""
def __init__(self, db, path, compression):
@@ -397,7 +396,7 @@
return len(self._revisions)
def __iter__(self):
- return iter(pycompat.xrange(len(self._revisions)))
+ return iter(range(len(self._revisions)))
def hasnode(self, node):
if node == sha1nodeconstants.nullid:
@@ -1250,7 +1249,7 @@
@interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
-class sqlitefilestorage(object):
+class sqlitefilestorage:
"""Repository file storage backed by SQLite."""
def file(self, path):
--- a/hgext/strip.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/strip.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
This extension allows you to strip changesets and all their descendants from the
repository. See the command help for details.
"""
-from __future__ import absolute_import
from mercurial import commands
--- a/hgext/transplant.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/transplant.py Thu Jun 16 15:28:54 2022 +0200
@@ -13,7 +13,6 @@
Transplanted patches are recorded in .hg/transplant/transplants, as a
map from a changeset hash to its hash in the source repository.
'''
-from __future__ import absolute_import
import os
@@ -76,13 +75,13 @@
)
-class transplantentry(object):
+class transplantentry:
def __init__(self, lnode, rnode):
self.lnode = lnode
self.rnode = rnode
-class transplants(object):
+class transplants:
def __init__(self, path=None, transplantfile=None, opener=None):
self.path = path
self.transplantfile = transplantfile
@@ -107,7 +106,7 @@
if not os.path.isdir(self.path):
os.mkdir(self.path)
fp = self.opener(self.transplantfile, b'w')
- for list in pycompat.itervalues(self.transplants):
+ for list in self.transplants.values():
for t in list:
l, r = map(hex, (t.lnode, t.rnode))
fp.write(l + b':' + r + b'\n')
@@ -129,7 +128,7 @@
self.dirty = True
-class transplanter(object):
+class transplanter:
def __init__(self, ui, repo, opts):
self.ui = ui
self.repo = repo
--- a/hgext/uncommit.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/uncommit.py Thu Jun 16 15:28:54 2022 +0200
@@ -17,7 +17,6 @@
added and removed in the working directory.
"""
-from __future__ import absolute_import
from mercurial.i18n import _
@@ -81,9 +80,7 @@
files = initialfiles - exclude
# Filter copies
copied = copiesmod.pathcopies(base, ctx)
- copied = {
- dst: src for dst, src in pycompat.iteritems(copied) if dst in files
- }
+ copied = {dst: src for dst, src in copied.items() if dst in files}
def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
if path not in contentctx:
--- a/hgext/win32mbcs.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/win32mbcs.py Thu Jun 16 15:28:54 2022 +0200
@@ -44,7 +44,6 @@
It is useful for the users who want to commit with UTF-8 log message.
'''
-from __future__ import absolute_import
import os
import sys
@@ -95,7 +94,7 @@
def encode(arg):
- if isinstance(arg, pycompat.unicode):
+ if isinstance(arg, str):
return arg.encode(_encoding)
elif isinstance(arg, tuple):
return tuple(map(encode, arg))
@@ -136,7 +135,7 @@
def wrapper(func, args, kwds):
- return basewrapper(func, pycompat.unicode, encode, decode, args, kwds)
+ return basewrapper(func, str, encode, decode, args, kwds)
def reversewrapper(func, args, kwds):
--- a/hgext/win32text.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/win32text.py Thu Jun 16 15:28:54 2022 +0200
@@ -41,7 +41,6 @@
# or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
'''
-from __future__ import absolute_import
import re
from mercurial.i18n import _
@@ -49,7 +48,6 @@
from mercurial import (
cmdutil,
extensions,
- pycompat,
registrar,
)
from mercurial.utils import stringutil
@@ -157,9 +155,7 @@
# changegroup that contains an unacceptable commit followed later
# by a commit that fixes the problem.
tip = repo[b'tip']
- for rev in pycompat.xrange(
- repo.changelog.tiprev(), repo[node].rev() - 1, -1
- ):
+ for rev in range(repo.changelog.tiprev(), repo[node].rev() - 1, -1):
c = repo[rev]
for f in c.files():
if f in seen or f not in tip or f not in c:
@@ -213,7 +209,7 @@
def reposetup(ui, repo):
if not repo.local():
return
- for name, fn in pycompat.iteritems(_filters):
+ for name, fn in _filters.items():
repo.adddatafilter(name, fn)
--- a/hgext/zeroconf/Zeroconf.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/zeroconf/Zeroconf.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
""" Multicast DNS Service Discovery for Python, v0.12
Copyright (C) 2003, Paul Scott-Murphy
@@ -233,7 +231,7 @@
# implementation classes
-class DNSEntry(object):
+class DNSEntry:
"""A DNS entry"""
def __init__(self, name, type, clazz):
@@ -294,7 +292,7 @@
"""A DNS question entry"""
def __init__(self, name, type, clazz):
- if pycompat.ispy3 and isinstance(name, str):
+ if isinstance(name, str):
name = name.encode('ascii')
if not name.endswith(b".local."):
raise NonLocalNameException(name)
@@ -508,7 +506,7 @@
return self.toString(b"%s:%s" % (self.server, self.port))
-class DNSIncoming(object):
+class DNSIncoming:
"""Object representation of an incoming DNS packet"""
def __init__(self, data):
@@ -704,7 +702,7 @@
return result
-class DNSOutgoing(object):
+class DNSOutgoing:
"""Object representation of an outgoing packet"""
def __init__(self, flags, multicast=1):
@@ -866,7 +864,7 @@
return b''.join(self.data)
-class DNSCache(object):
+class DNSCache:
"""A cache of DNS entries"""
def __init__(self):
@@ -984,7 +982,7 @@
self.condition.release()
-class Listener(object):
+class Listener:
"""A Listener is used by this module to listen on the multicast
group to which DNS messages are sent, allowing the implementation
to cache information as it arrives.
@@ -1129,7 +1127,7 @@
event(self.zeroconf)
-class ServiceInfo(object):
+class ServiceInfo:
"""Service information"""
def __init__(
@@ -1388,7 +1386,7 @@
return result
-class Zeroconf(object):
+class Zeroconf:
"""Implementation of Zeroconf Multicast DNS Service Discovery
Supports registration, unregistration, queries and browsing.
@@ -1461,7 +1459,7 @@
def notifyAll(self):
"""Notifies all waiting threads"""
self.condition.acquire()
- self.condition.notifyAll()
+ self.condition.notify_all()
self.condition.release()
def getServiceInfo(self, type, name, timeout=3000):
--- a/hgext/zeroconf/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext/zeroconf/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -22,7 +22,6 @@
$ hg paths
zc-test = http://example.com:8000/test
'''
-from __future__ import absolute_import
import os
import socket
@@ -159,7 +158,7 @@
# listen
-class listener(object):
+class listener:
def __init__(self):
self.found = {}
--- a/hgext3rd/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/hgext3rd/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
# name space package to host third party extensions
-from __future__ import absolute_import
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
--- a/i18n/check-translation.py Thu Jun 16 15:15:03 2022 +0200
+++ b/i18n/check-translation.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
#!/usr/bin/env python3
#
# check-translation.py - check Mercurial specific translation problems
-from __future__ import absolute_import
import re
--- a/i18n/hggettext Thu Jun 16 15:15:03 2022 +0200
+++ b/i18n/hggettext Thu Jun 16 15:28:54 2022 +0200
@@ -20,7 +20,6 @@
join the message cataloges to get the final catalog.
"""
-from __future__ import absolute_import, print_function
import inspect
import os
--- a/i18n/polib.py Thu Jun 16 15:15:03 2022 +0200
+++ b/i18n/polib.py Thu Jun 16 15:28:54 2022 +0200
@@ -13,7 +13,6 @@
:func:`~polib.mofile` convenience functions.
"""
-from __future__ import absolute_import
__author__ = 'David Jean Louis <izimobil@gmail.com>'
__version__ = '1.0.7'
@@ -43,7 +42,7 @@
except ImportError:
# replacement of io.open() for python < 2.6
# we use codecs instead
- class io(object):
+ class io:
@staticmethod
def open(fpath, mode='r', encoding=None):
return codecs.open(fpath, mode, encoding)
@@ -817,7 +816,7 @@
# class _BaseEntry {{{
-class _BaseEntry(object):
+class _BaseEntry:
"""
Base class for :class:`~polib.POEntry` and :class:`~polib.MOEntry` classes.
This class should **not** be instanciated directly.
@@ -1228,7 +1227,7 @@
# class _POFileParser {{{
-class _POFileParser(object):
+class _POFileParser:
"""
A finite state machine to parse efficiently and correctly po
file format.
@@ -1707,7 +1706,7 @@
# class _MOFileParser {{{
-class _MOFileParser(object):
+class _MOFileParser:
"""
A class to parse binary mo files.
"""
--- a/i18n/posplit Thu Jun 16 15:15:03 2022 +0200
+++ b/i18n/posplit Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# license: MIT/X11/Expat
#
-from __future__ import absolute_import, print_function
import polib
import re
--- a/mercurial/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
# Allow 'from mercurial import demandimport' to keep working.
import hgdemandimport
--- a/mercurial/ancestor.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/ancestor.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import heapq
@@ -13,7 +12,6 @@
from . import (
dagop,
policy,
- pycompat,
)
parsers = policy.importmod('parsers')
@@ -147,7 +145,7 @@
return deepest(gca)
-class incrementalmissingancestors(object):
+class incrementalmissingancestors:
"""persistent state used to calculate missing ancestors incrementally
Although similar in spirit to lazyancestors below, this is a separate class
@@ -188,7 +186,7 @@
# no revs to consider
return
- for curr in pycompat.xrange(start, min(revs) - 1, -1):
+ for curr in range(start, min(revs) - 1, -1):
if curr not in bases:
continue
revs.discard(curr)
@@ -229,7 +227,7 @@
# exit.
missing = []
- for curr in pycompat.xrange(start, nullrev, -1):
+ for curr in range(start, nullrev, -1):
if not revsvisit:
break
@@ -317,7 +315,7 @@
see(p2)
-class lazyancestors(object):
+class lazyancestors:
def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
"""Create a new object generating ancestors for the given revs. Does
not generate revs lower than stoprev.
--- a/mercurial/archival.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/archival.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import gzip
import os
@@ -76,7 +75,7 @@
def guesskind(dest):
- for kind, extensions in pycompat.iteritems(exts):
+ for kind, extensions in exts.items():
if any(dest.endswith(ext) for ext in extensions):
return kind
return None
@@ -133,43 +132,10 @@
return out.getvalue()
-class tarit(object):
+class tarit:
"""write archive to tar file or stream. can write uncompressed,
or compress with gzip or bzip2."""
- if pycompat.ispy3:
- GzipFileWithTime = gzip.GzipFile # camelcase-required
- else:
-
- class GzipFileWithTime(gzip.GzipFile):
- def __init__(self, *args, **kw):
- timestamp = None
- if 'mtime' in kw:
- timestamp = kw.pop('mtime')
- if timestamp is None:
- self.timestamp = time.time()
- else:
- self.timestamp = timestamp
- gzip.GzipFile.__init__(self, *args, **kw)
-
- def _write_gzip_header(self):
- self.fileobj.write(b'\037\213') # magic header
- self.fileobj.write(b'\010') # compression method
- fname = self.name
- if fname and fname.endswith(b'.gz'):
- fname = fname[:-3]
- flags = 0
- if fname:
- flags = gzip.FNAME # pytype: disable=module-attr
- self.fileobj.write(pycompat.bytechr(flags))
- gzip.write32u( # pytype: disable=module-attr
- self.fileobj, int(self.timestamp)
- )
- self.fileobj.write(b'\002')
- self.fileobj.write(b'\377')
- if fname:
- self.fileobj.write(fname + b'\000')
-
def __init__(self, dest, mtime, kind=b''):
self.mtime = mtime
self.fileobj = None
@@ -179,7 +145,7 @@
mode = mode[0:1]
if not fileobj:
fileobj = open(name, mode + b'b')
- gzfileobj = self.GzipFileWithTime(
+ gzfileobj = gzip.GzipFile(
name,
pycompat.sysstr(mode + b'b'),
zlib.Z_BEST_COMPRESSION,
@@ -227,7 +193,7 @@
self.fileobj.close()
-class zipit(object):
+class zipit:
"""write archive to zip file or stream. can write uncompressed,
or compressed with deflate."""
@@ -274,7 +240,7 @@
self.z.close()
-class fileit(object):
+class fileit:
'''write archive as files in directory.'''
def __init__(self, name, mtime):
@@ -339,9 +305,6 @@
subrepos tells whether to include subrepos.
"""
- if kind == b'txz' and not pycompat.ispy3:
- raise error.Abort(_(b'xz compression is only available in Python 3'))
-
if kind == b'files':
if prefix:
raise error.Abort(_(b'cannot give prefix when archiving to files'))
--- a/mercurial/bookmarks.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/bookmarks.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,9 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import struct
from .i18n import _
@@ -28,6 +26,7 @@
util,
)
from .utils import (
+ stringutil,
urlutil,
)
@@ -59,7 +58,7 @@
return fp
-class bmstore(object):
+class bmstore:
r"""Storage for bookmarks.
This object should do all bookmark-related reads and writes, so
@@ -101,8 +100,8 @@
if nrefs[-2] > refspec:
# bookmarks weren't sorted before 4.5
nrefs.sort()
- except (TypeError, ValueError):
- # TypeError:
+ except ValueError:
+ # binascii.Error (ValueError subclass):
# - bin(...)
# ValueError:
# - node in nm, for non-20-bytes entry
@@ -114,9 +113,8 @@
_(b'malformed line in %s: %r\n')
% (bookmarkspath, pycompat.bytestr(line))
)
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
self._active = _readactive(repo, self)
@property
@@ -138,7 +136,7 @@
return iter(self._refmap)
def iteritems(self):
- return pycompat.iteritems(self._refmap)
+ return self._refmap.items()
def items(self):
return self._refmap.items()
@@ -251,7 +249,7 @@
self._aclean = True
def _write(self, fp):
- for name, node in sorted(pycompat.iteritems(self._refmap)):
+ for name, node in sorted(self._refmap.items()):
fp.write(b"%s %s\n" % (hex(node), encoding.fromlocal(name)))
self._clean = True
self._repo.invalidatevolatilesets()
@@ -343,7 +341,7 @@
# No readline() in osutil.posixfile, reading everything is
# cheap.
content = repo.vfs.tryread(b'bookmarks.current')
- mark = encoding.tolocal((content.splitlines() or [b''])[0])
+ mark = encoding.tolocal(stringutil.firstline(content))
if mark == b'' or mark not in marks:
mark = None
return mark
@@ -419,7 +417,7 @@
)
name = repo._activebookmark.split(b'@', 1)[0]
heads = []
- for mark, n in pycompat.iteritems(repo._bookmarks):
+ for mark, n in repo._bookmarks.items():
if mark.split(b'@', 1)[0] == name:
heads.append(n)
return heads
@@ -477,7 +475,7 @@
marks = getattr(repo, '_bookmarks', {})
hasnode = repo.changelog.hasnode
- for k, v in pycompat.iteritems(marks):
+ for k, v in marks.items():
# don't expose local divergent bookmarks
if hasnode(v) and not isdivergent(k):
yield k, v
@@ -688,7 +686,7 @@
remotemarks"""
changed = []
localmarks = repo._bookmarks
- for (b, id) in pycompat.iteritems(remotemarks):
+ for (b, id) in remotemarks.items():
if id != localmarks.get(b, None) and id in repo:
changed.append((b, id, ui.debug, _(b"updating bookmark %s\n") % b))
for b in localmarks:
@@ -1075,7 +1073,7 @@
hexfn = fm.hexfunc
if len(bmarks) == 0 and fm.isplain():
ui.status(_(b"no bookmarks set\n"))
- for bmark, (n, prefix, label) in sorted(pycompat.iteritems(bmarks)):
+ for bmark, (n, prefix, label) in sorted(bmarks.items()):
fm.startitem()
fm.context(repo=repo)
if not ui.quiet:
--- a/mercurial/branchmap.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/branchmap.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import struct
@@ -63,7 +62,7 @@
unpack_from = struct.unpack_from
-class BranchMapCache(object):
+class BranchMapCache:
"""mapping of filtered views of repo with their branchcache"""
def __init__(self):
@@ -120,7 +119,7 @@
clbranchinfo = cl.branchinfo
rbheads = []
closed = set()
- for bheads in pycompat.itervalues(remotebranchmap):
+ for bheads in remotebranchmap.values():
rbheads += bheads
for h in bheads:
r = clrev(h)
@@ -160,7 +159,7 @@
def _unknownnode(node):
"""raises ValueError when branchcache found a node which does not exists"""
- raise ValueError('node %s does not exist' % pycompat.sysstr(hex(node)))
+ raise ValueError('node %s does not exist' % node.hex())
def _branchcachedesc(repo):
@@ -170,7 +169,7 @@
return b'branch cache'
-class branchcache(object):
+class branchcache:
"""A dict like object that hold branches heads cache.
This cache is used to avoid costly computations to determine all the
@@ -271,7 +270,7 @@
return key in self._entries
def iteritems(self):
- for k, v in pycompat.iteritems(self._entries):
+ for k, v in self._entries.items():
self._verifybranch(k)
yield k, v
@@ -401,13 +400,13 @@
return heads
def iterbranches(self):
- for bn, heads in pycompat.iteritems(self):
+ for bn, heads in self.items():
yield (bn, heads) + self._branchtip(heads)
def iterheads(self):
"""returns all the heads"""
self._verifyall()
- return pycompat.itervalues(self._entries)
+ return self._entries.values()
def copy(self):
"""return an deep copy of the branchcache object"""
@@ -429,22 +428,22 @@
self._delayed = True
return
try:
- f = repo.cachevfs(self._filename(repo), b"w", atomictemp=True)
- cachekey = [hex(self.tipnode), b'%d' % self.tiprev]
- if self.filteredhash is not None:
- cachekey.append(hex(self.filteredhash))
- f.write(b" ".join(cachekey) + b'\n')
- nodecount = 0
- for label, nodes in sorted(pycompat.iteritems(self._entries)):
- label = encoding.fromlocal(label)
- for node in nodes:
- nodecount += 1
- if node in self._closednodes:
- state = b'c'
- else:
- state = b'o'
- f.write(b"%s %s %s\n" % (hex(node), state, label))
- f.close()
+ filename = self._filename(repo)
+ with repo.cachevfs(filename, b"w", atomictemp=True) as f:
+ cachekey = [hex(self.tipnode), b'%d' % self.tiprev]
+ if self.filteredhash is not None:
+ cachekey.append(hex(self.filteredhash))
+ f.write(b" ".join(cachekey) + b'\n')
+ nodecount = 0
+ for label, nodes in sorted(self._entries.items()):
+ label = encoding.fromlocal(label)
+ for node in nodes:
+ nodecount += 1
+ if node in self._closednodes:
+ state = b'c'
+ else:
+ state = b'o'
+ f.write(b"%s %s %s\n" % (hex(node), state, label))
repo.ui.log(
b'branchcache',
b'wrote %s with %d labels and %d nodes\n',
@@ -491,7 +490,7 @@
# Faster than using ctx.obsolete()
obsrevs = obsolete.getrevs(repo, b'obsolete')
- for branch, newheadrevs in pycompat.iteritems(newbranches):
+ for branch, newheadrevs in newbranches.items():
# For every branch, compute the new branchheads.
# A branchhead is a revision such that no descendant is on
# the same branch.
@@ -632,7 +631,7 @@
_rbccloseflag = 0x80000000
-class revbranchcache(object):
+class revbranchcache:
"""Persistent cache, mapping from revision number to branch name and close.
This is a low level cache, independent of filtering.
--- a/mercurial/bundle2.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/bundle2.py Thu Jun 16 15:28:54 2022 +0200
@@ -145,7 +145,6 @@
preserve.
"""
-from __future__ import absolute_import, division
import collections
import errno
@@ -252,7 +251,7 @@
return _decorator
-class unbundlerecords(object):
+class unbundlerecords:
"""keep record of what happens during and unbundle
New records are added using `records.add('cat', obj)`. Where 'cat' is a
@@ -300,7 +299,7 @@
__bool__ = __nonzero__
-class bundleoperation(object):
+class bundleoperation:
"""an object that represents a single bundling process
Its purpose is to carry unbundle-related objects and states.
@@ -380,7 +379,7 @@
return op
-class partiterator(object):
+class partiterator:
def __init__(self, repo, op, unbundler):
self.repo = repo
self.op = op
@@ -627,7 +626,7 @@
bundlepriority = [b'HG10GZ', b'HG10BZ', b'HG10UN']
-class bundle20(object):
+class bundle20:
"""represent an outgoing bundle2 container
Use the `addparam` method to add stream level parameter. and `newpart` to
@@ -751,7 +750,7 @@
return salvaged
-class unpackermixin(object):
+class unpackermixin:
"""A mixin to extract bytes and struct data from a stream"""
def __init__(self, fp):
@@ -984,7 +983,7 @@
unbundler._compressed = True
-class bundlepart(object):
+class bundlepart:
"""A bundle2 part contains application level payload
The part `type` is used to route the part to the application level
@@ -1274,7 +1273,7 @@
)
-class interruptoperation(object):
+class interruptoperation:
"""A limited operation to be use by part handler during interruption
It only have access to an ui object.
@@ -1693,7 +1692,7 @@
raise error.ProgrammingError(b'unknown bundle type: %s' % bundletype)
caps = {}
- if b'obsolescence' in opts:
+ if opts.get(b'obsolescence', False):
caps[b'obsmarkers'] = (b'V1',)
bundle = bundle20(ui, caps)
bundle.setcompression(compression, compopts)
@@ -2240,7 +2239,7 @@
b'remote repository changed while pushing - please try again '
b'(%s is %s expected %s)'
)
- for expectedphase, nodes in pycompat.iteritems(phasetonodes):
+ for expectedphase, nodes in phasetonodes.items():
for n in nodes:
actualphase = phasecache.phase(unfi, cl.rev(n))
if actualphase != expectedphase:
--- a/mercurial/bundlecaches.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/bundlecaches.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,6 +3,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
+import collections
+
from .i18n import _
from .thirdparty import attr
@@ -21,13 +23,33 @@
@attr.s
-class bundlespec(object):
+class bundlespec:
compression = attr.ib()
wirecompression = attr.ib()
version = attr.ib()
wireversion = attr.ib()
- params = attr.ib()
- contentopts = attr.ib()
+ # parameters explicitly overwritten by the config or the specification
+ _explicit_params = attr.ib()
+ # default parameter for the version
+ #
+ # Keeping it separated is useful to check what was actually overwritten.
+ _default_opts = attr.ib()
+
+ @property
+ def params(self):
+ return collections.ChainMap(self._explicit_params, self._default_opts)
+
+ @property
+ def contentopts(self):
+ # kept for Backward Compatibility concerns.
+ return self.params
+
+ def set_param(self, key, value, overwrite=True):
+ """Set a bundle parameter value.
+
+ Will only overwrite if overwrite is true"""
+ if overwrite or key not in self._explicit_params:
+ self._explicit_params[key] = value
# Maps bundle version human names to changegroup versions.
@@ -56,23 +78,78 @@
b'tagsfnodescache': True,
b'revbranchcache': True,
},
- b'packed1': {b'cg.version': b's1'},
+ b'streamv2': {
+ b'changegroup': False,
+ b'cg.version': b'02',
+ b'obsolescence': False,
+ b'phases': False,
+ b"streamv2": True,
+ b'tagsfnodescache': False,
+ b'revbranchcache': False,
+ },
+ b'packed1': {
+ b'cg.version': b's1',
+ },
+ b'bundle2': { # legacy
+ b'cg.version': b'02',
+ },
}
_bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
-_bundlespecvariants = {
- b"streamv2": {
- b"changegroup": False,
- b"streamv2": True,
- b"tagsfnodescache": False,
- b"revbranchcache": False,
- }
-}
+_bundlespecvariants = {b"streamv2": {}}
# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
_bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
+def param_bool(key, value):
+ """make a boolean out of a parameter value"""
+ b = stringutil.parsebool(value)
+ if b is None:
+ msg = _(b"parameter %s should be a boolean ('%s')")
+ msg %= (key, value)
+ raise error.InvalidBundleSpecification(msg)
+ return b
+
+
+# mapping of known parameter name need their value processed
+bundle_spec_param_processing = {
+ b"obsolescence": param_bool,
+ b"obsolescence-mandatory": param_bool,
+ b"phases": param_bool,
+}
+
+
+def _parseparams(s):
+ """parse bundlespec parameter section
+
+ input: "comp-version;params" string
+
+ return: (spec; {param_key: param_value})
+ """
+ if b';' not in s:
+ return s, {}
+
+ params = {}
+ version, paramstr = s.split(b';', 1)
+
+ err = _(b'invalid bundle specification: missing "=" in parameter: %s')
+ for p in paramstr.split(b';'):
+ if b'=' not in p:
+ msg = err % p
+ raise error.InvalidBundleSpecification(msg)
+
+ key, value = p.split(b'=', 1)
+ key = urlreq.unquote(key)
+ value = urlreq.unquote(value)
+ process = bundle_spec_param_processing.get(key)
+ if process is not None:
+ value = process(key, value)
+ params[key] = value
+
+ return version, params
+
+
def parsebundlespec(repo, spec, strict=True):
"""Parse a bundle string specification into parts.
@@ -106,31 +183,6 @@
Note: this function will likely eventually return a more complex data
structure, including bundle2 part information.
"""
-
- def parseparams(s):
- if b';' not in s:
- return s, {}
-
- params = {}
- version, paramstr = s.split(b';', 1)
-
- for p in paramstr.split(b';'):
- if b'=' not in p:
- raise error.InvalidBundleSpecification(
- _(
- b'invalid bundle specification: '
- b'missing "=" in parameter: %s'
- )
- % p
- )
-
- key, value = p.split(b'=', 1)
- key = urlreq.unquote(key)
- value = urlreq.unquote(value)
- params[key] = value
-
- return version, params
-
if strict and b'-' not in spec:
raise error.InvalidBundleSpecification(
_(
@@ -140,7 +192,8 @@
% spec
)
- if b'-' in spec:
+ pre_args = spec.split(b';', 1)[0]
+ if b'-' in pre_args:
compression, version = spec.split(b'-', 1)
if compression not in util.compengines.supportedbundlenames:
@@ -148,9 +201,9 @@
_(b'%s compression is not supported') % compression
)
- version, params = parseparams(version)
+ version, params = _parseparams(version)
- if version not in _bundlespeccgversions:
+ if version not in _bundlespeccontentopts:
raise error.UnsupportedBundleSpecification(
_(b'%s is not a recognized bundle version') % version
)
@@ -159,7 +212,7 @@
# case some defaults are assumed (but only when not in strict mode).
assert not strict
- spec, params = parseparams(spec)
+ spec, params = _parseparams(spec)
if spec in util.compengines.supportedbundlenames:
compression = spec
@@ -172,7 +225,7 @@
# Modern compression engines require v2.
if compression not in _bundlespecv1compengines:
version = b'v2'
- elif spec in _bundlespeccgversions:
+ elif spec in _bundlespeccontentopts:
if spec == b'packed1':
compression = b'none'
else:
@@ -203,16 +256,25 @@
)
# Compute contentopts based on the version
+ if b"stream" in params and params[b"stream"] == b"v2":
+ # That case is fishy as this mostly derails the version selection
+ # mechanism. `stream` bundles are quite specific and used differently
+ # as "normal" bundles.
+ #
+ # So we are pinning this to "v2", as this will likely be
+ # compatible forever. (see the next conditional).
+ #
+ # (we should probably define a cleaner way to do this and raise a
+ # warning when the old way is encounter)
+ version = b"streamv2"
contentopts = _bundlespeccontentopts.get(version, {}).copy()
-
- # Process the variants
- if b"stream" in params and params[b"stream"] == b"v2":
- variant = _bundlespecvariants[b"streamv2"]
- contentopts.update(variant)
+ if version == b"streamv2":
+ # streamv2 have been reported as "v2" for a while.
+ version = b"v2"
engine = util.compengines.forbundlename(compression)
compression, wirecompression = engine.bundletype()
- wireversion = _bundlespeccgversions[version]
+ wireversion = _bundlespeccontentopts[version][b'cg.version']
return bundlespec(
compression, wirecompression, version, wireversion, params, contentopts
@@ -343,7 +405,7 @@
return newentries
-class clonebundleentry(object):
+class clonebundleentry:
"""Represents an item in a clone bundles manifest.
This rich class is needed to support sorting since sorted() in Python 3
--- a/mercurial/bundlerepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/bundlerepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,7 +11,6 @@
were part of the actual repository.
"""
-from __future__ import absolute_import
import os
import shutil
@@ -271,7 +270,7 @@
return filespos
-class bundlerepository(object):
+class bundlerepository:
"""A repository instance that is a union of a local repo and a bundle.
Instances represent a read-only repository composed of a local repository
@@ -551,7 +550,7 @@
return repo
-class bundletransactionmanager(object):
+class bundletransactionmanager:
def transaction(self):
return None
--- a/mercurial/cacheutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cacheutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from . import repoview
--- a/mercurial/cext/base85.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/base85.c Thu Jun 16 15:28:54 2022 +0200
@@ -38,7 +38,7 @@
unsigned int acc, val, ch;
int pad = 0;
- if (!PyArg_ParseTuple(args, PY23("s#|i", "y#|i"), &text, &len, &pad)) {
+ if (!PyArg_ParseTuple(args, "y#|i", &text, &len, &pad)) {
return NULL;
}
@@ -90,7 +90,7 @@
int c;
unsigned int acc;
- if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &len)) {
+ if (!PyArg_ParseTuple(args, "y#", &text, &len)) {
return NULL;
}
@@ -177,7 +177,6 @@
static const int version = 1;
-#ifdef IS_PY3K
static struct PyModuleDef base85_module = {
PyModuleDef_HEAD_INIT, "base85", base85_doc, -1, methods,
};
@@ -191,13 +190,3 @@
PyModule_AddIntConstant(m, "version", version);
return m;
}
-#else
-PyMODINIT_FUNC initbase85(void)
-{
- PyObject *m;
- m = Py_InitModule3("base85", methods, base85_doc);
-
- b85prep();
- PyModule_AddIntConstant(m, "version", version);
-}
-#endif
--- a/mercurial/cext/bdiff.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/bdiff.c Thu Jun 16 15:28:54 2022 +0200
@@ -76,8 +76,7 @@
l.next = NULL;
- if (!PyArg_ParseTuple(args, PY23("s*s*:bdiff", "y*y*:bdiff"), &ba,
- &bb)) {
+ if (!PyArg_ParseTuple(args, "y*y*:bdiff", &ba, &bb)) {
return NULL;
}
@@ -233,7 +232,7 @@
Py_ssize_t nelts = 0, size, i, start = 0;
PyObject *result = NULL;
- if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &size)) {
+ if (!PyArg_ParseTuple(args, "y#", &text, &size)) {
goto abort;
}
if (!size) {
@@ -299,8 +298,7 @@
NULL, /* priv */
};
- if (!PyArg_ParseTuple(args, PY23("s#s#", "y#y#"), &a.ptr, &la, &b.ptr,
- &lb)) {
+ if (!PyArg_ParseTuple(args, "y#y#", &a.ptr, &la, &b.ptr, &lb)) {
return NULL;
}
@@ -337,7 +335,6 @@
static const int version = 3;
-#ifdef IS_PY3K
static struct PyModuleDef bdiff_module = {
PyModuleDef_HEAD_INIT, "bdiff", mdiff_doc, -1, methods,
};
@@ -349,11 +346,3 @@
PyModule_AddIntConstant(m, "version", version);
return m;
}
-#else
-PyMODINIT_FUNC initbdiff(void)
-{
- PyObject *m;
- m = Py_InitModule3("bdiff", methods, mdiff_doc);
- PyModule_AddIntConstant(m, "version", version);
-}
-#endif
--- a/mercurial/cext/charencode.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/charencode.c Thu Jun 16 15:28:54 2022 +0200
@@ -15,14 +15,6 @@
#include "compat.h"
#include "util.h"
-#ifdef IS_PY3K
-/* The mapping of Python types is meant to be temporary to get Python
- * 3 to compile. We should remove this once Python 3 support is fully
- * supported and proper types are used in the extensions themselves. */
-#define PyInt_Type PyLong_Type
-#define PyInt_AS_LONG PyLong_AS_LONG
-#endif
-
/* clang-format off */
static const char lowertable[128] = {
'\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
@@ -133,8 +125,7 @@
{
const char *buf;
Py_ssize_t i, len;
- if (!PyArg_ParseTuple(args, PY23("s#:isasciistr", "y#:isasciistr"),
- &buf, &len)) {
+ if (!PyArg_ParseTuple(args, "y#:isasciistr", &buf, &len)) {
return NULL;
}
i = 0;
@@ -228,12 +219,12 @@
const char *table;
if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap", &PyDict_Type,
- &dmap, &PyInt_Type, &spec_obj, &PyFunction_Type,
+ &dmap, &PyLong_Type, &spec_obj, &PyFunction_Type,
&normcase_fallback)) {
goto quit;
}
- spec = (int)PyInt_AS_LONG(spec_obj);
+ spec = (int)PyLong_AS_LONG(spec_obj);
switch (spec) {
case NORMCASE_LOWER:
table = lowertable;
--- a/mercurial/cext/dirs.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/dirs.c Thu Jun 16 15:28:54 2022 +0200
@@ -13,11 +13,7 @@
#include "util.h"
-#ifdef IS_PY3K
#define PYLONG_VALUE(o) ((PyLongObject *)o)->ob_digit[0]
-#else
-#define PYLONG_VALUE(o) PyInt_AS_LONG(o)
-#endif
/*
* This is a multiset of directory names, built from the files that
@@ -100,11 +96,7 @@
}
/* Force Python to not reuse a small shared int. */
-#ifdef IS_PY3K
val = PyLong_FromLong(0x1eadbeef);
-#else
- val = PyInt_FromLong(0x1eadbeef);
-#endif
if (val == NULL)
goto bail;
--- a/mercurial/cext/manifest.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/manifest.c Thu Jun 16 15:28:54 2022 +0200
@@ -317,12 +317,7 @@
return ret;
}
-#ifdef IS_PY3K
#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
-#else
-#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
- | Py_TPFLAGS_HAVE_ITER
-#endif
static PyTypeObject lazymanifestEntriesIterator = {
PyVarObject_HEAD_INIT(NULL, 0) /* header */
@@ -365,12 +360,7 @@
return PyBytes_FromStringAndSize(l->start, pl);
}
-#ifdef IS_PY3K
#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
-#else
-#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
- | Py_TPFLAGS_HAVE_ITER
-#endif
static PyTypeObject lazymanifestKeysIterator = {
PyVarObject_HEAD_INIT(NULL, 0) /* header */
@@ -790,7 +780,7 @@
Py_INCREF(copy->pydata);
for (i = 0; i < self->numlines; i++) {
PyObject *arglist = NULL, *result = NULL;
- arglist = Py_BuildValue(PY23("(s)", "(y)"),
+ arglist = Py_BuildValue("(y)",
self->lines[i].start);
if (!arglist) {
goto bail;
@@ -955,11 +945,7 @@
{NULL},
};
-#ifdef IS_PY3K
#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT
-#else
-#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_SEQUENCE_IN
-#endif
static PyTypeObject lazymanifestType = {
PyVarObject_HEAD_INIT(NULL, 0) /* header */
--- a/mercurial/cext/mpatch.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/mpatch.c Thu Jun 16 15:28:54 2022 +0200
@@ -144,8 +144,7 @@
Py_ssize_t patchlen;
char *bin;
- if (!PyArg_ParseTuple(args, PY23("ls#", "ly#"), &orig, &bin,
- &patchlen)) {
+ if (!PyArg_ParseTuple(args, "ly#", &orig, &bin, &patchlen)) {
return NULL;
}
@@ -182,7 +181,6 @@
static const int version = 1;
-#ifdef IS_PY3K
static struct PyModuleDef mpatch_module = {
PyModuleDef_HEAD_INIT, "mpatch", mpatch_doc, -1, methods,
};
@@ -203,13 +201,3 @@
return m;
}
-#else
-PyMODINIT_FUNC initmpatch(void)
-{
- PyObject *m;
- m = Py_InitModule3("mpatch", methods, mpatch_doc);
- mpatch_Error =
- PyErr_NewException("mercurial.cext.mpatch.mpatchError", NULL, NULL);
- PyModule_AddIntConstant(m, "version", version);
-}
-#endif
--- a/mercurial/cext/osutil.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/osutil.c Thu Jun 16 15:28:54 2022 +0200
@@ -73,19 +73,11 @@
};
#endif
-#ifdef IS_PY3K
#define listdir_slot(name) \
static PyObject *listdir_stat_##name(PyObject *self, void *x) \
{ \
return PyLong_FromLong(((struct listdir_stat *)self)->st.name); \
}
-#else
-#define listdir_slot(name) \
- static PyObject *listdir_stat_##name(PyObject *self, void *x) \
- { \
- return PyInt_FromLong(((struct listdir_stat *)self)->st.name); \
- }
-#endif
listdir_slot(st_dev)
listdir_slot(st_mode)
@@ -206,7 +198,7 @@
? _S_IFDIR : _S_IFREG;
if (!wantstat)
- return Py_BuildValue(PY23("si", "yi"), fd->cFileName, kind);
+ return Py_BuildValue("yi", fd->cFileName, kind);
py_st = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
if (!py_st)
@@ -224,7 +216,7 @@
if (kind == _S_IFREG)
stp->st_size = ((__int64)fd->nFileSizeHigh << 32)
+ fd->nFileSizeLow;
- return Py_BuildValue(PY23("siN", "yiN"), fd->cFileName,
+ return Py_BuildValue("yiN", fd->cFileName,
kind, py_st);
}
@@ -412,10 +404,10 @@
PyObject *stat = makestat(&st);
if (!stat)
goto error;
- elem = Py_BuildValue(PY23("siN", "yiN"), ent->d_name,
+ elem = Py_BuildValue("yiN", ent->d_name,
kind, stat);
} else
- elem = Py_BuildValue(PY23("si", "yi"), ent->d_name,
+ elem = Py_BuildValue("yi", ent->d_name,
kind);
if (!elem)
goto error;
@@ -593,10 +585,10 @@
stat = makestat(&st);
if (!stat)
goto error;
- elem = Py_BuildValue(PY23("siN", "yiN"),
+ elem = Py_BuildValue("yiN",
filename, kind, stat);
} else
- elem = Py_BuildValue(PY23("si", "yi"),
+ elem = Py_BuildValue("yi",
filename, kind);
if (!elem)
goto error;
@@ -693,84 +685,11 @@
return NULL;
}
-/*
- * recvfds() simply does not release GIL during blocking io operation because
- * command server is known to be single-threaded.
- *
- * Old systems such as Solaris don't provide CMSG_LEN, msg_control, etc.
- * Currently, recvfds() is not supported on these platforms.
- */
-#ifdef CMSG_LEN
-
-static ssize_t recvfdstobuf(int sockfd, int **rfds, void *cbuf, size_t cbufsize)
-{
- char dummy[1];
- struct iovec iov = {dummy, sizeof(dummy)};
- struct msghdr msgh = {0};
- struct cmsghdr *cmsg;
-
- msgh.msg_iov = &iov;
- msgh.msg_iovlen = 1;
- msgh.msg_control = cbuf;
- msgh.msg_controllen = (socklen_t)cbufsize;
- if (recvmsg(sockfd, &msgh, 0) < 0)
- return -1;
-
- for (cmsg = CMSG_FIRSTHDR(&msgh); cmsg;
- cmsg = CMSG_NXTHDR(&msgh, cmsg)) {
- if (cmsg->cmsg_level != SOL_SOCKET ||
- cmsg->cmsg_type != SCM_RIGHTS)
- continue;
- *rfds = (int *)CMSG_DATA(cmsg);
- return (cmsg->cmsg_len - CMSG_LEN(0)) / sizeof(int);
- }
-
- *rfds = cbuf;
- return 0;
-}
-
-static PyObject *recvfds(PyObject *self, PyObject *args)
-{
- int sockfd;
- int *rfds = NULL;
- ssize_t rfdscount, i;
- char cbuf[256];
- PyObject *rfdslist = NULL;
-
- if (!PyArg_ParseTuple(args, "i", &sockfd))
- return NULL;
-
- rfdscount = recvfdstobuf(sockfd, &rfds, cbuf, sizeof(cbuf));
- if (rfdscount < 0)
- return PyErr_SetFromErrno(PyExc_OSError);
-
- rfdslist = PyList_New(rfdscount);
- if (!rfdslist)
- goto bail;
- for (i = 0; i < rfdscount; i++) {
- PyObject *obj = PyLong_FromLong(rfds[i]);
- if (!obj)
- goto bail;
- PyList_SET_ITEM(rfdslist, i, obj);
- }
- return rfdslist;
-
-bail:
- Py_XDECREF(rfdslist);
- return NULL;
-}
-
-#endif /* CMSG_LEN */
-
/* allow disabling setprocname via compiler flags */
#ifndef SETPROCNAME_USE_NONE
#if defined(HAVE_SETPROCTITLE)
/* setproctitle is the first choice - available in FreeBSD */
#define SETPROCNAME_USE_SETPROCTITLE
-#elif (defined(__linux__) || defined(__APPLE__)) && PY_MAJOR_VERSION == 2
-/* rewrite the argv buffer in place - works in Linux and OS X. Py_GetArgcArgv
- * in Python 3 returns the copied wchar_t **argv, thus unsupported. */
-#define SETPROCNAME_USE_ARGVREWRITE
#else
#define SETPROCNAME_USE_NONE
#endif
@@ -780,49 +699,11 @@
static PyObject *setprocname(PyObject *self, PyObject *args)
{
const char *name = NULL;
- if (!PyArg_ParseTuple(args, PY23("s", "y"), &name))
+ if (!PyArg_ParseTuple(args, "y", &name))
return NULL;
#if defined(SETPROCNAME_USE_SETPROCTITLE)
setproctitle("%s", name);
-#elif defined(SETPROCNAME_USE_ARGVREWRITE)
- {
- static char *argvstart = NULL;
- static size_t argvsize = 0;
- if (argvstart == NULL) {
- int argc = 0, i;
- char **argv = NULL;
- char *argvend;
- extern void Py_GetArgcArgv(int *argc, char ***argv);
- Py_GetArgcArgv(&argc, &argv);
- /* Py_GetArgcArgv may not do much if a custom python
- * launcher is used that doesn't record the information
- * it needs. Let's handle this gracefully instead of
- * segfaulting. */
- if (argv != NULL)
- argvend = argvstart = argv[0];
- else
- argvend = argvstart = NULL;
-
- /* Check the memory we can use. Typically, argv[i] and
- * argv[i + 1] are continuous. */
- for (i = 0; i < argc; ++i) {
- size_t len;
- if (argv[i] > argvend || argv[i] < argvstart)
- break; /* not continuous */
- len = strlen(argv[i]);
- argvend = argv[i] + len + 1 /* '\0' */;
- }
- if (argvend > argvstart) /* sanity check */
- argvsize = argvend - argvstart;
- }
-
- if (argvstart && argvsize > 1) {
- int n = snprintf(argvstart, argvsize, "%s", name);
- if (n >= 0 && (size_t)n < argvsize)
- memset(argvstart + n, 0, argvsize - n);
- }
- }
#endif
Py_RETURN_NONE;
@@ -1135,14 +1016,14 @@
const char *path = NULL;
struct statfs buf;
int r;
- if (!PyArg_ParseTuple(args, PY23("s", "y"), &path))
+ if (!PyArg_ParseTuple(args, "y", &path))
return NULL;
memset(&buf, 0, sizeof(buf));
r = statfs(path, &buf);
if (r != 0)
return PyErr_SetFromErrno(PyExc_OSError);
- return Py_BuildValue(PY23("s", "y"), describefstype(&buf));
+ return Py_BuildValue("y", describefstype(&buf));
}
#endif /* defined(HAVE_LINUX_STATFS) || defined(HAVE_BSD_STATFS) */
@@ -1153,14 +1034,14 @@
const char *path = NULL;
struct statfs buf;
int r;
- if (!PyArg_ParseTuple(args, PY23("s", "y"), &path))
+ if (!PyArg_ParseTuple(args, "y", &path))
return NULL;
memset(&buf, 0, sizeof(buf));
r = statfs(path, &buf);
if (r != 0)
return PyErr_SetFromErrno(PyExc_OSError);
- return Py_BuildValue(PY23("s", "y"), buf.f_mntonname);
+ return Py_BuildValue("y", buf.f_mntonname);
}
#endif /* defined(HAVE_BSD_STATFS) */
@@ -1195,8 +1076,7 @@
static char *kwlist[] = {"path", "stat", "skip", NULL};
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, PY23("s#|OO:listdir",
- "y#|OO:listdir"),
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "y#|OO:listdir",
kwlist, &path, &plen, &statobj, &skipobj))
return NULL;
@@ -1227,12 +1107,8 @@
char fpmode[4];
int fppos = 0;
int plus;
-#ifndef IS_PY3K
- FILE *fp;
-#endif
- if (!PyArg_ParseTupleAndKeywords(args, kwds, PY23("et|si:posixfile",
- "et|yi:posixfile"),
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|yi:posixfile",
kwlist,
Py_FileSystemDefaultEncoding,
&name, &mode, &bufsize))
@@ -1302,26 +1178,9 @@
PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
goto bail;
}
-#ifndef IS_PY3K
- fp = _fdopen(fd, fpmode);
- if (fp == NULL) {
- _close(fd);
- PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
- goto bail;
- }
-
- file_obj = PyFile_FromFile(fp, name, mode, fclose);
- if (file_obj == NULL) {
- fclose(fp);
- goto bail;
- }
-
- PyFile_SetBufSize(file_obj, bufsize);
-#else
file_obj = PyFile_FromFd(fd, name, mode, bufsize, NULL, NULL, NULL, 1);
if (file_obj == NULL)
goto bail;
-#endif
bail:
PyMem_Free(name);
return file_obj;
@@ -1357,10 +1216,6 @@
{"statfiles", (PyCFunction)statfiles, METH_VARARGS | METH_KEYWORDS,
"stat a series of files or symlinks\n"
"Returns None for non-existent entries and entries of other types.\n"},
-#ifdef CMSG_LEN
- {"recvfds", (PyCFunction)recvfds, METH_VARARGS,
- "receive list of file descriptors via socket\n"},
-#endif
#ifndef SETPROCNAME_USE_NONE
{"setprocname", (PyCFunction)setprocname, METH_VARARGS,
"set process title (best-effort)\n"},
@@ -1387,7 +1242,6 @@
static const int version = 4;
-#ifdef IS_PY3K
static struct PyModuleDef osutil_module = {
PyModuleDef_HEAD_INIT,
"osutil",
@@ -1406,14 +1260,3 @@
PyModule_AddIntConstant(m, "version", version);
return m;
}
-#else
-PyMODINIT_FUNC initosutil(void)
-{
- PyObject *m;
- if (PyType_Ready(&listdir_stat_type) == -1)
- return;
-
- m = Py_InitModule3("osutil", methods, osutil_doc);
- PyModule_AddIntConstant(m, "version", version);
-}
-#endif
--- a/mercurial/cext/osutil.pyi Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/osutil.pyi Thu Jun 16 15:28:54 2022 +0200
@@ -18,7 +18,6 @@
def listdir(path: bytes, st: bool, skip: bool) -> List[stat]: ...
def posixfile(name: AnyStr, mode: bytes, buffering: int) -> IO: ...
def statfiles(names: Sequence[bytes]) -> List[stat]: ...
-def recvfds(sockfd: int) -> List[int]: ...
def setprocname(name: bytes) -> None: ...
def getfstype(path: bytes) -> bytes: ...
def getfsmountpoint(path: bytes) -> bytes: ...
--- a/mercurial/cext/parsers.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/parsers.c Thu Jun 16 15:28:54 2022 +0200
@@ -17,22 +17,6 @@
#include "charencode.h"
#include "util.h"
-#ifdef IS_PY3K
-/* The mapping of Python types is meant to be temporary to get Python
- * 3 to compile. We should remove this once Python 3 support is fully
- * supported and proper types are used in the extensions themselves. */
-#define PyInt_Check PyLong_Check
-#define PyInt_FromLong PyLong_FromLong
-#define PyInt_FromSsize_t PyLong_FromSsize_t
-#define PyInt_AsLong PyLong_AsLong
-#else
-/* Windows on Python 2.7 doesn't define S_IFLNK. Python 3+ defines via
- * pyport.h. */
-#ifndef S_IFLNK
-#define S_IFLNK 0120000
-#endif
-#endif
-
static const char *const versionerrortext = "Python minor version mismatch";
static const int dirstate_v1_from_p2 = -2;
@@ -305,27 +289,6 @@
self->mtime_ns);
};
-static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
-{
- char state = dirstate_item_c_v1_state(self);
- return PyBytes_FromStringAndSize(&state, 1);
-};
-
-static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
-{
- return PyInt_FromLong(dirstate_item_c_v1_mode(self));
-};
-
-static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
-{
- return PyInt_FromLong(dirstate_item_c_v1_size(self));
-};
-
-static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
-{
- return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
-};
-
static PyObject *dirstate_item_mtime_likely_equal_to(dirstateItemObject *self,
PyObject *other)
{
@@ -411,7 +374,7 @@
} else {
PyErr_Format(PyExc_RuntimeError,
"unknown state: `%c` (%d, %d, %d)", state, mode,
- size, mtime, NULL);
+ size, mtime);
Py_DECREF(t);
return NULL;
}
@@ -419,20 +382,6 @@
return t;
}
-/* This will never change since it's bound to V1, unlike `dirstate_item_new` */
-static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
- PyObject *args)
-{
- /* We do all the initialization here and not a tp_init function because
- * dirstate_item is immutable. */
- char state;
- int size, mode, mtime;
- if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
- return NULL;
- }
- return (PyObject *)dirstate_item_from_v1_data(state, mode, size, mtime);
-};
-
static PyObject *dirstate_item_from_v2_meth(PyTypeObject *subtype,
PyObject *args)
{
@@ -542,18 +491,8 @@
static PyMethodDef dirstate_item_methods[] = {
{"v2_data", (PyCFunction)dirstate_item_v2_data, METH_NOARGS,
"return data suitable for v2 serialization"},
- {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
- "return a \"state\" suitable for v1 serialization"},
- {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
- "return a \"mode\" suitable for v1 serialization"},
- {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
- "return a \"size\" suitable for v1 serialization"},
- {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
- "return a \"mtime\" suitable for v1 serialization"},
{"mtime_likely_equal_to", (PyCFunction)dirstate_item_mtime_likely_equal_to,
METH_O, "True if the stored mtime is likely equal to the given mtime"},
- {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth,
- METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V1 data"},
{"from_v2_data", (PyCFunction)dirstate_item_from_v2_meth,
METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V2 data"},
{"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
@@ -571,17 +510,17 @@
static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
{
- return PyInt_FromLong(dirstate_item_c_v1_mode(self));
+ return PyLong_FromLong(dirstate_item_c_v1_mode(self));
};
static PyObject *dirstate_item_get_size(dirstateItemObject *self)
{
- return PyInt_FromLong(dirstate_item_c_v1_size(self));
+ return PyLong_FromLong(dirstate_item_c_v1_size(self));
};
static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
{
- return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
+ return PyLong_FromLong(dirstate_item_c_v1_mtime(self));
};
static PyObject *dirstate_item_get_state(dirstateItemObject *self)
@@ -831,9 +770,8 @@
Py_ssize_t len = 40;
Py_ssize_t readlen;
- if (!PyArg_ParseTuple(
- args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
- &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
+ if (!PyArg_ParseTuple(args, "O!O!y#:parse_dirstate", &PyDict_Type,
+ &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
goto quit;
}
@@ -846,8 +784,8 @@
goto quit;
}
- parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
- str + 20, (Py_ssize_t)20);
+ parents = Py_BuildValue("y#y#", str, (Py_ssize_t)20, str + 20,
+ (Py_ssize_t)20);
if (!parents) {
goto quit;
}
@@ -1176,8 +1114,7 @@
Py_ssize_t datalen, offset, stop;
PyObject *markers = NULL;
- if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
- &offset, &stop)) {
+ if (!PyArg_ParseTuple(args, "y#nn", &data, &datalen, &offset, &stop)) {
return NULL;
}
if (offset < 0) {
@@ -1289,7 +1226,7 @@
if (!ver) {
return -1;
}
- hexversion = PyInt_AsLong(ver);
+ hexversion = PyLong_AsLong(ver);
Py_DECREF(ver);
/* sys.hexversion is a 32-bit number by default, so the -1 case
* should only occur in unusual circumstances (e.g. if sys.hexversion
@@ -1309,7 +1246,6 @@
return 0;
}
-#ifdef IS_PY3K
static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
parsers_doc, -1, methods};
@@ -1323,15 +1259,3 @@
module_init(mod);
return mod;
}
-#else
-PyMODINIT_FUNC initparsers(void)
-{
- PyObject *mod;
-
- if (check_python_version() == -1) {
- return;
- }
- mod = Py_InitModule3("parsers", methods, parsers_doc);
- module_init(mod);
-}
-#endif
--- a/mercurial/cext/pathencode.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/pathencode.c Thu Jun 16 15:28:54 2022 +0200
@@ -535,8 +535,7 @@
Py_ssize_t len, newlen;
PyObject *ret;
- if (!PyArg_ParseTuple(args, PY23("s#:lowerencode", "y#:lowerencode"),
- &path, &len)) {
+ if (!PyArg_ParseTuple(args, "y#:lowerencode", &path, &len)) {
return NULL;
}
@@ -711,7 +710,7 @@
}
}
- shaobj = PyObject_CallFunction(shafunc, PY23("s#", "y#"), str, len);
+ shaobj = PyObject_CallFunction(shafunc, "y#", str, len);
if (shaobj == NULL) {
return -1;
--- a/mercurial/cext/revlog.c Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/revlog.c Thu Jun 16 15:28:54 2022 +0200
@@ -23,16 +23,6 @@
#include "revlog.h"
#include "util.h"
-#ifdef IS_PY3K
-/* The mapping of Python types is meant to be temporary to get Python
- * 3 to compile. We should remove this once Python 3 support is fully
- * supported and proper types are used in the extensions themselves. */
-#define PyInt_Check PyLong_Check
-#define PyInt_FromLong PyLong_FromLong
-#define PyInt_FromSsize_t PyLong_FromSsize_t
-#define PyInt_AsLong PyLong_AsLong
-#endif
-
typedef struct indexObjectStruct indexObject;
typedef struct {
@@ -43,6 +33,7 @@
int abi_version;
Py_ssize_t (*index_length)(const indexObject *);
const char *(*index_node)(indexObject *, Py_ssize_t);
+ int (*fast_rank)(indexObject *, Py_ssize_t);
int (*index_parents)(PyObject *, int, int *);
} Revlog_CAPI;
@@ -119,11 +110,9 @@
static int index_find_node(indexObject *self, const char *node);
#if LONG_MAX == 0x7fffffffL
-static const char *const tuple_format =
- PY23("Kiiiiiis#KiBBi", "Kiiiiiiy#KiBBi");
+static const char *const tuple_format = "Kiiiiiiy#KiBBi";
#else
-static const char *const tuple_format =
- PY23("kiiiiiis#kiBBi", "kiiiiiiy#kiBBi");
+static const char *const tuple_format = "kiiiiiiy#kiBBi";
#endif
/* A RevlogNG v1 index entry is 64 bytes long. */
@@ -502,13 +491,13 @@
{
int header;
char out[4];
- if (!PyArg_ParseTuple(args, "I", &header)) {
+ if (!PyArg_ParseTuple(args, "i", &header)) {
return NULL;
}
if (self->format_version != format_v1) {
PyErr_Format(PyExc_RuntimeError,
"version header should go in the docket, not the "
- "index: %lu",
+ "index: %d",
header);
return NULL;
}
@@ -576,6 +565,33 @@
}
/*
+ * Return the stored rank of a given revision if known, or rank_unknown
+ * otherwise.
+ *
+ * The rank of a revision is the size of the sub-graph it defines as a head.
+ * Equivalently, the rank of a revision `r` is the size of the set
+ * `ancestors(r)`, `r` included.
+ *
+ * This method returns the rank retrieved from the revlog in constant time. It
+ * makes no attempt at computing unknown values for versions of the revlog
+ * which do not persist the rank.
+ */
+static int index_fast_rank(indexObject *self, Py_ssize_t pos)
+{
+ Py_ssize_t length = index_length(self);
+
+ if (self->format_version != format_cl2 || pos >= length) {
+ return rank_unknown;
+ }
+
+ if (pos == nullrev) {
+ return 0; /* convention */
+ }
+
+ return getbe32(index_deref(self, pos) + entry_cl2_offset_rank);
+}
+
+/*
* Return the hash of the node corresponding to the given rev. The
* rev is assumed to be existing. If not, an exception is set.
*/
@@ -730,9 +746,9 @@
char comp_mode;
char *data;
#if LONG_MAX == 0x7fffffffL
- const char *const sidedata_format = PY23("nKiKB", "nKiKB");
+ const char *const sidedata_format = "nKiKB";
#else
- const char *const sidedata_format = PY23("nkikB", "nkikB");
+ const char *const sidedata_format = "nkikB";
#endif
if (self->entry_size == v1_entry_size || self->inlined) {
@@ -802,7 +818,7 @@
#define istat(__n, __d) \
do { \
s = PyBytes_FromString(__d); \
- t = PyInt_FromSsize_t(self->__n); \
+ t = PyLong_FromSsize_t(self->__n); \
if (!s || !t) \
goto bail; \
if (PyDict_SetItem(obj, s, t) == -1) \
@@ -953,7 +969,7 @@
l = PyList_GET_SIZE(roots);
for (i = 0; i < l; i++) {
- revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
+ revnum = PyLong_AsLong(PyList_GET_ITEM(roots, i));
if (revnum == -1 && PyErr_Occurred())
goto bail;
/* If root is out of range, e.g. wdir(), it must be unreachable
@@ -966,7 +982,7 @@
/* Populate tovisit with all the heads */
l = PyList_GET_SIZE(heads);
for (i = 0; i < l; i++) {
- revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
+ revnum = PyLong_AsLong(PyList_GET_ITEM(heads, i));
if (revnum == -1 && PyErr_Occurred())
goto bail;
if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
@@ -986,7 +1002,7 @@
revnum = tovisit[k++];
if (revstates[revnum + 1] & RS_ROOT) {
revstates[revnum + 1] |= RS_REACHABLE;
- val = PyInt_FromLong(revnum);
+ val = PyLong_FromLong(revnum);
if (val == NULL)
goto bail;
r = PyList_Append(reachable, val);
@@ -1031,7 +1047,7 @@
RS_REACHABLE) &&
!(revstates[i + 1] & RS_REACHABLE)) {
revstates[i + 1] |= RS_REACHABLE;
- val = PyInt_FromSsize_t(i);
+ val = PyLong_FromSsize_t(i);
if (val == NULL)
goto bail;
r = PyList_Append(reachable, val);
@@ -1116,7 +1132,7 @@
}
for (i = 0; i < numphases; ++i) {
- PyObject *pyphase = PyInt_FromLong(trackedphases[i]);
+ PyObject *pyphase = PyLong_FromLong(trackedphases[i]);
PyObject *phaseroots = NULL;
if (pyphase == NULL)
goto release;
@@ -1175,7 +1191,7 @@
"bad phase number in internal list");
goto release;
}
- pyrev = PyInt_FromLong(rev);
+ pyrev = PyLong_FromLong(rev);
if (pyrev == NULL)
goto release;
if (PySet_Add(pyphase, pyrev) == -1) {
@@ -1189,7 +1205,7 @@
if (phasesetsdict == NULL)
goto release;
for (i = 0; i < numphases; ++i) {
- PyObject *pyphase = PyInt_FromLong(trackedphases[i]);
+ PyObject *pyphase = PyLong_FromLong(trackedphases[i]);
if (pyphase == NULL)
goto release;
if (PyDict_SetItem(phasesetsdict, pyphase, phasesets[i]) ==
@@ -1247,7 +1263,7 @@
if (heads == NULL)
goto bail;
if (len == 0) {
- PyObject *nullid = PyInt_FromLong(-1);
+ PyObject *nullid = PyLong_FromLong(-1);
if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
Py_XDECREF(nullid);
goto bail;
@@ -1296,7 +1312,7 @@
if (nothead[i])
continue;
- head = PyInt_FromSsize_t(i);
+ head = PyLong_FromSsize_t(i);
if (head == NULL || PyList_Append(heads, head) == -1) {
Py_XDECREF(head);
goto bail;
@@ -1442,7 +1458,7 @@
assert(PyErr_Occurred());
goto bail;
}
- key = PyInt_FromSsize_t(base);
+ key = PyLong_FromSsize_t(base);
allvalues = PyDict_GetItem(cache, key);
if (allvalues == NULL && PyErr_Occurred()) {
goto bail;
@@ -1459,7 +1475,7 @@
goto bail;
}
}
- value = PyInt_FromSsize_t(rev);
+ value = PyLong_FromSsize_t(rev);
if (PyList_Append(allvalues, value)) {
goto bail;
}
@@ -1486,8 +1502,8 @@
return NULL;
}
- if (PyInt_Check(stoparg)) {
- stoprev = (int)PyInt_AsLong(stoparg);
+ if (PyLong_Check(stoparg)) {
+ stoprev = (int)PyLong_AsLong(stoparg);
if (stoprev == -1 && PyErr_Occurred()) {
return NULL;
}
@@ -1521,7 +1537,7 @@
iterrev = rev;
while (iterrev != baserev && iterrev != stoprev) {
- PyObject *value = PyInt_FromLong(iterrev);
+ PyObject *value = PyLong_FromLong(iterrev);
if (value == NULL) {
goto bail;
}
@@ -1560,7 +1576,7 @@
if (iterrev == stoprev) {
stopped = 1;
} else {
- PyObject *value = PyInt_FromLong(iterrev);
+ PyObject *value = PyLong_FromLong(iterrev);
if (value == NULL) {
goto bail;
}
@@ -1712,7 +1728,8 @@
goto bail;
}
for (i = 0; i < num_revs; i++) {
- Py_ssize_t revnum = PyInt_AsLong(PyList_GET_ITEM(list_revs, i));
+ Py_ssize_t revnum =
+ PyLong_AsLong(PyList_GET_ITEM(list_revs, i));
if (revnum == -1 && PyErr_Occurred()) {
goto bail;
}
@@ -2118,7 +2135,7 @@
raise_revlog_error();
return NULL;
}
- return PyInt_FromLong(length);
+ return PyLong_FromLong(length);
}
static void nt_dealloc(nodetree *self)
@@ -2266,7 +2283,7 @@
char *node;
int rev;
- if (PyInt_Check(value)) {
+ if (PyLong_Check(value)) {
long idx;
if (!pylong_to_long(value, &idx)) {
return NULL;
@@ -2278,7 +2295,7 @@
return NULL;
rev = index_find_node(self, node);
if (rev >= -1)
- return PyInt_FromLong(rev);
+ return PyLong_FromLong(rev);
if (rev == -2)
raise_revlog_error();
return NULL;
@@ -2310,7 +2327,7 @@
char *node;
int rev, i;
- if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
+ if (!PyArg_ParseTuple(args, "y#", &node, &nodelen))
return NULL;
if (nodelen < 1) {
@@ -2377,7 +2394,7 @@
raise_revlog_error();
return NULL;
}
- return PyInt_FromLong(length);
+ return PyLong_FromLong(length);
}
static PyObject *index_m_get(indexObject *self, PyObject *args)
@@ -2395,14 +2412,14 @@
return NULL;
if (rev == -2)
Py_RETURN_NONE;
- return PyInt_FromLong(rev);
+ return PyLong_FromLong(rev);
}
static int index_contains(indexObject *self, PyObject *value)
{
char *node;
- if (PyInt_Check(value)) {
+ if (PyLong_Check(value)) {
long rev;
if (!pylong_to_long(value, &rev)) {
return -1;
@@ -2440,7 +2457,7 @@
return NULL;
rev = index_find_node(self, node);
if (rev >= -1)
- return PyInt_FromLong(rev);
+ return PyLong_FromLong(rev);
if (rev == -2)
raise_revlog_error();
return NULL;
@@ -2493,7 +2510,7 @@
if (sv < poison) {
interesting -= 1;
if (sv == allseen) {
- PyObject *obj = PyInt_FromLong(v);
+ PyObject *obj = PyLong_FromLong(v);
if (obj == NULL)
goto bail;
if (PyList_Append(gca, obj) == -1) {
@@ -2561,7 +2578,7 @@
}
for (i = 0; i < revcount; i++) {
- int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
+ int n = (int)PyLong_AsLong(PyList_GET_ITEM(revs, i));
if (n > maxrev)
maxrev = n;
}
@@ -2586,7 +2603,7 @@
goto bail;
for (i = 0; i < revcount; i++) {
- int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
+ int n = (int)PyLong_AsLong(PyList_GET_ITEM(revs, i));
long b = 1l << i;
depth[n] = 1;
seen[n] = b;
@@ -2716,13 +2733,13 @@
bitmask x;
long val;
- if (!PyInt_Check(obj)) {
+ if (!PyLong_Check(obj)) {
PyErr_SetString(PyExc_TypeError,
"arguments must all be ints");
Py_DECREF(obj);
goto bail;
}
- val = PyInt_AsLong(obj);
+ val = PyLong_AsLong(obj);
Py_DECREF(obj);
if (val == -1) {
ret = PyList_New(0);
@@ -2763,7 +2780,7 @@
ret = PyList_New(1);
if (ret == NULL)
goto bail;
- obj = PyInt_FromLong(revs[0]);
+ obj = PyLong_FromLong(revs[0]);
if (obj == NULL)
goto bail;
PyList_SET_ITEM(ret, 0, obj);
@@ -2834,14 +2851,8 @@
Py_ssize_t length = index_length(self) + 1;
int ret = 0;
-/* Argument changed from PySliceObject* to PyObject* in Python 3. */
-#ifdef IS_PY3K
if (PySlice_GetIndicesEx(item, length, &start, &stop, &step,
&slicelength) < 0)
-#else
- if (PySlice_GetIndicesEx((PySliceObject *)item, length, &start, &stop,
- &step, &slicelength) < 0)
-#endif
return -1;
if (slicelength <= 0)
@@ -2925,7 +2936,7 @@
if (value == NULL)
return self->ntinitialized ? nt_delete_node(&self->nt, node)
: 0;
- rev = PyInt_AsLong(value);
+ rev = PyLong_AsLong(value);
if (rev > INT_MAX || rev < 0) {
if (!PyErr_Occurred())
PyErr_SetString(PyExc_ValueError, "rev out of range");
@@ -3027,10 +3038,9 @@
self->entry_size = cl2_entry_size;
}
- self->nullentry =
- Py_BuildValue(PY23("iiiiiiis#iiBBi", "iiiiiiiy#iiBBi"), 0, 0, 0, -1,
- -1, -1, -1, nullid, self->nodelen, 0, 0,
- comp_mode_inline, comp_mode_inline, rank_unknown);
+ self->nullentry = Py_BuildValue(
+ "iiiiiiiy#iiBBi", 0, 0, 0, -1, -1, -1, -1, nullid, self->nodelen, 0,
+ 0, comp_mode_inline, comp_mode_inline, rank_unknown);
if (!self->nullentry)
return -1;
@@ -3266,10 +3276,7 @@
static Revlog_CAPI CAPI = {
/* increment the abi_version field upon each change in the Revlog_CAPI
struct or in the ABI of the listed functions */
- 2,
- index_length,
- index_node,
- HgRevlogIndex_GetParents,
+ 3, index_length, index_node, index_fast_rank, HgRevlogIndex_GetParents,
};
void revlog_module_init(PyObject *mod)
--- a/mercurial/cext/util.h Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cext/util.h Thu Jun 16 15:28:54 2022 +0200
@@ -10,17 +10,6 @@
#include "compat.h"
-#if PY_MAJOR_VERSION >= 3
-#define IS_PY3K
-#endif
-
-/* helper to switch things like string literal depending on Python version */
-#ifdef IS_PY3K
-#define PY23(py2, py3) py3
-#else
-#define PY23(py2, py3) py2
-#endif
-
/* clang-format off */
typedef struct {
PyObject_HEAD
--- a/mercurial/cffi/bdiff.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cffi/bdiff.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import struct
--- a/mercurial/cffi/bdiffbuild.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cffi/bdiffbuild.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import cffi
import os
--- a/mercurial/cffi/mpatch.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cffi/mpatch.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from ..pure.mpatch import *
from ..pure.mpatch import mpatchError # silence pyflakes
--- a/mercurial/cffi/mpatchbuild.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cffi/mpatchbuild.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import cffi
import os
--- a/mercurial/cffi/osutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cffi/osutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
import stat as statmod
@@ -34,7 +33,7 @@
attrkinds[lib.VFIFO] = statmod.S_IFIFO
attrkinds[lib.VSOCK] = statmod.S_IFSOCK
- class stat_res(object):
+ class stat_res:
def __init__(self, st_mode, st_mtime, st_size):
self.st_mode = st_mode
self.st_mtime = st_mtime
--- a/mercurial/cffi/osutilbuild.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cffi/osutilbuild.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import cffi
ffi = cffi.FFI()
--- a/mercurial/changegroup.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/changegroup.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
import struct
@@ -106,7 +105,7 @@
os.unlink(cleanup)
-class cg1unpacker(object):
+class cg1unpacker:
"""Unpacker for cg1 changegroup streams.
A changegroup unpacker handles the framing of the revision data in
@@ -421,11 +420,11 @@
cl = repo.changelog
ml = repo.manifestlog
# validate incoming csets have their manifests
- for cset in pycompat.xrange(clstart, clend):
+ for cset in range(clstart, clend):
mfnode = cl.changelogrevision(cset).manifest
mfest = ml[mfnode].readdelta()
# store file nodes we must see
- for f, n in pycompat.iteritems(mfest):
+ for f, n in mfest.items():
needfiles.setdefault(f, set()).add(n)
on_filelog_rev = None
@@ -510,7 +509,7 @@
**pycompat.strkwargs(hookargs)
)
- added = pycompat.xrange(clstart, clend)
+ added = range(clstart, clend)
phaseall = None
if srctype in (b'push', b'serve'):
# Old servers can not push the boundary themselves.
@@ -692,7 +691,7 @@
)
-class headerlessfixup(object):
+class headerlessfixup:
def __init__(self, fh, h):
self._h = h
self._fh = fh
@@ -826,7 +825,7 @@
# somewhat unsurprised to find a case in the wild
# where this breaks down a bit. That said, I don't
# know if it would hurt anything.
- for i in pycompat.xrange(rev, 0, -1):
+ for i in range(rev, 0, -1):
if store.linkrev(i) == clrev:
return i
# We failed to resolve a parent for this node, so
@@ -1004,7 +1003,7 @@
progress.complete()
-class cgpacker(object):
+class cgpacker:
def __init__(
self,
repo,
@@ -1957,7 +1956,7 @@
revisions += len(fl) - o
if f in needfiles:
needs = needfiles[f]
- for new in pycompat.xrange(o, len(fl)):
+ for new in range(o, len(fl)):
n = fl.node(new)
if n in needs:
needs.remove(n)
@@ -1967,7 +1966,7 @@
del needfiles[f]
progress.complete()
- for f, needs in pycompat.iteritems(needfiles):
+ for f, needs in needfiles.items():
fl = repo.file(f)
for n in needs:
try:
--- a/mercurial/changelog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/changelog.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from .node import (
@@ -92,7 +91,7 @@
return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
-class appender(object):
+class appender:
"""the changelog index must be updated last on disk, so we use this class
to delay writes to it"""
@@ -162,7 +161,7 @@
return self.fp.__exit__(*args)
-class _divertopener(object):
+class _divertopener:
def __init__(self, opener, target):
self._opener = opener
self._target = target
@@ -189,7 +188,7 @@
@attr.s
-class _changelogrevision(object):
+class _changelogrevision:
# Extensions might modify _defaultextra, so let the constructor below pass
# it in
extra = attr.ib()
@@ -205,7 +204,7 @@
branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
-class changelogrevision(object):
+class changelogrevision:
"""Holds results of a parsed changelog revision.
Changelog revisions consist of multiple pieces of data, including
--- a/mercurial/chgserver.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/chgserver.py Thu Jun 16 15:28:54 2022 +0200
@@ -39,7 +39,6 @@
skiphash = False
"""
-from __future__ import absolute_import
import inspect
import os
@@ -135,7 +134,7 @@
ignored = set()
envitems = [
(k, v)
- for k, v in pycompat.iteritems(encoding.environ)
+ for k, v in encoding.environ.items()
if _envre.match(k) and k not in ignored
]
envhash = _hashlist(sorted(envitems))
@@ -197,7 +196,7 @@
return _hashlist(pycompat.maplist(trystat, paths))[:12]
-class hashstate(object):
+class hashstate:
"""a structure storing confighash, mtimehash, paths used for mtimehash"""
def __init__(self, confighash, mtimehash, mtimepaths):
@@ -293,7 +292,7 @@
return (newui, newlui)
-class channeledsystem(object):
+class channeledsystem:
"""Propagate ui.system() request in the following format:
payload length (unsigned int),
@@ -321,7 +320,7 @@
def __call__(self, cmd, environ, cwd=None, type=b'system', cmdtable=None):
args = [type, cmd, util.abspath(cwd or b'.')]
- args.extend(b'%s=%s' % (k, v) for k, v in pycompat.iteritems(environ))
+ args.extend(b'%s=%s' % (k, v) for k, v in environ.items())
data = b'\0'.join(args)
self.out.write(struct.pack(b'>cI', self.channel, len(data)))
self.out.write(data)
@@ -390,7 +389,17 @@
# tell client to sendmsg() with 1-byte payload, which makes it
# distinctive from "attachio\n" command consumed by client.read()
self.clientsock.sendall(struct.pack(b'>cI', b'I', 1))
- clientfds = util.recvfds(self.clientsock.fileno())
+
+ data, ancdata, msg_flags, address = self.clientsock.recvmsg(1, 256)
+ assert len(ancdata) == 1
+ cmsg_level, cmsg_type, cmsg_data = ancdata[0]
+ assert cmsg_level == socket.SOL_SOCKET
+ assert cmsg_type == socket.SCM_RIGHTS
+ # memoryview.cast() was added in typeshed 61600d68772a, but pytype
+ # still complains
+ # pytype: disable=attribute-error
+ clientfds = memoryview(cmsg_data).cast('i').tolist()
+ # pytype: enable=attribute-error
self.ui.log(b'chgserver', b'received fds: %r\n', clientfds)
ui = self.ui
@@ -409,22 +418,13 @@
# be unbuffered no matter if it is a tty or not.
if fn == b'ferr':
newfp = fp
- elif pycompat.ispy3:
+ else:
# On Python 3, the standard library doesn't offer line-buffered
# binary streams, so wrap/unwrap it.
if fp.isatty():
newfp = procutil.make_line_buffered(fp)
else:
newfp = procutil.unwrap_line_buffered(fp)
- else:
- # Python 2 uses the I/O streams provided by the C library, so
- # make it line-buffered explicitly. Otherwise the default would
- # be decided on first write(), where fout could be a pager.
- if fp.isatty():
- bufsize = 1 # line buffered
- else:
- bufsize = -1 # system default
- newfp = os.fdopen(fp.fileno(), mode, bufsize)
if newfp is not fp:
setattr(ui, fn, newfp)
setattr(self, cn, newfp)
@@ -448,17 +448,8 @@
nullfd = os.open(os.devnull, os.O_WRONLY)
ui = self.ui
for (ch, fp, fd), (cn, fn, mode) in zip(self._oldios, _iochannels):
- newfp = getattr(ui, fn)
- # On Python 2, newfp and fp may be separate file objects associated
- # with the same fd, so we must close newfp while it's associated
- # with the client. Otherwise the new associated fd would be closed
- # when newfp gets deleted. On Python 3, newfp is just a wrapper
- # around fp even if newfp is not fp, so deleting newfp is safe.
- if not (pycompat.ispy3 or newfp is fp):
- newfp.close()
- # restore original fd: fp is open again
try:
- if (pycompat.ispy3 or newfp is fp) and 'w' in mode:
+ if 'w' in mode:
# Discard buffered data which couldn't be flushed because
# of EPIPE. The data should belong to the current session
# and should never persist.
@@ -636,7 +627,7 @@
return b'%s-%s' % (os.path.join(dirname, basename), hashstr)
-class chgunixservicehandler(object):
+class chgunixservicehandler:
"""Set of operations for chg services"""
pollinterval = 1 # [sec]
--- a/mercurial/cmdutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/cmdutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import copy as copymod
import errno
@@ -562,9 +561,8 @@
backupdir = repo.vfs.join(b'record-backups')
try:
os.mkdir(backupdir)
- except OSError as err:
- if err.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
try:
# backup continues
for f in tobackup:
@@ -627,7 +625,7 @@
# 5. finally restore backed-up files
try:
dirstate = repo.dirstate
- for realname, tmpname in pycompat.iteritems(backups):
+ for realname, tmpname in backups.items():
ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
if dirstate.get_entry(realname).maybe_clean:
@@ -667,7 +665,7 @@
return commit(ui, repo, recordinwlock, pats, opts)
-class dirnode(object):
+class dirnode:
"""
Represent a directory in user working copy with information required for
the purpose of tersing its status.
@@ -833,7 +831,7 @@
@attr.s(frozen=True)
-class morestatus(object):
+class morestatus:
reporoot = attr.ib()
unfinishedop = attr.ib()
unfinishedmsg = attr.ib()
@@ -1344,7 +1342,7 @@
return not pat or pat == b'-'
-class _unclosablefile(object):
+class _unclosablefile:
def __init__(self, fp):
self._fp = fp
@@ -2934,16 +2932,15 @@
def filectxfn(repo, ctx_, path):
try:
- # Return None for removed files.
- if path in wctx.removed() and path in filestoamend:
- return None
-
# If the file being considered is not amongst the files
# to be amended, we should use the file context from the
# old changeset. This avoids issues when only some files in
# the working copy are being amended but there are also
# changes to other files from the old changeset.
if path in filestoamend:
+ # Return None for removed files.
+ if path in wctx.removed():
+ return None
fctx = wctx[path]
else:
fctx = old.filectx(path)
@@ -3750,10 +3747,18 @@
for f in actions[b'add'][0]:
# Don't checkout modified files, they are already created by the diff
- if f not in newlyaddedandmodifiedfiles:
- prntstatusmsg(b'add', f)
- checkout(f)
- repo.dirstate.set_tracked(f)
+ if f in newlyaddedandmodifiedfiles:
+ continue
+
+ if interactive:
+ choice = repo.ui.promptchoice(
+ _(b"add new file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
+ )
+ if choice != 0:
+ continue
+ prntstatusmsg(b'add', f)
+ checkout(f)
+ repo.dirstate.set_tracked(f)
for f in actions[b'undelete'][0]:
if interactive:
--- a/mercurial/color.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/color.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import re
--- a/mercurial/commands.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/commands.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,9 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
-import errno
+
import os
import re
import sys
@@ -1572,7 +1570,7 @@
pycompat.bytestr(e),
hint=_(b"see 'hg help bundlespec' for supported values for --type"),
)
- cgversion = bundlespec.contentopts[b"cg.version"]
+ cgversion = bundlespec.params[b"cg.version"]
# Packed bundles are a pseudo bundle format for now.
if cgversion == b's1':
@@ -1601,8 +1599,9 @@
raise error.InputError(
_(b"--base is incompatible with specifying destinations")
)
- common = [repo[rev].node() for rev in base]
- heads = [repo[r].node() for r in revs] if revs else None
+ cl = repo.changelog
+ common = [cl.node(rev) for rev in base]
+ heads = [cl.node(r) for r in revs] if revs else None
outgoing = discovery.outgoing(repo, common, heads)
missing = outgoing.missing
excluded = outgoing.excluded
@@ -1681,14 +1680,14 @@
# Bundling of obsmarker and phases is optional as not all clients
# support the necessary features.
cfg = ui.configbool
- contentopts = {
- b'obsolescence': cfg(b'experimental', b'evolution.bundle-obsmarker'),
- b'obsolescence-mandatory': cfg(
- b'experimental', b'evolution.bundle-obsmarker:mandatory'
- ),
- b'phases': cfg(b'experimental', b'bundle-phases'),
- }
- bundlespec.contentopts.update(contentopts)
+ obsolescence_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker')
+ bundlespec.set_param(b'obsolescence', obsolescence_cfg, overwrite=False)
+ obs_mand_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker:mandatory')
+ bundlespec.set_param(
+ b'obsolescence-mandatory', obs_mand_cfg, overwrite=False
+ )
+ phases_cfg = cfg(b'experimental', b'bundle-phases')
+ bundlespec.set_param(b'phases', phases_cfg, overwrite=False)
bundle2.writenewbundle(
ui,
@@ -1697,7 +1696,7 @@
fname,
bversion,
outgoing,
- bundlespec.contentopts,
+ bundlespec.params,
compression=bcompression,
compopts=compopts,
)
@@ -2477,7 +2476,7 @@
)
def debugcommands(ui, cmd=b'', *args):
"""list all available commands and options"""
- for cmd, vals in sorted(pycompat.iteritems(table)):
+ for cmd, vals in sorted(table.items()):
cmd = cmd.split(b'|')[0]
opts = b', '.join([i[1] for i in vals[1]])
ui.write(b'%s: %s\n' % (cmd, opts))
@@ -2544,7 +2543,8 @@
:hg:`diff` may generate unexpected results for merges, as it will
default to comparing against the working directory's first
- parent changeset if no revisions are specified.
+ parent changeset if no revisions are specified. To diff against the
+ conflict regions, you can use `--config diff.merge=yes`.
By default, the working directory files are compared to its first parent. To
see the differences from another revision, use --from. To see the difference
@@ -3918,9 +3918,7 @@
hexremoterev = hex(remoterev)
bms = [
bm
- for bm, bmr in pycompat.iteritems(
- peer.listkeys(b'bookmarks')
- )
+ for bm, bmr in peer.listkeys(b'bookmarks').items()
if bmr == hexremoterev
]
@@ -6183,9 +6181,8 @@
a = repo.wjoin(f)
try:
util.copyfile(a, a + b".resolve")
- except (IOError, OSError) as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
try:
# preresolve file
@@ -6202,9 +6199,8 @@
util.rename(
a + b".resolve", scmutil.backuppath(ui, repo, f)
)
- except OSError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
if hasconflictmarkers:
ui.warn(
@@ -7097,7 +7093,7 @@
c = repo.dirstate.copies()
copied, renamed = [], []
- for d, s in pycompat.iteritems(c):
+ for d, s in c.items():
if s in status.removed:
status.removed.remove(s)
renamed.append(d)
--- a/mercurial/commandserver.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/commandserver.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,24 +5,16 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import gc
import os
import random
+import selectors
import signal
import socket
import struct
import traceback
-try:
- import selectors
-
- selectors.BaseSelector
-except ImportError:
- from .thirdparty import selectors2 as selectors
-
from .i18n import _
from .pycompat import getattr
from . import (
@@ -40,7 +32,7 @@
)
-class channeledoutput(object):
+class channeledoutput:
"""
Write data to out in the following format:
@@ -69,7 +61,7 @@
return getattr(self.out, attr)
-class channeledmessage(object):
+class channeledmessage:
"""
Write encoded message and metadata to out in the following format:
@@ -98,7 +90,7 @@
return getattr(self._cout, attr)
-class channeledinput(object):
+class channeledinput:
"""
Read data from in_.
@@ -201,7 +193,7 @@
)
-class server(object):
+class server:
"""
Listens for commands on fin, runs them and writes the output on a channel
based stream to fout.
@@ -451,7 +443,7 @@
u.setlogger(b'cmdserver', logger)
-class pipeservice(object):
+class pipeservice:
def __init__(self, ui, repo, opts):
self.ui = ui
self.repo = repo
@@ -501,9 +493,8 @@
# known exceptions are caught by dispatch.
except error.Abort as inst:
ui.error(_(b'abort: %s\n') % inst.message)
- except IOError as inst:
- if inst.errno != errno.EPIPE:
- raise
+ except BrokenPipeError:
+ pass
except KeyboardInterrupt:
pass
finally:
@@ -521,12 +512,11 @@
fin.close()
try:
fout.close() # implicit flush() may cause another EPIPE
- except IOError as inst:
- if inst.errno != errno.EPIPE:
- raise
+ except BrokenPipeError:
+ pass
-class unixservicehandler(object):
+class unixservicehandler:
"""Set of pluggable operations for unix-mode services
Almost all methods except for createcmdserver() are called in the main
@@ -560,7 +550,7 @@
return server(self.ui, repo, fin, fout, prereposetups)
-class unixforkingservice(object):
+class unixforkingservice:
"""
Listens on unix domain socket and forks server per connection
"""
@@ -645,15 +635,7 @@
# waiting for recv() will receive ECONNRESET.
self._unlinksocket()
exiting = True
- try:
- events = selector.select(timeout=h.pollinterval)
- except OSError as inst:
- # selectors2 raises ETIMEDOUT if timeout exceeded while
- # handling signal interrupt. That's probably wrong, but
- # we can easily get around it.
- if inst.errno != errno.ETIMEDOUT:
- raise
- events = []
+ events = selector.select(timeout=h.pollinterval)
if not events:
# only exit if we completed all queued requests
if exiting:
@@ -665,12 +647,7 @@
def _acceptnewconnection(self, sock, selector):
h = self._servicehandler
- try:
- conn, _addr = sock.accept()
- except socket.error as inst:
- if inst.args[0] == errno.EINTR:
- return
- raise
+ conn, _addr = sock.accept()
# Future improvement: On Python 3.7, maybe gc.freeze() can be used
# to prevent COW memory from being touched by GC.
@@ -703,12 +680,7 @@
def _handlemainipc(self, sock, selector):
"""Process messages sent from a worker"""
- try:
- path = sock.recv(32768) # large enough to receive path
- except socket.error as inst:
- if inst.args[0] == errno.EINTR:
- return
- raise
+ path = sock.recv(32768) # large enough to receive path
self._repoloader.load(path)
def _sigchldhandler(self, signal, frame):
@@ -718,11 +690,7 @@
while self._workerpids:
try:
pid, _status = os.waitpid(-1, options)
- except OSError as inst:
- if inst.errno == errno.EINTR:
- continue
- if inst.errno != errno.ECHILD:
- raise
+ except ChildProcessError:
# no child processes at all (reaped by other waitpid()?)
self._workerpids.clear()
return
--- a/mercurial/commit.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/commit.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,9 +3,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
-import errno
from .i18n import _
from .node import (
@@ -251,11 +248,6 @@
except OSError:
repo.ui.warn(_(b"trouble committing %s!\n") % uipathfn(f))
raise
- except IOError as inst:
- errcode = getattr(inst, 'errno', errno.ENOENT)
- if error or errcode and errcode != errno.ENOENT:
- repo.ui.warn(_(b"trouble committing %s!\n") % uipathfn(f))
- raise
# update manifest
removed = [f for f in removed if f in m1 or f in m2]
--- a/mercurial/config.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/config.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
import os
@@ -15,12 +14,11 @@
from . import (
encoding,
error,
- pycompat,
util,
)
-class config(object):
+class config:
def __init__(self, data=None):
self._current_source_level = 0
self._data = {}
@@ -111,20 +109,19 @@
return sorted(self._data.keys())
def items(self, section):
- items = pycompat.iteritems(self._data.get(section, {}))
+ items = self._data.get(section, {}).items()
return [(k, v[0]) for (k, v) in items]
def set(self, section, item, value, source=b""):
- if pycompat.ispy3:
- assert not isinstance(
- section, str
- ), b'config section may not be unicode strings on Python 3'
- assert not isinstance(
- item, str
- ), b'config item may not be unicode strings on Python 3'
- assert not isinstance(
- value, str
- ), b'config values may not be unicode strings on Python 3'
+ assert not isinstance(
+ section, str
+ ), b'config section may not be unicode strings on Python 3'
+ assert not isinstance(
+ item, str
+ ), b'config item may not be unicode strings on Python 3'
+ assert not isinstance(
+ value, str
+ ), b'config values may not be unicode strings on Python 3'
if section not in self:
self._data[section] = util.cowsortdict()
else:
--- a/mercurial/configitems.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/configitems.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import functools
import re
@@ -30,7 +29,7 @@
knownitems.update(items)
-class configitem(object):
+class configitem:
"""represent a known config item
:section: the official config section where to find this item,
@@ -585,6 +584,11 @@
default=b'',
)
coreconfigitem(
+ b'debug',
+ b'revlog.debug-delta',
+ default=False,
+)
+coreconfigitem(
b'defaults',
b'.*',
default=None,
@@ -1279,6 +1283,18 @@
)
coreconfigitem(
b'format',
+ b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories',
+ default=False,
+ experimental=True,
+)
+coreconfigitem(
+ b'format',
+ b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet',
+ default=False,
+ experimental=True,
+)
+coreconfigitem(
+ b'format',
b'use-dirstate-tracked-hint',
default=False,
experimental=True,
@@ -1291,6 +1307,18 @@
)
coreconfigitem(
b'format',
+ b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories',
+ default=False,
+ experimental=True,
+)
+coreconfigitem(
+ b'format',
+ b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet',
+ default=False,
+ experimental=True,
+)
+coreconfigitem(
+ b'format',
b'dotencode',
default=True,
)
@@ -1387,6 +1415,18 @@
)
coreconfigitem(
b'format',
+ b'use-share-safe.automatic-upgrade-of-mismatching-repositories',
+ default=False,
+ experimental=True,
+)
+coreconfigitem(
+ b'format',
+ b'use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet',
+ default=False,
+ experimental=True,
+)
+coreconfigitem(
+ b'format',
b'internal-phase',
default=False,
experimental=True,
@@ -1571,6 +1611,59 @@
default=False,
)
coreconfigitem(
+ b'merge',
+ b'disable-partial-tools',
+ default=False,
+ experimental=True,
+)
+coreconfigitem(
+ b'partial-merge-tools',
+ b'.*',
+ default=None,
+ generic=True,
+ experimental=True,
+)
+coreconfigitem(
+ b'partial-merge-tools',
+ br'.*\.patterns',
+ default=dynamicdefault,
+ generic=True,
+ priority=-1,
+ experimental=True,
+)
+coreconfigitem(
+ b'partial-merge-tools',
+ br'.*\.executable$',
+ default=dynamicdefault,
+ generic=True,
+ priority=-1,
+ experimental=True,
+)
+coreconfigitem(
+ b'partial-merge-tools',
+ br'.*\.order',
+ default=0,
+ generic=True,
+ priority=-1,
+ experimental=True,
+)
+coreconfigitem(
+ b'partial-merge-tools',
+ br'.*\.args',
+ default=b"$local $base $other",
+ generic=True,
+ priority=-1,
+ experimental=True,
+)
+coreconfigitem(
+ b'partial-merge-tools',
+ br'.*\.disable',
+ default=False,
+ generic=True,
+ priority=-1,
+ experimental=True,
+)
+coreconfigitem(
b'merge-tools',
b'.*',
default=None,
@@ -1703,6 +1796,30 @@
generic=True,
)
coreconfigitem(
+ b'paths',
+ b'.*:bookmarks.mode',
+ default='default',
+ generic=True,
+)
+coreconfigitem(
+ b'paths',
+ b'.*:multi-urls',
+ default=False,
+ generic=True,
+)
+coreconfigitem(
+ b'paths',
+ b'.*:pushrev',
+ default=None,
+ generic=True,
+)
+coreconfigitem(
+ b'paths',
+ b'.*:pushurl',
+ default=None,
+ generic=True,
+)
+coreconfigitem(
b'phases',
b'checksubrepos',
default=b'follow',
@@ -2053,6 +2170,16 @@
default=True,
)
coreconfigitem(
+ b'share',
+ b'safe-mismatch.source-not-safe:verbose-upgrade',
+ default=True,
+)
+coreconfigitem(
+ b'share',
+ b'safe-mismatch.source-safe:verbose-upgrade',
+ default=True,
+)
+coreconfigitem(
b'shelve',
b'maxbackups',
default=10,
--- a/mercurial/context.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/context.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,9 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
-import errno
+
import filecmp
import os
import stat
@@ -33,7 +31,6 @@
patch,
pathutil,
phases,
- pycompat,
repoview,
scmutil,
sparse,
@@ -52,7 +49,7 @@
propertycache = util.propertycache
-class basectx(object):
+class basectx:
"""A basectx object represents the common logic for its children:
changectx: read-only context that is already present in the repo,
workingctx: a context that represents the working directory and can
@@ -124,7 +121,7 @@
deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
deletedset = set(deleted)
d = mf1.diff(mf2, match=match, clean=listclean)
- for fn, value in pycompat.iteritems(d):
+ for fn, value in d.items():
if fn in deletedset:
continue
if value is None:
@@ -797,7 +794,7 @@
return self.walk(match)
-class basefilectx(object):
+class basefilectx:
"""A filecontext object represents the common logic for its children:
filectx: read-only access to a filerevision that is already present
in the repo,
@@ -993,6 +990,16 @@
if self._repo._encodefilterpats:
# can't rely on size() because wdir content may be decoded
return self._filelog.cmp(self._filenode, fctx.data())
+ # filelog.size() has two special cases:
+ # - censored metadata
+ # - copy/rename tracking
+ # The first is detected by peaking into the delta,
+ # the second is detected by abusing parent order
+ # in the revlog index as flag bit. This leaves files using
+ # the dummy encoding and non-standard meta attributes.
+ # The following check is a special case for the empty
+ # metadata block used if the raw file content starts with '\1\n'.
+ # Cases of arbitrary metadata flags are currently mishandled.
if self.size() - 4 == fctx.size():
# size() can match:
# if file data starts with '\1\n', empty metadata block is
@@ -1729,9 +1736,7 @@
def copy(self, source, dest):
try:
st = self._repo.wvfs.lstat(dest)
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
self._repo.ui.warn(
_(b"%s does not exist!\n") % self._repo.dirstate.pathto(dest)
)
@@ -2161,9 +2166,7 @@
t, tz = self._changectx.date()
try:
return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return (t, tz)
def exists(self):
@@ -2422,7 +2425,7 @@
# Test that each new directory to be created to write this path from p2
# is not a file in p1.
components = path.split(b'/')
- for i in pycompat.xrange(len(components)):
+ for i in range(len(components)):
component = b"/".join(components[0:i])
if component in self:
fail(path, component)
@@ -3105,7 +3108,7 @@
return scmutil.status(modified, added, removed, [], [], [], [])
-class arbitraryfilectx(object):
+class arbitraryfilectx:
"""Allows you to use filectx-like functions on a file in an arbitrary
location on disk, possibly not in the working directory.
"""
--- a/mercurial/copies.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/copies.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import os
@@ -18,7 +17,6 @@
match as matchmod,
pathutil,
policy,
- pycompat,
util,
)
@@ -69,7 +67,7 @@
def _chain(prefix, suffix):
"""chain two sets of copies 'prefix' and 'suffix'"""
result = prefix.copy()
- for key, value in pycompat.iteritems(suffix):
+ for key, value in suffix.items():
result[key] = prefix.get(value, value)
return result
@@ -409,7 +407,7 @@
if childcopies:
newcopies = copies.copy()
- for dest, source in pycompat.iteritems(childcopies):
+ for dest, source in childcopies.items():
prev = copies.get(source)
if prev is not None and prev[1] is not None:
source = prev[1]
@@ -624,7 +622,7 @@
newcopies = copies
if childcopies:
newcopies = copies.copy()
- for dest, source in pycompat.iteritems(childcopies):
+ for dest, source in childcopies.items():
prev = copies.get(source)
if prev is not None and prev[1] is not None:
source = prev[1]
@@ -722,7 +720,7 @@
# can still exist (e.g. hg cp a b; hg mv a c). In those cases we
# arbitrarily pick one of the renames.
r = {}
- for k, v in sorted(pycompat.iteritems(copies)):
+ for k, v in sorted(copies.items()):
if match and not match(v):
continue
# remove copies
@@ -889,7 +887,7 @@
copy[dst] = src
-class branch_copies(object):
+class branch_copies:
"""Information about copies made on one side of a merge/graft.
"copy" is a mapping from destination name -> source name,
@@ -1081,7 +1079,7 @@
# examine each file copy for a potential directory move, which is
# when all the files in a directory are moved to a new directory
- for dst, src in pycompat.iteritems(fullcopy):
+ for dst, src in fullcopy.items():
dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
if dsrc in invalid:
# already seen to be uninteresting
@@ -1104,7 +1102,7 @@
if not dirmove:
return {}, {}
- dirmove = {k + b"/": v + b"/" for k, v in pycompat.iteritems(dirmove)}
+ dirmove = {k + b"/": v + b"/" for k, v in dirmove.items()}
for d in dirmove:
repo.ui.debug(
@@ -1187,7 +1185,7 @@
copies2 = {}
cp = _forwardcopies(base, c2)
- for dst, src in pycompat.iteritems(cp):
+ for dst, src in cp.items():
if src in m1:
copies2[dst] = src
@@ -1305,5 +1303,5 @@
for dest, __ in list(new_copies.items()):
if dest in parent:
del new_copies[dest]
- for dst, src in pycompat.iteritems(new_copies):
+ for dst, src in new_copies.items():
wctx[dst].markcopied(src)
--- a/mercurial/crecord.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/crecord.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
# This code is based on the Mark Edgington's crecord extension.
# (Itself based on Bryan O'Sullivan's record extension.)
-from __future__ import absolute_import
import os
import re
@@ -83,7 +82,7 @@
return curses and ui.interface(b"chunkselector") == b"curses"
-class patchnode(object):
+class patchnode:
"""abstract class for patch graph nodes
(i.e. patchroot, header, hunk, hunkline)
"""
@@ -506,7 +505,7 @@
text = line.linetext
if line.linetext == diffhelper.MISSING_NEWLINE_MARKER:
noeol = True
- break
+ continue
if line.applied:
if text.startswith(b'+'):
dels.append(text[1:])
@@ -602,7 +601,7 @@
"""
chunkselector = curseschunkselector(headerlist, ui, operation)
- class dummystdscr(object):
+ class dummystdscr:
def clear(self):
pass
@@ -629,7 +628,7 @@
}
-class curseschunkselector(object):
+class curseschunkselector:
def __init__(self, headerlist, ui, operation=None):
# put the headers into a patch object
self.headerlist = patch(headerlist)
--- a/mercurial/dagop.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/dagop.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import heapq
@@ -204,7 +203,7 @@
def _builddescendantsmap(repo, startrev, followfirst):
"""Build map of 'rev -> child revs', offset from startrev"""
cl = repo.changelog
- descmap = [[] for _rev in pycompat.xrange(startrev, len(cl))]
+ descmap = [[] for _rev in range(startrev, len(cl))]
for currev in cl.revs(startrev + 1):
p1rev, p2rev = cl.parentrevs(currev)
if p1rev >= startrev:
@@ -272,7 +271,7 @@
break
-class subsetparentswalker(object):
+class subsetparentswalker:
r"""Scan adjacent ancestors in the graph given by the subset
This computes parent-child relations in the sub graph filtered by
@@ -648,7 +647,7 @@
@attr.s(slots=True, frozen=True)
-class annotateline(object):
+class annotateline:
fctx = attr.ib()
lineno = attr.ib()
# Whether this annotation was the result of a skip-annotate.
@@ -657,7 +656,7 @@
@attr.s(slots=True, frozen=True)
-class _annotatedfile(object):
+class _annotatedfile:
# list indexed by lineno - 1
fctxs = attr.ib()
linenos = attr.ib()
@@ -726,7 +725,7 @@
for idx, (parent, blocks) in enumerate(pblocks):
for (a1, a2, b1, b2), _t in blocks:
if a2 - a1 >= b2 - b1:
- for bk in pycompat.xrange(b1, b2):
+ for bk in range(b1, b2):
if child.fctxs[bk] == childfctx:
ak = min(a1 + (bk - b1), a2 - 1)
child.fctxs[bk] = parent.fctxs[ak]
@@ -739,7 +738,7 @@
# line.
for parent, blocks in remaining:
for a1, a2, b1, b2 in blocks:
- for bk in pycompat.xrange(b1, b2):
+ for bk in range(b1, b2):
if child.fctxs[bk] == childfctx:
ak = min(a1 + (bk - b1), a2 - 1)
child.fctxs[bk] = parent.fctxs[ak]
--- a/mercurial/dagparser.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/dagparser.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import re
import string
@@ -229,7 +228,7 @@
c, digs = nextrun(nextch(), pycompat.bytestr(string.digits))
# pytype: enable=wrong-arg-types
n = int(digs)
- for i in pycompat.xrange(0, n):
+ for i in range(0, n):
yield b'n', (r, [p1])
p1 = r
r += 1
--- a/mercurial/debugcommands.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/debugcommands.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import binascii
import codecs
@@ -74,6 +73,7 @@
repoview,
requirements,
revlog,
+ revlogutils,
revset,
revsetlang,
scmutil,
@@ -104,6 +104,8 @@
)
from .revlogutils import (
+ constants as revlog_constants,
+ debug as revlog_debug,
deltas as deltautil,
nodemap,
rewrite,
@@ -246,9 +248,7 @@
if mergeable_file:
linesperrev = 2
# make a file with k lines per rev
- initialmergedlines = [
- b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
- ]
+ initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
initialmergedlines.append(b"")
tags = []
@@ -494,7 +494,7 @@
b2caps = bundle2.bundle2caps(peer)
if b2caps:
ui.writenoi18n(b'Bundle2 capabilities:\n')
- for key, values in sorted(pycompat.iteritems(b2caps)):
+ for key, values in sorted(b2caps.items()):
ui.write(b' %s\n' % key)
for v in values:
ui.write(b' %s\n' % v)
@@ -758,10 +758,22 @@
Output can be templatized. Available template keywords are:
:``rev``: revision number
+ :``p1``: parent 1 revision number (for reference)
+ :``p2``: parent 2 revision number (for reference)
:``chainid``: delta chain identifier (numbered by unique base)
:``chainlen``: delta chain length to this revision
:``prevrev``: previous revision in delta chain
:``deltatype``: role of delta / how it was computed
+ - base: a full snapshot
+ - snap: an intermediate snapshot
+ - p1: a delta against the first parent
+ - p2: a delta against the second parent
+ - skip1: a delta against the same base as p1
+ (when p1 has empty delta)
+ - skip2: a delta against the same base as p2
+ (when p2 has empty delta)
+ - prev: a delta against the previous revision
+ - other: a delta against an arbitrary revision
:``compsize``: compressed size of revision
:``uncompsize``: uncompressed size of revision
:``chainsize``: total size of compressed revisions in chain
@@ -795,25 +807,71 @@
generaldelta = r._generaldelta
withsparseread = getattr(r, '_withsparseread', False)
+ # security to avoid crash on corrupted revlogs
+ total_revs = len(index)
+
def revinfo(rev):
e = index[rev]
- compsize = e[1]
- uncompsize = e[2]
+ compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
+ uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
chainsize = 0
+ base = e[revlog_constants.ENTRY_DELTA_BASE]
+ p1 = e[revlog_constants.ENTRY_PARENT_1]
+ p2 = e[revlog_constants.ENTRY_PARENT_2]
+
+ # If a parent of a revision has an empty delta, we never try to delta
+ # against that parent, but directly against the delta base of that
+ # parent (recursively). It avoids adding a useless entry in the chain.
+ #
+ # However we need to detect that as a special case for delta-type, that
+ # is not simply "other".
+ p1_base = p1
+ if p1 != nullrev and p1 < total_revs:
+ e1 = index[p1]
+ while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
+ new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
+ if (
+ new_base == p1_base
+ or new_base == nullrev
+ or new_base >= total_revs
+ ):
+ break
+ p1_base = new_base
+ e1 = index[p1_base]
+ p2_base = p2
+ if p2 != nullrev and p2 < total_revs:
+ e2 = index[p2]
+ while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
+ new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
+ if (
+ new_base == p2_base
+ or new_base == nullrev
+ or new_base >= total_revs
+ ):
+ break
+ p2_base = new_base
+ e2 = index[p2_base]
+
if generaldelta:
- if e[3] == e[5]:
+ if base == p1:
deltatype = b'p1'
- elif e[3] == e[6]:
+ elif base == p2:
deltatype = b'p2'
- elif e[3] == rev - 1:
+ elif base == rev:
+ deltatype = b'base'
+ elif base == p1_base:
+ deltatype = b'skip1'
+ elif base == p2_base:
+ deltatype = b'skip2'
+ elif r.issnapshot(rev):
+ deltatype = b'snap'
+ elif base == rev - 1:
deltatype = b'prev'
- elif e[3] == rev:
- deltatype = b'base'
else:
deltatype = b'other'
else:
- if e[3] == rev:
+ if base == rev:
deltatype = b'base'
else:
deltatype = b'prev'
@@ -821,14 +879,14 @@
chain = r._deltachain(rev)[0]
for iterrev in chain:
e = index[iterrev]
- chainsize += e[1]
-
- return compsize, uncompsize, deltatype, chain, chainsize
+ chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
+
+ return p1, p2, compsize, uncompsize, deltatype, chain, chainsize
fm = ui.formatter(b'debugdeltachain', opts)
fm.plain(
- b' rev chain# chainlen prev delta '
+ b' rev p1 p2 chain# chainlen prev delta '
b'size rawsize chainsize ratio lindist extradist '
b'extraratio'
)
@@ -838,7 +896,7 @@
chainbases = {}
for rev in r:
- comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
+ p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
chainbase = chain[0]
chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
basestart = start(chainbase)
@@ -862,11 +920,13 @@
fm.startitem()
fm.write(
- b'rev chainid chainlen prevrev deltatype compsize '
+ b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
b'uncompsize chainsize chainratio lindist extradist '
b'extraratio',
- b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
+ b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
rev,
+ p1,
+ p2,
chainid,
len(chain),
prevrev,
@@ -929,6 +989,65 @@
@command(
+ b'debug-delta-find',
+ cmdutil.debugrevlogopts + cmdutil.formatteropts,
+ _(b'-c|-m|FILE REV'),
+ optionalrepo=True,
+)
+def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
+ """display the computation to get to a valid delta for storing REV
+
+ This command will replay the process used to find the "best" delta to store
+ a revision and display information about all the steps used to get to that
+ result.
+
+ The revision uses the revision number of the target storage (not changelog
+ revision number).
+
+ note: the process is initiated from a full text of the revision to store.
+ """
+ opts = pycompat.byteskwargs(opts)
+ if arg_2 is None:
+ file_ = None
+ rev = arg_1
+ else:
+ file_ = arg_1
+ rev = arg_2
+
+ rev = int(rev)
+
+ revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
+
+ deltacomputer = deltautil.deltacomputer(
+ revlog,
+ write_debug=ui.write,
+ debug_search=True,
+ )
+
+ node = revlog.node(rev)
+ p1r, p2r = revlog.parentrevs(rev)
+ p1 = revlog.node(p1r)
+ p2 = revlog.node(p2r)
+ btext = [revlog.revision(rev)]
+ textlen = len(btext[0])
+ cachedelta = None
+ flags = revlog.flags(rev)
+
+ revinfo = revlogutils.revisioninfo(
+ node,
+ p1,
+ p2,
+ btext,
+ textlen,
+ cachedelta,
+ flags,
+ )
+
+ fh = revlog._datafp()
+ deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
+
+
+@command(
b'debugdirstate|debugstate',
[
(
@@ -1018,6 +1137,22 @@
@command(
+ b'debugdirstateignorepatternshash',
+ [],
+ _(b''),
+)
+def debugdirstateignorepatternshash(ui, repo, **opts):
+ """show the hash of ignore patterns stored in dirstate if v2,
+ or nothing for dirstate-v1
+ """
+ if repo.dirstate._use_dirstate_v2:
+ docket = repo.dirstate._map.docket
+ hash_len = 20 # 160 bits for SHA-1
+ hash_bytes = docket.tree_metadata[-hash_len:]
+ ui.write(binascii.hexlify(hash_bytes) + b'\n')
+
+
+@command(
b'debugdiscovery',
[
(b'', b'old', None, _(b'use old-style discovery')),
@@ -1039,7 +1174,7 @@
b'',
b'remote-as-revs',
b"",
- b'use local as remote, with only these these revisions',
+ b'use local as remote, with only these revisions',
),
]
+ cmdutil.remoteopts
@@ -1240,6 +1375,7 @@
# display discovery summary
fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
+ fm.plain(b"queries: %(total-queries)9d\n" % data)
fm.plain(b"heads summary:\n")
fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
@@ -1728,45 +1864,27 @@
@command(
- b'debugindex',
+ b'debug-revlog-index|debugindex',
cmdutil.debugrevlogopts + cmdutil.formatteropts,
_(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
- """dump index data for a storage primitive"""
+ """dump index data for a revlog"""
opts = pycompat.byteskwargs(opts)
store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
- if ui.debugflag:
- shortfn = hex
- else:
- shortfn = short
-
- idlen = 12
- for i in store:
- idlen = len(shortfn(store.node(i)))
- break
-
fm = ui.formatter(b'debugindex', opts)
- fm.plain(
- b' rev linkrev %s %s p2\n'
- % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
+
+ revlog = getattr(store, b'_revlog', store)
+
+ return revlog_debug.debug_index(
+ ui,
+ repo,
+ formatter=fm,
+ revlog=revlog,
+ full_node=ui.debugflag,
)
- for rev in store:
- node = store.node(rev)
- parents = store.parents(node)
-
- fm.startitem()
- fm.write(b'rev', b'%6d ', rev)
- fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
- fm.write(b'node', b'%s ', shortfn(node))
- fm.write(b'p1', b'%s ', shortfn(parents[0]))
- fm.write(b'p2', b'%s', shortfn(parents[1]))
- fm.plain(b'\n')
-
- fm.end()
-
@command(
b'debugindexdot',
@@ -2185,7 +2303,19 @@
except error.LockHeld:
raise error.Abort(_(b'lock is already held'))
if len(locks):
- ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
+ try:
+ if ui.interactive():
+ prompt = _(b"ready to release the lock (y)? $$ &Yes")
+ ui.promptchoice(prompt)
+ else:
+ msg = b"%d locks held, waiting for signal\n"
+ msg %= len(locks)
+ ui.status(msg)
+ while True: # XXX wait for a signal
+ time.sleep(0.1)
+ except KeyboardInterrupt:
+ msg = b"signal-received releasing locks\n"
+ ui.status(msg)
return 0
finally:
release(*locks)
@@ -2220,9 +2350,8 @@
)
ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
return 1
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
ui.writenoi18n(b"%-6s free\n" % (name + b":"))
return 0
@@ -2403,11 +2532,11 @@
fm_files.end()
fm_extras = fm.nested(b'extras')
- for f, d in sorted(pycompat.iteritems(ms.allextras())):
+ for f, d in sorted(ms.allextras().items()):
if f in ms:
# If file is in mergestate, we have already processed it's extras
continue
- for k, v in pycompat.iteritems(d):
+ for k, v in d.items():
fm_extras.startitem()
fm_extras.data(file=f)
fm_extras.data(key=k)
@@ -2424,7 +2553,7 @@
names = set()
# since we previously only listed open branches, we will handle that
# specially (after this for loop)
- for name, ns in pycompat.iteritems(repo.names):
+ for name, ns in repo.names.items():
if name != b'branches':
names.update(ns.listnames(repo))
names.update(
@@ -2542,9 +2671,9 @@
# local repository.
n = bin(s)
if len(n) != repo.nodeconstants.nodelen:
- raise TypeError()
+ raise ValueError
return n
- except TypeError:
+ except ValueError:
raise error.InputError(
b'changeset references must be full hexadecimal '
b'node identifiers'
@@ -2674,7 +2803,7 @@
[(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
_(b'[-r REV]'),
)
-def debugp1copies(ui, repo, **opts):
+def debugp2copies(ui, repo, **opts):
"""dump copy information compared to p2"""
opts = pycompat.byteskwargs(opts)
@@ -2718,7 +2847,7 @@
fullpaths = opts['full']
files, dirs = set(), set()
adddir, addfile = dirs.add, files.add
- for f, st in pycompat.iteritems(dirstate):
+ for f, st in dirstate.items():
if f.startswith(spec) and st.state in acceptable:
if fixpaths:
f = f.replace(b'/', pycompat.ossep)
@@ -2907,7 +3036,7 @@
ui.status(pycompat.bytestr(r) + b'\n')
return not r
else:
- for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
+ for k, v in sorted(target.listkeys(namespace).items()):
ui.write(
b"%s\t%s\n"
% (stringutil.escapestr(k), stringutil.escapestr(v))
@@ -3061,7 +3190,7 @@
ts = 0
heads = set()
- for rev in pycompat.xrange(numrevs):
+ for rev in range(numrevs):
dbase = r.deltaparent(rev)
if dbase == -1:
dbase = rev
@@ -3159,7 +3288,7 @@
l[2] += size
numrevs = len(r)
- for rev in pycompat.xrange(numrevs):
+ for rev in range(numrevs):
p1, p2 = r.parentrevs(rev)
delta = r.deltaparent(rev)
if format > 0:
@@ -4289,7 +4418,7 @@
for opt in cmdutil.remoteopts:
del opts[opt[1]]
args = {}
- for k, v in pycompat.iteritems(opts):
+ for k, v in opts.items():
if v:
args[k] = v
args = pycompat.strkwargs(args)
--- a/mercurial/destutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/destutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from . import bookmarks, error, obsutil, scmutil, stack
--- a/mercurial/diffhelper.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/diffhelper.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,13 +5,11 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from . import (
error,
- pycompat,
)
MISSING_NEWLINE_MARKER = b'\\ No newline at end of file\n'
@@ -30,7 +28,7 @@
num = max(todoa, todob)
if num == 0:
break
- for i in pycompat.xrange(num):
+ for i in range(num):
s = fp.readline()
if not s:
raise error.ParseError(_(b'incomplete hunk'))
@@ -77,7 +75,7 @@
blen = len(b)
if alen > blen - bstart or bstart < 0:
return False
- for i in pycompat.xrange(alen):
+ for i in range(alen):
if a[i][1:] != b[i + bstart]:
return False
return True
--- a/mercurial/diffutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/diffutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
--- a/mercurial/dirstate.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/dirstate.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,11 +5,9 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import contextlib
-import errno
import os
import stat
import uuid
@@ -29,7 +27,6 @@
policy,
pycompat,
scmutil,
- sparse,
util,
)
@@ -91,7 +88,7 @@
@interfaceutil.implementer(intdirstate.idirstate)
-class dirstate(object):
+class dirstate:
def __init__(
self,
opener,
@@ -115,6 +112,7 @@
self._opener = opener
self._validate = validate
self._root = root
+ # Either build a sparse-matcher or None if sparse is disabled
self._sparsematchfn = sparsematchfn
# ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
# UNC path pointing to root share (issue4557)
@@ -186,7 +184,11 @@
The working directory may not include every file from a manifest. The
matcher obtained by this property will match a path if it is to be
included in the working directory.
+
+ When sparse is disabled, return None.
"""
+ if self._sparsematchfn is None:
+ return None
# TODO there is potential to cache this property. For now, the matcher
# is resolved on every access. (But the called function does use a
# cache to keep the lookup fast.)
@@ -196,9 +198,7 @@
def _branch(self):
try:
return self._opener.read(b"branch").strip() or b"default"
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return b"default"
@property
@@ -343,7 +343,7 @@
return iter(sorted(self._map))
def items(self):
- return pycompat.iteritems(self._map)
+ return self._map.items()
iteritems = items
@@ -427,6 +427,7 @@
return
self._dirty = True
if source is not None:
+ self._check_sparse(source)
self._map.copymap[dest] = source
else:
self._map.copymap.pop(dest, None)
@@ -588,6 +589,19 @@
msg = _(b'file %r in dirstate clashes with %r')
msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
raise error.Abort(msg)
+ self._check_sparse(filename)
+
+ def _check_sparse(self, filename):
+ """Check that a filename is inside the sparse profile"""
+ sparsematch = self._sparsematcher
+ if sparsematch is not None and not sparsematch.always():
+ if not sparsematch(filename):
+ msg = _(b"cannot add '%s' - it is outside the sparse checkout")
+ hint = _(
+ b'include file with `hg debugsparse --include <pattern>` or use '
+ b'`hg add -s <file>` to include file directory while adding'
+ )
+ raise error.Abort(msg % filename, hint=hint)
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
if exists is None:
@@ -670,6 +684,20 @@
self._dirty = True
def rebuild(self, parent, allfiles, changedfiles=None):
+
+ matcher = self._sparsematcher
+ if matcher is not None and not matcher.always():
+ # should not add non-matching files
+ allfiles = [f for f in allfiles if matcher(f)]
+ if changedfiles:
+ changedfiles = [f for f in changedfiles if matcher(f)]
+
+ if changedfiles is not None:
+ # these files will be deleted from the dirstate when they are
+ # not found to be in allfiles
+ dirstatefilestoremove = {f for f in self if not matcher(f)}
+ changedfiles = dirstatefilestoremove.union(changedfiles)
+
if changedfiles is None:
# Rebuild entire dirstate
to_lookup = allfiles
@@ -771,9 +799,7 @@
def _writedirstate(self, tr, st):
# notify callbacks about parents change
if self._origpl is not None and self._origpl != self._pl:
- for c, callback in sorted(
- pycompat.iteritems(self._plchangecallbacks)
- ):
+ for c, callback in sorted(self._plchangecallbacks.items()):
callback(self, self._origpl, self._pl)
self._origpl = None
self._map.write(tr, st)
@@ -936,7 +962,7 @@
if match.isexact() and self._checkcase:
normed = {}
- for f, st in pycompat.iteritems(results):
+ for f, st in results.items():
if st is None:
continue
@@ -949,7 +975,7 @@
paths.add(f)
- for norm, paths in pycompat.iteritems(normed):
+ for norm, paths in normed.items():
if len(paths) > 1:
for path in paths:
folded = self._discoverpath(
@@ -986,6 +1012,11 @@
ignore = util.always
dirignore = util.always
+ if self._sparsematchfn is not None:
+ em = matchmod.exact(match.files())
+ sm = matchmod.unionmatcher([self._sparsematcher, em])
+ match = matchmod.intersectmatchers(match, sm)
+
matchfn = match.matchfn
matchalways = match.always()
matchtdir = match.traversedir
@@ -1040,13 +1071,11 @@
try:
with tracing.log('dirstate.walk.traverse listdir %s', nd):
entries = listdir(join(nd), stat=True, skip=skip)
- except OSError as inst:
- if inst.errno in (errno.EACCES, errno.ENOENT):
- match.bad(
- self.pathto(nd), encoding.strtolocal(inst.strerror)
- )
- continue
- raise
+ except (PermissionError, FileNotFoundError) as inst:
+ match.bad(
+ self.pathto(nd), encoding.strtolocal(inst.strerror)
+ )
+ continue
for f, kind, st in entries:
# Some matchers may return files in the visitentries set,
# instead of 'this', if the matcher explicitly mentions them
@@ -1149,6 +1178,10 @@
return results
def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
+ if self._sparsematchfn is not None:
+ em = matchmod.exact(matcher.files())
+ sm = matchmod.unionmatcher([self._sparsematcher, em])
+ matcher = matchmod.intersectmatchers(matcher, sm)
# Force Rayon (Rust parallelism library) to respect the number of
# workers. This is a temporary workaround until Rust code knows
# how to read the config file.
@@ -1255,6 +1288,9 @@
matchmod.alwaysmatcher,
matchmod.exactmatcher,
matchmod.includematcher,
+ matchmod.intersectionmatcher,
+ matchmod.nevermatcher,
+ matchmod.unionmatcher,
)
if rustmod is None:
@@ -1264,8 +1300,6 @@
use_rust = False
elif subrepos:
use_rust = False
- elif sparse.enabled:
- use_rust = False
elif not isinstance(match, allowed_matchers):
# Some matchers have yet to be implemented
use_rust = False
@@ -1311,9 +1345,9 @@
# - match.traversedir does something, because match.traversedir should
# be called for every dir in the working dir
full = listclean or match.traversedir is not None
- for fn, st in pycompat.iteritems(
- self.walk(match, subrepos, listunknown, listignored, full=full)
- ):
+ for fn, st in self.walk(
+ match, subrepos, listunknown, listignored, full=full
+ ).items():
if not dcontains(fn):
if (listignored or mexact(fn)) and dirignore(fn):
if listignored:
--- a/mercurial/dirstateguard.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/dirstateguard.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
from .i18n import _
--- a/mercurial/dirstatemap.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/dirstatemap.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,9 +3,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
-import errno
from .i18n import _
@@ -13,7 +10,6 @@
error,
pathutil,
policy,
- pycompat,
txnutil,
util,
)
@@ -36,7 +32,7 @@
rangemask = 0x7FFFFFFF
-class _dirstatemapcommon(object):
+class _dirstatemapcommon:
"""
Methods that are identical for both implementations of the dirstatemap
class, with and without Rust extensions enabled.
@@ -81,134 +77,6 @@
def __getitem__(self, item):
return self._map[item]
- ### sub-class utility method
- #
- # Use to allow for generic implementation of some method while still coping
- # with minor difference between implementation.
-
- def _dirs_incr(self, filename, old_entry=None):
- """incremente the dirstate counter if applicable
-
- This might be a no-op for some subclass who deal with directory
- tracking in a different way.
- """
-
- def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
- """decremente the dirstate counter if applicable
-
- This might be a no-op for some subclass who deal with directory
- tracking in a different way.
- """
-
- def _refresh_entry(self, f, entry):
- """record updated state of an entry"""
-
- def _insert_entry(self, f, entry):
- """add a new dirstate entry (or replace an unrelated one)
-
- The fact it is actually new is the responsability of the caller
- """
-
- def _drop_entry(self, f):
- """remove any entry for file f
-
- This should also drop associated copy information
-
- The fact we actually need to drop it is the responsability of the caller"""
-
- ### method to manipulate the entries
-
- def set_possibly_dirty(self, filename):
- """record that the current state of the file on disk is unknown"""
- entry = self[filename]
- entry.set_possibly_dirty()
- self._refresh_entry(filename, entry)
-
- def set_clean(self, filename, mode, size, mtime):
- """mark a file as back to a clean state"""
- entry = self[filename]
- size = size & rangemask
- entry.set_clean(mode, size, mtime)
- self._refresh_entry(filename, entry)
- self.copymap.pop(filename, None)
-
- def set_tracked(self, filename):
- new = False
- entry = self.get(filename)
- if entry is None:
- self._dirs_incr(filename)
- entry = DirstateItem(
- wc_tracked=True,
- )
-
- self._insert_entry(filename, entry)
- new = True
- elif not entry.tracked:
- self._dirs_incr(filename, entry)
- entry.set_tracked()
- self._refresh_entry(filename, entry)
- new = True
- else:
- # XXX This is probably overkill for more case, but we need this to
- # fully replace the `normallookup` call with `set_tracked` one.
- # Consider smoothing this in the future.
- entry.set_possibly_dirty()
- self._refresh_entry(filename, entry)
- return new
-
- def set_untracked(self, f):
- """Mark a file as no longer tracked in the dirstate map"""
- entry = self.get(f)
- if entry is None:
- return False
- else:
- self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
- if not entry.p2_info:
- self.copymap.pop(f, None)
- entry.set_untracked()
- self._refresh_entry(f, entry)
- return True
-
- def reset_state(
- self,
- filename,
- wc_tracked=False,
- p1_tracked=False,
- p2_info=False,
- has_meaningful_mtime=True,
- has_meaningful_data=True,
- parentfiledata=None,
- ):
- """Set a entry to a given state, diregarding all previous state
-
- This is to be used by the part of the dirstate API dedicated to
- adjusting the dirstate after a update/merge.
-
- note: calling this might result to no entry existing at all if the
- dirstate map does not see any point at having one for this file
- anymore.
- """
- # copy information are now outdated
- # (maybe new information should be in directly passed to this function)
- self.copymap.pop(filename, None)
-
- if not (p1_tracked or p2_info or wc_tracked):
- old_entry = self._map.get(filename)
- self._drop_entry(filename)
- self._dirs_decr(filename, old_entry=old_entry)
- return
-
- old_entry = self._map.get(filename)
- self._dirs_incr(filename, old_entry)
- entry = DirstateItem(
- wc_tracked=wc_tracked,
- p1_tracked=p1_tracked,
- p2_info=p2_info,
- has_meaningful_mtime=has_meaningful_mtime,
- parentfiledata=parentfiledata,
- )
- self._insert_entry(filename, entry)
-
### disk interaction
def _opendirstatefile(self):
@@ -225,9 +93,7 @@
try:
with self._opendirstatefile() as fp:
return fp.read(size)
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
# File doesn't exist, so the current state is empty
return b''
@@ -355,7 +221,7 @@
util.clearcachedproperty(self, b"dirfoldmap")
def items(self):
- return pycompat.iteritems(self._map)
+ return self._map.items()
# forward for python2,3 compat
iteritems = items
@@ -379,7 +245,7 @@
self._dirtyparents = True
copies = {}
if fold_p2:
- for f, s in pycompat.iteritems(self._map):
+ for f, s in self._map.items():
# Discard "merged" markers when moving away from a merge state
if s.p2_info:
source = self.copymap.pop(f, None)
@@ -465,7 +331,7 @@
# (e.g. "has_dir")
def _dirs_incr(self, filename, old_entry=None):
- """incremente the dirstate counter if applicable"""
+ """increment the dirstate counter if applicable"""
if (
old_entry is None or old_entry.removed
) and "_dirs" in self.__dict__:
@@ -474,7 +340,7 @@
self._alldirs.addpath(filename)
def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
- """decremente the dirstate counter if applicable"""
+ """decrement the dirstate counter if applicable"""
if old_entry is not None:
if "_dirs" in self.__dict__ and not old_entry.removed:
self._dirs.delpath(filename)
@@ -502,7 +368,7 @@
f = {}
normcase = util.normcase
- for name, s in pycompat.iteritems(self._map):
+ for name, s in self._map.items():
if not s.removed:
f[normcase(name)] = name
f[b'.'] = b'.' # prevents useless util.fspath() invocation
@@ -540,14 +406,107 @@
### code related to manipulation of entries and copy-sources
+ def reset_state(
+ self,
+ filename,
+ wc_tracked=False,
+ p1_tracked=False,
+ p2_info=False,
+ has_meaningful_mtime=True,
+ parentfiledata=None,
+ ):
+ """Set an entry to a given state, disregarding all previous state
+
+ This is to be used by the part of the dirstate API dedicated to
+ adjusting the dirstate after a update/merge.
+
+ note: calling this might result in no entry existing at all if the
+ dirstate map does not see any point at having one for this file
+ anymore.
+ """
+ # copy information are now outdated
+ # (maybe new information should be in directly passed to this function)
+ self.copymap.pop(filename, None)
+
+ if not (p1_tracked or p2_info or wc_tracked):
+ old_entry = self._map.get(filename)
+ self._drop_entry(filename)
+ self._dirs_decr(filename, old_entry=old_entry)
+ return
+
+ old_entry = self._map.get(filename)
+ self._dirs_incr(filename, old_entry)
+ entry = DirstateItem(
+ wc_tracked=wc_tracked,
+ p1_tracked=p1_tracked,
+ p2_info=p2_info,
+ has_meaningful_mtime=has_meaningful_mtime,
+ parentfiledata=parentfiledata,
+ )
+ self._map[filename] = entry
+
+ def set_tracked(self, filename):
+ new = False
+ entry = self.get(filename)
+ if entry is None:
+ self._dirs_incr(filename)
+ entry = DirstateItem(
+ wc_tracked=True,
+ )
+
+ self._map[filename] = entry
+ new = True
+ elif not entry.tracked:
+ self._dirs_incr(filename, entry)
+ entry.set_tracked()
+ self._refresh_entry(filename, entry)
+ new = True
+ else:
+ # XXX This is probably overkill for most cases, but we need this to
+ # fully replace the `normallookup` call with `set_tracked` one.
+ # Consider smoothing this in the future.
+ entry.set_possibly_dirty()
+ self._refresh_entry(filename, entry)
+ return new
+
+ def set_untracked(self, f):
+ """Mark a file as no longer tracked in the dirstate map"""
+ entry = self.get(f)
+ if entry is None:
+ return False
+ else:
+ self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
+ if not entry.p2_info:
+ self.copymap.pop(f, None)
+ entry.set_untracked()
+ self._refresh_entry(f, entry)
+ return True
+
+ def set_clean(self, filename, mode, size, mtime):
+ """mark a file as back to a clean state"""
+ entry = self[filename]
+ size = size & rangemask
+ entry.set_clean(mode, size, mtime)
+ self._refresh_entry(filename, entry)
+ self.copymap.pop(filename, None)
+
+ def set_possibly_dirty(self, filename):
+ """record that the current state of the file on disk is unknown"""
+ entry = self[filename]
+ entry.set_possibly_dirty()
+ self._refresh_entry(filename, entry)
+
def _refresh_entry(self, f, entry):
+ """record updated state of an entry"""
if not entry.any_tracked:
self._map.pop(f, None)
- def _insert_entry(self, f, entry):
- self._map[f] = entry
+ def _drop_entry(self, f):
+ """remove any entry for file f
- def _drop_entry(self, f):
+ This should also drop associated copy information
+
+ The fact we actually need to drop it is the responsibility of the caller"""
self._map.pop(f, None)
self.copymap.pop(f, None)
@@ -630,22 +589,7 @@
self._dirtyparents = True
copies = {}
if fold_p2:
- # Collect into an intermediate list to avoid a `RuntimeError`
- # exception due to mutation during iteration.
- # TODO: move this the whole loop to Rust where `iter_mut`
- # enables in-place mutation of elements of a collection while
- # iterating it, without mutating the collection itself.
- files_with_p2_info = [
- f for f, s in self._map.items() if s.p2_info
- ]
- rust_map = self._map
- for f in files_with_p2_info:
- e = rust_map.get(f)
- source = self.copymap.pop(f, None)
- if source:
- copies[f] = source
- e.drop_merge_data()
- rust_map.set_dirstate_item(f, e)
+ copies = self._map.setparents_fixup()
return copies
### disk interaction
@@ -715,18 +659,32 @@
### code related to manipulation of entries and copy-sources
- def _refresh_entry(self, f, entry):
- if not entry.any_tracked:
- self._map.drop_item_and_copy_source(f)
- else:
- self._map.addfile(f, entry)
+ def set_tracked(self, f):
+ return self._map.set_tracked(f)
+
+ def set_untracked(self, f):
+ return self._map.set_untracked(f)
+
+ def set_clean(self, filename, mode, size, mtime):
+ self._map.set_clean(filename, mode, size, mtime)
+
+ def set_possibly_dirty(self, f):
+ self._map.set_possibly_dirty(f)
- def _insert_entry(self, f, entry):
- self._map.addfile(f, entry)
-
- def _drop_entry(self, f):
- self._map.drop_item_and_copy_source(f)
-
- def __setitem__(self, key, value):
- assert isinstance(value, DirstateItem)
- self._map.set_dirstate_item(key, value)
+ def reset_state(
+ self,
+ filename,
+ wc_tracked=False,
+ p1_tracked=False,
+ p2_info=False,
+ has_meaningful_mtime=True,
+ parentfiledata=None,
+ ):
+ return self._map.reset_state(
+ filename,
+ wc_tracked,
+ p1_tracked,
+ p2_info,
+ has_meaningful_mtime,
+ parentfiledata,
+ )
--- a/mercurial/dirstateutils/docket.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/dirstateutils/docket.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import struct
@@ -29,7 +28,7 @@
)
-class DirstateDocket(object):
+class DirstateDocket:
data_filename_pattern = b'dirstate.%s'
def __init__(self, parents, data_size, tree_metadata, uuid):
--- a/mercurial/dirstateutils/timestamp.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/dirstateutils/timestamp.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import functools
import os
--- a/mercurial/dirstateutils/v2.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/dirstateutils/v2.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import struct
@@ -126,7 +125,7 @@
@attr.s
-class Node(object):
+class Node:
path = attr.ib()
entry = attr.ib()
parent = attr.ib(default=None)
--- a/mercurial/discovery.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/discovery.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import functools
@@ -74,7 +73,7 @@
return (list(common), anyinc, heads or list(srvheads))
-class outgoing(object):
+class outgoing:
"""Represents the result of a findcommonoutgoing() call.
Members:
@@ -238,7 +237,7 @@
knownnode = cl.hasnode # do not use nodemap until it is filtered
# A. register remote heads of branches which are in outgoing set
- for branch, heads in pycompat.iteritems(remotemap):
+ for branch, heads in remotemap.items():
# don't add head info about branches which we don't have locally
if branch not in branches:
continue
@@ -262,14 +261,14 @@
repo,
(
(branch, heads[1])
- for branch, heads in pycompat.iteritems(headssum)
+ for branch, heads in headssum.items()
if heads[0] is not None
),
)
newmap.update(repo, (ctx.rev() for ctx in missingctx))
- for branch, newheads in pycompat.iteritems(newmap):
+ for branch, newheads in newmap.items():
headssum[branch][1][:] = newheads
- for branch, items in pycompat.iteritems(headssum):
+ for branch, items in headssum.items():
for l in items:
if l is not None:
l.sort()
@@ -380,9 +379,7 @@
headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
pushop.pushbranchmap = headssum
newbranches = [
- branch
- for branch, heads in pycompat.iteritems(headssum)
- if heads[0] is None
+ branch for branch, heads in headssum.items() if heads[0] is None
]
# 1. Check for new branches on the remote.
if newbranches and not newbranch: # new branch requires --new-branch
--- a/mercurial/dispatch.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/dispatch.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import errno
import getopt
@@ -54,7 +53,7 @@
)
-class request(object):
+class request:
def __init__(
self,
args,
@@ -150,93 +149,76 @@
sys.exit(status & 255)
-if pycompat.ispy3:
-
- def initstdio():
- # stdio streams on Python 3 are io.TextIOWrapper instances proxying another
- # buffer. These streams will normalize \n to \r\n by default. Mercurial's
- # preferred mechanism for writing output (ui.write()) uses io.BufferedWriter
- # instances, which write to the underlying stdio file descriptor in binary
- # mode. ui.write() uses \n for line endings and no line ending normalization
- # is attempted through this interface. This "just works," even if the system
- # preferred line ending is not \n.
- #
- # But some parts of Mercurial (e.g. hooks) can still send data to sys.stdout
- # and sys.stderr. They will inherit the line ending normalization settings,
- # potentially causing e.g. \r\n to be emitted. Since emitting \n should
- # "just work," here we change the sys.* streams to disable line ending
- # normalization, ensuring compatibility with our ui type.
-
- if sys.stdout is not None:
- # write_through is new in Python 3.7.
- kwargs = {
- "newline": "\n",
- "line_buffering": sys.stdout.line_buffering,
- }
- if util.safehasattr(sys.stdout, "write_through"):
- # pytype: disable=attribute-error
- kwargs["write_through"] = sys.stdout.write_through
- # pytype: enable=attribute-error
- sys.stdout = io.TextIOWrapper(
- sys.stdout.buffer,
- sys.stdout.encoding,
- sys.stdout.errors,
- **kwargs
- )
+def initstdio():
+ # stdio streams on Python 3 are io.TextIOWrapper instances proxying another
+ # buffer. These streams will normalize \n to \r\n by default. Mercurial's
+ # preferred mechanism for writing output (ui.write()) uses io.BufferedWriter
+ # instances, which write to the underlying stdio file descriptor in binary
+ # mode. ui.write() uses \n for line endings and no line ending normalization
+ # is attempted through this interface. This "just works," even if the system
+ # preferred line ending is not \n.
+ #
+ # But some parts of Mercurial (e.g. hooks) can still send data to sys.stdout
+ # and sys.stderr. They will inherit the line ending normalization settings,
+ # potentially causing e.g. \r\n to be emitted. Since emitting \n should
+ # "just work," here we change the sys.* streams to disable line ending
+ # normalization, ensuring compatibility with our ui type.
- if sys.stderr is not None:
- kwargs = {
- "newline": "\n",
- "line_buffering": sys.stderr.line_buffering,
- }
- if util.safehasattr(sys.stderr, "write_through"):
- # pytype: disable=attribute-error
- kwargs["write_through"] = sys.stderr.write_through
- # pytype: enable=attribute-error
- sys.stderr = io.TextIOWrapper(
- sys.stderr.buffer,
- sys.stderr.encoding,
- sys.stderr.errors,
- **kwargs
- )
+ if sys.stdout is not None:
+ # write_through is new in Python 3.7.
+ kwargs = {
+ "newline": "\n",
+ "line_buffering": sys.stdout.line_buffering,
+ }
+ if util.safehasattr(sys.stdout, "write_through"):
+ # pytype: disable=attribute-error
+ kwargs["write_through"] = sys.stdout.write_through
+ # pytype: enable=attribute-error
+ sys.stdout = io.TextIOWrapper(
+ sys.stdout.buffer, sys.stdout.encoding, sys.stdout.errors, **kwargs
+ )
- if sys.stdin is not None:
- # No write_through on read-only stream.
- sys.stdin = io.TextIOWrapper(
- sys.stdin.buffer,
- sys.stdin.encoding,
- sys.stdin.errors,
- # None is universal newlines mode.
- newline=None,
- line_buffering=sys.stdin.line_buffering,
- )
+ if sys.stderr is not None:
+ kwargs = {
+ "newline": "\n",
+ "line_buffering": sys.stderr.line_buffering,
+ }
+ if util.safehasattr(sys.stderr, "write_through"):
+ # pytype: disable=attribute-error
+ kwargs["write_through"] = sys.stderr.write_through
+ # pytype: enable=attribute-error
+ sys.stderr = io.TextIOWrapper(
+ sys.stderr.buffer, sys.stderr.encoding, sys.stderr.errors, **kwargs
+ )
- def _silencestdio():
- for fp in (sys.stdout, sys.stderr):
- if fp is None:
- continue
- # Check if the file is okay
- try:
- fp.flush()
- continue
- except IOError:
- pass
- # Otherwise mark it as closed to silence "Exception ignored in"
- # message emitted by the interpreter finalizer.
- try:
- fp.close()
- except IOError:
- pass
+ if sys.stdin is not None:
+ # No write_through on read-only stream.
+ sys.stdin = io.TextIOWrapper(
+ sys.stdin.buffer,
+ sys.stdin.encoding,
+ sys.stdin.errors,
+ # None is universal newlines mode.
+ newline=None,
+ line_buffering=sys.stdin.line_buffering,
+ )
-else:
-
- def initstdio():
- for fp in (sys.stdin, sys.stdout, sys.stderr):
- procutil.setbinary(fp)
-
- def _silencestdio():
- pass
+def _silencestdio():
+ for fp in (sys.stdout, sys.stderr):
+ if fp is None:
+ continue
+ # Check if the file is okay
+ try:
+ fp.flush()
+ continue
+ except IOError:
+ pass
+ # Otherwise mark it as closed to silence "Exception ignored in"
+ # message emitted by the interpreter finalizer.
+ try:
+ fp.close()
+ except IOError:
+ pass
def _formatargs(args):
@@ -308,9 +290,8 @@
# maybe pager would quit without consuming all the output, and
# SIGPIPE was raised. we cannot print anything in this case.
pass
- except IOError as inst:
- if inst.errno != errno.EPIPE:
- raise
+ except BrokenPipeError:
+ pass
ret = -1
finally:
duration = util.timer() - starttime
@@ -575,7 +556,7 @@
return r.sub(lambda x: replacemap[x.group()], cmd)
-class cmdalias(object):
+class cmdalias:
def __init__(self, ui, name, definition, cmdtable, source):
self.name = self.cmd = name
self.cmdname = b''
@@ -590,7 +571,7 @@
try:
aliases, entry = cmdutil.findcmd(self.name, cmdtable)
- for alias, e in pycompat.iteritems(cmdtable):
+ for alias, e in cmdtable.items():
if e is entry:
self.cmd = alias
break
@@ -758,7 +739,7 @@
raise
-class lazyaliasentry(object):
+class lazyaliasentry:
"""like a typical command entry (func, opts, help), but is lazy"""
def __init__(self, ui, name, definition, cmdtable, source):
--- a/mercurial/encoding.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/encoding.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import locale
import os
@@ -47,8 +46,7 @@
_sysstr = pycompat.sysstr
-if pycompat.ispy3:
- unichr = chr
+unichr = chr
# These unicode characters are ignored by HFS+ (Apple Technote 1150,
# "Unicode Subtleties"), so we need to ignore them in some places for
@@ -79,10 +77,8 @@
# encoding.environ is provided read-only, which may not be used to modify
# the process environment
-_nativeenviron = not pycompat.ispy3 or os.supports_bytes_environ
-if not pycompat.ispy3:
- environ = os.environ # re-exports
-elif _nativeenviron:
+_nativeenviron = os.supports_bytes_environ
+if _nativeenviron:
environ = os.environb # re-exports
else:
# preferred encoding isn't known yet; use utf-8 to avoid unicode error
@@ -99,7 +95,7 @@
# cp65001 is a Windows variant of utf-8, which isn't supported on Python 2.
# No idea if it should be rewritten to the canonical name 'utf-8' on Python 3.
# https://bugs.python.org/issue13216
-if pycompat.iswindows and not pycompat.ispy3:
+if pycompat.iswindows:
_encodingrewrites[b'cp65001'] = b'utf-8'
try:
@@ -271,21 +267,9 @@
# converter functions between native str and byte string. use these if the
# character encoding is not aware (e.g. exception message) or is known to
# be locale dependent (e.g. date formatting.)
-if pycompat.ispy3:
- strtolocal = unitolocal
- strfromlocal = unifromlocal
- strmethod = unimethod
-else:
-
- def strtolocal(s):
- # type: (str) -> bytes
- return s # pytype: disable=bad-return-type
-
- def strfromlocal(s):
- # type: (bytes) -> str
- return s # pytype: disable=bad-return-type
-
- strmethod = pycompat.identity
+strtolocal = unitolocal
+strfromlocal = unifromlocal
+strmethod = unimethod
def lower(s):
@@ -345,7 +329,7 @@
if not _nativeenviron:
# now encoding and helper functions are available, recreate the environ
# dict to be exported to other modules
- if pycompat.iswindows and pycompat.ispy3:
+ if pycompat.iswindows:
class WindowsEnviron(dict):
"""`os.environ` normalizes environment variables to uppercase on windows"""
@@ -361,36 +345,34 @@
DRIVE_RE = re.compile(b'^[a-z]:')
-if pycompat.ispy3:
- # os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
- # returns bytes.
- if pycompat.iswindows:
- # Python 3 on Windows issues a DeprecationWarning about using the bytes
- # API when os.getcwdb() is called.
- #
- # Additionally, py3.8+ uppercases the drive letter when calling
- # os.path.realpath(), which is used on ``repo.root``. Since those
- # strings are compared in various places as simple strings, also call
- # realpath here. See https://bugs.python.org/issue40368
- #
- # However this is not reliable, so lets explicitly make this drive
- # letter upper case.
- #
- # note: we should consider dropping realpath here since it seems to
- # change the semantic of `getcwd`.
+# os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
+# returns bytes.
+if pycompat.iswindows:
+ # Python 3 on Windows issues a DeprecationWarning about using the bytes
+ # API when os.getcwdb() is called.
+ #
+ # Additionally, py3.8+ uppercases the drive letter when calling
+ # os.path.realpath(), which is used on ``repo.root``. Since those
+ # strings are compared in various places as simple strings, also call
+ # realpath here. See https://bugs.python.org/issue40368
+ #
+ # However this is not reliable, so lets explicitly make this drive
+ # letter upper case.
+ #
+ # note: we should consider dropping realpath here since it seems to
+ # change the semantic of `getcwd`.
- def getcwd():
- cwd = os.getcwd() # re-exports
- cwd = os.path.realpath(cwd)
- cwd = strtolocal(cwd)
- if DRIVE_RE.match(cwd):
- cwd = cwd[0:1].upper() + cwd[1:]
- return cwd
+ def getcwd():
+ cwd = os.getcwd() # re-exports
+ cwd = os.path.realpath(cwd)
+ cwd = strtolocal(cwd)
+ if DRIVE_RE.match(cwd):
+ cwd = cwd[0:1].upper() + cwd[1:]
+ return cwd
- else:
- getcwd = os.getcwdb # re-exports
+
else:
- getcwd = os.getcwd # re-exports
+ getcwd = os.getcwdb # re-exports
# How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
_wide = _sysstr(
@@ -419,7 +401,7 @@
# type: (bytes, int, int) -> bytes
"""Use colwidth to find a c-column substring of s starting at byte
index start"""
- for x in pycompat.xrange(start + c, len(s)):
+ for x in range(start + c, len(s)):
t = s[start:x]
if colwidth(t) == c:
return t
@@ -528,7 +510,7 @@
return u + ellipsis
-class normcasespecs(object):
+class normcasespecs:
"""what a platform's normcase does to ASCII strings
This is specified per platform, and should be consistent with what normcase
@@ -601,10 +583,7 @@
# We need to decode/encode U+DCxx codes transparently since invalid UTF-8
# bytes are mapped to that range.
-if pycompat.ispy3:
- _utf8strict = r'surrogatepass'
-else:
- _utf8strict = r'strict'
+_utf8strict = r'surrogatepass'
_utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]
--- a/mercurial/error.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/error.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,7 +11,6 @@
imports.
"""
-from __future__ import absolute_import
import difflib
@@ -40,7 +39,7 @@
return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
-class Hint(object):
+class Hint:
"""Mix-in to provide a hint of an error
This should come first in the inheritance list to consume a hint and
@@ -69,14 +68,12 @@
def __bytes__(self):
return self.message
- if pycompat.ispy3:
-
- def __str__(self):
- # type: () -> str
- # the output would be unreadable if the message was translated,
- # but do not replace it with encoding.strfromlocal(), which
- # may raise another exception.
- return pycompat.sysstr(self.__bytes__())
+ def __str__(self):
+ # type: () -> str
+ # the output would be unreadable if the message was translated,
+ # but do not replace it with encoding.strfromlocal(), which
+ # may raise another exception.
+ return pycompat.sysstr(self.__bytes__())
def format(self):
# type: () -> bytes
--- a/mercurial/exchange.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/exchange.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import weakref
@@ -81,6 +80,14 @@
)
+def _format_params(params):
+ parts = []
+ for key, value in sorted(params.items()):
+ value = urlreq.quote(value)
+ parts.append(b"%s=%s" % (key, value))
+ return b';'.join(parts)
+
+
def getbundlespec(ui, fh):
"""Infer the bundlespec from a bundle file handle.
@@ -94,6 +101,8 @@
except KeyError:
return None
+ params = {}
+
b = readbundle(ui, fh, None)
if isinstance(b, changegroup.cg1unpacker):
alg = b._type
@@ -116,9 +125,12 @@
version = None
for part in b.iterparts():
if part.type == b'changegroup':
- version = part.params[b'version']
- if version in (b'01', b'02'):
+ cgversion = part.params[b'version']
+ if cgversion in (b'01', b'02'):
version = b'v2'
+ elif cgversion in (b'03',):
+ version = b'v2'
+ params[b'cg.version'] = cgversion
else:
raise error.Abort(
_(
@@ -134,13 +146,21 @@
splitted = requirements.split()
params = bundle2._formatrequirementsparams(splitted)
return b'none-v2;stream=v2;%s' % params
+ elif part.type == b'obsmarkers':
+ params[b'obsolescence'] = b'yes'
+ if not part.mandatory:
+ params[b'obsolescence-mandatory'] = b'no'
if not version:
raise error.Abort(
_(b'could not identify changegroup version in bundle')
)
-
- return b'%s-%s' % (comp, version)
+ spec = b'%s-%s' % (comp, version)
+ if params:
+ spec += b';'
+ spec += _format_params(params)
+ return spec
+
elif isinstance(b, streamclone.streamcloneapplier):
requirements = streamclone.readbundle1header(fh)[2]
formatted = bundle2._formatrequirementsparams(requirements)
@@ -223,7 +243,7 @@
return forcebundle1 or not op.remote.capable(b'bundle2')
-class pushoperation(object):
+class pushoperation:
"""A object that represent a single push operation
Its purpose is to carry push related state and very common operations.
@@ -806,7 +826,7 @@
bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
else:
affected = set()
- for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
+ for branch, heads in pushop.pushbranchmap.items():
remoteheads, newheads, unsyncedheads, discardedheads = heads
if remoteheads is not None:
remote = set(remoteheads)
@@ -855,7 +875,7 @@
checks = {p: [] for p in phases.allphases}
checks[phases.public].extend(pushop.remotephases.publicheads)
checks[phases.draft].extend(pushop.remotephases.draftroots)
- if any(pycompat.itervalues(checks)):
+ if any(checks.values()):
for phase in checks:
checks[phase].sort()
checkdata = phases.binaryencode(checks)
@@ -1117,7 +1137,7 @@
part = bundler.newpart(b'pushvars')
- for key, value in pycompat.iteritems(shellvars):
+ for key, value in shellvars.items():
part.addparam(key, value, mandatory=False)
@@ -1372,7 +1392,7 @@
pushop.bkresult = 1
-class pulloperation(object):
+class pulloperation:
"""A object that represent a single pull operation
It purpose is to carry pull related state and very common operation.
--- a/mercurial/extensions.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/extensions.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import ast
import collections
@@ -74,7 +73,7 @@
try:
mod = _extensions[name]
except KeyError:
- for k, v in pycompat.iteritems(_extensions):
+ for k, v in _extensions.items():
if k.endswith(b'.' + name) or k.endswith(b'/' + name):
mod = v
break
@@ -171,7 +170,7 @@
def _validatecmdtable(ui, cmdtable):
"""Check if extension commands have required attributes"""
- for c, e in pycompat.iteritems(cmdtable):
+ for c, e in cmdtable.items():
f = e[0]
missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)]
if not missing:
@@ -579,7 +578,7 @@
'''
assert callable(wrapper)
aliases, entry = cmdutil.findcmd(command, table)
- for alias, e in pycompat.iteritems(table):
+ for alias, e in table.items():
if e is entry:
key = alias
break
@@ -622,7 +621,7 @@
raise AttributeError("type '%s' has no property '%s'" % (cls, propname))
-class wrappedfunction(object):
+class wrappedfunction:
'''context manager for temporarily wrapping a function'''
def __init__(self, container, funcname, wrapper):
@@ -756,7 +755,7 @@
if name in exts or name in _order or name == b'__init__':
continue
exts[name] = path
- for name, path in pycompat.iteritems(_disabledextensions):
+ for name, path in _disabledextensions.items():
# If no path was provided for a disabled extension (e.g. "color=!"),
# don't replace the path we already found by the scan above.
if path:
@@ -818,7 +817,7 @@
return {
name: gettext(desc)
- for name, desc in pycompat.iteritems(__index__.docs)
+ for name, desc in __index__.docs.items()
if name not in _order
}
except (ImportError, AttributeError):
@@ -829,10 +828,10 @@
return {}
exts = {}
- for name, path in pycompat.iteritems(paths):
+ for name, path in paths.items():
doc = _disabledhelp(path)
if doc and name != b'__index__':
- exts[name] = doc.splitlines()[0]
+ exts[name] = stringutil.firstline(doc)
return exts
@@ -876,7 +875,7 @@
a = node.args[0]
if isinstance(a, ast.Str):
name = pycompat.sysbytes(a.s)
- elif pycompat.ispy3 and isinstance(a, ast.Bytes):
+ elif isinstance(a, ast.Bytes):
name = a.s
else:
continue
@@ -918,7 +917,7 @@
ext = _finddisabledcmd(ui, cmd, cmd, path, strict=strict)
if not ext:
# otherwise, interrogate each extension until there's a match
- for name, path in pycompat.iteritems(paths):
+ for name, path in paths.items():
ext = _finddisabledcmd(ui, cmd, name, path, strict=strict)
if ext:
break
@@ -936,16 +935,14 @@
assert doc is not None # help pytype
if shortname:
ename = ename.split(b'.')[-1]
- exts[ename] = doc.splitlines()[0].strip()
+ exts[ename] = stringutil.firstline(doc).strip()
return exts
def notloaded():
'''return short names of extensions that failed to load'''
- return [
- name for name, mod in pycompat.iteritems(_extensions) if mod is None
- ]
+ return [name for name, mod in _extensions.items() if mod is None]
def moduleversion(module):
--- a/mercurial/exthelper.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/exthelper.py Thu Jun 16 15:28:54 2022 +0200
@@ -9,20 +9,18 @@
### Extension helper ###
#####################################################################
-from __future__ import absolute_import
from . import (
commands,
error,
extensions,
- pycompat,
registrar,
)
from hgdemandimport import tracing
-class exthelper(object):
+class exthelper:
"""Helper for modular extension setup
A single helper should be instantiated for each module of an
@@ -115,7 +113,7 @@
self._extcommandwrappers.extend(other._extcommandwrappers)
self._functionwrappers.extend(other._functionwrappers)
self.cmdtable.update(other.cmdtable)
- for section, items in pycompat.iteritems(other.configtable):
+ for section, items in other.configtable.items():
if section in self.configtable:
self.configtable[section].update(items)
else:
--- a/mercurial/fancyopts.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/fancyopts.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import abc
import functools
@@ -205,7 +204,7 @@
return parsedopts, parsedargs
-class customopt(object): # pytype: disable=ignored-metaclass
+class customopt: # pytype: disable=ignored-metaclass
"""Manage defaults and mutations for any type of opt."""
__metaclass__ = abc.ABCMeta
--- a/mercurial/filelog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/filelog.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from .node import nullrev
@@ -25,7 +24,7 @@
@interfaceutil.implementer(repository.ifilestorage)
-class filelog(object):
+class filelog:
def __init__(self, opener, path):
self._revlog = revlog.revlog(
opener,
@@ -33,6 +32,7 @@
target=(revlog_constants.KIND_FILELOG, path),
radix=b'/'.join((b'data', path)),
censorable=True,
+ canonical_parent_order=False, # see comment in revlog.py
)
# Full name of the user visible file, relative to the repository root.
# Used by LFS.
@@ -208,6 +208,7 @@
return len(self.read(node))
# XXX if self.read(node).startswith("\1\n"), this returns (size+4)
+ # XXX See also basefilectx.cmp.
return self._revlog.size(rev)
def cmp(self, node, text):
@@ -239,7 +240,9 @@
# Used by repo upgrade.
def clone(self, tr, destrevlog, **kwargs):
if not isinstance(destrevlog, filelog):
- raise error.ProgrammingError(b'expected filelog to clone()')
+ msg = b'expected filelog to clone(), not %r'
+ msg %= destrevlog
+ raise error.ProgrammingError(msg)
return self._revlog.clone(tr, destrevlog._revlog, **kwargs)
--- a/mercurial/filemerge.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/filemerge.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import os
@@ -85,7 +84,7 @@
)
-class absentfilectx(object):
+class absentfilectx:
"""Represents a file that's ostensibly in a context but is actually not
present in it.
@@ -849,7 +848,7 @@
props = {b'ctx': ctx}
templateresult = template.renderdefault(props)
- input.label_detail = templateresult.splitlines()[0] # split for safety
+ input.label_detail = stringutil.firstline(templateresult) # avoid '\n'
def _populate_label_details(repo, inputs, tool=None):
@@ -1052,6 +1051,7 @@
markerstyle = internalmarkerstyle
if mergetype == fullmerge:
+ _run_partial_resolution_tools(repo, local, other, base)
# conflict markers generated by premerge will use 'detailed'
# settings if either ui.mergemarkers or the tool's mergemarkers
# setting is 'detailed'. This way tools can have basic labels in
@@ -1116,6 +1116,77 @@
backup.remove()
+def _run_partial_resolution_tools(repo, local, other, base):
+ """Runs partial-resolution tools on the three inputs and updates them."""
+ ui = repo.ui
+ if ui.configbool(b'merge', b'disable-partial-tools'):
+ return
+ # Tuples of (order, name, executable path, args)
+ tools = []
+ seen = set()
+ section = b"partial-merge-tools"
+ for k, v in ui.configitems(section):
+ name = k.split(b'.')[0]
+ if name in seen:
+ continue
+ patterns = ui.configlist(section, b'%s.patterns' % name, [])
+ is_match = True
+ if patterns:
+ m = match.match(repo.root, b'', patterns)
+ is_match = m(local.fctx.path())
+ if is_match:
+ if ui.configbool(section, b'%s.disable' % name):
+ continue
+ order = ui.configint(section, b'%s.order' % name, 0)
+ executable = ui.config(section, b'%s.executable' % name, name)
+ args = ui.config(section, b'%s.args' % name)
+ tools.append((order, name, executable, args))
+
+ if not tools:
+ return
+ # Sort in configured order (first in tuple)
+ tools.sort()
+
+ files = [
+ (b"local", local.fctx.path(), local.text()),
+ (b"base", base.fctx.path(), base.text()),
+ (b"other", other.fctx.path(), other.text()),
+ ]
+
+ with _maketempfiles(files) as temppaths:
+ localpath, basepath, otherpath = temppaths
+
+ for order, name, executable, args in tools:
+ cmd = procutil.shellquote(executable)
+ replace = {
+ b'local': localpath,
+ b'base': basepath,
+ b'other': otherpath,
+ }
+ args = util.interpolate(
+ br'\$',
+ replace,
+ args,
+ lambda s: procutil.shellquote(util.localpath(s)),
+ )
+
+ cmd = b'%s %s' % (cmd, args)
+ r = ui.system(cmd, cwd=repo.root, blockedtag=b'partial-mergetool')
+ if r:
+ raise error.StateError(
+ b'partial merge tool %s exited with code %d' % (name, r)
+ )
+ local_text = util.readfile(localpath)
+ other_text = util.readfile(otherpath)
+ if local_text == other_text:
+ # No need to run other tools if all conflicts have been resolved
+ break
+
+ local.set_text(local_text)
+ base.set_text(util.readfile(basepath))
+ other.set_text(other_text)
+
+
def _haltmerge():
msg = _(b'merge halted after failed merge (see hg resolve)')
raise error.InterventionRequired(msg)
@@ -1199,7 +1270,7 @@
def loadinternalmerge(ui, extname, registrarobj):
"""Load internal merge tool from specified registrarobj"""
- for name, func in pycompat.iteritems(registrarobj._table):
+ for name, func in registrarobj._table.items():
fullname = b':' + name
internals[fullname] = func
internals[b'internal:' + name] = func
--- a/mercurial/fileset.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/fileset.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,9 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import re
from .i18n import _
@@ -504,7 +502,7 @@
}
-class matchctx(object):
+class matchctx:
def __init__(self, basectx, ctx, cwd, badfn=None):
self._basectx = basectx
self.ctx = ctx
@@ -576,16 +574,14 @@
return False
try:
return predfn(fctx)
- except (IOError, OSError) as e:
- # open()-ing a directory fails with EACCES on Windows
- if e.errno in (
- errno.ENOENT,
- errno.EACCES,
- errno.ENOTDIR,
- errno.EISDIR,
- ):
- return False
- raise
+ # open()-ing a directory fails with PermissionError on Windows
+ except (
+ FileNotFoundError,
+ PermissionError,
+ NotADirectoryError,
+ IsADirectoryError,
+ ):
+ return False
else:
@@ -614,7 +610,7 @@
def loadpredicate(ui, extname, registrarobj):
"""Load fileset predicates from specified registrarobj"""
- for name, func in pycompat.iteritems(registrarobj._table):
+ for name, func in registrarobj._table.items():
symbols[name] = func
--- a/mercurial/filesetlang.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/filesetlang.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from .pycompat import getattr
--- a/mercurial/formatter.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/formatter.py Thu Jun 16 15:28:54 2022 +0200
@@ -105,11 +105,11 @@
baz: foo, bar
"""
-from __future__ import absolute_import, print_function
import contextlib
import itertools
import os
+import pickle
from .i18n import _
from .node import (
@@ -133,8 +133,6 @@
stringutil,
)
-pickle = util.pickle
-
def isprintable(obj):
"""Check if the given object can be directly passed in to formatter's
@@ -143,10 +141,10 @@
Returns False if the object is unsupported or must be pre-processed by
formatdate(), formatdict(), or formatlist().
"""
- return isinstance(obj, (type(None), bool, int, pycompat.long, float, bytes))
+ return isinstance(obj, (type(None), bool, int, float, bytes))
-class _nullconverter(object):
+class _nullconverter:
'''convert non-primitive data types to be processed by formatter'''
# set to True if context object should be stored as item
@@ -177,7 +175,7 @@
return list(data)
-class baseformatter(object):
+class baseformatter:
# set to True if the formater output a strict format that does not support
# arbitrary output in the stream.
@@ -295,11 +293,11 @@
def _iteritems(data):
'''iterate key-value pairs in stable order'''
if isinstance(data, dict):
- return sorted(pycompat.iteritems(data))
+ return sorted(data.items())
return data
-class _plainconverter(object):
+class _plainconverter:
'''convert non-primitive data types to text'''
storecontext = False
@@ -454,7 +452,7 @@
self._out.write(b"\n]\n")
-class _templateconverter(object):
+class _templateconverter:
'''convert non-primitive data types to be processed by templater'''
storecontext = True
@@ -543,7 +541,7 @@
@attr.s(frozen=True)
-class templatespec(object):
+class templatespec:
ref = attr.ib()
tmpl = attr.ib()
mapfile = attr.ib()
@@ -560,8 +558,7 @@
def literal_templatespec(tmpl):
- if pycompat.ispy3:
- assert not isinstance(tmpl, str), b'tmpl must not be a str'
+ assert not isinstance(tmpl, str), b'tmpl must not be a str'
return templatespec(b'', tmpl, None)
--- a/mercurial/graphmod.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/graphmod.py Thu Jun 16 15:28:54 2022 +0200
@@ -17,13 +17,11 @@
Data depends on type.
"""
-from __future__ import absolute_import
from .node import nullrev
from .thirdparty import attr
from . import (
dagop,
- pycompat,
smartset,
util,
)
@@ -359,7 +357,7 @@
@attr.s
-class asciistate(object):
+class asciistate:
"""State of ascii() graph rendering"""
seen = attr.ib(init=False, default=attr.Factory(list))
@@ -464,16 +462,16 @@
# shift_interline is the line containing the non-vertical
# edges between this entry and the next
shift_interline = echars[: idx * 2]
- for i in pycompat.xrange(2 + coldiff):
+ for i in range(2 + coldiff):
shift_interline.append(b' ')
count = ncols - idx - 1
if coldiff == -1:
- for i in pycompat.xrange(count):
+ for i in range(count):
shift_interline.extend([b'/', b' '])
elif coldiff == 0:
shift_interline.extend(echars[(idx + 1) * 2 : ncols * 2])
else:
- for i in pycompat.xrange(count):
+ for i in range(count):
shift_interline.extend([b'\\', b' '])
# draw edges from the current node to its parents
--- a/mercurial/grep.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/grep.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,10 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import difflib
-import errno
from .i18n import _
@@ -36,7 +34,7 @@
yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
-class linestate(object):
+class linestate:
def __init__(self, line, linenum, colstart, colend):
self.line = line
self.linenum = linenum
@@ -68,19 +66,19 @@
sm = difflib.SequenceMatcher(None, a, b)
for tag, alo, ahi, blo, bhi in sm.get_opcodes():
if tag == 'insert':
- for i in pycompat.xrange(blo, bhi):
+ for i in range(blo, bhi):
yield (b'+', b[i])
elif tag == 'delete':
- for i in pycompat.xrange(alo, ahi):
+ for i in range(alo, ahi):
yield (b'-', a[i])
elif tag == 'replace':
- for i in pycompat.xrange(alo, ahi):
+ for i in range(alo, ahi):
yield (b'-', a[i])
- for i in pycompat.xrange(blo, bhi):
+ for i in range(blo, bhi):
yield (b'+', b[i])
-class grepsearcher(object):
+class grepsearcher:
"""Search files and revisions for lines matching the given pattern
Options:
@@ -159,9 +157,8 @@
fctx = ctx[fn]
try:
return fctx.data()
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
else:
flog = self._getfile(fn)
fnode = ctx.filenode(fn)
--- a/mercurial/hbisect.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hbisect.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import contextlib
--- a/mercurial/help.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/help.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import itertools
import re
@@ -38,6 +37,7 @@
from .utils import (
compression,
resourceutil,
+ stringutil,
)
_exclkeywords = {
@@ -126,7 +126,7 @@
'''return a text listing of the given extensions'''
rst = []
if exts:
- for name, desc in sorted(pycompat.iteritems(exts)):
+ for name, desc in sorted(exts.items()):
if not showdeprecated and any(w in desc for w in _exclkeywords):
continue
rst.append(b'%s:%s: %s\n' % (b' ' * indent, name, desc))
@@ -281,7 +281,7 @@
name = names[0]
if not filtertopic(ui, name):
results[b'topics'].append((names[0], header))
- for cmd, entry in pycompat.iteritems(commands.table):
+ for cmd, entry in commands.table.items():
if len(entry) == 3:
summary = entry[2]
else:
@@ -290,35 +290,34 @@
func = entry[0]
docs = _(pycompat.getdoc(func)) or b''
if kw in cmd or lowercontains(summary) or lowercontains(docs):
- doclines = docs.splitlines()
- if doclines:
- summary = doclines[0]
+ if docs:
+ summary = stringutil.firstline(docs)
cmdname = cmdutil.parsealiases(cmd)[0]
if filtercmd(ui, cmdname, func, kw, docs):
continue
results[b'commands'].append((cmdname, summary))
for name, docs in itertools.chain(
- pycompat.iteritems(extensions.enabled(False)),
- pycompat.iteritems(extensions.disabled()),
+ extensions.enabled(False).items(),
+ extensions.disabled().items(),
):
if not docs:
continue
name = name.rpartition(b'.')[-1]
if lowercontains(name) or lowercontains(docs):
# extension docs are already translated
- results[b'extensions'].append((name, docs.splitlines()[0]))
+ results[b'extensions'].append((name, stringutil.firstline(docs)))
try:
mod = extensions.load(ui, name, b'')
except ImportError:
# debug message would be printed in extensions.load()
continue
- for cmd, entry in pycompat.iteritems(getattr(mod, 'cmdtable', {})):
+ for cmd, entry in getattr(mod, 'cmdtable', {}).items():
if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])):
cmdname = cmdutil.parsealiases(cmd)[0]
func = entry[0]
cmddoc = pycompat.getdoc(func)
if cmddoc:
- cmddoc = gettext(cmddoc).splitlines()[0]
+ cmddoc = stringutil.firstline(gettext(cmddoc))
else:
cmddoc = _(b'(no help text available)')
if filtercmd(ui, cmdname, func, kw, cmddoc):
@@ -608,7 +607,7 @@
# Abuse latin1 to use textwrap.dedent() on bytes.
text = textwrap.dedent(text.decode('latin1')).encode('latin1')
lines = text.splitlines()
- doclines = [(lines[0])]
+ doclines = [lines[0]]
for l in lines[1:]:
# Stop once we find some Python doctest
if l.strip().startswith(b'>>>'):
@@ -665,7 +664,7 @@
h = {}
# Command -> string showing synonyms
syns = {}
- for c, e in pycompat.iteritems(cmdtable):
+ for c, e in cmdtable.items():
fs = cmdutil.parsealiases(c)
f = fs[0]
syns[f] = fs
@@ -678,7 +677,7 @@
doc = gettext(doc)
if not doc:
doc = _(b"(no help text available)")
- h[f] = doc.splitlines()[0].rstrip()
+ h[f] = stringutil.firstline(doc).rstrip()
cat = getattr(func, 'helpcategory', None) or (
registrar.command.CATEGORY_NONE
@@ -1044,7 +1043,7 @@
cmd, ext, doc = extensions.disabledcmd(
ui, name, ui.configbool(b'ui', b'strict')
)
- doc = doc.splitlines()[0]
+ doc = stringutil.firstline(doc)
rst = listexts(
_(b"'%s' is provided by the following extension:") % cmd,
--- a/mercurial/helptext/config.txt Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/helptext/config.txt Thu Jun 16 15:28:54 2022 +0200
@@ -944,6 +944,30 @@
For a more comprehensive guide, see :hg:`help internals.dirstate-v2`.
+``use-dirstate-v2.automatic-upgrade-of-mismatching-repositories``
+ When enabled, an automatic upgrade will be triggered when a repository format
+ does not match its `use-dirstate-v2` config.
+
+ This is an advanced behavior that most users will not need. We recommend you
+ don't use this unless you are a seasoned administrator of a Mercurial install
+ base.
+
+ Automatic upgrade means that any process accessing the repository will
+ upgrade the repository format to use `dirstate-v2`. This only triggers if a
+ change is needed. This also applies to operations that would have been
+ read-only (like hg status).
+
+ If the repository cannot be locked, the automatic-upgrade operation will be
+ skipped. The next operation will attempt it again.
+
+ This configuration will apply for moves in any direction, either adding the
+ `dirstate-v2` format if `format.use-dirstate-v2=yes` or removing the
+ `dirstate-v2` requirement if `format.use-dirstate-v2=no`. So we recommend
+ setting both this value and `format.use-dirstate-v2` at the same time.
+
+``use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet``
+ Hide message when performing such automatic upgrade.
+
``use-dirstate-tracked-hint``
Enable or disable the writing of "tracked key" file alongside the dirstate.
(default to disabled)
@@ -976,6 +1000,34 @@
2) storing the value and comparing it to a later value.
+
+``use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories``
+ When enabled, an automatic upgrade will be triggered when a repository format
+ does not match its `use-dirstate-tracked-hint` config.
+
+ This is an advanced behavior that most users will not need. We recommend you
+ don't use this unless you are a seasoned administrator of a Mercurial install
+ base.
+
+ Automatic upgrade means that any process accessing the repository will
+ upgrade the repository format to use `dirstate-tracked-hint`. This only
+ triggers if a change is needed. This also applies to operations that would
+ have been read-only (like hg status).
+
+ If the repository cannot be locked, the automatic-upgrade operation will be
+ skipped. The next operation will attempt it again.
+
+ This configuration will apply for moves in any direction, either adding the
+ `dirstate-tracked-hint` format if `format.use-dirstate-tracked-hint=yes` or
+ removing the `dirstate-tracked-hint` requirement if
+ `format.use-dirstate-tracked-hint=no`. So we recommend setting both this
+ value and `format.use-dirstate-tracked-hint` at the same time.
+
+
+``use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet``
+ Hide message when performing such automatic upgrade.
+
+
``use-persistent-nodemap``
Enable or disable the "persistent-nodemap" feature which improves
performance if the Rust extensions are available.
@@ -1032,6 +1084,30 @@
Enabled by default in Mercurial 6.1.
+``use-share-safe.automatic-upgrade-of-mismatching-repositories``
+ When enabled, an automatic upgrade will be triggered when a repository format
+ does not match its `use-share-safe` config.
+
+ This is an advanced behavior that most users will not need. We recommend you
+ don't use this unless you are a seasoned administrator of a Mercurial install
+ base.
+
+ Automatic upgrade means that any process accessing the repository will
+ upgrade the repository format to use `share-safe`. This only triggers if a
+ change is needed. This also applies to operations that would have been
+ read-only (like hg status).
+
+ If the repository cannot be locked, the automatic-upgrade operation will be
+ skipped. The next operation will attempt it again.
+
+ This configuration will apply for moves in any direction, either adding the
+ `share-safe` format if `format.use-share-safe=yes` or removing the
+ `share-safe` requirement if `format.use-share-safe=no`. So we recommend
+ setting both this value and `format.use-share-safe` at the same time.
+
+``use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet``
+ Hide message when performing such automatic upgrade.
+
``usestore``
Enable or disable the "store" repository format which improves
compatibility with systems that fold case or otherwise mangle
@@ -2103,6 +2179,9 @@
Check :hg:`help config.format.use-share-safe` for details about the
share-safe feature.
+``safe-mismatch.source-safe:verbose-upgrade``
+ Display a message when upgrading (default: True)
+
``safe-mismatch.source-safe.warn``
Shows a warning on operations if the shared repository does not use
share-safe, but the source repository does.
@@ -2128,6 +2207,9 @@
Check :hg:`help config.format.use-share-safe` for details about the
share-safe feature.
+``safe-mismatch.source-not-safe:verbose-upgrade``
+ Display a message when upgrading (default: True)
+
``safe-mismatch.source-not-safe.warn``
Shows a warning on operations if the shared repository uses share-safe,
but the source repository does not.
--- a/mercurial/hg.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hg.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,9 +6,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import os
import posixpath
import shutil
@@ -77,8 +75,7 @@
# invalid paths specially here.
st = os.stat(path)
isfile = stat.S_ISREG(st.st_mode)
- # Python 2 raises TypeError, Python 3 ValueError.
- except (TypeError, ValueError) as e:
+ except ValueError as e:
raise error.Abort(
_(b'invalid path %s: %s') % (path, stringutil.forcebytestr(e))
)
@@ -530,9 +527,8 @@
# lock class requires the directory to exist.
try:
util.makedir(pooldir, False)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
poolvfs = vfsmod.vfs(pooldir)
basename = os.path.basename(sharepath)
@@ -895,13 +891,9 @@
create=True,
createopts=createopts,
)
- except OSError as inst:
- if inst.errno == errno.EEXIST:
- cleandir = None
- raise error.Abort(
- _(b"destination '%s' already exists") % dest
- )
- raise
+ except FileExistsError:
+ cleandir = None
+ raise error.Abort(_(b"destination '%s' already exists") % dest)
if revs:
if not srcpeer.capable(b'lookup'):
@@ -1535,7 +1527,7 @@
]
-class cachedlocalrepo(object):
+class cachedlocalrepo:
"""Holds a localrepository that can be cached and reused."""
def __init__(self, repo):
--- a/mercurial/hgweb/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
@@ -37,7 +36,7 @@
- list of virtual:real tuples (multi-repo view)
"""
- if isinstance(config, pycompat.unicode):
+ if isinstance(config, str):
raise error.ProgrammingError(
b'Mercurial only supports encoded strings: %r' % config
)
@@ -55,7 +54,7 @@
return hgwebdir_mod.hgwebdir(config, baseui=baseui)
-class httpservice(object):
+class httpservice:
def __init__(self, ui, app, opts):
self.ui = ui
self.app = app
--- a/mercurial/hgweb/common.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/common.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import base64
import errno
@@ -116,7 +115,7 @@
self.message = message
-class continuereader(object):
+class continuereader:
"""File object wrapper to handle HTTP 100-continue.
This is used by servers so they automatically handle Expect: 100-continue
--- a/mercurial/hgweb/hgweb_mod.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/hgweb_mod.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import os
@@ -111,7 +110,7 @@
return templateutil.mappinglist(reversed(breadcrumb))
-class requestcontext(object):
+class requestcontext:
"""Holds state/context for an individual request.
Servers can be multi-threaded. Holding state on the WSGI application
@@ -236,7 +235,7 @@
return self.res.sendresponse()
-class hgweb(object):
+class hgweb:
"""HTTP server for individual repositories.
Instances of this class serve HTTP responses for a particular
@@ -413,7 +412,7 @@
if cmd == b'archive':
fn = req.qsparams[b'node']
- for type_, spec in pycompat.iteritems(webutil.archivespecs):
+ for type_, spec in webutil.archivespecs.items():
ext = spec[2]
if fn.endswith(ext):
req.qsparams[b'node'] = fn[: -len(ext)]
--- a/mercurial/hgweb/hgwebdir_mod.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/hgwebdir_mod.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import gc
import os
@@ -269,7 +268,7 @@
return templateutil.mappinggenerator(_indexentriesgen, args=args)
-class hgwebdir(object):
+class hgwebdir:
"""HTTP server for multiple repositories.
Given a configuration, different repositories will be served depending
@@ -461,12 +460,9 @@
if real:
# Re-parse the WSGI environment to take into account our
# repository path component.
- uenv = req.rawenv
- if pycompat.ispy3:
- uenv = {
- k.decode('latin1'): v
- for k, v in pycompat.iteritems(uenv)
- }
+ uenv = {
+ k.decode('latin1'): v for k, v in req.rawenv.items()
+ }
req = requestmod.parserequestfromenv(
uenv,
reponame=virtualrepo,
--- a/mercurial/hgweb/request.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/request.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
# import wsgiref.validate
@@ -22,7 +21,7 @@
)
-class multidict(object):
+class multidict:
"""A dict like object that can store multiple values for a key.
Used to store parsed request parameters.
@@ -78,11 +77,11 @@
return vals[0]
def asdictoflists(self):
- return {k: list(v) for k, v in pycompat.iteritems(self._items)}
+ return {k: list(v) for k, v in self._items.items()}
@attr.s(frozen=True)
-class parsedrequest(object):
+class parsedrequest:
"""Represents a parsed WSGI request.
Contains both parsed parameters as well as a handle on the input stream.
@@ -161,24 +160,22 @@
# TODO enable this once we fix internal violations.
# wsgiref.validate.check_environ(env)
- # PEP-0333 states that environment keys and values are native strings
- # (bytes on Python 2 and str on Python 3). The code points for the Unicode
- # strings on Python 3 must be between \00000-\000FF. We deal with bytes
- # in Mercurial, so mass convert string keys and values to bytes.
- if pycompat.ispy3:
+ # PEP-0333 states that environment keys and values are native strings.
+ # The code points for the Unicode strings on Python 3 must be between
+ # \00000-\000FF. We deal with bytes in Mercurial, so mass convert string
+ # keys and values to bytes.
+ def tobytes(s):
+ if not isinstance(s, str):
+ return s
+ if pycompat.iswindows:
+ # This is what mercurial.encoding does for os.environ on
+ # Windows.
+ return encoding.strtolocal(s)
+ else:
+ # This is what is documented to be used for os.environ on Unix.
+ return pycompat.fsencode(s)
- def tobytes(s):
- if not isinstance(s, str):
- return s
- if pycompat.iswindows:
- # This is what mercurial.encoding does for os.environ on
- # Windows.
- return encoding.strtolocal(s)
- else:
- # This is what is documented to be used for os.environ on Unix.
- return pycompat.fsencode(s)
-
- env = {tobytes(k): tobytes(v) for k, v in pycompat.iteritems(env)}
+ env = {tobytes(k): tobytes(v) for k, v in env.items()}
# Some hosting solutions are emulating hgwebdir, and dispatching directly
# to an hgweb instance using this environment variable. This was always
@@ -312,7 +309,7 @@
# perform case normalization for us. We just rewrite underscore to dash
# so keys match what likely went over the wire.
headers = []
- for k, v in pycompat.iteritems(env):
+ for k, v in env.items():
if k.startswith(b'HTTP_'):
headers.append((k[len(b'HTTP_') :].replace(b'_', b'-'), v))
@@ -358,7 +355,7 @@
)
-class offsettrackingwriter(object):
+class offsettrackingwriter:
"""A file object like object that is append only and tracks write count.
Instances are bound to a callable. This callable is called with data
@@ -391,7 +388,7 @@
return self._offset
-class wsgiresponse(object):
+class wsgiresponse:
"""Represents a response to a WSGI request.
A response consists of a status line, headers, and a body.
--- a/mercurial/hgweb/server.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/server.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,10 +6,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
-import importlib
import os
import socket
import sys
@@ -53,7 +51,7 @@
return urlreq.unquote(path), query
-class _error_logger(object):
+class _error_logger:
def __init__(self, handler):
self.handler = handler
@@ -117,9 +115,8 @@
def do_write(self):
try:
self.do_hgweb()
- except socket.error as inst:
- if inst.errno != errno.EPIPE:
- raise
+ except BrokenPipeError:
+ pass
def do_POST(self):
try:
@@ -186,18 +183,11 @@
env['REMOTE_ADDR'] = self.client_address[0]
env['QUERY_STRING'] = query or ''
- if pycompat.ispy3:
- if self.headers.get_content_type() is None:
- env['CONTENT_TYPE'] = self.headers.get_default_type()
- else:
- env['CONTENT_TYPE'] = self.headers.get_content_type()
- length = self.headers.get('content-length')
+ if self.headers.get_content_type() is None:
+ env['CONTENT_TYPE'] = self.headers.get_default_type()
else:
- if self.headers.typeheader is None:
- env['CONTENT_TYPE'] = self.headers.type
- else:
- env['CONTENT_TYPE'] = self.headers.typeheader
- length = self.headers.getheader('content-length')
+ env['CONTENT_TYPE'] = self.headers.get_content_type()
+ length = self.headers.get('content-length')
if length:
env['CONTENT_LENGTH'] = length
for header in [
@@ -351,7 +341,7 @@
_mixin = socketserver.ForkingMixIn
else:
- class _mixin(object):
+ class _mixin:
pass
@@ -412,26 +402,9 @@
cls = MercurialHTTPServer
# ugly hack due to python issue5853 (for threaded use)
- try:
- import mimetypes
-
- mimetypes.init()
- except UnicodeDecodeError:
- # Python 2.x's mimetypes module attempts to decode strings
- # from Windows' ANSI APIs as ascii (fail), then re-encode them
- # as ascii (clown fail), because the default Python Unicode
- # codec is hardcoded as ascii.
+ import mimetypes
- sys.argv # unwrap demand-loader so that reload() works
- # resurrect sys.setdefaultencoding()
- try:
- importlib.reload(sys)
- except AttributeError:
- reload(sys)
- oldenc = sys.getdefaultencoding()
- sys.setdefaultencoding(b"latin1") # or any full 8-bit encoding
- mimetypes.init()
- sys.setdefaultencoding(oldenc)
+ mimetypes.init()
address = ui.config(b'web', b'address')
port = urlutil.getport(ui.config(b'web', b'port'))
--- a/mercurial/hgweb/webcommands.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/webcommands.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import copy
import mimetypes
@@ -47,7 +46,7 @@
commands = {}
-class webcommand(object):
+class webcommand:
"""Decorator used to register a web command handler.
The decorator takes as its positional arguments the name/path the
@@ -229,7 +228,7 @@
def revgen():
cl = web.repo.changelog
- for i in pycompat.xrange(len(web.repo) - 1, 0, -100):
+ for i in range(len(web.repo) - 1, 0, -100):
l = []
for j in cl.revs(max(0, i - 99), i):
ctx = web.repo[j]
@@ -564,7 +563,7 @@
l = len(path)
abspath = b"/" + path
- for full, n in pycompat.iteritems(mf):
+ for full, n in mf.items():
# the virtual path (working copy path) used for the full
# (repository) path
f = decodepath(full)
@@ -1521,7 +1520,7 @@
early, other = [], []
primary = lambda s: s.partition(b'|')[0]
- for c, e in pycompat.iteritems(commands.table):
+ for c, e in commands.table.items():
doc = _getdoc(e)
if b'DEPRECATED' in doc or c.startswith(b'debug'):
continue
--- a/mercurial/hgweb/webutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/webutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import copy
import difflib
@@ -57,7 +56,7 @@
allowed = ui.configlist(b'web', b'allow-archive', untrusted=True)
archives = []
- for typ, spec in pycompat.iteritems(archivespecs):
+ for typ, spec in archivespecs.items():
if typ in allowed or ui.configbool(
b'web', b'allow' + typ, untrusted=True
):
@@ -100,7 +99,7 @@
step *= 10
-class revnav(object):
+class revnav:
def __init__(self, repo):
"""Navigation generation object
@@ -721,7 +720,7 @@
len1 = lhi - llo
len2 = rhi - rlo
count = min(len1, len2)
- for i in pycompat.xrange(count):
+ for i in range(count):
yield _compline(
type=type,
leftlineno=llo + i + 1,
@@ -730,7 +729,7 @@
rightline=rightlines[rlo + i],
)
if len1 > len2:
- for i in pycompat.xrange(llo + count, lhi):
+ for i in range(llo + count, lhi):
yield _compline(
type=type,
leftlineno=i + 1,
@@ -739,7 +738,7 @@
rightline=None,
)
elif len2 > len1:
- for i in pycompat.xrange(rlo + count, rhi):
+ for i in range(rlo + count, rhi):
yield _compline(
type=type,
leftlineno=None,
@@ -864,7 +863,7 @@
def itermaps(self, context):
separator = self._start
- for key, value in sorted(pycompat.iteritems(self._vars)):
+ for key, value in sorted(self._vars.items()):
yield {
b'name': key,
b'value': pycompat.bytestr(value),
--- a/mercurial/hgweb/wsgicgi.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/wsgicgi.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
# This was originally copied from the public domain code at
# http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side
-from __future__ import absolute_import
import os
@@ -24,7 +23,7 @@
procutil.setbinary(procutil.stdin)
procutil.setbinary(procutil.stdout)
- environ = dict(pycompat.iteritems(os.environ)) # re-exports
+ environ = dict(os.environ.items()) # re-exports
environ.setdefault('PATH_INFO', '')
if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'):
# IIS includes script_name in PATH_INFO
--- a/mercurial/hgweb/wsgiheaders.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hgweb/wsgiheaders.py Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,6 @@
# Regular expression that matches `special' characters in parameters, the
# existence of which force quoting of the parameter value.
-from __future__ import absolute_import, print_function
import re
@@ -30,7 +29,7 @@
return param
-class Headers(object):
+class Headers:
"""Manage a collection of HTTP response headers"""
def __init__(self, headers=None):
--- a/mercurial/hook.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/hook.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import errno
@@ -167,7 +166,7 @@
else:
env[b'HGPLAIN'] = b''
- for k, v in pycompat.iteritems(args):
+ for k, v in args.items():
# transaction changes can accumulate MBs of data, so skip it
# for external hooks
if k == b'changes':
--- a/mercurial/httpconnection.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/httpconnection.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
@@ -27,7 +26,7 @@
urlreq = util.urlreq
# moved here from url.py to avoid a cycle
-class httpsendfile(object):
+class httpsendfile:
"""This is a wrapper around the objects returned by python's "open".
Its purpose is to send file-like objects via HTTP.
@@ -94,7 +93,7 @@
bestuser = None
bestlen = 0
bestauth = None
- for group, auth in pycompat.iteritems(groups):
+ for group, auth in groups.items():
if user and user != auth.get(b'username', user):
# If a username was set in the URI, the entry username
# must either match it or be unset
--- a/mercurial/httppeer.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/httppeer.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
import io
@@ -14,6 +13,7 @@
import socket
import struct
+from concurrent import futures
from .i18n import _
from .pycompat import getattr
from . import (
@@ -55,14 +55,14 @@
result = []
n = 0
- for i in pycompat.xrange(0, len(value), valuelen):
+ for i in range(0, len(value), valuelen):
n += 1
result.append((fmt % str(n), pycompat.strurl(value[i : i + valuelen])))
return result
-class _multifile(object):
+class _multifile:
def __init__(self, *fileobjs):
for f in fileobjs:
if not util.safehasattr(f, b'length'):
@@ -231,15 +231,6 @@
return req, cu, qs
-def _reqdata(req):
- """Get request data, if any. If no data, returns None."""
- if pycompat.ispy3:
- return req.data
- if not req.has_data():
- return None
- return req.get_data()
-
-
def sendrequest(ui, opener, req):
"""Send a prepared HTTP request.
@@ -274,7 +265,7 @@
% b' %d bytes of commands arguments in headers'
% hgargssize
)
- data = _reqdata(req)
+ data = req.data
if data is not None:
length = getattr(data, 'length', None)
if length is None:
@@ -538,12 +529,12 @@
raise exception
-class queuedcommandfuture(pycompat.futures.Future):
+class queuedcommandfuture(futures.Future):
"""Wraps result() on command futures to trigger submission on call."""
def result(self, timeout=None):
if self.done():
- return pycompat.futures.Future.result(self, timeout)
+ return futures.Future.result(self, timeout)
self._peerexecutor.sendcommands()
--- a/mercurial/i18n.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/i18n.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import gettext as gettextmod
import locale
@@ -86,9 +85,9 @@
cache = _msgcache.setdefault(encoding.encoding, {})
if message not in cache:
- if type(message) is pycompat.unicode:
+ if type(message) is str:
# goofy unicode docstrings in test
- paragraphs = message.split(u'\n\n') # type: List[pycompat.unicode]
+ paragraphs = message.split(u'\n\n') # type: List[str]
else:
# should be ascii, but we have unicode docstrings in test, which
# are converted to utf-8 bytes on Python 3.
--- a/mercurial/interfaces/dirstate.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/interfaces/dirstate.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import contextlib
from . import util as interfaceutil
@@ -63,6 +61,9 @@
used to get real file paths. Use vfs functions instead.
"""
+ def get_entry(path):
+ """return a DirstateItem for the associated path"""
+
def pathto(f, cwd=None):
pass
--- a/mercurial/interfaces/repository.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/interfaces/repository.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from ..i18n import _
from .. import error
@@ -389,7 +388,7 @@
@interfaceutil.implementer(ipeerbase)
-class peer(object):
+class peer:
"""Base class for peer repositories."""
limitedarguments = False
--- a/mercurial/interfaces/util.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/interfaces/util.py Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,6 @@
# bookkeeping for declaring interfaces. So, we use stubs for various
# zope.interface primitives unless instructed otherwise.
-from __future__ import absolute_import
from .. import encoding
@@ -21,11 +20,11 @@
implementer = zi.implementer
else:
- class Attribute(object):
+ class Attribute:
def __init__(self, __name__, __doc__=b''):
pass
- class Interface(object):
+ class Interface:
def __init__(
self, name, bases=(), attrs=None, __doc__=None, __module__=None
):
--- a/mercurial/keepalive.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/keepalive.py Thu Jun 16 15:28:54 2022 +0200
@@ -82,10 +82,8 @@
# $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
-from __future__ import absolute_import, print_function
import collections
-import errno
import hashlib
import socket
import sys
@@ -108,7 +106,7 @@
DEBUG = None
-class ConnectionManager(object):
+class ConnectionManager:
"""
The connection manager must be able to:
* keep track of all existing
@@ -171,7 +169,7 @@
return dict(self._hostmap)
-class KeepAliveHandler(object):
+class KeepAliveHandler:
def __init__(self, timeout=None):
self._cm = ConnectionManager()
self._timeout = timeout
@@ -194,7 +192,7 @@
def close_all(self):
"""close all open connections"""
- for host, conns in pycompat.iteritems(self._cm.get_all()):
+ for host, conns in self._cm.get_all().items():
for h in conns:
self._cm.remove(h)
h.close()
@@ -399,12 +397,8 @@
# modification from socket.py
def __init__(self, sock, debuglevel=0, strict=0, method=None):
- extrakw = {}
- if not pycompat.ispy3:
- extrakw['strict'] = True
- extrakw['buffering'] = True
httplib.HTTPResponse.__init__(
- self, sock, debuglevel=debuglevel, method=method, **extrakw
+ self, sock, debuglevel=debuglevel, method=method
)
self.fileno = sock.fileno
self.code = None
@@ -662,14 +656,14 @@
else:
self.sock.sendall(str)
self.sentbytescount += len(str)
- except socket.error as v:
- reraise = True
- if v.args[0] == errno.EPIPE: # Broken pipe
- if self._HTTPConnection__state == httplib._CS_REQ_SENT:
- self._broken_pipe_resp = None
- self._broken_pipe_resp = self.getresponse()
- reraise = False
- self.close()
+ except BrokenPipeError:
+ if self._HTTPConnection__state == httplib._CS_REQ_SENT:
+ self._broken_pipe_resp = None
+ self._broken_pipe_resp = self.getresponse()
+ reraise = False
+ else:
+ reraise = True
+ self.close()
if reraise:
raise
@@ -794,7 +788,7 @@
global DEBUG
dbbackup = DEBUG
- class FakeLogger(object):
+ class FakeLogger:
def debug(self, msg, *args):
print(msg % args)
--- a/mercurial/linelog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/linelog.py Thu Jun 16 15:28:54 2022 +0200
@@ -18,7 +18,6 @@
deletion is performed on the file, a jump instruction is used to patch
in a new body of annotate information.
"""
-from __future__ import absolute_import, print_function
import abc
import struct
@@ -34,7 +33,7 @@
@attr.s
-class lineinfo(object):
+class lineinfo:
# Introducing revision of this line.
rev = attr.ib()
# Line number for this line in its introducing revision.
@@ -44,7 +43,7 @@
@attr.s
-class annotateresult(object):
+class annotateresult:
rev = attr.ib()
lines = attr.ib()
_eof = attr.ib()
@@ -53,7 +52,7 @@
return iter(self.lines)
-class _llinstruction(object): # pytype: disable=ignored-metaclass
+class _llinstruction: # pytype: disable=ignored-metaclass
__metaclass__ = abc.ABCMeta
@@ -234,7 +233,7 @@
raise NotImplementedError(b'Unimplemented opcode %r' % opcode)
-class linelog(object):
+class linelog:
"""Efficient cache for per-line history information."""
def __init__(self, program=None, maxrev=0):
@@ -294,7 +293,7 @@
% (expected, numentries)
)
instructions = [_eof(0, 0)]
- for offset in pycompat.xrange(1, numentries):
+ for offset in range(1, numentries):
instructions.append(_decodeone(buf, offset * _llentry.size))
return cls(instructions, maxrev=maxrev)
@@ -350,7 +349,7 @@
tgt = oldproglen + (b2 - b1 + 1)
# Jump to skip the insert if we're at an older revision.
appendinst(_jl(rev, tgt))
- for linenum in pycompat.xrange(b1, b2):
+ for linenum in range(b1, b2):
if _internal_blines is None:
bappend(lineinfo(rev, linenum, programlen()))
appendinst(_line(rev, linenum))
@@ -448,7 +447,7 @@
# only take as many steps as there are instructions in the
# program - if we don't find an EOF or our stop-line before
# then, something is badly broken.
- for step in pycompat.xrange(len(self._program)):
+ for step in range(len(self._program)):
inst = self._program[pc]
nextpc = pc + 1
if isinstance(inst, _jump):
--- a/mercurial/localrepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/localrepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,9 +6,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
-import errno
+
import functools
import os
import random
@@ -16,6 +14,7 @@
import time
import weakref
+from concurrent import futures
from .i18n import _
from .node import (
bin,
@@ -251,7 +250,7 @@
@interfaceutil.implementer(repository.ipeercommandexecutor)
-class localcommandexecutor(object):
+class localcommandexecutor:
def __init__(self, peer):
self._peer = peer
self._sent = False
@@ -278,7 +277,7 @@
# method on the peer and return a resolved future.
fn = getattr(self._peer, pycompat.sysstr(command))
- f = pycompat.futures.Future()
+ f = futures.Future()
try:
result = fn(**pycompat.strkwargs(args))
@@ -517,19 +516,18 @@
"""reads the require file present at root of this vfs
and return a set of requirements
- If allowmissing is True, we suppress ENOENT if raised"""
+ If allowmissing is True, we suppress FileNotFoundError if raised"""
# requires file contains a newline-delimited list of
# features/capabilities the opener (us) must have in order to use
# the repository. This file was introduced in Mercurial 0.9.2,
# which means very old repositories may not have one. We assume
# a missing file translates to no requirements.
try:
- requirements = set(vfs.read(b'requires').splitlines())
- except IOError as e:
- if not (allowmissing and e.errno == errno.ENOENT):
+ return set(vfs.read(b'requires').splitlines())
+ except FileNotFoundError:
+ if not allowmissing:
raise
- requirements = set()
- return requirements
+ return set()
def makelocalrepository(baseui, path, intents=None):
@@ -583,9 +581,8 @@
if not hgvfs.isdir():
try:
hgvfs.stat()
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
except ValueError as e:
# Can be raised on Python 3.8 when path is invalid.
raise error.Abort(
@@ -631,6 +628,9 @@
mismatch_config = ui.config(
b'share', b'safe-mismatch.source-not-safe'
)
+ mismatch_verbose_upgrade = ui.configbool(
+ b'share', b'safe-mismatch.source-not-safe:verbose-upgrade'
+ )
if mismatch_config in (
b'downgrade-allow',
b'allow',
@@ -646,6 +646,7 @@
requirements,
mismatch_config,
mismatch_warn,
+ mismatch_verbose_upgrade,
)
elif mismatch_config == b'abort':
raise error.Abort(
@@ -671,6 +672,9 @@
mismatch_warn = ui.configbool(
b'share', b'safe-mismatch.source-safe.warn'
)
+ mismatch_verbose_upgrade = ui.configbool(
+ b'share', b'safe-mismatch.source-safe:verbose-upgrade'
+ )
if mismatch_config in (
b'upgrade-allow',
b'allow',
@@ -686,6 +690,7 @@
requirements,
mismatch_config,
mismatch_warn,
+ mismatch_verbose_upgrade,
)
elif mismatch_config == b'abort':
raise error.Abort(
@@ -1070,6 +1075,7 @@
b'storage', b'revlog.optimize-delta-parent-choice'
)
options[b'deltabothparents'] = deltabothparents
+ options[b'debug-delta'] = ui.configbool(b'debug', b'revlog.debug-delta')
issue6528 = ui.configbool(b'storage', b'revlog.issue6528.fix-incoming')
options[b'issue6528.fix-incoming'] = issue6528
@@ -1215,7 +1221,7 @@
@interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
-class revlogfilestorage(object):
+class revlogfilestorage:
"""File storage when using revlogs."""
def file(self, path):
@@ -1226,7 +1232,7 @@
@interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
-class revlognarrowfilestorage(object):
+class revlognarrowfilestorage:
"""File storage when using revlogs and narrow files."""
def file(self, path):
@@ -1259,7 +1265,7 @@
@interfaceutil.implementer(repository.ilocalrepositorymain)
-class localrepository(object):
+class localrepository:
"""Main class for representing local repositories.
All local repositories are instances of this class.
@@ -1741,7 +1747,9 @@
def _makedirstate(self):
"""Extension point for wrapping the dirstate per-repo."""
- sparsematchfn = lambda: sparse.matcher(self)
+ sparsematchfn = None
+ if sparse.use_sparse(self):
+ sparsematchfn = lambda: sparse.matcher(self)
v2_req = requirementsmod.DIRSTATE_V2_REQUIREMENT
th = requirementsmod.DIRSTATE_TRACKED_HINT_V1
use_dirstate_v2 = v2_req in self.requirements
@@ -1884,7 +1892,7 @@
# wdirrev isn't contiguous so the slice shouldn't include it
return [
self[i]
- for i in pycompat.xrange(*changeid.indices(len(self)))
+ for i in range(*changeid.indices(len(self)))
if i not in self.changelog.filteredrevs
]
@@ -2044,7 +2052,7 @@
# This simplifies its cache management by having one decorated
# function (this one) and the rest simply fetch things from it.
- class tagscache(object):
+ class tagscache:
def __init__(self):
# These two define the set of tags for this repository. tags
# maps tag name to node; tagtypes maps tag name to 'global' or
@@ -2068,7 +2076,7 @@
else:
tags = self._tagscache.tags
rev = self.changelog.rev
- for k, v in pycompat.iteritems(tags):
+ for k, v in tags.items():
try:
# ignore tags to unknown nodes
rev(v)
@@ -2103,13 +2111,12 @@
# writing to the cache), but the rest of Mercurial wants them in
# local encoding.
tags = {}
- for (name, (node, hist)) in pycompat.iteritems(alltags):
+ for (name, (node, hist)) in alltags.items():
if node != self.nullid:
tags[encoding.tolocal(name)] = node
tags[b'tip'] = self.changelog.tip()
tagtypes = {
- encoding.tolocal(name): value
- for (name, value) in pycompat.iteritems(tagtypes)
+ encoding.tolocal(name): value for (name, value) in tagtypes.items()
}
return (tags, tagtypes)
@@ -2128,7 +2135,7 @@
'''return a list of tags ordered by revision'''
if not self._tagscache.tagslist:
l = []
- for t, n in pycompat.iteritems(self.tags()):
+ for t, n in self.tags().items():
l.append((self.changelog.rev(n), t, n))
self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
@@ -2138,9 +2145,9 @@
'''return the tags associated with a node'''
if not self._tagscache.nodetagscache:
nodetagscache = {}
- for t, n in pycompat.iteritems(self._tagscache.tags):
+ for t, n in self._tagscache.tags.items():
nodetagscache.setdefault(n, []).append(t)
- for tags in pycompat.itervalues(nodetagscache):
+ for tags in nodetagscache.values():
tags.sort()
self._tagscache.nodetagscache = nodetagscache
return self._tagscache.nodetagscache.get(node, [])
@@ -2256,7 +2263,7 @@
mf = matchmod.match(self.root, b'', [pat])
fn = None
params = cmd
- for name, filterfn in pycompat.iteritems(self._datafilters):
+ for name, filterfn in self._datafilters.items():
if cmd.startswith(name):
fn = filterfn
params = cmd[len(name) :].lstrip()
@@ -3503,9 +3510,8 @@
vfs.tryunlink(dest)
try:
vfs.rename(src, dest)
- except OSError as exc: # journal file does not yet exist
- if exc.errno != errno.ENOENT:
- raise
+ except FileNotFoundError: # journal file does not yet exist
+ pass
return a
@@ -3517,11 +3523,20 @@
def instance(ui, path, create, intents=None, createopts=None):
+
+ # prevent cyclic import localrepo -> upgrade -> localrepo
+ from . import upgrade
+
localpath = urlutil.urllocalpath(path)
if create:
createrepository(ui, localpath, createopts=createopts)
- return makelocalrepository(ui, localpath, intents=intents)
+ def repo_maker():
+ return makelocalrepository(ui, localpath, intents=intents)
+
+ repo = repo_maker()
+ repo = upgrade.may_auto_upgrade(repo, repo_maker)
+ return repo
def islocal(path):
@@ -3914,7 +3929,7 @@
#
# But we have to allow the close() method because some constructors
# of repos call close() on repo references.
- class poisonedrepository(object):
+ class poisonedrepository:
def __getattribute__(self, item):
if item == 'close':
return object.__getattribute__(self, item)
--- a/mercurial/lock.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/lock.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import errno
@@ -39,9 +38,8 @@
if pycompat.sysplatform.startswith(b'linux'):
try:
result += b'/%x' % os.stat(b'/proc/self/ns/pid').st_ino
- except OSError as ex:
- if ex.errno not in (errno.ENOENT, errno.EACCES, errno.ENOTDIR):
- raise
+ except (FileNotFoundError, PermissionError, NotADirectoryError):
+ pass
return result
@@ -174,7 +172,7 @@
return l
-class lock(object):
+class lock:
"""An advisory lock held by one process to control access to a set
of files. Non-cooperating processes or incorrectly written scripts
can ignore Mercurial's locking scheme and stomp all over the
@@ -312,10 +310,8 @@
"""
try:
return self.vfs.readlock(self.f)
- except (OSError, IOError) as why:
- if why.errno == errno.ENOENT:
- return None
- raise
+ except FileNotFoundError:
+ return None
def _lockshouldbebroken(self, locker):
if locker is None:
--- a/mercurial/logcmdutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/logcmdutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import itertools
import os
@@ -228,7 +227,7 @@
)
-class changesetdiffer(object):
+class changesetdiffer:
"""Generate diff of changeset with pre-configured filtering functions"""
def _makefilematcher(self, ctx):
@@ -262,7 +261,7 @@
return b' '.join(labels)
-class changesetprinter(object):
+class changesetprinter:
'''show changeset information when templating not requested.'''
def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
@@ -328,7 +327,7 @@
if branch != b'default':
self.ui.write(columns[b'branch'] % branch, label=b'log.branch')
- for nsname, ns in pycompat.iteritems(self.repo.names):
+ for nsname, ns in self.repo.names.items():
# branches has special logic already handled above, so here we just
# skip it
if nsname == b'branches':
@@ -416,7 +415,7 @@
self.ui.write(b"\n\n")
else:
self.ui.write(
- columns[b'summary'] % description.splitlines()[0],
+ columns[b'summary'] % stringutil.firstline(description),
label=b'log.summary',
)
self.ui.write(b"\n")
@@ -705,7 +704,7 @@
@attr.s
-class walkopts(object):
+class walkopts:
"""Options to configure a set of revisions and file matcher factory
to scan revision/file history
"""
@@ -990,7 +989,7 @@
opts[b'_patslog'] = list(wopts.pats)
expr = []
- for op, val in sorted(pycompat.iteritems(opts)):
+ for op, val in sorted(opts.items()):
if not val:
continue
revop, listop = _opt2logrevset[op]
--- a/mercurial/logexchange.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/logexchange.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,12 +6,10 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .node import hex
from . import (
- pycompat,
util,
vfs as vfsmod,
)
@@ -78,7 +76,7 @@
if oldpath != remotepath:
f.write(b'%s\0%s\0%s\n' % (node, oldpath, rname))
- for name, node in sorted(pycompat.iteritems(names)):
+ for name, node in sorted(names.items()):
if nametype == b"branches":
for n in node:
f.write(b'%s\0%s\0%s\n' % (n, remotepath, name))
@@ -160,7 +158,7 @@
with remoterepo.commandexecutor() as e:
branchmap = e.callcommand(b'branchmap', {}).result()
- for branch, nodes in pycompat.iteritems(branchmap):
+ for branch, nodes in branchmap.items():
bmap[branch] = []
for node in nodes:
if node in repo and not repo[node].obsolete():
--- a/mercurial/loggingutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/loggingutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,13 +6,11 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
from . import (
encoding,
- pycompat,
)
from .utils import (
@@ -55,7 +53,7 @@
else:
if st.st_size >= maxsize:
path = vfs.join(name)
- for i in pycompat.xrange(maxfiles - 1, 1, -1):
+ for i in range(maxfiles - 1, 1, -1):
rotate(
oldpath=b'%s.%d' % (path, i - 1),
newpath=b'%s.%d' % (path, i),
@@ -74,7 +72,7 @@
return b'*' in tracked or event in tracked
-class filelogger(object):
+class filelogger:
"""Basic logger backed by physical file with optional rotation"""
def __init__(self, vfs, name, tracked, maxfiles=0, maxsize=0):
@@ -105,7 +103,7 @@
)
-class fileobjectlogger(object):
+class fileobjectlogger:
"""Basic logger backed by file-like object"""
def __init__(self, fp, tracked):
@@ -130,7 +128,7 @@
)
-class proxylogger(object):
+class proxylogger:
"""Forward log events to another logger to be set later"""
def __init__(self):
--- a/mercurial/lsprof.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/lsprof.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,10 +1,7 @@
-from __future__ import absolute_import, print_function
-
import _lsprof
import sys
from .pycompat import getattr
-from . import pycompat
Profiler = _lsprof.Profiler
@@ -25,7 +22,7 @@
return Stats(p.getstats())
-class Stats(object):
+class Stats:
"""XXX docstring"""
def __init__(self, data):
@@ -120,13 +117,11 @@
def label(code):
if isinstance(code, str):
- if sys.version_info.major >= 3:
- code = code.encode('latin-1')
- return code
+ return code.encode('latin-1')
try:
mname = _fn2mod[code.co_filename]
except KeyError:
- for k, v in list(pycompat.iteritems(sys.modules)):
+ for k, v in list(sys.modules.items()):
if v is None:
continue
if not isinstance(getattr(v, '__file__', None), str):
@@ -139,7 +134,4 @@
res = '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
- if sys.version_info.major >= 3:
- res = res.encode('latin-1')
-
- return res
+ return res.encode('latin-1')
--- a/mercurial/lsprofcalltree.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/lsprofcalltree.py Thu Jun 16 15:28:54 2022 +0200
@@ -10,7 +10,6 @@
of the GNU General Public License, incorporated herein by reference.
"""
-from __future__ import absolute_import
from . import pycompat
@@ -27,7 +26,7 @@
)
-class KCacheGrind(object):
+class KCacheGrind:
def __init__(self, profiler):
self.data = profiler.getstats()
self.out_file = None
--- a/mercurial/mail.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/mail.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import email
import email.charset
@@ -261,9 +260,11 @@
)
)
else:
- if not procutil.findexe(method):
+ command = procutil.shellsplit(method)
+ command = command[0] if command else b''
+ if not (command and procutil.findexe(command)):
raise error.Abort(
- _(b'%r specified as email transport, but not in PATH') % method
+ _(b'%r specified as email transport, but not in PATH') % command
)
@@ -468,43 +469,28 @@
return mimetextqp(s, 'plain', cs)
-if pycompat.ispy3:
-
- Generator = email.generator.BytesGenerator
-
- def parse(fp):
- # type: (Any) -> email.message.Message
- ep = email.parser.Parser()
- # disable the "universal newlines" mode, which isn't binary safe.
- # I have no idea if ascii/surrogateescape is correct, but that's
- # what the standard Python email parser does.
- fp = io.TextIOWrapper(
- fp, encoding='ascii', errors='surrogateescape', newline=chr(10)
- )
- try:
- return ep.parse(fp)
- finally:
- fp.detach()
-
- def parsebytes(data):
- # type: (bytes) -> email.message.Message
- ep = email.parser.BytesParser()
- return ep.parsebytes(data)
+Generator = email.generator.BytesGenerator
-else:
-
- Generator = email.generator.Generator
+def parse(fp):
+ # type: (Any) -> email.message.Message
+ ep = email.parser.Parser()
+ # disable the "universal newlines" mode, which isn't binary safe.
+ # I have no idea if ascii/surrogateescape is correct, but that's
+ # what the standard Python email parser does.
+ fp = io.TextIOWrapper(
+ fp, encoding='ascii', errors='surrogateescape', newline=chr(10)
+ )
+ try:
+ return ep.parse(fp)
+ finally:
+ fp.detach()
- def parse(fp):
- # type: (Any) -> email.message.Message
- ep = email.parser.Parser()
- return ep.parse(fp)
- def parsebytes(data):
- # type: (str) -> email.message.Message
- ep = email.parser.Parser()
- return ep.parsestr(data)
+def parsebytes(data):
+ # type: (bytes) -> email.message.Message
+ ep = email.parser.BytesParser()
+ return ep.parsebytes(data)
def headdecode(s):
--- a/mercurial/manifest.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/manifest.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import heapq
import itertools
@@ -85,7 +84,7 @@
return b''.join(lines)
-class lazymanifestiter(object):
+class lazymanifestiter:
def __init__(self, lm):
self.pos = 0
self.lm = lm
@@ -108,7 +107,7 @@
__next__ = next
-class lazymanifestiterentries(object):
+class lazymanifestiterentries:
def __init__(self, lm):
self.lm = lm
self.pos = 0
@@ -159,7 +158,7 @@
_manifestflags = {b'', b'l', b't', b'x'}
-class _lazymanifest(object):
+class _lazymanifest:
"""A pure python manifest backed by a byte string. It is supplimented with
internal lists as it is modified, until it is compacted back to a pure byte
string.
@@ -474,7 +473,7 @@
@interfaceutil.implementer(repository.imanifestdict)
-class manifestdict(object):
+class manifestdict:
def __init__(self, nodelen, data=b''):
self._nodelen = nodelen
self._lm = _lazymanifest(nodelen, data)
@@ -797,7 +796,7 @@
@interfaceutil.implementer(repository.imanifestdict)
-class treemanifest(object):
+class treemanifest:
def __init__(self, nodeconstants, dir=b'', text=b''):
self._dir = dir
self.nodeconstants = nodeconstants
@@ -827,9 +826,7 @@
def _loadalllazy(self):
selfdirs = self._dirs
subpath = self._subpath
- for d, (node, readsubtree, docopy) in pycompat.iteritems(
- self._lazydirs
- ):
+ for d, (node, readsubtree, docopy) in self._lazydirs.items():
if docopy:
selfdirs[d] = readsubtree(subpath(d), node).copy()
else:
@@ -868,11 +865,11 @@
differs, load it in both
"""
toloadlazy = []
- for d, v1 in pycompat.iteritems(t1._lazydirs):
+ for d, v1 in t1._lazydirs.items():
v2 = t2._lazydirs.get(d)
if not v2 or v2[0] != v1[0]:
toloadlazy.append(d)
- for d, v1 in pycompat.iteritems(t2._lazydirs):
+ for d, v1 in t2._lazydirs.items():
if d not in t1._lazydirs:
toloadlazy.append(d)
@@ -954,7 +951,7 @@
if p in self._files:
yield self._subpath(p), n
else:
- for f, sn in pycompat.iteritems(n):
+ for f, sn in n.items():
yield f, sn
iteritems = items
@@ -1105,11 +1102,10 @@
def _copyfunc(s):
self._load()
s._lazydirs = {
- d: (n, r, True)
- for d, (n, r, c) in pycompat.iteritems(self._lazydirs)
+ d: (n, r, True) for d, (n, r, c) in self._lazydirs.items()
}
sdirs = s._dirs
- for d, v in pycompat.iteritems(self._dirs):
+ for d, v in self._dirs.items():
sdirs[d] = v.copy()
s._files = dict.copy(self._files)
s._flags = dict.copy(self._flags)
@@ -1137,7 +1133,7 @@
t1._load()
t2._load()
self._loaddifflazy(t1, t2)
- for d, m1 in pycompat.iteritems(t1._dirs):
+ for d, m1 in t1._dirs.items():
if d in t2._dirs:
m2 = t2._dirs[d]
_filesnotin(m1, m2)
@@ -1250,7 +1246,7 @@
ret._flags[fn] = self._flags[fn]
visit = self._loadchildrensetlazy(visit)
- for dir, subm in pycompat.iteritems(self._dirs):
+ for dir, subm in self._dirs.items():
if visit and dir[:-1] not in visit:
continue
m = subm._matches_inner(match)
@@ -1295,15 +1291,15 @@
t2._load()
self._loaddifflazy(t1, t2)
- for d, m1 in pycompat.iteritems(t1._dirs):
+ for d, m1 in t1._dirs.items():
m2 = t2._dirs.get(d, emptytree)
stack.append((m1, m2))
- for d, m2 in pycompat.iteritems(t2._dirs):
+ for d, m2 in t2._dirs.items():
if d not in t1._dirs:
stack.append((emptytree, m2))
- for fn, n1 in pycompat.iteritems(t1._files):
+ for fn, n1 in t1._files.items():
fl1 = t1._flags.get(fn, b'')
n2 = t2._files.get(fn, None)
fl2 = t2._flags.get(fn, b'')
@@ -1312,7 +1308,7 @@
elif clean:
result[t1._subpath(fn)] = None
- for fn, n2 in pycompat.iteritems(t2._files):
+ for fn, n2 in t2._files.items():
if fn not in t1._files:
fl2 = t2._flags.get(fn, b'')
result[t2._subpath(fn)] = ((None, b''), (n2, fl2))
@@ -1362,9 +1358,7 @@
"""
self._load()
flags = self.flags
- lazydirs = [
- (d[:-1], v[0], b't') for d, v in pycompat.iteritems(self._lazydirs)
- ]
+ lazydirs = [(d[:-1], v[0], b't') for d, v in self._lazydirs.items()]
dirs = [(d[:-1], self._dirs[d]._node, b't') for d in self._dirs]
files = [(f, self._files[f], flags(f)) for f in self._files]
return _text(sorted(dirs + files + lazydirs))
@@ -1393,7 +1387,7 @@
visit = self._loadchildrensetlazy(visit)
if visit == b'this' or visit == b'all':
visit = None
- for d, subm in pycompat.iteritems(self._dirs):
+ for d, subm in self._dirs.items():
if visit and d[:-1] not in visit:
continue
subp1 = getnode(m1, d)
@@ -1416,7 +1410,7 @@
self._load()
# OPT: use visitchildrenset to avoid loading everything.
self._loadalllazy()
- for d, subm in pycompat.iteritems(self._dirs):
+ for d, subm in self._dirs.items():
for subtree in subm.walksubtrees(matcher=matcher):
yield subtree
@@ -1556,7 +1550,7 @@
@interfaceutil.implementer(repository.imanifeststorage)
-class manifestrevlog(object):
+class manifestrevlog:
"""A revlog that stores manifest texts. This is responsible for caching the
full-text manifest contents.
"""
@@ -1595,7 +1589,7 @@
self._fulltextcache = manifestfulltextcache(cachesize)
if tree:
- assert self._treeondisk, b'opts is %r' % opts
+ assert self._treeondisk, (tree, b'opts is %r' % opts)
radix = b'00manifest'
if tree:
@@ -1914,7 +1908,7 @@
@interfaceutil.implementer(repository.imanifestlog)
-class manifestlog(object):
+class manifestlog:
"""A collection class representing the collection of manifest snapshots
referenced by commits in the repository.
@@ -2013,7 +2007,7 @@
@interfaceutil.implementer(repository.imanifestrevisionwritable)
-class memmanifestctx(object):
+class memmanifestctx:
def __init__(self, manifestlog):
self._manifestlog = manifestlog
self._manifestdict = manifestdict(manifestlog.nodeconstants.nodelen)
@@ -2043,7 +2037,7 @@
@interfaceutil.implementer(repository.imanifestrevisionstored)
-class manifestctx(object):
+class manifestctx:
"""A class representing a single revision of a manifest, including its
contents, its parent revs, and its linkrev.
"""
@@ -2123,7 +2117,7 @@
@interfaceutil.implementer(repository.imanifestrevisionwritable)
-class memtreemanifestctx(object):
+class memtreemanifestctx:
def __init__(self, manifestlog, dir=b''):
self._manifestlog = manifestlog
self._dir = dir
@@ -2158,7 +2152,7 @@
@interfaceutil.implementer(repository.imanifestrevisionstored)
-class treemanifestctx(object):
+class treemanifestctx:
def __init__(self, manifestlog, dir, node):
self._manifestlog = manifestlog
self._dir = dir
@@ -2249,7 +2243,7 @@
m0 = self._manifestlog.get(self._dir, store.node(r0)).read()
m1 = self.read()
md = treemanifest(self._manifestlog.nodeconstants, dir=self._dir)
- for f, ((n0, fl0), (n1, fl1)) in pycompat.iteritems(m0.diff(m1)):
+ for f, ((n0, fl0), (n1, fl1)) in m0.diff(m1).items():
if n1:
md[f] = n1
if fl1:
--- a/mercurial/match.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/match.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import bisect
import copy
@@ -383,7 +382,7 @@
return kindpats
-class basematcher(object):
+class basematcher:
def __init__(self, badfn=None):
if badfn is not None:
self.bad = badfn
@@ -584,10 +583,7 @@
if b'' in prefix_set:
return True
- if pycompat.ispy3:
- sl = ord(b'/')
- else:
- sl = '/'
+ sl = ord(b'/')
# We already checked that path isn't in prefix_set exactly, so
# `path[len(pf)] should never raise IndexError.
@@ -663,7 +659,7 @@
# This is basically a reimplementation of pathutil.dirs that stores the
# children instead of just a count of them, plus a small optional optimization
# to avoid some directories we don't need.
-class _dirchildren(object):
+class _dirchildren:
def __init__(self, paths, onlyinclude=None):
self._dirs = {}
self._onlyinclude = onlyinclude or []
@@ -1615,7 +1611,7 @@
patterns = []
fp = open(filepath, b'rb')
- for lineno, line in enumerate(util.iterfile(fp), start=1):
+ for lineno, line in enumerate(fp, start=1):
if b"#" in line:
global _commentre
if not _commentre:
@@ -1642,7 +1638,7 @@
continue
linesyntax = syntax
- for s, rels in pycompat.iteritems(syntaxes):
+ for s, rels in syntaxes.items():
if line.startswith(rels):
linesyntax = rels
line = line[len(rels) :]
--- a/mercurial/mdiff.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/mdiff.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import re
import struct
@@ -38,7 +37,7 @@
# TODO: this looks like it could be an attrs, which might help pytype
-class diffopts(object):
+class diffopts:
"""context is the number of context lines
text treats all files as text
showfunc enables diff -p output
@@ -379,7 +378,7 @@
# walk backwards from the start of the context up to the start of
# the previous hunk context until we find a line starting with an
# alphanumeric char.
- for i in pycompat.xrange(astart - 1, lastpos - 1, -1):
+ for i in range(astart - 1, lastpos - 1, -1):
if l1[i][0:1].isalnum():
func = b' ' + l1[i].rstrip()
# split long function name if ASCII. otherwise we have no
@@ -403,7 +402,7 @@
hunklines = (
[b"@@ -%d,%d +%d,%d @@%s\n" % (hunkrange + (func,))]
+ delta
- + [b' ' + l1[x] for x in pycompat.xrange(a2, aend)]
+ + [b' ' + l1[x] for x in range(a2, aend)]
)
# If either file ends without a newline and the last line of
# that file is part of a hunk, a marker is printed. If the
@@ -412,7 +411,7 @@
# which the hunk can end in a shared line without a newline.
skip = False
if not t1.endswith(b'\n') and astart + alen == len(l1) + 1:
- for i in pycompat.xrange(len(hunklines) - 1, -1, -1):
+ for i in range(len(hunklines) - 1, -1, -1):
if hunklines[i].startswith((b'-', b' ')):
if hunklines[i].startswith(b' '):
skip = True
@@ -420,7 +419,7 @@
hunklines.insert(i + 1, diffhelper.MISSING_NEWLINE_MARKER)
break
if not skip and not t2.endswith(b'\n') and bstart + blen == len(l2) + 1:
- for i in pycompat.xrange(len(hunklines) - 1, -1, -1):
+ for i in range(len(hunklines) - 1, -1, -1):
if hunklines[i].startswith(b'+'):
hunklines[i] += b'\n'
hunklines.insert(i + 1, diffhelper.MISSING_NEWLINE_MARKER)
--- a/mercurial/merge.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/merge.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,10 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
-import errno
import struct
from .i18n import _
@@ -67,7 +65,7 @@
)
-class _unknowndirschecker(object):
+class _unknowndirschecker:
"""
Look for any unknown files or directories that may have a path conflict
with a file. If any path prefix of the file exists as a file or link,
@@ -538,7 +536,7 @@
raise error.StateError(msg % f)
-class mergeresult(object):
+class mergeresult:
"""An object representing result of merging manifests.
It has information about what actions need to be performed on dirstate
@@ -626,9 +624,7 @@
args, msg = self._actionmapping[a][f]
yield f, args, msg
else:
- for f, (args, msg) in pycompat.iteritems(
- self._actionmapping[a]
- ):
+ for f, (args, msg) in self._actionmapping[a].items():
yield f, args, msg
def len(self, actions=None):
@@ -644,10 +640,10 @@
def filemap(self, sort=False):
if sorted:
- for key, val in sorted(pycompat.iteritems(self._filemapping)):
+ for key, val in sorted(self._filemapping.items()):
yield key, val
else:
- for key, val in pycompat.iteritems(self._filemapping):
+ for key, val in self._filemapping.items():
yield key, val
def addcommitinfo(self, filename, key, value):
@@ -672,15 +668,15 @@
"""returns a dictionary of actions to be perfomed with action as key
and a list of files and related arguments as values"""
res = collections.defaultdict(list)
- for a, d in pycompat.iteritems(self._actionmapping):
- for f, (args, msg) in pycompat.iteritems(d):
+ for a, d in self._actionmapping.items():
+ for f, (args, msg) in d.items():
res[a].append((f, args, msg))
return res
def setactions(self, actions):
self._filemapping = actions
self._actionmapping = collections.defaultdict(dict)
- for f, (act, data, msg) in pycompat.iteritems(self._filemapping):
+ for f, (act, data, msg) in self._filemapping.items():
self._actionmapping[act][f] = data, msg
def hasconflicts(self):
@@ -787,7 +783,7 @@
relevantfiles = set(ma.diff(m2).keys())
# For copied and moved files, we need to add the source file too.
- for copykey, copyvalue in pycompat.iteritems(branch_copies1.copy):
+ for copykey, copyvalue in branch_copies1.copy.items():
if copyvalue in relevantfiles:
relevantfiles.add(copykey)
for movedirkey in branch_copies1.movewithdir:
@@ -797,7 +793,7 @@
diff = m1.diff(m2, match=matcher)
- for f, ((n1, fl1), (n2, fl2)) in pycompat.iteritems(diff):
+ for f, ((n1, fl1), (n2, fl2)) in diff.items():
if n1 and n2: # file exists on both local and remote side
if f not in ma:
# TODO: what if they're renamed from different sources?
@@ -1309,10 +1305,8 @@
def _getcwd():
try:
return encoding.getcwd()
- except OSError as err:
- if err.errno == errno.ENOENT:
- return None
- raise
+ except FileNotFoundError:
+ return None
def batchremove(repo, wctx, actions):
@@ -1470,7 +1464,7 @@
@attr.s(frozen=True)
-class updateresult(object):
+class updateresult:
updatedcount = attr.ib()
mergedcount = attr.ib()
removedcount = attr.ib()
@@ -1512,7 +1506,7 @@
ms = wctx.mergestate(clean=True)
ms.start(wctx.p1().node(), mctx.node(), labels)
- for f, op in pycompat.iteritems(mresult.commitinfo):
+ for f, op in mresult.commitinfo.items():
# the other side of filenode was choosen while merging, store this in
# mergestate so that it can be reused on commit
ms.addcommitinfo(f, op)
@@ -2073,7 +2067,7 @@
_checkcollision(repo, wc.manifest(), mresult)
# divergent renames
- for f, fl in sorted(pycompat.iteritems(mresult.diverge)):
+ for f, fl in sorted(mresult.diverge.items()):
repo.ui.warn(
_(
b"note: possible conflict - %s was renamed "
@@ -2085,7 +2079,7 @@
repo.ui.warn(b" %s\n" % nf)
# rename and delete
- for f, fl in sorted(pycompat.iteritems(mresult.renamedelete)):
+ for f, fl in sorted(mresult.renamedelete.items()):
repo.ui.warn(
_(
b"note: possible conflict - %s was deleted "
@@ -2125,7 +2119,7 @@
if updatedirstate:
if extraactions:
- for k, acts in pycompat.iteritems(extraactions):
+ for k, acts in extraactions.items():
for a in acts:
mresult.addfile(a[0], k, *a[1:])
if k == mergestatemod.ACTION_GET and wantfiledata:
@@ -2196,10 +2190,10 @@
getfiledata = None
else:
now_sec = now[0]
- for f, m in pycompat.iteritems(getfiledata):
+ for f, m in getfiledata.items():
if m is not None and m[2][0] >= now_sec:
ambiguous_mtime[f] = (m[0], m[1], None)
- for f, m in pycompat.iteritems(ambiguous_mtime):
+ for f, m in ambiguous_mtime.items():
getfiledata[f] = m
repo.setparents(fp1, fp2)
--- a/mercurial/mergestate.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/mergestate.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,4 @@
-from __future__ import absolute_import
-
import collections
-import errno
import shutil
import struct
import weakref
@@ -15,7 +12,6 @@
from . import (
error,
filemerge,
- pycompat,
util,
)
from .utils import hashutil
@@ -103,7 +99,7 @@
CHANGE_MODIFIED = b'modified'
-class MergeAction(object):
+class MergeAction:
"""represent an "action" merge need to take for a given file
Attributes:
@@ -197,7 +193,7 @@
)
-class _mergestate_base(object):
+class _mergestate_base:
"""track 3-way merge state of individual files
The merge state is stored on disk when needed. Two files are used: one with
@@ -365,7 +361,7 @@
def unresolved(self):
"""Obtain the paths of unresolved files."""
- for f, entry in pycompat.iteritems(self._state):
+ for f, entry in self._state.items():
if entry[0] in (
MERGE_RECORD_UNRESOLVED,
MERGE_RECORD_UNRESOLVED_PATH,
@@ -469,7 +465,7 @@
"""return counts for updated, merged and removed files in this
session"""
updated, merged, removed = 0, 0, 0
- for r, action in pycompat.itervalues(self._results):
+ for r, action in self._results.values():
if r is None:
updated += 1
elif r == 0:
@@ -492,7 +488,7 @@
ACTION_ADD_MODIFIED: [],
ACTION_GET: [],
}
- for f, (r, action) in pycompat.iteritems(self._results):
+ for f, (r, action) in self._results.items():
if action is not None:
actions[action].append((f, None, b"merge result"))
return actions
@@ -632,9 +628,8 @@
else:
records.append((RECORD_MERGED, l[:-1]))
f.close()
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
return records
def _readrecordsv2(self):
@@ -672,9 +667,8 @@
rtype, record = record[0:1], record[1:]
records.append((rtype, record))
f.close()
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
return records
def commit(self):
@@ -692,7 +686,7 @@
# the type of state that is stored, and capital-letter records are used
# to prevent older versions of Mercurial that do not support the feature
# from loading them.
- for filename, v in pycompat.iteritems(self._state):
+ for filename, v in self._state.items():
if v[0] in (
MERGE_RECORD_UNRESOLVED_PATH,
MERGE_RECORD_RESOLVED_PATH,
@@ -716,9 +710,9 @@
else:
# Normal files. These are stored in 'F' records.
records.append((RECORD_MERGED, b'\0'.join([filename] + v)))
- for filename, extras in sorted(pycompat.iteritems(self._stateextras)):
+ for filename, extras in sorted(self._stateextras.items()):
rawextras = b'\0'.join(
- b'%s\0%s' % (k, v) for k, v in pycompat.iteritems(extras)
+ b'%s\0%s' % (k, v) for k, v in extras.items()
)
records.append(
(RECORD_FILE_VALUES, b'%s\0%s' % (filename, rawextras))
--- a/mercurial/mergeutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/mergeutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
--- a/mercurial/metadata.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/metadata.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import multiprocessing
import struct
@@ -23,7 +22,7 @@
)
-class ChangingFiles(object):
+class ChangingFiles:
"""A class recording the changes made to files by a changeset
Actions performed on files are gathered into 3 sets:
--- a/mercurial/minifileset.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/minifileset.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from . import (
--- a/mercurial/minirst.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/minirst.py Thu Jun 16 15:28:54 2022 +0200
@@ -18,7 +18,6 @@
when adding support for new constructs.
"""
-from __future__ import absolute_import
import re
@@ -350,7 +349,7 @@
# position in bytes
columns = [
x
- for x in pycompat.xrange(len(div))
+ for x in range(len(div))
if div[x : x + 1] == b'=' and (x == 0 or div[x - 1 : x] == b' ')
]
rows = []
@@ -770,7 +769,7 @@
if llen and llen != plen:
collapse = False
s = []
- for j in pycompat.xrange(3, plen - 1):
+ for j in range(3, plen - 1):
parent = parents[j]
if j >= llen or lastparents[j] != parent:
s.append(len(blocks))
--- a/mercurial/namespaces.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/namespaces.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,8 +1,5 @@
-from __future__ import absolute_import
-
from .i18n import _
from . import (
- pycompat,
registrar,
templatekw,
util,
@@ -19,7 +16,7 @@
return [val]
-class namespaces(object):
+class namespaces:
"""provides an interface to register and operate on multiple namespaces. See
the namespace class below for details on the namespace object.
@@ -87,7 +84,7 @@
return self._names.get(namespace, default)
def items(self):
- return pycompat.iteritems(self._names)
+ return self._names.items()
iteritems = items
@@ -120,14 +117,14 @@
Raises a KeyError if there is no such node.
"""
- for ns, v in pycompat.iteritems(self._names):
+ for ns, v in self._names.items():
n = v.singlenode(repo, name)
if n:
return n
raise KeyError(_(b'no such name: %s') % name)
-class namespace(object):
+class namespace:
"""provides an interface to a namespace
Namespaces are basically generic many-to-many mapping between some
--- a/mercurial/narrowspec.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/narrowspec.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from .pycompat import getattr
--- a/mercurial/node.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/node.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,20 +5,12 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import binascii
# This ugly style has a noticeable effect in manifest parsing
hex = binascii.hexlify
-# Adapt to Python 3 API changes. If this ends up showing up in
-# profiles, we can use this version only on Python 3, and forward
-# binascii.unhexlify like we used to on Python 2.
-def bin(s):
- try:
- return binascii.unhexlify(s)
- except binascii.Error as e:
- raise TypeError(e)
+bin = binascii.unhexlify
def short(node):
@@ -32,7 +24,7 @@
wdirrev = 0x7FFFFFFF
-class sha1nodeconstants(object):
+class sha1nodeconstants:
nodelen = 20
# In hex, this is '0000000000000000000000000000000000000000'
--- a/mercurial/obsolete.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/obsolete.py Thu Jun 16 15:28:54 2022 +0200
@@ -67,9 +67,8 @@
comment associated with each format for details.
"""
-from __future__ import absolute_import
-import errno
+import binascii
import struct
from .i18n import _
@@ -245,11 +244,11 @@
if len(p) != 20:
parents = None
break
- except TypeError:
+ except binascii.Error:
# if content cannot be translated to nodeid drop the data.
parents = None
- metadata = tuple(sorted(pycompat.iteritems(metadata)))
+ metadata = tuple(sorted(metadata.items()))
yield (pre, sucs, flags, metadata, date, parents)
@@ -279,7 +278,7 @@
"""Return encoded metadata string to string mapping.
Assume no ':' in key and no '\0' in both key and value."""
- for key, value in pycompat.iteritems(meta):
+ for key, value in meta.items():
if b':' in key or b'\0' in key:
raise ValueError(b"':' and '\0' are forbidden in metadata key'")
if b'\0' in value:
@@ -339,8 +338,6 @@
_fm1nodesha256size = _calcsize(_fm1nodesha256)
_fm1fsize = _calcsize(_fm1fixed)
_fm1parentnone = 3
-_fm1parentshift = 14
-_fm1parentmask = _fm1parentnone << _fm1parentshift
_fm1metapair = b'BB'
_fm1metapairsize = _calcsize(_fm1metapair)
@@ -399,7 +396,7 @@
off = o3 + metasize * nummeta
metapairsize = unpack(b'>' + (metafmt * nummeta), data[o3:off])
metadata = []
- for idx in pycompat.xrange(0, len(metapairsize), 2):
+ for idx in range(0, len(metapairsize), 2):
o1 = off + metapairsize[idx]
o2 = o1 + metapairsize[idx + 1]
metadata.append((data[off:o1], data[o1:o2]))
@@ -542,7 +539,7 @@
)
-class obsstore(object):
+class obsstore:
"""Store obsolete markers
Markers can be accessed with two mappings:
@@ -584,11 +581,10 @@
if not self._cached('_all'):
try:
return self.svfs.stat(b'obsstore').st_size > 1
- except OSError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
# just build an empty _all list if no obsstore exists, which
# avoids further stat() syscalls
+ pass
return bool(self._all)
__bool__ = __nonzero__
@@ -649,11 +645,9 @@
if len(succ) != 20:
raise ValueError(succ)
if prec in succs:
- raise ValueError(
- 'in-marker cycle with %s' % pycompat.sysstr(hex(prec))
- )
+ raise ValueError('in-marker cycle with %s' % prec.hex())
- metadata = tuple(sorted(pycompat.iteritems(metadata)))
+ metadata = tuple(sorted(metadata.items()))
for k, v in metadata:
try:
# might be better to reject non-ASCII keys
--- a/mercurial/obsutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/obsutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import re
@@ -19,7 +18,6 @@
encoding,
error,
phases,
- pycompat,
util,
)
from .utils import dateutil
@@ -58,7 +56,7 @@
usingsha256 = 2
-class marker(object):
+class marker:
"""Wrap obsolete marker raw data"""
def __init__(self, repo, data):
@@ -998,7 +996,7 @@
base[tuple(nsuccset)] = n
return [
{b'divergentnodes': divset, b'commonpredecessor': b}
- for divset, b in pycompat.iteritems(base)
+ for divset, b in base.items()
]
--- a/mercurial/parser.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/parser.py Thu Jun 16 15:28:54 2022 +0200
@@ -16,7 +16,6 @@
# an action is a tree node name, a tree label, and an optional match
# __call__(program) parses program into a labeled tree
-from __future__ import absolute_import, print_function
from .i18n import _
from . import (
@@ -26,7 +25,7 @@
from .utils import stringutil
-class parser(object):
+class parser:
def __init__(self, elements, methods=None):
self._elements = elements
self._methods = methods
@@ -416,7 +415,7 @@
return inst.message
-class alias(object):
+class alias:
"""Parsed result of alias"""
def __init__(self, name, args, err, replacement):
@@ -430,7 +429,7 @@
self.warned = False
-class basealiasrules(object):
+class basealiasrules:
"""Parsing and expansion rule set of aliases
This is a helper for fileset/revset/template aliases. A concrete rule set
--- a/mercurial/patch.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/patch.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,12 +6,10 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import collections
import contextlib
import copy
-import errno
import os
import re
import shutil
@@ -150,7 +148,7 @@
def remainder(cur):
yield chunk(cur)
- class fiter(object):
+ class fiter:
def __init__(self, fp):
self.fp = fp
@@ -343,7 +341,7 @@
return data
-class patchmeta(object):
+class patchmeta:
"""Patched file metadata
'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
@@ -436,7 +434,7 @@
return gitpatches
-class linereader(object):
+class linereader:
# simple class to allow pushing lines back into the input stream
def __init__(self, fp):
self.fp = fp
@@ -457,7 +455,7 @@
return iter(self.readline, b'')
-class abstractbackend(object):
+class abstractbackend:
def __init__(self, ui):
self.ui = ui
@@ -504,14 +502,11 @@
isexec = False
try:
isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
try:
return (self.opener.read(fname), (False, isexec))
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return None, None
def setfile(self, fname, data, mode, copysource):
@@ -593,7 +588,7 @@
return sorted(self.changed)
-class filestore(object):
+class filestore:
def __init__(self, maxsize=None):
self.opener = None
self.files = {}
@@ -682,7 +677,7 @@
eolmodes = [b'strict', b'crlf', b'lf', b'auto']
-class patchfile(object):
+class patchfile:
def __init__(self, ui, gp, backend, store, eolmode=b'strict'):
self.fname = gp.path
self.eolmode = eolmode
@@ -865,9 +860,7 @@
for x, s in enumerate(self.lines):
self.hash.setdefault(s, []).append(x)
- for fuzzlen in pycompat.xrange(
- self.ui.configint(b"patch", b"fuzz") + 1
- ):
+ for fuzzlen in range(self.ui.configint(b"patch", b"fuzz") + 1):
for toponly in [True, False]:
old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
oldstart = oldstart + self.offset + self.skew
@@ -915,7 +908,7 @@
return len(self.rej)
-class header(object):
+class header:
"""patch header"""
diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
@@ -995,7 +988,7 @@
)
-class recordhunk(object):
+class recordhunk:
"""patch hunk
XXX shouldn't we merge this with the other hunk class?
@@ -1260,7 +1253,7 @@
# Remove comment lines
patchfp = open(patchfn, 'rb')
ncpatchfp = stringio()
- for line in util.iterfile(patchfp):
+ for line in patchfp:
line = util.fromnativeeol(line)
if not line.startswith(b'#'):
ncpatchfp.write(line)
@@ -1343,18 +1336,14 @@
fixoffset += chunk.removed - chunk.added
return (
sum(
- [
- h
- for h in pycompat.itervalues(applied)
- if h[0].special() or len(h) > 1
- ],
+ [h for h in applied.values() if h[0].special() or len(h) > 1],
[],
),
{},
)
-class hunk(object):
+class hunk:
def __init__(self, desc, num, lr, context):
self.number = num
self.desc = desc
@@ -1436,7 +1425,7 @@
self.lena = int(aend) - self.starta
if self.starta:
self.lena += 1
- for x in pycompat.xrange(self.lena):
+ for x in range(self.lena):
l = lr.readline()
if l.startswith(b'---'):
# lines addition, old block is empty
@@ -1471,7 +1460,7 @@
if self.startb:
self.lenb += 1
hunki = 1
- for x in pycompat.xrange(self.lenb):
+ for x in range(self.lenb):
l = lr.readline()
if l.startswith(br'\ '):
# XXX: the only way to hit this is with an invalid line range.
@@ -1552,14 +1541,14 @@
top = 0
bot = 0
hlen = len(self.hunk)
- for x in pycompat.xrange(hlen - 1):
+ for x in range(hlen - 1):
# the hunk starts with the @@ line, so use x+1
if self.hunk[x + 1].startswith(b' '):
top += 1
else:
break
if not toponly:
- for x in pycompat.xrange(hlen - 1):
+ for x in range(hlen - 1):
if self.hunk[hlen - bot - 1].startswith(b' '):
bot += 1
else:
@@ -1582,7 +1571,7 @@
return old, oldstart, new, newstart
-class binhunk(object):
+class binhunk:
"""A binary patch file."""
def __init__(self, lr, fname):
@@ -1763,7 +1752,7 @@
+9
"""
- class parser(object):
+ class parser:
"""patch parsing state machine"""
def __init__(self):
@@ -2348,7 +2337,7 @@
ui.debug(b'Using external patch tool: %s\n' % cmd)
fp = procutil.popen(cmd, b'rb')
try:
- for line in util.iterfile(fp):
+ for line in fp:
line = line.rstrip()
ui.note(line + b'\n')
if line.startswith(b'patching file '):
@@ -2644,11 +2633,7 @@
if copysourcematch:
# filter out copies where source side isn't inside the matcher
# (copies.pathcopies() already filtered out the destination)
- copy = {
- dst: src
- for dst, src in pycompat.iteritems(copy)
- if copysourcematch(src)
- }
+ copy = {dst: src for dst, src in copy.items() if copysourcematch(src)}
modifiedset = set(modified)
addedset = set(added)
--- a/mercurial/pathutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pathutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import contextlib
import errno
import os
@@ -33,7 +31,7 @@
return encoding.hfsignoreclean(s.lower())
-class pathauditor(object):
+class pathauditor:
"""ensure that a filesystem path contains no banned components.
the following properties of a path are checked:
@@ -316,7 +314,7 @@
yield b''
-class dirs(object):
+class dirs:
'''a multiset of directory names from a set of file paths'''
def __init__(self, map, only_tracked=False):
@@ -326,7 +324,7 @@
self._dirs = {}
addpath = self.addpath
if isinstance(map, dict) and only_tracked:
- for f, s in pycompat.iteritems(map):
+ for f, s in map.items():
if s.state != b'r':
addpath(f)
elif only_tracked:
--- a/mercurial/phases.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/phases.py Thu Jun 16 15:28:54 2022 +0200
@@ -100,9 +100,7 @@
"""
-from __future__ import absolute_import
-import errno
import struct
from .i18n import _
@@ -203,9 +201,7 @@
roots[int(phase)].add(bin(nh))
finally:
f.close()
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
if phasedefaults:
for f in phasedefaults:
roots = f(repo, roots)
@@ -220,7 +216,7 @@
The revision lists are encoded as (phase, root) pairs.
"""
binarydata = []
- for phase, nodes in pycompat.iteritems(phasemapping):
+ for phase, nodes in phasemapping.items():
for head in nodes:
binarydata.append(_fphasesentry.pack(phase, head))
return b''.join(binarydata)
@@ -256,14 +252,14 @@
merge_after = r2[0] == rev + 1 and t2 == t
if merge_before and merge_after:
- data[idx - 1] = (pycompat.xrange(r1[0], r2[-1] + 1), t)
+ data[idx - 1] = (range(r1[0], r2[-1] + 1), t)
data.pop(idx)
elif merge_before:
- data[idx - 1] = (pycompat.xrange(r1[0], rev + 1), t)
+ data[idx - 1] = (range(r1[0], rev + 1), t)
elif merge_after:
- data[idx] = (pycompat.xrange(rev, r2[-1] + 1), t)
+ data[idx] = (range(rev, r2[-1] + 1), t)
else:
- data.insert(idx, (pycompat.xrange(rev, rev + 1), t))
+ data.insert(idx, (range(rev, rev + 1), t))
def _sortedrange_split(data, idx, rev, t):
@@ -275,16 +271,16 @@
data.pop(idx)
_sortedrange_insert(data, idx, rev, t)
elif r1[0] == rev:
- data[idx] = (pycompat.xrange(rev + 1, r1[-1] + 1), t1)
+ data[idx] = (range(rev + 1, r1[-1] + 1), t1)
_sortedrange_insert(data, idx, rev, t)
elif r1[-1] == rev:
- data[idx] = (pycompat.xrange(r1[0], rev), t1)
+ data[idx] = (range(r1[0], rev), t1)
_sortedrange_insert(data, idx + 1, rev, t)
else:
data[idx : idx + 1] = [
- (pycompat.xrange(r1[0], rev), t1),
- (pycompat.xrange(rev, rev + 1), t),
- (pycompat.xrange(rev + 1, r1[-1] + 1), t1),
+ (range(r1[0], rev), t1),
+ (range(rev, rev + 1), t),
+ (range(rev + 1, r1[-1] + 1), t1),
]
@@ -298,7 +294,7 @@
# If data is empty, create a one-revision range and done
if not data:
- data.insert(0, (pycompat.xrange(rev, rev + 1), (old, new)))
+ data.insert(0, (range(rev, rev + 1), (old, new)))
return
low = 0
@@ -334,17 +330,17 @@
low = mid + 1
if low == len(data):
- data.append((pycompat.xrange(rev, rev + 1), t))
+ data.append((range(rev, rev + 1), t))
return
r1, t1 = data[low]
if r1[0] > rev:
- data.insert(low, (pycompat.xrange(rev, rev + 1), t))
+ data.insert(low, (range(rev, rev + 1), t))
else:
- data.insert(low + 1, (pycompat.xrange(rev, rev + 1), t))
+ data.insert(low + 1, (range(rev, rev + 1), t))
-class phasecache(object):
+class phasecache:
def __init__(self, repo, phasedefaults, _load=True):
# type: (localrepo.localrepository, Optional[Phasedefaults], bool) -> None
if _load:
@@ -364,9 +360,7 @@
self.invalidate()
self.loadphaserevs(repo)
return any(
- revs
- for phase, revs in pycompat.iteritems(self.phaseroots)
- if phase != public
+ revs for phase, revs in self.phaseroots.items() if phase != public
)
def nonpublicphaseroots(self, repo):
@@ -384,7 +378,7 @@
return set().union(
*[
revs
- for phase, revs in pycompat.iteritems(self.phaseroots)
+ for phase, revs in self.phaseroots.items()
if phase != public
]
)
@@ -529,7 +523,7 @@
f.close()
def _write(self, fp):
- for phase, roots in pycompat.iteritems(self.phaseroots):
+ for phase, roots in self.phaseroots.items():
for h in sorted(roots):
fp.write(b'%i %s\n' % (phase, hex(h)))
self.dirty = False
@@ -613,7 +607,7 @@
def retractboundary(self, repo, tr, targetphase, nodes):
oldroots = {
phase: revs
- for phase, revs in pycompat.iteritems(self.phaseroots)
+ for phase, revs in self.phaseroots.items()
if phase <= targetphase
}
if tr is None:
@@ -632,7 +626,7 @@
affected = set(repo.revs(b'(%ln::) - (%ln::)', new, old))
# find the phase of the affected revision
- for phase in pycompat.xrange(targetphase, -1, -1):
+ for phase in range(targetphase, -1, -1):
if phase:
roots = oldroots.get(phase, [])
revs = set(repo.revs(b'%ln::%ld', roots, affected))
@@ -691,7 +685,7 @@
"""
filtered = False
has_node = repo.changelog.index.has_node # to filter unknown nodes
- for phase, nodes in pycompat.iteritems(self.phaseroots):
+ for phase, nodes in self.phaseroots.items():
missing = sorted(node for node in nodes if not has_node(node))
if missing:
for mnode in missing:
@@ -855,7 +849,7 @@
# build list from dictionary
draftroots = []
has_node = repo.changelog.index.has_node # to filter unknown nodes
- for nhex, phase in pycompat.iteritems(roots):
+ for nhex, phase in roots.items():
if nhex == b'publishing': # ignore data related to publish option
continue
node = bin(nhex)
@@ -882,7 +876,7 @@
return publicheads, draftroots
-class remotephasessummary(object):
+class remotephasessummary:
"""summarize phase information on the remote side
:publishing: True is the remote is publishing
--- a/mercurial/policy.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/policy.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
import sys
@@ -54,11 +53,8 @@
policy = b'cffi'
# Environment variable can always force settings.
-if sys.version_info[0] >= 3:
- if 'HGMODULEPOLICY' in os.environ:
- policy = os.environ['HGMODULEPOLICY'].encode('utf-8')
-else:
- policy = os.environ.get('HGMODULEPOLICY', policy)
+if 'HGMODULEPOLICY' in os.environ:
+ policy = os.environ['HGMODULEPOLICY'].encode('utf-8')
def _importfrom(pkgname, modname):
--- a/mercurial/posix.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/posix.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
import fcntl
@@ -60,21 +59,7 @@
umask = os.umask(0)
os.umask(umask)
-if not pycompat.ispy3:
-
- def posixfile(name, mode='r', buffering=-1):
- fp = open(name, mode=mode, buffering=buffering)
- # The position when opening in append mode is implementation defined, so
- # make it consistent by always seeking to the end.
- if 'a' in mode:
- fp.seek(0, os.SEEK_END)
- return fp
-
-
-else:
- # The underlying file object seeks as required in Python 3:
- # https://github.com/python/cpython/blob/v3.7.3/Modules/_io/fileio.c#L474
- posixfile = open
+posixfile = open
def split(p):
@@ -190,9 +175,7 @@
using umask."""
try:
st_mode = os.lstat(src).st_mode & 0o777
- except OSError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
st_mode = mode
if st_mode is None:
st_mode = ~umask
@@ -241,19 +224,16 @@
try:
m = os.stat(checkisexec).st_mode
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
# checkisexec does not exist - fall through ...
+ pass
else:
# checkisexec exists, check if it actually is exec
if m & EXECFLAGS != 0:
# ensure checkisexec exists, check it isn't exec
try:
m = os.stat(checknoexec).st_mode
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
open(checknoexec, b'w').close() # might fail
m = os.stat(checknoexec).st_mode
if m & EXECFLAGS == 0:
@@ -322,18 +302,13 @@
try:
fullpath = os.path.join(cachedir, target)
open(fullpath, b'w').close()
- except IOError as inst:
- # pytype: disable=unsupported-operands
- if inst[0] == errno.EACCES:
- # pytype: enable=unsupported-operands
-
- # If we can't write to cachedir, just pretend
- # that the fs is readonly and by association
- # that the fs won't support symlinks. This
- # seems like the least dangerous way to avoid
- # data loss.
- return False
- raise
+ except PermissionError:
+ # If we can't write to cachedir, just pretend
+ # that the fs is readonly and by association
+ # that the fs won't support symlinks. This
+ # seems like the least dangerous way to avoid
+ # data loss.
+ return False
try:
os.symlink(target, name)
if cachedir is None:
@@ -344,11 +319,9 @@
except OSError:
unlink(name)
return True
- except OSError as inst:
+ except FileExistsError:
# link creation might race, try again
- if inst.errno == errno.EEXIST:
- continue
- raise
+ continue
finally:
if fd is not None:
fd.close()
@@ -608,9 +581,7 @@
st = lstat(nf)
if getkind(st.st_mode) not in _wantedkinds:
st = None
- except OSError as err:
- if err.errno not in (errno.ENOENT, errno.ENOTDIR):
- raise
+ except (FileNotFoundError, NotADirectoryError):
st = None
yield st
@@ -679,7 +650,7 @@
pass
-class cachestat(object):
+class cachestat:
def __init__(self, path):
self.stat = os.stat(path)
@@ -731,14 +702,7 @@
In unsupported cases, it will raise a NotImplementedError"""
try:
- while True:
- try:
- res = select.select(fds, fds, fds)
- break
- except select.error as inst:
- if inst.args[0] == errno.EINTR:
- continue
- raise
+ res = select.select(fds, fds, fds)
except ValueError: # out of range file descriptor
raise NotImplementedError()
return sorted(list(set(sum(res, []))))
--- a/mercurial/profiling.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/profiling.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import contextlib
@@ -174,7 +173,7 @@
statprof.display(fp, data=data, format=displayformat, **kwargs)
-class profile(object):
+class profile:
"""Start profiling.
Profiling is active when the context manager is active. When the context
@@ -232,7 +231,7 @@
self._fp = open(path, b'wb')
elif pycompat.iswindows:
# parse escape sequence by win32print()
- class uifp(object):
+ class uifp:
def __init__(self, ui):
self._ui = ui
--- a/mercurial/progress.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/progress.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,9 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import threading
import time
@@ -66,26 +64,7 @@
return _(b"%dy%02dw") % (years, weeks)
-# file_write() and file_flush() of Python 2 do not restart on EINTR if
-# the file is attached to a "slow" device (e.g. a terminal) and raise
-# IOError. We cannot know how many bytes would be written by file_write(),
-# but a progress text is known to be short enough to be written by a
-# single write() syscall, so we can just retry file_write() with the whole
-# text. (issue5532)
-#
-# This should be a short-term workaround. We'll need to fix every occurrence
-# of write() to a terminal or pipe.
-def _eintrretry(func, *args):
- while True:
- try:
- return func(*args)
- except IOError as err:
- if err.errno == errno.EINTR:
- continue
- raise
-
-
-class progbar(object):
+class progbar:
def __init__(self, ui):
self.ui = ui
self._refreshlock = threading.Lock()
@@ -208,10 +187,10 @@
self._flusherr()
def _flusherr(self):
- _eintrretry(self.ui.ferr.flush)
+ self.ui.ferr.flush()
def _writeerr(self, msg):
- _eintrretry(self.ui.ferr.write, msg)
+ self.ui.ferr.write(msg)
def width(self):
tw = self.ui.termwidth()
--- a/mercurial/pure/base85.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pure/base85.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import struct
--- a/mercurial/pure/bdiff.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pure/bdiff.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import difflib
import re
--- a/mercurial/pure/charencode.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pure/charencode.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import array
@@ -68,10 +67,7 @@
raise ValueError
-if pycompat.ispy3:
- _utf8strict = r'surrogatepass'
-else:
- _utf8strict = r'strict'
+_utf8strict = r'surrogatepass'
def jsonescapeu8fallback(u8chars, paranoid):
--- a/mercurial/pure/mpatch.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pure/mpatch.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,13 +5,12 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+import io
import struct
-from .. import pycompat
-stringio = pycompat.bytesio
+stringio = io.BytesIO
class mpatchError(Exception):
--- a/mercurial/pure/osutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pure/osutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,12 +5,10 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, division
import ctypes
import ctypes.util
import os
-import socket
import stat as statmod
from ..pycompat import getattr
@@ -72,102 +70,6 @@
if not pycompat.iswindows:
posixfile = open
- _SCM_RIGHTS = 0x01
- _socklen_t = ctypes.c_uint
-
- if pycompat.sysplatform.startswith(b'linux'):
- # socket.h says "the type should be socklen_t but the definition of
- # the kernel is incompatible with this."
- _cmsg_len_t = ctypes.c_size_t
- _msg_controllen_t = ctypes.c_size_t
- _msg_iovlen_t = ctypes.c_size_t
- else:
- _cmsg_len_t = _socklen_t
- _msg_controllen_t = _socklen_t
- _msg_iovlen_t = ctypes.c_int
-
- class _iovec(ctypes.Structure):
- _fields_ = [
- (u'iov_base', ctypes.c_void_p),
- (u'iov_len', ctypes.c_size_t),
- ]
-
- class _msghdr(ctypes.Structure):
- _fields_ = [
- (u'msg_name', ctypes.c_void_p),
- (u'msg_namelen', _socklen_t),
- (u'msg_iov', ctypes.POINTER(_iovec)),
- (u'msg_iovlen', _msg_iovlen_t),
- (u'msg_control', ctypes.c_void_p),
- (u'msg_controllen', _msg_controllen_t),
- (u'msg_flags', ctypes.c_int),
- ]
-
- class _cmsghdr(ctypes.Structure):
- _fields_ = [
- (u'cmsg_len', _cmsg_len_t),
- (u'cmsg_level', ctypes.c_int),
- (u'cmsg_type', ctypes.c_int),
- (u'cmsg_data', ctypes.c_ubyte * 0),
- ]
-
- _libc = ctypes.CDLL(ctypes.util.find_library(u'c'), use_errno=True)
- _recvmsg = getattr(_libc, 'recvmsg', None)
- if _recvmsg:
- _recvmsg.restype = getattr(ctypes, 'c_ssize_t', ctypes.c_long)
- _recvmsg.argtypes = (
- ctypes.c_int,
- ctypes.POINTER(_msghdr),
- ctypes.c_int,
- )
- else:
- # recvmsg isn't always provided by libc; such systems are unsupported
- def _recvmsg(sockfd, msg, flags):
- raise NotImplementedError(b'unsupported platform')
-
- def _CMSG_FIRSTHDR(msgh):
- if msgh.msg_controllen < ctypes.sizeof(_cmsghdr):
- return
- cmsgptr = ctypes.cast(msgh.msg_control, ctypes.POINTER(_cmsghdr))
- return cmsgptr.contents
-
- # The pure version is less portable than the native version because the
- # handling of socket ancillary data heavily depends on C preprocessor.
- # Also, some length fields are wrongly typed in Linux kernel.
- def recvfds(sockfd):
- """receive list of file descriptors via socket"""
- dummy = (ctypes.c_ubyte * 1)()
- iov = _iovec(ctypes.cast(dummy, ctypes.c_void_p), ctypes.sizeof(dummy))
- cbuf = ctypes.create_string_buffer(256)
- msgh = _msghdr(
- None,
- 0,
- ctypes.pointer(iov),
- 1,
- ctypes.cast(cbuf, ctypes.c_void_p),
- ctypes.sizeof(cbuf),
- 0,
- )
- r = _recvmsg(sockfd, ctypes.byref(msgh), 0)
- if r < 0:
- e = ctypes.get_errno()
- raise OSError(e, os.strerror(e))
- # assumes that the first cmsg has fds because it isn't easy to write
- # portable CMSG_NXTHDR() with ctypes.
- cmsg = _CMSG_FIRSTHDR(msgh)
- if not cmsg:
- return []
- if (
- cmsg.cmsg_level != socket.SOL_SOCKET
- or cmsg.cmsg_type != _SCM_RIGHTS
- ):
- return []
- rfds = ctypes.cast(cmsg.cmsg_data, ctypes.POINTER(ctypes.c_int))
- rfdscount = (
- cmsg.cmsg_len - _cmsghdr.cmsg_data.offset
- ) // ctypes.sizeof(ctypes.c_int)
- return [rfds[i] for i in pycompat.xrange(rfdscount)]
-
else:
import msvcrt
@@ -221,7 +123,7 @@
err.errno, '%s: %s' % (encoding.strfromlocal(name), err.strerror)
)
- class posixfile(object):
+ class posixfile:
"""a file object aiming for POSIX-like semantics
CPython's open() returns a file that was opened *without* setting the
--- a/mercurial/pure/parsers.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pure/parsers.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,8 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+import io
import stat
import struct
import zlib
@@ -18,7 +18,6 @@
from ..thirdparty import attr
from .. import (
error,
- pycompat,
revlogutils,
util,
)
@@ -26,7 +25,7 @@
from ..revlogutils import nodemap as nodemaputil
from ..revlogutils import constants as revlog_constants
-stringio = pycompat.bytesio
+stringio = io.BytesIO
_pack = struct.pack
@@ -64,7 +63,7 @@
@attr.s(slots=True, init=False)
-class DirstateItem(object):
+class DirstateItem:
"""represent a dirstate entry
It hold multiple attributes
@@ -279,7 +278,7 @@
self._mtime_ns = None
def drop_merge_data(self):
- """remove all "merge-only" from a DirstateItem
+ """remove all "merge-only" information from a DirstateItem
This is to be call by the dirstatemap code when the second parent is dropped
"""
@@ -292,15 +291,15 @@
@property
def mode(self):
- return self.v1_mode()
+ return self._v1_mode()
@property
def size(self):
- return self.v1_size()
+ return self._v1_size()
@property
def mtime(self):
- return self.v1_mtime()
+ return self._v1_mtime()
def mtime_likely_equal_to(self, other_mtime):
self_sec = self._mtime_s
@@ -339,7 +338,7 @@
"""
if not self.any_tracked:
return b'?'
- return self.v1_state()
+ return self._v1_state()
@property
def has_fallback_exec(self):
@@ -499,7 +498,7 @@
# since we never set _DIRSTATE_V2_HAS_DIRCTORY_MTIME
return (flags, self._size or 0, self._mtime_s or 0, self._mtime_ns or 0)
- def v1_state(self):
+ def _v1_state(self):
"""return a "state" suitable for v1 serialization"""
if not self.any_tracked:
# the object has no state to record, this is -currently-
@@ -514,11 +513,11 @@
else:
return b'n'
- def v1_mode(self):
+ def _v1_mode(self):
"""return a "mode" suitable for v1 serialization"""
return self._mode if self._mode is not None else 0
- def v1_size(self):
+ def _v1_size(self):
"""return a "size" suitable for v1 serialization"""
if not self.any_tracked:
# the object has no state to record, this is -currently-
@@ -537,7 +536,7 @@
else:
return self._size
- def v1_mtime(self):
+ def _v1_mtime(self):
"""return a "mtime" suitable for v1 serialization"""
if not self.any_tracked:
# the object has no state to record, this is -currently-
@@ -561,7 +560,7 @@
return int(q & 0xFFFF)
-class BaseIndexObject(object):
+class BaseIndexObject:
# Can I be passed to an algorithme implemented in Rust ?
rust_ext_compat = 0
# Format of an index entry according to Python's `struct` language
@@ -959,15 +958,15 @@
cs = stringio()
write = cs.write
write(b"".join(pl))
- for f, e in pycompat.iteritems(dmap):
+ for f, e in dmap.items():
if f in copymap:
f = b"%s\0%s" % (f, copymap[f])
e = _pack(
b">cllll",
- e.v1_state(),
- e.v1_mode(),
- e.v1_size(),
- e.v1_mtime(),
+ e._v1_state(),
+ e._v1_mode(),
+ e._v1_size(),
+ e._v1_mtime(),
len(f),
)
write(e)
--- a/mercurial/pushkey.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pushkey.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from . import (
bookmarks,
--- a/mercurial/pvec.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pvec.py Thu Jun 16 15:28:54 2022 +0200
@@ -48,7 +48,6 @@
different branches
'''
-from __future__ import absolute_import
from .node import nullrev
from . import (
@@ -76,7 +75,7 @@
def _str(v, l):
# type: (int, int) -> bytes
bs = b""
- for p in pycompat.xrange(l):
+ for p in range(l):
bs = pycompat.bytechr(v & 255) + bs
v >>= 8
return bs
@@ -100,7 +99,7 @@
return c
-_htab = [_hweight(x) for x in pycompat.xrange(256)]
+_htab = [_hweight(x) for x in range(256)]
def _hamming(a, b):
@@ -165,7 +164,7 @@
pvc = r._pveccache
if ctx.rev() not in pvc:
cl = r.changelog
- for n in pycompat.xrange(ctx.rev() + 1):
+ for n in range(ctx.rev() + 1):
if n not in pvc:
node = cl.node(n)
p1, p2 = cl.parentrevs(n)
@@ -181,7 +180,7 @@
return pvec(util.b85encode(bs))
-class pvec(object):
+class pvec:
def __init__(self, hashorctx):
if isinstance(hashorctx, bytes):
self._bs = hashorctx
--- a/mercurial/pycompat.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/pycompat.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,15 +8,26 @@
This contains aliases to hide python version-specific details from the core.
"""
-from __future__ import absolute_import
+import builtins
+import codecs
+import concurrent.futures as futures
+import functools
import getopt
+import http.client as httplib
+import http.cookiejar as cookielib
import inspect
+import io
import json
import os
+import queue
import shlex
+import socketserver
+import struct
import sys
import tempfile
+import xmlrpc.client as xmlrpclib
+
ispy3 = sys.version_info[0] >= 3
ispypy = '__pypy__' in sys.builtin_module_names
@@ -27,36 +38,12 @@
TYPE_CHECKING = typing.TYPE_CHECKING
-if not ispy3:
- import cookielib
- import cPickle as pickle
- import httplib
- import Queue as queue
- import SocketServer as socketserver
- import xmlrpclib
-
- from .thirdparty.concurrent import futures
-
- def future_set_exception_info(f, exc_info):
- f.set_exception_info(*exc_info)
- # this is close enough for our usage
- FileNotFoundError = OSError
+def future_set_exception_info(f, exc_info):
+ f.set_exception(exc_info[0])
-else:
- import builtins
- import concurrent.futures as futures
- import http.cookiejar as cookielib
- import http.client as httplib
- import pickle
- import queue as queue
- import socketserver
- import xmlrpc.client as xmlrpclib
- def future_set_exception_info(f, exc_info):
- f.set_exception(exc_info[0])
-
- FileNotFoundError = builtins.FileNotFoundError
+FileNotFoundError = builtins.FileNotFoundError
def identity(a):
@@ -98,402 +85,297 @@
return _rapply(f, xs)
-if ispy3:
- import builtins
- import codecs
- import functools
- import io
- import struct
-
- if os.name == r'nt' and sys.version_info >= (3, 6):
- # MBCS (or ANSI) filesystem encoding must be used as before.
- # Otherwise non-ASCII filenames in existing repositories would be
- # corrupted.
- # This must be set once prior to any fsencode/fsdecode calls.
- sys._enablelegacywindowsfsencoding() # pytype: disable=module-attr
+if os.name == r'nt':
+ # MBCS (or ANSI) filesystem encoding must be used as before.
+ # Otherwise non-ASCII filenames in existing repositories would be
+ # corrupted.
+ # This must be set once prior to any fsencode/fsdecode calls.
+ sys._enablelegacywindowsfsencoding() # pytype: disable=module-attr
- fsencode = os.fsencode
- fsdecode = os.fsdecode
- oscurdir = os.curdir.encode('ascii')
- oslinesep = os.linesep.encode('ascii')
- osname = os.name.encode('ascii')
- ospathsep = os.pathsep.encode('ascii')
- ospardir = os.pardir.encode('ascii')
- ossep = os.sep.encode('ascii')
- osaltsep = os.altsep
- if osaltsep:
- osaltsep = osaltsep.encode('ascii')
- osdevnull = os.devnull.encode('ascii')
+fsencode = os.fsencode
+fsdecode = os.fsdecode
+oscurdir = os.curdir.encode('ascii')
+oslinesep = os.linesep.encode('ascii')
+osname = os.name.encode('ascii')
+ospathsep = os.pathsep.encode('ascii')
+ospardir = os.pardir.encode('ascii')
+ossep = os.sep.encode('ascii')
+osaltsep = os.altsep
+if osaltsep:
+ osaltsep = osaltsep.encode('ascii')
+osdevnull = os.devnull.encode('ascii')
- sysplatform = sys.platform.encode('ascii')
- sysexecutable = sys.executable
- if sysexecutable:
- sysexecutable = os.fsencode(sysexecutable)
- bytesio = io.BytesIO
- # TODO deprecate stringio name, as it is a lie on Python 3.
- stringio = bytesio
+sysplatform = sys.platform.encode('ascii')
+sysexecutable = sys.executable
+if sysexecutable:
+ sysexecutable = os.fsencode(sysexecutable)
+
- def maplist(*args):
- return list(map(*args))
+def maplist(*args):
+ return list(map(*args))
+
- def rangelist(*args):
- return list(range(*args))
+def rangelist(*args):
+ return list(range(*args))
+
- def ziplist(*args):
- return list(zip(*args))
+def ziplist(*args):
+ return list(zip(*args))
+
- rawinput = input
- getargspec = inspect.getfullargspec
+rawinput = input
+getargspec = inspect.getfullargspec
- long = int
+long = int
- if getattr(sys, 'argv', None) is not None:
- # On POSIX, the char** argv array is converted to Python str using
- # Py_DecodeLocale(). The inverse of this is Py_EncodeLocale(), which
- # isn't directly callable from Python code. In practice, os.fsencode()
- # can be used instead (this is recommended by Python's documentation
- # for sys.argv).
- #
- # On Windows, the wchar_t **argv is passed into the interpreter as-is.
- # Like POSIX, we need to emulate what Py_EncodeLocale() would do. But
- # there's an additional wrinkle. What we really want to access is the
- # ANSI codepage representation of the arguments, as this is what
- # `int main()` would receive if Python 3 didn't define `int wmain()`
- # (this is how Python 2 worked). To get that, we encode with the mbcs
- # encoding, which will pass CP_ACP to the underlying Windows API to
- # produce bytes.
- if os.name == r'nt':
- sysargv = [a.encode("mbcs", "ignore") for a in sys.argv]
- else:
- sysargv = [fsencode(a) for a in sys.argv]
+if getattr(sys, 'argv', None) is not None:
+ # On POSIX, the char** argv array is converted to Python str using
+ # Py_DecodeLocale(). The inverse of this is Py_EncodeLocale(), which
+ # isn't directly callable from Python code. In practice, os.fsencode()
+ # can be used instead (this is recommended by Python's documentation
+ # for sys.argv).
+ #
+ # On Windows, the wchar_t **argv is passed into the interpreter as-is.
+ # Like POSIX, we need to emulate what Py_EncodeLocale() would do. But
+ # there's an additional wrinkle. What we really want to access is the
+ # ANSI codepage representation of the arguments, as this is what
+ # `int main()` would receive if Python 3 didn't define `int wmain()`
+ # (this is how Python 2 worked). To get that, we encode with the mbcs
+ # encoding, which will pass CP_ACP to the underlying Windows API to
+ # produce bytes.
+ if os.name == r'nt':
+ sysargv = [a.encode("mbcs", "ignore") for a in sys.argv]
+ else:
+ sysargv = [fsencode(a) for a in sys.argv]
- bytechr = struct.Struct('>B').pack
- byterepr = b'%r'.__mod__
-
- class bytestr(bytes):
- """A bytes which mostly acts as a Python 2 str
+bytechr = struct.Struct('>B').pack
+byterepr = b'%r'.__mod__
- >>> bytestr(), bytestr(bytearray(b'foo')), bytestr(u'ascii'), bytestr(1)
- ('', 'foo', 'ascii', '1')
- >>> s = bytestr(b'foo')
- >>> assert s is bytestr(s)
- __bytes__() should be called if provided:
+class bytestr(bytes):
+ """A bytes which mostly acts as a Python 2 str
- >>> class bytesable(object):
- ... def __bytes__(self):
- ... return b'bytes'
- >>> bytestr(bytesable())
- 'bytes'
+ >>> bytestr(), bytestr(bytearray(b'foo')), bytestr(u'ascii'), bytestr(1)
+ ('', 'foo', 'ascii', '1')
+ >>> s = bytestr(b'foo')
+ >>> assert s is bytestr(s)
+
+ __bytes__() should be called if provided:
- There's no implicit conversion from non-ascii str as its encoding is
- unknown:
+ >>> class bytesable:
+ ... def __bytes__(self):
+ ... return b'bytes'
+ >>> bytestr(bytesable())
+ 'bytes'
- >>> bytestr(chr(0x80)) # doctest: +ELLIPSIS
- Traceback (most recent call last):
- ...
- UnicodeEncodeError: ...
-
- Comparison between bytestr and bytes should work:
+ There's no implicit conversion from non-ascii str as its encoding is
+ unknown:
- >>> assert bytestr(b'foo') == b'foo'
- >>> assert b'foo' == bytestr(b'foo')
- >>> assert b'f' in bytestr(b'foo')
- >>> assert bytestr(b'f') in b'foo'
+ >>> bytestr(chr(0x80)) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ ...
+ UnicodeEncodeError: ...
- Sliced elements should be bytes, not integer:
+ Comparison between bytestr and bytes should work:
- >>> s[1], s[:2]
- (b'o', b'fo')
- >>> list(s), list(reversed(s))
- ([b'f', b'o', b'o'], [b'o', b'o', b'f'])
-
- As bytestr type isn't propagated across operations, you need to cast
- bytes to bytestr explicitly:
+ >>> assert bytestr(b'foo') == b'foo'
+ >>> assert b'foo' == bytestr(b'foo')
+ >>> assert b'f' in bytestr(b'foo')
+ >>> assert bytestr(b'f') in b'foo'
- >>> s = bytestr(b'foo').upper()
- >>> t = bytestr(s)
- >>> s[0], t[0]
- (70, b'F')
+ Sliced elements should be bytes, not integer:
- Be careful to not pass a bytestr object to a function which expects
- bytearray-like behavior.
+ >>> s[1], s[:2]
+ (b'o', b'fo')
+ >>> list(s), list(reversed(s))
+ ([b'f', b'o', b'o'], [b'o', b'o', b'f'])
- >>> t = bytes(t) # cast to bytes
- >>> assert type(t) is bytes
- """
+ As bytestr type isn't propagated across operations, you need to cast
+ bytes to bytestr explicitly:
- # Trick pytype into not demanding Iterable[int] be passed to __new__(),
- # since the appropriate bytes format is done internally.
- #
- # https://github.com/google/pytype/issues/500
- if TYPE_CHECKING:
+ >>> s = bytestr(b'foo').upper()
+ >>> t = bytestr(s)
+ >>> s[0], t[0]
+ (70, b'F')
- def __init__(self, s=b''):
- pass
+ Be careful to not pass a bytestr object to a function which expects
+ bytearray-like behavior.
+
+ >>> t = bytes(t) # cast to bytes
+ >>> assert type(t) is bytes
+ """
- def __new__(cls, s=b''):
- if isinstance(s, bytestr):
- return s
- if not isinstance(
- s, (bytes, bytearray)
- ) and not hasattr( # hasattr-py3-only
- s, u'__bytes__'
- ):
- s = str(s).encode('ascii')
- return bytes.__new__(cls, s)
+ # Trick pytype into not demanding Iterable[int] be passed to __new__(),
+ # since the appropriate bytes format is done internally.
+ #
+ # https://github.com/google/pytype/issues/500
+ if TYPE_CHECKING:
- def __getitem__(self, key):
- s = bytes.__getitem__(self, key)
- if not isinstance(s, bytes):
- s = bytechr(s)
+ def __init__(self, s=b''):
+ pass
+
+ def __new__(cls, s=b''):
+ if isinstance(s, bytestr):
return s
-
- def __iter__(self):
- return iterbytestr(bytes.__iter__(self))
-
- def __repr__(self):
- return bytes.__repr__(self)[1:] # drop b''
+ if not isinstance(
+ s, (bytes, bytearray)
+ ) and not hasattr( # hasattr-py3-only
+ s, u'__bytes__'
+ ):
+ s = str(s).encode('ascii')
+ return bytes.__new__(cls, s)
- def iterbytestr(s):
- """Iterate bytes as if it were a str object of Python 2"""
- return map(bytechr, s)
-
- def maybebytestr(s):
- """Promote bytes to bytestr"""
- if isinstance(s, bytes):
- return bytestr(s)
+ def __getitem__(self, key):
+ s = bytes.__getitem__(self, key)
+ if not isinstance(s, bytes):
+ s = bytechr(s)
return s
- def sysbytes(s):
- """Convert an internal str (e.g. keyword, __doc__) back to bytes
+ def __iter__(self):
+ return iterbytestr(bytes.__iter__(self))
- This never raises UnicodeEncodeError, but only ASCII characters
- can be round-trip by sysstr(sysbytes(s)).
- """
- if isinstance(s, bytes):
- return s
- return s.encode('utf-8')
+ def __repr__(self):
+ return bytes.__repr__(self)[1:] # drop b''
- def sysstr(s):
- """Return a keyword str to be passed to Python functions such as
- getattr() and str.encode()
- This never raises UnicodeDecodeError. Non-ascii characters are
- considered invalid and mapped to arbitrary but unique code points
- such that 'sysstr(a) != sysstr(b)' for all 'a != b'.
- """
- if isinstance(s, builtins.str):
- return s
- return s.decode('latin-1')
+def iterbytestr(s):
+ """Iterate bytes as if it were a str object of Python 2"""
+ return map(bytechr, s)
+
- def strurl(url):
- """Converts a bytes url back to str"""
- if isinstance(url, bytes):
- return url.decode('ascii')
- return url
+def maybebytestr(s):
+ """Promote bytes to bytestr"""
+ if isinstance(s, bytes):
+ return bytestr(s)
+ return s
- def bytesurl(url):
- """Converts a str url to bytes by encoding in ascii"""
- if isinstance(url, str):
- return url.encode('ascii')
- return url
+
+def sysbytes(s):
+ """Convert an internal str (e.g. keyword, __doc__) back to bytes
- def raisewithtb(exc, tb):
- """Raise exception with the given traceback"""
- raise exc.with_traceback(tb)
+ This never raises UnicodeEncodeError, but only ASCII characters
+ can be round-trip by sysstr(sysbytes(s)).
+ """
+ if isinstance(s, bytes):
+ return s
+ return s.encode('utf-8')
- def getdoc(obj):
- """Get docstring as bytes; may be None so gettext() won't confuse it
- with _('')"""
- doc = getattr(obj, '__doc__', None)
- if doc is None:
- return doc
- return sysbytes(doc)
- def _wrapattrfunc(f):
- @functools.wraps(f)
- def w(object, name, *args):
- return f(object, sysstr(name), *args)
-
- return w
+def sysstr(s):
+ """Return a keyword str to be passed to Python functions such as
+ getattr() and str.encode()
- # these wrappers are automagically imported by hgloader
- delattr = _wrapattrfunc(builtins.delattr)
- getattr = _wrapattrfunc(builtins.getattr)
- hasattr = _wrapattrfunc(builtins.hasattr)
- setattr = _wrapattrfunc(builtins.setattr)
- xrange = builtins.range
- unicode = str
-
- def open(name, mode=b'r', buffering=-1, encoding=None):
- return builtins.open(name, sysstr(mode), buffering, encoding)
-
- safehasattr = _wrapattrfunc(builtins.hasattr)
+ This never raises UnicodeDecodeError. Non-ascii characters are
+ considered invalid and mapped to arbitrary but unique code points
+ such that 'sysstr(a) != sysstr(b)' for all 'a != b'.
+ """
+ if isinstance(s, builtins.str):
+ return s
+ return s.decode('latin-1')
- def _getoptbwrapper(orig, args, shortlist, namelist):
- """
- Takes bytes arguments, converts them to unicode, pass them to
- getopt.getopt(), convert the returned values back to bytes and then
- return them for Python 3 compatibility as getopt.getopt() don't accepts
- bytes on Python 3.
- """
- args = [a.decode('latin-1') for a in args]
- shortlist = shortlist.decode('latin-1')
- namelist = [a.decode('latin-1') for a in namelist]
- opts, args = orig(args, shortlist, namelist)
- opts = [(a[0].encode('latin-1'), a[1].encode('latin-1')) for a in opts]
- args = [a.encode('latin-1') for a in args]
- return opts, args
+
+def strurl(url):
+ """Converts a bytes url back to str"""
+ if isinstance(url, bytes):
+ return url.decode('ascii')
+ return url
+
- def strkwargs(dic):
- """
- Converts the keys of a python dictonary to str i.e. unicodes so that
- they can be passed as keyword arguments as dictionaries with bytes keys
- can't be passed as keyword arguments to functions on Python 3.
- """
- dic = {k.decode('latin-1'): v for k, v in dic.items()}
- return dic
+def bytesurl(url):
+ """Converts a str url to bytes by encoding in ascii"""
+ if isinstance(url, str):
+ return url.encode('ascii')
+ return url
- def byteskwargs(dic):
- """
- Converts keys of python dictionaries to bytes as they were converted to
- str to pass that dictonary as a keyword argument on Python 3.
- """
- dic = {k.encode('latin-1'): v for k, v in dic.items()}
- return dic
- # TODO: handle shlex.shlex().
- def shlexsplit(s, comments=False, posix=True):
- """
- Takes bytes argument, convert it to str i.e. unicodes, pass that into
- shlex.split(), convert the returned value to bytes and return that for
- Python 3 compatibility as shelx.split() don't accept bytes on Python 3.
- """
- ret = shlex.split(s.decode('latin-1'), comments, posix)
- return [a.encode('latin-1') for a in ret]
+def raisewithtb(exc, tb):
+ """Raise exception with the given traceback"""
+ raise exc.with_traceback(tb)
+
- iteritems = lambda x: x.items()
- itervalues = lambda x: x.values()
+def getdoc(obj):
+ """Get docstring as bytes; may be None so gettext() won't confuse it
+ with _('')"""
+ doc = getattr(obj, '__doc__', None)
+ if doc is None:
+ return doc
+ return sysbytes(doc)
- # Python 3.5's json.load and json.loads require str. We polyfill its
- # code for detecting encoding from bytes.
- if sys.version_info[0:2] < (3, 6):
- def _detect_encoding(b):
- bstartswith = b.startswith
- if bstartswith((codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE)):
- return 'utf-32'
- if bstartswith((codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE)):
- return 'utf-16'
- if bstartswith(codecs.BOM_UTF8):
- return 'utf-8-sig'
+def _wrapattrfunc(f):
+ @functools.wraps(f)
+ def w(object, name, *args):
+ return f(object, sysstr(name), *args)
+
+ return w
+
- if len(b) >= 4:
- if not b[0]:
- # 00 00 -- -- - utf-32-be
- # 00 XX -- -- - utf-16-be
- return 'utf-16-be' if b[1] else 'utf-32-be'
- if not b[1]:
- # XX 00 00 00 - utf-32-le
- # XX 00 00 XX - utf-16-le
- # XX 00 XX -- - utf-16-le
- return 'utf-16-le' if b[2] or b[3] else 'utf-32-le'
- elif len(b) == 2:
- if not b[0]:
- # 00 XX - utf-16-be
- return 'utf-16-be'
- if not b[1]:
- # XX 00 - utf-16-le
- return 'utf-16-le'
- # default
- return 'utf-8'
+# these wrappers are automagically imported by hgloader
+delattr = _wrapattrfunc(builtins.delattr)
+getattr = _wrapattrfunc(builtins.getattr)
+hasattr = _wrapattrfunc(builtins.hasattr)
+setattr = _wrapattrfunc(builtins.setattr)
+xrange = builtins.range
+unicode = str
- def json_loads(s, *args, **kwargs):
- if isinstance(s, (bytes, bytearray)):
- s = s.decode(_detect_encoding(s), 'surrogatepass')
- return json.loads(s, *args, **kwargs)
+def open(name, mode=b'r', buffering=-1, encoding=None):
+ return builtins.open(name, sysstr(mode), buffering, encoding)
- else:
- json_loads = json.loads
-else:
- import cStringIO
+safehasattr = _wrapattrfunc(builtins.hasattr)
+
- xrange = xrange
- unicode = unicode
- bytechr = chr
- byterepr = repr
- bytestr = str
- iterbytestr = iter
- maybebytestr = identity
- sysbytes = identity
- sysstr = identity
- strurl = identity
- bytesurl = identity
- open = open
- delattr = delattr
- getattr = getattr
- hasattr = hasattr
- setattr = setattr
+def _getoptbwrapper(orig, args, shortlist, namelist):
+ """
+ Takes bytes arguments, converts them to unicode, pass them to
+ getopt.getopt(), convert the returned values back to bytes and then
+ return them for Python 3 compatibility as getopt.getopt() don't accepts
+ bytes on Python 3.
+ """
+ args = [a.decode('latin-1') for a in args]
+ shortlist = shortlist.decode('latin-1')
+ namelist = [a.decode('latin-1') for a in namelist]
+ opts, args = orig(args, shortlist, namelist)
+ opts = [(a[0].encode('latin-1'), a[1].encode('latin-1')) for a in opts]
+ args = [a.encode('latin-1') for a in args]
+ return opts, args
- # this can't be parsed on Python 3
- exec(b'def raisewithtb(exc, tb):\n raise exc, None, tb\n')
- def fsencode(filename):
- """
- Partial backport from os.py in Python 3, which only accepts bytes.
- In Python 2, our paths should only ever be bytes, a unicode path
- indicates a bug.
- """
- if isinstance(filename, str):
- return filename
- else:
- raise TypeError("expect str, not %s" % type(filename).__name__)
-
- # In Python 2, fsdecode() has a very chance to receive bytes. So it's
- # better not to touch Python 2 part as it's already working fine.
- fsdecode = identity
+def strkwargs(dic):
+ """
+ Converts the keys of a python dictonary to str i.e. unicodes so that
+ they can be passed as keyword arguments as dictionaries with bytes keys
+ can't be passed as keyword arguments to functions on Python 3.
+ """
+ dic = {k.decode('latin-1'): v for k, v in dic.items()}
+ return dic
- def getdoc(obj):
- return getattr(obj, '__doc__', None)
-
- _notset = object()
- def safehasattr(thing, attr):
- return getattr(thing, attr, _notset) is not _notset
+def byteskwargs(dic):
+ """
+ Converts keys of python dictionaries to bytes as they were converted to
+ str to pass that dictonary as a keyword argument on Python 3.
+ """
+ dic = {k.encode('latin-1'): v for k, v in dic.items()}
+ return dic
- def _getoptbwrapper(orig, args, shortlist, namelist):
- return orig(args, shortlist, namelist)
-
- strkwargs = identity
- byteskwargs = identity
- oscurdir = os.curdir
- oslinesep = os.linesep
- osname = os.name
- ospathsep = os.pathsep
- ospardir = os.pardir
- ossep = os.sep
- osaltsep = os.altsep
- osdevnull = os.devnull
- long = long
- if getattr(sys, 'argv', None) is not None:
- sysargv = sys.argv
- sysplatform = sys.platform
- sysexecutable = sys.executable
- shlexsplit = shlex.split
- bytesio = cStringIO.StringIO
- stringio = bytesio
- maplist = map
- rangelist = range
- ziplist = zip
- rawinput = raw_input
- getargspec = inspect.getargspec
- iteritems = lambda x: x.iteritems()
- itervalues = lambda x: x.itervalues()
- json_loads = json.loads
+# TODO: handle shlex.shlex().
+def shlexsplit(s, comments=False, posix=True):
+ """
+ Takes bytes argument, convert it to str i.e. unicodes, pass that into
+ shlex.split(), convert the returned value to bytes and return that for
+ Python 3 compatibility as shelx.split() don't accept bytes on Python 3.
+ """
+ ret = shlex.split(s.decode('latin-1'), comments, posix)
+ return [a.encode('latin-1') for a in ret]
+
+
+iteritems = lambda x: x.items()
+itervalues = lambda x: x.values()
+
+json_loads = json.loads
isjython = sysplatform.startswith(b'java')
--- a/mercurial/rcutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/rcutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
--- a/mercurial/registrar.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/registrar.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from . import (
configitems,
@@ -22,7 +21,7 @@
configitem = configitems.getitemregister
-class _funcregistrarbase(object):
+class _funcregistrarbase:
"""Base of decorator to register a function for specific purpose
This decorator stores decorated functions into own dict 'table'.
--- a/mercurial/repair.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/repair.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
@@ -25,7 +24,6 @@
obsutil,
pathutil,
phases,
- pycompat,
requirements,
scmutil,
util,
@@ -92,7 +90,7 @@
"""find out the filelogs affected by the strip"""
files = set()
- for x in pycompat.xrange(striprev, len(repo)):
+ for x in range(striprev, len(repo)):
files.update(repo[x].files())
return sorted(files)
@@ -380,7 +378,7 @@
return [c.node() for c in repo.set(b'roots(%ld)', tostrip)]
-class stripcallback(object):
+class stripcallback:
"""used as a transaction postclose callback"""
def __init__(self, ui, repo, backup, topic):
--- a/mercurial/repocache.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/repocache.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import gc
@@ -20,7 +19,7 @@
)
-class repoloader(object):
+class repoloader:
"""Load repositories in background thread
This is designed for a forking server. A cached repo cannot be obtained
--- a/mercurial/repoview.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/repoview.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import copy
import weakref
@@ -169,7 +168,7 @@
firstmutable = min(firstmutable, min(cl.rev(r) for r in roots))
# protect from nullrev root
firstmutable = max(0, firstmutable)
- return frozenset(pycompat.xrange(firstmutable, len(cl)))
+ return frozenset(range(firstmutable, len(cl)))
# function to compute filtered set
@@ -262,10 +261,10 @@
return cl
-class filteredchangelogmixin(object):
+class filteredchangelogmixin:
def tiprev(self):
"""filtered version of revlog.tiprev"""
- for i in pycompat.xrange(len(self) - 1, -2, -1):
+ for i in range(len(self) - 1, -2, -1):
if i not in self.filteredrevs:
return i
@@ -277,7 +276,7 @@
"""filtered version of revlog.__iter__"""
def filterediter():
- for i in pycompat.xrange(len(self)):
+ for i in range(len(self)):
if i not in self.filteredrevs:
yield i
@@ -362,7 +361,7 @@
return super(filteredchangelogmixin, self).flags(rev)
-class repoview(object):
+class repoview:
"""Provide a read/write view of a repo through a filtered changelog
This object is used to access a filtered version of a repository without
--- a/mercurial/requirements.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/requirements.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
# obsolete experimental requirements:
# - manifestv2: An experimental new manifest format that allowed
--- a/mercurial/revlog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlog.py Thu Jun 16 15:28:54 2022 +0200
@@ -12,12 +12,10 @@
and O(changes) merge between branches.
"""
-from __future__ import absolute_import
import binascii
import collections
import contextlib
-import errno
import io
import os
import struct
@@ -172,7 +170,7 @@
@interfaceutil.implementer(repository.irevisiondelta)
@attr.s(slots=True)
-class revlogrevisiondelta(object):
+class revlogrevisiondelta:
node = attr.ib()
p1node = attr.ib()
p2node = attr.ib()
@@ -188,7 +186,7 @@
@interfaceutil.implementer(repository.iverifyproblem)
@attr.s(frozen=True)
-class revlogproblem(object):
+class revlogproblem:
warning = attr.ib(default=None)
error = attr.ib(default=None)
node = attr.ib(default=None)
@@ -238,7 +236,7 @@
)
-class revlog(object):
+class revlog:
"""
the underlying revision storage object
@@ -299,6 +297,7 @@
persistentnodemap=False,
concurrencychecker=None,
trypending=False,
+ canonical_parent_order=True,
):
"""
create a revlog object
@@ -346,6 +345,7 @@
self._chunkcachesize = 65536
self._maxchainlen = None
self._deltabothparents = True
+ self._debug_delta = False
self.index = None
self._docket = None
self._nodemap_docket = None
@@ -374,6 +374,13 @@
self._concurrencychecker = concurrencychecker
+ # parent order is supposed to be semantically irrelevant, so we
+ # normally resort parents to ensure that the first parent is non-null,
+ # if there is a non-null parent at all.
+ # filelog abuses the parent order as flag to mark some instances of
+ # meta-encoded files, so allow it to disable this behavior.
+ self.canonical_parent_order = canonical_parent_order
+
def _init_opts(self):
"""process options (from above/config) to setup associated default revlog mode
@@ -416,6 +423,8 @@
self._lazydeltabase = False
if self._lazydelta:
self._lazydeltabase = bool(opts.get(b'lazydeltabase', False))
+ if b'debug-delta' in opts:
+ self._debug_delta = opts[b'debug-delta']
if b'compengine' in opts:
self._compengine = opts[b'compengine']
if b'zlib.level' in opts:
@@ -438,9 +447,7 @@
self._flagprocessors[REVIDX_ELLIPSIS] = ellipsisprocessor
# revlog v0 doesn't have flag processors
- for flag, processor in pycompat.iteritems(
- opts.get(b'flagprocessors', {})
- ):
+ for flag, processor in opts.get(b'flagprocessors', {}).items():
flagutil.insertflagprocessor(flag, processor, self._flagprocessors)
if self._chunkcachesize <= 0:
@@ -478,9 +485,7 @@
return fp.read()
else:
return fp.read(size)
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return b''
def _loadindex(self, docket=None):
@@ -693,9 +698,7 @@
else:
f.seek(self._docket.index_end, os.SEEK_SET)
return f
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return self.opener(
self._indexfile, mode=b"w+", checkambig=self._checkambig
)
@@ -735,7 +738,7 @@
return len(self.index)
def __iter__(self):
- return iter(pycompat.xrange(len(self)))
+ return iter(range(len(self)))
def revs(self, start=0, stop=None):
"""iterate over all rev in this revlog (from start to stop)"""
@@ -869,8 +872,10 @@
the revlog which do not persist the rank.
"""
rank = self.index[rev][ENTRY_RANK]
- if rank == RANK_UNKNOWN:
+ if self._format_version != CHANGELOGV2 or rank == RANK_UNKNOWN:
return None
+ if rev == nullrev:
+ return 0 # convention
return rank
def chainbase(self, rev):
@@ -899,7 +904,10 @@
raise error.WdirUnsupported
raise
- return entry[5], entry[6]
+ if self.canonical_parent_order and entry[5] == nullrev:
+ return entry[6], entry[5]
+ else:
+ return entry[5], entry[6]
# fast parentrevs(rev) where rev isn't filtered
_uncheckedparentrevs = parentrevs
@@ -920,7 +928,11 @@
def parents(self, node):
i = self.index
d = i[self.rev(node)]
- return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
+ # inline node() to avoid function call overhead
+ if self.canonical_parent_order and d[5] == self.nullid:
+ return i[d[6]][7], i[d[5]][7]
+ else:
+ return i[d[5]][7], i[d[6]][7]
def chainlen(self, rev):
return self._chaininfo(rev)[0]
@@ -1043,7 +1055,7 @@
heads = [self.rev(n) for n in heads]
# we want the ancestors, but inclusive
- class lazyset(object):
+ class lazyset:
def __init__(self, lazyvalues):
self.addedvalues = set()
self.lazyvalues = lazyvalues
@@ -1304,7 +1316,7 @@
# But, obviously its parents aren't.
for p in self.parents(n):
heads.pop(p, None)
- heads = [head for head, flag in pycompat.iteritems(heads) if flag]
+ heads = [head for head, flag in heads.items() if flag]
roots = list(roots)
assert orderedout
assert roots
@@ -1470,7 +1482,7 @@
node = bin(id)
self.rev(node)
return node
- except (TypeError, error.LookupError):
+ except (binascii.Error, error.LookupError):
pass
def _partialmatch(self, id):
@@ -1508,10 +1520,13 @@
return self._pcache[id]
if len(id) <= 40:
+ # hex(node)[:...]
+ l = len(id) // 2 * 2 # grab an even number of digits
try:
- # hex(node)[:...]
- l = len(id) // 2 # grab an even number of digits
- prefix = bin(id[: l * 2])
+ prefix = bin(id[:l])
+ except binascii.Error:
+ pass
+ else:
nl = [e[7] for e in self.index if e[7].startswith(prefix)]
nl = [
n for n in nl if hex(n).startswith(id) and self.hasnode(n)
@@ -1528,8 +1543,6 @@
if maybewdir:
raise error.WdirUnsupported
return None
- except TypeError:
- pass
def lookup(self, id):
"""locate a node based on:
@@ -2098,9 +2111,7 @@
dfh.seek(0, os.SEEK_END)
else:
dfh.seek(self._docket.data_end, os.SEEK_SET)
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
dfh = self._datafp(b"w+")
transaction.add(self._datafile, dsize)
if self._sidedatafile is not None:
@@ -2109,9 +2120,7 @@
try:
sdfh = self.opener(self._sidedatafile, mode=b"r+")
dfh.seek(self._docket.sidedata_end, os.SEEK_SET)
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
sdfh = self.opener(self._sidedatafile, mode=b"w+")
transaction.add(
self._sidedatafile, self._docket.sidedata_end
@@ -2412,7 +2421,12 @@
textlen = len(rawtext)
if deltacomputer is None:
- deltacomputer = deltautil.deltacomputer(self)
+ write_debug = None
+ if self._debug_delta:
+ write_debug = transaction._report
+ deltacomputer = deltautil.deltacomputer(
+ self, write_debug=write_debug
+ )
revinfo = revlogutils.revisioninfo(
node,
@@ -2469,9 +2483,12 @@
elif p1r == nullrev and p2r != nullrev:
rank = 1 + self.fast_rank(p2r)
else: # merge node
- pmin, pmax = sorted((p1r, p2r))
- rank = 1 + self.fast_rank(pmax)
- rank += sum(1 for _ in self.findmissingrevs([pmax], [pmin]))
+ if rustdagop is not None and self.index.rust_ext_compat:
+ rank = rustdagop.rank(self.index, p1r, p2r)
+ else:
+ pmin, pmax = sorted((p1r, p2r))
+ rank = 1 + self.fast_rank(pmax)
+ rank += sum(1 for _ in self.findmissingrevs([pmax], [pmin]))
e = revlogutils.entry(
flags=flags,
@@ -2622,7 +2639,13 @@
empty = True
try:
with self._writing(transaction):
- deltacomputer = deltautil.deltacomputer(self)
+ write_debug = None
+ if self._debug_delta:
+ write_debug = transaction._report
+ deltacomputer = deltautil.deltacomputer(
+ self,
+ write_debug=write_debug,
+ )
# loop through our set of deltas
for data in deltas:
(
@@ -2800,9 +2823,7 @@
f.seek(0, io.SEEK_END)
actual = f.tell()
dd = actual - expected
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
dd = 0
try:
@@ -2819,9 +2840,7 @@
databytes += max(0, self.length(r))
dd = 0
di = actual - len(self) * s - databytes
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
di = 0
return (dd, di)
@@ -2998,7 +3017,13 @@
sidedata_helpers,
):
"""perform the core duty of `revlog.clone` after parameter processing"""
- deltacomputer = deltautil.deltacomputer(destrevlog)
+ write_debug = None
+ if self._debug_delta:
+ write_debug = tr._report
+ deltacomputer = deltautil.deltacomputer(
+ destrevlog,
+ write_debug=write_debug,
+ )
index = self.index
for rev in self:
entry = index[rev]
--- a/mercurial/revlogutils/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlogutils/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from ..thirdparty import attr
from ..interfaces import repository
@@ -63,7 +62,7 @@
@attr.s(slots=True, frozen=True)
-class revisioninfo(object):
+class revisioninfo:
"""Information about a revision that allows building its fulltext
node: expected hash of the revision
p1, p2: parent revs of the revision
--- a/mercurial/revlogutils/constants.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlogutils/constants.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# GNU General Public License version 2 or any later version.
"""Helper class to compute deltas stored inside revlogs"""
-from __future__ import absolute_import
import struct
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/revlogutils/debug.py Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,218 @@
+# revlogutils/debug.py - utility used for revlog debugging
+#
+# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
+# Copyright 2022 Octobus <contact@octobus.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from .. import (
+ node as nodemod,
+)
+
+from . import (
+ constants,
+)
+
+INDEX_ENTRY_DEBUG_COLUMN = []
+
+NODE_SIZE = object()
+
+
+class _column_base:
+ """constains the definition of a revlog column
+
+ name: the column header,
+ value_func: the function called to get a value,
+ size: the width of the column,
+ verbose_only: only include the column in verbose mode.
+ """
+
+ def __init__(self, name, value_func, size=None, verbose=False):
+ self.name = name
+ self.value_func = value_func
+ if size is not NODE_SIZE:
+ if size is None:
+ size = 8 # arbitrary default
+ size = max(len(name), size)
+ self._size = size
+ self.verbose_only = verbose
+
+ def get_size(self, node_size):
+ if self._size is NODE_SIZE:
+ return node_size
+ else:
+ return self._size
+
+
+def debug_column(name, size=None, verbose=False):
+ """decorated function is registered as a column
+
+ name: the name of the column,
+ size: the expected size of the column.
+ """
+
+ def register(func):
+ entry = _column_base(
+ name=name,
+ value_func=func,
+ size=size,
+ verbose=verbose,
+ )
+ INDEX_ENTRY_DEBUG_COLUMN.append(entry)
+ return entry
+
+ return register
+
+
+@debug_column(b"rev", size=6)
+def _rev(index, rev, entry, hexfn):
+ return b"%d" % rev
+
+
+@debug_column(b"rank", size=6, verbose=True)
+def rank(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_RANK]
+
+
+@debug_column(b"linkrev", size=6)
+def _linkrev(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_LINK_REV]
+
+
+@debug_column(b"nodeid", size=NODE_SIZE)
+def _nodeid(index, rev, entry, hexfn):
+ return hexfn(entry[constants.ENTRY_NODE_ID])
+
+
+@debug_column(b"p1-rev", size=6, verbose=True)
+def _p1_rev(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_PARENT_1]
+
+
+@debug_column(b"p1-nodeid", size=NODE_SIZE)
+def _p1_node(index, rev, entry, hexfn):
+ parent = entry[constants.ENTRY_PARENT_1]
+ p_entry = index[parent]
+ return hexfn(p_entry[constants.ENTRY_NODE_ID])
+
+
+@debug_column(b"p2-rev", size=6, verbose=True)
+def _p2_rev(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_PARENT_2]
+
+
+@debug_column(b"p2-nodeid", size=NODE_SIZE)
+def _p2_node(index, rev, entry, hexfn):
+ parent = entry[constants.ENTRY_PARENT_2]
+ p_entry = index[parent]
+ return hexfn(p_entry[constants.ENTRY_NODE_ID])
+
+
+@debug_column(b"full-size", size=20, verbose=True)
+def full_size(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
+
+
+@debug_column(b"delta-base", size=6, verbose=True)
+def delta_base(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_DELTA_BASE]
+
+
+@debug_column(b"flags", size=2, verbose=True)
+def flags(index, rev, entry, hexfn):
+ field = entry[constants.ENTRY_DATA_OFFSET]
+ field &= 0xFFFF
+ return b"%d" % field
+
+
+@debug_column(b"comp-mode", size=4, verbose=True)
+def compression_mode(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_DATA_COMPRESSION_MODE]
+
+
+@debug_column(b"data-offset", size=20, verbose=True)
+def data_offset(index, rev, entry, hexfn):
+ field = entry[constants.ENTRY_DATA_OFFSET]
+ field >>= 16
+ return b"%d" % field
+
+
+@debug_column(b"chunk-size", size=10, verbose=True)
+def data_chunk_size(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_DATA_COMPRESSED_LENGTH]
+
+
+@debug_column(b"sd-comp-mode", size=7, verbose=True)
+def sidedata_compression_mode(index, rev, entry, hexfn):
+ compression = entry[constants.ENTRY_SIDEDATA_COMPRESSION_MODE]
+ if compression == constants.COMP_MODE_PLAIN:
+ return b"plain"
+ elif compression == constants.COMP_MODE_DEFAULT:
+ return b"default"
+ elif compression == constants.COMP_MODE_INLINE:
+ return b"inline"
+ else:
+ return b"%d" % compression
+
+
+@debug_column(b"sidedata-offset", size=20, verbose=True)
+def sidedata_offset(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_SIDEDATA_OFFSET]
+
+
+@debug_column(b"sd-chunk-size", size=10, verbose=True)
+def sidedata_chunk_size(index, rev, entry, hexfn):
+ return b"%d" % entry[constants.ENTRY_SIDEDATA_COMPRESSED_LENGTH]
+
+
+def debug_index(
+ ui,
+ repo,
+ formatter,
+ revlog,
+ full_node,
+):
+ """display index data for a revlog"""
+ if full_node:
+ hexfn = nodemod.hex
+ else:
+ hexfn = nodemod.short
+
+ idlen = 12
+ for i in revlog:
+ idlen = len(hexfn(revlog.node(i)))
+ break
+
+ fm = formatter
+
+ header_pieces = []
+ for column in INDEX_ENTRY_DEBUG_COLUMN:
+ if column.verbose_only and not ui.verbose:
+ continue
+ size = column.get_size(idlen)
+ name = column.name
+ header_pieces.append(name.rjust(size))
+
+ fm.plain(b' '.join(header_pieces) + b'\n')
+
+ index = revlog.index
+
+ for rev in revlog:
+ fm.startitem()
+ entry = index[rev]
+ first = True
+ for column in INDEX_ENTRY_DEBUG_COLUMN:
+ if column.verbose_only and not ui.verbose:
+ continue
+ if not first:
+ fm.plain(b' ')
+ first = False
+
+ size = column.get_size(idlen)
+ value = column.value_func(index, rev, entry, hexfn)
+ display = b"%%%ds" % size
+ fm.write(column.name, display, value)
+ fm.plain(b'\n')
+
+ fm.end()
--- a/mercurial/revlogutils/deltas.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlogutils/deltas.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# GNU General Public License version 2 or any later version.
"""Helper class to compute deltas stored inside revlogs"""
-from __future__ import absolute_import
import collections
import struct
@@ -21,6 +20,9 @@
COMP_MODE_DEFAULT,
COMP_MODE_INLINE,
COMP_MODE_PLAIN,
+ KIND_CHANGELOG,
+ KIND_FILELOG,
+ KIND_MANIFESTLOG,
REVIDX_ISCENSORED,
REVIDX_RAWTEXT_CHANGING_FLAGS,
)
@@ -39,7 +41,7 @@
LIMIT_DELTA2TEXT = 2
-class _testrevlog(object):
+class _testrevlog:
"""minimalist fake revlog to use in doctests"""
def __init__(self, data, density=0.5, mingap=0, snapshot=()):
@@ -545,7 +547,7 @@
@attr.s(slots=True, frozen=True)
-class _deltainfo(object):
+class _deltainfo:
distance = attr.ib()
deltalen = attr.ib()
data = attr.ib()
@@ -928,9 +930,11 @@
yield (prev,)
-class deltacomputer(object):
- def __init__(self, revlog):
+class deltacomputer:
+ def __init__(self, revlog, write_debug=None, debug_search=False):
self.revlog = revlog
+ self._write_debug = write_debug
+ self._debug_search = debug_search
def buildtext(self, revinfo, fh):
"""Builds a fulltext version of a revision
@@ -977,6 +981,7 @@
def _builddeltainfo(self, revinfo, base, fh):
# can we use the cached delta?
revlog = self.revlog
+ debug_search = self._write_debug is not None and self._debug_search
chainbase = revlog.chainbase(base)
if revlog._generaldelta:
deltabase = base
@@ -1006,13 +1011,27 @@
delta = revinfo.cachedelta[1]
if delta is None:
delta = self._builddeltadiff(base, revinfo, fh)
+ if debug_search:
+ msg = b"DBG-DELTAS-SEARCH: uncompressed-delta-size=%d\n"
+ msg %= len(delta)
+ self._write_debug(msg)
# snapshotdept need to be neither None nor 0 level snapshot
if revlog.upperboundcomp is not None and snapshotdepth:
lowestrealisticdeltalen = len(delta) // revlog.upperboundcomp
snapshotlimit = revinfo.textlen >> snapshotdepth
+ if debug_search:
+ msg = b"DBG-DELTAS-SEARCH: projected-lower-size=%d\n"
+ msg %= lowestrealisticdeltalen
+ self._write_debug(msg)
if snapshotlimit < lowestrealisticdeltalen:
+ if debug_search:
+ msg = b"DBG-DELTAS-SEARCH: DISCARDED (snapshot limit)\n"
+ self._write_debug(msg)
return None
if revlog.length(base) < lowestrealisticdeltalen:
+ if debug_search:
+ msg = b"DBG-DELTAS-SEARCH: DISCARDED (prev size)\n"
+ self._write_debug(msg)
return None
header, data = revlog.compress(delta)
deltalen = len(header) + len(data)
@@ -1084,6 +1103,17 @@
if revinfo.flags & REVIDX_RAWTEXT_CHANGING_FLAGS:
return self._fullsnapshotinfo(fh, revinfo, target_rev)
+ if self._write_debug is not None:
+ start = util.timer()
+
+ debug_search = self._write_debug is not None and self._debug_search
+
+ # count the number of different deltas we tried (for debug purposes)
+ dbg_try_count = 0
+ # count the number of "search round" we did. (for debug purpose)
+ dbg_try_rounds = 0
+ dbg_type = b'unknown'
+
cachedelta = revinfo.cachedelta
p1 = revinfo.p1
p2 = revinfo.p2
@@ -1091,25 +1121,114 @@
deltainfo = None
p1r, p2r = revlog.rev(p1), revlog.rev(p2)
+
+ if self._write_debug is not None:
+ if p1r != nullrev:
+ p1_chain_len = revlog._chaininfo(p1r)[0]
+ else:
+ p1_chain_len = -1
+ if p2r != nullrev:
+ p2_chain_len = revlog._chaininfo(p2r)[0]
+ else:
+ p2_chain_len = -1
+ if debug_search:
+ msg = b"DBG-DELTAS-SEARCH: SEARCH rev=%d\n"
+ msg %= target_rev
+ self._write_debug(msg)
+
groups = _candidategroups(
self.revlog, revinfo.textlen, p1r, p2r, cachedelta
)
candidaterevs = next(groups)
while candidaterevs is not None:
+ dbg_try_rounds += 1
+ if debug_search:
+ prev = None
+ if deltainfo is not None:
+ prev = deltainfo.base
+
+ if p1 in candidaterevs or p2 in candidaterevs:
+ round_type = b"parents"
+ elif prev is not None and all(c < prev for c in candidaterevs):
+ round_type = b"refine-down"
+ elif prev is not None and all(c > prev for c in candidaterevs):
+ round_type = b"refine-up"
+ else:
+ round_type = b"search-down"
+ msg = b"DBG-DELTAS-SEARCH: ROUND #%d - %d candidates - %s\n"
+ msg %= (dbg_try_rounds, len(candidaterevs), round_type)
+ self._write_debug(msg)
nominateddeltas = []
if deltainfo is not None:
+ if debug_search:
+ msg = (
+ b"DBG-DELTAS-SEARCH: CONTENDER: rev=%d - length=%d\n"
+ )
+ msg %= (deltainfo.base, deltainfo.deltalen)
+ self._write_debug(msg)
# if we already found a good delta,
# challenge it against refined candidates
nominateddeltas.append(deltainfo)
for candidaterev in candidaterevs:
+ if debug_search:
+ msg = b"DBG-DELTAS-SEARCH: CANDIDATE: rev=%d\n"
+ msg %= candidaterev
+ self._write_debug(msg)
+ candidate_type = None
+ if candidaterev == p1:
+ candidate_type = b"p1"
+ elif candidaterev == p2:
+ candidate_type = b"p2"
+ elif self.revlog.issnapshot(candidaterev):
+ candidate_type = b"snapshot-%d"
+ candidate_type %= self.revlog.snapshotdepth(
+ candidaterev
+ )
+
+ if candidate_type is not None:
+ msg = b"DBG-DELTAS-SEARCH: type=%s\n"
+ msg %= candidate_type
+ self._write_debug(msg)
+ msg = b"DBG-DELTAS-SEARCH: size=%d\n"
+ msg %= self.revlog.length(candidaterev)
+ self._write_debug(msg)
+ msg = b"DBG-DELTAS-SEARCH: base=%d\n"
+ msg %= self.revlog.deltaparent(candidaterev)
+ self._write_debug(msg)
if candidaterev in excluded_bases:
+ if debug_search:
+ msg = b"DBG-DELTAS-SEARCH: EXCLUDED\n"
+ self._write_debug(msg)
continue
if candidaterev >= target_rev:
+ if debug_search:
+ msg = b"DBG-DELTAS-SEARCH: TOO-HIGH\n"
+ self._write_debug(msg)
continue
+ dbg_try_count += 1
+
+ if debug_search:
+ delta_start = util.timer()
candidatedelta = self._builddeltainfo(revinfo, candidaterev, fh)
+ if debug_search:
+ delta_end = util.timer()
+ msg = b"DBG-DELTAS-SEARCH: delta-search-time=%f\n"
+ msg %= delta_end - delta_start
+ self._write_debug(msg)
if candidatedelta is not None:
if isgooddeltainfo(self.revlog, candidatedelta, revinfo):
+ if debug_search:
+ msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (GOOD)\n"
+ msg %= candidatedelta.deltalen
+ self._write_debug(msg)
nominateddeltas.append(candidatedelta)
+ elif debug_search:
+ msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (BAD)\n"
+ msg %= candidatedelta.deltalen
+ self._write_debug(msg)
+ elif debug_search:
+ msg = b"DBG-DELTAS-SEARCH: NO-DELTA\n"
+ self._write_debug(msg)
if nominateddeltas:
deltainfo = min(nominateddeltas, key=lambda x: x.deltalen)
if deltainfo is not None:
@@ -1118,7 +1237,73 @@
candidaterevs = next(groups)
if deltainfo is None:
+ dbg_type = b"full"
deltainfo = self._fullsnapshotinfo(fh, revinfo, target_rev)
+ elif deltainfo.snapshotdepth: # pytype: disable=attribute-error
+ dbg_type = b"snapshot"
+ else:
+ dbg_type = b"delta"
+
+ if self._write_debug is not None:
+ end = util.timer()
+ dbg = {
+ 'duration': end - start,
+ 'revision': target_rev,
+ 'search_round_count': dbg_try_rounds,
+ 'delta_try_count': dbg_try_count,
+ 'type': dbg_type,
+ 'p1-chain-len': p1_chain_len,
+ 'p2-chain-len': p2_chain_len,
+ }
+ if (
+ deltainfo.snapshotdepth # pytype: disable=attribute-error
+ is not None
+ ):
+ dbg[
+ 'snapshot-depth'
+ ] = deltainfo.snapshotdepth # pytype: disable=attribute-error
+ else:
+ dbg['snapshot-depth'] = 0
+ target_revlog = b"UNKNOWN"
+ target_type = self.revlog.target[0]
+ target_key = self.revlog.target[1]
+ if target_type == KIND_CHANGELOG:
+ target_revlog = b'CHANGELOG:'
+ elif target_type == KIND_MANIFESTLOG:
+ target_revlog = b'MANIFESTLOG:'
+ if target_key:
+ target_revlog += b'%s:' % target_key
+ elif target_type == KIND_FILELOG:
+ target_revlog = b'FILELOG:'
+ if target_key:
+ target_revlog += b'%s:' % target_key
+ dbg['target-revlog'] = target_revlog
+
+ msg = (
+ b"DBG-DELTAS:"
+ b" %-12s"
+ b" rev=%d:"
+ b" search-rounds=%d"
+ b" try-count=%d"
+ b" - delta-type=%-6s"
+ b" snap-depth=%d"
+ b" - p1-chain-length=%d"
+ b" p2-chain-length=%d"
+ b" - duration=%f"
+ b"\n"
+ )
+ msg %= (
+ dbg["target-revlog"],
+ dbg["revision"],
+ dbg["search_round_count"],
+ dbg["delta_try_count"],
+ dbg["type"],
+ dbg["snapshot-depth"],
+ dbg["p1-chain-len"],
+ dbg["p2-chain-len"],
+ dbg["duration"],
+ )
+ self._write_debug(msg)
return deltainfo
--- a/mercurial/revlogutils/docket.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlogutils/docket.py Thu Jun 16 15:28:54 2022 +0200
@@ -15,9 +15,7 @@
#
# * a data file, containing variable width data for these revisions,
-from __future__ import absolute_import
-import errno
import os
import random
import struct
@@ -26,7 +24,6 @@
encoding,
error,
node,
- pycompat,
util,
)
@@ -53,16 +50,9 @@
try:
with open(stable_docket_file, mode='rb') as f:
seed = f.read().strip()
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
seed = b'04' # chosen by a fair dice roll. garanteed to be random
- if pycompat.ispy3:
- iter_seed = iter(seed)
- else:
- # pytype: disable=wrong-arg-types
- iter_seed = (ord(c) for c in seed)
- # pytype: enable=wrong-arg-types
+ iter_seed = iter(seed)
# some basic circular sum hashing on 64 bits
int_seed = 0
low_mask = int('1' * 35, 2)
@@ -71,10 +61,7 @@
low_part = (int_seed & low_mask) << 28
int_seed = high_part + low_part + i
r = random.Random()
- if pycompat.ispy3:
- r.seed(int_seed, version=1)
- else:
- r.seed(int_seed)
+ r.seed(int_seed, version=1)
# once we drop python 3.8 support we can simply use r.randbytes
raw = r.getrandbits(id_size * 4)
assert id_size == 8
@@ -109,7 +96,7 @@
S_OLD_UID = struct.Struct('>BL')
-class RevlogDocket(object):
+class RevlogDocket:
"""metadata associated with revlog"""
def __init__(
--- a/mercurial/revlogutils/flagutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlogutils/flagutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from ..i18n import _
--- a/mercurial/revlogutils/nodemap.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlogutils/nodemap.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,9 +6,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import re
import struct
@@ -84,11 +82,8 @@
data = b''
else:
data = fd.read(data_length)
- except (IOError, OSError) as e:
- if e.errno == errno.ENOENT:
- return None
- else:
- raise
+ except FileNotFoundError:
+ return None
if len(data) < data_length:
return None
return docket, data
@@ -114,7 +109,7 @@
tr.addfinalize(callback_id, lambda tr: persist_nodemap(tr, revlog))
-class _NoTransaction(object):
+class _NoTransaction:
"""transaction like object to update the nodemap outside a transaction"""
def __init__(self):
@@ -305,7 +300,7 @@
S_HEADER = struct.Struct(">BQQQQ")
-class NodeMapDocket(object):
+class NodeMapDocket:
"""metadata associated with persistent nodemap data
The persistent data may come from disk or be on their way to disk.
--- a/mercurial/revlogutils/randomaccessfile.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlogutils/randomaccessfile.py Thu Jun 16 15:28:54 2022 +0200
@@ -23,7 +23,7 @@
return (n & (n - 1) == 0) and n != 0
-class randomaccessfile(object):
+class randomaccessfile:
"""Accessing arbitrary chuncks of data within a file, with some caching"""
def __init__(
--- a/mercurial/revlogutils/revlogv0.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlogutils/revlogv0.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from ..node import sha1nodeconstants
@@ -16,7 +15,6 @@
from .. import (
error,
node,
- pycompat,
revlogutils,
util,
)
@@ -78,7 +76,7 @@
def __delitem__(self, i):
if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
raise ValueError(b"deleting slices only supports a:-1 with step 1")
- for r in pycompat.xrange(i.start, len(self)):
+ for r in range(i.start, len(self)):
del self._nodemap[self[r][7]]
super(revlogoldindex, self).__delitem__(i)
--- a/mercurial/revlogutils/sidedata.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revlogutils/sidedata.py Thu Jun 16 15:28:54 2022 +0200
@@ -30,7 +30,6 @@
the concept.
"""
-from __future__ import absolute_import
import collections
import struct
--- a/mercurial/revset.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revset.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,8 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
+
+import binascii
import re
from .i18n import _
@@ -595,7 +595,7 @@
bms.add(repo[bmrev].rev())
else:
matchrevs = set()
- for name, bmrev in pycompat.iteritems(repo._bookmarks):
+ for name, bmrev in repo._bookmarks.items():
if matcher(name):
matchrevs.add(bmrev)
for bmrev in matchrevs:
@@ -1709,7 +1709,7 @@
)
namespaces.add(repo.names[pattern])
else:
- for name, ns in pycompat.iteritems(repo.names):
+ for name, ns in repo.names.items():
if matcher(name):
namespaces.add(ns)
@@ -1731,7 +1731,7 @@
rn = repo.changelog.rev(bin(n))
except error.WdirUnsupported:
rn = wdirrev
- except (LookupError, TypeError):
+ except (binascii.Error, LookupError):
rn = None
else:
try:
@@ -2806,7 +2806,7 @@
def loadpredicate(ui, extname, registrarobj):
"""Load revset predicates from specified registrarobj"""
- for name, func in pycompat.iteritems(registrarobj._table):
+ for name, func in registrarobj._table.items():
symbols[name] = func
if func._safe:
safesymbols.add(name)
--- a/mercurial/revsetlang.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/revsetlang.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import string
@@ -76,7 +75,7 @@
+ pycompat.sysbytes(string.digits)
+ b'._@'
)
-) | set(map(pycompat.bytechr, pycompat.xrange(128, 256)))
+) | set(map(pycompat.bytechr, range(128, 256)))
# default set of valid characters for non-initial letters of symbols
_symletters = _syminitletters | set(pycompat.iterbytestr(b'-/'))
@@ -613,7 +612,7 @@
tree = _aliasrules.expand(aliases, tree)
# warn about problematic (but not referred) aliases
if warn is not None:
- for name, alias in sorted(pycompat.iteritems(aliases)):
+ for name, alias in sorted(aliases.items()):
if alias.error and not alias.warned:
warn(_(b'warning: %s\n') % (alias.error))
alias.warned = True
--- a/mercurial/rewriteutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/rewriteutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import re
--- a/mercurial/scmposix.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/scmposix.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import array
import errno
import fcntl
--- a/mercurial/scmutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/scmutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,8 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+import binascii
import errno
import glob
import os
@@ -63,7 +63,7 @@
@attr.s(slots=True, repr=False)
-class status(object):
+class status:
"""Struct with a list of files per status.
The 'deleted', 'unknown' and 'ignored' properties are only
@@ -109,7 +109,7 @@
del subpaths[subpath]
missing.add(subpath)
- for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
+ for subpath, ctx in sorted(subpaths.items()):
yield subpath, ctx.sub(subpath)
# Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
@@ -228,7 +228,7 @@
except (AttributeError, IndexError):
# it might be anything, for example a string
reason = inst.reason
- if isinstance(reason, pycompat.unicode):
+ if isinstance(reason, str):
# SSLError of Python 2.7.9 contains a unicode
reason = encoding.unitolocal(reason)
ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
@@ -324,7 +324,7 @@
return abort, warn
-class casecollisionauditor(object):
+class casecollisionauditor:
def __init__(self, ui, abort, dirstate):
self._ui = ui
self._abort = abort
@@ -640,7 +640,7 @@
return repo[rev]
except error.FilteredLookupError:
raise
- except (TypeError, LookupError):
+ except (binascii.Error, LookupError):
pass
# look up bookmarks through the name interface
@@ -800,7 +800,7 @@
stopiteration = False
for windowsize in increasingwindows():
nrevs = []
- for i in pycompat.xrange(windowsize):
+ for i in range(windowsize):
rev = next(it, None)
if rev is None:
stopiteration = True
@@ -1020,7 +1020,7 @@
return origvfs.join(filepath)
-class _containsnode(object):
+class _containsnode:
"""proxy __contains__(node) to container.__contains__ which accepts revs"""
def __init__(self, repo, revcontainer):
@@ -1337,7 +1337,7 @@
ignored=False,
full=False,
)
- for abs, st in pycompat.iteritems(walkresults):
+ for abs, st in walkresults.items():
entry = dirstate.get_entry(abs)
if (not entry.any_tracked) and audit_path.check(abs):
unknown.append(abs)
@@ -1384,7 +1384,7 @@
with repo.wlock():
wctx.forget(deleted)
wctx.add(unknown)
- for new, old in pycompat.iteritems(renames):
+ for new, old in renames.items():
wctx.copy(old, new)
@@ -1510,12 +1510,9 @@
# Merge old parent and old working dir copies
oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
oldcopies.update(copies)
- copies = {
- dst: oldcopies.get(src, src)
- for dst, src in pycompat.iteritems(oldcopies)
- }
+ copies = {dst: oldcopies.get(src, src) for dst, src in oldcopies.items()}
# Adjust the dirstate copies
- for dst, src in pycompat.iteritems(copies):
+ for dst, src in copies.items():
if src not in newctx or dst in newctx or not ds.get_entry(dst).added:
src = None
ds.copy(src, dst)
@@ -1571,7 +1568,7 @@
fp.write(b"%s\n" % r)
-class filecachesubentry(object):
+class filecachesubentry:
def __init__(self, path, stat):
self.path = path
self.cachestat = None
@@ -1622,12 +1619,11 @@
def stat(path):
try:
return util.cachestat(path)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
-class filecacheentry(object):
+class filecacheentry:
def __init__(self, paths, stat=True):
self._entries = []
for path in paths:
@@ -1645,7 +1641,7 @@
entry.refresh()
-class filecache(object):
+class filecache:
"""A property like decorator that tracks files under .hg/ for updates.
On first access, the files defined as arguments are stat()ed and the
@@ -1802,7 +1798,7 @@
return data
-class progress(object):
+class progress:
def __init__(self, ui, updatebar, topic, unit=b"", total=None):
self.ui = ui
self.pos = 0
@@ -1867,7 +1863,7 @@
return ui.configbool(b'format', b'generaldelta')
-class simplekeyvaluefile(object):
+class simplekeyvaluefile:
"""A simple file with key=value lines
Keys must be alphanumerics and start with a letter, values must not
--- a/mercurial/scmwindows.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/scmwindows.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import os
from . import (
@@ -54,7 +52,11 @@
# next look for a system rcpath in the registry
value = util.lookupreg(
- b'SOFTWARE\\Mercurial', None, winreg.HKEY_LOCAL_MACHINE
+ # pytype: disable=module-attr
+ b'SOFTWARE\\Mercurial',
+ None,
+ winreg.HKEY_LOCAL_MACHINE
+ # pytype: enable=module-attr
)
if value and isinstance(value, bytes):
value = util.localpath(value)
--- a/mercurial/server.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/server.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
@@ -93,7 +92,7 @@
runargs.append(b'--daemon-postexec=unlink:%s' % lockpath)
# Don't pass --cwd to the child process, because we've already
# changed directory.
- for i in pycompat.xrange(1, len(runargs)):
+ for i in range(1, len(runargs)):
if runargs[i].startswith(b'--cwd='):
del runargs[i]
break
--- a/mercurial/setdiscovery.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/setdiscovery.py Thu Jun 16 15:28:54 2022 +0200
@@ -40,7 +40,6 @@
classified with it (since all ancestors or descendants will be marked as well).
"""
-from __future__ import absolute_import
import collections
import random
@@ -107,7 +106,7 @@
return set(sample[:desiredlen])
-class partialdiscovery(object):
+class partialdiscovery:
"""an object representing ongoing discovery
Feed with data from the remote repository, this object keep track of the
@@ -299,6 +298,9 @@
samplegrowth = float(ui.config(b'devel', b'discovery.grow-sample.rate'))
+ if audit is not None:
+ audit[b'total-queries'] = 0
+
start = util.timer()
roundtrips = 0
@@ -377,6 +379,8 @@
roundtrips += 1
with remote.commandexecutor() as e:
fheads = e.callcommand(b'heads', {})
+ if audit is not None:
+ audit[b'total-queries'] += len(sample)
fknown = e.callcommand(
b'known',
{
@@ -479,6 +483,8 @@
sample = list(sample)
with remote.commandexecutor() as e:
+ if audit is not None:
+ audit[b'total-queries'] += len(sample)
yesno = e.callcommand(
b'known',
{
--- a/mercurial/shelve.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/shelve.py Thu Jun 16 15:28:54 2022 +0200
@@ -20,10 +20,8 @@
shelved change has a distinct name. For details, see the help for "hg
shelve".
"""
-from __future__ import absolute_import
import collections
-import errno
import itertools
import stat
@@ -69,7 +67,7 @@
shelveuser = b'shelve@localhost'
-class ShelfDir(object):
+class ShelfDir:
def __init__(self, repo, for_backups=False):
if for_backups:
self.vfs = vfsmod.vfs(repo.vfs.join(backupdir))
@@ -83,9 +81,7 @@
"""return all shelves in repo as list of (time, name)"""
try:
names = self.vfs.listdir()
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return []
info = []
seen = set()
@@ -102,7 +98,7 @@
return sorted(info, reverse=True)
-class Shelf(object):
+class Shelf:
"""Represents a shelf, including possibly multiple files storing it.
Old shelves will have a .patch and a .hg file. Newer shelves will
@@ -214,7 +210,7 @@
self.vfs.tryunlink(self.name + b'.' + ext)
-class shelvedstate(object):
+class shelvedstate:
"""Handle persistence during unshelving operations.
Handles saving and restoring a shelved state. Ensures that different
@@ -239,7 +235,7 @@
d[b'nodestoremove'] = [
bin(h) for h in d[b'nodestoremove'].split(b' ')
]
- except (ValueError, TypeError, KeyError) as err:
+ except (ValueError, KeyError) as err:
raise error.CorruptedState(stringutil.forcebytestr(err))
@classmethod
@@ -725,9 +721,7 @@
state = shelvedstate.load(repo)
if opts.get(b'keep') is None:
opts[b'keep'] = state.keep
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
cmdutil.wrongtooltocontinue(repo, _(b'unshelve'))
except error.CorruptedState as err:
ui.debug(pycompat.bytestr(err) + b'\n')
@@ -1011,8 +1005,7 @@
tr.close()
nodestoremove = [
- repo.changelog.node(rev)
- for rev in pycompat.xrange(oldtiprev, len(repo))
+ repo.changelog.node(rev) for rev in range(oldtiprev, len(repo))
]
shelvedstate.save(
repo,
--- a/mercurial/similar.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/similar.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,12 +5,10 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from . import (
mdiff,
- pycompat,
)
@@ -98,7 +96,7 @@
copies[a] = (r, myscore)
progress.complete()
- for dest, v in pycompat.iteritems(copies):
+ for dest, v in copies.items():
source, bscore = v
yield source, dest, bscore
--- a/mercurial/simplemerge.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/simplemerge.py Thu Jun 16 15:28:54 2022 +0200
@@ -16,13 +16,11 @@
# mbp: "you know that thing where cvs gives you conflict markers?"
# s: "i hate that."
-from __future__ import absolute_import
from .i18n import _
from . import (
error,
mdiff,
- pycompat,
)
from .utils import stringutil
@@ -54,16 +52,14 @@
"""Compare a[astart:aend] == b[bstart:bend], without slicing."""
if (aend - astart) != (bend - bstart):
return False
- for ia, ib in zip(
- pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
- ):
+ for ia, ib in zip(range(astart, aend), range(bstart, bend)):
if a[ia] != b[ib]:
return False
else:
return True
-class Merge3Text(object):
+class Merge3Text:
"""3-way merge of texts.
Given strings BASE, OTHER, THIS, tries to produce a combined text
@@ -469,7 +465,7 @@
return lines
-class MergeInput(object):
+class MergeInput:
def __init__(self, fctx, label=None, label_detail=None):
self.fctx = fctx
self.label = label
@@ -491,6 +487,9 @@
self._text = self.fctx.decodeddata()
return self._text
+ def set_text(self, text):
+ self._text = text
+
def simplemerge(
local,
--- a/mercurial/smartset.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/smartset.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .pycompat import getattr
from . import (
@@ -21,7 +20,7 @@
return pycompat.sysbytes(type(o).__name__).lstrip(b'_')
-class abstractsmartset(object):
+class abstractsmartset:
def __nonzero__(self):
"""True if the smartset is not empty"""
raise NotImplementedError()
@@ -153,11 +152,11 @@
# but start > stop is allowed, which should be an empty set.
ys = []
it = iter(self)
- for x in pycompat.xrange(start):
+ for x in range(start):
y = next(it, None)
if y is None:
break
- for x in pycompat.xrange(stop - start):
+ for x in range(stop - start):
y = next(it, None)
if y is None:
break
@@ -993,7 +992,7 @@
"""Duck type for baseset class which represents a range of revisions and
can work lazily and without having all the range in memory
- Note that spanset(x, y) behave almost like xrange(x, y) except for two
+ Note that spanset(x, y) behave almost like range(x, y) except for two
notable points:
- when x < y it will be automatically descending,
- revision filtered with this repoview will be skipped.
@@ -1031,13 +1030,13 @@
return self.fastdesc()
def fastasc(self):
- iterrange = pycompat.xrange(self._start, self._end)
+ iterrange = range(self._start, self._end)
if self._hiddenrevs:
return self._iterfilter(iterrange)
return iter(iterrange)
def fastdesc(self):
- iterrange = pycompat.xrange(self._end - 1, self._start - 1, -1)
+ iterrange = range(self._end - 1, self._start - 1, -1)
if self._hiddenrevs:
return self._iterfilter(iterrange)
return iter(iterrange)
--- a/mercurial/sparse.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/sparse.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
@@ -31,6 +30,16 @@
enabled = False
+def use_sparse(repo):
+ if getattr(repo, "_has_sparse", False):
+ # When enabling sparse the first time we need it to be enabled before
+ # actually enabling it. This hack could be avoided if the code was
+ # improved further, however this is an improvement over the previously
+ # existing global variable.
+ return True
+ return requirements.SPARSE_REQUIREMENT in repo.requirements
+
+
def parseconfig(ui, raw, action):
"""Parse sparse config file content.
@@ -115,7 +124,7 @@
patterns.
"""
# Feature isn't enabled. No-op.
- if not enabled:
+ if not use_sparse(repo):
return set(), set(), set()
raw = repo.vfs.tryread(b'sparse')
@@ -261,7 +270,7 @@
def prunetemporaryincludes(repo):
- if not enabled or not repo.vfs.exists(b'tempsparse'):
+ if not use_sparse(repo) or not repo.vfs.exists(b'tempsparse'):
return
s = repo.status()
@@ -314,7 +323,7 @@
``includetemp`` indicates whether to use the temporary sparse profile.
"""
# If sparse isn't enabled, sparse matcher matches everything.
- if not enabled:
+ if not use_sparse(repo):
return matchmod.always()
if not revs or revs == [None]:
@@ -368,7 +377,7 @@
def filterupdatesactions(repo, wctx, mctx, branchmerge, mresult):
"""Filter updates to only lay out files that match the sparse rules."""
- if not enabled:
+ if not use_sparse(repo):
return
oldrevs = [pctx.rev() for pctx in wctx.parents()]
@@ -555,7 +564,7 @@
)
# Check for files that were only in the dirstate.
- for file, state in pycompat.iteritems(dirstate):
+ for file, state in dirstate.items():
if not file in files:
old = origsparsematch(file)
new = sparsematch(file)
--- a/mercurial/sshpeer.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/sshpeer.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import re
import uuid
@@ -48,7 +47,7 @@
display(_(b"remote: "), l, b'\n')
-class doublepipe(object):
+class doublepipe:
"""Operate a side-channel pipe in addition of a main one
The side-channel pipe contains server output to be forwarded to the user
@@ -473,10 +472,10 @@
else:
wireargs[k] = args[k]
del args[k]
- for k, v in sorted(pycompat.iteritems(wireargs)):
+ for k, v in sorted(wireargs.items()):
self._pipeo.write(b"%s %d\n" % (k, len(v)))
if isinstance(v, dict):
- for dk, dv in pycompat.iteritems(v):
+ for dk, dv in v.items():
self._pipeo.write(b"%s %d\n" % (dk, len(dv)))
self._pipeo.write(dv)
else:
--- a/mercurial/sslutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/sslutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,12 +7,12 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import hashlib
import os
import re
import ssl
+import warnings
from .i18n import _
from .pycompat import getattr
@@ -113,16 +113,18 @@
minimumprotocol = ui.config(b'hostsecurity', key, minimumprotocol)
validateprotocol(minimumprotocol, key)
+ ciphers = ui.config(b'hostsecurity', b'ciphers')
+ ciphers = ui.config(b'hostsecurity', b'%s:ciphers' % bhostname, ciphers)
+
# If --insecure is used, we allow the use of TLS 1.0 despite config options.
# We always print a "connection security to %s is disabled..." message when
# --insecure is used. So no need to print anything more here.
if ui.insecureconnections:
minimumprotocol = b'tls1.0'
+ if not ciphers:
+ ciphers = b'DEFAULT'
s[b'minimumprotocol'] = minimumprotocol
-
- ciphers = ui.config(b'hostsecurity', b'ciphers')
- ciphers = ui.config(b'hostsecurity', b'%s:ciphers' % bhostname, ciphers)
s[b'ciphers'] = ciphers
# Look for fingerprints in [hostsecurity] section. Value is a list
@@ -309,12 +311,43 @@
# bundle with a specific CA cert removed. If the system/default CA bundle
# is loaded and contains that removed CA, you've just undone the user's
# choice.
- #
- # Despite its name, PROTOCOL_SSLv23 selects the highest protocol that both
- # ends support, including TLS protocols. commonssloptions() restricts the
- # set of allowed protocols.
- sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
- sslcontext.options |= commonssloptions(settings[b'minimumprotocol'])
+
+ if util.safehasattr(ssl, 'PROTOCOL_TLS_CLIENT'):
+ # python 3.7+
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ minimumprotocol = settings[b'minimumprotocol']
+ if minimumprotocol == b'tls1.0':
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ 'ignore',
+ 'ssl.TLSVersion.TLSv1 is deprecated',
+ DeprecationWarning,
+ )
+ sslcontext.minimum_version = ssl.TLSVersion.TLSv1
+ elif minimumprotocol == b'tls1.1':
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ 'ignore',
+ 'ssl.TLSVersion.TLSv1_1 is deprecated',
+ DeprecationWarning,
+ )
+ sslcontext.minimum_version = ssl.TLSVersion.TLSv1_1
+ elif minimumprotocol == b'tls1.2':
+ sslcontext.minimum_version = ssl.TLSVersion.TLSv1_2
+ else:
+ raise error.Abort(_(b'this should not happen'))
+ # Prevent CRIME.
+ # There is no guarantee this attribute is defined on the module.
+ sslcontext.options |= getattr(ssl, 'OP_NO_COMPRESSION', 0)
+ else:
+ # Despite its name, PROTOCOL_SSLv23 selects the highest protocol that both
+ # ends support, including TLS protocols. commonssloptions() restricts the
+ # set of allowed protocols.
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ sslcontext.options |= commonssloptions(settings[b'minimumprotocol'])
+
+ # We check the hostname ourselves in _verifycert
+ sslcontext.check_hostname = False
sslcontext.verify_mode = settings[b'verifymode']
if settings[b'ciphers']:
@@ -392,7 +425,10 @@
# outright. Hopefully the reason for this error is that we require
# TLS 1.1+ and the server only supports TLS 1.0. Whatever the
# reason, try to emit an actionable warning.
- if e.reason == 'UNSUPPORTED_PROTOCOL':
+ if e.reason in (
+ 'UNSUPPORTED_PROTOCOL',
+ 'TLSV1_ALERT_PROTOCOL_VERSION',
+ ):
# We attempted TLS 1.0+.
if settings[b'minimumprotocol'] == b'tls1.0':
# We support more than just TLS 1.0+. If this happens,
@@ -510,44 +546,87 @@
_(b'referenced certificate file (%s) does not exist') % f
)
- # Despite its name, PROTOCOL_SSLv23 selects the highest protocol that both
- # ends support, including TLS protocols. commonssloptions() restricts the
- # set of allowed protocols.
- protocol = ssl.PROTOCOL_SSLv23
- options = commonssloptions(b'tls1.0')
+ if util.safehasattr(ssl, 'PROTOCOL_TLS_SERVER'):
+ # python 3.7+
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ sslcontext.options |= getattr(ssl, 'OP_NO_COMPRESSION', 0)
- # This config option is intended for use in tests only. It is a giant
- # footgun to kill security. Don't define it.
- exactprotocol = ui.config(b'devel', b'serverexactprotocol')
- if exactprotocol == b'tls1.0':
- if b'tls1.0' not in supportedprotocols:
- raise error.Abort(_(b'TLS 1.0 not supported by this Python'))
- protocol = ssl.PROTOCOL_TLSv1
- elif exactprotocol == b'tls1.1':
- if b'tls1.1' not in supportedprotocols:
- raise error.Abort(_(b'TLS 1.1 not supported by this Python'))
- protocol = ssl.PROTOCOL_TLSv1_1
- elif exactprotocol == b'tls1.2':
- if b'tls1.2' not in supportedprotocols:
- raise error.Abort(_(b'TLS 1.2 not supported by this Python'))
- protocol = ssl.PROTOCOL_TLSv1_2
- elif exactprotocol:
- raise error.Abort(
- _(b'invalid value for serverexactprotocol: %s') % exactprotocol
- )
+ # This config option is intended for use in tests only. It is a giant
+ # footgun to kill security. Don't define it.
+ exactprotocol = ui.config(b'devel', b'serverexactprotocol')
+ if exactprotocol == b'tls1.0':
+ if b'tls1.0' not in supportedprotocols:
+ raise error.Abort(_(b'TLS 1.0 not supported by this Python'))
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ 'ignore',
+ 'ssl.TLSVersion.TLSv1 is deprecated',
+ DeprecationWarning,
+ )
+ sslcontext.minimum_version = ssl.TLSVersion.TLSv1
+ sslcontext.maximum_version = ssl.TLSVersion.TLSv1
+ elif exactprotocol == b'tls1.1':
+ if b'tls1.1' not in supportedprotocols:
+ raise error.Abort(_(b'TLS 1.1 not supported by this Python'))
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ 'ignore',
+ 'ssl.TLSVersion.TLSv1_1 is deprecated',
+ DeprecationWarning,
+ )
+ sslcontext.minimum_version = ssl.TLSVersion.TLSv1_1
+ sslcontext.maximum_version = ssl.TLSVersion.TLSv1_1
+ elif exactprotocol == b'tls1.2':
+ if b'tls1.2' not in supportedprotocols:
+ raise error.Abort(_(b'TLS 1.2 not supported by this Python'))
+ sslcontext.minimum_version = ssl.TLSVersion.TLSv1_2
+ sslcontext.maximum_version = ssl.TLSVersion.TLSv1_2
+ elif exactprotocol:
+ raise error.Abort(
+ _(b'invalid value for serverexactprotocol: %s') % exactprotocol
+ )
+ else:
+ # Despite its name, PROTOCOL_SSLv23 selects the highest protocol that both
+ # ends support, including TLS protocols. commonssloptions() restricts the
+ # set of allowed protocols.
+ protocol = ssl.PROTOCOL_SSLv23
+ options = commonssloptions(b'tls1.0')
- # We /could/ use create_default_context() here since it doesn't load
- # CAs when configured for client auth. However, it is hard-coded to
- # use ssl.PROTOCOL_SSLv23 which may not be appropriate here.
- sslcontext = ssl.SSLContext(protocol)
- sslcontext.options |= options
+ # This config option is intended for use in tests only. It is a giant
+ # footgun to kill security. Don't define it.
+ exactprotocol = ui.config(b'devel', b'serverexactprotocol')
+ if exactprotocol == b'tls1.0':
+ if b'tls1.0' not in supportedprotocols:
+ raise error.Abort(_(b'TLS 1.0 not supported by this Python'))
+ protocol = ssl.PROTOCOL_TLSv1
+ elif exactprotocol == b'tls1.1':
+ if b'tls1.1' not in supportedprotocols:
+ raise error.Abort(_(b'TLS 1.1 not supported by this Python'))
+ protocol = ssl.PROTOCOL_TLSv1_1
+ elif exactprotocol == b'tls1.2':
+ if b'tls1.2' not in supportedprotocols:
+ raise error.Abort(_(b'TLS 1.2 not supported by this Python'))
+ protocol = ssl.PROTOCOL_TLSv1_2
+ elif exactprotocol:
+ raise error.Abort(
+ _(b'invalid value for serverexactprotocol: %s') % exactprotocol
+ )
+
+ # We /could/ use create_default_context() here since it doesn't load
+ # CAs when configured for client auth. However, it is hard-coded to
+ # use ssl.PROTOCOL_SSLv23 which may not be appropriate here.
+ sslcontext = ssl.SSLContext(protocol)
+ sslcontext.options |= options
# Improve forward secrecy.
sslcontext.options |= getattr(ssl, 'OP_SINGLE_DH_USE', 0)
sslcontext.options |= getattr(ssl, 'OP_SINGLE_ECDH_USE', 0)
- # Use the list of more secure ciphers if found in the ssl module.
- if util.safehasattr(ssl, b'_RESTRICTED_SERVER_CIPHERS'):
+ # In tests, allow insecure ciphers
+ # Otherwise, use the list of more secure ciphers if found in the ssl module.
+ if exactprotocol:
+ sslcontext.set_ciphers('DEFAULT')
+ elif util.safehasattr(ssl, b'_RESTRICTED_SERVER_CIPHERS'):
sslcontext.options |= getattr(ssl, 'OP_CIPHER_SERVER_PREFERENCE', 0)
# pytype: disable=module-attr
sslcontext.set_ciphers(ssl._RESTRICTED_SERVER_CIPHERS)
--- a/mercurial/stack.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/stack.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,8 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
def getstack(repo, rev=None):
"""return a sorted smartrev of the stack containing either rev if it is
--- a/mercurial/state.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/state.py Thu Jun 16 15:28:54 2022 +0200
@@ -17,7 +17,6 @@
the data.
"""
-from __future__ import absolute_import
import contextlib
@@ -40,7 +39,7 @@
assert t
-class cmdstate(object):
+class cmdstate:
"""a wrapper class to store the state of commands like `rebase`, `graft`,
`histedit`, `shelve` etc. Extensions can also use this to write state files.
@@ -103,7 +102,7 @@
return self._repo.vfs.exists(self.fname)
-class _statecheck(object):
+class _statecheck:
"""a utility class that deals with multistep operations like graft,
histedit, bisect, update etc and check whether such commands
are in an unfinished conditition or not and return appropriate message
--- a/mercurial/statichttprepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/statichttprepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
@@ -35,7 +34,7 @@
urlreq = util.urlreq
-class httprangereader(object):
+class httprangereader:
def __init__(self, url, opener):
# we assume opener has HTTPRangeHandler
self.url = url
@@ -182,9 +181,7 @@
try:
requirements = set(self.vfs.read(b'requires').splitlines())
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
requirements = set()
# check if it is a non-empty old-style repository
@@ -192,9 +189,7 @@
fp = self.vfs(b"00changelog.i")
fp.read(1)
fp.close()
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
# we do not care about empty old-style repositories here
msg = _(b"'%s' does not appear to be an hg repository") % path
raise error.RepoError(msg)
--- a/mercurial/statprof.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/statprof.py Thu Jun 16 15:28:54 2022 +0200
@@ -101,7 +101,6 @@
main thread's work patterns.
"""
# no-check-code
-from __future__ import absolute_import, division, print_function
import collections
import contextlib
@@ -155,7 +154,7 @@
## Collection data structures
-class ProfileState(object):
+class ProfileState:
def __init__(self, frequency=None):
self.reset(frequency)
self.track = b'cpu'
@@ -203,7 +202,7 @@
state = ProfileState()
-class CodeSite(object):
+class CodeSite:
cache = {}
__slots__ = ('path', 'lineno', 'function', 'source')
@@ -261,7 +260,7 @@
return '%s:%s' % (self.filename(), self.function)
-class Sample(object):
+class Sample:
__slots__ = ('stack', 'time')
def __init__(self, stack, time):
@@ -435,7 +434,7 @@
## Reporting API
-class SiteStats(object):
+class SiteStats:
def __init__(self, site):
self.site = site
self.selfcount = 0
@@ -475,7 +474,7 @@
if i == 0:
sitestat.addself()
- return [s for s in pycompat.itervalues(stats)]
+ return [s for s in stats.values()]
class DisplayFormats:
@@ -574,7 +573,7 @@
# compute sums for each function
functiondata = []
- for fname, sitestats in pycompat.iteritems(grouped):
+ for fname, sitestats in grouped.items():
total_cum_sec = 0
total_self_sec = 0
total_percent = 0
@@ -608,9 +607,7 @@
# only show line numbers for significant locations (>1% time spent)
if stat.selfpercent() > 1:
source = stat.site.getsource(25)
- if sys.version_info.major >= 3 and not isinstance(
- source, bytes
- ):
+ if not isinstance(source, bytes):
source = pycompat.bytestr(source)
stattuple = (
@@ -653,7 +650,7 @@
else:
children[site] = 1
- parents = [(parent, count) for parent, count in pycompat.iteritems(parents)]
+ parents = [(parent, count) for parent, count in parents.items()]
parents.sort(reverse=True, key=lambda x: x[1])
for parent, count in parents:
fp.write(
@@ -697,7 +694,7 @@
)
)
- children = [(child, count) for child, count in pycompat.iteritems(children)]
+ children = [(child, count) for child, count in children.items()]
children.sort(reverse=True, key=lambda x: x[1])
for child, count in children:
fp.write(
@@ -711,7 +708,7 @@
def display_hotpath(data, fp, limit=0.05, **kwargs):
- class HotNode(object):
+ class HotNode:
def __init__(self, site):
self.site = site
self.count = 0
@@ -746,9 +743,7 @@
def _write(node, depth, multiple_siblings):
site = node.site
visiblechildren = [
- c
- for c in pycompat.itervalues(node.children)
- if c.count >= (limit * root.count)
+ c for c in node.children.values() if c.count >= (limit * root.count)
]
if site:
indent = depth * 2 - 1
@@ -784,9 +779,7 @@
)
finalstring = liststring + codestring
- childrensamples = sum(
- [c.count for c in pycompat.itervalues(node.children)]
- )
+ childrensamples = sum([c.count for c in node.children.values()])
# Make frames that performed more than 10% of the operation red
if node.count - childrensamples > (0.1 * root.count):
finalstring = b'\033[91m' + finalstring + b'\033[0m'
@@ -828,7 +821,7 @@
fd, path = pycompat.mkstemp()
with open(path, b"w+") as file:
- for line, count in pycompat.iteritems(lines):
+ for line, count in lines.items():
file.write(b"%s %d\n" % (line, count))
if outputfile is None:
--- a/mercurial/store.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/store.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,9 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import functools
import os
import re
@@ -145,13 +143,13 @@
cmap[xchr(x)] = e + xchr(x).lower()
dmap = {}
- for k, v in pycompat.iteritems(cmap):
+ for k, v in cmap.items():
dmap[v] = k
def decode(s):
i = 0
while i < len(s):
- for l in pycompat.xrange(1, 4):
+ for l in range(1, 4):
try:
yield dmap[s[i : i + l]]
i += l
@@ -162,9 +160,7 @@
raise KeyError
return (
- lambda s: b''.join(
- [cmap[s[c : c + 1]] for c in pycompat.xrange(len(s))]
- ),
+ lambda s: b''.join([cmap[s[c : c + 1]] for c in range(len(s))]),
lambda s: b''.join(list(decode(s))),
)
@@ -201,7 +197,7 @@
'the~07quick~adshot'
"""
xchr = pycompat.bytechr
- cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
+ cmap = {xchr(x): xchr(x) for x in range(127)}
for x in _reserved():
cmap[xchr(x)] = b"~%02x" % x
for x in range(ord(b"A"), ord(b"Z") + 1):
@@ -456,7 +452,7 @@
FILETYPE_OTHER = FILEFLAGS_OTHER
-class basicstore(object):
+class basicstore:
'''base class for local repository stores'''
def __init__(self, path, vfstype):
@@ -602,7 +598,7 @@
return [b'requires', b'00changelog.i'] + [b'store/' + f for f in _data]
-class fncache(object):
+class fncache:
# the filename used to be partially encoded
# hence the encodedir/decodedir dance
def __init__(self, vfs):
@@ -662,7 +658,7 @@
"""make sure there is no empty string in entries"""
if b'' in self.entries:
fp.seek(0)
- for n, line in enumerate(util.iterfile(fp)):
+ for n, line in enumerate(fp):
if not line.rstrip(b'\n'):
t = _(b'invalid entry in fncache, line %d') % (n + 1)
if warn:
@@ -791,9 +787,8 @@
assert t is not None, f
t |= FILEFLAGS_FILELOG
yield t, f, self.getsize(ef)
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
def copylist(self):
d = (
@@ -828,10 +823,7 @@
try:
self.getsize(ef)
return True
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
- # nonexistent entry
+ except FileNotFoundError:
return False
def __contains__(self, path):
--- a/mercurial/streamclone.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/streamclone.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import errno
@@ -427,7 +426,7 @@
with repo.transaction(b'clone'):
with repo.svfs.backgroundclosing(repo.ui, expectedcount=filecount):
- for i in pycompat.xrange(filecount):
+ for i in range(filecount):
# XXX doesn't support '\n' or '\r' in filenames
l = fp.readline()
try:
@@ -517,7 +516,7 @@
nodemap.post_stream_cleanup(repo)
-class streamcloneapplier(object):
+class streamcloneapplier:
"""Class to manage applying streaming clone bundles.
We need to wrap ``applybundlev1()`` in a dedicated type to enable bundle
@@ -559,11 +558,15 @@
@contextlib.contextmanager
def maketempcopies():
"""return a function to temporary copy file"""
+
files = []
+ dst_dir = pycompat.mkdtemp(prefix=b'hg-clone-')
try:
def copy(src):
- fd, dst = pycompat.mkstemp()
+ fd, dst = pycompat.mkstemp(
+ prefix=os.path.basename(src), dir=dst_dir
+ )
os.close(fd)
files.append(dst)
util.copyfiles(src, dst, hardlink=True)
@@ -573,6 +576,7 @@
finally:
for tmp in files:
util.tryunlink(tmp)
+ util.tryrmdir(dst_dir)
def _makemap(repo):
--- a/mercurial/strip.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/strip.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from .i18n import _
from .pycompat import getattr
from . import (
@@ -195,7 +193,7 @@
# a revision we have to only delete the bookmark and not strip
# anything. revsets cannot detect that case.
nodetobookmarks = {}
- for mark, node in pycompat.iteritems(repomarks):
+ for mark, node in repomarks.items():
nodetobookmarks.setdefault(node, []).append(mark)
for marks in nodetobookmarks.values():
if bookmarks.issuperset(marks):
--- a/mercurial/subrepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/subrepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import copy
import errno
@@ -221,7 +220,7 @@
# subrepo classes need to implement the following abstract class:
-class abstractsubrepo(object):
+class abstractsubrepo:
def __init__(self, ctx, path):
"""Initialize abstractsubrepo part
@@ -1771,7 +1770,7 @@
for b in rev2branch[self._state[1]]:
if b.startswith(b'refs/remotes/origin/'):
return True
- for b, revision in pycompat.iteritems(branch2rev):
+ for b, revision in branch2rev.items():
if b.startswith(b'refs/remotes/origin/'):
if self._gitisancestor(self._state[1], revision):
return True
--- a/mercurial/subrepoutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/subrepoutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,9 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import os
import posixpath
import re
@@ -64,9 +62,7 @@
if f in ctx:
try:
data = ctx[f].data()
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
# handle missing subrepo spec files as removed
ui.warn(
_(b"warning: subrepo spec file \'%s\' not found\n")
@@ -103,9 +99,8 @@
% (repo.pathto(b'.hgsubstate'), (i + 1))
)
rev[path] = revision
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
def remap(src):
# type: (bytes) -> bytes
@@ -191,7 +186,7 @@
repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r))
promptssrc = filemerge.partextras(labels)
- for s, l in sorted(pycompat.iteritems(s1)):
+ for s, l in sorted(s1.items()):
a = sa.get(s, nullstate)
ld = l # local state with possible dirty flag for compares
if wctx.sub(s).dirty():
--- a/mercurial/tagmerge.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/tagmerge.py Thu Jun 16 15:28:54 2022 +0200
@@ -71,7 +71,6 @@
# - put blocks whose nodes come all from p2 first
# - write the tag blocks in the sorted order
-from __future__ import absolute_import
from .i18n import _
from . import (
--- a/mercurial/tags.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/tags.py Thu Jun 16 15:28:54 2022 +0200
@@ -10,9 +10,8 @@
# Eventually, it could take care of updating (adding/removing/moving)
# tags too.
-from __future__ import absolute_import
-import errno
+import binascii
import io
from .node import (
@@ -26,7 +25,6 @@
encoding,
error,
match as matchmod,
- pycompat,
scmutil,
util,
)
@@ -243,9 +241,7 @@
'''Read local tags in repo. Update alltags and tagtypes.'''
try:
data = repo.vfs.read(b"localtags")
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
return
# localtags is in the local encoding; re-encode to UTF-8 on
@@ -305,7 +301,7 @@
name = recode(name)
try:
nodebin = bin(nodehex)
- except TypeError:
+ except binascii.Error:
dbg(b"node '%s' is not well formed" % nodehex)
continue
@@ -355,7 +351,7 @@
if tagtype is None:
assert tagtypes is None
- for name, nodehist in pycompat.iteritems(filetags):
+ for name, nodehist in filetags.items():
if name not in alltags:
alltags[name] = nodehist
if tagtype is not None:
@@ -508,7 +504,7 @@
if unknown_entries:
fixed_nodemap = fnodescache.refresh_invalid_nodes(unknown_entries)
- for node, fnode in pycompat.iteritems(fixed_nodemap):
+ for node, fnode in fixed_nodemap.items():
if fnode != repo.nullid:
cachefnode[node] = fnode
@@ -550,7 +546,7 @@
# we keep them in UTF-8 throughout this module. If we converted
# them local encoding on input, we would lose info writing them to
# the cache.
- for (name, (node, hist)) in sorted(pycompat.iteritems(cachetags)):
+ for (name, (node, hist)) in sorted(cachetags.items()):
for n in hist:
cachefile.write(b"%s %s\n" % (hex(n), name))
cachefile.write(b"%s %s\n" % (hex(node), name))
@@ -653,9 +649,7 @@
try:
fp = repo.wvfs(b'.hgtags', b'rb+')
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
fp = repo.wvfs(b'.hgtags', b'ab')
else:
prevtags = fp.read()
@@ -686,7 +680,7 @@
_fnodesmissingrec = b'\xff' * 24
-class hgtagsfnodescache(object):
+class hgtagsfnodescache:
"""Persistent cache mapping revisions to .hgtags filenodes.
The cache is an array of records. Each item in the array corresponds to
--- a/mercurial/templatefilters.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/templatefilters.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
import re
@@ -141,7 +140,7 @@
b = b[: len(a)]
if a == b:
return a
- for i in pycompat.xrange(len(a)):
+ for i in range(len(a)):
if a[i] != b[i]:
return a[:i]
return a
@@ -269,10 +268,7 @@
@templatefilter(b'firstline', intype=bytes)
def firstline(text):
"""Any text. Returns the first line of text."""
- try:
- return text.splitlines(True)[0].rstrip(b'\r\n')
- except IndexError:
- return b''
+ return stringutil.firstline(text)
@templatefilter(b'hex', intype=bytes)
@@ -315,7 +311,7 @@
endswithnewline = text[-1:] == b'\n'
def indenter():
- for i in pycompat.xrange(num_lines):
+ for i in range(num_lines):
l = lines[i]
if l.strip():
yield prefix if i else firstline
@@ -335,7 +331,7 @@
return b'false'
elif obj is True:
return b'true'
- elif isinstance(obj, (int, pycompat.long, float)):
+    elif isinstance(obj, (int, float)):
return pycompat.bytestr(obj)
elif isinstance(obj, bytes):
return b'"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
@@ -347,7 +343,7 @@
out = [
b'"%s": %s'
% (encoding.jsonescape(k, paranoid=paranoid), json(v, paranoid))
- for k, v in sorted(pycompat.iteritems(obj))
+ for k, v in sorted(obj.items())
]
return b'{' + b', '.join(out) + b'}'
elif util.safehasattr(obj, b'__iter__'):
@@ -373,9 +369,7 @@
"""Any text. Returns the input text rendered as a sequence of
XML entities.
"""
- text = pycompat.unicode(
- text, pycompat.sysstr(encoding.encoding), r'replace'
- )
+ text = str(text, pycompat.sysstr(encoding.encoding), r'replace')
return b''.join([b'&#%d;' % ord(c) for c in text])
@@ -549,7 +543,7 @@
def loadfilter(ui, extname, registrarobj):
"""Load template filter from specified registrarobj"""
- for name, func in pycompat.iteritems(registrarobj._table):
+ for name, func in registrarobj._table.items():
filters[name] = func
--- a/mercurial/templatefuncs.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/templatefuncs.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,8 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+import binascii
import re
from .i18n import _
@@ -89,7 +89,7 @@
data.update(
(k, evalfuncarg(context, mapping, v))
- for k, v in pycompat.iteritems(args[b'kwargs'])
+ for k, v in args[b'kwargs'].items()
)
return templateutil.hybriddict(data)
@@ -770,7 +770,7 @@
elif len(hexnode) == hexnodelen:
try:
node = bin(hexnode)
- except TypeError:
+ except binascii.Error:
return hexnode
else:
try:
@@ -911,7 +911,7 @@
def loadfunction(ui, extname, registrarobj):
"""Load template function from specified registrarobj"""
- for name, func in pycompat.iteritems(registrarobj._table):
+ for name, func in registrarobj._table.items():
funcs[name] = func
--- a/mercurial/templatekw.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/templatekw.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from .node import (
@@ -603,7 +602,7 @@
# 'name' for iterating over namespaces, templatename for local reference
return lambda v: {b'name': v, ns.templatename: v}
- for k, ns in pycompat.iteritems(repo.names):
+ for k, ns in repo.names.items():
names = ns.names(repo, ctx.node())
f = _showcompatlist(context, mapping, b'name', names)
namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity)
@@ -686,12 +685,12 @@
d = {b'name': k}
if len(ps) == 1:
d[b'url'] = ps[0].rawloc
- sub_opts = pycompat.iteritems(ps[0].suboptions)
+ sub_opts = ps[0].suboptions.items()
sub_opts = util.sortdict(sorted(sub_opts))
d.update(sub_opts)
path_dict = util.sortdict()
for p in ps:
- sub_opts = util.sortdict(sorted(pycompat.iteritems(p.suboptions)))
+ sub_opts = util.sortdict(sorted(p.suboptions.items()))
path_dict[b'url'] = p.rawloc
path_dict.update(sub_opts)
d[b'urls'] = [path_dict]
@@ -1024,7 +1023,7 @@
def loadkeyword(ui, extname, registrarobj):
"""Load template keyword from specified registrarobj"""
- for name, func in pycompat.iteritems(registrarobj._table):
+ for name, func in registrarobj._table.items():
keywords[name] = func
--- a/mercurial/templater.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/templater.py Thu Jun 16 15:28:54 2022 +0200
@@ -65,7 +65,6 @@
operation.
"""
-from __future__ import absolute_import, print_function
import abc
import os
@@ -531,8 +530,7 @@
def compiledict(xs):
return util.sortdict(
- (k, compileexp(x, context, curmethods))
- for k, x in pycompat.iteritems(xs)
+ (k, compileexp(x, context, curmethods)) for k, x in xs.items()
)
def compilelist(xs):
@@ -628,7 +626,7 @@
return s[1:-1]
-class resourcemapper(object): # pytype: disable=ignored-metaclass
+class resourcemapper: # pytype: disable=ignored-metaclass
"""Mapper of internal template resources"""
__metaclass__ = abc.ABCMeta
@@ -665,7 +663,7 @@
return {}
-class engine(object):
+class engine:
"""template expansion engine.
template expansion works like this. a map file contains key=value
@@ -709,7 +707,7 @@
newres = self._resources.availablekeys(newmapping)
mapping = {
k: v
- for k, v in pycompat.iteritems(origmapping)
+ for k, v in origmapping.items()
if (
k in knownres # not a symbol per self.symbol()
or newres.isdisjoint(self._defaultrequires(k))
@@ -921,7 +919,7 @@
return cache, tmap, aliases
-class loader(object):
+class loader:
"""Load template fragments optionally from a map file"""
def __init__(self, cache, aliases):
@@ -996,7 +994,7 @@
return syms
-class templater(object):
+class templater:
def __init__(
self,
filters=None,
--- a/mercurial/templates/static/followlines.js Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/templates/static/followlines.js Thu Jun 16 15:28:54 2022 +0200
@@ -50,13 +50,13 @@
btn.classList.add('btn-followlines');
var plusSpan = document.createElement('span');
plusSpan.classList.add('followlines-plus');
- plusSpan.innerHTML = '+';
+ plusSpan.textContent = '+';
btn.appendChild(plusSpan);
var br = document.createElement('br');
btn.appendChild(br);
var minusSpan = document.createElement('span');
minusSpan.classList.add('followlines-minus');
- minusSpan.innerHTML = '−';
+ minusSpan.textContent = '−';
btn.appendChild(minusSpan);
return btn;
}
--- a/mercurial/templateutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/templateutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import abc
import types
@@ -32,7 +31,7 @@
pass
-class wrapped(object): # pytype: disable=ignored-metaclass
+class wrapped: # pytype: disable=ignored-metaclass
"""Object requiring extra conversion prior to displaying or processing
as value
@@ -109,7 +108,7 @@
"""
-class mappable(object): # pytype: disable=ignored-metaclass
+class mappable: # pytype: disable=ignored-metaclass
"""Object which can be converted to a single template mapping"""
__metaclass__ = abc.ABCMeta
@@ -311,7 +310,7 @@
if util.safehasattr(self._values, b'get'):
values = {
k: v
- for k, v in pycompat.iteritems(self._values)
+ for k, v in self._values.items()
if select(self._wrapvalue(k, v))
}
else:
@@ -343,10 +342,7 @@
# TODO: make it non-recursive for trivial lists/dicts
xs = self._values
if util.safehasattr(xs, b'get'):
- return {
- k: unwrapvalue(context, mapping, v)
- for k, v in pycompat.iteritems(xs)
- }
+ return {k: unwrapvalue(context, mapping, v) for k, v in xs.items()}
return [unwrapvalue(context, mapping, x) for x in xs]
@@ -538,7 +534,7 @@
items.append(
{
k: unwrapvalue(context, lm, v)
- for k, v in pycompat.iteritems(nm)
+ for k, v in nm.items()
if k not in knownres
}
)
@@ -716,7 +712,7 @@
This exists for backward compatibility with the old-style template. Use
hybriddict() for new template keywords.
"""
- c = [{key: k, value: v} for k, v in pycompat.iteritems(data)]
+ c = [{key: k, value: v} for k, v in data.items()]
f = _showcompatlist(context, mapping, name, c, plural, separator)
return hybriddict(data, key=key, value=value, fmt=fmt, gen=f)
--- a/mercurial/testing/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/testing/__init__.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,8 +1,3 @@
-from __future__ import (
- absolute_import,
- division,
-)
-
import os
import time
--- a/mercurial/testing/revlog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/testing/revlog.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import unittest
# picked from test-parse-index2, copied rather than imported
--- a/mercurial/testing/storage.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/testing/storage.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import unittest
--- a/mercurial/thirdparty/concurrent/LICENSE Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,48 +0,0 @@
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF
-hereby grants Licensee a nonexclusive, royalty-free, world-wide
-license to reproduce, analyze, test, perform and/or display publicly,
-prepare derivative works, distribute, and otherwise use Python
-alone or in any derivative version, provided, however, that PSF's
-License Agreement and PSF's notice of copyright, i.e., "Copyright (c)
-2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights
-Reserved" are retained in Python alone or in any derivative version
-prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee. This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
--- a/mercurial/thirdparty/concurrent/futures/__init__.py Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,27 +0,0 @@
-# Copyright 2009 Brian Quinlan. All Rights Reserved.
-# Licensed to PSF under a Contributor Agreement.
-
-"""Execute computations asynchronously using threads or processes."""
-
-from __future__ import absolute_import
-
-__author__ = 'Brian Quinlan (brian@sweetapp.com)'
-
-from ._base import (
- FIRST_COMPLETED,
- FIRST_EXCEPTION,
- ALL_COMPLETED,
- CancelledError,
- TimeoutError,
- Future,
- Executor,
- wait,
- as_completed,
-)
-from .thread import ThreadPoolExecutor
-
-try:
- from .process import ProcessPoolExecutor
-except ImportError:
- # some platforms don't have multiprocessing
- pass
--- a/mercurial/thirdparty/concurrent/futures/_base.py Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,669 +0,0 @@
-# Copyright 2009 Brian Quinlan. All Rights Reserved.
-# Licensed to PSF under a Contributor Agreement.
-
-from __future__ import absolute_import
-
-import collections
-import logging
-import threading
-import itertools
-import time
-import types
-
-__author__ = 'Brian Quinlan (brian@sweetapp.com)'
-
-FIRST_COMPLETED = 'FIRST_COMPLETED'
-FIRST_EXCEPTION = 'FIRST_EXCEPTION'
-ALL_COMPLETED = 'ALL_COMPLETED'
-_AS_COMPLETED = '_AS_COMPLETED'
-
-# Possible future states (for internal use by the futures package).
-PENDING = 'PENDING'
-RUNNING = 'RUNNING'
-# The future was cancelled by the user...
-CANCELLED = 'CANCELLED'
-# ...and _Waiter.add_cancelled() was called by a worker.
-CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
-FINISHED = 'FINISHED'
-
-_FUTURE_STATES = [
- PENDING,
- RUNNING,
- CANCELLED,
- CANCELLED_AND_NOTIFIED,
- FINISHED
-]
-
-_STATE_TO_DESCRIPTION_MAP = {
- PENDING: "pending",
- RUNNING: "running",
- CANCELLED: "cancelled",
- CANCELLED_AND_NOTIFIED: "cancelled",
- FINISHED: "finished"
-}
-
-# Logger for internal use by the futures package.
-LOGGER = logging.getLogger("concurrent.futures")
-
-class Error(Exception):
- """Base class for all future-related exceptions."""
- pass
-
-class CancelledError(Error):
- """The Future was cancelled."""
- pass
-
-class TimeoutError(Error):
- """The operation exceeded the given deadline."""
- pass
-
-class _Waiter(object):
- """Provides the event that wait() and as_completed() block on."""
- def __init__(self):
- self.event = threading.Event()
- self.finished_futures = []
-
- def add_result(self, future):
- self.finished_futures.append(future)
-
- def add_exception(self, future):
- self.finished_futures.append(future)
-
- def add_cancelled(self, future):
- self.finished_futures.append(future)
-
-class _AsCompletedWaiter(_Waiter):
- """Used by as_completed()."""
-
- def __init__(self):
- super(_AsCompletedWaiter, self).__init__()
- self.lock = threading.Lock()
-
- def add_result(self, future):
- with self.lock:
- super(_AsCompletedWaiter, self).add_result(future)
- self.event.set()
-
- def add_exception(self, future):
- with self.lock:
- super(_AsCompletedWaiter, self).add_exception(future)
- self.event.set()
-
- def add_cancelled(self, future):
- with self.lock:
- super(_AsCompletedWaiter, self).add_cancelled(future)
- self.event.set()
-
-class _FirstCompletedWaiter(_Waiter):
- """Used by wait(return_when=FIRST_COMPLETED)."""
-
- def add_result(self, future):
- super(_FirstCompletedWaiter, self).add_result(future)
- self.event.set()
-
- def add_exception(self, future):
- super(_FirstCompletedWaiter, self).add_exception(future)
- self.event.set()
-
- def add_cancelled(self, future):
- super(_FirstCompletedWaiter, self).add_cancelled(future)
- self.event.set()
-
-class _AllCompletedWaiter(_Waiter):
- """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""
-
- def __init__(self, num_pending_calls, stop_on_exception):
- self.num_pending_calls = num_pending_calls
- self.stop_on_exception = stop_on_exception
- self.lock = threading.Lock()
- super(_AllCompletedWaiter, self).__init__()
-
- def _decrement_pending_calls(self):
- with self.lock:
- self.num_pending_calls -= 1
- if not self.num_pending_calls:
- self.event.set()
-
- def add_result(self, future):
- super(_AllCompletedWaiter, self).add_result(future)
- self._decrement_pending_calls()
-
- def add_exception(self, future):
- super(_AllCompletedWaiter, self).add_exception(future)
- if self.stop_on_exception:
- self.event.set()
- else:
- self._decrement_pending_calls()
-
- def add_cancelled(self, future):
- super(_AllCompletedWaiter, self).add_cancelled(future)
- self._decrement_pending_calls()
-
-class _AcquireFutures(object):
- """A context manager that does an ordered acquire of Future conditions."""
-
- def __init__(self, futures):
- self.futures = sorted(futures, key=id)
-
- def __enter__(self):
- for future in self.futures:
- future._condition.acquire()
-
- def __exit__(self, *args):
- for future in self.futures:
- future._condition.release()
-
-def _create_and_install_waiters(fs, return_when):
- if return_when == _AS_COMPLETED:
- waiter = _AsCompletedWaiter()
- elif return_when == FIRST_COMPLETED:
- waiter = _FirstCompletedWaiter()
- else:
- pending_count = sum(
- f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs)
-
- if return_when == FIRST_EXCEPTION:
- waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True)
- elif return_when == ALL_COMPLETED:
- waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False)
- else:
- raise ValueError("Invalid return condition: %r" % return_when)
-
- for f in fs:
- f._waiters.append(waiter)
-
- return waiter
-
-
-def _yield_finished_futures(fs, waiter, ref_collect):
- """
- Iterate on the list *fs*, yielding finished futures one by one in
- reverse order.
- Before yielding a future, *waiter* is removed from its waiters
- and the future is removed from each set in the collection of sets
- *ref_collect*.
-
- The aim of this function is to avoid keeping stale references after
- the future is yielded and before the iterator resumes.
- """
- while fs:
- f = fs[-1]
- for futures_set in ref_collect:
- futures_set.remove(f)
- with f._condition:
- f._waiters.remove(waiter)
- del f
- # Careful not to keep a reference to the popped value
- yield fs.pop()
-
-
-def as_completed(fs, timeout=None):
- """An iterator over the given futures that yields each as it completes.
-
- Args:
- fs: The sequence of Futures (possibly created by different Executors) to
- iterate over.
- timeout: The maximum number of seconds to wait. If None, then there
- is no limit on the wait time.
-
- Returns:
- An iterator that yields the given Futures as they complete (finished or
- cancelled). If any given Futures are duplicated, they will be returned
- once.
-
- Raises:
- TimeoutError: If the entire result iterator could not be generated
- before the given timeout.
- """
- if timeout is not None:
- end_time = timeout + time.time()
-
- fs = set(fs)
- total_futures = len(fs)
- with _AcquireFutures(fs):
- finished = set(
- f for f in fs
- if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
- pending = fs - finished
- waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
- finished = list(finished)
- try:
- for f in _yield_finished_futures(finished, waiter,
- ref_collect=(fs,)):
- f = [f]
- yield f.pop()
-
- while pending:
- if timeout is None:
- wait_timeout = None
- else:
- wait_timeout = end_time - time.time()
- if wait_timeout < 0:
- raise TimeoutError(
- '%d (of %d) futures unfinished' % (
- len(pending), total_futures))
-
- waiter.event.wait(wait_timeout)
-
- with waiter.lock:
- finished = waiter.finished_futures
- waiter.finished_futures = []
- waiter.event.clear()
-
- # reverse to keep finishing order
- finished.reverse()
- for f in _yield_finished_futures(finished, waiter,
- ref_collect=(fs, pending)):
- f = [f]
- yield f.pop()
-
- finally:
- # Remove waiter from unfinished futures
- for f in fs:
- with f._condition:
- f._waiters.remove(waiter)
-
-DoneAndNotDoneFutures = collections.namedtuple(
- 'DoneAndNotDoneFutures', 'done not_done')
-def wait(fs, timeout=None, return_when=ALL_COMPLETED):
- """Wait for the futures in the given sequence to complete.
-
- Args:
- fs: The sequence of Futures (possibly created by different Executors) to
- wait upon.
- timeout: The maximum number of seconds to wait. If None, then there
- is no limit on the wait time.
- return_when: Indicates when this function should return. The options
- are:
-
- FIRST_COMPLETED - Return when any future finishes or is
- cancelled.
- FIRST_EXCEPTION - Return when any future finishes by raising an
- exception. If no future raises an exception
- then it is equivalent to ALL_COMPLETED.
- ALL_COMPLETED - Return when all futures finish or are cancelled.
-
- Returns:
- A named 2-tuple of sets. The first set, named 'done', contains the
- futures that completed (is finished or cancelled) before the wait
- completed. The second set, named 'not_done', contains uncompleted
- futures.
- """
- with _AcquireFutures(fs):
- done = set(f for f in fs
- if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
- not_done = set(fs) - done
-
- if (return_when == FIRST_COMPLETED) and done:
- return DoneAndNotDoneFutures(done, not_done)
- elif (return_when == FIRST_EXCEPTION) and done:
- if any(f for f in done
- if not f.cancelled() and f.exception() is not None):
- return DoneAndNotDoneFutures(done, not_done)
-
- if len(done) == len(fs):
- return DoneAndNotDoneFutures(done, not_done)
-
- waiter = _create_and_install_waiters(fs, return_when)
-
- waiter.event.wait(timeout)
- for f in fs:
- with f._condition:
- f._waiters.remove(waiter)
-
- done.update(waiter.finished_futures)
- return DoneAndNotDoneFutures(done, set(fs) - done)
-
-class Future(object):
- """Represents the result of an asynchronous computation."""
-
- def __init__(self):
- """Initializes the future. Should not be called by clients."""
- self._condition = threading.Condition()
- self._state = PENDING
- self._result = None
- self._exception = None
- self._traceback = None
- self._waiters = []
- self._done_callbacks = []
-
- def _invoke_callbacks(self):
- for callback in self._done_callbacks:
- try:
- callback(self)
- except Exception:
- LOGGER.exception('exception calling callback for %r', self)
- except BaseException:
- # Explicitly let all other new-style exceptions through so
- # that we can catch all old-style exceptions with a simple
- # "except:" clause below.
- #
- # All old-style exception objects are instances of
- # types.InstanceType, but "except types.InstanceType:" does
- # not catch old-style exceptions for some reason. Thus, the
- # only way to catch all old-style exceptions without catching
- # any new-style exceptions is to filter out the new-style
- # exceptions, which all derive from BaseException.
- raise
- except:
- # Because of the BaseException clause above, this handler only
- # executes for old-style exception objects.
- LOGGER.exception('exception calling callback for %r', self)
-
- def __repr__(self):
- with self._condition:
- if self._state == FINISHED:
- if self._exception:
- return '<%s at %#x state=%s raised %s>' % (
- self.__class__.__name__,
- id(self),
- _STATE_TO_DESCRIPTION_MAP[self._state],
- self._exception.__class__.__name__)
- else:
- return '<%s at %#x state=%s returned %s>' % (
- self.__class__.__name__,
- id(self),
- _STATE_TO_DESCRIPTION_MAP[self._state],
- self._result.__class__.__name__)
- return '<%s at %#x state=%s>' % (
- self.__class__.__name__,
- id(self),
- _STATE_TO_DESCRIPTION_MAP[self._state])
-
- def cancel(self):
- """Cancel the future if possible.
-
- Returns True if the future was cancelled, False otherwise. A future
- cannot be cancelled if it is running or has already completed.
- """
- with self._condition:
- if self._state in [RUNNING, FINISHED]:
- return False
-
- if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
- return True
-
- self._state = CANCELLED
- self._condition.notify_all()
-
- self._invoke_callbacks()
- return True
-
- def cancelled(self):
- """Return True if the future was cancelled."""
- with self._condition:
- return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
-
- def running(self):
- """Return True if the future is currently executing."""
- with self._condition:
- return self._state == RUNNING
-
- def done(self):
- """Return True of the future was cancelled or finished executing."""
- with self._condition:
- return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]
-
- def __get_result(self):
- if self._exception:
- if isinstance(self._exception, types.InstanceType):
- # The exception is an instance of an old-style class, which
- # means type(self._exception) returns types.ClassType instead
- # of the exception's actual class type.
- exception_type = self._exception.__class__
- else:
- exception_type = type(self._exception)
- raise exception_type, self._exception, self._traceback
- else:
- return self._result
-
- def add_done_callback(self, fn):
- """Attaches a callable that will be called when the future finishes.
-
- Args:
- fn: A callable that will be called with this future as its only
- argument when the future completes or is cancelled. The callable
- will always be called by a thread in the same process in which
- it was added. If the future has already completed or been
- cancelled then the callable will be called immediately. These
- callables are called in the order that they were added.
- """
- with self._condition:
- if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
- self._done_callbacks.append(fn)
- return
- fn(self)
-
- def result(self, timeout=None):
- """Return the result of the call that the future represents.
-
- Args:
- timeout: The number of seconds to wait for the result if the future
- isn't done. If None, then there is no limit on the wait time.
-
- Returns:
- The result of the call that the future represents.
-
- Raises:
- CancelledError: If the future was cancelled.
- TimeoutError: If the future didn't finish executing before the given
- timeout.
- Exception: If the call raised then that exception will be raised.
- """
- with self._condition:
- if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
- raise CancelledError()
- elif self._state == FINISHED:
- return self.__get_result()
-
- self._condition.wait(timeout)
-
- if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
- raise CancelledError()
- elif self._state == FINISHED:
- return self.__get_result()
- else:
- raise TimeoutError()
-
- def exception_info(self, timeout=None):
- """Return a tuple of (exception, traceback) raised by the call that the
- future represents.
-
- Args:
- timeout: The number of seconds to wait for the exception if the
- future isn't done. If None, then there is no limit on the wait
- time.
-
- Returns:
- The exception raised by the call that the future represents or None
- if the call completed without raising.
-
- Raises:
- CancelledError: If the future was cancelled.
- TimeoutError: If the future didn't finish executing before the given
- timeout.
- """
- with self._condition:
- if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
- raise CancelledError()
- elif self._state == FINISHED:
- return self._exception, self._traceback
-
- self._condition.wait(timeout)
-
- if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
- raise CancelledError()
- elif self._state == FINISHED:
- return self._exception, self._traceback
- else:
- raise TimeoutError()
-
- def exception(self, timeout=None):
- """Return the exception raised by the call that the future represents.
-
- Args:
- timeout: The number of seconds to wait for the exception if the
- future isn't done. If None, then there is no limit on the wait
- time.
-
- Returns:
- The exception raised by the call that the future represents or None
- if the call completed without raising.
-
- Raises:
- CancelledError: If the future was cancelled.
- TimeoutError: If the future didn't finish executing before the given
- timeout.
- """
- return self.exception_info(timeout)[0]
-
- # The following methods should only be used by Executors and in tests.
- def set_running_or_notify_cancel(self):
- """Mark the future as running or process any cancel notifications.
-
- Should only be used by Executor implementations and unit tests.
-
- If the future has been cancelled (cancel() was called and returned
- True) then any threads waiting on the future completing (though calls
- to as_completed() or wait()) are notified and False is returned.
-
- If the future was not cancelled then it is put in the running state
- (future calls to running() will return True) and True is returned.
-
- This method should be called by Executor implementations before
- executing the work associated with this future. If this method returns
- False then the work should not be executed.
-
- Returns:
- False if the Future was cancelled, True otherwise.
-
- Raises:
- RuntimeError: if this method was already called or if set_result()
- or set_exception() was called.
- """
- with self._condition:
- if self._state == CANCELLED:
- self._state = CANCELLED_AND_NOTIFIED
- for waiter in self._waiters:
- waiter.add_cancelled(self)
- # self._condition.notify_all() is not necessary because
- # self.cancel() triggers a notification.
- return False
- elif self._state == PENDING:
- self._state = RUNNING
- return True
- else:
- LOGGER.critical('Future %s in unexpected state: %s',
- id(self),
- self._state)
- raise RuntimeError('Future in unexpected state')
-
- def set_result(self, result):
- """Sets the return value of work associated with the future.
-
- Should only be used by Executor implementations and unit tests.
- """
- with self._condition:
- self._result = result
- self._state = FINISHED
- for waiter in self._waiters:
- waiter.add_result(self)
- self._condition.notify_all()
- self._invoke_callbacks()
-
- def set_exception_info(self, exception, traceback):
- """Sets the result of the future as being the given exception
- and traceback.
-
- Should only be used by Executor implementations and unit tests.
- """
- with self._condition:
- self._exception = exception
- self._traceback = traceback
- self._state = FINISHED
- for waiter in self._waiters:
- waiter.add_exception(self)
- self._condition.notify_all()
- self._invoke_callbacks()
-
- def set_exception(self, exception):
- """Sets the result of the future as being the given exception.
-
- Should only be used by Executor implementations and unit tests.
- """
- self.set_exception_info(exception, None)
-
-class Executor(object):
- """This is an abstract base class for concrete asynchronous executors."""
-
- def submit(self, fn, *args, **kwargs):
- """Submits a callable to be executed with the given arguments.
-
- Schedules the callable to be executed as fn(*args, **kwargs) and returns
- a Future instance representing the execution of the callable.
-
- Returns:
- A Future representing the given call.
- """
- raise NotImplementedError()
-
- def map(self, fn, *iterables, **kwargs):
- """Returns an iterator equivalent to map(fn, iter).
-
- Args:
- fn: A callable that will take as many arguments as there are
- passed iterables.
- timeout: The maximum number of seconds to wait. If None, then there
- is no limit on the wait time.
-
- Returns:
- An iterator equivalent to: map(func, *iterables) but the calls may
- be evaluated out-of-order.
-
- Raises:
- TimeoutError: If the entire result iterator could not be generated
- before the given timeout.
- Exception: If fn(*args) raises for any values.
- """
- timeout = kwargs.get('timeout')
- if timeout is not None:
- end_time = timeout + time.time()
-
- fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)]
-
- # Yield must be hidden in closure so that the futures are submitted
- # before the first iterator value is required.
- def result_iterator():
- try:
- # reverse to keep finishing order
- fs.reverse()
- while fs:
- # Careful not to keep a reference to the popped future
- if timeout is None:
- yield fs.pop().result()
- else:
- yield fs.pop().result(end_time - time.time())
- finally:
- for future in fs:
- future.cancel()
- return result_iterator()
-
- def shutdown(self, wait=True):
- """Clean-up the resources associated with the Executor.
-
- It is safe to call this method several times. Otherwise, no other
- methods can be called after this one.
-
- Args:
- wait: If True then shutdown will not return until all running
- futures have finished executing and the resources used by the
- executor have been reclaimed.
- """
- pass
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.shutdown(wait=True)
- return False
--- a/mercurial/thirdparty/concurrent/futures/process.py Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,365 +0,0 @@
-# Copyright 2009 Brian Quinlan. All Rights Reserved.
-# Licensed to PSF under a Contributor Agreement.
-
-"""Implements ProcessPoolExecutor.
-
-The follow diagram and text describe the data-flow through the system:
-
-|======================= In-process =====================|== Out-of-process ==|
-
-+----------+ +----------+ +--------+ +-----------+ +---------+
-| | => | Work Ids | => | | => | Call Q | => | |
-| | +----------+ | | +-----------+ | |
-| | | ... | | | | ... | | |
-| | | 6 | | | | 5, call() | | |
-| | | 7 | | | | ... | | |
-| Process | | ... | | Local | +-----------+ | Process |
-| Pool | +----------+ | Worker | | #1..n |
-| Executor | | Thread | | |
-| | +----------- + | | +-----------+ | |
-| | <=> | Work Items | <=> | | <= | Result Q | <= | |
-| | +------------+ | | +-----------+ | |
-| | | 6: call() | | | | ... | | |
-| | | future | | | | 4, result | | |
-| | | ... | | | | 3, except | | |
-+----------+ +------------+ +--------+ +-----------+ +---------+
-
-Executor.submit() called:
-- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict
-- adds the id of the _WorkItem to the "Work Ids" queue
-
-Local worker thread:
-- reads work ids from the "Work Ids" queue and looks up the corresponding
- WorkItem from the "Work Items" dict: if the work item has been cancelled then
- it is simply removed from the dict, otherwise it is repackaged as a
- _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q"
- until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because
- calls placed in the "Call Q" can no longer be cancelled with Future.cancel().
-- reads _ResultItems from "Result Q", updates the future stored in the
- "Work Items" dict and deletes the dict entry
-
-Process #1..n:
-- reads _CallItems from "Call Q", executes the calls, and puts the resulting
- _ResultItems in "Request Q"
-"""
-
-from __future__ import absolute_import
-
-import atexit
-from . import _base
-import Queue as queue
-import multiprocessing
-import threading
-import weakref
-import sys
-
-__author__ = 'Brian Quinlan (brian@sweetapp.com)'
-
-# Workers are created as daemon threads and processes. This is done to allow the
-# interpreter to exit when there are still idle processes in a
-# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
-# allowing workers to die with the interpreter has two undesirable properties:
-# - The workers would still be running during interpretor shutdown,
-# meaning that they would fail in unpredictable ways.
-# - The workers could be killed while evaluating a work item, which could
-# be bad if the callable being evaluated has external side-effects e.g.
-# writing to a file.
-#
-# To work around this problem, an exit handler is installed which tells the
-# workers to exit when their work queues are empty and then waits until the
-# threads/processes finish.
-
-_threads_queues = weakref.WeakKeyDictionary()
-_shutdown = False
-
-def _python_exit():
- global _shutdown
- _shutdown = True
- items = list(_threads_queues.items()) if _threads_queues else ()
- for t, q in items:
- q.put(None)
- for t, q in items:
- t.join(sys.maxint)
-
-# Controls how many more calls than processes will be queued in the call queue.
-# A smaller number will mean that processes spend more time idle waiting for
-# work while a larger number will make Future.cancel() succeed less frequently
-# (Futures in the call queue cannot be cancelled).
-EXTRA_QUEUED_CALLS = 1
-
-class _WorkItem(object):
- def __init__(self, future, fn, args, kwargs):
- self.future = future
- self.fn = fn
- self.args = args
- self.kwargs = kwargs
-
-class _ResultItem(object):
- def __init__(self, work_id, exception=None, result=None):
- self.work_id = work_id
- self.exception = exception
- self.result = result
-
-class _CallItem(object):
- def __init__(self, work_id, fn, args, kwargs):
- self.work_id = work_id
- self.fn = fn
- self.args = args
- self.kwargs = kwargs
-
-def _process_worker(call_queue, result_queue):
- """Evaluates calls from call_queue and places the results in result_queue.
-
- This worker is run in a separate process.
-
- Args:
- call_queue: A multiprocessing.Queue of _CallItems that will be read and
- evaluated by the worker.
- result_queue: A multiprocessing.Queue of _ResultItems that will written
- to by the worker.
- shutdown: A multiprocessing.Event that will be set as a signal to the
- worker that it should exit when call_queue is empty.
- """
- while True:
- call_item = call_queue.get(block=True)
- if call_item is None:
- # Wake up queue management thread
- result_queue.put(None)
- return
- try:
- r = call_item.fn(*call_item.args, **call_item.kwargs)
- except:
- e = sys.exc_info()[1]
- result_queue.put(_ResultItem(call_item.work_id,
- exception=e))
- else:
- result_queue.put(_ResultItem(call_item.work_id,
- result=r))
-
-def _add_call_item_to_queue(pending_work_items,
- work_ids,
- call_queue):
- """Fills call_queue with _WorkItems from pending_work_items.
-
- This function never blocks.
-
- Args:
- pending_work_items: A dict mapping work ids to _WorkItems e.g.
- {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
- work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
- are consumed and the corresponding _WorkItems from
- pending_work_items are transformed into _CallItems and put in
- call_queue.
- call_queue: A multiprocessing.Queue that will be filled with _CallItems
- derived from _WorkItems.
- """
- while True:
- if call_queue.full():
- return
- try:
- work_id = work_ids.get(block=False)
- except queue.Empty:
- return
- else:
- work_item = pending_work_items[work_id]
-
- if work_item.future.set_running_or_notify_cancel():
- call_queue.put(_CallItem(work_id,
- work_item.fn,
- work_item.args,
- work_item.kwargs),
- block=True)
- else:
- del pending_work_items[work_id]
- continue
-
-def _queue_management_worker(executor_reference,
- processes,
- pending_work_items,
- work_ids_queue,
- call_queue,
- result_queue):
- """Manages the communication between this process and the worker processes.
-
- This function is run in a local thread.
-
- Args:
- executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
- this thread. Used to determine if the ProcessPoolExecutor has been
- garbage collected and that this function can exit.
- process: A list of the multiprocessing.Process instances used as
- workers.
- pending_work_items: A dict mapping work ids to _WorkItems e.g.
- {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
- work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
- call_queue: A multiprocessing.Queue that will be filled with _CallItems
- derived from _WorkItems for processing by the process workers.
- result_queue: A multiprocessing.Queue of _ResultItems generated by the
- process workers.
- """
- nb_shutdown_processes = [0]
- def shutdown_one_process():
- """Tell a worker to terminate, which will in turn wake us again"""
- call_queue.put(None)
- nb_shutdown_processes[0] += 1
- while True:
- _add_call_item_to_queue(pending_work_items,
- work_ids_queue,
- call_queue)
-
- result_item = result_queue.get(block=True)
- if result_item is not None:
- work_item = pending_work_items[result_item.work_id]
- del pending_work_items[result_item.work_id]
-
- if result_item.exception:
- work_item.future.set_exception(result_item.exception)
- else:
- work_item.future.set_result(result_item.result)
- # Delete references to object. See issue16284
- del work_item
- # Check whether we should start shutting down.
- executor = executor_reference()
- # No more work items can be added if:
- # - The interpreter is shutting down OR
- # - The executor that owns this worker has been collected OR
- # - The executor that owns this worker has been shutdown.
- if _shutdown or executor is None or executor._shutdown_thread:
- # Since no new work items can be added, it is safe to shutdown
- # this thread if there are no pending work items.
- if not pending_work_items:
- while nb_shutdown_processes[0] < len(processes):
- shutdown_one_process()
- # If .join() is not called on the created processes then
- # some multiprocessing.Queue methods may deadlock on Mac OS
- # X.
- for p in processes:
- p.join()
- call_queue.close()
- return
- del executor
-
-_system_limits_checked = False
-_system_limited = None
-def _check_system_limits():
- global _system_limits_checked, _system_limited
- if _system_limits_checked:
- if _system_limited:
- raise NotImplementedError(_system_limited)
- _system_limits_checked = True
- try:
- import os
- nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
- except (AttributeError, ValueError):
- # sysconf not available or setting not available
- return
- if nsems_max == -1:
- # indetermine limit, assume that limit is determined
- # by available memory only
- return
- if nsems_max >= 256:
- # minimum number of semaphores available
- # according to POSIX
- return
- _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
- raise NotImplementedError(_system_limited)
-
-
-class ProcessPoolExecutor(_base.Executor):
- def __init__(self, max_workers=None):
- """Initializes a new ProcessPoolExecutor instance.
-
- Args:
- max_workers: The maximum number of processes that can be used to
- execute the given calls. If None or not given then as many
- worker processes will be created as the machine has processors.
- """
- _check_system_limits()
-
- if max_workers is None:
- self._max_workers = multiprocessing.cpu_count()
- else:
- if max_workers <= 0:
- raise ValueError("max_workers must be greater than 0")
-
- self._max_workers = max_workers
-
- # Make the call queue slightly larger than the number of processes to
- # prevent the worker processes from idling. But don't make it too big
- # because futures in the call queue cannot be cancelled.
- self._call_queue = multiprocessing.Queue(self._max_workers +
- EXTRA_QUEUED_CALLS)
- self._result_queue = multiprocessing.Queue()
- self._work_ids = queue.Queue()
- self._queue_management_thread = None
- self._processes = set()
-
- # Shutdown is a two-step process.
- self._shutdown_thread = False
- self._shutdown_lock = threading.Lock()
- self._queue_count = 0
- self._pending_work_items = {}
-
- def _start_queue_management_thread(self):
- # When the executor gets lost, the weakref callback will wake up
- # the queue management thread.
- def weakref_cb(_, q=self._result_queue):
- q.put(None)
- if self._queue_management_thread is None:
- self._queue_management_thread = threading.Thread(
- target=_queue_management_worker,
- args=(weakref.ref(self, weakref_cb),
- self._processes,
- self._pending_work_items,
- self._work_ids,
- self._call_queue,
- self._result_queue))
- self._queue_management_thread.daemon = True
- self._queue_management_thread.start()
- _threads_queues[self._queue_management_thread] = self._result_queue
-
- def _adjust_process_count(self):
- for _ in range(len(self._processes), self._max_workers):
- p = multiprocessing.Process(
- target=_process_worker,
- args=(self._call_queue,
- self._result_queue))
- p.start()
- self._processes.add(p)
-
- def submit(self, fn, *args, **kwargs):
- with self._shutdown_lock:
- if self._shutdown_thread:
- raise RuntimeError('cannot schedule new futures after shutdown')
-
- f = _base.Future()
- w = _WorkItem(f, fn, args, kwargs)
-
- self._pending_work_items[self._queue_count] = w
- self._work_ids.put(self._queue_count)
- self._queue_count += 1
- # Wake up queue management thread
- self._result_queue.put(None)
-
- self._start_queue_management_thread()
- self._adjust_process_count()
- return f
- submit.__doc__ = _base.Executor.submit.__doc__
-
- def shutdown(self, wait=True):
- with self._shutdown_lock:
- self._shutdown_thread = True
- if self._queue_management_thread:
- # Wake up queue management thread
- self._result_queue.put(None)
- if wait:
- self._queue_management_thread.join(sys.maxint)
- # To reduce the risk of openning too many files, remove references to
- # objects that use file descriptors.
- self._queue_management_thread = None
- self._call_queue = None
- self._result_queue = None
- self._processes = None
- shutdown.__doc__ = _base.Executor.shutdown.__doc__
-
-atexit.register(_python_exit)
--- a/mercurial/thirdparty/concurrent/futures/thread.py Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,162 +0,0 @@
-# Copyright 2009 Brian Quinlan. All Rights Reserved.
-# Licensed to PSF under a Contributor Agreement.
-
-"""Implements ThreadPoolExecutor."""
-
-from __future__ import absolute_import
-
-import atexit
-from . import _base
-import itertools
-import Queue as queue
-import threading
-import weakref
-import sys
-
-try:
- from multiprocessing import cpu_count
-except ImportError:
- # some platforms don't have multiprocessing
- def cpu_count():
- return None
-
-__author__ = 'Brian Quinlan (brian@sweetapp.com)'
-
-# Workers are created as daemon threads. This is done to allow the interpreter
-# to exit when there are still idle threads in a ThreadPoolExecutor's thread
-# pool (i.e. shutdown() was not called). However, allowing workers to die with
-# the interpreter has two undesirable properties:
-# - The workers would still be running during interpretor shutdown,
-# meaning that they would fail in unpredictable ways.
-# - The workers could be killed while evaluating a work item, which could
-# be bad if the callable being evaluated has external side-effects e.g.
-# writing to a file.
-#
-# To work around this problem, an exit handler is installed which tells the
-# workers to exit when their work queues are empty and then waits until the
-# threads finish.
-
-_threads_queues = weakref.WeakKeyDictionary()
-_shutdown = False
-
-def _python_exit():
- global _shutdown
- _shutdown = True
- items = list(_threads_queues.items()) if _threads_queues else ()
- for t, q in items:
- q.put(None)
- for t, q in items:
- t.join(sys.maxint)
-
-atexit.register(_python_exit)
-
-class _WorkItem(object):
- def __init__(self, future, fn, args, kwargs):
- self.future = future
- self.fn = fn
- self.args = args
- self.kwargs = kwargs
-
- def run(self):
- if not self.future.set_running_or_notify_cancel():
- return
-
- try:
- result = self.fn(*self.args, **self.kwargs)
- except:
- e, tb = sys.exc_info()[1:]
- self.future.set_exception_info(e, tb)
- else:
- self.future.set_result(result)
-
-def _worker(executor_reference, work_queue):
- try:
- while True:
- work_item = work_queue.get(block=True)
- if work_item is not None:
- work_item.run()
- # Delete references to object. See issue16284
- del work_item
- continue
- executor = executor_reference()
- # Exit if:
- # - The interpreter is shutting down OR
- # - The executor that owns the worker has been collected OR
- # - The executor that owns the worker has been shutdown.
- if _shutdown or executor is None or executor._shutdown:
- # Notice other workers
- work_queue.put(None)
- return
- del executor
- except:
- _base.LOGGER.critical('Exception in worker', exc_info=True)
-
-
-class ThreadPoolExecutor(_base.Executor):
-
- # Used to assign unique thread names when thread_name_prefix is not supplied.
- _counter = itertools.count().next
-
- def __init__(self, max_workers=None, thread_name_prefix=''):
- """Initializes a new ThreadPoolExecutor instance.
-
- Args:
- max_workers: The maximum number of threads that can be used to
- execute the given calls.
- thread_name_prefix: An optional name prefix to give our threads.
- """
- if max_workers is None:
- # Use this number because ThreadPoolExecutor is often
- # used to overlap I/O instead of CPU work.
- max_workers = (cpu_count() or 1) * 5
- if max_workers <= 0:
- raise ValueError("max_workers must be greater than 0")
-
- self._max_workers = max_workers
- self._work_queue = queue.Queue()
- self._threads = set()
- self._shutdown = False
- self._shutdown_lock = threading.Lock()
- self._thread_name_prefix = (thread_name_prefix or
- ("ThreadPoolExecutor-%d" % self._counter()))
-
- def submit(self, fn, *args, **kwargs):
- with self._shutdown_lock:
- if self._shutdown:
- raise RuntimeError('cannot schedule new futures after shutdown')
-
- f = _base.Future()
- w = _WorkItem(f, fn, args, kwargs)
-
- self._work_queue.put(w)
- self._adjust_thread_count()
- return f
- submit.__doc__ = _base.Executor.submit.__doc__
-
- def _adjust_thread_count(self):
- # When the executor gets lost, the weakref callback will wake up
- # the worker threads.
- def weakref_cb(_, q=self._work_queue):
- q.put(None)
- # TODO(bquinlan): Should avoid creating new threads if there are more
- # idle threads than items in the work queue.
- num_threads = len(self._threads)
- if num_threads < self._max_workers:
- thread_name = '%s_%d' % (self._thread_name_prefix or self,
- num_threads)
- t = threading.Thread(name=thread_name, target=_worker,
- args=(weakref.ref(self, weakref_cb),
- self._work_queue))
- t.daemon = True
- t.start()
- self._threads.add(t)
- _threads_queues[t] = self._work_queue
-
- def shutdown(self, wait=True):
- with self._shutdown_lock:
- self._shutdown = True
- self._work_queue.put(None)
- if wait:
- for t in self._threads:
- t.join(sys.maxint)
- shutdown.__doc__ = _base.Executor.shutdown.__doc__
--- a/mercurial/thirdparty/selectors2.py Thu Jun 16 15:15:03 2022 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,743 +0,0 @@
-""" Back-ported, durable, and portable selectors """
-
-# MIT License
-#
-# Copyright (c) 2017 Seth Michael Larson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-from __future__ import absolute_import
-
-import collections
-import errno
-import math
-import select
-import socket
-import sys
-import time
-
-from .. import pycompat
-
-namedtuple = collections.namedtuple
-Mapping = collections.Mapping
-
-try:
- monotonic = time.monotonic
-except AttributeError:
- monotonic = time.time
-
-__author__ = 'Seth Michael Larson'
-__email__ = 'sethmichaellarson@protonmail.com'
-__version__ = '2.0.0'
-__license__ = 'MIT'
-__url__ = 'https://www.github.com/SethMichaelLarson/selectors2'
-
-__all__ = ['EVENT_READ',
- 'EVENT_WRITE',
- 'SelectorKey',
- 'DefaultSelector',
- 'BaseSelector']
-
-EVENT_READ = (1 << 0)
-EVENT_WRITE = (1 << 1)
-_DEFAULT_SELECTOR = None
-_SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None.
-_ERROR_TYPES = (OSError, IOError, socket.error)
-
-
-SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data'])
-
-
-class _SelectorMapping(Mapping):
- """ Mapping of file objects to selector keys """
-
- def __init__(self, selector):
- self._selector = selector
-
- def __len__(self):
- return len(self._selector._fd_to_key)
-
- def __getitem__(self, fileobj):
- try:
- fd = self._selector._fileobj_lookup(fileobj)
- return self._selector._fd_to_key[fd]
- except KeyError:
- raise KeyError("{0!r} is not registered.".format(fileobj))
-
- def __iter__(self):
- return iter(self._selector._fd_to_key)
-
-
-def _fileobj_to_fd(fileobj):
- """ Return a file descriptor from a file object. If
- given an integer will simply return that integer back. """
- if isinstance(fileobj, int):
- fd = fileobj
- else:
- try:
- fd = int(fileobj.fileno())
- except (AttributeError, TypeError, ValueError):
- raise ValueError("Invalid file object: {0!r}".format(fileobj))
- if fd < 0:
- raise ValueError("Invalid file descriptor: {0}".format(fd))
- return fd
-
-
-class BaseSelector(object):
- """ Abstract Selector class
-
- A selector supports registering file objects to be monitored
- for specific I/O events.
-
- A file object is a file descriptor or any object with a
- `fileno()` method. An arbitrary object can be attached to the
- file object which can be used for example to store context info,
- a callback, etc.
-
- A selector can use various implementations (select(), poll(), epoll(),
- and kqueue()) depending on the platform. The 'DefaultSelector' class uses
- the most efficient implementation for the current platform.
- """
- def __init__(self):
- # Maps file descriptors to keys.
- self._fd_to_key = {}
-
- # Read-only mapping returned by get_map()
- self._map = _SelectorMapping(self)
-
- def _fileobj_lookup(self, fileobj):
- """ Return a file descriptor from a file object.
- This wraps _fileobj_to_fd() to do an exhaustive
- search in case the object is invalid but we still
- have it in our map. Used by unregister() so we can
- unregister an object that was previously registered
- even if it is closed. It is also used by _SelectorMapping
- """
- try:
- return _fileobj_to_fd(fileobj)
- except ValueError:
-
- # Search through all our mapped keys.
- for key in self._fd_to_key.values():
- if key.fileobj is fileobj:
- return key.fd
-
- # Raise ValueError after all.
- raise
-
- def register(self, fileobj, events, data=None):
- """ Register a file object for a set of events to monitor. """
- if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)):
- raise ValueError("Invalid events: {0!r}".format(events))
-
- key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data)
-
- if key.fd in self._fd_to_key:
- raise KeyError("{0!r} (FD {1}) is already registered"
- .format(fileobj, key.fd))
-
- self._fd_to_key[key.fd] = key
- return key
-
- def unregister(self, fileobj):
- """ Unregister a file object from being monitored. """
- try:
- key = self._fd_to_key.pop(self._fileobj_lookup(fileobj))
- except KeyError:
- raise KeyError("{0!r} is not registered".format(fileobj))
-
- # Getting the fileno of a closed socket on Windows errors with EBADF.
- except socket.error as err:
- if err.errno != errno.EBADF:
- raise
- else:
- for key in self._fd_to_key.values():
- if key.fileobj is fileobj:
- self._fd_to_key.pop(key.fd)
- break
- else:
- raise KeyError("{0!r} is not registered".format(fileobj))
- return key
-
- def modify(self, fileobj, events, data=None):
- """ Change a registered file object monitored events and data. """
- # NOTE: Some subclasses optimize this operation even further.
- try:
- key = self._fd_to_key[self._fileobj_lookup(fileobj)]
- except KeyError:
- raise KeyError("{0!r} is not registered".format(fileobj))
-
- if events != key.events:
- self.unregister(fileobj)
- key = self.register(fileobj, events, data)
-
- elif data != key.data:
- # Use a shortcut to update the data.
- key = key._replace(data=data)
- self._fd_to_key[key.fd] = key
-
- return key
-
- def select(self, timeout=None):
- """ Perform the actual selection until some monitored file objects
- are ready or the timeout expires. """
- raise NotImplementedError()
-
- def close(self):
- """ Close the selector. This must be called to ensure that all
- underlying resources are freed. """
- self._fd_to_key.clear()
- self._map = None
-
- def get_key(self, fileobj):
- """ Return the key associated with a registered file object. """
- mapping = self.get_map()
- if mapping is None:
- raise RuntimeError("Selector is closed")
- try:
- return mapping[fileobj]
- except KeyError:
- raise KeyError("{0!r} is not registered".format(fileobj))
-
- def get_map(self):
- """ Return a mapping of file objects to selector keys """
- return self._map
-
- def _key_from_fd(self, fd):
- """ Return the key associated to a given file descriptor
- Return None if it is not found. """
- try:
- return self._fd_to_key[fd]
- except KeyError:
- return None
-
- def __enter__(self):
- return self
-
- def __exit__(self, *_):
- self.close()
-
-
-# Almost all platforms have select.select()
-if hasattr(select, "select"):
- class SelectSelector(BaseSelector):
- """ Select-based selector. """
- def __init__(self):
- super(SelectSelector, self).__init__()
- self._readers = set()
- self._writers = set()
-
- def register(self, fileobj, events, data=None):
- key = super(SelectSelector, self).register(fileobj, events, data)
- if events & EVENT_READ:
- self._readers.add(key.fd)
- if events & EVENT_WRITE:
- self._writers.add(key.fd)
- return key
-
- def unregister(self, fileobj):
- key = super(SelectSelector, self).unregister(fileobj)
- self._readers.discard(key.fd)
- self._writers.discard(key.fd)
- return key
-
- def select(self, timeout=None):
- # Selecting on empty lists on Windows errors out.
- if not len(self._readers) and not len(self._writers):
- return []
-
- timeout = None if timeout is None else max(timeout, 0.0)
- ready = []
- r, w, _ = _syscall_wrapper(self._wrap_select, True, self._readers,
- self._writers, timeout)
- r = set(r)
- w = set(w)
- for fd in r | w:
- events = 0
- if fd in r:
- events |= EVENT_READ
- if fd in w:
- events |= EVENT_WRITE
-
- key = self._key_from_fd(fd)
- if key:
- ready.append((key, events & key.events))
- return ready
-
- def _wrap_select(self, r, w, timeout=None):
- """ Wrapper for select.select because timeout is a positional arg """
- return select.select(r, w, [], timeout)
-
- __all__.append('SelectSelector')
-
- # Jython has a different implementation of .fileno() for socket objects.
- if pycompat.isjython:
- class _JythonSelectorMapping(object):
- """ This is an implementation of _SelectorMapping that is built
- for use specifically with Jython, which does not provide a hashable
- value from socket.socket.fileno(). """
-
- def __init__(self, selector):
- assert isinstance(selector, JythonSelectSelector)
- self._selector = selector
-
- def __len__(self):
- return len(self._selector._sockets)
-
- def __getitem__(self, fileobj):
- for sock, key in self._selector._sockets:
- if sock is fileobj:
- return key
- else:
- raise KeyError("{0!r} is not registered.".format(fileobj))
-
- class JythonSelectSelector(SelectSelector):
- """ This is an implementation of SelectSelector that is for Jython
- which works around that Jython's socket.socket.fileno() does not
- return an integer fd value. All SelectorKey.fd will be equal to -1
- and should not be used. This instead uses object id to compare fileobj
- and will only use select.select as it's the only selector that allows
- directly passing in socket objects rather than registering fds.
- See: http://bugs.jython.org/issue1678
- https://wiki.python.org/jython/NewSocketModule#socket.fileno.28.29_does_not_return_an_integer
- """
-
- def __init__(self):
- super(JythonSelectSelector, self).__init__()
-
- self._sockets = [] # Uses a list of tuples instead of dictionary.
- self._map = _JythonSelectorMapping(self)
- self._readers = []
- self._writers = []
-
- # Jython has a select.cpython_compatible_select function in older versions.
- self._select_func = getattr(select, 'cpython_compatible_select', select.select)
-
- def register(self, fileobj, events, data=None):
- for sock, _ in self._sockets:
- if sock is fileobj:
- raise KeyError("{0!r} is already registered"
- .format(fileobj, sock))
-
- key = SelectorKey(fileobj, -1, events, data)
- self._sockets.append((fileobj, key))
-
- if events & EVENT_READ:
- self._readers.append(fileobj)
- if events & EVENT_WRITE:
- self._writers.append(fileobj)
- return key
-
- def unregister(self, fileobj):
- for i, (sock, key) in enumerate(self._sockets):
- if sock is fileobj:
- break
- else:
- raise KeyError("{0!r} is not registered.".format(fileobj))
-
- if key.events & EVENT_READ:
- self._readers.remove(fileobj)
- if key.events & EVENT_WRITE:
- self._writers.remove(fileobj)
-
- del self._sockets[i]
- return key
-
- def _wrap_select(self, r, w, timeout=None):
- """ Wrapper for select.select because timeout is a positional arg """
- return self._select_func(r, w, [], timeout)
-
- __all__.append('JythonSelectSelector')
- SelectSelector = JythonSelectSelector # Override so the wrong selector isn't used.
-
-
-if hasattr(select, "poll"):
- class PollSelector(BaseSelector):
- """ Poll-based selector """
- def __init__(self):
- super(PollSelector, self).__init__()
- self._poll = select.poll()
-
- def register(self, fileobj, events, data=None):
- key = super(PollSelector, self).register(fileobj, events, data)
- event_mask = 0
- if events & EVENT_READ:
- event_mask |= select.POLLIN
- if events & EVENT_WRITE:
- event_mask |= select.POLLOUT
- self._poll.register(key.fd, event_mask)
- return key
-
- def unregister(self, fileobj):
- key = super(PollSelector, self).unregister(fileobj)
- self._poll.unregister(key.fd)
- return key
-
- def _wrap_poll(self, timeout=None):
- """ Wrapper function for select.poll.poll() so that
- _syscall_wrapper can work with only seconds. """
- if timeout is not None:
- if timeout <= 0:
- timeout = 0
- else:
- # select.poll.poll() has a resolution of 1 millisecond,
- # round away from zero to wait *at least* timeout seconds.
- timeout = math.ceil(timeout * 1000)
-
- result = self._poll.poll(timeout)
- return result
-
- def select(self, timeout=None):
- ready = []
- fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
- for fd, event_mask in fd_events:
- events = 0
- if event_mask & ~select.POLLIN:
- events |= EVENT_WRITE
- if event_mask & ~select.POLLOUT:
- events |= EVENT_READ
-
- key = self._key_from_fd(fd)
- if key:
- ready.append((key, events & key.events))
-
- return ready
-
- __all__.append('PollSelector')
-
-if hasattr(select, "epoll"):
- class EpollSelector(BaseSelector):
- """ Epoll-based selector """
- def __init__(self):
- super(EpollSelector, self).__init__()
- self._epoll = select.epoll()
-
- def fileno(self):
- return self._epoll.fileno()
-
- def register(self, fileobj, events, data=None):
- key = super(EpollSelector, self).register(fileobj, events, data)
- events_mask = 0
- if events & EVENT_READ:
- events_mask |= select.EPOLLIN
- if events & EVENT_WRITE:
- events_mask |= select.EPOLLOUT
- _syscall_wrapper(self._epoll.register, False, key.fd, events_mask)
- return key
-
- def unregister(self, fileobj):
- key = super(EpollSelector, self).unregister(fileobj)
- try:
- _syscall_wrapper(self._epoll.unregister, False, key.fd)
- except _ERROR_TYPES:
- # This can occur when the fd was closed since registry.
- pass
- return key
-
- def select(self, timeout=None):
- if timeout is not None:
- if timeout <= 0:
- timeout = 0.0
- else:
- # select.epoll.poll() has a resolution of 1 millisecond
- # but luckily takes seconds so we don't need a wrapper
- # like PollSelector. Just for better rounding.
- timeout = math.ceil(timeout * 1000) * 0.001
- timeout = float(timeout)
- else:
- timeout = -1.0 # epoll.poll() must have a float.
-
- # We always want at least 1 to ensure that select can be called
- # with no file descriptors registered. Otherwise will fail.
- max_events = max(len(self._fd_to_key), 1)
-
- ready = []
- fd_events = _syscall_wrapper(self._epoll.poll, True,
- timeout=timeout,
- maxevents=max_events)
- for fd, event_mask in fd_events:
- events = 0
- if event_mask & ~select.EPOLLIN:
- events |= EVENT_WRITE
- if event_mask & ~select.EPOLLOUT:
- events |= EVENT_READ
-
- key = self._key_from_fd(fd)
- if key:
- ready.append((key, events & key.events))
- return ready
-
- def close(self):
- self._epoll.close()
- super(EpollSelector, self).close()
-
- __all__.append('EpollSelector')
-
-
-if hasattr(select, "devpoll"):
- class DevpollSelector(BaseSelector):
- """Solaris /dev/poll selector."""
-
- def __init__(self):
- super(DevpollSelector, self).__init__()
- self._devpoll = select.devpoll()
-
- def fileno(self):
- return self._devpoll.fileno()
-
- def register(self, fileobj, events, data=None):
- key = super(DevpollSelector, self).register(fileobj, events, data)
- poll_events = 0
- if events & EVENT_READ:
- poll_events |= select.POLLIN
- if events & EVENT_WRITE:
- poll_events |= select.POLLOUT
- self._devpoll.register(key.fd, poll_events)
- return key
-
- def unregister(self, fileobj):
- key = super(DevpollSelector, self).unregister(fileobj)
- self._devpoll.unregister(key.fd)
- return key
-
- def _wrap_poll(self, timeout=None):
- """ Wrapper function for select.poll.poll() so that
- _syscall_wrapper can work with only seconds. """
- if timeout is not None:
- if timeout <= 0:
- timeout = 0
- else:
- # select.devpoll.poll() has a resolution of 1 millisecond,
- # round away from zero to wait *at least* timeout seconds.
- timeout = math.ceil(timeout * 1000)
-
- result = self._devpoll.poll(timeout)
- return result
-
- def select(self, timeout=None):
- ready = []
- fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
- for fd, event_mask in fd_events:
- events = 0
- if event_mask & ~select.POLLIN:
- events |= EVENT_WRITE
- if event_mask & ~select.POLLOUT:
- events |= EVENT_READ
-
- key = self._key_from_fd(fd)
- if key:
- ready.append((key, events & key.events))
-
- return ready
-
- def close(self):
- self._devpoll.close()
- super(DevpollSelector, self).close()
-
- __all__.append('DevpollSelector')
-
-
-if hasattr(select, "kqueue"):
- class KqueueSelector(BaseSelector):
- """ Kqueue / Kevent-based selector """
- def __init__(self):
- super(KqueueSelector, self).__init__()
- self._kqueue = select.kqueue()
-
- def fileno(self):
- return self._kqueue.fileno()
-
- def register(self, fileobj, events, data=None):
- key = super(KqueueSelector, self).register(fileobj, events, data)
- if events & EVENT_READ:
- kevent = select.kevent(key.fd,
- select.KQ_FILTER_READ,
- select.KQ_EV_ADD)
-
- _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
-
- if events & EVENT_WRITE:
- kevent = select.kevent(key.fd,
- select.KQ_FILTER_WRITE,
- select.KQ_EV_ADD)
-
- _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
-
- return key
-
- def unregister(self, fileobj):
- key = super(KqueueSelector, self).unregister(fileobj)
- if key.events & EVENT_READ:
- kevent = select.kevent(key.fd,
- select.KQ_FILTER_READ,
- select.KQ_EV_DELETE)
- try:
- _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
- except _ERROR_TYPES:
- pass
- if key.events & EVENT_WRITE:
- kevent = select.kevent(key.fd,
- select.KQ_FILTER_WRITE,
- select.KQ_EV_DELETE)
- try:
- _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
- except _ERROR_TYPES:
- pass
-
- return key
-
- def select(self, timeout=None):
- if timeout is not None:
- timeout = max(timeout, 0)
-
- max_events = len(self._fd_to_key) * 2
- ready_fds = {}
-
- kevent_list = _syscall_wrapper(self._kqueue.control, True,
- None, max_events, timeout)
-
- for kevent in kevent_list:
- fd = kevent.ident
- event_mask = kevent.filter
- events = 0
- if event_mask == select.KQ_FILTER_READ:
- events |= EVENT_READ
- if event_mask == select.KQ_FILTER_WRITE:
- events |= EVENT_WRITE
-
- key = self._key_from_fd(fd)
- if key:
- if key.fd not in ready_fds:
- ready_fds[key.fd] = (key, events & key.events)
- else:
- old_events = ready_fds[key.fd][1]
- ready_fds[key.fd] = (key, (events | old_events) & key.events)
-
- return list(ready_fds.values())
-
- def close(self):
- self._kqueue.close()
- super(KqueueSelector, self).close()
-
- __all__.append('KqueueSelector')
-
-
-def _can_allocate(struct):
- """ Checks that select structs can be allocated by the underlying
- operating system, not just advertised by the select module. We don't
- check select() because we'll be hopeful that most platforms that
- don't have it available will not advertise it. (ie: GAE) """
- try:
- # select.poll() objects won't fail until used.
- if struct == 'poll':
- p = select.poll()
- p.poll(0)
-
- # All others will fail on allocation.
- else:
- getattr(select, struct)().close()
- return True
- except (OSError, AttributeError):
- return False
-
-
-# Python 3.5 uses a more direct route to wrap system calls to increase speed.
-if sys.version_info >= (3, 5):
- def _syscall_wrapper(func, _, *args, **kwargs):
- """ This is the short-circuit version of the below logic
- because in Python 3.5+ all selectors restart system calls. """
- return func(*args, **kwargs)
-else:
- def _syscall_wrapper(func, recalc_timeout, *args, **kwargs):
- """ Wrapper function for syscalls that could fail due to EINTR.
- All functions should be retried if there is time left in the timeout
- in accordance with PEP 475. """
- timeout = kwargs.get("timeout", None)
- if timeout is None:
- expires = None
- recalc_timeout = False
- else:
- timeout = float(timeout)
- if timeout < 0.0: # Timeout less than 0 treated as no timeout.
- expires = None
- else:
- expires = monotonic() + timeout
-
- args = list(args)
- if recalc_timeout and "timeout" not in kwargs:
- raise ValueError(
- "Timeout must be in args or kwargs to be recalculated")
-
- result = _SYSCALL_SENTINEL
- while result is _SYSCALL_SENTINEL:
- try:
- result = func(*args, **kwargs)
- # OSError is thrown by select.select
- # IOError is thrown by select.epoll.poll
- # select.error is thrown by select.poll.poll
- # Aren't we thankful for Python 3.x rework for exceptions?
- except (OSError, IOError, select.error) as e:
- # select.error wasn't a subclass of OSError in the past.
- errcode = None
- if hasattr(e, "errno"):
- errcode = e.errno
- elif hasattr(e, "args"):
- errcode = e.args[0]
-
- # Also test for the Windows equivalent of EINTR.
- is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and
- errcode == errno.WSAEINTR))
-
- if is_interrupt:
- if expires is not None:
- current_time = monotonic()
- if current_time > expires:
- raise OSError(errno.ETIMEDOUT, 'Connection timed out')
- if recalc_timeout:
- if "timeout" in kwargs:
- kwargs["timeout"] = expires - current_time
- continue
- raise
- return result
-
-
-# Choose the best implementation, roughly:
-# kqueue == devpoll == epoll > poll > select
-# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
-def DefaultSelector():
- """ This function serves as a first call for DefaultSelector to
- detect if the select module is being monkey-patched incorrectly
- by eventlet, greenlet, and preserve proper behavior. """
- global _DEFAULT_SELECTOR
- if _DEFAULT_SELECTOR is None:
- if pycompat.isjython:
- _DEFAULT_SELECTOR = JythonSelectSelector
- elif _can_allocate('kqueue'):
- _DEFAULT_SELECTOR = KqueueSelector
- elif _can_allocate('devpoll'):
- _DEFAULT_SELECTOR = DevpollSelector
- elif _can_allocate('epoll'):
- _DEFAULT_SELECTOR = EpollSelector
- elif _can_allocate('poll'):
- _DEFAULT_SELECTOR = PollSelector
- elif hasattr(select, 'select'):
- _DEFAULT_SELECTOR = SelectSelector
- else: # Platform-specific: AppEngine
- raise RuntimeError('Platform does not have a selector.')
- return _DEFAULT_SELECTOR()
--- a/mercurial/transaction.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/transaction.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,9 +11,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
-import errno
from .i18n import _
from . import (
@@ -72,9 +69,8 @@
else:
try:
opener.unlink(f)
- except (IOError, OSError) as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
backupfiles = []
for l, f, b, c in backupentries:
@@ -96,9 +92,8 @@
target = f or b
try:
vfs.unlink(target)
- except (IOError, OSError) as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
except (IOError, OSError, error.Abort):
if not c:
raise
@@ -383,7 +378,7 @@
skip_pre = group == GEN_GROUP_POST_FINALIZE
skip_post = group == GEN_GROUP_PRE_FINALIZE
- for id, entry in sorted(pycompat.iteritems(self._filegenerators)):
+ for id, entry in sorted(self._filegenerators.items()):
any = True
order, filenames, genfunc, location, post_finalize = entry
--- a/mercurial/treediscovery.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/treediscovery.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
@@ -13,7 +12,6 @@
from .node import short
from . import (
error,
- pycompat,
)
@@ -40,6 +38,7 @@
if audit is not None:
audit[b'total-roundtrips'] = 1
+ audit[b'total-queries'] = 0
if repo.changelog.tip() == repo.nullid:
base.add(repo.nullid)
@@ -70,6 +69,8 @@
# head, root, first parent, second parent
# (a branch always has two parents (or none) by definition)
with remote.commandexecutor() as e:
+ if audit is not None:
+ audit[b'total-queries'] += len(unknown)
branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
unknown = collections.deque(branches)
@@ -114,12 +115,15 @@
repo.ui.debug(
b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
)
- for p in pycompat.xrange(0, len(r), 10):
+ for p in range(0, len(r), 10):
with remote.commandexecutor() as e:
+ subset = r[p : p + 10]
+ if audit is not None:
+ audit[b'total-queries'] += len(subset)
branches = e.callcommand(
b'branches',
{
- b'nodes': r[p : p + 10],
+ b'nodes': subset,
},
).result()
@@ -136,6 +140,8 @@
progress.increment()
with remote.commandexecutor() as e:
+ if audit is not None:
+ audit[b'total-queries'] += len(search)
between = e.callcommand(b'between', {b'pairs': search}).result()
for n, l in zip(search, between):
--- a/mercurial/txnutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/txnutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,9 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-
-import errno
from . import encoding
@@ -30,7 +27,6 @@
if mayhavepending(root):
try:
return (vfs(b'%s.pending' % filename, **kwargs), True)
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
return (vfs(filename, **kwargs), False)
--- a/mercurial/ui.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/ui.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import collections
import contextlib
@@ -170,7 +169,7 @@
return pycompat.rapply(pycompat.bytesurl, maybestr)
-class httppasswordmgrdbproxy(object):
+class httppasswordmgrdbproxy:
"""Delays loading urllib2 until it's needed."""
def __init__(self):
@@ -208,7 +207,7 @@
_reqexithandlers = []
-class ui(object):
+class ui:
def __init__(self, src=None):
"""Create a fresh new ui object if no src given
@@ -1433,6 +1432,14 @@
# HGPLAINEXCEPT=pager, and the user didn't specify --debug.
return
+ # py2exe doesn't appear to be able to use legacy I/O, and nothing is
+ # output to the pager for paged commands. Piping to `more` in cmd.exe
+ # works, but is easy to forget. Just disable pager for py2exe, but
+ # leave it working for pyoxidizer and exewrapper builds.
+ if pycompat.iswindows and getattr(sys, "frozen", None) == "console_exe":
+ self.debug(b"pager is unavailable with py2exe packaging\n")
+ return
+
pagercmd = self.config(b'pager', b'pager', rcutil.fallbackpager)
if not pagercmd:
return
@@ -1510,8 +1517,8 @@
stderr=procutil.stderr,
env=procutil.tonativeenv(procutil.shellenviron(env)),
)
- except OSError as e:
- if e.errno == errno.ENOENT and not shell:
+ except FileNotFoundError:
+ if not shell:
self.warn(
_(b"missing pager command '%s', skipping pager\n") % command
)
@@ -1726,9 +1733,9 @@
if usereadline:
self.flush()
prompt = encoding.strfromlocal(prompt)
- line = encoding.strtolocal(pycompat.rawinput(prompt))
+ line = encoding.strtolocal(input(prompt))
# When stdin is in binary mode on Windows, it can cause
- # raw_input() to emit an extra trailing carriage return
+ # input() to emit an extra trailing carriage return
if pycompat.oslinesep == b'\r\n' and line.endswith(b'\r'):
line = line[:-1]
else:
@@ -2118,9 +2125,7 @@
"""
if not self._loggers:
return
- activeloggers = [
- l for l in pycompat.itervalues(self._loggers) if l.tracked(event)
- ]
+ activeloggers = [l for l in self._loggers.values() if l.tracked(event)]
if not activeloggers:
return
msg = msgfmt % msgargs
--- a/mercurial/unionrepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/unionrepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,7 +11,6 @@
allowing operations like diff and log with revsets.
"""
-from __future__ import absolute_import
from .i18n import _
from .pycompat import getattr
@@ -210,7 +209,7 @@
return False
-class unionrepository(object):
+class unionrepository:
"""Represents the union of data in 2 repositories.
Instances are not usable if constructed directly. Use ``instance()``
--- a/mercurial/upgrade.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/upgrade.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .i18n import _
from . import (
@@ -20,6 +19,7 @@
from .upgrade_utils import (
actions as upgrade_actions,
+ auto_upgrade,
engine as upgrade_engine,
)
@@ -27,6 +27,7 @@
stringutil,
)
+may_auto_upgrade = auto_upgrade.may_auto_upgrade
allformatvariant = upgrade_actions.allformatvariant
@@ -304,6 +305,7 @@
current_requirements,
mismatch_config,
mismatch_warn,
+ mismatch_verbose_upgrade,
):
"""Upgrades a share to use share-safe mechanism"""
wlock = None
@@ -336,7 +338,8 @@
diffrequires.add(requirementsmod.SHARESAFE_REQUIREMENT)
current_requirements.add(requirementsmod.SHARESAFE_REQUIREMENT)
scmutil.writerequires(hgvfs, diffrequires)
- ui.warn(_(b'repository upgraded to use share-safe mode\n'))
+ if mismatch_verbose_upgrade:
+ ui.warn(_(b'repository upgraded to use share-safe mode\n'))
except error.LockError as e:
hint = _(
b"see `hg help config.format.use-share-safe` for more information"
@@ -365,6 +368,7 @@
current_requirements,
mismatch_config,
mismatch_warn,
+ mismatch_verbose_upgrade,
):
"""Downgrades a share which use share-safe to not use it"""
wlock = None
@@ -393,7 +397,8 @@
current_requirements |= source_requirements
current_requirements -= set(requirementsmod.SHARESAFE_REQUIREMENT)
scmutil.writerequires(hgvfs, current_requirements)
- ui.warn(_(b'repository downgraded to not use share-safe mode\n'))
+ if mismatch_verbose_upgrade:
+ ui.warn(_(b'repository downgraded to not use share-safe mode\n'))
except error.LockError as e:
hint = _(
b"see `hg help config.format.use-share-safe` for more information"
--- a/mercurial/upgrade_utils/actions.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/upgrade_utils/actions.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from ..i18n import _
from .. import (
@@ -38,6 +37,7 @@
def preservedrequirements(repo):
preserved = {
requirements.SHARED_REQUIREMENT,
+ requirements.NARROW_REQUIREMENT,
}
return preserved & repo.requirements
@@ -46,7 +46,7 @@
OPTIMISATION = b'optimization'
-class improvement(object):
+class improvement:
"""Represents an improvement that can be made as part of an upgrade."""
### The following attributes should be defined for each subclass:
@@ -362,6 +362,9 @@
b'Allows to use more efficient algorithm to deal with ' b'copy tracing.'
)
+ touches_filelogs = False
+ touches_manifests = False
+
@registerformatvariant
class revlogv2(requirementformatvariant):
@@ -380,6 +383,9 @@
description = _(b'An iteration of the revlog focussed on changelog needs.')
upgrademessage = _(b'quite experimental')
+ touches_filelogs = False
+ touches_manifests = False
+
@registerformatvariant
class removecldeltachain(formatvariant):
@@ -685,7 +691,24 @@
return newactions
-class UpgradeOperation(object):
+class BaseOperation:
+ """base class that contains the minimum for an upgrade to work
+
+    (this might need to be extended as the usage of subclass alternatives to
+    UpgradeOperation grows)
+ """
+
+ def __init__(
+ self,
+ new_requirements,
+ backup_store,
+ ):
+ self.new_requirements = new_requirements
+ # should this operation create a backup of the store
+ self.backup_store = backup_store
+
+
+class UpgradeOperation(BaseOperation):
"""represent the work to be done during an upgrade"""
def __init__(
@@ -698,8 +721,11 @@
revlogs_to_process,
backup_store,
):
+ super().__init__(
+ new_requirements,
+ backup_store,
+ )
self.ui = ui
- self.new_requirements = new_requirements
self.current_requirements = current_requirements
# list of upgrade actions the operation will perform
self.upgrade_actions = upgrade_actions
@@ -741,9 +767,6 @@
b're-delta-multibase' in upgrade_actions_names
)
- # should this operation create a backup of the store
- self.backup_store = backup_store
-
@property
def upgrade_actions_names(self):
return set([a.name for a in self.upgrade_actions])
@@ -1005,7 +1028,7 @@
def supporteddestrequirements(repo):
"""Obtain requirements that upgrade supports in the destination.
- If the result of the upgrade would create requirements not in this set,
+ If the result of the upgrade would have requirements not in this set,
the upgrade is disallowed.
Extensions should monkeypatch this to add their custom requirements.
@@ -1025,6 +1048,7 @@
requirements.SHARESAFE_REQUIREMENT,
requirements.SPARSEREVLOG_REQUIREMENT,
requirements.STORE_REQUIREMENT,
+ requirements.NARROW_REQUIREMENT,
}
for name in compression.compengines:
engine = compression.compengines[name]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/upgrade_utils/auto_upgrade.py Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,254 @@
+# upgrade.py - functions for automatic upgrade of Mercurial repository
+#
+# Copyright (c) 2022-present, Pierre-Yves David
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+from ..i18n import _
+
+from .. import (
+ error,
+ requirements as requirementsmod,
+ scmutil,
+)
+
+from . import (
+ actions,
+ engine,
+)
+
+
+class AutoUpgradeOperation(actions.BaseOperation):
+ """A limited Upgrade Operation used to run simple auto upgrade task
+
+ (Expand it as needed in the future)
+ """
+
+ def __init__(self, req):
+ super().__init__(
+ new_requirements=req,
+ backup_store=False,
+ )
+
+
+def get_share_safe_action(repo):
+ """return an automatic-upgrade action for `share-safe` if applicable
+
+ If no action is needed, return None, otherwise return a callback to upgrade
+ or downgrade the repository according the configuration and repository
+ format.
+ """
+ ui = repo.ui
+ requirements = repo.requirements
+ auto_upgrade_share_source = ui.configbool(
+ b'format',
+ b'use-share-safe.automatic-upgrade-of-mismatching-repositories',
+ )
+ auto_upgrade_quiet = ui.configbool(
+ b'format',
+ b'use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet',
+ )
+
+ action = None
+
+ if (
+ auto_upgrade_share_source
+ and requirementsmod.SHARED_REQUIREMENT not in requirements
+ ):
+ sf_config = ui.configbool(b'format', b'use-share-safe')
+ sf_local = requirementsmod.SHARESAFE_REQUIREMENT in requirements
+ if sf_config and not sf_local:
+ msg = _(
+ b"automatically upgrading repository to the `share-safe`"
+ b" feature\n"
+ )
+ hint = b"(see `hg help config.format.use-share-safe` for details)\n"
+
+ def action():
+ if not (ui.quiet or auto_upgrade_quiet):
+ ui.write_err(msg)
+ ui.write_err(hint)
+ requirements.add(requirementsmod.SHARESAFE_REQUIREMENT)
+ scmutil.writereporequirements(repo, requirements)
+
+ elif sf_local and not sf_config:
+ msg = _(
+ b"automatically downgrading repository from the `share-safe`"
+ b" feature\n"
+ )
+ hint = b"(see `hg help config.format.use-share-safe` for details)\n"
+
+ def action():
+ if not (ui.quiet or auto_upgrade_quiet):
+ ui.write_err(msg)
+ ui.write_err(hint)
+ requirements.discard(requirementsmod.SHARESAFE_REQUIREMENT)
+ scmutil.writereporequirements(repo, requirements)
+
+ return action
+
+
+def get_tracked_hint_action(repo):
+ """return an automatic-upgrade action for `tracked-hint` if applicable
+
+ If no action is needed, return None, otherwise return a callback to upgrade
+ or downgrade the repository according the configuration and repository
+ format.
+ """
+ ui = repo.ui
+ requirements = set(repo.requirements)
+ auto_upgrade_tracked_hint = ui.configbool(
+ b'format',
+ b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories',
+ )
+ auto_upgrade_quiet = ui.configbool(
+ b'format',
+ b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet',
+ )
+
+ action = None
+
+ if auto_upgrade_tracked_hint:
+ th_config = ui.configbool(b'format', b'use-dirstate-tracked-hint')
+ th_local = requirementsmod.DIRSTATE_TRACKED_HINT_V1 in requirements
+ if th_config and not th_local:
+ msg = _(
+ b"automatically upgrading repository to the `tracked-hint`"
+ b" feature\n"
+ )
+ hint = b"(see `hg help config.format.use-dirstate-tracked-hint` for details)\n"
+
+ def action():
+ if not (ui.quiet or auto_upgrade_quiet):
+ ui.write_err(msg)
+ ui.write_err(hint)
+ requirements.add(requirementsmod.DIRSTATE_TRACKED_HINT_V1)
+ op = AutoUpgradeOperation(requirements)
+ engine.upgrade_tracked_hint(ui, repo, op, add=True)
+
+ elif th_local and not th_config:
+ msg = _(
+ b"automatically downgrading repository from the `tracked-hint`"
+ b" feature\n"
+ )
+ hint = b"(see `hg help config.format.use-dirstate-tracked-hint` for details)\n"
+
+ def action():
+ if not (ui.quiet or auto_upgrade_quiet):
+ ui.write_err(msg)
+ ui.write_err(hint)
+ requirements.discard(requirementsmod.DIRSTATE_TRACKED_HINT_V1)
+ op = AutoUpgradeOperation(requirements)
+ engine.upgrade_tracked_hint(ui, repo, op, add=False)
+
+ return action
+
+
+def get_dirstate_v2_action(repo):
+ """return an automatic-upgrade action for `dirstate-v2` if applicable
+
+ If no action is needed, return None, otherwise return a callback to upgrade
+ or downgrade the repository according the configuration and repository
+ format.
+ """
+ ui = repo.ui
+ requirements = set(repo.requirements)
+ auto_upgrade_dv2 = ui.configbool(
+ b'format',
+ b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories',
+ )
+ auto_upgrade_dv2_quiet = ui.configbool(
+ b'format',
+ b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet',
+ )
+
+ action = None
+
+ if auto_upgrade_dv2:
+ d2_config = ui.configbool(b'format', b'use-dirstate-v2')
+ d2_local = requirementsmod.DIRSTATE_V2_REQUIREMENT in requirements
+ if d2_config and not d2_local:
+ msg = _(
+ b"automatically upgrading repository to the `dirstate-v2`"
+ b" feature\n"
+ )
+ hint = (
+ b"(see `hg help config.format.use-dirstate-v2` for details)\n"
+ )
+
+ def action():
+ if not (ui.quiet or auto_upgrade_dv2_quiet):
+ ui.write_err(msg)
+ ui.write_err(hint)
+ requirements.add(requirementsmod.DIRSTATE_V2_REQUIREMENT)
+ fake_op = AutoUpgradeOperation(requirements)
+ engine.upgrade_dirstate(repo.ui, repo, fake_op, b'v1', b'v2')
+
+ elif d2_local and not d2_config:
+ msg = _(
+ b"automatically downgrading repository from the `dirstate-v2`"
+ b" feature\n"
+ )
+ hint = (
+ b"(see `hg help config.format.use-dirstate-v2` for details)\n"
+ )
+
+ def action():
+ if not (ui.quiet or auto_upgrade_dv2_quiet):
+ ui.write_err(msg)
+ ui.write_err(hint)
+ requirements.discard(requirementsmod.DIRSTATE_V2_REQUIREMENT)
+ fake_op = AutoUpgradeOperation(requirements)
+ engine.upgrade_dirstate(repo.ui, repo, fake_op, b'v2', b'v1')
+
+ return action
+
+
+AUTO_UPGRADE_ACTIONS = [
+ get_dirstate_v2_action,
+ get_share_safe_action,
+ get_tracked_hint_action,
+]
+
+
+def may_auto_upgrade(repo, maker_func):
+ """potentially perform auto-upgrade and return the final repository to use
+
+    Auto-upgrades are "quick" repository upgrades that might automatically be run
+ by "any" repository access. See `hg help config.format` for automatic
+ upgrade documentation.
+
+    note: each relevant upgrade is done one after the other for simplicity.
+    This avoids leaving the repository in a partially inconsistent state while
+    upgrading.
+
+ repo: the current repository instance
+ maker_func: a factory function that can recreate a repository after an upgrade
+ """
+ clear = False
+
+ loop = 0
+
+ try:
+ while not clear:
+ loop += 1
+ if loop > 100:
+ # XXX basic protection against infinite loop, make it better.
+ raise error.ProgrammingError("Too many auto upgrade loops")
+ clear = True
+ for get_action in AUTO_UPGRADE_ACTIONS:
+ action = get_action(repo)
+ if action is not None:
+ clear = False
+ with repo.wlock(wait=False), repo.lock(wait=False):
+ action = get_action(repo)
+ if action is not None:
+ action()
+ repo = maker_func()
+ except error.LockError:
+        # if we cannot get the lock, ignore the auto-upgrade attempts and
+ # proceed. We might want to make this behavior configurable in the
+ # future.
+ pass
+
+ return repo
--- a/mercurial/upgrade_utils/engine.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/upgrade_utils/engine.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,9 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import stat
from ..i18n import _
@@ -647,11 +645,10 @@
util.copyfile(
srcrepo.vfs.join(b'dirstate'), backupvfs.join(b'dirstate')
)
- except (IOError, OSError) as e:
+ except FileNotFoundError:
# The dirstate does not exist on an empty repo or a repo with no
# revision checked out
- if e.errno != errno.ENOENT:
- raise
+ pass
assert srcrepo.dirstate._use_dirstate_v2 == (old == b'v2')
srcrepo.dirstate._map.preload()
@@ -660,11 +657,10 @@
srcrepo.dirstate._dirty = True
try:
srcrepo.vfs.unlink(b'dirstate')
- except (IOError, OSError) as e:
+ except FileNotFoundError:
# The dirstate does not exist on an empty repo or a repo with no
# revision checked out
- if e.errno != errno.ENOENT:
- raise
+ pass
srcrepo.dirstate.write(None)
--- a/mercurial/url.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/url.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,11 +7,9 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import base64
import socket
-import sys
from .i18n import _
from . import (
@@ -51,7 +49,7 @@
return s
-class passwordmgr(object):
+class passwordmgr:
def __init__(self, ui, passwddb):
self.ui = ui
self.passwddb = passwddb
@@ -231,19 +229,15 @@
if x.lower().startswith('proxy-')
}
self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport)
- for header in pycompat.iteritems(proxyheaders):
+ for header in proxyheaders.items():
self.send(b'%s: %s\r\n' % header)
self.send(b'\r\n')
# majority of the following code is duplicated from
# httplib.HTTPConnection as there are no adequate places to
- # override functions to provide the needed functionality
- # strict was removed in Python 3.4.
- kwargs = {}
- if not pycompat.ispy3:
- kwargs[b'strict'] = self.strict
+ # override functions to provide the needed functionality.
- res = self.response_class(self.sock, method=self._method, **kwargs)
+ res = self.response_class(self.sock, method=self._method)
while True:
version, status, reason = res._read_status()
@@ -276,16 +270,6 @@
keepalive.HTTPConnection.__init__(self, *args, **kwargs)
self._create_connection = createconn
- if sys.version_info < (2, 7, 7):
- # copied from 2.7.14, since old implementations directly call
- # socket.create_connection()
- def connect(self):
- self.sock = self._create_connection(
- (self.host, self.port), self.timeout, self.source_address
- )
- if self._tunnel_host:
- self._tunnel()
-
class logginghttphandler(httphandler):
"""HTTP handler that logs socket I/O."""
--- a/mercurial/urllibcompat.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/urllibcompat.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,12 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
+
+import http.server
+import urllib.error
+import urllib.parse
+import urllib.request
+import urllib.response
from .pycompat import getattr
from . import pycompat
@@ -12,7 +17,7 @@
_sysstr = pycompat.sysstr
-class _pycompatstub(object):
+class _pycompatstub:
def __init__(self):
self._aliases = {}
@@ -40,199 +45,109 @@
urlreq = _pycompatstub()
urlerr = _pycompatstub()
-if pycompat.ispy3:
- import urllib.parse
-
- urlreq._registeraliases(
- urllib.parse,
- (
- b"splitattr",
- b"splitpasswd",
- b"splitport",
- b"splituser",
- b"urlparse",
- b"urlunparse",
- ),
- )
- urlreq._registeralias(urllib.parse, b"parse_qs", b"parseqs")
- urlreq._registeralias(urllib.parse, b"parse_qsl", b"parseqsl")
- urlreq._registeralias(urllib.parse, b"unquote_to_bytes", b"unquote")
- import urllib.request
-
- urlreq._registeraliases(
- urllib.request,
- (
- b"AbstractHTTPHandler",
- b"BaseHandler",
- b"build_opener",
- b"FileHandler",
- b"FTPHandler",
- b"ftpwrapper",
- b"HTTPHandler",
- b"HTTPSHandler",
- b"install_opener",
- b"pathname2url",
- b"HTTPBasicAuthHandler",
- b"HTTPDigestAuthHandler",
- b"HTTPPasswordMgrWithDefaultRealm",
- b"ProxyHandler",
- b"Request",
- b"url2pathname",
- b"urlopen",
- ),
- )
- import urllib.response
-
- urlreq._registeraliases(
- urllib.response,
- (
- b"addclosehook",
- b"addinfourl",
- ),
- )
- import urllib.error
+urlreq._registeraliases(
+ urllib.parse,
+ (
+ b"splitattr",
+ b"splitpasswd",
+ b"splitport",
+ b"splituser",
+ b"urlparse",
+ b"urlunparse",
+ ),
+)
+urlreq._registeralias(urllib.parse, b"parse_qs", b"parseqs")
+urlreq._registeralias(urllib.parse, b"parse_qsl", b"parseqsl")
+urlreq._registeralias(urllib.parse, b"unquote_to_bytes", b"unquote")
- urlerr._registeraliases(
- urllib.error,
- (
- b"HTTPError",
- b"URLError",
- ),
- )
- import http.server
-
- httpserver._registeraliases(
- http.server,
- (
- b"HTTPServer",
- b"BaseHTTPRequestHandler",
- b"SimpleHTTPRequestHandler",
- b"CGIHTTPRequestHandler",
- ),
- )
-
- # urllib.parse.quote() accepts both str and bytes, decodes bytes
- # (if necessary), and returns str. This is wonky. We provide a custom
- # implementation that only accepts bytes and emits bytes.
- def quote(s, safe='/'):
- # bytestr has an __iter__ that emits characters. quote_from_bytes()
- # does an iteration and expects ints. We coerce to bytes to appease it.
- if isinstance(s, pycompat.bytestr):
- s = bytes(s)
- s = urllib.parse.quote_from_bytes(s, safe=safe)
- return s.encode('ascii', 'strict')
-
- # urllib.parse.urlencode() returns str. We use this function to make
- # sure we return bytes.
- def urlencode(query, doseq=False):
- s = urllib.parse.urlencode(query, doseq=doseq)
- return s.encode('ascii')
-
- urlreq.quote = quote
- urlreq.urlencode = urlencode
-
- def getfullurl(req):
- return req.full_url
-
- def gethost(req):
- return req.host
-
- def getselector(req):
- return req.selector
-
- def getdata(req):
- return req.data
-
- def hasdata(req):
- return req.data is not None
+urlreq._registeraliases(
+ urllib.request,
+ (
+ b"AbstractHTTPHandler",
+ b"BaseHandler",
+ b"build_opener",
+ b"FileHandler",
+ b"FTPHandler",
+ b"ftpwrapper",
+ b"HTTPHandler",
+ b"HTTPSHandler",
+ b"install_opener",
+ b"pathname2url",
+ b"HTTPBasicAuthHandler",
+ b"HTTPDigestAuthHandler",
+ b"HTTPPasswordMgrWithDefaultRealm",
+ b"ProxyHandler",
+ b"Request",
+ b"url2pathname",
+ b"urlopen",
+ ),
+)
-else:
- # pytype: disable=import-error
- import BaseHTTPServer
- import CGIHTTPServer
- import SimpleHTTPServer
- import urllib2
- import urllib
- import urlparse
+urlreq._registeraliases(
+ urllib.response,
+ (
+ b"addclosehook",
+ b"addinfourl",
+ ),
+)
- # pytype: enable=import-error
+urlerr._registeraliases(
+ urllib.error,
+ (
+ b"HTTPError",
+ b"URLError",
+ ),
+)
+
+httpserver._registeraliases(
+ http.server,
+ (
+ b"HTTPServer",
+ b"BaseHTTPRequestHandler",
+ b"SimpleHTTPRequestHandler",
+ b"CGIHTTPRequestHandler",
+ ),
+)
- urlreq._registeraliases(
- urllib,
- (
- b"addclosehook",
- b"addinfourl",
- b"ftpwrapper",
- b"pathname2url",
- b"quote",
- b"splitattr",
- b"splitpasswd",
- b"splitport",
- b"splituser",
- b"unquote",
- b"url2pathname",
- b"urlencode",
- ),
- )
- urlreq._registeraliases(
- urllib2,
- (
- b"AbstractHTTPHandler",
- b"BaseHandler",
- b"build_opener",
- b"FileHandler",
- b"FTPHandler",
- b"HTTPBasicAuthHandler",
- b"HTTPDigestAuthHandler",
- b"HTTPHandler",
- b"HTTPPasswordMgrWithDefaultRealm",
- b"HTTPSHandler",
- b"install_opener",
- b"ProxyHandler",
- b"Request",
- b"urlopen",
- ),
- )
- urlreq._registeraliases(
- urlparse,
- (
- b"urlparse",
- b"urlunparse",
- ),
- )
- urlreq._registeralias(urlparse, b"parse_qs", b"parseqs")
- urlreq._registeralias(urlparse, b"parse_qsl", b"parseqsl")
- urlerr._registeraliases(
- urllib2,
- (
- b"HTTPError",
- b"URLError",
- ),
- )
- httpserver._registeraliases(
- BaseHTTPServer,
- (
- b"HTTPServer",
- b"BaseHTTPRequestHandler",
- ),
- )
- httpserver._registeraliases(
- SimpleHTTPServer, (b"SimpleHTTPRequestHandler",)
- )
- httpserver._registeraliases(CGIHTTPServer, (b"CGIHTTPRequestHandler",))
+# urllib.parse.quote() accepts both str and bytes, decodes bytes
+# (if necessary), and returns str. This is wonky. We provide a custom
+# implementation that only accepts bytes and emits bytes.
+def quote(s, safe='/'):
+ # bytestr has an __iter__ that emits characters. quote_from_bytes()
+ # does an iteration and expects ints. We coerce to bytes to appease it.
+ if isinstance(s, pycompat.bytestr):
+ s = bytes(s)
+ s = urllib.parse.quote_from_bytes(s, safe=safe)
+ return s.encode('ascii', 'strict')
+
+
+# urllib.parse.urlencode() returns str. We use this function to make
+# sure we return bytes.
+def urlencode(query, doseq=False):
+ s = urllib.parse.urlencode(query, doseq=doseq)
+ return s.encode('ascii')
+
- def gethost(req):
- return req.get_host()
+urlreq.quote = quote
+urlreq.urlencode = urlencode
+
- def getselector(req):
- return req.get_selector()
+def getfullurl(req):
+ return req.full_url
+
+
+def gethost(req):
+ return req.host
- def getfullurl(req):
- return req.get_full_url()
+
+def getselector(req):
+ return req.selector
+
- def getdata(req):
- return req.get_data()
+def getdata(req):
+ return req.data
- def hasdata(req):
- return req.has_data()
+
+def hasdata(req):
+ return req.data is not None
--- a/mercurial/util.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/util.py Thu Jun 16 15:28:54 2022 +0200
@@ -13,7 +13,6 @@
hide platform-specific details from the core.
"""
-from __future__ import absolute_import, print_function
import abc
import collections
@@ -21,11 +20,12 @@
import errno
import gc
import hashlib
+import io
import itertools
import locale
import mmap
import os
-import platform as pyplatform
+import pickle # provides util.pickle symbol
import re as remod
import shutil
import stat
@@ -42,7 +42,6 @@
open,
setattr,
)
-from .node import hex
from hgdemandimport import tracing
from . import (
encoding,
@@ -76,10 +75,9 @@
cookielib = pycompat.cookielib
httplib = pycompat.httplib
-pickle = pycompat.pickle
safehasattr = pycompat.safehasattr
socketserver = pycompat.socketserver
-bytesio = pycompat.bytesio
+bytesio = io.BytesIO
# TODO deprecate stringio name, as it is a lie on Python 3.
stringio = bytesio
xmlrpclib = pycompat.xmlrpclib
@@ -158,11 +156,6 @@
SERVERROLE = compression.SERVERROLE
CLIENTROLE = compression.CLIENTROLE
-try:
- recvfds = osutil.recvfds
-except AttributeError:
- pass
-
# Python compatibility
_notset = object()
@@ -189,7 +182,7 @@
warnings.filterwarnings('default', '', DeprecationWarning, 'mercurial')
warnings.filterwarnings('default', '', DeprecationWarning, 'hgext')
warnings.filterwarnings('default', '', DeprecationWarning, 'hgext3rd')
-if _dowarn and pycompat.ispy3:
+if _dowarn:
# silence warning emitted by passing user string to re.sub()
warnings.filterwarnings(
'ignore', 'bad escape', DeprecationWarning, 'mercurial'
@@ -233,7 +226,7 @@
assert k in DIGESTS
-class digester(object):
+class digester:
"""helper to compute digests.
This helper can be used to compute one or more digests given their name.
@@ -281,7 +274,7 @@
return None
-class digestchecker(object):
+class digestchecker:
"""file handle wrapper that additionally checks content against a given
size and digests.
@@ -331,7 +324,7 @@
_chunksize = 4096
-class bufferedinputpipe(object):
+class bufferedinputpipe:
"""a manually buffered input pipe
Python will not let us use buffered IO and lazy reading with 'polling' at
@@ -459,7 +452,7 @@
raise
-class fileobjectproxy(object):
+class fileobjectproxy:
"""A proxy around file objects that tells a watcher when events occur.
This type is intended to only be used for testing purposes. Think hard
@@ -695,7 +688,7 @@
}
-class socketproxy(object):
+class socketproxy:
"""A proxy around a socket that tells a watcher when events occur.
This is like ``fileobjectproxy`` except for sockets.
@@ -818,7 +811,7 @@
)
-class baseproxyobserver(object):
+class baseproxyobserver:
def __init__(self, fh, name, logdata, logdataapis):
self.fh = fh
self.name = name
@@ -1258,7 +1251,7 @@
return f
-class cow(object):
+class cow:
"""helper class to make copy-on-write easier
Call preparewrite before doing any writes.
@@ -1302,7 +1295,7 @@
# __setitem__() isn't called as of PyPy 5.8.0
def update(self, src, **f):
if isinstance(src, dict):
- src = pycompat.iteritems(src)
+ src = src.items()
for k, v in src:
self[k] = v
for k in f:
@@ -1351,7 +1344,7 @@
"""
-class transactional(object): # pytype: disable=ignored-metaclass
+class transactional: # pytype: disable=ignored-metaclass
"""Base class for making a transactional type into a context manager."""
__metaclass__ = abc.ABCMeta
@@ -1402,7 +1395,7 @@
yield enter_result
-class _lrucachenode(object):
+class _lrucachenode:
"""A node in a doubly linked list.
Holds a reference to nodes on either side as well as a key-value
@@ -1426,7 +1419,7 @@
self.cost = 0
-class lrucachedict(object):
+class lrucachedict:
"""Dict that caches most recent accesses and sets.
The dict consists of an actual backing dict - indexed by original
@@ -1757,7 +1750,7 @@
return f
-class propertycache(object):
+class propertycache:
def __init__(self, func):
self.func = func
self.name = func.__name__
@@ -2216,7 +2209,7 @@
_re2 = False
-class _re(object):
+class _re:
def _checkre2(self):
global _re2
global _re2_input
@@ -2418,7 +2411,7 @@
return temp
-class filestat(object):
+class filestat:
"""help to exactly detect change of a file
'stat' attribute is result of 'os.stat()' if specified 'path'
@@ -2433,9 +2426,7 @@
def frompath(cls, path):
try:
stat = os.stat(path)
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
stat = None
return cls(stat)
@@ -2512,19 +2503,17 @@
advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
try:
os.utime(path, (advanced, advanced))
- except OSError as inst:
- if inst.errno == errno.EPERM:
- # utime() on the file created by another user causes EPERM,
- # if a process doesn't have appropriate privileges
- return False
- raise
+ except PermissionError:
+ # utime() on the file created by another user causes EPERM,
+ # if a process doesn't have appropriate privileges
+ return False
return True
def __ne__(self, other):
return not self == other
-class atomictempfile(object):
+class atomictempfile:
"""writable file object that atomically updates a file
All writes will go to a temporary copy of the original file. Call
@@ -2594,6 +2583,14 @@
self.close()
+def tryrmdir(f):
+ try:
+ removedirs(f)
+ except OSError as e:
+ if e.errno != errno.ENOENT and e.errno != errno.ENOTEMPTY:
+ raise
+
+
def unlinkpath(f, ignoremissing=False, rmdir=True):
# type: (bytes, bool, bool) -> None
"""unlink and remove the directory if it is empty"""
@@ -2611,12 +2608,11 @@
def tryunlink(f):
# type: (bytes) -> None
- """Attempt to remove a file, ignoring ENOENT errors."""
+ """Attempt to remove a file, ignoring FileNotFoundError."""
try:
unlink(f)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
def makedirs(name, mode=None, notindexed=False):
@@ -2667,7 +2663,7 @@
fp.write(text)
-class chunkbuffer(object):
+class chunkbuffer:
"""Allow arbitrary sized chunks of data to be efficiently read from an
iterator over chunks of arbitrary size."""
@@ -2772,7 +2768,7 @@
yield s
-class cappedreader(object):
+class cappedreader:
"""A file object proxy that allows reading up to N bytes.
Given a source file object, instances of this type allow reading up to
@@ -2860,7 +2856,7 @@
)
-class transformingwriter(object):
+class transformingwriter:
"""Writable file wrapper to transform data by function"""
def __init__(self, fp, encode):
@@ -2906,50 +2902,10 @@
fromnativeeol = pycompat.identity
nativeeolwriter = pycompat.identity
-if pyplatform.python_implementation() == b'CPython' and sys.version_info < (
- 3,
- 0,
-):
- # There is an issue in CPython that some IO methods do not handle EINTR
- # correctly. The following table shows what CPython version (and functions)
- # are affected (buggy: has the EINTR bug, okay: otherwise):
- #
- # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
- # --------------------------------------------------
- # fp.__iter__ | buggy | buggy | okay
- # fp.read* | buggy | okay [1] | okay
- #
- # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
- #
- # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
- # like "read*" work fine, as we do not support Python < 2.7.4.
- #
- # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
- # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
- # CPython 2, because CPython 2 maintains an internal readahead buffer for
- # fp.__iter__ but not other fp.read* methods.
- #
- # On modern systems like Linux, the "read" syscall cannot be interrupted
- # when reading "fast" files like on-disk files. So the EINTR issue only
- # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
- # files approximately as "fast" files and use the fast (unsafe) code path,
- # to minimize the performance impact.
-
- def iterfile(fp):
- fastpath = True
- if type(fp) is file:
- fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
- if fastpath:
- return fp
- else:
- # fp.readline deals with EINTR correctly, use it as a workaround.
- return iter(fp.readline, b'')
-
-
-else:
- # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
- def iterfile(fp):
- return fp
+
+# TODO delete since workaround variant for Python 2 no longer needed.
+def iterfile(fp):
+ return fp
def iterlines(iterator):
@@ -3008,7 +2964,7 @@
@attr.s
-class timedcmstats(object):
+class timedcmstats:
"""Stats information produced by the timedcm context manager on entering."""
# the starting value of the timer as a float (meaning and resulution is
@@ -3109,7 +3065,7 @@
raise error.ParseError(_(b"couldn't parse size: %s") % s)
-class hooks(object):
+class hooks:
"""A collection of hook functions that can be used to extend a
function's behavior. Hooks are called in lexicographic order,
based on the names of their sources."""
--- a/mercurial/utils/cborutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/cborutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,12 +5,9 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import struct
-import sys
-from .. import pycompat
# Very short very of RFC 7049...
#
@@ -175,9 +172,7 @@
"""
yield encodelength(MAJOR_TYPE_MAP, len(d))
- for key, value in sorted(
- pycompat.iteritems(d), key=lambda x: _mixedtypesortkey(x[0])
- ):
+ for key, value in sorted(d.items(), key=lambda x: _mixedtypesortkey(x[0])):
for chunk in streamencode(key):
yield chunk
for chunk in streamencode(value):
@@ -210,7 +205,7 @@
STREAM_ENCODERS = {
bytes: streamencodebytestring,
int: streamencodeint,
- pycompat.long: streamencodeint,
+ int: streamencodeint,
list: streamencodearray,
tuple: streamencodearray,
dict: streamencodemap,
@@ -250,16 +245,8 @@
"""Represents an error decoding CBOR."""
-if sys.version_info.major >= 3:
-
- def _elementtointeger(b, i):
- return b[i]
-
-
-else:
-
- def _elementtointeger(b, i):
- return ord(b[i])
+def _elementtointeger(b, i):
+ return b[i]
STRUCT_BIG_UBYTE = struct.Struct('>B')
@@ -496,7 +483,7 @@
return self
-class sansiodecoder(object):
+class sansiodecoder:
"""A CBOR decoder that doesn't perform its own I/O.
To use, construct an instance and feed it segments containing
@@ -989,7 +976,7 @@
return l
-class bufferingdecoder(object):
+class bufferingdecoder:
"""A CBOR decoder that buffers undecoded input.
This is a glorified wrapper around ``sansiodecoder`` that adds a buffering
--- a/mercurial/utils/compression.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/compression.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,8 +4,6 @@
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
-
import bz2
import collections
import zlib
@@ -34,7 +32,7 @@
)
-class propertycache(object):
+class propertycache:
def __init__(self, func):
self.func = func
self.name = func.__name__
@@ -49,7 +47,7 @@
obj.__dict__[self.name] = value
-class compressormanager(object):
+class compressormanager:
"""Holds registrations of various compression engines.
This class essentially abstracts the differences between compression
@@ -221,7 +219,7 @@
compengines = compressormanager()
-class compressionengine(object):
+class compressionengine:
"""Base class for compression engines.
Compression engines must implement the interface defined by this class.
@@ -340,7 +338,7 @@
raise NotImplementedError()
-class _CompressedStreamReader(object):
+class _CompressedStreamReader:
def __init__(self, fh):
if safehasattr(fh, 'unbufferedread'):
self._reader = fh.unbufferedread
@@ -484,7 +482,7 @@
def decompressorreader(self, fh):
return _GzipCompressedStreamReader(fh)
- class zlibrevlogcompressor(object):
+ class zlibrevlogcompressor:
def __init__(self, level=None):
self._level = level
@@ -628,7 +626,7 @@
def decompressorreader(self, fh):
return fh
- class nooprevlogcompressor(object):
+ class nooprevlogcompressor:
def compress(self, data):
return None
@@ -700,7 +698,7 @@
def decompressorreader(self, fh):
return _ZstdCompressedStreamReader(fh, self._module)
- class zstdrevlogcompressor(object):
+ class zstdrevlogcompressor:
def __init__(self, zstd, level=3):
# TODO consider omitting frame magic to save 4 bytes.
# This writes content sizes into the frame header. That is
@@ -784,7 +782,7 @@
# We need to format the docstring. So use a dummy object/type to hold it
# rather than mutating the original.
- class docobject(object):
+ class docobject:
pass
for name in compengines:
--- a/mercurial/utils/dateutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/dateutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import calendar
import datetime
--- a/mercurial/utils/hashutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/hashutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import hashlib
try:
--- a/mercurial/utils/procutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/procutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import errno
@@ -60,7 +59,7 @@
raise IOError(errno.EBADF, 'Bad file descriptor')
-class LineBufferedWrapper(object):
+class LineBufferedWrapper:
def __init__(self, orig):
self.orig = orig
@@ -81,7 +80,7 @@
def make_line_buffered(stream):
- if pycompat.ispy3 and not isinstance(stream, io.BufferedIOBase):
+ if not isinstance(stream, io.BufferedIOBase):
# On Python 3, buffered streams can be expected to subclass
# BufferedIOBase. This is definitively the case for the streams
# initialized by the interpreter. For unbuffered streams, we don't need
@@ -99,7 +98,7 @@
return stream
-class WriteAllWrapper(object):
+class WriteAllWrapper:
def __init__(self, orig):
self.orig = orig
@@ -124,7 +123,6 @@
def _make_write_all(stream):
- assert pycompat.ispy3
if isinstance(stream, WriteAllWrapper):
return stream
if isinstance(stream, io.BufferedIOBase):
@@ -136,52 +134,32 @@
return WriteAllWrapper(stream)
-if pycompat.ispy3:
- # Python 3 implements its own I/O streams. Unlike stdio of C library,
- # sys.stdin/stdout/stderr may be None if underlying fd is closed.
-
- # TODO: .buffer might not exist if std streams were replaced; we'll need
- # a silly wrapper to make a bytes stream backed by a unicode one.
+# Python 3 implements its own I/O streams. Unlike stdio of C library,
+# sys.stdin/stdout/stderr may be None if underlying fd is closed.
- if sys.stdin is None:
- stdin = BadFile()
- else:
- stdin = sys.stdin.buffer
- if sys.stdout is None:
- stdout = BadFile()
- else:
- stdout = _make_write_all(sys.stdout.buffer)
- if sys.stderr is None:
- stderr = BadFile()
- else:
- stderr = _make_write_all(sys.stderr.buffer)
+# TODO: .buffer might not exist if std streams were replaced; we'll need
+# a silly wrapper to make a bytes stream backed by a unicode one.
- if pycompat.iswindows:
- # Work around Windows bugs.
- stdout = platform.winstdout(stdout) # pytype: disable=module-attr
- stderr = platform.winstdout(stderr) # pytype: disable=module-attr
- if isatty(stdout):
- # The standard library doesn't offer line-buffered binary streams.
- stdout = make_line_buffered(stdout)
+if sys.stdin is None:
+ stdin = BadFile()
+else:
+ stdin = sys.stdin.buffer
+if sys.stdout is None:
+ stdout = BadFile()
else:
- # Python 2 uses the I/O streams provided by the C library.
- stdin = sys.stdin
- stdout = sys.stdout
- stderr = sys.stderr
- if pycompat.iswindows:
- # Work around Windows bugs.
- stdout = platform.winstdout(stdout) # pytype: disable=module-attr
- stderr = platform.winstdout(stderr) # pytype: disable=module-attr
- if isatty(stdout):
- if pycompat.iswindows:
- # The Windows C runtime library doesn't support line buffering.
- stdout = make_line_buffered(stdout)
- else:
- # glibc determines buffering on first write to stdout - if we
- # replace a TTY destined stdout with a pipe destined stdout (e.g.
- # pager), we want line buffering.
- stdout = os.fdopen(stdout.fileno(), 'wb', 1)
+ stdout = _make_write_all(sys.stdout.buffer)
+if sys.stderr is None:
+ stderr = BadFile()
+else:
+ stderr = _make_write_all(sys.stderr.buffer)
+if pycompat.iswindows:
+ # Work around Windows bugs.
+ stdout = platform.winstdout(stdout) # pytype: disable=module-attr
+ stderr = platform.winstdout(stderr) # pytype: disable=module-attr
+if isatty(stdout):
+ # The standard library doesn't offer line-buffered binary streams.
+ stdout = make_line_buffered(stdout)
findexe = platform.findexe
_gethgcmd = platform.gethgcmd
@@ -217,7 +195,7 @@
return _(b"killed by signal %d") % -code
-class _pfile(object):
+class _pfile:
"""File-like wrapper for a stream opened by subprocess.Popen()"""
def __init__(self, proc, fp):
@@ -366,7 +344,7 @@
def filter(s, cmd):
"""filter a string through a command that transforms its input to its
output"""
- for name, fn in pycompat.iteritems(_filtertable):
+ for name, fn in _filtertable.items():
if cmd.startswith(name):
return fn(s, cmd[len(name) :].lstrip())
return pipefilter(s, cmd)
@@ -472,7 +450,7 @@
env = dict(encoding.environ)
if environ:
- env.update((k, py2shell(v)) for k, v in pycompat.iteritems(environ))
+ env.update((k, py2shell(v)) for k, v in environ.items())
env[b'HG'] = hgexecutable()
return env
@@ -707,7 +685,7 @@
else:
- def runbgcommandpy3(
+ def runbgcommand(
cmd,
env,
shell=False,
@@ -790,128 +768,3 @@
returncode = p.wait
if record_wait is not None:
record_wait(returncode)
-
- def runbgcommandpy2(
- cmd,
- env,
- shell=False,
- stdout=None,
- stderr=None,
- ensurestart=True,
- record_wait=None,
- stdin_bytes=None,
- ):
- """Spawn a command without waiting for it to finish.
-
-
- When `record_wait` is not None, the spawned process will not be fully
- detached and the `record_wait` argument will be called with a the
- `Subprocess.wait` function for the spawned process. This is mostly
- useful for developers that need to make sure the spawned process
- finished before a certain point. (eg: writing test)"""
- if pycompat.isdarwin:
- # avoid crash in CoreFoundation in case another thread
- # calls gui() while we're calling fork().
- gui()
-
- # double-fork to completely detach from the parent process
- # based on http://code.activestate.com/recipes/278731
- if record_wait is None:
- pid = os.fork()
- if pid:
- if not ensurestart:
- # Even though we're not waiting on the child process,
- # we still must call waitpid() on it at some point so
- # it's not a zombie/defunct. This is especially relevant for
- # chg since the parent process won't die anytime soon.
- # We use a thread to make the overhead tiny.
- def _do_wait():
- os.waitpid(pid, 0)
-
- t = threading.Thread(target=_do_wait)
- t.daemon = True
- t.start()
- return
- # Parent process
- (_pid, status) = os.waitpid(pid, 0)
- if os.WIFEXITED(status):
- returncode = os.WEXITSTATUS(status)
- else:
- returncode = -(os.WTERMSIG(status))
- if returncode != 0:
- # The child process's return code is 0 on success, an errno
- # value on failure, or 255 if we don't have a valid errno
- # value.
- #
- # (It would be slightly nicer to return the full exception info
- # over a pipe as the subprocess module does. For now it
- # doesn't seem worth adding that complexity here, though.)
- if returncode == 255:
- returncode = errno.EINVAL
- raise OSError(
- returncode,
- b'error running %r: %s'
- % (cmd, os.strerror(returncode)),
- )
- return
-
- returncode = 255
- stdin = None
-
- try:
- if record_wait is None:
- # Start a new session
- os.setsid()
- # connect stdin to devnull to make sure the subprocess can't
- # muck up that stream for mercurial.
- if stdin_bytes is None:
- stdin = open(os.devnull, b'r')
- else:
- stdin = pycompat.unnamedtempfile()
- stdin.write(stdin_bytes)
- stdin.flush()
- stdin.seek(0)
-
- if stdout is None:
- stdout = open(os.devnull, b'w')
- if stderr is None:
- stderr = open(os.devnull, b'w')
-
- p = subprocess.Popen(
- cmd,
- shell=shell,
- env=env,
- close_fds=True,
- stdin=stdin,
- stdout=stdout,
- stderr=stderr,
- )
- if record_wait is not None:
- record_wait(p.wait)
- returncode = 0
- except EnvironmentError as ex:
- returncode = ex.errno & 0xFF
- if returncode == 0:
- # This shouldn't happen, but just in case make sure the
- # return code is never 0 here.
- returncode = 255
- except Exception:
- returncode = 255
- finally:
- # mission accomplished, this child needs to exit and not
- # continue the hg process here.
- if stdin is not None:
- stdin.close()
- if record_wait is None:
- os._exit(returncode)
-
- if pycompat.ispy3:
- # This branch is more robust, because it avoids running python
- # code (hence gc finalizers, like sshpeer.__del__, which
- # blocks). But we can't easily do the equivalent in py2,
- # because of the lack of start_new_session=True flag. Given
- # that the py2 branch should die soon, the short-lived
- # duplication seems acceptable.
- runbgcommand = runbgcommandpy3
- else:
- runbgcommand = runbgcommandpy2
--- a/mercurial/utils/repoviewutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/repoviewutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
### Nearest subset relation
# Nearest subset of filter X is a filter Y so that:
--- a/mercurial/utils/resourceutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/resourceutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import imp
import os
@@ -62,6 +61,10 @@
# Force loading of the resources module
resources.open_binary # pytype: disable=module-attr
+ # py2exe raises an AssertionError if uses importlib.resources
+ if getattr(sys, "frozen", None) in ("console_exe", "windows_exe"):
+ raise ImportError
+
except (ImportError, AttributeError):
# importlib.resources was not found (almost definitely because we're on a
# Python version before 3.7)
--- a/mercurial/utils/storageutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/storageutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import re
import struct
@@ -20,7 +19,6 @@
dagop,
error,
mdiff,
- pycompat,
)
from ..interfaces import repository
from ..revlogutils import sidedata as sidedatamod
@@ -182,7 +180,7 @@
else:
stop = storelen
- return pycompat.xrange(start, stop, step)
+ return range(start, stop, step)
def fileidlookup(store, fileid, identifier):
--- a/mercurial/utils/stringutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/stringutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import ast
import codecs
@@ -497,7 +496,7 @@
@attr.s(hash=True)
-class mailmapping(object):
+class mailmapping:
"""Represents a username/email key or value in
a mailmap file"""
@@ -686,6 +685,18 @@
return _correctauthorformat.match(author) is not None
+def firstline(text):
+ """Return the first line of the input"""
+ # Try to avoid running splitlines() on the whole string
+ i = text.find(b'\n')
+ if i != -1:
+ text = text[:i]
+ try:
+ return text.splitlines()[0]
+ except IndexError:
+ return b''
+
+
def ellipsis(text, maxlength=400):
"""Trim string to at most maxlength (default: 400) columns in display."""
return encoding.trim(text, maxlength, ellipsis=b'...')
@@ -739,7 +750,7 @@
def _cutdown(self, ucstr, space_left):
l = 0
colwidth = encoding.ucolwidth
- for i in pycompat.xrange(len(ucstr)):
+ for i in range(len(ucstr)):
l += colwidth(ucstr[i])
if space_left < l:
return (ucstr[:i], ucstr[i:])
@@ -965,6 +976,4 @@
def evalpythonliteral(s):
"""Evaluate a string containing a Python literal expression"""
# We could backport our tokenizer hack to rewrite '' to u'' if we want
- if pycompat.ispy3:
- return ast.literal_eval(s.decode('latin1'))
- return ast.literal_eval(s)
+ return ast.literal_eval(s.decode('latin1'))
--- a/mercurial/utils/urlutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/utils/urlutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -54,7 +54,7 @@
)
-class url(object):
+class url:
r"""Reliable URL parser.
This parses URLs and provides attributes for the following
@@ -453,7 +453,7 @@
"""list all the (name, paths) in the passed ui"""
result = []
if target_path is None:
- for name, paths in sorted(pycompat.iteritems(ui.paths)):
+ for name, paths in sorted(ui.paths.items()):
for p in paths:
result.append((name, p))
@@ -832,7 +832,7 @@
return new_paths
-class path(object):
+class path:
"""Represents an individual path and its configuration."""
def __init__(
@@ -919,7 +919,7 @@
# Now process the sub-options. If a sub-option is registered, its
# attribute will always be present. The value will be None if there
# was no valid sub-option.
- for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions):
+ for suboption, (attr, func) in _pathsuboptions.items():
if suboption not in sub_options:
setattr(self, attr, None)
continue
@@ -945,7 +945,7 @@
This is intended to be used for presentation purposes.
"""
d = {}
- for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions):
+ for subopt, (attr, _func) in _pathsuboptions.items():
value = getattr(self, attr)
if value is not None:
d[subopt] = value
--- a/mercurial/verify.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/verify.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import os
@@ -55,7 +54,7 @@
)
-class verifier(object):
+class verifier:
def __init__(self, repo, level=None):
self.repo = repo.unfiltered()
self.ui = repo.ui
@@ -406,11 +405,11 @@
_(b'checking'), unit=_(b'manifests'), total=len(subdirs)
)
- for subdir, linkrevs in pycompat.iteritems(subdirnodes):
+ for subdir, linkrevs in subdirnodes.items():
subdirfilenodes = self._verifymanifest(
linkrevs, subdir, storefiles, subdirprogress
)
- for f, onefilenodes in pycompat.iteritems(subdirfilenodes):
+ for f, onefilenodes in subdirfilenodes.items():
filenodes.setdefault(f, {}).update(onefilenodes)
if not dir and subdirnodes:
@@ -575,7 +574,7 @@
# cross-check
if f in filenodes:
- fns = [(v, k) for k, v in pycompat.iteritems(filenodes[f])]
+ fns = [(v, k) for k, v in filenodes[f].items()]
for lr, node in sorted(fns):
msg = _(b"manifest refers to unknown revision %s")
self._err(lr, msg % short(node), f)
--- a/mercurial/vfs.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/vfs.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,10 +4,8 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
-import errno
import os
import shutil
import stat
@@ -47,7 +45,7 @@
checkandavoid()
-class abstractvfs(object):
+class abstractvfs:
"""Abstract base class; cannot be instantiated"""
# default directory separator for vfs
@@ -75,18 +73,16 @@
'''gracefully return an empty string for missing files'''
try:
return self.read(path)
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
return b""
def tryreadlines(self, path, mode=b'rb'):
'''gracefully return an empty array for missing files'''
try:
return self.readlines(path, mode=mode)
- except IOError as inst:
- if inst.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
return []
@util.propertycache
@@ -477,9 +473,7 @@
nlink = util.nlinks(f)
if nlink < 1:
nlink = 2 # force mktempcopy (issue1922)
- except (OSError, IOError) as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
nlink = 0
if makeparentdirs:
util.makedirs(dirname, self.createmode, notindexed)
@@ -607,7 +601,7 @@
return self.vfs.join(path, *insidef)
-class closewrapbase(object):
+class closewrapbase:
"""Base class of wrapper, which hooks closing
Do not instantiate outside of the vfs layer.
@@ -653,7 +647,7 @@
self._closer.close(self._origfh)
-class backgroundfilecloser(object):
+class backgroundfilecloser:
"""Coordinates background closing of file handles on multiple threads."""
def __init__(self, ui, expectedcount=-1):
--- a/mercurial/win32.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/win32.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import ctypes
import ctypes.wintypes as wintypes
@@ -733,14 +732,13 @@
# callers to recreate f immediately while having other readers do their
# implicit zombie filename blocking on a temporary name.
- for tries in pycompat.xrange(10):
+ for tries in range(10):
temp = b'%s-%08x' % (f, random.randint(0, 0xFFFFFFFF))
try:
- os.rename(f, temp) # raises OSError EEXIST if temp exists
+ os.rename(f, temp)
break
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
else:
raise IOError(errno.EEXIST, "No usable temporary filename found")
--- a/mercurial/windows.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/windows.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,16 +5,16 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import errno
import getpass
-import msvcrt
+import msvcrt # pytype: disable=import-error
import os
import re
import stat
import string
import sys
+import winreg # pytype: disable=import-error
from .i18n import _
from .pycompat import getattr
@@ -26,13 +26,6 @@
win32,
)
-try:
- import _winreg as winreg # pytype: disable=import-error
-
- winreg.CloseKey
-except ImportError:
- # py2 only
- import winreg # pytype: disable=import-error
osutil = policy.importmod('osutil')
@@ -54,7 +47,7 @@
umask = 0o022
-class mixedfilemodewrapper(object):
+class mixedfilemodewrapper:
"""Wraps a file handle when it is opened in read/write mode.
fopen() and fdopen() on Windows have a specific-to-Windows requirement
@@ -131,7 +124,7 @@
return self._fp.readlines(*args, **kwargs)
-class fdproxy(object):
+class fdproxy:
"""Wraps osutil.posixfile() to override the name attribute to reflect the
underlying file name.
"""
@@ -163,8 +156,7 @@
# PyFile_FromFd() ignores the name, and seems to report fp.name as the
# underlying file descriptor.
- if pycompat.ispy3:
- fp = fdproxy(name, fp)
+ fp = fdproxy(name, fp)
# The position when opening in append mode is implementation defined, so
# make it consistent with other platforms, which position at EOF.
@@ -216,7 +208,7 @@
return encoding.unitolocal(pw)
-class winstdout(object):
+class winstdout:
"""Some files on Windows misbehave.
When writing to a broken pipe, EINVAL instead of EPIPE may be raised.
@@ -227,7 +219,6 @@
def __init__(self, fp):
self.fp = fp
- self.throttle = not pycompat.ispy3 and _isatty(fp)
def __getattr__(self, key):
return getattr(self.fp, key)
@@ -240,17 +231,7 @@
def write(self, s):
try:
- if not self.throttle:
- return self.fp.write(s)
- # This is workaround for "Not enough space" error on
- # writing large size of data to console.
- limit = 16000
- l = len(s)
- start = 0
- while start < l:
- end = start + limit
- self.fp.write(s[start:end])
- start = end
+ return self.fp.write(s)
except IOError as inst:
if inst.errno != 0 and not win32.lasterrorwaspipeerror(inst):
raise
@@ -589,11 +570,7 @@
for n, k, s in listdir(dir, True)
if getkind(s.st_mode) in _wantedkinds
}
- except OSError as err:
- # Python >= 2.5 returns ENOENT and adds winerror field
- # EINVAL is raised if dir is not a directory.
- if err.errno not in (errno.ENOENT, errno.EINVAL, errno.ENOTDIR):
- raise
+ except (FileNotFoundError, NotADirectoryError):
dmap = {}
cache = dircache.setdefault(dir, dmap)
yield cache.get(base, None)
@@ -651,9 +628,7 @@
'''atomically rename file src to dst, replacing dst if it exists'''
try:
os.rename(src, dst)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
unlink(dst)
os.rename(src, dst)
@@ -671,7 +646,7 @@
return False
-class cachestat(object):
+class cachestat:
def __init__(self, path):
pass
@@ -689,14 +664,23 @@
LOCAL_MACHINE).
"""
if scope is None:
+ # pytype: disable=module-attr
scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
+ # pytype: enable=module-attr
elif not isinstance(scope, (list, tuple)):
scope = (scope,)
for s in scope:
try:
+ # pytype: disable=module-attr
with winreg.OpenKey(s, encoding.strfromlocal(key)) as hkey:
- name = valname and encoding.strfromlocal(valname) or valname
+ # pytype: enable=module-attr
+ name = None
+ if valname is not None:
+ name = encoding.strfromlocal(valname)
+ # pytype: disable=module-attr
val = winreg.QueryValueEx(hkey, name)[0]
+ # pytype: enable=module-attr
+
# never let a Unicode string escape into the wild
return encoding.unitolocal(val)
except EnvironmentError:
--- a/mercurial/wireprotoframing.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/wireprotoframing.py Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,6 @@
# protocol. For details about the protocol, see
# `hg help internals.wireprotocol`.
-from __future__ import absolute_import
import collections
import struct
@@ -123,7 +122,7 @@
def humanflags(mapping, value):
"""Convert a numeric flags value to a human value, using a mapping table."""
- namemap = {v: k for k, v in pycompat.iteritems(mapping)}
+ namemap = {v: k for k, v in mapping.items()}
flags = []
val = 1
while value >= val:
@@ -135,7 +134,7 @@
@attr.s(slots=True)
-class frameheader(object):
+class frameheader:
"""Represents the data in a frame header."""
length = attr.ib()
@@ -147,7 +146,7 @@
@attr.s(slots=True, repr=False)
-class frame(object):
+class frame:
"""Represents a parsed frame."""
requestid = attr.ib()
@@ -160,7 +159,7 @@
@encoding.strmethod
def __repr__(self):
typename = b'<unknown 0x%02x>' % self.typeid
- for name, value in pycompat.iteritems(FRAME_TYPES):
+ for name, value in FRAME_TYPES.items():
if value == self.typeid:
typename = name
break
@@ -590,7 +589,7 @@
)
-class bufferingcommandresponseemitter(object):
+class bufferingcommandresponseemitter:
"""Helper object to emit command response frames intelligently.
Raw command response data is likely emitted in chunks much smaller
@@ -700,7 +699,7 @@
# mechanism.
-class identityencoder(object):
+class identityencoder:
"""Encoder for the "identity" stream encoding profile."""
def __init__(self, ui):
@@ -716,7 +715,7 @@
return b''
-class identitydecoder(object):
+class identitydecoder:
"""Decoder for the "identity" stream encoding profile."""
def __init__(self, ui, extraobjs):
@@ -729,7 +728,7 @@
return data
-class zlibencoder(object):
+class zlibencoder:
def __init__(self, ui):
import zlib
@@ -750,7 +749,7 @@
return res
-class zlibdecoder(object):
+class zlibdecoder:
def __init__(self, ui, extraobjs):
import zlib
@@ -762,15 +761,10 @@
self._decompressor = zlib.decompressobj()
def decode(self, data):
- # Python 2's zlib module doesn't use the buffer protocol and can't
- # handle all bytes-like types.
- if not pycompat.ispy3 and isinstance(data, bytearray):
- data = bytes(data)
-
return self._decompressor.decompress(data)
-class zstdbaseencoder(object):
+class zstdbaseencoder:
def __init__(self, level):
from . import zstd
@@ -798,7 +792,7 @@
super(zstd8mbencoder, self).__init__(3)
-class zstdbasedecoder(object):
+class zstdbasedecoder:
def __init__(self, maxwindowsize):
from . import zstd
@@ -848,7 +842,7 @@
STREAM_ENCODERS_ORDER.append(b'identity')
-class stream(object):
+class stream:
"""Represents a logical unidirectional series of frames."""
def __init__(self, streamid, active=False):
@@ -1001,7 +995,7 @@
}
-class serverreactor(object):
+class serverreactor:
"""Holds state of a server handling frame-based protocol requests.
This class is the "brain" of the unified frame-based protocol server
@@ -1689,7 +1683,7 @@
return self._makeerrorresult(_(b'server already errored'))
-class commandrequest(object):
+class commandrequest:
"""Represents a request to run a command."""
def __init__(self, requestid, name, args, datafh=None, redirect=None):
@@ -1701,7 +1695,7 @@
self.state = b'pending'
-class clientreactor(object):
+class clientreactor:
"""Holds state of a client issuing frame-based protocol requests.
This is like ``serverreactor`` but for client-side state.
--- a/mercurial/wireprotoserver.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/wireprotoserver.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import contextlib
import struct
@@ -57,7 +56,7 @@
@interfaceutil.implementer(wireprototypes.baseprotocolhandler)
-class httpv1protocolhandler(object):
+class httpv1protocolhandler:
def __init__(self, req, ui, checkperm):
self._req = req
self._ui = ui
@@ -375,7 +374,7 @@
@interfaceutil.implementer(wireprototypes.baseprotocolhandler)
-class sshv1protocolhandler(object):
+class sshv1protocolhandler:
"""Handler for requests services via version 1 of SSH protocol."""
def __init__(self, ui, fin, fout):
@@ -391,14 +390,14 @@
def getargs(self, args):
data = {}
keys = args.split()
- for n in pycompat.xrange(len(keys)):
+ for n in range(len(keys)):
argline = self._fin.readline()[:-1]
arg, l = argline.split()
if arg not in keys:
raise error.Abort(_(b"unexpected parameter %r") % arg)
if arg == b'*':
star = {}
- for k in pycompat.xrange(int(l)):
+ for k in range(int(l)):
argline = self._fin.readline()[:-1]
arg, l = argline.split()
val = self._fin.read(int(l))
@@ -521,7 +520,7 @@
)
-class sshserver(object):
+class sshserver:
def __init__(self, ui, repo, logfh=None):
self._ui = ui
self._repo = repo
--- a/mercurial/wireprototypes.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/wireprototypes.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from .node import (
bin,
@@ -40,14 +39,14 @@
}
-class bytesresponse(object):
+class bytesresponse:
"""A wire protocol response consisting of raw bytes."""
def __init__(self, data):
self.data = data
-class ooberror(object):
+class ooberror:
"""wireproto reply: failure of a batch of operation
Something failed during a batch call. The error message is stored in
@@ -58,7 +57,7 @@
self.message = message
-class pushres(object):
+class pushres:
"""wireproto reply: success with simple integer return
The call was successful and returned an integer contained in `self.res`.
@@ -69,7 +68,7 @@
self.output = output
-class pusherr(object):
+class pusherr:
"""wireproto reply: failure
The call failed. The `self.res` attribute contains the error message.
@@ -80,7 +79,7 @@
self.output = output
-class streamres(object):
+class streamres:
"""wireproto reply: binary stream
The call was successful and the result is a stream.
@@ -97,7 +96,7 @@
self.prefer_uncompressed = prefer_uncompressed
-class streamreslegacy(object):
+class streamreslegacy:
"""wireproto reply: uncompressed binary stream
The call was successful and the result is a stream.
@@ -244,7 +243,7 @@
"""
-class commandentry(object):
+class commandentry:
"""Represents a declared wire protocol command."""
def __init__(
@@ -407,7 +406,7 @@
@attr.s
-class encodedresponse(object):
+class encodedresponse:
"""Represents response data that is already content encoded.
Wire protocol version 2 only.
@@ -421,7 +420,7 @@
@attr.s
-class alternatelocationresponse(object):
+class alternatelocationresponse:
"""Represents a response available at an alternate location.
Instances are sent in place of actual response objects when the server
@@ -440,7 +439,7 @@
@attr.s
-class indefinitebytestringresponse(object):
+class indefinitebytestringresponse:
"""Represents an object to be encoded to an indefinite length bytestring.
Instances are initialized from an iterable of chunks, with each chunk being
--- a/mercurial/wireprotov1peer.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/wireprotov1peer.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,11 +5,11 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import sys
import weakref
+from concurrent import futures
from .i18n import _
from .node import bin
from .pycompat import (
@@ -80,15 +80,14 @@
assert all(escapearg(k) == k for k in argsdict)
args = b','.join(
- b'%s=%s' % (escapearg(k), escapearg(v))
- for k, v in pycompat.iteritems(argsdict)
+ b'%s=%s' % (escapearg(k), escapearg(v)) for k, v in argsdict.items()
)
cmds.append(b'%s %s' % (op, args))
return b';'.join(cmds)
-class unsentfuture(pycompat.futures.Future):
+class unsentfuture(futures.Future):
"""A Future variation to represent an unsent command.
Because we buffer commands and don't submit them immediately, calling
@@ -99,7 +98,7 @@
def result(self, timeout=None):
if self.done():
- return pycompat.futures.Future.result(self, timeout)
+ return futures.Future.result(self, timeout)
self._peerexecutor.sendcommands()
@@ -110,7 +109,7 @@
@interfaceutil.implementer(repository.ipeercommandexecutor)
-class peerexecutor(object):
+class peerexecutor:
def __init__(self, peer):
self._peer = peer
self._sent = False
@@ -154,7 +153,7 @@
# a batchable one and refuse to service it.
def addcall():
- f = pycompat.futures.Future()
+ f = futures.Future()
self._futures.add(f)
self._calls.append((command, args, fn, f))
return f
@@ -194,7 +193,7 @@
# cycle between us and futures.
for f in self._futures:
if isinstance(f, unsentfuture):
- f.__class__ = pycompat.futures.Future
+ f.__class__ = futures.Future
f._peerexecutor = None
calls = self._calls
@@ -258,7 +257,7 @@
# hard and it is easy to encounter race conditions, deadlocks, etc.
# concurrent.futures already solves these problems and its thread pool
# executor has minimal overhead. So we use it.
- self._responseexecutor = pycompat.futures.ThreadPoolExecutor(1)
+ self._responseexecutor = futures.ThreadPoolExecutor(1)
self._responsef = self._responseexecutor.submit(
self._readbatchresponse, states, wireresults
)
@@ -438,7 +437,7 @@
self.requirecap(b'getbundle', _(b'look up remote changes'))
opts = {}
bundlecaps = kwargs.get(b'bundlecaps') or set()
- for key, value in pycompat.iteritems(kwargs):
+ for key, value in kwargs.items():
if value is None:
continue
keytype = wireprototypes.GETBUNDLE_ARGUMENTS.get(key)
@@ -520,7 +519,7 @@
def between(self, pairs):
batch = 8 # avoid giant requests
r = []
- for i in pycompat.xrange(0, len(pairs), batch):
+ for i in range(0, len(pairs), batch):
n = b" ".join(
[
wireprototypes.encodelist(p, b'-')
--- a/mercurial/wireprotov1server.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/wireprotov1server.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import binascii
import os
@@ -236,7 +235,7 @@
def branchmap(repo, proto):
branchmap = repo.branchmap()
heads = []
- for branch, nodes in pycompat.iteritems(branchmap):
+ for branch, nodes in branchmap.items():
branchname = urlreq.quote(encoding.fromlocal(branch))
branchnodes = wireprototypes.encodelist(nodes)
heads.append(b'%s %s' % (branchname, branchnodes))
@@ -433,7 +432,7 @@
opts = options(
b'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others
)
- for k, v in pycompat.iteritems(opts):
+ for k, v in opts.items():
keytype = wireprototypes.GETBUNDLE_ARGUMENTS[k]
if keytype == b'nodes':
opts[k] = wireprototypes.decodelist(v)
--- a/mercurial/worker.py Thu Jun 16 15:15:03 2022 +0200
+++ b/mercurial/worker.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,29 +5,21 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
-import errno
import os
+import pickle
+import selectors
import signal
import sys
import threading
import time
-try:
- import selectors
-
- selectors.BaseSelector
-except ImportError:
- from .thirdparty import selectors2 as selectors
-
from .i18n import _
from . import (
encoding,
error,
pycompat,
scmutil,
- util,
)
@@ -65,66 +57,47 @@
return min(max(countcpus(), 4), 32)
-if pycompat.ispy3:
+def ismainthread():
+ return threading.current_thread() == threading.main_thread()
- def ismainthread():
- return threading.current_thread() == threading.main_thread()
-
- class _blockingreader(object):
- def __init__(self, wrapped):
- self._wrapped = wrapped
- # Do NOT implement readinto() by making it delegate to
- # _wrapped.readinto(), since that is unbuffered. The unpickler is fine
- # with just read() and readline(), so we don't need to implement it.
+class _blockingreader:
+ """Wrap unbuffered stream such that pickle.load() works with it.
- if (3, 8, 0) <= sys.version_info[:3] < (3, 8, 2):
-
- # This is required for python 3.8, prior to 3.8.2. See issue6444.
- def readinto(self, b):
- pos = 0
- size = len(b)
+ pickle.load() expects that calls to read() and readinto() read as many
+ bytes as requested. On EOF, it is fine to read fewer bytes. In this case,
+ pickle.load() raises an EOFError.
+ """
- while pos < size:
- ret = self._wrapped.readinto(b[pos:])
- if not ret:
- break
- pos += ret
+ def __init__(self, wrapped):
+ self._wrapped = wrapped
- return pos
-
- def readline(self):
- return self._wrapped.readline()
+ def readline(self):
+ return self._wrapped.readline()
- # issue multiple reads until size is fulfilled
- def read(self, size=-1):
- if size < 0:
- return self._wrapped.readall()
+ def readinto(self, buf):
+ pos = 0
+ size = len(buf)
- buf = bytearray(size)
- view = memoryview(buf)
- pos = 0
-
+ with memoryview(buf) as view:
while pos < size:
- ret = self._wrapped.readinto(view[pos:])
+ with view[pos:] as subview:
+ ret = self._wrapped.readinto(subview)
if not ret:
break
pos += ret
- del view
- del buf[pos:]
- return bytes(buf)
-
-
-else:
+ return pos
- def ismainthread():
- # pytype: disable=module-attr
- return isinstance(threading.current_thread(), threading._MainThread)
- # pytype: enable=module-attr
+ # issue multiple reads until size is fulfilled (or EOF is encountered)
+ def read(self, size=-1):
+ if size < 0:
+ return self._wrapped.readall()
- def _blockingreader(wrapped):
- return wrapped
+ buf = bytearray(size)
+ n_read = self.readinto(buf)
+ del buf[n_read:]
+ return bytes(buf)
if pycompat.isposix or pycompat.iswindows:
@@ -203,27 +176,18 @@
for p in pids:
try:
os.kill(p, signal.SIGTERM)
- except OSError as err:
- if err.errno != errno.ESRCH:
- raise
+ except ProcessLookupError:
+ pass
def waitforworkers(blocking=True):
for pid in pids.copy():
p = st = 0
- while True:
- try:
- p, st = os.waitpid(pid, (0 if blocking else os.WNOHANG))
- break
- except OSError as e:
- if e.errno == errno.EINTR:
- continue
- elif e.errno == errno.ECHILD:
- # child would already be reaped, but pids yet been
- # updated (maybe interrupted just after waitpid)
- pids.discard(pid)
- break
- else:
- raise
+ try:
+ p, st = os.waitpid(pid, (0 if blocking else os.WNOHANG))
+ except ChildProcessError:
+ # child would already be reaped, but pids yet been
+ # updated (maybe interrupted just after waitpid)
+ pids.discard(pid)
if not p:
# skip subsequent steps, because child process should
# be still running in this case
@@ -270,8 +234,10 @@
os.close(r)
os.close(w)
os.close(rfd)
- for result in func(*(staticargs + (pargs,))):
- os.write(wfd, util.pickle.dumps(result))
+ with os.fdopen(wfd, 'wb') as wf:
+ for result in func(*(staticargs + (pargs,))):
+ pickle.dump(result, wf)
+ wf.flush()
return 0
ret = scmutil.callcatch(ui, workerfunc)
@@ -293,6 +259,10 @@
selector = selectors.DefaultSelector()
for rfd, wfd in pipes:
os.close(wfd)
+ # The stream has to be unbuffered. Otherwise, if all data is read from
+ # the raw file into the buffer, the selector thinks that the FD is not
+ # ready to read while pickle.load() could read from the buffer. This
+ # would delay the processing of readable items.
selector.register(os.fdopen(rfd, 'rb', 0), selectors.EVENT_READ)
def cleanup():
@@ -307,19 +277,21 @@
while openpipes > 0:
for key, events in selector.select():
try:
- res = util.pickle.load(_blockingreader(key.fileobj))
+ # The pytype error likely goes away on a modern version of
+ # pytype having a modern typeshed snapshot.
+ # pytype: disable=wrong-arg-types
+ res = pickle.load(_blockingreader(key.fileobj))
+ # pytype: enable=wrong-arg-types
if hasretval and res[0]:
retval.update(res[1])
else:
yield res
except EOFError:
selector.unregister(key.fileobj)
+ # pytype: disable=attribute-error
key.fileobj.close()
+ # pytype: enable=attribute-error
openpipes -= 1
- except IOError as e:
- if e.errno == errno.EINTR:
- continue
- raise
except: # re-raises
killworkers()
cleanup()
--- a/rust/Cargo.lock Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/Cargo.lock Thu Jun 16 15:28:54 2022 +0200
@@ -15,10 +15,16 @@
checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
[[package]]
+name = "ahash"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "739f4a8db6605981345c5654f3a85b056ce52f37a39d34da03f25bf2151ea16e"
+
+[[package]]
name = "aho-corasick"
-version = "0.7.15"
+version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
dependencies = [
"memchr",
]
@@ -31,9 +37,9 @@
[[package]]
name = "ansi_term"
-version = "0.11.0"
+version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
dependencies = [
"winapi",
]
@@ -57,9 +63,9 @@
[[package]]
name = "bitflags"
-version = "1.2.1"
+version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitmaps"
@@ -80,10 +86,19 @@
]
[[package]]
+name = "block-buffer"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
name = "byteorder"
-version = "1.3.4"
+version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
+checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "bytes-cast"
@@ -141,9 +156,9 @@
[[package]]
name = "clap"
-version = "2.33.3"
+version = "2.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
+checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
dependencies = [
"ansi_term",
"atty",
@@ -161,6 +176,12 @@
checksum = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826"
[[package]]
+name = "convert_case"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
+
+[[package]]
name = "cpufeatures"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -170,6 +191,15 @@
]
[[package]]
+name = "cpufeatures"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469"
+dependencies = [
+ "libc",
+]
+
+[[package]]
name = "cpython"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -178,7 +208,6 @@
"libc",
"num-traits",
"paste",
- "python27-sys",
"python3-sys",
]
@@ -203,9 +232,9 @@
[[package]]
name = "crossbeam-channel"
-version = "0.5.0"
+version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775"
+checksum = "e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa"
dependencies = [
"cfg-if 1.0.0",
"crossbeam-utils 0.8.1",
@@ -259,6 +288,15 @@
]
[[package]]
+name = "crypto-common"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4600d695eb3f6ce1cd44e6e291adceb2cc3ab12f20a33777ecd0bf6eba34e06"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
name = "ctor"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -270,20 +308,22 @@
[[package]]
name = "derive_more"
-version = "0.99.11"
+version = "0.99.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c"
+checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
+ "convert_case",
"proc-macro2",
"quote",
+ "rustc_version",
"syn",
]
[[package]]
-name = "difference"
-version = "2.0.0"
+name = "diff"
+version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
+checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499"
[[package]]
name = "digest"
@@ -295,6 +335,16 @@
]
[[package]]
+name = "digest"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8cb780dce4f9a8f5c087362b3a4595936b2019e7c8b30f2c3e9a7e94e6ae9837"
+dependencies = [
+ "block-buffer 0.10.2",
+ "crypto-common",
+]
+
+[[package]]
name = "either"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -302,9 +352,9 @@
[[package]]
name = "env_logger"
-version = "0.7.1"
+version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
+checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3"
dependencies = [
"atty",
"humantime",
@@ -314,10 +364,19 @@
]
[[package]]
+name = "fastrand"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf"
+dependencies = [
+ "instant",
+]
+
+[[package]]
name = "flate2"
-version = "1.0.19"
+version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129"
+checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
dependencies = [
"cfg-if 1.0.0",
"crc32fast",
@@ -385,6 +444,16 @@
checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
[[package]]
+name = "hashbrown"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
+dependencies = [
+ "ahash",
+ "rayon",
+]
+
+[[package]]
name = "hermit-abi"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -411,23 +480,24 @@
"derive_more",
"flate2",
"format-bytes",
+ "hashbrown",
"home",
"im-rc",
- "itertools",
+ "itertools 0.10.3",
"lazy_static",
"libc",
"log",
"memmap2",
- "micro-timer",
+ "micro-timer 0.3.1",
"ouroboros",
"pretty_assertions",
- "rand 0.8.4",
+ "rand 0.8.5",
"rand_distr",
"rand_pcg",
"rayon",
"regex",
"same-file",
- "sha-1",
+ "sha-1 0.10.0",
"tempfile",
"twox-hash",
"zstd",
@@ -438,7 +508,7 @@
version = "0.1.0"
dependencies = [
"cpython",
- "crossbeam-channel 0.4.4",
+ "crossbeam-channel 0.5.2",
"env_logger",
"hg-core",
"libc",
@@ -458,12 +528,9 @@
[[package]]
name = "humantime"
-version = "1.3.0"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
-dependencies = [
- "quick-error",
-]
+checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "im-rc"
@@ -480,6 +547,15 @@
]
[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if 1.0.0",
+]
+
+[[package]]
name = "itertools"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -489,6 +565,15 @@
]
[[package]]
+name = "itertools"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+dependencies = [
+ "either",
+]
+
+[[package]]
name = "jobserver"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -528,11 +613,11 @@
[[package]]
name = "log"
-version = "0.4.11"
+version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
+checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
dependencies = [
- "cfg-if 0.1.10",
+ "cfg-if 1.0.0",
]
[[package]]
@@ -543,9 +628,9 @@
[[package]]
name = "memchr"
-version = "2.3.4"
+version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
[[package]]
name = "memmap2"
@@ -572,7 +657,17 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c"
dependencies = [
- "micro-timer-macros",
+ "micro-timer-macros 0.3.1",
+ "scopeguard",
+]
+
+[[package]]
+name = "micro-timer"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5de32cb59a062672560d6f0842c4aa7714727457b9fe2daf8987d995a176a405"
+dependencies = [
+ "micro-timer-macros 0.4.0",
"scopeguard",
]
@@ -589,6 +684,18 @@
]
[[package]]
+name = "micro-timer-macros"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cee948b94700125b52dfb68dd17c19f6326696c1df57f92c05ee857463c93ba1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "scopeguard",
+ "syn",
+]
+
+[[package]]
name = "miniz_oxide"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -687,13 +794,13 @@
[[package]]
name = "pretty_assertions"
-version = "0.6.1"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427"
+checksum = "76d5b548b725018ab5496482b45cb8bef21e9fed1858a6d674e3a8a0f0bb5d50"
dependencies = [
"ansi_term",
"ctor",
- "difference",
+ "diff",
"output_vt100",
]
@@ -731,16 +838,6 @@
]
[[package]]
-name = "python27-sys"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94670354e264300dde81a5864cbb6bfc9d56ac3dcf3a278c32cb52f816f4dfd1"
-dependencies = [
- "libc",
- "regex",
-]
-
-[[package]]
name = "python3-sys"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -751,12 +848,6 @@
]
[[package]]
-name = "quick-error"
-version = "1.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
-
-[[package]]
name = "quote"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -775,19 +866,18 @@
"libc",
"rand_chacha 0.2.2",
"rand_core 0.5.1",
- "rand_hc 0.2.0",
+ "rand_hc",
]
[[package]]
name = "rand"
-version = "0.8.4"
+version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha 0.3.1",
"rand_core 0.6.3",
- "rand_hc 0.3.1",
]
[[package]]
@@ -830,12 +920,12 @@
[[package]]
name = "rand_distr"
-version = "0.4.2"
+version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "964d548f8e7d12e102ef183a0de7e98180c9f8729f555897a857b96e48122d2f"
+checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31"
dependencies = [
"num-traits",
- "rand 0.8.4",
+ "rand 0.8.5",
]
[[package]]
@@ -848,15 +938,6 @@
]
[[package]]
-name = "rand_hc"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7"
-dependencies = [
- "rand_core 0.6.3",
-]
-
-[[package]]
name = "rand_pcg"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -876,9 +957,9 @@
[[package]]
name = "rayon"
-version = "1.5.0"
+version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674"
+checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
dependencies = [
"autocfg",
"crossbeam-deque",
@@ -888,11 +969,11 @@
[[package]]
name = "rayon-core"
-version = "1.9.0"
+version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a"
+checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
dependencies = [
- "crossbeam-channel 0.5.0",
+ "crossbeam-channel 0.5.2",
"crossbeam-deque",
"crossbeam-utils 0.8.1",
"lazy_static",
@@ -901,27 +982,29 @@
[[package]]
name = "redox_syscall"
-version = "0.1.57"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
+checksum = "8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c"
+dependencies = [
+ "bitflags",
+]
[[package]]
name = "regex"
-version = "1.4.2"
+version = "1.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c"
+checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
- "thread_local",
]
[[package]]
name = "regex-syntax"
-version = "0.6.21"
+version = "0.6.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
[[package]]
name = "remove_dir_all"
@@ -946,13 +1029,22 @@
"home",
"lazy_static",
"log",
- "micro-timer",
+ "micro-timer 0.4.0",
"regex",
"users",
"which",
]
[[package]]
+name = "rustc_version"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
+dependencies = [
+ "semver",
+]
+
+[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -968,19 +1060,36 @@
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
+name = "semver"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4a3381e03edd24287172047536f20cabde766e2cd3e65e6b00fb3af51c4f38d"
+
+[[package]]
name = "sha-1"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c4cfa741c5832d0ef7fab46cabed29c2aae926db0b11bb2069edd8db5e64e16"
dependencies = [
- "block-buffer",
+ "block-buffer 0.9.0",
"cfg-if 1.0.0",
- "cpufeatures",
- "digest",
+ "cpufeatures 0.1.4",
+ "digest 0.9.0",
"opaque-debug",
]
[[package]]
+name = "sha-1"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
+dependencies = [
+ "cfg-if 1.0.0",
+ "cpufeatures 0.2.1",
+ "digest 0.10.2",
+]
+
+[[package]]
name = "sized-chunks"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1021,13 +1130,13 @@
[[package]]
name = "tempfile"
-version = "3.1.0"
+version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
+checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
dependencies = [
- "cfg-if 0.1.10",
+ "cfg-if 1.0.0",
+ "fastrand",
"libc",
- "rand 0.7.3",
"redox_syscall",
"remove_dir_all",
"winapi",
@@ -1052,15 +1161,6 @@
]
[[package]]
-name = "thread_local"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
-dependencies = [
- "lazy_static",
-]
-
-[[package]]
name = "time"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1073,12 +1173,12 @@
[[package]]
name = "twox-hash"
-version = "1.6.0"
+version = "1.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04f8ab788026715fa63b31960869617cba39117e520eb415b0139543e325ab59"
+checksum = "4ee73e6e4924fe940354b8d4d98cad5231175d615cd855b758adc658c0aac6a0"
dependencies = [
- "cfg-if 0.1.10",
- "rand 0.7.3",
+ "cfg-if 1.0.0",
+ "rand 0.8.5",
"static_assertions",
]
@@ -1090,9 +1190,9 @@
[[package]]
name = "unicode-width"
-version = "0.1.8"
+version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
+checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
[[package]]
name = "unicode-xid"
@@ -1124,7 +1224,7 @@
dependencies = [
"hex",
"rand 0.7.3",
- "sha-1",
+ "sha-1 0.9.6",
]
[[package]]
@@ -1195,18 +1295,18 @@
[[package]]
name = "zstd"
-version = "0.5.3+zstd.1.4.5"
+version = "0.5.4+zstd.1.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "01b32eaf771efa709e8308605bbf9319bf485dc1503179ec0469b611937c0cd8"
+checksum = "69996ebdb1ba8b1517f61387a883857818a66c8a295f487b1ffd8fd9d2c82910"
dependencies = [
"zstd-safe",
]
[[package]]
name = "zstd-safe"
-version = "2.0.5+zstd.1.4.5"
+version = "2.0.6+zstd.1.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1cfb642e0d27f64729a639c52db457e0ae906e7bc6f5fe8f5c453230400f1055"
+checksum = "98aa931fb69ecee256d44589d19754e61851ae4769bf963b385119b1cc37a49e"
dependencies = [
"libc",
"zstd-sys",
@@ -1214,12 +1314,12 @@
[[package]]
name = "zstd-sys"
-version = "1.4.17+zstd.1.4.5"
+version = "1.4.18+zstd.1.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b89249644df056b522696b1bb9e7c18c87e8ffa3e2f0dc3b0155875d6498f01b"
+checksum = "a1e6e8778706838f43f771d80d37787cb2fe06dafe89dd3aebaf6721b9eaec81"
dependencies = [
"cc",
"glob",
- "itertools",
+ "itertools 0.9.0",
"libc",
]
--- a/rust/README.rst Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/README.rst Thu Jun 16 15:28:54 2022 +0200
@@ -40,8 +40,8 @@
Special features
================
-You might want to check the `features` section in ``hg-cpython/Cargo.toml``.
-It may contain features that might be interesting to try out.
+In the future, compile-time opt-ins may be added
+to the `features` section in ``hg-cpython/Cargo.toml``.
To use features from the Makefile, use the `HG_RUST_FEATURES` environment
variable: for instance `HG_RUST_FEATURES="some-feature other-feature"`
--- a/rust/hg-core/Cargo.toml Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/Cargo.toml Thu Jun 16 15:28:54 2022 +0200
@@ -9,23 +9,24 @@
name = "hg"
[dependencies]
-bitflags = "1.2"
-bytes-cast = "0.2"
-byteorder = "1.3.4"
-derive_more = "0.99"
-home = "0.5"
+bitflags = "1.3.2"
+bytes-cast = "0.2.0"
+byteorder = "1.4.3"
+derive_more = "0.99.17"
+hashbrown = { version = "0.9.1", features = ["rayon"] }
+home = "0.5.3"
im-rc = "15.0"
-itertools = "0.9"
+itertools = "0.10.3"
lazy_static = "1.4.0"
libc = "0.2"
ouroboros = "0.15.0"
rand = "0.8.4"
rand_pcg = "0.3.1"
-rand_distr = "0.4.2"
-rayon = "1.3.0"
-regex = "1.3.9"
-sha-1 = "0.9.6"
-twox-hash = "1.5.0"
+rand_distr = "0.4.3"
+rayon = "1.5.1"
+regex = "1.5.5"
+sha-1 = "0.10.0"
+twox-hash = "1.6.2"
same-file = "1.0.6"
tempfile = "3.1.0"
crossbeam-channel = "0.4"
@@ -38,10 +39,10 @@
# We don't use the `miniz-oxide` backend to not change rhg benchmarks and until
# we have a clearer view of which backend is the fastest.
[dependencies.flate2]
-version = "1.0.16"
+version = "1.0.22"
features = ["zlib"]
default-features = false
[dev-dependencies]
-clap = "*"
-pretty_assertions = "0.6.1"
+clap = "2.34.0"
+pretty_assertions = "1.1.0"
--- a/rust/hg-core/src/dirstate/dirs_multiset.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/dirstate/dirs_multiset.rs Thu Jun 16 15:28:54 2022 +0200
@@ -10,7 +10,6 @@
//! Used to counts the references to directories in a manifest or dirstate.
use crate::dirstate_tree::on_disk::DirstateV2ParseError;
use crate::{
- dirstate::EntryState,
utils::{
files,
hg_path::{HgPath, HgPathBuf, HgPathError},
@@ -49,7 +48,7 @@
let filename = filename.as_ref();
// This `if` is optimized out of the loop
if only_tracked {
- if entry.state() != EntryState::Removed {
+ if !entry.removed() {
multiset.add_path(filename)?;
}
} else {
@@ -215,6 +214,8 @@
#[cfg(test)]
mod tests {
+ use crate::EntryState;
+
use super::*;
#[test]
--- a/rust/hg-core/src/dirstate/entry.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/dirstate/entry.rs Thu Jun 16 15:28:54 2022 +0200
@@ -248,23 +248,41 @@
/// dirstate v1 format.
pub const SIZE_NON_NORMAL: i32 = -1;
+#[derive(Debug, Default, Copy, Clone)]
+pub struct DirstateV2Data {
+ pub wc_tracked: bool,
+ pub p1_tracked: bool,
+ pub p2_info: bool,
+ pub mode_size: Option<(u32, u32)>,
+ pub mtime: Option<TruncatedTimestamp>,
+ pub fallback_exec: Option<bool>,
+ pub fallback_symlink: Option<bool>,
+}
+
+#[derive(Debug, Default, Copy, Clone)]
+pub struct ParentFileData {
+ pub mode_size: Option<(u32, u32)>,
+ pub mtime: Option<TruncatedTimestamp>,
+}
+
impl DirstateEntry {
- pub fn from_v2_data(
- wdir_tracked: bool,
- p1_tracked: bool,
- p2_info: bool,
- mode_size: Option<(u32, u32)>,
- mtime: Option<TruncatedTimestamp>,
- fallback_exec: Option<bool>,
- fallback_symlink: Option<bool>,
- ) -> Self {
+ pub fn from_v2_data(v2_data: DirstateV2Data) -> Self {
+ let DirstateV2Data {
+ wc_tracked,
+ p1_tracked,
+ p2_info,
+ mode_size,
+ mtime,
+ fallback_exec,
+ fallback_symlink,
+ } = v2_data;
if let Some((mode, size)) = mode_size {
// TODO: return an error for out of range values?
assert!(mode & !RANGE_MASK_31BIT == 0);
assert!(size & !RANGE_MASK_31BIT == 0);
}
let mut flags = Flags::empty();
- flags.set(Flags::WDIR_TRACKED, wdir_tracked);
+ flags.set(Flags::WDIR_TRACKED, wc_tracked);
flags.set(Flags::P1_TRACKED, p1_tracked);
flags.set(Flags::P2_INFO, p2_info);
if let Some(exec) = fallback_exec {
@@ -367,6 +385,14 @@
Self::from_v1_data(EntryState::Removed, 0, size, 0)
}
+ pub fn new_tracked() -> Self {
+ let data = DirstateV2Data {
+ wc_tracked: true,
+ ..Default::default()
+ };
+ Self::from_v2_data(data)
+ }
+
pub fn tracked(&self) -> bool {
self.flags.contains(Flags::WDIR_TRACKED)
}
@@ -391,6 +417,11 @@
self.flags.contains(Flags::WDIR_TRACKED) && !self.in_either_parent()
}
+ pub fn modified(&self) -> bool {
+ self.flags
+ .contains(Flags::WDIR_TRACKED | Flags::P1_TRACKED | Flags::P2_INFO)
+ }
+
pub fn maybe_clean(&self) -> bool {
if !self.flags.contains(Flags::WDIR_TRACKED) {
false
@@ -409,36 +440,25 @@
)
}
- /// Returns `(wdir_tracked, p1_tracked, p2_info, mode_size, mtime)`
- pub(crate) fn v2_data(
- &self,
- ) -> (
- bool,
- bool,
- bool,
- Option<(u32, u32)>,
- Option<TruncatedTimestamp>,
- Option<bool>,
- Option<bool>,
- ) {
+ pub(crate) fn v2_data(&self) -> DirstateV2Data {
if !self.any_tracked() {
// TODO: return an Option instead?
- panic!("Accessing v1_state of an untracked DirstateEntry")
+ panic!("Accessing v2_data of an untracked DirstateEntry")
}
- let wdir_tracked = self.flags.contains(Flags::WDIR_TRACKED);
+ let wc_tracked = self.flags.contains(Flags::WDIR_TRACKED);
let p1_tracked = self.flags.contains(Flags::P1_TRACKED);
let p2_info = self.flags.contains(Flags::P2_INFO);
let mode_size = self.mode_size;
let mtime = self.mtime;
- (
- wdir_tracked,
+ DirstateV2Data {
+ wc_tracked,
p1_tracked,
p2_info,
mode_size,
mtime,
- self.get_fallback_exec(),
- self.get_fallback_symlink(),
- )
+ fallback_exec: self.get_fallback_exec(),
+ fallback_symlink: self.get_fallback_symlink(),
+ }
}
fn v1_state(&self) -> EntryState {
@@ -448,10 +468,7 @@
}
if self.removed() {
EntryState::Removed
- } else if self
- .flags
- .contains(Flags::WDIR_TRACKED | Flags::P1_TRACKED | Flags::P2_INFO)
- {
+ } else if self.modified() {
EntryState::Merged
} else if self.added() {
EntryState::Added
@@ -638,8 +655,7 @@
}
pub(crate) fn is_from_other_parent(&self) -> bool {
- self.state() == EntryState::Normal
- && self.size() == SIZE_FROM_OTHER_PARENT
+ self.flags.contains(Flags::WDIR_TRACKED | Flags::P2_INFO)
}
// TODO: other platforms
--- a/rust/hg-core/src/dirstate_tree/dirstate_map.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/dirstate_tree/dirstate_map.rs Thu Jun 16 15:28:54 2022 +0200
@@ -11,18 +11,18 @@
use crate::dirstate::parsers::packed_entry_size;
use crate::dirstate::parsers::parse_dirstate_entries;
use crate::dirstate::CopyMapIter;
+use crate::dirstate::DirstateV2Data;
+use crate::dirstate::ParentFileData;
use crate::dirstate::StateMapIter;
use crate::dirstate::TruncatedTimestamp;
-use crate::dirstate::SIZE_FROM_OTHER_PARENT;
-use crate::dirstate::SIZE_NON_NORMAL;
use crate::matchers::Matcher;
use crate::utils::hg_path::{HgPath, HgPathBuf};
use crate::DirstateEntry;
use crate::DirstateError;
+use crate::DirstateMapError;
use crate::DirstateParents;
use crate::DirstateStatus;
-use crate::EntryState;
-use crate::FastHashMap;
+use crate::FastHashbrownMap as FastHashMap;
use crate::PatternFileWarning;
use crate::StatusError;
use crate::StatusOptions;
@@ -38,6 +38,7 @@
V2,
}
+#[derive(Debug)]
pub struct DirstateMap<'on_disk> {
/// Contents of the `.hg/dirstate` file
pub(super) on_disk: &'on_disk [u8],
@@ -73,21 +74,25 @@
/// Similar to `&'tree Cow<'on_disk, HgPath>`, but can also be returned
/// for on-disk nodes that don’t actually have a `Cow` to borrow.
+#[derive(Debug)]
pub(super) enum BorrowedPath<'tree, 'on_disk> {
InMemory(&'tree HgPathBuf),
OnDisk(&'on_disk HgPath),
}
+#[derive(Debug)]
pub(super) enum ChildNodes<'on_disk> {
InMemory(FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
OnDisk(&'on_disk [on_disk::Node]),
}
+#[derive(Debug)]
pub(super) enum ChildNodesRef<'tree, 'on_disk> {
InMemory(&'tree FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
OnDisk(&'on_disk [on_disk::Node]),
}
+#[derive(Debug)]
pub(super) enum NodeRef<'tree, 'on_disk> {
InMemory(&'tree NodeKey<'on_disk>, &'tree Node<'on_disk>),
OnDisk(&'on_disk on_disk::Node),
@@ -353,12 +358,6 @@
}
}
- pub(super) fn state(
- &self,
- ) -> Result<Option<EntryState>, DirstateV2ParseError> {
- Ok(self.entry()?.map(|e| e.state()))
- }
-
pub(super) fn cached_directory_mtime(
&self,
) -> Result<Option<TruncatedTimestamp>, DirstateV2ParseError> {
@@ -389,7 +388,7 @@
}
/// Represents a file or a directory
-#[derive(Default)]
+#[derive(Default, Debug)]
pub(super) struct Node<'on_disk> {
pub(super) data: NodeData,
@@ -405,6 +404,7 @@
pub(super) tracked_descendants_count: u32,
}
+#[derive(Debug)]
pub(super) enum NodeData {
Entry(DirstateEntry),
CachedDirectory { mtime: TruncatedTimestamp },
@@ -431,6 +431,13 @@
_ => None,
}
}
+
+ fn as_entry_mut(&mut self) -> Option<&mut DirstateEntry> {
+ match self {
+ NodeData::Entry(entry) => Some(entry),
+ _ => None,
+ }
+ }
}
impl<'on_disk> DirstateMap<'on_disk> {
@@ -472,8 +479,8 @@
let parents = parse_dirstate_entries(
map.on_disk,
|path, entry, copy_source| {
- let tracked = entry.state().is_tracked();
- let node = Self::get_or_insert_node(
+ let tracked = entry.tracked();
+ let node = Self::get_or_insert_node_inner(
map.on_disk,
&mut map.unreachable_bytes,
&mut map.root,
@@ -540,13 +547,37 @@
/// Returns a mutable reference to the node at `path` if it exists
///
+ /// `each_ancestor` is a callback that is called for each ancestor node
+ /// when descending the tree. It is used to keep the different counters
+ /// of the `DirstateMap` up-to-date.
+ fn get_node_mut<'tree>(
+ &'tree mut self,
+ path: &HgPath,
+ each_ancestor: impl FnMut(&mut Node),
+ ) -> Result<Option<&'tree mut Node<'on_disk>>, DirstateV2ParseError> {
+ Self::get_node_mut_inner(
+ self.on_disk,
+ &mut self.unreachable_bytes,
+ &mut self.root,
+ path,
+ each_ancestor,
+ )
+ }
+
+ /// Lower-level version of `get_node_mut`.
+ ///
/// This takes `root` instead of `&mut self` so that callers can mutate
- /// other fields while the returned borrow is still valid
- fn get_node_mut<'tree>(
+ /// other fields while the returned borrow is still valid.
+ ///
+ /// `each_ancestor` is a callback that is called for each ancestor node
+ /// when descending the tree. It is used to keep the different counters
+ /// of the `DirstateMap` up-to-date.
+ fn get_node_mut_inner<'tree>(
on_disk: &'on_disk [u8],
unreachable_bytes: &mut u32,
root: &'tree mut ChildNodes<'on_disk>,
path: &HgPath,
+ mut each_ancestor: impl FnMut(&mut Node),
) -> Result<Option<&'tree mut Node<'on_disk>>, DirstateV2ParseError> {
let mut children = root;
let mut components = path.components();
@@ -558,6 +589,7 @@
.get_mut(component)
{
if let Some(next_component) = components.next() {
+ each_ancestor(child);
component = next_component;
children = &mut child.children;
} else {
@@ -569,21 +601,30 @@
}
}
- pub(super) fn get_or_insert<'tree, 'path>(
+ /// Get a mutable reference to the node at `path`, creating it if it does
+ /// not exist.
+ ///
+ /// `each_ancestor` is a callback that is called for each ancestor node
+ /// when descending the tree. It is used to keep the different counters
+ /// of the `DirstateMap` up-to-date.
+ fn get_or_insert_node<'tree, 'path>(
&'tree mut self,
- path: &HgPath,
+ path: &'path HgPath,
+ each_ancestor: impl FnMut(&mut Node),
) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
- Self::get_or_insert_node(
+ Self::get_or_insert_node_inner(
self.on_disk,
&mut self.unreachable_bytes,
&mut self.root,
path,
WithBasename::to_cow_owned,
- |_| {},
+ each_ancestor,
)
}
- fn get_or_insert_node<'tree, 'path>(
+    /// Lower-level version of `get_or_insert_node`, which is used when
+    /// parsing disk data to remove allocations for new nodes.
+ fn get_or_insert_node_inner<'tree, 'path>(
on_disk: &'on_disk [u8],
unreachable_bytes: &mut u32,
root: &'tree mut ChildNodes<'on_disk>,
@@ -600,13 +641,11 @@
.next()
.expect("expected at least one inclusive ancestor");
loop {
- // TODO: can we avoid allocating an owned key in cases where the
- // map already contains that key, without introducing double
- // lookup?
- let child_node = child_nodes
+ let (_, child_node) = child_nodes
.make_mut(on_disk, unreachable_bytes)?
- .entry(to_cow(ancestor_path))
- .or_default();
+ .raw_entry_mut()
+ .from_key(ancestor_path.base_name())
+ .or_insert_with(|| (to_cow(ancestor_path), Node::default()));
if let Some(next) = inclusive_ancestor_paths.next() {
each_ancestor(child_node);
ancestor_path = next;
@@ -617,46 +656,208 @@
}
}
- fn add_or_remove_file(
+ fn reset_state(
+ &mut self,
+ filename: &HgPath,
+ old_entry_opt: Option<DirstateEntry>,
+ wc_tracked: bool,
+ p1_tracked: bool,
+ p2_info: bool,
+ has_meaningful_mtime: bool,
+ parent_file_data_opt: Option<ParentFileData>,
+ ) -> Result<(), DirstateError> {
+ let (had_entry, was_tracked) = match old_entry_opt {
+ Some(old_entry) => (true, old_entry.tracked()),
+ None => (false, false),
+ };
+ let node = self.get_or_insert_node(filename, |ancestor| {
+ if !had_entry {
+ ancestor.descendants_with_entry_count += 1;
+ }
+ if was_tracked {
+ if !wc_tracked {
+ ancestor.tracked_descendants_count = ancestor
+ .tracked_descendants_count
+ .checked_sub(1)
+ .expect("tracked count to be >= 0");
+ }
+ } else {
+ if wc_tracked {
+ ancestor.tracked_descendants_count += 1;
+ }
+ }
+ })?;
+
+ let v2_data = if let Some(parent_file_data) = parent_file_data_opt {
+ DirstateV2Data {
+ wc_tracked,
+ p1_tracked,
+ p2_info,
+ mode_size: parent_file_data.mode_size,
+ mtime: if has_meaningful_mtime {
+ parent_file_data.mtime
+ } else {
+ None
+ },
+ ..Default::default()
+ }
+ } else {
+ DirstateV2Data {
+ wc_tracked,
+ p1_tracked,
+ p2_info,
+ ..Default::default()
+ }
+ };
+ node.data = NodeData::Entry(DirstateEntry::from_v2_data(v2_data));
+ if !had_entry {
+ self.nodes_with_entry_count += 1;
+ }
+ Ok(())
+ }
+
+ fn set_tracked(
+ &mut self,
+ filename: &HgPath,
+ old_entry_opt: Option<DirstateEntry>,
+ ) -> Result<bool, DirstateV2ParseError> {
+ let was_tracked = old_entry_opt.map_or(false, |e| e.tracked());
+ let had_entry = old_entry_opt.is_some();
+ let tracked_count_increment = if was_tracked { 0 } else { 1 };
+ let mut new = false;
+
+ let node = self.get_or_insert_node(filename, |ancestor| {
+ if !had_entry {
+ ancestor.descendants_with_entry_count += 1;
+ }
+
+ ancestor.tracked_descendants_count += tracked_count_increment;
+ })?;
+ if let Some(old_entry) = old_entry_opt {
+ let mut e = old_entry.clone();
+ if e.tracked() {
+ // XXX
+                // This is probably overkill for most cases, but we need this
+                // to fully replace the `normallookup` call with a
+                // `set_tracked` one. Consider smoothing this in the future.
+ e.set_possibly_dirty();
+ } else {
+ new = true;
+ e.set_tracked();
+ }
+ node.data = NodeData::Entry(e)
+ } else {
+ node.data = NodeData::Entry(DirstateEntry::new_tracked());
+ self.nodes_with_entry_count += 1;
+ new = true;
+ };
+ Ok(new)
+ }
+
+ /// Set a node as untracked in the dirstate.
+ ///
+ /// It is the responsibility of the caller to remove the copy source and/or
+ /// the entry itself if appropriate.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the node does not exist.
+ fn set_untracked(
+ &mut self,
+ filename: &HgPath,
+ old_entry: DirstateEntry,
+ ) -> Result<(), DirstateV2ParseError> {
+ let node = self
+ .get_node_mut(filename, |ancestor| {
+ ancestor.tracked_descendants_count = ancestor
+ .tracked_descendants_count
+ .checked_sub(1)
+ .expect("tracked_descendants_count should be >= 0");
+ })?
+ .expect("node should exist");
+ let mut new_entry = old_entry.clone();
+ new_entry.set_untracked();
+ node.data = NodeData::Entry(new_entry);
+ Ok(())
+ }
+
+ /// Set a node as clean in the dirstate.
+ ///
+ /// It is the responsibility of the caller to remove the copy source.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the node does not exist.
+ fn set_clean(
+ &mut self,
+ filename: &HgPath,
+ old_entry: DirstateEntry,
+ mode: u32,
+ size: u32,
+ mtime: TruncatedTimestamp,
+ ) -> Result<(), DirstateError> {
+ let node = self
+ .get_node_mut(filename, |ancestor| {
+ if !old_entry.tracked() {
+ ancestor.tracked_descendants_count += 1;
+ }
+ })?
+ .expect("node should exist");
+ let mut new_entry = old_entry.clone();
+ new_entry.set_clean(mode, size, mtime);
+ node.data = NodeData::Entry(new_entry);
+ Ok(())
+ }
+
+ /// Set a node as possibly dirty in the dirstate.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the node does not exist.
+ fn set_possibly_dirty(
+ &mut self,
+ filename: &HgPath,
+ ) -> Result<(), DirstateError> {
+ let node = self
+ .get_node_mut(filename, |_ancestor| {})?
+ .expect("node should exist");
+ let entry = node.data.as_entry_mut().expect("entry should exist");
+ entry.set_possibly_dirty();
+ node.data = NodeData::Entry(*entry);
+ Ok(())
+ }
+
+ /// Clears the cached mtime for the (potential) folder at `path`.
+ pub(super) fn clear_cached_mtime(
&mut self,
path: &HgPath,
- old_state: Option<EntryState>,
- new_entry: DirstateEntry,
) -> Result<(), DirstateV2ParseError> {
- let had_entry = old_state.is_some();
- let was_tracked = old_state.map_or(false, |s| s.is_tracked());
- let tracked_count_increment =
- match (was_tracked, new_entry.state().is_tracked()) {
- (false, true) => 1,
- (true, false) => -1,
- _ => 0,
- };
+ let node = match self.get_node_mut(path, |_ancestor| {})? {
+ Some(node) => node,
+ None => return Ok(()),
+ };
+ if let NodeData::CachedDirectory { .. } = &node.data {
+ node.data = NodeData::None
+ }
+ Ok(())
+ }
- let node = Self::get_or_insert_node(
- self.on_disk,
- &mut self.unreachable_bytes,
- &mut self.root,
- path,
- WithBasename::to_cow_owned,
- |ancestor| {
- if !had_entry {
- ancestor.descendants_with_entry_count += 1;
- }
-
- // We can’t use `+= increment` because the counter is unsigned,
- // and we want debug builds to detect accidental underflow
- // through zero
- match tracked_count_increment {
- 1 => ancestor.tracked_descendants_count += 1,
- -1 => ancestor.tracked_descendants_count -= 1,
- _ => {}
- }
- },
- )?;
- if !had_entry {
- self.nodes_with_entry_count += 1
+ /// Sets the cached mtime for the (potential) folder at `path`.
+ pub(super) fn set_cached_mtime(
+ &mut self,
+ path: &HgPath,
+ mtime: TruncatedTimestamp,
+ ) -> Result<(), DirstateV2ParseError> {
+ let node = match self.get_node_mut(path, |_ancestor| {})? {
+ Some(node) => node,
+ None => return Ok(()),
+ };
+ match &node.data {
+ NodeData::Entry(_) => {} // Don’t overwrite an entry
+ NodeData::CachedDirectory { .. } | NodeData::None => {
+ node.data = NodeData::CachedDirectory { mtime }
+ }
}
- node.data = NodeData::Entry(new_entry);
Ok(())
}
@@ -747,59 +948,103 @@
});
}
- pub fn set_entry(
+ pub fn set_tracked(
+ &mut self,
+ filename: &HgPath,
+ ) -> Result<bool, DirstateV2ParseError> {
+ let old_entry_opt = self.get(filename)?;
+ self.with_dmap_mut(|map| map.set_tracked(filename, old_entry_opt))
+ }
+
+ pub fn set_untracked(
&mut self,
filename: &HgPath,
- entry: DirstateEntry,
- ) -> Result<(), DirstateV2ParseError> {
- self.with_dmap_mut(|map| {
- map.get_or_insert(&filename)?.data = NodeData::Entry(entry);
- Ok(())
- })
+ ) -> Result<bool, DirstateError> {
+ let old_entry_opt = self.get(filename)?;
+ match old_entry_opt {
+ None => Ok(false),
+ Some(old_entry) => {
+ if !old_entry.tracked() {
+                    // `DirstateMap::set_untracked` is not a no-op when the
+                    // file is already untracked, as it would decrement the
+                    // tracked counters while going down.
+ return Ok(true);
+ }
+ if old_entry.added() {
+ // Untracking an "added" entry will just result in a
+ // worthless entry (and other parts of the code will
+ // complain about it), just drop it entirely.
+ self.drop_entry_and_copy_source(filename)?;
+ return Ok(true);
+ }
+ if !old_entry.p2_info() {
+ self.copy_map_remove(filename)?;
+ }
+
+ self.with_dmap_mut(|map| {
+ map.set_untracked(filename, old_entry)?;
+ Ok(true)
+ })
+ }
+ }
}
- pub fn add_file(
+ pub fn set_clean(
&mut self,
filename: &HgPath,
- entry: DirstateEntry,
+ mode: u32,
+ size: u32,
+ mtime: TruncatedTimestamp,
) -> Result<(), DirstateError> {
- let old_state = self.get(filename)?.map(|e| e.state());
+ let old_entry = match self.get(filename)? {
+ None => {
+ return Err(
+ DirstateMapError::PathNotFound(filename.into()).into()
+ )
+ }
+ Some(e) => e,
+ };
+ self.copy_map_remove(filename)?;
self.with_dmap_mut(|map| {
- Ok(map.add_or_remove_file(filename, old_state, entry)?)
+ map.set_clean(filename, old_entry, mode, size, mtime)
})
}
- pub fn remove_file(
+ pub fn set_possibly_dirty(
+ &mut self,
+ filename: &HgPath,
+ ) -> Result<(), DirstateError> {
+ if self.get(filename)?.is_none() {
+ return Err(DirstateMapError::PathNotFound(filename.into()).into());
+ }
+ self.with_dmap_mut(|map| map.set_possibly_dirty(filename))
+ }
+
+ pub fn reset_state(
&mut self,
filename: &HgPath,
- in_merge: bool,
+ wc_tracked: bool,
+ p1_tracked: bool,
+ p2_info: bool,
+ has_meaningful_mtime: bool,
+ parent_file_data_opt: Option<ParentFileData>,
) -> Result<(), DirstateError> {
+ if !(p1_tracked || p2_info || wc_tracked) {
+ self.drop_entry_and_copy_source(filename)?;
+ return Ok(());
+ }
+ self.copy_map_remove(filename)?;
let old_entry_opt = self.get(filename)?;
- let old_state = old_entry_opt.map(|e| e.state());
- let mut size = 0;
- if in_merge {
- // XXX we should not be able to have 'm' state and 'FROM_P2' if not
- // during a merge. So I (marmoute) am not sure we need the
- // conditionnal at all. Adding double checking this with assert
- // would be nice.
- if let Some(old_entry) = old_entry_opt {
- // backup the previous state
- if old_entry.state() == EntryState::Merged {
- size = SIZE_NON_NORMAL;
- } else if old_entry.state() == EntryState::Normal
- && old_entry.size() == SIZE_FROM_OTHER_PARENT
- {
- // other parent
- size = SIZE_FROM_OTHER_PARENT;
- }
- }
- }
- if size == 0 {
- self.copy_map_remove(filename)?;
- }
self.with_dmap_mut(|map| {
- let entry = DirstateEntry::new_removed(size);
- Ok(map.add_or_remove_file(filename, old_state, entry)?)
+ map.reset_state(
+ filename,
+ old_entry_opt,
+ wc_tracked,
+ p1_tracked,
+ p2_info,
+ has_meaningful_mtime,
+ parent_file_data_opt,
+ )
})
}
@@ -807,9 +1052,7 @@
&mut self,
filename: &HgPath,
) -> Result<(), DirstateError> {
- let was_tracked = self
- .get(filename)?
- .map_or(false, |e| e.state().is_tracked());
+ let was_tracked = self.get(filename)?.map_or(false, |e| e.tracked());
struct Dropped {
was_tracked: bool,
had_entry: bool,
@@ -941,8 +1184,8 @@
if let Some(node) = map.get_node(directory)? {
// A node without a `DirstateEntry` was created to hold child
// nodes, and is therefore a directory.
- let state = node.state()?;
- Ok(state.is_none() && node.tracked_descendants_count() > 0)
+ let is_dir = node.entry()?.is_none();
+ Ok(is_dir && node.tracked_descendants_count() > 0)
} else {
Ok(false)
}
@@ -957,8 +1200,8 @@
if let Some(node) = map.get_node(directory)? {
// A node without a `DirstateEntry` was created to hold child
// nodes, and is therefore a directory.
- let state = node.state()?;
- Ok(state.is_none() && node.descendants_with_entry_count() > 0)
+ let is_dir = node.entry()?.is_none();
+ Ok(is_dir && node.descendants_with_entry_count() > 0)
} else {
Ok(false)
}
@@ -1088,15 +1331,18 @@
self.with_dmap_mut(|map| {
let count = &mut map.nodes_with_copy_source_count;
let unreachable_bytes = &mut map.unreachable_bytes;
- Ok(DirstateMap::get_node_mut(
+ Ok(DirstateMap::get_node_mut_inner(
map.on_disk,
unreachable_bytes,
&mut map.root,
key,
+ |_ancestor| {},
)?
.and_then(|node| {
if let Some(source) = &node.copy_source {
- *count -= 1;
+ *count = count
+ .checked_sub(1)
+ .expect("nodes_with_copy_source_count should be >= 0");
DirstateMap::count_dropped_path(unreachable_bytes, source);
}
node.copy_source.take().map(Cow::into_owned)
@@ -1106,22 +1352,20 @@
pub fn copy_map_insert(
&mut self,
- key: HgPathBuf,
- value: HgPathBuf,
+ key: &HgPath,
+ value: &HgPath,
) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
self.with_dmap_mut(|map| {
- let node = DirstateMap::get_or_insert_node(
- map.on_disk,
- &mut map.unreachable_bytes,
- &mut map.root,
- &key,
- WithBasename::to_cow_owned,
- |_ancestor| {},
- )?;
- if node.copy_source.is_none() {
+ let node = map.get_or_insert_node(&key, |_ancestor| {})?;
+ let had_copy_source = node.copy_source.is_none();
+ let old = node
+ .copy_source
+ .replace(value.to_owned().into())
+ .map(Cow::into_owned);
+ if had_copy_source {
map.nodes_with_copy_source_count += 1
}
- Ok(node.copy_source.replace(value.into()).map(Cow::into_owned))
+ Ok(old)
})
}
@@ -1184,6 +1428,41 @@
)))
}
+ /// Only public because it needs to be exposed to the Python layer.
+ /// It is not the full `setparents` logic, only the parts that mutate the
+ /// entries.
+ pub fn setparents_fixup(
+ &mut self,
+ ) -> Result<Vec<(HgPathBuf, HgPathBuf)>, DirstateV2ParseError> {
+ // XXX
+ // All the copying and re-querying is quite inefficient, but this is
+ // still a lot better than doing it from Python.
+ //
+ // The better solution is to develop a mechanism for `iter_mut`,
+ // which will be a lot more involved: we're dealing with a lazy,
+ // append-mostly, tree-like data structure. This will do for now.
+ let mut copies = vec![];
+ let mut files_with_p2_info = vec![];
+ for res in self.iter() {
+ let (path, entry) = res?;
+ if entry.p2_info() {
+ files_with_p2_info.push(path.to_owned())
+ }
+ }
+ self.with_dmap_mut(|map| {
+ for path in files_with_p2_info.iter() {
+ let node = map.get_or_insert_node(path, |_| {})?;
+ let entry =
+ node.data.as_entry_mut().expect("entry should exist");
+ entry.drop_merge_data();
+ if let Some(source) = node.copy_source.take().as_deref() {
+ copies.push((path.to_owned(), source.to_owned()));
+ }
+ }
+ Ok(copies)
+ })
+ }
+
pub fn debug_iter(
&self,
all: bool,
@@ -1211,3 +1490,418 @@
}))
}
}
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ /// Shortcut to return tracked descendants of a path.
+ /// Panics if the path does not exist.
+ fn tracked_descendants(map: &OwningDirstateMap, path: &[u8]) -> u32 {
+ let path = dbg!(HgPath::new(path));
+ let node = map.get_map().get_node(path);
+ node.unwrap().unwrap().tracked_descendants_count()
+ }
+
+ /// Shortcut to return descendants with an entry.
+ /// Panics if the path does not exist.
+ fn descendants_with_an_entry(map: &OwningDirstateMap, path: &[u8]) -> u32 {
+ let path = dbg!(HgPath::new(path));
+ let node = map.get_map().get_node(path);
+ node.unwrap().unwrap().descendants_with_entry_count()
+ }
+
+ fn assert_does_not_exist(map: &OwningDirstateMap, path: &[u8]) {
+ let path = dbg!(HgPath::new(path));
+ let node = map.get_map().get_node(path);
+ assert!(node.unwrap().is_none());
+ }
+
+ /// Shortcut for path creation in tests
+ fn p(b: &[u8]) -> &HgPath {
+ HgPath::new(b)
+ }
+
+ /// Test the very simple case a single tracked file
+ #[test]
+ fn test_tracked_descendants_simple() -> Result<(), DirstateError> {
+ let mut map = OwningDirstateMap::new_empty(vec![]);
+ assert_eq!(map.len(), 0);
+
+ map.set_tracked(p(b"some/nested/path"))?;
+
+ assert_eq!(map.len(), 1);
+ assert_eq!(tracked_descendants(&map, b"some"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/nested"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/nested/path"), 0);
+
+ map.set_untracked(p(b"some/nested/path"))?;
+ assert_eq!(map.len(), 0);
+ assert!(map.get_map().get_node(p(b"some"))?.is_none());
+
+ Ok(())
+ }
+
+ /// Test the simple case of all tracked, but multiple files
+ #[test]
+ fn test_tracked_descendants_multiple() -> Result<(), DirstateError> {
+ let mut map = OwningDirstateMap::new_empty(vec![]);
+
+ map.set_tracked(p(b"some/nested/path"))?;
+ map.set_tracked(p(b"some/nested/file"))?;
+ // one layer without any files to test deletion cascade
+ map.set_tracked(p(b"some/other/nested/path"))?;
+ map.set_tracked(p(b"root_file"))?;
+ map.set_tracked(p(b"some/file"))?;
+ map.set_tracked(p(b"some/file2"))?;
+ map.set_tracked(p(b"some/file3"))?;
+
+ assert_eq!(map.len(), 7);
+ assert_eq!(tracked_descendants(&map, b"some"), 6);
+ assert_eq!(tracked_descendants(&map, b"some/nested"), 2);
+ assert_eq!(tracked_descendants(&map, b"some/other"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/other/nested"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/nested/path"), 0);
+
+ map.set_untracked(p(b"some/nested/path"))?;
+ assert_eq!(map.len(), 6);
+ assert_eq!(tracked_descendants(&map, b"some"), 5);
+ assert_eq!(tracked_descendants(&map, b"some/nested"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/other"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/other/nested"), 1);
+
+ map.set_untracked(p(b"some/nested/file"))?;
+ assert_eq!(map.len(), 5);
+ assert_eq!(tracked_descendants(&map, b"some"), 4);
+ assert_eq!(tracked_descendants(&map, b"some/other"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/other/nested"), 1);
+ assert_does_not_exist(&map, b"some_nested");
+
+ map.set_untracked(p(b"some/other/nested/path"))?;
+ assert_eq!(map.len(), 4);
+ assert_eq!(tracked_descendants(&map, b"some"), 3);
+ assert_does_not_exist(&map, b"some/other");
+
+ map.set_untracked(p(b"root_file"))?;
+ assert_eq!(map.len(), 3);
+ assert_eq!(tracked_descendants(&map, b"some"), 3);
+ assert_does_not_exist(&map, b"root_file");
+
+ map.set_untracked(p(b"some/file"))?;
+ assert_eq!(map.len(), 2);
+ assert_eq!(tracked_descendants(&map, b"some"), 2);
+ assert_does_not_exist(&map, b"some/file");
+
+ map.set_untracked(p(b"some/file2"))?;
+ assert_eq!(map.len(), 1);
+ assert_eq!(tracked_descendants(&map, b"some"), 1);
+ assert_does_not_exist(&map, b"some/file2");
+
+ map.set_untracked(p(b"some/file3"))?;
+ assert_eq!(map.len(), 0);
+ assert_does_not_exist(&map, b"some/file3");
+
+ Ok(())
+ }
+
+ /// Check with a mix of tracked and non-tracked items
+ #[test]
+ fn test_tracked_descendants_different() -> Result<(), DirstateError> {
+ let mut map = OwningDirstateMap::new_empty(vec![]);
+
+ // A file that was just added
+ map.set_tracked(p(b"some/nested/path"))?;
+ // This has no information, the dirstate should ignore it
+ map.reset_state(p(b"some/file"), false, false, false, false, None)?;
+ assert_does_not_exist(&map, b"some/file");
+
+ // A file that was removed
+ map.reset_state(
+ p(b"some/nested/file"),
+ false,
+ true,
+ false,
+ false,
+ None,
+ )?;
+ assert!(!map.get(p(b"some/nested/file"))?.unwrap().tracked());
+ // Only present in p2
+ map.reset_state(p(b"some/file3"), false, false, true, false, None)?;
+ assert!(!map.get(p(b"some/file3"))?.unwrap().tracked());
+ // A file that was merged
+ map.reset_state(p(b"root_file"), true, true, true, false, None)?;
+ assert!(map.get(p(b"root_file"))?.unwrap().tracked());
+ // A file that is added, with info from p2
+ // XXX is that actually possible?
+ map.reset_state(p(b"some/file2"), true, false, true, false, None)?;
+ assert!(map.get(p(b"some/file2"))?.unwrap().tracked());
+ // A clean file
+ // One layer without any files to test deletion cascade
+ map.reset_state(
+ p(b"some/other/nested/path"),
+ true,
+ true,
+ false,
+ false,
+ None,
+ )?;
+ assert!(map.get(p(b"some/other/nested/path"))?.unwrap().tracked());
+
+ assert_eq!(map.len(), 6);
+ assert_eq!(tracked_descendants(&map, b"some"), 3);
+ assert_eq!(descendants_with_an_entry(&map, b"some"), 5);
+ assert_eq!(tracked_descendants(&map, b"some/other/nested"), 1);
+ assert_eq!(descendants_with_an_entry(&map, b"some/other/nested"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/other/nested/path"), 0);
+ assert_eq!(
+ descendants_with_an_entry(&map, b"some/other/nested/path"),
+ 0
+ );
+ assert_eq!(tracked_descendants(&map, b"some/nested"), 1);
+ assert_eq!(descendants_with_an_entry(&map, b"some/nested"), 2);
+
+ // might as well check this
+ map.set_untracked(p(b"path/does/not/exist"))?;
+ assert_eq!(map.len(), 6);
+
+ map.set_untracked(p(b"some/other/nested/path"))?;
+ // It is set untracked but not deleted since it held other information
+ assert_eq!(map.len(), 6);
+ assert_eq!(tracked_descendants(&map, b"some"), 2);
+ assert_eq!(descendants_with_an_entry(&map, b"some"), 5);
+ assert_eq!(descendants_with_an_entry(&map, b"some/other"), 1);
+ assert_eq!(descendants_with_an_entry(&map, b"some/other/nested"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/nested"), 1);
+ assert_eq!(descendants_with_an_entry(&map, b"some/nested"), 2);
+
+ map.set_untracked(p(b"some/nested/path"))?;
+ // It is set untracked *and* deleted since it was only added
+ assert_eq!(map.len(), 5);
+ assert_eq!(tracked_descendants(&map, b"some"), 1);
+ assert_eq!(descendants_with_an_entry(&map, b"some"), 4);
+ assert_eq!(tracked_descendants(&map, b"some/nested"), 0);
+ assert_eq!(descendants_with_an_entry(&map, b"some/nested"), 1);
+ assert_does_not_exist(&map, b"some/nested/path");
+
+ map.set_untracked(p(b"root_file"))?;
+ // Untracked but not deleted
+ assert_eq!(map.len(), 5);
+ assert!(map.get(p(b"root_file"))?.is_some());
+
+ map.set_untracked(p(b"some/file2"))?;
+ assert_eq!(map.len(), 5);
+ assert_eq!(tracked_descendants(&map, b"some"), 0);
+ assert!(map.get(p(b"some/file2"))?.is_some());
+
+ map.set_untracked(p(b"some/file3"))?;
+ assert_eq!(map.len(), 5);
+ assert_eq!(tracked_descendants(&map, b"some"), 0);
+ assert!(map.get(p(b"some/file3"))?.is_some());
+
+ Ok(())
+ }
+
+ /// Check that copies counter is correctly updated
+ #[test]
+ fn test_copy_source() -> Result<(), DirstateError> {
+ let mut map = OwningDirstateMap::new_empty(vec![]);
+
+ // Clean file
+ map.reset_state(p(b"files/clean"), true, true, false, false, None)?;
+ // Merged file
+ map.reset_state(p(b"files/from_p2"), true, true, true, false, None)?;
+ // Removed file
+ map.reset_state(p(b"removed"), false, true, false, false, None)?;
+ // Added file
+ map.reset_state(p(b"files/added"), true, false, false, false, None)?;
+ // Add copy
+ map.copy_map_insert(p(b"files/clean"), p(b"clean_copy_source"))?;
+ assert_eq!(map.copy_map_len(), 1);
+
+ // Copy override
+ map.copy_map_insert(p(b"files/clean"), p(b"other_clean_copy_source"))?;
+ assert_eq!(map.copy_map_len(), 1);
+
+ // Multiple copies
+ map.copy_map_insert(p(b"removed"), p(b"removed_copy_source"))?;
+ assert_eq!(map.copy_map_len(), 2);
+
+ map.copy_map_insert(p(b"files/added"), p(b"added_copy_source"))?;
+ assert_eq!(map.copy_map_len(), 3);
+
+ // Added, so the entry is completely removed
+ map.set_untracked(p(b"files/added"))?;
+ assert_does_not_exist(&map, b"files/added");
+ assert_eq!(map.copy_map_len(), 2);
+
+ // Removed, so the entry is kept around, so is its copy
+ map.set_untracked(p(b"removed"))?;
+ assert!(map.get(p(b"removed"))?.is_some());
+ assert_eq!(map.copy_map_len(), 2);
+
+ // Clean, so the entry is kept around, but not its copy
+ map.set_untracked(p(b"files/clean"))?;
+ assert!(map.get(p(b"files/clean"))?.is_some());
+ assert_eq!(map.copy_map_len(), 1);
+
+ map.copy_map_insert(p(b"files/from_p2"), p(b"from_p2_copy_source"))?;
+ assert_eq!(map.copy_map_len(), 2);
+
+ // Info from p2, so its copy source info is kept around
+ map.set_untracked(p(b"files/from_p2"))?;
+ assert!(map.get(p(b"files/from_p2"))?.is_some());
+ assert_eq!(map.copy_map_len(), 2);
+
+ Ok(())
+ }
+
+ /// Test with "on disk" data. For the sake of this test, the "on disk" data
+ /// does not actually come from the disk, but it's opaque to the code being
+ /// tested.
+ #[test]
+ fn test_on_disk() -> Result<(), DirstateError> {
+ // First let's create some data to put "on disk"
+ let mut map = OwningDirstateMap::new_empty(vec![]);
+
+ // A file that was just added
+ map.set_tracked(p(b"some/nested/added"))?;
+ map.copy_map_insert(p(b"some/nested/added"), p(b"added_copy_source"))?;
+
+ // A file that was removed
+ map.reset_state(
+ p(b"some/nested/removed"),
+ false,
+ true,
+ false,
+ false,
+ None,
+ )?;
+ // Only present in p2
+ map.reset_state(
+ p(b"other/p2_info_only"),
+ false,
+ false,
+ true,
+ false,
+ None,
+ )?;
+ map.copy_map_insert(
+ p(b"other/p2_info_only"),
+ p(b"other/p2_info_copy_source"),
+ )?;
+ // A file that was merged
+ map.reset_state(p(b"merged"), true, true, true, false, None)?;
+ // A file that is added, with info from p2
+ // XXX is that actually possible?
+ map.reset_state(
+ p(b"other/added_with_p2"),
+ true,
+ false,
+ true,
+ false,
+ None,
+ )?;
+ // One layer without any files to test deletion cascade
+ // A clean file
+ map.reset_state(
+ p(b"some/other/nested/clean"),
+ true,
+ true,
+ false,
+ false,
+ None,
+ )?;
+
+ let (packed, metadata, _should_append, _old_data_size) =
+ map.pack_v2(false)?;
+ let packed_len = packed.len();
+ assert!(packed_len > 0);
+
+ // Recreate "from disk"
+ let mut map = OwningDirstateMap::new_v2(
+ packed,
+ packed_len,
+ metadata.as_bytes(),
+ )?;
+
+ // Check that everything is accounted for
+ assert!(map.contains_key(p(b"some/nested/added"))?);
+ assert!(map.contains_key(p(b"some/nested/removed"))?);
+ assert!(map.contains_key(p(b"merged"))?);
+ assert!(map.contains_key(p(b"other/p2_info_only"))?);
+ assert!(map.contains_key(p(b"other/added_with_p2"))?);
+ assert!(map.contains_key(p(b"some/other/nested/clean"))?);
+ assert_eq!(
+ map.copy_map_get(p(b"some/nested/added"))?,
+ Some(p(b"added_copy_source"))
+ );
+ assert_eq!(
+ map.copy_map_get(p(b"other/p2_info_only"))?,
+ Some(p(b"other/p2_info_copy_source"))
+ );
+ assert_eq!(tracked_descendants(&map, b"some"), 2);
+ assert_eq!(descendants_with_an_entry(&map, b"some"), 3);
+ assert_eq!(tracked_descendants(&map, b"other"), 1);
+ assert_eq!(descendants_with_an_entry(&map, b"other"), 2);
+ assert_eq!(tracked_descendants(&map, b"some/other"), 1);
+ assert_eq!(descendants_with_an_entry(&map, b"some/other"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/other/nested"), 1);
+ assert_eq!(descendants_with_an_entry(&map, b"some/other/nested"), 1);
+ assert_eq!(tracked_descendants(&map, b"some/nested"), 1);
+ assert_eq!(descendants_with_an_entry(&map, b"some/nested"), 2);
+ assert_eq!(map.len(), 6);
+ assert_eq!(map.get_map().unreachable_bytes, 0);
+ assert_eq!(map.copy_map_len(), 2);
+
+ // Shouldn't change anything since it's already not tracked
+ map.set_untracked(p(b"some/nested/removed"))?;
+ assert_eq!(map.get_map().unreachable_bytes, 0);
+
+ match map.get_map().root {
+ ChildNodes::InMemory(_) => {
+ panic!("root should not have been mutated")
+ }
+ _ => (),
+ }
+ // We haven't mutated enough (nothing, actually), we should still be in
+ // the append strategy
+ assert!(map.get_map().write_should_append());
+
+ // But this mutates the structure, so there should be unreachable_bytes
+ assert!(map.set_untracked(p(b"some/nested/added"))?);
+ let unreachable_bytes = map.get_map().unreachable_bytes;
+ assert!(unreachable_bytes > 0);
+
+ match map.get_map().root {
+ ChildNodes::OnDisk(_) => panic!("root should have been mutated"),
+ _ => (),
+ }
+
+ // This should not mutate the structure either, since `root` has
+ // already been mutated along with its direct children.
+ map.set_untracked(p(b"merged"))?;
+ assert_eq!(map.get_map().unreachable_bytes, unreachable_bytes);
+
+ match map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap() {
+ NodeRef::InMemory(_, _) => {
+ panic!("'other/added_with_p2' should not have been mutated")
+ }
+ _ => (),
+ }
+ // But this should, since it's in a different path
+ // than `<root>some/nested/add`
+ map.set_untracked(p(b"other/added_with_p2"))?;
+ assert!(map.get_map().unreachable_bytes > unreachable_bytes);
+
+ match map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap() {
+ NodeRef::OnDisk(_) => {
+ panic!("'other/added_with_p2' should have been mutated")
+ }
+ _ => (),
+ }
+
+ // We have rewritten most of the tree, we should create a new file
+ assert!(!map.get_map().write_should_append());
+
+ Ok(())
+ }
+}
--- a/rust/hg-core/src/dirstate_tree/on_disk.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/dirstate_tree/on_disk.rs Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,7 @@
//!
//! See `mercurial/helptext/internals/dirstate-v2.txt`
-use crate::dirstate::TruncatedTimestamp;
+use crate::dirstate::{DirstateV2Data, TruncatedTimestamp};
use crate::dirstate_tree::dirstate_map::DirstateVersion;
use crate::dirstate_tree::dirstate_map::{self, DirstateMap, NodeRef};
use crate::dirstate_tree::path_with_basename::WithBasename;
@@ -85,7 +85,7 @@
/// Fields are documented in the *The data file format*
/// section of `mercurial/helptext/internals/dirstate-v2.txt`
-#[derive(BytesCast)]
+#[derive(BytesCast, Debug)]
#[repr(C)]
pub(super) struct Node {
full_path: PathSlice,
@@ -125,7 +125,7 @@
}
/// Duration since the Unix epoch
-#[derive(BytesCast, Copy, Clone)]
+#[derive(BytesCast, Copy, Clone, Debug)]
#[repr(C)]
struct PackedTruncatedTimestamp {
truncated_seconds: U32Be,
@@ -153,7 +153,7 @@
/// Always sorted by ascending `full_path`, to allow binary search.
/// Since nodes with the same parent nodes also have the same parent path,
/// only the `base_name`s need to be compared during binary search.
-#[derive(BytesCast, Copy, Clone)]
+#[derive(BytesCast, Copy, Clone, Debug)]
#[repr(C)]
struct ChildNodes {
start: Offset,
@@ -161,7 +161,7 @@
}
/// A `HgPath` of `len` bytes
-#[derive(BytesCast, Copy, Clone)]
+#[derive(BytesCast, Copy, Clone, Debug)]
#[repr(C)]
struct PathSlice {
start: Offset,
@@ -417,7 +417,7 @@
fn assume_entry(&self) -> Result<DirstateEntry, DirstateV2ParseError> {
// TODO: convert through raw bits instead?
- let wdir_tracked = self.flags().contains(Flags::WDIR_TRACKED);
+ let wc_tracked = self.flags().contains(Flags::WDIR_TRACKED);
let p1_tracked = self.flags().contains(Flags::P1_TRACKED);
let p2_info = self.flags().contains(Flags::P2_INFO);
let mode_size = if self.flags().contains(Flags::HAS_MODE_AND_SIZE)
@@ -447,15 +447,15 @@
} else {
None
};
- Ok(DirstateEntry::from_v2_data(
- wdir_tracked,
+ Ok(DirstateEntry::from_v2_data(DirstateV2Data {
+ wc_tracked,
p1_tracked,
p2_info,
mode_size,
mtime,
fallback_exec,
fallback_symlink,
- ))
+ }))
}
pub(super) fn entry(
@@ -495,18 +495,18 @@
fn from_dirstate_entry(
entry: &DirstateEntry,
) -> (Flags, U32Be, PackedTruncatedTimestamp) {
- let (
- wdir_tracked,
+ let DirstateV2Data {
+ wc_tracked,
p1_tracked,
p2_info,
- mode_size_opt,
- mtime_opt,
+ mode_size: mode_size_opt,
+ mtime: mtime_opt,
fallback_exec,
fallback_symlink,
- ) = entry.v2_data();
- // TODO: convert throug raw flag bits instead?
+ } = entry.v2_data();
+ // TODO: convert through raw flag bits instead?
let mut flags = Flags::empty();
- flags.set(Flags::WDIR_TRACKED, wdir_tracked);
+ flags.set(Flags::WDIR_TRACKED, wc_tracked);
flags.set(Flags::P1_TRACKED, p1_tracked);
flags.set(Flags::P2_INFO, p2_info);
let size = if let Some((m, s)) = mode_size_opt {
@@ -592,7 +592,7 @@
) -> Result<(), DirstateV2ParseError> {
for node in read_nodes(on_disk, nodes)? {
if let Some(entry) = node.entry()? {
- if entry.state().is_tracked() {
+ if entry.tracked() {
f(node.full_path(on_disk)?)
}
}
--- a/rust/hg-core/src/dirstate_tree/status.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/dirstate_tree/status.rs Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
use crate::dirstate_tree::dirstate_map::ChildNodesRef;
use crate::dirstate_tree::dirstate_map::DirstateMap;
use crate::dirstate_tree::dirstate_map::DirstateVersion;
-use crate::dirstate_tree::dirstate_map::NodeData;
use crate::dirstate_tree::dirstate_map::NodeRef;
use crate::dirstate_tree::on_disk::DirstateV2ParseError;
use crate::matchers::get_ignore_function;
@@ -15,7 +14,6 @@
use crate::utils::hg_path::HgPath;
use crate::BadMatch;
use crate::DirstateStatus;
-use crate::EntryState;
use crate::HgPathBuf;
use crate::HgPathCow;
use crate::PatternFileWarning;
@@ -155,19 +153,10 @@
// Remove outdated mtimes before adding new mtimes, in case a given
// directory is both
for path in &outdated {
- let node = dmap.get_or_insert(path)?;
- if let NodeData::CachedDirectory { .. } = &node.data {
- node.data = NodeData::None
- }
+ dmap.clear_cached_mtime(path)?;
}
for (path, mtime) in &new_cachable {
- let node = dmap.get_or_insert(path)?;
- match &node.data {
- NodeData::Entry(_) => {} // Don’t overwrite an entry
- NodeData::CachedDirectory { .. } | NodeData::None => {
- node.data = NodeData::CachedDirectory { mtime: *mtime }
- }
- }
+ dmap.set_cached_mtime(path, *mtime)?;
}
Ok((outcome, warnings))
@@ -484,17 +473,23 @@
)?
} else {
if file_or_symlink && self.matcher.matches(hg_path) {
- if let Some(state) = dirstate_node.state()? {
- match state {
- EntryState::Added => {
- self.push_outcome(Outcome::Added, &dirstate_node)?
- }
- EntryState::Removed => self
- .push_outcome(Outcome::Removed, &dirstate_node)?,
- EntryState::Merged => self
- .push_outcome(Outcome::Modified, &dirstate_node)?,
- EntryState::Normal => self
- .handle_normal_file(&dirstate_node, fs_metadata)?,
+ if let Some(entry) = dirstate_node.entry()? {
+ if !entry.any_tracked() {
+ // Forward-compat if we start tracking unknown/ignored
+ // files for caching reasons
+ self.mark_unknown_or_ignored(
+ has_ignored_ancestor,
+ hg_path,
+ );
+ }
+ if entry.added() {
+ self.push_outcome(Outcome::Added, &dirstate_node)?;
+ } else if entry.removed() {
+ self.push_outcome(Outcome::Removed, &dirstate_node)?;
+ } else if entry.modified() {
+ self.push_outcome(Outcome::Modified, &dirstate_node)?;
+ } else {
+ self.handle_normal_file(&dirstate_node, fs_metadata)?;
}
} else {
// `node.entry.is_none()` indicates a "directory"
@@ -604,8 +599,7 @@
Ok(())
}
- /// A file with `EntryState::Normal` in the dirstate was found in the
- /// filesystem
+ /// A file that is clean in the dirstate was found in the filesystem
fn handle_normal_file(
&self,
dirstate_node: &NodeRef<'tree, 'on_disk>,
@@ -678,10 +672,15 @@
&self,
dirstate_node: &NodeRef<'tree, 'on_disk>,
) -> Result<(), DirstateV2ParseError> {
- if let Some(state) = dirstate_node.state()? {
+ if let Some(entry) = dirstate_node.entry()? {
+ if !entry.any_tracked() {
+ // Future-compat for when we start storing ignored and unknown
+ // files for caching reasons
+ return Ok(());
+ }
let path = dirstate_node.full_path(self.dmap.on_disk)?;
if self.matcher.matches(path) {
- if let EntryState::Removed = state {
+ if entry.removed() {
self.push_outcome(Outcome::Removed, dirstate_node)?
} else {
self.push_outcome(Outcome::Deleted, &dirstate_node)?
--- a/rust/hg-core/src/errors.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/errors.rs Thu Jun 16 15:28:54 2022 +0200
@@ -42,6 +42,9 @@
/// and syntax of each value.
#[from]
ConfigValueParseError(ConfigValueParseError),
+
+ /// Censored revision data.
+ CensoredNodeError,
}
/// Details about where an I/O error happened
@@ -101,6 +104,9 @@
HgError::UnsupportedFeature(explanation) => {
write!(f, "unsupported feature: {}", explanation)
}
+ HgError::CensoredNodeError => {
+ write!(f, "encountered a censored node")
+ }
HgError::ConfigValueParseError(error) => error.fmt(f),
}
}
--- a/rust/hg-core/src/lib.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/lib.rs Thu Jun 16 15:28:54 2022 +0200
@@ -56,6 +56,11 @@
/// write access to your repository, you have other issues.
pub type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>;
+// TODO: should this be the default `FastHashMap` for all of hg-core, not just
+// dirstate_tree? How does XxHash compare with AHash, hashbrown’s default?
+pub type FastHashbrownMap<K, V> =
+ hashbrown::HashMap<K, V, RandomXxHashBuilder64>;
+
#[derive(Debug, PartialEq)]
pub enum DirstateMapError {
PathNotFound(HgPathBuf),
--- a/rust/hg-core/src/matchers.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/matchers.rs Thu Jun 16 15:28:54 2022 +0200
@@ -34,21 +34,21 @@
use micro_timer::timed;
#[derive(Debug, PartialEq)]
-pub enum VisitChildrenSet<'a> {
+pub enum VisitChildrenSet {
/// Don't visit anything
Empty,
/// Only visit this directory
This,
/// Visit this directory and these subdirectories
/// TODO Should we implement a `NonEmptyHashSet`?
- Set(HashSet<&'a HgPath>),
+ Set(HashSet<HgPathBuf>),
/// Visit this directory and all subdirectories
Recursive,
}
pub trait Matcher {
/// Explicitly listed files
- fn file_set(&self) -> Option<&HashSet<&HgPath>>;
+ fn file_set(&self) -> Option<&HashSet<HgPathBuf>>;
/// Returns whether `filename` is in `file_set`
fn exact_match(&self, filename: &HgPath) -> bool;
/// Returns whether `filename` is matched by this matcher
@@ -114,7 +114,7 @@
pub struct AlwaysMatcher;
impl Matcher for AlwaysMatcher {
- fn file_set(&self) -> Option<&HashSet<&HgPath>> {
+ fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
None
}
fn exact_match(&self, _filename: &HgPath) -> bool {
@@ -134,14 +134,39 @@
}
}
+/// Matches nothing.
+#[derive(Debug)]
+pub struct NeverMatcher;
+
+impl Matcher for NeverMatcher {
+ fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
+ None
+ }
+ fn exact_match(&self, _filename: &HgPath) -> bool {
+ false
+ }
+ fn matches(&self, _filename: &HgPath) -> bool {
+ false
+ }
+ fn visit_children_set(&self, _directory: &HgPath) -> VisitChildrenSet {
+ VisitChildrenSet::Empty
+ }
+ fn matches_everything(&self) -> bool {
+ false
+ }
+ fn is_exact(&self) -> bool {
+ true
+ }
+}
+
/// Matches the input files exactly. They are interpreted as paths, not
/// patterns.
///
///```
/// use hg::{ matchers::{Matcher, FileMatcher}, utils::hg_path::{HgPath, HgPathBuf} };
///
-/// let files = [HgPathBuf::from_bytes(b"a.txt"), HgPathBuf::from_bytes(br"re:.*\.c$")];
-/// let matcher = FileMatcher::new(&files).unwrap();
+/// let files = vec![HgPathBuf::from_bytes(b"a.txt"), HgPathBuf::from_bytes(br"re:.*\.c$")];
+/// let matcher = FileMatcher::new(files).unwrap();
///
/// assert_eq!(matcher.matches(HgPath::new(b"a.txt")), true);
/// assert_eq!(matcher.matches(HgPath::new(b"b.txt")), false);
@@ -149,16 +174,17 @@
/// assert_eq!(matcher.matches(HgPath::new(br"re:.*\.c$")), true);
/// ```
#[derive(Debug)]
-pub struct FileMatcher<'a> {
- files: HashSet<&'a HgPath>,
+pub struct FileMatcher {
+ files: HashSet<HgPathBuf>,
dirs: DirsMultiset,
}
-impl<'a> FileMatcher<'a> {
- pub fn new(files: &'a [HgPathBuf]) -> Result<Self, DirstateMapError> {
+impl FileMatcher {
+ pub fn new(files: Vec<HgPathBuf>) -> Result<Self, DirstateMapError> {
+ let dirs = DirsMultiset::from_manifest(&files)?;
Ok(Self {
- files: HashSet::from_iter(files.iter().map(AsRef::as_ref)),
- dirs: DirsMultiset::from_manifest(files)?,
+ files: HashSet::from_iter(files.into_iter()),
+ dirs,
})
}
fn inner_matches(&self, filename: &HgPath) -> bool {
@@ -166,8 +192,8 @@
}
}
-impl<'a> Matcher for FileMatcher<'a> {
- fn file_set(&self) -> Option<&HashSet<&HgPath>> {
+impl Matcher for FileMatcher {
+ fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
Some(&self.files)
}
fn exact_match(&self, filename: &HgPath) -> bool {
@@ -180,10 +206,10 @@
if self.files.is_empty() || !self.dirs.contains(&directory) {
return VisitChildrenSet::Empty;
}
- let dirs_as_set = self.dirs.iter().map(Deref::deref).collect();
+ let mut candidates: HashSet<HgPathBuf> =
+ self.dirs.iter().cloned().collect();
- let mut candidates: HashSet<&HgPath> =
- self.files.union(&dirs_as_set).cloned().collect();
+ candidates.extend(self.files.iter().cloned());
candidates.remove(HgPath::new(b""));
if !directory.as_ref().is_empty() {
@@ -192,7 +218,9 @@
.iter()
.filter_map(|c| {
if c.as_bytes().starts_with(&directory) {
- Some(HgPath::new(&c.as_bytes()[directory.len()..]))
+ Some(HgPathBuf::from_bytes(
+ &c.as_bytes()[directory.len()..],
+ ))
} else {
None
}
@@ -207,10 +235,10 @@
// subdir will be in there without a slash.
VisitChildrenSet::Set(
candidates
- .iter()
+ .into_iter()
.filter_map(|c| {
if c.bytes().all(|b| *b != b'/') {
- Some(*c)
+ Some(c)
} else {
None
}
@@ -256,7 +284,7 @@
}
impl<'a> Matcher for IncludeMatcher<'a> {
- fn file_set(&self) -> Option<&HashSet<&HgPath>> {
+ fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
None
}
@@ -284,7 +312,9 @@
if self.parents.contains(directory.as_ref()) {
let multiset = self.get_all_parents_children();
if let Some(children) = multiset.get(dir) {
- return VisitChildrenSet::Set(children.to_owned());
+ return VisitChildrenSet::Set(
+ children.into_iter().map(HgPathBuf::from).collect(),
+ );
}
}
VisitChildrenSet::Empty
@@ -299,6 +329,151 @@
}
}
+/// The union of multiple matchers. Will match if any of the matchers match.
+pub struct UnionMatcher {
+ matchers: Vec<Box<dyn Matcher + Sync>>,
+}
+
+impl Matcher for UnionMatcher {
+ fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
+ None
+ }
+
+ fn exact_match(&self, _filename: &HgPath) -> bool {
+ false
+ }
+
+ fn matches(&self, filename: &HgPath) -> bool {
+ self.matchers.iter().any(|m| m.matches(filename))
+ }
+
+ fn visit_children_set(&self, directory: &HgPath) -> VisitChildrenSet {
+ let mut result = HashSet::new();
+ let mut this = false;
+ for matcher in self.matchers.iter() {
+ let visit = matcher.visit_children_set(directory);
+ match visit {
+ VisitChildrenSet::Empty => continue,
+ VisitChildrenSet::This => {
+ this = true;
+ // Don't break, we might have an 'all' in here.
+ continue;
+ }
+ VisitChildrenSet::Set(set) => {
+ result.extend(set);
+ }
+ VisitChildrenSet::Recursive => {
+ return visit;
+ }
+ }
+ }
+ if this {
+ return VisitChildrenSet::This;
+ }
+ if result.is_empty() {
+ VisitChildrenSet::Empty
+ } else {
+ VisitChildrenSet::Set(result)
+ }
+ }
+
+ fn matches_everything(&self) -> bool {
+ // TODO Maybe if all are AlwaysMatcher?
+ false
+ }
+
+ fn is_exact(&self) -> bool {
+ false
+ }
+}
+
+impl UnionMatcher {
+ pub fn new(matchers: Vec<Box<dyn Matcher + Sync>>) -> Self {
+ Self { matchers }
+ }
+}
+
+pub struct IntersectionMatcher {
+ m1: Box<dyn Matcher + Sync>,
+ m2: Box<dyn Matcher + Sync>,
+ files: Option<HashSet<HgPathBuf>>,
+}
+
+impl Matcher for IntersectionMatcher {
+ fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
+ self.files.as_ref()
+ }
+
+ fn exact_match(&self, filename: &HgPath) -> bool {
+ self.files.as_ref().map_or(false, |f| f.contains(filename))
+ }
+
+ fn matches(&self, filename: &HgPath) -> bool {
+ self.m1.matches(filename) && self.m2.matches(filename)
+ }
+
+ fn visit_children_set(&self, directory: &HgPath) -> VisitChildrenSet {
+ let m1_set = self.m1.visit_children_set(directory);
+ if m1_set == VisitChildrenSet::Empty {
+ return VisitChildrenSet::Empty;
+ }
+ let m2_set = self.m2.visit_children_set(directory);
+ if m2_set == VisitChildrenSet::Empty {
+ return VisitChildrenSet::Empty;
+ }
+
+ if m1_set == VisitChildrenSet::Recursive {
+ return m2_set;
+ } else if m2_set == VisitChildrenSet::Recursive {
+ return m1_set;
+ }
+
+ match (&m1_set, &m2_set) {
+ (VisitChildrenSet::Recursive, _) => m2_set,
+ (_, VisitChildrenSet::Recursive) => m1_set,
+ (VisitChildrenSet::This, _) | (_, VisitChildrenSet::This) => {
+ VisitChildrenSet::This
+ }
+ (VisitChildrenSet::Set(m1), VisitChildrenSet::Set(m2)) => {
+ let set: HashSet<_> = m1.intersection(&m2).cloned().collect();
+ if set.is_empty() {
+ VisitChildrenSet::Empty
+ } else {
+ VisitChildrenSet::Set(set)
+ }
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ fn matches_everything(&self) -> bool {
+ self.m1.matches_everything() && self.m2.matches_everything()
+ }
+
+ fn is_exact(&self) -> bool {
+ self.m1.is_exact() || self.m2.is_exact()
+ }
+}
+
+impl IntersectionMatcher {
+ pub fn new(
+ mut m1: Box<dyn Matcher + Sync>,
+ mut m2: Box<dyn Matcher + Sync>,
+ ) -> Self {
+ let files = if m1.is_exact() || m2.is_exact() {
+ if !m1.is_exact() {
+ std::mem::swap(&mut m1, &mut m2);
+ }
+ m1.file_set().map(|m1_files| {
+ m1_files.iter().cloned().filter(|f| m2.matches(f)).collect()
+ })
+ } else {
+ None
+ };
+ Self { m1, m2, files }
+ }
+}
+
/// Returns a function that matches an `HgPath` against the given regex
/// pattern.
///
@@ -721,24 +896,24 @@
fn test_filematcher_visit_children_set() {
// Visitchildrenset
let files = vec![HgPathBuf::from_bytes(b"dir/subdir/foo.txt")];
- let matcher = FileMatcher::new(&files).unwrap();
+ let matcher = FileMatcher::new(files).unwrap();
let mut set = HashSet::new();
- set.insert(HgPath::new(b"dir"));
+ set.insert(HgPathBuf::from_bytes(b"dir"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"")),
VisitChildrenSet::Set(set)
);
let mut set = HashSet::new();
- set.insert(HgPath::new(b"subdir"));
+ set.insert(HgPathBuf::from_bytes(b"subdir"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"dir")),
VisitChildrenSet::Set(set)
);
let mut set = HashSet::new();
- set.insert(HgPath::new(b"foo.txt"));
+ set.insert(HgPathBuf::from_bytes(b"foo.txt"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"dir/subdir")),
VisitChildrenSet::Set(set)
@@ -767,40 +942,40 @@
// No file in a/b/c
HgPathBuf::from_bytes(b"a/b/c/d/file4.txt"),
];
- let matcher = FileMatcher::new(&files).unwrap();
+ let matcher = FileMatcher::new(files).unwrap();
let mut set = HashSet::new();
- set.insert(HgPath::new(b"a"));
- set.insert(HgPath::new(b"rootfile.txt"));
+ set.insert(HgPathBuf::from_bytes(b"a"));
+ set.insert(HgPathBuf::from_bytes(b"rootfile.txt"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"")),
VisitChildrenSet::Set(set)
);
let mut set = HashSet::new();
- set.insert(HgPath::new(b"b"));
- set.insert(HgPath::new(b"file1.txt"));
+ set.insert(HgPathBuf::from_bytes(b"b"));
+ set.insert(HgPathBuf::from_bytes(b"file1.txt"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"a")),
VisitChildrenSet::Set(set)
);
let mut set = HashSet::new();
- set.insert(HgPath::new(b"c"));
- set.insert(HgPath::new(b"file2.txt"));
+ set.insert(HgPathBuf::from_bytes(b"c"));
+ set.insert(HgPathBuf::from_bytes(b"file2.txt"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"a/b")),
VisitChildrenSet::Set(set)
);
let mut set = HashSet::new();
- set.insert(HgPath::new(b"d"));
+ set.insert(HgPathBuf::from_bytes(b"d"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"a/b/c")),
VisitChildrenSet::Set(set)
);
let mut set = HashSet::new();
- set.insert(HgPath::new(b"file4.txt"));
+ set.insert(HgPathBuf::from_bytes(b"file4.txt"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"a/b/c/d")),
VisitChildrenSet::Set(set)
@@ -827,14 +1002,14 @@
.unwrap();
let mut set = HashSet::new();
- set.insert(HgPath::new(b"dir"));
+ set.insert(HgPathBuf::from_bytes(b"dir"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"")),
VisitChildrenSet::Set(set)
);
let mut set = HashSet::new();
- set.insert(HgPath::new(b"subdir"));
+ set.insert(HgPathBuf::from_bytes(b"subdir"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"dir")),
VisitChildrenSet::Set(set)
@@ -862,14 +1037,14 @@
.unwrap();
let mut set = HashSet::new();
- set.insert(HgPath::new(b"dir"));
+ set.insert(HgPathBuf::from_bytes(b"dir"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"")),
VisitChildrenSet::Set(set)
);
let mut set = HashSet::new();
- set.insert(HgPath::new(b"subdir"));
+ set.insert(HgPathBuf::from_bytes(b"subdir"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"dir")),
VisitChildrenSet::Set(set)
@@ -897,7 +1072,7 @@
.unwrap();
let mut set = HashSet::new();
- set.insert(HgPath::new(b"dir"));
+ set.insert(HgPathBuf::from_bytes(b"dir"));
assert_eq!(
matcher.visit_children_set(HgPath::new(b"")),
VisitChildrenSet::Set(set)
@@ -920,4 +1095,373 @@
VisitChildrenSet::This
);
}
+
+ #[test]
+ fn test_unionmatcher() {
+ // Path + Rootfiles
+ let m1 = IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir",
+ Path::new(""),
+ )])
+ .unwrap();
+ let m2 = IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RootFiles,
+ b"dir",
+ Path::new(""),
+ )])
+ .unwrap();
+ let matcher = UnionMatcher::new(vec![Box::new(m1), Box::new(m2)]);
+
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::This
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::Recursive
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/foo")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+
+ // OPT: These next two could be 'all' instead of 'this'.
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/z")),
+ VisitChildrenSet::This
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::This
+ );
+
+ // Path + unrelated Path
+ let m1 = IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir",
+ Path::new(""),
+ )])
+ .unwrap();
+ let m2 = IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"folder",
+ Path::new(""),
+ )])
+ .unwrap();
+ let matcher = UnionMatcher::new(vec![Box::new(m1), Box::new(m2)]);
+
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"folder"));
+ set.insert(HgPathBuf::from_bytes(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"subdir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::Set(set)
+ );
+
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::Recursive
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/foo")),
+ VisitChildrenSet::Empty
+ );
+
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Recursive
+ );
+ // OPT: These next two could be 'all' instead of 'this'.
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/z")),
+ VisitChildrenSet::This
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::This
+ );
+
+ // Path + subpath
+ let m1 = IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir/x",
+ Path::new(""),
+ )])
+ .unwrap();
+ let m2 = IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir",
+ Path::new(""),
+ )])
+ .unwrap();
+ let matcher = UnionMatcher::new(vec![Box::new(m1), Box::new(m2)]);
+
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"subdir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::Set(set)
+ );
+
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::Recursive
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/foo")),
+ VisitChildrenSet::Empty
+ );
+
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::Recursive
+ );
+ // OPT: this should probably be 'all' not 'this'.
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/z")),
+ VisitChildrenSet::This
+ );
+ }
+
+ #[test]
+ fn test_intersectionmatcher() {
+ // Include path + Include rootfiles
+ let m1 = Box::new(
+ IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir",
+ Path::new(""),
+ )])
+ .unwrap(),
+ );
+ let m2 = Box::new(
+ IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RootFiles,
+ b"dir",
+ Path::new(""),
+ )])
+ .unwrap(),
+ );
+ let matcher = IntersectionMatcher::new(m1, m2);
+
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::This
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/foo")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/z")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::Empty
+ );
+
+ // Non intersecting paths
+ let m1 = Box::new(
+ IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir",
+ Path::new(""),
+ )])
+ .unwrap(),
+ );
+ let m2 = Box::new(
+ IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"folder",
+ Path::new(""),
+ )])
+ .unwrap(),
+ );
+ let matcher = IntersectionMatcher::new(m1, m2);
+
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/foo")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/z")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::Empty
+ );
+
+ // Nested paths
+ let m1 = Box::new(
+ IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir/x",
+ Path::new(""),
+ )])
+ .unwrap(),
+ );
+ let m2 = Box::new(
+ IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir",
+ Path::new(""),
+ )])
+ .unwrap(),
+ );
+ let matcher = IntersectionMatcher::new(m1, m2);
+
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"subdir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::Set(set)
+ );
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"x"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::Set(set)
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/foo")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/z")),
+ VisitChildrenSet::Empty
+ );
+ // OPT: this should probably be 'all' not 'this'.
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::This
+ );
+
+ // Diverging paths
+ let m1 = Box::new(
+ IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir/x",
+ Path::new(""),
+ )])
+ .unwrap(),
+ );
+ let m2 = Box::new(
+ IncludeMatcher::new(vec![IgnorePattern::new(
+ PatternSyntax::RelPath,
+ b"dir/subdir/z",
+ Path::new(""),
+ )])
+ .unwrap(),
+ );
+ let matcher = IntersectionMatcher::new(m1, m2);
+
+ // OPT: these next two could probably be Empty as well.
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"dir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"")),
+ VisitChildrenSet::Set(set)
+ );
+ // OPT: these next two could probably be Empty as well.
+ let mut set = HashSet::new();
+ set.insert(HgPathBuf::from_bytes(b"subdir"));
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir")),
+ VisitChildrenSet::Set(set)
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/foo")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"folder")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/z")),
+ VisitChildrenSet::Empty
+ );
+ assert_eq!(
+ matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+ VisitChildrenSet::Empty
+ );
+ }
}
--- a/rust/hg-core/src/operations/debugdata.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/operations/debugdata.rs Thu Jun 16 15:28:54 2022 +0200
@@ -6,6 +6,7 @@
// GNU General Public License version 2 or any later version.
use crate::repo::Repo;
+use crate::requirements;
use crate::revlog::revlog::{Revlog, RevlogError};
/// Kind of data to debug
@@ -25,7 +26,11 @@
DebugDataKind::Changelog => "00changelog.i",
DebugDataKind::Manifest => "00manifest.i",
};
- let revlog = Revlog::open(repo, index_file, None)?;
+ let use_nodemap = repo
+ .requirements()
+ .contains(requirements::NODEMAP_REQUIREMENT);
+ let revlog =
+ Revlog::open(&repo.store_vfs(), index_file, None, use_nodemap)?;
let rev =
crate::revset::resolve_rev_number_or_hex_prefix(revset, &revlog)?;
let data = revlog.get_rev_data(rev)?;
--- a/rust/hg-core/src/operations/list_tracked_files.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/operations/list_tracked_files.rs Thu Jun 16 15:28:54 2022 +0200
@@ -51,7 +51,7 @@
let _parents = parse_dirstate_entries(
&self.content,
|path, entry, _copy_source| {
- if entry.state().is_tracked() {
+ if entry.tracked() {
files.push(path)
}
Ok(())
--- a/rust/hg-core/src/repo.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/repo.rs Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
use crate::dirstate_tree::owning::OwningDirstateMap;
use crate::errors::HgResultExt;
use crate::errors::{HgError, IoResultExt};
-use crate::exit_codes;
use crate::lock::{try_with_lock_no_wait, LockError};
use crate::manifest::{Manifest, Manifestlog};
use crate::revlog::filelog::Filelog;
@@ -30,11 +29,11 @@
store: PathBuf,
requirements: HashSet<String>,
config: Config,
- dirstate_parents: LazyCell<DirstateParents, HgError>,
- dirstate_data_file_uuid: LazyCell<Option<Vec<u8>>, HgError>,
- dirstate_map: LazyCell<OwningDirstateMap, DirstateError>,
- changelog: LazyCell<Changelog, HgError>,
- manifestlog: LazyCell<Manifestlog, HgError>,
+ dirstate_parents: LazyCell<DirstateParents>,
+ dirstate_data_file_uuid: LazyCell<Option<Vec<u8>>>,
+ dirstate_map: LazyCell<OwningDirstateMap>,
+ changelog: LazyCell<Changelog>,
+ manifestlog: LazyCell<Manifestlog>,
}
#[derive(Debug, derive_more::From)]
@@ -159,31 +158,8 @@
requirements::load(Vfs { base: &shared_path })?
.contains(requirements::SHARESAFE_REQUIREMENT);
- if share_safe && !source_is_share_safe {
- return Err(match config
- .get(b"share", b"safe-mismatch.source-not-safe")
- {
- Some(b"abort") | None => HgError::abort(
- "abort: share source does not support share-safe requirement\n\
- (see `hg help config.format.use-share-safe` for more information)",
- exit_codes::ABORT,
- ),
- _ => HgError::unsupported("share-safe downgrade"),
- }
- .into());
- } else if source_is_share_safe && !share_safe {
- return Err(
- match config.get(b"share", b"safe-mismatch.source-safe") {
- Some(b"abort") | None => HgError::abort(
- "abort: version mismatch: source uses share-safe \
- functionality while the current share does not\n\
- (see `hg help config.format.use-share-safe` for more information)",
- exit_codes::ABORT,
- ),
- _ => HgError::unsupported("share-safe upgrade"),
- }
- .into(),
- );
+ if share_safe != source_is_share_safe {
+ return Err(HgError::unsupported("share-safe mismatch").into());
}
if share_safe {
@@ -206,13 +182,11 @@
store: store_path,
dot_hg,
config: repo_config,
- dirstate_parents: LazyCell::new(Self::read_dirstate_parents),
- dirstate_data_file_uuid: LazyCell::new(
- Self::read_dirstate_data_file_uuid,
- ),
- dirstate_map: LazyCell::new(Self::new_dirstate_map),
- changelog: LazyCell::new(Changelog::open),
- manifestlog: LazyCell::new(Manifestlog::open),
+ dirstate_parents: LazyCell::new(),
+ dirstate_data_file_uuid: LazyCell::new(),
+ dirstate_map: LazyCell::new(),
+ changelog: LazyCell::new(),
+ manifestlog: LazyCell::new(),
};
requirements::check(&repo)?;
@@ -270,6 +244,11 @@
self.requirements.contains(requirements::NARROW_REQUIREMENT)
}
+ pub fn has_nodemap(&self) -> bool {
+ self.requirements
+ .contains(requirements::NODEMAP_REQUIREMENT)
+ }
+
fn dirstate_file_contents(&self) -> Result<Vec<u8>, HgError> {
Ok(self
.hg_vfs()
@@ -279,7 +258,9 @@
}
pub fn dirstate_parents(&self) -> Result<DirstateParents, HgError> {
- Ok(*self.dirstate_parents.get_or_init(self)?)
+ Ok(*self
+ .dirstate_parents
+ .get_or_init(|| self.read_dirstate_parents())?)
}
fn read_dirstate_parents(&self) -> Result<DirstateParents, HgError> {
@@ -359,29 +340,38 @@
pub fn dirstate_map(
&self,
) -> Result<Ref<OwningDirstateMap>, DirstateError> {
- self.dirstate_map.get_or_init(self)
+ self.dirstate_map.get_or_init(|| self.new_dirstate_map())
}
pub fn dirstate_map_mut(
&self,
) -> Result<RefMut<OwningDirstateMap>, DirstateError> {
- self.dirstate_map.get_mut_or_init(self)
+ self.dirstate_map
+ .get_mut_or_init(|| self.new_dirstate_map())
+ }
+
+ fn new_changelog(&self) -> Result<Changelog, HgError> {
+ Changelog::open(&self.store_vfs(), self.has_nodemap())
}
pub fn changelog(&self) -> Result<Ref<Changelog>, HgError> {
- self.changelog.get_or_init(self)
+ self.changelog.get_or_init(|| self.new_changelog())
}
pub fn changelog_mut(&self) -> Result<RefMut<Changelog>, HgError> {
- self.changelog.get_mut_or_init(self)
+ self.changelog.get_mut_or_init(|| self.new_changelog())
+ }
+
+ fn new_manifestlog(&self) -> Result<Manifestlog, HgError> {
+ Manifestlog::open(&self.store_vfs(), self.has_nodemap())
}
pub fn manifestlog(&self) -> Result<Ref<Manifestlog>, HgError> {
- self.manifestlog.get_or_init(self)
+ self.manifestlog.get_or_init(|| self.new_manifestlog())
}
pub fn manifestlog_mut(&self) -> Result<RefMut<Manifestlog>, HgError> {
- self.manifestlog.get_mut_or_init(self)
+ self.manifestlog.get_mut_or_init(|| self.new_manifestlog())
}
/// Returns the manifest of the *changeset* with the given node ID
@@ -412,7 +402,7 @@
pub fn has_subrepos(&self) -> Result<bool, DirstateError> {
if let Some(entry) = self.dirstate_map()?.get(HgPath::new(".hgsub"))? {
- Ok(entry.state().is_tracked())
+ Ok(entry.tracked())
} else {
Ok(false)
}
@@ -435,7 +425,9 @@
// it’s unset
let parents = self.dirstate_parents()?;
let (packed_dirstate, old_uuid_to_remove) = if self.has_dirstate_v2() {
- let uuid_opt = self.dirstate_data_file_uuid.get_or_init(self)?;
+ let uuid_opt = self
+ .dirstate_data_file_uuid
+ .get_or_init(|| self.read_dirstate_data_file_uuid())?;
let uuid_opt = uuid_opt.as_ref();
let can_append = uuid_opt.is_some();
let (data, tree_metadata, append, old_data_size) =
@@ -528,19 +520,17 @@
/// Lazily-initialized component of `Repo` with interior mutability
///
/// This differs from `OnceCell` in that the value can still be "deinitialized"
-/// later by setting its inner `Option` to `None`.
-struct LazyCell<T, E> {
+/// later by setting its inner `Option` to `None`. It also takes the
+/// initialization function as an argument when the value is requested, not
+/// when the instance is created.
+struct LazyCell<T> {
value: RefCell<Option<T>>,
- // `Fn`s that don’t capture environment are zero-size, so this box does
- // not allocate:
- init: Box<dyn Fn(&Repo) -> Result<T, E>>,
}
-impl<T, E> LazyCell<T, E> {
- fn new(init: impl Fn(&Repo) -> Result<T, E> + 'static) -> Self {
+impl<T> LazyCell<T> {
+ fn new() -> Self {
Self {
value: RefCell::new(None),
- init: Box::new(init),
}
}
@@ -548,23 +538,29 @@
*self.value.borrow_mut() = Some(value)
}
- fn get_or_init(&self, repo: &Repo) -> Result<Ref<T>, E> {
+ fn get_or_init<E>(
+ &self,
+ init: impl Fn() -> Result<T, E>,
+ ) -> Result<Ref<T>, E> {
let mut borrowed = self.value.borrow();
if borrowed.is_none() {
drop(borrowed);
// Only use `borrow_mut` if it is really needed to avoid panic in
// case there is another outstanding borrow but mutation is not
// needed.
- *self.value.borrow_mut() = Some((self.init)(repo)?);
+ *self.value.borrow_mut() = Some(init()?);
borrowed = self.value.borrow()
}
Ok(Ref::map(borrowed, |option| option.as_ref().unwrap()))
}
- fn get_mut_or_init(&self, repo: &Repo) -> Result<RefMut<T>, E> {
+ fn get_mut_or_init<E>(
+ &self,
+ init: impl Fn() -> Result<T, E>,
+ ) -> Result<RefMut<T>, E> {
let mut borrowed = self.value.borrow_mut();
if borrowed.is_none() {
- *borrowed = Some((self.init)(repo)?);
+ *borrowed = Some(init()?);
}
Ok(RefMut::map(borrowed, |option| option.as_mut().unwrap()))
}
--- a/rust/hg-core/src/requirements.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/requirements.rs Thu Jun 16 15:28:54 2022 +0200
@@ -92,34 +92,45 @@
// not should opt out by checking `has_sparse` and `has_narrow`.
SPARSE_REQUIREMENT,
NARROW_REQUIREMENT,
+ // rhg doesn't care about bookmarks at all yet
+ BOOKMARKS_IN_STORE_REQUIREMENT,
];
// Copied from mercurial/requirements.py:
-pub(crate) const DIRSTATE_V2_REQUIREMENT: &str = "dirstate-v2";
+pub const DIRSTATE_V2_REQUIREMENT: &str = "dirstate-v2";
+
+/// A repository that uses the tracked hint dirstate file
+#[allow(unused)]
+pub const DIRSTATE_TRACKED_HINT_V1: &str = "dirstate-tracked-key-v1";
/// When narrowing is finalized and no longer subject to format changes,
/// we should move this to just "narrow" or similar.
#[allow(unused)]
-pub(crate) const NARROW_REQUIREMENT: &str = "narrowhg-experimental";
+pub const NARROW_REQUIREMENT: &str = "narrowhg-experimental";
+
+/// Bookmarks must be stored in the `store` part of the repository and will be
+/// shared across shares
+#[allow(unused)]
+pub const BOOKMARKS_IN_STORE_REQUIREMENT: &str = "bookmarksinstore";
/// Enables sparse working directory usage
#[allow(unused)]
-pub(crate) const SPARSE_REQUIREMENT: &str = "exp-sparse";
+pub const SPARSE_REQUIREMENT: &str = "exp-sparse";
/// Enables the internal phase which is used to hide changesets instead
/// of stripping them
#[allow(unused)]
-pub(crate) const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase";
+pub const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase";
/// Stores manifest in Tree structure
#[allow(unused)]
-pub(crate) const TREEMANIFEST_REQUIREMENT: &str = "treemanifest";
+pub const TREEMANIFEST_REQUIREMENT: &str = "treemanifest";
/// Increment the sub-version when the revlog v2 format changes to lock out old
/// clients.
#[allow(unused)]
-pub(crate) const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1";
+pub const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1";
/// A repository with the sparserevlog feature will have delta chains that
/// can spread over a larger span. Sparse reading cuts these large spans into
@@ -130,32 +141,32 @@
/// chain. This is why once a repository has enabled sparse-read, it becomes
/// required.
#[allow(unused)]
-pub(crate) const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog";
+pub const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog";
/// A repository with the the copies-sidedata-changeset requirement will store
/// copies related information in changeset's sidedata.
#[allow(unused)]
-pub(crate) const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset";
+pub const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset";
/// The repository use persistent nodemap for the changelog and the manifest.
#[allow(unused)]
-pub(crate) const NODEMAP_REQUIREMENT: &str = "persistent-nodemap";
+pub const NODEMAP_REQUIREMENT: &str = "persistent-nodemap";
/// Denotes that the current repository is a share
#[allow(unused)]
-pub(crate) const SHARED_REQUIREMENT: &str = "shared";
+pub const SHARED_REQUIREMENT: &str = "shared";
/// Denotes that current repository is a share and the shared source path is
/// relative to the current repository root path
#[allow(unused)]
-pub(crate) const RELATIVE_SHARED_REQUIREMENT: &str = "relshared";
+pub const RELATIVE_SHARED_REQUIREMENT: &str = "relshared";
/// A repository with share implemented safely. The repository has different
/// store and working copy requirements i.e. both `.hg/requires` and
/// `.hg/store/requires` are present.
#[allow(unused)]
-pub(crate) const SHARESAFE_REQUIREMENT: &str = "share-safe";
+pub const SHARESAFE_REQUIREMENT: &str = "share-safe";
/// A repository that use zstd compression inside its revlog
#[allow(unused)]
-pub(crate) const REVLOG_COMPRESSION_ZSTD: &str = "revlog-compression-zstd";
+pub const REVLOG_COMPRESSION_ZSTD: &str = "revlog-compression-zstd";
--- a/rust/hg-core/src/revlog/changelog.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/revlog/changelog.rs Thu Jun 16 15:28:54 2022 +0200
@@ -1,9 +1,13 @@
use crate::errors::HgError;
-use crate::repo::Repo;
-use crate::revlog::node::NULL_NODE;
-use crate::revlog::revlog::{Revlog, RevlogError};
+use crate::revlog::revlog::{Revlog, RevlogEntry, RevlogError};
use crate::revlog::Revision;
use crate::revlog::{Node, NodePrefix};
+use crate::utils::hg_path::HgPath;
+use crate::vfs::Vfs;
+use itertools::Itertools;
+use std::ascii::escape_default;
+use std::borrow::Cow;
+use std::fmt::{Debug, Formatter};
/// A specialized `Revlog` to work with `changelog` data format.
pub struct Changelog {
@@ -13,8 +17,9 @@
impl Changelog {
/// Open the `changelog` of a repository given by its root.
- pub fn open(repo: &Repo) -> Result<Self, HgError> {
- let revlog = Revlog::open(repo, "00changelog.i", None)?;
+ pub fn open(store_vfs: &Vfs, use_nodemap: bool) -> Result<Self, HgError> {
+ let revlog =
+ Revlog::open(store_vfs, "00changelog.i", None, use_nodemap)?;
Ok(Self { revlog })
}
@@ -27,41 +32,240 @@
self.data_for_rev(rev)
}
+ /// Return the `RevlogEntry` of the given revision number.
+ pub fn entry_for_rev(
+ &self,
+ rev: Revision,
+ ) -> Result<RevlogEntry, RevlogError> {
+ self.revlog.get_entry(rev)
+ }
+
/// Return the `ChangelogEntry` of the given revision number.
pub fn data_for_rev(
&self,
rev: Revision,
) -> Result<ChangelogRevisionData, RevlogError> {
- let bytes = self.revlog.get_rev_data(rev)?.into_owned();
- Ok(ChangelogRevisionData { bytes })
+ let bytes = self.revlog.get_rev_data(rev)?;
+ if bytes.is_empty() {
+ Ok(ChangelogRevisionData::null())
+ } else {
+ Ok(ChangelogRevisionData::new(bytes).map_err(|err| {
+ RevlogError::Other(HgError::CorruptedRepository(format!(
+ "Invalid changelog data for revision {}: {:?}",
+ rev, err
+ )))
+ })?)
+ }
}
pub fn node_from_rev(&self, rev: Revision) -> Option<&Node> {
self.revlog.node_from_rev(rev)
}
+
+ pub fn rev_from_node(
+ &self,
+ node: NodePrefix,
+ ) -> Result<Revision, RevlogError> {
+ self.revlog.rev_from_node(node)
+ }
}
/// `Changelog` entry which knows how to interpret the `changelog` data bytes.
-#[derive(Debug)]
-pub struct ChangelogRevisionData {
+#[derive(PartialEq)]
+pub struct ChangelogRevisionData<'changelog> {
/// The data bytes of the `changelog` entry.
- bytes: Vec<u8>,
+ bytes: Cow<'changelog, [u8]>,
+ /// The end offset for the hex manifest (not including the newline)
+ manifest_end: usize,
+ /// The end offset for the user+email (not including the newline)
+ user_end: usize,
+ /// The end offset for the timestamp+timezone+extras (not including the
+ /// newline)
+ timestamp_end: usize,
+ /// The end offset for the file list (not including the newline)
+ files_end: usize,
}
-impl ChangelogRevisionData {
+impl<'changelog> ChangelogRevisionData<'changelog> {
+ fn new(bytes: Cow<'changelog, [u8]>) -> Result<Self, HgError> {
+ let mut line_iter = bytes.split(|b| b == &b'\n');
+ let manifest_end = line_iter
+ .next()
+ .expect("Empty iterator from split()?")
+ .len();
+ let user_slice = line_iter.next().ok_or_else(|| {
+ HgError::corrupted("Changeset data truncated after manifest line")
+ })?;
+ let user_end = manifest_end + 1 + user_slice.len();
+ let timestamp_slice = line_iter.next().ok_or_else(|| {
+ HgError::corrupted("Changeset data truncated after user line")
+ })?;
+ let timestamp_end = user_end + 1 + timestamp_slice.len();
+ let mut files_end = timestamp_end + 1;
+ loop {
+ let line = line_iter.next().ok_or_else(|| {
+ HgError::corrupted("Changeset data truncated in files list")
+ })?;
+ if line.is_empty() {
+ if files_end == bytes.len() {
+ // The list of files ended with a single newline (there
+ // should be two)
+ return Err(HgError::corrupted(
+ "Changeset data truncated after files list",
+ ));
+ }
+ files_end -= 1;
+ break;
+ }
+ files_end += line.len() + 1;
+ }
+
+ Ok(Self {
+ bytes,
+ manifest_end,
+ user_end,
+ timestamp_end,
+ files_end,
+ })
+ }
+
+ fn null() -> Self {
+ Self::new(Cow::Borrowed(
+ b"0000000000000000000000000000000000000000\n\n0 0\n\n",
+ ))
+ .unwrap()
+ }
+
/// Return an iterator over the lines of the entry.
pub fn lines(&self) -> impl Iterator<Item = &[u8]> {
- self.bytes
- .split(|b| b == &b'\n')
- .filter(|line| !line.is_empty())
+ self.bytes.split(|b| b == &b'\n')
}
/// Return the node id of the `manifest` referenced by this `changelog`
/// entry.
pub fn manifest_node(&self) -> Result<Node, HgError> {
- match self.lines().next() {
- None => Ok(NULL_NODE),
- Some(x) => Node::from_hex_for_repo(x),
- }
+ let manifest_node_hex = &self.bytes[..self.manifest_end];
+ Node::from_hex_for_repo(manifest_node_hex)
+ }
+
+ /// The full user string (usually a name followed by an email enclosed in
+ /// angle brackets)
+ pub fn user(&self) -> &[u8] {
+ &self.bytes[self.manifest_end + 1..self.user_end]
+ }
+
+ /// The full timestamp line (timestamp in seconds, offset in seconds, and
+ /// possibly extras)
+ // TODO: We should expose this in a more useful way
+ pub fn timestamp_line(&self) -> &[u8] {
+ &self.bytes[self.user_end + 1..self.timestamp_end]
+ }
+
+ /// The files changed in this revision.
+ pub fn files(&self) -> impl Iterator<Item = &HgPath> {
+ self.bytes[self.timestamp_end + 1..self.files_end]
+ .split(|b| b == &b'\n')
+ .map(|path| HgPath::new(path))
+ }
+
+ /// The change description.
+ pub fn description(&self) -> &[u8] {
+ &self.bytes[self.files_end + 2..]
+ }
+}
+
+impl Debug for ChangelogRevisionData<'_> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("ChangelogRevisionData")
+ .field("bytes", &debug_bytes(&self.bytes))
+ .field("manifest", &debug_bytes(&self.bytes[..self.manifest_end]))
+ .field(
+ "user",
+ &debug_bytes(
+ &self.bytes[self.manifest_end + 1..self.user_end],
+ ),
+ )
+ .field(
+ "timestamp",
+ &debug_bytes(
+ &self.bytes[self.user_end + 1..self.timestamp_end],
+ ),
+ )
+ .field(
+ "files",
+ &debug_bytes(
+ &self.bytes[self.timestamp_end + 1..self.files_end],
+ ),
+ )
+ .field(
+ "description",
+ &debug_bytes(&self.bytes[self.files_end + 2..]),
+ )
+ .finish()
}
}
+
+fn debug_bytes(bytes: &[u8]) -> String {
+ String::from_utf8_lossy(
+ &bytes.iter().flat_map(|b| escape_default(*b)).collect_vec(),
+ )
+ .to_string()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use pretty_assertions::assert_eq;
+
+ #[test]
+ fn test_create_changelogrevisiondata_invalid() {
+ // Completely empty
+ assert!(ChangelogRevisionData::new(Cow::Borrowed(b"abcd")).is_err());
+ // No newline after manifest
+ assert!(ChangelogRevisionData::new(Cow::Borrowed(b"abcd")).is_err());
+ // No newline after user
+ assert!(ChangelogRevisionData::new(Cow::Borrowed(b"abcd\n")).is_err());
+ // No newline after timestamp
+ assert!(
+ ChangelogRevisionData::new(Cow::Borrowed(b"abcd\n\n0 0")).is_err()
+ );
+ // Missing newline after files
+ assert!(ChangelogRevisionData::new(Cow::Borrowed(
+ b"abcd\n\n0 0\nfile1\nfile2"
+ ))
+ .is_err(),);
+ // Only one newline after files
+ assert!(ChangelogRevisionData::new(Cow::Borrowed(
+ b"abcd\n\n0 0\nfile1\nfile2\n"
+ ))
+ .is_err(),);
+ }
+
+ #[test]
+ fn test_create_changelogrevisiondata() {
+ let data = ChangelogRevisionData::new(Cow::Borrowed(
+ b"0123456789abcdef0123456789abcdef01234567
+Some One <someone@example.com>
+0 0
+file1
+file2
+
+some
+commit
+message",
+ ))
+ .unwrap();
+ assert_eq!(
+ data.manifest_node().unwrap(),
+ Node::from_hex("0123456789abcdef0123456789abcdef01234567")
+ .unwrap()
+ );
+ assert_eq!(data.user(), b"Some One <someone@example.com>");
+ assert_eq!(data.timestamp_line(), b"0 0");
+ assert_eq!(
+ data.files().collect_vec(),
+ vec![HgPath::new("file1"), HgPath::new("file2")]
+ );
+ assert_eq!(data.description(), b"some\ncommit\nmessage");
+ }
+}
--- a/rust/hg-core/src/revlog/filelog.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/revlog/filelog.rs Thu Jun 16 15:28:54 2022 +0200
@@ -20,7 +20,12 @@
pub fn open(repo: &Repo, file_path: &HgPath) -> Result<Self, HgError> {
let index_path = store_path(file_path, b".i");
let data_path = store_path(file_path, b".d");
- let revlog = Revlog::open(repo, index_path, Some(&data_path))?;
+ let revlog = Revlog::open(
+ &repo.store_vfs(),
+ index_path,
+ Some(&data_path),
+ false,
+ )?;
Ok(Self { revlog })
}
@@ -90,7 +95,7 @@
// Let’s call `file_data_len` what would be returned by
// `self.data().file_data().len()`.
- if self.0.is_cencored() {
+ if self.0.is_censored() {
let file_data_len = 0;
return other_len != file_data_len;
}
--- a/rust/hg-core/src/revlog/index.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/revlog/index.rs Thu Jun 16 15:28:54 2022 +0200
@@ -282,6 +282,10 @@
BigEndian::read_i32(&self.bytes[16..])
}
+ pub fn link_revision(&self) -> Revision {
+ BigEndian::read_i32(&self.bytes[20..])
+ }
+
pub fn p1(&self) -> Revision {
BigEndian::read_i32(&self.bytes[24..])
}
@@ -302,6 +306,7 @@
#[cfg(test)]
mod tests {
use super::*;
+ use crate::node::NULL_NODE;
#[cfg(test)]
#[derive(Debug, Copy, Clone)]
@@ -314,6 +319,10 @@
compressed_len: usize,
uncompressed_len: usize,
base_revision_or_base_of_delta_chain: Revision,
+ link_revision: Revision,
+ p1: Revision,
+ p2: Revision,
+ node: Node,
}
#[cfg(test)]
@@ -323,11 +332,15 @@
is_first: false,
is_inline: false,
is_general_delta: true,
- version: 2,
+ version: 1,
offset: 0,
compressed_len: 0,
uncompressed_len: 0,
base_revision_or_base_of_delta_chain: 0,
+ link_revision: 0,
+ p1: NULL_REVISION,
+ p2: NULL_REVISION,
+ node: NULL_NODE,
}
}
@@ -374,6 +387,26 @@
self
}
+ pub fn with_link_revision(&mut self, value: Revision) -> &mut Self {
+ self.link_revision = value;
+ self
+ }
+
+ pub fn with_p1(&mut self, value: Revision) -> &mut Self {
+ self.p1 = value;
+ self
+ }
+
+ pub fn with_p2(&mut self, value: Revision) -> &mut Self {
+ self.p2 = value;
+ self
+ }
+
+ pub fn with_node(&mut self, value: Node) -> &mut Self {
+ self.node = value;
+ self
+ }
+
pub fn build(&self) -> Vec<u8> {
let mut bytes = Vec::with_capacity(INDEX_ENTRY_SIZE);
if self.is_first {
@@ -396,6 +429,11 @@
bytes.extend(
&self.base_revision_or_base_of_delta_chain.to_be_bytes(),
);
+ bytes.extend(&self.link_revision.to_be_bytes());
+ bytes.extend(&self.p1.to_be_bytes());
+ bytes.extend(&self.p2.to_be_bytes());
+ bytes.extend(self.node.as_bytes());
+ bytes.extend(vec![0u8; 12]);
bytes
}
}
@@ -514,13 +552,63 @@
}
#[test]
+ fn link_revision_test() {
+ let bytes = IndexEntryBuilder::new().with_link_revision(123).build();
+
+ let entry = IndexEntry {
+ bytes: &bytes,
+ offset_override: None,
+ };
+
+ assert_eq!(entry.link_revision(), 123);
+ }
+
+ #[test]
+ fn p1_test() {
+ let bytes = IndexEntryBuilder::new().with_p1(123).build();
+
+ let entry = IndexEntry {
+ bytes: &bytes,
+ offset_override: None,
+ };
+
+ assert_eq!(entry.p1(), 123);
+ }
+
+ #[test]
+ fn p2_test() {
+ let bytes = IndexEntryBuilder::new().with_p2(123).build();
+
+ let entry = IndexEntry {
+ bytes: &bytes,
+ offset_override: None,
+ };
+
+ assert_eq!(entry.p2(), 123);
+ }
+
+ #[test]
+ fn node_test() {
+ let node = Node::from_hex("0123456789012345678901234567890123456789")
+ .unwrap();
+ let bytes = IndexEntryBuilder::new().with_node(node).build();
+
+ let entry = IndexEntry {
+ bytes: &bytes,
+ offset_override: None,
+ };
+
+ assert_eq!(*entry.hash(), node);
+ }
+
+ #[test]
fn version_test() {
let bytes = IndexEntryBuilder::new()
.is_first(true)
- .with_version(1)
+ .with_version(2)
.build();
- assert_eq!(get_version(&bytes), 1)
+ assert_eq!(get_version(&bytes), 2)
}
}
--- a/rust/hg-core/src/revlog/manifest.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/revlog/manifest.rs Thu Jun 16 15:28:54 2022 +0200
@@ -1,10 +1,10 @@
use crate::errors::HgError;
-use crate::repo::Repo;
use crate::revlog::revlog::{Revlog, RevlogError};
use crate::revlog::Revision;
use crate::revlog::{Node, NodePrefix};
use crate::utils::hg_path::HgPath;
use crate::utils::SliceExt;
+use crate::vfs::Vfs;
/// A specialized `Revlog` to work with `manifest` data format.
pub struct Manifestlog {
@@ -14,8 +14,9 @@
impl Manifestlog {
/// Open the `manifest` of a repository given by its root.
- pub fn open(repo: &Repo) -> Result<Self, HgError> {
- let revlog = Revlog::open(repo, "00manifest.i", None)?;
+ pub fn open(store_vfs: &Vfs, use_nodemap: bool) -> Result<Self, HgError> {
+ let revlog =
+ Revlog::open(store_vfs, "00manifest.i", None, use_nodemap)?;
Ok(Self { revlog })
}
--- a/rust/hg-core/src/revlog/node.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/revlog/node.rs Thu Jun 16 15:28:54 2022 +0200
@@ -53,12 +53,21 @@
/// the size or return an error at runtime.
///
/// [`nybbles_len`]: #method.nybbles_len
-#[derive(Copy, Clone, Debug, PartialEq, BytesCast, derive_more::From)]
+#[derive(Copy, Clone, PartialEq, BytesCast, derive_more::From)]
#[repr(transparent)]
pub struct Node {
data: NodeData,
}
+impl fmt::Debug for Node {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let n = format!("{:x?}", self.data);
+ // We're using debug_tuple because it makes the output a little
+ // more compact without losing data.
+ f.debug_tuple("Node").field(&n).finish()
+ }
+}
+
/// The node value for NULL_REVISION
pub const NULL_NODE: Node = Node {
data: [0; NODE_BYTES_LENGTH],
--- a/rust/hg-core/src/revlog/nodemap.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/revlog/nodemap.rs Thu Jun 16 15:28:54 2022 +0200
@@ -403,7 +403,7 @@
Err(NodeMapError::MultipleResults)
}
- fn visit<'n>(&'n self, prefix: NodePrefix) -> NodeTreeVisitor<'n> {
+ fn visit(&self, prefix: NodePrefix) -> NodeTreeVisitor {
NodeTreeVisitor {
nt: self,
prefix,
--- a/rust/hg-core/src/revlog/nodemap_docket.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/revlog/nodemap_docket.rs Thu Jun 16 15:28:54 2022 +0200
@@ -1,11 +1,10 @@
use crate::errors::{HgError, HgResultExt};
-use crate::requirements;
use bytes_cast::{unaligned, BytesCast};
use memmap2::Mmap;
use std::path::{Path, PathBuf};
-use crate::repo::Repo;
use crate::utils::strip_suffix;
+use crate::vfs::Vfs;
const ONDISK_VERSION: u8 = 1;
@@ -35,20 +34,12 @@
/// * The docket file points to a missing (likely deleted) data file (this
/// can happen in a rare race condition).
pub fn read_from_file(
- repo: &Repo,
+ store_vfs: &Vfs,
index_path: &Path,
) -> Result<Option<(Self, Mmap)>, HgError> {
- if !repo
- .requirements()
- .contains(requirements::NODEMAP_REQUIREMENT)
- {
- // If .hg/requires does not opt it, don’t try to open a nodemap
- return Ok(None);
- }
-
let docket_path = index_path.with_extension("n");
let docket_bytes = if let Some(bytes) =
- repo.store_vfs().read(&docket_path).io_not_found_as_none()?
+ store_vfs.read(&docket_path).io_not_found_as_none()?
{
bytes
} else {
@@ -84,10 +75,8 @@
let data_path = rawdata_path(&docket_path, uid);
// TODO: use `vfs.read()` here when the `persistent-nodemap.mmap`
// config is false?
- if let Some(mmap) = repo
- .store_vfs()
- .mmap_open(&data_path)
- .io_not_found_as_none()?
+ if let Some(mmap) =
+ store_vfs.mmap_open(&data_path).io_not_found_as_none()?
{
if mmap.len() >= data_length {
Ok(Some((docket, mmap)))
--- a/rust/hg-core/src/revlog/revlog.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-core/src/revlog/revlog.rs Thu Jun 16 15:28:54 2022 +0200
@@ -16,8 +16,8 @@
use super::nodemap_docket::NodeMapDocket;
use super::patch;
use crate::errors::HgError;
-use crate::repo::Repo;
use crate::revlog::Revision;
+use crate::vfs::Vfs;
use crate::{Node, NULL_REVISION};
const REVISION_FLAG_CENSORED: u16 = 1 << 15;
@@ -34,7 +34,7 @@
const NULL_REVLOG_ENTRY_FLAGS: u16 = 0;
-#[derive(derive_more::From)]
+#[derive(Debug, derive_more::From)]
pub enum RevlogError {
InvalidRevision,
/// Working directory is not supported
@@ -83,13 +83,14 @@
/// interleaved.
#[timed]
pub fn open(
- repo: &Repo,
+ store_vfs: &Vfs,
index_path: impl AsRef<Path>,
data_path: Option<&Path>,
+ use_nodemap: bool,
) -> Result<Self, HgError> {
let index_path = index_path.as_ref();
let index = {
- match repo.store_vfs().mmap_open_opt(&index_path)? {
+ match store_vfs.mmap_open_opt(&index_path)? {
None => Index::new(Box::new(vec![])),
Some(index_mmap) => {
let index = Index::new(Box::new(index_mmap))?;
@@ -107,14 +108,16 @@
None
} else {
let data_path = data_path.unwrap_or(&default_data_path);
- let data_mmap = repo.store_vfs().mmap_open(data_path)?;
+ let data_mmap = store_vfs.mmap_open(data_path)?;
Some(Box::new(data_mmap))
};
let nodemap = if index.is_inline() {
None
+ } else if !use_nodemap {
+ None
} else {
- NodeMapDocket::read_from_file(repo, index_path)?.map(
+ NodeMapDocket::read_from_file(store_vfs, index_path)?.map(
|(docket, data)| {
nodemap::NodeTree::load_bytes(
Box::new(data),
@@ -351,6 +354,10 @@
self.rev
}
+ pub fn node(&self) -> &Node {
+ &self.hash
+ }
+
pub fn uncompressed_len(&self) -> Option<u32> {
u32::try_from(self.uncompressed_len).ok()
}
@@ -359,7 +366,39 @@
self.p1 != NULL_REVISION
}
- pub fn is_cencored(&self) -> bool {
+ pub fn p1_entry(&self) -> Result<Option<RevlogEntry>, RevlogError> {
+ if self.p1 == NULL_REVISION {
+ Ok(None)
+ } else {
+ Ok(Some(self.revlog.get_entry(self.p1)?))
+ }
+ }
+
+ pub fn p2_entry(&self) -> Result<Option<RevlogEntry>, RevlogError> {
+ if self.p2 == NULL_REVISION {
+ Ok(None)
+ } else {
+ Ok(Some(self.revlog.get_entry(self.p2)?))
+ }
+ }
+
+ pub fn p1(&self) -> Option<Revision> {
+ if self.p1 == NULL_REVISION {
+ None
+ } else {
+ Some(self.p1)
+ }
+ }
+
+ pub fn p2(&self) -> Option<Revision> {
+ if self.p2 == NULL_REVISION {
+ None
+ } else {
+ Some(self.p2)
+ }
+ }
+
+ pub fn is_censored(&self) -> bool {
(self.flags & REVISION_FLAG_CENSORED) != 0
}
@@ -370,7 +409,7 @@
}
/// The data for this entry, after resolving deltas if any.
- pub fn data(&self) -> Result<Cow<'a, [u8]>, HgError> {
+ pub fn rawdata(&self) -> Result<Cow<'a, [u8]>, HgError> {
let mut entry = self.clone();
let mut delta_chain = vec![];
@@ -395,6 +434,13 @@
Revlog::build_data_from_deltas(entry, &delta_chain)?.into()
};
+ Ok(data)
+ }
+
+ fn check_data(
+ &self,
+ data: Cow<'a, [u8]>,
+ ) -> Result<Cow<'a, [u8]>, HgError> {
if self.revlog.check_hash(
self.p1,
self.p2,
@@ -407,6 +453,14 @@
}
}
+ pub fn data(&self) -> Result<Cow<'a, [u8]>, HgError> {
+ let data = self.rawdata()?;
+ if self.is_censored() {
+ return Err(HgError::CensoredNodeError);
+ }
+ self.check_data(data)
+ }
+
/// Extract the data contained in the entry.
/// This may be a delta. (See `is_delta`.)
fn data_chunk(&self) -> Result<Cow<'a, [u8]>, HgError> {
@@ -486,3 +540,92 @@
hasher.update(data);
*hasher.finalize().as_ref()
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::index::{IndexEntryBuilder, INDEX_ENTRY_SIZE};
+ use itertools::Itertools;
+
+ #[test]
+ fn test_empty() {
+ let temp = tempfile::tempdir().unwrap();
+ let vfs = Vfs { base: temp.path() };
+ std::fs::write(temp.path().join("foo.i"), b"").unwrap();
+ let revlog = Revlog::open(&vfs, "foo.i", None, false).unwrap();
+ assert!(revlog.is_empty());
+ assert_eq!(revlog.len(), 0);
+ assert!(revlog.get_entry(0).is_err());
+ assert!(!revlog.has_rev(0));
+ }
+
+ #[test]
+ fn test_inline() {
+ let temp = tempfile::tempdir().unwrap();
+ let vfs = Vfs { base: temp.path() };
+ let node0 = Node::from_hex("2ed2a3912a0b24502043eae84ee4b279c18b90dd")
+ .unwrap();
+ let node1 = Node::from_hex("b004912a8510032a0350a74daa2803dadfb00e12")
+ .unwrap();
+ let node2 = Node::from_hex("dd6ad206e907be60927b5a3117b97dffb2590582")
+ .unwrap();
+ let entry0_bytes = IndexEntryBuilder::new()
+ .is_first(true)
+ .with_version(1)
+ .with_inline(true)
+ .with_offset(INDEX_ENTRY_SIZE)
+ .with_node(node0)
+ .build();
+ let entry1_bytes = IndexEntryBuilder::new()
+ .with_offset(INDEX_ENTRY_SIZE)
+ .with_node(node1)
+ .build();
+ let entry2_bytes = IndexEntryBuilder::new()
+ .with_offset(INDEX_ENTRY_SIZE)
+ .with_p1(0)
+ .with_p2(1)
+ .with_node(node2)
+ .build();
+ let contents = vec![entry0_bytes, entry1_bytes, entry2_bytes]
+ .into_iter()
+ .flatten()
+ .collect_vec();
+ std::fs::write(temp.path().join("foo.i"), contents).unwrap();
+ let revlog = Revlog::open(&vfs, "foo.i", None, false).unwrap();
+
+ let entry0 = revlog.get_entry(0).ok().unwrap();
+ assert_eq!(entry0.revision(), 0);
+ assert_eq!(*entry0.node(), node0);
+ assert!(!entry0.has_p1());
+ assert_eq!(entry0.p1(), None);
+ assert_eq!(entry0.p2(), None);
+ let p1_entry = entry0.p1_entry().unwrap();
+ assert!(p1_entry.is_none());
+ let p2_entry = entry0.p2_entry().unwrap();
+ assert!(p2_entry.is_none());
+
+ let entry1 = revlog.get_entry(1).ok().unwrap();
+ assert_eq!(entry1.revision(), 1);
+ assert_eq!(*entry1.node(), node1);
+ assert!(!entry1.has_p1());
+ assert_eq!(entry1.p1(), None);
+ assert_eq!(entry1.p2(), None);
+ let p1_entry = entry1.p1_entry().unwrap();
+ assert!(p1_entry.is_none());
+ let p2_entry = entry1.p2_entry().unwrap();
+ assert!(p2_entry.is_none());
+
+ let entry2 = revlog.get_entry(2).ok().unwrap();
+ assert_eq!(entry2.revision(), 2);
+ assert_eq!(*entry2.node(), node2);
+ assert!(entry2.has_p1());
+ assert_eq!(entry2.p1(), Some(0));
+ assert_eq!(entry2.p2(), Some(1));
+ let p1_entry = entry2.p1_entry().unwrap();
+ assert!(p1_entry.is_some());
+ assert_eq!(p1_entry.unwrap().revision(), 0);
+ let p2_entry = entry2.p2_entry().unwrap();
+ assert!(p2_entry.is_some());
+ assert_eq!(p2_entry.unwrap().revision(), 1);
+ }
+}
--- a/rust/hg-cpython/Cargo.toml Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-cpython/Cargo.toml Thu Jun 16 15:28:54 2022 +0200
@@ -8,25 +8,12 @@
name='rusthg'
crate-type = ["cdylib"]
-[features]
-default = ["python3"]
-
-# Features to build an extension module:
-python27 = ["cpython/python27-sys", "cpython/extension-module-2-7"]
-python3 = ["cpython/python3-sys", "cpython/extension-module"]
-
-# Enable one of these features to build a test executable linked to libpython:
-# e.g. cargo test --no-default-features --features python27-bin
-python27-bin = ["cpython/python27-sys"]
-python3-bin = ["cpython/python3-sys"]
-
[dependencies]
-cpython = { version = "0.7.0", default-features = false }
-crossbeam-channel = "0.4"
+cpython = { version = "0.7.0", features = ["extension-module"] }
+crossbeam-channel = "0.5.2"
hg-core = { path = "../hg-core"}
-libc = "0.2"
-log = "0.4.8"
-env_logger = "0.7.1"
+libc = "0.2.119"
+log = "0.4.14"
+env_logger = "0.9.0"
stable_deref_trait = "1.2.0"
vcsgraph = "0.2.0"
-
--- a/rust/hg-cpython/src/cindex.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-cpython/src/cindex.rs Thu Jun 16 15:28:54 2022 +0200
@@ -18,7 +18,7 @@
use hg::{Graph, GraphError, Revision, WORKING_DIRECTORY_REVISION};
use libc::{c_int, ssize_t};
-const REVLOG_CABI_VERSION: c_int = 2;
+const REVLOG_CABI_VERSION: c_int = 3;
#[repr(C)]
pub struct Revlog_CAPI {
@@ -29,6 +29,10 @@
index: *mut revlog_capi::RawPyObject,
rev: ssize_t,
) -> *const Node,
+ fast_rank: unsafe extern "C" fn(
+ index: *mut revlog_capi::RawPyObject,
+ rev: ssize_t,
+ ) -> ssize_t,
index_parents: unsafe extern "C" fn(
index: *mut revlog_capi::RawPyObject,
rev: c_int,
@@ -173,6 +177,20 @@
}
}
+impl vcsgraph::graph::RankedGraph for Index {
+ fn rank(
+ &self,
+ rev: Revision,
+ ) -> Result<vcsgraph::graph::Rank, vcsgraph::graph::GraphReadError> {
+ match unsafe {
+ (self.capi.fast_rank)(self.index.as_ptr(), rev as ssize_t)
+ } {
+ -1 => Err(vcsgraph::graph::GraphReadError::InconsistentGraphData),
+ rank => Ok(rank as usize),
+ }
+ }
+}
+
impl RevlogIndex for Index {
/// Note C return type is Py_ssize_t (hence signed), but we shall
/// force it to unsigned, because it's a length
--- a/rust/hg-cpython/src/dagops.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-cpython/src/dagops.rs Thu Jun 16 15:28:54 2022 +0200
@@ -14,6 +14,8 @@
use hg::dagops;
use hg::Revision;
use std::collections::HashSet;
+use vcsgraph::ancestors::node_rank;
+use vcsgraph::graph::{Parents, Rank};
use crate::revlog::pyindex_to_graph;
@@ -31,6 +33,18 @@
Ok(as_set)
}
+/// Computes the rank, i.e. the number of ancestors including itself,
+/// of a node represented by its parents.
+pub fn rank(
+ py: Python,
+ index: PyObject,
+ p1r: Revision,
+ p2r: Revision,
+) -> PyResult<Rank> {
+ node_rank(&pyindex_to_graph(py, index)?, &Parents([p1r, p2r]))
+ .map_err(|e| GraphError::pynew_from_vcsgraph(py, e))
+}
+
/// Create the module, with `__package__` given from parent
pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
let dotted_name = &format!("{}.dagop", package);
@@ -42,6 +56,11 @@
"headrevs",
py_fn!(py, headrevs(index: PyObject, revs: PyObject)),
)?;
+ m.add(
+ py,
+ "rank",
+ py_fn!(py, rank(index: PyObject, p1r: Revision, p2r: Revision)),
+ )?;
let sys = PyModule::import(py, "sys")?;
let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
--- a/rust/hg-cpython/src/dirstate/dirstate_map.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs Thu Jun 16 15:28:54 2022 +0200
@@ -15,6 +15,7 @@
exc, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList, PyNone, PyObject,
PyResult, Python, PythonObject, ToPyObject, UnsafePyLeaked,
};
+use hg::dirstate::{ParentFileData, TruncatedTimestamp};
use crate::{
dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
@@ -22,13 +23,10 @@
pybytes_deref::PyBytesDeref,
};
use hg::{
- dirstate::StateMapIter,
- dirstate_tree::on_disk::DirstateV2ParseError,
- dirstate_tree::owning::OwningDirstateMap,
- revlog::Node,
- utils::files::normalize_case,
- utils::hg_path::{HgPath, HgPathBuf},
- DirstateEntry, DirstateError, DirstateParents, EntryState,
+ dirstate::StateMapIter, dirstate_tree::on_disk::DirstateV2ParseError,
+ dirstate_tree::owning::OwningDirstateMap, revlog::Node,
+ utils::files::normalize_case, utils::hg_path::HgPath, DirstateEntry,
+ DirstateError, DirstateParents,
};
// TODO
@@ -103,61 +101,104 @@
}
}
- def set_dirstate_item(
- &self,
- path: PyObject,
- item: DirstateItem
- ) -> PyResult<PyObject> {
- let f = path.extract::<PyBytes>(py)?;
- let filename = HgPath::new(f.data(py));
- self.inner(py)
- .borrow_mut()
- .set_entry(filename, item.get_entry(py))
- .map_err(|e| v2_error(py, e))?;
- Ok(py.None())
+ def set_tracked(&self, f: PyObject) -> PyResult<PyBool> {
+ let bytes = f.extract::<PyBytes>(py)?;
+ let path = HgPath::new(bytes.data(py));
+ let res = self.inner(py).borrow_mut().set_tracked(path);
+ let was_tracked = res.or_else(|_| {
+ Err(PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))
+ })?;
+ Ok(was_tracked.to_py_object(py))
+ }
+
+ def set_untracked(&self, f: PyObject) -> PyResult<PyBool> {
+ let bytes = f.extract::<PyBytes>(py)?;
+ let path = HgPath::new(bytes.data(py));
+ let res = self.inner(py).borrow_mut().set_untracked(path);
+ let was_tracked = res.or_else(|_| {
+ Err(PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))
+ })?;
+ Ok(was_tracked.to_py_object(py))
}
- def addfile(
+ def set_clean(
&self,
- f: PyBytes,
- item: DirstateItem,
+ f: PyObject,
+ mode: u32,
+ size: u32,
+ mtime: (i64, u32, bool)
) -> PyResult<PyNone> {
- let filename = HgPath::new(f.data(py));
- let entry = item.get_entry(py);
- self.inner(py)
- .borrow_mut()
- .add_file(filename, entry)
- .map_err(|e |dirstate_error(py, e))?;
+ let (mtime_s, mtime_ns, second_ambiguous) = mtime;
+ let timestamp = TruncatedTimestamp::new_truncate(
+ mtime_s, mtime_ns, second_ambiguous
+ );
+ let bytes = f.extract::<PyBytes>(py)?;
+ let path = HgPath::new(bytes.data(py));
+ let res = self.inner(py).borrow_mut().set_clean(
+ path, mode, size, timestamp,
+ );
+ res.or_else(|_| {
+ Err(PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))
+ })?;
Ok(PyNone)
}
- def removefile(
+ def set_possibly_dirty(&self, f: PyObject) -> PyResult<PyNone> {
+ let bytes = f.extract::<PyBytes>(py)?;
+ let path = HgPath::new(bytes.data(py));
+ let res = self.inner(py).borrow_mut().set_possibly_dirty(path);
+ res.or_else(|_| {
+ Err(PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))
+ })?;
+ Ok(PyNone)
+ }
+
+ def reset_state(
&self,
f: PyObject,
- in_merge: PyObject
- ) -> PyResult<PyObject> {
- self.inner(py).borrow_mut()
- .remove_file(
- HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
- in_merge.extract::<PyBool>(py)?.is_true(),
- )
- .or_else(|_| {
- Err(PyErr::new::<exc::OSError, _>(
- py,
- "Dirstate error".to_string(),
- ))
- })?;
- Ok(py.None())
- }
-
- def drop_item_and_copy_source(
- &self,
- f: PyBytes,
+ wc_tracked: bool,
+ p1_tracked: bool,
+ p2_info: bool,
+ has_meaningful_mtime: bool,
+ parentfiledata: Option<(u32, u32, Option<(i64, u32, bool)>)>,
) -> PyResult<PyNone> {
- self.inner(py)
- .borrow_mut()
- .drop_entry_and_copy_source(HgPath::new(f.data(py)))
- .map_err(|e |dirstate_error(py, e))?;
+ let mut has_meaningful_mtime = has_meaningful_mtime;
+ let parent_file_data = match parentfiledata {
+ None => {
+ has_meaningful_mtime = false;
+ None
+ },
+ Some(data) => {
+ let (mode, size, mtime_info) = data;
+ let mtime = if let Some(mtime_info) = mtime_info {
+ let (mtime_s, mtime_ns, second_ambiguous) = mtime_info;
+ let timestamp = TruncatedTimestamp::new_truncate(
+ mtime_s, mtime_ns, second_ambiguous
+ );
+ Some(timestamp)
+ } else {
+ has_meaningful_mtime = false;
+ None
+ };
+ Some(ParentFileData {
+ mode_size: Some((mode, size)),
+ mtime,
+ })
+ }
+ };
+ let bytes = f.extract::<PyBytes>(py)?;
+ let path = HgPath::new(bytes.data(py));
+ let res = self.inner(py).borrow_mut().reset_state(
+ path,
+ wc_tracked,
+ p1_tracked,
+ p2_info,
+ has_meaningful_mtime,
+ parent_file_data,
+ );
+ res.or_else(|_| {
+ Err(PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string()))
+ })?;
Ok(PyNone)
}
@@ -228,7 +269,7 @@
let dict = PyDict::new(py);
for item in self.inner(py).borrow_mut().iter() {
let (path, entry) = item.map_err(|e| v2_error(py, e))?;
- if entry.state() != EntryState::Removed {
+ if !entry.removed() {
let key = normalize_case(path);
let value = path;
dict.set_item(
@@ -367,8 +408,8 @@
self.inner(py)
.borrow_mut()
.copy_map_insert(
- HgPathBuf::from_bytes(key.data(py)),
- HgPathBuf::from_bytes(value.data(py)),
+ HgPath::new(key.data(py)),
+ HgPath::new(value.data(py)),
)
.map_err(|e| v2_error(py, e))?;
Ok(py.None())
@@ -420,6 +461,19 @@
Ok(dirs)
}
+ def setparents_fixup(&self) -> PyResult<PyDict> {
+ let dict = PyDict::new(py);
+ let copies = self.inner(py).borrow_mut().setparents_fixup();
+ for (key, value) in copies.map_err(|e| v2_error(py, e))? {
+ dict.set_item(
+ py,
+ PyBytes::new(py, key.as_bytes()),
+ PyBytes::new(py, value.as_bytes()),
+ )?;
+ }
+ Ok(dict)
+ }
+
def debug_iter(&self, all: bool) -> PyResult<PyList> {
let dirs = PyList::new(py, &[]);
for item in self.inner(py).borrow().debug_iter(all) {
--- a/rust/hg-cpython/src/dirstate/item.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-cpython/src/dirstate/item.rs Thu Jun 16 15:28:54 2022 +0200
@@ -8,10 +8,9 @@
use cpython::Python;
use cpython::PythonObject;
use hg::dirstate::DirstateEntry;
-use hg::dirstate::EntryState;
+use hg::dirstate::DirstateV2Data;
use hg::dirstate::TruncatedTimestamp;
use std::cell::Cell;
-use std::convert::TryFrom;
py_class!(pub class DirstateItem |py| {
data entry: Cell<DirstateEntry>;
@@ -40,15 +39,15 @@
}
}
}
- let entry = DirstateEntry::from_v2_data(
- wc_tracked,
+ let entry = DirstateEntry::from_v2_data(DirstateV2Data {
+ wc_tracked: wc_tracked,
p1_tracked,
p2_info,
- mode_size_opt,
- mtime_opt,
+ mode_size: mode_size_opt,
+ mtime: mtime_opt,
fallback_exec,
fallback_symlink,
- );
+ });
DirstateItem::create_instance(py, Cell::new(entry))
}
@@ -173,27 +172,6 @@
Ok(self.entry(py).get().any_tracked())
}
- def v1_state(&self) -> PyResult<PyBytes> {
- let (state, _mode, _size, _mtime) = self.entry(py).get().v1_data();
- let state_byte: u8 = state.into();
- Ok(PyBytes::new(py, &[state_byte]))
- }
-
- def v1_mode(&self) -> PyResult<i32> {
- let (_state, mode, _size, _mtime) = self.entry(py).get().v1_data();
- Ok(mode)
- }
-
- def v1_size(&self) -> PyResult<i32> {
- let (_state, _mode, size, _mtime) = self.entry(py).get().v1_data();
- Ok(size)
- }
-
- def v1_mtime(&self) -> PyResult<i32> {
- let (_state, _mode, _size, mtime) = self.entry(py).get().v1_data();
- Ok(mtime)
- }
-
def mtime_likely_equal_to(&self, other: (u32, u32, bool))
-> PyResult<bool> {
if let Some(mtime) = self.entry(py).get().truncated_mtime() {
@@ -203,22 +181,6 @@
}
}
- @classmethod
- def from_v1_data(
- _cls,
- state: PyBytes,
- mode: i32,
- size: i32,
- mtime: i32,
- ) -> PyResult<Self> {
- let state = <[u8; 1]>::try_from(state.data(py))
- .ok()
- .and_then(|state| EntryState::try_from(state[0]).ok())
- .ok_or_else(|| PyErr::new::<exc::ValueError, _>(py, "invalid state"))?;
- let entry = DirstateEntry::from_v1_data(state, mode, size, mtime);
- DirstateItem::create_instance(py, Cell::new(entry))
- }
-
def drop_merge_data(&self) -> PyResult<PyNone> {
self.update(py, |entry| entry.drop_merge_data());
Ok(PyNone)
--- a/rust/hg-cpython/src/dirstate/status.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-cpython/src/dirstate/status.rs Thu Jun 16 15:28:54 2022 +0200
@@ -15,6 +15,7 @@
PyResult, PyTuple, Python, PythonObject, ToPyObject,
};
use hg::dirstate::status::StatusPath;
+use hg::matchers::{IntersectionMatcher, Matcher, NeverMatcher, UnionMatcher};
use hg::{
matchers::{AlwaysMatcher, FileMatcher, IncludeMatcher},
parse_pattern_syntax,
@@ -133,24 +134,31 @@
build_response(py, status_res, warnings)
};
+ let matcher = extract_matcher(py, matcher)?;
+ dmap.with_status(
+ &*matcher,
+ root_dir.to_path_buf(),
+ ignore_files,
+ StatusOptions {
+ check_exec,
+ list_clean,
+ list_ignored,
+ list_unknown,
+ list_copies,
+ collect_traversed_dirs,
+ },
+ after_status,
+ )
+}
+
+/// Transform a Python matcher into a Rust matcher.
+fn extract_matcher(
+ py: Python,
+ matcher: PyObject,
+) -> PyResult<Box<dyn Matcher + Sync>> {
match matcher.get_type(py).name(py).borrow() {
- "alwaysmatcher" => {
- let matcher = AlwaysMatcher;
- dmap.with_status(
- &matcher,
- root_dir.to_path_buf(),
- ignore_files,
- StatusOptions {
- check_exec,
- list_clean,
- list_ignored,
- list_unknown,
- list_copies,
- collect_traversed_dirs,
- },
- after_status,
- )
- }
+ "alwaysmatcher" => Ok(Box::new(AlwaysMatcher)),
+ "nevermatcher" => Ok(Box::new(NeverMatcher)),
"exactmatcher" => {
let files = matcher.call_method(
py,
@@ -169,22 +177,9 @@
.collect();
let files = files?;
- let matcher = FileMatcher::new(files.as_ref())
+ let file_matcher = FileMatcher::new(files)
.map_err(|e| PyErr::new::<ValueError, _>(py, e.to_string()))?;
- dmap.with_status(
- &matcher,
- root_dir.to_path_buf(),
- ignore_files,
- StatusOptions {
- check_exec,
- list_clean,
- list_ignored,
- list_unknown,
- list_copies,
- collect_traversed_dirs,
- },
- after_status,
- )
+ Ok(Box::new(file_matcher))
}
"includematcher" => {
// Get the patterns from Python even though most of them are
@@ -221,22 +216,24 @@
let matcher = IncludeMatcher::new(ignore_patterns)
.map_err(|e| handle_fallback(py, e.into()))?;
- dmap.with_status(
- &matcher,
- root_dir.to_path_buf(),
- ignore_files,
- StatusOptions {
- check_exec,
- list_clean,
- list_ignored,
- list_unknown,
- list_copies,
- collect_traversed_dirs,
- },
- after_status,
- )
+ Ok(Box::new(matcher))
}
- e => Err(PyErr::new::<ValueError, _>(
+ "unionmatcher" => {
+ let matchers: PyResult<Vec<_>> = matcher
+ .getattr(py, "_matchers")?
+ .iter(py)?
+ .map(|py_matcher| extract_matcher(py, py_matcher?))
+ .collect();
+
+ Ok(Box::new(UnionMatcher::new(matchers?)))
+ }
+ "intersectionmatcher" => {
+ let m1 = extract_matcher(py, matcher.getattr(py, "_m1")?)?;
+ let m2 = extract_matcher(py, matcher.getattr(py, "_m2")?)?;
+
+ Ok(Box::new(IntersectionMatcher::new(m1, m2)))
+ }
+ e => Err(PyErr::new::<FallbackError, _>(
py,
format!("Unsupported matcher {}", e),
)),
--- a/rust/hg-cpython/src/lib.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hg-cpython/src/lib.rs Thu Jun 16 15:28:54 2022 +0200
@@ -62,7 +62,7 @@
Ok(())
});
-#[cfg(not(any(feature = "python27-bin", feature = "python3-bin")))]
+#[cfg(not(feature = "python3-bin"))]
#[test]
#[ignore]
fn libpython_must_be_linked_to_run_tests() {
--- a/rust/hgcli/pyoxidizer.bzl Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/hgcli/pyoxidizer.bzl Thu Jun 16 15:28:54 2022 +0200
@@ -292,7 +292,6 @@
"Platform": platform,
"Version": VERSION,
"Comments": "Installs Mercurial version %s" % VERSION,
- "PythonVersion": "3",
"MercurialHasLib": "1",
}
--- a/rust/rhg/Cargo.toml Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/rhg/Cargo.toml Thu Jun 16 15:28:54 2022 +0200
@@ -8,17 +8,17 @@
edition = "2018"
[dependencies]
-atty = "0.2"
+atty = "0.2.14"
hg-core = { path = "../hg-core"}
chrono = "0.4.19"
-clap = "2.33.1"
-derive_more = "0.99"
+clap = "2.34.0"
+derive_more = "0.99.17"
home = "0.5.3"
lazy_static = "1.4.0"
-log = "0.4.11"
-micro-timer = "0.3.1"
-regex = "1.3.9"
-env_logger = "0.7.1"
+log = "0.4.14"
+micro-timer = "0.4.0"
+regex = "1.5.5"
+env_logger = "0.9.0"
format-bytes = "0.3.0"
users = "0.11.0"
which = "4.2.5"
--- a/rust/rhg/src/blackbox.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/rhg/src/blackbox.rs Thu Jun 16 15:28:54 2022 +0200
@@ -5,6 +5,7 @@
use hg::errors::HgError;
use hg::repo::Repo;
use hg::utils::{files::get_bytes_from_os_str, shell_quote};
+use std::ffi::OsString;
const ONE_MEBIBYTE: u64 = 1 << 20;
@@ -83,14 +84,21 @@
})
}
- pub fn log_command_start(&self) {
+ pub fn log_command_start<'arg>(
+ &self,
+ argv: impl Iterator<Item = &'arg OsString>,
+ ) {
if let Some(configured) = &self.configured {
- let message = format_bytes!(b"(rust) {}", format_cli_args());
+ let message = format_bytes!(b"(rust) {}", format_cli_args(argv));
configured.log(&self.process_start_time.calendar_based, &message);
}
}
- pub fn log_command_end(&self, exit_code: i32) {
+ pub fn log_command_end<'arg>(
+ &self,
+ argv: impl Iterator<Item = &'arg OsString>,
+ exit_code: i32,
+ ) {
if let Some(configured) = &self.configured {
let now = chrono::Local::now();
let duration = self
@@ -100,7 +108,7 @@
.as_secs_f64();
let message = format_bytes!(
b"(rust) {} exited {} after {} seconds",
- format_cli_args(),
+ format_cli_args(argv),
exit_code,
format_bytes::Utf8(format_args!("{:.03}", duration))
);
@@ -147,8 +155,9 @@
}
}
-fn format_cli_args() -> Vec<u8> {
- let mut args = std::env::args_os();
+fn format_cli_args<'a>(
+ mut args: impl Iterator<Item = &'a OsString>,
+) -> Vec<u8> {
let _ = args.next(); // Skip the first (or zeroth) arg, the name of the `rhg` executable
let mut args = args.map(|arg| shell_quote(&get_bytes_from_os_str(arg)));
let mut formatted = Vec::new();
--- a/rust/rhg/src/commands/status.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/rhg/src/commands/status.rs Thu Jun 16 15:28:54 2022 +0200
@@ -15,7 +15,6 @@
use hg::dirstate::has_exec_bit;
use hg::dirstate::status::StatusPath;
use hg::dirstate::TruncatedTimestamp;
-use hg::dirstate::RANGE_MASK_31BIT;
use hg::errors::{HgError, IoResultExt};
use hg::lock::LockError;
use hg::manifest::Manifest;
@@ -390,12 +389,8 @@
.when_reading_file(&fs_path)?
{
let mode = fs_metadata.mode();
- let size = fs_metadata.len() as u32 & RANGE_MASK_31BIT;
- let mut entry = dmap
- .get(&hg_path)?
- .expect("ambiguous file not in dirstate");
- entry.set_clean(mode, size, mtime);
- dmap.add_file(&hg_path, entry)?;
+ let size = fs_metadata.len();
+ dmap.set_clean(&hg_path, mode, size as u32, mtime)?;
dirstate_write_needed = true
}
}
--- a/rust/rhg/src/error.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/rhg/src/error.rs Thu Jun 16 15:28:54 2022 +0200
@@ -73,6 +73,9 @@
HgError::UnsupportedFeature(message) => {
CommandError::unsupported(message)
}
+ HgError::CensoredNodeError => {
+ CommandError::unsupported("Encountered a censored node")
+ }
HgError::Abort {
message,
detailed_exit_code,
--- a/rust/rhg/src/main.rs Thu Jun 16 15:15:03 2022 +0200
+++ b/rust/rhg/src/main.rs Thu Jun 16 15:28:54 2022 +0200
@@ -7,10 +7,10 @@
use clap::ArgMatches;
use format_bytes::{format_bytes, join};
use hg::config::{Config, ConfigSource};
-use hg::exit_codes;
use hg::repo::{Repo, RepoError};
use hg::utils::files::{get_bytes_from_os_str, get_path_from_bytes};
use hg::utils::SliceExt;
+use hg::{exit_codes, requirements};
use std::collections::HashSet;
use std::ffi::OsString;
use std::os::unix::prelude::CommandExt;
@@ -26,6 +26,7 @@
}
fn main_with_result(
+ argv: Vec<OsString>,
process_start_time: &blackbox::ProcessStartTime,
ui: &ui::Ui,
repo: Result<&Repo, &NoRepoInCwdError>,
@@ -79,7 +80,7 @@
.version("0.0.1");
let app = add_subcommand_args(app);
- let matches = app.clone().get_matches_safe()?;
+ let matches = app.clone().get_matches_from_safe(argv.iter())?;
let (subcommand_name, subcommand_matches) = matches.subcommand();
@@ -124,23 +125,26 @@
if config.is_extension_enabled(b"blackbox") {
let blackbox =
blackbox::Blackbox::new(&invocation, process_start_time)?;
- blackbox.log_command_start();
+ blackbox.log_command_start(argv.iter());
let result = run(&invocation);
- blackbox.log_command_end(exit_code(
- &result,
- // TODO: show a warning or combine with original error if
- // `get_bool` returns an error
- config
- .get_bool(b"ui", b"detailed-exit-code")
- .unwrap_or(false),
- ));
+ blackbox.log_command_end(
+ argv.iter(),
+ exit_code(
+ &result,
+ // TODO: show a warning or combine with original error if
+ // `get_bool` returns an error
+ config
+ .get_bool(b"ui", b"detailed-exit-code")
+ .unwrap_or(false),
+ ),
+ );
result
} else {
run(&invocation)
}
}
-fn main() {
+fn rhg_main(argv: Vec<OsString>) -> ! {
// Run this first, before we find out if the blackbox extension is even
// enabled, in order to include everything in-between in the duration
// measurements. Reading config files can be slow if they’re on NFS.
@@ -148,7 +152,7 @@
env_logger::init();
- let early_args = EarlyArgs::parse(std::env::args_os());
+ let early_args = EarlyArgs::parse(&argv);
let initial_current_dir = early_args.cwd.map(|cwd| {
let cwd = get_path_from_bytes(&cwd);
@@ -159,6 +163,7 @@
})
.unwrap_or_else(|error| {
exit(
+ &argv,
&None,
&Ui::new_infallible(&Config::empty()),
OnUnsupported::Abort,
@@ -180,6 +185,7 @@
let on_unsupported = OnUnsupported::Abort;
exit(
+ &argv,
&initial_current_dir,
&Ui::new_infallible(&Config::empty()),
on_unsupported,
@@ -192,6 +198,7 @@
.load_cli_args(early_args.config, early_args.color)
.unwrap_or_else(|error| {
exit(
+ &argv,
&initial_current_dir,
&Ui::new_infallible(&non_repo_config),
OnUnsupported::from_config(&non_repo_config),
@@ -210,6 +217,7 @@
}
if SCHEME_RE.is_match(&repo_path_bytes) {
exit(
+ &argv,
&initial_current_dir,
&Ui::new_infallible(&non_repo_config),
OnUnsupported::from_config(&non_repo_config),
@@ -300,6 +308,7 @@
Err(NoRepoInCwdError { cwd: at })
}
Err(error) => exit(
+ &argv,
&initial_current_dir,
&Ui::new_infallible(&non_repo_config),
OnUnsupported::from_config(&non_repo_config),
@@ -319,6 +328,7 @@
};
let ui = Ui::new(&config).unwrap_or_else(|error| {
exit(
+ &argv,
&initial_current_dir,
&Ui::new_infallible(&config),
OnUnsupported::from_config(&config),
@@ -331,12 +341,14 @@
let on_unsupported = OnUnsupported::from_config(config);
let result = main_with_result(
+ argv.iter().map(|s| s.to_owned()).collect(),
&process_start_time,
&ui,
repo_result.as_ref(),
config,
);
exit(
+ &argv,
&initial_current_dir,
&ui,
on_unsupported,
@@ -349,6 +361,10 @@
)
}
+fn main() -> ! {
+ rhg_main(std::env::args_os().collect())
+}
+
fn exit_code(
result: &Result<(), CommandError>,
use_detailed_exit_code: bool,
@@ -377,7 +393,8 @@
}
}
-fn exit(
+fn exit<'a>(
+ original_args: &'a [OsString],
initial_current_dir: &Option<PathBuf>,
ui: &Ui,
mut on_unsupported: OnUnsupported,
@@ -389,7 +406,7 @@
Err(CommandError::UnsupportedFeature { message }),
) = (&on_unsupported, &result)
{
- let mut args = std::env::args_os();
+ let mut args = original_args.iter();
let executable = match executable {
None => {
exit_no_fallback(
@@ -567,7 +584,7 @@
}
impl EarlyArgs {
- fn parse(args: impl IntoIterator<Item = OsString>) -> Self {
+ fn parse<'a>(args: impl IntoIterator<Item = &'a OsString>) -> Self {
let mut args = args.into_iter().map(get_bytes_from_os_str);
let mut config = Vec::new();
let mut color = None;
@@ -664,6 +681,11 @@
&[b"blackbox", b"share", b"sparse", b"narrow", b"*"];
fn check_extensions(config: &Config) -> Result<(), CommandError> {
+ if let Some(b"*") = config.get(b"rhg", b"ignored-extensions") {
+ // All extensions are to be ignored, nothing to do here
+ return Ok(());
+ }
+
let enabled: HashSet<&[u8]> = config
.get_section_keys(b"extensions")
.into_iter()
@@ -690,6 +712,9 @@
if unsupported.is_empty() {
Ok(())
} else {
+ let mut unsupported: Vec<_> = unsupported.into_iter().collect();
+ // Sort the extensions to get a stable output
+ unsupported.sort();
Err(CommandError::UnsupportedFeature {
message: format_bytes!(
b"extensions: {} (consider adding them to 'rhg.ignored-extensions' config)",
@@ -699,6 +724,60 @@
}
}
+/// Array of tuples of (auto upgrade conf, feature conf, local requirement)
+const AUTO_UPGRADES: &[((&str, &str), (&str, &str), &str)] = &[
+ (
+ ("format", "use-share-safe.automatic-upgrade-of-mismatching-repositories"),
+ ("format", "use-share-safe"),
+ requirements::SHARESAFE_REQUIREMENT,
+ ),
+ (
+ ("format", "use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories"),
+ ("format", "use-dirstate-tracked-hint"),
+ requirements::DIRSTATE_TRACKED_HINT_V1,
+ ),
+ (
+ ("use-dirstate-v2", "automatic-upgrade-of-mismatching-repositories"),
+ ("format", "use-dirstate-v2"),
+ requirements::DIRSTATE_V2_REQUIREMENT,
+ ),
+];
+
+/// Mercurial allows users to automatically upgrade their repository.
+/// `rhg` does not have the ability to upgrade yet, so fallback if an upgrade
+/// is needed.
+fn check_auto_upgrade(
+ config: &Config,
+ reqs: &HashSet<String>,
+) -> Result<(), CommandError> {
+ for (upgrade_conf, feature_conf, local_req) in AUTO_UPGRADES.iter() {
+ let auto_upgrade = config
+ .get_bool(upgrade_conf.0.as_bytes(), upgrade_conf.1.as_bytes())?;
+
+ if auto_upgrade {
+ let want_it = config.get_bool(
+ feature_conf.0.as_bytes(),
+ feature_conf.1.as_bytes(),
+ )?;
+ let have_it = reqs.contains(*local_req);
+
+ let action = match (want_it, have_it) {
+ (true, false) => Some("upgrade"),
+ (false, true) => Some("downgrade"),
+ _ => None,
+ };
+ if let Some(action) = action {
+ let message = format!(
+ "automatic {} {}.{}",
+ action, upgrade_conf.0, upgrade_conf.1
+ );
+ return Err(CommandError::unsupported(message));
+ }
+ }
+ }
+ Ok(())
+}
+
fn check_unsupported(
config: &Config,
repo: Result<&Repo, &NoRepoInCwdError>,
@@ -715,6 +794,7 @@
if repo.has_subrepos()? {
Err(CommandError::unsupported("sub-repositories"))?
}
+ check_auto_upgrade(config, repo.requirements())?;
}
if config.has_non_empty_section(b"encode") {
--- a/setup.py Thu Jun 16 15:15:03 2022 +0200
+++ b/setup.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,99 +5,24 @@
# 'python setup.py --help' for more options
import os
-# Mercurial will never work on Python 3 before 3.5 due to a lack
-# of % formatting on bytestrings, and can't work on 3.6.0 or 3.6.1
-# due to a bug in % formatting in bytestrings.
-# We cannot support Python 3.5.0, 3.5.1, 3.5.2 because of bug in
-# codecs.escape_encode() where it raises SystemError on empty bytestring
-# bug link: https://bugs.python.org/issue25270
+# Mercurial can't work on 3.6.0 or 3.6.1 due to a bug in % formatting
+# in bytestrings.
supportedpy = ','.join(
[
- '>=2.7.4',
- '!=3.0.*',
- '!=3.1.*',
- '!=3.2.*',
- '!=3.3.*',
- '!=3.4.*',
- '!=3.5.0',
- '!=3.5.1',
- '!=3.5.2',
- '!=3.6.0',
- '!=3.6.1',
+ '>=3.6.2',
]
)
import sys, platform
import sysconfig
-if sys.version_info[0] >= 3:
- printf = eval('print')
- libdir_escape = 'unicode_escape'
- def sysstr(s):
- return s.decode('latin-1')
-
-
-else:
- libdir_escape = 'string_escape'
-
- def printf(*args, **kwargs):
- f = kwargs.get('file', sys.stdout)
- end = kwargs.get('end', '\n')
- f.write(b' '.join(args) + end)
-
- def sysstr(s):
- return s
+def sysstr(s):
+ return s.decode('latin-1')
-# Attempt to guide users to a modern pip - this means that 2.6 users
-# should have a chance of getting a 4.2 release, and when we ratchet
-# the version requirement forward again hopefully everyone will get
-# something that works for them.
-if sys.version_info < (2, 7, 4, 'final'):
- pip_message = (
- 'This may be due to an out of date pip. '
- 'Make sure you have pip >= 9.0.1.'
- )
- try:
- import pip
-
- pip_version = tuple([int(x) for x in pip.__version__.split('.')[:3]])
- if pip_version < (9, 0, 1):
- pip_message = (
- 'Your pip version is out of date, please install '
- 'pip >= 9.0.1. pip {} detected.'.format(pip.__version__)
- )
- else:
- # pip is new enough - it must be something else
- pip_message = ''
- except Exception:
- pass
- error = """
-Mercurial does not support Python older than 2.7.4.
-Python {py} detected.
-{pip}
-""".format(
- py=sys.version_info, pip=pip_message
- )
- printf(error, file=sys.stderr)
- sys.exit(1)
-
import ssl
-try:
- ssl.SSLContext
-except AttributeError:
- error = """
-The `ssl` module does not have the `SSLContext` class. This indicates an old
-Python version which does not support modern security features (which were
-added to Python 2.7 as part of "PEP 466"). Please make sure you have installed
-at least Python 2.7.9 or a Python version with backports of these security
-features.
-"""
- printf(error, file=sys.stderr)
- sys.exit(1)
-
# ssl.HAS_TLSv1* are preferred to check support but they were added in Python
# 3.7. Prior to CPython commit 6e8cda91d92da72800d891b2fc2073ecbc134d98
# (backported to the 3.7 branch), ssl.PROTOCOL_TLSv1_1 / ssl.PROTOCOL_TLSv1_2
@@ -117,14 +42,10 @@
version enabling these features (likely this requires the OpenSSL version to
be at least 1.0.1).
"""
- printf(error, file=sys.stderr)
+ print(error, file=sys.stderr)
sys.exit(1)
-if sys.version_info[0] >= 3:
- DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX']
-else:
- # deprecated in Python 3
- DYLIB_SUFFIX = sysconfig.get_config_vars()['SO']
+DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX']
# Solaris Python packaging brain damage
try:
@@ -174,7 +95,6 @@
ispypy = "PyPy" in sys.version
import ctypes
-import errno
import stat, subprocess, time
import re
import shutil
@@ -276,7 +196,7 @@
try:
import py2exe
- py2exe.Distribution # silence unused import warning
+ py2exe.patch_distutils()
py2exeloaded = True
# import py2exe's patched Distribution class
from distutils.core import Distribution
@@ -292,7 +212,7 @@
return p.returncode, out, err
-class hgcommand(object):
+class hgcommand:
def __init__(self, cmd, env):
self.cmd = cmd
self.env = env
@@ -302,8 +222,8 @@
returncode, out, err = runcmd(cmd, self.env)
err = filterhgerr(err)
if err or returncode != 0:
- printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
- printf(err, file=sys.stderr)
+ print("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
+ print(err, file=sys.stderr)
return b''
return out
@@ -536,7 +456,7 @@
if hgrustext != 'cpython' and hgrustext is not None:
if hgrustext:
msg = 'unknown HGWITHRUSTEXT value: %s' % hgrustext
- printf(msg, file=sys.stderr)
+ print(msg, file=sys.stderr)
hgrustext = None
self.rust = hgrustext is not None
self.no_rust = not self.rust
@@ -810,12 +730,9 @@
# Copy the pythonXY.dll next to the binary so that it runs
# without tampering with PATH.
- fsdecode = lambda x: x
- if sys.version_info[0] >= 3:
- fsdecode = os.fsdecode
dest = os.path.join(
os.path.dirname(self.hgtarget),
- fsdecode(dllbasename),
+ os.fsdecode(dllbasename),
)
if not os.path.exists(dest):
@@ -823,19 +740,18 @@
# Also overwrite python3.dll so that hgext.git is usable.
# TODO: also handle the MSYS flavor
- if sys.version_info[0] >= 3:
- python_x = os.path.join(
- os.path.dirname(fsdecode(buf.value)),
- "python3.dll",
+ python_x = os.path.join(
+ os.path.dirname(os.fsdecode(buf.value)),
+ "python3.dll",
+ )
+
+ if os.path.exists(python_x):
+ dest = os.path.join(
+ os.path.dirname(self.hgtarget),
+ os.path.basename(python_x),
)
- if os.path.exists(python_x):
- dest = os.path.join(
- os.path.dirname(self.hgtarget),
- os.path.basename(python_x),
- )
-
- shutil.copy(python_x, dest)
+ shutil.copy(python_x, dest)
if not pythonlib:
log.warn(
@@ -850,14 +766,10 @@
f.write(b'/* this file is autogenerated by setup.py */\n')
f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib)
- macros = None
- if sys.version_info[0] >= 3:
- macros = [('_UNICODE', None), ('UNICODE', None)]
-
objects = self.compiler.compile(
['mercurial/exewrapper.c'],
output_dir=self.build_temp,
- macros=macros,
+ macros=[('_UNICODE', None), ('UNICODE', None)],
)
self.compiler.link_executable(
objects, self.hgtarget, libraries=[], output_dir=self.build_temp
@@ -1069,6 +981,10 @@
),
]
+ sub_commands = install.sub_commands + [
+ ('install_completion', lambda self: True)
+ ]
+
# Also helps setuptools not be sad while we refuse to create eggs.
single_version_externally_managed = True
@@ -1183,11 +1099,43 @@
)
continue
- data = data.replace(b'@LIBDIR@', libdir.encode(libdir_escape))
+ data = data.replace(b'@LIBDIR@', libdir.encode('unicode_escape'))
with open(outfile, 'wb') as fp:
fp.write(data)
+class hginstallcompletion(Command):
+ description = 'Install shell completion'
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.outputs = []
+
+ def finalize_options(self):
+ self.set_undefined_options(
+ 'install_data', ('install_dir', 'install_dir')
+ )
+
+ def get_outputs(self):
+ return self.outputs
+
+ def run(self):
+ for src, dir_path, dest in (
+ (
+ 'bash_completion',
+ ('share', 'bash-completion', 'completions'),
+ 'hg',
+ ),
+ ('zsh_completion', ('share', 'zsh', 'site-functions'), '_hg'),
+ ):
+ dir = os.path.join(self.install_dir, *dir_path)
+ self.mkpath(dir)
+
+ dest = os.path.join(dir, dest)
+ self.outputs.append(dest)
+ self.copy_file(os.path.join('contrib', src), dest)
+
+
# virtualenv installs custom distutils/__init__.py and
# distutils/distutils.cfg files which essentially proxy back to the
# "real" distutils in the main Python install. The presence of this
@@ -1278,6 +1226,7 @@
'build_scripts': hgbuildscripts,
'build_hgextindex': buildhgextindex,
'install': hginstall,
+ 'install_completion': hginstallcompletion,
'install_lib': hginstalllib,
'install_scripts': hginstallscripts,
'build_hgexe': buildhgexe,
@@ -1324,27 +1273,12 @@
'hgdemandimport',
]
-# The pygit2 dependency dropped py2 support with the 1.0 release in Dec 2019.
-# Prior releases do not build at all on Windows, because Visual Studio 2008
-# doesn't understand C 11. Older Linux releases are buggy.
-if sys.version_info[0] == 2:
- packages.remove('hgext.git')
-
-
for name in os.listdir(os.path.join('mercurial', 'templates')):
if name != '__pycache__' and os.path.isdir(
os.path.join('mercurial', 'templates', name)
):
packages.append('mercurial.templates.%s' % name)
-if sys.version_info[0] == 2:
- packages.extend(
- [
- 'mercurial.thirdparty.concurrent',
- 'mercurial.thirdparty.concurrent.futures',
- ]
- )
-
if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
# py2exe can't cope with namespace packages very well, so we have to
# install any hgext3rd.* extensions that we want in the final py2exe
@@ -1476,19 +1410,9 @@
cargocmd = ['cargo', 'rustc', '--release']
- feature_flags = []
-
- cargocmd.append('--no-default-features')
- if sys.version_info[0] == 2:
- feature_flags.append('python27')
- elif sys.version_info[0] == 3:
- feature_flags.append('python3')
-
rust_features = env.get("HG_RUST_FEATURES")
if rust_features:
- feature_flags.append(rust_features)
-
- cargocmd.extend(('--features', " ".join(feature_flags)))
+ cargocmd.extend(('--features', rust_features))
cargocmd.append('--')
if sys.platform == 'darwin':
@@ -1497,15 +1421,12 @@
)
try:
subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
- except OSError as exc:
- if exc.errno == errno.ENOENT:
- raise RustCompilationError("Cargo not found")
- elif exc.errno == errno.EACCES:
- raise RustCompilationError(
- "Cargo found, but permission to execute it is denied"
- )
- else:
- raise
+ except FileNotFoundError:
+ raise RustCompilationError("Cargo not found")
+ except PermissionError:
+ raise RustCompilationError(
+ "Cargo found, but permission to execute it is denied"
+ )
except subprocess.CalledProcessError:
raise RustCompilationError(
"Cargo failed. Working directory: %r, "
@@ -1640,7 +1561,7 @@
# the cygwinccompiler package is not available on some Python
# distributions like the ones from the optware project for Synology
# DiskStation boxes
- class HackedMingw32CCompiler(object):
+ class HackedMingw32CCompiler:
pass
@@ -1763,9 +1684,7 @@
if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[1].splitlines()
if version:
- version = version[0]
- if sys.version_info[0] == 3:
- version = version.decode('utf-8')
+ version = version[0].decode('utf-8')
xcode4 = version.startswith('Xcode') and StrictVersion(
version.split()[1]
) >= StrictVersion('4.0')
--- a/tests/artifacts/scripts/generate-churning-bundle.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/artifacts/scripts/generate-churning-bundle.py Thu Jun 16 15:28:54 2022 +0200
@@ -17,7 +17,6 @@
#
# Running with `chg` in your path and `CHGHG` set is recommended for speed.
-from __future__ import absolute_import, print_function
import hashlib
import os
--- a/tests/autodiff.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/autodiff.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# Extension dedicated to test patch.diff() upgrade modes
-from __future__ import absolute_import
from mercurial import (
error,
--- a/tests/basic_test_result.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/basic_test_result.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import sys
import unittest
--- a/tests/blackbox-readonly-dispatch.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/blackbox-readonly-dispatch.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import os
from mercurial import (
dispatch,
--- a/tests/bruterebase.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/bruterebase.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial import (
error,
@@ -15,11 +14,6 @@
from hgext import rebase
-try:
- xrange
-except NameError:
- xrange = range
-
cmdtable = {}
command = registrar.command(cmdtable)
@@ -42,7 +36,7 @@
result += b"'"
return result
- for i in xrange(1, 2 ** len(srevs)):
+ for i in range(1, 2 ** len(srevs)):
subset = [rev for j, rev in enumerate(srevs) if i & (1 << j) != 0]
spec = revsetlang.formatspec(b'%ld', subset)
tr = repo.transaction(b'rebase')
@@ -59,7 +53,7 @@
# short summary about new nodes
cl = repo.changelog
descs = []
- for rev in xrange(repolen, len(repo)):
+ for rev in range(repolen, len(repo)):
desc = b'%s:' % getdesc(rev)
for prev in cl.parentrevs(rev):
if prev > -1:
--- a/tests/bundles/test-revlog-diff-relative-to-nullrev.sh Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/bundles/test-revlog-diff-relative-to-nullrev.sh Thu Jun 16 15:28:54 2022 +0200
@@ -1,9 +1,22 @@
#!/bin/bash
#
# Make sure to patch mercurial to create the delta against nullrev
+#
+# # Parent cdb85d0512b81031d4a7b30d6a5ddbe69ef1a876
+#
+# diff --git a/mercurial/revlogutils/deltas.py b/mercurial/revlogutils/deltas.py
+# --- a/mercurial/revlogutils/deltas.py
+# +++ b/mercurial/revlogutils/deltas.py
+# @@ -1117,7 +1117,10 @@ class deltacomputer:
+# candidaterevs = next(groups)
+#
# if deltainfo is None:
-#- deltainfo = self._fullsnapshotinfo(fh, revinfo, target_rev)
-#+ deltainfo = self._builddeltainfo(revinfo, nullrev, fh)
+# - deltainfo = self._fullsnapshotinfo(fh, revinfo, target_rev)
+# + if revlog._generaldelta:
+# + deltainfo = self._builddeltainfo(revinfo, nullrev, fh)
+# + else:
+# + deltainfo = self._fullsnapshotinfo(fh, revinfo, target_rev)
+
cd "`dirname \"$0\"`"
export HGRCPATH=
@@ -14,6 +27,11 @@
cd nullrev-diff
echo hi > a
../../../hg commit -Am root-B
+echo ho > a
+../../../hg commit -Am child-A
+hg up null
+echo ha > a
+../../../hg commit -Am root-A
../../../hg debugdeltachain a
rm -rf .hg/cache/ .hg/wcache/
cd ..
Binary file tests/bundles/test-revlog-diff-relative-to-nullrev.tar has changed
--- a/tests/check-perf-code.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/check-perf-code.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
#
# check-perf-code - (historical) portability checker for contrib/perf.py
-from __future__ import absolute_import
import os
import sys
--- a/tests/common-pattern.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/common-pattern.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
# common patterns in test at can safely be replaced
-from __future__ import absolute_import
import os
--- a/tests/crashgetbundler.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/crashgetbundler.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from mercurial.i18n import _
from mercurial import changegroup, error, extensions
--- a/tests/drawdag.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/drawdag.py Thu Jun 16 15:28:54 2022 +0200
@@ -80,7 +80,6 @@
# split: A -> B, C # 1 to many
# prune: A, B, C # many to nothing
"""
-from __future__ import absolute_import, print_function
import collections
import itertools
@@ -266,7 +265,7 @@
return dict(edges)
-class simplefilectx(object):
+class simplefilectx:
def __init__(self, path, data):
self._data = data
self._path = path
--- a/tests/dumbhttp.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/dumbhttp.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
#!/usr/bin/env python
-from __future__ import absolute_import
"""
Small and dumb HTTP server for use in tests.
@@ -38,7 +37,7 @@
sys.stderr.flush()
-class simplehttpservice(object):
+class simplehttpservice:
def __init__(self, host, port):
self.address = (host, port)
--- a/tests/dummysmtpd.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/dummysmtpd.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
"""dummy SMTP server for use in tests"""
-from __future__ import absolute_import
import asyncore
import optparse
--- a/tests/dummyssh Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/dummyssh Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-from __future__ import absolute_import
import os
import shlex
--- a/tests/f Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/f Thu Jun 16 15:28:54 2022 +0200
@@ -23,7 +23,6 @@
md5sum.py
"""
-from __future__ import absolute_import
import binascii
import glob
--- a/tests/failfilemerge.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/failfilemerge.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# extension to emulate interrupting filemerge._filemerge
-from __future__ import absolute_import
from mercurial import (
error,
--- a/tests/fakedirstatewritetime.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/fakedirstatewritetime.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# - 'workingctx._poststatusfixup()' (= 'repo.status()')
# - 'committablectx.markcommitted()'
-from __future__ import absolute_import
from mercurial import (
context,
--- a/tests/fakemergerecord.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/fakemergerecord.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
#
#
-from __future__ import absolute_import
from mercurial import (
mergestate as mergestatemod,
--- a/tests/fakepatchtime.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/fakepatchtime.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
# extension to emulate invoking 'patch.internalpatch()' at the time
# specified by '[fakepatchtime] fakenow'
-from __future__ import absolute_import
from mercurial import (
extensions,
--- a/tests/filterpyflakes.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/filterpyflakes.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
# Filter output by pyflakes to control which warnings we check
-from __future__ import absolute_import, print_function
import re
import sys
--- a/tests/filtertraceback.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/filtertraceback.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
# Filters traceback lines from stdin.
-from __future__ import absolute_import, print_function
import io
import sys
--- a/tests/flagprocessorext.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/flagprocessorext.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# coding=UTF-8
-from __future__ import absolute_import
import base64
import zlib
--- a/tests/fsmonitor-run-tests.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/fsmonitor-run-tests.py Thu Jun 16 15:28:54 2022 +0200
@@ -11,8 +11,6 @@
# Watchman and runs the Mercurial tests against it. This ensures that the global
# version of Watchman isn't affected by anything this test does.
-from __future__ import absolute_import
-from __future__ import print_function
import argparse
import contextlib
@@ -28,7 +26,6 @@
if sys.version_info > (3, 5, 0):
PYTHON3 = True
- xrange = range # we use xrange in one place, and we'd rather not use range
def _sys2bytes(p):
return p.encode('utf-8')
--- a/tests/generate-working-copy-states.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/generate-working-copy-states.py Thu Jun 16 15:28:54 2022 +0200
@@ -29,7 +29,6 @@
# $ hg forget *_*_*-untracked
# $ rm *_*_missing-*
-from __future__ import absolute_import, print_function
import os
import sys
--- a/tests/get-with-headers.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/get-with-headers.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
"""This does HTTP GET requests given a host:port and path and returns
a subset of the headers plus the body of the result."""
-from __future__ import absolute_import
import argparse
import json
--- a/tests/heredoctest.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/heredoctest.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import sys
--- a/tests/hghave Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/hghave Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
prefixed with "no-", the absence of feature is tested.
"""
-from __future__ import absolute_import, print_function
import hghave
import optparse
--- a/tests/hghave.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/hghave.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import distutils.version
import os
import re
@@ -200,7 +198,7 @@
@check("pyoxidizer", "running with pyoxidizer build as 'hg'")
-def has_rhg():
+def has_pyoxidizer():
return 'PYOXIDIZED_INSTALLED_AS_HG' in os.environ
@@ -410,7 +408,7 @@
@check("pygit2", "pygit2 Python library")
-def has_git():
+def has_pygit2():
try:
import pygit2
@@ -752,7 +750,7 @@
@check("network-io", "whether tests are allowed to access 3rd party services")
-def has_test_repo():
+def has_network_io():
t = os.environ.get("HGTESTS_ALLOW_NETIO")
return t == "1"
--- a/tests/hgweberror.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/hgweberror.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# A dummy extension that installs an hgweb command that throws an Exception.
-from __future__ import absolute_import
from mercurial.hgweb import webcommands
--- a/tests/httpserverauth.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/httpserverauth.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import base64
import hashlib
@@ -18,7 +16,7 @@
return parsed
-class digestauthserver(object):
+class digestauthserver:
def __init__(self):
self._user_hashes = {}
--- a/tests/hypothesishelpers.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/hypothesishelpers.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
#
# For details see http://hypothesis.readthedocs.org
-from __future__ import absolute_import, print_function
import os
import sys
import traceback
--- a/tests/killdaemons.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/killdaemons.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,5 @@
#!/usr/bin/env python3
-from __future__ import absolute_import
-import errno
import os
import signal
import sys
@@ -94,9 +92,8 @@
os.kill(pid, 0)
logfn('# Daemon process %d is stuck - really killing it' % pid)
os.kill(pid, signal.SIGKILL)
- except OSError as err:
- if err.errno != errno.ESRCH:
- raise
+ except ProcessLookupError:
+ pass
def killdaemons(pidfile, tryhard=True, remove=False, logfn=None):
--- a/tests/list-tree.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/list-tree.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,8 +1,3 @@
-from __future__ import (
- absolute_import,
- print_function,
-)
-
import argparse
import os
--- a/tests/lockdelay.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/lockdelay.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
#
# This extension can be used to test race conditions between lock acquisition.
-from __future__ import absolute_import
import os
import time
--- a/tests/logexceptions.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/logexceptions.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import inspect
import os
--- a/tests/ls-l.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/ls-l.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
# like ls -l, but do not print date, user, or non-common mode bit, to avoid
# using globs in tests.
-from __future__ import absolute_import, print_function
import os
import stat
--- a/tests/md5sum.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/md5sum.py Thu Jun 16 15:28:54 2022 +0200
@@ -6,7 +6,6 @@
# of the PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2, which is
# GPL-compatible.
-from __future__ import absolute_import
import hashlib
import os
--- a/tests/mockblackbox.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/mockblackbox.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
from mercurial.utils import procutil
# XXX: we should probably offer a devel option to do this in blackbox directly
--- a/tests/mockmakedate.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/mockmakedate.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# mock out util.makedate() to supply testable values
-from __future__ import absolute_import
import os
--- a/tests/mocktime.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/mocktime.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,10 +1,8 @@
-from __future__ import absolute_import
-
import os
import time
-class mocktime(object):
+class mocktime:
def __init__(self, increment):
self.time = 0
self.increment = [float(s) for s in increment.split()]
--- a/tests/printenv.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/printenv.py Thu Jun 16 15:28:54 2022 +0200
@@ -12,7 +12,6 @@
# - [output] is the name of the output file (default: use sys.stdout)
# the file will be opened in append mode.
#
-from __future__ import absolute_import
import argparse
import os
import sys
--- a/tests/printrevset.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/printrevset.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
from mercurial.thirdparty import attr
from mercurial import (
cmdutil,
--- a/tests/pullext.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/pullext.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial.i18n import _
from mercurial import (
--- a/tests/readlink.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/readlink.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-from __future__ import absolute_import, print_function
import errno
import os
--- a/tests/remotefilelog-getflogheads.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/remotefilelog-getflogheads.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from mercurial.i18n import _
from mercurial import (
hg,
--- a/tests/revlog-formatv0.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/revlog-formatv0.py Thu Jun 16 15:28:54 2022 +0200
@@ -17,7 +17,6 @@
empty file
"""
-from __future__ import absolute_import
import binascii
import os
import sys
--- a/tests/revnamesext.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/revnamesext.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# Dummy extension to define a namespace containing revision names
-from __future__ import absolute_import
from mercurial import namespaces
--- a/tests/run-tests.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/run-tests.py Thu Jun 16 15:28:54 2022 +0200
@@ -43,7 +43,6 @@
# completes fairly quickly, includes both shell and Python scripts, and
# includes some scripts that run daemon processes.)
-from __future__ import absolute_import, print_function
import argparse
import collections
@@ -51,12 +50,15 @@
import difflib
import distutils.version as version
import errno
+import functools
import json
import multiprocessing
import os
import platform
+import queue
import random
import re
+import shlex
import shutil
import signal
import socket
@@ -70,21 +72,15 @@
import uuid
import xml.dom.minidom as minidom
+if sys.version_info < (3, 5, 0):
+ print(
+ '%s is only supported on Python 3.5+, not %s'
+ % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
+ )
+ sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
+
WINDOWS = os.name == r'nt'
-
-try:
- import Queue as queue
-except ImportError:
- import queue
-
-try:
- import shlex
-
- shellquote = shlex.quote
-except (ImportError, AttributeError):
- import pipes
-
- shellquote = pipes.quote
+shellquote = shlex.quote
processlock = threading.Lock()
@@ -155,80 +151,59 @@
origenviron = os.environ.copy()
-if sys.version_info > (3, 5, 0):
- PYTHON3 = True
- xrange = range # we use xrange in one place, and we'd rather not use range
-
- def _sys2bytes(p):
- if p is None:
- return p
- return p.encode('utf-8')
-
- def _bytes2sys(p):
- if p is None:
- return p
- return p.decode('utf-8')
-
- osenvironb = getattr(os, 'environb', None)
- if osenvironb is None:
- # Windows lacks os.environb, for instance. A proxy over the real thing
- # instead of a copy allows the environment to be updated via bytes on
- # all platforms.
- class environbytes(object):
- def __init__(self, strenv):
- self.__len__ = strenv.__len__
- self.clear = strenv.clear
- self._strenv = strenv
-
- def __getitem__(self, k):
- v = self._strenv.__getitem__(_bytes2sys(k))
- return _sys2bytes(v)
-
- def __setitem__(self, k, v):
- self._strenv.__setitem__(_bytes2sys(k), _bytes2sys(v))
-
- def __delitem__(self, k):
- self._strenv.__delitem__(_bytes2sys(k))
-
- def __contains__(self, k):
- return self._strenv.__contains__(_bytes2sys(k))
-
- def __iter__(self):
- return iter([_sys2bytes(k) for k in iter(self._strenv)])
-
- def get(self, k, default=None):
- v = self._strenv.get(_bytes2sys(k), _bytes2sys(default))
- return _sys2bytes(v)
-
- def pop(self, k, default=None):
- v = self._strenv.pop(_bytes2sys(k), _bytes2sys(default))
- return _sys2bytes(v)
-
- osenvironb = environbytes(os.environ)
-
- getcwdb = getattr(os, 'getcwdb')
- if not getcwdb or WINDOWS:
- getcwdb = lambda: _sys2bytes(os.getcwd())
-
-elif sys.version_info >= (3, 0, 0):
- print(
- '%s is only supported on Python 3.5+ and 2.7, not %s'
- % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
- )
- sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
-else:
- PYTHON3 = False
-
- # In python 2.x, path operations are generally done using
- # bytestrings by default, so we don't have to do any extra
- # fiddling there. We define the wrapper functions anyway just to
- # help keep code consistent between platforms.
- def _sys2bytes(p):
+def _sys2bytes(p):
+ if p is None:
+ return p
+ return p.encode('utf-8')
+
+
+def _bytes2sys(p):
+ if p is None:
return p
-
- _bytes2sys = _sys2bytes
- osenvironb = os.environ
- getcwdb = os.getcwd
+ return p.decode('utf-8')
+
+
+osenvironb = getattr(os, 'environb', None)
+if osenvironb is None:
+ # Windows lacks os.environb, for instance. A proxy over the real thing
+ # instead of a copy allows the environment to be updated via bytes on
+ # all platforms.
+ class environbytes:
+ def __init__(self, strenv):
+ self.__len__ = strenv.__len__
+ self.clear = strenv.clear
+ self._strenv = strenv
+
+ def __getitem__(self, k):
+ v = self._strenv.__getitem__(_bytes2sys(k))
+ return _sys2bytes(v)
+
+ def __setitem__(self, k, v):
+ self._strenv.__setitem__(_bytes2sys(k), _bytes2sys(v))
+
+ def __delitem__(self, k):
+ self._strenv.__delitem__(_bytes2sys(k))
+
+ def __contains__(self, k):
+ return self._strenv.__contains__(_bytes2sys(k))
+
+ def __iter__(self):
+ return iter([_sys2bytes(k) for k in iter(self._strenv)])
+
+ def get(self, k, default=None):
+ v = self._strenv.get(_bytes2sys(k), _bytes2sys(default))
+ return _sys2bytes(v)
+
+ def pop(self, k, default=None):
+ v = self._strenv.pop(_bytes2sys(k), _bytes2sys(default))
+ return _sys2bytes(v)
+
+ osenvironb = environbytes(os.environ)
+
+getcwdb = getattr(os, 'getcwdb')
+if not getcwdb or WINDOWS:
+ getcwdb = lambda: _sys2bytes(os.getcwd())
+
if WINDOWS:
_getcwdb = getcwdb
@@ -260,10 +235,14 @@
s.bind(('localhost', port))
s.close()
return True
- except socket.error as exc:
+ except (socket.error, OSError) as exc:
if exc.errno == errno.EADDRINUSE:
return True
- elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
+ elif exc.errno in (
+ errno.EADDRNOTAVAIL,
+ errno.EPROTONOSUPPORT,
+ errno.EAFNOSUPPORT,
+ ):
return False
else:
raise
@@ -288,12 +267,11 @@
except socket.error as exc:
if WINDOWS and exc.errno == errno.WSAEACCES:
return False
- elif PYTHON3:
- # TODO: make a proper exception handler after dropping py2. This
- # works because socket.error is an alias for OSError on py3,
- # which is also the baseclass of PermissionError.
- if isinstance(exc, PermissionError):
- return False
+ # TODO: make a proper exception handler after dropping py2. This
+ # works because socket.error is an alias for OSError on py3,
+ # which is also the baseclass of PermissionError.
+ elif isinstance(exc, PermissionError):
+ return False
if exc.errno not in (
errno.EADDRINUSE,
errno.EADDRNOTAVAIL,
@@ -372,18 +350,10 @@
def which(exe):
- if PYTHON3:
- # shutil.which only accept bytes from 3.8
- cmd = _bytes2sys(exe)
- real_exec = shutil.which(cmd)
- return _sys2bytes(real_exec)
- else:
- # let us do the os work
- for p in osenvironb[b'PATH'].split(os.pathsep):
- f = os.path.join(p, exe)
- if os.path.isfile(f):
- return f
- return None
+ # shutil.which only accept bytes from 3.8
+ cmd = _bytes2sys(exe)
+ real_exec = shutil.which(cmd)
+ return _sys2bytes(real_exec)
def parselistfiles(files, listtype, warn=True):
@@ -392,9 +362,7 @@
try:
path = os.path.expanduser(os.path.expandvars(filename))
f = open(path, "rb")
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
if warn:
print("warning: no such %s file: %s" % (listtype, filename))
continue
@@ -420,9 +388,8 @@
for l in f:
if l.startswith(b'#testcases '):
cases.append(sorted(l[11:].split()))
- except IOError as ex:
- if ex.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
return cases
@@ -898,11 +865,7 @@
pass
-_unified_diff = difflib.unified_diff
-if PYTHON3:
- import functools
-
- _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)
+_unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)
def getdiff(expected, output, ref, err):
@@ -1133,9 +1096,8 @@
try:
os.mkdir(self._threadtmp)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
name = self._tmpname
self._testtmp = os.path.join(self._threadtmp, name)
@@ -1145,12 +1107,11 @@
if os.path.exists(self.errpath):
try:
os.remove(self.errpath)
- except OSError as e:
- # We might have raced another test to clean up a .err
- # file, so ignore ENOENT when removing a previous .err
+ except FileNotFoundError:
+ # We might have raced another test to clean up a .err file,
+ # so ignore FileNotFoundError when removing a previous .err
# file.
- if e.errno != errno.ENOENT:
- raise
+ pass
if self._usechg:
self._chgsockdir = os.path.join(
@@ -1453,7 +1414,7 @@
env['HGTEST_TIMEOUT_DEFAULT'] = formated_timeout
env['HGTEST_TIMEOUT'] = _bytes2sys(b"%d" % self._timeout)
# This number should match portneeded in _getport
- for port in xrange(3):
+ for port in range(3):
# This list should be parallel to _portmap in _getreplacements
defineport(port)
env["HGRCPATH"] = _bytes2sys(os.path.join(self._threadtmp, b'.hgrc'))
@@ -1494,7 +1455,7 @@
# This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
# but this is needed for testing python instances like dummyssh,
# dummysmtpd.py, and dumbhttp.py.
- if PYTHON3 and WINDOWS:
+ if WINDOWS:
env['PYTHONLEGACYWINDOWSSTDIO'] = '1'
# Modified HOME in test environment can confuse Rust tools. So set
@@ -1685,9 +1646,7 @@
re.compile(br'.*\$LOCALIP.*$'),
]
-bchr = chr
-if PYTHON3:
- bchr = lambda x: bytes([x])
+bchr = lambda x: bytes([x])
WARN_UNDEFINED = 1
WARN_YES = 2
@@ -1826,9 +1785,7 @@
script.append(b'echo %s %d $?\n' % (salt, line))
activetrace = []
- session = str(uuid.uuid4())
- if PYTHON3:
- session = session.encode('ascii')
+ session = str(uuid.uuid4()).encode('ascii')
hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or os.getenv(
'HGCATAPULTSERVERPIPE'
)
@@ -1882,11 +1839,8 @@
script.append(b'alias pwd="pwd -W"\n')
if hgcatapult and hgcatapult != os.devnull:
- if PYTHON3:
- hgcatapult = hgcatapult.encode('utf8')
- cataname = self.name.encode('utf8')
- else:
- cataname = self.name
+ hgcatapult = hgcatapult.encode('utf8')
+ cataname = self.name.encode('utf8')
# Kludge: use a while loop to keep the pipe from getting
# closed by our echo commands. The still-running file gets
@@ -2191,11 +2145,8 @@
return "retry", False
if el.endswith(b" (esc)\n"):
- if PYTHON3:
- el = el[:-7].decode('unicode_escape') + '\n'
- el = el.encode('latin-1')
- else:
- el = el[:-7].decode('string-escape') + '\n'
+ el = el[:-7].decode('unicode_escape') + '\n'
+ el = el.encode('latin-1')
if el == l or WINDOWS and el[:-1] + b'\r\n' == l:
return True, True
if el.endswith(b" (re)\n"):
@@ -2243,10 +2194,7 @@
firstlock = threading.RLock()
firsterror = False
-if PYTHON3:
- base_class = unittest.TextTestResult
-else:
- base_class = unittest._TextTestResult
+base_class = unittest.TextTestResult
class TestResult(base_class):
@@ -2370,13 +2318,9 @@
self.stream.write('\n')
for line in lines:
line = highlightdiff(line, self.color)
- if PYTHON3:
- self.stream.flush()
- self.stream.buffer.write(line)
- self.stream.buffer.flush()
- else:
- self.stream.write(line)
- self.stream.flush()
+ self.stream.flush()
+ self.stream.buffer.write(line)
+ self.stream.buffer.flush()
if servefail:
raise test.failureException(
@@ -2551,7 +2495,7 @@
if ignored:
continue
- for _ in xrange(self._runs_per_test):
+ for _ in range(self._runs_per_test):
tests.append(get())
runtests = list(tests)
@@ -2600,7 +2544,7 @@
with iolock:
sys.stdout.write(d + ' ')
sys.stdout.flush()
- for x in xrange(10):
+ for x in range(10):
if channels:
time.sleep(0.1)
count += 1
@@ -2674,9 +2618,8 @@
times.append(
(m.group(1), [float(t) for t in m.group(2).split()])
)
- except IOError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
return times
@@ -3031,9 +2974,7 @@
except KeyError:
try:
val = -os.stat(f).st_size
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
perf[f] = -1e9 # file does not exist, tell early
return -1e9
for kw, mul in slow.items():
@@ -3047,7 +2988,7 @@
testdescs.sort(key=sortkey)
-class TestRunner(object):
+class TestRunner:
"""Holds context for executing tests.
Tests rely on a lot of state. This object holds it for them.
@@ -3276,10 +3217,7 @@
osenvironb[b'RUNTESTDIR_FORWARD_SLASH'] = runtestdir.replace(
os.sep.encode('ascii'), b'/'
)
- if PYTHON3:
- sepb = _sys2bytes(os.pathsep)
- else:
- sepb = os.pathsep
+ sepb = _sys2bytes(os.pathsep)
path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
if os.path.islink(__file__):
# test helper will likely be at the end of the symlink
@@ -3331,9 +3269,8 @@
exceptionsdir = os.path.join(self._outputdir, b'exceptions')
try:
os.makedirs(exceptionsdir)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
# Remove all existing exception reports.
for f in os.listdir(exceptionsdir):
@@ -3475,7 +3412,7 @@
failed = False
kws = self.options.keywords
- if kws is not None and PYTHON3:
+ if kws is not None:
kws = kws.encode('utf-8')
suite = TestSuite(
@@ -3553,10 +3490,10 @@
if port is None:
portneeded = 3
# above 100 tries we just give up and let test reports failure
- for tries in xrange(100):
+ for tries in range(100):
allfree = True
port = self.options.port + self._portoffset
- for idx in xrange(portneeded):
+ for idx in range(portneeded):
if not checkportisavailable(port + idx):
allfree = False
break
@@ -3623,14 +3560,10 @@
def _usecorrectpython(self):
"""Configure the environment to use the appropriate Python in tests."""
# Tests must use the same interpreter as us or bad things will happen.
- if WINDOWS and PYTHON3:
+ if WINDOWS:
pyexe_names = [b'python', b'python3', b'python.exe']
- elif WINDOWS:
- pyexe_names = [b'python', b'python.exe']
- elif PYTHON3:
+ else:
pyexe_names = [b'python', b'python3']
- else:
- pyexe_names = [b'python', b'python2']
# os.symlink() is a thing with py3 on Windows, but it requires
# Administrator rights.
@@ -3644,17 +3577,15 @@
if os.readlink(mypython) == sysexecutable:
continue
os.unlink(mypython)
- except OSError as err:
- if err.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
if self._findprogram(pyexename) != sysexecutable:
try:
os.symlink(sysexecutable, mypython)
self._createdfiles.append(mypython)
- except OSError as err:
+ except FileExistsError:
# child processes may race, which is harmless
- if err.errno != errno.EEXIST:
- raise
+ pass
elif WINDOWS and not os.getenv('MSYSTEM'):
raise AssertionError('cannot run test on Windows without MSYSTEM')
else:
@@ -3673,14 +3604,6 @@
f.write(b'%s "$@"\n' % esc_executable)
if WINDOWS:
- if not PYTHON3:
- # lets try to build a valid python3 executable for the
- # scrip that requires it.
- py3exe_name = os.path.join(self._custom_bin_dir, b'python3')
- with open(py3exe_name, 'wb') as f:
- f.write(b'#!/bin/sh\n')
- f.write(b'py -3 "$@"\n')
-
# adjust the path to make sur the main python finds it own dll
path = os.environ['PATH'].split(os.pathsep)
main_exec_dir = os.path.dirname(sysexecutable)
@@ -3693,8 +3616,6 @@
if appdata is not None:
python_dir = 'Python%d%d' % (vi[0], vi[1])
scripts_path = [appdata, 'Python', python_dir, 'Scripts']
- if not PYTHON3:
- scripts_path = [appdata, 'Python', 'Scripts']
scripts_dir = os.path.join(*scripts_path)
extra_paths.append(scripts_dir)
@@ -3738,12 +3659,9 @@
setup_opts = b"--no-rust"
# Run installer in hg root
- script = os.path.realpath(sys.argv[0])
- exe = sysexecutable
- if PYTHON3:
- compiler = _sys2bytes(compiler)
- script = _sys2bytes(script)
- exe = _sys2bytes(exe)
+ compiler = _sys2bytes(compiler)
+ script = _sys2bytes(os.path.realpath(sys.argv[0]))
+ exe = _sys2bytes(sysexecutable)
hgroot = os.path.dirname(os.path.dirname(script))
self._hgroot = hgroot
os.chdir(hgroot)
@@ -3777,9 +3695,8 @@
def makedirs(p):
try:
os.makedirs(p)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
makedirs(self._pythondir)
makedirs(self._bindir)
@@ -3789,16 +3706,12 @@
if not self.options.verbose:
try:
os.remove(installerrs)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ except FileNotFoundError:
+ pass
else:
with open(installerrs, 'rb') as f:
for line in f:
- if PYTHON3:
- sys.stdout.buffer.write(line)
- else:
- sys.stdout.write(line)
+ sys.stdout.buffer.write(line)
sys.exit(1)
os.chdir(self._testdir)
@@ -3831,9 +3744,8 @@
covdir = os.path.join(self._installdir, b'..', b'coverage')
try:
os.mkdir(covdir)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
osenvironb[b'COVERAGE_DIR'] = covdir
@@ -3859,9 +3771,7 @@
return self._hgpath
cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
- cmd = cmd % PYTHON
- if PYTHON3:
- cmd = _bytes2sys(cmd)
+ cmd = _bytes2sys(cmd % PYTHON)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
out, err = p.communicate()
@@ -3891,10 +3801,7 @@
)
out, _err = proc.communicate()
if proc.returncode != 0:
- if PYTHON3:
- sys.stdout.buffer.write(out)
- else:
- sys.stdout.write(out)
+ sys.stdout.buffer.write(out)
sys.exit(1)
def _installrhg(self):
@@ -3918,10 +3825,7 @@
)
out, _err = proc.communicate()
if proc.returncode != 0:
- if PYTHON3:
- sys.stdout.buffer.write(out)
- else:
- sys.stdout.write(out)
+ sys.stdout.buffer.write(out)
sys.exit(1)
def _build_pyoxidized(self):
@@ -3949,10 +3853,7 @@
)
out, _err = proc.communicate()
if proc.returncode != 0:
- if PYTHON3:
- sys.stdout.buffer.write(out)
- else:
- sys.stdout.write(out)
+ sys.stdout.buffer.write(out)
sys.exit(1)
def _outputcoverage(self):
--- a/tests/seq.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/seq.py Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
# seq START STOP [START, STOP] stepping by 1
# seq START STEP STOP [START, STOP] stepping by STEP
-from __future__ import absolute_import, print_function
import os
import sys
@@ -20,9 +19,6 @@
except ImportError:
pass
-if sys.version_info[0] >= 3:
- xrange = range
-
start = 1
if len(sys.argv) > 2:
start = int(sys.argv[1])
@@ -33,5 +29,5 @@
stop = int(sys.argv[-1]) + 1
-for i in xrange(start, stop, step):
+for i in range(start, stop, step):
print(i)
--- a/tests/silenttestrunner.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/silenttestrunner.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
import os
import sys
import unittest
--- a/tests/simplestorerepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/simplestorerepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -10,7 +10,6 @@
# $ HGREPOFEATURES="simplestore" ./run-tests.py \
# --extra-config-opt extensions.simplestore=`pwd`/simplestorerepo.py
-from __future__ import absolute_import
import stat
@@ -71,7 +70,7 @@
@interfaceutil.implementer(repository.irevisiondelta)
@attr.s(slots=True)
-class simplestorerevisiondelta(object):
+class simplestorerevisiondelta:
node = attr.ib()
p1node = attr.ib()
p2node = attr.ib()
@@ -85,14 +84,14 @@
@interfaceutil.implementer(repository.iverifyproblem)
@attr.s(frozen=True)
-class simplefilestoreproblem(object):
+class simplefilestoreproblem:
warning = attr.ib(default=None)
error = attr.ib(default=None)
node = attr.ib(default=None)
@interfaceutil.implementer(repository.ifilestorage)
-class filestorage(object):
+class filestorage:
"""Implements storage for a tracked path.
Data is stored in the VFS in a directory corresponding to the tracked
--- a/tests/sitecustomize.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/sitecustomize.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import os
if os.environ.get('COVERAGE_PROCESS_START'):
--- a/tests/sshprotoext.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/sshprotoext.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
# This extension replaces the SSH server started via `hg serve --stdio`.
# The server behaves differently depending on environment variables.
-from __future__ import absolute_import
from mercurial import (
error,
--- a/tests/svn-safe-append.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/svn-safe-append.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-from __future__ import absolute_import
__doc__ = """Same as `echo a >> b`, but ensures a changed mtime of b.
Without this svn will not detect workspace changes."""
--- a/tests/svnurlof.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/svnurlof.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
import sys
from mercurial import (
--- a/tests/svnxml.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/svnxml.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
# Read the output of a "svn log --xml" command on stdin, parse it and
# print a subset of attributes common to all svn versions tested by
# hg.
-from __future__ import absolute_import
import sys
import xml.dom.minidom
--- a/tests/test-absorb-edit-lines.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-absorb-edit-lines.t Thu Jun 16 15:28:54 2022 +0200
@@ -15,10 +15,10 @@
absorb --edit-lines will run the editor if filename is provided:
- $ hg absorb --edit-lines --apply-changes
+ $ hg absorb --edit-lines
nothing applied
[1]
- $ HGEDITOR=cat hg absorb --edit-lines --apply-changes a
+ $ HGEDITOR=cat hg absorb --edit-lines a
HG: editing a
HG: "y" means the line to the right exists in the changeset to the top
HG:
@@ -43,7 +43,7 @@
> y : f
> yyy : g
> EOF
- $ HGEDITOR='cat editortext >' hg absorb -q --edit-lines --apply-changes a
+ $ HGEDITOR='cat editortext >' hg absorb -q --edit-lines a
$ hg cat -r 0 a
d
e
--- a/tests/test-absorb-filefixupstate.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-absorb-filefixupstate.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,11 +1,9 @@
-from __future__ import absolute_import, print_function
-
import itertools
from mercurial import pycompat
from hgext import absorb
-class simplefctx(object):
+class simplefctx:
def __init__(self, content):
self.content = content
--- a/tests/test-ancestor.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-ancestor.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import binascii
import getopt
import math
@@ -13,15 +11,10 @@
ancestor,
debugcommands,
hg,
- pycompat,
ui as uimod,
util,
)
-if pycompat.ispy3:
- long = int
- xrange = range
-
def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
"""nodes: total number of nodes in the graph
@@ -32,7 +25,7 @@
return value is a graph represented as an adjacency list.
"""
graph = [None] * nodes
- for i in xrange(nodes):
+ for i in range(nodes):
if i == 0 or rng.random() < rootprob:
graph[i] = [nullrev]
elif i == 1:
@@ -55,7 +48,7 @@
def buildancestorsets(graph):
ancs = [None] * len(graph)
- for i in xrange(len(graph)):
+ for i in range(len(graph)):
ancs[i] = {i}
if graph[i] == [nullrev]:
continue
@@ -64,7 +57,7 @@
return ancs
-class naiveincrementalmissingancestors(object):
+class naiveincrementalmissingancestors:
def __init__(self, ancs, bases):
self.ancs = ancs
self.bases = set(bases)
@@ -116,11 +109,11 @@
nerrs[0] += 1
gerrs[0] += 1
- for g in xrange(graphcount):
+ for g in range(graphcount):
graph = buildgraph(rng)
ancs = buildancestorsets(graph)
gerrs = [0]
- for _ in xrange(testcount):
+ for _ in range(testcount):
# start from nullrev to include it as a possibility
graphnodes = range(nullrev, len(graph))
bases = samplerevs(graphnodes)
@@ -130,7 +123,7 @@
# reference slow algorithm
naiveinc = naiveincrementalmissingancestors(ancs, bases)
seq = []
- for _ in xrange(inccount):
+ for _ in range(inccount):
if rng.random() < 0.2:
newbases = samplerevs(graphnodes)
seq.append(('addbases', newbases))
@@ -217,7 +210,7 @@
"""
for i, (bases, revs) in enumerate(
(
- ({1, 2, 3, 4, 7}, set(xrange(10))),
+ ({1, 2, 3, 4, 7}, set(range(10))),
({10}, set({11, 12, 13, 14})),
({7}, set({1, 2, 3, 4, 5})),
)
@@ -454,13 +447,13 @@
opts, args = getopt.getopt(sys.argv[1:], 's:', ['seed='])
for o, a in opts:
if o in ('-s', '--seed'):
- seed = long(a, base=0) # accepts base 10 or 16 strings
+ seed = int(a, base=0) # accepts base 10 or 16 strings
if seed is None:
try:
- seed = long(binascii.hexlify(os.urandom(16)), 16)
+ seed = int(binascii.hexlify(os.urandom(16)), 16)
except AttributeError:
- seed = long(time.time() * 1000)
+ seed = int(time.time() * 1000)
rng = random.Random(seed)
test_missingancestors_explicit()
--- a/tests/test-annotate.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-annotate.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-
import unittest
from mercurial import (
--- a/tests/test-annotate.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-annotate.t Thu Jun 16 15:28:54 2022 +0200
@@ -478,7 +478,6 @@
and its ancestor by overriding "repo._filecommit".
$ cat > ../legacyrepo.py <<EOF
- > from __future__ import absolute_import
> from mercurial import commit, error, extensions
> def _filecommit(orig, repo, fctx, manifest1, manifest2,
> linkrev, tr, includecopymeta, ms):
--- a/tests/test-arbitraryfilectx.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-arbitraryfilectx.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
Setup:
$ cat > eval.py <<EOF
- > from __future__ import absolute_import
> import filecmp
> from mercurial import commands, context, pycompat, registrar
> cmdtable = {}
--- a/tests/test-archive.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-archive.t Thu Jun 16 15:28:54 2022 +0200
@@ -320,7 +320,6 @@
$ TIP=`hg id -v | cut -f1 -d' '`
$ QTIP=`hg id -q`
$ cat > getarchive.py <<EOF
- > from __future__ import absolute_import
> import os
> import sys
> from mercurial import (
@@ -455,7 +454,6 @@
> done
$ cat > md5comp.py <<EOF
- > from __future__ import absolute_import, print_function
> import hashlib
> import sys
> f1, f2 = sys.argv[1:3]
@@ -582,16 +580,11 @@
Strms Blocks Compressed Uncompressed Ratio Check Filename (xz !)
$ rm -f ../archive.txz
#endif
-#if py3 no-lzma
+#if no-lzma
$ hg archive ../archive.txz
abort: lzma module is not available
[255]
#endif
-#if no-py3
- $ hg archive ../archive.txz
- abort: xz compression is only available in Python 3
- [255]
-#endif
show an error when a provided pattern matches no files
@@ -617,7 +610,6 @@
$ hg -R repo add repo/a
$ hg -R repo commit -m '#0' -d '456789012 21600'
$ cat > show_mtime.py <<EOF
- > from __future__ import absolute_import, print_function
> import os
> import sys
> print(int(os.stat(sys.argv[1]).st_mtime))
--- a/tests/test-atomictempfile.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-atomictempfile.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import glob
import os
import shutil
@@ -8,15 +6,11 @@
import unittest
from mercurial import (
- pycompat,
util,
)
atomictempfile = util.atomictempfile
-if pycompat.ispy3:
- xrange = range
-
class testatomictempfile(unittest.TestCase):
def setUp(self):
@@ -70,7 +64,7 @@
# try some times, because reproduction of ambiguity depends on
# "filesystem time"
- for i in xrange(5):
+ for i in range(5):
atomicwrite(False)
oldstat = os.stat(self._filename)
if oldstat[stat.ST_CTIME] != oldstat[stat.ST_MTIME]:
@@ -81,7 +75,7 @@
# repeat atomic write with checkambig=True, to examine
# whether st_mtime is advanced multiple times as expected
- for j in xrange(repetition):
+ for j in range(repetition):
atomicwrite(True)
newstat = os.stat(self._filename)
if oldstat[stat.ST_CTIME] != newstat[stat.ST_CTIME]:
--- a/tests/test-bad-extension.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-bad-extension.t Thu Jun 16 15:28:54 2022 +0200
@@ -54,7 +54,6 @@
$ hg -q help help 2>&1 |grep extension
*** failed to import extension "badext" from $TESTTMP/badext.py: bit bucket overflow
*** failed to import extension "badext2": No module named 'badext2' (py3 !)
- *** failed to import extension "badext2": No module named badext2 (no-py3 !)
show traceback
@@ -63,9 +62,7 @@
Traceback (most recent call last):
Exception: bit bucket overflow
*** failed to import extension "badext2": No module named 'badext2' (py3 !)
- *** failed to import extension "badext2": No module named badext2 (no-py3 !)
Traceback (most recent call last):
- ImportError: No module named badext2 (no-py3 !)
ImportError: No module named 'hgext.badext2' (py3 no-py36 !)
ModuleNotFoundError: No module named 'hgext.badext2' (py36 !)
Traceback (most recent call last): (py3 !)
@@ -114,19 +111,16 @@
YYYY/MM/DD HH:MM:SS (PID)> - loading extension: badext2
YYYY/MM/DD HH:MM:SS (PID)> - could not import hgext.badext2 (No module named *badext2*): trying hgext3rd.badext2 (glob)
Traceback (most recent call last):
- ImportError: No module named badext2 (no-py3 !)
ImportError: No module named 'hgext.badext2' (py3 no-py36 !)
ModuleNotFoundError: No module named 'hgext.badext2' (py36 !)
YYYY/MM/DD HH:MM:SS (PID)> - could not import hgext3rd.badext2 (No module named *badext2*): trying badext2 (glob)
Traceback (most recent call last):
- ImportError: No module named badext2 (no-py3 !)
ImportError: No module named 'hgext.badext2' (py3 no-py36 !)
ModuleNotFoundError: No module named 'hgext.badext2' (py36 !)
Traceback (most recent call last): (py3 !)
ImportError: No module named 'hgext3rd.badext2' (py3 no-py36 !)
ModuleNotFoundError: No module named 'hgext3rd.badext2' (py36 !)
*** failed to import extension "badext2": No module named 'badext2' (py3 !)
- *** failed to import extension "badext2": No module named badext2 (no-py3 !)
Traceback (most recent call last):
ImportError: No module named 'hgext.badext2' (py3 no-py36 !)
ModuleNotFoundError: No module named 'hgext.badext2' (py36 !)
@@ -136,7 +130,6 @@
Traceback (most recent call last): (py3 !)
ModuleNotFoundError: No module named 'badext2' (py36 !)
ImportError: No module named 'badext2' (py3 no-py36 !)
- ImportError: No module named badext2 (no-py3 !)
YYYY/MM/DD HH:MM:SS (PID)> > loaded 2 extensions, total time * (glob)
YYYY/MM/DD HH:MM:SS (PID)> - loading configtable attributes
YYYY/MM/DD HH:MM:SS (PID)> - executing uisetup hooks
@@ -165,7 +158,6 @@
$ hg help --keyword baddocext
*** failed to import extension "badext" from $TESTTMP/badext.py: bit bucket overflow
*** failed to import extension "badext2": No module named 'badext2' (py3 !)
- *** failed to import extension "badext2": No module named badext2 (no-py3 !)
Topics:
extensions Using Additional Features
--- a/tests/test-basic.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-basic.t Thu Jun 16 15:28:54 2022 +0200
@@ -240,15 +240,16 @@
Underlying message streams should be updated when ui.fout/ferr are set:
$ cat <<'EOF' > capui.py
- > from mercurial import pycompat, registrar
+ > import io
+ > from mercurial import registrar
> cmdtable = {}
> command = registrar.command(cmdtable)
> @command(b'capui', norepo=True)
> def capui(ui):
> out = ui.fout
- > ui.fout = pycompat.bytesio()
+ > ui.fout = io.BytesIO()
> ui.status(b'status\n')
- > ui.ferr = pycompat.bytesio()
+ > ui.ferr = io.BytesIO()
> ui.warn(b'warn\n')
> out.write(b'stdout: %s' % ui.fout.getvalue())
> out.write(b'stderr: %s' % ui.ferr.getvalue())
--- a/tests/test-batching.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-batching.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import, print_function
import contextlib
@@ -21,7 +20,7 @@
# equivalent of repo.repository
-class thing(object):
+class thing:
def hello(self):
return b"Ready."
@@ -108,7 +107,7 @@
# server side
# equivalent of wireproto's global functions
-class server(object):
+class server:
def __init__(self, local):
self.local = local
--- a/tests/test-bdiff.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-bdiff.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
import collections
import struct
import unittest
--- a/tests/test-bisect.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-bisect.t Thu Jun 16 15:28:54 2022 +0200
@@ -462,7 +462,6 @@
$ cat > script.py <<EOF
> #!$PYTHON
- > from __future__ import absolute_import
> import sys
> from mercurial import hg, ui as uimod
> repo = hg.repository(uimod.ui.load(), b'.')
--- a/tests/test-blackbox.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-blackbox.t Thu Jun 16 15:28:54 2022 +0200
@@ -403,7 +403,6 @@
when using chg, blackbox.log should get rotated correctly
$ cat > $TESTTMP/noop.py << EOF
- > from __future__ import absolute_import
> import time
> from mercurial import registrar, scmutil
> cmdtable = {}
@@ -463,7 +462,6 @@
blackbox should work if repo.ui.log is not called (issue5518)
$ cat > $TESTTMP/raise.py << EOF
- > from __future__ import absolute_import
> from mercurial import registrar, scmutil
> cmdtable = {}
> command = registrar.command(cmdtable)
--- a/tests/test-bookmarks.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-bookmarks.t Thu Jun 16 15:28:54 2022 +0200
@@ -1069,7 +1069,6 @@
$ echo a > a
$ cat > $TESTTMP/pausefinalize.py <<EOF
- > from __future__ import absolute_import
> import os
> import time
> from mercurial import extensions, localrepo
--- a/tests/test-bugzilla.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-bugzilla.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
mock bugzilla driver for testing template output:
$ cat <<EOF > bzmock.py
- > from __future__ import absolute_import
> from mercurial import extensions
> from mercurial import pycompat
> from mercurial import registrar
--- a/tests/test-bundle-type.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-bundle-type.t Thu Jun 16 15:28:54 2022 +0200
@@ -239,3 +239,30 @@
(see 'hg help bundlespec' for supported values for --type)
[10]
$ cd ..
+
+Test controlling the changegroup version
+
+ $ hg -R t1 bundle --config experimental.changegroup3=yes -a -t v2 ./v2-cg-default.hg
+ 1 changesets found
+ $ hg debugbundle ./v2-cg-default.hg --part-type changegroup
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
+ c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ $ hg debugbundle ./v2-cg-default.hg --spec
+ bzip2-v2
+ $ hg -R t1 bundle --config experimental.changegroup3=yes -a -t 'v2;cg.version=02' ./v2-cg-02.hg
+ 1 changesets found
+ $ hg debugbundle ./v2-cg-02.hg --part-type changegroup
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
+ c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ $ hg debugbundle ./v2-cg-02.hg --spec
+ bzip2-v2
+ $ hg -R t1 bundle --config experimental.changegroup3=yes -a -t 'v2;cg.version=03' ./v2-cg-03.hg
+ 1 changesets found
+ $ hg debugbundle ./v2-cg-03.hg --part-type changegroup
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 03} (mandatory: True)
+ c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ $ hg debugbundle ./v2-cg-03.hg --spec
+ bzip2-v2;cg.version=03
--- a/tests/test-bundle.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-bundle.t Thu Jun 16 15:28:54 2022 +0200
@@ -466,7 +466,6 @@
transaction)
$ cat > $TESTTMP/showtip.py <<EOF
- > from __future__ import absolute_import
>
> def showtip(ui, repo, hooktype, **kwargs):
> ui.warn(b'%s: %s\n' % (hooktype, repo[b'tip'].hex()[:12]))
@@ -1039,3 +1038,28 @@
Test the option that create and no-delta's bundle
$ hg bundle -a --config devel.bundle.delta=full ./full.hg
3 changesets found
+
+Test the debug output when applying delta
+-----------------------------------------
+
+ $ hg init foo
+ $ hg -R foo unbundle ./slim.hg \
+ > --config debug.revlog.debug-delta=yes \
+ > --config storage.revlog.reuse-external-delta=no \
+ > --config storage.revlog.reuse-external-delta-parent=no
+ adding changesets
+ DBG-DELTAS: CHANGELOG: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: CHANGELOG: rev=1: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: CHANGELOG: rev=2: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
+ adding manifests
+ DBG-DELTAS: MANIFESTLOG: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: MANIFESTLOG: rev=1: search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: MANIFESTLOG: rev=2: search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
+ adding file changes
+ DBG-DELTAS: FILELOG:a: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: FILELOG:b: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: FILELOG:c: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ added 3 changesets with 3 changes to 3 files
+ new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
+ (run 'hg update' to get a working copy)
+
--- a/tests/test-bundle2-pushback.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-bundle2-pushback.t Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
> Current bundle2 implementation doesn't provide a way to generate those
> parts, so they must be created by extensions.
> """
- > from __future__ import absolute_import
> from mercurial import bundle2, exchange, pushkey, util
> def _newhandlechangegroup(op, inpart):
> """This function wraps the changegroup part handler for getbundle.
--- a/tests/test-cappedreader.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-cappedreader.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import io
import unittest
--- a/tests/test-cbor.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-cbor.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import os
import sys
import unittest
--- a/tests/test-censor.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-censor.t Thu Jun 16 15:28:54 2022 +0200
@@ -10,10 +10,6 @@
#endif
- $ cat >> $HGRCPATH <<EOF
- > [extensions]
- > censor=
- > EOF
$ cp $HGRCPATH $HGRCPATH.orig
Create repo with unimpeachable content
@@ -81,7 +77,7 @@
(this also tests file pattern matching: path relative to cwd case)
$ mkdir -p foo/bar/baz
- $ hg --cwd foo/bar/baz censor -r $C2 -t "remove password" ../../../target
+ $ hg --config extensions.censor= --cwd foo/bar/baz censor -r $C2 -t "remove password" ../../../target
$ hg cat -r $H1 target | head -n 10
Tainted file is now sanitized
$ hg cat -r $H2 target | head -n 10
@@ -99,7 +95,7 @@
(this also tests file pattern matching: with 'path:' scheme)
- $ hg --cwd foo/bar/baz censor -r $C1 path:target
+ $ hg --config extensions.censor= --cwd foo/bar/baz censor -r $C1 path:target
$ hg cat -r $H1 target | head -n 10
Tainted file is now sanitized
$ hg cat -r $H2 target | head -n 10
@@ -242,7 +238,7 @@
$ echo 'advanced head H1' > target
$ hg ci -m 'advance head H1' target
$ H1=`hg id --debug -i`
- $ hg censor -r $C3 target
+ $ hg --config extensions.censor= censor -r $C3 target
$ hg update -r $H2
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg merge -r $C3
@@ -254,14 +250,14 @@
$ hg update -C -r $H2
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg censor -r $H2 target
+ $ hg --config extensions.censor= censor -r $H2 target
abort: cannot censor file in heads (78a8fc215e79)
(clean/delete and commit first)
[255]
$ echo 'twiddling thumbs' > bystander
$ hg ci -m 'bystander commit'
$ H2=`hg id --debug -i`
- $ hg censor -r "$H2^" target
+ $ hg --config extensions.censor= censor -r "$H2^" target
abort: cannot censor file in heads (efbe78065929)
(clean/delete and commit first)
[255]
@@ -273,7 +269,7 @@
$ H2=`hg id --debug -i`
$ hg update -r "$H2^"
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg censor -r . target
+ $ hg --config extensions.censor= censor -r . target
abort: cannot censor working directory
(clean/delete/update first)
[255]
@@ -286,7 +282,7 @@
$ hg rm target
$ hg ci -m 'delete target so it may be censored'
$ H2=`hg id --debug -i`
- $ hg censor -r $C4 target
+ $ hg --config extensions.censor= censor -r $C4 target
$ hg cat -r $C4 target | head -n 10
$ hg cat -r "$H2^^" target | head -n 10
Tainted file now super sanitized
@@ -314,7 +310,7 @@
$ hg revert -r "$H2^" target
$ hg ci -m 'cleaned 100k passwords'
$ H2=`hg id --debug -i`
- $ hg censor -r $C5 target
+ $ hg --config extensions.censor= censor -r $C5 target
$ hg cat -r $C5 target | head -n 10
$ hg cat -r $H2 target | head -n 10
fresh start
@@ -393,7 +389,7 @@
$ CLEANREV=$H2
$ hg cat -r $REV target | head -n 10
Passwords: hunter2hunter2
- $ hg censor -r $REV target
+ $ hg --config extensions.censor= censor -r $REV target
$ hg cat -r $REV target | head -n 10
$ hg cat -r $CLEANREV target | head -n 10
Re-sanitized; nothing to see here
@@ -503,7 +499,7 @@
Can import bundle where first revision of a file is censored
$ hg init ../rinit
- $ hg censor -r 0 target
+ $ hg --config extensions.censor= censor -r 0 target
$ hg bundle -r 0 --base null ../rinit/initbundle
1 changesets found
$ cd ../rinit
@@ -553,7 +549,7 @@
$ hg cat -r $B1 target | wc -l
*50002 (re)
- $ hg censor -r $B1 target
+ $ hg --config extensions.censor= censor -r $B1 target
$ hg cat -r $B1 target | wc -l
*0 (re)
--- a/tests/test-changelog-exec.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-changelog-exec.t Thu Jun 16 15:28:54 2022 +0200
@@ -51,7 +51,7 @@
$ hg debugindex bar
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 b004912a8510 000000000000 000000000000
$ cd ..
--- a/tests/test-check-code.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-check-code.t Thu Jun 16 15:28:54 2022 +0200
@@ -27,7 +27,6 @@
Skipping contrib/packaging/hgpackaging/cli.py it has no-che?k-code (glob)
Skipping contrib/packaging/hgpackaging/downloads.py it has no-che?k-code (glob)
Skipping contrib/packaging/hgpackaging/inno.py it has no-che?k-code (glob)
- Skipping contrib/packaging/hgpackaging/py2exe.py it has no-che?k-code (glob)
Skipping contrib/packaging/hgpackaging/pyoxidizer.py it has no-che?k-code (glob)
Skipping contrib/packaging/hgpackaging/util.py it has no-che?k-code (glob)
Skipping contrib/packaging/hgpackaging/wix.py it has no-che?k-code (glob)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-check-encoding.t Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,26 @@
+#require test-repo hg10
+
+ $ . "$TESTDIR/helpers-testrepo.sh"
+
+ $ cat > $TESTTMP/check_ascii.py <<EOF
+ > import sys
+ > for file_path in sys.argv[1:]:
+ > with open(file_path, 'br') as f:
+ > try:
+ > f.read().decode('ascii', 'strict')
+ > except UnicodeDecodeError as exc:
+ > print('%s: %s' % (file_path, exc))
+ > EOF
+
+There are some web servers in the wild that can serve static files with an
+incorrect encoding (e.g. https://bz.mercurial-scm.org/show_bug.cgi?id=6559).
+One way to prevent any issues is to not use any non-ASCII characters, e.g.
+URL-encoding them or using HTML entities.
+
+check charset of all tracked files ending in .js
+
+ $ cd "`dirname "$TESTDIR"`"
+
+ $ testrepohg locate 'set:**.js' \
+ > 2>/dev/null \
+ > | xargs "$PYTHON" $TESTTMP/check_ascii.py
--- a/tests/test-check-help.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-check-help.t Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
$ . "$TESTDIR/helpers-testrepo.sh"
$ cat <<'EOF' > scanhelptopics.py
- > from __future__ import absolute_import, print_function
> import re
> import sys
> if sys.platform == "win32":
--- a/tests/test-check-interfaces.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-check-interfaces.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# Test that certain objects conform to well-defined interfaces.
-from __future__ import absolute_import, print_function
from mercurial import encoding
@@ -81,7 +80,7 @@
# Facilitates testing localpeer.
-class dummyrepo(object):
+class dummyrepo:
def __init__(self):
self.ui = uimod.ui()
self._wanted_sidedata = set()
@@ -93,7 +92,7 @@
pass
-class dummyopener(object):
+class dummyopener:
handlers = []
@@ -109,7 +108,7 @@
pass
-class dummypipe(object):
+class dummypipe:
def close(self):
pass
--- a/tests/test-check-module-imports.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-check-module-imports.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,4 @@
-#require test-repo
+#require test-repo hg10
$ . "$TESTDIR/helpers-testrepo.sh"
$ import_checker="$TESTDIR"/../contrib/import-checker.py
--- a/tests/test-check-py3-compat.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-check-py3-compat.t Thu Jun 16 15:28:54 2022 +0200
@@ -3,35 +3,7 @@
$ . "$TESTDIR/helpers-testrepo.sh"
$ cd "$TESTDIR"/..
-#if no-py3
- $ testrepohg files 'set:(**.py)' \
- > -X contrib/automation/ \
- > -X contrib/packaging/hgpackaging/ \
- > -X contrib/packaging/inno/ \
- > -X contrib/packaging/packaging.py \
- > -X contrib/packaging/wix/ \
- > -X hgdemandimport/demandimportpy2.py \
- > -X mercurial/thirdparty/cbor \
- > | sed 's|\\|/|g' | xargs "$PYTHON" contrib/check-py3-compat.py
- contrib/python-zstandard/setup.py not using absolute_import
- contrib/python-zstandard/setup_zstd.py not using absolute_import
- contrib/python-zstandard/tests/common.py not using absolute_import
- contrib/python-zstandard/tests/test_buffer_util.py not using absolute_import
- contrib/python-zstandard/tests/test_compressor.py not using absolute_import
- contrib/python-zstandard/tests/test_compressor_fuzzing.py not using absolute_import
- contrib/python-zstandard/tests/test_data_structures.py not using absolute_import
- contrib/python-zstandard/tests/test_data_structures_fuzzing.py not using absolute_import
- contrib/python-zstandard/tests/test_decompressor.py not using absolute_import
- contrib/python-zstandard/tests/test_decompressor_fuzzing.py not using absolute_import
- contrib/python-zstandard/tests/test_estimate_sizes.py not using absolute_import
- contrib/python-zstandard/tests/test_module_attributes.py not using absolute_import
- contrib/python-zstandard/tests/test_train_dictionary.py not using absolute_import
- setup.py not using absolute_import
-#endif
-
-#if py3
$ testrepohg files 'set:(**.py) - grep(pygments)' \
- > -X hgdemandimport/demandimportpy2.py \
> -X hgext/fsmonitor/pywatchman \
> -X mercurial/cffi \
> -X mercurial/thirdparty \
@@ -44,9 +16,8 @@
mercurial/windows.py: error importing: <*Error> No module named 'msvcrt' (error at windows.py:*) (glob) (no-windows !)
mercurial/posix.py: error importing: <*Error> No module named 'fcntl' (error at posix.py:*) (glob) (windows !)
mercurial/scmposix.py: error importing: <*Error> No module named 'fcntl' (error at scmposix.py:*) (glob) (windows !)
-#endif
-#if py3 pygments
+#if pygments
$ testrepohg files 'set:(**.py) and grep(pygments)' | sed 's|\\|/|g' \
> | xargs "$PYTHON" contrib/check-py3-compat.py \
> | sed 's/[0-9][0-9]*)$/*)/'
--- a/tests/test-check-pyflakes.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-check-pyflakes.t Thu Jun 16 15:28:54 2022 +0200
@@ -15,13 +15,18 @@
$ testrepohg locate 'set:**.py or grep("^#!.*python")' \
> -X hgext/fsmonitor/pywatchman \
- > -X mercurial/pycompat.py -X contrib/python-zstandard \
+ > -X contrib/python-zstandard \
> -X mercurial/thirdparty \
> 2>/dev/null \
> | xargs "$PYTHON" -m pyflakes 2>/dev/null | "$TESTDIR/filterpyflakes.py"
contrib/perf.py:*:* undefined name 'xrange' (glob) (?)
- mercurial/hgweb/server.py:*:* undefined name 'reload' (glob) (?)
- mercurial/util.py:*:* undefined name 'file' (glob) (?)
- mercurial/encoding.py:*:* undefined name 'localstr' (glob) (?)
- tests/run-tests.py:*:* undefined name 'PermissionError' (glob) (?)
+ mercurial/pycompat.py:*:* 'codecs' imported but unused (glob)
+ mercurial/pycompat.py:*:* 'concurrent.futures' imported but unused (glob)
+ mercurial/pycompat.py:*:* 'http.client as httplib' imported but unused (glob)
+ mercurial/pycompat.py:*:* 'http.cookiejar as cookielib' imported but unused (glob)
+ mercurial/pycompat.py:*:* 'io' imported but unused (glob)
+ mercurial/pycompat.py:*:* 'queue' imported but unused (glob)
+ mercurial/pycompat.py:*:* 'socketserver' imported but unused (glob)
+ mercurial/pycompat.py:*:* 'xmlrpc.client as xmlrpclib' imported but unused (glob)
+ mercurial/util.py:*:* 'pickle' imported but unused (glob)
--- a/tests/test-check-pylint.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-check-pylint.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,4 @@
-#require test-repo pylint hg10
+#require test-repo pylint
Run pylint for known rules we care about.
-----------------------------------------
--- a/tests/test-check-pytype.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-check-pytype.t Thu Jun 16 15:28:54 2022 +0200
@@ -30,7 +30,6 @@
mercurial/testing/storage.py # tons of [attribute-error]
mercurial/ui.py # [attribute-error], [wrong-arg-types]
mercurial/unionrepo.py # ui, svfs, unfiltered [attribute-error]
-mercurial/utils/memorytop.py # not 3.6 compatible
mercurial/win32.py # [not-callable]
mercurial/wireprotoframing.py # [unsupported-operands], [attribute-error], [import-error]
mercurial/wireprotov1peer.py # [attribute-error]
@@ -39,7 +38,7 @@
TODO: use --no-cache on test server? Caching the files locally helps during
development, but may be a hinderance for CI testing.
- $ pytype -V 3.6 --keep-going --jobs auto mercurial \
+ $ pytype -V 3.7 --keep-going --jobs auto mercurial \
> -x mercurial/bundlerepo.py \
> -x mercurial/context.py \
> -x mercurial/crecord.py \
@@ -62,7 +61,6 @@
> -x mercurial/thirdparty \
> -x mercurial/ui.py \
> -x mercurial/unionrepo.py \
- > -x mercurial/utils/memorytop.py \
> -x mercurial/win32.py \
> -x mercurial/wireprotoframing.py \
> -x mercurial/wireprotov1peer.py \
--- a/tests/test-chg.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-chg.t Thu Jun 16 15:28:54 2022 +0200
@@ -132,7 +132,6 @@
> EOF
$ cat > $TESTTMP/fakepager.py <<EOF
- > from __future__ import absolute_import
> import sys
> import time
> for line in iter(sys.stdin.readline, ''):
--- a/tests/test-clone-r.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-clone-r.t Thu Jun 16 15:28:54 2022 +0200
@@ -45,19 +45,19 @@
3 0000 8 3 2 -1 19b1fc555737
$ hg debugindex adifferentfile
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 7 2565f3199a74 000000000000 000000000000
$ hg debugindex anotherfile
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 8 2565f3199a74 000000000000 000000000000
$ hg debugindex fred
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 6 12ab3bcc5ea4 000000000000 000000000000
$ hg debugindex --manifest
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 43eadb1d2d06 000000000000 000000000000
1 1 8b89697eba2c 43eadb1d2d06 000000000000
2 2 626a32663c2f 8b89697eba2c 000000000000
--- a/tests/test-commandserver.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-commandserver.t Thu Jun 16 15:28:54 2022 +0200
@@ -23,7 +23,6 @@
$ hg init repo
$ cd repo
- >>> from __future__ import absolute_import
>>> import os
>>> import sys
>>> from hgclient import bprint, check, readchannel, runcommand
--- a/tests/test-commit-amend.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-commit-amend.t Thu Jun 16 15:28:54 2022 +0200
@@ -1203,7 +1203,7 @@
R olddirname/commonfile.py
R olddirname/newfile.py
$ hg debugindex newdirname/newfile.py
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 3 34a4d536c0c0 000000000000 000000000000
$ echo a >> newdirname/commonfile.py
@@ -1211,7 +1211,7 @@
$ hg debugrename newdirname/newfile.py
newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def
$ hg debugindex newdirname/newfile.py
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 3 34a4d536c0c0 000000000000 000000000000
#if execbit
--- a/tests/test-commit-interactive.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-commit-interactive.t Thu Jun 16 15:28:54 2022 +0200
@@ -938,7 +938,6 @@
$ export LANGUAGE
$ cat > $TESTTMP/escape.py <<EOF
- > from __future__ import absolute_import
> from mercurial import (
> pycompat,
> )
--- a/tests/test-commit.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-commit.t Thu Jun 16 15:28:54 2022 +0200
@@ -627,7 +627,7 @@
$ hg debugrename foo
foo renamed from bar:26d3ca0dfd18e44d796b564e38dd173c9668d3a9
$ hg debugindex bar
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 26d3ca0dfd18 000000000000 000000000000
1 1 d267bddd54f7 26d3ca0dfd18 000000000000
@@ -645,7 +645,6 @@
verify pathauditor blocks evil filepaths
$ cat > evil-commit.py <<EOF
- > from __future__ import absolute_import
> from mercurial import context, hg, ui as uimod
> notrc = u".h\u200cg".encode('utf-8') + b'/hgrc'
> u = uimod.ui.load()
@@ -671,7 +670,6 @@
$ hg rollback -f
repository tip rolled back to revision 2 (undo commit)
$ cat > evil-commit.py <<EOF
- > from __future__ import absolute_import
> from mercurial import context, hg, ui as uimod
> notrc = b"HG~1/hgrc"
> u = uimod.ui.load()
@@ -691,7 +689,6 @@
$ hg rollback -f
repository tip rolled back to revision 2 (undo commit)
$ cat > evil-commit.py <<EOF
- > from __future__ import absolute_import
> from mercurial import context, hg, ui as uimod
> notrc = b"HG8B6C~2/hgrc"
> u = uimod.ui.load()
--- a/tests/test-completion.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-completion.t Thu Jun 16 15:28:54 2022 +0200
@@ -74,7 +74,9 @@
Show debug commands if there are no other candidates
$ hg debugcomplete debug
+ debug-delta-find
debug-repair-issue6528
+ debug-revlog-index
debugancestor
debugantivirusrunning
debugapplystreamclonebundle
@@ -94,6 +96,7 @@
debugdate
debugdeltachain
debugdirstate
+ debugdirstateignorepatternshash
debugdiscovery
debugdownload
debugextensions
@@ -102,7 +105,6 @@
debugfsinfo
debuggetbundle
debugignore
- debugindex
debugindexdot
debugindexstats
debuginstall
@@ -266,7 +268,9 @@
config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
continue: dry-run
copy: forget, after, at-rev, force, include, exclude, dry-run
+ debug-delta-find: changelog, manifest, dir, template
debug-repair-issue6528: to-report, from-report, paranoid, dry-run
+ debug-revlog-index: changelog, manifest, dir, template
debugancestor:
debugantivirusrunning:
debugapplystreamclonebundle:
@@ -284,6 +288,7 @@
debugdata: changelog, manifest, dir
debugdate: extended
debugdeltachain: changelog, manifest, dir, template
+ debugdirstateignorepatternshash:
debugdirstate: nodates, dates, datesort, docket, all
debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
debugdownload: output
@@ -293,7 +298,6 @@
debugfsinfo:
debuggetbundle: head, common, type
debugignore:
- debugindex: changelog, manifest, dir, template
debugindexdot: changelog, manifest, dir
debugindexstats:
debuginstall: template
--- a/tests/test-config-env.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-config-env.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# Test the config layer generated by environment variables
-from __future__ import absolute_import, print_function
import os
--- a/tests/test-context-metadata.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-context-metadata.t Thu Jun 16 15:28:54 2022 +0200
@@ -12,7 +12,6 @@
$ hg commit -m 'Remove A'
$ cat > metaedit.py <<EOF
- > from __future__ import absolute_import
> from mercurial import context, pycompat, registrar
> cmdtable = {}
> command = registrar.command(cmdtable)
--- a/tests/test-context.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-context.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
import os
import stat
import sys
@@ -126,8 +125,6 @@
# R bar-r
# C foo
-from mercurial import scmutil
-
print('== checking workingctx.status:')
wctx = repo[None]
--- a/tests/test-contrib-check-code.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-contrib-check-code.t Thu Jun 16 15:28:54 2022 +0200
@@ -51,12 +51,6 @@
./quote.py:5:
> '"""', 42+1, """and
missing whitespace in expression
- ./classstyle.py:4:
- > class oldstyle_class:
- old-style class, use class foo(object)
- ./classstyle.py:7:
- > class empty():
- class foo() creates old style object, use class foo(object)
[1]
$ cat > python3-compat.py << NO_CHECK_EOF
> foo <> bar
--- a/tests/test-contrib-perf.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-contrib-perf.t Thu Jun 16 15:28:54 2022 +0200
@@ -301,7 +301,6 @@
malformatted run limit entry, missing "-": 500
! wall * comb * user * sys * (best of 5) (glob)
$ hg perfparents --config perf.stub=no --config perf.run-limits='aaa-12, 0.000000001-5'
- malformatted run limit entry, could not convert string to float: aaa: aaa-12 (no-py3 !)
malformatted run limit entry, could not convert string to float: 'aaa': aaa-12 (py3 !)
! wall * comb * user * sys * (best of 5) (glob)
$ hg perfparents --config perf.stub=no --config perf.run-limits='12-aaaaaa, 0.000000001-5'
--- a/tests/test-convert-clonebranches.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-convert-clonebranches.t Thu Jun 16 15:28:54 2022 +0200
@@ -31,7 +31,6 @@
Miss perl... sometimes
$ cat > filter.py <<EOF
- > from __future__ import absolute_import
> import re
> import sys
>
--- a/tests/test-convert-git.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-convert-git.t Thu Jun 16 15:28:54 2022 +0200
@@ -435,7 +435,7 @@
$ cd git-repo3-hg
$ hg up -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ "$PYTHON" -c 'from __future__ import print_function; print(len(open("b", "rb").read()))'
+ $ "$PYTHON" -c 'print(len(open("b", "rb").read()))'
4096
$ cd ..
--- a/tests/test-copies-chain-merge.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-copies-chain-merge.t Thu Jun 16 15:28:54 2022 +0200
@@ -511,7 +511,7 @@
$ hg mv --force i d
$ hg commit -m "f-2: rename i -> d"
$ hg debugindex d | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * d8252ab2e760 000000000000 000000000000 (no-changeset !)
0 * ae258f702dfe 000000000000 000000000000 (changeset !)
1 * b004912a8510 000000000000 000000000000
@@ -567,7 +567,7 @@
$ hg mv --force x t
$ hg commit -m "r-2: rename t -> x"
$ hg debugindex t | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * d74efbf65309 000000000000 000000000000 (no-changeset !)
1 * 02a930b9d7ad 000000000000 000000000000 (no-changeset !)
0 * 5aed6a8dbff0 000000000000 000000000000 (changeset !)
@@ -934,7 +934,7 @@
2 files updated, 0 files merged, 2 files removed, 0 files unresolved
#if no-changeset
$ hg debugindex d | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * d8252ab2e760 000000000000 000000000000
1 * b004912a8510 000000000000 000000000000
2 * 7b79e2fe0c89 000000000000 000000000000
@@ -945,7 +945,7 @@
7 * d55cb4e9ef57 000000000000 000000000000
#else
$ hg debugindex d | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * ae258f702dfe 000000000000 000000000000
1 * b004912a8510 000000000000 000000000000
2 * 5cce88bf349f ae258f702dfe 000000000000
@@ -979,7 +979,7 @@
cea2d99c0fde64672ef61953786fdff34f16e230 644 d (changeset !)
#if no-changeset
$ hg debugindex d | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * d8252ab2e760 000000000000 000000000000
1 * b004912a8510 000000000000 000000000000
2 * 7b79e2fe0c89 000000000000 000000000000
@@ -991,7 +991,7 @@
8 * 1c334238bd42 7b79e2fe0c89 000000000000
#else
$ hg debugindex d | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * ae258f702dfe 000000000000 000000000000
1 * b004912a8510 000000000000 000000000000
2 * 5cce88bf349f ae258f702dfe 000000000000
@@ -1661,9 +1661,7 @@
added: exp-changelog-v2, exp-copies-sidedata-changeset
processed revlogs:
- - all-filelogs
- changelog
- - manifest
#endif
@@ -1689,9 +1687,7 @@
added: exp-changelog-v2, exp-copies-sidedata-changeset
processed revlogs:
- - all-filelogs
- changelog
- - manifest
#endif
@@ -2406,7 +2402,7 @@
d8252ab2e760b0d4e5288fd44cbd15a0fa567e16 644 d (no-changeset !)
ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 d (changeset !)
$ hg debugindex d | head -n 4 | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * d8252ab2e760 000000000000 000000000000 (no-changeset !)
0 * ae258f702dfe 000000000000 000000000000 (changeset !)
1 * b004912a8510 000000000000 000000000000
@@ -2479,7 +2475,7 @@
$ hg manifest --debug --rev 'desc("e-2")' | grep '644 f'
e8825b386367b29fec957283a80bb47b47483fe1 644 f
$ hg debugindex f | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * b76eb76580df 000000000000 000000000000
1 * e8825b386367 000000000000 000000000000
2 * 2ff93c643948 b76eb76580df e8825b386367
@@ -2495,7 +2491,7 @@
$ hg manifest --debug --rev 'desc("e-2")' | grep '644 f'
ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f
$ hg debugindex f | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * ae258f702dfe 000000000000 000000000000
1 * d3613c1ec831 ae258f702dfe 000000000000
2 * 05e03c868bbc ae258f702dfe 000000000000
@@ -3067,7 +3063,7 @@
$ hg manifest --debug --rev 'desc("q-2")' | grep '644 v'
c43c088b811fd27983c0a9aadf44f3343cd4cd7e 644 v
$ hg debugindex v | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * 3f91841cd75c 000000000000 000000000000
1 * c43c088b811f 000000000000 000000000000
2 * 0946c662ef16 3f91841cd75c c43c088b811f
@@ -3082,7 +3078,7 @@
$ hg manifest --debug --rev 'desc("q-2")' | grep '644 v'
a38b2fa170219750dac9bc7d19df831f213ba708 644 v
$ hg debugindex v | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * 5aed6a8dbff0 000000000000 000000000000
1 * a38b2fa17021 000000000000 000000000000
2 * 65fde9f6e4d4 5aed6a8dbff0 a38b2fa17021
@@ -3365,7 +3361,7 @@
$ hg manifest --debug --rev 'desc("e-2")' | grep '644 f'
e8825b386367b29fec957283a80bb47b47483fe1 644 f
$ hg debugindex f | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * b76eb76580df 000000000000 000000000000
1 * e8825b386367 000000000000 000000000000
2 * 2ff93c643948 b76eb76580df e8825b386367
@@ -3381,7 +3377,7 @@
$ hg manifest --debug --rev 'desc("e-2")' | grep '644 f'
ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f
$ hg debugindex f | "$PYTHON" ../no-linkrev
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 * ae258f702dfe 000000000000 000000000000
1 * d3613c1ec831 ae258f702dfe 000000000000
2 * 05e03c868bbc ae258f702dfe 000000000000
--- a/tests/test-copies-in-changeset.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-copies-in-changeset.t Thu Jun 16 15:28:54 2022 +0200
@@ -121,13 +121,13 @@
#if extra
$ hg debugindex c
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 b789fdd96dc2 000000000000 000000000000
#else
$ hg debugindex c
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 37d9b5d994ea 000000000000 000000000000
#endif
@@ -155,13 +155,13 @@
#if extra
$ hg debugindex c
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 b789fdd96dc2 000000000000 000000000000
#else
$ hg debugindex c
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 37d9b5d994ea 000000000000 000000000000
1 3 029625640347 000000000000 000000000000
@@ -434,14 +434,21 @@
$ cat << EOF > .hg/hgrc
> [format]
> exp-use-copies-side-data-changeset = no
- > [experimental]
- > revlogv2 = enable-unstable-format-and-corrupt-my-data
> EOF
- $ hg debugupgraderepo --run --quiet --no-backup > /dev/null
+ $ hg debugupgraderepo --run --quiet --no-backup
+ upgrade will perform the following actions:
+
+ requirements
+ preserved: * (glob)
+ removed: exp-changelog-v2, exp-copies-sidedata-changeset
+
+ processed revlogs:
+ - changelog
+
$ hg debugformat -v | egrep 'format-variant|revlog-v2|copies-sdc|changelog-v2'
format-variant repo config default
copies-sdc: no no no
- revlog-v2: yes yes no
+ revlog-v2: no no no
changelog-v2: no no no
$ hg debugsidedata -c -- 0
$ hg debugsidedata -c -- 1
@@ -453,7 +460,16 @@
> [format]
> exp-use-copies-side-data-changeset = yes
> EOF
- $ hg debugupgraderepo --run --quiet --no-backup > /dev/null
+ $ hg debugupgraderepo --run --quiet --no-backup
+ upgrade will perform the following actions:
+
+ requirements
+ preserved: * (glob)
+ added: exp-changelog-v2, exp-copies-sidedata-changeset
+
+ processed revlogs:
+ - changelog
+
$ hg debugformat -v | egrep 'format-variant|revlog-v2|copies-sdc|changelog-v2'
format-variant repo config default
copies-sdc: yes yes no
--- a/tests/test-copy.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-copy.t Thu Jun 16 15:28:54 2022 +0200
@@ -61,7 +61,7 @@
this should show a revision linked to changeset 0
$ hg debugindex a
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b789fdd96dc2 000000000000 000000000000
we should see one log entry for b
@@ -77,7 +77,7 @@
this should show a revision linked to changeset 1
$ hg debugindex b
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 37d9b5d994ea 000000000000 000000000000
this should show the rename information in the metadata
@@ -187,7 +187,7 @@
should match
$ hg debugindex foo
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 2ed2a3912a0b 000000000000 000000000000
$ hg debugrename bar
bar renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
@@ -217,13 +217,13 @@
should show no parents for tip
$ hg debugindex bar
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 7711d36246cc 000000000000 000000000000
1 2 bdf70a2b8d03 7711d36246cc 000000000000
2 3 b2558327ea8d 000000000000 000000000000
should match
$ hg debugindex foo
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 2ed2a3912a0b 000000000000 000000000000
1 2 dd12c926cf16 2ed2a3912a0b 000000000000
$ hg debugrename bar
--- a/tests/test-debugcommands.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-debugcommands.t Thu Jun 16 15:28:54 2022 +0200
@@ -148,31 +148,31 @@
#endif
$ hg debugindex -c
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 07f494440405 000000000000 000000000000
1 1 8cccb4b5fec2 07f494440405 000000000000
2 2 b1e228c512c5 8cccb4b5fec2 000000000000
$ hg debugindex -c --debug
- rev linkrev nodeid p1 p2
- 0 0 07f4944404050f47db2e5c5071e0e84e7a27bba9 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
- 1 1 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a 07f4944404050f47db2e5c5071e0e84e7a27bba9 0000000000000000000000000000000000000000
- 2 2 b1e228c512c5d7066d70562ed839c3323a62d6d2 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a 0000000000000000000000000000000000000000
+ rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
+ 0 -1 0 07f4944404050f47db2e5c5071e0e84e7a27bba9 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 57 0 0 2 0 58 inline 0 0
+ 1 -1 1 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a 0 07f4944404050f47db2e5c5071e0e84e7a27bba9 -1 0000000000000000000000000000000000000000 66 1 0 2 58 67 inline 0 0
+ 2 -1 2 b1e228c512c5d7066d70562ed839c3323a62d6d2 1 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a -1 0000000000000000000000000000000000000000 65 2 0 2 125 66 inline 0 0
$ hg debugindex -m
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 a0c8bcbbb45c 000000000000 000000000000
1 1 57faf8a737ae a0c8bcbbb45c 000000000000
2 2 a35b10320954 57faf8a737ae 000000000000
$ hg debugindex -m --debug
- rev linkrev nodeid p1 p2
- 0 0 a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
- 1 1 57faf8a737ae7faf490582941a82319ba6529dca a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 0000000000000000000000000000000000000000
- 2 2 a35b103209548032201c16c7688cb2657f037a38 57faf8a737ae7faf490582941a82319ba6529dca 0000000000000000000000000000000000000000
+ rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
+ 0 -1 0 a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 43 0 0 2 0 44 inline 0 0
+ 1 -1 1 57faf8a737ae7faf490582941a82319ba6529dca 0 a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 -1 0000000000000000000000000000000000000000 0 1 0 2 44 0 inline 0 0
+ 2 -1 2 a35b103209548032201c16c7688cb2657f037a38 1 57faf8a737ae7faf490582941a82319ba6529dca -1 0000000000000000000000000000000000000000 43 2 0 2 44 44 inline 0 0
$ hg debugindex a
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b789fdd96dc2 000000000000 000000000000
$ hg debugindex --debug a
- rev linkrev nodeid p1 p2
- 0 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
+ rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
+ 0 -1 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 2 0 0 2 0 3 inline 0 0
debugdelta chain basic output
@@ -197,10 +197,10 @@
#if reporevlogstore no-pure
$ hg debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
- 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
- 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
- 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
+ 0 -1 -1 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
+ 1 0 -1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
+ 2 1 -1 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
$ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
0 1 1
@@ -212,7 +212,6 @@
{
"chainid": 1,
"chainlen": 1,
- "chainratio": 1.02325581395, (no-py3 !)
"chainratio": 1.0232558139534884, (py3 !)
"chainsize": 44,
"compsize": 44,
@@ -221,6 +220,8 @@
"extraratio": 0.0,
"largestblock": 44,
"lindist": 44,
+ "p1": -1,
+ "p2": -1,
"prevrev": -1,
"readdensity": 1.0,
"readsize": 44,
@@ -239,6 +240,8 @@
"extraratio": 0,
"largestblock": 0,
"lindist": 0,
+ "p1": 0,
+ "p2": -1,
"prevrev": -1,
"readdensity": 1,
"readsize": 0,
@@ -249,7 +252,6 @@
{
"chainid": 3,
"chainlen": 1,
- "chainratio": 1.02325581395, (no-py3 !)
"chainratio": 1.0232558139534884, (py3 !)
"chainsize": 44,
"compsize": 44,
@@ -258,6 +260,8 @@
"extraratio": 0.0,
"largestblock": 44,
"lindist": 44,
+ "p1": 1,
+ "p2": -1,
"prevrev": -1,
"readdensity": 1.0,
"readsize": 44,
@@ -274,10 +278,10 @@
> sparse-read = True
> EOF
$ hg debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
- 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
- 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
- 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
+ 0 -1 -1 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
+ 1 0 -1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
+ 2 1 -1 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
$ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
0 1 1 44 44 1.0
@@ -289,7 +293,6 @@
{
"chainid": 1,
"chainlen": 1,
- "chainratio": 1.02325581395, (no-py3 !)
"chainratio": 1.0232558139534884, (py3 !)
"chainsize": 44,
"compsize": 44,
@@ -298,6 +301,8 @@
"extraratio": 0.0,
"largestblock": 44,
"lindist": 44,
+ "p1": -1,
+ "p2": -1,
"prevrev": -1,
"readdensity": 1.0,
"readsize": 44,
@@ -316,6 +321,8 @@
"extraratio": 0,
"largestblock": 0,
"lindist": 0,
+ "p1": 0,
+ "p2": -1,
"prevrev": -1,
"readdensity": 1,
"readsize": 0,
@@ -326,7 +333,6 @@
{
"chainid": 3,
"chainlen": 1,
- "chainratio": 1.02325581395, (no-py3 !)
"chainratio": 1.0232558139534884, (py3 !)
"chainsize": 44,
"compsize": 44,
@@ -335,6 +341,8 @@
"extraratio": 0.0,
"largestblock": 44,
"lindist": 44,
+ "p1": 1,
+ "p2": -1,
"prevrev": -1,
"readdensity": 1.0,
"readsize": 44,
@@ -574,7 +582,6 @@
Test internal debugstacktrace command
$ cat > debugstacktrace.py << EOF
- > from __future__ import absolute_import
> from mercurial import (
> util,
> )
@@ -593,15 +600,15 @@
> EOF
$ "$PYTHON" debugstacktrace.py
stacktrace at:
- *debugstacktrace.py:16 in * (glob)
- *debugstacktrace.py:9 in f (glob)
+ *debugstacktrace.py:15 in * (glob)
+ *debugstacktrace.py:8 in f (glob)
hello from g at:
- *debugstacktrace.py:16 in * (glob)
- *debugstacktrace.py:10 in f (glob)
+ *debugstacktrace.py:15 in * (glob)
+ *debugstacktrace.py:9 in f (glob)
hi ...
from h hidden in g at:
- *debugstacktrace.py:10 in f (glob)
- *debugstacktrace.py:13 in g (glob)
+ *debugstacktrace.py:9 in f (glob)
+ *debugstacktrace.py:12 in g (glob)
Test debugcapabilities command:
--- a/tests/test-demandimport.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-demandimport.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
from mercurial import demandimport
demandimport.enable()
--- a/tests/test-dirs.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-dirs.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import unittest
import silenttestrunner
--- a/tests/test-dirstate.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-dirstate.t Thu Jun 16 15:28:54 2022 +0200
@@ -77,7 +77,6 @@
coherent (issue4353)
$ cat > ../dirstateexception.py <<EOF
- > from __future__ import absolute_import
> from mercurial import (
> error,
> extensions,
--- a/tests/test-dispatch.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-dispatch.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
import os
import sys
from mercurial import dispatch
--- a/tests/test-doctest.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-doctest.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,5 @@
# this is hack to make sure no escape characters are inserted into the output
-from __future__ import absolute_import
-from __future__ import print_function
import doctest
import os
--- a/tests/test-duplicateoptions.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-duplicateoptions.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
import os
from mercurial import (
commands,
--- a/tests/test-encoding-func.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-encoding-func.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import unittest
from mercurial import encoding
--- a/tests/test-eol.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-eol.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
Set up helpers
$ cat > switch-eol.py <<'EOF'
- > from __future__ import absolute_import
> import os
> import sys
> try:
--- a/tests/test-excessive-merge.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-excessive-merge.t Thu Jun 16 15:28:54 2022 +0200
@@ -64,7 +64,7 @@
summary: test
$ hg debugindex --changelog
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 5e0375449e74 000000000000 000000000000
1 1 96155394af80 5e0375449e74 000000000000
2 2 92cc4c306b19 5e0375449e74 000000000000
@@ -89,7 +89,7 @@
79d7492df40aa0fa093ec4209be78043c181f094 644 b
$ hg debugindex a
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 2ed2a3912a0b 000000000000 000000000000
1 1 79d7492df40a 2ed2a3912a0b 000000000000
--- a/tests/test-extension.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-extension.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,16 +1,4 @@
Test basic extension support
- $ cat > unflush.py <<EOF
- > import sys
- > from mercurial import pycompat
- > if pycompat.ispy3:
- > # no changes required
- > sys.exit(0)
- > with open(sys.argv[1], 'rb') as f:
- > data = f.read()
- > with open(sys.argv[1], 'wb') as f:
- > f.write(data.replace(b', flush=True', b''))
- > EOF
-
$ cat > foobar.py <<EOF
> import os
> from mercurial import commands, exthelper, registrar
@@ -150,7 +138,6 @@
Check that extensions are loaded in phases:
$ cat > foo.py <<EOF
- > from __future__ import print_function
> import os
> from mercurial import exthelper
> from mercurial.utils import procutil
@@ -190,7 +177,6 @@
> def custompredicate(repo, subset, x):
> return smartset.baseset([r for r in subset if r in {0}])
> EOF
- $ "$PYTHON" $TESTTMP/unflush.py foo.py
$ cp foo.py bar.py
$ echo 'foo = foo.py' >> $HGRCPATH
@@ -295,7 +281,6 @@
$ echo "s = 'libroot/mod/ambig.py'" > $TESTTMP/libroot/mod/ambig.py
$ cat > $TESTTMP/libroot/mod/ambigabs.py <<NO_CHECK_EOF
- > from __future__ import absolute_import, print_function
> import ambig # should load "libroot/ambig.py"
> s = ambig.s
> NO_CHECK_EOF
@@ -304,28 +289,10 @@
> def extsetup(ui):
> print('ambigabs.s=%s' % ambigabs.s, flush=True)
> NO_CHECK_EOF
- $ "$PYTHON" $TESTTMP/unflush.py loadabs.py
$ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}/libroot; hg --config extensions.loadabs=loadabs.py root)
ambigabs.s=libroot/ambig.py
$TESTTMP/a
-#if no-py3
- $ cat > $TESTTMP/libroot/mod/ambigrel.py <<NO_CHECK_EOF
- > from __future__ import print_function
- > import ambig # should load "libroot/mod/ambig.py"
- > s = ambig.s
- > NO_CHECK_EOF
- $ cat > loadrel.py <<NO_CHECK_EOF
- > import mod.ambigrel as ambigrel
- > def extsetup(ui):
- > print('ambigrel.s=%s' % ambigrel.s, flush=True)
- > NO_CHECK_EOF
- $ "$PYTHON" $TESTTMP/unflush.py loadrel.py
- $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}/libroot; hg --config extensions.loadrel=loadrel.py root)
- ambigrel.s=libroot/mod/ambig.py
- $TESTTMP/a
-#endif
-
Check absolute/relative import of extension specific modules
$ mkdir $TESTTMP/extroot
@@ -340,7 +307,6 @@
> s = b'this is extroot.sub1.baz'
> NO_CHECK_EOF
$ cat > $TESTTMP/extroot/__init__.py <<NO_CHECK_EOF
- > from __future__ import absolute_import
> s = b'this is extroot.__init__'
> from . import foo
> def extsetup(ui):
@@ -377,39 +343,6 @@
(extroot) import extroot.bar in func(): this is extroot.bar
$TESTTMP/a
-#if no-py3
- $ rm "$TESTTMP"/extroot/foo.*
- $ rm -Rf "$TESTTMP/extroot/__pycache__"
- $ cat > $TESTTMP/extroot/foo.py <<NO_CHECK_EOF
- > # test relative import
- > buf = []
- > def func():
- > # "not locals" case
- > import bar
- > buf.append('import bar in func(): %s' % bar.s)
- > return '\n(extroot) '.join(buf)
- > # "fromlist == ('*',)" case
- > from bar import *
- > buf.append('from bar import *: %s' % s)
- > # "not fromlist" and "if '.' in name" case
- > import sub1.baz
- > buf.append('import sub1.baz: %s' % sub1.baz.s)
- > # "not fromlist" and NOT "if '.' in name" case
- > import sub1
- > buf.append('import sub1: %s' % sub1.s)
- > # NOT "not fromlist" and NOT "level != -1" case
- > from bar import s
- > buf.append('from bar import s: %s' % s)
- > NO_CHECK_EOF
- $ hg --config extensions.extroot=$TESTTMP/extroot root
- (extroot) from bar import *: this is extroot.bar
- (extroot) import sub1.baz: this is extroot.sub1.baz
- (extroot) import sub1: this is extroot.sub1.__init__
- (extroot) from bar import s: this is extroot.bar
- (extroot) import bar in func(): this is extroot.bar
- $TESTTMP/a
-#endif
-
#if demandimport
Examine whether module loading is delayed until actual referring, even
@@ -453,7 +386,6 @@
> detail = b"this is extlibroot.recursedown.abs.used"
> NO_CHECK_EOF
$ cat > $TESTTMP/extlibroot/recursedown/abs/__init__.py <<NO_CHECK_EOF
- > from __future__ import absolute_import
> from extlibroot.recursedown.abs.used import detail
> NO_CHECK_EOF
@@ -467,7 +399,6 @@
> NO_CHECK_EOF
$ cat > $TESTTMP/extlibroot/recursedown/__init__.py <<NO_CHECK_EOF
- > from __future__ import absolute_import
> from extlibroot.recursedown.abs import detail as absdetail
> from .legacy import detail as legacydetail
> NO_CHECK_EOF
@@ -481,11 +412,9 @@
> detail = b"this is extlibroot.shadowing.used"
> NO_CHECK_EOF
$ cat > $TESTTMP/extlibroot/shadowing/proxied.py <<NO_CHECK_EOF
- > from __future__ import absolute_import
> from extlibroot.shadowing.used import detail
> NO_CHECK_EOF
$ cat > $TESTTMP/extlibroot/shadowing/__init__.py <<NO_CHECK_EOF
- > from __future__ import absolute_import
> from .used import detail as used
> NO_CHECK_EOF
@@ -514,7 +443,6 @@
> detail = b"this is absextroot.relimportee"
> NO_CHECK_EOF
$ cat > $TESTTMP/absextroot/xsub1/xsub2/relimporter.py <<NO_CHECK_EOF
- > from __future__ import absolute_import
> from mercurial import pycompat
> from ... import relimportee
> detail = b"this relimporter imports %r" % (
@@ -525,7 +453,6 @@
runtime.
$ cat > $TESTTMP/absextroot/absolute.py << NO_CHECK_EOF
- > from __future__ import absolute_import
>
> # import extension local modules absolutely (level = 0)
> from absextroot.xsub1.xsub2 import used, unused
@@ -539,7 +466,6 @@
> NO_CHECK_EOF
$ cat > $TESTTMP/absextroot/relative.py << NO_CHECK_EOF
- > from __future__ import absolute_import
>
> # import extension local modules relatively (level == 1)
> from .xsub1.xsub2 import used, unused
@@ -559,7 +485,6 @@
Setup main procedure of extension.
$ cat > $TESTTMP/absextroot/__init__.py <<NO_CHECK_EOF
- > from __future__ import absolute_import
> from mercurial import registrar
> cmdtable = {}
> command = registrar.command(cmdtable)
@@ -1925,7 +1850,6 @@
$ hg init $TESTTMP/opt-unicode-default
$ cat > $TESTTMP/test_unicode_default_value.py << EOF
- > from __future__ import print_function
> from mercurial import registrar
> cmdtable = {}
> command = registrar.command(cmdtable)
@@ -1933,14 +1857,12 @@
> def ext(*args, **opts):
> print(opts[b'opt'], flush=True)
> EOF
- $ "$PYTHON" $TESTTMP/unflush.py $TESTTMP/test_unicode_default_value.py
$ cat > $TESTTMP/opt-unicode-default/.hg/hgrc << EOF
> [extensions]
> test_unicode_default_value = $TESTTMP/test_unicode_default_value.py
> EOF
$ hg -R $TESTTMP/opt-unicode-default dummy
*** failed to import extension "test_unicode_default_value" from $TESTTMP/test_unicode_default_value.py: unicode 'value' found in cmdtable.dummy (py3 !)
- *** failed to import extension "test_unicode_default_value" from $TESTTMP/test_unicode_default_value.py: unicode u'value' found in cmdtable.dummy (no-py3 !)
*** (use b'' to make it byte string)
hg: unknown command 'dummy'
(did you mean summary?)
--- a/tests/test-extensions-wrapfunction.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-extensions-wrapfunction.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
from mercurial import extensions
@@ -18,7 +16,7 @@
wrappers = [genwrapper(i) for i in range(5)]
-class dummyclass(object):
+class dummyclass:
def getstack(self):
return ['orig']
@@ -69,7 +67,7 @@
print('context manager', dummy.getstack())
# Wrap callable object which has no __name__
-class callableobj(object):
+class callableobj:
def __call__(self):
return ['orig']
--- a/tests/test-extra-filelog-entry.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-extra-filelog-entry.t Thu Jun 16 15:28:54 2022 +0200
@@ -16,6 +16,6 @@
$ hg qrefresh
$ hg debugindex b
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 1e88685f5dde 000000000000 000000000000
--- a/tests/test-fastannotate-hg.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-fastannotate-hg.t Thu Jun 16 15:28:54 2022 +0200
@@ -481,7 +481,6 @@
and its ancestor by overriding "repo._filecommit".
$ cat > ../legacyrepo.py <<EOF
- > from __future__ import absolute_import
> from mercurial import commit, error, extensions
> def _filecommit(orig, repo, fctx, manifest1, manifest2,
> linkrev, tr, includecopymeta, ms):
--- a/tests/test-fastannotate-revmap.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-fastannotate-revmap.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import os
import tempfile
@@ -10,9 +8,6 @@
from hgext.fastannotate import error, revmap
-if pycompat.ispy3:
- xrange = range
-
def genhsh(i):
return pycompat.bytechr(i) + b'\0' * 19
@@ -35,7 +30,7 @@
rm = revmap.revmap(path)
ensure(rm.maxrev == 0)
- for i in xrange(5):
+ for i in range(5):
ensure(rm.rev2hsh(i) is None)
ensure(rm.hsh2rev(b'\0' * 20) is None)
@@ -53,11 +48,11 @@
b'a',
b'a',
]
- for i in xrange(1, 5):
+ for i in range(1, 5):
ensure(rm.append(genhsh(i), sidebranch=(i & 1), path=paths[i]) == i)
ensure(rm.maxrev == 4)
- for i in xrange(1, 5):
+ for i in range(1, 5):
ensure(rm.hsh2rev(genhsh(i)) == i)
ensure(rm.rev2hsh(i) == genhsh(i))
@@ -65,13 +60,13 @@
rm.flush()
rm = revmap.revmap(path)
ensure(rm.maxrev == 4)
- for i in xrange(1, 5):
+ for i in range(1, 5):
ensure(rm.hsh2rev(genhsh(i)) == i)
ensure(rm.rev2hsh(i) == genhsh(i))
ensure(bool(rm.rev2flag(i) & revmap.sidebranchflag) == bool(i & 1))
# append without calling save() explicitly
- for i in xrange(5, 12):
+ for i in range(5, 12):
ensure(
rm.append(genhsh(i), sidebranch=(i & 1), path=paths[i], flush=True)
== i
@@ -80,7 +75,7 @@
# re-load and verify
rm = revmap.revmap(path)
ensure(rm.maxrev == 11)
- for i in xrange(1, 12):
+ for i in range(1, 12):
ensure(rm.hsh2rev(genhsh(i)) == i)
ensure(rm.rev2hsh(i) == genhsh(i))
ensure(rm.rev2path(i) == paths[i] or paths[i - 1])
@@ -150,7 +145,7 @@
def testcopyfrom():
path = gettemppath()
rm = revmap.revmap(path)
- for i in xrange(1, 10):
+ for i in range(1, 10):
ensure(
rm.append(genhsh(i), sidebranch=(i & 1), path=(b'%d' % (i // 3)))
== i
@@ -171,7 +166,7 @@
os.unlink(path2)
-class fakefctx(object):
+class fakefctx:
def __init__(self, node, path=None):
self._node = node
self._path = path
@@ -187,21 +182,21 @@
path = gettemppath()
rm = revmap.revmap(path)
- for i in xrange(1, 5):
+ for i in range(1, 5):
ensure(rm.append(genhsh(i), sidebranch=(i & 1)) == i)
- for i in xrange(1, 5):
+ for i in range(1, 5):
ensure(((genhsh(i), None) in rm) == ((i & 1) == 0))
ensure((fakefctx(genhsh(i)) in rm) == ((i & 1) == 0))
- for i in xrange(5, 10):
+ for i in range(5, 10):
ensure(fakefctx(genhsh(i)) not in rm)
ensure((genhsh(i), None) not in rm)
# "contains" checks paths
rm = revmap.revmap()
- for i in xrange(1, 5):
+ for i in range(1, 5):
ensure(rm.append(genhsh(i), path=(b'%d' % (i // 2))) == i)
- for i in xrange(1, 5):
+ for i in range(1, 5):
ensure(fakefctx(genhsh(i), path=(b'%d' % (i // 2))) in rm)
ensure(fakefctx(genhsh(i), path=b'a') not in rm)
@@ -211,7 +206,7 @@
ensure(revmap.getlastnode(path) is None)
rm = revmap.revmap(path)
ensure(revmap.getlastnode(path) is None)
- for i in xrange(1, 10):
+ for i in range(1, 10):
hsh = genhsh(i)
rm.append(hsh, path=(b'%d' % (i // 2)), flush=True)
ensure(revmap.getlastnode(path) == hsh)
--- a/tests/test-filebranch.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-filebranch.t Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
when we do a merge.
$ cat <<EOF > merge
- > from __future__ import print_function
> import sys, os
> print("merging for", os.path.basename(sys.argv[1]))
> EOF
@@ -73,7 +72,7 @@
main: we should have a merge here:
$ hg debugindex --changelog
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 cdca01651b96 000000000000 000000000000
1 1 f6718a9cb7f3 cdca01651b96 000000000000
2 2 bdd988058d16 cdca01651b96 000000000000
@@ -97,7 +96,7 @@
foo: we should have a merge here:
$ hg debugindex foo
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b8e02f643373 000000000000 000000000000
1 1 2ffeddde1b65 b8e02f643373 000000000000
2 2 33d1fb69067a b8e02f643373 000000000000
@@ -106,21 +105,21 @@
bar: we should not have a merge here:
$ hg debugindex bar
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b8e02f643373 000000000000 000000000000
1 2 33d1fb69067a b8e02f643373 000000000000
baz: we should not have a merge here:
$ hg debugindex baz
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b8e02f643373 000000000000 000000000000
1 1 2ffeddde1b65 b8e02f643373 000000000000
quux: we should not have a merge here:
$ hg debugindex quux
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b8e02f643373 000000000000 000000000000
1 3 6128c0f33108 b8e02f643373 000000000000
--- a/tests/test-filecache.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-filecache.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
import os
import stat
import subprocess
@@ -32,15 +31,12 @@
vfs as vfsmod,
)
-if pycompat.ispy3:
- xrange = range
-
-class fakerepo(object):
+class fakerepo:
def __init__(self):
self._filecache = {}
- class fakevfs(object):
+ class fakevfs:
def join(self, p):
return p
@@ -215,7 +211,7 @@
# try some times, because reproduction of ambiguity depends on
# "filesystem time"
- for i in xrange(5):
+ for i in range(5):
fp = open(filename, 'w')
fp.write('FOO')
fp.close()
@@ -229,7 +225,7 @@
# repeat changing via checkambigatclosing, to examine whether
# st_mtime is advanced multiple times as expected
- for i in xrange(repetition):
+ for i in range(repetition):
# explicit closing
fp = vfsmod.checkambigatclosing(open(filename, 'a'))
fp.write('FOO')
--- a/tests/test-filelog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-filelog.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
"""
Tests the behavior of filelog w.r.t. data starting with '\1\n'
"""
-from __future__ import absolute_import, print_function
from mercurial.node import hex
from mercurial import (
--- a/tests/test-flagprocessor.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-flagprocessor.t Thu Jun 16 15:28:54 2022 +0200
@@ -214,12 +214,10 @@
File "mercurial.revlogutils.flagutil", line *, in insertflagprocessor (glob) (pyoxidizer !)
raise error.Abort(msg)
mercurial.error.Abort: cannot register multiple processors on flag '0x8'. (py3 !)
- Abort: cannot register multiple processors on flag '0x8'. (no-py3 !)
*** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'.
$ hg st 2>&1 | egrep 'cannot register multiple processors|flagprocessorext'
File "*/tests/flagprocessorext.py", line *, in extsetup (glob)
mercurial.error.Abort: cannot register multiple processors on flag '0x8'. (py3 !)
- Abort: cannot register multiple processors on flag '0x8'. (no-py3 !)
*** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'.
File "*/tests/flagprocessorext.py", line *, in b64decode (glob)
--- a/tests/test-flags.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-flags.t Thu Jun 16 15:28:54 2022 +0200
@@ -145,13 +145,13 @@
-rwxr-x---
$ hg debugindex a
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b80de5d13875 000000000000 000000000000
$ hg debugindex -R ../test2 a
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b80de5d13875 000000000000 000000000000
$ hg debugindex -R ../test1 a
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b80de5d13875 000000000000 000000000000
1 1 7fe919cc0336 b80de5d13875 000000000000
--- a/tests/test-fncache.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-fncache.t Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
does not break
$ cat > chunksize.py <<EOF
- > from __future__ import absolute_import
> from mercurial import store
> store.fncache_chunksize = 1
> EOF
@@ -232,7 +231,6 @@
Aborting lock does not prevent fncache writes
$ cat > exceptionext.py <<EOF
- > from __future__ import absolute_import
> import os
> from mercurial import commands, error, extensions
>
@@ -279,7 +277,6 @@
Aborting transaction prevents fncache change
$ cat > ../exceptionext.py <<EOF
- > from __future__ import absolute_import
> import os
> from mercurial import commands, error, extensions, localrepo
>
@@ -315,7 +312,6 @@
Aborted transactions can be recovered later
$ cat > ../exceptionext.py <<EOF
- > from __future__ import absolute_import
> import os
> from mercurial import (
> commands,
@@ -483,7 +479,6 @@
changesets that only contain changes to existing files:
$ cat > fncacheloadwarn.py << EOF
- > from __future__ import absolute_import
> from mercurial import extensions, localrepo
>
> def extsetup(ui):
--- a/tests/test-generaldelta.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-generaldelta.t Thu Jun 16 15:28:54 2022 +0200
@@ -25,7 +25,6 @@
> done
$ cd ..
- >>> from __future__ import print_function
>>> import os
>>> regsize = os.stat("repo/.hg/store/00manifest.i").st_size
>>> gdsize = os.stat("gdrepo/.hg/store/00manifest.i").st_size
@@ -75,8 +74,8 @@
$ cd client
$ hg pull -q ../server -r 4
$ hg debugdeltachain x
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 3 2 3 1.50000 3 0 0.00000
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 -1 -1 1 1 -1 base 3 2 3 1.50000 3 0 0.00000
$ cd ..
@@ -105,34 +104,23 @@
updating to branch default
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg -R repo debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 (no-zstd !)
- 1 1 2 0 prev 57 135 161 1.19259 161 0 0.00000 (no-zstd !)
- 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 (no-zstd !)
- 0 1 1 -1 base 107 135 107 0.79259 107 0 0.00000 (zstd !)
- 1 1 2 0 prev 57 135 164 1.21481 164 0 0.00000 (zstd !)
- 2 1 3 1 prev 57 135 221 1.63704 221 0 0.00000 (zstd !)
- 3 2 1 -1 base 104 135 104 0.77037 104 0 0.00000
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 -1 -1 1 1 -1 base 10? 135 10? 0.7???? 10? 0 0.00000 (glob)
+ 1 0 -1 1 2 0 prev 57 135 1?? 1.????? 16? 0 0.00000 (glob)
+ 2 0 -1 1 3 1 prev 57 135 2?? 1.6???? 2?? 0 0.00000 (glob)
+ 3 0 -1 2 1 -1 base 104 135 104 0.77037 104 0 0.00000
$ hg -R usegd debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 (no-zstd !)
- 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 (no-zstd !)
- 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 (no-zstd !)
- 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 (no-zstd !)
- 0 1 1 -1 base 107 135 107 0.79259 107 0 0.00000 (zstd !)
- 1 1 2 0 p1 57 135 164 1.21481 164 0 0.00000 (zstd !)
- 2 1 3 1 prev 57 135 221 1.63704 221 0 0.00000 (zstd !)
- 3 1 2 0 p1 57 135 164 1.21481 278 114 0.69512 (zstd !)
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 -1 -1 1 1 -1 base 10? 135 10? 0.7???? 10? 0 0.00000 (glob)
+ 1 0 -1 1 2 0 p1 57 135 16? 1.????? 16? 0 0.00000 (glob)
+ 2 0 -1 1 3 1 prev 57 135 2?? 1.6???? 2?? 0 0.00000 (glob)
+ 3 0 -1 1 2 0 p1 57 135 16? 1.????? 27? 114 0.????? (glob)
$ hg -R full debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 (no-zstd !)
- 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 (no-zstd !)
- 2 1 2 0 p1 57 135 161 1.19259 218 57 0.35404 (no-zstd !)
- 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 (no-zstd !)
- 0 1 1 -1 base 107 135 107 0.79259 107 0 0.00000 (zstd !)
- 1 1 2 0 p1 57 135 164 1.21481 164 0 0.00000 (zstd !)
- 2 1 2 0 p1 57 135 164 1.21481 221 57 0.34756 (zstd !)
- 3 1 2 0 p1 57 135 164 1.21481 278 114 0.69512 (zstd !)
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 -1 -1 1 1 -1 base 10? 135 10? 0.7???? 10? 0 0.00000 (glob)
+ 1 0 -1 1 2 0 p1 57 135 16? 1.????? 16? 0 0.00000 (glob)
+ 2 0 -1 1 2 0 p1 57 135 16? 1.????? 2?? 57 0.3???? (glob)
+ 3 0 -1 1 2 0 p1 57 135 16? 1.????? 27? 114 0.????? (glob)
Test revlog.optimize-delta-parent-choice
@@ -152,13 +140,10 @@
$ hg merge -q 0
$ hg commit -q -m merge
$ hg debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 (no-zstd !)
- 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 (no-zstd !)
- 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 (no-zstd !)
- 0 1 1 -1 base 68 215 68 0.31628 68 0 0.00000 (zstd !)
- 1 1 2 0 prev 70 86 138 1.60465 138 0 0.00000 (zstd !)
- 2 1 2 0 p2 68 301 136 0.45183 206 70 0.51471 (zstd !)
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 -1 -1 1 1 -1 base ?? 215 ?? 0.????? ?? 0 0.00000 (glob)
+ 1 -1 -1 1 2 0 prev ?? 86 1?? 1.????? 1?? 0 0.00000 (glob)
+ 2 1 0 1 2 0 p2 ?? 301 1?? 0.4???? ??? ?? 0.5???? (glob)
$ hg strip -q -r . --config extensions.strip=
@@ -167,13 +152,10 @@
$ hg merge -q 0
$ hg commit -q -m merge --config storage.revlog.optimize-delta-parent-choice=yes
$ hg debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 (no-zstd !)
- 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 (no-zstd !)
- 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 (no-zstd !)
- 0 1 1 -1 base 68 215 68 0.31628 68 0 0.00000 (zstd !)
- 1 1 2 0 prev 70 86 138 1.60465 138 0 0.00000 (zstd !)
- 2 1 2 0 p2 68 301 136 0.45183 206 70 0.51471 (zstd !)
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 -1 -1 1 1 -1 base ?? 215 ?? 0.????? ?? 0 0.00000 (glob)
+ 1 -1 -1 1 2 0 prev ?? 86 1?? 1.????? 1?? 0 0.00000 (glob)
+ 2 1 0 1 2 0 p2 ?? 301 1?? 0.4???? ??? ?? 0.5???? (glob)
Test that strip bundle use bundle2
$ hg --config extensions.strip= strip .
@@ -234,70 +216,62 @@
$
$ cd ..
$ hg -R source-repo debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 46 45 46 1.02222 46 0 0.00000
- 1 1 2 0 p1 57 90 103 1.14444 103 0 0.00000
- 2 1 3 1 p1 57 135 160 1.18519 160 0 0.00000
- 3 1 4 2 p1 57 180 217 1.20556 217 0 0.00000
- 4 1 5 3 p1 57 225 274 1.21778 274 0 0.00000
- 5 1 6 4 p1 57 270 331 1.22593 331 0 0.00000
- 6 2 1 -1 base 46 45 46 1.02222 46 0 0.00000
- 7 2 2 6 p1 57 90 103 1.14444 103 0 0.00000
- 8 2 3 7 p1 57 135 160 1.18519 160 0 0.00000
- 9 2 4 8 p1 57 180 217 1.20556 217 0 0.00000
- 10 2 5 9 p1 58 226 275 1.21681 275 0 0.00000
- 11 2 6 10 p1 58 272 333 1.22426 333 0 0.00000
- 12 2 7 11 p1 58 318 391 1.22956 391 0 0.00000
- 13 2 8 12 p1 58 364 449 1.23352 449 0 0.00000
- 14 2 9 13 p1 58 410 507 1.23659 507 0 0.00000
- 15 2 10 14 p1 58 456 565 1.23904 565 0 0.00000
- 16 2 11 15 p1 58 502 623 1.24104 623 0 0.00000
- 17 2 12 16 p1 58 548 681 1.24270 681 0 0.00000
- 18 3 1 -1 base 47 46 47 1.02174 47 0 0.00000
- 19 3 2 18 p1 58 92 105 1.14130 105 0 0.00000
- 20 3 3 19 p1 58 138 163 1.18116 163 0 0.00000
- 21 3 4 20 p1 58 184 221 1.20109 221 0 0.00000
- 22 3 5 21 p1 58 230 279 1.21304 279 0 0.00000
- 23 3 6 22 p1 58 276 337 1.22101 337 0 0.00000
- 24 3 7 23 p1 58 322 395 1.22671 395 0 0.00000
- 25 3 8 24 p1 58 368 453 1.23098 453 0 0.00000
- 26 3 9 25 p1 58 414 511 1.23430 511 0 0.00000
- 27 3 10 26 p1 58 460 569 1.23696 569 0 0.00000
- 28 3 11 27 p1 58 506 627 1.23913 627 0 0.00000
- 29 3 12 28 p1 58 552 685 1.24094 685 0 0.00000
- 30 3 13 29 p1 58 598 743 1.24247 743 0 0.00000
- 31 3 14 30 p1 58 644 801 1.24379 801 0 0.00000
- 32 3 15 31 p1 58 690 859 1.24493 859 0 0.00000
- 33 3 16 32 p1 58 736 917 1.24592 917 0 0.00000
- 34 3 17 33 p1 58 782 975 1.24680 975 0 0.00000
- 35 3 18 34 p1 58 828 1033 1.24758 1033 0 0.00000
- 36 3 19 35 p1 58 874 1091 1.24828 1091 0 0.00000
- 37 3 20 36 p1 58 920 1149 1.24891 1149 0 0.00000
- 38 3 21 37 p1 58 966 1207 1.24948 1207 0 0.00000
- 39 3 22 38 p1 58 1012 1265 1.25000 1265 0 0.00000
- 40 3 23 39 p1 58 1058 1323 1.25047 1323 0 0.00000
- 41 3 24 40 p1 58 1104 1381 1.25091 1381 0 0.00000
- 42 3 25 41 p1 58 1150 1439 1.25130 1439 0 0.00000
- 43 3 26 42 p1 58 1196 1497 1.25167 1497 0 0.00000
- 44 3 27 43 p1 58 1242 1555 1.25201 1555 0 0.00000
- 45 3 28 44 p1 58 1288 1613 1.25233 1613 0 0.00000
- 46 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000
- 47 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000
- 48 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000
- 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 (no-zstd !)
- 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 (no-zstd !)
- 51 4 3 50 prev 356 594 611 1.02862 611 0 0.00000 (no-zstd !)
- 52 4 4 51 p1 58 640 669 1.04531 669 0 0.00000 (no-zstd !)
- 49 4 1 -1 base 205 316 205 0.64873 205 0 0.00000 (zstd !)
- 50 4 2 49 p1 58 362 263 0.72652 263 0 0.00000 (zstd !)
- 51 4 3 50 prev 366 594 629 1.05892 629 0 0.00000 (zstd no-bigendian !)
- 52 4 4 51 p1 58 640 687 1.07344 687 0 0.00000 (zstd no-bigendian !)
- 51 4 3 50 prev 367 594 630 1.06061 630 0 0.00000 (zstd bigendian !)
- 52 4 4 51 p1 58 640 688 1.07500 688 0 0.00000 (zstd bigendian !)
- 53 5 1 -1 base 0 0 0 0.00000 0 0 0.00000
- 54 6 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !)
- 54 6 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd no-bigendian !)
- 54 6 1 -1 base 376 640 376 0.58750 376 0 0.00000 (zstd bigendian !)
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 -1 -1 1 1 -1 base 46 45 46 1.02222 46 0 0.00000
+ 1 0 -1 1 2 0 p1 57 90 103 1.14444 103 0 0.00000
+ 2 1 -1 1 3 1 p1 57 135 160 1.18519 160 0 0.00000
+ 3 2 -1 1 4 2 p1 57 180 217 1.20556 217 0 0.00000
+ 4 3 -1 1 5 3 p1 57 225 274 1.21778 274 0 0.00000
+ 5 4 -1 1 6 4 p1 57 270 331 1.22593 331 0 0.00000
+ 6 -1 -1 2 1 -1 base 46 45 46 1.02222 46 0 0.00000
+ 7 6 -1 2 2 6 p1 57 90 103 1.14444 103 0 0.00000
+ 8 7 -1 2 3 7 p1 57 135 160 1.18519 160 0 0.00000
+ 9 8 -1 2 4 8 p1 57 180 217 1.20556 217 0 0.00000
+ 10 9 -1 2 5 9 p1 58 226 275 1.21681 275 0 0.00000
+ 11 10 -1 2 6 10 p1 58 272 333 1.22426 333 0 0.00000
+ 12 11 -1 2 7 11 p1 58 318 391 1.22956 391 0 0.00000
+ 13 12 -1 2 8 12 p1 58 364 449 1.23352 449 0 0.00000
+ 14 13 -1 2 9 13 p1 58 410 507 1.23659 507 0 0.00000
+ 15 14 -1 2 10 14 p1 58 456 565 1.23904 565 0 0.00000
+ 16 15 -1 2 11 15 p1 58 502 623 1.24104 623 0 0.00000
+ 17 16 -1 2 12 16 p1 58 548 681 1.24270 681 0 0.00000
+ 18 -1 -1 3 1 -1 base 47 46 47 1.02174 47 0 0.00000
+ 19 18 -1 3 2 18 p1 58 92 105 1.14130 105 0 0.00000
+ 20 19 -1 3 3 19 p1 58 138 163 1.18116 163 0 0.00000
+ 21 20 -1 3 4 20 p1 58 184 221 1.20109 221 0 0.00000
+ 22 21 -1 3 5 21 p1 58 230 279 1.21304 279 0 0.00000
+ 23 22 -1 3 6 22 p1 58 276 337 1.22101 337 0 0.00000
+ 24 23 -1 3 7 23 p1 58 322 395 1.22671 395 0 0.00000
+ 25 24 -1 3 8 24 p1 58 368 453 1.23098 453 0 0.00000
+ 26 25 -1 3 9 25 p1 58 414 511 1.23430 511 0 0.00000
+ 27 26 -1 3 10 26 p1 58 460 569 1.23696 569 0 0.00000
+ 28 27 -1 3 11 27 p1 58 506 627 1.23913 627 0 0.00000
+ 29 28 -1 3 12 28 p1 58 552 685 1.24094 685 0 0.00000
+ 30 29 -1 3 13 29 p1 58 598 743 1.24247 743 0 0.00000
+ 31 30 -1 3 14 30 p1 58 644 801 1.24379 801 0 0.00000
+ 32 31 -1 3 15 31 p1 58 690 859 1.24493 859 0 0.00000
+ 33 32 -1 3 16 32 p1 58 736 917 1.24592 917 0 0.00000
+ 34 33 -1 3 17 33 p1 58 782 975 1.24680 975 0 0.00000
+ 35 34 -1 3 18 34 p1 58 828 1033 1.24758 1033 0 0.00000
+ 36 35 -1 3 19 35 p1 58 874 1091 1.24828 1091 0 0.00000
+ 37 36 -1 3 20 36 p1 58 920 1149 1.24891 1149 0 0.00000
+ 38 37 -1 3 21 37 p1 58 966 1207 1.24948 1207 0 0.00000
+ 39 38 -1 3 22 38 p1 58 1012 1265 1.25000 1265 0 0.00000
+ 40 39 -1 3 23 39 p1 58 1058 1323 1.25047 1323 0 0.00000
+ 41 40 -1 3 24 40 p1 58 1104 1381 1.25091 1381 0 0.00000
+ 42 41 -1 3 25 41 p1 58 1150 1439 1.25130 1439 0 0.00000
+ 43 42 -1 3 26 42 p1 58 1196 1497 1.25167 1497 0 0.00000
+ 44 43 -1 3 27 43 p1 58 1242 1555 1.25201 1555 0 0.00000
+ 45 44 -1 3 28 44 p1 58 1288 1613 1.25233 1613 0 0.00000
+ 46 45 -1 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000
+ 47 46 -1 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000
+ 48 47 -1 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000
+ 49 5 -1 4 1 -1 base ??? 316 ??? 0.6???? ??? 0 0.00000 (glob)
+ 50 49 -1 4 2 49 p1 58 362 2?? 0.7???? 2?? 0 0.00000 (glob)
+ 51 17 -1 4 3 50 prev 3?? 5?? 6?? 1.0???? 6?? 0 0.00000 (glob)
+ 52 51 -1 4 4 51 p1 58 640 6?? 1.0???? 6?? 0 0.00000 (glob)
+ 53 52 -1 5 1 -1 base 0 0 0 0.00000 0 0 0.00000
+ 54 53 -1 6 1 -1 base 3?? 640 3?? 0.5???? 3?? 0 0.00000 (glob)
$ hg clone --pull source-repo --config experimental.maxdeltachainspan=2800 relax-chain --config format.generaldelta=yes
requesting all changes
adding changesets
@@ -308,69 +282,62 @@
updating to branch default
14 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg -R relax-chain debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 46 45 46 1.02222 46 0 0.00000
- 1 1 2 0 p1 57 90 103 1.14444 103 0 0.00000
- 2 1 3 1 p1 57 135 160 1.18519 160 0 0.00000
- 3 1 4 2 p1 57 180 217 1.20556 217 0 0.00000
- 4 1 5 3 p1 57 225 274 1.21778 274 0 0.00000
- 5 1 6 4 p1 57 270 331 1.22593 331 0 0.00000
- 6 2 1 -1 base 46 45 46 1.02222 46 0 0.00000
- 7 2 2 6 p1 57 90 103 1.14444 103 0 0.00000
- 8 2 3 7 p1 57 135 160 1.18519 160 0 0.00000
- 9 2 4 8 p1 57 180 217 1.20556 217 0 0.00000
- 10 2 5 9 p1 58 226 275 1.21681 275 0 0.00000
- 11 2 6 10 p1 58 272 333 1.22426 333 0 0.00000
- 12 2 7 11 p1 58 318 391 1.22956 391 0 0.00000
- 13 2 8 12 p1 58 364 449 1.23352 449 0 0.00000
- 14 2 9 13 p1 58 410 507 1.23659 507 0 0.00000
- 15 2 10 14 p1 58 456 565 1.23904 565 0 0.00000
- 16 2 11 15 p1 58 502 623 1.24104 623 0 0.00000
- 17 2 12 16 p1 58 548 681 1.24270 681 0 0.00000
- 18 3 1 -1 base 47 46 47 1.02174 47 0 0.00000
- 19 3 2 18 p1 58 92 105 1.14130 105 0 0.00000
- 20 3 3 19 p1 58 138 163 1.18116 163 0 0.00000
- 21 3 4 20 p1 58 184 221 1.20109 221 0 0.00000
- 22 3 5 21 p1 58 230 279 1.21304 279 0 0.00000
- 23 3 6 22 p1 58 276 337 1.22101 337 0 0.00000
- 24 3 7 23 p1 58 322 395 1.22671 395 0 0.00000
- 25 3 8 24 p1 58 368 453 1.23098 453 0 0.00000
- 26 3 9 25 p1 58 414 511 1.23430 511 0 0.00000
- 27 3 10 26 p1 58 460 569 1.23696 569 0 0.00000
- 28 3 11 27 p1 58 506 627 1.23913 627 0 0.00000
- 29 3 12 28 p1 58 552 685 1.24094 685 0 0.00000
- 30 3 13 29 p1 58 598 743 1.24247 743 0 0.00000
- 31 3 14 30 p1 58 644 801 1.24379 801 0 0.00000
- 32 3 15 31 p1 58 690 859 1.24493 859 0 0.00000
- 33 3 16 32 p1 58 736 917 1.24592 917 0 0.00000
- 34 3 17 33 p1 58 782 975 1.24680 975 0 0.00000
- 35 3 18 34 p1 58 828 1033 1.24758 1033 0 0.00000
- 36 3 19 35 p1 58 874 1091 1.24828 1091 0 0.00000
- 37 3 20 36 p1 58 920 1149 1.24891 1149 0 0.00000
- 38 3 21 37 p1 58 966 1207 1.24948 1207 0 0.00000
- 39 3 22 38 p1 58 1012 1265 1.25000 1265 0 0.00000
- 40 3 23 39 p1 58 1058 1323 1.25047 1323 0 0.00000
- 41 3 24 40 p1 58 1104 1381 1.25091 1381 0 0.00000
- 42 3 25 41 p1 58 1150 1439 1.25130 1439 0 0.00000
- 43 3 26 42 p1 58 1196 1497 1.25167 1497 0 0.00000
- 44 3 27 43 p1 58 1242 1555 1.25201 1555 0 0.00000
- 45 3 28 44 p1 58 1288 1613 1.25233 1613 0 0.00000
- 46 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000
- 47 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000
- 48 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000
- 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 (no-zstd !)
- 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 (no-zstd !)
- 51 2 13 17 p1 58 594 739 1.24411 2781 2042 2.76319 (no-zstd !)
- 52 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !)
- 49 4 1 -1 base 205 316 205 0.64873 205 0 0.00000 (zstd !)
- 50 4 2 49 p1 58 362 263 0.72652 263 0 0.00000 (zstd !)
- 51 2 13 17 p1 58 594 739 1.24411 2789 2050 2.77402 (zstd !)
- 52 5 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd no-bigendian !)
- 52 5 1 -1 base 376 640 376 0.58750 376 0 0.00000 (zstd bigendian !)
- 53 6 1 -1 base 0 0 0 0.00000 0 0 0.00000
- 54 7 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !)
- 54 7 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd no-bigendian !)
- 54 7 1 -1 base 376 640 376 0.58750 376 0 0.00000 (zstd bigendian !)
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 -1 -1 1 1 -1 base 46 45 46 1.02222 46 0 0.00000
+ 1 0 -1 1 2 0 p1 57 90 103 1.14444 103 0 0.00000
+ 2 1 -1 1 3 1 p1 57 135 160 1.18519 160 0 0.00000
+ 3 2 -1 1 4 2 p1 57 180 217 1.20556 217 0 0.00000
+ 4 3 -1 1 5 3 p1 57 225 274 1.21778 274 0 0.00000
+ 5 4 -1 1 6 4 p1 57 270 331 1.22593 331 0 0.00000
+ 6 -1 -1 2 1 -1 base 46 45 46 1.02222 46 0 0.00000
+ 7 6 -1 2 2 6 p1 57 90 103 1.14444 103 0 0.00000
+ 8 7 -1 2 3 7 p1 57 135 160 1.18519 160 0 0.00000
+ 9 8 -1 2 4 8 p1 57 180 217 1.20556 217 0 0.00000
+ 10 9 -1 2 5 9 p1 58 226 275 1.21681 275 0 0.00000
+ 11 10 -1 2 6 10 p1 58 272 333 1.22426 333 0 0.00000
+ 12 11 -1 2 7 11 p1 58 318 391 1.22956 391 0 0.00000
+ 13 12 -1 2 8 12 p1 58 364 449 1.23352 449 0 0.00000
+ 14 13 -1 2 9 13 p1 58 410 507 1.23659 507 0 0.00000
+ 15 14 -1 2 10 14 p1 58 456 565 1.23904 565 0 0.00000
+ 16 15 -1 2 11 15 p1 58 502 623 1.24104 623 0 0.00000
+ 17 16 -1 2 12 16 p1 58 548 681 1.24270 681 0 0.00000
+ 18 -1 -1 3 1 -1 base 47 46 47 1.02174 47 0 0.00000
+ 19 18 -1 3 2 18 p1 58 92 105 1.14130 105 0 0.00000
+ 20 19 -1 3 3 19 p1 58 138 163 1.18116 163 0 0.00000
+ 21 20 -1 3 4 20 p1 58 184 221 1.20109 221 0 0.00000
+ 22 21 -1 3 5 21 p1 58 230 279 1.21304 279 0 0.00000
+ 23 22 -1 3 6 22 p1 58 276 337 1.22101 337 0 0.00000
+ 24 23 -1 3 7 23 p1 58 322 395 1.22671 395 0 0.00000
+ 25 24 -1 3 8 24 p1 58 368 453 1.23098 453 0 0.00000
+ 26 25 -1 3 9 25 p1 58 414 511 1.23430 511 0 0.00000
+ 27 26 -1 3 10 26 p1 58 460 569 1.23696 569 0 0.00000
+ 28 27 -1 3 11 27 p1 58 506 627 1.23913 627 0 0.00000
+ 29 28 -1 3 12 28 p1 58 552 685 1.24094 685 0 0.00000
+ 30 29 -1 3 13 29 p1 58 598 743 1.24247 743 0 0.00000
+ 31 30 -1 3 14 30 p1 58 644 801 1.24379 801 0 0.00000
+ 32 31 -1 3 15 31 p1 58 690 859 1.24493 859 0 0.00000
+ 33 32 -1 3 16 32 p1 58 736 917 1.24592 917 0 0.00000
+ 34 33 -1 3 17 33 p1 58 782 975 1.24680 975 0 0.00000
+ 35 34 -1 3 18 34 p1 58 828 1033 1.24758 1033 0 0.00000
+ 36 35 -1 3 19 35 p1 58 874 1091 1.24828 1091 0 0.00000
+ 37 36 -1 3 20 36 p1 58 920 1149 1.24891 1149 0 0.00000
+ 38 37 -1 3 21 37 p1 58 966 1207 1.24948 1207 0 0.00000
+ 39 38 -1 3 22 38 p1 58 1012 1265 1.25000 1265 0 0.00000
+ 40 39 -1 3 23 39 p1 58 1058 1323 1.25047 1323 0 0.00000
+ 41 40 -1 3 24 40 p1 58 1104 1381 1.25091 1381 0 0.00000
+ 42 41 -1 3 25 41 p1 58 1150 1439 1.25130 1439 0 0.00000
+ 43 42 -1 3 26 42 p1 58 1196 1497 1.25167 1497 0 0.00000
+ 44 43 -1 3 27 43 p1 58 1242 1555 1.25201 1555 0 0.00000
+ 45 44 -1 3 28 44 p1 58 1288 1613 1.25233 1613 0 0.00000
+ 46 45 -1 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000
+ 47 46 -1 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000
+ 48 47 -1 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000
+ 49 5 -1 4 1 -1 base ??? 316 ??? 0.6???? ??? 0 0.00000 (glob)
+ 50 49 -1 4 2 49 p1 58 362 2?? 0.7???? 2?? 0 0.00000 (glob)
+ 51 17 -1 2 13 17 p1 58 594 739 1.24411 278? 20?? 2.7???? (glob)
+ 52 51 -1 5 1 -1 base 3?? 640 3?? 0.5???? 3?? 0 0.00000 (glob)
+ 53 52 -1 6 1 -1 base 0 0 0 0.00000 0 0 0.00000
+ 54 53 -1 7 1 -1 base 3?? 640 3?? 0.5???? 3?? 0 0.00000 (glob)
$ hg clone --pull source-repo --config experimental.maxdeltachainspan=0 noconst-chain --config format.usegeneraldelta=yes --config storage.revlog.reuse-external-delta-parent=no
requesting all changes
adding changesets
@@ -381,61 +348,59 @@
updating to branch default
14 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg -R noconst-chain debugdeltachain -m
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 46 45 46 1.02222 46 0 0.00000
- 1 1 2 0 p1 57 90 103 1.14444 103 0 0.00000
- 2 1 3 1 p1 57 135 160 1.18519 160 0 0.00000
- 3 1 4 2 p1 57 180 217 1.20556 217 0 0.00000
- 4 1 5 3 p1 57 225 274 1.21778 274 0 0.00000
- 5 1 6 4 p1 57 270 331 1.22593 331 0 0.00000
- 6 2 1 -1 base 46 45 46 1.02222 46 0 0.00000
- 7 2 2 6 p1 57 90 103 1.14444 103 0 0.00000
- 8 2 3 7 p1 57 135 160 1.18519 160 0 0.00000
- 9 2 4 8 p1 57 180 217 1.20556 217 0 0.00000
- 10 2 5 9 p1 58 226 275 1.21681 275 0 0.00000
- 11 2 6 10 p1 58 272 333 1.22426 333 0 0.00000
- 12 2 7 11 p1 58 318 391 1.22956 391 0 0.00000
- 13 2 8 12 p1 58 364 449 1.23352 449 0 0.00000
- 14 2 9 13 p1 58 410 507 1.23659 507 0 0.00000
- 15 2 10 14 p1 58 456 565 1.23904 565 0 0.00000
- 16 2 11 15 p1 58 502 623 1.24104 623 0 0.00000
- 17 2 12 16 p1 58 548 681 1.24270 681 0 0.00000
- 18 3 1 -1 base 47 46 47 1.02174 47 0 0.00000
- 19 3 2 18 p1 58 92 105 1.14130 105 0 0.00000
- 20 3 3 19 p1 58 138 163 1.18116 163 0 0.00000
- 21 3 4 20 p1 58 184 221 1.20109 221 0 0.00000
- 22 3 5 21 p1 58 230 279 1.21304 279 0 0.00000
- 23 3 6 22 p1 58 276 337 1.22101 337 0 0.00000
- 24 3 7 23 p1 58 322 395 1.22671 395 0 0.00000
- 25 3 8 24 p1 58 368 453 1.23098 453 0 0.00000
- 26 3 9 25 p1 58 414 511 1.23430 511 0 0.00000
- 27 3 10 26 p1 58 460 569 1.23696 569 0 0.00000
- 28 3 11 27 p1 58 506 627 1.23913 627 0 0.00000
- 29 3 12 28 p1 58 552 685 1.24094 685 0 0.00000
- 30 3 13 29 p1 58 598 743 1.24247 743 0 0.00000
- 31 3 14 30 p1 58 644 801 1.24379 801 0 0.00000
- 32 3 15 31 p1 58 690 859 1.24493 859 0 0.00000
- 33 3 16 32 p1 58 736 917 1.24592 917 0 0.00000
- 34 3 17 33 p1 58 782 975 1.24680 975 0 0.00000
- 35 3 18 34 p1 58 828 1033 1.24758 1033 0 0.00000
- 36 3 19 35 p1 58 874 1091 1.24828 1091 0 0.00000
- 37 3 20 36 p1 58 920 1149 1.24891 1149 0 0.00000
- 38 3 21 37 p1 58 966 1207 1.24948 1207 0 0.00000
- 39 3 22 38 p1 58 1012 1265 1.25000 1265 0 0.00000
- 40 3 23 39 p1 58 1058 1323 1.25047 1323 0 0.00000
- 41 3 24 40 p1 58 1104 1381 1.25091 1381 0 0.00000
- 42 3 25 41 p1 58 1150 1439 1.25130 1439 0 0.00000
- 43 3 26 42 p1 58 1196 1497 1.25167 1497 0 0.00000
- 44 3 27 43 p1 58 1242 1555 1.25201 1555 0 0.00000
- 45 3 28 44 p1 58 1288 1613 1.25233 1613 0 0.00000
- 46 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000
- 47 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000
- 48 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000
- 49 1 7 5 p1 58 316 389 1.23101 2857 2468 6.34447
- 50 1 8 49 p1 58 362 447 1.23481 2915 2468 5.52125
- 51 2 13 17 p1 58 594 739 1.24411 2642 1903 2.57510
- 52 2 14 51 p1 58 640 797 1.24531 2700 1903 2.38770
- 53 4 1 -1 base 0 0 0 0.00000 0 0 0.00000
- 54 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !)
- 54 5 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd no-bigendian !)
- 54 5 1 -1 base 376 640 376 0.58750 376 0 0.00000 (zstd bigendian !)
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 -1 -1 1 1 -1 base 46 45 46 1.02222 46 0 0.00000
+ 1 0 -1 1 2 0 p1 57 90 103 1.14444 103 0 0.00000
+ 2 1 -1 1 3 1 p1 57 135 160 1.18519 160 0 0.00000
+ 3 2 -1 1 4 2 p1 57 180 217 1.20556 217 0 0.00000
+ 4 3 -1 1 5 3 p1 57 225 274 1.21778 274 0 0.00000
+ 5 4 -1 1 6 4 p1 57 270 331 1.22593 331 0 0.00000
+ 6 -1 -1 2 1 -1 base 46 45 46 1.02222 46 0 0.00000
+ 7 6 -1 2 2 6 p1 57 90 103 1.14444 103 0 0.00000
+ 8 7 -1 2 3 7 p1 57 135 160 1.18519 160 0 0.00000
+ 9 8 -1 2 4 8 p1 57 180 217 1.20556 217 0 0.00000
+ 10 9 -1 2 5 9 p1 58 226 275 1.21681 275 0 0.00000
+ 11 10 -1 2 6 10 p1 58 272 333 1.22426 333 0 0.00000
+ 12 11 -1 2 7 11 p1 58 318 391 1.22956 391 0 0.00000
+ 13 12 -1 2 8 12 p1 58 364 449 1.23352 449 0 0.00000
+ 14 13 -1 2 9 13 p1 58 410 507 1.23659 507 0 0.00000
+ 15 14 -1 2 10 14 p1 58 456 565 1.23904 565 0 0.00000
+ 16 15 -1 2 11 15 p1 58 502 623 1.24104 623 0 0.00000
+ 17 16 -1 2 12 16 p1 58 548 681 1.24270 681 0 0.00000
+ 18 -1 -1 3 1 -1 base 47 46 47 1.02174 47 0 0.00000
+ 19 18 -1 3 2 18 p1 58 92 105 1.14130 105 0 0.00000
+ 20 19 -1 3 3 19 p1 58 138 163 1.18116 163 0 0.00000
+ 21 20 -1 3 4 20 p1 58 184 221 1.20109 221 0 0.00000
+ 22 21 -1 3 5 21 p1 58 230 279 1.21304 279 0 0.00000
+ 23 22 -1 3 6 22 p1 58 276 337 1.22101 337 0 0.00000
+ 24 23 -1 3 7 23 p1 58 322 395 1.22671 395 0 0.00000
+ 25 24 -1 3 8 24 p1 58 368 453 1.23098 453 0 0.00000
+ 26 25 -1 3 9 25 p1 58 414 511 1.23430 511 0 0.00000
+ 27 26 -1 3 10 26 p1 58 460 569 1.23696 569 0 0.00000
+ 28 27 -1 3 11 27 p1 58 506 627 1.23913 627 0 0.00000
+ 29 28 -1 3 12 28 p1 58 552 685 1.24094 685 0 0.00000
+ 30 29 -1 3 13 29 p1 58 598 743 1.24247 743 0 0.00000
+ 31 30 -1 3 14 30 p1 58 644 801 1.24379 801 0 0.00000
+ 32 31 -1 3 15 31 p1 58 690 859 1.24493 859 0 0.00000
+ 33 32 -1 3 16 32 p1 58 736 917 1.24592 917 0 0.00000
+ 34 33 -1 3 17 33 p1 58 782 975 1.24680 975 0 0.00000
+ 35 34 -1 3 18 34 p1 58 828 1033 1.24758 1033 0 0.00000
+ 36 35 -1 3 19 35 p1 58 874 1091 1.24828 1091 0 0.00000
+ 37 36 -1 3 20 36 p1 58 920 1149 1.24891 1149 0 0.00000
+ 38 37 -1 3 21 37 p1 58 966 1207 1.24948 1207 0 0.00000
+ 39 38 -1 3 22 38 p1 58 1012 1265 1.25000 1265 0 0.00000
+ 40 39 -1 3 23 39 p1 58 1058 1323 1.25047 1323 0 0.00000
+ 41 40 -1 3 24 40 p1 58 1104 1381 1.25091 1381 0 0.00000
+ 42 41 -1 3 25 41 p1 58 1150 1439 1.25130 1439 0 0.00000
+ 43 42 -1 3 26 42 p1 58 1196 1497 1.25167 1497 0 0.00000
+ 44 43 -1 3 27 43 p1 58 1242 1555 1.25201 1555 0 0.00000
+ 45 44 -1 3 28 44 p1 58 1288 1613 1.25233 1613 0 0.00000
+ 46 45 -1 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000
+ 47 46 -1 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000
+ 48 47 -1 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000
+ 49 5 -1 1 7 5 p1 58 316 389 1.23101 2857 2468 6.34447
+ 50 49 -1 1 8 49 p1 58 362 447 1.23481 2915 2468 5.52125
+ 51 17 -1 2 13 17 p1 58 594 739 1.24411 2642 1903 2.57510
+ 52 51 -1 2 14 51 p1 58 640 797 1.24531 2700 1903 2.38770
+ 53 52 -1 4 1 -1 base 0 0 0 0.00000 0 0 0.00000
+ 54 53 -1 5 1 -1 base 3?? 640 3?? 0.5???? 3?? 0 0.00000 (glob)
--- a/tests/test-hardlinks.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hardlinks.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
#require hardlink reporevlogstore
$ cat > nlinks.py <<EOF
- > from __future__ import print_function
> import sys
> from mercurial import pycompat, util
> for f in sorted(sys.stdin.readlines()):
@@ -17,7 +16,6 @@
Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
$ cat > linkcp.py <<EOF
- > from __future__ import absolute_import
> import sys
> from mercurial import pycompat, util
> util.copyfiles(pycompat.fsencode(sys.argv[1]),
--- a/tests/test-hashutil.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hashutil.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# Tests to ensure that sha1dc.sha1 is exactly a drop-in for
# hashlib.sha1 for our needs.
-from __future__ import absolute_import
import hashlib
import unittest
@@ -13,7 +12,7 @@
sha1dc = None
-class hashertestsbase(object):
+class hashertestsbase:
def test_basic_hash(self):
h = self.hasher()
h.update(b'foo')
--- a/tests/test-help.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-help.t Thu Jun 16 15:28:54 2022 +0200
@@ -644,7 +644,8 @@
Note:
'hg diff' may generate unexpected results for merges, as it will
default to comparing against the working directory's first parent
- changeset if no revisions are specified.
+ changeset if no revisions are specified. To diff against the conflict
+ regions, you can use '--config diff.merge=yes'.
By default, the working directory files are compared to its first parent.
To see the differences from another revision, use --from. To see the
@@ -977,9 +978,13 @@
$ hg help debug
debug commands (internal and unsupported):
+ debug-delta-find
+ display the computation to get to a valid delta for storing REV
debug-repair-issue6528
find affected revisions and repair them. See issue6528 for more
details.
+ debug-revlog-index
+ dump index data for a revlog
debugancestor
find the ancestor revision of two revisions in a given index
debugantivirusrunning
@@ -1013,6 +1018,8 @@
dump information about delta chains in a revlog
debugdirstate
show the contents of the current dirstate
+ debugdirstateignorepatternshash
+ show the hash of ignore patterns stored in dirstate if v2,
debugdiscovery
runs the changeset discovery protocol in isolation
debugdownload
@@ -1026,7 +1033,6 @@
retrieves a bundle from a repo
debugignore display the combined ignore pattern and information about
ignored files
- debugindex dump index data for a storage primitive
debugindexdot
dump an index DAG as a graphviz dot file
debugindexstats
@@ -1597,12 +1603,24 @@
"use-dirstate-v2"
+ "use-dirstate-v2.automatic-upgrade-of-mismatching-repositories"
+
+ "use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet"
+
"use-dirstate-tracked-hint"
+ "use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories"
+
+ "use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet"
+
"use-persistent-nodemap"
"use-share-safe"
+ "use-share-safe.automatic-upgrade-of-mismatching-repositories"
+
+ "use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet"
+
"usestore"
"sparse-revlog"
@@ -1790,7 +1808,6 @@
>
> This paragraph is never omitted, too (for extension)
> '''
- > from __future__ import absolute_import
> from mercurial import commands, help
> testtopic = br"""This paragraph is never omitted (for topic).
>
--- a/tests/test-hg-parseurl.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hg-parseurl.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import unittest
from mercurial.utils import urlutil
--- a/tests/test-hgrc.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hgrc.t Thu Jun 16 15:28:54 2022 +0200
@@ -71,7 +71,7 @@
config error at $TESTTMP/hgrc:2: unexpected leading whitespace: x = y
[255]
- $ "$PYTHON" -c "from __future__ import print_function; print('[foo]\nbar = a\n b\n c \n de\n fg \nbaz = bif cb \n')" \
+ $ "$PYTHON" -c "print('[foo]\nbar = a\n b\n c \n de\n fg \nbaz = bif cb \n')" \
> > $HGRC
$ hg showconfig foo
foo.bar=a\nb\nc\nde\nfg
--- a/tests/test-hgweb-auth.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hgweb-auth.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
from mercurial import demandimport
demandimport.enable()
--- a/tests/test-hgweb-no-path-info.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hgweb-no-path-info.t Thu Jun 16 15:28:54 2022 +0200
@@ -15,7 +15,6 @@
summary: test
$ cat > request.py <<EOF
- > from __future__ import absolute_import
> import os
> import sys
> from mercurial import (
--- a/tests/test-hgweb-no-request-uri.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hgweb-no-request-uri.t Thu Jun 16 15:28:54 2022 +0200
@@ -15,7 +15,6 @@
summary: test
$ cat > request.py <<EOF
- > from __future__ import absolute_import
> import os
> import sys
> from mercurial import (
--- a/tests/test-hgweb-non-interactive.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hgweb-non-interactive.t Thu Jun 16 15:28:54 2022 +0200
@@ -7,7 +7,6 @@
$ hg add bar
$ hg commit -m "test"
$ cat > request.py <<EOF
- > from __future__ import absolute_import
> import os
> import sys
> from mercurial import (
--- a/tests/test-hgweb.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hgweb.t Thu Jun 16 15:28:54 2022 +0200
@@ -329,7 +329,7 @@
Test the access/error files are opened in append mode
- $ "$PYTHON" -c "from __future__ import print_function; print(len(open('access.log', 'rb').readlines()), 'log lines written')"
+ $ "$PYTHON" -c "print(len(open('access.log', 'rb').readlines()), 'log lines written')"
14 log lines written
static file
--- a/tests/test-hgwebdir-gc.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hgwebdir-gc.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import os
from mercurial.hgweb import hgwebdir_mod
--- a/tests/test-hgwebdir-paths.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hgwebdir-paths.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import os
from mercurial import (
hg,
--- a/tests/test-hook.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hook.t Thu Jun 16 15:28:54 2022 +0200
@@ -831,7 +831,6 @@
$ cd "$TESTTMP/b"
$ cat > hooktests.py <<EOF
- > from __future__ import print_function
> from mercurial import (
> error,
> pycompat,
@@ -979,7 +978,6 @@
Traceback (most recent call last): (py3 !)
SyntaxError: * (glob) (py3 !)
Traceback (most recent call last):
- ImportError: No module named hgext_syntaxerror (no-py3 !)
ImportError: No module named 'hgext_syntaxerror' (py3 no-py36 !)
ModuleNotFoundError: No module named 'hgext_syntaxerror' (py36 !)
Traceback (most recent call last):
@@ -988,7 +986,6 @@
ImportError: No module named 'hgext_syntaxerror' (py3 no-py36 !)
ModuleNotFoundError: No module named 'hgext_syntaxerror' (py36 !)
Traceback (most recent call last): (py3 !)
- HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed (no-py3 !)
raise error.HookLoadError( (py38 !)
mercurial.error.HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed (py3 !)
abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
@@ -1123,7 +1120,6 @@
$ hg id
loading pre-identify.npmd hook failed:
- abort: No module named repo (no-py3 !)
abort: No module named 'repo' (py3 !)
[255]
@@ -1144,7 +1140,6 @@
$ hg --traceback commit -ma 2>&1 | egrep '^exception|ImportError|ModuleNotFoundError|Traceback|HookLoadError|abort'
exception from first failed import attempt:
Traceback (most recent call last):
- ImportError: No module named somebogusmodule (no-py3 !)
ImportError: No module named 'somebogusmodule' (py3 no-py36 !)
ModuleNotFoundError: No module named 'somebogusmodule' (py36 !)
exception from second failed import attempt:
@@ -1158,11 +1153,9 @@
ImportError: No module named 'somebogusmodule' (py3 no-py36 !)
ModuleNotFoundError: No module named 'somebogusmodule' (py36 !)
Traceback (most recent call last):
- ImportError: No module named hgext_importfail (no-py3 !)
ImportError: No module named 'hgext_importfail' (py3 no-py36 !)
ModuleNotFoundError: No module named 'hgext_importfail' (py36 !)
Traceback (most recent call last):
- HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed (no-py3 !)
raise error.HookLoadError( (py38 !)
mercurial.error.HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed (py3 !)
abort: precommit.importfail hook is invalid: import of "importfail" failed
--- a/tests/test-http-bad-server.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-http-bad-server.t Thu Jun 16 15:28:54 2022 +0200
@@ -134,13 +134,6 @@
sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py3 no-py36 !)
write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(21) -> Content-Length: *\r\n (glob) (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py3 !)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
readline(*) -> (1?) Accept-Encoding* (glob)
read limit reached; closing socket
@@ -183,13 +176,6 @@
sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py3 no-py36 !)
write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(21) -> Content-Length: *\r\n (glob) (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py3 !)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -209,13 +195,6 @@
sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(20) -> Content-Length: 42\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !)
readline(24 from ~) -> (*) GET /?cmd=getbundle HTTP* (glob)
read limit reached; closing socket
readline(~) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
@@ -253,13 +232,6 @@
sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py3 no-py36 !)
write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(21) -> Content-Length: *\r\n (glob) (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py3 !)
readline(~) -> (27) POST /?cmd=batch HTTP/1.1\r\n (glob)
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (41) content-type: application/mercurial-0.1\r\n (glob)
@@ -312,7 +284,6 @@
readline(*) -> (2) \r\n (glob)
sendall(1 from 160) -> (0) H (py36 !)
write(1 from 160) -> (0) H (py3 no-py36 !)
- write(1 from 36) -> (0) H (no-py3 !)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob)
Traceback (most recent call last):
@@ -348,13 +319,6 @@
sendall(20 from *) -> (0) batch branchmap bund (glob) (py36 !)
write(160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py3 no-py36 !)
write(20 from *) -> (0) batch branchmap bund (glob) (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(21) -> Content-Length: *\r\n (glob) (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(20 from *) -> (0) batch branchmap bund (glob) (no-py3 !)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob)
Traceback (most recent call last):
@@ -394,13 +358,6 @@
sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
write(160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py3 no-py36 !)
write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(21) -> Content-Length: *\r\n (glob) (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py3 !)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -412,10 +369,6 @@
readline(*) -> (2) \r\n (glob)
sendall(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (py36 !)
write(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(22 from 41) -> (0) Content-Type: applicat (no-py3 !)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob)
Traceback (most recent call last):
@@ -433,14 +386,10 @@
> -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
-TODO client spews a stack due to uncaught ValueError in batch.results()
-#if no-chg
- $ hg clone http://localhost:$HGPORT/ clone 2> /dev/null
- [1]
-#else
- $ hg clone http://localhost:$HGPORT/ clone 2> /dev/null
+ $ hg clone http://localhost:$HGPORT/ clone
+ abort: unexpected response:
+ '96ee1d7354c4ad7372047672'
[255]
-#endif
$ killdaemons.py $DAEMON_PIDS
@@ -455,13 +404,6 @@
sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py3 no-py36 !)
write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(21) -> Content-Length: *\r\n (glob) (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py3 !)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -475,13 +417,6 @@
sendall(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (py36 !)
write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(20) -> Content-Length: 42\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (no-py3 !)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob)
Traceback (most recent call last):
@@ -522,13 +457,6 @@
sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py3 no-py36 !)
write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(21) -> Content-Length: *\r\n (glob) (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py3 !)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -542,13 +470,6 @@
sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(20) -> Content-Length: 42\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !)
readline(~) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -560,10 +481,6 @@
readline(*) -> (2) \r\n (glob)
sendall(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (py36 !)
write(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(33 from 41) -> (0) Content-Type: application/mercuri (no-py3 !)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
Traceback (most recent call last):
@@ -638,13 +555,6 @@
sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py3 no-py36 !)
write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(21) -> Content-Length: *\r\n (glob) (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py3 !)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -658,13 +568,6 @@
sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(20) -> Content-Length: 42\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !)
readline(~) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -676,12 +579,6 @@
readline(*) -> (2) \r\n (glob)
sendall(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py36 !)
write(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.2\r\n (no-py3 !)
- write(28) -> Transfer-Encoding: chunked\r\n (no-py3 !)
- write(2 from 2) -> (0) \r\n (no-py3 !)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
Traceback (most recent call last):
@@ -718,13 +615,6 @@
sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py3 no-py36 !)
write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(21) -> Content-Length: *\r\n (glob) (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py3 !)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -737,13 +627,6 @@
sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
- write(20) -> Content-Length: 42\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !)
readline(~) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -758,15 +641,6 @@
sendall(9) -> 4\r\nnone\r\n (py36 !)
sendall(9 from 9) -> (0) 4\r\nHG20\r\n (py36 !)
write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !)
- write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
- write(23) -> Server: badhttpserver\r\n (no-py3 !)
- write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
- write(41) -> Content-Type: application/mercurial-0.2\r\n (no-py3 !)
- write(28) -> Transfer-Encoding: chunked\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
- write(6) -> 1\\r\\n\x04\\r\\n (esc) (no-py3 !)
- write(9) -> 4\r\nnone\r\n (no-py3 !)
- write(9 from 9) -> (0) 4\r\nHG20\r\n (no-py3 !)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
Traceback (most recent call last):
@@ -786,7 +660,6 @@
$ hg clone http://localhost:$HGPORT/ clone
requesting all changes
abort: HTTP request error (incomplete response) (py3 !)
- abort: HTTP request error (incomplete response; expected 4 bytes got 3) (no-py3 !)
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
[255]
@@ -808,9 +681,6 @@
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -11
readline(~) -> (2) \r\n (py3 !)
write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
- write(41) -> Content-Type: application/mercurial-0.2\r\n (no-py3 !)
- write(28) -> Transfer-Encoding: chunked\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
write(6) -> 1\\r\\n\x04\\r\\n (esc)
write(9) -> 4\r\nnone\r\n
write(6 from 9) -> (0) 4\r\nHG2
@@ -834,7 +704,6 @@
$ hg clone http://localhost:$HGPORT/ clone
requesting all changes
abort: HTTP request error (incomplete response) (py3 !)
- abort: HTTP request error (incomplete response; expected 4 bytes got 3) (no-py3 !)
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
[255]
@@ -858,8 +727,6 @@
readline(~) -> (2) \r\n (py3 !)
write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(28) -> Transfer-Encoding: chunked\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
write(6) -> 1\\r\\n\x04\\r\\n (esc)
write(9) -> 4\r\nnone\r\n
write(9) -> 4\r\nHG20\r\n
@@ -907,8 +774,6 @@
readline(~) -> (2) \r\n (py3 !)
write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(28) -> Transfer-Encoding: chunked\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
write(6) -> 1\\r\\n\x04\\r\\n (esc)
write(9) -> 4\r\nnone\r\n
write(9) -> 4\r\nHG20\r\n
@@ -958,8 +823,6 @@
readline(~) -> (2) \r\n (py3 !)
write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(28) -> Transfer-Encoding: chunked\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
write(6) -> 1\\r\\n\x04\\r\\n (esc)
write(9) -> 4\r\nnone\r\n
write(9) -> 4\r\nHG20\r\n
@@ -1013,8 +876,6 @@
readline(~) -> (2) \r\n (py3 !)
write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(28) -> Transfer-Encoding: chunked\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
write(6) -> 1\\r\\n\x04\\r\\n (esc)
write(9) -> 4\r\nnone\r\n
write(9) -> 4\r\nHG20\r\n
@@ -1044,7 +905,6 @@
transaction abort!
rollback completed
abort: HTTP request error (incomplete response) (py3 !)
- abort: HTTP request error (incomplete response; expected 466 bytes got 7) (no-py3 !)
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
[255]
@@ -1071,7 +931,6 @@
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -15
write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
write(28) -> Transfer-Encoding: chunked\r\n
- write(2) -> \r\n (no-py3 !)
write(6) -> 1\\r\\n\x04\\r\\n (esc)
write(9) -> 4\r\nnone\r\n
write(9) -> 4\r\nHG20\r\n
@@ -1130,8 +989,6 @@
readline(~) -> (2) \r\n (py3 !)
write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(28) -> Transfer-Encoding: chunked\r\n (no-py3 !)
- write(2) -> \r\n (no-py3 !)
write(6) -> 1\\r\\n\x04\\r\\n (esc)
write(9) -> 4\r\nnone\r\n
write(9) -> 4\r\nHG20\r\n
@@ -1165,7 +1022,6 @@
transaction abort!
rollback completed
abort: HTTP request error (incomplete response) (py3 !)
- abort: HTTP request error (incomplete response; expected 32 bytes got 9) (no-py3 !)
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
[255]
--- a/tests/test-https.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-https.t Thu Jun 16 15:28:54 2022 +0200
@@ -361,9 +361,9 @@
Clients talking same TLS versions work
- $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.0 id https://localhost:$HGPORT/
+ $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.0 --config hostsecurity.ciphers=DEFAULT id https://localhost:$HGPORT/
5fed3813f7f5
- $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.1 id https://localhost:$HGPORT1/
+ $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.1 --config hostsecurity.ciphers=DEFAULT id https://localhost:$HGPORT1/
5fed3813f7f5
$ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT2/
5fed3813f7f5
@@ -374,26 +374,26 @@
(could not negotiate a common security protocol (tls1.1+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: .*(unsupported protocol|wrong ssl version).* (re)
+ abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
[100]
$ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.1 id https://localhost:$HGPORT/
(could not negotiate a common security protocol (tls1.1+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: .*(unsupported protocol|wrong ssl version).* (re)
+ abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
[100]
$ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT/
(could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: .*(unsupported protocol|wrong ssl version).* (re)
+ abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
[100]
$ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT1/
(could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: .*(unsupported protocol|wrong ssl version).* (re)
+ abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
[100]
--insecure will allow TLS 1.0 connections and override configs
@@ -405,6 +405,7 @@
The per-host config option overrides the default
$ P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
+ > --config hostsecurity.ciphers=DEFAULT \
> --config hostsecurity.minimumprotocol=tls1.2 \
> --config hostsecurity.localhost:minimumprotocol=tls1.0
5fed3813f7f5
@@ -416,7 +417,7 @@
(could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: .*(unsupported protocol|wrong ssl version).* (re)
+ abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
[100]
.hg/hgrc file [hostsecurity] settings are applied to remote ui instances (issue5305)
@@ -429,7 +430,7 @@
(could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
(consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
(see https://mercurial-scm.org/wiki/SecureConnections for more info)
- abort: error: .*(unsupported protocol|wrong ssl version).* (re)
+ abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
[100]
$ killdaemons.py hg0.pid
@@ -524,7 +525,7 @@
without client certificate:
$ P="$CERTSDIR" hg id https://localhost:$HGPORT/
- abort: error: .*(\$ECONNRESET\$|certificate required|handshake failure).* (re)
+ abort: error: .*(\$ECONNRESET\$|certificate required|handshake failure|EOF occurred).* (re)
[100]
with client certificate:
--- a/tests/test-hybridencode.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-hybridencode.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import unittest
from mercurial import store
--- a/tests/test-impexp-branch.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-impexp-branch.t Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
$ echo 'strip =' >> $HGRCPATH
$ cat >findbranch.py <<EOF
- > from __future__ import absolute_import
> import re
> import sys
>
--- a/tests/test-import.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-import.t Thu Jun 16 15:28:54 2022 +0200
@@ -71,7 +71,6 @@
regardless of the commit message in the patch)
$ cat > dummypatch.py <<EOF
- > from __future__ import print_function
> print('patching file a')
> open('a', 'wb').write(b'line2\n')
> EOF
--- a/tests/test-imports-checker.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-imports-checker.t Thu Jun 16 15:28:54 2022 +0200
@@ -16,74 +16,61 @@
$ touch testpackage/__init__.py
$ cat > testpackage/multiple.py << EOF
- > from __future__ import absolute_import
> import os, sys
> EOF
$ cat > testpackage/unsorted.py << EOF
- > from __future__ import absolute_import
> import sys
> import os
> EOF
$ cat > testpackage/stdafterlocal.py << EOF
- > from __future__ import absolute_import
> from . import unsorted
> import os
> EOF
$ cat > testpackage/requirerelative.py << EOF
- > from __future__ import absolute_import
> import testpackage.unsorted
> EOF
$ cat > testpackage/importalias.py << EOF
- > from __future__ import absolute_import
> import ui
> EOF
$ cat > testpackage/relativestdlib.py << EOF
- > from __future__ import absolute_import
> from .. import os
> EOF
$ cat > testpackage/stdlibfrom.py << EOF
- > from __future__ import absolute_import
> from collections import abc
> EOF
$ cat > testpackage/symbolimport.py << EOF
- > from __future__ import absolute_import
> from .unsorted import foo
> EOF
$ cat > testpackage/latesymbolimport.py << EOF
- > from __future__ import absolute_import
> from . import unsorted
> from mercurial.node import hex
> EOF
$ cat > testpackage/multiplegroups.py << EOF
- > from __future__ import absolute_import
> from . import unsorted
> from . import more
> EOF
$ mkdir testpackage/subpackage
$ cat > testpackage/subpackage/levelpriority.py << EOF
- > from __future__ import absolute_import
> from . import foo
> from .. import parent
> EOF
$ touch testpackage/subpackage/foo.py
$ cat > testpackage/subpackage/__init__.py << EOF
- > from __future__ import absolute_import
> from . import levelpriority # should not cause cycle
> EOF
$ cat > testpackage/subpackage/localimport.py << EOF
- > from __future__ import absolute_import
> from . import foo
> def bar():
> # should not cause "higher-level import should come first"
@@ -94,17 +81,14 @@
> EOF
$ cat > testpackage/importmodulefromsub.py << EOF
- > from __future__ import absolute_import
> from .subpackage import foo # not a "direct symbol import"
> EOF
$ cat > testpackage/importsymbolfromsub.py << EOF
- > from __future__ import absolute_import
> from .subpackage import foo, nonmodule
> EOF
$ cat > testpackage/sortedentries.py << EOF
- > from __future__ import absolute_import
> from . import (
> foo,
> bar,
@@ -112,12 +96,10 @@
> EOF
$ cat > testpackage/importfromalias.py << EOF
- > from __future__ import absolute_import
> from . import ui
> EOF
$ cat > testpackage/importfromrelative.py << EOF
- > from __future__ import absolute_import
> from testpackage.unsorted import foo
> EOF
@@ -125,7 +107,6 @@
$ touch testpackage2/__init__.py
$ cat > testpackage2/latesymbolimport.py << EOF
- > from __future__ import absolute_import
> from testpackage import unsorted
> from mercurial.node import hex
> EOF
@@ -137,29 +118,28 @@
$ touch email/__init__.py
$ touch email/errors.py
$ cat > email/utils.py << EOF
- > from __future__ import absolute_import
> from . import errors
> EOF
$ "$PYTHON" "$import_checker" testpackage*/*.py testpackage/subpackage/*.py \
> email/*.py
- testpackage/importalias.py:2: ui module must be "as" aliased to uimod
- testpackage/importfromalias.py:2: ui from testpackage must be "as" aliased to uimod
- testpackage/importfromrelative.py:2: import should be relative: testpackage.unsorted
- testpackage/importfromrelative.py:2: direct symbol import foo from testpackage.unsorted
- testpackage/importsymbolfromsub.py:2: direct symbol import nonmodule from testpackage.subpackage
- testpackage/latesymbolimport.py:3: symbol import follows non-symbol import: mercurial.node
- testpackage/multiple.py:2: multiple imported names: os, sys
- testpackage/multiplegroups.py:3: multiple "from . import" statements
- testpackage/relativestdlib.py:2: relative import of stdlib module
- testpackage/requirerelative.py:2: import should be relative: testpackage.unsorted
- testpackage/sortedentries.py:2: imports from testpackage not lexically sorted: bar < foo
- testpackage/stdafterlocal.py:3: stdlib import "os" follows local import: testpackage
- testpackage/stdlibfrom.py:2: direct symbol import abc from collections
- testpackage/subpackage/levelpriority.py:3: higher-level import should come first: testpackage
- testpackage/subpackage/localimport.py:7: multiple "from .. import" statements
- testpackage/subpackage/localimport.py:8: import should be relative: testpackage.subpackage.levelpriority
- testpackage/symbolimport.py:2: direct symbol import foo from testpackage.unsorted
- testpackage/unsorted.py:3: imports not lexically sorted: os < sys
- testpackage2/latesymbolimport.py:3: symbol import follows non-symbol import: mercurial.node
+ testpackage/importalias.py:1: ui module must be "as" aliased to uimod
+ testpackage/importfromalias.py:1: ui from testpackage must be "as" aliased to uimod
+ testpackage/importfromrelative.py:1: import should be relative: testpackage.unsorted
+ testpackage/importfromrelative.py:1: direct symbol import foo from testpackage.unsorted
+ testpackage/importsymbolfromsub.py:1: direct symbol import nonmodule from testpackage.subpackage
+ testpackage/latesymbolimport.py:2: symbol import follows non-symbol import: mercurial.node
+ testpackage/multiple.py:1: multiple imported names: os, sys
+ testpackage/multiplegroups.py:2: multiple "from . import" statements
+ testpackage/relativestdlib.py:1: relative import of stdlib module
+ testpackage/requirerelative.py:1: import should be relative: testpackage.unsorted
+ testpackage/sortedentries.py:1: imports from testpackage not lexically sorted: bar < foo
+ testpackage/stdafterlocal.py:2: stdlib import "os" follows local import: testpackage
+ testpackage/stdlibfrom.py:1: direct symbol import abc from collections
+ testpackage/subpackage/levelpriority.py:2: higher-level import should come first: testpackage
+ testpackage/subpackage/localimport.py:6: multiple "from .. import" statements
+ testpackage/subpackage/localimport.py:7: import should be relative: testpackage.subpackage.levelpriority
+ testpackage/symbolimport.py:1: direct symbol import foo from testpackage.unsorted
+ testpackage/unsorted.py:2: imports not lexically sorted: os < sys
+ testpackage2/latesymbolimport.py:2: symbol import follows non-symbol import: mercurial.node
[1]
--- a/tests/test-inherit-mode.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-inherit-mode.t Thu Jun 16 15:28:54 2022 +0200
@@ -10,7 +10,6 @@
$ cd dir
$ cat >printmodes.py <<EOF
- > from __future__ import absolute_import, print_function
> import os
> import sys
>
@@ -31,7 +30,6 @@
> EOF
$ cat >mode.py <<EOF
- > from __future__ import absolute_import, print_function
> import os
> import sys
> print('%05o' % os.lstat(sys.argv[1]).st_mode)
--- a/tests/test-install.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-install.t Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
checking encoding (ascii)...
checking Python executable (*) (glob)
checking Python implementation (*) (glob)
- checking Python version (2.*) (glob) (no-py3 !)
checking Python version (3.*) (glob) (py3 !)
checking Python lib (.*[Ll]ib.*)... (re) (no-pyoxidizer !)
checking Python lib (.*pyoxidizer.*)... (re) (pyoxidizer !)
@@ -67,7 +66,6 @@
checking encoding (ascii)...
checking Python executable (*) (glob)
checking Python implementation (*) (glob)
- checking Python version (2.*) (glob) (no-py3 !)
checking Python version (3.*) (glob) (py3 !)
checking Python lib (.*[Ll]ib.*)... (re) (no-pyoxidizer !)
checking Python lib (.*pyoxidizer.*)... (re) (pyoxidizer !)
@@ -117,7 +115,6 @@
checking encoding (ascii)...
checking Python executable (*) (glob)
checking Python implementation (*) (glob)
- checking Python version (2.*) (glob) (no-py3 !)
checking Python version (3.*) (glob) (py3 !)
checking Python lib (.*[Ll]ib.*)... (re) (no-pyoxidizer !)
checking Python lib (.*pyoxidizer.*)... (re) (pyoxidizer !)
@@ -147,7 +144,6 @@
checking encoding (ascii)...
checking Python executable (*) (glob)
checking Python implementation (*) (glob)
- checking Python version (2.*) (glob) (no-py3 !)
checking Python version (3.*) (glob) (py3 !)
checking Python lib (.*[Ll]ib.*)... (re) (no-pyoxidizer !)
checking Python lib (.*pyoxidizer.*)... (re) (pyoxidizer !)
--- a/tests/test-issue522.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-issue522.t Thu Jun 16 15:28:54 2022 +0200
@@ -45,7 +45,7 @@
c6fc755d7e68f49f880599da29f15add41f42f5a 644 foo
$ hg debugindex foo
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 2ed2a3912a0b 000000000000 000000000000
1 1 6f4310b00b9a 2ed2a3912a0b 000000000000
2 2 c6fc755d7e68 6f4310b00b9a 000000000000
--- a/tests/test-issue660.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-issue660.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,19 @@
+#testcases dirstate-v1 dirstate-v2
+
+#if dirstate-v2
+ $ cat >> $HGRCPATH << EOF
+ > [format]
+ > use-dirstate-v2=1
+ > [storage]
+ > dirstate-v2.slow-path=allow
+ > EOF
+#endif
+
https://bz.mercurial-scm.org/660 and:
https://bz.mercurial-scm.org/322
- $ hg init
+ $ hg init repo
+ $ cd repo
$ echo a > a
$ mkdir b
$ echo b > b/b
--- a/tests/test-keyword.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-keyword.t Thu Jun 16 15:28:54 2022 +0200
@@ -1412,7 +1412,6 @@
$ grep -v '^promptecho ' < $HGRCPATH >> $HGRCPATH.new
$ mv $HGRCPATH.new $HGRCPATH
- >>> from __future__ import print_function
>>> from hgclient import check, readchannel, runcommand
>>> @check
... def check(server):
--- a/tests/test-largefiles-cache.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-largefiles-cache.t Thu Jun 16 15:28:54 2022 +0200
@@ -96,7 +96,6 @@
$ cat > ls-l.py <<EOF
> #!$PYTHON
- > from __future__ import absolute_import, print_function
> import os
> import sys
> path = sys.argv[1]
--- a/tests/test-largefiles-small-disk.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-largefiles-small-disk.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
Test how largefiles abort in case the disk runs full
$ cat > criple.py <<EOF
- > from __future__ import absolute_import
> import errno
> import os
> import shutil
--- a/tests/test-lfs-pointer.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-lfs-pointer.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
# Import something from Mercurial, so the module loader gets initialized.
from mercurial import pycompat
--- a/tests/test-lfs-serve-access.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-lfs-serve-access.t Thu Jun 16 15:28:54 2022 +0200
@@ -355,7 +355,6 @@
$LOCALIP - - [$ERRDATE$] HG error: super(badstore, self).download(oid, src, contentlength)
$LOCALIP - - [$ERRDATE$] HG error: raise LfsCorruptionError( (glob) (py38 !)
$LOCALIP - - [$ERRDATE$] HG error: _(b'corrupt remote lfs object: %s') % oid (glob) (no-py38 !)
- $LOCALIP - - [$ERRDATE$] HG error: LfsCorruptionError: corrupt remote lfs object: b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c (no-py3 !)
$LOCALIP - - [$ERRDATE$] HG error: hgext.lfs.blobstore.LfsCorruptionError: corrupt remote lfs object: b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c (py3 !)
$LOCALIP - - [$ERRDATE$] HG error: (glob)
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob)
@@ -367,7 +366,6 @@
handled = wireprotoserver.handlewsgirequest( (py38 !)
return _processbasictransfer( (py38 !)
rctx, req, res, self.check_perm (no-py38 !)
- return func(*(args + a), **kw) (no-py3 !)
rctx.repo, req, res, lambda perm: checkperm(rctx, req, perm) (no-py38 !)
res.setbodybytes(localstore.read(oid))
blob = self._read(self.vfs, oid, verify)
@@ -381,7 +379,6 @@
$LOCALIP - - [$ERRDATE$] HG error: blobstore._verify(oid, b'dummy content') (glob)
$LOCALIP - - [$ERRDATE$] HG error: raise LfsCorruptionError( (glob) (py38 !)
$LOCALIP - - [$ERRDATE$] HG error: hint=_(b'run hg verify'), (glob) (no-py38 !)
- $LOCALIP - - [$ERRDATE$] HG error: LfsCorruptionError: detected corrupt lfs object: 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d (no-py3 !)
$LOCALIP - - [$ERRDATE$] HG error: hgext.lfs.blobstore.LfsCorruptionError: detected corrupt lfs object: 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d (py3 !)
$LOCALIP - - [$ERRDATE$] HG error: (glob)
--- a/tests/test-lfs-serve.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-lfs-serve.t Thu Jun 16 15:28:54 2022 +0200
@@ -107,7 +107,6 @@
$ cd client
$ echo 'non-lfs' > nonlfs.txt
- >>> from __future__ import absolute_import
>>> from hgclient import check, readchannel, runcommand
>>> @check
... def diff(server):
@@ -240,7 +239,6 @@
$ cd ../cmdserve_client3
- >>> from __future__ import absolute_import
>>> from hgclient import check, readchannel, runcommand
>>> @check
... def addrequirement(server):
@@ -355,7 +353,6 @@
$ mv $HGRCPATH $HGRCPATH.tmp
$ cp $HGRCPATH.orig $HGRCPATH
- >>> from __future__ import absolute_import
>>> from hgclient import bprint, check, readchannel, runcommand, stdout
>>> @check
... def checkflags(server):
@@ -404,7 +401,6 @@
> lfs = !
> EOF
- >>> from __future__ import absolute_import, print_function
>>> from hgclient import bprint, check, readchannel, runcommand, stdout
>>> @check
... def checkflags2(server):
--- a/tests/test-linelog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-linelog.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import difflib
import random
import unittest
--- a/tests/test-linerange.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-linerange.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import unittest
from mercurial import error, mdiff
from mercurial.utils import stringutil
--- a/tests/test-lock.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-lock.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import copy
import errno
import tempfile
@@ -37,7 +35,7 @@
return super(lockwrapper, self)._getpid() + self._pidoffset
-class teststate(object):
+class teststate:
def __init__(self, testcase, dir, pidoffset=0):
self._testcase = testcase
self._acquirecalled = False
--- a/tests/test-log-exthook.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-log-exthook.t Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
-------------------------------------------
$ cat > $TESTTMP/logexthook.py <<EOF
- > from __future__ import absolute_import
> import codecs
> from mercurial import (
> commands,
--- a/tests/test-log.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-log.t Thu Jun 16 15:28:54 2022 +0200
@@ -2451,7 +2451,6 @@
$ cat > ../names.py <<EOF
> """A small extension to test adding arbitrary names to a repo"""
- > from __future__ import absolute_import
> from mercurial import namespaces
>
> def reposetup(ui, repo):
--- a/tests/test-logtoprocess.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-logtoprocess.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
$ hg init
$ cat > $TESTTMP/foocommand.py << EOF
- > from __future__ import absolute_import
> from mercurial import registrar
> cmdtable = {}
> command = registrar.command(cmdtable)
--- a/tests/test-lrucachedict.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-lrucachedict.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import unittest
import silenttestrunner
--- a/tests/test-mac-packages.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-mac-packages.t Thu Jun 16 15:28:54 2022 +0200
@@ -39,8 +39,8 @@
./Library/Python/2.7/site-packages/mercurial/pure/bdiff.pyo 100644 0/0
$ grep zsh/site-functions/_hg boms.txt | cut -d ' ' -f 1,2,3
./usr/local/share/zsh/site-functions/_hg 100644 0/0
- $ grep hg-completion.bash boms.txt | cut -d ' ' -f 1,2,3
- ./usr/local/hg/contrib/hg-completion.bash 100644 0/0
+ $ grep bash-completion/completions/hg boms.txt | cut -d ' ' -f 1,2,3
+ ./usr/local/share/bash-completion-completions/hg 100644 0/0
$ egrep 'man[15]' boms.txt | cut -d ' ' -f 1,2,3
./usr/local/share/man/man1 40755 0/0
./usr/local/share/man/man1/chg.1 100644 0/0
--- a/tests/test-manifest.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-manifest.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import binascii
import itertools
import silenttestrunner
@@ -60,15 +58,11 @@
HUGE_MANIFEST_ENTRIES = 200001
-izip = getattr(itertools, 'izip', zip)
-if 'xrange' not in globals():
- xrange = range
-
A_HUGE_MANIFEST = b''.join(
sorted(
b'file%d\0%s%s\n' % (i, h, f)
- for i, h, f in izip(
- xrange(200001),
+ for i, h, f in zip(
+ range(200001),
itertools.cycle((HASH_1, HASH_2)),
itertools.cycle((b'', b'x', b'l')),
)
@@ -76,7 +70,7 @@
)
-class basemanifesttests(object):
+class basemanifesttests:
def parsemanifest(self, text):
raise NotImplementedError('parsemanifest not implemented by test case')
--- a/tests/test-match.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-match.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import unittest
import silenttestrunner
--- a/tests/test-mdiff.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-mdiff.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-
import unittest
from mercurial import mdiff
--- a/tests/test-merge-commit.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-merge-commit.t Thu Jun 16 15:28:54 2022 +0200
@@ -35,7 +35,7 @@
$ hg ci -m '3: merge with local rename'
$ hg debugindex bar
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 2 d35118874825 000000000000 000000000000
1 3 5345f5ab8abd 000000000000 d35118874825
@@ -43,7 +43,7 @@
bar renamed from foo:9e25c27b87571a1edee5ae4dddee5687746cc8e2
$ hg debugindex foo
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 690b295714ae 000000000000 000000000000
1 1 9e25c27b8757 690b295714ae 000000000000
@@ -87,7 +87,7 @@
$ hg ci -m '5: merge'
$ hg debugindex bar
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 2 d35118874825 000000000000 000000000000
1 3 5345f5ab8abd 000000000000 d35118874825
2 4 ff4b45017382 d35118874825 000000000000
@@ -122,7 +122,7 @@
$ hg ci -m '3: merge with remote rename'
$ hg debugindex bar
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 2 d35118874825 000000000000 000000000000
1 3 5345f5ab8abd 000000000000 d35118874825
@@ -130,7 +130,7 @@
bar renamed from foo:9e25c27b87571a1edee5ae4dddee5687746cc8e2
$ hg debugindex foo
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 690b295714ae 000000000000 000000000000
1 1 9e25c27b8757 690b295714ae 000000000000
@@ -174,7 +174,7 @@
$ hg ci -m '5: merge'
$ hg debugindex bar
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 2 d35118874825 000000000000 000000000000
1 3 5345f5ab8abd 000000000000 d35118874825
2 4 ff4b45017382 d35118874825 000000000000
--- a/tests/test-merge-halt.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-merge-halt.t Thu Jun 16 15:28:54 2022 +0200
@@ -210,6 +210,6 @@
merge halted after failed merge (see hg resolve)
[240]
$ hg shelve --list
- default (* ago) changes to: foo (glob)
+ default (*s ago) * changes to: foo (glob)
$ hg unshelve --abort
unshelve of 'default' aborted
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-partial-tool.t Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,292 @@
+Test support for partial-resolution tools
+
+Create a tool that resolves conflicts after line 5 by simply dropping those
+lines (even if there are no conflicts there)
+ $ cat >> "$TESTTMP/head.sh" <<'EOF'
+ > #!/bin/sh
+ > for f in "$@"; do
+ > head -5 $f > tmp
+ > mv -f tmp $f
+ > done
+ > EOF
+ $ chmod +x "$TESTTMP/head.sh"
+...and another tool that keeps only the last 5 lines instead of the first 5.
+ $ cat >> "$TESTTMP/tail.sh" <<'EOF'
+ > #!/bin/sh
+ > for f in "$@"; do
+ > tail -5 $f > tmp
+ > mv -f tmp $f
+ > done
+ > EOF
+ $ chmod +x "$TESTTMP/tail.sh"
+
+Set up both tools to run on all patterns (the default), and let the `tail` tool
+run after the `head` tool, which means it will have no effect (we'll override it
+to test order later)
+ $ cat >> "$HGRCPATH" <<EOF
+ > [partial-merge-tools]
+ > head.executable=$TESTTMP/head.sh
+ > tail.executable=$TESTTMP/tail.sh
+ > tail.order=1
+ > EOF
+
+ $ make_commit() {
+ > echo "$@" | xargs -n1 > file
+ > hg add file 2> /dev/null
+ > hg ci -m "$*"
+ > }
+
+
+Let a partial-resolution tool resolve some conflicts and leave other conflicts
+for the regular merge tool (:merge3 here)
+
+ $ hg init repo
+ $ cd repo
+ $ make_commit a b c d e f
+ $ make_commit a b2 c d e f2
+ $ hg up 0
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ make_commit a b3 c d e f3
+ created new head
+ $ hg merge 1 -t :merge3
+ merging file
+ warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat file
+ a
+ <<<<<<< working copy: e11a49d4b620 - test: a b3 c d e f3
+ b3
+ ||||||| common ancestor: 8ae8bb9cc43a - test: a b c d e f
+ b
+ =======
+ b2
+ >>>>>>> merge rev: fbc096a40cc5 - test: a b2 c d e f2
+ c
+ d
+ e
+
+
+With premerge=keep, the partial-resolution tools runs before and doesn't see
+the conflict markers
+
+ $ hg up -C 2
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cat >> .hg/hgrc <<EOF
+ > [merge-tools]
+ > my-local.executable = cat
+ > my-local.args = $local
+ > my-local.premerge = keep-merge3
+ > EOF
+ $ hg merge 1 -t my-local
+ merging file
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ cat file
+ a
+ <<<<<<< working copy: e11a49d4b620 - test: a b3 c d e f3
+ b3
+ ||||||| common ancestor: 8ae8bb9cc43a - test: a b c d e f
+ b
+ =======
+ b2
+ >>>>>>> merge rev: fbc096a40cc5 - test: a b2 c d e f2
+ c
+ d
+ e
+
+
+When a partial-resolution tool resolves all conflicts, the resolution should
+be recorded and the regular merge tool should not be invoked for the file.
+
+ $ hg up -C 0
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ make_commit a b c d e f2
+ created new head
+ $ hg up 0
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ make_commit a b c d e f3
+ created new head
+ $ hg merge 3 -t false
+ merging file
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ cat file
+ a
+ b
+ c
+ d
+ e
+
+
+Can disable all partial merge tools (the `head` tool would have resolved this
+conflict it had been enabled)
+
+ $ hg up -C 4
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 3 -t :merge3 --config merge.disable-partial-tools=yes
+ merging file
+ warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat file
+ a
+ b
+ c
+ d
+ e
+ <<<<<<< working copy: d57edaa6e21a - test: a b c d e f3
+ f3
+ ||||||| common ancestor: 8ae8bb9cc43a - test: a b c d e f
+ f
+ =======
+ f2
+ >>>>>>> merge rev: 8c217da987be - test: a b c d e f2
+
+
+Can disable one partial merge tool (the `head` tool would have resolved this
+conflict it had been enabled)
+
+ $ hg up -C 4
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 3 -t :merge3 --config partial-merge-tools.head.disable=yes
+ merging file
+ warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat file
+ b
+ c
+ d
+ e
+ <<<<<<< working copy: d57edaa6e21a - test: a b c d e f3
+ f3
+ ||||||| common ancestor: 8ae8bb9cc43a - test: a b c d e f
+ f
+ =======
+ f2
+ >>>>>>> merge rev: 8c217da987be - test: a b c d e f2
+
+
+Only tools whose patterns match are run. We make `head` not match here, so
+only `tail` should run
+
+ $ hg up -C 4
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 3 -t :merge3 --config partial-merge-tools.head.patterns=other
+ merging file
+ warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat file
+ b
+ c
+ d
+ e
+ <<<<<<< working copy: d57edaa6e21a - test: a b c d e f3
+ f3
+ ||||||| common ancestor: 8ae8bb9cc43a - test: a b c d e f
+ f
+ =======
+ f2
+ >>>>>>> merge rev: 8c217da987be - test: a b c d e f2
+
+
+If there are several matching tools, they are run in requested order. We move
+`head` after `tail` in order here so it has no effect (the conflict in "f" thus
+remains).
+
+ $ hg up -C 4
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 3 -t :merge3 --config partial-merge-tools.head.order=2
+ merging file
+ warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat file
+ b
+ c
+ d
+ e
+ <<<<<<< working copy: d57edaa6e21a - test: a b c d e f3
+ f3
+ ||||||| common ancestor: 8ae8bb9cc43a - test: a b c d e f
+ f
+ =======
+ f2
+ >>>>>>> merge rev: 8c217da987be - test: a b c d e f2
+
+
+When using "nomerge" tools (e.g. `:other`), the partial-resolution tools
+should not be run.
+
+ $ hg up -C 4
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 3 -t :other
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ cat file
+ a
+ b
+ c
+ d
+ e
+ f2
+
+
+If a partial-resolution tool resolved some conflict and simplemerge can
+merge the rest, then the regular merge tool should not be used. Here we merge
+"a b c d e3 f3" with "a b2 c d e f2". The `head` tool resolves the conflict in
+"f" and the internal simplemerge merges the remaining changes in "b" and "e".
+
+ $ hg up -C 0
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ make_commit a b c d e3 f3
+ created new head
+ $ hg merge 1 -t false
+ merging file
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ cat file
+ a
+ b2
+ c
+ d
+ e3
+
+Test that arguments get passed as expected.
+
+ $ cat >> "$TESTTMP/log-args.sh" <<'EOF'
+ > #!/bin/sh
+ > echo "$@" > args.log
+ > EOF
+ $ chmod +x "$TESTTMP/log-args.sh"
+ $ cat >> "$HGRCPATH" <<EOF
+ > [partial-merge-tools]
+ > log-args.executable=$TESTTMP/log-args.sh
+ > EOF
+ $ hg up -C 2
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 1
+ merging file
+ warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat args.log
+ */hgmerge-*/file~local */hgmerge-*/file~base */hgmerge-*/file~other (glob)
+ $ hg up -C 2
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge 1 --config partial-merge-tools.log-args.args='--other $other $base --foo --local $local --also-other $other'
+ merging file
+ warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ cat args.log
+ --other */hgmerge-*/file~other */hgmerge-*/file~base --foo --local */hgmerge-*/file~local --also-other */hgmerge-*/file~other (glob)
--- a/tests/test-merge-symlinks.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-merge-symlinks.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
$ cat > echo.py <<EOF
> #!$PYTHON
- > from __future__ import absolute_import, print_function
> import os
> import sys
> try:
--- a/tests/test-merge1.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-merge1.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
$ cat <<EOF > merge
- > from __future__ import print_function
> import sys, os
>
> try:
@@ -354,7 +353,6 @@
trigger it. If you see flakyness here, there is a race.
$ cat > $TESTTMP/abort.py <<EOF
- > from __future__ import absolute_import
> # emulate aborting before "recordupdates()". in this case, files
> # are changed without updating dirstate
> from mercurial import (
--- a/tests/test-merge7.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-merge7.t Thu Jun 16 15:28:54 2022 +0200
@@ -105,7 +105,7 @@
three
$ hg debugindex test.txt
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 01365c4cca56 000000000000 000000000000
1 1 7b013192566a 01365c4cca56 000000000000
2 2 8fe46a3eb557 01365c4cca56 000000000000
--- a/tests/test-minifileset.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-minifileset.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-
from mercurial import minifileset
--- a/tests/test-minirst.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-minirst.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
from mercurial import minirst
from mercurial.utils import stringutil
--- a/tests/test-mq-missingfiles.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-mq-missingfiles.t Thu Jun 16 15:28:54 2022 +0200
@@ -5,10 +5,7 @@
$ cat > writelines.py <<EOF
> import sys
- > if sys.version_info[0] >= 3:
- > encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8')
- > else:
- > encode = lambda x: x.decode('string_escape')
+ > encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8')
> path = sys.argv[1]
> args = sys.argv[2:]
> assert (len(args) % 2) == 0
--- a/tests/test-mq-qimport.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-mq-qimport.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,9 +1,6 @@
$ cat > writelines.py <<EOF
> import sys
- > if sys.version_info[0] >= 3:
- > encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8')
- > else:
- > encode = lambda x: x.decode('string_escape')
+ > encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8')
> path = sys.argv[1]
> args = sys.argv[2:]
> assert (len(args) % 2) == 0
--- a/tests/test-narrow-clone-non-narrow-server.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-narrow-clone-non-narrow-server.t Thu Jun 16 15:28:54 2022 +0200
@@ -20,7 +20,6 @@
Verify that narrow is advertised in the bundle2 capabilities:
$ cat >> unquote.py <<EOF
- > from __future__ import print_function
> import sys
> if sys.version[0] == '3':
> import urllib.parse as up
--- a/tests/test-narrow-debugcommands.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-narrow-debugcommands.t Thu Jun 16 15:28:54 2022 +0200
@@ -16,19 +16,19 @@
adding foo/bar/f
adding foo/f
$ hg debugindex -m
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 14a5d056d75a 000000000000 000000000000
$ hg debugindex --dir foo
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 e635c7857aef 000000000000 000000000000
$ hg debugindex --dir foo/
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 e635c7857aef 000000000000 000000000000
$ hg debugindex --dir foo/bar
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 e091d4224761 000000000000 000000000000
$ hg debugindex --dir foo/bar/
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 e091d4224761 000000000000 000000000000
$ hg debugdata -m 0
foo\x00e635c7857aef92ac761ce5741a99da159abbbb24t (esc)
--- a/tests/test-narrow-shallow-merges.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-narrow-shallow-merges.t Thu Jun 16 15:28:54 2022 +0200
@@ -179,7 +179,7 @@
$ hg log -T '{if(ellipsis,"...")}{node|short} {p1node|short} {p2node|short} {desc}\n' | sort
- ...2a20009de83e 000000000000 3ac1f5779de3 outside 10
+ ...2a20009de83e 3ac1f5779de3 000000000000 outside 10
...3ac1f5779de3 bb96a08b062a 465567bdfb2d merge a/b/c/d 9
...8d874d57adea 7ef88b4dd4fa 000000000000 outside 12
...b844052e7b3b 000000000000 000000000000 outside 2c
--- a/tests/test-narrow-update.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-narrow-update.t Thu Jun 16 15:28:54 2022 +0200
@@ -33,7 +33,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd narrow
$ hg debugindex -c
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 9958b1af2add 000000000000 000000000000
1 1 2db4ce2a3bfe 9958b1af2add 000000000000
2 2 0980ee31a742 2db4ce2a3bfe 000000000000
--- a/tests/test-narrow.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-narrow.t Thu Jun 16 15:28:54 2022 +0200
@@ -71,6 +71,17 @@
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+The "narrow" repo requirement is ignored by [debugupgraderepo]
+
+#if tree
+ $ (cd should-work; hg debugupgraderepo)
+ abort: cannot upgrade repository; unsupported source requirement: treemanifest
+ [255]
+#else
+ $ (cd should-work; hg debugupgraderepo | grep 'no format upgrades found in existing repository')
+ (no format upgrades found in existing repository)
+#endif
+
Test repo with local changes
$ hg clone --narrow ssh://user@dummy/master narrow-local-changes --include d0 --include d3 --include d6
requesting all changes
--- a/tests/test-notify-changegroup.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-notify-changegroup.t Thu Jun 16 15:28:54 2022 +0200
@@ -40,7 +40,7 @@
$ hg --traceback --cwd b push ../a 2>&1 |
> "$PYTHON" $TESTDIR/unwrap-message-id.py | \
- > "$PYTHON" -c 'from __future__ import print_function ; import sys,re; print(re.sub("\n\t", " ", sys.stdin.read()), end="")'
+ > "$PYTHON" -c 'import sys,re; print(re.sub("\n\t", " ", sys.stdin.read()), end="")'
pushing to ../a
searching for changes
adding changesets
@@ -95,7 +95,7 @@
$ hg --config notify.sources=unbundle --cwd a unbundle ../test.hg 2>&1 |
> "$PYTHON" $TESTDIR/unwrap-message-id.py | \
- > "$PYTHON" -c 'from __future__ import print_function ; import sys,re; print(re.sub("\n\t", " ", sys.stdin.read()), end="")'
+ > "$PYTHON" -c 'import sys,re; print(re.sub("\n\t", " ", sys.stdin.read()), end="")'
adding changesets
adding manifests
adding file changes
@@ -172,7 +172,7 @@
$ hg --traceback --cwd b --config notify.fromauthor=True push ../a 2>&1 |
> "$PYTHON" $TESTDIR/unwrap-message-id.py | \
- > "$PYTHON" -c 'from __future__ import print_function ; import sys,re; print(re.sub("\n\t", " ", sys.stdin.read()), end="")'
+ > "$PYTHON" -c 'import sys,re; print(re.sub("\n\t", " ", sys.stdin.read()), end="")'
pushing to ../a
searching for changes
adding changesets
--- a/tests/test-notify.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-notify.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,16 +1,14 @@
$ cat > $TESTTMP/filter.py <<EOF
- > from __future__ import absolute_import, print_function
> import io
> import re
> import sys
- > if sys.version_info[0] >= 3:
- > sys.stdout = io.TextIOWrapper(
- > sys.stdout.buffer,
- > sys.stdout.encoding,
- > sys.stdout.errors,
- > newline="\n",
- > line_buffering=sys.stdout.line_buffering,
- > )
+ > sys.stdout = io.TextIOWrapper(
+ > sys.stdout.buffer,
+ > sys.stdout.encoding,
+ > sys.stdout.errors,
+ > newline="\n",
+ > line_buffering=sys.stdout.line_buffering,
+ > )
> print(re.sub("\n[ \t]", " ", sys.stdin.read()), end="")
> EOF
@@ -469,7 +467,6 @@
Content-Transfer-Encoding: 8bit
X-Test: foo
Date: * (glob)
- Subject: \xc3\xa0... (esc) (no-py3 !)
Subject: =?utf-8?b?w6AuLi4=?= (py3 !)
From: test@test.com
X-Hg-Notification: changeset 0f25f9c22b4c
--- a/tests/test-obsolete-bundle-strip.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-obsolete-bundle-strip.t Thu Jun 16 15:28:54 2022 +0200
@@ -1446,6 +1446,7 @@
# unbundling: (run 'hg update' to get a working copy)
Test that advisory obsolescence markers in bundles are ignored if unsupported
+-----------------------------------------------------------------------------
$ hg init repo-with-obs
$ cd repo-with-obs
@@ -1476,3 +1477,57 @@
added 1 changesets with 0 changes to 0 files
new changesets 1ea73414a91b (1 drafts)
(run 'hg update' to get a working copy)
+ $ cd ..
+
+Test bundlespec overwrite default
+---------------------------------
+
+# move back to the default
+
+ $ grep -v evolution.bundle-obsmarker $HGRCPATH > a
+ $ mv a $HGRCPATH
+
+ $ hg bundle -R repo-with-obs --type 'v2;obsolescence=yes' --all --hidden bundle-type-with-obs
+ 1 changesets found
+ $ hg debugbundle --spec bundle-type-with-obs
+ bzip2-v2;obsolescence=yes
+ $ hg debugbundle bundle-type-with-obs --part-type obsmarkers
+ Stream params: {Compression: BZ}
+ obsmarkers -- {} (mandatory: True)
+ version: 1 (50 bytes)
+ 1ea73414a91b0920940797d8fc6a11e447f8ea1e 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+ $ hg bundle -R repo-with-obs --type 'v2;obsolescence=yes;obsolescence-mandatory=no' --all --hidden bundle-type-with-obs-adv
+ 1 changesets found
+ $ hg debugbundle --spec bundle-type-with-obs-adv
+ bzip2-v2;obsolescence=yes;obsolescence-mandatory=no
+ $ hg debugbundle bundle-type-with-obs-adv --part-type obsmarkers
+ Stream params: {Compression: BZ}
+ obsmarkers -- {} (mandatory: False)
+ version: 1 (50 bytes)
+ 1ea73414a91b0920940797d8fc6a11e447f8ea1e 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ $ hg bundle -R repo-with-obs --type 'v2;obsolescence=no' --all --hidden bundle-type-without-obs
+ 1 changesets found
+ $ hg debugbundle --spec bundle-type-without-obs
+ bzip2-v2
+ $ hg debugbundle bundle-type-without-obs --part-type obsmarkers
+ Stream params: {Compression: BZ}
+
+Test bundlespec overwrite local config
+--------------------------------------
+
+ $ hg bundle -R repo-with-obs --config experimental.evolution.bundle-obsmarker=false --type 'v2;obsolescence=yes' --all --hidden bundle-type-with-obs2
+ 1 changesets found
+ $ hg debugbundle --spec bundle-type-with-obs2
+ bzip2-v2;obsolescence=yes
+ $ hg debugbundle bundle-type-with-obs2 --part-type obsmarkers
+ Stream params: {Compression: BZ}
+ obsmarkers -- {} (mandatory: True)
+ version: 1 (50 bytes)
+ 1ea73414a91b0920940797d8fc6a11e447f8ea1e 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ $ hg bundle -R repo-with-obs --config experimental.evolution.bundle-obsmarker=true --type 'v2;obsolescence=no' --all --hidden bundle-type-without-obs2
+ 1 changesets found
+ $ hg debugbundle --spec bundle-type-without-obs2
+ bzip2-v2
+ $ hg debugbundle bundle-type-without-obs2 --part-type obsmarkers
+ Stream params: {Compression: BZ}
--- a/tests/test-obsolete.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-obsolete.t Thu Jun 16 15:28:54 2022 +0200
@@ -1465,7 +1465,6 @@
Test heads computation on pending index changes with obsolescence markers
$ cd ..
$ cat >$TESTTMP/test_extension.py << EOF
- > from __future__ import absolute_import
> from mercurial.i18n import _
> from mercurial import cmdutil, pycompat, registrar
> from mercurial.utils import stringutil
@@ -1499,7 +1498,6 @@
bookmarks change
$ cd ..
$ cat >$TESTTMP/test_extension.py << EOF
- > from __future__ import absolute_import, print_function
> import weakref
> from mercurial import (
> bookmarks,
--- a/tests/test-pager.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-pager.t Thu Jun 16 15:28:54 2022 +0200
@@ -411,7 +411,6 @@
Environment variables like LESS and LV are set automatically:
$ cat > $TESTTMP/printlesslv.py <<EOF
- > from __future__ import absolute_import
> import os
> import sys
> sys.stdin.read()
--- a/tests/test-parseindex.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-parseindex.t Thu Jun 16 15:28:54 2022 +0200
@@ -26,7 +26,6 @@
summary: change foo
$ cat >> test.py << EOF
- > from __future__ import print_function
> from mercurial import changelog, node, pycompat, vfs
>
> class singlebyteread(object):
@@ -75,7 +74,6 @@
$ cd a
$ "$PYTHON" <<EOF
- > from __future__ import print_function
> from mercurial import changelog, vfs
> cl = changelog.changelog(vfs.vfs(b'.hg/store'))
> print('good heads:')
@@ -113,7 +111,7 @@
10000: head out of range
-2: head out of range
-10000: head out of range
- None: an integer is required( .got type NoneType.)? (re)
+ None: (an integer is required( .got type NoneType.)?|'NoneType' object cannot be interpreted as an integer) (re)
good roots:
0: [0]
1: [1]
@@ -124,7 +122,7 @@
-2: []
-10000: []
bad roots:
- None: an integer is required( .got type NoneType.)? (re)
+ None: (an integer is required( .got type NoneType.)?|'NoneType' object cannot be interpreted as an integer) (re)
$ cd ..
@@ -157,9 +155,9 @@
1 0000 65 1 0 2 26333235a41c
$ hg -R limit debugdeltachain -c
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 63 62 63 1.01613 63 0 0.00000
- 1 2 1 -1 base 66 65 66 1.01538 66 0 0.00000
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 2 -1 1 1 -1 base 63 62 63 1.01613 63 0 0.00000
+ 1 0 2 2 1 -1 base 66 65 66 1.01538 66 0 0.00000
$ hg -R neglimit debugrevlogindex -f1 -c
rev flag size link p1 p2 nodeid
@@ -172,12 +170,11 @@
1 0000 65 1 0 65536 26333235a41c
$ hg -R segv debugdeltachain -c
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
- 0 1 1 -1 base 63 62 63 1.01613 63 0 0.00000
- 1 2 1 -1 base 66 65 66 1.01538 66 0 0.00000
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
+ 0 65536 -1 1 1 -1 base 63 62 63 1.01613 63 0 0.00000
+ 1 0 65536 2 1 -1 base 66 65 66 1.01538 66 0 0.00000
$ cat <<EOF > test.py
- > from __future__ import print_function
> import sys
> from mercurial import changelog, pycompat, vfs
> cl = changelog.changelog(vfs.vfs(pycompat.fsencode(sys.argv[1])))
--- a/tests/test-parseindex2.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-parseindex2.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
It also checks certain aspects of the parsers module as a whole.
"""
-from __future__ import absolute_import, print_function
import os
import struct
--- a/tests/test-patch.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-patch.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
$ cat > patchtool.py <<EOF
- > from __future__ import absolute_import, print_function
> import sys
> print('Using custom patch')
> if '--binary' in sys.argv:
--- a/tests/test-patchbomb.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-patchbomb.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
--===+[0-9]+=+$ -> --===*= (glob)
$ cat > prune-blank-after-boundary.py <<EOF
- > from __future__ import absolute_import, print_function
> import sys
> skipblank = False
> trim = lambda x: x.strip(' \r\n')
@@ -514,7 +513,6 @@
X-Mercurial-Series-Id: <f81ef97829467e868fc4.240@test-hostname>
User-Agent: Mercurial-patchbomb/* (glob)
Date: Thu, 01 Jan 1970 00:04:00 +0000
- From: Q <quux> (no-py3 !)
From: =?iso-8859-1?q?Q?= <quux> (py3 !)
To: foo
Cc: bar
@@ -2400,9 +2398,6 @@
User-Agent: Mercurial-patchbomb/* (glob)
Date: Tue, 01 Jan 1980 00:01:00 +0000
From: quux
- To: spam <spam>, eggs, toast (no-py3 !)
- Cc: foo, bar@example.com, "A, B <>" <a@example.com> (no-py3 !)
- Bcc: "Quux, A." <quux> (no-py3 !)
To: =?iso-8859-1?q?spam?= <spam>, eggs, toast (py3 !)
Cc: foo, bar@example.com, =?iso-8859-1?q?A=2C_B_=3C=3E?= <a@example.com> (py3 !)
Bcc: =?iso-8859-1?q?Quux=2C_A=2E?= <quux> (py3 !)
@@ -2722,7 +2717,6 @@
MIME-Version: 1.0
Content-Type: text/plain; charset="iso-8859-1"
Content-Transfer-Encoding: quoted-printable
- Subject: [PATCH 2 of 6] \xe7a (esc) (no-py3 !)
Subject: =?utf-8?b?W1BBVENIIDIgb2YgNl0gw6dh?= (py3 !)
X-Mercurial-Node: f81ef97829467e868fc405fccbcfa66217e4d3e6
X-Mercurial-Series-Index: 2
--- a/tests/test-pathencode.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-pathencode.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# that have proven likely to expose bugs and divergent behavior in
# different encoding implementations.
-from __future__ import absolute_import, print_function
import binascii
import collections
@@ -20,11 +19,6 @@
store,
)
-try:
- xrange
-except NameError:
- xrange = range
-
validchars = set(map(pycompat.bytechr, range(0, 256)))
alphanum = range(ord('A'), ord('Z'))
@@ -33,8 +27,8 @@
winreserved = (
b'aux con prn nul'.split()
- + [b'com%d' % i for i in xrange(1, 10)]
- + [b'lpt%d' % i for i in xrange(1, 10)]
+ + [b'com%d' % i for i in range(1, 10)]
+ + [b'lpt%d' % i for i in range(1, 10)]
)
@@ -44,8 +38,8 @@
combos = set()
for r in names:
- for i in xrange(len(r) + 1):
- for c in itertools.combinations(xrange(len(r)), i):
+ for i in range(len(r) + 1):
+ for c in itertools.combinations(range(len(r)), i):
d = r
for j in c:
d = b''.join((d[:j], d[j : j + 1].upper(), d[j + 1 :]))
@@ -67,7 +61,7 @@
counts[c] += 1
for c in '\r/\n':
counts.pop(c, None)
- t = sum(pycompat.itervalues(counts)) / 100.0
+ t = sum(counts.values()) / 100.0
fp.write('probtable = (')
for i, (k, v) in enumerate(
sorted(counts.items(), key=lambda x: x[1], reverse=True)
@@ -212,7 +206,7 @@
return (
b'data/'
- + b'/'.join(makepart(rng, k) for _ in xrange(j))
+ + b'/'.join(makepart(rng, k) for _ in range(j))
+ rng.choice([b'.d', b'.i'])
)
@@ -223,7 +217,7 @@
mink, maxk = 1, 4096
def steps():
- for i in xrange(count):
+ for i in range(count):
yield mink + int(round(math.sqrt((maxk - mink) * float(i) / count)))
for k in steps():
--- a/tests/test-profile.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-profile.t Thu Jun 16 15:28:54 2022 +0200
@@ -132,7 +132,6 @@
profiler extension could be loaded before other extensions
$ cat > fooprof.py <<EOF
- > from __future__ import absolute_import
> import contextlib
> import sys
> @contextlib.contextmanager
@@ -147,7 +146,6 @@
> EOF
$ cat > otherextension.py <<EOF
- > from __future__ import absolute_import
> def extsetup(ui):
> ui.write(b'otherextension: loaded\n')
> EOF
--- a/tests/test-progress.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-progress.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
$ cat > loop.py <<EOF
- > from __future__ import absolute_import
> import time
> from mercurial import commands, registrar
>
--- a/tests/test-propertycache.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-propertycache.py Thu Jun 16 15:28:54 2022 +0200
@@ -4,7 +4,6 @@
property cache of both localrepo and repoview to prevent
regression."""
-from __future__ import absolute_import, print_function
import os
import subprocess
--- a/tests/test-pull-network.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-pull-network.t Thu Jun 16 15:28:54 2022 +0200
@@ -90,12 +90,12 @@
It's tricky to make file:// URLs working on every platform with
regular shell commands.
- $ URL=`"$PYTHON" -c "from __future__ import print_function; import os; print('file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test')"`
+ $ URL=`"$PYTHON" -c "import os; print('file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test')"`
$ hg pull -q "$URL"
abort: file:// URLs can only refer to localhost
[255]
- $ URL=`"$PYTHON" -c "from __future__ import print_function; import os; print('file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test')"`
+ $ URL=`"$PYTHON" -c "import os; print('file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test')"`
$ hg pull -q "$URL"
SEC: check for unsafe ssh url
--- a/tests/test-push-checkheads-partial-C1.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-partial-C1.t Thu Jun 16 15:28:54 2022 +0200
@@ -17,7 +17,7 @@
..
.. new-state:
..
-.. * 1 new changesets branches superceeding only the head of the old one
+.. * 1 new changesets branches superseding only the head of the old one
.. * base of the old branch is still alive
..
.. expected-result:
--- a/tests/test-push-checkheads-partial-C2.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-partial-C2.t Thu Jun 16 15:28:54 2022 +0200
@@ -17,7 +17,7 @@
..
.. new-state:
..
-.. * 1 new changesets branches superceeding only the base of the old one
+.. * 1 new changesets branches superseding only the base of the old one
.. * The old branch is still alive (base is obsolete, head is alive)
..
.. expected-result:
--- a/tests/test-push-checkheads-pruned-B2.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-pruned-B2.t Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,7 @@
This case is part of a series of tests checking this behavior.
Category B: simple case involving pruned changesets
-TestCase 2: multi-changeset branch, head is pruned, rest is superceeded
+TestCase 2: multi-changeset branch, head is pruned, rest is superseded
.. old-state:
..
--- a/tests/test-push-checkheads-pruned-B3.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-pruned-B3.t Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,7 @@
This case is part of a series of tests checking this behavior.
Category B: simple case involving pruned changesets
-TestCase 3: multi-changeset branch, other is pruned, rest is superceeded
+TestCase 3: multi-changeset branch, other is pruned, rest is superseded
.. old-state:
..
@@ -17,7 +17,7 @@
..
.. new-state:
..
-.. * old head is superceeded
+.. * old head is superseded
.. * old other is pruned
..
.. expected-result:
--- a/tests/test-push-checkheads-pruned-B5.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-pruned-B5.t Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,7 @@
This case is part of a series of tests checking this behavior.
Category B: simple case involving pruned changesets
-TestCase 5: multi-changeset branch, mix of pruned and superceeded
+TestCase 5: multi-changeset branch, mix of pruned and superseded
.. old-state:
..
@@ -18,7 +18,7 @@
.. new-state:
..
.. * old head is pruned
-.. * old mid is superceeded
+.. * old mid is superseded
.. * old root is pruned
..
.. expected-result:
--- a/tests/test-push-checkheads-pruned-B8.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-pruned-B8.t Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,7 @@
This case is part of a series of tests checking this behavior.
Category B: simple case involving pruned changesets
-TestCase 2: multi-changeset branch, head is pruned, rest is superceeded, through other
+TestCase 2: multi-changeset branch, head is pruned, rest is superseded, through other
.. old-state:
..
--- a/tests/test-push-checkheads-superceed-A1.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-superceed-A1.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,7 @@
This case is part of a series of tests checking this behavior.
-Category A: simple case involving a branch being superceeded by another.
+Category A: simple case involving a branch being superseded by another.
TestCase 1: single-changeset branch
.. old-state:
--- a/tests/test-push-checkheads-superceed-A2.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-superceed-A2.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,7 @@
This case is part of a series of tests checking this behavior.
-Category A: simple case involving a branch being superceeded by another.
+Category A: simple case involving a branch being superseded by another.
TestCase 2: multi-changeset branch
.. old-state:
--- a/tests/test-push-checkheads-superceed-A3.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-superceed-A3.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,7 @@
This case is part of a series of tests checking this behavior.
-Category A: simple case involving a branch being superceeded by another.
+Category A: simple case involving a branch being superseded by another.
TestCase 3: multi-changeset branch with reordering
Push should be allowed
--- a/tests/test-push-checkheads-superceed-A4.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-superceed-A4.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,7 @@
This case is part of a series of tests checking this behavior.
-Category A: simple case involving a branch being superceeded by another.
+Category A: simple case involving a branch being superseded by another.
TestCase 4: New changeset as children of the successor
.. old-state:
--- a/tests/test-push-checkheads-superceed-A5.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-superceed-A5.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,7 @@
This case is part of a series of tests checking this behavior.
-Category A: simple case involving a branch being superceeded by another.
+Category A: simple case involving a branch being superseded by another.
TestCase 5: New changeset as parent of the successor
.. old-state:
--- a/tests/test-push-checkheads-superceed-A6.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-superceed-A6.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,7 @@
This case is part of a series of tests checking this behavior.
-Category A: simple case involving a branch being superceeded by another.
+Category A: simple case involving a branch being superseded by another.
TestCase 6: multi-changeset branch, split on multiple other, (base on its own branch), same number of head
.. old-state:
@@ -17,8 +17,8 @@
..
.. new-state:
..
-.. * 1 new branch superceeding the base of the old-2-changesets-branch,
-.. * 1 new changesets on the old-1-changeset-branch superceeding the head of the other
+.. * 1 new branch superseding the base of the old-2-changesets-branch,
+.. * 1 new changesets on the old-1-changeset-branch superseding the head of the other
..
.. expected-result:
..
--- a/tests/test-push-checkheads-superceed-A7.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-superceed-A7.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,7 @@
This case is part of a series of tests checking this behavior.
-Category A: simple case involving a branch being superceeded by another.
+Category A: simple case involving a branch being superseded by another.
TestCase 7: multi-changeset branch, split on multiple other, (head on its own branch), same number of head
.. old-state:
@@ -17,8 +17,8 @@
..
.. new-state:
..
-.. * 1 new branch superceeding the head of the old-2-changesets-branch,
-.. * 1 new changesets on the old-1-changeset-branch superceeding the base of the other
+.. * 1 new branch superseding the head of the old-2-changesets-branch,
+.. * 1 new changesets on the old-1-changeset-branch superseding the base of the other
..
.. expected-result:
..
--- a/tests/test-push-checkheads-superceed-A8.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-superceed-A8.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,7 @@
This case is part of a series of tests checking this behavior.
-Category A: simple case involving a branch being superceeded by another.
+Category A: simple case involving a branch being superseded by another.
TestCase 8: single-changeset branch indirect rewrite
.. old-state:
--- a/tests/test-push-checkheads-unpushed-D4.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-unpushed-D4.t Thu Jun 16 15:28:54 2022 +0200
@@ -17,8 +17,8 @@
..
.. new-state:
..
-.. * 1 new branch superceeding the base of the old-2-changesets-branch,
-.. * 1 new changesets on the old-1-changeset-branch superceeding the head of the other
+.. * 1 new branch superseding the base of the old-2-changesets-branch,
+.. * 1 new changesets on the old-1-changeset-branch superseding the head of the other
..
.. expected-result:
..
--- a/tests/test-push-checkheads-unpushed-D5.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-checkheads-unpushed-D5.t Thu Jun 16 15:28:54 2022 +0200
@@ -17,8 +17,8 @@
..
.. new-state:
..
-.. * 1 new branch superceeding the head of the old-2-changesets-branch,
-.. * 1 new changesets on the old-1-changeset-branch superceeding the base of the other
+.. * 1 new branch superseding the head of the old-2-changesets-branch,
+.. * 1 new changesets on the old-1-changeset-branch superseding the base of the other
..
.. expected-result:
..
--- a/tests/test-push-race.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-push-race.t Thu Jun 16 15:28:54 2022 +0200
@@ -58,9 +58,8 @@
> def delete():
> try:
> os.unlink(watchpath)
- > except OSError as exc:
- > if exc.errno != errno.ENOENT:
- > raise
+ > except FileNotFoundError:
+ > pass
> ui.atexit(delete)
> return orig(pushop)
>
--- a/tests/test-rebase-dest.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-rebase-dest.t Thu Jun 16 15:28:54 2022 +0200
@@ -81,7 +81,6 @@
$ cd $TESTTMP
$ cat >> $TESTTMP/maprevset.py <<EOF
- > from __future__ import absolute_import
> from mercurial import registrar, revset, revsetlang, smartset
> revsetpredicate = registrar.revsetpredicate()
> cache = {}
--- a/tests/test-rebase-scenario-global.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-rebase-scenario-global.t Thu Jun 16 15:28:54 2022 +0200
@@ -949,7 +949,6 @@
$ hg init tr-state
$ cd tr-state
$ cat > $TESTTMP/wraprebase.py <<EOF
- > from __future__ import absolute_import
> from mercurial import extensions
> def _rebase(orig, ui, repo, *args, **kwargs):
> with repo.wlock():
--- a/tests/test-relink.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-relink.t Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
> }
$ cat > arelinked.py <<EOF
- > from __future__ import absolute_import, print_function
> import os
> import sys
> from mercurial import (
--- a/tests/test-remotefilelog-cacheprocess.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-remotefilelog-cacheprocess.t Thu Jun 16 15:28:54 2022 +0200
@@ -18,8 +18,6 @@
> import os
> import shutil
> import sys
- > if sys.version_info[0] > 2:
- > xrange = range
> f = open('$TESTTMP/cachelog.log', 'w')
> srccache = os.path.join('$TESTTMP', 'oldhgcache')
> def log(message):
@@ -36,7 +34,7 @@
> count = int(sys.stdin.readline())
> log('client wants %r blobs\n' % count)
> wants = []
- > for _ in xrange(count):
+ > for _ in range(count):
> key = sys.stdin.readline()[:-1]
> wants.append(key)
> if '\0' in key:
--- a/tests/test-remotefilelog-corrupt-cache.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-remotefilelog-corrupt-cache.t Thu Jun 16 15:28:54 2022 +0200
@@ -38,7 +38,6 @@
$ chmod u+w $CACHEDIR/master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0
$ echo x > $CACHEDIR/master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/1406e74118627694268417491f018a4a883152f0
$ hg up tip 2>&1 | egrep "^[^ ].*unexpected remotefilelog"
- abort: unexpected remotefilelog header: illegal format (no-py3 !)
hgext.remotefilelog.shallowutil.BadRemotefilelogHeader: unexpected remotefilelog header: illegal format (py3 !)
Verify detection and remediation when remotefilelog.validatecachelog is set
--- a/tests/test-remotefilelog-datapack.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-remotefilelog-datapack.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-from __future__ import absolute_import, print_function
import hashlib
import os
@@ -36,7 +35,7 @@
)
-class datapacktestsbase(object):
+class datapacktestsbase:
def __init__(self, datapackreader, paramsavailable):
self.datapackreader = datapackreader
self.paramsavailable = paramsavailable
@@ -268,7 +267,7 @@
revisions = []
blobs = {}
total = basepack.SMALLFANOUTCUTOFF + 1
- for i in pycompat.xrange(total):
+ for i in range(total):
filename = b"filename-%d" % i
content = filename
node = self.getHash(content)
@@ -358,7 +357,7 @@
]
for packsize in packsizes:
revisions = []
- for i in pycompat.xrange(packsize):
+ for i in range(packsize):
filename = b"filename-%d" % i
content = b"content-%d" % i
node = self.getHash(content)
--- a/tests/test-remotefilelog-histpack.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-remotefilelog-histpack.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-from __future__ import absolute_import
import hashlib
import os
@@ -284,7 +283,7 @@
This causes it to use a 2^16 fanout table instead."""
total = basepack.SMALLFANOUTCUTOFF + 1
revisions = []
- for i in pycompat.xrange(total):
+ for i in range(total):
filename = b"foo-%d" % i
node = self.getFakeHash()
p1 = self.getFakeHash()
--- a/tests/test-rename-merge1.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-rename-merge1.t Thu Jun 16 15:28:54 2022 +0200
@@ -65,7 +65,7 @@
$ hg ci -m "merge"
$ hg debugindex b
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 57eacc201a7f 000000000000 000000000000
1 3 4727ba907962 000000000000 57eacc201a7f
--- a/tests/test-rename.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-rename.t Thu Jun 16 15:28:54 2022 +0200
@@ -682,7 +682,6 @@
"hg cp" does not preserve the mtime, so it should be newer than the 2009
timestamp.
$ hg cp -q mtime mtime_cp
- >>> from __future__ import print_function
>>> import os
>>> filename = "mtime_cp/f"
>>> print(os.stat(filename).st_mtime < 1234567999)
@@ -691,7 +690,6 @@
(modulo some fudge factor due to not every system supporting 1s-level
precision).
$ hg mv -q mtime mtime_mv
- >>> from __future__ import print_function
>>> import os
>>> filename = "mtime_mv/f"
>>> print(os.stat(filename).st_mtime < 1234567999)
--- a/tests/test-requires.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-requires.t Thu Jun 16 15:28:54 2022 +0200
@@ -32,7 +32,6 @@
$ echo 'featuresetup-test' >> supported/.hg/requires
$ cat > $TESTTMP/supported-locally/supportlocally.py <<EOF
- > from __future__ import absolute_import
> from mercurial import extensions, localrepo
> def featuresetup(ui, supported):
> for name, module in extensions.extensions(ui):
--- a/tests/test-revert-interactive-curses.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-revert-interactive-curses.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,5 @@
#require curses
+#testcases committed wdir
Revert interactive tests with the Curses interface
@@ -12,6 +13,22 @@
TODO: Make a curses version of the other tests from test-revert-interactive.t.
+#if committed
+ $ maybe_commit() {
+ > hg ci "$@"
+ > }
+ $ do_revert() {
+ > hg revert -ir'.^'
+ > }
+#else
+ $ maybe_commit() {
+ > true
+ > }
+ $ do_revert() {
+ > hg revert -i
+ > }
+#endif
+
When a line without EOL is selected during "revert -i"
$ hg init $TESTTMP/revert-i-curses-eol
@@ -19,7 +36,7 @@
$ echo 0 > a
$ hg ci -qAm 0
$ printf 1 >> a
- $ hg ci -qAm 1
+ $ maybe_commit -qAm 1
$ cat a
0
1 (no-eol)
@@ -28,7 +45,7 @@
> c
> EOF
- $ hg revert -ir'.^'
+ $ do_revert
reverting a
$ cat a
0
@@ -40,7 +57,7 @@
$ printf 0 > a
$ hg ci -qAm 0
$ echo 0 > a
- $ hg ci -qAm 1
+ $ maybe_commit -qAm 1
$ cat a
0
@@ -48,7 +65,7 @@
> c
> EOF
- $ hg revert -ir'.^'
+ $ do_revert
reverting a
$ cat a
0 (no-eol)
--- a/tests/test-revert-interactive.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-revert-interactive.t Thu Jun 16 15:28:54 2022 +0200
@@ -420,6 +420,19 @@
forgetting newfile
$ hg status
? newfile
+ $ rm newfile
+ $ hg up 0
+ 1 files updated, 0 files merged, 4 files removed, 0 files unresolved
+ $ hg status
+ $ hg revert -r 2 -i <<EOF
+ > y
+ > n
+ > EOF
+ add new file folder1/g (Yn)? y
+ adding folder1/g
+ add new file folder2/h (Yn)? n
+ $ hg status
+ A folder1/g
When a line without EOL is selected during "revert -i" (issue5651)
--- a/tests/test-revert.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-revert.t Thu Jun 16 15:28:54 2022 +0200
@@ -550,7 +550,6 @@
$ cat << EOF >> dircontent.py
> # generate a simple text view of the directory for easy comparison
- > from __future__ import print_function
> import os
> files = os.listdir('.')
> files.sort()
--- a/tests/test-revlog-ancestry.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-revlog-ancestry.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
import os
from mercurial import (
hg,
--- a/tests/test-revlog-mmapindex.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-revlog-mmapindex.t Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
$ cat << EOF > verbosemmap.py
> # extension to make util.mmapread verbose
>
- > from __future__ import absolute_import
>
> from mercurial import (
> extensions,
--- a/tests/test-revlog-packentry.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-revlog-packentry.t Thu Jun 16 15:28:54 2022 +0200
@@ -16,7 +16,7 @@
created new head
$ hg debugindex foo
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b80de5d13875 000000000000 000000000000
1 1 0376abec49b8 000000000000 000000000000
--- a/tests/test-revlog-raw.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-revlog-raw.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# test revlog interaction about raw data (flagprocessor)
-from __future__ import absolute_import, print_function
import collections
import hashlib
@@ -20,7 +19,7 @@
)
-class _NoTransaction(object):
+class _NoTransaction:
"""transaction like object to update the nodemap outside a transaction"""
def __init__(self):
@@ -151,7 +150,7 @@
code path, which is not covered by "appendrev" alone.
"""
- class dummychangegroup(object):
+ class dummychangegroup:
@staticmethod
def deltachunk(pnode):
pnode = pnode or rlog.nullid
--- a/tests/test-revlog.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-revlog.t Thu Jun 16 15:28:54 2022 +0200
@@ -76,8 +76,14 @@
$ tar --force-local -xf "$TESTDIR"/bundles/test-revlog-diff-relative-to-nullrev.tar
$ cd nullrev-diff
$ hg debugdeltachain a
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
- 0 1 2 -1 p1 15 3 15 5.00000 15 0 0.00000 15 15 1.00000 1
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
+ 0 -1 -1 1 2 -1 p1 15 3 15 5.00000 15 0 0.00000 15 15 1.00000 1
+ 1 0 -1 1 2 -1 p2 15 3 15 5.00000 30 15 1.00000 30 30 0.50000 1
+ 2 -1 -1 1 2 -1 p1 15 3 15 5.00000 45 30 2.00000 45 45 0.33333 1
$ hg cat --config rhg.cat=true -r 0 a
hi
+ $ hg cat --config rhg.cat=true -r 1 a
+ ho
+ $ hg cat --config rhg.cat=true -r 2 a
+ ha
$ cd ..
--- a/tests/test-revset.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-revset.t Thu Jun 16 15:28:54 2022 +0200
@@ -36,7 +36,6 @@
these predicates use '\0' as a separator:
$ cat <<EOF > debugrevlistspec.py
- > from __future__ import absolute_import
> from mercurial import (
> node as nodemod,
> registrar,
--- a/tests/test-rhg.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-rhg.t Thu Jun 16 15:28:54 2022 +0200
@@ -384,3 +384,17 @@
$ echo "*:required = yes" >> $HGRCPATH
$ rhg files
a
+
+We can ignore all extensions at once
+
+ $ echo "[extensions]" >> $HGRCPATH
+ $ echo "thisextensionbetternotexist=" >> $HGRCPATH
+ $ echo "thisextensionbetternotexisteither=" >> $HGRCPATH
+ $ $NO_FALLBACK rhg files
+ unsupported feature: extensions: thisextensionbetternotexist, thisextensionbetternotexisteither (consider adding them to 'rhg.ignored-extensions' config)
+ [252]
+
+ $ echo "[rhg]" >> $HGRCPATH
+ $ echo "ignored-extensions=*" >> $HGRCPATH
+ $ $NO_FALLBACK rhg files
+ a
--- a/tests/test-run-tests.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-run-tests.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
run-test.t only checks positive matches and can not see warnings
(both by design)
"""
-from __future__ import absolute_import, print_function
import doctest
import os
--- a/tests/test-run-tests.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-run-tests.t Thu Jun 16 15:28:54 2022 +0200
@@ -2086,5 +2086,4 @@
$ ./test-py3.py
3.* (glob)
$ ./test-py.py
- 2.* (glob) (no-py3 !)
3.* (glob) (py3 !)
--- a/tests/test-rust-ancestor.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-rust-ancestor.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import sys
import unittest
--- a/tests/test-rust-discovery.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-rust-discovery.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import unittest
from mercurial import policy
@@ -32,12 +31,12 @@
)
-class fakechangelog(object):
+class fakechangelog:
def __init__(self, idx):
self.index = idx
-class fakerepo(object):
+class fakerepo:
def __init__(self, idx):
"""Just make so that self.changelog.index is the given idx."""
self.changelog = fakechangelog(idx)
--- a/tests/test-rust-revlog.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-rust-revlog.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import unittest
try:
--- a/tests/test-setdiscovery.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-setdiscovery.t Thu Jun 16 15:28:54 2022 +0200
@@ -45,6 +45,7 @@
unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 6
heads summary:
total common heads: 2
also local heads: 2
@@ -77,6 +78,7 @@
all local changesets known remotely
elapsed time: * seconds (glob)
round-trips: 1
+ queries: 2
heads summary:
total common heads: 2
also local heads: 2
@@ -109,6 +111,7 @@
all local changesets known remotely
elapsed time: * seconds (glob)
round-trips: 1
+ queries: 1
heads summary:
total common heads: 1
also local heads: 1
@@ -140,6 +143,7 @@
unpruned common: 01241442b3c2 b5714e113bc0
elapsed time: * seconds (glob)
round-trips: 1
+ queries: 0
heads summary:
total common heads: 2
also local heads: 1
@@ -172,6 +176,7 @@
all remote heads known locally
elapsed time: * seconds (glob)
round-trips: 1
+ queries: 3
heads summary:
total common heads: 2
also local heads: 1
@@ -204,6 +209,7 @@
all remote heads known locally
elapsed time: * seconds (glob)
round-trips: 1
+ queries: 1
heads summary:
total common heads: 2
also local heads: 1
@@ -242,6 +248,7 @@
unpruned common: bebd167eb94d
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 3
heads summary:
total common heads: 1
also local heads: 1
@@ -277,6 +284,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 31
heads summary:
total common heads: 1
also local heads: 1
@@ -312,6 +320,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 32
heads summary:
total common heads: 1
also local heads: 0
@@ -343,6 +352,7 @@
unpruned common: 66f7d451a68b bebd167eb94d
elapsed time: * seconds (glob)
round-trips: 4
+ queries: 5
heads summary:
total common heads: 1
also local heads: 0
@@ -378,6 +388,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 3
heads summary:
total common heads: 1
also local heads: 0
@@ -413,6 +424,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 3
heads summary:
total common heads: 1
also local heads: 0
@@ -450,6 +462,7 @@
unpruned common: 2dc09a01254d
elapsed time: * seconds (glob)
round-trips: 4
+ queries: 5
heads summary:
total common heads: 1
also local heads: 1
@@ -485,6 +498,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 31
heads summary:
total common heads: 1
also local heads: 1
@@ -520,6 +534,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 32
heads summary:
total common heads: 1
also local heads: 0
@@ -551,6 +566,7 @@
unpruned common: 2dc09a01254d 66f7d451a68b
elapsed time: * seconds (glob)
round-trips: 4
+ queries: 5
heads summary:
total common heads: 1
also local heads: 0
@@ -586,6 +602,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 30
heads summary:
total common heads: 1
also local heads: 0
@@ -621,6 +638,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 30
heads summary:
total common heads: 1
also local heads: 0
@@ -659,6 +677,7 @@
unpruned common: 66f7d451a68b
elapsed time: * seconds (glob)
round-trips: 4
+ queries: 5
heads summary:
total common heads: 1
also local heads: 0
@@ -694,6 +713,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 32
heads summary:
total common heads: 1
also local heads: 0
@@ -729,6 +749,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 32
heads summary:
total common heads: 1
also local heads: 0
@@ -760,6 +781,7 @@
unpruned common: 66f7d451a68b
elapsed time: * seconds (glob)
round-trips: 4
+ queries: 5
heads summary:
total common heads: 1
also local heads: 0
@@ -795,6 +817,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 32
heads summary:
total common heads: 1
also local heads: 0
@@ -830,6 +853,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 32
heads summary:
total common heads: 1
also local heads: 0
@@ -868,6 +892,7 @@
unpruned common: 66f7d451a68b
elapsed time: * seconds (glob)
round-trips: 4
+ queries: 5
heads summary:
total common heads: 1
also local heads: 0
@@ -903,6 +928,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 52
heads summary:
total common heads: 1
also local heads: 0
@@ -938,6 +964,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 52
heads summary:
total common heads: 1
also local heads: 0
@@ -969,6 +996,7 @@
unpruned common: 66f7d451a68b
elapsed time: * seconds (glob)
round-trips: 3
+ queries: 4
heads summary:
total common heads: 1
also local heads: 0
@@ -1004,6 +1032,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 32
heads summary:
total common heads: 1
also local heads: 0
@@ -1039,6 +1068,7 @@
2 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 2
+ queries: 32
heads summary:
total common heads: 1
also local heads: 0
@@ -1077,6 +1107,7 @@
unpruned common: 7ead0cba2838
elapsed time: * seconds (glob)
round-trips: 4
+ queries: 5
heads summary:
total common heads: 1
also local heads: 0
@@ -1115,6 +1146,7 @@
3 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 3
+ queries: 43
heads summary:
total common heads: 1
also local heads: 0
@@ -1153,6 +1185,7 @@
3 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 3
+ queries: 43
heads summary:
total common heads: 1
also local heads: 0
@@ -1184,6 +1217,7 @@
unpruned common: 7ead0cba2838
elapsed time: * seconds (glob)
round-trips: 3
+ queries: 4
heads summary:
total common heads: 1
also local heads: 0
@@ -1222,6 +1256,7 @@
3 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 3
+ queries: 27
heads summary:
total common heads: 1
also local heads: 0
@@ -1260,6 +1295,7 @@
3 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 3
+ queries: 27
heads summary:
total common heads: 1
also local heads: 0
@@ -1350,6 +1386,7 @@
6 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 6
+ queries: 1054
heads summary:
total common heads: 1
also local heads: 0
@@ -1387,6 +1424,7 @@
3 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 3
+ queries: 13
heads summary:
total common heads: 1
also local heads: 0
@@ -1436,6 +1474,7 @@
9 total queries in *s (glob)
elapsed time: * seconds (glob)
round-trips: 9
+ queries: 993
heads summary:
total common heads: 1
also local heads: 0
@@ -1564,6 +1603,7 @@
searching for changes
elapsed time: * seconds (glob)
round-trips: 1
+ queries: 1
heads summary:
total common heads: 1
also local heads: 1
@@ -1610,6 +1650,7 @@
all remote heads known locally
elapsed time: * seconds (glob)
round-trips: 1
+ queries: 260
heads summary:
total common heads: 25
also local heads: 25
@@ -1655,6 +1696,7 @@
3 total queries *s (glob)
elapsed time: * seconds (glob)
round-trips: 3
+ queries: 109
heads summary:
total common heads: 1
also local heads: 0
@@ -1700,6 +1742,7 @@
3 total queries in *s (glob)
elapsed time: * seconds (glob)
round-trips: 3
+ queries: 109
heads summary:
total common heads: 1
also local heads: 0
@@ -1757,6 +1800,7 @@
"nb-revs-common": 300,
"nb-revs-missing": 100,
"output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob)
+ "total-queries": 109,
"total-roundtrips": 3
}
]
--- a/tests/test-share-bookmarks.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-share-bookmarks.t Thu Jun 16 15:28:54 2022 +0200
@@ -222,7 +222,6 @@
$ cat > failpullbookmarks.py << EOF
> """A small extension that makes bookmark pulls fail, for testing"""
- > from __future__ import absolute_import
> from mercurial import (
> error,
> exchange,
--- a/tests/test-share-safe.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-share-safe.t Thu Jun 16 15:28:54 2022 +0200
@@ -521,12 +521,21 @@
[255]
$ rm ../ss-share/.hg/wlock
+ $ cp -R ../ss-share ../ss-share-bck
$ hg log -GT "{node}: {desc}\n" -R ../ss-share --config share.safe-mismatch.source-not-safe=downgrade-abort
repository downgraded to not use share-safe mode
@ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
|
o f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+ $ rm -rf ../ss-share
+ $ mv ../ss-share-bck ../ss-share
+
+ $ hg log -GT "{node}: {desc}\n" -R ../ss-share --config share.safe-mismatch.source-not-safe=downgrade-abort --config share.safe-mismatch.source-not-safe:verbose-upgrade=no
+ @ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+ |
+ o f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+
$ hg log -GT "{node}: {desc}\n" -R ../ss-share
@ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
@@ -588,12 +597,20 @@
[255]
$ rm ../nss-share/.hg/wlock
+ $ cp -R ../nss-share ../nss-share-bck
$ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-abort
repository upgraded to use share-safe mode
@ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
|
o f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+ $ rm -rf ../nss-share
+ $ mv ../nss-share-bck ../nss-share
+ $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-abort --config share.safe-mismatch.source-safe:verbose-upgrade=no
+ @ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+ |
+ o f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+
Test that unshare works
@@ -603,3 +620,36 @@
|
o f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+
+Test automatic upgrade/downgrade of main-repository
+------------------------------------------------------
+
+create an initial repository
+
+ $ hg init auto-upgrade \
+ > --config format.use-share-safe=no
+ $ hg debugbuilddag -R auto-upgrade --new-file .+5
+ $ hg -R auto-upgrade update
+ 6 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg debugformat -R auto-upgrade | grep share-safe
+ share-safe: no
+
+upgrade it to share-safe automatically
+
+ $ hg status -R auto-upgrade \
+ > --config format.use-share-safe.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-share-safe=yes
+ automatically upgrading repository to the `share-safe` feature
+ (see `hg help config.format.use-share-safe` for details)
+ $ hg debugformat -R auto-upgrade | grep share-safe
+ share-safe: yes
+
+downgrade it from share-safe automatically
+
+ $ hg status -R auto-upgrade \
+ > --config format.use-share-safe.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-share-safe=no
+ automatically downgrading repository from the `share-safe` feature
+ (see `hg help config.format.use-share-safe` for details)
+ $ hg debugformat -R auto-upgrade | grep share-safe
+ share-safe: no
--- a/tests/test-shelve.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-shelve.t Thu Jun 16 15:28:54 2022 +0200
@@ -976,7 +976,7 @@
Test shelve --delete
$ hg shelve --list
- default (*s ago) changes to: create conflict (glob)
+ default (*s ago) * changes to: create conflict (glob)
$ hg shelve --delete doesnotexist
abort: shelved change 'doesnotexist' not found
[10]
@@ -1209,7 +1209,7 @@
$ hg add e
$ hg ci -m e
$ hg shelve --patch
- default (*s ago) changes to: b (glob)
+ default (*s ago) * changes to: b (glob)
diff --git a/c b/c
new file mode 100644
@@ -1258,7 +1258,7 @@
e
-- shelve should not contain `c` now
$ hg shelve --patch
- default (*s ago) changes to: b (glob)
+ default (*s ago) * changes to: b (glob)
diff --git a/d b/d
new file mode 100644
@@ -1357,7 +1357,7 @@
A
B
$ hg shelve --patch
- default (*s ago) changes to: add B to foo (glob)
+ default (*s ago) * changes to: add B to foo (glob)
diff --git a/foo b/foo
--- a/foo
--- a/tests/test-sidedata.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-sidedata.t Thu Jun 16 15:28:54 2022 +0200
@@ -40,6 +40,18 @@
entry-0001 size 4
entry-0002 size 32
+ $ hg debug-revlog-index --verbose -c
+ rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
+ 0 -1 0 7049e48789d7 -1 000000000000 -1 000000000000 54 0 0 0 0 54 plain 0 90
+ 1 -1 1 2707720c6597 0 7049e48789d7 -1 000000000000 54 1 0 0 54 54 plain 90 90
+ 2 -1 2 40f977031323 1 2707720c6597 -1 000000000000 55 2 0 0 108 55 plain 180 90
+
+ $ hg debug-revlog-index --verbose -m
+ rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
+ 0 -1 0 b85d294330e3 -1 000000000000 -1 000000000000 43 0 0 0 0 43 plain 0 90
+ 1 -1 1 1a0aec305c63 0 b85d294330e3 -1 000000000000 86 0 0 0 43 55 plain 90 90
+ 2 -1 2 104258a4f75f 1 1a0aec305c63 -1 000000000000 86 1 0 0 98 55 plain 180 90
+
Check upgrade behavior
======================
--- a/tests/test-simplekeyvaluefile.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-simplekeyvaluefile.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import unittest
import silenttestrunner
@@ -9,7 +7,7 @@
)
-class mockfile(object):
+class mockfile:
def __init__(self, name, fs):
self.name = name
self.fs = fs
@@ -27,7 +25,7 @@
return self.fs.contents[self.name]
-class mockvfs(object):
+class mockvfs:
def __init__(self):
self.contents = {}
--- a/tests/test-simplemerge.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-simplemerge.py Thu Jun 16 15:28:54 2022 +0200
@@ -13,7 +13,6 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
-from __future__ import absolute_import
import unittest
from mercurial import (
--- a/tests/test-sparse-merges.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-sparse-merges.t Thu Jun 16 15:28:54 2022 +0200
@@ -182,6 +182,10 @@
merging a and amove to amove
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
+ $ hg status --copies
+ M amove
+ a
+ R a
$ hg up -C 4
cleaned up 1 temporarily added file(s) from the sparse checkout
--- a/tests/test-sparse-revlog.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-sparse-revlog.t Thu Jun 16 15:28:54 2022 +0200
@@ -91,7 +91,7 @@
$ f -s .hg/store/data/*.d
- .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=63327412
+ .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=58616973
$ hg debugrevlog *
format : 1
flags : generaldelta
@@ -105,43 +105,90 @@
delta : 0 (100.00%)
snapshot : 383 ( 7.66%)
lvl-0 : 3 ( 0.06%)
- lvl-1 : 20 ( 0.40%)
- lvl-2 : 68 ( 1.36%)
- lvl-3 : 112 ( 2.24%)
- lvl-4 : 180 ( 3.60%)
+ lvl-1 : 18 ( 0.36%)
+ lvl-2 : 62 ( 1.24%)
+ lvl-3 : 108 ( 2.16%)
+ lvl-4 : 191 ( 3.82%)
+ lvl-5 : 1 ( 0.02%)
deltas : 4618 (92.34%)
- revision size : 63327412
- snapshot : 9886710 (15.61%)
- lvl-0 : 603104 ( 0.95%)
- lvl-1 : 1559991 ( 2.46%)
- lvl-2 : 2295592 ( 3.62%)
- lvl-3 : 2531199 ( 4.00%)
- lvl-4 : 2896824 ( 4.57%)
- deltas : 53440702 (84.39%)
+ revision size : 58616973
+ snapshot : 9247844 (15.78%)
+ lvl-0 : 539532 ( 0.92%)
+ lvl-1 : 1467743 ( 2.50%)
+ lvl-2 : 1873820 ( 3.20%)
+ lvl-3 : 2326874 ( 3.97%)
+ lvl-4 : 3029118 ( 5.17%)
+ lvl-5 : 10757 ( 0.02%)
+ deltas : 49369129 (84.22%)
chunks : 5001
- 0x78 (x) : 5001 (100.00%)
- chunks size : 63327412
- 0x78 (x) : 63327412 (100.00%)
+ 0x28 : 5001 (100.00%)
+ chunks size : 58616973
+ 0x28 : 58616973 (100.00%)
avg chain length : 9
max chain length : 15
- max chain reach : 28248745
- compression ratio : 27
+ max chain reach : 27366701
+ compression ratio : 29
uncompressed data size (min/max/avg) : 346468 / 346472 / 346471
- full revision size (min/max/avg) : 201008 / 201050 / 201034
- inter-snapshot size (min/max/avg) : 11596 / 168150 / 24430
- level-1 (min/max/avg) : 16653 / 168150 / 77999
- level-2 (min/max/avg) : 12951 / 85595 / 33758
- level-3 (min/max/avg) : 11608 / 43029 / 22599
- level-4 (min/max/avg) : 11596 / 21632 / 16093
- delta size (min/max/avg) : 10649 / 107163 / 11572
+ full revision size (min/max/avg) : 179288 / 180786 / 179844
+ inter-snapshot size (min/max/avg) : 10757 / 169507 / 22916
+ level-1 (min/max/avg) : 13905 / 169507 / 81541
+ level-2 (min/max/avg) : 10887 / 83873 / 30222
+ level-3 (min/max/avg) : 10911 / 43047 / 21545
+ level-4 (min/max/avg) : 10838 / 21390 / 15859
+ level-5 (min/max/avg) : 10757 / 10757 / 10757
+ delta size (min/max/avg) : 9672 / 108072 / 10690
- deltas against prev : 3910 (84.67%)
- where prev = p1 : 3910 (100.00%)
+ deltas against prev : 3906 (84.58%)
+ where prev = p1 : 3906 (100.00%)
where prev = p2 : 0 ( 0.00%)
other : 0 ( 0.00%)
- deltas against p1 : 648 (14.03%)
- deltas against p2 : 60 ( 1.30%)
+ deltas against p1 : 649 (14.05%)
+ deltas against p2 : 63 ( 1.36%)
deltas against other : 0 ( 0.00%)
+
+
+Test `debug-delta-find`
+-----------------------
+
+ $ ls -1
+ SPARSE-REVLOG-TEST-FILE
+ $ hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1
+ 4971 4970 -1 3 5 4930 snap 19179 346472 427596 1.23414 15994877 15567281 36.40652 427596 179288 1.00000 5
+ $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971
+ DBG-DELTAS-SEARCH: SEARCH rev=4971
+ DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
+ DBG-DELTAS-SEARCH: type=snapshot-4
+ DBG-DELTAS-SEARCH: size=18296
+ DBG-DELTAS-SEARCH: base=4930
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
+ DBG-DELTAS-SEARCH: type=snapshot-4
+ DBG-DELTAS-SEARCH: size=19179
+ DBG-DELTAS-SEARCH: base=4930
+ DBG-DELTAS-SEARCH: TOO-HIGH
+ DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
+ DBG-DELTAS-SEARCH: type=snapshot-3
+ DBG-DELTAS-SEARCH: size=39228
+ DBG-DELTAS-SEARCH: base=4799
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
+ DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
+ DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
+ DBG-DELTAS-SEARCH: type=snapshot-2
+ DBG-DELTAS-SEARCH: size=50213
+ DBG-DELTAS-SEARCH: base=4623
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
+ DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+
+ $ cd ..
--- a/tests/test-ssh-proto-unbundle.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-ssh-proto-unbundle.t Thu Jun 16 15:28:54 2022 +0200
@@ -99,7 +99,6 @@
Test pushing to a server that has a pretxnchangegroup Python hook that fails
$ cat > $TESTTMP/failhook << EOF
- > from __future__ import print_function
> import sys
> def hook1line(ui, repo, **kwargs):
> ui.write(b'ui.write 1 line\n')
--- a/tests/test-ssh.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-ssh.t Thu Jun 16 15:28:54 2022 +0200
@@ -326,7 +326,6 @@
remote: added 1 changesets with 1 changes to 1 files (py3 !)
remote: KABOOM
remote: KABOOM IN PROCESS
- remote: added 1 changesets with 1 changes to 1 files (no-py3 !)
#endif
--- a/tests/test-sshserver.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-sshserver.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import io
import unittest
@@ -40,12 +38,12 @@
return wireprotoserver.sshserver(ui, repo)
-class mockrepo(object):
+class mockrepo:
def __init__(self, ui):
self.ui = ui
-class mockui(object):
+class mockui:
def __init__(self, inbytes):
self.fin = io.BytesIO(inbytes)
self.fout = io.BytesIO()
--- a/tests/test-status-inprocess.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-status-inprocess.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-from __future__ import absolute_import, print_function
import sys
--- a/tests/test-status-tracked-key.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-status-tracked-key.t Thu Jun 16 15:28:54 2022 +0200
@@ -202,3 +202,37 @@
.hg/dirstate-tracked-hint
$ hg debugrequires | grep 'tracked'
dirstate-tracked-key-v1
+ $ cd ..
+
+Test automatic upgrade and downgrade
+------------------------------------
+
+create an initial repository
+
+ $ hg init auto-upgrade \
+ > --config format.use-dirstate-tracked-hint=no
+ $ hg debugbuilddag -R auto-upgrade --new-file .+5
+ $ hg -R auto-upgrade update
+ 6 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg debugformat -R auto-upgrade | grep tracked
+ tracked-hint: no
+
+upgrade it to dirstate-tracked-hint automatically
+
+ $ hg status -R auto-upgrade \
+ > --config format.use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-tracked-hint=yes
+ automatically upgrading repository to the `tracked-hint` feature
+ (see `hg help config.format.use-dirstate-tracked-hint` for details)
+ $ hg debugformat -R auto-upgrade | grep tracked
+ tracked-hint: yes
+
+downgrade it from dirstate-tracked-hint automatically
+
+ $ hg status -R auto-upgrade \
+ > --config format.use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-tracked-hint=no
+ automatically downgrading repository from the `tracked-hint` feature
+ (see `hg help config.format.use-dirstate-tracked-hint` for details)
+ $ hg debugformat -R auto-upgrade | grep tracked
+ tracked-hint: no
--- a/tests/test-status.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-status.t Thu Jun 16 15:28:54 2022 +0200
@@ -315,9 +315,8 @@
]
$ hg status -A -Tpickle > pickle
- >>> from __future__ import print_function
+ >>> import pickle
>>> from mercurial import util
- >>> pickle = util.pickle
>>> data = sorted((x[b'status'].decode(), x[b'path'].decode()) for x in pickle.load(open("pickle", r"rb")))
>>> for s, p in data: print("%s %s" % (s, p))
! deleted
--- a/tests/test-stdio.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-stdio.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,11 +2,11 @@
"""
Tests the buffering behavior of stdio streams in `mercurial.utils.procutil`.
"""
-from __future__ import absolute_import
import contextlib
import errno
import os
+import pickle
import signal
import subprocess
import sys
@@ -16,25 +16,6 @@
from mercurial import pycompat, util
-if pycompat.ispy3:
-
- def set_noninheritable(fd):
- # On Python 3, file descriptors are non-inheritable by default.
- pass
-
-
-else:
- if pycompat.iswindows:
- # unused
- set_noninheritable = None
- else:
- import fcntl
-
- def set_noninheritable(fd):
- old = fcntl.fcntl(fd, fcntl.F_GETFD)
- fcntl.fcntl(fd, fcntl.F_SETFD, old | fcntl.FD_CLOEXEC)
-
-
TEST_BUFFERING_CHILD_SCRIPT = r'''
import os
@@ -127,10 +108,6 @@
@contextlib.contextmanager
def _pipes():
rwpair = os.pipe()
- # Pipes are already non-inheritable on Windows.
- if not pycompat.iswindows:
- set_noninheritable(rwpair[0])
- set_noninheritable(rwpair[1])
with _closing(rwpair):
yield rwpair
@@ -143,8 +120,6 @@
import tty
rwpair = pty.openpty()
- set_noninheritable(rwpair[0])
- set_noninheritable(rwpair[1])
with _closing(rwpair):
tty.setraw(rwpair[0])
yield rwpair
@@ -236,22 +211,7 @@
def test_buffering_stdout_ptys_unbuffered(self):
self._test_buffering('stdout', _ptys, UNBUFFERED, python_args=['-u'])
- if not pycompat.ispy3 and not pycompat.iswindows:
- # On Python 2 on non-Windows, we manually open stdout in line-buffered
- # mode if connected to a TTY. We should check if Python was configured
- # to use unbuffered stdout, but it's hard to do that.
- test_buffering_stdout_ptys_unbuffered = unittest.expectedFailure(
- test_buffering_stdout_ptys_unbuffered
- )
-
def _test_large_write(self, stream, rwpair_generator, python_args=[]):
- if not pycompat.ispy3 and pycompat.isdarwin:
- # Python 2 doesn't always retry on EINTR, but the libc might retry.
- # So far, it was observed only on macOS that EINTR is raised at the
- # Python level. As Python 2 support will be dropped soon-ish, we
- # won't attempt to fix it.
- raise unittest.SkipTest("raises EINTR on macOS")
-
def check_output(stream_receiver, proc):
if not pycompat.iswindows:
# On Unix, we can provoke a partial write() by interrupting it
@@ -268,16 +228,7 @@
)
def post_child_check():
- write_result_str = write_result_f.read()
- if pycompat.ispy3:
- # On Python 3, we test that the correct number of bytes is
- # claimed to have been written.
- expected_write_result_str = '1048576'
- else:
- # On Python 2, we only check that the large write does not
- # crash.
- expected_write_result_str = 'None'
- self.assertEqual(write_result_str, expected_write_result_str)
+ self.assertEqual(write_result_f.read(), '1048576')
with tempfile.NamedTemporaryFile('r') as write_result_f:
self._test(
@@ -336,7 +287,7 @@
proc.stdin.close()
def post_child_check():
- err = util.pickle.load(err_f)
+ err = pickle.load(err_f)
self.assertEqual(err.errno, errno.EPIPE)
self.assertEqual(err.strerror, "Broken pipe")
--- a/tests/test-storage.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-storage.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
# This test verifies the conformance of various classes to various
# storage interfaces.
-from __future__ import absolute_import
import silenttestrunner
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-strip-branch-cache.t Thu Jun 16 15:28:54 2022 +0200
@@ -0,0 +1,56 @@
+Define helpers.
+
+ $ hg_log () { hg log -G -T "{rev}:{node|short}"; }
+ $ commit () { echo "foo - ${2:-$1}" > $1; hg commit -Aqm "Edited $1"; }
+ $ strip() { hg --config extensions.strip= strip -q -r "$1" ; }
+
+Setup hg repo.
+
+ $ hg init repo
+ $ cd repo
+ $ touch x; hg add x; hg commit -m "initial"
+ $ hg clone -q . ../clone
+ $ commit a
+
+ $ cd ../clone
+
+ $ commit b
+
+ $ hg pull -q ../repo
+
+ $ cat .hg/cache/branch2-visible
+ 222ae9789a75703f9836e44de7db179cbfd420ee 2
+ a3498d6e39376d2456425dd8c692367bdbf00fa2 o default
+ 222ae9789a75703f9836e44de7db179cbfd420ee o default
+
+ $ hg_log
+ o 2:222ae9789a75
+ |
+ | @ 1:a3498d6e3937
+ |/
+ o 0:7ab0a3bd758a
+
+
+ $ strip '1:'
+
+The branchmap cache is not adjusted on strip.
+Now mentions a changelog entry that has been stripped.
+
+ $ cat .hg/cache/branch2-visible
+ 222ae9789a75703f9836e44de7db179cbfd420ee 2
+ a3498d6e39376d2456425dd8c692367bdbf00fa2 o default
+ 222ae9789a75703f9836e44de7db179cbfd420ee o default
+
+ $ commit c
+
+Not adjusted on commit, either.
+
+ $ cat .hg/cache/branch2-visible
+ 222ae9789a75703f9836e44de7db179cbfd420ee 2
+ a3498d6e39376d2456425dd8c692367bdbf00fa2 o default
+ 222ae9789a75703f9836e44de7db179cbfd420ee o default
+
+On pull we end up with the same tip, and so wrongly reuse the invalid cache and crash.
+
+ $ hg pull ../repo 2>&1 | grep 'ValueError:'
+ ValueError: node a3498d6e39376d2456425dd8c692367bdbf00fa2 does not exist (known-bad-output !)
--- a/tests/test-strip-cross.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-strip-cross.t Thu Jun 16 15:28:54 2022 +0200
@@ -39,37 +39,37 @@
> echo
> done
012
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b8e02f643373 000000000000 000000000000
1 1 5d9299349fc0 000000000000 000000000000
2 2 2661d26c6496 000000000000 000000000000
021
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 b8e02f643373 000000000000 000000000000
1 2 5d9299349fc0 000000000000 000000000000
2 1 2661d26c6496 000000000000 000000000000
102
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 b8e02f643373 000000000000 000000000000
1 0 5d9299349fc0 000000000000 000000000000
2 2 2661d26c6496 000000000000 000000000000
120
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 b8e02f643373 000000000000 000000000000
1 2 5d9299349fc0 000000000000 000000000000
2 0 2661d26c6496 000000000000 000000000000
201
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 2 b8e02f643373 000000000000 000000000000
1 0 5d9299349fc0 000000000000 000000000000
2 1 2661d26c6496 000000000000 000000000000
210
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 2 b8e02f643373 000000000000 000000000000
1 1 5d9299349fc0 000000000000 000000000000
2 0 2661d26c6496 000000000000 000000000000
@@ -127,7 +127,7 @@
$ hg clone -q -U -r 1 -r 2 -r 3 -r 4 orig crossed
$ cd crossed
$ hg debugindex --manifest
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 2 6bbc6fee55c2 000000000000 000000000000
1 0 1c556153fe54 000000000000 000000000000
2 1 1f76dba919fd 000000000000 000000000000
@@ -182,7 +182,7 @@
$ hg --config experimental.treemanifest=True clone -q -U -r 1 -r 2 -r 3 -r 4 orig crossed
$ cd crossed
$ hg debugindex --dir dir
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 2 6bbc6fee55c2 000000000000 000000000000
1 0 1c556153fe54 000000000000 000000000000
2 1 1f76dba919fd 000000000000 000000000000
--- a/tests/test-strip.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-strip.t Thu Jun 16 15:28:54 2022 +0200
@@ -1290,7 +1290,6 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo 3 >> I
$ cat > $TESTTMP/delayedstrip.py <<EOF
- > from __future__ import absolute_import
> from mercurial import commands, registrar, repair
> cmdtable = {}
> command = registrar.command(cmdtable)
--- a/tests/test-subrepo-git.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-subrepo-git.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,20 +1,5 @@
#require git
-# XXX-CHG When running with python2 + chg this test tend to get stuck and end up
-# as a time-out error. My effort to reproduce this outside of the CI failed. The
-# test itself seems to pass fine, but never "complete". Debugging it is slow and
-# tedious. This as a bad impact on the development process as most CI run end up
-# wasting abotu 1h until that one fails.
-#
-# Pierre-Yves David, Augie Fackler and Raphaël Gomès all agreed to disable this
-# case in that specific case until we figure this out (or we drop python2 o:-) )
-
-#if no-py3 chg
- $ echo 'skipped: this test get stuck on the CI with python2 + chg. investigation needed'
- $ exit 80
-#endif
-
-
make git commits repeatable
$ cat >> $HGRCPATH <<EOF
--- a/tests/test-subrepo-svn.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-subrepo-svn.t Thu Jun 16 15:28:54 2022 +0200
@@ -249,7 +249,7 @@
verify subrepo is contained within the repo directory
- $ "$PYTHON" -c "from __future__ import print_function; import os.path; print(os.path.exists('s'))"
+ $ "$PYTHON" -c "import os.path; print(os.path.exists('s'))"
True
update to nullrev (must delete the subrepo)
--- a/tests/test-symlink-os-yes-fs-no.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-symlink-os-yes-fs-no.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import os
import sys
import time
--- a/tests/test-template-functions.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-template-functions.t Thu Jun 16 15:28:54 2022 +0200
@@ -192,7 +192,6 @@
$ cd unstable-hash
$ hg log --template '{date|age}\n' > /dev/null || exit 1
- >>> from __future__ import absolute_import
>>> import datetime
>>> fp = open('a', 'wb')
>>> n = datetime.datetime.now() + datetime.timedelta(366 * 7)
@@ -1572,7 +1571,6 @@
Test cbor filter:
$ cat <<'EOF' > "$TESTTMP/decodecbor.py"
- > from __future__ import absolute_import
> from mercurial import (
> dispatch,
> )
--- a/tests/test-template-map.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-template-map.t Thu Jun 16 15:28:54 2022 +0200
@@ -722,7 +722,6 @@
test CBOR style:
$ cat <<'EOF' > "$TESTTMP/decodecborarray.py"
- > from __future__ import absolute_import
> from mercurial import (
> dispatch,
> )
--- a/tests/test-treemanifest.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-treemanifest.t Thu Jun 16 15:28:54 2022 +0200
@@ -130,7 +130,7 @@
$ cat dir1/b
6
$ hg debugindex --dir dir1
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 1 8b3ffd73f901 000000000000 000000000000
1 2 68e9d057c5a8 8b3ffd73f901 000000000000
2 4 4698198d2624 68e9d057c5a8 000000000000
@@ -276,7 +276,7 @@
Parent of tree root manifest should be flat manifest, and two for merge
$ hg debugindex -m
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 0 40536115ed9e 000000000000 000000000000
1 1 f3376063c255 40536115ed9e 000000000000
2 2 5d9b9da231a2 40536115ed9e 000000000000
@@ -296,13 +296,13 @@
Turning off treemanifest config has no effect
$ hg debugindex --dir dir1
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 4 064927a0648a 000000000000 000000000000
1 5 25ecb8cb8618 000000000000 000000000000
$ echo 2 > dir1/a
$ hg --config experimental.treemanifest=False ci -qm 'modify dir1/a'
$ hg debugindex --dir dir1
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 4 064927a0648a 000000000000 000000000000
1 5 25ecb8cb8618 000000000000 000000000000
2 6 5b16163a30c6 25ecb8cb8618 000000000000
@@ -315,7 +315,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/repo-mixed/.hg/strip-backup/51cfd7b1e13b-78a2f3ed-backup.hg
$ hg debugindex --dir dir1
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 4 064927a0648a 000000000000 000000000000
1 5 25ecb8cb8618 000000000000 000000000000
@@ -342,7 +342,7 @@
saved backup bundle to $TESTTMP/repo-mixed/.hg/strip-backup/*-backup.hg (glob)
$ hg unbundle -q .hg/strip-backup/*
$ hg debugindex --dir dir1
- rev linkrev nodeid p1 p2
+ rev linkrev nodeid p1-nodeid p2-nodeid
0 4 064927a0648a 000000000000 000000000000
1 5 25ecb8cb8618 000000000000 000000000000
2 6 5b16163a30c6 25ecb8cb8618 000000000000
--- a/tests/test-trusted.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-trusted.py Thu Jun 16 15:28:54 2022 +0200
@@ -2,7 +2,6 @@
# with files from different users/groups, we cheat a bit by
# monkey-patching some functions in the util module
-from __future__ import absolute_import, print_function
import os
import sys
--- a/tests/test-ui-color.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-ui-color.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import os
from mercurial import (
dispatch,
--- a/tests/test-ui-config.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-ui-config.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,4 +1,3 @@
-from __future__ import absolute_import, print_function
from mercurial import (
dispatch,
error,
--- a/tests/test-ui-verbosity.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-ui-verbosity.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,14 +1,9 @@
-from __future__ import absolute_import, print_function
-
import os
from mercurial import (
pycompat,
ui as uimod,
)
-if pycompat.ispy3:
- xrange = range
-
hgrc = os.environ['HGRCPATH']
f = open(hgrc)
basehgrc = f.read()
@@ -17,7 +12,7 @@
print(' hgrc settings command line options final result ')
print(' quiet verbo debug quiet verbo debug quiet verbo debug')
-for i in xrange(64):
+for i in range(64):
hgrc_quiet = bool(i & 1 << 0)
hgrc_verbose = bool(i & 1 << 1)
hgrc_debug = bool(i & 1 << 2)
--- a/tests/test-unified-test.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-unified-test.t Thu Jun 16 15:28:54 2022 +0200
@@ -26,7 +26,6 @@
Doctest commands:
- >>> from __future__ import print_function
>>> print('foo')
foo
$ echo interleaved
--- a/tests/test-update-atomic.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-update-atomic.t Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
Checking that experimental.atomic-file works.
$ cat > $TESTTMP/show_mode.py <<EOF
- > from __future__ import print_function
> import os
> import stat
> import sys
@@ -20,7 +19,6 @@
$ cd repo
$ cat > .hg/showwrites.py <<EOF
- > from __future__ import print_function
> from mercurial import pycompat
> from mercurial.utils import stringutil
> def uisetup(ui):
--- a/tests/test-upgrade-repo.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-upgrade-repo.t Thu Jun 16 15:28:54 2022 +0200
@@ -734,7 +734,6 @@
$ touch FooBarDirectory.d/f1
$ hg -q commit -A -m 'add f1'
$ hg -q up -r 0
- >>> from __future__ import absolute_import, print_function
>>> import random
>>> random.seed(0) # have a reproducible content
>>> with open("f2", "wb") as f:
@@ -958,7 +957,6 @@
Check that the repo still works fine
$ hg log -G --stat
- @ changeset: 2:76d4395f5413 (no-py3 !)
@ changeset: 2:fca376863211 (py3 !)
| tag: tip
| parent: 0:ba592bf28da2
@@ -1455,10 +1453,10 @@
format.revlog-compression=$BUNDLE2_COMPRESSIONS$
format.maxchainlen=9001
$ hg debugdeltachain file
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
- 0 1 1 -1 base 77 182 77 0.42308 77 0 0.00000 77 77 1.00000 1
- 1 1 2 0 p1 21 191 98 0.51309 98 0 0.00000 98 98 1.00000 1
- 2 1 2 0 other 30 200 107 0.53500 128 21 0.19626 128 128 0.83594 1
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
+ 0 -1 -1 1 1 -1 base 77 182 77 0.42308 77 0 0.00000 77 77 1.00000 1
+ 1 0 -1 1 2 0 p1 21 191 98 0.51309 98 0 0.00000 98 98 1.00000 1
+ 2 1 -1 1 2 0 snap 30 200 107 0.53500 128 21 0.19626 128 128 0.83594 1
$ hg debugupgraderepo --run --optimize 're-delta-all'
upgrade will perform the following actions:
@@ -1503,10 +1501,10 @@
copy of old repository backed up at $TESTTMP/localconfig/.hg/upgradebackup.* (glob)
the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
$ hg debugdeltachain file
- rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
- 0 1 1 -1 base 77 182 77 0.42308 77 0 0.00000 77 77 1.00000 1
- 1 1 2 0 p1 21 191 98 0.51309 98 0 0.00000 98 98 1.00000 1
- 2 1 3 1 p1 21 200 119 0.59500 119 0 0.00000 119 119 1.00000 1
+ rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
+ 0 -1 -1 1 1 -1 base 77 182 77 0.42308 77 0 0.00000 77 77 1.00000 1
+ 1 0 -1 1 2 0 p1 21 191 98 0.51309 98 0 0.00000 98 98 1.00000 1
+ 2 1 -1 1 3 1 p1 21 200 119 0.59500 119 0 0.00000 119 119 1.00000 1
$ cd ..
$ cat << EOF >> $HGRCPATH
@@ -1996,3 +1994,135 @@
dirstate-v2: no
$ cd ..
+
+Test automatic upgrade/downgrade
+================================
+
+
+For dirstate v2
+---------------
+
+create an initial repository
+
+ $ hg init auto-upgrade \
+ > --config format.use-dirstate-v2=no \
+ > --config format.use-dirstate-tracked-hint=yes \
+ > --config format.use-share-safe=no
+ $ hg debugbuilddag -R auto-upgrade --new-file .+5
+ $ hg -R auto-upgrade update
+ 6 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg debugformat -R auto-upgrade | grep dirstate-v2
+ dirstate-v2: no
+
+upgrade it to dirstate-v2 automatically
+
+ $ hg status -R auto-upgrade \
+ > --config format.use-dirstate-v2.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-v2=yes
+ automatically upgrading repository to the `dirstate-v2` feature
+ (see `hg help config.format.use-dirstate-v2` for details)
+ $ hg debugformat -R auto-upgrade | grep dirstate-v2
+ dirstate-v2: yes
+
+downgrade it from dirstate-v2 automatically
+
+ $ hg status -R auto-upgrade \
+ > --config format.use-dirstate-v2.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-v2=no
+ automatically downgrading repository from the `dirstate-v2` feature
+ (see `hg help config.format.use-dirstate-v2` for details)
+ $ hg debugformat -R auto-upgrade | grep dirstate-v2
+ dirstate-v2: no
+
+
+For multiple change at the same time
+------------------------------------
+
+ $ hg debugformat -R auto-upgrade | egrep '(dirstate-v2|tracked|share-safe)'
+ dirstate-v2: no
+ tracked-hint: yes
+ share-safe: no
+
+ $ hg status -R auto-upgrade \
+ > --config format.use-dirstate-v2.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-v2=yes \
+ > --config format.use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-tracked-hint=no\
+ > --config format.use-share-safe.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-share-safe=yes
+ automatically upgrading repository to the `dirstate-v2` feature
+ (see `hg help config.format.use-dirstate-v2` for details)
+ automatically upgrading repository to the `share-safe` feature
+ (see `hg help config.format.use-share-safe` for details)
+ automatically downgrading repository from the `tracked-hint` feature
+ (see `hg help config.format.use-dirstate-tracked-hint` for details)
+ $ hg debugformat -R auto-upgrade | egrep '(dirstate-v2|tracked|share-safe)'
+ dirstate-v2: yes
+ tracked-hint: no
+ share-safe: yes
+
+Quiet upgrade and downgrade
+---------------------------
+
+
+ $ hg debugformat -R auto-upgrade | egrep '(dirstate-v2|tracked|share-safe)'
+ dirstate-v2: yes
+ tracked-hint: no
+ share-safe: yes
+ $ hg status -R auto-upgrade \
+ > --config format.use-dirstate-v2.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet=yes \
+ > --config format.use-dirstate-v2=no \
+ > --config format.use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet=yes \
+ > --config format.use-dirstate-tracked-hint=yes \
+ > --config format.use-share-safe.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet=yes \
+ > --config format.use-share-safe=no
+
+ $ hg debugformat -R auto-upgrade | egrep '(dirstate-v2|tracked|share-safe)'
+ dirstate-v2: no
+ tracked-hint: yes
+ share-safe: no
+
+ $ hg status -R auto-upgrade \
+ > --config format.use-dirstate-v2.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet=yes \
+ > --config format.use-dirstate-v2=yes \
+ > --config format.use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet=yes \
+ > --config format.use-dirstate-tracked-hint=no\
+ > --config format.use-share-safe.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet=yes \
+ > --config format.use-share-safe=yes
+ $ hg debugformat -R auto-upgrade | egrep '(dirstate-v2|tracked|share-safe)'
+ dirstate-v2: yes
+ tracked-hint: no
+ share-safe: yes
+
+Attempting Auto-upgrade on a read-only repository
+-------------------------------------------------
+
+ $ chmod -R a-w auto-upgrade
+
+ $ hg status -R auto-upgrade \
+ > --config format.use-dirstate-v2.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-v2=no
+ $ hg debugformat -R auto-upgrade | grep dirstate-v2
+ dirstate-v2: yes
+
+ $ chmod -R u+w auto-upgrade
+
+Attempting Auto-upgrade on a locked repository
+----------------------------------------------
+
+ $ hg -R auto-upgrade debuglock --set-lock --quiet &
+ $ echo $! >> $DAEMON_PIDS
+ $ $RUNTESTDIR/testlib/wait-on-file 10 auto-upgrade/.hg/store/lock
+ $ hg status -R auto-upgrade \
+ > --config format.use-dirstate-v2.automatic-upgrade-of-mismatching-repositories=yes \
+ > --config format.use-dirstate-v2=no
+ $ hg debugformat -R auto-upgrade | grep dirstate-v2
+ dirstate-v2: yes
+
+ $ killdaemons.py
--- a/tests/test-url.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-url.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
# coding=utf-8
-from __future__ import absolute_import, print_function
import doctest
import os
--- a/tests/test-util.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-util.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,7 @@
# unit tests for mercuril.util utilities
-from __future__ import absolute_import
import contextlib
+import io
import itertools
import unittest
@@ -55,7 +55,7 @@
@contextlib.contextmanager
def capturestderr():
- """Replace utils.procutil.stderr with a pycompat.bytesio instance
+ """Replace utils.procutil.stderr with an io.BytesIO instance
The instance is made available as the return value of __enter__.
@@ -63,7 +63,7 @@
"""
orig = utils.procutil.stderr
- utils.procutil.stderr = pycompat.bytesio()
+ utils.procutil.stderr = io.BytesIO()
try:
yield utils.procutil.stderr
finally:
--- a/tests/test-verify-repo-operations.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-verify-repo-operations.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import print_function, absolute_import
-
"""Fuzz testing for operations against a Mercurial repository
This uses Hypothesis's stateful testing to generate random repository
@@ -38,7 +36,6 @@
import binascii
from contextlib import contextmanager
-import errno
import pipes
import shutil
import silenttestrunner
@@ -88,9 +85,8 @@
try:
os.close(os.open(savefile, os.O_CREAT | os.O_EXCL | os.O_WRONLY))
break
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ except FileExistsError:
+ pass
assert os.path.exists(savefile)
hgrc = os.path.join(".hg", "hgrc")
--- a/tests/test-verify.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-verify.t Thu Jun 16 15:28:54 2022 +0200
@@ -338,7 +338,6 @@
checked 1 changesets with 1 changes to 1 files
$ cat >> $TESTTMP/break-base64.py <<EOF
- > from __future__ import absolute_import
> import base64
> base64.b64decode=lambda x: x
> EOF
--- a/tests/test-walk.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-walk.t Thu Jun 16 15:28:54 2022 +0200
@@ -640,7 +640,6 @@
$ cd t
$ echo fennel > overflow.list
$ cat >> printnum.py <<EOF
- > from __future__ import print_function
> for i in range(20000 // 100):
> print('x' * 100)
> EOF
--- a/tests/test-walkrepo.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-walkrepo.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import os
from mercurial import (
--- a/tests/test-wireproto-clientreactor.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-wireproto-clientreactor.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import sys
import unittest
import zlib
--- a/tests/test-wireproto-framing.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-wireproto-framing.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import unittest
from mercurial import (
--- a/tests/test-wireproto-serverreactor.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-wireproto-serverreactor.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import unittest
from mercurial import (
--- a/tests/test-wireproto.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-wireproto.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import sys
from mercurial import (
@@ -16,7 +14,7 @@
stringio = util.stringio
-class proto(object):
+class proto:
def __init__(self, args):
self.args = args
self.name = 'dummyproto'
@@ -78,7 +76,7 @@
return {b'name': mangle(name)}, unmangle
-class serverrepo(object):
+class serverrepo:
def __init__(self, ui):
self.ui = ui
--- a/tests/test-worker.t Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-worker.t Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,6 @@
Test UI worker interaction
$ cat > t.py <<EOF
- > from __future__ import absolute_import, print_function
> import sys
> import time
> from mercurial import (
@@ -88,9 +87,7 @@
> test 100000.0 abort --traceback 2>&1 | egrep '(WorkerError|Abort)'
raise error.Abort(b'known exception')
mercurial.error.Abort: known exception (py3 !)
- Abort: known exception (no-py3 !)
raise error.WorkerError(status)
- WorkerError: 255 (no-py3 !)
mercurial.error.WorkerError: 255 (py3 !)
Traceback must be printed for unknown exceptions
@@ -102,7 +99,6 @@
Workers should not do cleanups in all cases
$ cat > $TESTTMP/detectcleanup.py <<EOF
- > from __future__ import absolute_import
> import atexit
> import os
> import sys
@@ -136,7 +132,6 @@
Do not crash on partially read result
$ cat > $TESTTMP/detecttruncated.py <<EOF
- > from __future__ import absolute_import
> import os
> import sys
> import time
--- a/tests/test-wsgirequest.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/test-wsgirequest.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import unittest
from mercurial.hgweb import request as requestmod
--- a/tests/testlib/badserverext.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/badserverext.py Thu Jun 16 15:28:54 2022 +0200
@@ -44,13 +44,11 @@
request)
"""
-from __future__ import absolute_import
import re
import socket
from mercurial import (
- pycompat,
registrar,
)
@@ -91,7 +89,7 @@
)
-class ConditionTracker(object):
+class ConditionTracker:
def __init__(
self,
close_after_recv_bytes,
@@ -257,7 +255,7 @@
# We can't adjust __class__ on a socket instance. So we define a proxy type.
-class socketproxy(object):
+class socketproxy:
__slots__ = ('_orig', '_logfp', '_cond')
def __init__(self, obj, logfp, condition_tracked):
@@ -301,7 +299,7 @@
# We can't adjust __class__ on socket._fileobject, so define a proxy.
-class fileobjectproxy(object):
+class fileobjectproxy:
__slots__ = ('_orig', '_logfp', '_cond')
def __init__(self, obj, logfp, condition_tracked):
@@ -336,17 +334,8 @@
object.__getattribute__(self, '_logfp').flush()
def _close(self):
- # Python 3 uses an io.BufferedIO instance. Python 2 uses some file
- # object wrapper.
- if pycompat.ispy3:
- orig = object.__getattribute__(self, '_orig')
-
- if hasattr(orig, 'raw'):
- orig.raw._sock.shutdown(socket.SHUT_RDWR)
- else:
- self.close()
- else:
- self._sock.shutdown(socket.SHUT_RDWR)
+ # We wrap an io.BufferedIO instance.
+ self.raw._sock.shutdown(socket.SHUT_RDWR)
def read(self, size=-1):
cond = object.__getattribute__(self, '_cond')
--- a/tests/testlib/crash_transaction_late.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/crash_transaction_late.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial import (
error,
--- a/tests/testlib/ext-phase-report.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/ext-phase-report.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,7 +1,5 @@
# tiny extension to report phase changes during transaction
-from __future__ import absolute_import
-
def reposetup(ui, repo):
def reportphasemove(tr):
--- a/tests/testlib/ext-sidedata-2.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/ext-sidedata-2.py Thu Jun 16 15:28:54 2022 +0200
@@ -8,7 +8,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import hashlib
import struct
--- a/tests/testlib/ext-sidedata-3.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/ext-sidedata-3.py Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import hashlib
import struct
--- a/tests/testlib/ext-sidedata-4.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/ext-sidedata-4.py Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
from mercurial.revlogutils import sidedata
--- a/tests/testlib/ext-sidedata-5.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/ext-sidedata-5.py Thu Jun 16 15:28:54 2022 +0200
@@ -9,7 +9,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import hashlib
import struct
--- a/tests/testlib/ext-sidedata.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/ext-sidedata.py Thu Jun 16 15:28:54 2022 +0200
@@ -5,7 +5,6 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from __future__ import absolute_import
import hashlib
import struct
--- a/tests/testlib/ext-stream-clone-steps.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/ext-stream-clone-steps.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from mercurial import (
encoding,
extensions,
--- a/tests/testlib/persistent-nodemap-race-ext.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/persistent-nodemap-race-ext.py Thu Jun 16 15:28:54 2022 +0200
@@ -35,7 +35,6 @@
/!\ valid.
"""
-from __future__ import print_function
import os
--- a/tests/testlib/sigpipe-remote.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/sigpipe-remote.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
-from __future__ import print_function
import io
import os
@@ -7,14 +6,6 @@
import sys
import time
-# we cannot use mercurial.testing as long as python2 is not dropped as the test
-# will only install the mercurial module for python2 in python2 run
-if sys.version_info[0] < 3:
- ver = '.'.join(str(x) for x in sys.version_info)
- exe = sys.executable
- print('SIGPIPE-HELPER: script should run with Python 3', file=sys.stderr)
- print('SIGPIPE-HELPER: %s is running %s' % (exe, ver), file=sys.stderr)
- sys.exit(255)
if isinstance(sys.stdout.buffer, io.BufferedWriter):
print('SIGPIPE-HELPER: script need unbuffered output', file=sys.stderr)
--- a/tests/testlib/sigpipe-worker.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/sigpipe-worker.py Thu Jun 16 15:28:54 2022 +0200
@@ -3,7 +3,6 @@
# This is literally `cat` but in python, one char at a time.
#
# see sigpipe-remote.py for details.
-from __future__ import print_function
import io
import os
--- a/tests/testlib/wait-on-file Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/testlib/wait-on-file Thu Jun 16 15:28:54 2022 +0200
@@ -29,7 +29,7 @@
touch "$create"
create=""
fi
-while [ "$timer" -gt 0 ] && [ ! -f "$wait_on" ]; do
+while [ "$timer" -gt 0 ] && !([ -e "$wait_on" ] || [ -L "$wait_on" ]) ; do
timer=$(( $timer - 1))
sleep 0.02
done
--- a/tests/tinyproxy.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/tinyproxy.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,6 +1,5 @@
#!/usr/bin/env python
-from __future__ import absolute_import, print_function
__doc__ = """Tiny HTTP Proxy.
--- a/tests/unwrap-message-id.py Thu Jun 16 15:15:03 2022 +0200
+++ b/tests/unwrap-message-id.py Thu Jun 16 15:28:54 2022 +0200
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, print_function
-
import sys
for line in sys.stdin: