changeset 51755:9b8c71d0b785
branching: merge stable into default
Post 6.8.1 release.
| author | Pierre-Yves David <pierre-yves.david@octobus.net> |
|---|---|
| date | Thu, 01 Aug 2024 16:42:38 +0200 |
| parents | c87c56ad6913 (diff) a431b41299fc (current diff) |
| children | a53162bd73ed |
| files | setup.py |
| diffstat | 179 files changed, 1280 insertions(+), 693 deletions(-) |
--- a/contrib/automation/hgautomation/cli.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/contrib/automation/hgautomation/cli.py	Thu Aug 01 16:42:38 2024 +0200
@@ -201,7 +201,6 @@
     with aws.temporary_linux_dev_instances(
         c, image, instance_type, ensure_extra_volume=ensure_extra_volume
     ) as insts:
-        instance = insts[0]
         linux.prepare_exec_environment(
--- a/contrib/check-config.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/contrib/check-config.py	Thu Aug 01 16:42:38 2024 +0200
@@ -57,7 +57,6 @@
             return b
         return b.decode('utf8')
-
 else:
     mkstr = lambda x: x
--- a/contrib/check-pytype.sh Thu Aug 01 16:34:37 2024 +0200 +++ b/contrib/check-pytype.sh Thu Aug 01 16:42:38 2024 +0200 @@ -56,7 +56,6 @@ # mercurial/localrepo.py # [attribute-error] # mercurial/manifest.py # [unsupported-operands], [wrong-arg-types] # mercurial/minirst.py # [unsupported-operands], [attribute-error] -# mercurial/pure/osutil.py # [invalid-typevar], [not-callable] # mercurial/pure/parsers.py # [attribute-error] # mercurial/repoview.py # [attribute-error] # mercurial/testing/storage.py # tons of [attribute-error] @@ -116,7 +115,6 @@ -x mercurial/localrepo.py \ -x mercurial/manifest.py \ -x mercurial/minirst.py \ - -x mercurial/pure/osutil.py \ -x mercurial/pure/parsers.py \ -x mercurial/repoview.py \ -x mercurial/testing/storage.py \
--- a/contrib/fuzz/mpatch_corpus.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/contrib/fuzz/mpatch_corpus.py	Thu Aug 01 16:42:38 2024 +0200
@@ -25,7 +25,6 @@
             """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__"""
             return self.__bytes__()
-
 else:
 
     class py2reprhack:
--- a/contrib/heptapod-ci.yml Thu Aug 01 16:34:37 2024 +0200 +++ b/contrib/heptapod-ci.yml Thu Aug 01 16:42:38 2024 +0200 @@ -27,7 +27,7 @@ variables: PYTHON: python TEST_HGMODULEPOLICY: "allow" - HG_CI_IMAGE_TAG: "v1.0" + HG_CI_IMAGE_TAG: "v2.1" TEST_HGTESTS_ALLOW_NETIO: "0" .all_template: &all @@ -39,15 +39,16 @@ # The runner made a clone as root. # We make a new clone owned by user used to run the step. before_script: + - export PATH="/home/ci-runner/vendor/pyenv/pyenv-2.4.7-adf3c2bccf09cdb81febcfd15b186711a33ac7a8/shims:/home/ci-runner/vendor/pyenv/pyenv-2.4.7-adf3c2bccf09cdb81febcfd15b186711a33ac7a8/bin:$PATH" + - echo "python used, $PYTHON" + - $PYTHON --version + - black --version + - clang-format --version - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` - cd /tmp/mercurial-ci/ - ls -1 tests/test-check-*.* > /tmp/check-tests.txt - - black --version - - clang-format --version script: - - echo "python used, $PYTHON" - - $PYTHON --version - echo "$RUNTEST_ARGS" - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS @@ -55,25 +56,21 @@ <<: *runtests variables: RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt" - PYTHON: python3 CI_CLEVER_CLOUD_FLAVOR: S rust-cargo-test: <<: *all stage: tests script: - - echo "python used, $PYTHON" - make rust-tests - make cargo-clippy variables: - PYTHON: python3 CI_CLEVER_CLOUD_FLAVOR: S test-c: <<: *runtests variables: RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" - PYTHON: python3 TEST_HGMODULEPOLICY: "c" TEST_HGTESTS_ALLOW_NETIO: "1" @@ -81,7 +78,6 @@ <<: *runtests variables: RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt" - PYTHON: python3 TEST_HGMODULEPOLICY: "py" test-rust: @@ -89,7 +85,6 @@ variables: HGWITHRUSTEXT: cpython RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" - PYTHON: python3 TEST_HGMODULEPOLICY: "rust+c" test-rhg: @@ -97,30 +92,27 @@ variables: HGWITHRUSTEXT: cpython RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt" - PYTHON: python3 TEST_HGMODULEPOLICY: "rust+c" test-chg: <<: *runtests variables: - PYTHON: python3 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg" TEST_HGMODULEPOLICY: "c" check-pytype: extends: .runtests_template before_script: + - export PATH="/home/ci-runner/vendor/pyenv/pyenv-2.4.7-adf3c2bccf09cdb81febcfd15b186711a33ac7a8/shims:/home/ci-runner/vendor/pyenv/pyenv-2.4.7-adf3c2bccf09cdb81febcfd15b186711a33ac7a8/bin:$PATH" + - echo "PATH, $PATH" - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` - cd /tmp/mercurial-ci/ - make local PYTHON=$PYTHON - - $PYTHON -m pip install --user -U libcst==0.3.20 pytype==2022.11.18 - ./contrib/setup-pytype.sh script: - echo "Entering script section" - sh contrib/check-pytype.sh - variables: - PYTHON: python3 # `sh.exe --login` sets a couple of extra environment variables that are defined # in the MinGW shell, but switches CWD to /home/$username. The previous value
--- a/contrib/hgclient.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/contrib/hgclient.py	Thu Aug 01 16:42:38 2024 +0200
@@ -21,7 +21,6 @@
         pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
         stdout.write(b' '.join(pargs) + b'\n')
-
 else:
     import cStringIO
--- a/contrib/nix/flake.nix Thu Aug 01 16:34:37 2024 +0200 +++ b/contrib/nix/flake.nix Thu Aug 01 16:42:38 2024 +0200 @@ -53,7 +53,7 @@ # but uses formatter features from nightly. # TODO: make cargo use the formatter from nightly automatically # (not supported by rustup/cargo yet? workaround?) - # rustPlatform = pkgs.rust-bin.stable."1.61.0".default; + # rustPlatform = pkgs.rust-bin.stable."1.79.0".default; # rustPlatformFormatter = pkgs.rust-bin.nightly."2023-04-20".default; # The CI uses an old version of the Black code formatter,
--- a/contrib/perf-utils/compare-discovery-case	Thu Aug 01 16:34:37 2024 +0200
+++ b/contrib/perf-utils/compare-discovery-case	Thu Aug 01 16:42:38 2024 +0200
@@ -205,7 +205,6 @@
 if __name__ == '__main__':
-    argv = sys.argv[:]
     kwargs = {}
--- a/contrib/perf.py Thu Aug 01 16:34:37 2024 +0200 +++ b/contrib/perf.py Thu Aug 01 16:42:38 2024 +0200 @@ -130,7 +130,6 @@ def revlog(opener, *args, **kwargs): return mercurial.revlog.revlog(opener, perf_rl_kind, *args, **kwargs) - except (ImportError, AttributeError): perf_rl_kind = None @@ -261,7 +260,6 @@ commands.norepo += b' %s' % b' '.join(parsealiases(name)) return _command(name, list(options), synopsis) - else: # for "historical portability": # define "@command" annotation locally, because cmdutil.command @@ -1926,7 +1924,7 @@ opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) - mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg + mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg if opts[b'no_lookup']: if opts['rev']: raise error.Abort('--no-lookup and --rev are mutually exclusive') @@ -1985,7 +1983,7 @@ opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) - mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg + mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg unfi = repo.unfiltered() clearcaches = opts[b'clear_caches'] @@ -2389,7 +2387,7 @@ timer, fm = gettimer(ui, opts) import mercurial.revlog - mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg + mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg n = scmutil.revsingle(repo, rev).node() try: @@ -3102,7 +3100,7 @@ # disable inlining old_max_inline = mercurial.revlog._maxinline # large enough to never happen - mercurial.revlog._maxinline = 2 ** 50 + mercurial.revlog._maxinline = 2**50 with repo.lock(): bundle = [None, None]
--- a/contrib/python-zstandard/tests/test_compressor_fuzzing.py Thu Aug 01 16:34:37 2024 +0200 +++ b/contrib/python-zstandard/tests/test_compressor_fuzzing.py Thu Aug 01 16:42:38 2024 +0200 @@ -137,7 +137,6 @@ def test_buffer_source_read_variance( self, original, level, source_read_size, read_sizes ): - refctx = zstd.ZstdCompressor(level=level) ref_frame = refctx.compress(original) @@ -203,7 +202,6 @@ def test_buffer_source_readinto( self, original, level, source_read_size, read_size ): - refctx = zstd.ZstdCompressor(level=level) ref_frame = refctx.compress(original) @@ -273,7 +271,6 @@ def test_buffer_source_readinto_variance( self, original, level, source_read_size, read_sizes ): - refctx = zstd.ZstdCompressor(level=level) ref_frame = refctx.compress(original) @@ -410,7 +407,6 @@ def test_buffer_source_read1_variance( self, original, level, source_read_size, read_sizes ): - refctx = zstd.ZstdCompressor(level=level) ref_frame = refctx.compress(original) @@ -551,7 +547,6 @@ def test_buffer_source_readinto1_variance( self, original, level, source_read_size, read_sizes ): - refctx = zstd.ZstdCompressor(level=level) ref_frame = refctx.compress(original)
--- a/contrib/python-zstandard/tests/test_decompressor.py Thu Aug 01 16:34:37 2024 +0200 +++ b/contrib/python-zstandard/tests/test_decompressor.py Thu Aug 01 16:42:38 2024 +0200 @@ -189,7 +189,7 @@ # Will get OverflowError on some Python distributions that can't # handle really large integers. with self.assertRaises((MemoryError, OverflowError)): - dctx.decompress(compressed, max_output_size=2 ** 62) + dctx.decompress(compressed, max_output_size=2**62) def test_dictionary(self): samples = [] @@ -238,7 +238,7 @@ cctx = zstd.ZstdCompressor(write_content_size=False) frame = cctx.compress(source) - dctx = zstd.ZstdDecompressor(max_window_size=2 ** zstd.WINDOWLOG_MIN) + dctx = zstd.ZstdDecompressor(max_window_size=2**zstd.WINDOWLOG_MIN) with self.assertRaisesRegex( zstd.ZstdError,
--- a/contrib/python-zstandard/tests/test_decompressor_fuzzing.py Thu Aug 01 16:34:37 2024 +0200 +++ b/contrib/python-zstandard/tests/test_decompressor_fuzzing.py Thu Aug 01 16:42:38 2024 +0200 @@ -353,7 +353,6 @@ def test_multiple_frames( self, originals, frame_count, level, source_read_size, read_sizes ): - cctx = zstd.ZstdCompressor(level=level) source = io.BytesIO() buffer = io.BytesIO()
--- a/contrib/python-zstandard/zstandard/cffi.py Thu Aug 01 16:34:37 2024 +0200 +++ b/contrib/python-zstandard/zstandard/cffi.py Thu Aug 01 16:42:38 2024 +0200 @@ -273,7 +273,6 @@ ldm_hash_every_log=-1, threads=0, ): - params = lib.ZSTD_createCCtxParams() if params == ffi.NULL: raise MemoryError() @@ -1423,7 +1422,6 @@ read_size=COMPRESSION_RECOMMENDED_INPUT_SIZE, write_size=COMPRESSION_RECOMMENDED_OUTPUT_SIZE, ): - if not hasattr(ifh, "read"): raise ValueError("first argument must have a read() method") if not hasattr(ofh, "write"): @@ -1523,7 +1521,6 @@ write_size=COMPRESSION_RECOMMENDED_OUTPUT_SIZE, write_return_read=False, ): - if not hasattr(writer, "write"): raise ValueError("must pass an object with a write() method")
--- a/contrib/revsetbenchmarks.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/contrib/revsetbenchmarks.py	Thu Aug 01 16:42:38 2024 +0200
@@ -191,7 +191,7 @@
 def formattiming(value):
     """format a value to strictly 8 char, dropping some precision if needed"""
-    if value < 10 ** 7:
+    if value < 10**7:
         return ('%.6f' % value)[:8]
     else:
         # value is HUGE very unlikely to happen (4+ month run)
@@ -371,7 +371,6 @@
 print()
 for ridx, rset in enumerate(revsets):
-
     print("revset #%i: %s" % (ridx, rset))
     printheader(variants, len(results), verbose=options.verbose, relative=True)
     ref = None
--- a/contrib/setup-pytype.sh	Thu Aug 01 16:34:37 2024 +0200
+++ b/contrib/setup-pytype.sh	Thu Aug 01 16:42:38 2024 +0200
@@ -5,7 +5,7 @@
 # Find the python3 setup that would run pytype
 PYTYPE=`which pytype`
-PYTHON3=`head -n1 ${PYTYPE} | sed -s 's/#!//'`
+PYTHON3=${PYTHON:-`head -n1 ${PYTYPE} | sed -s 's/#!//'`}
 
 # Existing stubs that pytype processes live here
 TYPESHED=$(${PYTHON3} -c "import pytype; print(pytype.__path__[0])")/typeshed/stubs
--- a/contrib/win32/hgwebdir_wsgi.py Thu Aug 01 16:34:37 2024 +0200 +++ b/contrib/win32/hgwebdir_wsgi.py Thu Aug 01 16:42:38 2024 +0200 @@ -101,6 +101,7 @@ import isapi_wsgi from mercurial.hgweb.hgwebdir_mod import hgwebdir + # Example tweak: Replace isapi_wsgi's handler to provide better error message # Other stuff could also be done here, like logging errors etc. class WsgiHandler(isapi_wsgi.IsapiWsgiHandler): @@ -114,7 +115,6 @@ def handler(environ, start_response): - # Translate IIS's weird URLs url = environ['SCRIPT_NAME'] + environ['PATH_INFO'] paths = url[1:].split('/')[path_strip:]
--- a/doc/hgmanpage.py Thu Aug 01 16:34:37 2024 +0200 +++ b/doc/hgmanpage.py Thu Aug 01 16:42:38 2024 +0200 @@ -95,7 +95,6 @@ class Writer(writers.Writer): - supported = 'manpage' """Formats this writer supports.""" @@ -297,7 +296,7 @@ (u'´', u"\\'"), (u'`', u'\\(ga'), ] - for (in_char, out_markup) in replace_pairs: + for in_char, out_markup in replace_pairs: text = text.replace(in_char, out_markup) # unicode text = self.deunicode(text)
--- a/hgext/acl.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/acl.py Thu Aug 01 16:42:38 2024 +0200 @@ -279,7 +279,6 @@ def _getusers(ui, group): - # First, try to use group definition from section [acl.groups] hgrcusers = ui.configlist(b'acl.groups', group) if hgrcusers: @@ -294,12 +293,10 @@ def _usermatch(ui, user, usersorgroups): - if usersorgroups == b'*': return True for ug in usersorgroups.replace(b',', b' ').split(): - if ug.startswith(b'!'): # Test for excluded user or group. Format: # if ug is a user name: !username @@ -368,7 +365,6 @@ def hook(ui, repo, hooktype, node=None, source=None, **kwargs): - ensureenabled(ui) if hooktype not in [b'pretxnchangegroup', b'pretxncommit', b'prepushkey']:
--- a/hgext/convert/common.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/convert/common.py Thu Aug 01 16:42:38 2024 +0200 @@ -6,12 +6,18 @@ # GNU General Public License version 2 or any later version. import base64 -import datetime import os import pickle import re import shlex import subprocess +import typing + +from typing import ( + Any, + AnyStr, + Optional, +) from mercurial.i18n import _ from mercurial.pycompat import open @@ -22,11 +28,33 @@ pycompat, util, ) -from mercurial.utils import procutil +from mercurial.utils import ( + dateutil, + procutil, +) + +if typing.TYPE_CHECKING: + from typing import ( + overload, + ) + from mercurial import ( + ui as uimod, + ) propertycache = util.propertycache +if typing.TYPE_CHECKING: + + @overload + def _encodeornone(d: str) -> bytes: + pass + + @overload + def _encodeornone(d: None) -> None: + pass + + def _encodeornone(d): if d is None: return @@ -34,7 +62,7 @@ class _shlexpy3proxy: - def __init__(self, l): + def __init__(self, l: shlex.shlex) -> None: self._l = l def __iter__(self): @@ -44,15 +72,22 @@ return _encodeornone(self._l.get_token()) @property - def infile(self): - return self._l.infile or b'<unknown>' + def infile(self) -> bytes: + if self._l.infile is not None: + return encoding.strtolocal(self._l.infile) + return b'<unknown>' @property - def lineno(self): + def lineno(self) -> int: return self._l.lineno -def shlexer(data=None, filepath=None, wordchars=None, whitespace=None): +def shlexer( + data=None, + filepath: Optional[bytes] = None, + wordchars: Optional[bytes] = None, + whitespace: Optional[bytes] = None, +): if data is None: data = open(filepath, b'r', encoding='latin1') else: @@ -61,7 +96,8 @@ b'shlexer only accepts data or filepath, not both' ) data = data.decode('latin1') - l = shlex.shlex(data, infile=filepath, posix=True) + infile = encoding.strfromlocal(filepath) if filepath is not None else None + l = shlex.shlex(data, infile=infile, posix=True) if whitespace is not None: l.whitespace_split = True l.whitespace += whitespace.decode('latin1') @@ -70,8 +106,8 @@ return _shlexpy3proxy(l) -def encodeargs(args): - def encodearg(s): +def encodeargs(args: Any) -> bytes: + def encodearg(s: bytes) -> bytes: lines = base64.encodebytes(s) lines = [l.splitlines()[0] for l in pycompat.iterbytestr(lines)] return b''.join(lines) @@ -80,7 +116,7 @@ return encodearg(s) -def decodeargs(s): +def decodeargs(s: bytes) -> Any: s = base64.decodebytes(s) return pickle.loads(s) @@ -89,7 +125,9 @@ pass -def checktool(exe, name=None, abort=True): +def checktool( + exe: bytes, name: Optional[bytes] = None, abort: bool = True +) -> None: name = name or exe if not procutil.findexe(exe): if abort: @@ -103,25 +141,25 @@ pass -SKIPREV = b'SKIP' +SKIPREV: bytes = b'SKIP' class commit: def __init__( self, - author, - date, - desc, + author: bytes, + date: bytes, + desc: bytes, parents, - branch=None, + branch: Optional[bytes] = None, rev=None, extra=None, sortkey=None, saverev=True, - phase=phases.draft, + phase: int = phases.draft, optparents=None, ctx=None, - ): + ) -> None: self.author = author or b'unknown' self.date = date or b'0 0' self.desc = desc @@ -139,7 +177,13 @@ class converter_source: """Conversion source interface""" - def __init__(self, ui, repotype, path=None, revs=None): + def __init__( + self, + ui: "uimod.ui", + repotype: bytes, + path: Optional[bytes] = None, + revs=None, + ) -> None: """Initialize conversion source (or raise NoRepo("message") exception if path is not a valid repository)""" self.ui = ui @@ -149,7 +193,9 @@ 
self.encoding = b'utf-8' - def checkhexformat(self, revstr, mapname=b'splicemap'): + def checkhexformat( + self, revstr: bytes, mapname: bytes = b'splicemap' + ) -> None: """fails if revstr is not a 40 byte hex. mercurial and git both uses such format for their revision numbering """ @@ -159,10 +205,10 @@ % (mapname, revstr) ) - def before(self): + def before(self) -> None: pass - def after(self): + def after(self) -> None: pass def targetfilebelongstosource(self, targetfilename): @@ -221,7 +267,7 @@ """ raise NotImplementedError - def recode(self, s, encoding=None): + def recode(self, s: AnyStr, encoding: Optional[bytes] = None) -> bytes: if not encoding: encoding = self.encoding or b'utf-8' @@ -250,17 +296,17 @@ """ raise NotImplementedError - def converted(self, rev, sinkrev): + def converted(self, rev, sinkrev) -> None: '''Notify the source that a revision has been converted.''' - def hasnativeorder(self): + def hasnativeorder(self) -> bool: """Return true if this source has a meaningful, native revision order. For instance, Mercurial revisions are store sequentially while there is no such global ordering with Darcs. """ return False - def hasnativeclose(self): + def hasnativeclose(self) -> bool: """Return true if this source has ability to close branch.""" return False @@ -278,7 +324,7 @@ """ return {} - def checkrevformat(self, revstr, mapname=b'splicemap'): + def checkrevformat(self, revstr, mapname: bytes = b'splicemap') -> bool: """revstr is a string that describes a revision in the given source control system. Return true if revstr has correct format. @@ -289,7 +335,7 @@ class converter_sink: """Conversion sink (target) interface""" - def __init__(self, ui, repotype, path): + def __init__(self, ui: "uimod.ui", repotype: bytes, path: bytes) -> None: """Initialize conversion sink (or raise NoRepo("message") exception if path is not a valid repository) @@ -357,10 +403,10 @@ filter empty revisions. 
""" - def before(self): + def before(self) -> None: pass - def after(self): + def after(self) -> None: pass def putbookmarks(self, bookmarks): @@ -383,17 +429,17 @@ class commandline: - def __init__(self, ui, command): + def __init__(self, ui: "uimod.ui", command: bytes) -> None: self.ui = ui self.command = command - def prerun(self): + def prerun(self) -> None: pass - def postrun(self): + def postrun(self) -> None: pass - def _cmdline(self, cmd, *args, **kwargs): + def _cmdline(self, cmd: bytes, *args: bytes, **kwargs) -> bytes: kwargs = pycompat.byteskwargs(kwargs) cmdline = [self.command, cmd] + list(args) for k, v in kwargs.items(): @@ -414,7 +460,7 @@ cmdline = b' '.join(cmdline) return cmdline - def _run(self, cmd, *args, **kwargs): + def _run(self, cmd: bytes, *args: bytes, **kwargs): def popen(cmdline): p = subprocess.Popen( procutil.tonativestr(cmdline), @@ -427,13 +473,13 @@ return self._dorun(popen, cmd, *args, **kwargs) - def _run2(self, cmd, *args, **kwargs): + def _run2(self, cmd: bytes, *args: bytes, **kwargs): return self._dorun(procutil.popen2, cmd, *args, **kwargs) - def _run3(self, cmd, *args, **kwargs): + def _run3(self, cmd: bytes, *args: bytes, **kwargs): return self._dorun(procutil.popen3, cmd, *args, **kwargs) - def _dorun(self, openfunc, cmd, *args, **kwargs): + def _dorun(self, openfunc, cmd: bytes, *args: bytes, **kwargs): cmdline = self._cmdline(cmd, *args, **kwargs) self.ui.debug(b'running: %s\n' % (cmdline,)) self.prerun() @@ -442,20 +488,20 @@ finally: self.postrun() - def run(self, cmd, *args, **kwargs): + def run(self, cmd: bytes, *args: bytes, **kwargs): p = self._run(cmd, *args, **kwargs) output = p.communicate()[0] self.ui.debug(output) return output, p.returncode - def runlines(self, cmd, *args, **kwargs): + def runlines(self, cmd: bytes, *args: bytes, **kwargs): p = self._run(cmd, *args, **kwargs) output = p.stdout.readlines() p.wait() self.ui.debug(b''.join(output)) return output, p.returncode - def checkexit(self, status, output=b''): + def checkexit(self, status, output: bytes = b'') -> None: if status: if output: self.ui.warn(_(b'%s error:\n') % self.command) @@ -463,12 +509,12 @@ msg = procutil.explainexit(status) raise error.Abort(b'%s %s' % (self.command, msg)) - def run0(self, cmd, *args, **kwargs): + def run0(self, cmd: bytes, *args: bytes, **kwargs): output, status = self.run(cmd, *args, **kwargs) self.checkexit(status, output) return output - def runlines0(self, cmd, *args, **kwargs): + def runlines0(self, cmd: bytes, *args: bytes, **kwargs): output, status = self.runlines(cmd, *args, **kwargs) self.checkexit(status, b''.join(output)) return output @@ -491,7 +537,7 @@ # (and make happy Windows shells while doing this). 
return argmax // 2 - 1 - def _limit_arglist(self, arglist, cmd, *args, **kwargs): + def _limit_arglist(self, arglist, cmd: bytes, *args: bytes, **kwargs): cmdlen = len(self._cmdline(cmd, *args, **kwargs)) limit = self.argmax - cmdlen numbytes = 0 @@ -508,13 +554,13 @@ if fl: yield fl - def xargs(self, arglist, cmd, *args, **kwargs): + def xargs(self, arglist, cmd: bytes, *args: bytes, **kwargs): for l in self._limit_arglist(arglist, cmd, *args, **kwargs): self.run0(cmd, *(list(args) + l), **kwargs) class mapfile(dict): - def __init__(self, ui, path): + def __init__(self, ui: "uimod.ui", path: bytes) -> None: super(mapfile, self).__init__() self.ui = ui self.path = path @@ -522,31 +568,34 @@ self.order = [] self._read() - def _read(self): + def _read(self) -> None: if not self.path: return try: fp = open(self.path, b'rb') except FileNotFoundError: return - for i, line in enumerate(fp): - line = line.splitlines()[0].rstrip() - if not line: - # Ignore blank lines - continue - try: - key, value = line.rsplit(b' ', 1) - except ValueError: - raise error.Abort( - _(b'syntax error in %s(%d): key/value pair expected') - % (self.path, i + 1) - ) - if key not in self: - self.order.append(key) - super(mapfile, self).__setitem__(key, value) - fp.close() - def __setitem__(self, key, value): + try: + for i, line in enumerate(fp): + line = line.splitlines()[0].rstrip() + if not line: + # Ignore blank lines + continue + try: + key, value = line.rsplit(b' ', 1) + except ValueError: + raise error.Abort( + _(b'syntax error in %s(%d): key/value pair expected') + % (self.path, i + 1) + ) + if key not in self: + self.order.append(key) + super(mapfile, self).__setitem__(key, value) + finally: + fp.close() + + def __setitem__(self, key, value) -> None: if self.fp is None: try: self.fp = open(self.path, b'ab') @@ -559,18 +608,11 @@ self.fp.flush() super(mapfile, self).__setitem__(key, value) - def close(self): + def close(self) -> None: if self.fp: self.fp.close() self.fp = None -def makedatetimestamp(t): - """Like dateutil.makedate() but for time t instead of current time""" - tz = round( - t - - datetime.datetime.fromtimestamp(t) - .replace(tzinfo=datetime.timezone.utc) - .timestamp() - ) - return t, tz +def makedatetimestamp(t: float) -> dateutil.hgdate: + return dateutil.makedate(t)
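The tail of the `hgext/convert/common.py` diff above drops the hand-rolled `makedatetimestamp()` in favour of `dateutil.makedate(t)`. A minimal before/after sketch of what the simplification amounts to (illustrative only, assuming `mercurial` is importable):

```python
import datetime

from mercurial.utils import dateutil


def makedatetimestamp_old(t: float):
    """Former implementation: derive the UTC offset for t via datetime."""
    tz = round(
        t
        - datetime.datetime.fromtimestamp(t)
        .replace(tzinfo=datetime.timezone.utc)
        .timestamp()
    )
    return t, tz


def makedatetimestamp_new(t: float) -> dateutil.hgdate:
    """New implementation: dateutil already returns the (unixtime, offset) pair."""
    return dateutil.makedate(t)
```

Both variants produce a `(timestamp, tzoffset)` pair; the new form simply reuses the existing helper instead of recomputing the offset.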
--- a/hgext/convert/convcmd.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/convert/convcmd.py Thu Aug 01 16:42:38 2024 +0200 @@ -9,6 +9,16 @@ import heapq import os import shutil +import typing + +from typing import ( + AnyStr, + Dict, + List, + Mapping, + Optional, + Union, +) from mercurial.i18n import _ from mercurial.pycompat import open @@ -36,6 +46,11 @@ subversion, ) +if typing.TYPE_CHECKING: + from mercurial import ( + ui as uimod, + ) + mapfile = common.mapfile MissingTool = common.MissingTool NoRepo = common.NoRepo @@ -53,15 +68,14 @@ svn_sink = subversion.svn_sink svn_source = subversion.svn_source -orig_encoding = b'ascii' +orig_encoding: bytes = b'ascii' -def readauthormap(ui, authorfile, authors=None): +def readauthormap(ui: "uimod.ui", authorfile, authors=None): if authors is None: authors = {} with open(authorfile, b'rb') as afile: for line in afile: - line = line.strip() if not line or line.startswith(b'#'): continue @@ -86,7 +100,7 @@ return authors -def recode(s): +def recode(s: AnyStr) -> bytes: if isinstance(s, str): return s.encode(pycompat.sysstr(orig_encoding), 'replace') else: @@ -95,7 +109,7 @@ ) -def mapbranch(branch, branchmap): +def mapbranch(branch: bytes, branchmap: Mapping[bytes, bytes]) -> bytes: """ >>> bmap = {b'default': b'branch1'} >>> for i in [b'', None]: @@ -147,7 +161,7 @@ ] -def convertsource(ui, path, type, revs): +def convertsource(ui: "uimod.ui", path: bytes, type: bytes, revs): exceptions = [] if type and type not in [s[0] for s in source_converters]: raise error.Abort(_(b'%s: invalid source repository type') % type) @@ -163,7 +177,9 @@ raise error.Abort(_(b'%s: missing or unsupported repository') % path) -def convertsink(ui, path, type): +def convertsink( + ui: "uimod.ui", path: bytes, type: bytes +) -> Union[hgconvert.mercurial_sink, subversion.svn_sink]: if type and type not in [s[0] for s in sink_converters]: raise error.Abort(_(b'%s: invalid destination repository type') % type) for name, sink in sink_converters: @@ -178,7 +194,9 @@ class progresssource: - def __init__(self, ui, source, filecount): + def __init__( + self, ui: "uimod.ui", source, filecount: Optional[int] + ) -> None: self.ui = ui self.source = source self.progress = ui.makeprogress( @@ -253,8 +271,7 @@ class converter: - def __init__(self, ui, source, dest, revmapfile, opts): - + def __init__(self, ui: "uimod.ui", source, dest, revmapfile, opts) -> None: self.source = source self.dest = dest self.ui = ui @@ -280,7 +297,7 @@ self.splicemap = self.parsesplicemap(opts.get(b'splicemap')) self.branchmap = mapfile(ui, opts.get(b'branchmap')) - def parsesplicemap(self, path): + def parsesplicemap(self, path: bytes) -> Dict[bytes, List[bytes]]: """check and validate the splicemap format and return a child/parents dictionary. Format checking has two parts. 
@@ -295,31 +312,31 @@ return {} m = {} try: - fp = open(path, b'rb') - for i, line in enumerate(fp): - line = line.splitlines()[0].rstrip() - if not line: - # Ignore blank lines - continue - # split line - lex = common.shlexer(data=line, whitespace=b',') - line = list(lex) - # check number of parents - if not (2 <= len(line) <= 3): - raise error.Abort( - _( - b'syntax error in %s(%d): child parent1' - b'[,parent2] expected' + with open(path, b'rb') as fp: + for i, line in enumerate(fp): + line = line.splitlines()[0].rstrip() + if not line: + # Ignore blank lines + continue + # split line + lex = common.shlexer(data=line, whitespace=b',') + line = list(lex) + # check number of parents + if not (2 <= len(line) <= 3): + raise error.Abort( + _( + b'syntax error in %s(%d): child parent1' + b'[,parent2] expected' + ) + % (path, i + 1) ) - % (path, i + 1) - ) - for part in line: - self.source.checkrevformat(part) - child, p1, p2 = line[0], line[1:2], line[2:] - if p1 == p2: - m[child] = p1 - else: - m[child] = p1 + p2 + for part in line: + self.source.checkrevformat(part) + child, p1, p2 = line[0], line[1:2], line[2:] + if p1 == p2: + m[child] = p1 + else: + m[child] = p1 + p2 # if file does not exist or error reading, exit except IOError: raise error.Abort( @@ -356,7 +373,7 @@ return parents - def mergesplicemap(self, parents, splicemap): + def mergesplicemap(self, parents, splicemap) -> None: """A splicemap redefines child/parent relationships. Check the map contains valid revision identifiers and merge the new links in the source graph. @@ -488,20 +505,19 @@ return s - def writeauthormap(self): + def writeauthormap(self) -> None: authorfile = self.authorfile if authorfile: self.ui.status(_(b'writing author map file %s\n') % authorfile) - ofile = open(authorfile, b'wb+') - for author in self.authors: - ofile.write( - util.tonativeeol( - b"%s=%s\n" % (author, self.authors[author]) + with open(authorfile, b'wb+') as ofile: + for author in self.authors: + ofile.write( + util.tonativeeol( + b"%s=%s\n" % (author, self.authors[author]) + ) ) - ) - ofile.close() - def readauthormap(self, authorfile): + def readauthormap(self, authorfile) -> None: self.authors = readauthormap(self.ui, authorfile, self.authors) def cachecommit(self, rev): @@ -511,7 +527,7 @@ self.commitcache[rev] = commit return commit - def copy(self, rev): + def copy(self, rev) -> None: commit = self.commitcache[rev] full = self.opts.get(b'full') changes = self.source.getchanges(rev, full) @@ -563,7 +579,7 @@ self.source.converted(rev, newnode) self.map[rev] = newnode - def convert(self, sortmode): + def convert(self, sortmode) -> None: try: self.source.before() self.dest.before() @@ -628,7 +644,7 @@ finally: self.cleanup() - def cleanup(self): + def cleanup(self) -> None: try: self.dest.after() finally: @@ -636,7 +652,9 @@ self.map.close() -def convert(ui, src, dest=None, revmapfile=None, **opts): +def convert( + ui: "uimod.ui", src, dest: Optional[bytes] = None, revmapfile=None, **opts +) -> None: opts = pycompat.byteskwargs(opts) global orig_encoding orig_encoding = encoding.encoding
--- a/hgext/convert/cvs.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/convert/cvs.py Thu Aug 01 16:42:38 2024 +0200 @@ -11,9 +11,6 @@ import socket from mercurial.i18n import _ -from mercurial.pycompat import ( - open, -) from mercurial import ( encoding, error, @@ -52,8 +49,8 @@ self.tags = {} self.lastbranch = {} self.socket = None - self.cvsroot = open(os.path.join(cvs, b"Root"), b'rb').read()[:-1] - self.cvsrepo = open(os.path.join(cvs, b"Repository"), b'rb').read()[:-1] + self.cvsroot = util.readfile(os.path.join(cvs, b"Root"))[:-1] + self.cvsrepo = util.readfile(os.path.join(cvs, b"Repository"))[:-1] self.encoding = encoding.encoding self._connect() @@ -160,8 +157,7 @@ passw = b"A" cvspass = os.path.expanduser(b"~/.cvspass") try: - pf = open(cvspass, b'rb') - for line in pf.read().splitlines(): + for line in util.readfile(cvspass).splitlines(): part1, part2 = line.split(b' ', 1) # /1 :pserver:user@example.com:2401/cvsroot/foo # Ah<Z @@ -174,7 +170,6 @@ if part1 == format: passw = part2 break - pf.close() except IOError as inst: if inst.errno != errno.ENOENT: if not getattr(inst, 'filename', None):
--- a/hgext/convert/cvsps.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/convert/cvsps.py Thu Aug 01 16:42:38 2024 +0200 @@ -161,7 +161,7 @@ # Use the Root file in the sandbox, if it exists try: - root = open(os.path.join(b'CVS', b'Root'), b'rb').read().strip() + root = util.readfile(os.path.join(b'CVS', b'Root')).strip() except IOError: pass @@ -195,16 +195,17 @@ if cache == b'update': try: ui.note(_(b'reading cvs log cache %s\n') % cachefile) - oldlog = pickle.load(open(cachefile, b'rb')) - for e in oldlog: - if not ( - hasattr(e, 'branchpoints') - and hasattr(e, 'commitid') - and hasattr(e, 'mergepoint') - ): - ui.status(_(b'ignoring old cache\n')) - oldlog = [] - break + with open(cachefile, b'rb') as fp: + oldlog = pickle.load(fp) + for e in oldlog: + if not ( + hasattr(e, 'branchpoints') + and hasattr(e, 'commitid') + and hasattr(e, 'mergepoint') + ): + ui.status(_(b'ignoring old cache\n')) + oldlog = [] + break ui.note(_(b'cache has %d log entries\n') % len(oldlog)) except Exception as e: @@ -526,7 +527,9 @@ # write the new cachefile ui.note(_(b'writing cvs log cache %s\n') % cachefile) - pickle.dump(log, open(cachefile, b'wb')) + + with open(cachefile, b'wb') as fp: + pickle.dump(log, fp) else: log = oldlog @@ -636,7 +639,6 @@ files = set() c = None for i, e in enumerate(log): - # Check if log entry belongs to the current changeset or not. # Since CVS is file-centric, two different file revisions with @@ -980,7 +982,6 @@ branches = {} # latest version number in each branch ancestors = {} # parent branch for cs in changesets: - if opts[b"ancestors"]: if cs.branch not in branches and cs.parents and cs.parents[0].id: ancestors[cs.branch] = (
--- a/hgext/convert/filemap.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/convert/filemap.py Thu Aug 01 16:42:38 2024 +0200 @@ -6,6 +6,17 @@ import posixpath +import typing + +from typing import ( + Iterator, + Mapping, + MutableMapping, + Optional, + Set, + Tuple, + overload, +) from mercurial.i18n import _ from mercurial import ( @@ -14,10 +25,15 @@ ) from . import common +if typing.TYPE_CHECKING: + from mercurial import ( + ui as uimod, + ) + SKIPREV = common.SKIPREV -def rpairs(path): +def rpairs(path: bytes) -> Iterator[Tuple[bytes, bytes]]: """Yield tuples with path split at '/', starting with the full path. No leading, trailing or double '/', please. >>> for x in rpairs(b'foo/bar/baz'): print(x) @@ -33,6 +49,17 @@ yield b'.', path +if typing.TYPE_CHECKING: + + @overload + def normalize(path: bytes) -> bytes: + pass + + @overload + def normalize(path: None) -> None: + pass + + def normalize(path): """We use posixpath.normpath to support cross-platform path format. However, it doesn't handle None input. So we wrap it up.""" @@ -46,7 +73,10 @@ A name can be mapped to itself, a new name, or None (omit from new repository).""" - def __init__(self, ui, path=None): + rename: MutableMapping[bytes, bytes] + targetprefixes: Optional[Set[bytes]] + + def __init__(self, ui: "uimod.ui", path: Optional[bytes] = None) -> None: self.ui = ui self.include = {} self.exclude = {} @@ -56,10 +86,10 @@ if self.parse(path): raise error.Abort(_(b'errors in filemap')) - def parse(self, path): + def parse(self, path: Optional[bytes]) -> int: errs = 0 - def check(name, mapping, listname): + def check(name: bytes, mapping, listname: bytes): if not name: self.ui.warn( _(b'%s:%d: path to %s is missing\n') @@ -110,7 +140,9 @@ cmd = lex.get_token() return errs - def lookup(self, name, mapping): + def lookup( + self, name: bytes, mapping: Mapping[bytes, bytes] + ) -> Tuple[bytes, bytes, bytes]: name = normalize(name) for pre, suf in rpairs(name): try: @@ -119,7 +151,7 @@ pass return b'', name, b'' - def istargetfile(self, filename): + def istargetfile(self, filename: bytes) -> bool: """Return true if the given target filename is covered as a destination of the filemap. 
This is useful for identifying what parts of the target repo belong to the source repo and what parts don't.""" @@ -143,7 +175,7 @@ return True return False - def __call__(self, name): + def __call__(self, name: bytes) -> Optional[bytes]: if self.include: inc = self.lookup(name, self.include)[0] else: @@ -165,7 +197,7 @@ return newpre return name - def active(self): + def active(self) -> bool: return bool(self.include or self.exclude or self.rename) @@ -185,7 +217,9 @@ class filemap_source(common.converter_source): - def __init__(self, ui, baseconverter, filemap): + def __init__( + self, ui: "uimod.ui", baseconverter, filemap: Optional[bytes] + ) -> None: super(filemap_source, self).__init__(ui, baseconverter.repotype) self.base = baseconverter self.filemapper = filemapper(ui, filemap) @@ -206,10 +240,10 @@ b'convert', b'ignoreancestorcheck' ) - def before(self): + def before(self) -> None: self.base.before() - def after(self): + def after(self) -> None: self.base.after() def setrevmap(self, revmap): @@ -243,7 +277,7 @@ self.convertedorder = converted return self.base.setrevmap(revmap) - def rebuild(self): + def rebuild(self) -> bool: if self._rebuilt: return True self._rebuilt = True @@ -276,7 +310,7 @@ def getheads(self): return self.base.getheads() - def getcommit(self, rev): + def getcommit(self, rev: bytes): # We want to save a reference to the commit objects to be able # to rewrite their parents later on. c = self.commits[rev] = self.base.getcommit(rev) @@ -292,7 +326,7 @@ return self.commits[rev] return self.base.getcommit(rev) - def _discard(self, *revs): + def _discard(self, *revs) -> None: for r in revs: if r is None: continue @@ -304,7 +338,7 @@ if self._rebuilt: del self.children[r] - def wanted(self, rev, i): + def wanted(self, rev, i) -> bool: # Return True if we're directly interested in rev. # # i is an index selecting one of the parents of rev (if rev @@ -332,7 +366,7 @@ # doesn't consider it significant, and this revision should be dropped. return not files and b'close' not in self.commits[rev].extra - def mark_not_wanted(self, rev, p): + def mark_not_wanted(self, rev, p) -> None: # Mark rev as not interesting and update data structures. if p is None: @@ -347,7 +381,7 @@ self.parentmap[rev] = self.parentmap[p] self.wantedancestors[rev] = self.wantedancestors[p] - def mark_wanted(self, rev, parents): + def mark_wanted(self, rev, parents) -> None: # Mark rev ss wanted and update data structures. # rev will be in the restricted graph, so children of rev in @@ -474,7 +508,7 @@ return files, ncopies, ncleanp2 - def targetfilebelongstosource(self, targetfilename): + def targetfilebelongstosource(self, targetfilename: bytes) -> bool: return self.filemapper.istargetfile(targetfilename) def getfile(self, name, rev): @@ -484,7 +518,7 @@ def gettags(self): return self.base.gettags() - def hasnativeorder(self): + def hasnativeorder(self) -> bool: return self.base.hasnativeorder() def lookuprev(self, rev):
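Both `convert/common.py` and `convert/filemap.py` above gain `@overload` declarations guarded by `typing.TYPE_CHECKING`, so type checkers see precise bytes/None signatures while the runtime keeps a single implementation. A standalone sketch of that pattern, mirroring the `normalize()` helper from the diff (trimmed for illustration):

```python
import posixpath
import typing

if typing.TYPE_CHECKING:
    from typing import overload

    @overload
    def normalize(path: bytes) -> bytes:
        pass

    @overload
    def normalize(path: None) -> None:
        pass


def normalize(path):
    """Single runtime implementation; the overloads only exist for type
    checkers, since posixpath.normpath() does not accept None."""
    if path is None:
        return None
    return posixpath.normpath(path)
```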
--- a/hgext/convert/monotone.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/convert/monotone.py Thu Aug 01 16:42:38 2024 +0200 @@ -43,9 +43,8 @@ if not os.path.exists(os.path.join(path, b'_MTN')): # Could be a monotone repository (SQLite db file) try: - f = open(path, b'rb') - header = f.read(16) - f.close() + with open(path, b'rb') as f: + header = f.read(16) except IOError: header = b'' if header != b'SQLite format 3\x00':
--- a/hgext/convert/subversion.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/convert/subversion.py Thu Aug 01 16:42:38 2024 +0200 @@ -1425,7 +1425,6 @@ return self.join(b'hg-authormap') def __init__(self, ui, repotype, path): - converter_sink.__init__(self, ui, repotype, path) commandline.__init__(self, ui, b'svn') self.delete = [] @@ -1488,9 +1487,11 @@ prop_actions_allowed.append((b'M', b'svn:date')) hook = os.path.join(created, b'hooks', b'pre-revprop-change') - fp = open(hook, b'wb') - fp.write(gen_pre_revprop_change_hook(prop_actions_allowed)) - fp.close() + + util.writefile( + hook, gen_pre_revprop_change_hook(prop_actions_allowed) + ) + util.setflags(hook, False, True) output = self.run0(b'info')
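In the `subversion.py` hunks above, the svn sink now writes the `pre-revprop-change` hook through `util.writefile()` and marks it executable with `util.setflags()` instead of an explicit open/write/close. A small usage sketch; the path and hook body below are placeholders, not taken from the patch:

```python
from mercurial import util

hook = b'/tmp/example-svn-repo/hooks/pre-revprop-change'  # hypothetical path
script = b'#!/bin/sh\nexit 0\n'                           # hypothetical hook body

util.writefile(hook, script)      # write the file in one call
util.setflags(hook, False, True)  # l=False (not a symlink), x=True (executable)
```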
--- a/hgext/extdiff.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/extdiff.py	Thu Aug 01 16:42:38 2024 +0200
@@ -405,7 +405,6 @@
     guitool,
     opts,
 ):
-
     subrepos = opts.get(b'subrepos')
 
     # calculate list of files changed between both revs
--- a/hgext/fastannotate/commands.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/fastannotate/commands.py	Thu Aug 01 16:42:38 2024 +0200
@@ -7,6 +7,9 @@
 import os
 
+from typing import (
+    Set,
+)
 from mercurial.i18n import _
 from mercurial import (
@@ -254,7 +257,7 @@
 _newopts = set()
-_knownopts = {
+_knownopts: Set[bytes] = {
     opt[1].replace(b'-', b'_')
     for opt in (fastannotatecommandargs['options'] + commands.globalopts)
 }
--- a/hgext/fastannotate/context.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/fastannotate/context.py Thu Aug 01 16:42:38 2024 +0200 @@ -38,6 +38,7 @@ revmap as revmapmod, ) + # given path, get filelog, cached @util.lrucachefunc def _getflog(repo, path): @@ -173,12 +174,16 @@ 'followmerge': True, } + diffopts: mdiff.diffopts + followrename: bool + followmerge: bool + def __init__(self, **opts): for k, v in self.defaults.items(): setattr(self, k, opts.get(k, v)) @util.propertycache - def shortstr(self): + def shortstr(self) -> bytes: """represent opts in a short string, suitable for a directory name""" result = b'' if not self.followrename:
--- a/hgext/fastannotate/formatter.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/fastannotate/formatter.py	Thu Aug 01 16:42:38 2024 +0200
@@ -17,6 +17,7 @@
 )
 from mercurial.utils import dateutil
 
+
 # imitating mercurial.commands.annotate, not using the vanilla formatter since
 # the data structures are a bit different, and we have some fast paths.
 class defaultformatter:
--- a/hgext/fsmonitor/__init__.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/fsmonitor/__init__.py	Thu Aug 01 16:42:38 2024 +0200
@@ -893,7 +893,6 @@
     matcher=None,
     **kwargs
 ):
-
     distance = 0
     partial = True
     oldnode = repo[b'.'].node()
--- a/hgext/fsmonitor/pywatchman/__init__.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/fsmonitor/pywatchman/__init__.py	Thu Aug 01 16:42:38 2024 +0200
@@ -210,7 +210,6 @@
             )
         )
-
 else:
 
     def log(fmt, *args):
--- a/hgext/fsmonitor/pywatchman/encoding.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/fsmonitor/pywatchman/encoding.py	Thu Aug 01 16:42:38 2024 +0200
@@ -46,7 +46,6 @@
         # returns None.
         return sys.getfilesystemencoding()
-
 else:
     # Python 2 doesn't support surrogateescape, so use 'strict' by
     # default. Users can register a custom surrogateescape error handler and use
--- a/hgext/highlight/highlight.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/highlight/highlight.py	Thu Aug 01 16:42:38 2024 +0200
@@ -43,7 +43,6 @@
 def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
-
     # append a <link ...> to the syntax highlighting css
     tmpl.load(b'header')
     old_header = tmpl.cache[b'header']
--- a/hgext/histedit.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/histedit.py Thu Aug 01 16:42:38 2024 +0200 @@ -1526,7 +1526,8 @@ def move_cursor(self, oldpos, newpos): """Change the rule/changeset that the cursor is pointing to, regardless of - current mode (you can switch between patches from the view patch window).""" + current mode (you can switch between patches from the view patch window). + """ self.pos = newpos mode, _ = self.mode @@ -1605,7 +1606,8 @@ def change_view(self, delta, unit): """Change the region of whatever is being viewed (a patch or the list of - changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.""" + changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'. + """ mode, _ = self.mode if mode != MODE_PATCH: return
--- a/hgext/journal.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/journal.py	Thu Aug 01 16:42:38 2024 +0200
@@ -64,6 +64,7 @@
     bookmarktype: hg.sharedbookmarks,
 }
 
+
 # Journal recording, register hooks and storage object
 def extsetup(ui):
     extensions.wrapfunction(dispatch, 'runcommand', runcommand)
--- a/hgext/keyword.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/keyword.py	Thu Aug 01 16:42:38 2024 +0200
@@ -160,6 +160,8 @@
     b'svn',
     default=False,
 )
+
+
 # date like in cvs' $Date
 @templatefilter(b'utcdate', intype=templateutil.date)
 def utcdate(date):
--- a/hgext/largefiles/overrides.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/largefiles/overrides.py Thu Aug 01 16:42:38 2024 +0200 @@ -825,11 +825,11 @@ if not os.path.isdir(makestandin(dest)): os.makedirs(makestandin(dest)) + # When we call orig below it creates the standins but we don't add + # them to the dir state until later so lock during that time. + wlock = repo.wlock() + try: - # When we call orig below it creates the standins but we don't add - # them to the dir state until later so lock during that time. - wlock = repo.wlock() - manifest = repo[None].manifest() def overridematch( @@ -897,7 +897,7 @@ result += orig(ui, repo, listpats, opts, rename) lfdirstate = lfutil.openlfdirstate(ui, repo) - for (src, dest) in copiedfiles: + for src, dest in copiedfiles: if lfutil.shortname in src and dest.startswith( repo.wjoin(lfutil.shortname) ):
--- a/hgext/largefiles/reposetup.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/largefiles/reposetup.py Thu Aug 01 16:42:38 2024 +0200 @@ -140,7 +140,6 @@ wlock = util.nullcontextmanager() gotlock = False with wlock, self.dirstate.running_status(self): - # First check if paths or patterns were specified on the # command line. If there were, and they don't match any # largefiles, we should just bail here and let super
--- a/hgext/narrow/narrowbundle2.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/narrow/narrowbundle2.py	Thu Aug 01 16:42:38 2024 +0200
@@ -37,6 +37,7 @@
 _CSHEADERSIZE = struct.calcsize(_ELIDEDCSHEADER)
 _MFHEADERSIZE = struct.calcsize(_ELIDEDMFHEADER)
 
+
 # Serve a changegroup for a client with a narrow clone.
 def getbundlechangegrouppart_narrow(
     bundler,
--- a/hgext/notify.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/notify.py	Thu Aug 01 16:42:38 2024 +0200
@@ -543,7 +543,6 @@
         )
 
     def diff(self, ctx, ref=None):
-
         maxdiff = int(self.ui.config(b'notify', b'maxdiff'))
         prev = ctx.p1().node()
         if ref:
--- a/hgext/patchbomb.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/patchbomb.py	Thu Aug 01 16:42:38 2024 +0200
@@ -261,7 +261,6 @@
     numbered,
     patchname=None,
 ):
-
     desc = []
     node = None
     body = b''
--- a/hgext/phabricator.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/phabricator.py Thu Aug 01 16:42:38 2024 +0200 @@ -68,10 +68,17 @@ import operator import re import time +import typing from mercurial.node import bin, short from mercurial.i18n import _ from mercurial.thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from mercurial import ( cmdutil, context, @@ -698,7 +705,7 @@ oldLength = attr.ib(default=0) # camelcase-required newOffset = attr.ib(default=0) # camelcase-required newLength = attr.ib(default=0) # camelcase-required - corpus = attr.ib(default='') + corpus = attr.ib(default=b'') # These get added to the phabchange's equivalents addLines = attr.ib(default=0) # camelcase-required delLines = attr.ib(default=0) # camelcase-required
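The `phabricator.py` hunk introduces a pattern repeated throughout this merge (also in `sqlitestore.py`, `changelog.py`, `cmdutil.py` and `bundlecaches.py`): re-importing `attr` under `typing.TYPE_CHECKING` so pytype resolves the real `attrs` package instead of the vendored copy. A minimal sketch of the pattern, with a hypothetical class standing in for the real ones:

```python
import typing

from mercurial.thirdparty import attr

# Force pytype to use the non-vendored package
if typing.TYPE_CHECKING:
    # noinspection PyPackageRequirements
    import attr


@attr.s
class examplehunk:  # hypothetical class, for illustration only
    corpus = attr.ib(default=b'')  # bytes default, as phabhunk.corpus now uses
```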
--- a/hgext/rebase.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/rebase.py	Thu Aug 01 16:42:38 2024 +0200
@@ -830,7 +830,6 @@
             cleanup = False
 
     if cleanup:
-
         if rebased:
             strippoints = [
                 c.node() for c in repo.set(b'roots(%ld)', rebased)
--- a/hgext/remotefilelog/basepack.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/remotefilelog/basepack.py Thu Aug 01 16:42:38 2024 +0200 @@ -45,7 +45,7 @@ # bisect) with (8 step fanout scan + 1 step bisect) # 5 step bisect = log(2^16 / 8 / 255) # fanout # 10 step fanout scan = 2^16 / (2^16 / 8) # fanout space divided by entries -SMALLFANOUTCUTOFF = 2 ** 16 // 8 +SMALLFANOUTCUTOFF = 2**16 // 8 # The amount of time to wait between checking for new packs. This prevents an # exception when data is moved to a new pack after the process has already @@ -275,7 +275,7 @@ class basepack(versionmixin): # The maximum amount we should read via mmap before remmaping so the old # pages can be released (100MB) - MAXPAGEDIN = 100 * 1024 ** 2 + MAXPAGEDIN = 100 * 1024**2 SUPPORTED_VERSIONS = [2]
--- a/hgext/remotefilelog/connectionpool.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/remotefilelog/connectionpool.py	Thu Aug 01 16:42:38 2024 +0200
@@ -38,7 +38,6 @@
                 pass
 
         if conn is None:
-
             peer = hg.peer(self._repo.ui, {}, path)
 
             if hasattr(peer, '_cleanup'):
--- a/hgext/remotefilelog/datapack.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/remotefilelog/datapack.py	Thu Aug 01 16:42:38 2024 +0200
@@ -414,7 +414,7 @@
     def add(self, name, node, deltabasenode, delta, metadata=None):
         # metadata is a dict, ex. {METAKEYFLAG: flag}
-        if len(name) > 2 ** 16:
+        if len(name) > 2**16:
             raise RuntimeError(_(b"name too long %s") % name)
         if len(node) != 20:
             raise RuntimeError(_(b"node should be 20 bytes %s") % node)
--- a/hgext/remotefilelog/remotefilelog.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/remotefilelog/remotefilelog.py	Thu Aug 01 16:42:38 2024 +0200
@@ -41,7 +41,6 @@
 class remotefilelog:
-
     _flagserrorclass = error.RevlogError
 
     def __init__(self, opener, path, repo):
--- a/hgext/remotefilelog/shallowrepo.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/remotefilelog/shallowrepo.py	Thu Aug 01 16:42:38 2024 +0200
@@ -32,6 +32,7 @@
     shallowutil,
 )
 
+
 # These make*stores functions are global so that other extensions can replace
 # them.
 def makelocalstores(repo):
--- a/hgext/remotenames.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/hgext/remotenames.py	Thu Aug 01 16:42:38 2024 +0200
@@ -259,7 +259,6 @@
 def reposetup(ui, repo):
-
     # set the config option to store remotenames
     repo.ui.setconfig(b'experimental', b'remotenames', True, b'remotenames-ext')
--- a/hgext/sqlitestore.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/sqlitestore.py Thu Aug 01 16:42:38 2024 +0200 @@ -47,6 +47,7 @@ import sqlite3 import struct import threading +import typing import zlib from mercurial.i18n import _ @@ -56,6 +57,12 @@ short, ) from mercurial.thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from mercurial import ( ancestor, dagop, @@ -649,7 +656,6 @@ deltamode=deltamode, sidedata_helpers=sidedata_helpers, ): - yield delta # End of ifiledata interface.
--- a/hgext/uncommit.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/uncommit.py Thu Aug 01 16:42:38 2024 +0200 @@ -154,7 +154,6 @@ cmdutil.resolve_commit_options(ui, opts) with repo.wlock(), repo.lock(): - st = repo.status() m, a, r, d = st.modified, st.added, st.removed, st.deleted isdirtypath = any(set(m + a + r + d) & set(pats)) @@ -264,7 +263,6 @@ unfi = repo.unfiltered() with repo.wlock(), repo.lock(), repo.transaction(b'unamend'): - # identify the commit from which to unamend curctx = repo[b'.']
--- a/hgext/zeroconf/Zeroconf.py Thu Aug 01 16:34:37 2024 +0200 +++ b/hgext/zeroconf/Zeroconf.py Thu Aug 01 16:42:38 2024 +0200 @@ -1307,6 +1307,7 @@ delay = _LISTENER_TIME next = now + delay last = now + timeout + result = False try: zeroconf.addListener( self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN) @@ -1352,7 +1353,7 @@ zeroconf.wait(min(next, last) - now) now = currentTimeMillis() - result = 1 + result = True finally: zeroconf.removeListener(self)
--- a/i18n/polib.py Thu Aug 01 16:34:37 2024 +0200 +++ b/i18n/polib.py Thu Aug 01 16:42:38 2024 +0200 @@ -64,7 +64,6 @@ def u(s): return unicode(s, "unicode_escape") - else: PY3 = True text_type = str @@ -1889,7 +1888,6 @@ chunks.reverse() while chunks: - # Start the list of chunks that will make up the current line. # cur_len is just the length of all the chunks in cur_line. cur_line = []
--- a/mercurial/ancestor.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/mercurial/ancestor.py	Thu Aug 01 16:42:38 2024 +0200
@@ -88,7 +88,7 @@
     depth = [0] * count
     seen = [0] * count
    mapping = []
-    for (i, n) in enumerate(sorted(nodes)):
+    for i, n in enumerate(sorted(nodes)):
         depth[n] = 1
         b = 1 << i
         seen[n] = b
--- a/mercurial/bookmarks.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/mercurial/bookmarks.py	Thu Aug 01 16:42:38 2024 +0200
@@ -685,7 +685,7 @@
     remotemarks"""
     changed = []
     localmarks = repo._bookmarks
-    for (b, id) in remotemarks.items():
+    for b, id in remotemarks.items():
         if id != localmarks.get(b, None) and id in repo:
             changed.append((b, id, ui.debug, _(b"updating bookmark %s\n") % b))
     for b in localmarks:
--- a/mercurial/bundle2.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/bundle2.py Thu Aug 01 16:42:38 2024 +0200 @@ -153,6 +153,7 @@ import string import struct import sys +import typing from .i18n import _ from .node import ( @@ -181,6 +182,17 @@ ) from .interfaces import repository +if typing.TYPE_CHECKING: + from typing import ( + Dict, + List, + Optional, + Tuple, + Union, + ) + + Capabilities = Dict[bytes, Union[List[bytes], Tuple[bytes, ...]]] + urlerr = util.urlerr urlreq = util.urlreq @@ -602,7 +614,7 @@ ) -def decodecaps(blob): +def decodecaps(blob: bytes) -> "Capabilities": """decode a bundle2 caps bytes blob into a dictionary The blob is a list of capabilities (one per line) @@ -662,11 +674,14 @@ _magicstring = b'HG20' - def __init__(self, ui, capabilities=()): + def __init__(self, ui, capabilities: "Optional[Capabilities]" = None): + if capabilities is None: + capabilities = {} + self.ui = ui self._params = [] self._parts = [] - self.capabilities = dict(capabilities) + self.capabilities: "Capabilities" = dict(capabilities) self._compengine = util.compengines.forbundletype(b'UN') self._compopts = None # If compression is being handled by a consumer of the raw @@ -1271,7 +1286,6 @@ return None def __call__(self): - self.ui.debug( b'bundle2-input-stream-interrupt: opening out of band context\n' ) @@ -1612,7 +1626,7 @@ # These are only the static capabilities. # Check the 'getrepocaps' function for the rest. -capabilities = { +capabilities: "Capabilities" = { b'HG20': (), b'bookmarks': (), b'error': (b'abort', b'unsupportedcontent', b'pushraced', b'pushkey'), @@ -1626,7 +1640,8 @@ } -def getrepocaps(repo, allowpushback=False, role=None): +# TODO: drop the default value for 'role' +def getrepocaps(repo, allowpushback: bool = False, role=None) -> "Capabilities": """return the bundle2 capabilities for a given repo Exists to allow extensions (like evolution) to mutate the capabilities. @@ -1675,7 +1690,7 @@ return caps -def bundle2caps(remote): +def bundle2caps(remote) -> "Capabilities": """return the bundle capabilities of a peer as dict""" raw = remote.capable(b'bundle2') if not raw and raw != b'': @@ -1684,7 +1699,7 @@ return decodecaps(capsblob) -def obsmarkersversion(caps): +def obsmarkersversion(caps: "Capabilities"): """extract the list of supported obsmarkers versions from a bundle2caps dict""" obscaps = caps.get(b'obsmarkers', ()) return [int(c[1:]) for c in obscaps if c.startswith(b'V')] @@ -1725,7 +1740,7 @@ msg %= count raise error.ProgrammingError(msg) - caps = {} + caps: "Capabilities" = {} if opts.get(b'obsolescence', False): caps[b'obsmarkers'] = (b'V1',) stream_version = opts.get(b'stream', b"") @@ -2598,7 +2613,6 @@ @parthandler(b'stream2', (b'requirements', b'filecount', b'bytecount')) def handlestreamv2bundle(op, part): - requirements = urlreq.unquote(part.params[b'requirements']) requirements = requirements.split(b',') if requirements else [] filecount = int(part.params[b'filecount'])
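The `bundle2.py` changes above name the shape of a capabilities mapping with a `Capabilities` type alias and make `bundle20.__init__` default to a fresh empty dict instead of an empty tuple. A short sketch of the alias and a caps dict of that shape (the entries are taken from the static `capabilities` table in the diff):

```python
from typing import Dict, List, Tuple, Union

Capabilities = Dict[bytes, Union[List[bytes], Tuple[bytes, ...]]]

# A caps dict as decodecaps()/getrepocaps() produce, for example:
caps: Capabilities = {
    b'HG20': (),
    b'bookmarks': (),
    b'error': (b'abort', b'unsupportedcontent', b'pushraced', b'pushkey'),
    b'obsmarkers': (b'V1',),
}
```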
--- a/mercurial/bundlecaches.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/bundlecaches.py Thu Aug 01 16:42:38 2024 +0200 @@ -4,6 +4,7 @@ # GNU General Public License version 2 or any later version. import collections +import typing from typing import ( Dict, @@ -15,6 +16,11 @@ from .thirdparty import attr +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( error, requirements as requirementsmod, @@ -30,7 +36,7 @@ CLONEBUNDLESCHEME = b"peer-bundle-cache://" -def get_manifest(repo): +def get_manifest(repo) -> bytes: """get the bundle manifest to be served to a client from a server""" raw_text = repo.vfs.tryread(CB_MANIFEST_FILE) entries = [e.split(b' ', 1) for e in raw_text.splitlines()] @@ -46,7 +52,7 @@ return b''.join(new_lines) -def alter_bundle_url(repo, url): +def alter_bundle_url(repo, url: bytes) -> bytes: """a function that exist to help extension and hosting to alter the url This will typically be used to inject authentication information in the url
--- a/mercurial/bundlerepo.py	Thu Aug 01 16:34:37 2024 +0200
+++ b/mercurial/bundlerepo.py	Thu Aug 01 16:42:38 2024 +0200
@@ -408,7 +408,7 @@
     with os.fdopen(fdtemp, 'wb') as fptemp:
         fptemp.write(header)
         while True:
-            chunk = readfn(2 ** 18)
+            chunk = readfn(2**18)
             if not chunk:
                 break
             fptemp.write(chunk)
--- a/mercurial/cext/base85.c	Thu Aug 01 16:34:37 2024 +0200
+++ b/mercurial/cext/base85.c	Thu Aug 01 16:42:38 2024 +0200
@@ -38,7 +38,7 @@
 	unsigned int acc, val, ch;
 	int pad = 0;
 
-	if (!PyArg_ParseTuple(args, "y#|i", &text, &len, &pad)) {
+	if (!PyArg_ParseTuple(args, "y#|p", &text, &len, &pad)) {
 		return NULL;
 	}
--- a/mercurial/cext/base85.pyi	Thu Aug 01 16:34:37 2024 +0200
+++ b/mercurial/cext/base85.pyi	Thu Aug 01 16:42:38 2024 +0200
@@ -1,6 +1,4 @@
-from typing import Optional
-
 version: int
 
-def b85encode(text: bytes, pad: Optional[int]) -> bytes: ...
+def b85encode(text: bytes, pad: bool = False) -> bytes: ...
 def b85decode(text: bytes) -> bytes: ...
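Together with the `base85.c` change above (PyArg format `"y#|i"` to `"y#|p"`), the stub now declares `pad` as a boolean with a default. A usage sketch, assuming the C extension is importable as `mercurial.cext.base85`:

```python
from mercurial.cext import base85

data = b'some payload'

unpadded = base85.b85encode(data)       # default: no padding
padded = base85.b85encode(data, True)   # pad the input to a multiple of 4 bytes
original = base85.b85decode(unpadded)   # decode back to the original bytes
```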
--- a/mercurial/cext/osutil.c Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/cext/osutil.c Thu Aug 01 16:42:38 2024 +0200 @@ -36,6 +36,12 @@ #endif #endif +#ifndef _WIN32 +#include <sys/mman.h> +#include <pthread.h> +#endif + + #ifdef __APPLE__ #include <sys/attr.h> #include <sys/vnode.h> @@ -1203,6 +1209,49 @@ } #endif +#ifdef MADV_POPULATE_READ + +typedef struct { + void * mmap_address; + size_t length; +} mmap_info; + +static void _mmap_populate(mmap_info *info) { + /* We explicitly does not check the return value as we don't care about it. + * the madvise is here to help performance and we don't care if it fails + * (for example because the mapping is no longer valid) */ + void * mmap_address = info->mmap_address; + size_t length = info->length; + free(info); + madvise(mmap_address, length, MADV_POPULATE_READ); +} + +static PyObject *background_mmap_populate(PyObject *self, PyObject *mmap) { + Py_buffer b; + pthread_t thread_id; + mmap_info *info; + if (PyObject_GetBuffer(mmap, &b, PyBUF_CONTIG_RO | PyBUF_C_CONTIGUOUS) == -1) { + return NULL; + } + info = (mmap_info *)malloc(sizeof(mmap_info)); + info->mmap_address=b.buf; + info->length=b.len; + /* note: for very large map, we could spin multiple thread populating + * different area */ + pthread_create(&thread_id, NULL, (void *) &_mmap_populate, info); + /* We don't keep track of this thread as it is fine for it to die when we + * exit. */ + pthread_detach(thread_id); + /* We release the PyBuffer in the main thread to let the object be garbage + * collected as soon as possible. This might result in the memory map being + * closed while the background thread is working. That will result in a + * error in the background thread we can ignore. */ + PyBuffer_Release(&b); + Py_RETURN_NONE; +} + +#endif + static char osutil_doc[] = "Native operating system services."; static PyMethodDef methods[] = { @@ -1237,6 +1286,10 @@ "Is a CoreGraphics session available?" }, #endif +#ifdef MADV_POPULATE_READ + {"background_mmap_populate", (PyCFunction)background_mmap_populate, METH_O, + "populate a mmap in the background\n"}, +#endif {NULL, NULL} };
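The new `background_mmap_populate()` added to `osutil.c` above spawns a detached thread that runs `madvise(MADV_POPULATE_READ)` over a read-only buffer, and it is only compiled on platforms that define `MADV_POPULATE_READ`, so callers have to probe for it. A hedged Python sketch of how a caller might use it (`somefile` is a placeholder path, not taken from the patch):

```python
import mmap

from mercurial import policy

osutil = policy.importmod('osutil')

with open('somefile', 'rb') as fh:  # placeholder; file must be non-empty for mmap
    m = mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ)
    populate = getattr(osutil, 'background_mmap_populate', None)
    if populate is not None:
        populate(m)  # best-effort: pre-fault the pages in a background thread
    # read from `m` as usual; population is asynchronous and may silently fail
```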
--- a/mercurial/cext/parsers.c Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/cext/parsers.c Thu Aug 01 16:42:38 2024 +0200 @@ -1232,6 +1232,15 @@ * should only occur in unusual circumstances (e.g. if sys.hexversion * is manually set to an invalid value). */ if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) { + PyObject *sys = PyImport_ImportModule("sys"), *executable; + if (!sys) { + return -1; + } + executable = PyObject_GetAttrString(sys, "executable"); + Py_DECREF(sys); + if (!executable) { + return -1; + } PyErr_Format(PyExc_ImportError, "%s: The Mercurial extension " "modules were compiled with Python " PY_VERSION @@ -1240,7 +1249,8 @@ "sys.hexversion=%ld: " "Python %s\n at: %s", versionerrortext, hexversion, Py_GetVersion(), - Py_GetProgramFullPath()); + PyUnicode_AsUTF8(executable)); + Py_DECREF(executable); return -1; } return 0;
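The hunk above replaces `Py_GetProgramFullPath()` with the Python-level `sys.executable` when reporting a version mismatch, so the error names the interpreter that actually loaded the module. Roughly, the same guard expressed in Python (a paraphrase for illustration, not Mercurial code; the function name is made up):

  import sys

  def check_hexversion(compiled_hexversion: int) -> None:
      # same major/minor comparison as the C code: the low 16 bits carry the
      # micro version and release level, so they are shifted away
      if sys.hexversion >> 16 != compiled_hexversion >> 16:
          raise ImportError(
              "extension modules were compiled for another Python; "
              "this interpreter is %s at %s"
              % (sys.version.split()[0], sys.executable)
          )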
--- a/mercurial/changegroup.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/changegroup.py Thu Aug 01 16:42:38 2024 +0200 @@ -407,7 +407,7 @@ yield chunkheader(len(chunk)) pos = 0 while pos < len(chunk): - next = pos + 2 ** 20 + next = pos + 2**20 yield chunk[pos:next] pos = next yield closechunk()
--- a/mercurial/changelog.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/changelog.py Thu Aug 01 16:42:38 2024 +0200 @@ -6,6 +6,8 @@ # GNU General Public License version 2 or any later version. +import typing + from .i18n import _ from .node import ( bin, @@ -13,6 +15,11 @@ ) from .thirdparty import attr +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( encoding, error,
--- a/mercurial/chgserver.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/chgserver.py Thu Aug 01 16:42:38 2024 +0200 @@ -151,7 +151,7 @@ """ modules = [m for n, m in extensions.extensions(ui)] try: - from . import __version__ + from . import __version__ # pytype: disable=import-error modules.append(__version__) except ImportError:
--- a/mercurial/cmdutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/cmdutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -11,6 +11,7 @@ import functools import os import re +import typing from typing import ( Any, @@ -33,6 +34,11 @@ ) from .thirdparty import attr +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( bookmarks, bundle2, @@ -1112,7 +1118,7 @@ ui.status(_(b"changed branch on %d changesets\n") % len(replacements)) -def findrepo(p): +def findrepo(p: bytes) -> Optional[bytes]: while not os.path.isdir(os.path.join(p, b".hg")): oldp, p = p, os.path.dirname(p) if p == oldp: @@ -3833,7 +3839,6 @@ original_headers = patch.parsepatch(diff) try: - chunks, opts = recordfilter( repo.ui, original_headers, match, operation=operation )
--- a/mercurial/configitems.toml Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/configitems.toml Thu Aug 01 16:42:38 2024 +0200 @@ -987,10 +987,6 @@ [[items]] section = "experimental" -name = "mmapindexthreshold" - -[[items]] -section = "experimental" name = "narrow" default = false @@ -2167,6 +2163,16 @@ [[items]] section = "storage" +name = "revlog.mmap.index" +default-type = "dynamic" + +[[items]] +section = "storage" +name = "revlog.mmap.index:size-threshold" +default = "1 MB" + +[[items]] +section = "storage" name = "revlog.optimize-delta-parent-choice" default = true alias = [["format", "aggressivemergedeltas"]]
--- a/mercurial/copies.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/copies.py Thu Aug 01 16:42:38 2024 +0200 @@ -915,11 +915,14 @@ self.movewithdir = {} if movewithdir is None else movewithdir def __repr__(self): - return '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>' % ( - self.copy, - self.renamedelete, - self.dirmove, - self.movewithdir, + return ( + '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>' + % ( + self.copy, + self.renamedelete, + self.dirmove, + self.movewithdir, + ) )
--- a/mercurial/dagop.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/dagop.py Thu Aug 01 16:42:38 2024 +0200 @@ -7,8 +7,15 @@ import heapq +import typing from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from .node import nullrev from . import ( error,
--- a/mercurial/dirstate.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/dirstate.py Thu Aug 01 16:42:38 2024 +0200 @@ -136,7 +136,6 @@ @interfaceutil.implementer(intdirstate.idirstate) class dirstate: - # used by largefile to avoid overwritting transaction callback _tr_key_suffix = b'' @@ -644,8 +643,8 @@ p2 = self._nodeconstants.nullid if self._changing_level == 0: raise ValueError( - b"cannot set dirstate parent outside of " - b"dirstate.changing_parents context manager" + "cannot set dirstate parent outside of " + "dirstate.changing_parents context manager" ) self._dirty = True @@ -789,7 +788,7 @@ a `with dirstate.changing_parents(repo):` context. """ if self.in_merge: - msg = b'update_file_reference should not be called when merging' + msg = 'update_file_reference should not be called when merging' raise error.ProgrammingError(msg) entry = self._map.get(filename) if entry is None: @@ -880,7 +879,6 @@ possibly_dirty=False, parentfiledata=None, ): - # note: I do not think we need to double check name clash here since we # are in a update/merge case that should already have taken care of # this. The test agrees @@ -1092,7 +1090,6 @@ write_key = self._use_tracked_hint and self._dirty_tracked_set if tr: - self._setup_tr_abort(tr) self._attached_to_a_transaction = True @@ -1286,7 +1283,7 @@ badfn(ff, badtype(kind)) if nf in dmap: results[nf] = None - except (OSError) as inst: + except OSError as inst: # nf not found on disk - it is dirstate only if nf in dmap: # does it exactly match a missing file? results[nf] = None
--- a/mercurial/dirstatemap.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/dirstatemap.py Thu Aug 01 16:42:38 2024 +0200 @@ -4,6 +4,11 @@ # GNU General Public License version 2 or any later version. +from typing import ( + Optional, + TYPE_CHECKING, +) + from .i18n import _ from . import ( @@ -12,6 +17,7 @@ policy, testing, txnutil, + typelib, util, ) @@ -20,6 +26,11 @@ v2, ) +if TYPE_CHECKING: + from . import ( + ui as uimod, + ) + parsers = policy.importmod('parsers') rustmod = policy.importrust('dirstate') @@ -46,12 +57,31 @@ class, with and without Rust extensions enabled. """ + _use_dirstate_v2: bool + _nodeconstants: typelib.NodeConstants + _ui: "uimod.ui" + _root: bytes + _filename: bytes + _nodelen: int + _dirtyparents: bool + _docket: Optional["docketmod.DirstateDocket"] + _write_mode: int + _pendingmode: Optional[bool] + identity: Optional[typelib.CacheStat] + # please pytype _map = None copymap = None - def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2): + def __init__( + self, + ui: "uimod.ui", + opener, + root: bytes, + nodeconstants: typelib.NodeConstants, + use_dirstate_v2: bool, + ) -> None: self._use_dirstate_v2 = use_dirstate_v2 self._nodeconstants = nodeconstants self._ui = ui @@ -76,16 +106,16 @@ # for consistent view between _pl() and _read() invocations self._pendingmode = None - def _set_identity(self): + def _set_identity(self) -> None: self.identity = self._get_current_identity() - def _get_current_identity(self): + def _get_current_identity(self) -> Optional[typelib.CacheStat]: try: return util.cachestat(self._opener.join(self._filename)) except FileNotFoundError: return None - def may_need_refresh(self): + def may_need_refresh(self) -> bool: if 'identity' not in vars(self): # no existing identity, we need a refresh return True @@ -104,7 +134,7 @@ return True return current_identity != self.identity - def preload(self): + def preload(self) -> None: """Loads the underlying data, if it's not already loaded""" self._map @@ -135,7 +165,7 @@ self._pendingmode = mode return fp - def _readdirstatefile(self, size=-1): + def _readdirstatefile(self, size: int = -1) -> bytes: try: with self._opendirstatefile() as fp: return fp.read(size) @@ -144,7 +174,7 @@ return b'' @property - def docket(self): + def docket(self) -> "docketmod.DirstateDocket": if not self._docket: if not self._use_dirstate_v2: raise error.ProgrammingError( @@ -331,7 +361,7 @@ `all` is unused when Rust is not enabled """ - for (filename, item) in self.items(): + for filename, item in self.items(): yield (filename, item.state, item.mode, item.size, item.mtime) def keys(self): @@ -617,7 +647,8 @@ This should also drop associated copy information - The fact we actually need to drop it is the responsability of the caller""" + The fact we actually need to drop it is the responsability of the caller + """ self._map.pop(f, None) self.copymap.pop(f, None) @@ -625,7 +656,6 @@ if rustmod is not None: class dirstatemap(_dirstatemapcommon): - ### Core data storage and access @propertycache
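Besides the formatting cleanups, the dirstatemap hunk adds class-level attribute annotations and imports `ui` only under `TYPE_CHECKING`, which gives pytype concrete types without creating a runtime import cycle. A minimal sketch of that pattern with hypothetical names:

  from typing import TYPE_CHECKING, Optional

  if TYPE_CHECKING:
      # imported for annotations only; never imported at runtime
      from mercurial import ui as uimod


  class example_map:
      # class-level annotations describe instance attributes for the checker
      _ui: "uimod.ui"
      _root: bytes
      _docket: Optional[bytes]

      def __init__(self, ui: "uimod.ui", root: bytes) -> None:
          self._ui = ui
          self._root = root
          self._docket = None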
--- a/mercurial/dirstateutils/v2.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/dirstateutils/v2.py Thu Aug 01 16:42:38 2024 +0200 @@ -7,8 +7,15 @@ import struct +import typing from ..thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from .. import error, policy parsers = policy.importmod('parsers')
--- a/mercurial/encoding.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/encoding.py Thu Aug 01 16:42:38 2024 +0200 @@ -40,6 +40,16 @@ unichr = chr +if typing.TYPE_CHECKING: + # TODO: make a stub file for .cext.charencode, and import here + from .pure.charencode import ( + asciilower, + asciiupper, + isasciistr, + jsonescapeu8fast as _jsonescapeu8fast, + ) + + # These unicode characters are ignored by HFS+ (Apple Technote 1150, # "Unicode Subtleties"), so we need to ignore them in some places for # sanity. @@ -103,14 +113,15 @@ if pycompat.iswindows: _encodingrewrites[b'cp65001'] = b'utf-8' +encoding: bytes = b'' # help pytype avoid seeing None value try: - encoding = environ.get(b"HGENCODING") + encoding = environ.get(b"HGENCODING", b'') if not encoding: encoding = locale.getpreferredencoding().encode('ascii') or b'ascii' encoding = _encodingrewrites.get(encoding, encoding) except locale.Error: encoding = b'ascii' -encodingmode = environ.get(b"HGENCODINGMODE", b"strict") +encodingmode: bytes = environ.get(b"HGENCODINGMODE", b"strict") fallbackencoding = b'ISO-8859-1' @@ -366,7 +377,6 @@ cwd = cwd[0:1].upper() + cwd[1:] return cwd - else: getcwd = os.getcwdb # re-exports @@ -524,7 +534,7 @@ other = 0 -def jsonescape(s: Any, paranoid: Any = False) -> Any: +def jsonescape(s: bytes, paranoid: bool = False) -> bytes: """returns a string suitable for JSON JSON is problematic for us because it doesn't support non-Unicode
--- a/mercurial/extensions.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/extensions.py Thu Aug 01 16:42:38 2024 +0200 @@ -290,7 +290,7 @@ with util.timedcm('load all extensions') as stats: default_sub_options = ui.configsuboptions(b"extensions", b"*")[1] - for (name, path) in result: + for name, path in result: if path: if path[0:1] == b'!': if name not in _disabledextensions:
--- a/mercurial/filelog.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/filelog.py Thu Aug 01 16:42:38 2024 +0200 @@ -175,7 +175,6 @@ ) with self._revlog._writing(transaction): - if self._fix_issue6528: deltas = rewrite.filter_delta_issue6528(self._revlog, deltas)
--- a/mercurial/formatter.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/formatter.py Thu Aug 01 16:42:38 2024 +0200 @@ -110,6 +110,7 @@ import itertools import os import pickle +import typing from .i18n import _ from .node import ( @@ -118,6 +119,11 @@ ) from .thirdparty import attr +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( error, pycompat, @@ -176,7 +182,6 @@ class baseformatter: - # set to True if the formater output a strict format that does not support # arbitrary output in the stream. strict_format = False @@ -421,7 +426,6 @@ class jsonformatter(baseformatter): - strict_format = True def __init__(self, ui, out, topic, opts):
--- a/mercurial/graphmod.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/graphmod.py Thu Aug 01 16:42:38 2024 +0200 @@ -18,8 +18,16 @@ """ +import typing + from .node import nullrev from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( dagop, smartset, @@ -133,8 +141,7 @@ else: getconf = lambda rev: {} - for (cur, type, data, parents) in dag: - + for cur, type, data, parents in dag: # Compute seen and next if cur not in seen: seen.append(cur) # new head @@ -244,7 +251,7 @@ def _fixlongrightedges(edges): - for (i, (start, end)) in enumerate(edges): + for i, (start, end) in enumerate(edges): if end > start: edges[i] = (start, end + 1) @@ -265,7 +272,7 @@ def _drawedges(echars, edges, nodeline, interline): - for (start, end) in edges: + for start, end in edges: if start == end + 1: interline[2 * end + 1] = b"/" elif start == end - 1: @@ -381,7 +388,7 @@ this function can be monkey-patched by extensions to alter graph display without needing to mimic all of the edge-fixup logic in ascii() """ - for (ln, logstr) in graph: + for ln, logstr in graph: ui.write((ln + logstr).rstrip() + b"\n")
--- a/mercurial/helptext/config.txt Thu Aug 01 16:34:37 2024 +0200
+++ b/mercurial/helptext/config.txt Thu Aug 01 16:42:38 2024 +0200
@@ -2344,6 +2344,17 @@
 you do, consider talking with the mercurial developer community about your
 repositories.
 
+``revlog.mmap.index``
+    Whether to use the operating system's "memory mapping" feature (when
+    possible) to access the revlog index. This improves performance
+    and reduces memory pressure.
+
+.. container:: verbose
+
+    ``revlog.mmap.index:size-threshold``
+
+    The size of the index above which to use the "memory mapping" feature.
+
 ``revlog.optimize-delta-parent-choice``
     When storing a merge revision, both parents will be equally considered as
     a possible delta base. This results in better delta selection and improved
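For reference, turning the new behaviour on from a configuration file could look like the snippet below; the values are illustrative, and per the configitems.toml hunk above the threshold defaults to "1 MB" while `revlog.mmap.index` itself has a dynamic default (see the localrepo.py hunk further down):

  [storage]
  revlog.mmap.index = yes
  revlog.mmap.index:size-threshold = 4 MB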
--- a/mercurial/hg.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/hg.py Thu Aug 01 16:42:38 2024 +0200 @@ -11,6 +11,7 @@ import posixpath import shutil import stat +import typing import weakref from .i18n import _ @@ -57,6 +58,11 @@ urlutil, ) +if typing.TYPE_CHECKING: + from typing import ( + List, + Tuple, + ) release = lock.release @@ -1597,7 +1603,7 @@ # Files of interest # Used to check if the repository has changed looking at mtime and size of # these files. -foi = [ +foi: "List[Tuple[str, bytes]]" = [ ('spath', b'00changelog.i'), ('spath', b'phaseroots'), # ! phase can change content at the same size ('spath', b'obsstore'),
--- a/mercurial/hgweb/hgwebdir_mod.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/hgweb/hgwebdir_mod.py Thu Aug 01 16:42:38 2024 +0200 @@ -120,7 +120,6 @@ seenrepos = set() seendirs = set() for name, path in repos: - if not name.startswith(subdir): continue name = name[len(subdir) :]
--- a/mercurial/hgweb/request.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/hgweb/request.py Thu Aug 01 16:42:38 2024 +0200 @@ -9,7 +9,15 @@ # import wsgiref.validate +import typing + from ..thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from .. import ( error, pycompat,
--- a/mercurial/hgweb/server.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/hgweb/server.py Thu Aug 01 16:42:38 2024 +0200 @@ -66,7 +66,6 @@ class _httprequesthandler(httpservermod.basehttprequesthandler): - url_scheme = b'http' @staticmethod @@ -358,7 +357,6 @@ class MercurialHTTPServer(_mixin, httpservermod.httpserver, object): - # SO_REUSEADDR has broken semantics on windows if pycompat.iswindows: allow_reuse_address = 0 @@ -396,7 +394,6 @@ def create_server(ui, app): - if ui.config(b'web', b'certificate'): handler = _httprequesthandlerssl else:
--- a/mercurial/hgweb/webcommands.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/hgweb/webcommands.py Thu Aug 01 16:42:38 2024 +0200 @@ -601,7 +601,6 @@ def dirlist(context): for d in sorted(dirs): - emptydirs = [] h = dirs[d] while isinstance(h, dict) and len(h) == 1: @@ -1427,7 +1426,7 @@ return tree def jsdata(context): - for (id, type, ctx, vtx, edges) in fulltree(): + for id, type, ctx, vtx, edges in fulltree(): yield { b'node': pycompat.bytestr(ctx), b'graphnode': webutil.getgraphnode(web.repo, ctx),
--- a/mercurial/httpconnection.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/httpconnection.py Thu Aug 01 16:42:38 2024 +0200 @@ -25,6 +25,7 @@ urlerr = util.urlerr urlreq = util.urlreq + # moved here from url.py to avoid a cycle class httpsendfile: """This is a wrapper around the objects returned by python's "open".
--- a/mercurial/httppeer.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/httppeer.py Thu Aug 01 16:42:38 2024 +0200 @@ -245,8 +245,9 @@ Returns the response object. """ dbg = ui.debug + line = b'devel-peer-request: %s\n' + if ui.debugflag and ui.configbool(b'devel', b'debug.peer-request'): - line = b'devel-peer-request: %s\n' dbg( line % b'%s %s' @@ -491,6 +492,9 @@ # boolean capability. They only support headerless/uncompressed # bundles. types = [b""] + + type = b"" + for x in types: if x in bundle2.bundletypes: type = x @@ -520,10 +524,9 @@ os.unlink(tempname) def _calltwowaystream(self, cmd, fp, **args): - filename = None + # dump bundle to disk + fd, filename = pycompat.mkstemp(prefix=b"hg-bundle-", suffix=b".hg") try: - # dump bundle to disk - fd, filename = pycompat.mkstemp(prefix=b"hg-bundle-", suffix=b".hg") with os.fdopen(fd, "wb") as fh: d = fp.read(4096) while d: @@ -534,8 +537,7 @@ headers = {'Content-Type': 'application/mercurial-0.1'} return self._callstream(cmd, data=fp_, headers=headers, **args) finally: - if filename is not None: - os.unlink(filename) + os.unlink(filename) def _callcompressable(self, cmd, **args): return self._callstream(cmd, _compressible=True, **args)
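In `_calltwowaystream`, moving `pycompat.mkstemp` out of the `try` block lets the `finally` clause unlink unconditionally: if creating the temporary file fails there is nothing to clean up, so the old `filename is not None` guard is no longer needed. A generic sketch of the resulting shape (the helper name is invented for the example):

  import os

  from mercurial import pycompat

  def spool_to_tempfile(fp, consume):
      # create the file first, so the finally-block always has something to remove
      fd, filename = pycompat.mkstemp(prefix=b"hg-bundle-", suffix=b".hg")
      try:
          with os.fdopen(fd, "wb") as fh:
              d = fp.read(4096)
              while d:
                  fh.write(d)
                  d = fp.read(4096)
          with open(filename, "rb") as fh:
              return consume(fh)
      finally:
          os.unlink(filename)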
--- a/mercurial/i18n.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/i18n.py Thu Aug 01 16:42:38 2024 +0200 @@ -12,6 +12,7 @@ import sys from typing import ( + Dict, List, ) @@ -61,7 +62,9 @@ _ugettext = t.gettext -_msgcache = {} # encoding: {message: translation} +_msgcache: Dict[ + bytes, Dict[bytes, bytes] +] = {} # encoding: {message: translation} def gettext(message: bytes) -> bytes: @@ -119,6 +122,5 @@ def _(message: bytes) -> bytes: return message - else: _ = gettext
--- a/mercurial/linelog.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/linelog.py Thu Aug 01 16:42:38 2024 +0200 @@ -21,8 +21,19 @@ import abc import struct +import typing + +from typing import ( + List, +) from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import pycompat _llentry = struct.Struct(b'>II') @@ -45,7 +56,7 @@ @attr.s class annotateresult: rev = attr.ib() - lines = attr.ib(type=bytearray) + lines = attr.ib(type=List[lineinfo]) _eof = attr.ib() def __iter__(self): @@ -53,7 +64,6 @@ class _llinstruction: # pytype: disable=ignored-metaclass - __metaclass__ = abc.ABCMeta @abc.abstractmethod @@ -401,7 +411,7 @@ def annotate(self, rev): pc = 1 - lines = [] + lines: List[lineinfo] = [] executed = 0 # Sanity check: if instructions executed exceeds len(program), we # hit an infinite loop in the linelog program somehow and we
--- a/mercurial/localrepo.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/localrepo.py Thu Aug 01 16:42:38 2024 +0200 @@ -1123,9 +1123,12 @@ if 0 <= chainspan: delta_config.max_deltachain_span = chainspan - mmapindexthreshold = ui.configbytes(b'experimental', b'mmapindexthreshold') - if mmapindexthreshold is not None: - data_config.mmap_index_threshold = mmapindexthreshold + has_populate = util.has_mmap_populate() + if ui.configbool(b'storage', b'revlog.mmap.index', has_populate): + data_config.mmap_index_threshold = ui.configbytes( + b'storage', + b'revlog.mmap.index:size-threshold', + ) withsparseread = ui.configbool(b'experimental', b'sparse-read') srdensitythres = float(
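This hunk retires `experimental.mmapindexthreshold` in favour of the two `storage.revlog.mmap.index*` options introduced in configitems.toml: the feature defaults to "on" only when `util.has_mmap_populate()` reports background-population support, and the size threshold keeps small indexes on the plain-read path. A simplified sketch of how the knobs combine (not Mercurial's actual control flow; the function name is invented):

  def should_mmap_index(ui, index_size: int, has_populate: bool) -> bool:
      if not ui.configbool(b'storage', b'revlog.mmap.index', has_populate):
          return False
      threshold = ui.configbytes(b'storage', b'revlog.mmap.index:size-threshold')
      return index_size >= threshold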
--- a/mercurial/logcmdutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/logcmdutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -9,6 +9,7 @@ import itertools import os import posixpath +import typing from typing import ( Any, @@ -24,6 +25,11 @@ from .thirdparty import attr +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( dagop, diffutil,
--- a/mercurial/match.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/match.py Thu Aug 01 16:42:38 2024 +0200 @@ -399,12 +399,12 @@ if badfn is not None: self.bad = badfn - def was_tampered_with_nonrec(self): + def was_tampered_with_nonrec(self) -> bool: # [_was_tampered_with] is used to track if when extensions changed the matcher # behavior (crazy stuff!), so we disable the rust fast path. return self._was_tampered_with - def was_tampered_with(self): + def was_tampered_with(self) -> bool: return self.was_tampered_with_nonrec() def __call__(self, fn): @@ -894,7 +894,7 @@ self.bad = m1.bad self.traversedir = m1.traversedir - def was_tampered_with(self): + def was_tampered_with(self) -> bool: return ( self.was_tampered_with_nonrec() or self._m1.was_tampered_with() @@ -984,7 +984,7 @@ self.bad = m1.bad self.traversedir = m1.traversedir - def was_tampered_with(self): + def was_tampered_with(self) -> bool: return ( self.was_tampered_with_nonrec() or self._m1.was_tampered_with() @@ -1088,7 +1088,7 @@ if matcher.prefix(): self._always = any(f == path for f in matcher._files) - def was_tampered_with(self): + def was_tampered_with(self) -> bool: return ( self.was_tampered_with_nonrec() or self._matcher.was_tampered_with() ) @@ -1227,7 +1227,7 @@ self.traversedir = m1.traversedir self._matchers = matchers - def was_tampered_with(self): + def was_tampered_with(self) -> bool: return self.was_tampered_with_nonrec() or any( map(lambda m: m.was_tampered_with(), self._matchers) )
--- a/mercurial/merge.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/merge.py Thu Aug 01 16:42:38 2024 +0200 @@ -8,10 +8,17 @@ import collections import struct +import typing from .i18n import _ from .node import nullrev from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from .utils import stringutil from .dirstateutils import timestamp from . import ( @@ -420,11 +427,11 @@ # Track the names of all deleted files. for f in mresult.files((mergestatemod.ACTION_REMOVE,)): deletedfiles.add(f) - for (f, args, msg) in mresult.getactions((mergestatemod.ACTION_MERGE,)): + for f, args, msg in mresult.getactions((mergestatemod.ACTION_MERGE,)): f1, f2, fa, move, anc = args if move: deletedfiles.add(f1) - for (f, args, msg) in mresult.getactions( + for f, args, msg in mresult.getactions( (mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,) ): f2, flags = args
--- a/mercurial/mergestate.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/mergestate.py Thu Aug 01 16:42:38 2024 +0200 @@ -495,7 +495,6 @@ class mergestate(_mergestate_base): - statepathv1 = b'merge/state' statepathv2 = b'merge/state2'
--- a/mercurial/metadata.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/metadata.py Thu Aug 01 16:42:38 2024 +0200 @@ -433,14 +433,12 @@ # Iteration over d1 content will deal with all cases, but the one in the # first column of the table. for filename, d1 in diff_p1.items(): - d2 = diff_p2.pop(filename, None) if d2 is None: # this deal with the first line of the table. _process_other_unchanged(md, mas, filename, d1) else: - if d1[0][0] is None and d2[0][0] is None: # case 🄼 — both deleted the file. md.mark_added(filename)
--- a/mercurial/narrowspec.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/narrowspec.py Thu Aug 01 16:42:38 2024 +0200 @@ -225,7 +225,6 @@ m = "changing narrow spec outside of a transaction" raise error.ProgrammingError(m) else: - reporef = weakref.ref(repo) def clean_pending(tr):
--- a/mercurial/obsolete.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/obsolete.py Thu Aug 01 16:42:38 2024 +0200 @@ -1038,7 +1038,6 @@ def makefoldid(relation, user): - folddigest = hashutil.sha1(user) for p in relation[0] + relation[1]: folddigest.update(b'%d' % p.rev())
--- a/mercurial/obsutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/obsutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -961,7 +961,6 @@ single_successor = short(successors[0][0]) return filteredmsgtable[b'superseded'] % (changeid, single_successor) elif fate == b'superseded_split': - succs = [] for node_id in successors[0]: succs.append(short(node_id))
--- a/mercurial/patch.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/patch.py Thu Aug 01 16:42:38 2024 +0200 @@ -229,7 +229,6 @@ def _extract(ui, fileobj, tmpname, tmpfp): - # attempt to detect the start of a patch # (this heuristic is borrowed from quilt) diffre = re.compile( @@ -596,7 +595,7 @@ self.created = 0 self.maxsize = maxsize if self.maxsize is None: - self.maxsize = 4 * (2 ** 20) + self.maxsize = 4 * (2**20) self.size = 0 self.data = {}
--- a/mercurial/phases.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/phases.py Thu Aug 01 16:42:38 2024 +0200 @@ -893,7 +893,6 @@ this_phase_set = self._phasesets[targetphase] for r in range(start, end): - # gather information about the current_rev r_phase = phase(repo, r) p_phase = None # phase inherited from parents
--- a/mercurial/policy.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/policy.py Thu Aug 01 16:42:38 2024 +0200 @@ -8,6 +8,14 @@ import os import sys +import typing + +if typing.TYPE_CHECKING: + from typing import ( + Dict, + Optional, + Tuple, + ) # Rules for how modules can be loaded. Values are: # @@ -23,8 +31,8 @@ # By default, fall back to the pure modules so the in-place build can # run without recompiling the C extensions. This will be overridden by # __modulepolicy__ generated by setup.py. -policy = b'allow' -_packageprefs = { +policy: bytes = b'allow' +_packageprefs: "Dict[bytes, Tuple[Optional[str], Optional[str]]]" = { # policy: (versioned package, pure package) b'c': ('cext', None), b'allow': ('cext', 'pure'), @@ -37,9 +45,9 @@ } try: - from . import __modulepolicy__ + from . import __modulepolicy__ # pytype: disable=import-error - policy = __modulepolicy__.modulepolicy + policy: bytes = __modulepolicy__.modulepolicy except ImportError: pass @@ -48,14 +56,14 @@ # The canonical way to do this is to test platform.python_implementation(). # But we don't import platform and don't bloat for it here. if '__pypy__' in sys.builtin_module_names: - policy = b'cffi' + policy: bytes = b'cffi' # Environment variable can always force settings. if 'HGMODULEPOLICY' in os.environ: - policy = os.environ['HGMODULEPOLICY'].encode('utf-8') + policy: bytes = os.environ['HGMODULEPOLICY'].encode('utf-8') -def _importfrom(pkgname, modname): +def _importfrom(pkgname: str, modname: str): # from .<pkgname> import <modname> (where . is looked through this module) fakelocals = {} pkg = __import__(pkgname, globals(), fakelocals, [modname], level=1) @@ -69,7 +77,7 @@ # keep in sync with "version" in C modules -_cextversions = { +_cextversions: "Dict[Tuple[str, str], int]" = { ('cext', 'base85'): 1, ('cext', 'bdiff'): 3, ('cext', 'mpatch'): 1, @@ -78,7 +86,7 @@ } # map import request to other package or module -_modredirects = { +_modredirects: "Dict[Tuple[str, str], Tuple[str, str]]" = { ('cext', 'charencode'): ('cext', 'parsers'), ('cffi', 'base85'): ('pure', 'base85'), ('cffi', 'charencode'): ('pure', 'charencode'), @@ -86,7 +94,7 @@ } -def _checkmod(pkgname, modname, mod): +def _checkmod(pkgname: str, modname: str, mod) -> None: expected = _cextversions.get((pkgname, modname)) actual = getattr(mod, 'version', None) if actual != expected: @@ -97,7 +105,7 @@ ) -def importmod(modname): +def importmod(modname: str): """Import module according to policy and check API version""" try: verpkg, purepkg = _packageprefs[policy] @@ -118,12 +126,12 @@ return _importfrom(pn, mn) -def _isrustpermissive(): +def _isrustpermissive() -> bool: """Assuming the policy is a Rust one, tell if it's permissive.""" return policy.endswith(b'-allow') -def importrust(modname, member=None, default=None): +def importrust(modname: str, member: "Optional[str]" = None, default=None): """Import Rust module according to policy and availability. If policy isn't a Rust one, this returns `default`.
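The annotations above make the policy tables explicit: `policy` is a bytes value, `_packageprefs` maps it to a (versioned, pure) package pair, and `importmod`/`importrust` take str module names. Typical call sites, as seen elsewhere in this changeset:

  from mercurial import policy

  parsers = policy.importmod('parsers')        # cext or pure, per the active policy
  charencode = policy.importmod('charencode')  # redirected to cext.parsers under the C policy
  rustmod = policy.importrust('dirstate')      # None unless a Rust policy is in effect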
--- a/mercurial/posix.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/posix.py Thu Aug 01 16:42:38 2024 +0200 @@ -386,20 +386,20 @@ return None # on posix platforms, every path is ok -def getfsmountpoint(dirpath: bytes) -> Optional[bytes]: +def getfsmountpoint(path: bytes) -> Optional[bytes]: """Get the filesystem mount point from a directory (best-effort) Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc. """ - return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath) + return getattr(osutil, 'getfsmountpoint', lambda x: None)(path) -def getfstype(dirpath: bytes) -> Optional[bytes]: +def getfstype(path: bytes) -> Optional[bytes]: """Get the filesystem type name from a directory (best-effort) Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc. """ - return getattr(osutil, 'getfstype', lambda x: None)(dirpath) + return getattr(osutil, 'getfstype', lambda x: None)(path) def get_password() -> bytes: @@ -548,7 +548,7 @@ if pycompat.sysplatform == b'OpenVMS': # OpenVMS's symlink emulation is broken on some OpenVMS versions. - def checklink(path): + def checklink(path: bytes) -> bool: return False @@ -691,7 +691,7 @@ def lookupreg( key: bytes, - name: Optional[bytes] = None, + valname: Optional[bytes] = None, scope: Optional[Union[int, Iterable[int]]] = None, ) -> Optional[bytes]: return None @@ -707,6 +707,8 @@ class cachestat: + stat: os.stat_result + def __init__(self, path: bytes) -> None: self.stat = os.stat(path)
--- a/mercurial/pure/base85.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/pure/base85.py Thu Aug 01 16:42:38 2024 +0200 @@ -23,7 +23,7 @@ _b85dec[c] = i -def b85encode(text, pad=False): +def b85encode(text: bytes, pad: bool = False) -> bytes: """encode text in base85 format""" l = len(text) r = l % 4 @@ -50,7 +50,7 @@ return out[:olen] -def b85decode(text): +def b85decode(text: bytes) -> bytes: """decode base85-encoded text""" if not _b85dec: _mkb85dec()
--- a/mercurial/pure/charencode.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/pure/charencode.py Thu Aug 01 16:42:38 2024 +0200 @@ -11,7 +11,7 @@ from .. import pycompat -def isasciistr(s): +def isasciistr(s: bytes) -> bool: try: s.decode('ascii') return True @@ -19,7 +19,7 @@ return False -def asciilower(s): +def asciilower(s: bytes) -> bytes: """convert a string to lowercase if ASCII Raises UnicodeDecodeError if non-ASCII characters are found.""" @@ -27,7 +27,7 @@ return s.lower() -def asciiupper(s): +def asciiupper(s: bytes) -> bytes: """convert a string to uppercase if ASCII Raises UnicodeDecodeError if non-ASCII characters are found.""" @@ -52,7 +52,7 @@ _jsonmap.extend(pycompat.bytechr(x) for x in range(128, 256)) -def jsonescapeu8fast(u8chars, paranoid): +def jsonescapeu8fast(u8chars: bytes, paranoid: bool) -> bytes: """Convert a UTF-8 byte string to JSON-escaped form (fast path) Raises ValueError if non-ASCII characters have to be escaped. @@ -70,7 +70,7 @@ _utf8strict = r'surrogatepass' -def jsonescapeu8fallback(u8chars, paranoid): +def jsonescapeu8fallback(u8chars: bytes, paranoid: bool) -> bytes: """Convert a UTF-8 byte string to JSON-escaped form (slow path) Escapes all non-ASCII characters no matter if paranoid is False.
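With the byte-oriented signatures spelled out, the division of labour is easier to see: `jsonescapeu8fast` handles the case where no non-ASCII escaping is needed and raises ValueError otherwise, at which point callers fall back to `jsonescapeu8fallback`. A small usage sketch against the pure module (the sample bytes are arbitrary UTF-8):

  from mercurial.pure.charencode import jsonescapeu8fallback, jsonescapeu8fast

  raw = b'label: "caf\xc3\xa9"'   # UTF-8 bytes containing one non-ASCII character
  try:
      escaped = jsonescapeu8fast(raw, paranoid=True)
  except ValueError:
      # non-ASCII must be escaped in paranoid mode; take the slow path
      escaped = jsonescapeu8fallback(raw, paranoid=True)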
--- a/mercurial/pure/mpatch.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/pure/mpatch.py Thu Aug 01 16:42:38 2024 +0200 @@ -106,7 +106,7 @@ try: p1, p2, l = struct.unpack(b">lll", m.read(12)) except struct.error: - raise mpatchError(b"patch cannot be decoded") + raise mpatchError("patch cannot be decoded") _pull(new, frags, p1 - last) # what didn't change _pull([], frags, p2 - p1) # what got deleted new.append((l, pos + 12)) # what got added @@ -137,7 +137,7 @@ outlen += length if bin != binend: - raise mpatchError(b"patch cannot be decoded") + raise mpatchError("patch cannot be decoded") outlen += orig - last return outlen
--- a/mercurial/pure/parsers.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/pure/parsers.py Thu Aug 01 16:42:38 2024 +0200 @@ -9,6 +9,7 @@ import io import stat import struct +import typing import zlib from ..node import ( @@ -16,8 +17,15 @@ sha1nodeconstants, ) from ..thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from .. import ( error, + pycompat, revlogutils, util, ) @@ -228,7 +236,7 @@ parentfiledata=(mode, size, (mtime, 0, False)), ) else: - raise RuntimeError(b'unknown state: %s' % state) + raise RuntimeError('unknown state: %s' % pycompat.sysstr(state)) def set_possibly_dirty(self): """Mark a file as "possibly dirty" @@ -644,7 +652,7 @@ def _check_index(self, i): if not isinstance(i, int): - raise TypeError(b"expecting int indexes") + raise TypeError("expecting int indexes") if i < 0 or i >= len(self): raise IndexError(i) @@ -704,7 +712,7 @@ def __delitem__(self, i): if not isinstance(i, slice) or not i.stop == -1 or i.step is not None: - raise ValueError(b"deleting slices only supports a:-1 with step 1") + raise ValueError("deleting slices only supports a:-1 with step 1") i = i.start self._check_index(i) self._stripnodes(i) @@ -783,12 +791,12 @@ count += 1 off += self.entry_size + s if off != len(self._data): - raise ValueError(b"corrupted data") + raise ValueError("corrupted data") return count def __delitem__(self, i): if not isinstance(i, slice) or not i.stop == -1 or i.step is not None: - raise ValueError(b"deleting slices only supports a:-1 with step 1") + raise ValueError("deleting slices only supports a:-1 with step 1") i = i.start self._check_index(i) self._stripnodes(i) @@ -841,7 +849,7 @@ raise KeyError self._check_index(rev) if rev < self._lgt: - msg = b"cannot rewrite entries outside of this transaction" + msg = "cannot rewrite entries outside of this transaction" raise KeyError(msg) else: entry = list(self[rev]) @@ -911,7 +919,6 @@ ) def _pack_entry(self, rev, entry): - base = entry[revlog_constants.ENTRY_DELTA_BASE] link_rev = entry[revlog_constants.ENTRY_LINK_REV] assert base == rev, (base, rev)
--- a/mercurial/revlog.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/revlog.py Thu Aug 01 16:42:38 2024 +0200 @@ -20,9 +20,15 @@ import io import os import struct +import typing import weakref import zlib +from typing import ( + Optional, + Tuple, +) + # import stuff from node for others to import from revlog from .node import ( bin, @@ -69,6 +75,12 @@ REVIDX_RAWTEXT_CHANGING_FLAGS, ) from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( ancestor, dagop, @@ -221,7 +233,6 @@ index, cache = parsers.parse_index_devel_nodemap(data, inline) return index, cache - else: parse_index_v1_nodemap = None @@ -558,7 +569,7 @@ c = self._get_decompressor(t) return c.decompress - def _get_decompressor(self, t): + def _get_decompressor(self, t: bytes): try: compressor = self._decompressors[t] except KeyError: @@ -574,7 +585,7 @@ ) return compressor - def compress(self, data): + def compress(self, data: bytes) -> Tuple[bytes, bytes]: """Generate a possibly-compressed representation of data.""" if not data: return b'', data @@ -589,7 +600,7 @@ return b'', data return b'u', data - def decompress(self, data): + def decompress(self, data: bytes): """Decompress a revlog chunk. The chunk is expected to begin with a header identifying the @@ -1296,6 +1307,8 @@ features = FEATURES_BY_VERSION[_format_version] return features[b'inline'](_format_flags) + _docket_file: Optional[bytes] + def __init__( self, opener, @@ -3081,7 +3094,7 @@ sidedata=sidedata, ) - def compress(self, data): + def compress(self, data: bytes) -> Tuple[bytes, bytes]: return self._inner.compress(data) def decompress(self, data):
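Beyond the attr re-import, the revlog hunk annotates the compression helpers: `compress` returns a `(header, data)` pair where the header names the engine used (with `b'u'` or an empty header marking data stored uncompressed), and `decompress` inspects that header on the stored chunk. A rough illustration of the convention, assuming an object exposing these two methods:

  def store_chunk(inner, chunk: bytes) -> bytes:
      header, payload = inner.compress(chunk)
      return header + payload   # what ends up in the revlog data file

  def load_chunk(inner, stored: bytes) -> bytes:
      return inner.decompress(stored)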
--- a/mercurial/revlogutils/__init__.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/revlogutils/__init__.py Thu Aug 01 16:42:38 2024 +0200 @@ -6,7 +6,15 @@ # GNU General Public License version 2 or any later version. +import typing + from ..thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from ..interfaces import repository # See mercurial.revlogutils.constants for doc
--- a/mercurial/revlogutils/deltas.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/revlogutils/deltas.py Thu Aug 01 16:42:38 2024 +0200 @@ -11,6 +11,7 @@ import abc import collections import struct +import typing # import stuff from node for others to import from revlog from ..node import nullrev @@ -31,6 +32,11 @@ from ..thirdparty import attr +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from .. import ( error, mdiff, @@ -428,7 +434,6 @@ # Cut the revs at collected indices previdx = 0 for idx in selected: - chunk = _trimchunk(revlog, revs, previdx, idx) if chunk: yield chunk
--- a/mercurial/revlogutils/nodemap.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/revlogutils/nodemap.py Thu Aug 01 16:42:38 2024 +0200 @@ -553,7 +553,7 @@ Children blocks are always yield before their parent block. """ - for (__, item) in sorted(block.items()): + for __, item in sorted(block.items()): if isinstance(item, dict): for sub_block in _walk_trie(item): yield sub_block
--- a/mercurial/revlogutils/rewrite.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/revlogutils/rewrite.py Thu Aug 01 16:42:38 2024 +0200 @@ -258,7 +258,6 @@ # this revision is empty, we can delta against nullrev rewritten_entries[rev] = (nullrev, 0, 0, COMP_MODE_PLAIN) else: - text = revlog.rawdata(rev) info = revlogutils.revisioninfo( node=entry[ENTRY_NODE_ID],
--- a/mercurial/scmutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/scmutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -13,8 +13,20 @@ import posixpath import re import subprocess +import typing import weakref +from typing import ( + Callable, + Dict, + Iterable, + Iterator, + List, + Optional, + Set, + Tuple, +) + from .i18n import _ from .node import ( bin, @@ -24,6 +36,12 @@ wdirrev, ) from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( copies as copiesmod, encoding, @@ -39,6 +57,7 @@ revsetlang, similar, smartset, + typelib, url, util, vfs, @@ -55,6 +74,11 @@ else: from . import scmposix as scmplatform +if typing.TYPE_CHECKING: + from . import ( + ui as uimod, + ) + parsers = policy.importmod('parsers') rustrevlog = policy.importrust('revlog') @@ -69,15 +93,15 @@ relevant to the working copy. """ - modified = attr.ib(default=attr.Factory(list)) - added = attr.ib(default=attr.Factory(list)) - removed = attr.ib(default=attr.Factory(list)) - deleted = attr.ib(default=attr.Factory(list)) - unknown = attr.ib(default=attr.Factory(list)) - ignored = attr.ib(default=attr.Factory(list)) - clean = attr.ib(default=attr.Factory(list)) - - def __iter__(self): + modified = attr.ib(default=attr.Factory(list), type=List[bytes]) + added = attr.ib(default=attr.Factory(list), type=List[bytes]) + removed = attr.ib(default=attr.Factory(list), type=List[bytes]) + deleted = attr.ib(default=attr.Factory(list), type=List[bytes]) + unknown = attr.ib(default=attr.Factory(list), type=List[bytes]) + ignored = attr.ib(default=attr.Factory(list), type=List[bytes]) + clean = attr.ib(default=attr.Factory(list), type=List[bytes]) + + def __iter__(self) -> Iterator[List[bytes]]: yield self.modified yield self.added yield self.removed @@ -86,7 +110,7 @@ yield self.ignored yield self.clean - def __repr__(self): + def __repr__(self) -> str: return ( r'<status modified=%s, added=%s, removed=%s, deleted=%s, ' r'unknown=%s, ignored=%s, clean=%s>' @@ -119,7 +143,7 @@ yield subpath, ctx2.nullsub(subpath, ctx1) -def nochangesfound(ui, repo, excluded=None): +def nochangesfound(ui: "uimod.ui", repo, excluded=None) -> None: """Report no changes for push/pull, excluded is None or a list of nodes excluded from the push/pull. """ @@ -139,7 +163,7 @@ ui.status(_(b"no changes found\n")) -def callcatch(ui, func): +def callcatch(ui: "uimod.ui", func: Callable[[], int]) -> int: """call func() with global exception handling return func() if no exception happens. otherwise do some error handling @@ -261,7 +285,7 @@ return coarse_exit_code -def checknewlabel(repo, lbl, kind): +def checknewlabel(repo, lbl: bytes, kind) -> None: # Do not use the "kind" parameter in ui output. # It makes strings difficult to translate. 
if lbl in [b'tip', b'.', b'null']: @@ -287,7 +311,7 @@ ) -def checkfilename(f): +def checkfilename(f: bytes) -> None: '''Check that the filename f is an acceptable filename for a tracked file''' if b'\r' in f or b'\n' in f: raise error.InputError( @@ -296,7 +320,7 @@ ) -def checkportable(ui, f): +def checkportable(ui: "uimod.ui", f: bytes) -> None: '''Check if filename f is portable and warn or abort depending on config''' checkfilename(f) abort, warn = checkportabilityalert(ui) @@ -309,7 +333,7 @@ ui.warn(_(b"warning: %s\n") % msg) -def checkportabilityalert(ui): +def checkportabilityalert(ui: "uimod.ui") -> Tuple[bool, bool]: """check if the user's config requests nothing, a warning, or abort for non-portable filenames""" val = ui.config(b'ui', b'portablefilenames') @@ -325,7 +349,7 @@ class casecollisionauditor: - def __init__(self, ui, abort, dirstate): + def __init__(self, ui: "uimod.ui", abort: bool, dirstate) -> None: self._ui = ui self._abort = abort allfiles = b'\0'.join(dirstate) @@ -336,7 +360,7 @@ # same filename twice. self._newfiles = set() - def __call__(self, f): + def __call__(self, f: bytes) -> None: if f in self._newfiles: return fl = encoding.lower(f) @@ -349,7 +373,9 @@ self._newfiles.add(f) -def combined_filtered_and_obsolete_hash(repo, maxrev, needobsolete=False): +def combined_filtered_and_obsolete_hash( + repo, maxrev, needobsolete: bool = False +): """build hash of filtered revisions in the current repoview. Multiple caches perform up-to-date validation by checking that the @@ -430,7 +456,7 @@ return (filtered_set, obs_set) -def _hash_revs(revs): +def _hash_revs(revs: Iterable[int]) -> bytes: """return a hash from a list of revision numbers""" s = hashutil.sha1() for rev in revs: @@ -438,7 +464,12 @@ return s.digest() -def walkrepos(path, followsym=False, seen_dirs=None, recurse=False): +def walkrepos( + path, + followsym: bool = False, + seen_dirs: Optional[List[bytes]] = None, + recurse: bool = False, +) -> Iterable[bytes]: """yield every hg repository under path, always recursively. 
The recurse flag will only control recursion into repo working dirs""" @@ -487,7 +518,7 @@ dirs[:] = newdirs -def binnode(ctx): +def binnode(ctx) -> bytes: """Return binary node id for a given basectx""" node = ctx.node() if node is None: @@ -495,7 +526,7 @@ return node -def intrev(ctx): +def intrev(ctx) -> int: """Return integer for a given basectx that can be used in comparison or arithmetic operation""" rev = ctx.rev() @@ -504,14 +535,14 @@ return rev -def formatchangeid(ctx): +def formatchangeid(ctx) -> bytes: """Format changectx as '{rev}:{node|formatnode}', which is the default template provided by logcmdutil.changesettemplater""" repo = ctx.repo() return formatrevnode(repo.ui, intrev(ctx), binnode(ctx)) -def formatrevnode(ui, rev, node): +def formatrevnode(ui: "uimod.ui", rev: int, node: bytes) -> bytes: """Format given revision and node depending on the current verbosity""" if ui.debugflag: hexfunc = hex @@ -520,7 +551,7 @@ return b'%d:%s' % (rev, hexfunc(node)) -def resolvehexnodeidprefix(repo, prefix): +def resolvehexnodeidprefix(repo, prefix: bytes): if prefix.startswith(b'x'): prefix = prefix[1:] try: @@ -552,7 +583,7 @@ return node -def mayberevnum(repo, prefix): +def mayberevnum(repo, prefix: bytes) -> bool: """Checks if the given prefix may be mistaken for a revision number""" try: i = int(prefix) @@ -567,7 +598,7 @@ return False -def shortesthexnodeidprefix(repo, node, minlength=1, cache=None): +def shortesthexnodeidprefix(repo, node: bytes, minlength: int = 1, cache=None): """Find the shortest unambiguous prefix that matches hexnode. If "cache" is not None, it must be a dictionary that can be used for @@ -579,7 +610,7 @@ minlength = max(minlength, 1) - def disambiguate(prefix): + def disambiguate(prefix: bytes): """Disambiguate against revnums.""" if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'): if mayberevnum(repo, prefix): @@ -644,7 +675,7 @@ raise error.RepoLookupError() -def isrevsymbol(repo, symbol): +def isrevsymbol(repo, symbol: bytes) -> bool: """Checks if a symbol exists in the repo. See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the @@ -657,7 +688,7 @@ return False -def revsymbol(repo, symbol): +def revsymbol(repo, symbol: bytes): """Returns a context given a single revision symbol (as string). 
This is similar to revsingle(), but accepts only a single revision symbol, @@ -724,13 +755,12 @@ raise _filterederror(repo, symbol) -def _filterederror(repo, changeid): +def _filterederror(repo, changeid: bytes) -> error.FilteredRepoLookupError: """build an exception to be raised about a filtered changeid This is extracted in a function to help extensions (eg: evolve) to experiment with various message variants.""" if repo.filtername.startswith(b'visible'): - # Check if the changeset is obsolete unfilteredrepo = repo.unfiltered() ctx = revsymbol(unfilteredrepo, changeid) @@ -760,7 +790,7 @@ return repo[l.last()] -def _pairspec(revspec): +def _pairspec(revspec) -> bool: tree = revsetlang.parse(revspec) return tree and tree[0] in ( b'range', @@ -825,7 +855,9 @@ return repo.anyrevs(allspecs, user=True, localalias=localalias) -def increasingwindows(windowsize=8, sizelimit=512): +def increasingwindows( + windowsize: int = 8, sizelimit: int = 512 +) -> Iterable[int]: while True: yield windowsize if windowsize < sizelimit: @@ -891,7 +923,11 @@ return parents -def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None): +def getuipathfn( + repo, + legacyrelativevalue: bool = False, + forcerelativevalue: Optional[bool] = None, +) -> typelib.UiPathFn: """Return a function that produced paths for presenting to the user. The returned function takes a repo-relative path and produces a path @@ -931,12 +967,14 @@ return util.localpath -def subdiruipathfn(subpath, uipathfn): +def subdiruipathfn( + subpath: bytes, uipathfn: typelib.UiPathFn +) -> typelib.UiPathFn: '''Create a new uipathfn that treats the file as relative to subpath.''' return lambda f: uipathfn(posixpath.join(subpath, f)) -def anypats(pats, opts): +def anypats(pats, opts) -> bool: """Checks if any patterns, including --include and --exclude were given. Some commands (e.g. addremove) use this condition for deciding whether to @@ -945,7 +983,7 @@ return bool(pats or opts.get(b'include') or opts.get(b'exclude')) -def expandpats(pats): +def expandpats(pats: Iterable[bytes]) -> List[bytes]: """Expand bare globs when running on windows. On posix we assume it already has already been done by sh.""" if not util.expandglobs: @@ -966,7 +1004,12 @@ def matchandpats( - ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None + ctx, + pats=(), + opts=None, + globbed: bool = False, + default: bytes = b'relpath', + badfn=None, ): """Return a matcher and the patterns that were used. The matcher will warn about bad matches, unless an alternate badfn callback @@ -999,7 +1042,12 @@ def match( - ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None + ctx, + pats=(), + opts=None, + globbed: bool = False, + default: bytes = b'relpath', + badfn=None, ): '''Return a matcher that will warn about bad matches.''' return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0] @@ -1010,12 +1058,12 @@ return matchmod.always() -def matchfiles(repo, files, badfn=None): +def matchfiles(repo, files, badfn=None) -> matchmod.exactmatcher: '''Return a matcher that will efficiently match exactly these files.''' return matchmod.exact(files, badfn=badfn) -def parsefollowlinespattern(repo, rev, pat, msg): +def parsefollowlinespattern(repo, rev, pat: bytes, msg: bytes) -> bytes: """Return a file name from `pat` pattern suitable for usage in followlines logic. 
""" @@ -1030,7 +1078,7 @@ return files[0] -def getorigvfs(ui, repo): +def getorigvfs(ui: "uimod.ui", repo): """return a vfs suitable to save 'orig' file return None if no special directory is configured""" @@ -1040,7 +1088,7 @@ return vfs.vfs(repo.wvfs.join(origbackuppath)) -def backuppath(ui, repo, filepath): +def backuppath(ui: "uimod.ui", repo, filepath: bytes) -> bytes: """customize where working copy backup files (.orig files) are created Fetch user defined path from config file: [ui] origbackuppath = <path> @@ -1083,7 +1131,7 @@ self._torev = repo.changelog.rev self._revcontains = revcontainer.__contains__ - def __contains__(self, node): + def __contains__(self, node) -> bool: return self._revcontains(self._torev(node)) @@ -1096,7 +1144,7 @@ fixphase=False, targetphase=None, backup=True, -): +) -> None: """do common cleanups when old nodes are replaced by new nodes That includes writing obsmarkers or stripping nodes, and moving bookmarks. @@ -1270,7 +1318,14 @@ ) -def addremove(repo, matcher, prefix, uipathfn, opts=None, open_tr=None): +def addremove( + repo, + matcher, + prefix: bytes, + uipathfn: typelib.UiPathFn, + opts=None, + open_tr=None, +) -> int: if opts is None: opts = {} m = matcher @@ -1303,7 +1358,7 @@ rejected = [] - def badfn(f, msg): + def badfn(f: bytes, msg: bytes) -> None: if f in m.files(): m.bad(f, msg) rejected.append(f) @@ -1341,7 +1396,7 @@ return ret -def marktouched(repo, files, similarity=0.0): +def marktouched(repo, files, similarity: float = 0.0) -> int: """Assert that files have somehow been operated upon. files are relative to the repo root.""" m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x)) @@ -1376,7 +1431,9 @@ return 0 -def _interestingfiles(repo, matcher): +def _interestingfiles( + repo, matcher +) -> Tuple[List[bytes], List[bytes], List[bytes], List[bytes], List[bytes]]: """Walk dirstate with matcher, looking for files that addremove would care about. @@ -1412,7 +1469,9 @@ return added, unknown, deleted, removed, forgotten -def _findrenames(repo, matcher, added, removed, similarity, uipathfn): +def _findrenames( + repo, matcher, added, removed, similarity, uipathfn: typelib.UiPathFn +) -> Dict[bytes, bytes]: '''Find renames from removed files to added ones.''' renames = {} if similarity > 0: @@ -1435,7 +1494,7 @@ return renames -def _markchanges(repo, unknown, deleted, renames): +def _markchanges(repo, unknown, deleted, renames) -> None: """Marks the files in unknown as added, the files in deleted as removed, and the files in renames as copied.""" wctx = repo[None] @@ -1518,7 +1577,15 @@ return copiesfn -def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None): +def dirstatecopy( + ui: "uimod.ui", + repo, + wctx, + src, + dst, + dryrun: bool = False, + cwd: Optional[bytes] = None, +) -> None: """Update the dirstate to reflect the intent of copying src to dst. For different reasons it might not end with dst being marked as copied from src. """ @@ -1543,7 +1610,7 @@ wctx.copy(origsrc, dst) -def movedirstate(repo, newctx, match=None): +def movedirstate(repo, newctx, match=None) -> None: """Move the dirstate to newctx and adjust it as necessary. A matcher can be provided as an optimization. 
It is probably a bug to pass @@ -1596,12 +1663,12 @@ return requirements, None -def istreemanifest(repo): +def istreemanifest(repo) -> bool: """returns whether the repository is using treemanifest or not""" return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements -def writereporequirements(repo, requirements=None): +def writereporequirements(repo, requirements=None) -> None: """writes requirements for the repo Requirements are written to .hg/requires and .hg/store/requires based @@ -1620,14 +1687,16 @@ repo.svfs.tryunlink(b'requires') -def writerequires(opener, requirements): +def writerequires(opener, requirements) -> None: with opener(b'requires', b'w', atomictemp=True) as fp: for r in sorted(requirements): fp.write(b"%s\n" % r) class filecachesubentry: - def __init__(self, path, stat): + _cacheable: Optional[bool] = None + + def __init__(self, path, stat: bool): self.path = path self.cachestat = None self._cacheable = None @@ -1641,18 +1710,18 @@ # None means we don't know yet self._cacheable = None - def refresh(self): + def refresh(self) -> None: if self.cacheable(): self.cachestat = filecachesubentry.stat(self.path) - def cacheable(self): + def cacheable(self) -> bool: if self._cacheable is not None: return self._cacheable # we don't know yet, assume it is for now return True - def changed(self): + def changed(self) -> bool: # no point in going further if we can't cache it if not self.cacheable(): return True @@ -1674,7 +1743,7 @@ return False @staticmethod - def stat(path): + def stat(path: bytes) -> Optional[typelib.CacheStat]: try: return util.cachestat(path) except FileNotFoundError: @@ -1682,19 +1751,19 @@ class filecacheentry: - def __init__(self, paths, stat=True): + def __init__(self, paths, stat: bool = True) -> None: self._entries = [] for path in paths: self._entries.append(filecachesubentry(path, stat)) - def changed(self): + def changed(self) -> bool: '''true if any entry has changed''' for entry in self._entries: if entry.changed(): return True return False - def refresh(self): + def refresh(self) -> None: for entry in self._entries: entry.refresh() @@ -1725,13 +1794,15 @@ remove the ``filecacheentry``. """ - def __init__(self, *paths): + paths: Tuple[bytes, ...] + + def __init__(self, *paths: bytes) -> None: self.paths = paths def tracked_paths(self, obj): return [self.join(obj, path) for path in self.paths] - def join(self, obj, fname): + def join(self, obj, fname: bytes): """Used to compute the runtime path of a cached file. 
Users should subclass filecache and provide their own version of this @@ -1792,7 +1863,7 @@ obj.__dict__[self.sname] = value # update copy returned by obj.x -def extdatasource(repo, source): +def extdatasource(repo, source: bytes): """Gather a map of rev -> value dict from the specified source A source spec is treated as a URL, with a special case shell: type @@ -1861,7 +1932,21 @@ class progress: - def __init__(self, ui, updatebar, topic, unit=b"", total=None): + ui: "uimod.ui" + pos: Optional[int] # None once complete + topic: bytes + unit: bytes + total: Optional[int] + debug: bool + + def __init__( + self, + ui: "uimod.ui", + updatebar, + topic: bytes, + unit: bytes = b"", + total: Optional[int] = None, + ) -> None: self.ui = ui self.pos = 0 self.topic = topic @@ -1876,7 +1961,9 @@ def __exit__(self, exc_type, exc_value, exc_tb): self.complete() - def update(self, pos, item=b"", total=None): + def update( + self, pos: int, item: bytes = b"", total: Optional[int] = None + ) -> None: assert pos is not None if total: self.total = total @@ -1885,16 +1972,18 @@ if self.debug: self._printdebug(item) - def increment(self, step=1, item=b"", total=None): + def increment( + self, step: int = 1, item: bytes = b"", total: Optional[int] = None + ) -> None: self.update(self.pos + step, item, total) - def complete(self): + def complete(self) -> None: self.pos = None self.unit = b"" self.total = None self._updatebar(self.topic, self.pos, b"", self.unit, self.total) - def _printdebug(self, item): + def _printdebug(self, item: bytes) -> None: unit = b'' if self.unit: unit = b' ' + self.unit @@ -1911,7 +2000,7 @@ self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit)) -def gdinitconfig(ui): +def gdinitconfig(ui: "uimod.ui"): """helper function to know if a repo should be created as general delta""" # experimental config: format.generaldelta return ui.configbool(b'format', b'generaldelta') or ui.configbool( @@ -1919,7 +2008,7 @@ ) -def gddeltaconfig(ui): +def gddeltaconfig(ui: "uimod.ui"): """helper function to know if incoming deltas should be optimized The `format.generaldelta` config is an old form of the config that also @@ -1938,11 +2027,11 @@ firstlinekey = b'__firstline' - def __init__(self, vfs, path, keys=None): + def __init__(self, vfs, path: bytes, keys=None) -> None: self.vfs = vfs self.path = path - def read(self, firstlinenonkeyval=False): + def read(self, firstlinenonkeyval: bool = False): """Read the contents of a simple key-value file 'firstlinenonkeyval' indicates whether the first line of file should @@ -1973,7 +2062,7 @@ raise error.CorruptedState(stringutil.forcebytestr(e)) return d - def write(self, data, firstline=None): + def write(self, data, firstline: Optional[bytes] = None) -> None: """Write key=>value mapping to a file data is a dict. Keys must be alphanumerical and start with a letter. Values must not contain newline characters. @@ -2002,7 +2091,7 @@ fp.write(b''.join(lines)) -_reportobsoletedsource = [ +_reportobsoletedsource: List[bytes] = [ b'debugobsolete', b'pull', b'push', @@ -2010,13 +2099,13 @@ b'unbundle', ] -_reportnewcssource = [ +_reportnewcssource: List[bytes] = [ b'pull', b'unbundle', ] -def prefetchfiles(repo, revmatches): +def prefetchfiles(repo, revmatches) -> None: """Invokes the registered file prefetch functions, allowing extensions to ensure the corresponding files are available locally, before the command uses them. 
@@ -2045,10 +2134,12 @@ fileprefetchhooks = util.hooks() # A marker that tells the evolve extension to suppress its own reporting -_reportstroubledchangesets = True - - -def registersummarycallback(repo, otr, txnname=b'', as_validator=False): +_reportstroubledchangesets: bool = True + + +def registersummarycallback( + repo, otr, txnname: bytes = b'', as_validator: bool = False +) -> None: """register a callback to issue a summary after the transaction is closed If as_validator is true, then the callbacks are registered as transaction @@ -2219,7 +2310,7 @@ repo.ui.status(msg % len(published)) -def getinstabilitymessage(delta, instability): +def getinstabilitymessage(delta: int, instability: bytes) -> Optional[bytes]: """function to return the message to show warning about new instabilities exists as a separate function so that extension can wrap to show more @@ -2228,14 +2319,14 @@ return _(b'%i new %s changesets\n') % (delta, instability) -def nodesummaries(repo, nodes, maxnumnodes=4): +def nodesummaries(repo, nodes, maxnumnodes: int = 4) -> bytes: if len(nodes) <= maxnumnodes or repo.ui.verbose: return b' '.join(short(h) for h in nodes) first = b' '.join(short(h) for h in nodes[:maxnumnodes]) return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes) -def enforcesinglehead(repo, tr, desc, accountclosed, filtername): +def enforcesinglehead(repo, tr, desc: bytes, accountclosed, filtername) -> None: """check that no named branch has multiple heads""" if desc in (b'strip', b'repair'): # skip the logic during strip @@ -2260,7 +2351,7 @@ return sink -def unhidehashlikerevs(repo, specs, hiddentype): +def unhidehashlikerevs(repo, specs, hiddentype: bytes): """parse the user specs and unhide changesets whose hash or revision number is passed. @@ -2313,7 +2404,7 @@ return repo.filtered(b'visible-hidden', revs) -def _getrevsfromsymbols(repo, symbols): +def _getrevsfromsymbols(repo, symbols) -> Set[int]: """parse the list of symbols and returns a set of revision numbers of hidden changesets present in symbols""" revs = set() @@ -2348,7 +2439,7 @@ return revs -def bookmarkrevs(repo, mark): +def bookmarkrevs(repo, mark: bytes): """Select revisions reachable by a given bookmark If the bookmarked revision isn't a head, an empty set will be returned. @@ -2356,7 +2447,7 @@ return repo.revs(format_bookmark_revspec(mark)) -def format_bookmark_revspec(mark): +def format_bookmark_revspec(mark: bytes) -> bytes: """Build a revset expression to select revisions reachable by a given bookmark""" mark = b'literal:' + mark @@ -2370,7 +2461,7 @@ ) -def ismember(ui, username, userlist): +def ismember(ui: "uimod.ui", username: bytes, userlist: List[bytes]) -> bool: """Check if username is a member of userlist. If userlist has a single '*' member, all users are considered members. 
@@ -2380,22 +2471,24 @@ return userlist == [b'*'] or username in userlist -RESOURCE_HIGH = 3 -RESOURCE_MEDIUM = 2 -RESOURCE_LOW = 1 -RESOURCE_DEFAULT = 0 - -RESOURCE_MAPPING = { +RESOURCE_HIGH: int = 3 +RESOURCE_MEDIUM: int = 2 +RESOURCE_LOW: int = 1 +RESOURCE_DEFAULT: int = 0 + +RESOURCE_MAPPING: Dict[bytes, int] = { b'default': RESOURCE_DEFAULT, b'low': RESOURCE_LOW, b'medium': RESOURCE_MEDIUM, b'high': RESOURCE_HIGH, } -DEFAULT_RESOURCE = RESOURCE_MEDIUM - - -def get_resource_profile(ui, dimension=None): +DEFAULT_RESOURCE: int = RESOURCE_MEDIUM + + +def get_resource_profile( + ui: "uimod.ui", dimension: Optional[bytes] = None +) -> int: """return the resource profile for a dimension If no dimension is specified, the generic value is returned"""
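Editor's note: the scmutil hunks above mostly layer type annotations onto existing helpers without changing behavior. A minimal standalone sketch of that annotation style (the names below are illustrative, not Mercurial APIs), showing an annotated constant table and an Optional parameter:

from typing import Dict, Optional

# hypothetical resource table, mirroring the annotated-constant style above
LEVELS: Dict[bytes, int] = {b'low': 1, b'medium': 2, b'high': 3}
DEFAULT_LEVEL: int = 2

def resource_level(dimension: Optional[bytes] = None) -> int:
    # fall back to the generic default when no dimension is requested
    if dimension is None:
        return DEFAULT_LEVEL
    return LEVELS.get(dimension, DEFAULT_LEVEL)

print(resource_level())          # 2
print(resource_level(b'high'))   # 3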
--- a/mercurial/setdiscovery.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/setdiscovery.py Thu Aug 01 16:42:38 2024 +0200 @@ -190,7 +190,6 @@ return getparents def _childrengetter(self): - if self._childrenmap is not None: # During discovery, the `undecided` set keep shrinking. # Therefore, the map computed for an iteration N will be @@ -454,7 +453,6 @@ full = not initial_head_exchange progress = ui.makeprogress(_(b'searching'), unit=_(b'queries')) while not disco.iscomplete(): - if full or disco.hasinfo(): if full: ui.note(_(b"sampling from both directions\n"))
--- a/mercurial/similar.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/similar.py Thu Aug 01 16:42:38 2024 +0200 @@ -118,14 +118,14 @@ # Find exact matches. matchedfiles = set() - for (a, b) in _findexactmatches(repo, addedfiles, removedfiles): + for a, b in _findexactmatches(repo, addedfiles, removedfiles): matchedfiles.add(b) yield (a.path(), b.path(), 1.0) # If the user requested similar files to be matched, search for them also. if threshold < 1.0: addedfiles = [x for x in addedfiles if x not in matchedfiles] - for (a, b, score) in _findsimilarmatches( + for a, b, score in _findsimilarmatches( repo, addedfiles, removedfiles, threshold ): yield (a.path(), b.path(), score)
--- a/mercurial/sslutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/sslutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -497,7 +497,6 @@ ) elif e.reason == 'CERTIFICATE_VERIFY_FAILED' and pycompat.iswindows: - ui.warn( _( b'(the full certificate chain may not be available '
--- a/mercurial/store.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/store.py Thu Aug 01 16:42:38 2024 +0200 @@ -10,10 +10,18 @@ import os import re import stat +import typing + from typing import Generator, List from .i18n import _ from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from .node import hex from .revlogutils.constants import ( INDEX_HEADER, @@ -37,10 +45,10 @@ parsers = policy.importmod('parsers') # how much bytes should be read from fncache in one read # It is done to prevent loading large fncache files into memory -fncache_chunksize = 10 ** 6 +fncache_chunksize = 10**6 -def _match_tracked_entry(entry, matcher): +def _match_tracked_entry(entry: "BaseStoreEntry", matcher): """parses a fncache entry and returns whether the entry is tracking a path matched by matcher or not. @@ -48,10 +56,16 @@ if matcher is None: return True + + # TODO: make this safe for other entry types. Currently, the various + # store.data_entry generators only yield RevlogStoreEntry, so the + # attributes do exist on `entry`. + # pytype: disable=attribute-error if entry.is_filelog: return matcher(entry.target_id) elif entry.is_manifestlog: return matcher.visitdir(entry.target_id.rstrip(b'/')) + # pytype: enable=attribute-error raise error.ProgrammingError(b"cannot process entry %r" % entry)
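Editor's note: two patterns recur in this and several later hunks: re-importing the real `attr` package under `typing.TYPE_CHECKING` so pytype analyzes it instead of the vendored copy, and fencing a known-safe attribute access with `# pytype: disable=attribute-error` / `enable`. A rough standalone sketch of both (the `entry` shape is hypothetical):

import typing

if typing.TYPE_CHECKING:
    # Only evaluated by static checkers; the runtime keeps whatever
    # (possibly vendored) copy was imported elsewhere.
    import attr  # noqa: F401

def tracked_path(entry):
    # Callers only ever pass entries that carry `target_id`, even though the
    # declared base type does not, so the warning is suppressed locally.
    # pytype: disable=attribute-error
    return entry.target_id
    # pytype: enable=attribute-error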
--- a/mercurial/streamclone.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/streamclone.py Thu Aug 01 16:42:38 2024 +0200 @@ -547,6 +547,7 @@ _srcstore = b's' # store (svfs) _srccache = b'c' # cache (cache) + # This is it's own function so extensions can override it. def _walkstreamfullstorefiles(repo): """list snapshot file from the store""" @@ -809,7 +810,6 @@ """ with repo.lock(): - repo.ui.debug(b'scanning\n') entries = _entries_walk( @@ -857,7 +857,6 @@ # considering the files to preserve, disabling the gc while we do so helps # performance a lot. with repo.lock(), util.nogc(): - repo.ui.debug(b'scanning\n') entries = _entries_walk( @@ -990,7 +989,6 @@ with repo.transaction(b'clone'): ctxs = (vfs.backgroundclosing(repo.ui) for vfs in vfsmap.values()) with nested(*ctxs): - for i in range(entrycount): filecount = util.uvarintdecodestream(fp) if filecount == 0: @@ -1123,7 +1121,6 @@ with dest_repo.lock(): with src_repo.lock(): - # bookmark is not integrated to the streaming as it might use the # `repo.vfs` and they are too many sentitive data accessible # through `repo.vfs` to expose it to streaming clone.
--- a/mercurial/strip.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/strip.py Thu Aug 01 16:42:38 2024 +0200 @@ -63,7 +63,6 @@ soft=False, ): with repo.wlock(), repo.lock(): - if update: checklocalchanges(repo, force=force) urev = _findupdatetarget(repo, revs)
--- a/mercurial/subrepo.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/subrepo.py Thu Aug 01 16:42:38 2024 +0200 @@ -1227,16 +1227,12 @@ externals.append(path) elif item == 'missing': missing.append(path) - if ( - item - not in ( - '', - 'normal', - 'unversioned', - 'external', - ) - or props not in ('', 'none', 'normal') - ): + if item not in ( + '', + 'normal', + 'unversioned', + 'external', + ) or props not in ('', 'none', 'normal'): changes.append(path) for path in changes: for ext in externals:
--- a/mercurial/subrepoutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/subrepoutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -131,7 +131,7 @@ # extra escapes are needed because re.sub string decodes. repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl) try: - src = re.sub(pattern, repl, src, 1) + src = re.sub(pattern, repl, src, count=1) except re.error as e: raise error.Abort( _(b"bad subrepository pattern in %s: %s")
--- a/mercurial/tags.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/tags.py Thu Aug 01 16:42:38 2024 +0200 @@ -601,7 +601,7 @@ # we keep them in UTF-8 throughout this module. If we converted # them local encoding on input, we would lose info writing them to # the cache. - for (name, (node, hist)) in sorted(cachetags.items()): + for name, (node, hist) in sorted(cachetags.items()): for n in hist: cachefile.write(b"%s %s\n" % (hex(n), name)) cachefile.write(b"%s %s\n" % (hex(node), name))
--- a/mercurial/templater.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/templater.py Thu Aug 01 16:42:38 2024 +0200 @@ -69,6 +69,12 @@ import abc import os +from typing import ( + BinaryIO, + Optional, + Tuple, +) + from .i18n import _ from .pycompat import ( FileNotFoundError, @@ -1121,7 +1127,9 @@ return path if os.path.isdir(path) else None -def open_template(name, templatepath=None): +def open_template( + name: bytes, templatepath: Optional[bytes] = None +) -> Tuple[bytes, BinaryIO]: """returns a file-like object for the given template, and its full path If the name is a relative path and we're in a frozen binary, the template @@ -1156,7 +1164,9 @@ ) -def try_open_template(name, templatepath=None): +def try_open_template( + name: bytes, templatepath: Optional[bytes] = None +) -> Tuple[Optional[bytes], Optional[BinaryIO]]: try: return open_template(name, templatepath) except (EnvironmentError, ImportError):
--- a/mercurial/typelib.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/typelib.py Thu Aug 01 16:42:38 2024 +0200 @@ -7,6 +7,10 @@ import typing +from typing import ( + Callable, +) + # Note: this is slightly different from pycompat.TYPE_CHECKING, as using # pycompat causes the BinaryIO_Proxy type to be resolved to ``object`` when # used as the base class during a pytype run. @@ -21,8 +25,24 @@ if TYPE_CHECKING: from typing import ( BinaryIO, + Union, + ) + + from . import ( + node, + posix, + windows, ) BinaryIO_Proxy = BinaryIO + CacheStat = Union[posix.cachestat, windows.cachestat] + NodeConstants = node.sha1nodeconstants else: + from typing import Any + BinaryIO_Proxy = object + CacheStat = Any + NodeConstants = Any + +# scmutil.getuipathfn() related callback. +UiPathFn = Callable[[bytes], bytes]
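Editor's note: the typelib additions rely on a common trick: give an alias a precise type under `TYPE_CHECKING` and fall back to `Any` at runtime, so no platform-specific module has to be imported just to name a type. A small sketch with made-up alias names:

import typing

if typing.TYPE_CHECKING:
    from typing import Union
    StatLike = Union[int, float]   # placeholder for a platform-specific union
else:
    from typing import Any
    StatLike = Any

def record(stat: "StatLike") -> None:
    print(stat)

record(3)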
--- a/mercurial/upgrade.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/upgrade.py Thu Aug 01 16:42:38 2024 +0200 @@ -194,7 +194,6 @@ onlydefault.append(d) if fromconfig or onlydefault: - if fromconfig: ui.status( _(
--- a/mercurial/urllibcompat.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/urllibcompat.py Thu Aug 01 16:42:38 2024 +0200 @@ -109,6 +109,7 @@ ), ) + # urllib.parse.quote() accepts both str and bytes, decodes bytes # (if necessary), and returns str. This is wonky. We provide a custom # implementation that only accepts bytes and emits bytes.
--- a/mercurial/util.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/util.py Thu Aug 01 16:42:38 2024 +0200 @@ -32,10 +32,13 @@ import sys import time import traceback +import typing import warnings from typing import ( Any, + BinaryIO, + Callable, Iterable, Iterator, List, @@ -45,6 +48,12 @@ from .node import hex from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from .pycompat import ( open, ) @@ -55,6 +64,7 @@ i18n, policy, pycompat, + typelib, urllibcompat, ) from .utils import ( @@ -440,13 +450,24 @@ return data -def mmapread(fp, size=None): +def has_mmap_populate(): + return hasattr(osutil, "background_mmap_populate") or hasattr( + mmap, 'MAP_POPULATE' + ) + + +def mmapread(fp, size=None, pre_populate=True): """Read a file content using mmap The responsability of checking the file system is mmap safe is the - responsability of the caller. + responsability of the caller (see `vfs.is_mmap_safe`). In some case, a normal string might be returned. + + If `pre_populate` is True (the default), the mmapped data will be + pre-populated in memory if the system support this option, this slow down + the initial mmaping but avoid potentially crippling page fault on later + access. If this is not the desired behavior, set `pre_populate` to False. """ if size == 0: # size of 0 to mmap.mmap() means "all data" @@ -455,8 +476,15 @@ elif size is None: size = 0 fd = getattr(fp, 'fileno', lambda: fp)() + flags = mmap.MAP_PRIVATE + bg_populate = hasattr(osutil, "background_mmap_populate") + if pre_populate and not bg_populate: + flags |= getattr(mmap, 'MAP_POPULATE', 0) try: - return mmap.mmap(fd, size, access=mmap.ACCESS_READ) + m = mmap.mmap(fd, size, flags=flags, prot=mmap.PROT_READ) + if pre_populate and bg_populate: + osutil.background_mmap_populate(m) + return m except ValueError: # Empty files cannot be mmapped, but mmapread should still work. Check # if the file is empty, and if so, return an empty buffer. @@ -1139,7 +1167,7 @@ def version(): """Return version information if available.""" try: - from . import __version__ + from . 
import __version__ # pytype: disable=import-error return __version__.version except ImportError: @@ -1315,7 +1343,7 @@ self[k] = f[k] def insert(self, position, key, value): - for (i, (k, v)) in enumerate(list(self.items())): + for i, (k, v) in enumerate(list(self.items())): if i == position: self[key] = value if i >= position: @@ -2711,10 +2739,10 @@ def splitbig(chunks): for chunk in chunks: - if len(chunk) > 2 ** 20: + if len(chunk) > 2**20: pos = 0 while pos < len(chunk): - end = pos + 2 ** 18 + end = pos + 2**18 yield chunk[pos:end] pos = end else: @@ -2738,7 +2766,7 @@ while left > 0: # refill the queue if not queue: - target = 2 ** 18 + target = 2**18 for chunk in self.iter: queue.append(chunk) target -= len(chunk) @@ -2894,20 +2922,20 @@ ) -class transformingwriter: +class transformingwriter(typelib.BinaryIO_Proxy): """Writable file wrapper to transform data by function""" - def __init__(self, fp, encode): + def __init__(self, fp: BinaryIO, encode: Callable[[bytes], bytes]) -> None: self._fp = fp self._encode = encode - def close(self): + def close(self) -> None: self._fp.close() - def flush(self): + def flush(self) -> None: self._fp.flush() - def write(self, data): + def write(self, data: bytes) -> int: return self._fp.write(self._encode(data)) @@ -2925,7 +2953,7 @@ return _eolre.sub(b'\r\n', s) -def _crlfwriter(fp): +def _crlfwriter(fp: typelib.BinaryIO_Proxy) -> typelib.BinaryIO_Proxy: return transformingwriter(fp, tocrlf) @@ -2938,6 +2966,21 @@ fromnativeeol = pycompat.identity nativeeolwriter = pycompat.identity +if typing.TYPE_CHECKING: + # Replace the various overloads that come along with aliasing other methods + # with the narrow definition that we care about in the type checking phase + # only. This ensures that both Windows and POSIX see only the definition + # that is actually available. + + def tonativeeol(s: bytes) -> bytes: + raise NotImplementedError + + def fromnativeeol(s: bytes) -> bytes: + raise NotImplementedError + + def nativeeolwriter(fp: typelib.BinaryIO_Proxy) -> typelib.BinaryIO_Proxy: + raise NotImplementedError + # TODO delete since workaround variant for Python 2 no longer needed. def iterfile(fp): @@ -3068,12 +3111,12 @@ _sizeunits = ( - (b'm', 2 ** 20), - (b'k', 2 ** 10), - (b'g', 2 ** 30), - (b'kb', 2 ** 10), - (b'mb', 2 ** 20), - (b'gb', 2 ** 30), + (b'm', 2**20), + (b'k', 2**10), + (b'g', 2**30), + (b'kb', 2**10), + (b'mb', 2**20), + (b'gb', 2**30), (b'b', 1), )
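Editor's note: the `mmapread` change pre-faults the mapping either through the C `osutil.background_mmap_populate` helper or, failing that, the `MAP_POPULATE` flag. A POSIX-only sketch of the flag-based path (the file path is hypothetical; empty files still raise ValueError and need the same fallback the real helper keeps):

import mmap
import os

path = "some-data-file"  # hypothetical; must exist and be non-empty
fd = os.open(path, os.O_RDONLY)
try:
    # MAP_POPULATE is Linux-only; degrade to a plain private mapping elsewhere
    flags = mmap.MAP_PRIVATE | getattr(mmap, "MAP_POPULATE", 0)
    m = mmap.mmap(fd, 0, flags=flags, prot=mmap.PROT_READ)  # 0 maps the whole file
finally:
    os.close(fd)
print(len(m))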
--- a/mercurial/utils/compression.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/utils/compression.py Thu Aug 01 16:42:38 2024 +0200 @@ -511,7 +511,7 @@ parts = [] pos = 0 while pos < insize: - pos2 = pos + 2 ** 20 + pos2 = pos + 2**20 parts.append(z.compress(data[pos:pos2])) pos = pos2 parts.append(z.flush())
--- a/mercurial/utils/dateutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/utils/dateutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -83,7 +83,37 @@ def makedate(timestamp: Optional[float] = None) -> hgdate: """Return a unix timestamp (or the current time) as a (unixtime, - offset) tuple based off the local timezone.""" + offset) tuple based off the local timezone. + + >>> import os, time + >>> os.environ['TZ'] = 'Asia/Novokuznetsk' + >>> time.tzset() + + >>> def dtu(*a): + ... return datetime.datetime(*a, tzinfo=datetime.timezone.utc) + + # Old winter timezone, +7 + >>> makedate(dtu(2010, 1, 1, 5, 0, 0).timestamp()) + (1262322000.0, -25200) + + # Same timezone in summer, +7, so no DST + >>> makedate(dtu(2010, 7, 1, 5, 0, 0).timestamp()) + (1277960400.0, -25200) + + # Changing to new winter timezone, from +7 to +6 (ae04af1ce78d testcase) + >>> makedate(dtu(2010, 10, 30, 20, 0, 0).timestamp() - 1) + (1288468799.0, -25200) + >>> makedate(dtu(2010, 10, 30, 20, 0, 0).timestamp()) + (1288468800.0, -21600) + >>> makedate(dtu(2011, 1, 1, 5, 0, 0).timestamp()) + (1293858000.0, -21600) + + # Introducing DST, changing +6 to +7 + >>> makedate(dtu(2011, 3, 26, 20, 0, 0).timestamp() - 1) + (1301169599.0, -21600) + >>> makedate(dtu(2011, 3, 26, 20, 0, 0).timestamp()) + (1301169600.0, -25200) + """ if timestamp is None: timestamp = time.time() if timestamp < 0:
--- a/mercurial/utils/procutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/utils/procutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -711,7 +711,6 @@ if stdin is not None: stdin.close() - else: def runbgcommand(
--- a/mercurial/utils/resourceutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/utils/resourceutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -8,13 +8,20 @@ # GNU General Public License version 2 or any later version. -import _imp import os import sys +import typing from .. import pycompat +if typing.TYPE_CHECKING: + from typing import ( + BinaryIO, + Iterator, + ) + + def mainfrozen(): """return True if we are a frozen executable. @@ -24,7 +31,12 @@ return ( hasattr(sys, "frozen") # new py2exe or hasattr(sys, "importers") # old py2exe - or _imp.is_frozen("__main__") # tools/freeze + or getattr( + getattr(sys.modules.get('__main__'), '__spec__', None), + 'origin', + None, + ) + == 'frozen' # tools/freeze ) @@ -39,17 +51,16 @@ # leading "mercurial." off of the package name, so that these # pseudo resources are found in their directory next to the # executable. - def _package_path(package): + def _package_path(package: bytes) -> bytes: dirs = package.split(b".") assert dirs[0] == b"mercurial" return os.path.join(_rootpath, *dirs[1:]) - else: datapath = os.path.dirname(os.path.dirname(pycompat.fsencode(__file__))) _rootpath = os.path.dirname(datapath) - def _package_path(package): + def _package_path(package: bytes) -> bytes: return os.path.join(_rootpath, *package.split(b".")) @@ -59,7 +70,7 @@ from importlib import resources # pytype: disable=import-error # Force loading of the resources module - if hasattr(resources, 'files'): + if hasattr(resources, 'files'): # Introduced in Python 3.9 resources.files # pytype: disable=module-attr else: resources.open_binary # pytype: disable=module-attr @@ -72,11 +83,11 @@ # importlib.resources was not found (almost definitely because we're on a # Python version before 3.7) - def open_resource(package, name): + def open_resource(package: bytes, name: bytes) -> "BinaryIO": path = os.path.join(_package_path(package), name) return open(path, "rb") - def is_resource(package, name): + def is_resource(package: bytes, name: bytes) -> bool: path = os.path.join(_package_path(package), name) try: @@ -84,17 +95,16 @@ except (IOError, OSError): return False - def contents(package): + def contents(package: bytes) -> "Iterator[bytes]": path = pycompat.fsdecode(_package_path(package)) for p in os.listdir(path): yield pycompat.fsencode(p) - else: from .. 
import encoding - def open_resource(package, name): + def open_resource(package: bytes, name: bytes) -> "BinaryIO": if hasattr(resources, 'files'): return ( resources.files( # pytype: disable=module-attr @@ -108,13 +118,25 @@ pycompat.sysstr(package), pycompat.sysstr(name) ) - def is_resource(package, name): - return resources.is_resource( # pytype: disable=module-attr - pycompat.sysstr(package), encoding.strfromlocal(name) - ) + def is_resource(package: bytes, name: bytes) -> bool: + if hasattr(resources, 'files'): # Introduced in Python 3.9 + return ( + resources.files(pycompat.sysstr(package)) + .joinpath(encoding.strfromlocal(name)) + .is_file() + ) + else: + return resources.is_resource( # pytype: disable=module-attr + pycompat.sysstr(package), encoding.strfromlocal(name) + ) - def contents(package): - # pytype: disable=module-attr - for r in resources.contents(pycompat.sysstr(package)): - # pytype: enable=module-attr - yield encoding.strtolocal(r) + def contents(package: bytes) -> "Iterator[bytes]": + if hasattr(resources, 'files'): # Introduced in Python 3.9 + for path in resources.files(pycompat.sysstr(package)).iterdir(): + if path.is_file(): + yield encoding.strtolocal(path.name) + else: + # pytype: disable=module-attr + for r in resources.contents(pycompat.sysstr(package)): + # pytype: enable=module-attr + yield encoding.strtolocal(r)
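Editor's note: the resourceutil rewrite prefers `importlib.resources.files()` (introduced in Python 3.9) over the deprecated `is_resource()`/`contents()` helpers. A rough illustration against an arbitrary stdlib package (the package name is only an example standing in for Mercurial's own data packages):

from importlib import resources

pkg = "email"  # any importable package; stands in for b"mercurial.templates"
root = resources.files(pkg)            # Traversable root of the package data
names = [p.name for p in root.iterdir() if p.is_file()]
print(names[:5])
# reading one entry would be: root.joinpath(names[0]).read_bytes()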
--- a/mercurial/utils/stringutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/utils/stringutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -13,6 +13,7 @@ import re as remod import textwrap import types +import typing from typing import ( Optional, @@ -22,6 +23,11 @@ from ..i18n import _ from ..thirdparty import attr +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from .. import ( encoding, error, @@ -574,7 +580,6 @@ return mailmap for line in mailmapcontent.splitlines(): - # Don't bother checking the line if it is a comment or # is an improperly formed author field if line.lstrip().startswith(b'#'): @@ -801,7 +806,6 @@ chunks.reverse() while chunks: - # Start the list of chunks that will make up the current line. # cur_len is just the length of all the chunks in cur_line. cur_line = []
--- a/mercurial/utils/urlutil.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/utils/urlutil.py Thu Aug 01 16:42:38 2024 +0200 @@ -9,6 +9,9 @@ import socket from typing import ( + Callable, + Dict, + Tuple, Union, ) @@ -29,7 +32,7 @@ ) # keeps pyflakes happy -assert [Union] +assert [Callable, Dict, Tuple, Union] urlreq = urllibcompat.urlreq @@ -652,12 +655,12 @@ self[name] = new_paths -_pathsuboptions = {} +_pathsuboptions: "Dict[bytes, Tuple[str, Callable]]" = {} # a dictionnary of methods that can be used to format a sub-option value path_suboptions_display = {} -def pathsuboption(option, attr, display=pycompat.bytestr): +def pathsuboption(option: bytes, attr: str, display=pycompat.bytestr): """Decorator used to declare a path sub-option. Arguments are the sub-option name and the attribute it should set on
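Editor's note: `_pathsuboptions` now carries an explicit type for its decorator-filled registry. A self-contained sketch of that registry-decorator shape (the names and the sample sub-option are invented for illustration):

from typing import Callable, Dict, Tuple

_suboptions: Dict[bytes, Tuple[str, Callable]] = {}

def suboption(option: bytes, attr: str) -> Callable:
    def register(func: Callable) -> Callable:
        _suboptions[option] = (attr, func)
        return func
    return register

@suboption(b'pushurl', 'push_url')
def _pushurl_option(value: bytes) -> bytes:
    return value

print(_suboptions)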
--- a/mercurial/verify.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/verify.py Thu Aug 01 16:42:38 2024 +0200 @@ -31,7 +31,7 @@ return v.verify() -def _normpath(f): +def _normpath(f: bytes) -> bytes: # under hg < 2.4, convert didn't sanitize paths properly, so a # converted repo may contain repeated slashes while b'//' in f:
--- a/mercurial/vfs.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/vfs.py Thu Aug 01 16:42:38 2024 +0200 @@ -291,7 +291,7 @@ def rmtree( self, path: Optional[bytes] = None, ignore_errors=False, forcibly=False - ): + ) -> None: """Remove a directory tree recursively If ``forcibly``, this tries to remove READ-ONLY files, too.
--- a/mercurial/win32.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/win32.py Thu Aug 01 16:42:38 2024 +0200 @@ -172,6 +172,7 @@ X509_ASN_ENCODING = 0x00000001 PKCS_7_ASN_ENCODING = 0x00010000 + # These structs are only complete enough to achieve what we need. class CERT_CHAIN_CONTEXT(ctypes.Structure): _fields_ = ( @@ -368,7 +369,7 @@ # See https://bugs.python.org/issue28474 code = _kernel32.GetLastError() if code > 0x7FFFFFFF: - code -= 2 ** 32 + code -= 2**32 err = ctypes.WinError(code=code) # pytype: disable=module-attr raise OSError( err.errno, '%s: %s' % (encoding.strfromlocal(name), err.strerror) @@ -459,10 +460,10 @@ return _getfileinfo(name).nNumberOfLinks -def samefile(path1: bytes, path2: bytes) -> bool: - '''Returns whether path1 and path2 refer to the same file or directory.''' - res1 = _getfileinfo(path1) - res2 = _getfileinfo(path2) +def samefile(fpath1: bytes, fpath2: bytes) -> bool: + '''Returns whether fpath1 and fpath2 refer to the same file or directory.''' + res1 = _getfileinfo(fpath1) + res2 = _getfileinfo(fpath2) return ( res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber and res1.nFileIndexHigh == res2.nFileIndexHigh @@ -470,10 +471,10 @@ ) -def samedevice(path1: bytes, path2: bytes) -> bool: - '''Returns whether path1 and path2 are on the same device.''' - res1 = _getfileinfo(path1) - res2 = _getfileinfo(path2) +def samedevice(fpath1: bytes, fpath2: bytes) -> bool: + '''Returns whether fpath1 and fpath2 are on the same device.''' + res1 = _getfileinfo(fpath1) + res2 = _getfileinfo(fpath2) return res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber @@ -710,16 +711,16 @@ return pi.dwProcessId -def unlink(f: bytes) -> None: +def unlink(path: bytes) -> None: '''try to implement POSIX' unlink semantics on Windows''' - if os.path.isdir(f): + if os.path.isdir(path): # use EPERM because it is POSIX prescribed value, even though # unlink(2) on directories returns EISDIR on Linux raise IOError( errno.EPERM, r"Unlinking directory not permitted: '%s'" - % encoding.strfromlocal(f), + % encoding.strfromlocal(path), ) # POSIX allows to unlink and rename open files. Windows has serious @@ -740,9 +741,9 @@ # implicit zombie filename blocking on a temporary name. for tries in range(10): - temp = b'%s-%08x' % (f, random.randint(0, 0xFFFFFFFF)) + temp = b'%s-%08x' % (path, random.randint(0, 0xFFFFFFFF)) try: - os.rename(f, temp) + os.rename(path, temp) break except FileExistsError: pass
--- a/mercurial/windows.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/windows.py Thu Aug 01 16:42:38 2024 +0200 @@ -620,10 +620,10 @@ return None -def readlink(pathname: bytes) -> bytes: - path = pycompat.fsdecode(pathname) +def readlink(path: bytes) -> bytes: + path_str = pycompat.fsdecode(path) try: - link = os.readlink(path) + link = os.readlink(path_str) except ValueError as e: # On py2, os.readlink() raises an AttributeError since it is # unsupported. On py3, reading a non-link raises a ValueError. Simply @@ -675,8 +675,10 @@ class cachestat: + stat: Optional[os.stat_result] + def __init__(self, path: bytes) -> None: - pass + self.stat = None def cacheable(self) -> bool: return False
--- a/mercurial/wireprotoframing.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/wireprotoframing.py Thu Aug 01 16:42:38 2024 +0200 @@ -12,9 +12,16 @@ import collections import struct +import typing from .i18n import _ from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( encoding, error, @@ -546,7 +553,7 @@ """ atomdicts = [] - for (formatting, args, labels) in atoms: + for formatting, args, labels in atoms: # TODO look for localstr, other types here? if not isinstance(formatting, bytes): @@ -1198,7 +1205,6 @@ b'%s' % stringutil.forcebytestr(e), errtype=b'server', ): - yield frame break @@ -1259,7 +1265,6 @@ for chunk in cborutil.streamencodebytestringfromiter( o.chunks ): - for frame in emitter.send(chunk): yield frame
--- a/mercurial/wireprototypes.py Thu Aug 01 16:34:37 2024 +0200 +++ b/mercurial/wireprototypes.py Thu Aug 01 16:42:38 2024 +0200 @@ -4,12 +4,20 @@ # GNU General Public License version 2 or any later version. +import typing + from .node import ( bin, hex, ) from .i18n import _ from .thirdparty import attr + +# Force pytype to use the non-vendored package +if typing.TYPE_CHECKING: + # noinspection PyPackageRequirements + import attr + from . import ( error, util,
--- a/rust/.cargo/config Thu Aug 01 16:34:37 2024 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-# Rust builds with a modern MSVC and uses a newer CRT.
-# Python 2.7 has a shared library dependency on an older CRT (msvcr90.dll).
-# We statically link the modern CRT to avoid multiple msvcr*.dll libraries
-# being loaded and Python possibly picking up symbols from the newer runtime
-# (which would be loaded first).
-[target.'cfg(target_os = "windows")']
-rustflags = ["-Ctarget-feature=+crt-static"]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/.cargo/config.toml Thu Aug 01 16:42:38 2024 +0200
@@ -0,0 +1,7 @@
+# Rust builds with a modern MSVC and uses a newer CRT.
+# Python 2.7 has a shared library dependency on an older CRT (msvcr90.dll).
+# We statically link the modern CRT to avoid multiple msvcr*.dll libraries
+# being loaded and Python possibly picking up symbols from the newer runtime
+# (which would be loaded first).
+[target.'cfg(target_os = "windows")']
+rustflags = ["-Ctarget-feature=+crt-static"]
--- a/rust/README.rst Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/README.rst Thu Aug 01 16:42:38 2024 +0200 @@ -83,7 +83,7 @@ ------------------------------ The minimum supported rust version (MSRV) is specified in the `Clippy`_ -configuration file at ``rust/clippy.toml``. It is set to be ``1.61.0`` as of +configuration file at ``rust/clippy.toml``. It is set to be ``1.79.0`` as of this writing, but keep in mind that the authoritative value is the one from the configuration file.
--- a/rust/clippy.toml Thu Aug 01 16:34:37 2024 +0200
+++ b/rust/clippy.toml Thu Aug 01 16:42:38 2024 +0200
@@ -1,1 +1,1 @@
-msrv = "1.61.0"
+msrv = "1.79.0"
--- a/rust/hg-core/src/dirstate_tree/dirstate_map.rs Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/hg-core/src/dirstate_tree/dirstate_map.rs Thu Aug 01 16:42:38 2024 +0200 @@ -427,19 +427,16 @@ pub(super) tracked_descendants_count: u32, } -#[derive(Debug)] +#[derive(Debug, Default)] pub(super) enum NodeData { Entry(DirstateEntry), - CachedDirectory { mtime: TruncatedTimestamp }, + CachedDirectory { + mtime: TruncatedTimestamp, + }, + #[default] None, } -impl Default for NodeData { - fn default() -> Self { - NodeData::None - } -} - impl NodeData { fn has_entry(&self) -> bool { matches!(self, NodeData::Entry(_))
--- a/rust/hg-core/src/dirstate_tree/on_disk.rs Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/hg-core/src/dirstate_tree/on_disk.rs Thu Aug 01 16:42:38 2024 +0200 @@ -332,9 +332,7 @@ ) -> Result<usize, DirstateV2ParseError> { let start = self.base_name_start.get(); if start < self.full_path.len.get() { - let start = usize::try_from(start) - // u32 -> usize, could only panic on a 16-bit CPU - .expect("dirstate-v2 base_name_start out of bounds"); + let start = usize::from(start); Ok(start) } else { Err(DirstateV2ParseError::new("not enough bytes for base name")) @@ -593,8 +591,8 @@ { // Either `usize::MAX` would result in "out of bounds" error since a single // `&[u8]` cannot occupy the entire addess space. - let start = start.get().try_into().unwrap_or(std::usize::MAX); - let len = len.try_into().unwrap_or(std::usize::MAX); + let start = start.get().try_into().unwrap_or(usize::MAX); + let len = len.try_into().unwrap_or(usize::MAX); let bytes = match on_disk.get(start..) { Some(bytes) => bytes, None => {
--- a/rust/hg-core/src/dirstate_tree/status.rs Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/hg-core/src/dirstate_tree/status.rs Thu Aug 01 16:42:38 2024 +0200 @@ -677,14 +677,14 @@ // The directory was modified too recently, // don’t cache its `read_dir` results. // - // 1. A change to this directory (direct child was - // added or removed) cause its mtime to be set - // (possibly truncated) to `directory_mtime` + // 1. A change to this directory (direct child was added or + // removed) cause its mtime to be set (possibly truncated) + // to `directory_mtime` // 2. This `status` algorithm calls `read_dir` - // 3. An other change is made to the same directory is - // made so that calling `read_dir` agin would give - // different results, but soon enough after 1. that - // the mtime stays the same + // 3. An other change is made to the same directory is made so + // that calling `read_dir` agin would give different + // results, but soon enough after 1. that the mtime stays + // the same // // On a system where the time resolution poor, this // scenario is not unlikely if all three steps are caused
--- a/rust/hg-core/src/matchers.rs Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/hg-core/src/matchers.rs Thu Aug 01 16:42:38 2024 +0200 @@ -617,7 +617,11 @@ std::mem::swap(&mut m1, &mut m2); } m1.file_set().map(|m1_files| { - m1_files.iter().cloned().filter(|f| m2.matches(f)).collect() + m1_files + .iter() + .filter(|&f| m2.matches(f)) + .cloned() + .collect() }) } else { // without exact input file sets, we can't do an exact @@ -710,7 +714,7 @@ }; if base_is_exact { new.files = base_files.map(|files| { - files.iter().cloned().filter(|f| new.matches(f)).collect() + files.iter().filter(|&f| new.matches(f)).cloned().collect() }); } new
--- a/rust/hg-core/src/revlog/changelog.rs Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/hg-core/src/revlog/changelog.rs Thu Aug 01 16:42:38 2024 +0200 @@ -8,6 +8,7 @@ use itertools::{Either, Itertools}; use crate::errors::HgError; +use crate::revlog::Index; use crate::revlog::Revision; use crate::revlog::{Node, NodePrefix}; use crate::revlog::{Revlog, RevlogEntry, RevlogError}; @@ -81,6 +82,10 @@ ) -> Result<Revision, RevlogError> { self.revlog.rev_from_node(node) } + + pub fn get_index(&self) -> &Index { + &self.revlog.index + } } impl Graph for Changelog { @@ -617,7 +622,7 @@ #[test] fn test_unescape_nul_followed_by_octal() { // Escaped NUL chars followed by octal digits are decoded correctly. - let expected = b"\012"; + let expected = b"\x0012"; let escaped = br"\012"; let unescaped = unescape_extra(escaped); assert_eq!(&expected[..], &unescaped[..]); @@ -713,7 +718,7 @@ for (extra, msg) in test_cases { assert!( - decode_extra(&extra).is_err(), + decode_extra(extra).is_err(), "corrupt extra should have failed to parse: {}", msg );
--- a/rust/hg-core/src/revlog/index.rs Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/hg-core/src/revlog/index.rs Thu Aug 01 16:42:38 2024 +0200 @@ -1387,6 +1387,7 @@ fn vec_of_empty(sets_size: usize, vec_len: usize) -> Vec<Self>; /// The size of the bit mask in memory + #[allow(unused)] fn size(&self) -> usize; /// The number of elements that can be represented in the set. @@ -1394,12 +1395,14 @@ /// Another way to put it is that it is the highest integer `C` such that /// the set is guaranteed to always be a subset of the integer range /// `[0, C)` + #[allow(unused)] fn capacity(&self) -> usize; /// Declare `n` to belong to the set fn add(&mut self, n: usize); /// Declare `n` not to belong to the set + #[allow(unused)] fn discard(&mut self, n: usize); /// Replace this bit set by its union with other @@ -1749,6 +1752,9 @@ } #[cfg(test)] +pub use tests::IndexEntryBuilder; + +#[cfg(test)] mod tests { use super::*; use crate::node::NULL_NODE; @@ -2027,6 +2033,3 @@ assert_eq!(get_version(&bytes), 2) } } - -#[cfg(test)] -pub use tests::IndexEntryBuilder;
--- a/rust/hg-core/src/revlog/node.rs Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/hg-core/src/revlog/node.rs Thu Aug 01 16:42:38 2024 +0200 @@ -83,7 +83,7 @@ #[inline] fn try_from(bytes: &'a [u8]) -> Result<Self, Self::Error> { match Node::from_bytes(bytes) { - Ok((node, rest)) if rest.is_empty() => Ok(node), + Ok((node, [])) => Ok(node), _ => Err(()), } } @@ -323,6 +323,9 @@ } #[cfg(test)] +pub use tests::hex_pad_right; + +#[cfg(test)] mod tests { use super::*; @@ -428,6 +431,3 @@ assert_eq!(prefix.first_different_nybble(&node), None); } } - -#[cfg(test)] -pub use tests::hex_pad_right;
--- a/rust/hg-core/tests/test_missing_ancestors.rs Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/hg-core/tests/test_missing_ancestors.rs Thu Aug 01 16:42:38 2024 +0200 @@ -69,6 +69,7 @@ ancs } +#[allow(unused)] // Useful when debugging #[derive(Clone, Debug)] enum MissingAncestorsAction { InitialBases(HashSet<Revision>),
--- a/rust/hg-cpython/src/ancestors.rs Thu Aug 01 16:34:37 2024 +0200 +++ b/rust/hg-cpython/src/ancestors.rs Thu Aug 01 16:42:38 2024 +0200 @@ -19,8 +19,8 @@ //! `mercurial.ancestor.incrementalmissingancestors`. //! //! API differences: -//! + it is instantiated with a C `parsers.index` -//! instance instead of a parents function. +//! + it is instantiated with a C `parsers.index` instance instead of a +//! parents function. //! + `MissingAncestors.bases` is a method returning a tuple instead of //! a set-valued attribute. We could return a Python set easily if our //! [PySet PR](https://github.com/dgrunwald/rust-cpython/pull/165)
--- a/setup.py Thu Aug 01 16:34:37 2024 +0200 +++ b/setup.py Thu Aug 01 16:42:38 2024 +0200 @@ -463,7 +463,6 @@ class hgbuildmo(build): - description = "build translations (.mo files)" def run(self): @@ -1056,7 +1055,6 @@ class hginstall(install): - user_options = install.user_options + [ ( 'old-and-unmanageable',
--- a/tests/dumbhttp.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/dumbhttp.py Thu Aug 01 16:42:38 2024 +0200 @@ -26,7 +26,6 @@ class simplehttpserver(httpserver.httpserver): address_family = socket.AF_INET6 - else: simplehttpserver = httpserver.httpserver
--- a/tests/dummysmtpd.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/dummysmtpd.py Thu Aug 01 16:42:38 2024 +0200 @@ -31,8 +31,15 @@ def mocksmtpserversession(conn, addr): conn.send(b'220 smtp.example.com ESMTP\r\n') - line = conn.recv(1024) + try: + # Newer versions of OpenSSL raise on EOF + line = conn.recv(1024) + except ssl.SSLError: + log('no hello: EOF\n') + return + if not line.lower().startswith(b'ehlo '): + # Older versions of OpenSSl don't raise log('no hello: %s\n' % line) return
--- a/tests/fsmonitor-run-tests.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/fsmonitor-run-tests.py Thu Aug 01 16:42:38 2024 +0200 @@ -30,7 +30,6 @@ def _sys2bytes(p): return p.encode('utf-8') - elif sys.version_info >= (3, 0, 0): print( '%s is only supported on Python 3.5+ and 2.7, not %s'
--- a/tests/generate-working-copy-states.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/generate-working-copy-states.py Thu Aug 01 16:42:38 2024 +0200 @@ -33,6 +33,7 @@ import os import sys + # Generates pairs of (filename, contents), where 'contents' is a list # describing the file's content at each revision (or in the working copy). # At each revision, it is either None or the file's actual content. When not
--- a/tests/hghave.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/hghave.py Thu Aug 01 16:42:38 2024 +0200 @@ -1,4 +1,3 @@ -import distutils.version import os import re import socket @@ -7,6 +6,11 @@ import sys import tempfile +try: + from setuptools.extern.packaging.version import Version +except ImportError: + from distutils.version import StrictVersion as Version + tempprefix = 'hg-hghave-' checks = { @@ -1113,28 +1117,30 @@ return matchoutput('emacs --version', b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)') -@check('black', 'the black formatter for python (>= 20.8b1)') +@check('black', 'the black formatter for python >=23.3.0') def has_black(): blackcmd = 'black --version' version_regex = b'black, (?:version )?([0-9a-b.]+)' version = matchoutput(blackcmd, version_regex) - sv = distutils.version.StrictVersion - return version and sv(_bytes2sys(version.group(1))) >= sv('20.8b1') + if not version: + return False + return Version(_bytes2sys(version.group(1))) >= Version('23.3.0') @check('pytype', 'the pytype type checker') def has_pytype(): pytypecmd = 'pytype --version' version = matchoutput(pytypecmd, b'[0-9a-b.]+') - sv = distutils.version.StrictVersion - return version and sv(_bytes2sys(version.group(0))) >= sv('2019.10.17') + if not version: + return False + return Version(_bytes2sys(version.group(0))) >= Version('2019.10.17') -@check("rustfmt", "rustfmt tool at version nightly-2021-11-02") +@check("rustfmt", "rustfmt tool at version nightly-2024-07-16") def has_rustfmt(): # We use Nightly's rustfmt due to current unstable config options. return matchoutput( - '`rustup which --toolchain nightly-2021-11-02 rustfmt` --version', + '`rustup which --toolchain nightly-2024-07-16 rustfmt` --version', b'rustfmt', )
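Editor's note: hghave drops `distutils.version` (gone along with distutils in Python 3.12) in favor of the `packaging` `Version` vendored inside setuptools, keeping the old class only as a fallback. A sketch of the resulting version gate, under the assumption that one of the two imports is available; the threshold values are just examples:

try:
    # packaging as vendored by setuptools; avoids a separate dependency
    from setuptools.extern.packaging.version import Version
except ImportError:
    from distutils.version import StrictVersion as Version

def new_enough(found: str, wanted: str) -> bool:
    return Version(found) >= Version(wanted)

print(new_enough("23.3.0", "20.8b1"))   # True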
--- a/tests/killdaemons.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/killdaemons.py Thu Aug 01 16:42:38 2024 +0200 @@ -75,7 +75,6 @@ raise _check(ctypes.windll.kernel32.CloseHandle(handle)) - else: def kill(pid, logfn, tryhard=True):
--- a/tests/mockblackbox.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/mockblackbox.py Thu Aug 01 16:42:38 2024 +0200 @@ -1,5 +1,6 @@ from mercurial.utils import procutil + # XXX: we should probably offer a devel option to do this in blackbox directly def getuser(): return b'bob'
--- a/tests/run-tests.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/run-tests.py Thu Aug 01 16:42:38 2024 +0200 @@ -223,6 +223,7 @@ # For Windows support wifexited = getattr(os, "WIFEXITED", lambda x: False) + # Whether to use IPv6 def checksocketfamily(name, port=20058): """return true if we can listen on localhost using family=name @@ -3397,7 +3398,6 @@ os.path.basename(t).startswith(b'test-') and (t.endswith(b'.py') or t.endswith(b'.t')) ): - m = testcasepattern.match(os.path.basename(t)) if m is not None: t_basename, casestr = m.groups()
--- a/tests/test-ancestor.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-ancestor.py Thu Aug 01 16:42:38 2024 +0200 @@ -87,6 +87,7 @@ testcount = 10 inccount = 10 nerrs = [0] + # the default mu and sigma give us a nice distribution of mostly # single-digit counts (including 0) with some higher ones def lognormrandom(mu, sigma):
--- a/tests/test-batching.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-batching.py Thu Aug 01 16:42:38 2024 +0200 @@ -55,7 +55,6 @@ # usage of "thing" interface def use(it): - # Direct call to base method shared between client and server. bprint(it.hello()) @@ -106,6 +105,7 @@ # server side + # equivalent of wireproto's global functions class server: def __init__(self, local): @@ -156,6 +156,7 @@ # local side + # equivalent of wireproto.encode/decodelist, that is, type-specific marshalling # here we just transform the strings a bit to check we're properly en-/decoding def mangle(s):
--- a/tests/test-cbor.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-cbor.py Thu Aug 01 16:42:38 2024 +0200 @@ -216,11 +216,11 @@ for size in lens: if size < 24: hlen = 1 - elif size < 2 ** 8: + elif size < 2**8: hlen = 2 - elif size < 2 ** 16: + elif size < 2**16: hlen = 3 - elif size < 2 ** 32: + elif size < 2**32: hlen = 5 else: assert False @@ -487,7 +487,7 @@ ) def testdecodepartialushort(self): - encoded = b''.join(cborutil.streamencode(2 ** 15)) + encoded = b''.join(cborutil.streamencode(2**15)) self.assertEqual( cborutil.decodeitem(encoded[0:1]), @@ -499,7 +499,7 @@ ) self.assertEqual( cborutil.decodeitem(encoded[0:5]), - (True, 2 ** 15, 3, cborutil.SPECIAL_NONE), + (True, 2**15, 3, cborutil.SPECIAL_NONE), ) def testdecodepartialshort(self): @@ -519,7 +519,7 @@ ) def testdecodepartialulong(self): - encoded = b''.join(cborutil.streamencode(2 ** 28)) + encoded = b''.join(cborutil.streamencode(2**28)) self.assertEqual( cborutil.decodeitem(encoded[0:1]), @@ -539,7 +539,7 @@ ) self.assertEqual( cborutil.decodeitem(encoded[0:5]), - (True, 2 ** 28, 5, cborutil.SPECIAL_NONE), + (True, 2**28, 5, cborutil.SPECIAL_NONE), ) def testdecodepartiallong(self): @@ -567,7 +567,7 @@ ) def testdecodepartialulonglong(self): - encoded = b''.join(cborutil.streamencode(2 ** 32)) + encoded = b''.join(cborutil.streamencode(2**32)) self.assertEqual( cborutil.decodeitem(encoded[0:1]), @@ -603,7 +603,7 @@ ) self.assertEqual( cborutil.decodeitem(encoded[0:9]), - (True, 2 ** 32, 9, cborutil.SPECIAL_NONE), + (True, 2**32, 9, cborutil.SPECIAL_NONE), ) with self.assertRaisesRegex(
--- a/tests/test-check-rust-format.t Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-check-rust-format.t Thu Aug 01 16:42:38 2024 +0200 @@ -5,7 +5,7 @@ $ cd "$TESTDIR"/.. Warning: Keep this in sync with hghave.py - $ RUSTFMT=$(rustup which --toolchain nightly-2021-11-02 rustfmt) + $ RUSTFMT=$(rustup which --toolchain nightly-2024-07-16 rustfmt) $ for f in `testrepohg files 'glob:**/*.rs'` ; do - > $RUSTFMT --check --edition=2018 --unstable-features --color=never $f + > $RUSTFMT --check --edition=2021 --unstable-features --color=never $f > done
--- a/tests/test-config-env.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-config-env.py Thu Aug 01 16:42:38 2024 +0200 @@ -15,6 +15,7 @@ testtmp = encoding.environ[b'TESTTMP'] + # prepare hgrc files def join(name): return os.path.join(testtmp, name) @@ -26,6 +27,7 @@ with open(join(b'userrc'), 'wb') as f: f.write(b'[ui]\neditor=e1') + # replace rcpath functions so they point to the files above def systemrcpath(): return [join(b'sysrc')] @@ -40,6 +42,7 @@ rcutil.systemrcpath = systemrcpath rcutil.userrcpath = userrcpath + # utility to print configs def printconfigs(env): encoding.environ = env
--- a/tests/test-demandimport.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-demandimport.py Thu Aug 01 16:42:38 2024 +0200 @@ -31,18 +31,6 @@ except ImportError: moduletype = types.ModuleType -if os.name != 'nt': - try: - import distutils.msvc9compiler - - print( - 'distutils.msvc9compiler needs to be an immediate ' - 'importerror on non-windows platforms' - ) - distutils.msvc9compiler - except ImportError: - pass - import re rsub = re.sub
--- a/tests/test-doctest.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-doctest.py Thu Aug 01 16:42:38 2024 +0200 @@ -21,9 +21,11 @@ r'''^mercurial\.\w+\.(\w+): (['"])(.*?)\2''', r'\1: \3', got2, - re.MULTILINE, + flags=re.MULTILINE, ) - got2 = re.sub(r'^mercurial\.\w+\.(\w+): ', r'\1: ', got2, re.MULTILINE) + got2 = re.sub( + r'^mercurial\.\w+\.(\w+): ', r'\1: ', got2, flags=re.MULTILINE + ) return any( doctest.OutputChecker.check_output(self, w, g, optionflags) for w, g in [(want, got), (want2, got2)]
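Editor's note: the test-doctest.py fix matters because the fourth positional argument of `re.sub` is `count`, not `flags`; passing `re.MULTILINE` there silently capped the number of substitutions instead of enabling multi-line anchoring. A tiny demonstration of the difference:

import re

text = "a: 1\nb: 2\nc: 3\n"
# positional: re.MULTILINE (== 8) is taken as count, and '^' only matches at
# the very start of the string, so just the first line gets rewritten
wrong = re.sub(r'^(\w+): ', r'\1=', text, re.MULTILINE)
# keyword: every line is anchored and rewritten
right = re.sub(r'^(\w+): ', r'\1=', text, flags=re.MULTILINE)
print(wrong)  # a=1 / b: 2 / c: 3
print(right)  # a=1 / b=2 / c=3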
--- a/tests/test-extensions-wrapfunction.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-extensions-wrapfunction.py Thu Aug 01 16:42:38 2024 +0200 @@ -66,6 +66,7 @@ print('context manager', dummy.getstack()) print('context manager', dummy.getstack()) + # Wrap callable object which has no __name__ class callableobj: def __call__(self):
--- a/tests/test-hg-parseurl.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-hg-parseurl.py Thu Aug 01 16:42:38 2024 +0200 @@ -5,7 +5,6 @@ class ParseRequestTests(unittest.TestCase): def testparse(self): - self.assertEqual( urlutil.parseurl(b'http://example.com/no/anchor'), (b'http://example.com/no/anchor', (None, [])),
--- a/tests/test-hybridencode.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-hybridencode.py Thu Aug 01 16:42:38 2024 +0200 @@ -5,7 +5,6 @@ class hybridencodetests(unittest.TestCase): def hybridencode(self, input, want): - # Check the C implementation if it's in use got = store._pathencode(input) self.assertEqual(want, got)
--- a/tests/test-install.t Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-install.t Thu Aug 01 16:42:38 2024 +0200 @@ -216,6 +216,9 @@ Failed building wheel for mercurial (?) WARNING: You are using pip version *; however, version * is available. (glob) (?) You should consider upgrading via the '$TESTTMP/installenv/bin/python* -m pip install --upgrade pip' command. (glob) (?) + (?) + [notice] A new release of pip is available: * -> * (glob) (?) + [notice] To update, run: python -m pip install --upgrade pip (?) $ ./installenv/*/hg debuginstall || cat pip.log checking encoding (ascii)... checking Python executable (*) (glob)
--- a/tests/test-parseindex2.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-parseindex2.py Thu Aug 01 16:42:38 2024 +0200 @@ -26,6 +26,7 @@ parsers = policy.importmod('parsers') + # original python implementation def gettype(q): return int(q & 0xFFFF)
--- a/tests/test-patchbomb-tls.t Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-patchbomb-tls.t Thu Aug 01 16:42:38 2024 +0200 @@ -98,7 +98,7 @@ $ wait_log "no hello:" $ cat ../log connection from * (glob) - no hello: b'' + no hello: (b''|EOF) (re) $ : > ../log With global certificates:
--- a/tests/test-remotefilelog-datapack.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-remotefilelog-datapack.py Thu Aug 01 16:42:38 2024 +0200 @@ -186,7 +186,7 @@ content = b'put-something-here \n' * i node = self.getHash(content) meta = { - constants.METAKEYFLAG: i ** 4, + constants.METAKEYFLAG: i**4, constants.METAKEYSIZE: len(content), b'Z': b'random_string', b'_': b'\0' * i,
--- a/tests/test-remotefilelog-histpack.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-remotefilelog-histpack.py Thu Aug 01 16:42:38 2024 +0200 @@ -177,7 +177,7 @@ pack = self.createPack(revisions) # Verify the pack contents - for (filename, node) in allentries: + for filename, node in allentries: ancestors = pack.getancestors(filename, node) self.assertEqual(ancestorcounts[(filename, node)], len(ancestors)) for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
--- a/tests/test-revlog-mmapindex.t Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-revlog-mmapindex.t Thu Aug 01 16:42:38 2024 +0200 @@ -36,7 +36,9 @@ > EOF mmap index which is now more than 4k long - $ hg log -l 5 -T '{rev}\n' --config experimental.mmapindexthreshold=4k + $ hg log -l 5 -T '{rev}\n' \ + > --config storage.revlog.mmap.index=yes \ + > --config storage.revlog.mmap.index:size-threshold=4k mmapping $TESTTMP/a/.hg/store/00changelog.i (no-pure !) mmapping $TESTTMP/a/.hg/store/00changelog-????????.nd (glob) (rust !) 100 @@ -46,7 +48,9 @@ 96 do not mmap index which is still less than 32k - $ hg log -l 5 -T '{rev}\n' --config experimental.mmapindexthreshold=32k + $ hg log -l 5 -T '{rev}\n' \ + > --config storage.revlog.mmap.index=yes \ + > --config storage.revlog.mmap.index:size-threshold=32k mmapping $TESTTMP/a/.hg/store/00changelog-????????.nd (glob) (rust !) 100 99
--- a/tests/test-revlog-raw.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-revlog-raw.py Thu Aug 01 16:42:38 2024 +0200 @@ -246,7 +246,7 @@ That is to say, given any x, y where both x, and y are in range(2 ** n), there is an x followed immediately by y in the generated sequence. """ - m = 2 ** n + m = 2**n # Gray Code. See https://en.wikipedia.org/wiki/Gray_code gray = lambda x: x ^ (x >> 1)
--- a/tests/test-simplemerge.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-simplemerge.py Thu Aug 01 16:42:38 2024 +0200 @@ -24,6 +24,8 @@ from mercurial.utils import stringutil TestCase = unittest.TestCase + + # bzr compatible interface, for the tests class Merge3(simplemerge.Merge3Text): """3-way merge of texts.
--- a/tests/test-symlink-os-yes-fs-no.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-symlink-os-yes-fs-no.py Thu Aug 01 16:42:38 2024 +0200 @@ -30,6 +30,7 @@ time.sleep(1) commands.status(u, repo) + # now disable symlink support -- this is what os.symlink would do on a # non-symlink file system def symlink_failure(src, dst):
--- a/tests/test-ui-color.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-ui-color.py Thu Aug 01 16:42:38 2024 +0200 @@ -25,6 +25,7 @@ # we're not interested in the output, so write that to devnull ui_.fout = open(os.devnull, 'wb') + # call some arbitrary command just so we go through # color's wrapped _runcommand twice. def runcmd():
--- a/tests/test-verify-repo-operations.py Thu Aug 01 16:34:37 2024 +0200 +++ b/tests/test-verify-repo-operations.py Thu Aug 01 16:42:38 2024 +0200 @@ -615,8 +615,8 @@ settings( timeout=-1, stateful_step_count=1000, - max_examples=10 ** 8, - max_iterations=10 ** 8, + max_examples=10**8, + max_iterations=10**8, database=writeonlydatabase(settings.default.database), ), )