--- a/.gitlab/merge_request_templates/Default.md Wed Jan 04 12:06:07 2023 +0100
+++ b/.gitlab/merge_request_templates/Default.md Wed Jan 04 16:02:22 2023 +0100
@@ -1,5 +1,8 @@
/assign_reviewer @mercurial.review
+
+<!--
+
Welcome to the Mercurial Merge Request creation process:
* Set a simple title for your MR,
@@ -11,3 +14,5 @@
* https://www.mercurial-scm.org/wiki/ContributingChanges
* https://www.mercurial-scm.org/wiki/Heptapod
+
+-->
--- a/contrib/check-code.py Wed Jan 04 12:06:07 2023 +0100
+++ b/contrib/check-code.py Wed Jan 04 16:02:22 2023 +0100
@@ -372,10 +372,6 @@
),
(r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]', "wrong whitespace around ="),
(
- r'\([^()]*( =[^=]|[^<>!=]= )',
- "no whitespace around = for named parameters",
- ),
- (
r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$',
"don't use old-style two-argument raise, use Exception(message)",
),
--- a/contrib/check-pytype.sh Wed Jan 04 12:06:07 2023 +0100
+++ b/contrib/check-pytype.sh Wed Jan 04 16:02:22 2023 +0100
@@ -31,7 +31,6 @@
# mercurial/pure/parsers.py # [attribute-error]
# mercurial/repoview.py # [attribute-error]
# mercurial/testing/storage.py # tons of [attribute-error]
-# mercurial/ui.py # [attribute-error], [wrong-arg-types]
# mercurial/unionrepo.py # ui, svfs, unfiltered [attribute-error]
# mercurial/win32.py # [not-callable]
# mercurial/wireprotoframing.py # [unsupported-operands], [attribute-error], [import-error]
@@ -43,7 +42,7 @@
# TODO: include hgext and hgext3rd
-pytype -V 3.7 --keep-going --jobs auto mercurial \
+pytype -V 3.7 --keep-going --jobs auto doc/check-seclevel.py mercurial \
-x mercurial/bundlerepo.py \
-x mercurial/context.py \
-x mercurial/crecord.py \
@@ -64,9 +63,11 @@
-x mercurial/repoview.py \
-x mercurial/testing/storage.py \
-x mercurial/thirdparty \
- -x mercurial/ui.py \
-x mercurial/unionrepo.py \
-x mercurial/win32.py \
-x mercurial/wireprotoframing.py \
-x mercurial/wireprotov1peer.py \
-x mercurial/wireprotov1server.py
+
+echo 'pytype crashed while generating the following type stubs:'
+find .pytype/pyi -name '*.pyi' | xargs grep -l '# Caught error' | sort
--- a/contrib/fuzz/revlog.cc Wed Jan 04 12:06:07 2023 +0100
+++ b/contrib/fuzz/revlog.cc Wed Jan 04 16:02:22 2023 +0100
@@ -20,7 +20,7 @@
index, cache = parsers.parse_index2(data, inline)
index.slicechunktodensity(list(range(len(index))), 0.5, 262144)
index.stats()
- index.findsnapshots({}, 0)
+ index.findsnapshots({}, 0, len(index) - 1)
10 in index
for rev in range(len(index)):
index.reachableroots(0, [len(index)-1], [rev])
--- a/contrib/heptapod-ci.yml Wed Jan 04 12:06:07 2023 +0100
+++ b/contrib/heptapod-ci.yml Wed Jan 04 16:02:22 2023 +0100
@@ -89,7 +89,8 @@
- hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
- cd /tmp/mercurial-ci/
- make local PYTHON=$PYTHON
- - $PYTHON -m pip install --user -U libcst==0.3.20 pytype==2022.03.29
+ - $PYTHON -m pip install --user -U libcst==0.3.20 pytype==2022.11.18
+ - ./contrib/setup-pytype.sh
script:
- echo "Entering script section"
- sh contrib/check-pytype.sh
--- a/contrib/perf.py Wed Jan 04 12:06:07 2023 +0100
+++ b/contrib/perf.py Wed Jan 04 16:02:22 2023 +0100
@@ -997,11 +997,16 @@
timer, fm = gettimer(ui, opts)
try:
- from mercurial.utils.urlutil import get_unique_pull_path
-
- path = get_unique_pull_path(b'perfdiscovery', repo, ui, path)[0]
+ from mercurial.utils.urlutil import get_unique_pull_path_obj
+
+ path = get_unique_pull_path_obj(b'perfdiscovery', ui, path)
except ImportError:
- path = ui.expandpath(path)
+ try:
+ from mercurial.utils.urlutil import get_unique_pull_path
+
+ path = get_unique_pull_path(b'perfdiscovery', repo, ui, path)[0]
+ except ImportError:
+ path = ui.expandpath(path)
def s():
repos[1] = hg.peer(ui, opts, path)
@@ -1613,7 +1618,11 @@
b'default repository not configured!',
hint=b"see 'hg help config.paths'",
)
- dest = path.pushloc or path.loc
+ if util.safehasattr(path, 'main_path'):
+ path = path.get_push_variant()
+ dest = path.loc
+ else:
+ dest = path.pushloc or path.loc
ui.statusnoi18n(b'analysing phase of %s\n' % util.hidepassword(dest))
other = hg.peer(repo, opts, dest)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/setup-pytype.sh Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+set -e
+set -u
+
+# Find the python3 setup that would run pytype
+PYTYPE=`which pytype`
+PYTHON3=`head -n1 ${PYTYPE} | sed -s 's/#!//'`
+
+# Existing stubs that pytype processes live here
+TYPESHED=$(${PYTHON3} -c "import pytype; print(pytype.__path__[0])")/typeshed/stubs
+HG_STUBS=${TYPESHED}/mercurial
+
+echo "Patching typeshed at $HG_STUBS"
+
+rm -rf ${HG_STUBS}
+mkdir -p ${HG_STUBS}
+
+cat > ${HG_STUBS}/METADATA.toml <<EOF
+version = "0.1"
+EOF
+
+
+mkdir -p ${HG_STUBS}/mercurial/cext ${HG_STUBS}/mercurial/thirdparty/attr
+
+touch ${HG_STUBS}/mercurial/__init__.pyi
+touch ${HG_STUBS}/mercurial/cext/__init__.pyi
+touch ${HG_STUBS}/mercurial/thirdparty/__init__.pyi
+
+ln -sf $(hg root)/mercurial/cext/*.{pyi,typed} \
+ ${HG_STUBS}/mercurial/cext
+ln -sf $(hg root)/mercurial/thirdparty/attr/*.{pyi,typed} \
+ ${HG_STUBS}/mercurial/thirdparty/attr
--- a/doc/check-seclevel.py Wed Jan 04 12:06:07 2023 +0100
+++ b/doc/check-seclevel.py Wed Jan 04 16:02:22 2023 +0100
@@ -46,7 +46,7 @@
def checkseclevel(ui, doc, name, initlevel):
- ui.notenoi18n('checking "%s"\n' % name)
+ ui.notenoi18n(('checking "%s"\n' % name).encode('utf-8'))
if not isinstance(doc, bytes):
doc = doc.encode('utf-8')
blocks, pruned = minirst.parse(doc, 0, ['verbose'])
@@ -70,14 +70,18 @@
nextlevel = mark2level[mark]
if curlevel < nextlevel and curlevel + 1 != nextlevel:
ui.warnnoi18n(
- 'gap of section level at "%s" of %s\n' % (title, name)
+ ('gap of section level at "%s" of %s\n' % (title, name)).encode(
+ 'utf-8'
+ )
)
showavailables(ui, initlevel)
errorcnt += 1
continue
ui.notenoi18n(
- 'appropriate section level for "%s %s"\n'
- % (mark * (nextlevel * 2), title)
+ (
+ 'appropriate section level for "%s %s"\n'
+ % (mark * (nextlevel * 2), title)
+ ).encode('utf-8')
)
curlevel = nextlevel
@@ -90,7 +94,9 @@
name = k.split(b"|")[0].lstrip(b"^")
if not entry[0].__doc__:
ui.notenoi18n(
- 'skip checking %s: no help document\n' % (namefmt % name)
+ (
+ 'skip checking %s: no help document\n' % (namefmt % name)
+ ).encode('utf-8')
)
continue
errorcnt += checkseclevel(
@@ -117,7 +123,9 @@
mod = extensions.load(ui, name, None)
if not mod.__doc__:
ui.notenoi18n(
- 'skip checking %s extension: no help document\n' % name
+ (
+ 'skip checking %s extension: no help document\n' % name
+ ).encode('utf-8')
)
continue
errorcnt += checkseclevel(
@@ -144,7 +152,9 @@
doc = fp.read()
ui.notenoi18n(
- 'checking input from %s with initlevel %d\n' % (filename, initlevel)
+ (
+ 'checking input from %s with initlevel %d\n' % (filename, initlevel)
+ ).encode('utf-8')
)
return checkseclevel(ui, doc, 'input from %s' % filename, initlevel)
--- a/hgdemandimport/demandimportpy3.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgdemandimport/demandimportpy3.py Wed Jan 04 16:02:22 2023 +0100
@@ -23,8 +23,6 @@
enabled.
"""
-# This line is unnecessary, but it satisfies test-check-py3-compat.t.
-
import contextlib
import importlib.util
import sys
@@ -43,6 +41,10 @@
"""Make the module load lazily."""
with tracing.log('demandimport %s', module):
if _deactivated or module.__name__ in ignores:
+ # Reset the loader on the module as super() does (issue6725)
+ module.__spec__.loader = self.loader
+ module.__loader__ = self.loader
+
self.loader.exec_module(module)
else:
super().exec_module(module)
--- a/hgext/amend.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/amend.py Wed Jan 04 16:02:22 2023 +0100
@@ -46,6 +46,7 @@
_(b'mark a branch as closed, hiding it from the branch list'),
),
(b's', b'secret', None, _(b'use the secret phase for committing')),
+ (b'', b'draft', None, _(b'use the draft phase for committing')),
(b'n', b'note', b'', _(b'store a note on the amend')),
]
+ cmdutil.walkopts
@@ -64,6 +65,7 @@
See :hg:`help commit` for more details.
"""
+ cmdutil.check_at_most_one_arg(opts, 'draft', 'secret')
cmdutil.check_note_size(opts)
with repo.wlock(), repo.lock():
--- a/hgext/fastannotate/protocol.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/fastannotate/protocol.py Wed Jan 04 16:02:22 2023 +0100
@@ -151,8 +151,11 @@
ui = repo.ui
remotedest = ui.config(b'fastannotate', b'remotepath', b'default')
- r = urlutil.get_unique_pull_path(b'fastannotate', repo, ui, remotedest)
- remotepath = r[0]
+ remotepath = urlutil.get_unique_pull_path_obj(
+ b'fastannotate',
+ ui,
+ remotedest,
+ )
peer = hg.peer(ui, {}, remotepath)
try:
--- a/hgext/fetch.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/fetch.py Wed Jan 04 16:02:22 2023 +0100
@@ -108,9 +108,9 @@
)
)
- path = urlutil.get_unique_pull_path(b'fetch', repo, ui, source)[0]
+ path = urlutil.get_unique_pull_path_obj(b'fetch', ui, source)
other = hg.peer(repo, opts, path)
- ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path))
+ ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path.loc))
revs = None
if opts[b'rev']:
try:
--- a/hgext/fsmonitor/pywatchman/__init__.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/fsmonitor/pywatchman/__init__.py Wed Jan 04 16:02:22 2023 +0100
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# no unicode literals
-
import inspect
import math
import os
--- a/hgext/fsmonitor/pywatchman/capabilities.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/fsmonitor/pywatchman/capabilities.py Wed Jan 04 16:02:22 2023 +0100
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# no unicode literals
-
def parse_version(vstr):
res = 0
--- a/hgext/fsmonitor/pywatchman/compat.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/fsmonitor/pywatchman/compat.py Wed Jan 04 16:02:22 2023 +0100
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# no unicode literals
-
import sys
--- a/hgext/fsmonitor/pywatchman/encoding.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/fsmonitor/pywatchman/encoding.py Wed Jan 04 16:02:22 2023 +0100
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# no unicode literals
-
import sys
from . import compat
--- a/hgext/fsmonitor/pywatchman/load.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/fsmonitor/pywatchman/load.py Wed Jan 04 16:02:22 2023 +0100
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# no unicode literals
-
import ctypes
--- a/hgext/fsmonitor/pywatchman/pybser.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/fsmonitor/pywatchman/pybser.py Wed Jan 04 16:02:22 2023 +0100
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# no unicode literals
-
import binascii
import collections
import ctypes
--- a/hgext/histedit.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/histedit.py Wed Jan 04 16:02:22 2023 +0100
@@ -1051,12 +1051,11 @@
if opts is None:
opts = {}
path = urlutil.get_unique_push_path(b'histedit', repo, ui, remote)
- dest = path.pushloc or path.loc
-
- ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest))
+
+ ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
revs, checkout = hg.addbranchrevs(repo, repo, (path.branch, []), None)
- other = hg.peer(repo, opts, dest)
+ other = hg.peer(repo, opts, path)
if revs:
revs = [repo.lookup(rev) for rev in revs]
--- a/hgext/infinitepush/__init__.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/infinitepush/__init__.py Wed Jan 04 16:02:22 2023 +0100
@@ -683,12 +683,10 @@
def _pull(orig, ui, repo, source=b"default", **opts):
opts = pycompat.byteskwargs(opts)
# Copy paste from `pull` command
- source, branches = urlutil.get_unique_pull_path(
+ path = urlutil.get_unique_pull_path_obj(
b"infinite-push's pull",
- repo,
ui,
source,
- default_branches=opts.get(b'branch'),
)
scratchbookmarks = {}
@@ -709,7 +707,7 @@
bookmarks.append(bookmark)
if scratchbookmarks:
- other = hg.peer(repo, opts, source)
+ other = hg.peer(repo, opts, path)
try:
fetchedbookmarks = other.listkeyspatterns(
b'bookmarks', patterns=scratchbookmarks
@@ -734,14 +732,14 @@
try:
# Remote scratch bookmarks will be deleted because remotenames doesn't
# know about them. Let's save it before pull and restore after
- remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, source)
- result = orig(ui, repo, source, **pycompat.strkwargs(opts))
+ remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, path.loc)
+ result = orig(ui, repo, path.loc, **pycompat.strkwargs(opts))
# TODO(stash): race condition is possible
# if scratch bookmarks was updated right after orig.
# But that's unlikely and shouldn't be harmful.
if common.isremotebooksenabled(ui):
remotescratchbookmarks.update(scratchbookmarks)
- _saveremotebookmarks(repo, remotescratchbookmarks, source)
+ _saveremotebookmarks(repo, remotescratchbookmarks, path.loc)
else:
_savelocalbookmarks(repo, scratchbookmarks)
return result
@@ -849,14 +847,14 @@
raise error.Abort(msg)
path = paths[0]
- destpath = path.pushloc or path.loc
+ destpath = path.loc
# Remote scratch bookmarks will be deleted because remotenames doesn't
# know about them. Let's save it before push and restore after
remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, destpath)
result = orig(ui, repo, *dests, **pycompat.strkwargs(opts))
if common.isremotebooksenabled(ui):
if bookmark and scratchpush:
- other = hg.peer(repo, opts, destpath)
+ other = hg.peer(repo, opts, path)
try:
fetchedbookmarks = other.listkeyspatterns(
b'bookmarks', patterns=[bookmark]
--- a/hgext/largefiles/storefactory.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/largefiles/storefactory.py Wed Jan 04 16:02:22 2023 +0100
@@ -36,22 +36,23 @@
b'lfpullsource', repo, ui, lfpullsource
)
else:
- path, _branches = urlutil.get_unique_pull_path(
- b'lfpullsource', repo, ui, lfpullsource
+ path = urlutil.get_unique_pull_path_obj(
+ b'lfpullsource', ui, lfpullsource
)
# XXX we should not explicitly pass b'default', as this will result in
# b'default' being returned if no `paths.default` was defined. We
# should explicitely handle the lack of value instead.
if repo is None:
- path, _branches = urlutil.get_unique_pull_path(
- b'lfs', repo, ui, b'default'
+ path = urlutil.get_unique_pull_path_obj(
+ b'lfs',
+ ui,
+ b'default',
)
remote = hg.peer(repo or ui, {}, path)
- elif path == b'default-push' or path == b'default':
+ elif path.loc == b'default-push' or path.loc == b'default':
remote = repo
else:
- path, _branches = urlutil.parseurl(path)
remote = hg.peer(repo or ui, {}, path)
# The path could be a scheme so use Mercurial's normal functionality
--- a/hgext/lfs/blobstore.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/lfs/blobstore.py Wed Jan 04 16:02:22 2023 +0100
@@ -168,12 +168,16 @@
# producing the response (but the server has no way of telling us
# that), and we really don't need to try to write the response to
# the localstore, because it's not going to match the expected.
+ # The server also uses this method to store data uploaded by the
+ # client, so if this happens on the server side, it's possible
+ # that the client crashed or an antivirus interfered with the
+ # upload.
if content_length is not None and int(content_length) != size:
msg = (
b"Response length (%d) does not match Content-Length "
- b"header (%d): likely server-side crash"
+ b"header (%d) for %s"
)
- raise LfsRemoteError(_(msg) % (size, int(content_length)))
+ raise LfsRemoteError(_(msg) % (size, int(content_length), oid))
realoid = hex(sha256.digest())
if realoid != oid:
--- a/hgext/mq.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/mq.py Wed Jan 04 16:02:22 2023 +0100
@@ -2854,16 +2854,17 @@
# main repo (destination and sources)
if dest is None:
dest = hg.defaultdest(source)
- __, source_path, __ = urlutil.get_clone_path(ui, source)
+ source_path = urlutil.get_clone_path_obj(ui, source)
sr = hg.peer(ui, opts, source_path)
# patches repo (source only)
if opts.get(b'patches'):
- __, patchespath, __ = urlutil.get_clone_path(ui, opts.get(b'patches'))
+ patches_path = urlutil.get_clone_path_obj(ui, opts.get(b'patches'))
else:
- patchespath = patchdir(sr)
+ # XXX path: we should turn this into a path object
+ patches_path = patchdir(sr)
try:
- hg.peer(ui, opts, patchespath)
+ hg.peer(ui, opts, patches_path)
except error.RepoError:
raise error.Abort(
_(b'versioned patch repository not found (see init --mq)')
--- a/hgext/narrow/narrowcommands.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/narrow/narrowcommands.py Wed Jan 04 16:02:22 2023 +0100
@@ -416,7 +416,7 @@
repo, trmanager.transaction, source=b'widen'
)
# TODO: we should catch error.Abort here
- bundle2.processbundle(repo, bundle, op=op)
+ bundle2.processbundle(repo, bundle, op=op, remote=remote)
if ellipsesremote:
with ds.parentchange():
@@ -606,10 +606,9 @@
# Find the revisions we have in common with the remote. These will
# be used for finding local-only changes for narrowing. They will
# also define the set of revisions to update for widening.
- r = urlutil.get_unique_pull_path(b'tracked', repo, ui, remotepath)
- url, branches = r
- ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url))
- remote = hg.peer(repo, opts, url)
+ path = urlutil.get_unique_pull_path_obj(b'tracked', ui, remotepath)
+ ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
+ remote = hg.peer(repo, opts, path)
try:
# check narrow support before doing anything if widening needs to be
--- a/hgext/narrow/narrowrepo.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/narrow/narrowrepo.py Wed Jan 04 16:02:22 2023 +0100
@@ -19,8 +19,8 @@
dirstate = super(narrowrepository, self)._makedirstate()
return narrowdirstate.wrapdirstate(self, dirstate)
- def peer(self):
- peer = super(narrowrepository, self).peer()
+ def peer(self, path=None):
+ peer = super(narrowrepository, self).peer(path=path)
peer._caps.add(wireprototypes.NARROWCAP)
peer._caps.add(wireprototypes.ELLIPSESCAP)
return peer
--- a/hgext/relink.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/relink.py Wed Jan 04 16:02:22 2023 +0100
@@ -67,8 +67,8 @@
if origin is None and b'default-relink' in ui.paths:
origin = b'default-relink'
- path, __ = urlutil.get_unique_pull_path(b'relink', repo, ui, origin)
- src = hg.repository(repo.baseui, path)
+ path = urlutil.get_unique_pull_path_obj(b'relink', ui, origin)
+ src = hg.repository(repo.baseui, path.loc)
ui.status(_(b'relinking %s to %s\n') % (src.store.path, repo.store.path))
if repo.root == src.root:
ui.status(_(b'there is nothing to relink\n'))
--- a/hgext/remotefilelog/remotefilelog.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/remotefilelog/remotefilelog.py Wed Jan 04 16:02:22 2023 +0100
@@ -299,6 +299,7 @@
deltaprevious=False,
deltamode=None,
sidedata_helpers=None,
+ debug_info=None,
):
# we don't use any of these parameters here
del nodesorder, revisiondata, assumehaveparentrevisions, deltaprevious
--- a/hgext/schemes.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/schemes.py Wed Jan 04 16:02:22 2023 +0100
@@ -80,9 +80,25 @@
def __repr__(self):
return b'<ShortRepository: %s>' % self.scheme
+ def make_peer(self, ui, path, *args, **kwargs):
+ new_url = self.resolve(path.rawloc)
+ path = path.copy(new_raw_location=new_url)
+ cls = hg.peer_schemes.get(path.url.scheme)
+ if cls is not None:
+ return cls.make_peer(ui, path, *args, **kwargs)
+ return None
+
def instance(self, ui, url, create, intents=None, createopts=None):
url = self.resolve(url)
- return hg._peerlookup(url).instance(
+ u = urlutil.url(url)
+ scheme = u.scheme or b'file'
+ if scheme in hg.peer_schemes:
+ cls = hg.peer_schemes[scheme]
+ elif scheme in hg.repo_schemes:
+ cls = hg.repo_schemes[scheme]
+ else:
+ cls = hg.LocalFactory
+ return cls.instance(
ui, url, create, intents=intents, createopts=createopts
)
@@ -119,24 +135,29 @@
}
+def _check_drive_letter(scheme):
+ """check if a scheme conflict with a Windows drive letter"""
+ if (
+ pycompat.iswindows
+ and len(scheme) == 1
+ and scheme.isalpha()
+ and os.path.exists(b'%s:\\' % scheme)
+ ):
+ msg = _(b'custom scheme %s:// conflicts with drive letter %s:\\\n')
+ msg %= (scheme, scheme.upper())
+ raise error.Abort(msg)
+
+
def extsetup(ui):
schemes.update(dict(ui.configitems(b'schemes')))
t = templater.engine(templater.parse)
for scheme, url in schemes.items():
- if (
- pycompat.iswindows
- and len(scheme) == 1
- and scheme.isalpha()
- and os.path.exists(b'%s:\\' % scheme)
- ):
- raise error.Abort(
- _(
- b'custom scheme %s:// conflicts with drive '
- b'letter %s:\\\n'
- )
- % (scheme, scheme.upper())
- )
- hg.schemes[scheme] = ShortRepository(url, scheme, t)
+        _check_drive_letter(scheme)
+ url_scheme = urlutil.url(url).scheme
+ if url_scheme in hg.peer_schemes:
+ hg.peer_schemes[scheme] = ShortRepository(url, scheme, t)
+ else:
+ hg.repo_schemes[scheme] = ShortRepository(url, scheme, t)
extensions.wrapfunction(urlutil, b'hasdriveletter', hasdriveletter)
@@ -144,7 +165,11 @@
@command(b'debugexpandscheme', norepo=True)
def expandscheme(ui, url, **opts):
"""given a repo path, provide the scheme-expanded path"""
- repo = hg._peerlookup(url)
- if isinstance(repo, ShortRepository):
- url = repo.resolve(url)
+ scheme = urlutil.url(url).scheme
+ if scheme in hg.peer_schemes:
+ cls = hg.peer_schemes[scheme]
+ else:
+ cls = hg.repo_schemes.get(scheme)
+ if cls is not None and isinstance(cls, ShortRepository):
+ url = cls.resolve(url)
ui.write(url + b'\n')
--- a/hgext/sqlitestore.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/sqlitestore.py Wed Jan 04 16:02:22 2023 +0100
@@ -608,6 +608,7 @@
assumehaveparentrevisions=False,
deltamode=repository.CG_DELTAMODE_STD,
sidedata_helpers=None,
+ debug_info=None,
):
if nodesorder not in (b'nodes', b'storage', b'linear', None):
raise error.ProgrammingError(
--- a/hgext/transplant.py Wed Jan 04 12:06:07 2023 +0100
+++ b/hgext/transplant.py Wed Jan 04 16:02:22 2023 +0100
@@ -817,8 +817,8 @@
sourcerepo = opts.get(b'source')
if sourcerepo:
- u = urlutil.get_unique_pull_path(b'transplant', repo, ui, sourcerepo)[0]
- peer = hg.peer(repo, opts, u)
+ path = urlutil.get_unique_pull_path_obj(b'transplant', ui, sourcerepo)
+ peer = hg.peer(repo, opts, path)
heads = pycompat.maplist(peer.lookup, opts.get(b'branch', ()))
target = set(heads)
for r in revs:
--- a/mercurial/archival.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/archival.py Wed Jan 04 16:02:22 2023 +0100
@@ -154,9 +154,14 @@
)
self.fileobj = gzfileobj
return (
+ # taropen() wants Literal['a', 'r', 'w', 'x'] for the mode,
+ # but Literal[] is only available in 3.8+ without the
+ # typing_extensions backport.
+ # pytype: disable=wrong-arg-types
tarfile.TarFile.taropen( # pytype: disable=attribute-error
name, pycompat.sysstr(mode), gzfileobj
)
+ # pytype: enable=wrong-arg-types
)
else:
try:
--- a/mercurial/bundle2.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/bundle2.py Wed Jan 04 16:02:22 2023 +0100
@@ -315,8 +315,17 @@
* a way to construct a bundle response when applicable.
"""
- def __init__(self, repo, transactiongetter, captureoutput=True, source=b''):
+ def __init__(
+ self,
+ repo,
+ transactiongetter,
+ captureoutput=True,
+ source=b'',
+ remote=None,
+ ):
self.repo = repo
+ # the peer object who produced this bundle if available
+ self.remote = remote
self.ui = repo.ui
self.records = unbundlerecords()
self.reply = None
@@ -363,7 +372,7 @@
raise TransactionUnavailable()
-def applybundle(repo, unbundler, tr, source, url=None, **kwargs):
+def applybundle(repo, unbundler, tr, source, url=None, remote=None, **kwargs):
# transform me into unbundler.apply() as soon as the freeze is lifted
if isinstance(unbundler, unbundle20):
tr.hookargs[b'bundle2'] = b'1'
@@ -371,10 +380,12 @@
tr.hookargs[b'source'] = source
if url is not None and b'url' not in tr.hookargs:
tr.hookargs[b'url'] = url
- return processbundle(repo, unbundler, lambda: tr, source=source)
+ return processbundle(
+ repo, unbundler, lambda: tr, source=source, remote=remote
+ )
else:
# the transactiongetter won't be used, but we might as well set it
- op = bundleoperation(repo, lambda: tr, source=source)
+ op = bundleoperation(repo, lambda: tr, source=source, remote=remote)
_processchangegroup(op, unbundler, tr, source, url, **kwargs)
return op
@@ -450,7 +461,14 @@
)
-def processbundle(repo, unbundler, transactiongetter=None, op=None, source=b''):
+def processbundle(
+ repo,
+ unbundler,
+ transactiongetter=None,
+ op=None,
+ source=b'',
+ remote=None,
+):
"""This function process a bundle, apply effect to/from a repo
It iterates over each part then searches for and uses the proper handling
@@ -466,7 +484,12 @@
if op is None:
if transactiongetter is None:
transactiongetter = _notransaction
- op = bundleoperation(repo, transactiongetter, source=source)
+ op = bundleoperation(
+ repo,
+ transactiongetter,
+ source=source,
+ remote=remote,
+ )
# todo:
# - replace this is a init function soon.
# - exception catching
@@ -494,6 +517,10 @@
def _processchangegroup(op, cg, tr, source, url, **kwargs):
+ if op.remote is not None and op.remote.path is not None:
+ remote_path = op.remote.path
+ kwargs = kwargs.copy()
+ kwargs['delta_base_reuse_policy'] = remote_path.delta_reuse_policy
ret = cg.apply(op.repo, tr, source, url, **kwargs)
op.records.add(
b'changegroup',
@@ -1938,7 +1965,12 @@
raise error.Abort(
_(b'old bundle types only supports v1 changegroups')
)
+
+ # HG20 is the case without 2 values to unpack, but is handled above.
+ # pytype: disable=bad-unpacking
header, comp = bundletypes[bundletype]
+ # pytype: enable=bad-unpacking
+
if comp not in util.compengines.supportedbundletypes:
raise error.Abort(_(b'unknown stream compression type: %s') % comp)
compengine = util.compengines.forbundletype(comp)
--- a/mercurial/bundlecaches.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/bundlecaches.py Wed Jan 04 16:02:22 2023 +0100
@@ -5,6 +5,10 @@
import collections
+from typing import (
+ cast,
+)
+
from .i18n import _
from .thirdparty import attr
@@ -247,7 +251,7 @@
# required to apply it. If we see this metadata, compare against what the
# repo supports and error if the bundle isn't compatible.
if version == b'packed1' and b'requirements' in params:
- requirements = set(params[b'requirements'].split(b','))
+ requirements = set(cast(bytes, params[b'requirements']).split(b','))
missingreqs = requirements - requirementsmod.STREAM_FIXED_REQUIREMENTS
if missingreqs:
raise error.UnsupportedBundleSpecification(
--- a/mercurial/bundlerepo.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/bundlerepo.py Wed Jan 04 16:02:22 2023 +0100
@@ -458,8 +458,8 @@
def cancopy(self):
return False
- def peer(self):
- return bundlepeer(self)
+ def peer(self, path=None):
+ return bundlepeer(self, path=path)
def getcwd(self):
return encoding.getcwd() # always outside the repo
--- a/mercurial/cext/bdiff.pyi Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/cext/bdiff.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -5,7 +5,7 @@
version: int
-def bdiff(a: bytes, b: bytes): bytes
+def bdiff(a: bytes, b: bytes) -> bytes: ...
def blocks(a: bytes, b: bytes) -> List[Tuple[int, int, int, int]]: ...
def fixws(s: bytes, allws: bool) -> bytes: ...
def splitnewlines(text: bytes) -> List[bytes]: ...
--- a/mercurial/cext/osutil.pyi Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/cext/osutil.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -2,6 +2,7 @@
AnyStr,
IO,
List,
+ Optional,
Sequence,
)
@@ -15,7 +16,7 @@
st_mtime: int
st_ctime: int
-def listdir(path: bytes, st: bool, skip: bool) -> List[stat]: ...
+def listdir(path: bytes, st: bool, skip: Optional[bool]) -> List[stat]: ...
def posixfile(name: AnyStr, mode: bytes, buffering: int) -> IO: ...
def statfiles(names: Sequence[bytes]) -> List[stat]: ...
def setprocname(name: bytes) -> None: ...
--- a/mercurial/cext/parsers.c Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/cext/parsers.c Wed Jan 04 16:02:22 2023 +0100
@@ -177,7 +177,7 @@
(dirstate_flag_p1_tracked | dirstate_flag_p2_info));
}
-static inline bool dirstate_item_c_merged(dirstateItemObject *self)
+static inline bool dirstate_item_c_modified(dirstateItemObject *self)
{
return ((self->flags & dirstate_flag_wc_tracked) &&
(self->flags & dirstate_flag_p1_tracked) &&
@@ -195,7 +195,7 @@
{
if (dirstate_item_c_removed(self)) {
return 'r';
- } else if (dirstate_item_c_merged(self)) {
+ } else if (dirstate_item_c_modified(self)) {
return 'm';
} else if (dirstate_item_c_added(self)) {
return 'a';
@@ -642,9 +642,9 @@
}
};
-static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
+static PyObject *dirstate_item_get_modified(dirstateItemObject *self)
{
- if (dirstate_item_c_merged(self)) {
+ if (dirstate_item_c_modified(self)) {
Py_RETURN_TRUE;
} else {
Py_RETURN_FALSE;
@@ -709,7 +709,7 @@
NULL},
{"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
{"p2_info", (getter)dirstate_item_get_p2_info, NULL, "p2_info", NULL},
- {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
+ {"modified", (getter)dirstate_item_get_modified, NULL, "modified", NULL},
{"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
{"maybe_clean", (getter)dirstate_item_get_maybe_clean, NULL, "maybe_clean",
NULL},
@@ -1187,7 +1187,7 @@
void manifest_module_init(PyObject *mod);
void revlog_module_init(PyObject *mod);
-static const int version = 20;
+static const int version = 21;
static void module_init(PyObject *mod)
{
--- a/mercurial/cext/parsers.pyi Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/cext/parsers.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -76,3 +76,7 @@
def insert(self, rev: int) -> None: ...
def shortest(self, node: bytes) -> int: ...
+
+# The IndexObject type here is defined in C, and there's no type for a buffer
+# return, as of py3.11. https://github.com/python/typing/issues/593
+def parse_index2(data: object, inline: object, format: int = ...) -> Tuple[object, Optional[Tuple[int, object]]]: ...
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/py.typed Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,1 @@
+partial
--- a/mercurial/cext/revlog.c Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/cext/revlog.c Wed Jan 04 16:02:22 2023 +0100
@@ -1446,16 +1446,25 @@
static PyObject *index_findsnapshots(indexObject *self, PyObject *args)
{
Py_ssize_t start_rev;
+ Py_ssize_t end_rev;
PyObject *cache;
Py_ssize_t base;
Py_ssize_t rev;
PyObject *key = NULL;
PyObject *value = NULL;
const Py_ssize_t length = index_length(self);
- if (!PyArg_ParseTuple(args, "O!n", &PyDict_Type, &cache, &start_rev)) {
+ if (!PyArg_ParseTuple(args, "O!nn", &PyDict_Type, &cache, &start_rev,
+ &end_rev)) {
return NULL;
}
- for (rev = start_rev; rev < length; rev++) {
+ end_rev += 1;
+ if (end_rev > length) {
+ end_rev = length;
+ }
+ if (start_rev < 0) {
+ start_rev = 0;
+ }
+ for (rev = start_rev; rev < end_rev; rev++) {
int issnap;
PyObject *allvalues = NULL;
issnap = index_issnapshotrev(self, rev);
@@ -1480,7 +1489,7 @@
}
if (allvalues == NULL) {
int r;
- allvalues = PyList_New(0);
+ allvalues = PySet_New(0);
if (!allvalues) {
goto bail;
}
@@ -1491,7 +1500,7 @@
}
}
value = PyLong_FromSsize_t(rev);
- if (PyList_Append(allvalues, value)) {
+ if (PySet_Add(allvalues, value)) {
goto bail;
}
Py_CLEAR(key);
--- a/mercurial/cffi/bdiff.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/cffi/bdiff.py Wed Jan 04 16:02:22 2023 +0100
@@ -8,6 +8,11 @@
import struct
+from typing import (
+ List,
+ Tuple,
+)
+
from ..pure.bdiff import *
from . import _bdiff # pytype: disable=import-error
@@ -15,7 +20,7 @@
lib = _bdiff.lib
-def blocks(sa, sb):
+def blocks(sa: bytes, sb: bytes) -> List[Tuple[int, int, int, int]]:
a = ffi.new(b"struct bdiff_line**")
b = ffi.new(b"struct bdiff_line**")
ac = ffi.new(b"char[]", str(sa))
@@ -29,7 +34,7 @@
count = lib.bdiff_diff(a[0], an, b[0], bn, l)
if count < 0:
raise MemoryError
- rl = [None] * count
+ rl = [(0, 0, 0, 0)] * count
h = l.next
i = 0
while h:
@@ -43,7 +48,7 @@
return rl
-def bdiff(sa, sb):
+def bdiff(sa: bytes, sb: bytes) -> bytes:
a = ffi.new(b"struct bdiff_line**")
b = ffi.new(b"struct bdiff_line**")
ac = ffi.new(b"char[]", str(sa))
--- a/mercurial/cffi/mpatch.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/cffi/mpatch.py Wed Jan 04 16:02:22 2023 +0100
@@ -6,6 +6,8 @@
# GNU General Public License version 2 or any later version.
+from typing import List
+
from ..pure.mpatch import *
from ..pure.mpatch import mpatchError # silence pyflakes
from . import _mpatch # pytype: disable=import-error
@@ -26,7 +28,7 @@
return container[0]
-def patches(text, bins):
+def patches(text: bytes, bins: List[bytes]) -> bytes:
lgt = len(bins)
all = []
if not lgt:
--- a/mercurial/changegroup.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/changegroup.py Wed Jan 04 16:02:22 2023 +0100
@@ -105,6 +105,164 @@
os.unlink(cleanup)
+def _dbg_ubdl_line(
+ ui,
+ indent,
+ key,
+ base_value=None,
+ percentage_base=None,
+ percentage_key=None,
+):
+ """Print one line of debug_unbundle_debug_info"""
+ line = b"DEBUG-UNBUNDLING: "
+ line += b' ' * (2 * indent)
+ key += b":"
+ padding = b''
+ if base_value is not None:
+ assert len(key) + 1 + (2 * indent) <= _KEY_PART_WIDTH
+ line += key.ljust(_KEY_PART_WIDTH - (2 * indent))
+ if isinstance(base_value, float):
+ line += b"%14.3f seconds" % base_value
+ else:
+ line += b"%10d" % base_value
+ padding = b' '
+ else:
+ line += key
+
+ if percentage_base is not None:
+ line += padding
+ padding = b''
+ assert base_value is not None
+ percentage = base_value * 100 // percentage_base
+ if percentage_key is not None:
+ line += b" (%3d%% of %s)" % (
+ percentage,
+ percentage_key,
+ )
+ else:
+ line += b" (%3d%%)" % percentage
+
+ line += b'\n'
+ ui.write_err(line)
+
+
+def _sumf(items):
+ # python < 3.8 does not support a `start=0.0` argument to sum
+    # So we have to cheat a bit until we drop support for those versions
+ if not items:
+ return 0.0
+ return sum(items)
+
+
+def display_unbundle_debug_info(ui, debug_info):
+ """display an unbundling report from debug information"""
+ cl_info = []
+ mn_info = []
+ fl_info = []
+ _dispatch = [
+ (b'CHANGELOG:', cl_info),
+ (b'MANIFESTLOG:', mn_info),
+ (b'FILELOG:', fl_info),
+ ]
+ for e in debug_info:
+ for prefix, info in _dispatch:
+ if e["target-revlog"].startswith(prefix):
+ info.append(e)
+ break
+ else:
+ assert False, 'unreachable'
+ each_info = [
+ (b'changelog', cl_info),
+ (b'manifests', mn_info),
+ (b'files', fl_info),
+ ]
+
+    # General Revision Counts
+ _dbg_ubdl_line(ui, 0, b'revisions', len(debug_info))
+ for key, info in each_info:
+ if not info:
+ continue
+ _dbg_ubdl_line(ui, 1, key, len(info), len(debug_info))
+
+ # General Time spent
+ all_durations = [e['duration'] for e in debug_info]
+ all_durations.sort()
+ total_duration = _sumf(all_durations)
+ _dbg_ubdl_line(ui, 0, b'total-time', total_duration)
+
+ for key, info in each_info:
+ if not info:
+ continue
+ durations = [e['duration'] for e in info]
+ durations.sort()
+ _dbg_ubdl_line(ui, 1, key, _sumf(durations), total_duration)
+
+ # Count and cache reuse per delta types
+ each_types = {}
+ for key, info in each_info:
+ each_types[key] = types = {
+ b'full': 0,
+ b'full-cached': 0,
+ b'snapshot': 0,
+ b'snapshot-cached': 0,
+ b'delta': 0,
+ b'delta-cached': 0,
+ b'unknown': 0,
+ b'unknown-cached': 0,
+ }
+ for e in info:
+ types[e['type']] += 1
+ if e['using-cached-base']:
+ types[e['type'] + b'-cached'] += 1
+
+ EXPECTED_TYPES = (b'full', b'snapshot', b'delta', b'unknown')
+ if debug_info:
+ _dbg_ubdl_line(ui, 0, b'type-count')
+ for key, info in each_info:
+ if info:
+ _dbg_ubdl_line(ui, 1, key)
+ t = each_types[key]
+ for tn in EXPECTED_TYPES:
+ if t[tn]:
+ tc = tn + b'-cached'
+ _dbg_ubdl_line(ui, 2, tn, t[tn])
+ _dbg_ubdl_line(ui, 3, b'cached', t[tc], t[tn])
+
+ # time perf delta types and reuse
+ each_type_time = {}
+ for key, info in each_info:
+ each_type_time[key] = t = {
+ b'full': [],
+ b'full-cached': [],
+ b'snapshot': [],
+ b'snapshot-cached': [],
+ b'delta': [],
+ b'delta-cached': [],
+ b'unknown': [],
+ b'unknown-cached': [],
+ }
+ for e in info:
+ t[e['type']].append(e['duration'])
+ if e['using-cached-base']:
+ t[e['type'] + b'-cached'].append(e['duration'])
+ for t_key, value in list(t.items()):
+ value.sort()
+ t[t_key] = _sumf(value)
+
+ if debug_info:
+ _dbg_ubdl_line(ui, 0, b'type-time')
+ for key, info in each_info:
+ if info:
+ _dbg_ubdl_line(ui, 1, key)
+ t = each_type_time[key]
+            td = total_duration  # to save space on next lines
+ for tn in EXPECTED_TYPES:
+ if t[tn]:
+ tc = tn + b'-cached'
+ _dbg_ubdl_line(ui, 2, tn, t[tn], td, b"total")
+ _dbg_ubdl_line(ui, 3, b'cached', t[tc], td, b"total")
+
+
class cg1unpacker:
"""Unpacker for cg1 changegroup streams.
@@ -254,7 +412,16 @@
pos = next
yield closechunk()
- def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None):
+ def _unpackmanifests(
+ self,
+ repo,
+ revmap,
+ trp,
+ prog,
+ addrevisioncb=None,
+ debug_info=None,
+ delta_base_reuse_policy=None,
+ ):
self.callback = prog.increment
# no need to check for empty manifest group here:
# if the result of the merge of 1 and 2 is the same in 3 and 4,
@@ -263,7 +430,14 @@
self.manifestheader()
deltas = self.deltaiter()
storage = repo.manifestlog.getstorage(b'')
- storage.addgroup(deltas, revmap, trp, addrevisioncb=addrevisioncb)
+ storage.addgroup(
+ deltas,
+ revmap,
+ trp,
+ addrevisioncb=addrevisioncb,
+ debug_info=debug_info,
+ delta_base_reuse_policy=delta_base_reuse_policy,
+ )
prog.complete()
self.callback = None
@@ -276,6 +450,7 @@
targetphase=phases.draft,
expectedtotal=None,
sidedata_categories=None,
+ delta_base_reuse_policy=None,
):
"""Add the changegroup returned by source.read() to this repo.
srctype is a string like 'push', 'pull', or 'unbundle'. url is
@@ -289,9 +464,19 @@
`sidedata_categories` is an optional set of the remote's sidedata wanted
categories.
+
+ `delta_base_reuse_policy` is an optional argument, when set to a value
+ it will control the way the delta contained into the bundle are reused
+ when applied in the revlog.
+
+ See `DELTA_BASE_REUSE_*` entry in mercurial.revlogutils.constants.
"""
repo = repo.unfiltered()
+ debug_info = None
+ if repo.ui.configbool(b'debug', b'unbundling-stats'):
+ debug_info = []
+
# Only useful if we're adding sidedata categories. If both peers have
# the same categories, then we simply don't do anything.
adding_sidedata = (
@@ -366,6 +551,8 @@
alwayscache=True,
addrevisioncb=onchangelog,
duplicaterevisioncb=ondupchangelog,
+ debug_info=debug_info,
+ delta_base_reuse_policy=delta_base_reuse_policy,
):
repo.ui.develwarn(
b'applied empty changelog from changegroup',
@@ -413,6 +600,8 @@
trp,
progress,
addrevisioncb=on_manifest_rev,
+ debug_info=debug_info,
+ delta_base_reuse_policy=delta_base_reuse_policy,
)
needfiles = {}
@@ -449,6 +638,8 @@
efiles,
needfiles,
addrevisioncb=on_filelog_rev,
+ debug_info=debug_info,
+ delta_base_reuse_policy=delta_base_reuse_policy,
)
if sidedata_helpers:
@@ -567,6 +758,8 @@
b'changegroup-runhooks-%020i' % clstart,
lambda tr: repo._afterlock(runhooks),
)
+ if debug_info is not None:
+ display_unbundle_debug_info(repo.ui, debug_info)
finally:
repo.ui.flush()
# never return 0 here:
@@ -626,9 +819,24 @@
protocol_flags = 0
return node, p1, p2, deltabase, cs, flags, protocol_flags
- def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None):
+ def _unpackmanifests(
+ self,
+ repo,
+ revmap,
+ trp,
+ prog,
+ addrevisioncb=None,
+ debug_info=None,
+ delta_base_reuse_policy=None,
+ ):
super(cg3unpacker, self)._unpackmanifests(
- repo, revmap, trp, prog, addrevisioncb=addrevisioncb
+ repo,
+ revmap,
+ trp,
+ prog,
+ addrevisioncb=addrevisioncb,
+ debug_info=debug_info,
+ delta_base_reuse_policy=delta_base_reuse_policy,
)
for chunkdata in iter(self.filelogheader, {}):
# If we get here, there are directory manifests in the changegroup
@@ -636,7 +844,12 @@
repo.ui.debug(b"adding %s revisions\n" % d)
deltas = self.deltaiter()
if not repo.manifestlog.getstorage(d).addgroup(
- deltas, revmap, trp, addrevisioncb=addrevisioncb
+ deltas,
+ revmap,
+ trp,
+ addrevisioncb=addrevisioncb,
+ debug_info=debug_info,
+ delta_base_reuse_policy=delta_base_reuse_policy,
):
raise error.Abort(_(b"received dir revlog group is empty"))
@@ -869,6 +1082,7 @@
fullclnodes=None,
precomputedellipsis=None,
sidedata_helpers=None,
+ debug_info=None,
):
"""Calculate deltas for a set of revisions.
@@ -978,6 +1192,7 @@
assumehaveparentrevisions=not ellipses,
deltamode=deltamode,
sidedata_helpers=sidedata_helpers,
+ debug_info=debug_info,
)
for i, revision in enumerate(revisions):
@@ -1003,6 +1218,187 @@
progress.complete()
+def make_debug_info():
+ """ "build a "new" debug_info dictionnary
+
+ That dictionnary can be used to gather information about the bundle process
+ """
+ return {
+ 'revision-total': 0,
+ 'revision-changelog': 0,
+ 'revision-manifest': 0,
+ 'revision-files': 0,
+ 'file-count': 0,
+ 'merge-total': 0,
+ 'available-delta': 0,
+ 'available-full': 0,
+ 'delta-against-prev': 0,
+ 'delta-full': 0,
+ 'delta-against-p1': 0,
+ 'denied-delta-candeltafn': 0,
+ 'denied-base-not-available': 0,
+ 'reused-storage-delta': 0,
+ 'computed-delta': 0,
+ }
+
+
+def merge_debug_info(base, other):
+ """merge the debug information from <other> into <base>
+
+ This function can be used to gather lower level information into higher level ones.
+ """
+ for key in (
+ 'revision-total',
+ 'revision-changelog',
+ 'revision-manifest',
+ 'revision-files',
+ 'merge-total',
+ 'available-delta',
+ 'available-full',
+ 'delta-against-prev',
+ 'delta-full',
+ 'delta-against-p1',
+ 'denied-delta-candeltafn',
+ 'denied-base-not-available',
+ 'reused-storage-delta',
+ 'computed-delta',
+ ):
+ base[key] += other[key]
+
+
+_KEY_PART_WIDTH = 17
+
+
+def _dbg_bdl_line(
+ ui,
+ indent,
+ key,
+ base_value=None,
+ percentage_base=None,
+ percentage_key=None,
+ percentage_ref=None,
+ extra=None,
+):
+ """Print one line of debug_bundle_debug_info"""
+ line = b"DEBUG-BUNDLING: "
+ line += b' ' * (2 * indent)
+ key += b":"
+ if base_value is not None:
+ assert len(key) + 1 + (2 * indent) <= _KEY_PART_WIDTH
+ line += key.ljust(_KEY_PART_WIDTH - (2 * indent))
+ line += b"%10d" % base_value
+ else:
+ line += key
+
+ if percentage_base is not None:
+ assert base_value is not None
+ percentage = base_value * 100 // percentage_base
+ if percentage_key is not None:
+ line += b" (%d%% of %s %d)" % (
+ percentage,
+ percentage_key,
+ percentage_ref,
+ )
+ else:
+ line += b" (%d%%)" % percentage
+
+ if extra:
+ line += b" "
+ line += extra
+
+ line += b'\n'
+ ui.write_err(line)
+
+
+def display_bundling_debug_info(
+ ui,
+ debug_info,
+ cl_debug_info,
+ mn_debug_info,
+ fl_debug_info,
+):
+ """display debug information gathered during a bundling through `ui`"""
+ d = debug_info
+ c = cl_debug_info
+ m = mn_debug_info
+ f = fl_debug_info
+ all_info = [
+ (b"changelog", b"cl", c),
+ (b"manifests", b"mn", m),
+ (b"files", b"fl", f),
+ ]
+ _dbg_bdl_line(ui, 0, b'revisions', d['revision-total'])
+ _dbg_bdl_line(ui, 1, b'changelog', d['revision-changelog'])
+ _dbg_bdl_line(ui, 1, b'manifest', d['revision-manifest'])
+ extra = b'(for %d revlogs)' % d['file-count']
+ _dbg_bdl_line(ui, 1, b'files', d['revision-files'], extra=extra)
+ if d['merge-total']:
+ _dbg_bdl_line(ui, 1, b'merge', d['merge-total'], d['revision-total'])
+ for k, __, v in all_info:
+ if v['merge-total']:
+ _dbg_bdl_line(ui, 2, k, v['merge-total'], v['revision-total'])
+
+ _dbg_bdl_line(ui, 0, b'deltas')
+ _dbg_bdl_line(
+ ui,
+ 1,
+ b'from-storage',
+ d['reused-storage-delta'],
+ percentage_base=d['available-delta'],
+ percentage_key=b"available",
+ percentage_ref=d['available-delta'],
+ )
+
+ if d['denied-delta-candeltafn']:
+ _dbg_bdl_line(ui, 2, b'denied-fn', d['denied-delta-candeltafn'])
+ for __, k, v in all_info:
+ if v['denied-delta-candeltafn']:
+ _dbg_bdl_line(ui, 3, k, v['denied-delta-candeltafn'])
+
+ if d['denied-base-not-available']:
+ _dbg_bdl_line(ui, 2, b'denied-nb', d['denied-base-not-available'])
+ for k, __, v in all_info:
+ if v['denied-base-not-available']:
+ _dbg_bdl_line(ui, 3, k, v['denied-base-not-available'])
+
+ if d['computed-delta']:
+ _dbg_bdl_line(ui, 1, b'computed', d['computed-delta'])
+
+ if d['available-full']:
+ _dbg_bdl_line(
+ ui,
+ 2,
+ b'full',
+ d['delta-full'],
+ percentage_base=d['available-full'],
+ percentage_key=b"native",
+ percentage_ref=d['available-full'],
+ )
+ for k, __, v in all_info:
+ if v['available-full']:
+ _dbg_bdl_line(
+ ui,
+ 3,
+ k,
+ v['delta-full'],
+ percentage_base=v['available-full'],
+ percentage_key=b"native",
+ percentage_ref=v['available-full'],
+ )
+
+ if d['delta-against-prev']:
+ _dbg_bdl_line(ui, 2, b'previous', d['delta-against-prev'])
+ for k, __, v in all_info:
+ if v['delta-against-prev']:
+ _dbg_bdl_line(ui, 3, k, v['delta-against-prev'])
+
+ if d['delta-against-p1']:
+        _dbg_bdl_line(ui, 2, b'parent-1', d['delta-against-p1'])
+ for k, __, v in all_info:
+ if v['delta-against-p1']:
+ _dbg_bdl_line(ui, 3, k, v['delta-against-p1'])
+
+
class cgpacker:
def __init__(
self,
@@ -1086,13 +1482,21 @@
self._verbosenote = lambda s: None
def generate(
- self, commonrevs, clnodes, fastpathlinkrev, source, changelog=True
+ self,
+ commonrevs,
+ clnodes,
+ fastpathlinkrev,
+ source,
+ changelog=True,
):
"""Yield a sequence of changegroup byte chunks.
If changelog is False, changelog data won't be added to changegroup
"""
+ debug_info = None
repo = self._repo
+ if repo.ui.configbool(b'debug', b'bundling-stats'):
+ debug_info = make_debug_info()
cl = repo.changelog
self._verbosenote(_(b'uncompressed size of bundle content:\n'))
@@ -1107,14 +1511,19 @@
# correctly advertise its sidedata categories directly.
remote_sidedata = repo._wanted_sidedata
sidedata_helpers = sidedatamod.get_sidedata_helpers(
- repo, remote_sidedata
+ repo,
+ remote_sidedata,
)
+ cl_debug_info = None
+ if debug_info is not None:
+ cl_debug_info = make_debug_info()
clstate, deltas = self._generatechangelog(
cl,
clnodes,
generate=changelog,
sidedata_helpers=sidedata_helpers,
+ debug_info=cl_debug_info,
)
for delta in deltas:
for chunk in _revisiondeltatochunks(
@@ -1126,6 +1535,9 @@
close = closechunk()
size += len(close)
yield closechunk()
+ if debug_info is not None:
+ merge_debug_info(debug_info, cl_debug_info)
+ debug_info['revision-changelog'] = cl_debug_info['revision-total']
self._verbosenote(_(b'%8.i (changelog)\n') % size)
@@ -1133,6 +1545,9 @@
manifests = clstate[b'manifests']
changedfiles = clstate[b'changedfiles']
+ if debug_info is not None:
+ debug_info['file-count'] = len(changedfiles)
+
# We need to make sure that the linkrev in the changegroup refers to
# the first changeset that introduced the manifest or file revision.
# The fastpath is usually safer than the slowpath, because the filelogs
@@ -1156,6 +1571,9 @@
fnodes = {} # needed file nodes
size = 0
+ mn_debug_info = None
+ if debug_info is not None:
+ mn_debug_info = make_debug_info()
it = self.generatemanifests(
commonrevs,
clrevorder,
@@ -1165,6 +1583,7 @@
source,
clstate[b'clrevtomanifestrev'],
sidedata_helpers=sidedata_helpers,
+ debug_info=mn_debug_info,
)
for tree, deltas in it:
@@ -1185,6 +1604,9 @@
close = closechunk()
size += len(close)
yield close
+ if debug_info is not None:
+ merge_debug_info(debug_info, mn_debug_info)
+ debug_info['revision-manifest'] = mn_debug_info['revision-total']
self._verbosenote(_(b'%8.i (manifests)\n') % size)
yield self._manifestsend
@@ -1199,6 +1621,9 @@
manifests.clear()
clrevs = {cl.rev(x) for x in clnodes}
+ fl_debug_info = None
+ if debug_info is not None:
+ fl_debug_info = make_debug_info()
it = self.generatefiles(
changedfiles,
commonrevs,
@@ -1208,6 +1633,7 @@
fnodes,
clrevs,
sidedata_helpers=sidedata_helpers,
+ debug_info=fl_debug_info,
)
for path, deltas in it:
@@ -1230,12 +1656,29 @@
self._verbosenote(_(b'%8.i %s\n') % (size, path))
yield closechunk()
+ if debug_info is not None:
+ merge_debug_info(debug_info, fl_debug_info)
+ debug_info['revision-files'] = fl_debug_info['revision-total']
+
+ if debug_info is not None:
+ display_bundling_debug_info(
+ repo.ui,
+ debug_info,
+ cl_debug_info,
+ mn_debug_info,
+ fl_debug_info,
+ )
if clnodes:
repo.hook(b'outgoing', node=hex(clnodes[0]), source=source)
def _generatechangelog(
- self, cl, nodes, generate=True, sidedata_helpers=None
+ self,
+ cl,
+ nodes,
+ generate=True,
+ sidedata_helpers=None,
+ debug_info=None,
):
"""Generate data for changelog chunks.
@@ -1332,6 +1775,7 @@
fullclnodes=self._fullclnodes,
precomputedellipsis=self._precomputedellipsis,
sidedata_helpers=sidedata_helpers,
+ debug_info=debug_info,
)
return state, gen
@@ -1346,6 +1790,7 @@
source,
clrevtolocalrev,
sidedata_helpers=None,
+ debug_info=None,
):
"""Returns an iterator of changegroup chunks containing manifests.
@@ -1444,6 +1889,7 @@
fullclnodes=self._fullclnodes,
precomputedellipsis=self._precomputedellipsis,
sidedata_helpers=sidedata_helpers,
+ debug_info=debug_info,
)
if not self._oldmatcher.visitdir(store.tree[:-1]):
@@ -1483,6 +1929,7 @@
fnodes,
clrevs,
sidedata_helpers=None,
+ debug_info=None,
):
changedfiles = [
f
@@ -1578,6 +2025,7 @@
fullclnodes=self._fullclnodes,
precomputedellipsis=self._precomputedellipsis,
sidedata_helpers=sidedata_helpers,
+ debug_info=debug_info,
)
yield fname, deltas
@@ -1867,7 +2315,12 @@
def makechangegroup(
- repo, outgoing, version, source, fastpath=False, bundlecaps=None
+ repo,
+ outgoing,
+ version,
+ source,
+ fastpath=False,
+ bundlecaps=None,
):
cgstream = makestream(
repo,
@@ -1917,7 +2370,12 @@
repo.hook(b'preoutgoing', throw=True, source=source)
_changegroupinfo(repo, csets, source)
- return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
+ return bundler.generate(
+ commonrevs,
+ csets,
+ fastpathlinkrev,
+ source,
+ )
def _addchangegroupfiles(
@@ -1928,6 +2386,8 @@
expectedfiles,
needfiles,
addrevisioncb=None,
+ debug_info=None,
+ delta_base_reuse_policy=None,
):
revisions = 0
files = 0
@@ -1948,6 +2408,8 @@
revmap,
trp,
addrevisioncb=addrevisioncb,
+ debug_info=debug_info,
+ delta_base_reuse_policy=delta_base_reuse_policy,
)
if not added:
raise error.Abort(_(b"received file revlog group is empty"))
--- a/mercurial/cmdutil.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/cmdutil.py Wed Jan 04 16:02:22 2023 +0100
@@ -3019,6 +3019,8 @@
commitphase = None
if opts.get(b'secret'):
commitphase = phases.secret
+ elif opts.get(b'draft'):
+ commitphase = phases.draft
newid = repo.commitctx(new)
ms.reset()
--- a/mercurial/commands.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/commands.py Wed Jan 04 16:02:22 2023 +0100
@@ -1635,7 +1635,7 @@
missing = set()
excluded = set()
for path in urlutil.get_push_paths(repo, ui, dests):
- other = hg.peer(repo, opts, path.rawloc)
+ other = hg.peer(repo, opts, path)
if revs is not None:
hex_revs = [repo[r].hex() for r in revs]
else:
@@ -2008,6 +2008,7 @@
(b'', b'close-branch', None, _(b'mark a branch head as closed')),
(b'', b'amend', None, _(b'amend the parent of the working directory')),
(b's', b'secret', None, _(b'use the secret phase for committing')),
+ (b'', b'draft', None, _(b'use the draft phase for committing')),
(b'e', b'edit', None, _(b'invoke editor on commit messages')),
(
b'',
@@ -2082,6 +2083,8 @@
hg commit --amend --date now
"""
+ cmdutil.check_at_most_one_arg(opts, 'draft', 'secret')
+ cmdutil.check_incompatible_arguments(opts, 'subrepos', ['amend'])
with repo.wlock(), repo.lock():
return _docommit(ui, repo, *pats, **opts)
@@ -2097,7 +2100,6 @@
return 1 if ret == 0 else ret
if opts.get('subrepos'):
- cmdutil.check_incompatible_arguments(opts, 'subrepos', ['amend'])
# Let --subrepos on the command line override config setting.
ui.setconfig(b'ui', b'commitsubrepos', True, b'commit')
@@ -2174,6 +2176,8 @@
overrides = {}
if opts.get(b'secret'):
overrides[(b'phases', b'new-commit')] = b'secret'
+ elif opts.get(b'draft'):
+ overrides[(b'phases', b'new-commit')] = b'draft'
baseui = repo.baseui
with baseui.configoverride(overrides, b'commit'):
@@ -3911,12 +3915,11 @@
peer = None
try:
if source:
- source, branches = urlutil.get_unique_pull_path(
- b'identify', repo, ui, source
- )
+ path = urlutil.get_unique_pull_path_obj(b'identify', ui, source)
# only pass ui when no repo
- peer = hg.peer(repo or ui, opts, source)
+ peer = hg.peer(repo or ui, opts, path)
repo = peer.local()
+ branches = (path.branch, [])
revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
fm = ui.formatter(b'identify', opts)
@@ -4383,17 +4386,15 @@
if opts.get(b'bookmarks'):
srcs = urlutil.get_pull_paths(repo, ui, [source])
for path in srcs:
- source, branches = urlutil.parseurl(
- path.rawloc, opts.get(b'branch')
- )
- other = hg.peer(repo, opts, source)
+ # XXX the "branches" options are not used. Should it be used?
+ other = hg.peer(repo, opts, path)
try:
if b'bookmarks' not in other.listkeys(b'namespaces'):
ui.warn(_(b"remote doesn't support bookmarks\n"))
return 0
ui.pager(b'incoming')
ui.status(
- _(b'comparing with %s\n') % urlutil.hidepassword(source)
+ _(b'comparing with %s\n') % urlutil.hidepassword(path.loc)
)
return bookmarks.incoming(
ui, repo, other, mode=path.bookmarks_mode
@@ -4426,7 +4427,7 @@
Returns 0 on success.
"""
opts = pycompat.byteskwargs(opts)
- path = urlutil.get_clone_path(ui, dest)[1]
+ path = urlutil.get_clone_path_obj(ui, dest)
peer = hg.peer(ui, opts, path, create=True)
peer.close()
@@ -5038,14 +5039,13 @@
opts = pycompat.byteskwargs(opts)
if opts.get(b'bookmarks'):
for path in urlutil.get_push_paths(repo, ui, dests):
- dest = path.pushloc or path.loc
- other = hg.peer(repo, opts, dest)
+ other = hg.peer(repo, opts, path)
try:
if b'bookmarks' not in other.listkeys(b'namespaces'):
ui.warn(_(b"remote doesn't support bookmarks\n"))
return 0
ui.status(
- _(b'comparing with %s\n') % urlutil.hidepassword(dest)
+ _(b'comparing with %s\n') % urlutil.hidepassword(path.loc)
)
ui.pager(b'outgoing')
return bookmarks.outgoing(ui, repo, other)
@@ -5434,12 +5434,12 @@
raise error.InputError(msg, hint=hint)
for path in urlutil.get_pull_paths(repo, ui, sources):
- source, branches = urlutil.parseurl(path.rawloc, opts.get(b'branch'))
- ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(source))
+ ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path.loc))
ui.flush()
- other = hg.peer(repo, opts, source)
+ other = hg.peer(repo, opts, path)
update_conflict = None
try:
+ branches = (path.branch, opts.get(b'branch', []))
revs, checkout = hg.addbranchrevs(
repo, other, branches, opts.get(b'rev')
)
@@ -5515,8 +5515,12 @@
elif opts.get(b'branch'):
brev = opts[b'branch'][0]
else:
- brev = branches[0]
- repo._subtoppath = source
+ brev = path.branch
+
+ # XXX path: we are losing the `path` object here. Keeping it
+ # would be valuable. For example as a "variant" as we do
+ # for pushes.
+ repo._subtoppath = path.loc
try:
update_conflict = postincoming(
ui, repo, modheads, opts.get(b'update'), checkout, brev
@@ -5766,7 +5770,7 @@
some_pushed = False
result = 0
for path in urlutil.get_push_paths(repo, ui, dests):
- dest = path.pushloc or path.loc
+ dest = path.loc
branches = (path.branch, opts.get(b'branch') or [])
ui.status(_(b'pushing to %s\n') % urlutil.hidepassword(dest))
revs, checkout = hg.addbranchrevs(
@@ -7252,23 +7256,22 @@
# XXX We should actually skip this if no default is specified, instead
# of passing "default" which will resolve as "./default/" if no default
# path is defined.
- source, branches = urlutil.get_unique_pull_path(
- b'summary', repo, ui, b'default'
- )
- sbranch = branches[0]
+ path = urlutil.get_unique_pull_path_obj(b'summary', ui, b'default')
+ sbranch = path.branch
try:
- other = hg.peer(repo, {}, source)
+ other = hg.peer(repo, {}, path)
except error.RepoError:
if opts.get(b'remote'):
raise
- return source, sbranch, None, None, None
+ return path.loc, sbranch, None, None, None
+ branches = (path.branch, [])
revs, checkout = hg.addbranchrevs(repo, other, branches, None)
if revs:
revs = [other.lookup(rev) for rev in revs]
- ui.debug(b'comparing with %s\n' % urlutil.hidepassword(source))
+ ui.debug(b'comparing with %s\n' % urlutil.hidepassword(path.loc))
with repo.ui.silent():
commoninc = discovery.findcommonincoming(repo, other, heads=revs)
- return source, sbranch, other, commoninc, commoninc[1]
+ return path.loc, sbranch, other, commoninc, commoninc[1]
if needsincoming:
source, sbranch, sother, commoninc, incoming = getincoming()
@@ -7284,9 +7287,10 @@
d = b'default-push'
elif b'default' in ui.paths:
d = b'default'
+ path = None
if d is not None:
path = urlutil.get_unique_push_path(b'summary', repo, ui, d)
- dest = path.pushloc or path.loc
+ dest = path.loc
dbranch = path.branch
else:
dest = b'default'
@@ -7294,7 +7298,7 @@
revs, checkout = hg.addbranchrevs(repo, repo, (dbranch, []), None)
if source != dest:
try:
- dother = hg.peer(repo, {}, dest)
+ dother = hg.peer(repo, {}, path if path is not None else dest)
except error.RepoError:
if opts.get(b'remote'):
raise
--- a/mercurial/configitems.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/configitems.py Wed Jan 04 16:02:22 2023 +0100
@@ -588,6 +588,18 @@
b'revlog.debug-delta',
default=False,
)
+# display extra information about the bundling process
+coreconfigitem(
+ b'debug',
+ b'bundling-stats',
+ default=False,
+)
+# display extra information about the unbundling process
+coreconfigitem(
+ b'debug',
+ b'unbundling-stats',
+ default=False,
+)
coreconfigitem(
b'defaults',
b'.*',
@@ -911,6 +923,13 @@
b'changegroup4',
default=False,
)
+
+# might remove rank configuration once the computation has no impact
+coreconfigitem(
+ b'experimental',
+ b'changelog-v2.compute-rank',
+ default=True,
+)
coreconfigitem(
b'experimental',
b'cleanup-as-archived',
@@ -1774,6 +1793,13 @@
)
coreconfigitem(
b'merge-tools',
+ br'.*\.regappend$',
+ default=b"",
+ generic=True,
+ priority=-1,
+)
+coreconfigitem(
+ b'merge-tools',
br'.*\.symlink$',
default=False,
generic=True,
@@ -2023,6 +2049,11 @@
)
coreconfigitem(
b'storage',
+ b'revlog.delta-parent-search.candidate-group-chunk-size',
+ default=10,
+)
+coreconfigitem(
+ b'storage',
b'revlog.issue6528.fix-incoming',
default=True,
)
@@ -2044,6 +2075,7 @@
b'revlog.reuse-external-delta',
default=True,
)
+# This option is True unless `format.generaldelta` is set.
coreconfigitem(
b'storage',
b'revlog.reuse-external-delta-parent',
@@ -2123,7 +2155,7 @@
coreconfigitem(
b'server',
b'pullbundle',
- default=False,
+ default=True,
)
coreconfigitem(
b'server',
--- a/mercurial/debugcommands.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/debugcommands.py Wed Jan 04 16:02:22 2023 +0100
@@ -21,7 +21,6 @@
import socket
import ssl
import stat
-import string
import subprocess
import sys
import time
@@ -73,7 +72,6 @@
repoview,
requirements,
revlog,
- revlogutils,
revset,
revsetlang,
scmutil,
@@ -89,6 +87,7 @@
upgrade,
url as urlmod,
util,
+ verify,
vfs as vfsmod,
wireprotoframing,
wireprotoserver,
@@ -556,15 +555,9 @@
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
"""validate the correctness of the current dirstate"""
- parent1, parent2 = repo.dirstate.parents()
- m1 = repo[parent1].manifest()
- m2 = repo[parent2].manifest()
- errors = 0
- for err in repo.dirstate.verify(m1, m2):
- ui.warn(err[0] % err[1:])
- errors += 1
+ errors = verify.verifier(repo)._verify_dirstate()
if errors:
- errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
+ errstr = _(b"dirstate inconsistent with current parent's manifest")
raise error.Abort(errstr)
@@ -990,17 +983,29 @@
@command(
b'debug-delta-find',
- cmdutil.debugrevlogopts + cmdutil.formatteropts,
+ cmdutil.debugrevlogopts
+ + cmdutil.formatteropts
+ + [
+ (
+ b'',
+ b'source',
+ b'full',
+ _(b'input data feed to the process (full, storage, p1, p2, prev)'),
+ ),
+ ],
_(b'-c|-m|FILE REV'),
optionalrepo=True,
)
-def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
+def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
"""display the computation to get to a valid delta for storing REV
This command will replay the process used to find the "best" delta to store
a revision and display information about all the steps used to get to that
result.
+    By default, the process is fed with the full-text for the revision. This
+ can be controlled with the --source flag.
+
The revision use the revision number of the target storage (not changelog
revision number).
@@ -1017,34 +1022,22 @@
rev = int(rev)
revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
-
- deltacomputer = deltautil.deltacomputer(
- revlog,
- write_debug=ui.write,
- debug_search=not ui.quiet,
- )
-
- node = revlog.node(rev)
p1r, p2r = revlog.parentrevs(rev)
- p1 = revlog.node(p1r)
- p2 = revlog.node(p2r)
- btext = [revlog.revision(rev)]
- textlen = len(btext[0])
- cachedelta = None
- flags = revlog.flags(rev)
-
- revinfo = revlogutils.revisioninfo(
- node,
- p1,
- p2,
- btext,
- textlen,
- cachedelta,
- flags,
- )
-
- fh = revlog._datafp()
- deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
+
+ if source == b'full':
+ base_rev = nullrev
+ elif source == b'storage':
+ base_rev = revlog.deltaparent(rev)
+ elif source == b'p1':
+ base_rev = p1r
+ elif source == b'p2':
+ base_rev = p2r
+ elif source == b'prev':
+ base_rev = rev - 1
+ else:
+ raise error.InputError(b"invalid --source value: %s" % source)
+
+ revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
@command(
@@ -1236,12 +1229,12 @@
random.seed(int(opts[b'seed']))
if not remote_revs:
-
- remoteurl, branches = urlutil.get_unique_pull_path(
- b'debugdiscovery', repo, ui, remoteurl
+ path = urlutil.get_unique_pull_path_obj(
+ b'debugdiscovery', ui, remoteurl
)
- remote = hg.peer(repo, opts, remoteurl)
- ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
+ branches = (path.branch, [])
+ remote = hg.peer(repo, opts, path)
+ ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
else:
branches = (None, [])
remote_filtered_revs = logcmdutil.revrange(
@@ -3207,348 +3200,10 @@
r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
if opts.get(b"dump"):
- numrevs = len(r)
- ui.write(
- (
- b"# rev p1rev p2rev start end deltastart base p1 p2"
- b" rawsize totalsize compression heads chainlen\n"
- )
- )
- ts = 0
- heads = set()
-
- for rev in range(numrevs):
- dbase = r.deltaparent(rev)
- if dbase == -1:
- dbase = rev
- cbase = r.chainbase(rev)
- clen = r.chainlen(rev)
- p1, p2 = r.parentrevs(rev)
- rs = r.rawsize(rev)
- ts = ts + rs
- heads -= set(r.parentrevs(rev))
- heads.add(rev)
- try:
- compression = ts / r.end(rev)
- except ZeroDivisionError:
- compression = 0
- ui.write(
- b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
- b"%11d %5d %8d\n"
- % (
- rev,
- p1,
- p2,
- r.start(rev),
- r.end(rev),
- r.start(dbase),
- r.start(cbase),
- r.start(p1),
- r.start(p2),
- rs,
- ts,
- compression,
- len(heads),
- clen,
- )
- )
- return 0
-
- format = r._format_version
- v = r._format_flags
- flags = []
- gdelta = False
- if v & revlog.FLAG_INLINE_DATA:
- flags.append(b'inline')
- if v & revlog.FLAG_GENERALDELTA:
- gdelta = True
- flags.append(b'generaldelta')
- if not flags:
- flags = [b'(none)']
-
- ### tracks merge vs single parent
- nummerges = 0
-
- ### tracks ways the "delta" are build
- # nodelta
- numempty = 0
- numemptytext = 0
- numemptydelta = 0
- # full file content
- numfull = 0
- # intermediate snapshot against a prior snapshot
- numsemi = 0
- # snapshot count per depth
- numsnapdepth = collections.defaultdict(lambda: 0)
- # delta against previous revision
- numprev = 0
- # delta against first or second parent (not prev)
- nump1 = 0
- nump2 = 0
- # delta against neither prev nor parents
- numother = 0
- # delta against prev that are also first or second parent
- # (details of `numprev`)
- nump1prev = 0
- nump2prev = 0
-
- # data about delta chain of each revs
- chainlengths = []
- chainbases = []
- chainspans = []
-
- # data about each revision
- datasize = [None, 0, 0]
- fullsize = [None, 0, 0]
- semisize = [None, 0, 0]
- # snapshot count per depth
- snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
- deltasize = [None, 0, 0]
- chunktypecounts = {}
- chunktypesizes = {}
-
- def addsize(size, l):
- if l[0] is None or size < l[0]:
- l[0] = size
- if size > l[1]:
- l[1] = size
- l[2] += size
-
- numrevs = len(r)
- for rev in range(numrevs):
- p1, p2 = r.parentrevs(rev)
- delta = r.deltaparent(rev)
- if format > 0:
- addsize(r.rawsize(rev), datasize)
- if p2 != nullrev:
- nummerges += 1
- size = r.length(rev)
- if delta == nullrev:
- chainlengths.append(0)
- chainbases.append(r.start(rev))
- chainspans.append(size)
- if size == 0:
- numempty += 1
- numemptytext += 1
- else:
- numfull += 1
- numsnapdepth[0] += 1
- addsize(size, fullsize)
- addsize(size, snapsizedepth[0])
- else:
- chainlengths.append(chainlengths[delta] + 1)
- baseaddr = chainbases[delta]
- revaddr = r.start(rev)
- chainbases.append(baseaddr)
- chainspans.append((revaddr - baseaddr) + size)
- if size == 0:
- numempty += 1
- numemptydelta += 1
- elif r.issnapshot(rev):
- addsize(size, semisize)
- numsemi += 1
- depth = r.snapshotdepth(rev)
- numsnapdepth[depth] += 1
- addsize(size, snapsizedepth[depth])
- else:
- addsize(size, deltasize)
- if delta == rev - 1:
- numprev += 1
- if delta == p1:
- nump1prev += 1
- elif delta == p2:
- nump2prev += 1
- elif delta == p1:
- nump1 += 1
- elif delta == p2:
- nump2 += 1
- elif delta != nullrev:
- numother += 1
-
- # Obtain data on the raw chunks in the revlog.
- if util.safehasattr(r, b'_getsegmentforrevs'):
- segment = r._getsegmentforrevs(rev, rev)[1]
- else:
- segment = r._revlog._getsegmentforrevs(rev, rev)[1]
- if segment:
- chunktype = bytes(segment[0:1])
- else:
- chunktype = b'empty'
-
- if chunktype not in chunktypecounts:
- chunktypecounts[chunktype] = 0
- chunktypesizes[chunktype] = 0
-
- chunktypecounts[chunktype] += 1
- chunktypesizes[chunktype] += size
-
- # Adjust size min value for empty cases
- for size in (datasize, fullsize, semisize, deltasize):
- if size[0] is None:
- size[0] = 0
-
- numdeltas = numrevs - numfull - numempty - numsemi
- numoprev = numprev - nump1prev - nump2prev
- totalrawsize = datasize[2]
- datasize[2] /= numrevs
- fulltotal = fullsize[2]
- if numfull == 0:
- fullsize[2] = 0
+ revlog_debug.dump(ui, r)
else:
- fullsize[2] /= numfull
- semitotal = semisize[2]
- snaptotal = {}
- if numsemi > 0:
- semisize[2] /= numsemi
- for depth in snapsizedepth:
- snaptotal[depth] = snapsizedepth[depth][2]
- snapsizedepth[depth][2] /= numsnapdepth[depth]
-
- deltatotal = deltasize[2]
- if numdeltas > 0:
- deltasize[2] /= numdeltas
- totalsize = fulltotal + semitotal + deltatotal
- avgchainlen = sum(chainlengths) / numrevs
- maxchainlen = max(chainlengths)
- maxchainspan = max(chainspans)
- compratio = 1
- if totalsize:
- compratio = totalrawsize / totalsize
-
- basedfmtstr = b'%%%dd\n'
- basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
-
- def dfmtstr(max):
- return basedfmtstr % len(str(max))
-
- def pcfmtstr(max, padding=0):
- return basepcfmtstr % (len(str(max)), b' ' * padding)
-
- def pcfmt(value, total):
- if total:
- return (value, 100 * float(value) / total)
- else:
- return value, 100.0
-
- ui.writenoi18n(b'format : %d\n' % format)
- ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
-
- ui.write(b'\n')
- fmt = pcfmtstr(totalsize)
- fmt2 = dfmtstr(totalsize)
- ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
- ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
- ui.writenoi18n(
- b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
- )
- ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
- ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
- ui.writenoi18n(
- b' text : '
- + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
- )
- ui.writenoi18n(
- b' delta : '
- + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
- )
- ui.writenoi18n(
- b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
- )
- for depth in sorted(numsnapdepth):
- ui.write(
- (b' lvl-%-3d : ' % depth)
- + fmt % pcfmt(numsnapdepth[depth], numrevs)
- )
- ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
- ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
- ui.writenoi18n(
- b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
- )
- for depth in sorted(numsnapdepth):
- ui.write(
- (b' lvl-%-3d : ' % depth)
- + fmt % pcfmt(snaptotal[depth], totalsize)
- )
- ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
-
- def fmtchunktype(chunktype):
- if chunktype == b'empty':
- return b' %s : ' % chunktype
- elif chunktype in pycompat.bytestr(string.ascii_letters):
- return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
- else:
- return b' 0x%s : ' % hex(chunktype)
-
- ui.write(b'\n')
- ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
- for chunktype in sorted(chunktypecounts):
- ui.write(fmtchunktype(chunktype))
- ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
- ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
- for chunktype in sorted(chunktypecounts):
- ui.write(fmtchunktype(chunktype))
- ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
-
- ui.write(b'\n')
- fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
- ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
- ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
- ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
- ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
-
- if format > 0:
- ui.write(b'\n')
- ui.writenoi18n(
- b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
- % tuple(datasize)
- )
- ui.writenoi18n(
- b'full revision size (min/max/avg) : %d / %d / %d\n'
- % tuple(fullsize)
- )
- ui.writenoi18n(
- b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
- % tuple(semisize)
- )
- for depth in sorted(snapsizedepth):
- if depth == 0:
- continue
- ui.writenoi18n(
- b' level-%-3d (min/max/avg) : %d / %d / %d\n'
- % ((depth,) + tuple(snapsizedepth[depth]))
- )
- ui.writenoi18n(
- b'delta size (min/max/avg) : %d / %d / %d\n'
- % tuple(deltasize)
- )
-
- if numdeltas > 0:
- ui.write(b'\n')
- fmt = pcfmtstr(numdeltas)
- fmt2 = pcfmtstr(numdeltas, 4)
- ui.writenoi18n(
- b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
- )
- if numprev > 0:
- ui.writenoi18n(
- b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
- )
- ui.writenoi18n(
- b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
- )
- ui.writenoi18n(
- b' other : ' + fmt2 % pcfmt(numoprev, numprev)
- )
- if gdelta:
- ui.writenoi18n(
- b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
- )
- ui.writenoi18n(
- b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
- )
- ui.writenoi18n(
- b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
- )
+ revlog_debug.debug_revlog(ui, r)
+ return 0
@command(
@@ -3935,10 +3590,8 @@
)
source = b"default"
- source, branches = urlutil.get_unique_pull_path(
- b'debugssl', repo, ui, source
- )
- url = urlutil.url(source)
+ path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
+ url = path.url
defaultport = {b'https': 443, b'ssh': 22}
if url.scheme in defaultport:
@@ -4049,20 +3702,19 @@
for backup in backups:
# Much of this is copied from the hg incoming logic
source = os.path.relpath(backup, encoding.getcwd())
- source, branches = urlutil.get_unique_pull_path(
+ path = urlutil.get_unique_pull_path_obj(
b'debugbackupbundle',
- repo,
ui,
source,
- default_branches=opts.get(b'branch'),
)
try:
- other = hg.peer(repo, opts, source)
+ other = hg.peer(repo, opts, path)
except error.LookupError as ex:
- msg = _(b"\nwarning: unable to open bundle %s") % source
+ msg = _(b"\nwarning: unable to open bundle %s") % path.loc
hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
ui.warn(msg, hint=hint)
continue
+ branches = (path.branch, opts.get(b'branch', []))
revs, checkout = hg.addbranchrevs(
repo, other, branches, opts.get(b"rev")
)
@@ -4085,29 +3737,29 @@
with repo.lock(), repo.transaction(b"unbundle") as tr:
if scmutil.isrevsymbol(other, recovernode):
ui.status(_(b"Unbundling %s\n") % (recovernode))
- f = hg.openpath(ui, source)
- gen = exchange.readbundle(ui, f, source)
+ f = hg.openpath(ui, path.loc)
+ gen = exchange.readbundle(ui, f, path.loc)
if isinstance(gen, bundle2.unbundle20):
bundle2.applybundle(
repo,
gen,
tr,
source=b"unbundle",
- url=b"bundle:" + source,
+ url=b"bundle:" + path.loc,
)
else:
- gen.apply(repo, b"unbundle", b"bundle:" + source)
+ gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
break
else:
backupdate = encoding.strtolocal(
time.strftime(
"%a %H:%M, %Y-%m-%d",
- time.localtime(os.path.getmtime(source)),
+ time.localtime(os.path.getmtime(path.loc)),
)
)
ui.status(b"\n%s\n" % (backupdate.ljust(50)))
if ui.verbose:
- ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
+ ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
else:
opts[
b"template"
@@ -4152,6 +3804,33 @@
@command(
+ b'debug-revlog-stats',
+ [
+ (b'c', b'changelog', None, _(b'Display changelog statistics')),
+ (b'm', b'manifest', None, _(b'Display manifest statistics')),
+ (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
+ ]
+ + cmdutil.formatteropts,
+)
+def debug_revlog_stats(ui, repo, **opts):
+ """display statistics about revlogs in the store"""
+ opts = pycompat.byteskwargs(opts)
+ changelog = opts[b"changelog"]
+ manifest = opts[b"manifest"]
+ filelogs = opts[b"filelogs"]
+
+ if changelog is None and manifest is None and filelogs is None:
+ changelog = True
+ manifest = True
+ filelogs = True
+
+ repo = repo.unfiltered()
+ fm = ui.formatter(b'debug-revlog-stats', opts)
+ revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
+ fm.end()
+
+
+@command(
b'debugsuccessorssets',
[(b'', b'closest', False, _(b'return closest successors sets only'))],
_(b'[REV]'),
@@ -4843,7 +4522,8 @@
_(b'--peer %s not supported with HTTP peers') % opts[b'peer']
)
else:
- peer = httppeer.makepeer(ui, path, opener=opener)
+ peer_path = urlutil.try_path(ui, path)
+ peer = httppeer.makepeer(ui, peer_path, opener=opener)
# We /could/ populate stdin/stdout with sock.makefile()...
else:
--- a/mercurial/dirstate.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/dirstate.py Wed Jan 04 16:02:22 2023 +0100
@@ -1540,21 +1540,31 @@
if data_backup is not None:
o.unlink(data_backup[0])
- def verify(self, m1, m2):
- """check the dirstate content again the parent manifest and yield errors"""
- missing_from_p1 = b"%s in state %s, but not in manifest1\n"
- unexpected_in_p1 = b"%s in state %s, but also in manifest1\n"
- missing_from_ps = b"%s in state %s, but not in either manifest\n"
- missing_from_ds = b"%s in manifest1, but listed as state %s\n"
+ def verify(self, m1, m2, p1, narrow_matcher=None):
+ """
+ check the dirstate contents against the parent manifest and yield errors
+ """
+ missing_from_p1 = _(
+ b"%s marked as tracked in p1 (%s) but not in manifest1\n"
+ )
+ unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
+ missing_from_ps = _(
+ b"%s marked as modified, but not in either manifest\n"
+ )
+ missing_from_ds = _(
+ b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
+ )
for f, entry in self.items():
- state = entry.state
- if state in b"nr" and f not in m1:
- yield (missing_from_p1, f, state)
- if state in b"a" and f in m1:
- yield (unexpected_in_p1, f, state)
- if state in b"m" and f not in m1 and f not in m2:
- yield (missing_from_ps, f, state)
+ if entry.p1_tracked:
+ if entry.modified and f not in m1 and f not in m2:
+ yield missing_from_ps % f
+ elif f not in m1:
+ yield missing_from_p1 % (f, node.short(p1))
+ if entry.added and f in m1:
+ yield unexpected_in_p1 % f
for f in m1:
- state = self.get_entry(f).state
- if state not in b"nrm":
- yield (missing_from_ds, f, state)
+ if narrow_matcher is not None and not narrow_matcher(f):
+ continue
+ entry = self.get_entry(f)
+ if not entry.p1_tracked:
+ yield missing_from_ds % (f, node.short(p1))
--- a/mercurial/dispatch.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/dispatch.py Wed Jan 04 16:02:22 2023 +0100
@@ -980,7 +980,8 @@
lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path)
if rpath:
- path = urlutil.get_clone_path(lui, rpath)[0]
+ path_obj = urlutil.get_clone_path_obj(lui, rpath)
+ path = path_obj.rawloc
lui = ui.copy()
if rcutil.use_repo_hgrc():
_readsharedsourceconfig(lui, path)
--- a/mercurial/exchange.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/exchange.py Wed Jan 04 16:02:22 2023 +0100
@@ -1183,7 +1183,12 @@
trgetter = None
if pushback:
trgetter = pushop.trmanager.transaction
- op = bundle2.processbundle(pushop.repo, reply, trgetter)
+ op = bundle2.processbundle(
+ pushop.repo,
+ reply,
+ trgetter,
+ remote=pushop.remote,
+ )
except error.BundleValueError as exc:
raise error.RemoteError(_(b'missing support for %s') % exc)
except bundle2.AbortFromPart as exc:
@@ -1903,10 +1908,18 @@
try:
op = bundle2.bundleoperation(
- pullop.repo, pullop.gettransaction, source=b'pull'
+ pullop.repo,
+ pullop.gettransaction,
+ source=b'pull',
+ remote=pullop.remote,
)
op.modes[b'bookmarks'] = b'records'
- bundle2.processbundle(pullop.repo, bundle, op=op)
+ bundle2.processbundle(
+ pullop.repo,
+ bundle,
+ op=op,
+ remote=pullop.remote,
+ )
except bundle2.AbortFromPart as exc:
pullop.repo.ui.error(_(b'remote: abort: %s\n') % exc)
raise error.RemoteError(_(b'pull failed on remote'), hint=exc.hint)
@@ -1995,7 +2008,12 @@
).result()
bundleop = bundle2.applybundle(
- pullop.repo, cg, tr, b'pull', pullop.remote.url()
+ pullop.repo,
+ cg,
+ tr,
+ b'pull',
+ pullop.remote.url(),
+ remote=pullop.remote,
)
pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
--- a/mercurial/filelog.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/filelog.py Wed Jan 04 16:02:22 2023 +0100
@@ -111,6 +111,7 @@
assumehaveparentrevisions=False,
deltamode=repository.CG_DELTAMODE_STD,
sidedata_helpers=None,
+ debug_info=None,
):
return self._revlog.emitrevisions(
nodes,
@@ -119,6 +120,7 @@
assumehaveparentrevisions=assumehaveparentrevisions,
deltamode=deltamode,
sidedata_helpers=sidedata_helpers,
+ debug_info=debug_info,
)
def addrevision(
@@ -151,6 +153,8 @@
addrevisioncb=None,
duplicaterevisioncb=None,
maybemissingparents=False,
+ debug_info=None,
+ delta_base_reuse_policy=None,
):
if maybemissingparents:
raise error.Abort(
@@ -171,6 +175,8 @@
transaction,
addrevisioncb=addrevisioncb,
duplicaterevisioncb=duplicaterevisioncb,
+ debug_info=debug_info,
+ delta_base_reuse_policy=delta_base_reuse_policy,
)
def getstrippoint(self, minlink):
--- a/mercurial/filemerge.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/filemerge.py Wed Jan 04 16:02:22 2023 +0100
@@ -158,7 +158,7 @@
continue
p = util.lookupreg(k, _toolstr(ui, tool, b"regname"))
if p:
- p = procutil.findexe(p + _toolstr(ui, tool, b"regappend", b""))
+ p = procutil.findexe(p + _toolstr(ui, tool, b"regappend"))
if p:
return p
exe = _toolstr(ui, tool, b"executable", tool)
--- a/mercurial/help.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/help.py Wed Jan 04 16:02:22 2023 +0100
@@ -10,6 +10,18 @@
import re
import textwrap
+from typing import (
+ Callable,
+ Dict,
+ Iterable,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Union,
+ cast,
+)
+
from .i18n import (
_,
gettext,
@@ -40,7 +52,16 @@
stringutil,
)
-_exclkeywords = {
+_DocLoader = Callable[[uimod.ui], bytes]
+# Old extensions may not register with a category
+_HelpEntry = Union["_HelpEntryNoCategory", "_HelpEntryWithCategory"]
+_HelpEntryNoCategory = Tuple[List[bytes], bytes, _DocLoader]
+_HelpEntryWithCategory = Tuple[List[bytes], bytes, _DocLoader, bytes]
+_SelectFn = Callable[[object], bool]
+_SynonymTable = Dict[bytes, List[bytes]]
+_TopicHook = Callable[[uimod.ui, bytes, bytes], bytes]
+
+_exclkeywords: Set[bytes] = {
b"(ADVANCED)",
b"(DEPRECATED)",
b"(EXPERIMENTAL)",
@@ -56,7 +77,7 @@
# Extensions with custom categories should insert them into this list
# after/before the appropriate item, rather than replacing the list or
# assuming absolute positions.
-CATEGORY_ORDER = [
+CATEGORY_ORDER: List[bytes] = [
registrar.command.CATEGORY_REPO_CREATION,
registrar.command.CATEGORY_REMOTE_REPO_MANAGEMENT,
registrar.command.CATEGORY_COMMITTING,
@@ -74,7 +95,7 @@
# Human-readable category names. These are translated.
# Extensions with custom categories should add their names here.
-CATEGORY_NAMES = {
+CATEGORY_NAMES: Dict[bytes, bytes] = {
registrar.command.CATEGORY_REPO_CREATION: b'Repository creation',
registrar.command.CATEGORY_REMOTE_REPO_MANAGEMENT: b'Remote repository management',
registrar.command.CATEGORY_COMMITTING: b'Change creation',
@@ -102,7 +123,7 @@
# Extensions with custom categories should insert them into this list
# after/before the appropriate item, rather than replacing the list or
# assuming absolute positions.
-TOPIC_CATEGORY_ORDER = [
+TOPIC_CATEGORY_ORDER: List[bytes] = [
TOPIC_CATEGORY_IDS,
TOPIC_CATEGORY_OUTPUT,
TOPIC_CATEGORY_CONFIG,
@@ -112,7 +133,7 @@
]
# Human-readable topic category names. These are translated.
-TOPIC_CATEGORY_NAMES = {
+TOPIC_CATEGORY_NAMES: Dict[bytes, bytes] = {
TOPIC_CATEGORY_IDS: b'Mercurial identifiers',
TOPIC_CATEGORY_OUTPUT: b'Mercurial output',
TOPIC_CATEGORY_CONFIG: b'Mercurial configuration',
@@ -122,7 +143,12 @@
}
-def listexts(header, exts, indent=1, showdeprecated=False):
+def listexts(
+ header: bytes,
+ exts: Dict[bytes, bytes],
+ indent: int = 1,
+ showdeprecated: bool = False,
+) -> List[bytes]:
'''return a text listing of the given extensions'''
rst = []
if exts:
@@ -135,7 +161,7 @@
return rst
-def extshelp(ui):
+def extshelp(ui: uimod.ui) -> bytes:
rst = loaddoc(b'extensions')(ui).splitlines(True)
rst.extend(
listexts(
@@ -153,7 +179,7 @@
return doc
-def parsedefaultmarker(text):
+def parsedefaultmarker(text: bytes) -> Optional[Tuple[bytes, List[bytes]]]:
"""given a text 'abc (DEFAULT: def.ghi)',
returns (b'abc', (b'def', b'ghi')). Otherwise return None"""
if text[-1:] == b')':
@@ -164,7 +190,7 @@
return text[:pos], item.split(b'.', 2)
-def optrst(header, options, verbose, ui):
+def optrst(header: bytes, options, verbose: bool, ui: uimod.ui) -> bytes:
data = []
multioccur = False
for option in options:
@@ -220,13 +246,15 @@
return b''.join(rst)
-def indicateomitted(rst, omitted, notomitted=None):
+def indicateomitted(
+ rst: List[bytes], omitted: bytes, notomitted: Optional[bytes] = None
+) -> None:
rst.append(b'\n\n.. container:: omitted\n\n %s\n\n' % omitted)
if notomitted:
rst.append(b'\n\n.. container:: notomitted\n\n %s\n\n' % notomitted)
-def filtercmd(ui, cmd, func, kw, doc):
+def filtercmd(ui: uimod.ui, cmd: bytes, func, kw: bytes, doc: bytes) -> bool:
if not ui.debugflag and cmd.startswith(b"debug") and kw != b"debug":
# Debug command, and user is not looking for those.
return True
@@ -249,11 +277,13 @@
return False
-def filtertopic(ui, topic):
+def filtertopic(ui: uimod.ui, topic: bytes) -> bool:
return ui.configbool(b'help', b'hidden-topic.%s' % topic, False)
-def topicmatch(ui, commands, kw):
+def topicmatch(
+ ui: uimod.ui, commands, kw: bytes
+) -> Dict[bytes, List[Tuple[bytes, bytes]]]:
"""Return help topics matching kw.
Returns {'section': [(name, summary), ...], ...} where section is
@@ -326,10 +356,10 @@
return results
-def loaddoc(topic, subdir=None):
+def loaddoc(topic: bytes, subdir: Optional[bytes] = None) -> _DocLoader:
"""Return a delayed loader for help/topic.txt."""
- def loader(ui):
+ def loader(ui: uimod.ui) -> bytes:
package = b'mercurial.helptext'
if subdir:
package += b'.' + subdir
@@ -342,7 +372,7 @@
return loader
-internalstable = sorted(
+internalstable: List[_HelpEntryNoCategory] = sorted(
[
(
[b'bid-merge'],
@@ -407,7 +437,7 @@
)
-def internalshelp(ui):
+def internalshelp(ui: uimod.ui) -> bytes:
"""Generate the index for the "internals" topic."""
lines = [
b'To access a subtopic, use "hg help internals.{subtopic-name}"\n',
@@ -419,7 +449,7 @@
return b''.join(lines)
-helptable = sorted(
+helptable: List[_HelpEntryWithCategory] = sorted(
[
(
[b'bundlespec'],
@@ -581,20 +611,27 @@
)
# Maps topics with sub-topics to a list of their sub-topics.
-subtopics = {
+subtopics: Dict[bytes, List[_HelpEntryNoCategory]] = {
b'internals': internalstable,
}
# Map topics to lists of callable taking the current topic help and
# returning the updated version
-helphooks = {}
+helphooks: Dict[bytes, List[_TopicHook]] = {}
-def addtopichook(topic, rewriter):
+def addtopichook(topic: bytes, rewriter: _TopicHook) -> None:
helphooks.setdefault(topic, []).append(rewriter)
-def makeitemsdoc(ui, topic, doc, marker, items, dedent=False):
+def makeitemsdoc(
+ ui: uimod.ui,
+ topic: bytes,
+ doc: bytes,
+ marker: bytes,
+ items: Dict[bytes, bytes],
+ dedent: bool = False,
+) -> bytes:
"""Extract docstring from the items key to function mapping, build a
single documentation block and use it to overwrite the marker in doc.
"""
@@ -622,8 +659,10 @@
return doc.replace(marker, entries)
-def addtopicsymbols(topic, marker, symbols, dedent=False):
- def add(ui, topic, doc):
+def addtopicsymbols(
+ topic: bytes, marker: bytes, symbols, dedent: bool = False
+) -> None:
+ def add(ui: uimod.ui, topic: bytes, doc: bytes):
return makeitemsdoc(ui, topic, doc, marker, symbols, dedent=dedent)
addtopichook(topic, add)
@@ -647,7 +686,7 @@
)
-def inserttweakrc(ui, topic, doc):
+def inserttweakrc(ui: uimod.ui, topic: bytes, doc: bytes) -> bytes:
marker = b'.. tweakdefaultsmarker'
repl = uimod.tweakrc
@@ -658,7 +697,9 @@
return re.sub(br'( *)%s' % re.escape(marker), sub, doc)
-def _getcategorizedhelpcmds(ui, cmdtable, name, select=None):
+def _getcategorizedhelpcmds(
+ ui: uimod.ui, cmdtable, name: bytes, select: Optional[_SelectFn] = None
+) -> Tuple[Dict[bytes, List[bytes]], Dict[bytes, bytes], _SynonymTable]:
# Category -> list of commands
cats = {}
# Command -> short description
@@ -687,16 +728,18 @@
return cats, h, syns
-def _getcategorizedhelptopics(ui, topictable):
+def _getcategorizedhelptopics(
+ ui: uimod.ui, topictable: List[_HelpEntry]
+) -> Tuple[Dict[bytes, List[Tuple[bytes, bytes]]], Dict[bytes, List[bytes]]]:
# Group commands by category.
topiccats = {}
syns = {}
for topic in topictable:
names, header, doc = topic[0:3]
if len(topic) > 3 and topic[3]:
- category = topic[3]
+ category: bytes = cast(bytes, topic[3]) # help pytype
else:
- category = TOPIC_CATEGORY_NONE
+ category: bytes = TOPIC_CATEGORY_NONE
topicname = names[0]
syns[topicname] = list(names)
@@ -709,15 +752,15 @@
def help_(
- ui,
+ ui: uimod.ui,
commands,
- name,
- unknowncmd=False,
- full=True,
- subtopic=None,
- fullname=None,
+ name: bytes,
+ unknowncmd: bool = False,
+ full: bool = True,
+ subtopic: Optional[bytes] = None,
+ fullname: Optional[bytes] = None,
**opts
-):
+) -> bytes:
"""
Generate the help for 'name' as unformatted restructured text. If
'name' is None, describe the commands available.
@@ -725,7 +768,7 @@
opts = pycompat.byteskwargs(opts)
- def helpcmd(name, subtopic=None):
+ def helpcmd(name: bytes, subtopic: Optional[bytes]) -> List[bytes]:
try:
aliases, entry = cmdutil.findcmd(
name, commands.table, strict=unknowncmd
@@ -826,7 +869,7 @@
return rst
- def helplist(select=None, **opts):
+ def helplist(select: Optional[_SelectFn] = None, **opts) -> List[bytes]:
cats, h, syns = _getcategorizedhelpcmds(
ui, commands.table, name, select
)
@@ -846,7 +889,7 @@
else:
rst.append(_(b'list of commands:\n'))
- def appendcmds(cmds):
+ def appendcmds(cmds: Iterable[bytes]) -> None:
cmds = sorted(cmds)
for c in cmds:
display_cmd = c
@@ -955,7 +998,7 @@
)
return rst
- def helptopic(name, subtopic=None):
+ def helptopic(name: bytes, subtopic: Optional[bytes] = None) -> List[bytes]:
# Look for sub-topic entry first.
header, doc = None, None
if subtopic and name in subtopics:
@@ -998,7 +1041,7 @@
pass
return rst
- def helpext(name, subtopic=None):
+ def helpext(name: bytes, subtopic: Optional[bytes] = None) -> List[bytes]:
try:
mod = extensions.find(name)
doc = gettext(pycompat.getdoc(mod)) or _(b'no help text available')
@@ -1040,7 +1083,9 @@
)
return rst
- def helpextcmd(name, subtopic=None):
+ def helpextcmd(
+ name: bytes, subtopic: Optional[bytes] = None
+ ) -> List[bytes]:
cmd, ext, doc = extensions.disabledcmd(
ui, name, ui.configbool(b'ui', b'strict')
)
@@ -1127,8 +1172,14 @@
def formattedhelp(
- ui, commands, fullname, keep=None, unknowncmd=False, full=True, **opts
-):
+ ui: uimod.ui,
+ commands,
+ fullname: Optional[bytes],
+ keep: Optional[Iterable[bytes]] = None,
+ unknowncmd: bool = False,
+ full: bool = True,
+ **opts
+) -> bytes:
"""get help for a given topic (as a dotted name) as rendered rst
Either returns the rendered help text or raises an exception.
--- a/mercurial/helptext/config.txt Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/helptext/config.txt Wed Jan 04 16:02:22 2023 +0100
@@ -1922,6 +1922,42 @@
- ``ignore``: ignore bookmarks during exchange.
(This currently only affect pulling)
+.. container:: verbose
+
+ ``delta-reuse-policy``
+ Control the policy regarding deltas sent by the remote during pulls.
+
+ This is an advanced option that non-admin users should not need to understand
+ or set. This option can be used to speed up pulls from trusted central
+ servers, or to fix-up deltas from older servers.
+
+ It supports the following values:
+
+ - ``default``: use the policy defined by
+ `storage.revlog.reuse-external-delta-parent`,
+
+ - ``no-reuse``: start a new optimal delta search for each new revision we add
+ to the repository. The deltas from the server will be reused when the base
+ it applies to is tested (this can be frequent if that base is the one and
+ unique parent of that revision). This can significantly slow down pulls but
+ will result in an optimized storage space if the remote peer is sending poor
+ quality deltas.
+
+ - ``try-base``: try to reuse the deltas from the remote peer as long as they
+ create a valid delta-chain in the local repository. This speeds up the
+ unbundling process, but can result in sub-optimal storage space if the
+ remote peer is sending poor quality deltas.
+
+ - ``forced``: the deltas from the peer will be reused in all cases, even if
+ the resulting delta-chain is "invalid". This setting will ensure the bundle
+ is applied at minimal CPU cost, but it can result in longer delta chains
+ being created on the client, making revisions potentially slower to access
+ in the future. If you think you need this option, you should make sure you
+ are also talking to the Mercurial developer community to get confirmation.
+
+ See `hg help config.storage.revlog.reuse-external-delta-parent` for a similar
+ global option. That option defines the behavior of `default`.
+
The following special named paths exist:
``default``
@@ -2281,6 +2317,21 @@
To fix affected revisions that already exist within the repository, one can
use :hg:`debug-repair-issue-6528`.
+.. container:: verbose
+
+ ``revlog.delta-parent-search.candidate-group-chunk-size``
+ Tune the number of delta bases the storage will consider in the
+ same "round" of search. In some very rare cases, using a smaller value
+ might result in faster processing at the possible expense of storage
+ space, while using larger values might result in slower processing at the
+ possible benefit of storage space. A value of "0" means no limitation.
+
+ default: no limitation
+
+ It is unlikely that you'll have to tune this configuration. If you think
+ you do, consider talking with the mercurial developer community about your
+ repositories.
+
``revlog.optimize-delta-parent-choice``
When storing a merge revision, both parents will be equally considered as
a possible delta base. This results in better delta selection and improved
--- a/mercurial/helptext/rust.txt Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/helptext/rust.txt Wed Jan 04 16:02:22 2023 +0100
@@ -76,8 +76,8 @@
MSRV
====
-The minimum supported Rust version is currently 1.48.0. The project's policy is
-to follow the version from Debian stable, to make the distributions' job easier.
+The minimum supported Rust version is currently 1.61.0. The project's policy is
+to follow the version from Debian testing, to make the distributions' job easier.
rhg
===
--- a/mercurial/hg.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/hg.py Wed Jan 04 16:02:22 2023 +0100
@@ -65,28 +65,12 @@
sharedbookmarks = b'bookmarks'
-def _local(path):
- path = util.expandpath(urlutil.urllocalpath(path))
-
- try:
- # we use os.stat() directly here instead of os.path.isfile()
- # because the latter started returning `False` on invalid path
- # exceptions starting in 3.8 and we care about handling
- # invalid paths specially here.
- st = os.stat(path)
- isfile = stat.S_ISREG(st.st_mode)
- except ValueError as e:
- raise error.Abort(
- _(b'invalid path %s: %s') % (path, stringutil.forcebytestr(e))
- )
- except OSError:
- isfile = False
-
- return isfile and bundlerepo or localrepo
-
-
def addbranchrevs(lrepo, other, branches, revs):
- peer = other.peer() # a courtesy to callers using a localrepo for other
+ if util.safehasattr(other, 'peer'):
+ # a courtesy to callers using a localrepo for other
+ peer = other.peer()
+ else:
+ peer = other
hashbranch, branches = branches
if not hashbranch and not branches:
x = revs or None
@@ -129,10 +113,47 @@
return revs, revs[0]
-schemes = {
+def _isfile(path):
+ try:
+ # we use os.stat() directly here instead of os.path.isfile()
+ # because the latter started returning `False` on invalid path
+ # exceptions starting in 3.8 and we care about handling
+ # invalid paths specially here.
+ st = os.stat(path)
+ except ValueError as e:
+ msg = stringutil.forcebytestr(e)
+ raise error.Abort(_(b'invalid path %s: %s') % (path, msg))
+ except OSError:
+ return False
+ else:
+ return stat.S_ISREG(st.st_mode)
+
+
+class LocalFactory:
+ """thin wrapper to dispatch between localrepo and bundle repo"""
+
+ @staticmethod
+ def islocal(path: bytes) -> bool:
+ path = util.expandpath(urlutil.urllocalpath(path))
+ return not _isfile(path)
+
+ @staticmethod
+ def instance(ui, path, *args, **kwargs):
+ path = util.expandpath(urlutil.urllocalpath(path))
+ if _isfile(path):
+ cls = bundlerepo
+ else:
+ cls = localrepo
+ return cls.instance(ui, path, *args, **kwargs)
+
+
+repo_schemes = {
b'bundle': bundlerepo,
b'union': unionrepo,
- b'file': _local,
+ b'file': LocalFactory,
+}
+
+peer_schemes = {
b'http': httppeer,
b'https': httppeer,
b'ssh': sshpeer,
@@ -140,27 +161,23 @@
}
-def _peerlookup(path):
- u = urlutil.url(path)
- scheme = u.scheme or b'file'
- thing = schemes.get(scheme) or schemes[b'file']
- try:
- return thing(path)
- except TypeError:
- # we can't test callable(thing) because 'thing' can be an unloaded
- # module that implements __call__
- if not util.safehasattr(thing, b'instance'):
- raise
- return thing
-
-
def islocal(repo):
'''return true if repo (or path pointing to repo) is local'''
if isinstance(repo, bytes):
- try:
- return _peerlookup(repo).islocal(repo)
- except AttributeError:
- return False
+ u = urlutil.url(repo)
+ scheme = u.scheme or b'file'
+ if scheme in peer_schemes:
+ cls = peer_schemes[scheme]
+ cls.make_peer # make sure we load the module
+ elif scheme in repo_schemes:
+ cls = repo_schemes[scheme]
+ cls.instance # make sure we load the module
+ else:
+ cls = LocalFactory
+ if util.safehasattr(cls, 'islocal'):
+ return cls.islocal(repo) # pytype: disable=module-attr
+ return False
+ repo.ui.deprecwarn(b"use obj.local() instead of islocal(obj)", b"6.4")
return repo.local()
@@ -177,13 +194,7 @@
wirepeersetupfuncs = []
-def _peerorrepo(
- ui, path, create=False, presetupfuncs=None, intents=None, createopts=None
-):
- """return a repository object for the specified path"""
- obj = _peerlookup(path).instance(
- ui, path, create, intents=intents, createopts=createopts
- )
+def _setup_repo_or_peer(ui, obj, presetupfuncs=None):
ui = getattr(obj, "ui", ui)
for f in presetupfuncs or []:
f(ui, obj)
@@ -195,14 +206,12 @@
if hook:
with util.timedcm('reposetup %r', name) as stats:
hook(ui, obj)
- ui.log(
- b'extension', b' > reposetup for %s took %s\n', name, stats
- )
+ msg = b' > reposetup for %s took %s\n'
+ ui.log(b'extension', msg, name, stats)
ui.log(b'extension', b'> all reposetup took %s\n', allreposetupstats)
if not obj.local():
for f in wirepeersetupfuncs:
f(ui, obj)
- return obj
def repository(
@@ -214,28 +223,59 @@
createopts=None,
):
"""return a repository object for the specified path"""
- peer = _peerorrepo(
+ scheme = urlutil.url(path).scheme
+ if scheme is None:
+ scheme = b'file'
+ cls = repo_schemes.get(scheme)
+ if cls is None:
+ if scheme in peer_schemes:
+ raise error.Abort(_(b"repository '%s' is not local") % path)
+ cls = LocalFactory
+ repo = cls.instance(
ui,
path,
create,
- presetupfuncs=presetupfuncs,
intents=intents,
createopts=createopts,
)
- repo = peer.local()
- if not repo:
- raise error.Abort(
- _(b"repository '%s' is not local") % (path or peer.url())
- )
+ _setup_repo_or_peer(ui, repo, presetupfuncs=presetupfuncs)
return repo.filtered(b'visible')
def peer(uiorrepo, opts, path, create=False, intents=None, createopts=None):
'''return a repository peer for the specified path'''
+ ui = getattr(uiorrepo, 'ui', uiorrepo)
rui = remoteui(uiorrepo, opts)
- return _peerorrepo(
- rui, path, create, intents=intents, createopts=createopts
- ).peer()
+ if util.safehasattr(path, 'url'):
+ # this is already a urlutil.path object
+ peer_path = path
+ else:
+ peer_path = urlutil.path(ui, None, rawloc=path, validate_path=False)
+ scheme = peer_path.url.scheme # pytype: disable=attribute-error
+ if scheme in peer_schemes:
+ cls = peer_schemes[scheme]
+ peer = cls.make_peer(
+ rui,
+ peer_path,
+ create,
+ intents=intents,
+ createopts=createopts,
+ )
+ _setup_repo_or_peer(rui, peer)
+ else:
+ # this is a repository
+ repo_path = peer_path.loc # pytype: disable=attribute-error
+ if not repo_path:
+ repo_path = peer_path.rawloc # pytype: disable=attribute-error
+ repo = repository(
+ rui,
+ repo_path,
+ create,
+ intents=intents,
+ createopts=createopts,
+ )
+ peer = repo.peer(path=peer_path)
+ return peer
def defaultdest(source):
@@ -290,17 +330,23 @@
):
'''create a shared repository'''
- if not islocal(source):
- raise error.Abort(_(b'can only share local repositories'))
+ not_local_msg = _(b'can only share local repositories')
+ if util.safehasattr(source, 'local'):
+ if source.local() is None:
+ raise error.Abort(not_local_msg)
+ elif not islocal(source):
+        # XXX why are we getting bytes here?
+ raise error.Abort(not_local_msg)
if not dest:
dest = defaultdest(source)
else:
- dest = urlutil.get_clone_path(ui, dest)[1]
+ dest = urlutil.get_clone_path_obj(ui, dest).loc
if isinstance(source, bytes):
- origsource, source, branches = urlutil.get_clone_path(ui, source)
- srcrepo = repository(ui, source)
+ source_path = urlutil.get_clone_path_obj(ui, source)
+ srcrepo = repository(ui, source_path.loc)
+ branches = (source_path.branch, [])
rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
else:
srcrepo = source.local()
@@ -661,12 +707,23 @@
"""
if isinstance(source, bytes):
- src = urlutil.get_clone_path(ui, source, branch)
- origsource, source, branches = src
- srcpeer = peer(ui, peeropts, source)
+ src_path = urlutil.get_clone_path_obj(ui, source)
+ if src_path is None:
+ srcpeer = peer(ui, peeropts, b'')
+ origsource = source = b''
+ branches = (None, branch or [])
+ else:
+ srcpeer = peer(ui, peeropts, src_path)
+ origsource = src_path.rawloc
+ branches = (src_path.branch, branch or [])
+ source = src_path.loc
else:
- srcpeer = source.peer() # in case we were called with a localrepo
+ if util.safehasattr(source, 'peer'):
+ srcpeer = source.peer() # in case we were called with a localrepo
+ else:
+ srcpeer = source
branches = (None, branch or [])
+        # XXX path: simply use the peer `path` object when this becomes available
origsource = source = srcpeer.url()
srclock = destlock = destwlock = cleandir = None
destpeer = None
@@ -678,7 +735,11 @@
if dest:
ui.status(_(b"destination directory: %s\n") % dest)
else:
- dest = urlutil.get_clone_path(ui, dest)[0]
+ dest_path = urlutil.get_clone_path_obj(ui, dest)
+ if dest_path is not None:
+ dest = dest_path.rawloc
+ else:
+ dest = b''
dest = urlutil.urllocalpath(dest)
source = urlutil.urllocalpath(source)
@@ -1271,23 +1332,28 @@
msg %= len(srcs)
raise error.Abort(msg)
path = srcs[0]
- source, branches = urlutil.parseurl(path.rawloc, opts.get(b'branch'))
- if subpath is not None:
+ if subpath is None:
+ peer_path = path
+ url = path.loc
+ else:
+ # XXX path: we are losing the `path` object here. Keeping it would be
+ # valuable. For example as a "variant" as we do for pushes.
subpath = urlutil.url(subpath)
if subpath.isabs():
- source = bytes(subpath)
+ peer_path = url = bytes(subpath)
else:
- p = urlutil.url(source)
+ p = urlutil.url(path.loc)
if p.islocal():
normpath = os.path.normpath
else:
normpath = posixpath.normpath
p.path = normpath(b'%s/%s' % (p.path, subpath))
- source = bytes(p)
- other = peer(repo, opts, source)
+ peer_path = url = bytes(p)
+ other = peer(repo, opts, peer_path)
cleanupfn = other.close
try:
- ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(source))
+ ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url))
+ branches = (path.branch, opts.get(b'branch', []))
revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev'))
if revs:
@@ -1346,7 +1412,7 @@
out = set()
others = []
for path in urlutil.get_push_paths(repo, ui, dests):
- dest = path.pushloc or path.loc
+ dest = path.loc
if subpath is not None:
subpath = urlutil.url(subpath)
if subpath.isabs():
--- a/mercurial/httppeer.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/httppeer.py Wed Jan 04 16:02:22 2023 +0100
@@ -382,8 +382,7 @@
class httppeer(wireprotov1peer.wirepeer):
def __init__(self, ui, path, url, opener, requestbuilder, caps):
- self.ui = ui
- self._path = path
+ super().__init__(ui, path=path)
self._url = url
self._caps = caps
self.limitedarguments = caps is not None and b'httppostargs' not in caps
@@ -398,14 +397,11 @@
# Begin of ipeerconnection interface.
def url(self):
- return self._path
+ return self.path.loc
def local(self):
return None
- def peer(self):
- return self
-
def canpush(self):
return True
@@ -605,14 +601,13 @@
``requestbuilder`` is the type used for constructing HTTP requests.
It exists as an argument so extensions can override the default.
"""
- u = urlutil.url(path)
- if u.query or u.fragment:
- raise error.Abort(
- _(b'unsupported URL component: "%s"') % (u.query or u.fragment)
- )
+ if path.url.query or path.url.fragment:
+ msg = _(b'unsupported URL component: "%s"')
+ msg %= path.url.query or path.url.fragment
+ raise error.Abort(msg)
# urllib cannot handle URLs with embedded user or passwd.
- url, authinfo = u.authinfo()
+ url, authinfo = path.url.authinfo()
ui.debug(b'using %s\n' % url)
opener = opener or urlmod.opener(ui, authinfo)
@@ -624,11 +619,11 @@
)
-def instance(ui, path, create, intents=None, createopts=None):
+def make_peer(ui, path, create, intents=None, createopts=None):
if create:
raise error.Abort(_(b'cannot create new http repository'))
try:
- if path.startswith(b'https:') and not urlmod.has_https:
+ if path.url.scheme == b'https' and not urlmod.has_https:
raise error.Abort(
_(b'Python support for SSL and HTTPS is not installed')
)
@@ -638,7 +633,7 @@
return inst
except error.RepoError as httpexception:
try:
- r = statichttprepo.instance(ui, b"static-" + path, create)
+ r = statichttprepo.make_peer(ui, b"static-" + path.loc, create)
ui.note(_(b'(falling back to static-http)\n'))
return r
except error.RepoError:
--- a/mercurial/interfaces/repository.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/interfaces/repository.py Wed Jan 04 16:02:22 2023 +0100
@@ -103,6 +103,7 @@
"""
ui = interfaceutil.Attribute("""ui.ui instance""")
+ path = interfaceutil.Attribute("""a urlutil.path instance or None""")
def url():
"""Returns a URL string representing this peer.
@@ -123,12 +124,6 @@
can be used to interface with it. Otherwise returns ``None``.
"""
- def peer():
- """Returns an object conforming to this interface.
-
- Most implementations will ``return self``.
- """
-
def canpush():
"""Returns a boolean indicating if this peer can be pushed to."""
@@ -393,6 +388,10 @@
limitedarguments = False
+ def __init__(self, ui, path=None):
+ self.ui = ui
+ self.path = path
+
def capable(self, name):
caps = self.capabilities()
if name in caps:
@@ -1613,7 +1612,7 @@
def close():
"""Close the handle on this repository."""
- def peer():
+ def peer(path=None):
"""Obtain an object conforming to the ``peer`` interface."""
def unfiltered():
--- a/mercurial/localrepo.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/localrepo.py Wed Jan 04 16:02:22 2023 +0100
@@ -15,6 +15,10 @@
import weakref
from concurrent import futures
+from typing import (
+ Optional,
+)
+
from .i18n import _
from .node import (
bin,
@@ -299,13 +303,12 @@
class localpeer(repository.peer):
'''peer for a local repo; reflects only the most recent API'''
- def __init__(self, repo, caps=None):
- super(localpeer, self).__init__()
+ def __init__(self, repo, caps=None, path=None):
+ super(localpeer, self).__init__(repo.ui, path=path)
if caps is None:
caps = moderncaps.copy()
self._repo = repo.filtered(b'served')
- self.ui = repo.ui
if repo._wanted_sidedata:
formatted = bundle2.format_remote_wanted_sidedata(repo)
@@ -321,9 +324,6 @@
def local(self):
return self._repo
- def peer(self):
- return self
-
def canpush(self):
return True
@@ -451,8 +451,8 @@
"""peer extension which implements legacy methods too; used for tests with
restricted capabilities"""
- def __init__(self, repo):
- super(locallegacypeer, self).__init__(repo, caps=legacycaps)
+ def __init__(self, repo, path=None):
+ super(locallegacypeer, self).__init__(repo, caps=legacycaps, path=path)
# Begin of baselegacywirecommands interface.
@@ -526,7 +526,7 @@
return set(read(b'requires').splitlines())
-def makelocalrepository(baseui, path, intents=None):
+def makelocalrepository(baseui, path: bytes, intents=None):
"""Create a local repository object.
Given arguments needed to construct a local repository, this function
@@ -845,7 +845,13 @@
)
-def loadhgrc(ui, wdirvfs, hgvfs, requirements, sharedvfs=None):
+def loadhgrc(
+ ui,
+ wdirvfs: vfsmod.vfs,
+ hgvfs: vfsmod.vfs,
+ requirements,
+ sharedvfs: Optional[vfsmod.vfs] = None,
+):
"""Load hgrc files/content into a ui instance.
This is called during repository opening to load any additional
@@ -1058,6 +1064,8 @@
options[b'revlogv2'] = True
if requirementsmod.CHANGELOGV2_REQUIREMENT in requirements:
options[b'changelogv2'] = True
+ cmp_rank = ui.configbool(b'experimental', b'changelog-v2.compute-rank')
+ options[b'changelogv2.compute-rank'] = cmp_rank
if requirementsmod.GENERALDELTA_REQUIREMENT in requirements:
options[b'generaldelta'] = True
@@ -1071,6 +1079,11 @@
b'storage', b'revlog.optimize-delta-parent-choice'
)
options[b'deltabothparents'] = deltabothparents
+ dps_cgds = ui.configint(
+ b'storage',
+ b'revlog.delta-parent-search.candidate-group-chunk-size',
+ )
+ options[b'delta-parent-search.candidate-group-chunk-size'] = dps_cgds
options[b'debug-delta'] = ui.configbool(b'debug', b'revlog.debug-delta')
issue6528 = ui.configbool(b'storage', b'revlog.issue6528.fix-incoming')
@@ -1323,15 +1336,15 @@
self,
baseui,
ui,
- origroot,
- wdirvfs,
- hgvfs,
+ origroot: bytes,
+ wdirvfs: vfsmod.vfs,
+ hgvfs: vfsmod.vfs,
requirements,
supportedrequirements,
- sharedpath,
+ sharedpath: bytes,
store,
- cachevfs,
- wcachevfs,
+ cachevfs: vfsmod.vfs,
+ wcachevfs: vfsmod.vfs,
features,
intents=None,
):
@@ -1620,8 +1633,8 @@
parts.pop()
return False
- def peer(self):
- return localpeer(self) # not cached to avoid reference cycle
+ def peer(self, path=None):
+ return localpeer(self, path=path) # not cached to avoid reference cycle
def unfiltered(self):
"""Return unfiltered version of the repository
@@ -1977,7 +1990,7 @@
def __iter__(self):
return iter(self.changelog)
- def revs(self, expr, *args):
+ def revs(self, expr: bytes, *args):
"""Find revisions matching a revset.
The revset is specified as a string ``expr`` that may contain
@@ -1993,7 +2006,7 @@
tree = revsetlang.spectree(expr, *args)
return revset.makematcher(tree)(self)
- def set(self, expr, *args):
+ def set(self, expr: bytes, *args):
"""Find revisions matching a revset and emit changectx instances.
This is a convenience wrapper around ``revs()`` that iterates the
@@ -2005,7 +2018,7 @@
for r in self.revs(expr, *args):
yield self[r]
- def anyrevs(self, specs, user=False, localalias=None):
+ def anyrevs(self, specs: bytes, user=False, localalias=None):
"""Find revisions matching one of the given revsets.
Revset aliases from the configuration are not expanded by default. To
@@ -2030,7 +2043,7 @@
m = revset.matchany(None, specs, localalias=localalias)
return m(self)
- def url(self):
+ def url(self) -> bytes:
return b'file:' + self.root
def hook(self, name, throw=False, **args):
@@ -2229,7 +2242,7 @@
return b'store'
return None
- def wjoin(self, f, *insidef):
+ def wjoin(self, f: bytes, *insidef: bytes) -> bytes:
return self.vfs.reljoin(self.root, f, *insidef)
def setparents(self, p1, p2=None):
@@ -2238,17 +2251,17 @@
self[None].setparents(p1, p2)
self._quick_access_changeid_invalidate()
- def filectx(self, path, changeid=None, fileid=None, changectx=None):
+ def filectx(self, path: bytes, changeid=None, fileid=None, changectx=None):
"""changeid must be a changeset revision, if specified.
fileid can be a file revision or node."""
return context.filectx(
self, path, changeid, fileid, changectx=changectx
)
- def getcwd(self):
+ def getcwd(self) -> bytes:
return self.dirstate.getcwd()
- def pathto(self, f, cwd=None):
+ def pathto(self, f: bytes, cwd: Optional[bytes] = None) -> bytes:
return self.dirstate.pathto(f, cwd)
def _loadfilter(self, filter):
@@ -2300,14 +2313,21 @@
def adddatafilter(self, name, filter):
self._datafilters[name] = filter
- def wread(self, filename):
+ def wread(self, filename: bytes) -> bytes:
if self.wvfs.islink(filename):
data = self.wvfs.readlink(filename)
else:
data = self.wvfs.read(filename)
return self._filter(self._encodefilterpats, filename, data)
- def wwrite(self, filename, data, flags, backgroundclose=False, **kwargs):
+ def wwrite(
+ self,
+ filename: bytes,
+ data: bytes,
+ flags: bytes,
+ backgroundclose=False,
+ **kwargs
+ ) -> int:
"""write ``data`` into ``filename`` in the working directory
This returns length of written (maybe decoded) data.
@@ -2325,7 +2345,7 @@
self.wvfs.setflags(filename, False, False)
return len(data)
- def wwritedata(self, filename, data):
+ def wwritedata(self, filename: bytes, data: bytes) -> bytes:
return self._filter(self._decodefilterpats, filename, data)
def currenttransaction(self):
@@ -3520,13 +3540,13 @@
return a
-def undoname(fn):
+def undoname(fn: bytes) -> bytes:
base, name = os.path.split(fn)
assert name.startswith(b'journal')
return os.path.join(base, name.replace(b'journal', b'undo', 1))
-def instance(ui, path, create, intents=None, createopts=None):
+def instance(ui, path: bytes, create, intents=None, createopts=None):
# prevent cyclic import localrepo -> upgrade -> localrepo
from . import upgrade
@@ -3543,7 +3563,7 @@
return repo
-def islocal(path):
+def islocal(path: bytes) -> bool:
return True
@@ -3803,7 +3823,7 @@
return {k: v for k, v in createopts.items() if k not in known}
-def createrepository(ui, path, createopts=None, requirements=None):
+def createrepository(ui, path: bytes, createopts=None, requirements=None):
"""Create a new repository in a vfs.
``path`` path to the new repo's working directory.
--- a/mercurial/logexchange.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/logexchange.py Wed Jan 04 16:02:22 2023 +0100
@@ -113,7 +113,7 @@
if local:
rpath = util.pconvert(remote._repo.root)
elif not isinstance(remote, bytes):
- rpath = remote._url
+ rpath = remote.url()
# represent the remotepath with user defined path name if exists
for path, url in repo.ui.configitems(b'paths'):
--- a/mercurial/manifest.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/manifest.py Wed Jan 04 16:02:22 2023 +0100
@@ -1836,6 +1836,7 @@
assumehaveparentrevisions=False,
deltamode=repository.CG_DELTAMODE_STD,
sidedata_helpers=None,
+ debug_info=None,
):
return self._revlog.emitrevisions(
nodes,
@@ -1844,6 +1845,7 @@
assumehaveparentrevisions=assumehaveparentrevisions,
deltamode=deltamode,
sidedata_helpers=sidedata_helpers,
+ debug_info=debug_info,
)
def addgroup(
@@ -1854,6 +1856,8 @@
alwayscache=False,
addrevisioncb=None,
duplicaterevisioncb=None,
+ debug_info=None,
+ delta_base_reuse_policy=None,
):
return self._revlog.addgroup(
deltas,
@@ -1862,6 +1866,8 @@
alwayscache=alwayscache,
addrevisioncb=addrevisioncb,
duplicaterevisioncb=duplicaterevisioncb,
+ debug_info=debug_info,
+ delta_base_reuse_policy=delta_base_reuse_policy,
)
def rawsize(self, rev):
--- a/mercurial/match.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/match.py Wed Jan 04 16:02:22 2023 +0100
@@ -368,7 +368,7 @@
% (
pat,
inst.message,
- ) # pytype: disable=unsupported-operands
+ )
)
except IOError as inst:
if warn:
--- a/mercurial/policy.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/policy.py Wed Jan 04 16:02:22 2023 +0100
@@ -76,7 +76,7 @@
('cext', 'bdiff'): 3,
('cext', 'mpatch'): 1,
('cext', 'osutil'): 4,
- ('cext', 'parsers'): 20,
+ ('cext', 'parsers'): 21,
}
# map import request to other package or module
--- a/mercurial/posix.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/posix.py Wed Jan 04 16:02:22 2023 +0100
@@ -17,8 +17,23 @@
import stat
import sys
import tempfile
+import typing
import unicodedata
+from typing import (
+ Any,
+ AnyStr,
+ Iterable,
+ Iterator,
+ List,
+ Match,
+ NoReturn,
+ Optional,
+ Sequence,
+ Tuple,
+ Union,
+)
+
from .i18n import _
from .pycompat import (
getattr,
@@ -44,7 +59,7 @@
# vaguely unix-like but don't have hardlink support. For those
# poor souls, just say we tried and that it failed so we fall back
# to copies.
- def oslink(src, dst):
+ def oslink(src: bytes, dst: bytes) -> NoReturn:
raise OSError(
errno.EINVAL, b'hardlinks not supported: %s to %s' % (src, dst)
)
@@ -54,15 +69,47 @@
unlink = os.unlink
rename = os.rename
removedirs = os.removedirs
-expandglobs = False
+
+if typing.TYPE_CHECKING:
+ # Replace the various overloads that come along with aliasing stdlib methods
+ # with the narrow definition that we care about in the type checking phase
+ # only. This ensures that both Windows and POSIX see only the definition
+ # that is actually available.
+ #
+ # Note that if we check pycompat.TYPE_CHECKING here, it is always False, and
+ # the methods aren't replaced.
+
+ def normpath(path: bytes) -> bytes:
+ raise NotImplementedError
+
+ def abspath(path: AnyStr) -> AnyStr:
+ raise NotImplementedError
-umask = os.umask(0)
+ def oslink(src: bytes, dst: bytes) -> None:
+ raise NotImplementedError
+
+ def readlink(path: bytes) -> bytes:
+ raise NotImplementedError
+
+ def unlink(path: bytes) -> None:
+ raise NotImplementedError
+
+ def rename(src: bytes, dst: bytes) -> None:
+ raise NotImplementedError
+
+ def removedirs(name: bytes) -> None:
+ raise NotImplementedError
+
+
+expandglobs: bool = False
+
+umask: int = os.umask(0)
os.umask(umask)
posixfile = open
-def split(p):
+def split(p: bytes) -> Tuple[bytes, bytes]:
"""Same as posixpath.split, but faster
>>> import posixpath
@@ -85,17 +132,17 @@
return ht[0] + b'/', ht[1]
-def openhardlinks():
+def openhardlinks() -> bool:
'''return true if it is safe to hold open file handles to hardlinks'''
return True
-def nlinks(name):
+def nlinks(name: bytes) -> int:
'''return number of hardlinks for the given file'''
return os.lstat(name).st_nlink
-def parsepatchoutput(output_line):
+def parsepatchoutput(output_line: bytes) -> bytes:
"""parses the output produced by patch and returns the filename"""
pf = output_line[14:]
if pycompat.sysplatform == b'OpenVMS':
@@ -107,7 +154,9 @@
return pf
-def sshargs(sshcmd, host, user, port):
+def sshargs(
+ sshcmd: bytes, host: bytes, user: Optional[bytes], port: Optional[bytes]
+) -> bytes:
'''Build argument list for ssh'''
args = user and (b"%s@%s" % (user, host)) or host
if b'-' in args[:1]:
@@ -120,12 +169,12 @@
return args
-def isexec(f):
+def isexec(f: bytes) -> bool:
"""check whether a file is executable"""
return os.lstat(f).st_mode & 0o100 != 0
-def setflags(f, l, x):
+def setflags(f: bytes, l: bool, x: bool) -> None:
st = os.lstat(f)
s = st.st_mode
if l:
@@ -169,7 +218,12 @@
os.chmod(f, s & 0o666)
-def copymode(src, dst, mode=None, enforcewritable=False):
+def copymode(
+ src: bytes,
+ dst: bytes,
+ mode: Optional[bytes] = None,
+ enforcewritable: bool = False,
+) -> None:
"""Copy the file mode from the file at path src to dst.
If src doesn't exist, we're using mode instead. If mode is None, we're
using umask."""
@@ -189,7 +243,7 @@
os.chmod(dst, new_mode)
-def checkexec(path):
+def checkexec(path: bytes) -> bool:
"""
Check whether the given path is on a filesystem with UNIX-like exec flags
@@ -269,7 +323,7 @@
return False
-def checklink(path):
+def checklink(path: bytes) -> bool:
"""check whether the given path is on a symlink-capable filesystem"""
# mktemp is not racy because symlink creation will fail if the
# file already exists
@@ -334,13 +388,13 @@
return False
-def checkosfilename(path):
+def checkosfilename(path: bytes) -> Optional[bytes]:
"""Check that the base-relative path is a valid filename on this platform.
Returns None if the path is ok, or a UI string describing the problem."""
return None # on posix platforms, every path is ok
-def getfsmountpoint(dirpath):
+def getfsmountpoint(dirpath: bytes) -> Optional[bytes]:
"""Get the filesystem mount point from a directory (best-effort)
Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
@@ -348,7 +402,7 @@
return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
-def getfstype(dirpath):
+def getfstype(dirpath: bytes) -> Optional[bytes]:
"""Get the filesystem type name from a directory (best-effort)
Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
@@ -356,29 +410,29 @@
return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
-def get_password():
+def get_password() -> bytes:
return encoding.strtolocal(getpass.getpass(''))
-def setbinary(fd):
+def setbinary(fd) -> None:
pass
-def pconvert(path):
+def pconvert(path: bytes) -> bytes:
return path
-def localpath(path):
+def localpath(path: bytes) -> bytes:
return path
-def samefile(fpath1, fpath2):
+def samefile(fpath1: bytes, fpath2: bytes) -> bool:
"""Returns whether path1 and path2 refer to the same file. This is only
guaranteed to work for files, not directories."""
return os.path.samefile(fpath1, fpath2)
-def samedevice(fpath1, fpath2):
+def samedevice(fpath1: bytes, fpath2: bytes) -> bool:
"""Returns whether fpath1 and fpath2 are on the same device. This is only
guaranteed to work for files, not directories."""
st1 = os.lstat(fpath1)
@@ -387,18 +441,18 @@
# os.path.normcase is a no-op, which doesn't help us on non-native filesystems
-def normcase(path):
+def normcase(path: bytes) -> bytes:
return path.lower()
# what normcase does to ASCII strings
-normcasespec = encoding.normcasespecs.lower
+normcasespec: int = encoding.normcasespecs.lower
# fallback normcase function for non-ASCII strings
normcasefallback = normcase
if pycompat.isdarwin:
- def normcase(path):
+ def normcase(path: bytes) -> bytes:
"""
Normalize a filename for OS X-compatible comparison:
- escape-encode invalid characters
@@ -423,7 +477,7 @@
normcasespec = encoding.normcasespecs.lower
- def normcasefallback(path):
+ def normcasefallback(path: bytes) -> bytes:
try:
u = path.decode('utf-8')
except UnicodeDecodeError:
@@ -464,7 +518,7 @@
)
# use upper-ing as normcase as same as NTFS workaround
- def normcase(path):
+ def normcase(path: bytes) -> bytes:
pathlen = len(path)
if (pathlen == 0) or (path[0] != pycompat.ossep):
# treat as relative
@@ -490,20 +544,20 @@
# but these translations are not supported by native
# tools, so the exec bit tends to be set erroneously.
# Therefore, disable executable bit access on Cygwin.
- def checkexec(path):
+ def checkexec(path: bytes) -> bool:
return False
# Similarly, Cygwin's symlink emulation is likely to create
# problems when Mercurial is used from both Cygwin and native
# Windows, with other native tools, or on shared volumes
- def checklink(path):
+ def checklink(path: bytes) -> bool:
return False
-_needsshellquote = None
+_needsshellquote: Optional[Match[bytes]] = None
-def shellquote(s):
+def shellquote(s: bytes) -> bytes:
if pycompat.sysplatform == b'OpenVMS':
return b'"%s"' % s
global _needsshellquote
@@ -516,12 +570,12 @@
return b"'%s'" % s.replace(b"'", b"'\\''")
-def shellsplit(s):
+def shellsplit(s: bytes) -> List[bytes]:
"""Parse a command string in POSIX shell way (best-effort)"""
return pycompat.shlexsplit(s, posix=True)
-def testpid(pid):
+def testpid(pid: int) -> bool:
'''return False if pid dead, True if running or not sure'''
if pycompat.sysplatform == b'OpenVMS':
return True
@@ -532,12 +586,12 @@
return inst.errno != errno.ESRCH
-def isowner(st):
+def isowner(st: os.stat_result) -> bool:
"""Return True if the stat object st is from the current user."""
return st.st_uid == os.getuid()
-def findexe(command):
+def findexe(command: bytes) -> Optional[bytes]:
"""Find executable for command searching like which does.
If command is a basename then PATH is searched for command.
PATH isn't searched if command is an absolute or relative path.
@@ -545,7 +599,7 @@
if pycompat.sysplatform == b'OpenVMS':
return command
- def findexisting(executable):
+ def findexisting(executable: bytes) -> Optional[bytes]:
b'Will return executable if existing file'
if os.path.isfile(executable) and os.access(executable, os.X_OK):
return executable
@@ -564,14 +618,14 @@
return None
-def setsignalhandler():
+def setsignalhandler() -> None:
pass
_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
-def statfiles(files):
+def statfiles(files: Sequence[bytes]) -> Iterator[Optional[os.stat_result]]:
"""Stat each file in files. Yield each stat, or None if a file does not
exist or has a type we don't care about."""
lstat = os.lstat
@@ -586,12 +640,12 @@
yield st
-def getuser():
+def getuser() -> bytes:
'''return name of current user'''
return pycompat.fsencode(getpass.getuser())
-def username(uid=None):
+def username(uid: Optional[int] = None) -> Optional[bytes]:
"""Return the name of the user with the given uid.
If uid is None, return the name of the current user."""
@@ -604,7 +658,7 @@
return b'%d' % uid
-def groupname(gid=None):
+def groupname(gid: Optional[int] = None) -> Optional[bytes]:
"""Return the name of the group with the given gid.
If gid is None, return the name of the current group."""
@@ -617,7 +671,7 @@
return pycompat.bytestr(gid)
-def groupmembers(name):
+def groupmembers(name: bytes) -> List[bytes]:
"""Return the list of members of the group with the given
name, KeyError if the group does not exist.
"""
@@ -625,23 +679,27 @@
return pycompat.rapply(pycompat.fsencode, list(grp.getgrnam(name).gr_mem))
-def spawndetached(args):
+def spawndetached(args: List[bytes]) -> int:
return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), args[0], args)
-def gethgcmd():
+def gethgcmd(): # TODO: convert to bytes, like on Windows?
return sys.argv[:1]
-def makedir(path, notindexed):
+def makedir(path: bytes, notindexed: bool) -> None:
os.mkdir(path)
-def lookupreg(key, name=None, scope=None):
+def lookupreg(
+ key: bytes,
+ name: Optional[bytes] = None,
+ scope: Optional[Union[int, Iterable[int]]] = None,
+) -> Optional[bytes]:
return None
-def hidewindow():
+def hidewindow() -> None:
"""Hide current shell window.
Used to hide the window opened when starting asynchronous
@@ -651,15 +709,15 @@
class cachestat:
- def __init__(self, path):
+ def __init__(self, path: bytes) -> None:
self.stat = os.stat(path)
- def cacheable(self):
+ def cacheable(self) -> bool:
return bool(self.stat.st_ino)
__hash__ = object.__hash__
- def __eq__(self, other):
+ def __eq__(self, other: Any) -> bool:
try:
# Only dev, ino, size, mtime and atime are likely to change. Out
# of these, we shouldn't compare atime but should compare the
@@ -680,18 +738,18 @@
except AttributeError:
return False
- def __ne__(self, other):
+ def __ne__(self, other: Any) -> bool:
return not self == other
-def statislink(st):
+def statislink(st: Optional[os.stat_result]) -> bool:
'''check whether a stat result is a symlink'''
- return st and stat.S_ISLNK(st.st_mode)
+ return stat.S_ISLNK(st.st_mode) if st else False
-def statisexec(st):
+def statisexec(st: Optional[os.stat_result]) -> bool:
'''check whether a stat result is an executable file'''
- return st and (st.st_mode & 0o100 != 0)
+ return (st.st_mode & 0o100 != 0) if st else False
def poll(fds):
@@ -708,7 +766,7 @@
return sorted(list(set(sum(res, []))))
-def readpipe(pipe):
+def readpipe(pipe) -> bytes:
"""Read all available data from a pipe."""
# We can't fstat() a pipe because Linux will always report 0.
# So, we set the pipe to non-blocking mode and read everything
@@ -733,7 +791,7 @@
fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
-def bindunixsocket(sock, path):
+def bindunixsocket(sock, path: bytes) -> None:
"""Bind the UNIX domain socket to the specified path"""
# use relative path instead of full path at bind() if possible, since
# AF_UNIX path has very small length limit (107 chars) on common
--- a/mercurial/pure/bdiff.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/pure/bdiff.py Wed Jan 04 16:02:22 2023 +0100
@@ -10,8 +10,13 @@
import re
import struct
+from typing import (
+ List,
+ Tuple,
+)
-def splitnewlines(text):
+
+def splitnewlines(text: bytes) -> List[bytes]:
'''like str.splitlines, but only split on newlines.'''
lines = [l + b'\n' for l in text.split(b'\n')]
if lines:
@@ -22,7 +27,9 @@
return lines
-def _normalizeblocks(a, b, blocks):
+def _normalizeblocks(
+ a: List[bytes], b: List[bytes], blocks
+) -> List[Tuple[int, int, int]]:
prev = None
r = []
for curr in blocks:
@@ -57,7 +64,7 @@
return r
-def bdiff(a, b):
+def bdiff(a: bytes, b: bytes) -> bytes:
a = bytes(a).splitlines(True)
b = bytes(b).splitlines(True)
@@ -84,7 +91,7 @@
return b"".join(bin)
-def blocks(a, b):
+def blocks(a: bytes, b: bytes) -> List[Tuple[int, int, int, int]]:
an = splitnewlines(a)
bn = splitnewlines(b)
d = difflib.SequenceMatcher(None, an, bn).get_matching_blocks()
@@ -92,7 +99,7 @@
return [(i, i + n, j, j + n) for (i, j, n) in d]
-def fixws(text, allws):
+def fixws(text: bytes, allws: bool) -> bytes:
if allws:
text = re.sub(b'[ \t\r]+', b'', text)
else:
--- a/mercurial/pure/mpatch.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/pure/mpatch.py Wed Jan 04 16:02:22 2023 +0100
@@ -9,6 +9,11 @@
import io
import struct
+from typing import (
+ List,
+ Tuple,
+)
+
stringio = io.BytesIO
@@ -28,7 +33,9 @@
# temporary string buffers.
-def _pull(dst, src, l): # pull l bytes from src
+def _pull(
+ dst: List[Tuple[int, int]], src: List[Tuple[int, int]], l: int
+) -> None: # pull l bytes from src
while l:
f = src.pop()
if f[0] > l: # do we need to split?
@@ -39,7 +46,7 @@
l -= f[0]
-def _move(m, dest, src, count):
+def _move(m: stringio, dest: int, src: int, count: int) -> None:
"""move count bytes from src to dest
The file pointer is left at the end of dest.
@@ -50,7 +57,9 @@
m.write(buf)
-def _collect(m, buf, list):
+def _collect(
+ m: stringio, buf: int, list: List[Tuple[int, int]]
+) -> Tuple[int, int]:
start = buf
for l, p in reversed(list):
_move(m, buf, p, l)
@@ -58,7 +67,7 @@
return (buf - start, start)
-def patches(a, bins):
+def patches(a: bytes, bins: List[bytes]) -> bytes:
if not bins:
return a
@@ -111,7 +120,7 @@
return m.read(t[0])
-def patchedsize(orig, delta):
+def patchedsize(orig: int, delta: bytes) -> int:
outlen, last, bin = 0, 0, 0
binend = len(delta)
data = 12
--- a/mercurial/pure/parsers.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/pure/parsers.py Wed Jan 04 16:02:22 2023 +0100
@@ -435,6 +435,11 @@
return self._wc_tracked and not (self._p1_tracked or self._p2_info)
@property
+ def modified(self):
+ """True if the file has been modified"""
+ return self._wc_tracked and self._p1_tracked and self._p2_info
+
+ @property
def maybe_clean(self):
"""True if the file has a chance to be in the "clean" state"""
if not self._wc_tracked:
--- a/mercurial/pycompat.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/pycompat.py Wed Jan 04 16:02:22 2023 +0100
@@ -28,6 +28,24 @@
import tempfile
import xmlrpc.client as xmlrpclib
+from typing import (
+ Any,
+ AnyStr,
+ BinaryIO,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ NoReturn,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ TypeVar,
+ cast,
+ overload,
+)
ispy3 = sys.version_info[0] >= 3
ispypy = '__pypy__' in sys.builtin_module_names
@@ -38,6 +56,10 @@
TYPE_CHECKING = typing.TYPE_CHECKING
+_GetOptResult = Tuple[List[Tuple[bytes, bytes]], List[bytes]]
+_T0 = TypeVar('_T0')
+_Tbytestr = TypeVar('_Tbytestr', bound='bytestr')
+
def future_set_exception_info(f, exc_info):
f.set_exception(exc_info[0])
@@ -46,7 +68,7 @@
FileNotFoundError = builtins.FileNotFoundError
-def identity(a):
+def identity(a: _T0) -> _T0:
return a
@@ -94,21 +116,17 @@
fsencode = os.fsencode
fsdecode = os.fsdecode
-oscurdir = os.curdir.encode('ascii')
-oslinesep = os.linesep.encode('ascii')
-osname = os.name.encode('ascii')
-ospathsep = os.pathsep.encode('ascii')
-ospardir = os.pardir.encode('ascii')
-ossep = os.sep.encode('ascii')
-osaltsep = os.altsep
-if osaltsep:
- osaltsep = osaltsep.encode('ascii')
-osdevnull = os.devnull.encode('ascii')
+oscurdir: bytes = os.curdir.encode('ascii')
+oslinesep: bytes = os.linesep.encode('ascii')
+osname: bytes = os.name.encode('ascii')
+ospathsep: bytes = os.pathsep.encode('ascii')
+ospardir: bytes = os.pardir.encode('ascii')
+ossep: bytes = os.sep.encode('ascii')
+osaltsep: Optional[bytes] = os.altsep.encode('ascii') if os.altsep else None
+osdevnull: bytes = os.devnull.encode('ascii')
-sysplatform = sys.platform.encode('ascii')
-sysexecutable = sys.executable
-if sysexecutable:
- sysexecutable = os.fsencode(sysexecutable)
+sysplatform: bytes = sys.platform.encode('ascii')
+sysexecutable: bytes = os.fsencode(sys.executable) if sys.executable else b''
def maplist(*args):
@@ -128,7 +146,7 @@
long = int
-if getattr(sys, 'argv', None) is not None:
+if builtins.getattr(sys, 'argv', None) is not None:
# On POSIX, the char** argv array is converted to Python str using
# Py_DecodeLocale(). The inverse of this is Py_EncodeLocale(), which
# isn't directly callable from Python code. In practice, os.fsencode()
@@ -143,6 +161,7 @@
# (this is how Python 2 worked). To get that, we encode with the mbcs
# encoding, which will pass CP_ACP to the underlying Windows API to
# produce bytes.
+ sysargv: List[bytes] = []
if os.name == r'nt':
sysargv = [a.encode("mbcs", "ignore") for a in sys.argv]
else:
@@ -211,38 +230,49 @@
# https://github.com/google/pytype/issues/500
if TYPE_CHECKING:
- def __init__(self, s=b''):
+ def __init__(self, s: object = b'') -> None:
pass
- def __new__(cls, s=b''):
+ def __new__(cls: Type[_Tbytestr], s: object = b'') -> _Tbytestr:
if isinstance(s, bytestr):
return s
if not isinstance(
s, (bytes, bytearray)
- ) and not hasattr( # hasattr-py3-only
+ ) and not builtins.hasattr( # hasattr-py3-only
s, u'__bytes__'
):
s = str(s).encode('ascii')
return bytes.__new__(cls, s)
- def __getitem__(self, key):
+ def __getitem__(self, key) -> bytes:
s = bytes.__getitem__(self, key)
if not isinstance(s, bytes):
s = bytechr(s)
return s
- def __iter__(self):
+ def __iter__(self) -> Iterator[bytes]:
return iterbytestr(bytes.__iter__(self))
- def __repr__(self):
+ def __repr__(self) -> str:
return bytes.__repr__(self)[1:] # drop b''
-def iterbytestr(s):
+def iterbytestr(s: Iterable[int]) -> Iterator[bytes]:
"""Iterate bytes as if it were a str object of Python 2"""
return map(bytechr, s)
+if TYPE_CHECKING:
+
+ @overload
+ def maybebytestr(s: bytes) -> bytestr:
+ ...
+
+ @overload
+ def maybebytestr(s: _T0) -> _T0:
+ ...
+
+
def maybebytestr(s):
"""Promote bytes to bytestr"""
if isinstance(s, bytes):
@@ -250,7 +280,7 @@
return s
-def sysbytes(s):
+def sysbytes(s: AnyStr) -> bytes:
"""Convert an internal str (e.g. keyword, __doc__) back to bytes
This never raises UnicodeEncodeError, but only ASCII characters
@@ -261,7 +291,7 @@
return s.encode('utf-8')
-def sysstr(s):
+def sysstr(s: AnyStr) -> str:
"""Return a keyword str to be passed to Python functions such as
getattr() and str.encode()
@@ -274,29 +304,29 @@
return s.decode('latin-1')
-def strurl(url):
+def strurl(url: AnyStr) -> str:
"""Converts a bytes url back to str"""
if isinstance(url, bytes):
return url.decode('ascii')
return url
-def bytesurl(url):
+def bytesurl(url: AnyStr) -> bytes:
"""Converts a str url to bytes by encoding in ascii"""
if isinstance(url, str):
return url.encode('ascii')
return url
-def raisewithtb(exc, tb):
+def raisewithtb(exc: BaseException, tb) -> NoReturn:
"""Raise exception with the given traceback"""
raise exc.with_traceback(tb)
-def getdoc(obj):
+def getdoc(obj: object) -> Optional[bytes]:
"""Get docstring as bytes; may be None so gettext() won't confuse it
with _('')"""
- doc = getattr(obj, '__doc__', None)
+ doc = builtins.getattr(obj, '__doc__', None)
if doc is None:
return doc
return sysbytes(doc)
@@ -319,14 +349,22 @@
unicode = str
-def open(name, mode=b'r', buffering=-1, encoding=None):
+def open(
+ name,
+ mode: AnyStr = b'r',
+ buffering: int = -1,
+ encoding: Optional[str] = None,
+) -> Any:
+ # TODO: assert binary mode, and cast result to BinaryIO?
return builtins.open(name, sysstr(mode), buffering, encoding)
safehasattr = _wrapattrfunc(builtins.hasattr)
-def _getoptbwrapper(orig, args, shortlist, namelist):
+def _getoptbwrapper(
+ orig, args: Sequence[bytes], shortlist: bytes, namelist: Sequence[bytes]
+) -> _GetOptResult:
"""
Takes bytes arguments, converts them to unicode, pass them to
getopt.getopt(), convert the returned values back to bytes and then
@@ -342,7 +380,7 @@
return opts, args
-def strkwargs(dic):
+def strkwargs(dic: Mapping[bytes, _T0]) -> Dict[str, _T0]:
"""
Converts the keys of a python dictonary to str i.e. unicodes so that
they can be passed as keyword arguments as dictionaries with bytes keys
@@ -352,7 +390,7 @@
return dic
-def byteskwargs(dic):
+def byteskwargs(dic: Mapping[str, _T0]) -> Dict[bytes, _T0]:
"""
Converts keys of python dictionaries to bytes as they were converted to
str to pass that dictonary as a keyword argument on Python 3.
@@ -362,7 +400,9 @@
# TODO: handle shlex.shlex().
-def shlexsplit(s, comments=False, posix=True):
+def shlexsplit(
+ s: bytes, comments: bool = False, posix: bool = True
+) -> List[bytes]:
"""
Takes bytes argument, convert it to str i.e. unicodes, pass that into
shlex.split(), convert the returned value to bytes and return that for
@@ -377,46 +417,59 @@
json_loads = json.loads
-isjython = sysplatform.startswith(b'java')
+isjython: bool = sysplatform.startswith(b'java')
-isdarwin = sysplatform.startswith(b'darwin')
-islinux = sysplatform.startswith(b'linux')
-isposix = osname == b'posix'
-iswindows = osname == b'nt'
+isdarwin: bool = sysplatform.startswith(b'darwin')
+islinux: bool = sysplatform.startswith(b'linux')
+isposix: bool = osname == b'posix'
+iswindows: bool = osname == b'nt'
-def getoptb(args, shortlist, namelist):
+def getoptb(
+ args: Sequence[bytes], shortlist: bytes, namelist: Sequence[bytes]
+) -> _GetOptResult:
return _getoptbwrapper(getopt.getopt, args, shortlist, namelist)
-def gnugetoptb(args, shortlist, namelist):
+def gnugetoptb(
+ args: Sequence[bytes], shortlist: bytes, namelist: Sequence[bytes]
+) -> _GetOptResult:
return _getoptbwrapper(getopt.gnu_getopt, args, shortlist, namelist)
-def mkdtemp(suffix=b'', prefix=b'tmp', dir=None):
+def mkdtemp(
+ suffix: bytes = b'', prefix: bytes = b'tmp', dir: Optional[bytes] = None
+) -> bytes:
return tempfile.mkdtemp(suffix, prefix, dir)
# text=True is not supported; use util.from/tonativeeol() instead
-def mkstemp(suffix=b'', prefix=b'tmp', dir=None):
+def mkstemp(
+ suffix: bytes = b'', prefix: bytes = b'tmp', dir: Optional[bytes] = None
+) -> Tuple[int, bytes]:
return tempfile.mkstemp(suffix, prefix, dir)
# TemporaryFile does not support an "encoding=" argument on python2.
# This wrapper file are always open in byte mode.
-def unnamedtempfile(mode=None, *args, **kwargs):
+def unnamedtempfile(mode: Optional[bytes] = None, *args, **kwargs) -> BinaryIO:
if mode is None:
mode = 'w+b'
else:
mode = sysstr(mode)
assert 'b' in mode
- return tempfile.TemporaryFile(mode, *args, **kwargs)
+ return cast(BinaryIO, tempfile.TemporaryFile(mode, *args, **kwargs))
# NamedTemporaryFile does not support an "encoding=" argument on python2.
# This wrapper file are always open in byte mode.
def namedtempfile(
- mode=b'w+b', bufsize=-1, suffix=b'', prefix=b'tmp', dir=None, delete=True
+ mode: bytes = b'w+b',
+ bufsize: int = -1,
+ suffix: bytes = b'',
+ prefix: bytes = b'tmp',
+ dir: Optional[bytes] = None,
+ delete: bool = True,
):
mode = sysstr(mode)
assert 'b' in mode
--- a/mercurial/revlog.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/revlog.py Wed Jan 04 16:02:22 2023 +0100
@@ -38,12 +38,15 @@
COMP_MODE_DEFAULT,
COMP_MODE_INLINE,
COMP_MODE_PLAIN,
+ DELTA_BASE_REUSE_NO,
+ DELTA_BASE_REUSE_TRY,
ENTRY_RANK,
FEATURES_BY_VERSION,
FLAG_GENERALDELTA,
FLAG_INLINE_DATA,
INDEX_HEADER,
KIND_CHANGELOG,
+ KIND_FILELOG,
RANK_UNKNOWN,
REVLOGV0,
REVLOGV1,
@@ -125,7 +128,7 @@
# Aliased for performance.
_zlibdecompress = zlib.decompress
-# max size of revlog with inline data
+# max size of inline data embedded into a revlog
_maxinline = 131072
# Flag processors for REVIDX_ELLIPSIS.
@@ -347,6 +350,7 @@
self._chunkcachesize = 65536
self._maxchainlen = None
self._deltabothparents = True
+ self._candidate_group_chunk_size = 0
self._debug_delta = False
self.index = None
self._docket = None
@@ -363,6 +367,11 @@
self._srdensitythreshold = 0.50
self._srmingapsize = 262144
+    # other optional features
+
+ # might remove rank configuration once the computation has no impact
+ self._compute_rank = False
+
# Make copy of flag processors so each revlog instance can support
# custom flags.
self._flagprocessors = dict(flagutil.flagprocessors)
@@ -404,6 +413,7 @@
if b'changelogv2' in opts and self.revlog_kind == KIND_CHANGELOG:
new_header = CHANGELOGV2
+ self._compute_rank = opts.get(b'changelogv2.compute-rank', True)
elif b'revlogv2' in opts:
new_header = REVLOGV2
elif b'revlogv1' in opts:
@@ -421,6 +431,9 @@
self._maxchainlen = opts[b'maxchainlen']
if b'deltabothparents' in opts:
self._deltabothparents = opts[b'deltabothparents']
+ dps_cgds = opts.get(b'delta-parent-search.candidate-group-chunk-size')
+ if dps_cgds:
+ self._candidate_group_chunk_size = dps_cgds
self._lazydelta = bool(opts.get(b'lazydelta', True))
self._lazydeltabase = False
if self._lazydelta:
@@ -505,7 +518,6 @@
self._docket = docket
self._docket_file = entry_point
else:
- entry_data = b''
self._initempty = True
entry_data = self._get_data(entry_point, mmapindexthreshold)
if len(entry_data) > 0:
@@ -653,9 +665,12 @@
@util.propertycache
def display_id(self):
"""The public facing "ID" of the revlog that we use in message"""
- # Maybe we should build a user facing representation of
- # revlog.target instead of using `self.radix`
- return self.radix
+ if self.revlog_kind == KIND_FILELOG:
+ # Reference the file without the "data/" prefix, so it is familiar
+ # to the user.
+ return self.target[1]
+ else:
+ return self.radix
def _get_decompressor(self, t):
try:
@@ -2445,6 +2460,16 @@
self, write_debug=write_debug
)
+ if cachedelta is not None and len(cachedelta) == 2:
+ # If the cached delta has no information about how it should be
+ # reused, add the default reuse instruction according to the
+ # revlog's configuration.
+ if self._generaldelta and self._lazydeltabase:
+ delta_base_reuse = DELTA_BASE_REUSE_TRY
+ else:
+ delta_base_reuse = DELTA_BASE_REUSE_NO
+ cachedelta = (cachedelta[0], cachedelta[1], delta_base_reuse)
+
revinfo = revlogutils.revisioninfo(
node,
p1,
@@ -2492,7 +2517,7 @@
sidedata_offset = 0
rank = RANK_UNKNOWN
- if self._format_version == CHANGELOGV2:
+ if self._compute_rank:
if (p1r, p2r) == (nullrev, nullrev):
rank = 1
elif p1r != nullrev and p2r == nullrev:
@@ -2637,6 +2662,8 @@
alwayscache=False,
addrevisioncb=None,
duplicaterevisioncb=None,
+ debug_info=None,
+ delta_base_reuse_policy=None,
):
"""
add a delta group
@@ -2652,6 +2679,14 @@
if self._adding_group:
raise error.ProgrammingError(b'cannot nest addgroup() calls')
+ # read the default delta-base reuse policy from revlog config if the
+ # group did not specify one.
+ if delta_base_reuse_policy is None:
+ if self._generaldelta and self._lazydeltabase:
+ delta_base_reuse_policy = DELTA_BASE_REUSE_TRY
+ else:
+ delta_base_reuse_policy = DELTA_BASE_REUSE_NO
+
self._adding_group = True
empty = True
try:
@@ -2662,6 +2697,7 @@
deltacomputer = deltautil.deltacomputer(
self,
write_debug=write_debug,
+ debug_info=debug_info,
)
# loop through our set of deltas
for data in deltas:
@@ -2731,7 +2767,7 @@
p1,
p2,
flags,
- (baserev, delta),
+ (baserev, delta, delta_base_reuse_policy),
alwayscache=alwayscache,
deltacomputer=deltacomputer,
sidedata=sidedata,
@@ -2886,6 +2922,7 @@
assumehaveparentrevisions=False,
deltamode=repository.CG_DELTAMODE_STD,
sidedata_helpers=None,
+ debug_info=None,
):
if nodesorder not in (b'nodes', b'storage', b'linear', None):
raise error.ProgrammingError(
@@ -2915,6 +2952,7 @@
revisiondata=revisiondata,
assumehaveparentrevisions=assumehaveparentrevisions,
sidedata_helpers=sidedata_helpers,
+ debug_info=debug_info,
)
DELTAREUSEALWAYS = b'always'
--- a/mercurial/revlogutils/__init__.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/revlogutils/__init__.py Wed Jan 04 16:02:22 2023 +0100
@@ -67,7 +67,7 @@
node: expected hash of the revision
p1, p2: parent revs of the revision
btext: built text cache consisting of a one-element list
- cachedelta: (baserev, uncompressed_delta) or None
+ cachedelta: (baserev, uncompressed_delta, usage_mode) or None
flags: flags associated to the revision storage
One of btext[0] or cachedelta must be set.
--- a/mercurial/revlogutils/constants.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/revlogutils/constants.py Wed Jan 04 16:02:22 2023 +0100
@@ -301,3 +301,18 @@
SPARSE_REVLOG_MAX_CHAIN_LENGTH = 1000
+
+### What should be done with a cached delta and its base ?
+
+# Ignore the cache when considering candidates.
+#
+# The cached delta might be used, but the delta base will not be scheduled for
+# usage earlier than in "normal" order.
+DELTA_BASE_REUSE_NO = 0
+
+# Prioritize trying the cached delta base
+#
+# The delta base will be tested for validity first. So that the cached deltas get
+# used when possible.
+DELTA_BASE_REUSE_TRY = 1
+DELTA_BASE_REUSE_FORCE = 2
--- a/mercurial/revlogutils/debug.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/revlogutils/debug.py Wed Jan 04 16:02:22 2023 +0100
@@ -6,12 +6,19 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
+import collections
+import string
+
from .. import (
+ mdiff,
node as nodemod,
+ revlogutils,
+ util,
)
from . import (
constants,
+ deltas as deltautil,
)
INDEX_ENTRY_DEBUG_COLUMN = []
@@ -216,3 +223,499 @@
fm.plain(b'\n')
fm.end()
+
+
+def dump(ui, revlog):
+ """perform the work for `hg debugrevlog --dump"""
+ # XXX seems redundant with debug index ?
+ r = revlog
+ numrevs = len(r)
+ ui.write(
+ (
+ b"# rev p1rev p2rev start end deltastart base p1 p2"
+ b" rawsize totalsize compression heads chainlen\n"
+ )
+ )
+ ts = 0
+ heads = set()
+
+ for rev in range(numrevs):
+ dbase = r.deltaparent(rev)
+ if dbase == -1:
+ dbase = rev
+ cbase = r.chainbase(rev)
+ clen = r.chainlen(rev)
+ p1, p2 = r.parentrevs(rev)
+ rs = r.rawsize(rev)
+ ts = ts + rs
+ heads -= set(r.parentrevs(rev))
+ heads.add(rev)
+ try:
+ compression = ts / r.end(rev)
+ except ZeroDivisionError:
+ compression = 0
+ ui.write(
+ b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
+ b"%11d %5d %8d\n"
+ % (
+ rev,
+ p1,
+ p2,
+ r.start(rev),
+ r.end(rev),
+ r.start(dbase),
+ r.start(cbase),
+ r.start(p1),
+ r.start(p2),
+ rs,
+ ts,
+ compression,
+ len(heads),
+ clen,
+ )
+ )
+
+
+def debug_revlog(ui, revlog):
+ """code for `hg debugrevlog`"""
+ r = revlog
+ format = r._format_version
+ v = r._format_flags
+ flags = []
+ gdelta = False
+ if v & constants.FLAG_INLINE_DATA:
+ flags.append(b'inline')
+ if v & constants.FLAG_GENERALDELTA:
+ gdelta = True
+ flags.append(b'generaldelta')
+ if not flags:
+ flags = [b'(none)']
+
+    ### the total size of stored content if uncompressed.
+ full_text_total_size = 0
+ ### tracks merge vs single parent
+ nummerges = 0
+
+    ### tracks ways the "delta" are built
+ # nodelta
+ numempty = 0
+ numemptytext = 0
+ numemptydelta = 0
+ # full file content
+ numfull = 0
+ # intermediate snapshot against a prior snapshot
+ numsemi = 0
+ # snapshot count per depth
+ numsnapdepth = collections.defaultdict(lambda: 0)
+ # number of snapshots with a non-ancestor delta
+ numsnapdepth_nad = collections.defaultdict(lambda: 0)
+ # delta against previous revision
+ numprev = 0
+ # delta against prev, where prev is a non-ancestor
+ numprev_nad = 0
+ # delta against first or second parent (not prev)
+ nump1 = 0
+ nump2 = 0
+ # delta against neither prev nor parents
+ numother = 0
+ # delta against other that is a non-ancestor
+ numother_nad = 0
+ # delta against prev that are also first or second parent
+ # (details of `numprev`)
+ nump1prev = 0
+ nump2prev = 0
+
+ # data about delta chain of each revs
+ chainlengths = []
+ chainbases = []
+ chainspans = []
+
+ # data about each revision
+ datasize = [None, 0, 0]
+ fullsize = [None, 0, 0]
+ semisize = [None, 0, 0]
+ # snapshot count per depth
+ snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
+ deltasize = [None, 0, 0]
+ chunktypecounts = {}
+ chunktypesizes = {}
+
+ def addsize(size, l):
+ if l[0] is None or size < l[0]:
+ l[0] = size
+ if size > l[1]:
+ l[1] = size
+ l[2] += size
+
+ numrevs = len(r)
+ for rev in range(numrevs):
+ p1, p2 = r.parentrevs(rev)
+ delta = r.deltaparent(rev)
+ if format > 0:
+ s = r.rawsize(rev)
+ full_text_total_size += s
+ addsize(s, datasize)
+ if p2 != nodemod.nullrev:
+ nummerges += 1
+ size = r.length(rev)
+ if delta == nodemod.nullrev:
+ chainlengths.append(0)
+ chainbases.append(r.start(rev))
+ chainspans.append(size)
+ if size == 0:
+ numempty += 1
+ numemptytext += 1
+ else:
+ numfull += 1
+ numsnapdepth[0] += 1
+ addsize(size, fullsize)
+ addsize(size, snapsizedepth[0])
+ else:
+ nad = (
+ delta != p1 and delta != p2 and not r.isancestorrev(delta, rev)
+ )
+ chainlengths.append(chainlengths[delta] + 1)
+ baseaddr = chainbases[delta]
+ revaddr = r.start(rev)
+ chainbases.append(baseaddr)
+ chainspans.append((revaddr - baseaddr) + size)
+ if size == 0:
+ numempty += 1
+ numemptydelta += 1
+ elif r.issnapshot(rev):
+ addsize(size, semisize)
+ numsemi += 1
+ depth = r.snapshotdepth(rev)
+ numsnapdepth[depth] += 1
+ if nad:
+ numsnapdepth_nad[depth] += 1
+ addsize(size, snapsizedepth[depth])
+ else:
+ addsize(size, deltasize)
+ if delta == rev - 1:
+ numprev += 1
+ if delta == p1:
+ nump1prev += 1
+ elif delta == p2:
+ nump2prev += 1
+ elif nad:
+ numprev_nad += 1
+ elif delta == p1:
+ nump1 += 1
+ elif delta == p2:
+ nump2 += 1
+ elif delta != nodemod.nullrev:
+ numother += 1
+ numother_nad += 1
+
+ # Obtain data on the raw chunks in the revlog.
+ if util.safehasattr(r, '_getsegmentforrevs'):
+ segment = r._getsegmentforrevs(rev, rev)[1]
+ else:
+ segment = r._revlog._getsegmentforrevs(rev, rev)[1]
+ if segment:
+ chunktype = bytes(segment[0:1])
+ else:
+ chunktype = b'empty'
+
+ if chunktype not in chunktypecounts:
+ chunktypecounts[chunktype] = 0
+ chunktypesizes[chunktype] = 0
+
+ chunktypecounts[chunktype] += 1
+ chunktypesizes[chunktype] += size
+
+ # Adjust size min value for empty cases
+ for size in (datasize, fullsize, semisize, deltasize):
+ if size[0] is None:
+ size[0] = 0
+
+ numdeltas = numrevs - numfull - numempty - numsemi
+ numoprev = numprev - nump1prev - nump2prev - numprev_nad
+ num_other_ancestors = numother - numother_nad
+ totalrawsize = datasize[2]
+ datasize[2] /= numrevs
+ fulltotal = fullsize[2]
+ if numfull == 0:
+ fullsize[2] = 0
+ else:
+ fullsize[2] /= numfull
+ semitotal = semisize[2]
+ snaptotal = {}
+ if numsemi > 0:
+ semisize[2] /= numsemi
+ for depth in snapsizedepth:
+ snaptotal[depth] = snapsizedepth[depth][2]
+ snapsizedepth[depth][2] /= numsnapdepth[depth]
+
+ deltatotal = deltasize[2]
+ if numdeltas > 0:
+ deltasize[2] /= numdeltas
+ totalsize = fulltotal + semitotal + deltatotal
+ avgchainlen = sum(chainlengths) / numrevs
+ maxchainlen = max(chainlengths)
+ maxchainspan = max(chainspans)
+ compratio = 1
+ if totalsize:
+ compratio = totalrawsize / totalsize
+
+ basedfmtstr = b'%%%dd\n'
+ basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
+
+ def dfmtstr(max):
+ return basedfmtstr % len(str(max))
+
+ def pcfmtstr(max, padding=0):
+ return basepcfmtstr % (len(str(max)), b' ' * padding)
+
+ def pcfmt(value, total):
+ if total:
+ return (value, 100 * float(value) / total)
+ else:
+ return value, 100.0
+
+ ui.writenoi18n(b'format : %d\n' % format)
+ ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
+
+ ui.write(b'\n')
+ fmt = pcfmtstr(totalsize)
+ fmt2 = dfmtstr(totalsize)
+ ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
+ ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
+ ui.writenoi18n(
+ b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
+ )
+ ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
+ ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
+ ui.writenoi18n(
+ b' text : '
+ + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
+ )
+ ui.writenoi18n(
+ b' delta : '
+ + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
+ )
+ ui.writenoi18n(
+ b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
+ )
+ for depth in sorted(numsnapdepth):
+ base = b' lvl-%-3d : ' % depth
+ count = fmt % pcfmt(numsnapdepth[depth], numrevs)
+ pieces = [base, count]
+ if numsnapdepth_nad[depth]:
+ pieces[-1] = count = count[:-1] # drop the final '\n'
+ more = b' non-ancestor-bases: '
+ anc_count = fmt
+ anc_count %= pcfmt(numsnapdepth_nad[depth], numsnapdepth[depth])
+ pieces.append(more)
+ pieces.append(anc_count)
+ ui.write(b''.join(pieces))
+ ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
+ ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
+ ui.writenoi18n(
+ b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
+ )
+ for depth in sorted(numsnapdepth):
+ ui.write(
+ (b' lvl-%-3d : ' % depth)
+ + fmt % pcfmt(snaptotal[depth], totalsize)
+ )
+ ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
+
+ letters = string.ascii_letters.encode('ascii')
+
+ def fmtchunktype(chunktype):
+ if chunktype == b'empty':
+ return b' %s : ' % chunktype
+ elif chunktype in letters:
+ return b' 0x%s (%s) : ' % (nodemod.hex(chunktype), chunktype)
+ else:
+ return b' 0x%s : ' % nodemod.hex(chunktype)
+
+ ui.write(b'\n')
+ ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
+ for chunktype in sorted(chunktypecounts):
+ ui.write(fmtchunktype(chunktype))
+ ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
+ ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
+ for chunktype in sorted(chunktypecounts):
+ ui.write(fmtchunktype(chunktype))
+ ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
+
+ ui.write(b'\n')
+ b_total = b"%d" % full_text_total_size
+ p_total = []
+ while len(b_total) > 3:
+ p_total.append(b_total[-3:])
+ b_total = b_total[:-3]
+ p_total.append(b_total)
+ p_total.reverse()
+ b_total = b' '.join(p_total)
+
+ ui.write(b'\n')
+ ui.writenoi18n(b'total-stored-content: %s bytes\n' % b_total)
+ ui.write(b'\n')
+ fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
+ ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
+ ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
+ ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
+ ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
+
+ if format > 0:
+ ui.write(b'\n')
+ ui.writenoi18n(
+ b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
+ % tuple(datasize)
+ )
+ ui.writenoi18n(
+ b'full revision size (min/max/avg) : %d / %d / %d\n'
+ % tuple(fullsize)
+ )
+ ui.writenoi18n(
+ b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
+ % tuple(semisize)
+ )
+ for depth in sorted(snapsizedepth):
+ if depth == 0:
+ continue
+ ui.writenoi18n(
+ b' level-%-3d (min/max/avg) : %d / %d / %d\n'
+ % ((depth,) + tuple(snapsizedepth[depth]))
+ )
+ ui.writenoi18n(
+ b'delta size (min/max/avg) : %d / %d / %d\n'
+ % tuple(deltasize)
+ )
+
+ if numdeltas > 0:
+ ui.write(b'\n')
+ fmt = pcfmtstr(numdeltas)
+ fmt2 = pcfmtstr(numdeltas, 4)
+ ui.writenoi18n(
+ b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
+ )
+ if numprev > 0:
+ ui.writenoi18n(
+ b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
+ )
+ ui.writenoi18n(
+ b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
+ )
+ ui.writenoi18n(
+ b' other-ancestor : ' + fmt2 % pcfmt(numoprev, numprev)
+ )
+ ui.writenoi18n(
+ b' unrelated : ' + fmt2 % pcfmt(numoprev, numprev)
+ )
+ if gdelta:
+ ui.writenoi18n(
+ b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
+ )
+ ui.writenoi18n(
+ b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
+ )
+ ui.writenoi18n(
+ b'deltas against ancs : '
+ + fmt % pcfmt(num_other_ancestors, numdeltas)
+ )
+ ui.writenoi18n(
+ b'deltas against other : '
+ + fmt % pcfmt(numother_nad, numdeltas)
+ )
+
+
+def debug_delta_find(ui, revlog, rev, base_rev=nodemod.nullrev):
+ """display the search process for a delta"""
+ deltacomputer = deltautil.deltacomputer(
+ revlog,
+ write_debug=ui.write,
+ debug_search=not ui.quiet,
+ )
+
+ node = revlog.node(rev)
+ p1r, p2r = revlog.parentrevs(rev)
+ p1 = revlog.node(p1r)
+ p2 = revlog.node(p2r)
+ full_text = revlog.revision(rev)
+ btext = [full_text]
+ textlen = len(btext[0])
+ cachedelta = None
+ flags = revlog.flags(rev)
+
+ if base_rev != nodemod.nullrev:
+ base_text = revlog.revision(base_rev)
+ delta = mdiff.textdiff(base_text, full_text)
+
+ cachedelta = (base_rev, delta, constants.DELTA_BASE_REUSE_TRY)
+ btext = [None]
+
+ revinfo = revlogutils.revisioninfo(
+ node,
+ p1,
+ p2,
+ btext,
+ textlen,
+ cachedelta,
+ flags,
+ )
+
+ fh = revlog._datafp()
+ deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
+
+
+def _get_revlogs(repo, changelog: bool, manifest: bool, filelogs: bool):
+ """yield revlogs from this repository"""
+ if changelog:
+ yield repo.changelog
+
+ if manifest:
+ # XXX: Handle tree manifest
+ root_mf = repo.manifestlog.getstorage(b'')
+ assert not root_mf._treeondisk
+ yield root_mf._revlog
+
+ if filelogs:
+ files = set()
+ for rev in repo:
+ ctx = repo[rev]
+ files |= set(ctx.files())
+
+ for f in sorted(files):
+ yield repo.file(f)._revlog
+
+
+def debug_revlog_stats(
+ repo, fm, changelog: bool, manifest: bool, filelogs: bool
+):
+ """Format revlog statistics for debugging purposes
+
+ fm: the output formatter.
+ """
+ fm.plain(b'rev-count data-size inl type target \n')
+
+ for rlog in _get_revlogs(repo, changelog, manifest, filelogs):
+ fm.startitem()
+ nb_rev = len(rlog)
+ inline = rlog._inline
+ data_size = rlog._get_data_offset(nb_rev - 1)
+
+ target = rlog.target
+ revlog_type = b'unknown'
+ revlog_target = b''
+ if target[0] == constants.KIND_CHANGELOG:
+ revlog_type = b'changelog'
+ elif target[0] == constants.KIND_MANIFESTLOG:
+ revlog_type = b'manifest'
+ revlog_target = target[1]
+ elif target[0] == constants.KIND_FILELOG:
+ revlog_type = b'file'
+ revlog_target = target[1]
+
+ fm.write(b'revlog.rev-count', b'%9d', nb_rev)
+ fm.write(b'revlog.data-size', b'%12d', data_size)
+
+ fm.write(b'revlog.inline', b' %-3s', b'yes' if inline else b'no')
+ fm.write(b'revlog.type', b' %-9s', revlog_type)
+ fm.write(b'revlog.target', b' %s', revlog_target)
+
+ fm.plain(b'\n')
--- a/mercurial/revlogutils/deltas.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/revlogutils/deltas.py Wed Jan 04 16:02:22 2023 +0100
@@ -20,6 +20,8 @@
COMP_MODE_DEFAULT,
COMP_MODE_INLINE,
COMP_MODE_PLAIN,
+ DELTA_BASE_REUSE_FORCE,
+ DELTA_BASE_REUSE_NO,
KIND_CHANGELOG,
KIND_FILELOG,
KIND_MANIFESTLOG,
@@ -576,13 +578,20 @@
)
-def isgooddeltainfo(revlog, deltainfo, revinfo):
+def is_good_delta_info(revlog, deltainfo, revinfo):
"""Returns True if the given delta is good. Good means that it is within
the disk span, disk size, and chain length bounds that we know to be
performant."""
if deltainfo is None:
return False
+ if (
+ revinfo.cachedelta is not None
+ and deltainfo.base == revinfo.cachedelta[0]
+ and revinfo.cachedelta[2] == DELTA_BASE_REUSE_FORCE
+ ):
+ return True
+
# - 'deltainfo.distance' is the distance from the base revision --
# bounding it limits the amount of I/O we need to do.
# - 'deltainfo.compresseddeltalen' is the sum of the total size of
@@ -655,7 +664,16 @@
LIMIT_BASE2TEXT = 500
-def _candidategroups(revlog, textlen, p1, p2, cachedelta):
+def _candidategroups(
+ revlog,
+ textlen,
+ p1,
+ p2,
+ cachedelta,
+ excluded_bases=None,
+ target_rev=None,
+ snapshot_cache=None,
+):
"""Provides group of revision to be tested as delta base
This top level function focus on emitting groups with unique and worthwhile
@@ -666,15 +684,31 @@
yield None
return
+ if (
+ cachedelta is not None
+ and nullrev == cachedelta[0]
+ and cachedelta[2] == DELTA_BASE_REUSE_FORCE
+ ):
+        # instructions are to forcibly do a full snapshot
+ yield None
+ return
+
deltalength = revlog.length
deltaparent = revlog.deltaparent
sparse = revlog._sparserevlog
good = None
deltas_limit = textlen * LIMIT_DELTA2TEXT
+ group_chunk_size = revlog._candidate_group_chunk_size
tested = {nullrev}
- candidates = _refinedgroups(revlog, p1, p2, cachedelta)
+ candidates = _refinedgroups(
+ revlog,
+ p1,
+ p2,
+ cachedelta,
+ snapshot_cache=snapshot_cache,
+ )
while True:
temptative = candidates.send(good)
if temptative is None:
@@ -694,15 +728,37 @@
# filter out revision we tested already
if rev in tested:
continue
- tested.add(rev)
+
+ if (
+ cachedelta is not None
+ and rev == cachedelta[0]
+ and cachedelta[2] == DELTA_BASE_REUSE_FORCE
+ ):
+ # instructions are to forcibly consider/use this delta base
+ group.append(rev)
+ continue
+
+ # a higher authority deemed the base unworthy (e.g. censored)
+ if excluded_bases is not None and rev in excluded_bases:
+ tested.add(rev)
+ continue
+ # We are in some recomputation cases and that rev is too high in
+ # the revlog
+ if target_rev is not None and rev >= target_rev:
+ tested.add(rev)
+ continue
# filter out delta base that will never produce good delta
if deltas_limit < revlog.length(rev):
+ tested.add(rev)
continue
if sparse and revlog.rawsize(rev) < (textlen // LIMIT_BASE2TEXT):
+ tested.add(rev)
continue
# no delta for rawtext-changing revs (see "candelta" for why)
if revlog.flags(rev) & REVIDX_RAWTEXT_CHANGING_FLAGS:
+ tested.add(rev)
continue
+
# If we reach here, we are about to build and test a delta.
# The delta building process will compute the chaininfo in all
# case, since that computation is cached, it is fine to access it
@@ -710,9 +766,11 @@
chainlen, chainsize = revlog._chaininfo(rev)
# if chain will be too long, skip base
if revlog._maxchainlen and chainlen >= revlog._maxchainlen:
+ tested.add(rev)
continue
# if chain already have too much data, skip base
if deltas_limit < chainsize:
+ tested.add(rev)
continue
if sparse and revlog.upperboundcomp is not None:
maxcomp = revlog.upperboundcomp
@@ -731,36 +789,46 @@
snapshotlimit = textlen >> snapshotdepth
if snapshotlimit < lowestrealisticdeltalen:
# delta lower bound is larger than accepted upper bound
+ tested.add(rev)
continue
# check the relative constraint on the delta size
revlength = revlog.length(rev)
if revlength < lowestrealisticdeltalen:
# delta probable lower bound is larger than target base
+ tested.add(rev)
continue
group.append(rev)
if group:
- # XXX: in the sparse revlog case, group can become large,
- # impacting performances. Some bounding or slicing mecanism
- # would help to reduce this impact.
- good = yield tuple(group)
+ # When the size of the candidate group is big, it can result in a
+ # quite significant performance impact. To reduce this, we can send
+ # them in smaller batches until the new batch does not provide any
+ # improvements.
+ #
+ # This might reduce the overall efficiency of the compression in
+ # some corner cases, but that should also prevent very pathological
+ # cases from being an issue. (eg. 20 000 candidates).
+ #
+ # XXX note that the ordering of the group becomes important as it
+ # now impacts the final result. The current order is unprocessed
+ # and can be improved.
+ if group_chunk_size == 0:
+ tested.update(group)
+ good = yield tuple(group)
+ else:
+ prev_good = good
+ for start in range(0, len(group), group_chunk_size):
+ sub_group = group[start : start + group_chunk_size]
+ tested.update(sub_group)
+ good = yield tuple(sub_group)
+ if prev_good == good:
+ break
+
yield None
-def _findsnapshots(revlog, cache, start_rev):
- """find snapshot from start_rev to tip"""
- if util.safehasattr(revlog.index, b'findsnapshots'):
- revlog.index.findsnapshots(cache, start_rev)
- else:
- deltaparent = revlog.deltaparent
- issnapshot = revlog.issnapshot
- for rev in revlog.revs(start_rev):
- if issnapshot(rev):
- cache[deltaparent(rev)].append(rev)
-
-
-def _refinedgroups(revlog, p1, p2, cachedelta):
+def _refinedgroups(revlog, p1, p2, cachedelta, snapshot_cache=None):
good = None
# First we try to reuse a the delta contained in the bundle.
# (or from the source revlog)
@@ -768,15 +836,28 @@
# This logic only applies to general delta repositories and can be disabled
# through configuration. Disabling reuse source delta is useful when
# we want to make sure we recomputed "optimal" deltas.
- if cachedelta and revlog._generaldelta and revlog._lazydeltabase:
+ debug_info = None
+ if cachedelta is not None and cachedelta[2] > DELTA_BASE_REUSE_NO:
# Assume what we received from the server is a good choice
# build delta will reuse the cache
+ if debug_info is not None:
+ debug_info['cached-delta.tested'] += 1
good = yield (cachedelta[0],)
if good is not None:
+ if debug_info is not None:
+ debug_info['cached-delta.accepted'] += 1
yield None
return
- snapshots = collections.defaultdict(list)
- for candidates in _rawgroups(revlog, p1, p2, cachedelta, snapshots):
+ if snapshot_cache is None:
+ snapshot_cache = SnapshotCache()
+ groups = _rawgroups(
+ revlog,
+ p1,
+ p2,
+ cachedelta,
+ snapshot_cache,
+ )
+ for candidates in groups:
good = yield candidates
if good is not None:
break
@@ -797,19 +878,22 @@
break
good = yield (base,)
# refine snapshot up
- if not snapshots:
- _findsnapshots(revlog, snapshots, good + 1)
+ if not snapshot_cache.snapshots:
+ snapshot_cache.update(revlog, good + 1)
previous = None
while good != previous:
previous = good
- children = tuple(sorted(c for c in snapshots[good]))
+ children = tuple(sorted(c for c in snapshot_cache.snapshots[good]))
good = yield children
- # we have found nothing
+ if debug_info is not None:
+ if good is None:
+ debug_info['no-solution'] += 1
+
yield None
-def _rawgroups(revlog, p1, p2, cachedelta, snapshots=None):
+def _rawgroups(revlog, p1, p2, cachedelta, snapshot_cache=None):
"""Provides group of revision to be tested as delta base
This lower level function focus on emitting delta theorically interresting
@@ -840,9 +924,9 @@
yield parents
if sparse and parents:
- if snapshots is None:
- # map: base-rev: snapshot-rev
- snapshots = collections.defaultdict(list)
+ if snapshot_cache is None:
+ # map: base-rev: [snapshot-revs]
+ snapshot_cache = SnapshotCache()
# See if we can use an existing snapshot in the parent chains to use as
# a base for a new intermediate-snapshot
#
@@ -856,7 +940,7 @@
break
parents_snaps[idx].add(s)
snapfloor = min(parents_snaps[0]) + 1
- _findsnapshots(revlog, snapshots, snapfloor)
+ snapshot_cache.update(revlog, snapfloor)
# search for the highest "unrelated" revision
#
# Adding snapshots used by "unrelated" revision increase the odd we
@@ -879,14 +963,14 @@
# chain.
max_depth = max(parents_snaps.keys())
chain = deltachain(other)
- for idx, s in enumerate(chain):
+ for depth, s in enumerate(chain):
if s < snapfloor:
continue
- if max_depth < idx:
+ if max_depth < depth:
break
if not revlog.issnapshot(s):
break
- parents_snaps[idx].add(s)
+ parents_snaps[depth].add(s)
# Test them as possible intermediate snapshot base
# We test them from highest to lowest level. High level one are more
# likely to result in small delta
@@ -894,7 +978,7 @@
for idx, snaps in sorted(parents_snaps.items(), reverse=True):
siblings = set()
for s in snaps:
- siblings.update(snapshots[s])
+ siblings.update(snapshot_cache.snapshots[s])
# Before considering making a new intermediate snapshot, we check
# if an existing snapshot, children of base we consider, would be
# suitable.
@@ -922,7 +1006,8 @@
# revisions instead of starting our own. Without such re-use,
# topological branches would keep reopening new full chains. Creating
# more and more snapshot as the repository grow.
- yield tuple(snapshots[nullrev])
+ full = [r for r in snapshot_cache.snapshots[nullrev] if snapfloor <= r]
+ yield tuple(sorted(full))
if not sparse:
# other approach failed try against prev to hopefully save us a
@@ -930,11 +1015,74 @@
yield (prev,)
+class SnapshotCache:
+ __slots__ = ('snapshots', '_start_rev', '_end_rev')
+
+ def __init__(self):
+ self.snapshots = collections.defaultdict(set)
+ self._start_rev = None
+ self._end_rev = None
+
+ def update(self, revlog, start_rev=0):
+ """find snapshots from start_rev to tip"""
+ nb_revs = len(revlog)
+ end_rev = nb_revs - 1
+ if start_rev > end_rev:
+ return # range is empty
+
+ if self._start_rev is None:
+ assert self._end_rev is None
+ self._update(revlog, start_rev, end_rev)
+ elif not (self._start_rev <= start_rev and end_rev <= self._end_rev):
+ if start_rev < self._start_rev:
+ self._update(revlog, start_rev, self._start_rev - 1)
+ if self._end_rev < end_rev:
+ self._update(revlog, self._end_rev + 1, end_rev)
+
+ if self._start_rev is None:
+ assert self._end_rev is None
+ self._end_rev = end_rev
+ self._start_rev = start_rev
+ else:
+ self._start_rev = min(self._start_rev, start_rev)
+ self._end_rev = max(self._end_rev, end_rev)
+ assert self._start_rev <= self._end_rev, (
+ self._start_rev,
+ self._end_rev,
+ )
+
+ def _update(self, revlog, start_rev, end_rev):
+ """internal method that actually does the content update"""
+ assert self._start_rev is None or (
+ start_rev < self._start_rev or start_rev > self._end_rev
+ ), (self._start_rev, self._end_rev, start_rev, end_rev)
+ assert self._start_rev is None or (
+ end_rev < self._start_rev or end_rev > self._end_rev
+ ), (self._start_rev, self._end_rev, start_rev, end_rev)
+ cache = self.snapshots
+ if util.safehasattr(revlog.index, b'findsnapshots'):
+ revlog.index.findsnapshots(cache, start_rev, end_rev)
+ else:
+ deltaparent = revlog.deltaparent
+ issnapshot = revlog.issnapshot
+ for rev in revlog.revs(start_rev, end_rev):
+ if issnapshot(rev):
+ cache[deltaparent(rev)].add(rev)
+
+
class deltacomputer:
- def __init__(self, revlog, write_debug=None, debug_search=False):
+ def __init__(
+ self,
+ revlog,
+ write_debug=None,
+ debug_search=False,
+ debug_info=None,
+ ):
self.revlog = revlog
self._write_debug = write_debug
self._debug_search = debug_search
+ self._debug_info = debug_info
+ self._snapshot_cache = SnapshotCache()
def buildtext(self, revinfo, fh):
"""Builds a fulltext version of a revision
@@ -998,7 +1146,7 @@
snapshotdepth = len(revlog._deltachain(deltabase)[0])
delta = None
if revinfo.cachedelta:
- cachebase, cachediff = revinfo.cachedelta
+ cachebase = revinfo.cachedelta[0]
# check if the diff still apply
currentbase = cachebase
while (
@@ -1103,11 +1251,14 @@
if revinfo.flags & REVIDX_RAWTEXT_CHANGING_FLAGS:
return self._fullsnapshotinfo(fh, revinfo, target_rev)
- if self._write_debug is not None:
+ gather_debug = (
+ self._write_debug is not None or self._debug_info is not None
+ )
+ debug_search = self._write_debug is not None and self._debug_search
+
+ if gather_debug:
start = util.timer()
- debug_search = self._write_debug is not None and self._debug_search
-
# count the number of different delta we tried (for debug purpose)
dbg_try_count = 0
# count the number of "search round" we did. (for debug purpose)
@@ -1122,7 +1273,7 @@
deltainfo = None
p1r, p2r = revlog.rev(p1), revlog.rev(p2)
- if self._write_debug is not None:
+ if gather_debug:
if p1r != nullrev:
p1_chain_len = revlog._chaininfo(p1r)[0]
else:
@@ -1137,7 +1288,14 @@
self._write_debug(msg)
groups = _candidategroups(
- self.revlog, revinfo.textlen, p1r, p2r, cachedelta
+ self.revlog,
+ revinfo.textlen,
+ p1r,
+ p2r,
+ cachedelta,
+ excluded_bases,
+ target_rev,
+ snapshot_cache=self._snapshot_cache,
)
candidaterevs = next(groups)
while candidaterevs is not None:
@@ -1147,7 +1305,13 @@
if deltainfo is not None:
prev = deltainfo.base
- if p1 in candidaterevs or p2 in candidaterevs:
+ if (
+ cachedelta is not None
+ and len(candidaterevs) == 1
+ and cachedelta[0] in candidaterevs
+ ):
+ round_type = b"cached-delta"
+ elif p1 in candidaterevs or p2 in candidaterevs:
round_type = b"parents"
elif prev is not None and all(c < prev for c in candidaterevs):
round_type = b"refine-down"
@@ -1195,16 +1359,7 @@
msg = b"DBG-DELTAS-SEARCH: base=%d\n"
msg %= self.revlog.deltaparent(candidaterev)
self._write_debug(msg)
- if candidaterev in excluded_bases:
- if debug_search:
- msg = b"DBG-DELTAS-SEARCH: EXCLUDED\n"
- self._write_debug(msg)
- continue
- if candidaterev >= target_rev:
- if debug_search:
- msg = b"DBG-DELTAS-SEARCH: TOO-HIGH\n"
- self._write_debug(msg)
- continue
+
dbg_try_count += 1
if debug_search:
@@ -1216,7 +1371,7 @@
msg %= delta_end - delta_start
self._write_debug(msg)
if candidatedelta is not None:
- if isgooddeltainfo(self.revlog, candidatedelta, revinfo):
+ if is_good_delta_info(self.revlog, candidatedelta, revinfo):
if debug_search:
msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (GOOD)\n"
msg %= candidatedelta.deltalen
@@ -1244,12 +1399,28 @@
else:
dbg_type = b"delta"
- if self._write_debug is not None:
+ if gather_debug:
end = util.timer()
+ if dbg_type == b'full':
+ used_cached = (
+ cachedelta is not None
+ and dbg_try_rounds == 0
+ and dbg_try_count == 0
+ and cachedelta[0] == nullrev
+ )
+ else:
+ used_cached = (
+ cachedelta is not None
+ and dbg_try_rounds == 1
+ and dbg_try_count == 1
+ and deltainfo.base == cachedelta[0]
+ )
dbg = {
'duration': end - start,
'revision': target_rev,
+ 'delta-base': deltainfo.base, # pytype: disable=attribute-error
'search_round_count': dbg_try_rounds,
+ 'using-cached-base': used_cached,
'delta_try_count': dbg_try_count,
'type': dbg_type,
'p1-chain-len': p1_chain_len,
@@ -1279,31 +1450,39 @@
target_revlog += b'%s:' % target_key
dbg['target-revlog'] = target_revlog
- msg = (
- b"DBG-DELTAS:"
- b" %-12s"
- b" rev=%d:"
- b" search-rounds=%d"
- b" try-count=%d"
- b" - delta-type=%-6s"
- b" snap-depth=%d"
- b" - p1-chain-length=%d"
- b" p2-chain-length=%d"
- b" - duration=%f"
- b"\n"
- )
- msg %= (
- dbg["target-revlog"],
- dbg["revision"],
- dbg["search_round_count"],
- dbg["delta_try_count"],
- dbg["type"],
- dbg["snapshot-depth"],
- dbg["p1-chain-len"],
- dbg["p2-chain-len"],
- dbg["duration"],
- )
- self._write_debug(msg)
+ if self._debug_info is not None:
+ self._debug_info.append(dbg)
+
+ if self._write_debug is not None:
+ msg = (
+ b"DBG-DELTAS:"
+ b" %-12s"
+ b" rev=%d:"
+ b" delta-base=%d"
+ b" is-cached=%d"
+ b" - search-rounds=%d"
+ b" try-count=%d"
+ b" - delta-type=%-6s"
+ b" snap-depth=%d"
+ b" - p1-chain-length=%d"
+ b" p2-chain-length=%d"
+ b" - duration=%f"
+ b"\n"
+ )
+ msg %= (
+ dbg["target-revlog"],
+ dbg["revision"],
+ dbg["delta-base"],
+ dbg["using-cached-base"],
+ dbg["search_round_count"],
+ dbg["delta_try_count"],
+ dbg["type"],
+ dbg["snapshot-depth"],
+ dbg["p1-chain-len"],
+ dbg["p2-chain-len"],
+ dbg["duration"],
+ )
+ self._write_debug(msg)
return deltainfo
--- a/mercurial/revlogutils/docket.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/revlogutils/docket.py Wed Jan 04 16:02:22 2023 +0100
@@ -90,7 +90,7 @@
# * 8 bytes: pending size of data
# * 8 bytes: pending size of sidedata
# * 1 bytes: default compression header
-S_HEADER = struct.Struct(constants.INDEX_HEADER_FMT + b'BBBBBBLLLLLLc')
+S_HEADER = struct.Struct(constants.INDEX_HEADER_FMT + b'BBBBBBQQQQQQc')
# * 1 bytes: size of index uuid
# * 8 bytes: size of file
S_OLD_UID = struct.Struct('>BL')
--- a/mercurial/revset.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/revset.py Wed Jan 04 16:02:22 2023 +0100
@@ -1869,13 +1869,12 @@
dests = []
missing = set()
for path in urlutil.get_push_paths(repo, repo.ui, dests):
- dest = path.pushloc or path.loc
branches = path.branch, []
revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
if revs:
revs = [repo.lookup(rev) for rev in revs]
- other = hg.peer(repo, {}, dest)
+ other = hg.peer(repo, {}, path)
try:
with repo.ui.silent():
outgoing = discovery.findcommonoutgoing(
@@ -2131,11 +2130,9 @@
dest = getstring(l[1], _(b"remote requires a repository path"))
if not dest:
dest = b'default'
- dest, branches = urlutil.get_unique_pull_path(
- b'remote', repo, repo.ui, dest
- )
-
- other = hg.peer(repo, {}, dest)
+ path = urlutil.get_unique_pull_path_obj(b'remote', repo.ui, dest)
+
+ other = hg.peer(repo, {}, path)
n = other.lookup(q)
if n in repo:
r = repo[n].rev()
--- a/mercurial/scmposix.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/scmposix.py Wed Jan 04 16:02:22 2023 +0100
@@ -4,6 +4,11 @@
import os
import sys
+from typing import (
+ List,
+ Tuple,
+)
+
from .pycompat import getattr
from . import (
encoding,
@@ -11,6 +16,9 @@
util,
)
+if pycompat.TYPE_CHECKING:
+ from . import ui as uimod
+
# BSD 'more' escapes ANSI color sequences by default. This can be disabled by
# $MORE variable, but there's no compatible option with Linux 'more'. Given
# OS X is widely used and most modern Unix systems would have 'less', setting
@@ -18,7 +26,7 @@
fallbackpager = b'less'
-def _rcfiles(path):
+def _rcfiles(path: bytes) -> List[bytes]:
rcs = [os.path.join(path, b'hgrc')]
rcdir = os.path.join(path, b'hgrc.d')
try:
@@ -34,7 +42,7 @@
return rcs
-def systemrcpath():
+def systemrcpath() -> List[bytes]:
path = []
if pycompat.sysplatform == b'plan9':
root = b'lib/mercurial'
@@ -49,7 +57,7 @@
return path
-def userrcpath():
+def userrcpath() -> List[bytes]:
if pycompat.sysplatform == b'plan9':
return [encoding.environ[b'home'] + b'/lib/hgrc']
elif pycompat.isdarwin:
@@ -65,7 +73,7 @@
]
-def termsize(ui):
+def termsize(ui: "uimod.ui") -> Tuple[int, int]:
try:
import termios
@@ -88,7 +96,7 @@
except ValueError:
pass
except IOError as e:
- if e[0] == errno.EINVAL: # pytype: disable=unsupported-operands
+ if e.errno == errno.EINVAL:
pass
else:
raise
--- a/mercurial/scmutil.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/scmutil.py Wed Jan 04 16:02:22 2023 +0100
@@ -1858,7 +1858,12 @@
def gddeltaconfig(ui):
- """helper function to know if incoming delta should be optimised"""
+ """helper function to know if incoming deltas should be optimized
+
+ The `format.generaldelta` config is an old form of the config that also
+ implies that incoming delta-bases should never be trusted. This function
+ exists for this purpose.
+ """
# experimental config: format.generaldelta
return ui.configbool(b'format', b'generaldelta')
--- a/mercurial/scmwindows.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/scmwindows.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,4 +1,10 @@
import os
+import winreg # pytype: disable=import-error
+
+from typing import (
+ List,
+ Tuple,
+)
from . import (
encoding,
@@ -7,19 +13,14 @@
win32,
)
-try:
- import _winreg as winreg # pytype: disable=import-error
-
- winreg.CloseKey
-except ImportError:
- # py2 only
- import winreg # pytype: disable=import-error
+if pycompat.TYPE_CHECKING:
+ from . import ui as uimod
# MS-DOS 'more' is the only pager available by default on Windows.
fallbackpager = b'more'
-def systemrcpath():
+def systemrcpath() -> List[bytes]:
'''return default os-specific hgrc search path'''
rcpath = []
filename = win32.executablepath()
@@ -27,7 +28,7 @@
progrc = os.path.join(os.path.dirname(filename), b'mercurial.ini')
rcpath.append(progrc)
- def _processdir(progrcd):
+ def _processdir(progrcd: bytes) -> None:
if os.path.isdir(progrcd):
for f, kind in sorted(util.listdir(progrcd)):
if f.endswith(b'.rc'):
@@ -68,7 +69,7 @@
return rcpath
-def userrcpath():
+def userrcpath() -> List[bytes]:
'''return os-specific hgrc search path to the user dir'''
home = _legacy_expanduser(b'~')
path = [os.path.join(home, b'mercurial.ini'), os.path.join(home, b'.hgrc')]
@@ -79,7 +80,7 @@
return path
-def _legacy_expanduser(path):
+def _legacy_expanduser(path: bytes) -> bytes:
"""Expand ~ and ~user constructs in the pre 3.8 style"""
# Python 3.8+ changed the expansion of '~' from HOME to USERPROFILE. See
@@ -111,5 +112,5 @@
return userhome + path[i:]
-def termsize(ui):
+def termsize(ui: "uimod.ui") -> Tuple[int, int]:
return win32.termsize()
--- a/mercurial/shelve.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/shelve.py Wed Jan 04 16:02:22 2023 +0100
@@ -247,6 +247,14 @@
for ext in shelvefileextensions:
self.vfs.tryunlink(self.name + b'.' + ext)
+ def changed_files(self, ui, repo):
+ try:
+ ctx = repo.unfiltered()[self.readinfo()[b'node']]
+ return ctx.files()
+ except (FileNotFoundError, error.RepoLookupError):
+ filename = self.vfs.join(self.name + b'.patch')
+ return patch.changedfiles(ui, repo, filename)
+
def _optimized_match(repo, node):
"""
--- a/mercurial/sshpeer.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/sshpeer.py Wed Jan 04 16:02:22 2023 +0100
@@ -372,7 +372,7 @@
class sshv1peer(wireprotov1peer.wirepeer):
def __init__(
- self, ui, url, proc, stdin, stdout, stderr, caps, autoreadstderr=True
+ self, ui, path, proc, stdin, stdout, stderr, caps, autoreadstderr=True
):
"""Create a peer from an existing SSH connection.
@@ -383,8 +383,7 @@
``autoreadstderr`` denotes whether to automatically read from
stderr and to forward its output.
"""
- self._url = url
- self.ui = ui
+ super().__init__(ui, path=path)
# self._subprocess is unused. Keeping a handle on the process
# holds a reference and prevents it from being garbage collected.
self._subprocess = proc
@@ -411,14 +410,11 @@
# Begin of ipeerconnection interface.
def url(self):
- return self._url
+ return self.path.loc
def local(self):
return None
- def peer(self):
- return self
-
def canpush(self):
return True
@@ -610,16 +606,16 @@
)
-def instance(ui, path, create, intents=None, createopts=None):
+def make_peer(ui, path, create, intents=None, createopts=None):
"""Create an SSH peer.
The returned object conforms to the ``wireprotov1peer.wirepeer`` interface.
"""
- u = urlutil.url(path, parsequery=False, parsefragment=False)
+ u = urlutil.url(path.loc, parsequery=False, parsefragment=False)
if u.scheme != b'ssh' or not u.host or u.path is None:
raise error.RepoError(_(b"couldn't parse location %s") % path)
- urlutil.checksafessh(path)
+ urlutil.checksafessh(path.loc)
if u.passwd is not None:
raise error.RepoError(_(b'password in URL not supported'))
--- a/mercurial/statichttprepo.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/statichttprepo.py Wed Jan 04 16:02:22 2023 +0100
@@ -236,8 +236,8 @@
def local(self):
return False
- def peer(self):
- return statichttppeer(self)
+ def peer(self, path=None):
+ return statichttppeer(self, path=path)
def wlock(self, wait=True):
raise error.LockUnavailable(
@@ -259,7 +259,8 @@
pass # statichttprepository are read only
-def instance(ui, path, create, intents=None, createopts=None):
+def make_peer(ui, path, create, intents=None, createopts=None):
if create:
raise error.Abort(_(b'cannot create new static-http repository'))
- return statichttprepository(ui, path[7:])
+ url = path.loc[7:]
+ return statichttprepository(ui, url).peer(path=path)
--- a/mercurial/statprof.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/statprof.py Wed Jan 04 16:02:22 2023 +0100
@@ -1049,7 +1049,7 @@
# process options
try:
opts, args = pycompat.getoptb(
- sys.argv[optstart:],
+ pycompat.sysargv[optstart:],
b"hl:f:o:p:",
[b"help", b"limit=", b"file=", b"output-file=", b"script-path="],
)
--- a/mercurial/templater.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/templater.py Wed Jan 04 16:02:22 2023 +0100
@@ -177,10 +177,17 @@
quote = program[pos : pos + 2]
s = pos = pos + 2
while pos < end: # find closing escaped quote
+ # pycompat.bytestr (and bytes) both have .startswith() that
+ # takes an optional start and an optional end, but pytype thinks
+ # it only takes 2 args.
+
+ # pytype: disable=wrong-arg-count
if program.startswith(b'\\\\\\', pos, end):
pos += 4 # skip over double escaped characters
continue
if program.startswith(quote, pos, end):
+ # pytype: enable=wrong-arg-count
+
# interpret as if it were a part of an outer string
data = parser.unescapestr(program[s:pos])
if token == b'template':
@@ -300,7 +307,14 @@
return
parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, b'}'))
+
+ # pycompat.bytestr (and bytes) both have .startswith() that
+ # takes an optional start and an optional end, but pytype thinks
+ # it only takes 2 args.
+
+ # pytype: disable=wrong-arg-count
if not tmpl.startswith(b'}', pos):
+ # pytype: enable=wrong-arg-count
raise error.ParseError(_(b"invalid token"), pos)
yield (b'template', parseres, n)
pos += 1
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/LICENSE Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack and the attrs contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
--- a/mercurial/thirdparty/attr/LICENSE.txt Wed Jan 04 12:06:07 2023 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2015 Hynek Schlawack
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
--- a/mercurial/thirdparty/attr/__init__.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/thirdparty/attr/__init__.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,37 +1,35 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
+
+
+import sys
+
+from functools import partial
-from ._funcs import (
- asdict,
- assoc,
- astuple,
- evolve,
- has,
-)
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
+ NOTHING,
Attribute,
Factory,
- NOTHING,
- attr,
- attributes,
+ attrib,
+ attrs,
fields,
+ fields_dict,
make_class,
validate,
)
-from ._config import (
- get_run_validators,
- set_run_validators,
-)
-from . import exceptions
-from . import filters
-from . import converters
-from . import validators
+from ._version_info import VersionInfo
-__version__ = "17.2.0"
+__version__ = "22.1.0"
+__version_info__ = VersionInfo._from_version_string(__version__)
__title__ = "attrs"
__description__ = "Classes Without Boilerplate"
-__uri__ = "http://www.attrs.org/"
+__url__ = "https://www.attrs.org/"
+__uri__ = __url__
__doc__ = __description__ + " <" + __uri__ + ">"
__author__ = "Hynek Schlawack"
@@ -41,8 +39,9 @@
__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
-s = attrs = attributes
-ib = attrib = attr
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
__all__ = [
"Attribute",
@@ -55,17 +54,26 @@
"attrib",
"attributes",
"attrs",
+ "cmp_using",
"converters",
"evolve",
"exceptions",
"fields",
+ "fields_dict",
"filters",
"get_run_validators",
"has",
"ib",
"make_class",
+ "resolve_types",
"s",
"set_run_validators",
+ "setters",
"validate",
"validators",
]
+
+if sys.version_info[:2] >= (3, 6):
+ from ._next_gen import define, field, frozen, mutable # noqa: F401
+
+ __all__.extend(("define", "field", "frozen", "mutable"))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/__init__.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,486 @@
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ ClassVar,
+ Dict,
+ Generic,
+ List,
+ Mapping,
+ Optional,
+ Protocol,
+ Sequence,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._cmp import cmp_using as cmp_using
+from ._version_info import VersionInfo
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_EqOrderType = Union[bool, Callable[[Any], Any]]
+_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
+_ConverterType = Callable[[Any], Any]
+_FilterType = Callable[[Attribute[_T], _T], bool]
+_ReprType = Callable[[Any], str]
+_ReprArgType = Union[bool, _ReprType]
+_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
+_OnSetAttrArgType = Union[
+ _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
+]
+_FieldTransformer = Callable[
+ [type, List[Attribute[Any]]], List[Attribute[Any]]
+]
+# FIXME: in reality, if multiple validators are passed they must be in a list
+# or tuple, but those are invariant and so would prevent subtypes of
+# _ValidatorType from working when passed in a list or tuple.
+_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
+
+# A protocol to be able to statically accept an attrs class.
+class AttrsInstance(Protocol):
+ __attrs_attrs__: ClassVar[Any]
+
+# _make --
+
+NOTHING: object
+
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+if sys.version_info >= (3, 8):
+ from typing import Literal
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[Any], _T],
+ takes_self: Literal[True],
+ ) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[], _T],
+ takes_self: Literal[False],
+ ) -> _T: ...
+
+else:
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Union[Callable[[Any], _T], Callable[[], _T]],
+ takes_self: bool = ...,
+ ) -> _T: ...
+
+# Static type inference support via __dataclass_transform__ implemented as per:
+# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
+# This annotation must be applied to all overloads of "define" and "attrs"
+#
+# NOTE: This is a typing construct and does not exist at runtime. Extensions
+# wrapping attrs decorators should declare a separate __dataclass_transform__
+# signature in the extension module using the specification linked above to
+# provide pyright support.
+def __dataclass_transform__(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
+) -> Callable[[_T], _T]: ...
+
+class Attribute(Generic[_T]):
+ name: str
+ default: Optional[_T]
+ validator: Optional[_ValidatorType[_T]]
+ repr: _ReprArgType
+ cmp: _EqOrderType
+ eq: _EqOrderType
+ order: _EqOrderType
+ hash: Optional[bool]
+ init: bool
+ converter: Optional[_ConverterType]
+ metadata: Dict[Any, Any]
+ type: Optional[Type[_T]]
+ kw_only: bool
+ on_setattr: _OnSetAttrType
+ def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+# - Pros: Handles simple cases correctly
+# - Cons: Might produce less informative errors in the case of conflicting
+# TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+# - Pros: Better error messages than #1 for conflicting TypeVars
+# - Cons: Terrible error messages for validator checks.
+# e.g. attr.ib(type=int, validator=validate_str)
+# -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+# - Pros: Simple here, and we could customize the plugin with our own errors.
+# - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+# attr() -> Any
+# attr(8) -> int
+# attr(validator=<some callable>) -> Whatever the callable expects.
+# This makes this type of assignments possible:
+# x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: object = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+ *,
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+ *,
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: _C,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: None = ...,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: _C,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: None = ...,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+mutable = define
+frozen = define # they differ only in their defaults
+
+def fields(cls: Type[AttrsInstance]) -> Any: ...
+def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ...
+def validate(inst: AttrsInstance) -> None: ...
+def resolve_types(
+ cls: _C,
+ globalns: Optional[Dict[str, Any]] = ...,
+ localns: Optional[Dict[str, Any]] = ...,
+ attribs: Optional[List[Attribute[Any]]] = ...,
+) -> _C: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
+def make_class(
+ name: str,
+ attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
+ bases: Tuple[type, ...] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ collect_by_mro: bool = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+# XXX: remember to fix attrs.asdict/astuple too!
+def asdict(
+ inst: AttrsInstance,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ dict_factory: Type[Mapping[Any, Any]] = ...,
+ retain_collection_types: bool = ...,
+ value_serializer: Optional[
+ Callable[[type, Attribute[Any], Any], Any]
+ ] = ...,
+ tuple_keys: Optional[bool] = ...,
+) -> Dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+ inst: AttrsInstance,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ tuple_factory: Type[Sequence[Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> Tuple[Any, ...]: ...
+def has(cls: type) -> bool: ...
+def assoc(inst: _T, **changes: Any) -> _T: ...
+def evolve(inst: _T, **changes: Any) -> _T: ...
+
+# _config --
+
+def set_run_validators(run: bool) -> None: ...
+def get_run_validators() -> bool: ...
+
+# aliases --
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_cmp.py Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,155 @@
+# SPDX-License-Identifier: MIT
+
+
+import functools
+import types
+
+from ._make import _make_ne
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+ eq=None,
+ lt=None,
+ le=None,
+ gt=None,
+ ge=None,
+ require_same_type=True,
+ class_name="Comparable",
+):
+ """
+ Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
+ ``cmp`` arguments to customize field comparison.
+
+ The resulting class will have a full set of ordering methods if
+ at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
+
+ :param Optional[callable] eq: `callable` used to evaluate equality
+ of two objects.
+ :param Optional[callable] lt: `callable` used to evaluate whether
+ one object is less than another object.
+ :param Optional[callable] le: `callable` used to evaluate whether
+ one object is less than or equal to another object.
+ :param Optional[callable] gt: `callable` used to evaluate whether
+ one object is greater than another object.
+ :param Optional[callable] ge: `callable` used to evaluate whether
+ one object is greater than or equal to another object.
+
+ :param bool require_same_type: When `True`, equality and ordering methods
+ will return `NotImplemented` if objects are not of the same type.
+
+ :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
+
+ See `comparison` for more details.
+
+ .. versionadded:: 21.1.0
+ """
+
+ body = {
+ "__slots__": ["value"],
+ "__init__": _make_init(),
+ "_requirements": [],
+ "_is_comparable_to": _is_comparable_to,
+ }
+
+ # Add operations.
+ num_order_functions = 0
+ has_eq_function = False
+
+ if eq is not None:
+ has_eq_function = True
+ body["__eq__"] = _make_operator("eq", eq)
+ body["__ne__"] = _make_ne()
+
+ if lt is not None:
+ num_order_functions += 1
+ body["__lt__"] = _make_operator("lt", lt)
+
+ if le is not None:
+ num_order_functions += 1
+ body["__le__"] = _make_operator("le", le)
+
+ if gt is not None:
+ num_order_functions += 1
+ body["__gt__"] = _make_operator("gt", gt)
+
+ if ge is not None:
+ num_order_functions += 1
+ body["__ge__"] = _make_operator("ge", ge)
+
+ type_ = types.new_class(
+ class_name, (object,), {}, lambda ns: ns.update(body)
+ )
+
+ # Add same type requirement.
+ if require_same_type:
+ type_._requirements.append(_check_same_type)
+
+ # Add total ordering if at least one operation was defined.
+ if 0 < num_order_functions < 4:
+ if not has_eq_function:
+ # functools.total_ordering requires __eq__ to be defined,
+ # so raise early error here to keep a nice stack.
+ raise ValueError(
+                "eq must be defined in order to complete ordering from "
+ "lt, le, gt, ge."
+ )
+ type_ = functools.total_ordering(type_)
+
+ return type_
+
+
+def _make_init():
+ """
+ Create __init__ method.
+ """
+
+ def __init__(self, value):
+ """
+ Initialize object with *value*.
+ """
+ self.value = value
+
+ return __init__
+
+
+def _make_operator(name, func):
+ """
+ Create operator method.
+ """
+
+ def method(self, other):
+ if not self._is_comparable_to(other):
+ return NotImplemented
+
+ result = func(self.value, other.value)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return result
+
+ method.__name__ = "__%s__" % (name,)
+ method.__doc__ = "Return a %s b. Computed by attrs." % (
+ _operation_names[name],
+ )
+
+ return method
+
+
+def _is_comparable_to(self, other):
+ """
+ Check whether `other` is comparable to `self`.
+ """
+ for func in self._requirements:
+ if not func(self, other):
+ return False
+ return True
+
+
+def _check_same_type(self, other):
+ """
+ Return True if *self* and *other* are of the same type, False otherwise.
+ """
+ return other.value.__class__ is self.value.__class__
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_cmp.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,13 @@
+from typing import Any, Callable, Optional, Type
+
+_CompareWithType = Callable[[Any, Any], bool]
+
+def cmp_using(
+ eq: Optional[_CompareWithType],
+ lt: Optional[_CompareWithType],
+ le: Optional[_CompareWithType],
+ gt: Optional[_CompareWithType],
+ ge: Optional[_CompareWithType],
+ require_same_type: bool,
+ class_name: str,
+) -> Type: ...
--- a/mercurial/thirdparty/attr/_compat.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/thirdparty/attr/_compat.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,90 +1,185 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
+
+
+import inspect
+import platform
+import sys
+import threading
+import types
+import warnings
+
+from collections.abc import Mapping, Sequence # noqa
+
+
+PYPY = platform.python_implementation() == "PyPy"
+PY36 = sys.version_info[:2] >= (3, 6)
+HAS_F_STRINGS = PY36
+PY310 = sys.version_info[:2] >= (3, 10)
-import sys
-import types
+
+if PYPY or PY36:
+ ordered_dict = dict
+else:
+ from collections import OrderedDict
+
+ ordered_dict = OrderedDict
+
+
+def just_warn(*args, **kw):
+ warnings.warn(
+ "Running interpreter doesn't sufficiently support code object "
+ "introspection. Some features like bare super() or accessing "
+ "__class__ will not work with slotted classes.",
+ RuntimeWarning,
+ stacklevel=2,
+ )
-PY2 = sys.version_info[0] == 2
+class _AnnotationExtractor:
+ """
+ Extract type annotations from a callable, returning None whenever there
+ is none.
+ """
+
+ __slots__ = ["sig"]
+
+ def __init__(self, callable):
+ try:
+ self.sig = inspect.signature(callable)
+ except (ValueError, TypeError): # inspect failed
+ self.sig = None
+
+ def get_first_param_type(self):
+ """
+ Return the type annotation of the first argument if it's not empty.
+ """
+ if not self.sig:
+ return None
+
+ params = list(self.sig.parameters.values())
+ if params and params[0].annotation is not inspect.Parameter.empty:
+ return params[0].annotation
+
+ return None
+
+ def get_return_type(self):
+ """
+ Return the return type if it's not empty.
+ """
+ if (
+ self.sig
+ and self.sig.return_annotation is not inspect.Signature.empty
+ ):
+ return self.sig.return_annotation
+
+ return None
-if PY2:
- from UserDict import IterableUserDict
-
- # We 'bundle' isclass instead of using inspect as importing inspect is
- # fairly expensive (order of 10-15 ms for a modern machine in 2016)
- def isclass(klass):
- return isinstance(klass, (type, types.ClassType))
+def make_set_closure_cell():
+ """Return a function of two arguments (cell, value) which sets
+ the value stored in the closure cell `cell` to `value`.
+ """
+ # pypy makes this easy. (It also supports the logic below, but
+ # why not do the easy/fast thing?)
+ if PYPY:
- # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
- TYPE = "type"
+ def set_closure_cell(cell, value):
+ cell.__setstate__((value,))
+
+ return set_closure_cell
- def iteritems(d):
- return d.iteritems()
+ # Otherwise gotta do it the hard way.
- def iterkeys(d):
- return d.iterkeys()
+ # Create a function that will set its first cellvar to `value`.
+ def set_first_cellvar_to(value):
+ x = value
+ return
- # Python 2 is bereft of a read-only dict proxy, so we make one!
- class ReadOnlyDict(IterableUserDict):
- """
- Best-effort read-only dict wrapper.
- """
+ # This function will be eliminated as dead code, but
+ # not before its reference to `x` forces `x` to be
+ # represented as a closure cell rather than a local.
+ def force_x_to_be_a_cell(): # pragma: no cover
+ return x
- def __setitem__(self, key, val):
- # We gently pretend we're a Python 3 mappingproxy.
- raise TypeError("'mappingproxy' object does not support item "
- "assignment")
+ try:
+ # Extract the code object and make sure our assumptions about
+ # the closure behavior are correct.
+ co = set_first_cellvar_to.__code__
+ if co.co_cellvars != ("x",) or co.co_freevars != ():
+ raise AssertionError # pragma: no cover
- def update(self, _):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError("'mappingproxy' object has no attribute "
- "'update'")
+ # Convert this code object to a code object that sets the
+ # function's first _freevar_ (not cellvar) to the argument.
+ if sys.version_info >= (3, 8):
- def __delitem__(self, _):
- # We gently pretend we're a Python 3 mappingproxy.
- raise TypeError("'mappingproxy' object does not support item "
- "deletion")
+ def set_closure_cell(cell, value):
+ cell.cell_contents = value
- def clear(self):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError("'mappingproxy' object has no attribute "
- "'clear'")
-
- def pop(self, key, default=None):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError("'mappingproxy' object has no attribute "
- "'pop'")
+ else:
+ args = [co.co_argcount]
+ args.append(co.co_kwonlyargcount)
+ args.extend(
+ [
+ co.co_nlocals,
+ co.co_stacksize,
+ co.co_flags,
+ co.co_code,
+ co.co_consts,
+ co.co_names,
+ co.co_varnames,
+ co.co_filename,
+ co.co_name,
+ co.co_firstlineno,
+ co.co_lnotab,
+ # These two arguments are reversed:
+ co.co_cellvars,
+ co.co_freevars,
+ ]
+ )
+ set_first_freevar_code = types.CodeType(*args)
- def popitem(self):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError("'mappingproxy' object has no attribute "
- "'popitem'")
-
- def setdefault(self, key, default=None):
- # We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError("'mappingproxy' object has no attribute "
- "'setdefault'")
+ def set_closure_cell(cell, value):
+ # Create a function using the set_first_freevar_code,
+ # whose first closure cell is `cell`. Calling it will
+ # change the value of that cell.
+ setter = types.FunctionType(
+ set_first_freevar_code, {}, "setter", (), (cell,)
+ )
+ # And call it to set the cell.
+ setter(value)
- def __repr__(self):
- # Override to be identical to the Python 3 version.
- return "mappingproxy(" + repr(self.data) + ")"
+ # Make sure it works on this interpreter:
+ def make_func_with_cell():
+ x = None
+
+ def func():
+ return x # pragma: no cover
- def metadata_proxy(d):
- res = ReadOnlyDict()
- res.data.update(d) # We blocked update, so we have to do it like this.
- return res
+ return func
+
+ cell = make_func_with_cell().__closure__[0]
+ set_closure_cell(cell, 100)
+ if cell.cell_contents != 100:
+ raise AssertionError # pragma: no cover
-else:
- def isclass(klass):
- return isinstance(klass, type)
+ except Exception:
+ return just_warn
+ else:
+ return set_closure_cell
- TYPE = "class"
+
+set_closure_cell = make_set_closure_cell()
- def iteritems(d):
- return d.items()
-
- def iterkeys(d):
- return d.keys()
-
- def metadata_proxy(d):
- return types.MappingProxyType(dict(d))
+# Thread-local global to track attrs instances which are already being repr'd.
+# This is needed because there is no other (thread-safe) way to pass info
+# about the instances that are already being repr'd through the call stack
+# in order to ensure we don't perform infinite recursion.
+#
+# For instance, if an instance contains a dict which contains that instance,
+# we need to know that we're already repr'ing the outside instance from within
+# the dict's repr() call.
+#
+# This lives here rather than in _make.py so that the functions in _make.py
+# don't have a direct reference to the thread-local in their globals dict.
+# If they have such a reference, it breaks cloudpickle.
+repr_context = threading.local()
--- a/mercurial/thirdparty/attr/_config.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/thirdparty/attr/_config.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
__all__ = ["set_run_validators", "get_run_validators"]
@@ -9,6 +9,10 @@
def set_run_validators(run):
"""
Set whether or not validators are run. By default, they are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
+ instead.
"""
if not isinstance(run, bool):
raise TypeError("'run' must be bool.")
@@ -19,5 +23,9 @@
def get_run_validators():
"""
Return whether or not validators are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
+ instead.
"""
return _run_validators
--- a/mercurial/thirdparty/attr/_funcs.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/thirdparty/attr/_funcs.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,14 +1,20 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
+
import copy
-from ._compat import iteritems
-from ._make import NOTHING, fields, _obj_setattr
+from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError
-def asdict(inst, recurse=True, filter=None, dict_factory=dict,
- retain_collection_types=False):
+def asdict(
+ inst,
+ recurse=True,
+ filter=None,
+ dict_factory=dict,
+ retain_collection_types=False,
+ value_serializer=None,
+):
"""
Return the ``attrs`` attribute values of *inst* as a dict.
@@ -17,9 +23,9 @@
:param inst: Instance of an ``attrs``-decorated class.
:param bool recurse: Recurse into classes that are also
``attrs``-decorated.
- :param callable filter: A callable whose return code deteremines whether an
+ :param callable filter: A callable whose return code determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
- called with the :class:`attr.Attribute` as the first argument and the
+ called with the `attrs.Attribute` as the first argument and the
value as the second argument.
:param callable dict_factory: A callable to produce dictionaries from. For
example, to produce ordered dictionaries instead of normal Python
@@ -27,6 +33,10 @@
:param bool retain_collection_types: Do not convert to ``list`` when
encountering an attribute whose type is ``tuple`` or ``set``. Only
meaningful if ``recurse`` is ``True``.
+ :param Optional[callable] value_serializer: A hook that is called for every
+ attribute or dict key/value. It receives the current instance, field
+ and value and must return the (updated) value. The hook is run *after*
+ the optional *filter* has been applied.
:rtype: return type of *dict_factory*
@@ -35,6 +45,9 @@
.. versionadded:: 16.0.0 *dict_factory*
.. versionadded:: 16.1.0 *retain_collection_types*
+ .. versionadded:: 20.3.0 *value_serializer*
+ .. versionadded:: 21.3.0 If a dict has a collection for a key, it is
+ serialized as a tuple.
"""
attrs = fields(inst.__class__)
rv = dict_factory()
@@ -42,24 +55,58 @@
v = getattr(inst, a.name)
if filter is not None and not filter(a, v):
continue
+
+ if value_serializer is not None:
+ v = value_serializer(inst, a, v)
+
if recurse is True:
if has(v.__class__):
- rv[a.name] = asdict(v, recurse=True, filter=filter,
- dict_factory=dict_factory)
- elif isinstance(v, (tuple, list, set)):
+ rv[a.name] = asdict(
+ v,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain_collection_types is True else list
- rv[a.name] = cf([
- asdict(i, recurse=True, filter=filter,
- dict_factory=dict_factory)
- if has(i.__class__) else i
- for i in v
- ])
+ rv[a.name] = cf(
+ [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in v
+ ]
+ )
elif isinstance(v, dict):
df = dict_factory
- rv[a.name] = df((
- asdict(kk, dict_factory=df) if has(kk.__class__) else kk,
- asdict(vv, dict_factory=df) if has(vv.__class__) else vv)
- for kk, vv in iteritems(v))
+ rv[a.name] = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in v.items()
+ )
else:
rv[a.name] = v
else:
@@ -67,8 +114,86 @@
return rv
-def astuple(inst, recurse=True, filter=None, tuple_factory=tuple,
- retain_collection_types=False):
+def _asdict_anything(
+ val,
+ is_key,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+):
+ """
+ ``asdict`` only works on attrs instances, this works on anything.
+ """
+ if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+ # Attrs class.
+ rv = asdict(
+ val,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif isinstance(val, (tuple, list, set, frozenset)):
+ if retain_collection_types is True:
+ cf = val.__class__
+ elif is_key:
+ cf = tuple
+ else:
+ cf = list
+
+ rv = cf(
+ [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in val
+ ]
+ )
+ elif isinstance(val, dict):
+ df = dict_factory
+ rv = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in val.items()
+ )
+ else:
+ rv = val
+ if value_serializer is not None:
+ rv = value_serializer(None, None, rv)
+
+ return rv
+
+
+def astuple(
+ inst,
+ recurse=True,
+ filter=None,
+ tuple_factory=tuple,
+ retain_collection_types=False,
+):
"""
Return the ``attrs`` attribute values of *inst* as a tuple.
@@ -79,7 +204,7 @@
``attrs``-decorated.
:param callable filter: A callable whose return code determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
- called with the :class:`attr.Attribute` as the first argument and the
+ called with the `attrs.Attribute` as the first argument and the
value as the second argument.
:param callable tuple_factory: A callable to produce tuples from. For
example, to produce lists instead of tuples.
@@ -104,38 +229,61 @@
continue
if recurse is True:
if has(v.__class__):
- rv.append(astuple(v, recurse=True, filter=filter,
- tuple_factory=tuple_factory,
- retain_collection_types=retain))
- elif isinstance(v, (tuple, list, set)):
+ rv.append(
+ astuple(
+ v,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain is True else list
- rv.append(cf([
- astuple(j, recurse=True, filter=filter,
- tuple_factory=tuple_factory,
- retain_collection_types=retain)
- if has(j.__class__) else j
- for j in v
- ]))
+ rv.append(
+ cf(
+ [
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
+ for j in v
+ ]
+ )
+ )
elif isinstance(v, dict):
df = v.__class__ if retain is True else dict
- rv.append(df(
+ rv.append(
+ df(
(
astuple(
kk,
tuple_factory=tuple_factory,
- retain_collection_types=retain
- ) if has(kk.__class__) else kk,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk,
astuple(
vv,
tuple_factory=tuple_factory,
- retain_collection_types=retain
- ) if has(vv.__class__) else vv
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv,
)
- for kk, vv in iteritems(v)))
+ for kk, vv in v.items()
+ )
+ )
else:
rv.append(v)
else:
rv.append(v)
+
return rv if tuple_factory is list else tuple_factory(rv)
@@ -146,7 +294,7 @@
:param type cls: Class to introspect.
:raise TypeError: If *cls* is not a class.
- :rtype: :class:`bool`
+ :rtype: bool
"""
return getattr(cls, "__attrs_attrs__", None) is not None
@@ -166,19 +314,26 @@
class.
.. deprecated:: 17.1.0
- Use :func:`evolve` instead.
+ Use `attrs.evolve` instead if you can.
+ This function will not be removed du to the slightly different approach
+ compared to `attrs.evolve`.
"""
import warnings
- warnings.warn("assoc is deprecated and will be removed after 2018/01.",
- DeprecationWarning)
+
+ warnings.warn(
+ "assoc is deprecated and will be removed after 2018/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
new = copy.copy(inst)
attrs = fields(inst.__class__)
- for k, v in iteritems(changes):
+ for k, v in changes.items():
a = getattr(attrs, k, NOTHING)
if a is NOTHING:
raise AttrsAttributeNotFoundError(
- "{k} is not an attrs attribute on {cl}."
- .format(k=k, cl=new.__class__)
+ "{k} is not an attrs attribute on {cl}.".format(
+ k=k, cl=new.__class__
+ )
)
_obj_setattr(new, k, v)
return new
@@ -209,4 +364,57 @@
init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
if init_name not in changes:
changes[init_name] = getattr(inst, attr_name)
+
return cls(**changes)
+
+
+def resolve_types(cls, globalns=None, localns=None, attribs=None):
+ """
+ Resolve any strings and forward annotations in type annotations.
+
+ This is only required if you need concrete types in `Attribute`'s *type*
+ field. In other words, you don't need to resolve your types if you only
+ use them for static type checking.
+
+ With no arguments, names will be looked up in the module in which the class
+ was created. If this is not what you want, e.g. if the name only exists
+ inside a method, you may pass *globalns* or *localns* to specify other
+ dictionaries in which to look up these names. See the docs of
+ `typing.get_type_hints` for more details.
+
+ :param type cls: Class to resolve.
+ :param Optional[dict] globalns: Dictionary containing global variables.
+ :param Optional[dict] localns: Dictionary containing local variables.
+ :param Optional[list] attribs: List of attribs for the given class.
+ This is necessary when calling from inside a ``field_transformer``
+ since *cls* is not an ``attrs`` class yet.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class and you didn't pass any attribs.
+ :raise NameError: If types cannot be resolved because of missing variables.
+
+ :returns: *cls* so you can use this function also as a class decorator.
+ Please note that you have to apply it **after** `attrs.define`. That
+ means the decorator has to come in the line **before** `attrs.define`.
+
+ .. versionadded:: 20.1.0
+ .. versionadded:: 21.1.0 *attribs*
+
+ """
+ # Since calling get_type_hints is expensive we cache whether we've
+ # done it already.
+ if getattr(cls, "__attrs_types_resolved__", None) != cls:
+ import typing
+
+ hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
+ for field in fields(cls) if attribs is None else attribs:
+ if field.name in hints:
+ # Since fields have been frozen we must work around it.
+ _obj_setattr(field, "type", hints[field.name])
+ # We store the class we resolved so that subclasses know they haven't
+ # been resolved.
+ cls.__attrs_types_resolved__ = cls
+
+ # Return the class so you can use it as a decorator too.
+ return cls
--- a/mercurial/thirdparty/attr/_make.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/thirdparty/attr/_make.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,50 +1,79 @@
-from __future__ import absolute_import, division, print_function
-
-import hashlib
+# SPDX-License-Identifier: MIT
+
+import copy
import linecache
+import sys
+import types
+import typing
from operator import itemgetter
-from . import _config
-from ._compat import PY2, iteritems, isclass, iterkeys, metadata_proxy
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
+from . import _compat, _config, setters
+from ._compat import (
+ HAS_F_STRINGS,
+ PY310,
+ PYPY,
+ _AnnotationExtractor,
+ ordered_dict,
+ set_closure_cell,
+)
from .exceptions import (
DefaultAlreadySetError,
FrozenInstanceError,
NotAnAttrsClassError,
+ UnannotatedAttributeError,
)
# This is used at least twice, so cache it here.
_obj_setattr = object.__setattr__
-_init_convert_pat = "__attr_convert_{}"
+_init_converter_pat = "__attr_converter_%s"
_init_factory_pat = "__attr_factory_{}"
-_tuple_property_pat = " {attr_name} = property(itemgetter({index}))"
-_empty_metadata_singleton = metadata_proxy({})
-
-
-class _Nothing(object):
+_tuple_property_pat = (
+ " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
+)
+_classvar_prefixes = (
+ "typing.ClassVar",
+ "t.ClassVar",
+ "ClassVar",
+ "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_hash_cache_field = "_attrs_cached_hash"
+
+_empty_metadata_singleton = types.MappingProxyType({})
+
+# Unique object for unequivocal getattr() defaults.
+_sentinel = object()
+
+_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate)
+
+
+class _Nothing:
"""
Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
- All instances of `_Nothing` are equal.
+ ``_Nothing`` is a singleton. There is only ever one of it.
+
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
"""
- def __copy__(self):
- return self
-
- def __deepcopy__(self, _):
- return self
-
- def __eq__(self, other):
- return other.__class__ == _Nothing
-
- def __ne__(self, other):
- return not self == other
+
+ _singleton = None
+
+ def __new__(cls):
+ if _Nothing._singleton is None:
+ _Nothing._singleton = super().__new__(cls)
+ return _Nothing._singleton
def __repr__(self):
return "NOTHING"
- def __hash__(self):
- return 0xdeadbeef
+ def __bool__(self):
+ return False
NOTHING = _Nothing()
@@ -53,92 +82,255 @@
"""
-def attr(default=NOTHING, validator=None,
- repr=True, cmp=True, hash=None, init=True,
- convert=None, metadata={}):
- r"""
+class _CacheHashWrapper(int):
+ """
+ An integer subclass that pickles / copies as None
+
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
+ serializing a potentially (even likely) invalid hash value. Since ``None``
+ is the default value for uncalculated hashes, whenever this is copied,
+ the copy's value for the hash should automatically reset.
+
+ See GH #613 for more details.
+ """
+
+ def __reduce__(self, _none_constructor=type(None), _args=()):
+ return _none_constructor, _args
+
+
+def attrib(
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
Create a new attribute on a class.
.. warning::
Does *not* do anything unless the class is also decorated with
- :func:`attr.s`!
+ `attr.s`!
:param default: A value that is used if an ``attrs``-generated ``__init__``
is used and no value is passed while instantiating or the attribute is
excluded using ``init=False``.
- If the value is an instance of :class:`Factory`, its callable will be
- used to construct a new value (useful for mutable datatypes like lists
+ If the value is an instance of `attrs.Factory`, its callable will be
+ used to construct a new value (useful for mutable data types like lists
or dicts).
- If a default is not set (or set manually to ``attr.NOTHING``), a value
- *must* be supplied when instantiating; otherwise a :exc:`TypeError`
+ If a default is not set (or set manually to `attrs.NOTHING`), a value
+ *must* be supplied when instantiating; otherwise a `TypeError`
will be raised.
The default can also be set using decorator notation as shown below.
- :type default: Any value.
-
- :param validator: :func:`callable` that is called by ``attrs``-generated
+ :type default: Any value
+
+ :param callable factory: Syntactic sugar for
+ ``default=attr.Factory(factory)``.
+
+ :param validator: `callable` that is called by ``attrs``-generated
``__init__`` methods after the instance has been initialized. They
- receive the initialized instance, the :class:`Attribute`, and the
+ receive the initialized instance, the :func:`~attrs.Attribute`, and the
passed value.
The return value is *not* inspected so the validator has to throw an
exception itself.
- If a ``list`` is passed, its items are treated as validators and must
+ If a `list` is passed, its items are treated as validators and must
all pass.
Validators can be globally disabled and re-enabled using
- :func:`get_run_validators`.
+ `get_run_validators`.
The validator can also be set using decorator notation as shown below.
- :type validator: ``callable`` or a ``list`` of ``callable``\ s.
-
- :param bool repr: Include this attribute in the generated ``__repr__``
- method.
- :param bool cmp: Include this attribute in the generated comparison methods
- (``__eq__`` et al).
- :param hash: Include this attribute in the generated ``__hash__``
- method. If ``None`` (default), mirror *cmp*'s value. This is the
- correct behavior according the Python spec. Setting this value to
- anything else than ``None`` is *discouraged*.
- :type hash: ``bool`` or ``None``
+ :type validator: `callable` or a `list` of `callable`\\ s.
+
+ :param repr: Include this attribute in the generated ``__repr__``
+ method. If ``True``, include the attribute; if ``False``, omit it. By
+ default, the built-in ``repr()`` function is used. To override how the
+ attribute value is formatted, pass a ``callable`` that takes a single
+ value and returns a string. Note that the resulting string is used
+ as-is, i.e. it will be used directly *instead* of calling ``repr()``
+ (the default).
+ :type repr: a `bool` or a `callable` to use a custom function.
+
+ :param eq: If ``True`` (default), include this attribute in the
+ generated ``__eq__`` and ``__ne__`` methods that check two instances
+ for equality. To override how the attribute value is compared,
+ pass a ``callable`` that takes a single value and returns the value
+ to be compared.
+ :type eq: a `bool` or a `callable`.
+
+ :param order: If ``True`` (default), include this attributes in the
+ generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
+ To override how the attribute value is ordered,
+ pass a ``callable`` that takes a single value and returns the value
+ to be ordered.
+ :type order: a `bool` or a `callable`.
+
+ :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
+ same value. Must not be mixed with *eq* or *order*.
+ :type cmp: a `bool` or a `callable`.
+
+ :param Optional[bool] hash: Include this attribute in the generated
+ ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This
+ is the correct behavior according the Python spec. Setting this value
+ to anything else than ``None`` is *discouraged*.
:param bool init: Include this attribute in the generated ``__init__``
method. It is possible to set this to ``False`` and set a default
value. In that case this attributed is unconditionally initialized
with the specified default value or factory.
- :param callable convert: :func:`callable` that is called by
+ :param callable converter: `callable` that is called by
``attrs``-generated ``__init__`` methods to convert attribute's value
to the desired format. It is given the passed-in value, and the
returned value will be used as the new value of the attribute. The
value is converted before being passed to the validator, if any.
:param metadata: An arbitrary mapping, to be used by third-party
- components. See :ref:`extending_metadata`.
-
- .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
- .. versionchanged:: 17.1.0
- *hash* is ``None`` and therefore mirrors *cmp* by default .
+ components. See `extending_metadata`.
+ :param type: The type of the attribute. In Python 3.6 or greater, the
+ preferred method to specify the type is using a variable annotation
+ (see :pep:`526`).
+ This argument is provided for backward compatibility.
+ Regardless of the approach used, the type will be stored on
+ ``Attribute.type``.
+
+ Please note that ``attrs`` doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for
+ `static type checking <types>`.
+ :param kw_only: Make this attribute keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+ :param on_setattr: Allows to overwrite the *on_setattr* setting from
+ `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
+ Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+ attribute -- regardless of the setting in `attr.s`.
+ :type on_setattr: `callable`, or a list of callables, or `None`, or
+ `attrs.setters.NO_OP`
+
+ .. versionadded:: 15.2.0 *convert*
+ .. versionadded:: 16.3.0 *metadata*
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+ .. versionchanged:: 17.1.0
+ *hash* is ``None`` and therefore mirrors *eq* by default.
+ .. versionadded:: 17.3.0 *type*
+ .. deprecated:: 17.4.0 *convert*
+ .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
+ *convert* to achieve consistency with other noun-based arguments.
+ .. versionadded:: 18.1.0
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+ .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
"""
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq, order, True
+ )
+
if hash is not None and hash is not True and hash is not False:
raise TypeError(
"Invalid value for hash. Must be True, False, or None."
)
+
+ if factory is not None:
+ if default is not NOTHING:
+ raise ValueError(
+ "The `default` and `factory` arguments are mutually "
+ "exclusive."
+ )
+ if not callable(factory):
+ raise ValueError("The `factory` argument must be a callable.")
+ default = Factory(factory)
+
+ if metadata is None:
+ metadata = {}
+
+ # Apply syntactic sugar by auto-wrapping.
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ if validator and isinstance(validator, (list, tuple)):
+ validator = and_(*validator)
+
+ if converter and isinstance(converter, (list, tuple)):
+ converter = pipe(*converter)
+
return _CountingAttr(
default=default,
validator=validator,
repr=repr,
- cmp=cmp,
+ cmp=None,
hash=hash,
init=init,
- convert=convert,
+ converter=converter,
metadata=metadata,
+ type=type,
+ kw_only=kw_only,
+ eq=eq,
+ eq_key=eq_key,
+ order=order,
+ order_key=order_key,
+ on_setattr=on_setattr,
)
+def _compile_and_eval(script, globs, locs=None, filename=""):
+ """
+ "Exec" the script with the given global (globs) and local (locs) variables.
+ """
+ bytecode = compile(script, filename, "exec")
+ eval(bytecode, globs, locs)
+
+
+def _make_method(name, script, filename, globs):
+ """
+ Create the method with the script given and return the method object.
+ """
+ locs = {}
+
+ # In order of debuggers like PDB being able to step through the code,
+ # we add a fake linecache entry.
+ count = 1
+ base_filename = filename
+ while True:
+ linecache_tuple = (
+ len(script),
+ None,
+ script.splitlines(True),
+ filename,
+ )
+ old_val = linecache.cache.setdefault(filename, linecache_tuple)
+ if old_val == linecache_tuple:
+ break
+ else:
+ filename = "{}-{}>".format(base_filename[:-1], count)
+ count += 1
+
+ _compile_and_eval(script, globs, locs, filename)
+
+ return locs[name]
+
+
def _make_attr_tuple_class(cls_name, attr_names):
"""
Create a tuple subclass to hold `Attribute`s for an `attrs` class.
@@ -156,75 +348,273 @@
]
if attr_names:
for i, attr_name in enumerate(attr_names):
- attr_class_template.append(_tuple_property_pat.format(
- index=i,
- attr_name=attr_name,
- ))
+ attr_class_template.append(
+ _tuple_property_pat.format(index=i, attr_name=attr_name)
+ )
else:
attr_class_template.append(" pass")
- globs = {"itemgetter": itemgetter}
- eval(compile("\n".join(attr_class_template), "", "exec"), globs)
+ globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+ _compile_and_eval("\n".join(attr_class_template), globs)
return globs[attr_class_name]
-def _transform_attrs(cls, these):
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+ "_Attributes",
+ [
+ # all attributes to build dunder methods for
+ "attrs",
+ # attributes that have been inherited
+ "base_attrs",
+ # map inherited attributes to their originating classes
+ "base_attrs_map",
+ ],
+)
+
+
+def _is_class_var(annot):
+ """
+ Check whether *annot* is a typing.ClassVar.
+
+ The string comparison hack is used to avoid evaluating all string
+ annotations which would put attrs-based classes at a performance
+ disadvantage compared to plain old classes.
+ """
+ annot = str(annot)
+
+ # Annotation can be quoted.
+ if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+ annot = annot[1:-1]
+
+ return annot.startswith(_classvar_prefixes)
+
+
+def _has_own_attribute(cls, attrib_name):
+ """
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+
+ Requires Python 3.
+ """
+ attr = getattr(cls, attrib_name, _sentinel)
+ if attr is _sentinel:
+ return False
+
+ for base_cls in cls.__mro__[1:]:
+ a = getattr(base_cls, attrib_name, None)
+ if attr is a:
+ return False
+
+ return True
+
+
+def _get_annotations(cls):
+ """
+ Get annotations for *cls*.
+ """
+ if _has_own_attribute(cls, "__annotations__"):
+ return cls.__annotations__
+
+ return {}
+
+
+def _counter_getter(e):
+ """
+ Key function for sorting to avoid re-creating a lambda for every class.
"""
- Transforms all `_CountingAttr`s on a class into `Attribute`s and saves the
- list in `__attrs_attrs__`.
+ return e[1].counter
+
+
+def _collect_base_attrs(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in reversed(cls.__mro__[1:-1]):
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.inherited or a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ # For each name, only keep the freshest definition i.e. the furthest at the
+ # back. base_attr_map is fine because it gets overwritten with every new
+ # instance.
+ filtered = []
+ seen = set()
+ for a in reversed(base_attrs):
+ if a.name in seen:
+ continue
+ filtered.insert(0, a)
+ seen.add(a.name)
+
+ return filtered, base_attr_map
+
+
+def _collect_base_attrs_broken(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+
+ N.B. *taken_attr_names* will be mutated.
+
+ Adhere to the old incorrect behavior.
+
+ Notably it collects from the front and considers inherited attributes which
+ leads to the buggy behavior reported in #428.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in cls.__mro__[1:-1]:
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ taken_attr_names.add(a.name)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ return base_attrs, base_attr_map
+
+
+def _transform_attrs(
+ cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
+):
+ """
+ Transform all `_CountingAttr`s on a class into `Attribute`s.
If *these* is passed, use that and don't look for them on the class.
+
+ *collect_by_mro* is True, collect them in the correct MRO order, otherwise
+ use the old -- incorrect -- order. See #428.
+
+ Return an `_Attributes`.
"""
- super_cls = []
- for c in reversed(cls.__mro__[1:-1]):
- sub_attrs = getattr(c, "__attrs_attrs__", None)
- if sub_attrs is not None:
- super_cls.extend(a for a in sub_attrs if a not in super_cls)
- if these is None:
- ca_list = [(name, attr)
- for name, attr
- in cls.__dict__.items()
- if isinstance(attr, _CountingAttr)]
+ cd = cls.__dict__
+ anns = _get_annotations(cls)
+
+ if these is not None:
+ ca_list = [(name, ca) for name, ca in these.items()]
+
+ if not isinstance(these, ordered_dict):
+ ca_list.sort(key=_counter_getter)
+ elif auto_attribs is True:
+ ca_names = {
+ name
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ }
+ ca_list = []
+ annot_names = set()
+ for attr_name, type in anns.items():
+ if _is_class_var(type):
+ continue
+ annot_names.add(attr_name)
+ a = cd.get(attr_name, NOTHING)
+
+ if not isinstance(a, _CountingAttr):
+ if a is NOTHING:
+ a = attrib()
+ else:
+ a = attrib(default=a)
+ ca_list.append((attr_name, a))
+
+ unannotated = ca_names - annot_names
+ if len(unannotated) > 0:
+ raise UnannotatedAttributeError(
+ "The following `attr.ib`s lack a type annotation: "
+ + ", ".join(
+ sorted(unannotated, key=lambda n: cd.get(n).counter)
+ )
+ + "."
+ )
else:
- ca_list = [(name, ca)
- for name, ca
- in iteritems(these)]
-
- non_super_attrs = [
- Attribute.from_counting_attr(name=attr_name, ca=ca)
- for attr_name, ca
- in sorted(ca_list, key=lambda e: e[1].counter)
+ ca_list = sorted(
+ (
+ (name, attr)
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ ),
+ key=lambda e: e[1].counter,
+ )
+
+ own_attrs = [
+ Attribute.from_counting_attr(
+ name=attr_name, ca=ca, type=anns.get(attr_name)
+ )
+ for attr_name, ca in ca_list
]
- attr_names = [a.name for a in super_cls + non_super_attrs]
-
- AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
-
- cls.__attrs_attrs__ = AttrsClass(super_cls + [
- Attribute.from_counting_attr(name=attr_name, ca=ca)
- for attr_name, ca
- in sorted(ca_list, key=lambda e: e[1].counter)
- ])
-
+
+ if collect_by_mro:
+ base_attrs, base_attr_map = _collect_base_attrs(
+ cls, {a.name for a in own_attrs}
+ )
+ else:
+ base_attrs, base_attr_map = _collect_base_attrs_broken(
+ cls, {a.name for a in own_attrs}
+ )
+
+ if kw_only:
+ own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
+ base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
+
+ attrs = base_attrs + own_attrs
+
+ # Mandatory vs non-mandatory attr order only matters when they are part of
+ # the __init__ signature and when they aren't kw_only (which are moved to
+ # the end and can be mandatory or non-mandatory in any order, as they will
+ # be specified as keyword args anyway). Check the order of those attrs:
had_default = False
- for a in cls.__attrs_attrs__:
- if these is None and a not in super_cls:
- setattr(cls, a.name, a)
- if had_default is True and a.default is NOTHING and a.init is True:
+ for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+ if had_default is True and a.default is NOTHING:
raise ValueError(
"No mandatory attributes allowed after an attribute with a "
- "default value or factory. Attribute in question: {a!r}"
- .format(a=a)
+ "default value or factory. Attribute in question: %r" % (a,)
)
- elif had_default is False and \
- a.default is not NOTHING and \
- a.init is not False:
+
+ if had_default is False and a.default is not NOTHING:
had_default = True
-
-def _frozen_setattrs(self, name, value):
- """
- Attached to frozen classes as __setattr__.
- """
- raise FrozenInstanceError()
+ if field_transformer is not None:
+ attrs = field_transformer(cls, attrs)
+
+ # Create AttrsClass *after* applying the field_transformer since it may
+ # add or remove attributes!
+ attr_names = [a.name for a in attrs]
+ AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+ return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map))
+
+
+if PYPY:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ if isinstance(self, BaseException) and name in (
+ "__cause__",
+ "__context__",
+ ):
+ BaseException.__setattr__(self, name, value)
+ return
+
+ raise FrozenInstanceError()
+
+else:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ raise FrozenInstanceError()
def _frozen_delattrs(self, name):
@@ -234,44 +624,661 @@
raise FrozenInstanceError()
-def attributes(maybe_cls=None, these=None, repr_ns=None,
- repr=True, cmp=True, hash=None, init=True,
- slots=False, frozen=False, str=False):
+class _ClassBuilder:
+ """
+ Iteratively build *one* class.
+ """
+
+ __slots__ = (
+ "_attr_names",
+ "_attrs",
+ "_base_attr_map",
+ "_base_names",
+ "_cache_hash",
+ "_cls",
+ "_cls_dict",
+ "_delete_attribs",
+ "_frozen",
+ "_has_pre_init",
+ "_has_post_init",
+ "_is_exc",
+ "_on_setattr",
+ "_slots",
+ "_weakref_slot",
+ "_wrote_own_setattr",
+ "_has_custom_setattr",
+ )
+
+ def __init__(
+ self,
+ cls,
+ these,
+ slots,
+ frozen,
+ weakref_slot,
+ getstate_setstate,
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_custom_setattr,
+ field_transformer,
+ ):
+ attrs, base_attrs, base_map = _transform_attrs(
+ cls,
+ these,
+ auto_attribs,
+ kw_only,
+ collect_by_mro,
+ field_transformer,
+ )
+
+ self._cls = cls
+ self._cls_dict = dict(cls.__dict__) if slots else {}
+ self._attrs = attrs
+ self._base_names = {a.name for a in base_attrs}
+ self._base_attr_map = base_map
+ self._attr_names = tuple(a.name for a in attrs)
+ self._slots = slots
+ self._frozen = frozen
+ self._weakref_slot = weakref_slot
+ self._cache_hash = cache_hash
+ self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
+ self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
+ self._delete_attribs = not bool(these)
+ self._is_exc = is_exc
+ self._on_setattr = on_setattr
+
+ self._has_custom_setattr = has_custom_setattr
+ self._wrote_own_setattr = False
+
+ self._cls_dict["__attrs_attrs__"] = self._attrs
+
+ if frozen:
+ self._cls_dict["__setattr__"] = _frozen_setattrs
+ self._cls_dict["__delattr__"] = _frozen_delattrs
+
+ self._wrote_own_setattr = True
+ elif on_setattr in (
+ _ng_default_on_setattr,
+ setters.validate,
+ setters.convert,
+ ):
+ has_validator = has_converter = False
+ for a in attrs:
+ if a.validator is not None:
+ has_validator = True
+ if a.converter is not None:
+ has_converter = True
+
+ if has_validator and has_converter:
+ break
+ if (
+ (
+ on_setattr == _ng_default_on_setattr
+ and not (has_validator or has_converter)
+ )
+ or (on_setattr == setters.validate and not has_validator)
+ or (on_setattr == setters.convert and not has_converter)
+ ):
+ # If class-level on_setattr is set to convert + validate, but
+ # there's no field to convert or validate, pretend like there's
+ # no on_setattr.
+ self._on_setattr = None
+
+ if getstate_setstate:
+ (
+ self._cls_dict["__getstate__"],
+ self._cls_dict["__setstate__"],
+ ) = self._make_getstate_setstate()
+
+ def __repr__(self):
+ return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
+
+ def build_class(self):
+ """
+ Finalize class based on the accumulated configuration.
+
+ Builder cannot be used after calling this method.
+ """
+ if self._slots is True:
+ return self._create_slots_class()
+ else:
+ return self._patch_original_class()
+
+ def _patch_original_class(self):
+ """
+ Apply accumulated methods and return the class.
+ """
+ cls = self._cls
+ base_names = self._base_names
+
+ # Clean class of attribute definitions (`attr.ib()`s).
+ if self._delete_attribs:
+ for name in self._attr_names:
+ if (
+ name not in base_names
+ and getattr(cls, name, _sentinel) is not _sentinel
+ ):
+ try:
+ delattr(cls, name)
+ except AttributeError:
+ # This can happen if a base class defines a class
+ # variable and we want to set an attribute with the
+ # same name by using only a type annotation.
+ pass
+
+ # Attach our dunder methods.
+ for name, value in self._cls_dict.items():
+ setattr(cls, name, value)
+
+ # If we've inherited an attrs __setattr__ and don't write our own,
+ # reset it to object's.
+ if not self._wrote_own_setattr and getattr(
+ cls, "__attrs_own_setattr__", False
+ ):
+ cls.__attrs_own_setattr__ = False
+
+ if not self._has_custom_setattr:
+ cls.__setattr__ = _obj_setattr
+
+ return cls
+
+ def _create_slots_class(self):
+ """
+ Build and return a new class with a `__slots__` attribute.
+ """
+ cd = {
+ k: v
+ for k, v in self._cls_dict.items()
+ if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
+ }
+
+ # If our class doesn't have its own implementation of __setattr__
+ # (either from the user or by us), check the bases, if one of them has
+ # an attrs-made __setattr__, that needs to be reset. We don't walk the
+ # MRO because we only care about our immediate base classes.
+ # XXX: This can be confused by subclassing a slotted attrs class with
+ # XXX: a non-attrs class and subclass the resulting class with an attrs
+ # XXX: class. See `test_slotted_confused` for details. For now that's
+ # XXX: OK with us.
+ if not self._wrote_own_setattr:
+ cd["__attrs_own_setattr__"] = False
+
+ if not self._has_custom_setattr:
+ for base_cls in self._cls.__bases__:
+ if base_cls.__dict__.get("__attrs_own_setattr__", False):
+ cd["__setattr__"] = _obj_setattr
+ break
+
+ # Traverse the MRO to collect existing slots
+ # and check for an existing __weakref__.
+ existing_slots = dict()
+ weakref_inherited = False
+ for base_cls in self._cls.__mro__[1:-1]:
+ if base_cls.__dict__.get("__weakref__", None) is not None:
+ weakref_inherited = True
+ existing_slots.update(
+ {
+ name: getattr(base_cls, name)
+ for name in getattr(base_cls, "__slots__", [])
+ }
+ )
+
+ base_names = set(self._base_names)
+
+ names = self._attr_names
+ if (
+ self._weakref_slot
+ and "__weakref__" not in getattr(self._cls, "__slots__", ())
+ and "__weakref__" not in names
+ and not weakref_inherited
+ ):
+ names += ("__weakref__",)
+
+ # We only add the names of attributes that aren't inherited.
+ # Setting __slots__ to inherited attributes wastes memory.
+ slot_names = [name for name in names if name not in base_names]
+ # There are slots for attributes from current class
+ # that are defined in parent classes.
+ # As their descriptors may be overridden by a child class,
+ # we collect them here and update the class dict
+ reused_slots = {
+ slot: slot_descriptor
+ for slot, slot_descriptor in existing_slots.items()
+ if slot in slot_names
+ }
+ slot_names = [name for name in slot_names if name not in reused_slots]
+ cd.update(reused_slots)
+ if self._cache_hash:
+ slot_names.append(_hash_cache_field)
+ cd["__slots__"] = tuple(slot_names)
+
+ cd["__qualname__"] = self._cls.__qualname__
+
+ # Create new class based on old class and our methods.
+ cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+ # The following is a fix for
+ # <https://github.com/python-attrs/attrs/issues/102>. On Python 3,
+ # if a method mentions `__class__` or uses the no-arg super(), the
+ # compiler will bake a reference to the class in the method itself
+ # as `method.__closure__`. Since we replace the class with a
+ # clone, we rewrite these references so it keeps working.
+ for item in cls.__dict__.values():
+ if isinstance(item, (classmethod, staticmethod)):
+ # Class- and staticmethods hide their functions inside.
+ # These might need to be rewritten as well.
+ closure_cells = getattr(item.__func__, "__closure__", None)
+ elif isinstance(item, property):
+ # Workaround for property `super()` shortcut (PY3-only).
+ # There is no universal way for other descriptors.
+ closure_cells = getattr(item.fget, "__closure__", None)
+ else:
+ closure_cells = getattr(item, "__closure__", None)
+
+ if not closure_cells: # Catch None or the empty list.
+ continue
+ for cell in closure_cells:
+ try:
+ match = cell.cell_contents is self._cls
+ except ValueError: # ValueError: Cell is empty
+ pass
+ else:
+ if match:
+ set_closure_cell(cell, cls)
+
+ return cls
+
+ def add_repr(self, ns):
+ self._cls_dict["__repr__"] = self._add_method_dunders(
+ _make_repr(self._attrs, ns, self._cls)
+ )
+ return self
+
+ def add_str(self):
+ repr = self._cls_dict.get("__repr__")
+ if repr is None:
+ raise ValueError(
+ "__str__ can only be generated if a __repr__ exists."
+ )
+
+ def __str__(self):
+ return self.__repr__()
+
+ self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+ return self
+
+ def _make_getstate_setstate(self):
+ """
+ Create custom __setstate__ and __getstate__ methods.
+ """
+ # __weakref__ is not writable.
+ state_attr_names = tuple(
+ an for an in self._attr_names if an != "__weakref__"
+ )
+
+ def slots_getstate(self):
+ """
+ Automatically created by attrs.
+ """
+ return tuple(getattr(self, name) for name in state_attr_names)
+
+ hash_caching_enabled = self._cache_hash
+
+ def slots_setstate(self, state):
+ """
+ Automatically created by attrs.
+ """
+ __bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in zip(state_attr_names, state):
+ __bound_setattr(name, value)
+
+ # The hash code cache is not included when the object is
+ # serialized, but it still needs to be initialized to None to
+ # indicate that the first call to __hash__ should be a cache
+ # miss.
+ if hash_caching_enabled:
+ __bound_setattr(_hash_cache_field, None)
+
+ return slots_getstate, slots_setstate
+
+ def make_unhashable(self):
+ self._cls_dict["__hash__"] = None
+ return self
+
+ def add_hash(self):
+ self._cls_dict["__hash__"] = self._add_method_dunders(
+ _make_hash(
+ self._cls,
+ self._attrs,
+ frozen=self._frozen,
+ cache_hash=self._cache_hash,
+ )
+ )
+
+ return self
+
+ def add_init(self):
+ self._cls_dict["__init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr,
+ attrs_init=False,
+ )
+ )
+
+ return self
+
+ def add_match_args(self):
+ self._cls_dict["__match_args__"] = tuple(
+ field.name
+ for field in self._attrs
+ if field.init and not field.kw_only
+ )
+
+ def add_attrs_init(self):
+ self._cls_dict["__attrs_init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr,
+ attrs_init=True,
+ )
+ )
+
+ return self
+
+ def add_eq(self):
+ cd = self._cls_dict
+
+ cd["__eq__"] = self._add_method_dunders(
+ _make_eq(self._cls, self._attrs)
+ )
+ cd["__ne__"] = self._add_method_dunders(_make_ne())
+
+ return self
+
+ def add_order(self):
+ cd = self._cls_dict
+
+ cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+ self._add_method_dunders(meth)
+ for meth in _make_order(self._cls, self._attrs)
+ )
+
+ return self
+
+ def add_setattr(self):
+ if self._frozen:
+ return self
+
+ sa_attrs = {}
+ for a in self._attrs:
+ on_setattr = a.on_setattr or self._on_setattr
+ if on_setattr and on_setattr is not setters.NO_OP:
+ sa_attrs[a.name] = a, on_setattr
+
+ if not sa_attrs:
+ return self
+
+ if self._has_custom_setattr:
+ # We need to write a __setattr__ but there already is one!
+ raise ValueError(
+ "Can't combine custom __setattr__ with on_setattr hooks."
+ )
+
+ # docstring comes from _add_method_dunders
+ def __setattr__(self, name, val):
+ try:
+ a, hook = sa_attrs[name]
+ except KeyError:
+ nval = val
+ else:
+ nval = hook(self, a, val)
+
+ _obj_setattr(self, name, nval)
+
+ self._cls_dict["__attrs_own_setattr__"] = True
+ self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
+ self._wrote_own_setattr = True
+
+ return self
+
+ def _add_method_dunders(self, method):
+ """
+ Add __module__ and __qualname__ to a *method* if possible.
+ """
+ try:
+ method.__module__ = self._cls.__module__
+ except AttributeError:
+ pass
+
+ try:
+ method.__qualname__ = ".".join(
+ (self._cls.__qualname__, method.__name__)
+ )
+ except AttributeError:
+ pass
+
+ try:
+ method.__doc__ = "Method generated by attrs for class %s." % (
+ self._cls.__qualname__,
+ )
+ except AttributeError:
+ pass
+
+ return method
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ raise ValueError("Don't mix `cmp` with `eq' and `order`.")
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ return cmp, cmp
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq = default_eq
+
+ if order is None:
+ order = eq
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, order
+
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ raise ValueError("Don't mix `cmp` with `eq' and `order`.")
+
+ def decide_callable_or_boolean(value):
+ """
+ Decide whether a key function is used.
+ """
+ if callable(value):
+ value, key = True, value
+ else:
+ key = None
+ return value, key
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
+ return cmp, cmp_key, cmp, cmp_key
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq, eq_key = default_eq, None
+ else:
+ eq, eq_key = decide_callable_or_boolean(eq)
+
+ if order is None:
+ order, order_key = eq, eq_key
+ else:
+ order, order_key = decide_callable_or_boolean(order)
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, eq_key, order, order_key
+
+
+def _determine_whether_to_implement(
+ cls, flag, auto_detect, dunders, default=True
+):
+ """
+ Check whether we should implement a set of methods for *cls*.
+
+ *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
+ same as passed into @attr.s and *dunders* is a tuple of attribute names
+ whose presence signal that the user has implemented it themselves.
+
+ Return *default* if no reason for either for or against is found.
+ """
+ if flag is True or flag is False:
+ return flag
+
+ if flag is None and auto_detect is False:
+ return default
+
+ # Logically, flag is None and auto_detect is True here.
+ for dunder in dunders:
+ if _has_own_attribute(cls, dunder):
+ return False
+
+ return default
+
+
+def attrs(
+ maybe_cls=None,
+ these=None,
+ repr_ns=None,
+ repr=None,
+ cmp=None,
+ hash=None,
+ init=None,
+ slots=False,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=False,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=False,
+ eq=None,
+ order=None,
+ auto_detect=False,
+ collect_by_mro=False,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+ match_args=True,
+):
r"""
A class decorator that adds `dunder
<https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
- specified attributes using :func:`attr.ib` or the *these* argument.
-
- :param these: A dictionary of name to :func:`attr.ib` mappings. This is
+ specified attributes using `attr.ib` or the *these* argument.
+
+ :param these: A dictionary of name to `attr.ib` mappings. This is
useful to avoid the definition of your attributes within the class body
because you can't (e.g. if you want to add ``__repr__`` methods to
Django models) or don't want to.
If *these* is not ``None``, ``attrs`` will *not* search the class body
- for attributes.
-
- :type these: :class:`dict` of :class:`str` to :func:`attr.ib`
+ for attributes and will *not* remove any attributes from it.
+
+ If *these* is an ordered dict (`dict` on Python 3.6+,
+ `collections.OrderedDict` otherwise), the order is deduced from
+ the order of the attributes inside *these*. Otherwise the order
+ of the definition of the attributes is used.
+
+ :type these: `dict` of `str` to `attr.ib`
:param str repr_ns: When using nested classes, there's no way in Python 2
to automatically detect that. Therefore it's possible to set the
namespace explicitly for a more meaningful ``repr`` output.
+ :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
+ *order*, and *hash* arguments explicitly, assume they are set to
+ ``True`` **unless any** of the involved methods for one of the
+ arguments is implemented in the *current* class (i.e. it is *not*
+ inherited from some base class).
+
+ So for example by implementing ``__eq__`` on a class yourself,
+ ``attrs`` will deduce ``eq=False`` and will create *neither*
+ ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
+ ``__ne__`` by default, so it *should* be enough to only implement
+ ``__eq__`` in most cases).
+
+ .. warning::
+
+ If you prevent ``attrs`` from creating the ordering methods for you
+ (``order=False``, e.g. by implementing ``__le__``), it becomes
+ *your* responsibility to make sure its ordering is sound. The best
+ way is to use the `functools.total_ordering` decorator.
+
+
+ Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
+ *cmp*, or *hash* overrides whatever *auto_detect* would determine.
+
+ *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises
+ an `attrs.exceptions.PythonTooOldError`.
+
:param bool repr: Create a ``__repr__`` method with a human readable
- represantation of ``attrs`` attributes..
+ representation of ``attrs`` attributes.
:param bool str: Create a ``__str__`` method that is identical to
``__repr__``. This is usually not necessary except for
- :class:`Exception`\ s.
- :param bool cmp: Create ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
- ``__gt__``, and ``__ge__`` methods that compare the class as if it were
- a tuple of its ``attrs`` attributes. But the attributes are *only*
- compared, if the type of both classes is *identical*!
- :param hash: If ``None`` (default), the ``__hash__`` method is generated
- according how *cmp* and *frozen* are set.
+ `Exception`\ s.
+ :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
+ and ``__ne__`` methods that check two instances for equality.
+
+ They compare the instances as if they were tuples of their ``attrs``
+ attributes if and only if the types of both classes are *identical*!
+ :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
+ ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
+ allow instances to be ordered. If ``None`` (default) mirror value of
+ *eq*.
+ :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
+ and *order* to the same value. Must not be mixed with *eq* or *order*.
+ :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method
+ is generated according to how *eq* and *frozen* are set.
1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
- 2. If *cmp* is True and *frozen* is False, ``__hash__`` will be set to
+ 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
None, marking it unhashable (which it is).
- 3. If *cmp* is False, ``__hash__`` will be left untouched meaning the
- ``__hash__`` method of the superclass will be used (if superclass is
+ 3. If *eq* is False, ``__hash__`` will be left untouched meaning the
+ ``__hash__`` method of the base class will be used (if base class is
``object``, this means it will fall back to id-based hashing.).
Although not recommended, you can decide for yourself and force
@@ -279,29 +1286,37 @@
didn't freeze it programmatically) by passing ``True`` or not. Both of
these cases are rather special and should be used carefully.
- See the `Python documentation \
- <https://docs.python.org/3/reference/datamodel.html#object.__hash__>`_
- and the `GitHub issue that led to the default behavior \
- <https://github.com/python-attrs/attrs/issues/136>`_ for more details.
- :type hash: ``bool`` or ``None``
- :param bool init: Create a ``__init__`` method that initialiazes the
- ``attrs`` attributes. Leading underscores are stripped for the
- argument name. If a ``__attrs_post_init__`` method exists on the
- class, it will be called after the class is fully initialized.
- :param bool slots: Create a slots_-style class that's more
- memory-efficient. See :ref:`slots` for further ramifications.
+ See our documentation on `hashing`, Python's documentation on
+ `object.__hash__`, and the `GitHub issue that led to the default \
+ behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
+ details.
+ :param bool init: Create a ``__init__`` method that initializes the
+ ``attrs`` attributes. Leading underscores are stripped for the argument
+ name. If a ``__attrs_pre_init__`` method exists on the class, it will
+ be called before the class is initialized. If a ``__attrs_post_init__``
+ method exists on the class, it will be called after the class is fully
+ initialized.
+
+ If ``init`` is ``False``, an ``__attrs_init__`` method will be
+ injected instead. This allows you to define a custom ``__init__``
+ method that can do pre-init work such as ``super().__init__()``,
+ and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
+ :param bool slots: Create a `slotted class <slotted classes>` that's more
+ memory-efficient. Slotted classes are generally superior to the default
+ dict classes, but have some gotchas you should know about, so we
+ encourage you to read the `glossary entry <slotted classes>`.
:param bool frozen: Make instances immutable after initialization. If
someone attempts to modify a frozen instance,
- :exc:`attr.exceptions.FrozenInstanceError` is raised.
-
- Please note:
+ `attr.exceptions.FrozenInstanceError` is raised.
+
+ .. note::
1. This is achieved by installing a custom ``__setattr__`` method
- on your class so you can't implement an own one.
+ on your class, so you can't implement your own.
2. True immutability is impossible in Python.
- 3. This *does* have a minor a runtime performance :ref:`impact
+ 3. This *does* have a minor runtime performance `impact
<how-frozen>` when initializing new instances. In other words:
``__init__`` is slightly slower with ``frozen=True``.
@@ -310,316 +1325,651 @@
circumvent that limitation by using
``object.__setattr__(self, "attribute_name", value)``.
- .. _slots: https://docs.python.org/3.5/reference/datamodel.html#slots
-
- .. versionadded:: 16.0.0 *slots*
- .. versionadded:: 16.1.0 *frozen*
- .. versionadded:: 16.3.0 *str*, and support for ``__attrs_post_init__``.
- .. versionchanged::
- 17.1.0 *hash* supports ``None`` as value which is also the default
- now.
+ 5. Subclasses of a frozen class are frozen too.
+
+ :param bool weakref_slot: Make instances weak-referenceable. This has no
+ effect unless ``slots`` is also enabled.
+ :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated
+ attributes (Python 3.6 and later only) from the class body.
+
+ In this case, you **must** annotate every field. If ``attrs``
+ encounters a field that is set to an `attr.ib` but lacks a type
+ annotation, an `attr.exceptions.UnannotatedAttributeError` is
+ raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
+ want to set a type.
+
+ If you assign a value to those attributes (e.g. ``x: int = 42``), that
+ value becomes the default value like if it were passed using
+ ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also
+ works as expected in most cases (see warning below).
+
+ Attributes annotated as `typing.ClassVar`, and attributes that are
+ neither annotated nor set to an `attr.ib` are **ignored**.
+
+ .. warning::
+ For features that use the attribute name to create decorators (e.g.
+ `validators <validators>`), you still *must* assign `attr.ib` to
+ them. Otherwise Python will either not find the name or try to use
+ the default value to call e.g. ``validator`` on it.
+
+ These errors can be quite confusing and probably the most common bug
+ report on our bug tracker.
+
+ :param bool kw_only: Make all attributes keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+ :param bool cache_hash: Ensure that the object's hash code is computed
+ only once and stored on the object. If this is set to ``True``,
+ hashing must be either explicitly or implicitly enabled for this
+ class. If the hash code is cached, avoid any reassignments of
+ fields involved in hash code computation or mutations of the objects
+ those fields point to after object creation. If such changes occur,
+ the behavior of the object's hash code is undefined.
+ :param bool auto_exc: If the class subclasses `BaseException`
+ (which implicitly includes any subclass of any exception), the
+ following happens to behave like a well-behaved Python exceptions
+ class:
+
+ - the values for *eq*, *order*, and *hash* are ignored and the
+ instances compare and hash by the instance's ids (N.B. ``attrs`` will
+ *not* remove existing implementations of ``__hash__`` or the equality
+ methods. It just won't add own ones.),
+ - all attributes that are either passed into ``__init__`` or have a
+ default value are additionally available as a tuple in the ``args``
+ attribute,
+ - the value of *str* is ignored leaving ``__str__`` to base classes.
+ :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
+ collects attributes from base classes. The default behavior is
+ incorrect in certain cases of multiple inheritance. It should be on by
+ default but is kept off for backward-compatibility.
+
+ See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_ for
+ more details.
+
+ :param Optional[bool] getstate_setstate:
+ .. note::
+ This is usually only interesting for slotted classes and you should
+ probably just set *auto_detect* to `True`.
+
+ If `True`, ``__getstate__`` and
+ ``__setstate__`` are generated and attached to the class. This is
+ necessary for slotted classes to be pickleable. If left `None`, it's
+ `True` by default for slotted classes and ``False`` for dict classes.
+
+ If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
+ and **either** ``__getstate__`` or ``__setstate__`` is detected directly
+ on the class (i.e. not inherited), it is set to `False` (this is usually
+ what you want).
+
+ :param on_setattr: A callable that is run whenever the user attempts to set
+ an attribute (either by assignment like ``i.x = 42`` or by using
+ `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments
+ as validators: the instance, the attribute that is being modified, and
+ the new value.
+
+ If no exception is raised, the attribute is set to the return value of
+ the callable.
+
+ If a list of callables is passed, they're automatically wrapped in an
+ `attrs.setters.pipe`.
+ :type on_setattr: `callable`, or a list of callables, or `None`, or
+ `attrs.setters.NO_OP`
+
+ :param Optional[callable] field_transformer:
+ A function that is called with the original class object and all
+ fields right before ``attrs`` finalizes the class. You can use
+ this, e.g., to automatically add converters or validators to
+ fields based on their types. See `transform-fields` for more details.
+
+ :param bool match_args:
+ If `True` (default), set ``__match_args__`` on the class to support
+ :pep:`634` (Structural Pattern Matching). It is a tuple of all
+ non-keyword-only ``__init__`` parameter names on Python 3.10 and later.
+ Ignored on older Python versions.
+
+ .. versionadded:: 16.0.0 *slots*
+ .. versionadded:: 16.1.0 *frozen*
+ .. versionadded:: 16.3.0 *str*
+ .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+ .. versionchanged:: 17.1.0
+ *hash* supports ``None`` as value which is also the default now.
+ .. versionadded:: 17.3.0 *auto_attribs*
+ .. versionchanged:: 18.1.0
+ If *these* is passed, no attributes are deleted from the class body.
+ .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+ .. versionadded:: 18.2.0 *weakref_slot*
+ .. deprecated:: 18.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+ `DeprecationWarning` if the classes compared are subclasses of
+ each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses
+ to each other.
+ .. versionchanged:: 19.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+ subclasses comparable anymore.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionadded:: 18.2.0 *cache_hash*
+ .. versionadded:: 19.1.0 *auto_exc*
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *auto_detect*
+ .. versionadded:: 20.1.0 *collect_by_mro*
+ .. versionadded:: 20.1.0 *getstate_setstate*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionadded:: 20.3.0 *field_transformer*
+ .. versionchanged:: 21.1.0
+ ``init=False`` injects ``__attrs_init__``
+ .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ .. versionadded:: 21.3.0 *match_args*
"""
+ eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
+ hash_ = hash # work around the lack of nonlocal
+
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
def wrap(cls):
- if getattr(cls, "__class__", None) is None:
- raise TypeError("attrs only works with new-style classes.")
-
- if repr is False and str is True:
- raise ValueError(
- "__str__ can only be generated if a __repr__ exists."
- )
-
- if slots:
- # Only need this later if we're using slots.
- if these is None:
- ca_list = [name
- for name, attr
- in cls.__dict__.items()
- if isinstance(attr, _CountingAttr)]
- else:
- ca_list = list(iterkeys(these))
- _transform_attrs(cls, these)
-
- # Can't just re-use frozen name because Python's scoping. :(
- # Can't compare function objects because Python 2 is terrible. :(
- effectively_frozen = _has_frozen_superclass(cls) or frozen
- if repr is True:
- cls = _add_repr(cls, ns=repr_ns)
+ is_frozen = frozen or _has_frozen_base_class(cls)
+ is_exc = auto_exc is True and issubclass(cls, BaseException)
+ has_own_setattr = auto_detect and _has_own_attribute(
+ cls, "__setattr__"
+ )
+
+ if has_own_setattr and is_frozen:
+ raise ValueError("Can't freeze a class with a custom __setattr__.")
+
+ builder = _ClassBuilder(
+ cls,
+ these,
+ slots,
+ is_frozen,
+ weakref_slot,
+ _determine_whether_to_implement(
+ cls,
+ getstate_setstate,
+ auto_detect,
+ ("__getstate__", "__setstate__"),
+ default=slots,
+ ),
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_own_setattr,
+ field_transformer,
+ )
+ if _determine_whether_to_implement(
+ cls, repr, auto_detect, ("__repr__",)
+ ):
+ builder.add_repr(repr_ns)
if str is True:
- cls.__str__ = cls.__repr__
- if cmp is True:
- cls = _add_cmp(cls)
-
+ builder.add_str()
+
+ eq = _determine_whether_to_implement(
+ cls, eq_, auto_detect, ("__eq__", "__ne__")
+ )
+ if not is_exc and eq is True:
+ builder.add_eq()
+ if not is_exc and _determine_whether_to_implement(
+ cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
+ ):
+ builder.add_order()
+
+ builder.add_setattr()
+
+ if (
+ hash_ is None
+ and auto_detect is True
+ and _has_own_attribute(cls, "__hash__")
+ ):
+ hash = False
+ else:
+ hash = hash_
if hash is not True and hash is not False and hash is not None:
+ # Can't use `hash in` because 1 == True for example.
raise TypeError(
"Invalid value for hash. Must be True, False, or None."
)
- elif hash is False or (hash is None and cmp is False):
- pass
- elif hash is True or (hash is None and cmp is True and frozen is True):
- cls = _add_hash(cls)
+ elif hash is False or (hash is None and eq is False) or is_exc:
+ # Don't do anything. Should fall back to __object__'s __hash__
+ # which is by id.
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ elif hash is True or (
+ hash is None and eq is True and is_frozen is True
+ ):
+ # Build a __hash__ if told so, or if it's safe.
+ builder.add_hash()
else:
- cls.__hash__ = None
-
- if init is True:
- cls = _add_init(cls, effectively_frozen)
- if effectively_frozen is True:
- cls.__setattr__ = _frozen_setattrs
- cls.__delattr__ = _frozen_delattrs
- if slots is True:
- # slots and frozen require __getstate__/__setstate__ to work
- cls = _add_pickle(cls)
- if slots is True:
- cls_dict = dict(cls.__dict__)
- cls_dict["__slots__"] = tuple(ca_list)
- for ca_name in ca_list:
- # It might not actually be in there, e.g. if using 'these'.
- cls_dict.pop(ca_name, None)
- cls_dict.pop("__dict__", None)
-
- qualname = getattr(cls, "__qualname__", None)
- cls = type(cls)(cls.__name__, cls.__bases__, cls_dict)
- if qualname is not None:
- cls.__qualname__ = qualname
-
- return cls
-
- # attrs_or class type depends on the usage of the decorator. It's a class
- # if it's used as `@attributes` but ``None`` if used # as `@attributes()`.
+ # Raise TypeError on attempts to hash.
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ builder.make_unhashable()
+
+ if _determine_whether_to_implement(
+ cls, init, auto_detect, ("__init__",)
+ ):
+ builder.add_init()
+ else:
+ builder.add_attrs_init()
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " init must be True."
+ )
+
+ if (
+ PY310
+ and match_args
+ and not _has_own_attribute(cls, "__match_args__")
+ ):
+ builder.add_match_args()
+
+ return builder.build_class()
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
if maybe_cls is None:
return wrap
else:
return wrap(maybe_cls)
-if PY2:
- def _has_frozen_superclass(cls):
- """
- Check whether *cls* has a frozen ancestor by looking at its
- __setattr__.
- """
- return (
- getattr(
- cls.__setattr__, "__module__", None
- ) == _frozen_setattrs.__module__ and
- cls.__setattr__.__name__ == _frozen_setattrs.__name__
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return cls.__setattr__ is _frozen_setattrs
+
+
+def _generate_unique_filename(cls, func_name):
+ """
+ Create a "filename" suitable for a function being generated.
+ """
+ unique_filename = "<attrs generated {} {}.{}>".format(
+ func_name,
+ cls.__module__,
+ getattr(cls, "__qualname__", cls.__name__),
+ )
+ return unique_filename
+
+
+def _make_hash(cls, attrs, frozen, cache_hash):
+ attrs = tuple(
+ a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+ )
+
+ tab = " "
+
+ unique_filename = _generate_unique_filename(cls, "hash")
+ type_hash = hash(unique_filename)
+ # If eq is custom generated, we need to include the functions in globs
+ globs = {}
+
+ hash_def = "def __hash__(self"
+ hash_func = "hash(("
+ closing_braces = "))"
+ if not cache_hash:
+ hash_def += "):"
+ else:
+ hash_def += ", *"
+
+ hash_def += (
+ ", _cache_wrapper="
+ + "__import__('attr._make')._make._CacheHashWrapper):"
)
-else:
- def _has_frozen_superclass(cls):
+ hash_func = "_cache_wrapper(" + hash_func
+ closing_braces += ")"
+
+ method_lines = [hash_def]
+
+ def append_hash_computation_lines(prefix, indent):
"""
- Check whether *cls* has a frozen ancestor by looking at its
- __setattr__.
+ Generate the code for actually computing the hash code.
+ Below this will either be returned directly or used to compute
+ a value which is then cached, depending on the value of cache_hash
"""
- return cls.__setattr__ == _frozen_setattrs
-
-
-def _attrs_to_tuple(obj, attrs):
- """
- Create a tuple of all values of *obj*'s *attrs*.
- """
- return tuple(getattr(obj, a.name) for a in attrs)
-
-
-def _add_hash(cls, attrs=None):
+
+ method_lines.extend(
+ [
+ indent + prefix + hash_func,
+ indent + " %d," % (type_hash,),
+ ]
+ )
+
+ for a in attrs:
+ if a.eq_key:
+ cmp_name = "_%s_key" % (a.name,)
+ globs[cmp_name] = a.eq_key
+ method_lines.append(
+ indent + " %s(self.%s)," % (cmp_name, a.name)
+ )
+ else:
+ method_lines.append(indent + " self.%s," % a.name)
+
+ method_lines.append(indent + " " + closing_braces)
+
+ if cache_hash:
+ method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
+ if frozen:
+ append_hash_computation_lines(
+ "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab * 2 + ")") # close __setattr__
+ else:
+ append_hash_computation_lines(
+ "self.%s = " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab + "return self.%s" % _hash_cache_field)
+ else:
+ append_hash_computation_lines("return ", tab)
+
+ script = "\n".join(method_lines)
+ return _make_method("__hash__", script, unique_filename, globs)
+
+
+def _add_hash(cls, attrs):
"""
Add a hash method to *cls*.
"""
- if attrs is None:
- attrs = [a
- for a in cls.__attrs_attrs__
- if a.hash is True or (a.hash is None and a.cmp is True)]
-
- def hash_(self):
+ cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
+ return cls
+
+
+def _make_ne():
+ """
+ Create __ne__ method.
+ """
+
+ def __ne__(self, other):
"""
- Automatically created by attrs.
+ Check equality and either forward a NotImplemented or
+ return the result negated.
"""
- return hash(_attrs_to_tuple(self, attrs))
-
- cls.__hash__ = hash_
- return cls
-
-
-def _add_cmp(cls, attrs=None):
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+ return __ne__
+
+
+def _make_eq(cls, attrs):
+ """
+ Create __eq__ method for *cls* with *attrs*.
"""
- Add comparison methods to *cls*.
+ attrs = [a for a in attrs if a.eq]
+
+ unique_filename = _generate_unique_filename(cls, "eq")
+ lines = [
+ "def __eq__(self, other):",
+ " if other.__class__ is not self.__class__:",
+ " return NotImplemented",
+ ]
+
+ # We can't just do a big self.x = other.x and... clause due to
+ # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+ globs = {}
+ if attrs:
+ lines.append(" return (")
+ others = [" ) == ("]
+ for a in attrs:
+ if a.eq_key:
+ cmp_name = "_%s_key" % (a.name,)
+ # Add the key function to the global namespace
+ # of the evaluated function.
+ globs[cmp_name] = a.eq_key
+ lines.append(
+ " %s(self.%s),"
+ % (
+ cmp_name,
+ a.name,
+ )
+ )
+ others.append(
+ " %s(other.%s),"
+ % (
+ cmp_name,
+ a.name,
+ )
+ )
+ else:
+ lines.append(" self.%s," % (a.name,))
+ others.append(" other.%s," % (a.name,))
+
+ lines += others + [" )"]
+ else:
+ lines.append(" return True")
+
+ script = "\n".join(lines)
+
+ return _make_method("__eq__", script, unique_filename, globs)
+
+
+def _make_order(cls, attrs):
"""
- if attrs is None:
- attrs = [a for a in cls.__attrs_attrs__ if a.cmp]
+ Create ordering methods for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.order]
def attrs_to_tuple(obj):
"""
Save us some typing.
"""
- return _attrs_to_tuple(obj, attrs)
-
- def eq(self, other):
+ return tuple(
+ key(value) if key else value
+ for value, key in (
+ (getattr(obj, a.name), a.order_key) for a in attrs
+ )
+ )
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __ge__(self, other):
"""
Automatically created by attrs.
"""
if other.__class__ is self.__class__:
- return attrs_to_tuple(self) == attrs_to_tuple(other)
- else:
- return NotImplemented
-
- def ne(self, other):
- """
- Automatically created by attrs.
- """
- result = eq(self, other)
- if result is NotImplemented:
- return NotImplemented
- else:
- return not result
-
- def lt(self, other):
- """
- Automatically created by attrs.
- """
- if isinstance(other, self.__class__):
- return attrs_to_tuple(self) < attrs_to_tuple(other)
- else:
- return NotImplemented
-
- def le(self, other):
- """
- Automatically created by attrs.
- """
- if isinstance(other, self.__class__):
- return attrs_to_tuple(self) <= attrs_to_tuple(other)
- else:
- return NotImplemented
-
- def gt(self, other):
- """
- Automatically created by attrs.
- """
- if isinstance(other, self.__class__):
- return attrs_to_tuple(self) > attrs_to_tuple(other)
- else:
- return NotImplemented
-
- def ge(self, other):
- """
- Automatically created by attrs.
- """
- if isinstance(other, self.__class__):
return attrs_to_tuple(self) >= attrs_to_tuple(other)
- else:
- return NotImplemented
-
- cls.__eq__ = eq
- cls.__ne__ = ne
- cls.__lt__ = lt
- cls.__le__ = le
- cls.__gt__ = gt
- cls.__ge__ = ge
+
+ return NotImplemented
+
+ return __lt__, __le__, __gt__, __ge__
+
+
+def _add_eq(cls, attrs=None):
+ """
+ Add equality methods to *cls* with *attrs*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__eq__ = _make_eq(cls, attrs)
+ cls.__ne__ = _make_ne()
return cls
+if HAS_F_STRINGS:
+
+ def _make_repr(attrs, ns, cls):
+ unique_filename = _generate_unique_filename(cls, "repr")
+ # Figure out which attributes to include, and which function to use to
+ # format them. The a.repr value can be either bool or a custom
+ # callable.
+ attr_names_with_reprs = tuple(
+ (a.name, (repr if a.repr is True else a.repr), a.init)
+ for a in attrs
+ if a.repr is not False
+ )
+ globs = {
+ name + "_repr": r
+ for name, r, _ in attr_names_with_reprs
+ if r != repr
+ }
+ globs["_compat"] = _compat
+ globs["AttributeError"] = AttributeError
+ globs["NOTHING"] = NOTHING
+ attribute_fragments = []
+ for name, r, i in attr_names_with_reprs:
+ accessor = (
+ "self." + name
+ if i
+ else 'getattr(self, "' + name + '", NOTHING)'
+ )
+ fragment = (
+ "%s={%s!r}" % (name, accessor)
+ if r == repr
+ else "%s={%s_repr(%s)}" % (name, name, accessor)
+ )
+ attribute_fragments.append(fragment)
+ repr_fragment = ", ".join(attribute_fragments)
+
+ if ns is None:
+ cls_name_fragment = (
+ '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
+ )
+ else:
+ cls_name_fragment = ns + ".{self.__class__.__name__}"
+
+ lines = [
+ "def __repr__(self):",
+ " try:",
+ " already_repring = _compat.repr_context.already_repring",
+ " except AttributeError:",
+ " already_repring = {id(self),}",
+ " _compat.repr_context.already_repring = already_repring",
+ " else:",
+ " if id(self) in already_repring:",
+ " return '...'",
+ " else:",
+ " already_repring.add(id(self))",
+ " try:",
+ " return f'%s(%s)'" % (cls_name_fragment, repr_fragment),
+ " finally:",
+ " already_repring.remove(id(self))",
+ ]
+
+ return _make_method(
+ "__repr__", "\n".join(lines), unique_filename, globs=globs
+ )
+
+else:
+
+ def _make_repr(attrs, ns, _):
+ """
+ Make a repr method that includes relevant *attrs*, adding *ns* to the
+ full name.
+ """
+
+ # Figure out which attributes to include, and which function to use to
+ # format them. The a.repr value can be either bool or a custom
+ # callable.
+ attr_names_with_reprs = tuple(
+ (a.name, repr if a.repr is True else a.repr)
+ for a in attrs
+ if a.repr is not False
+ )
+
+ def __repr__(self):
+ """
+ Automatically created by attrs.
+ """
+ try:
+ already_repring = _compat.repr_context.already_repring
+ except AttributeError:
+ already_repring = set()
+ _compat.repr_context.already_repring = already_repring
+
+ if id(self) in already_repring:
+ return "..."
+ real_cls = self.__class__
+ if ns is None:
+ class_name = real_cls.__qualname__.rsplit(">.", 1)[-1]
+ else:
+ class_name = ns + "." + real_cls.__name__
+
+ # Since 'self' remains on the stack (i.e.: strongly referenced)
+ # for the duration of this call, it's safe to depend on id(...)
+ # stability, and not need to track the instance and therefore
+ # worry about properties like weakref- or hash-ability.
+ already_repring.add(id(self))
+ try:
+ result = [class_name, "("]
+ first = True
+ for name, attr_repr in attr_names_with_reprs:
+ if first:
+ first = False
+ else:
+ result.append(", ")
+ result.extend(
+ (name, "=", attr_repr(getattr(self, name, NOTHING)))
+ )
+ return "".join(result) + ")"
+ finally:
+ already_repring.remove(id(self))
+
+ return __repr__
+
+
def _add_repr(cls, ns=None, attrs=None):
"""
Add a repr method to *cls*.
"""
if attrs is None:
- attrs = [a for a in cls.__attrs_attrs__ if a.repr]
-
- def repr_(self):
- """
- Automatically created by attrs.
- """
- real_cls = self.__class__
- if ns is None:
- qualname = getattr(real_cls, "__qualname__", None)
- if qualname is not None:
- class_name = qualname.rsplit(">.", 1)[-1]
- else:
- class_name = real_cls.__name__
- else:
- class_name = ns + "." + real_cls.__name__
-
- return "{0}({1})".format(
- class_name,
- ", ".join(a.name + "=" + repr(getattr(self, a.name))
- for a in attrs)
- )
- cls.__repr__ = repr_
- return cls
-
-
-def _add_init(cls, frozen):
- """
- Add a __init__ method to *cls*. If *frozen* is True, make it immutable.
- """
- attrs = [a for a in cls.__attrs_attrs__
- if a.init or a.default is not NOTHING]
-
- # We cache the generated init methods for the same kinds of attributes.
- sha1 = hashlib.sha1()
- r = repr(attrs)
- if not isinstance(r, bytes):
- r = r.encode('utf-8')
- sha1.update(r)
- unique_filename = "<attrs generated init {0}>".format(
- sha1.hexdigest()
- )
-
- script, globs = _attrs_to_script(
- attrs,
- frozen,
- getattr(cls, "__attrs_post_init__", False),
- )
- locs = {}
- bytecode = compile(script, unique_filename, "exec")
- attr_dict = dict((a.name, a) for a in attrs)
- globs.update({
- "NOTHING": NOTHING,
- "attr_dict": attr_dict,
- })
- if frozen is True:
- # Save the lookup overhead in __init__ if we need to circumvent
- # immutability.
- globs["_cached_setattr"] = _obj_setattr
- eval(bytecode, globs, locs)
- init = locs["__init__"]
-
- # In order of debuggers like PDB being able to step through the code,
- # we add a fake linecache entry.
- linecache.cache[unique_filename] = (
- len(script),
- None,
- script.splitlines(True),
- unique_filename
- )
- cls.__init__ = init
- return cls
-
-
-def _add_pickle(cls):
- """
- Add pickle helpers, needed for frozen and slotted classes
- """
- def _slots_getstate__(obj):
- """
- Play nice with pickle.
- """
- return tuple(getattr(obj, a.name) for a in fields(obj.__class__))
-
- def _slots_setstate__(obj, state):
- """
- Play nice with pickle.
- """
- __bound_setattr = _obj_setattr.__get__(obj, Attribute)
- for a, value in zip(fields(obj.__class__), state):
- __bound_setattr(a.name, value)
-
- cls.__getstate__ = _slots_getstate__
- cls.__setstate__ = _slots_setstate__
+ attrs = cls.__attrs_attrs__
+
+ cls.__repr__ = _make_repr(attrs, ns, cls)
return cls
def fields(cls):
"""
- Returns the tuple of ``attrs`` attributes for a class.
+ Return the tuple of ``attrs`` attributes for a class.
The tuple also allows accessing the fields by their names (see below for
examples).
@@ -630,12 +1980,12 @@
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
- :rtype: tuple (with name accesors) of :class:`attr.Attribute`
+ :rtype: tuple (with name accessors) of `attrs.Attribute`
.. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
by name.
"""
- if not isclass(cls):
+ if not isinstance(cls, type):
raise TypeError("Passed object must be a class.")
attrs = getattr(cls, "__attrs_attrs__", None)
if attrs is None:
@@ -645,6 +1995,34 @@
return attrs
+def fields_dict(cls):
+ """
+ Return an ordered dictionary of ``attrs`` attributes for a class, whose
+ keys are the attribute names.
+
+ :param type cls: Class to introspect.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ :rtype: an ordered dict where keys are attribute names and values are
+ `attrs.Attribute`\\ s. This will be a `dict` if it's
+ naturally ordered like on Python 3.6+ or an
+ :class:`~collections.OrderedDict` otherwise.
+
+ .. versionadded:: 18.1.0
+ """
+ if not isinstance(cls, type):
+ raise TypeError("Passed object must be a class.")
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ raise NotAnAttrsClassError(
+ "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+ )
+ return ordered_dict((a.name, a) for a in attrs)
+
+
def validate(inst):
"""
Validate all attributes on *inst* that have a validator.
@@ -662,240 +2040,623 @@
v(inst, a, getattr(inst, a.name))
-def _attrs_to_script(attrs, frozen, post_init):
+def _is_slot_cls(cls):
+ return "__slots__" in cls.__dict__
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
+
+
+def _make_init(
+ cls,
+ attrs,
+ pre_init,
+ post_init,
+ frozen,
+ slots,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ cls_on_setattr,
+ attrs_init,
+):
+ has_cls_on_setattr = (
+ cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
+ )
+
+ if frozen and has_cls_on_setattr:
+ raise ValueError("Frozen classes can't use on_setattr.")
+
+ needs_cached_setattr = cache_hash or frozen
+ filtered_attrs = []
+ attr_dict = {}
+ for a in attrs:
+ if not a.init and a.default is NOTHING:
+ continue
+
+ filtered_attrs.append(a)
+ attr_dict[a.name] = a
+
+ if a.on_setattr is not None:
+ if frozen is True:
+ raise ValueError("Frozen classes can't use on_setattr.")
+
+ needs_cached_setattr = True
+ elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
+ needs_cached_setattr = True
+
+ unique_filename = _generate_unique_filename(cls, "init")
+
+ script, globs, annotations = _attrs_to_init_script(
+ filtered_attrs,
+ frozen,
+ slots,
+ pre_init,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ has_cls_on_setattr,
+ attrs_init,
+ )
+ if cls.__module__ in sys.modules:
+ # This makes typing.get_type_hints(CLS.__init__) resolve string types.
+ globs.update(sys.modules[cls.__module__].__dict__)
+
+ globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
+
+ if needs_cached_setattr:
+ # Save the lookup overhead in __init__ if we need to circumvent
+ # setattr hooks.
+ globs["_setattr"] = _obj_setattr
+
+ init = _make_method(
+ "__attrs_init__" if attrs_init else "__init__",
+ script,
+ unique_filename,
+ globs,
+ )
+ init.__annotations__ = annotations
+
+ return init
+
+
+def _setattr(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*.
+ """
+ return "_setattr(self, '%s', %s)" % (attr_name, value_var)
+
+
+def _setattr_with_converter(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*, but run
+ its converter first.
+ """
+ return "_setattr(self, '%s', %s(%s))" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+
+def _assign(attr_name, value, has_on_setattr):
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
+ relegate to _setattr.
+ """
+ if has_on_setattr:
+ return _setattr(attr_name, value, True)
+
+ return "self.%s = %s" % (attr_name, value)
+
+
+def _assign_with_converter(attr_name, value_var, has_on_setattr):
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment after
+ conversion. Otherwise relegate to _setattr_with_converter.
+ """
+ if has_on_setattr:
+ return _setattr_with_converter(attr_name, value_var, True)
+
+ return "self.%s = %s(%s)" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+
+def _attrs_to_init_script(
+ attrs,
+ frozen,
+ slots,
+ pre_init,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ has_cls_on_setattr,
+ attrs_init,
+):
"""
Return a script of an initializer for *attrs* and a dict of globals.
The globals are expected by the generated script.
- If *frozen* is True, we cannot set the attributes directly so we use
+ If *frozen* is True, we cannot set the attributes directly so we use
a cached ``object.__setattr__``.
"""
lines = []
+ if pre_init:
+ lines.append("self.__attrs_pre_init__()")
+
if frozen is True:
- lines.append(
- # Circumvent the __setattr__ descriptor to save one lookup per
- # assignment.
- "_setattr = _cached_setattr.__get__(self, self.__class__)"
- )
-
- def fmt_setter(attr_name, value_var):
- return "_setattr('%(attr_name)s', %(value_var)s)" % {
- "attr_name": attr_name,
- "value_var": value_var,
- }
-
- def fmt_setter_with_converter(attr_name, value_var):
- conv_name = _init_convert_pat.format(attr_name)
- return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % {
- "attr_name": attr_name,
- "value_var": value_var,
- "conv": conv_name,
- }
+ if slots is True:
+ fmt_setter = _setattr
+ fmt_setter_with_converter = _setattr_with_converter
+ else:
+ # Dict frozen classes assign directly to __dict__.
+ # But only if the attribute doesn't come from an ancestor slot
+ # class.
+ # Note _inst_dict will be used again below if cache_hash is True
+ lines.append("_inst_dict = self.__dict__")
+
+ def fmt_setter(attr_name, value_var, has_on_setattr):
+ if _is_slot_attr(attr_name, base_attr_map):
+ return _setattr(attr_name, value_var, has_on_setattr)
+
+ return "_inst_dict['%s'] = %s" % (attr_name, value_var)
+
+ def fmt_setter_with_converter(
+ attr_name, value_var, has_on_setattr
+ ):
+ if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
+ return _setattr_with_converter(
+ attr_name, value_var, has_on_setattr
+ )
+
+ return "_inst_dict['%s'] = %s(%s)" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
else:
- def fmt_setter(attr_name, value):
- return "self.%(attr_name)s = %(value)s" % {
- "attr_name": attr_name,
- "value": value,
- }
-
- def fmt_setter_with_converter(attr_name, value_var):
- conv_name = _init_convert_pat.format(attr_name)
- return "self.%(attr_name)s = %(conv)s(%(value_var)s)" % {
- "attr_name": attr_name,
- "value_var": value_var,
- "conv": conv_name,
- }
+ # Not frozen.
+ fmt_setter = _assign
+ fmt_setter_with_converter = _assign_with_converter
args = []
+ kw_only_args = []
attrs_to_validate = []
# This is a dictionary of names to validator and converter callables.
# Injecting this into __init__ globals lets us avoid lookups.
names_for_globals = {}
+ annotations = {"return": None}
for a in attrs:
if a.validator:
attrs_to_validate.append(a)
+
attr_name = a.name
+ has_on_setattr = a.on_setattr is not None or (
+ a.on_setattr is not setters.NO_OP and has_cls_on_setattr
+ )
arg_name = a.name.lstrip("_")
+
has_factory = isinstance(a.default, Factory)
if has_factory and a.default.takes_self:
maybe_self = "self"
else:
maybe_self = ""
+
if a.init is False:
if has_factory:
init_factory_name = _init_factory_pat.format(a.name)
- if a.convert is not None:
- lines.append(fmt_setter_with_converter(
- attr_name,
- init_factory_name + "({0})".format(maybe_self)))
- conv_name = _init_convert_pat.format(a.name)
- names_for_globals[conv_name] = a.convert
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
else:
- lines.append(fmt_setter(
- attr_name,
- init_factory_name + "({0})".format(maybe_self)
- ))
+ lines.append(
+ fmt_setter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
names_for_globals[init_factory_name] = a.default.factory
else:
- if a.convert is not None:
- lines.append(fmt_setter_with_converter(
- attr_name,
- "attr_dict['{attr_name}'].default"
- .format(attr_name=attr_name)
- ))
- conv_name = _init_convert_pat.format(a.name)
- names_for_globals[conv_name] = a.convert
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
else:
- lines.append(fmt_setter(
- attr_name,
- "attr_dict['{attr_name}'].default"
- .format(attr_name=attr_name)
- ))
+ lines.append(
+ fmt_setter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
elif a.default is not NOTHING and not has_factory:
- args.append(
- "{arg_name}=attr_dict['{attr_name}'].default".format(
- arg_name=arg_name,
- attr_name=attr_name,
+ arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
)
- )
- if a.convert is not None:
- lines.append(fmt_setter_with_converter(attr_name, arg_name))
- names_for_globals[_init_convert_pat.format(a.name)] = a.convert
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
else:
- lines.append(fmt_setter(attr_name, arg_name))
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
elif has_factory:
- args.append("{arg_name}=NOTHING".format(arg_name=arg_name))
- lines.append("if {arg_name} is not NOTHING:"
- .format(arg_name=arg_name))
+ arg = "%s=NOTHING" % (arg_name,)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+ lines.append("if %s is not NOTHING:" % (arg_name,))
+
init_factory_name = _init_factory_pat.format(a.name)
- if a.convert is not None:
- lines.append(" " + fmt_setter_with_converter(attr_name,
- arg_name))
+ if a.converter is not None:
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
lines.append("else:")
- lines.append(" " + fmt_setter_with_converter(
- attr_name,
- init_factory_name + "({0})".format(maybe_self)
- ))
- names_for_globals[_init_convert_pat.format(a.name)] = a.convert
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
else:
- lines.append(" " + fmt_setter(attr_name, arg_name))
+ lines.append(
+ " " + fmt_setter(attr_name, arg_name, has_on_setattr)
+ )
lines.append("else:")
- lines.append(" " + fmt_setter(
- attr_name,
- init_factory_name + "({0})".format(maybe_self)
- ))
+ lines.append(
+ " "
+ + fmt_setter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
names_for_globals[init_factory_name] = a.default.factory
else:
- args.append(arg_name)
- if a.convert is not None:
- lines.append(fmt_setter_with_converter(attr_name, arg_name))
- names_for_globals[_init_convert_pat.format(a.name)] = a.convert
+ if a.kw_only:
+ kw_only_args.append(arg_name)
else:
- lines.append(fmt_setter(attr_name, arg_name))
+ args.append(arg_name)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ if a.init is True:
+ if a.type is not None and a.converter is None:
+ annotations[arg_name] = a.type
+ elif a.converter is not None:
+ # Try to get the type from the converter.
+ t = _AnnotationExtractor(a.converter).get_first_param_type()
+ if t:
+ annotations[arg_name] = t
if attrs_to_validate: # we can skip this if there are no validators.
names_for_globals["_config"] = _config
lines.append("if _config._run_validators is True:")
for a in attrs_to_validate:
- val_name = "__attr_validator_{}".format(a.name)
- attr_name = "__attr_{}".format(a.name)
- lines.append(" {}(self, {}, self.{})".format(
- val_name, attr_name, a.name))
+ val_name = "__attr_validator_" + a.name
+ attr_name = "__attr_" + a.name
+ lines.append(
+ " %s(self, %s, self.%s)" % (val_name, attr_name, a.name)
+ )
names_for_globals[val_name] = a.validator
names_for_globals[attr_name] = a
+
if post_init:
lines.append("self.__attrs_post_init__()")
- return """\
-def __init__(self, {args}):
+ # because this is set only after __attrs_post_init__ is called, a crash
+ # will result if post-init tries to access the hash code. This seemed
+ # preferable to setting this beforehand, in which case alteration to
+ # field values during post-init combined with post-init accessing the
+ # hash code would result in silent bugs.
+ if cache_hash:
+ if frozen:
+ if slots:
+ # if frozen and slots, then _setattr defined above
+ init_hash_cache = "_setattr(self, '%s', %s)"
+ else:
+ # if frozen and not slots, then _inst_dict defined above
+ init_hash_cache = "_inst_dict['%s'] = %s"
+ else:
+ init_hash_cache = "self.%s = %s"
+ lines.append(init_hash_cache % (_hash_cache_field, "None"))
+
+ # For exceptions we rely on BaseException.__init__ for proper
+ # initialization.
+ if is_exc:
+ vals = ",".join("self." + a.name for a in attrs if a.init)
+
+ lines.append("BaseException.__init__(self, %s)" % (vals,))
+
+ args = ", ".join(args)
+ if kw_only_args:
+ args += "%s*, %s" % (
+ ", " if args else "", # leading comma
+ ", ".join(kw_only_args), # kw_only args
+ )
+ return (
+ """\
+def {init_name}(self, {args}):
{lines}
""".format(
- args=", ".join(args),
- lines="\n ".join(lines) if lines else "pass",
- ), names_for_globals
-
-
-class Attribute(object):
+ init_name=("__attrs_init__" if attrs_init else "__init__"),
+ args=args,
+ lines="\n ".join(lines) if lines else "pass",
+ ),
+ names_for_globals,
+ annotations,
+ )
+
+
+class Attribute:
"""
*Read-only* representation of an attribute.
- :attribute name: The name of the attribute.
-
- Plus *all* arguments of :func:`attr.ib`.
+ The class has *all* arguments of `attr.ib` (except for ``factory``
+ which is only syntactic sugar for ``default=Factory(...)`` plus the
+ following:
+
+ - ``name`` (`str`): The name of the attribute.
+ - ``inherited`` (`bool`): Whether or not that attribute has been inherited
+ from a base class.
+ - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables
+ that are used for comparing and ordering objects by this attribute,
+ respectively. These are set by passing a callable to `attr.ib`'s ``eq``,
+ ``order``, or ``cmp`` arguments. See also :ref:`comparison customization
+ <custom-comparison>`.
+
+ Instances of this class are frequently used for introspection purposes
+ like:
+
+ - `fields` returns a tuple of them.
+ - Validators get them passed as the first argument.
+ - The :ref:`field transformer <transform-fields>` hook receives a list of
+ them.
+
+ .. versionadded:: 20.1.0 *inherited*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.2.0 *inherited* is not taken into account for
+ equality checks and hashing anymore.
+ .. versionadded:: 21.1.0 *eq_key* and *order_key*
+
+ For the full version history of the fields, see `attr.ib`.
"""
+
__slots__ = (
- "name", "default", "validator", "repr", "cmp", "hash", "init",
- "convert", "metadata",
+ "name",
+ "default",
+ "validator",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "type",
+ "converter",
+ "kw_only",
+ "inherited",
+ "on_setattr",
)
- def __init__(self, name, default, validator, repr, cmp, hash, init,
- convert=None, metadata=None):
+ def __init__(
+ self,
+ name,
+ default,
+ validator,
+ repr,
+ cmp, # XXX: unused, remove along with other cmp code.
+ hash,
+ init,
+ inherited,
+ metadata=None,
+ type=None,
+ converter=None,
+ kw_only=False,
+ eq=None,
+ eq_key=None,
+ order=None,
+ order_key=None,
+ on_setattr=None,
+ ):
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq_key or eq, order_key or order, True
+ )
+
# Cache this descriptor here to speed things up later.
bound_setattr = _obj_setattr.__get__(self, Attribute)
+ # Despite the big red warning, people *do* instantiate `Attribute`
+ # themselves.
bound_setattr("name", name)
bound_setattr("default", default)
bound_setattr("validator", validator)
bound_setattr("repr", repr)
- bound_setattr("cmp", cmp)
+ bound_setattr("eq", eq)
+ bound_setattr("eq_key", eq_key)
+ bound_setattr("order", order)
+ bound_setattr("order_key", order_key)
bound_setattr("hash", hash)
bound_setattr("init", init)
- bound_setattr("convert", convert)
- bound_setattr("metadata", (metadata_proxy(metadata) if metadata
- else _empty_metadata_singleton))
+ bound_setattr("converter", converter)
+ bound_setattr(
+ "metadata",
+ (
+ types.MappingProxyType(dict(metadata)) # Shallow copy
+ if metadata
+ else _empty_metadata_singleton
+ ),
+ )
+ bound_setattr("type", type)
+ bound_setattr("kw_only", kw_only)
+ bound_setattr("inherited", inherited)
+ bound_setattr("on_setattr", on_setattr)
def __setattr__(self, name, value):
raise FrozenInstanceError()
@classmethod
- def from_counting_attr(cls, name, ca):
+ def from_counting_attr(cls, name, ca, type=None):
+ # type holds the annotated value. deal with conflicts:
+ if type is None:
+ type = ca.type
+ elif ca.type is not None:
+ raise ValueError(
+ "Type annotation and type argument cannot both be present"
+ )
inst_dict = {
k: getattr(ca, k)
- for k
- in Attribute.__slots__
- if k not in (
- "name", "validator", "default",
- ) # exclude methods
+ for k in Attribute.__slots__
+ if k
+ not in (
+ "name",
+ "validator",
+ "default",
+ "type",
+ "inherited",
+ ) # exclude methods and deprecated alias
}
- return cls(name=name, validator=ca._validator, default=ca._default,
- **inst_dict)
+ return cls(
+ name=name,
+ validator=ca._validator,
+ default=ca._default,
+ type=type,
+ cmp=None,
+ inherited=False,
+ **inst_dict
+ )
+
+ # Don't use attr.evolve since fields(Attribute) doesn't work
+ def evolve(self, **changes):
+ """
+ Copy *self* and apply *changes*.
+
+ This works similarly to `attr.evolve` but that function does not work
+ with ``Attribute``.
+
+ It is mainly meant to be used for `transform-fields`.
+
+ .. versionadded:: 20.3.0
+ """
+ new = copy.copy(self)
+
+ new._setattrs(changes.items())
+
+ return new
# Don't use _add_pickle since fields(Attribute) doesn't work
def __getstate__(self):
"""
Play nice with pickle.
"""
- return tuple(getattr(self, name) if name != "metadata"
- else dict(self.metadata)
- for name in self.__slots__)
+ return tuple(
+ getattr(self, name) if name != "metadata" else dict(self.metadata)
+ for name in self.__slots__
+ )
def __setstate__(self, state):
"""
Play nice with pickle.
"""
+ self._setattrs(zip(self.__slots__, state))
+
+ def _setattrs(self, name_values_pairs):
bound_setattr = _obj_setattr.__get__(self, Attribute)
- for name, value in zip(self.__slots__, state):
+ for name, value in name_values_pairs:
if name != "metadata":
bound_setattr(name, value)
else:
- bound_setattr(name, metadata_proxy(value) if value else
- _empty_metadata_singleton)
-
-
-_a = [Attribute(name=name, default=NOTHING, validator=None,
- repr=True, cmp=True, hash=(name != "metadata"), init=True)
- for name in Attribute.__slots__]
+ bound_setattr(
+ name,
+ types.MappingProxyType(dict(value))
+ if value
+ else _empty_metadata_singleton,
+ )
+
+
+_a = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=(name != "metadata"),
+ init=True,
+ inherited=False,
+ )
+ for name in Attribute.__slots__
+]
Attribute = _add_hash(
- _add_cmp(_add_repr(Attribute, attrs=_a), attrs=_a),
- attrs=[a for a in _a if a.hash]
+ _add_eq(
+ _add_repr(Attribute, attrs=_a),
+ attrs=[a for a in _a if a.name != "inherited"],
+ ),
+ attrs=[a for a in _a if a.hash and a.name != "inherited"],
)
-class _CountingAttr(object):
+class _CountingAttr:
"""
Intermediate representation of attributes that uses a counter to preserve
the order in which the attributes have been defined.
@@ -903,35 +2664,105 @@
*Internal* data structure of the attrs library. Running into is most
likely the result of a bug like a forgotten `@attr.s` decorator.
"""
- __slots__ = ("counter", "_default", "repr", "cmp", "hash", "init",
- "metadata", "_validator", "convert")
+
+ __slots__ = (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "_validator",
+ "converter",
+ "type",
+ "kw_only",
+ "on_setattr",
+ )
__attrs_attrs__ = tuple(
- Attribute(name=name, default=NOTHING, validator=None,
- repr=True, cmp=True, hash=True, init=True)
- for name
- in ("counter", "_default", "repr", "cmp", "hash", "init",)
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=True,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ )
+ for name in (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "order",
+ "hash",
+ "init",
+ "on_setattr",
+ )
) + (
- Attribute(name="metadata", default=None, validator=None,
- repr=True, cmp=True, hash=False, init=True),
+ Attribute(
+ name="metadata",
+ default=None,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=False,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ ),
)
cls_counter = 0
- def __init__(self, default, validator, repr, cmp, hash, init, convert,
- metadata):
+ def __init__(
+ self,
+ default,
+ validator,
+ repr,
+ cmp,
+ hash,
+ init,
+ converter,
+ metadata,
+ type,
+ kw_only,
+ eq,
+ eq_key,
+ order,
+ order_key,
+ on_setattr,
+ ):
_CountingAttr.cls_counter += 1
self.counter = _CountingAttr.cls_counter
self._default = default
- # If validator is a list/tuple, wrap it using helper validator.
- if validator and isinstance(validator, (list, tuple)):
- self._validator = and_(*validator)
- else:
- self._validator = validator
+ self._validator = validator
+ self.converter = converter
self.repr = repr
- self.cmp = cmp
+ self.eq = eq
+ self.eq_key = eq_key
+ self.order = order
+ self.order_key = order_key
self.hash = hash
self.init = init
- self.convert = convert
self.metadata = metadata
+ self.type = type
+ self.kw_only = kw_only
+ self.on_setattr = on_setattr
def validator(self, meth):
"""
@@ -965,15 +2796,14 @@
return meth
-_CountingAttr = _add_cmp(_add_repr(_CountingAttr))
-
-
-@attributes(slots=True, init=False)
-class Factory(object):
+_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
+class Factory:
"""
Stores a factory callable.
- If passed as the default value to :func:`attr.ib`, the factory is used to
+ If passed as the default value to `attrs.field`, the factory is used to
generate a new value.
:param callable factory: A callable that takes either none or exactly one
@@ -983,8 +2813,8 @@
.. versionadded:: 17.1.0 *takes_self*
"""
- factory = attr()
- takes_self = attr()
+
+ __slots__ = ("factory", "takes_self")
def __init__(self, factory, takes_self=False):
"""
@@ -994,47 +2824,122 @@
self.factory = factory
self.takes_self = takes_self
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(getattr(self, name) for name in self.__slots__)
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ for name, value in zip(self.__slots__, state):
+ setattr(self, name, value)
+
+
+_f = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=True,
+ init=True,
+ inherited=False,
+ )
+ for name in Factory.__slots__
+]
+
+Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
def make_class(name, attrs, bases=(object,), **attributes_arguments):
"""
A quick way to create a new class called *name* with *attrs*.
- :param name: The name for the new class.
- :type name: str
+ :param str name: The name for the new class.
:param attrs: A list of names or a dictionary of mappings of names to
attributes.
- :type attrs: :class:`list` or :class:`dict`
+
+ If *attrs* is a list or an ordered dict (`dict` on Python 3.6+,
+ `collections.OrderedDict` otherwise), the order is deduced from
+ the order of the names or attributes inside *attrs*. Otherwise the
+ order of the definition of the attributes is used.
+ :type attrs: `list` or `dict`
:param tuple bases: Classes that the new class will subclass.
- :param attributes_arguments: Passed unmodified to :func:`attr.s`.
+ :param attributes_arguments: Passed unmodified to `attr.s`.
:return: A new class with *attrs*.
:rtype: type
- .. versionadded:: 17.1.0 *bases*
+ .. versionadded:: 17.1.0 *bases*
+ .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
"""
if isinstance(attrs, dict):
cls_dict = attrs
elif isinstance(attrs, (list, tuple)):
- cls_dict = dict((a, attr()) for a in attrs)
+ cls_dict = {a: attrib() for a in attrs}
else:
raise TypeError("attrs argument must be a dict or a list.")
- return attributes(**attributes_arguments)(type(name, bases, cls_dict))
-
-
-# These are required by whithin this module so we define them here and merely
-# import into .validators.
-
-
-@attributes(slots=True, hash=True)
-class _AndValidator(object):
+ pre_init = cls_dict.pop("__attrs_pre_init__", None)
+ post_init = cls_dict.pop("__attrs_post_init__", None)
+ user_init = cls_dict.pop("__init__", None)
+
+ body = {}
+ if pre_init is not None:
+ body["__attrs_pre_init__"] = pre_init
+ if post_init is not None:
+ body["__attrs_post_init__"] = post_init
+ if user_init is not None:
+ body["__init__"] = user_init
+
+ type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))
+
+ # For pickling to work, the __module__ variable needs to be set to the
+ # frame where the class is created. Bypass this step in environments where
+ # sys._getframe is not defined (Jython for example) or sys._getframe is not
+ # defined for arguments greater than 0 (IronPython).
+ try:
+ type_.__module__ = sys._getframe(1).f_globals.get(
+ "__name__", "__main__"
+ )
+ except (AttributeError, ValueError):
+ pass
+
+ # We do it here for proper warnings with meaningful stacklevel.
+ cmp = attributes_arguments.pop("cmp", None)
+ (
+ attributes_arguments["eq"],
+ attributes_arguments["order"],
+ ) = _determine_attrs_eq_order(
+ cmp,
+ attributes_arguments.get("eq"),
+ attributes_arguments.get("order"),
+ True,
+ )
+
+ return _attrs(these=cls_dict, **attributes_arguments)(type_)
+
+
+# These are required by within this module so we define them here and merely
+# import into .validators / .converters.
+
+
+@attrs(slots=True, hash=True)
+class _AndValidator:
"""
Compose many validators to a single one.
"""
- _validators = attr()
+
+ _validators = attrib()
def __call__(self, inst, attr, value):
for v in self._validators:
@@ -1047,16 +2952,55 @@
When called on a value, it runs all wrapped validators.
- :param validators: Arbitrary number of validators.
- :type validators: callables
+ :param callables validators: Arbitrary number of validators.
.. versionadded:: 17.1.0
"""
vals = []
for validator in validators:
vals.extend(
- validator._validators if isinstance(validator, _AndValidator)
+ validator._validators
+ if isinstance(validator, _AndValidator)
else [validator]
)
return _AndValidator(tuple(vals))
+
+
+def pipe(*converters):
+ """
+ A converter that composes multiple converters into one.
+
+ When called on a value, it runs all wrapped converters, returning the
+ *last* value.
+
+ Type annotations will be inferred from the wrapped converters', if
+ they have any.
+
+ :param callables converters: Arbitrary number of converters.
+
+ .. versionadded:: 20.1.0
+ """
+
+ def pipe_converter(val):
+ for converter in converters:
+ val = converter(val)
+
+ return val
+
+ if not converters:
+ # If the converter list is empty, pipe_converter is the identity.
+ A = typing.TypeVar("A")
+ pipe_converter.__annotations__ = {"val": A, "return": A}
+ else:
+ # Get parameter type from first converter.
+ t = _AnnotationExtractor(converters[0]).get_first_param_type()
+ if t:
+ pipe_converter.__annotations__["val"] = t
+
+ # Get return type from last converter.
+ rt = _AnnotationExtractor(converters[-1]).get_return_type()
+ if rt:
+ pipe_converter.__annotations__["return"] = rt
+
+ return pipe_converter
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_next_gen.py Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,220 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
+`attr.ib` with different default values.
+"""
+
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+ NOTHING,
+ _frozen_setattrs,
+ _ng_default_on_setattr,
+ attrib,
+ attrs,
+)
+from .exceptions import UnannotatedAttributeError
+
+
+def define(
+ maybe_cls=None,
+ *,
+ these=None,
+ repr=None,
+ hash=None,
+ init=None,
+ slots=True,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=None,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=True,
+ eq=None,
+ order=False,
+ auto_detect=True,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+ match_args=True,
+):
+ r"""
+ Define an ``attrs`` class.
+
+ Differences to the classic `attr.s` that it uses underneath:
+
+ - Automatically detect whether or not *auto_attribs* should be `True` (c.f.
+ *auto_attribs* parameter).
+ - If *frozen* is `False`, run converters and validators when setting an
+ attribute by default.
+ - *slots=True*
+
+ .. caution::
+
+ Usually this has only upsides and few visible effects in everyday
+         programming. But it *can* lead to some surprising behaviors, so please
+ make sure to read :term:`slotted classes`.
+ - *auto_exc=True*
+ - *auto_detect=True*
+ - *order=False*
+ - Some options that were only relevant on Python 2 or were kept around for
+ backwards-compatibility have been removed.
+
+ Please note that these are all defaults and you can change them as you
+ wish.
+
+ :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
+ exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
+
+ 1. If any attributes are annotated and no unannotated `attrs.fields`\ s
+ are found, it assumes *auto_attribs=True*.
+ 2. Otherwise it assumes *auto_attribs=False* and tries to collect
+ `attrs.fields`\ s.
+
+ For now, please refer to `attr.s` for the rest of the parameters.
+
+ .. versionadded:: 20.1.0
+ .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
+ """
+
+ def do_it(cls, auto_attribs):
+ return attrs(
+ maybe_cls=cls,
+ these=these,
+ repr=repr,
+ hash=hash,
+ init=init,
+ slots=slots,
+ frozen=frozen,
+ weakref_slot=weakref_slot,
+ str=str,
+ auto_attribs=auto_attribs,
+ kw_only=kw_only,
+ cache_hash=cache_hash,
+ auto_exc=auto_exc,
+ eq=eq,
+ order=order,
+ auto_detect=auto_detect,
+ collect_by_mro=True,
+ getstate_setstate=getstate_setstate,
+ on_setattr=on_setattr,
+ field_transformer=field_transformer,
+ match_args=match_args,
+ )
+
+ def wrap(cls):
+ """
+ Making this a wrapper ensures this code runs during class creation.
+
+ We also ensure that frozen-ness of classes is inherited.
+ """
+ nonlocal frozen, on_setattr
+
+ had_on_setattr = on_setattr not in (None, setters.NO_OP)
+
+ # By default, mutable classes convert & validate on setattr.
+ if frozen is False and on_setattr is None:
+ on_setattr = _ng_default_on_setattr
+
+ # However, if we subclass a frozen class, we inherit the immutability
+ # and disable on_setattr.
+ for base_cls in cls.__bases__:
+ if base_cls.__setattr__ is _frozen_setattrs:
+ if had_on_setattr:
+ raise ValueError(
+ "Frozen classes can't use on_setattr "
+ "(frozen-ness was inherited)."
+ )
+
+ on_setattr = setters.NO_OP
+ break
+
+ if auto_attribs is not None:
+ return do_it(cls, auto_attribs)
+
+ try:
+ return do_it(cls, True)
+ except UnannotatedAttributeError:
+ return do_it(cls, False)
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+ else:
+ return wrap(maybe_cls)
+
+
+mutable = define
+frozen = partial(define, frozen=True, on_setattr=None)
+
+
+def field(
+ *,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ hash=None,
+ init=True,
+ metadata=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
+ Identical to `attr.ib`, except keyword-only and with some arguments
+ removed.
+
+ .. versionadded:: 20.1.0
+ """
+ return attrib(
+ default=default,
+ validator=validator,
+ repr=repr,
+ hash=hash,
+ init=init,
+ metadata=metadata,
+ converter=converter,
+ factory=factory,
+ kw_only=kw_only,
+ eq=eq,
+ order=order,
+ on_setattr=on_setattr,
+ )
+
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+ """
+ Same as `attr.asdict`, except that collections types are always retained
+ and dict is always used as *dict_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _asdict(
+ inst=inst,
+ recurse=recurse,
+ filter=filter,
+ value_serializer=value_serializer,
+ retain_collection_types=True,
+ )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+ """
+ Same as `attr.astuple`, except that collections types are always retained
+ and `tuple` is always used as the *tuple_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _astuple(
+ inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+ )
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_version_info.py Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,86 @@
+# SPDX-License-Identifier: MIT
+
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo:
+ """
+ A version object that can be compared to tuple of length 1--4:
+
+ >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+ True
+ >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+ True
+ >>> vi = attr.VersionInfo(19, 2, 0, "final")
+ >>> vi < (19, 1, 1)
+ False
+ >>> vi < (19,)
+ False
+ >>> vi == (19, 2,)
+ True
+ >>> vi == (19, 2, 1)
+ False
+
+ .. versionadded:: 19.2
+ """
+
+ year = attrib(type=int)
+ minor = attrib(type=int)
+ micro = attrib(type=int)
+ releaselevel = attrib(type=str)
+
+ @classmethod
+ def _from_version_string(cls, s):
+ """
+ Parse *s* and return a _VersionInfo.
+ """
+ v = s.split(".")
+ if len(v) == 3:
+ v.append("final")
+
+ return cls(
+ year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+ )
+
+ def _ensure_tuple(self, other):
+ """
+ Ensure *other* is a tuple of a valid length.
+
+ Returns a possibly transformed *other* and ourselves as a tuple of
+ the same length as *other*.
+ """
+
+ if self.__class__ is other.__class__:
+ other = astuple(other)
+
+ if not isinstance(other, tuple):
+ raise NotImplementedError
+
+ if not (1 <= len(other) <= 4):
+ raise NotImplementedError
+
+ return astuple(self)[: len(other)], other
+
+ def __eq__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ return us == them
+
+ def __lt__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+ # have to do anything special with releaselevel for now.
+ return us < them
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/_version_info.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,9 @@
+class VersionInfo:
+ @property
+ def year(self) -> int: ...
+ @property
+ def minor(self) -> int: ...
+ @property
+ def micro(self) -> int: ...
+ @property
+ def releaselevel(self) -> str: ...
--- a/mercurial/thirdparty/attr/converters.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/thirdparty/attr/converters.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,8 +1,22 @@
+# SPDX-License-Identifier: MIT
+
"""
Commonly useful converters.
"""
-from __future__ import absolute_import, division, print_function
+
+import typing
+
+from ._compat import _AnnotationExtractor
+from ._make import NOTHING, Factory, pipe
+
+
+__all__ = [
+ "default_if_none",
+ "optional",
+ "pipe",
+ "to_bool",
+]
def optional(converter):
@@ -10,10 +24,13 @@
A converter that allows an attribute to be optional. An optional attribute
is one which can be set to ``None``.
+ Type annotations will be inferred from the wrapped converter's, if it
+ has any.
+
:param callable converter: the converter that is used for non-``None``
values.
- .. versionadded:: 17.1.0
+ .. versionadded:: 17.1.0
"""
def optional_converter(val):
@@ -21,4 +38,107 @@
return None
return converter(val)
+ xtr = _AnnotationExtractor(converter)
+
+ t = xtr.get_first_param_type()
+ if t:
+ optional_converter.__annotations__["val"] = typing.Optional[t]
+
+ rt = xtr.get_return_type()
+ if rt:
+ optional_converter.__annotations__["return"] = typing.Optional[rt]
+
return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+    A converter that allows ``None`` values to be replaced by *default* or the
+ result of *factory*.
+
+ :param default: Value to be used if ``None`` is passed. Passing an instance
+ of `attrs.Factory` is supported, however the ``takes_self`` option
+ is *not*.
+ :param callable factory: A callable that takes no parameters whose result
+ is used if ``None`` is passed.
+
+ :raises TypeError: If **neither** *default* or *factory* is passed.
+ :raises TypeError: If **both** *default* and *factory* are passed.
+ :raises ValueError: If an instance of `attrs.Factory` is passed with
+ ``takes_self=True``.
+
+ .. versionadded:: 18.2.0
+ """
+ if default is NOTHING and factory is None:
+ raise TypeError("Must pass either `default` or `factory`.")
+
+ if default is not NOTHING and factory is not None:
+ raise TypeError(
+ "Must pass either `default` or `factory` but not both."
+ )
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ raise ValueError(
+ "`takes_self` is not supported by default_if_none."
+ )
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
+
+
+def to_bool(val):
+ """
+ Convert "boolean" strings (e.g., from env. vars.) to real booleans.
+
+ Values mapping to :code:`True`:
+
+ - :code:`True`
+ - :code:`"true"` / :code:`"t"`
+ - :code:`"yes"` / :code:`"y"`
+ - :code:`"on"`
+ - :code:`"1"`
+ - :code:`1`
+
+ Values mapping to :code:`False`:
+
+ - :code:`False`
+ - :code:`"false"` / :code:`"f"`
+ - :code:`"no"` / :code:`"n"`
+ - :code:`"off"`
+ - :code:`"0"`
+ - :code:`0`
+
+ :raises ValueError: for any other value.
+
+ .. versionadded:: 21.3.0
+ """
+ if isinstance(val, str):
+ val = val.lower()
+ truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
+ falsy = {False, "false", "f", "no", "n", "off", "0", 0}
+ try:
+ if val in truthy:
+ return True
+ if val in falsy:
+ return False
+ except TypeError:
+ # Raised when "val" is not hashable (e.g., lists)
+ pass
+ raise ValueError("Cannot convert value to bool: {}".format(val))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/converters.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,13 @@
+from typing import Callable, Optional, TypeVar, overload
+
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
+def to_bool(val: str) -> bool: ...
--- a/mercurial/thirdparty/attr/exceptions.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/thirdparty/attr/exceptions.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,17 +1,35 @@
-from __future__ import absolute_import, division, print_function
+# SPDX-License-Identifier: MIT
-class FrozenInstanceError(AttributeError):
+class FrozenError(AttributeError):
"""
- A frozen/immutable instance has been attempted to be modified.
+ A frozen/immutable instance or attribute have been attempted to be
+ modified.
It mirrors the behavior of ``namedtuples`` by using the same error message
- and subclassing :exc:`AttributeError`.
+ and subclassing `AttributeError`.
+
+ .. versionadded:: 20.1.0
+ """
+
+ msg = "can't set attribute"
+ args = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+ """
+ A frozen instance has been attempted to be modified.
.. versionadded:: 16.1.0
"""
- msg = "can't set attribute"
- args = [msg]
+
+
+class FrozenAttributeError(FrozenError):
+ """
+ A frozen attribute has been attempted to be modified.
+
+ .. versionadded:: 20.1.0
+ """
class AttrsAttributeNotFoundError(ValueError):
@@ -37,3 +55,38 @@
.. versionadded:: 17.1.0
"""
+
+
+class UnannotatedAttributeError(RuntimeError):
+ """
+ A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
+ annotation.
+
+ .. versionadded:: 17.3.0
+ """
+
+
+class PythonTooOldError(RuntimeError):
+ """
+ It was attempted to use an ``attrs`` feature that requires a newer Python
+ version.
+
+ .. versionadded:: 18.2.0
+ """
+
+
+class NotCallableError(TypeError):
+ """
+ A ``attr.ib()`` requiring a callable has been set with a value
+ that is not callable.
+
+ .. versionadded:: 19.2.0
+ """
+
+ def __init__(self, msg, value):
+ super(TypeError, self).__init__(msg, value)
+ self.msg = msg
+ self.value = value
+
+ def __str__(self):
+ return str(self.msg)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/exceptions.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+ msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+ msg: str = ...
+ value: Any = ...
+ def __init__(self, msg: str, value: Any) -> None: ...
--- a/mercurial/thirdparty/attr/filters.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/thirdparty/attr/filters.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,10 +1,9 @@
+# SPDX-License-Identifier: MIT
+
"""
-Commonly useful filters for :func:`attr.asdict`.
+Commonly useful filters for `attr.asdict`.
"""
-from __future__ import absolute_import, division, print_function
-
-from ._compat import isclass
from ._make import Attribute
@@ -13,19 +12,19 @@
Returns a tuple of `frozenset`s of classes and attributes.
"""
return (
- frozenset(cls for cls in what if isclass(cls)),
+ frozenset(cls for cls in what if isinstance(cls, type)),
frozenset(cls for cls in what if isinstance(cls, Attribute)),
)
def include(*what):
- r"""
- Whitelist *what*.
+ """
+ Include *what*.
- :param what: What to whitelist.
- :type what: :class:`list` of :class:`type` or :class:`attr.Attribute`\ s
+ :param what: What to include.
+ :type what: `list` of `type` or `attrs.Attribute`\\ s
- :rtype: :class:`callable`
+ :rtype: `callable`
"""
cls, attrs = _split_what(what)
@@ -36,13 +35,13 @@
def exclude(*what):
- r"""
- Blacklist *what*.
+ """
+ Exclude *what*.
- :param what: What to blacklist.
- :type what: :class:`list` of classes or :class:`attr.Attribute`\ s.
+ :param what: What to exclude.
+ :type what: `list` of classes or `attrs.Attribute`\\ s.
- :rtype: :class:`callable`
+ :rtype: `callable`
"""
cls, attrs = _split_what(what)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/filters.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,6 @@
+from typing import Any, Union
+
+from . import Attribute, _FilterType
+
+def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
+def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/setters.py Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,73 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+ """
+ Run all *setters* and return the return value of the last one.
+
+ .. versionadded:: 20.1.0
+ """
+
+ def wrapped_pipe(instance, attrib, new_value):
+ rv = new_value
+
+ for setter in setters:
+ rv = setter(instance, attrib, rv)
+
+ return rv
+
+ return wrapped_pipe
+
+
+def frozen(_, __, ___):
+ """
+    Prevent an attribute from being modified.
+
+ .. versionadded:: 20.1.0
+ """
+ raise FrozenAttributeError()
+
+
+def validate(instance, attrib, new_value):
+ """
+ Run *attrib*'s validator on *new_value* if it has one.
+
+ .. versionadded:: 20.1.0
+ """
+ if _config._run_validators is False:
+ return new_value
+
+ v = attrib.validator
+ if not v:
+ return new_value
+
+ v(instance, attrib, new_value)
+
+ return new_value
+
+
+def convert(instance, attrib, new_value):
+ """
+ Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+ result.
+
+ .. versionadded:: 20.1.0
+ """
+ c = attrib.converter
+ if c:
+ return c(new_value)
+
+ return new_value
+
+
+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# autodata stopped working, so the docstring is inlined in the API docs.
+NO_OP = object()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/setters.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,19 @@
+from typing import Any, NewType, NoReturn, TypeVar, cast
+
+from . import Attribute, _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because they can be chained using pipe.
+def convert(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
--- a/mercurial/thirdparty/attr/validators.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/thirdparty/attr/validators.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,24 +1,99 @@
+# SPDX-License-Identifier: MIT
+
"""
Commonly useful validators.
"""
-from __future__ import absolute_import, division, print_function
+
+import operator
+import re
+
+from contextlib import contextmanager
-from ._make import attr, attributes, and_, _AndValidator
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .exceptions import NotCallableError
+
+
+try:
+ Pattern = re.Pattern
+except AttributeError: # Python <3.7 lacks a Pattern type.
+ Pattern = type(re.compile(""))
__all__ = [
"and_",
+ "deep_iterable",
+ "deep_mapping",
+ "disabled",
+ "ge",
+ "get_disabled",
+ "gt",
"in_",
"instance_of",
+ "is_callable",
+ "le",
+ "lt",
+ "matches_re",
+ "max_len",
+ "min_len",
"optional",
"provides",
+ "set_disabled",
]
-@attributes(repr=False, slots=True, hash=True)
-class _InstanceOfValidator(object):
- type = attr()
+def set_disabled(disabled):
+ """
+ Globally disable or enable running validators.
+
+ By default, they are run.
+
+ :param disabled: If ``True``, disable running all validators.
+ :type disabled: bool
+
+ .. warning::
+
+ This function is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(not disabled)
+
+
+def get_disabled():
+ """
+ Return a bool indicating whether validators are currently disabled or not.
+
+ :return: ``True`` if validators are currently disabled.
+ :rtype: bool
+
+ .. versionadded:: 21.3.0
+ """
+ return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+ """
+ Context manager that disables running validators within its context.
+
+ .. warning::
+
+ This context manager is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(False)
+ try:
+ yield
+ finally:
+ set_run_validators(True)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InstanceOfValidator:
+ type = attrib()
def __call__(self, inst, attr, value):
"""
@@ -27,38 +102,116 @@
if not isinstance(value, self.type):
raise TypeError(
"'{name}' must be {type!r} (got {value!r} that is a "
- "{actual!r})."
- .format(name=attr.name, type=self.type,
- actual=value.__class__, value=value),
- attr, self.type, value,
+ "{actual!r}).".format(
+ name=attr.name,
+ type=self.type,
+ actual=value.__class__,
+ value=value,
+ ),
+ attr,
+ self.type,
+ value,
)
def __repr__(self):
- return (
- "<instance_of validator for type {type!r}>"
- .format(type=self.type)
+ return "<instance_of validator for type {type!r}>".format(
+ type=self.type
)
def instance_of(type):
"""
- A validator that raises a :exc:`TypeError` if the initializer is called
- with a wrong type for this particular attribute (checks are perfomed using
- :func:`isinstance` therefore it's also valid to pass a tuple of types).
+ A validator that raises a `TypeError` if the initializer is called
+ with a wrong type for this particular attribute (checks are performed using
+ `isinstance` therefore it's also valid to pass a tuple of types).
:param type: The type to check for.
:type type: type or tuple of types
:raises TypeError: With a human readable error message, the attribute
- (of type :class:`attr.Attribute`), the expected type, and the value it
+ (of type `attrs.Attribute`), the expected type, and the value it
got.
"""
return _InstanceOfValidator(type)
-@attributes(repr=False, slots=True, hash=True)
-class _ProvidesValidator(object):
- interface = attr()
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator:
+ pattern = attrib()
+ match_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.match_func(value):
+ raise ValueError(
+ "'{name}' must match regex {pattern!r}"
+ " ({value!r} doesn't)".format(
+ name=attr.name, pattern=self.pattern.pattern, value=value
+ ),
+ attr,
+ self.pattern,
+ value,
+ )
+
+ def __repr__(self):
+ return "<matches_re validator for pattern {pattern!r}>".format(
+ pattern=self.pattern
+ )
+
+
+def matches_re(regex, flags=0, func=None):
+ r"""
+ A validator that raises `ValueError` if the initializer is called
+ with a string that doesn't match *regex*.
+
+ :param regex: a regex string or precompiled pattern to match against
+ :param int flags: flags that will be passed to the underlying re function
+ (default 0)
+ :param callable func: which underlying `re` function to call. Valid options
+ are `re.fullmatch`, `re.search`, and `re.match`; the default ``None``
+ means `re.fullmatch`. For performance reasons, the pattern is always
+ precompiled using `re.compile`.
+
+ .. versionadded:: 19.2.0
+ .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+ """
+ valid_funcs = (re.fullmatch, None, re.search, re.match)
+ if func not in valid_funcs:
+ raise ValueError(
+ "'func' must be one of {}.".format(
+ ", ".join(
+ sorted(
+ e and e.__name__ or "None" for e in set(valid_funcs)
+ )
+ )
+ )
+ )
+
+ if isinstance(regex, Pattern):
+ if flags:
+ raise TypeError(
+ "'flags' can only be used with a string pattern; "
+ "pass flags to re.compile() instead"
+ )
+ pattern = regex
+ else:
+ pattern = re.compile(regex, flags)
+
+ if func is re.match:
+ match_func = pattern.match
+ elif func is re.search:
+ match_func = pattern.search
+ else:
+ match_func = pattern.fullmatch
+
+ return _MatchesReValidator(pattern, match_func)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _ProvidesValidator:
+ interface = attrib()
def __call__(self, inst, attr, value):
"""
@@ -67,37 +220,40 @@
if not self.interface.providedBy(value):
raise TypeError(
"'{name}' must provide {interface!r} which {value!r} "
- "doesn't."
- .format(name=attr.name, interface=self.interface, value=value),
- attr, self.interface, value,
+ "doesn't.".format(
+ name=attr.name, interface=self.interface, value=value
+ ),
+ attr,
+ self.interface,
+ value,
)
def __repr__(self):
- return (
- "<provides validator for interface {interface!r}>"
- .format(interface=self.interface)
+ return "<provides validator for interface {interface!r}>".format(
+ interface=self.interface
)
def provides(interface):
"""
- A validator that raises a :exc:`TypeError` if the initializer is called
+ A validator that raises a `TypeError` if the initializer is called
with an object that does not provide the requested *interface* (checks are
performed using ``interface.providedBy(value)`` (see `zope.interface
<https://zopeinterface.readthedocs.io/en/latest/>`_).
- :param zope.interface.Interface interface: The interface to check for.
+ :param interface: The interface to check for.
+ :type interface: ``zope.interface.Interface``
:raises TypeError: With a human readable error message, the attribute
- (of type :class:`attr.Attribute`), the expected interface, and the
+ (of type `attrs.Attribute`), the expected interface, and the
value it got.
"""
return _ProvidesValidator(interface)
-@attributes(repr=False, slots=True, hash=True)
-class _OptionalValidator(object):
- validator = attr()
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator:
+ validator = attrib()
def __call__(self, inst, attr, value):
if value is None:
@@ -106,9 +262,8 @@
self.validator(inst, attr, value)
def __repr__(self):
- return (
- "<optional validator for {what} or None>"
- .format(what=repr(self.validator))
+ return "<optional validator for {what} or None>".format(
+ what=repr(self.validator)
)
@@ -120,7 +275,7 @@
:param validator: A validator (or a list of validators) that is used for
non-``None`` values.
- :type validator: callable or :class:`list` of callables.
+ :type validator: callable or `list` of callables.
.. versionadded:: 15.1.0
.. versionchanged:: 17.1.0 *validator* can be a list of validators.
@@ -130,37 +285,310 @@
return _OptionalValidator(validator)
-@attributes(repr=False, slots=True, hash=True)
-class _InValidator(object):
- options = attr()
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator:
+ options = attrib()
def __call__(self, inst, attr, value):
- if value not in self.options:
+ try:
+ in_options = value in self.options
+ except TypeError: # e.g. `1 in "abc"`
+ in_options = False
+
+ if not in_options:
raise ValueError(
- "'{name}' must be in {options!r} (got {value!r})"
- .format(name=attr.name, options=self.options, value=value)
+ "'{name}' must be in {options!r} (got {value!r})".format(
+ name=attr.name, options=self.options, value=value
+ ),
+ attr,
+ self.options,
+ value,
)
def __repr__(self):
- return (
- "<in_ validator with options {options!r}>"
- .format(options=self.options)
+ return "<in_ validator with options {options!r}>".format(
+ options=self.options
)
def in_(options):
"""
- A validator that raises a :exc:`ValueError` if the initializer is called
+ A validator that raises a `ValueError` if the initializer is called
with a value that does not belong in the options provided. The check is
performed using ``value in options``.
:param options: Allowed options.
- :type options: list, tuple, :class:`enum.Enum`, ...
+ :type options: list, tuple, `enum.Enum`, ...
:raises ValueError: With a human readable error message, the attribute (of
- type :class:`attr.Attribute`), the expected options, and the value it
+ type `attrs.Attribute`), the expected options, and the value it
got.
.. versionadded:: 17.1.0
+ .. versionchanged:: 22.1.0
+ The ValueError was incomplete until now and only contained the human
+ readable error message. Now it contains all the information that has
+ been promised since 17.1.0.
"""
return _InValidator(options)
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator:
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not callable(value):
+ message = (
+ "'{name}' must be callable "
+ "(got {value!r} that is a {actual!r})."
+ )
+ raise NotCallableError(
+ msg=message.format(
+ name=attr.name, value=value, actual=value.__class__
+ ),
+ value=value,
+ )
+
+ def __repr__(self):
+ return "<is_callable validator>"
+
+
+def is_callable():
+ """
+    A validator that raises an `attr.exceptions.NotCallableError` if the
+ initializer is called with a value for this particular attribute
+ that is not callable.
+
+ .. versionadded:: 19.1.0
+
+ :raises `attr.exceptions.NotCallableError`: With a human readable error
+ message containing the attribute (`attrs.Attribute`) name,
+ and the value it got.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable:
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else " {iterable!r}".format(iterable=self.iterable_validator)
+ )
+ return (
+ "<deep_iterable validator for{iterable_identifier}"
+ " iterables of {member!r}>"
+ ).format(
+ iterable_identifier=iterable_identifier,
+ member=self.member_validator,
+ )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ :param member_validator: Validator(s) to apply to iterable members
+ :param iterable_validator: Validator to apply to iterable itself
+ (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ if isinstance(member_validator, (list, tuple)):
+ member_validator = and_(*member_validator)
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping:
+ key_validator = attrib(validator=is_callable())
+ value_validator = attrib(validator=is_callable())
+ mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ self.key_validator(inst, attr, key)
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return (
+ "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+ ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ :param key_validator: Validator to apply to dictionary keys
+ :param value_validator: Validator to apply to dictionary values
+ :param mapping_validator: Validator to apply to top-level mapping
+ attribute (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator:
+ bound = attrib()
+ compare_op = attrib()
+ compare_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.compare_func(value, self.bound):
+ raise ValueError(
+ "'{name}' must be {op} {bound}: {value}".format(
+ name=attr.name,
+ op=self.compare_op,
+ bound=self.bound,
+ value=value,
+ )
+ )
+
+ def __repr__(self):
+ return "<Validator for x {op} {bound}>".format(
+ op=self.compare_op, bound=self.bound
+ )
+
+
+def lt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+    with a number larger than or equal to *val*.
+
+ :param val: Exclusive upper bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number greater than *val*.
+
+ :param val: Inclusive upper bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number smaller than *val*.
+
+ :param val: Inclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+    with a number smaller than or equal to *val*.
+
+ :param val: Exclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator:
+ max_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) > self.max_length:
+ raise ValueError(
+ "Length of '{name}' must be <= {max}: {len}".format(
+ name=attr.name, max=self.max_length, len=len(value)
+ )
+ )
+
+ def __repr__(self):
+ return "<max_len validator for {max}>".format(max=self.max_length)
+
+
+def max_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is longer than *length*.
+
+ :param int length: Maximum length of the string or iterable
+
+ .. versionadded:: 21.3.0
+ """
+ return _MaxLengthValidator(length)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MinLengthValidator:
+ min_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) < self.min_length:
+ raise ValueError(
+ "Length of '{name}' must be => {min}: {len}".format(
+ name=attr.name, min=self.min_length, len=len(value)
+ )
+ )
+
+ def __repr__(self):
+ return "<min_len validator for {min}>".format(min=self.min_length)
+
+
+def min_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is shorter than *length*.
+
+ :param int length: Minimum length of the string or iterable
+
+ .. versionadded:: 22.1.0
+ """
+ return _MinLengthValidator(length)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/attr/validators.pyi Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,80 @@
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Container,
+ ContextManager,
+ Iterable,
+ List,
+ Mapping,
+ Match,
+ Optional,
+ Pattern,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+from . import _ValidatorType
+from . import _ValidatorArgType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+ type: Tuple[Type[_T1], Type[_T2]]
+) -> _ValidatorType[Union[_T1, _T2]]: ...
+@overload
+def instance_of(
+ type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
+) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
+@overload
+def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
+def provides(interface: Any) -> _ValidatorType[Any]: ...
+def optional(
+ validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
+) -> _ValidatorType[Optional[_T]]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+ regex: Union[Pattern[AnyStr], AnyStr],
+ flags: int = ...,
+ func: Optional[
+ Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
+ ] = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+ member_validator: _ValidatorArgType[_T],
+ iterable_validator: Optional[_ValidatorType[_I]] = ...,
+) -> _ValidatorType[_I]: ...
+def deep_mapping(
+ key_validator: _ValidatorType[_K],
+ value_validator: _ValidatorType[_V],
+ mapping_validator: Optional[_ValidatorType[_M]] = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
+def min_len(length: int) -> _ValidatorType[_T]: ...
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/typelib.py Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,28 @@
+# typelib.py - type hint aliases and support
+#
+# Copyright 2022 Matt Harbison <matt_harbison@yahoo.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+import typing
+
+# Note: this is slightly different from pycompat.TYPE_CHECKING, as using
+# pycompat causes the BinaryIO_Proxy type to be resolved to ``object`` when
+# used as the base class during a pytype run.
+TYPE_CHECKING = typing.TYPE_CHECKING
+
+
+# The BinaryIO class provides empty methods, which at runtime means that
+# ``__getattr__`` on the proxy classes won't get called for the methods that
+# should delegate to the internal object. So to avoid runtime changes because
+# of the required typing inheritance, just use BinaryIO when typechecking, and
+# ``object`` otherwise.
+if TYPE_CHECKING:
+ from typing import (
+ BinaryIO,
+ )
+
+ BinaryIO_Proxy = BinaryIO
+else:
+ BinaryIO_Proxy = object
--- a/mercurial/ui.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/ui.py Wed Jan 04 16:02:22 2023 +0100
@@ -19,6 +19,21 @@
import sys
import traceback
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ List,
+ NoReturn,
+ Optional,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+)
+
from .i18n import _
from .node import hex
from .pycompat import (
@@ -48,15 +63,23 @@
urlutil,
)
+_ConfigItems = Dict[Tuple[bytes, bytes], object] # {(section, name) : value}
+# The **opts args of the various write() methods can be basically anything, but
+# there's no way to express it as "anything but str". So type it to be the
+# handful of known types that are used.
+_MsgOpts = Union[bytes, bool, List["_PromptChoice"]]
+_PromptChoice = Tuple[bytes, bytes]
+_Tui = TypeVar('_Tui', bound="ui")
+
urlreq = util.urlreq
# for use with str.translate(None, _keepalnum), to keep just alphanumerics
-_keepalnum = b''.join(
+_keepalnum: bytes = b''.join(
c for c in map(pycompat.bytechr, range(256)) if not c.isalnum()
)
# The config knobs that will be altered (if unset) by ui.tweakdefaults.
-tweakrc = b"""
+tweakrc: bytes = b"""
[ui]
# The rollback command is dangerous. As a rule, don't use it.
rollback = False
@@ -83,7 +106,7 @@
word-diff = 1
"""
-samplehgrcs = {
+samplehgrcs: Dict[bytes, bytes] = {
b'user': b"""# example user config (see 'hg help config' for more info)
[ui]
# name and email, e.g.
@@ -172,7 +195,7 @@
class httppasswordmgrdbproxy:
"""Delays loading urllib2 until it's needed."""
- def __init__(self):
+ def __init__(self) -> None:
self._mgr = None
def _get_mgr(self):
@@ -195,7 +218,7 @@
)
-def _catchterm(*args):
+def _catchterm(*args) -> NoReturn:
raise error.SignalInterrupt
@@ -204,11 +227,11 @@
_unset = object()
# _reqexithandlers: callbacks run at the end of a request
-_reqexithandlers = []
+_reqexithandlers: List = []
class ui:
- def __init__(self, src=None):
+ def __init__(self, src: Optional["ui"] = None) -> None:
"""Create a fresh new ui object if no src given
Use uimod.ui.load() to create a ui which knows global and user configs.
@@ -303,13 +326,13 @@
if k in self.environ:
self._exportableenviron[k] = self.environ[k]
- def _new_source(self):
+ def _new_source(self) -> None:
self._ocfg.new_source()
self._tcfg.new_source()
self._ucfg.new_source()
@classmethod
- def load(cls):
+ def load(cls: Type[_Tui]) -> _Tui:
"""Create a ui and load global and user configs"""
u = cls()
# we always trust global config files and environment variables
@@ -335,7 +358,7 @@
u._new_source() # anything after that is a different level
return u
- def _maybetweakdefaults(self):
+ def _maybetweakdefaults(self) -> None:
if not self.configbool(b'ui', b'tweakdefaults'):
return
if self._tweaked or self.plain(b'tweakdefaults'):
@@ -355,17 +378,17 @@
if not self.hasconfig(section, name):
self.setconfig(section, name, value, b"<tweakdefaults>")
- def copy(self):
+ def copy(self: _Tui) -> _Tui:
return self.__class__(self)
- def resetstate(self):
+ def resetstate(self) -> None:
"""Clear internal state that shouldn't persist across commands"""
if self._progbar:
self._progbar.resetstate() # reset last-print time of progress bar
self.httppasswordmgrdb = httppasswordmgrdbproxy()
@contextlib.contextmanager
- def timeblockedsection(self, key):
+ def timeblockedsection(self, key: bytes):
# this is open-coded below - search for timeblockedsection to find them
starttime = util.timer()
try:
@@ -410,10 +433,10 @@
finally:
self._uninterruptible = False
- def formatter(self, topic, opts):
+ def formatter(self, topic: bytes, opts):
return formatter.formatter(self, self, topic, opts)
- def _trusted(self, fp, f):
+ def _trusted(self, fp, f: bytes) -> bool:
st = util.fstat(fp)
if util.isowner(st):
return True
@@ -439,7 +462,7 @@
def read_resource_config(
self, name, root=None, trust=False, sections=None, remap=None
- ):
+ ) -> None:
try:
fp = resourceutil.open_resource(name[0], name[1])
except IOError:
@@ -453,7 +476,7 @@
def readconfig(
self, filename, root=None, trust=False, sections=None, remap=None
- ):
+ ) -> None:
try:
fp = open(filename, 'rb')
except IOError:
@@ -465,7 +488,7 @@
def _readconfig(
self, filename, fp, root=None, trust=False, sections=None, remap=None
- ):
+ ) -> None:
with fp:
cfg = config.config()
trusted = sections or trust or self._trusted(fp, filename)
@@ -481,7 +504,9 @@
self._applyconfig(cfg, trusted, root)
- def applyconfig(self, configitems, source=b"", root=None):
+ def applyconfig(
+ self, configitems: _ConfigItems, source=b"", root=None
+ ) -> None:
"""Add configitems from a non-file source. Unlike with ``setconfig()``,
they can be overridden by subsequent config file reads. The items are
in the same format as ``configoverride()``, namely a dict of the
@@ -497,7 +522,7 @@
self._applyconfig(cfg, True, root)
- def _applyconfig(self, cfg, trusted, root):
+ def _applyconfig(self, cfg, trusted, root) -> None:
if self.plain():
for k in (
b'debug',
@@ -540,7 +565,7 @@
root = os.path.expanduser(b'~')
self.fixconfig(root=root)
- def fixconfig(self, root=None, section=None):
+ def fixconfig(self, root=None, section=None) -> None:
if section in (None, b'paths'):
# expand vars and ~
# translate paths relative to root (or home) into absolute paths
@@ -603,12 +628,12 @@
self._ucfg.backup(section, item),
)
- def restoreconfig(self, data):
+ def restoreconfig(self, data) -> None:
self._ocfg.restore(data[0])
self._tcfg.restore(data[1])
self._ucfg.restore(data[2])
- def setconfig(self, section, name, value, source=b''):
+ def setconfig(self, section, name, value, source=b'') -> None:
for cfg in (self._ocfg, self._tcfg, self._ucfg):
cfg.set(section, name, value, source)
self.fixconfig(section=section)
@@ -994,7 +1019,7 @@
for name, value in self.configitems(section, untrusted):
yield section, name, value
- def plain(self, feature=None):
+ def plain(self, feature: Optional[bytes] = None) -> bool:
"""is plain mode active?
Plain mode means that all configuration variables which affect
@@ -1068,46 +1093,16 @@
)
return user
- def shortuser(self, user):
+ def shortuser(self, user: bytes) -> bytes:
"""Return a short representation of a user name or email address."""
if not self.verbose:
user = stringutil.shortuser(user)
return user
- def expandpath(self, loc, default=None):
- """Return repository location relative to cwd or from [paths]"""
- msg = b'ui.expandpath is deprecated, use `get_*` functions from urlutil'
- self.deprecwarn(msg, b'6.0')
- try:
- p = self.getpath(loc)
- if p:
- return p.rawloc
- except error.RepoError:
- pass
-
- if default:
- try:
- p = self.getpath(default)
- if p:
- return p.rawloc
- except error.RepoError:
- pass
-
- return loc
-
@util.propertycache
def paths(self):
return urlutil.paths(self)
- def getpath(self, *args, **kwargs):
- """see paths.getpath for details
-
- This method exist as `getpath` need a ui for potential warning message.
- """
- msg = b'ui.getpath is deprecated, use `get_*` functions from urlutil'
- self.deprecwarn(msg, b'6.0')
- return self.paths.getpath(self, *args, **kwargs)
-
@property
def fout(self):
return self._fout
@@ -1146,14 +1141,18 @@
self._fmsgout, self._fmsgerr = _selectmsgdests(self)
@contextlib.contextmanager
- def silent(self, error=False, subproc=False, labeled=False):
+ def silent(
+ self, error: bool = False, subproc: bool = False, labeled: bool = False
+ ):
self.pushbuffer(error=error, subproc=subproc, labeled=labeled)
try:
yield
finally:
self.popbuffer()
- def pushbuffer(self, error=False, subproc=False, labeled=False):
+ def pushbuffer(
+ self, error: bool = False, subproc: bool = False, labeled: bool = False
+ ) -> None:
"""install a buffer to capture standard output of the ui object
If error is True, the error output will be captured too.
@@ -1172,7 +1171,7 @@
self._bufferstates.append((error, subproc, labeled))
self._bufferapplylabels = labeled
- def popbuffer(self):
+ def popbuffer(self) -> bytes:
'''pop the last buffer and return the buffered output'''
self._bufferstates.pop()
if self._bufferstates:
@@ -1182,25 +1181,25 @@
return b"".join(self._buffers.pop())
- def _isbuffered(self, dest):
+ def _isbuffered(self, dest) -> bool:
if dest is self._fout:
return bool(self._buffers)
if dest is self._ferr:
return bool(self._bufferstates and self._bufferstates[-1][0])
return False
- def canwritewithoutlabels(self):
+ def canwritewithoutlabels(self) -> bool:
'''check if write skips the label'''
if self._buffers and not self._bufferapplylabels:
return True
return self._colormode is None
- def canbatchlabeledwrites(self):
+ def canbatchlabeledwrites(self) -> bool:
'''check if write calls with labels are batchable'''
# Windows color printing is special, see ``write``.
return self._colormode != b'win32'
- def write(self, *args, **opts):
+ def write(self, *args: bytes, **opts: _MsgOpts) -> None:
"""write args to output
By default, this method simply writes to the buffer or stdout.
@@ -1258,10 +1257,10 @@
util.timer() - starttime
) * 1000
- def write_err(self, *args, **opts):
+ def write_err(self, *args: bytes, **opts: _MsgOpts) -> None:
self._write(self._ferr, *args, **opts)
- def _write(self, dest, *args, **opts):
+ def _write(self, dest, *args: bytes, **opts: _MsgOpts) -> None:
# update write() as well if you touch this code
if self._isbuffered(dest):
label = opts.get('label', b'')
@@ -1272,7 +1271,7 @@
else:
self._writenobuf(dest, *args, **opts)
- def _writenobuf(self, dest, *args, **opts):
+ def _writenobuf(self, dest, *args: bytes, **opts: _MsgOpts) -> None:
# update write() as well if you touch this code
if not opts.get('keepprogressbar', False):
self._progclear()
@@ -1314,7 +1313,7 @@
util.timer() - starttime
) * 1000
- def _writemsg(self, dest, *args, **opts):
+ def _writemsg(self, dest, *args: bytes, **opts: _MsgOpts) -> None:
timestamp = self.showtimestamp and opts.get('type') in {
b'debug',
b'error',
@@ -1331,10 +1330,10 @@
if timestamp:
dest.flush()
- def _writemsgnobuf(self, dest, *args, **opts):
+ def _writemsgnobuf(self, dest, *args: bytes, **opts: _MsgOpts) -> None:
_writemsgwith(self._writenobuf, dest, *args, **opts)
- def flush(self):
+ def flush(self) -> None:
# opencode timeblockedsection because this is a critical path
starttime = util.timer()
try:
@@ -1354,7 +1353,7 @@
util.timer() - starttime
) * 1000
- def _isatty(self, fh):
+ def _isatty(self, fh) -> bool:
if self.configbool(b'ui', b'nontty'):
return False
return procutil.isatty(fh)
@@ -1392,10 +1391,10 @@
finally:
self.restorefinout(fin, fout)
- def disablepager(self):
+ def disablepager(self) -> None:
self._disablepager = True
- def pager(self, command):
+ def pager(self, command: bytes) -> None:
"""Start a pager for subsequent command output.
Commands which produce a long stream of output should call
@@ -1476,7 +1475,7 @@
# warning about a missing pager command.
self.disablepager()
- def _runpager(self, command, env=None):
+ def _runpager(self, command: bytes, env=None) -> bool:
"""Actually start the pager and set up file descriptors.
This is separate in part so that extensions (like chg) can
@@ -1556,7 +1555,7 @@
self._exithandlers.append((func, args, kwargs))
return func
- def interface(self, feature):
+ def interface(self, feature: bytes) -> bytes:
"""what interface to use for interactive console features?
The interface is controlled by the value of `ui.interface` but also by
@@ -1611,12 +1610,12 @@
defaultinterface = b"text"
i = self.config(b"ui", b"interface")
if i in alldefaults:
- defaultinterface = i
+ defaultinterface = cast(bytes, i) # cast to help pytype
- choseninterface = defaultinterface
+ choseninterface: bytes = defaultinterface
f = self.config(b"ui", b"interface.%s" % feature)
if f in availableinterfaces:
- choseninterface = f
+ choseninterface = cast(bytes, f) # cast to help pytype
if i is not None and defaultinterface != i:
if f is not None:
@@ -1656,7 +1655,7 @@
return i
- def termwidth(self):
+ def termwidth(self) -> int:
"""how wide is the terminal in columns?"""
if b'COLUMNS' in encoding.environ:
try:
@@ -1693,7 +1692,11 @@
return i
- def _readline(self, prompt=b' ', promptopts=None):
+ def _readline(
+ self,
+ prompt: bytes = b' ',
+ promptopts: Optional[Dict[str, _MsgOpts]] = None,
+ ) -> bytes:
# Replacing stdin/stdout temporarily is a hard problem on Python 3
# because they have to be text streams with *no buffering*. Instead,
# we use rawinput() only if call_readline() will be invoked by
@@ -1748,14 +1751,38 @@
return line
+ if pycompat.TYPE_CHECKING:
+
+ @overload
+ def prompt(self, msg: bytes, default: bytes) -> bytes:
+ pass
+
+ @overload
+ def prompt(self, msg: bytes, default: None) -> Optional[bytes]:
+ pass
+
def prompt(self, msg, default=b"y"):
"""Prompt user with msg, read response.
If ui is not interactive, the default is returned.
"""
return self._prompt(msg, default=default)
- def _prompt(self, msg, **opts):
- default = opts['default']
+ if pycompat.TYPE_CHECKING:
+
+ @overload
+ def _prompt(
+ self, msg: bytes, default: bytes, **opts: _MsgOpts
+ ) -> bytes:
+ pass
+
+ @overload
+ def _prompt(
+ self, msg: bytes, default: None, **opts: _MsgOpts
+ ) -> Optional[bytes]:
+ pass
+
+ def _prompt(self, msg, default=b'y', **opts):
+ opts = {**opts, 'default': default}
if not self.interactive():
self._writemsg(self._fmsgout, msg, b' ', type=b'prompt', **opts)
self._writemsg(
@@ -1775,7 +1802,7 @@
raise error.ResponseExpected()
@staticmethod
- def extractchoices(prompt):
+ def extractchoices(prompt: bytes) -> Tuple[bytes, List[_PromptChoice]]:
"""Extract prompt message and list of choices from specified prompt.
This returns tuple "(message, choices)", and "choices" is the
@@ -1795,6 +1822,9 @@
# choices containing spaces, ASCII, or basically anything
# except an ampersand followed by a character.
m = re.match(br'(?s)(.+?)\$\$([^$]*&[^ $].*)', prompt)
+
+ assert m is not None # help pytype
+
msg = m.group(1)
choices = [p.strip(b' ') for p in m.group(2).split(b'$$')]
@@ -1804,7 +1834,7 @@
return (msg, [choicetuple(s) for s in choices])
- def promptchoice(self, prompt, default=0):
+ def promptchoice(self, prompt: bytes, default: int = 0) -> int:
"""Prompt user with a message, read response, and ensure it matches
one of the provided choices. The prompt is formatted as follows:
@@ -1824,7 +1854,9 @@
# TODO: shouldn't it be a warning?
self._writemsg(self._fmsgout, _(b"unrecognized response\n"))
- def getpass(self, prompt=None, default=None):
+ def getpass(
+ self, prompt: Optional[bytes] = None, default: Optional[bytes] = None
+ ) -> Optional[bytes]:
if not self.interactive():
return default
try:
@@ -1847,7 +1879,7 @@
except EOFError:
raise error.ResponseExpected()
- def status(self, *msg, **opts):
+ def status(self, *msg: bytes, **opts: _MsgOpts) -> None:
"""write status message to output (if ui.quiet is False)
This adds an output label of "ui.status".
@@ -1855,21 +1887,21 @@
if not self.quiet:
self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
- def warn(self, *msg, **opts):
+ def warn(self, *msg: bytes, **opts: _MsgOpts) -> None:
"""write warning message to output (stderr)
This adds an output label of "ui.warning".
"""
self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
- def error(self, *msg, **opts):
+ def error(self, *msg: bytes, **opts: _MsgOpts) -> None:
"""write error message to output (stderr)
This adds an output label of "ui.error".
"""
self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
- def note(self, *msg, **opts):
+ def note(self, *msg: bytes, **opts: _MsgOpts) -> None:
"""write note to output (if ui.verbose is True)
This adds an output label of "ui.note".
@@ -1877,7 +1909,7 @@
if self.verbose:
self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
- def debug(self, *msg, **opts):
+ def debug(self, *msg: bytes, **opts: _MsgOpts) -> None:
"""write debug message to output (if ui.debugflag is True)
This adds an output label of "ui.debug".
@@ -1894,14 +1926,14 @@
def edit(
self,
- text,
- user,
- extra=None,
+ text: bytes,
+ user: bytes,
+ extra: Optional[Dict[bytes, Any]] = None, # TODO: value type of bytes?
editform=None,
pending=None,
- repopath=None,
- action=None,
- ):
+ repopath: Optional[bytes] = None,
+ action: Optional[bytes] = None,
+ ) -> bytes:
if action is None:
self.develwarn(
b'action is None but will soon be a required '
@@ -1970,13 +2002,13 @@
def system(
self,
- cmd,
+ cmd: bytes,
environ=None,
- cwd=None,
- onerr=None,
- errprefix=None,
- blockedtag=None,
- ):
+ cwd: Optional[bytes] = None,
+ onerr: Optional[Callable[[bytes], Exception]] = None,
+ errprefix: Optional[bytes] = None,
+ blockedtag: Optional[bytes] = None,
+ ) -> int:
"""execute shell command with appropriate output stream. command
output will be redirected if fout is not stdout.
@@ -2003,12 +2035,12 @@
raise onerr(errmsg)
return rc
- def _runsystem(self, cmd, environ, cwd, out):
+ def _runsystem(self, cmd: bytes, environ, cwd: Optional[bytes], out) -> int:
"""actually execute the given shell command (can be overridden by
extensions like chg)"""
return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
- def traceback(self, exc=None, force=False):
+ def traceback(self, exc=None, force: bool = False):
"""print exception traceback if traceback printing enabled or forced.
only to call in exception handler. returns true if traceback
printed."""
@@ -2054,7 +2086,7 @@
)
@util.propertycache
- def _progbar(self):
+ def _progbar(self) -> Optional[progress.progbar]:
"""setup the progbar singleton to the ui object"""
if (
self.quiet
@@ -2065,14 +2097,16 @@
return None
return getprogbar(self)
- def _progclear(self):
+ def _progclear(self) -> None:
"""clear progress bar output if any. use it before any output"""
if not haveprogbar(): # nothing loaded yet
return
if self._progbar is not None and self._progbar.printed:
self._progbar.clear()
- def makeprogress(self, topic, unit=b"", total=None):
+ def makeprogress(
+ self, topic: bytes, unit: bytes = b"", total: Optional[int] = None
+ ) -> scmutil.progress:
"""Create a progress helper for the specified topic"""
if getattr(self._fmsgerr, 'structured', False):
# channel for machine-readable output with metadata, just send
@@ -2104,7 +2138,7 @@
"""Returns a logger of the given name; or None if not registered"""
return self._loggers.get(name)
- def setlogger(self, name, logger):
+ def setlogger(self, name, logger) -> None:
"""Install logger which can be identified later by the given name
More than one loggers can be registered. Use extension or module
@@ -2112,7 +2146,7 @@
"""
self._loggers[name] = logger
- def log(self, event, msgfmt, *msgargs, **opts):
+ def log(self, event, msgfmt, *msgargs, **opts) -> None:
"""hook for logging facility extensions
event should be a readily-identifiable subsystem, which will
@@ -2139,7 +2173,7 @@
finally:
self._loggers = registeredloggers
- def label(self, msg, label):
+ def label(self, msg: bytes, label: bytes) -> bytes:
"""style msg based on supplied label
If some color mode is enabled, this will add the necessary control
@@ -2153,7 +2187,9 @@
return color.colorlabel(self, msg, label)
return msg
- def develwarn(self, msg, stacklevel=1, config=None):
+ def develwarn(
+ self, msg: bytes, stacklevel: int = 1, config: Optional[bytes] = None
+ ) -> None:
"""issue a developer warning message
Use 'stacklevel' to report the offender some layers further up in the
@@ -2185,7 +2221,9 @@
del curframe
del calframe
- def deprecwarn(self, msg, version, stacklevel=2):
+ def deprecwarn(
+ self, msg: bytes, version: bytes, stacklevel: int = 2
+ ) -> None:
"""issue a deprecation warning
- msg: message explaining what is deprecated and how to upgrade,
@@ -2209,7 +2247,7 @@
return self._exportableenviron
@contextlib.contextmanager
- def configoverride(self, overrides, source=b""):
+ def configoverride(self, overrides: _ConfigItems, source: bytes = b""):
"""Context manager for temporary config overrides
`overrides` must be a dict of the following structure:
{(section, name) : value}"""
@@ -2227,7 +2265,7 @@
if (b'ui', b'quiet') in overrides:
self.fixconfig(section=b'ui')
- def estimatememory(self):
+ def estimatememory(self) -> Optional[int]:
"""Provide an estimate for the available system memory in Bytes.
This can be overriden via ui.available-memory. It returns None, if
@@ -2246,10 +2284,10 @@
# we instantiate one globally shared progress bar to avoid
# competing progress bars when multiple UI objects get created
-_progresssingleton = None
+_progresssingleton: Optional[progress.progbar] = None
-def getprogbar(ui):
+def getprogbar(ui: ui) -> progress.progbar:
global _progresssingleton
if _progresssingleton is None:
# passing 'ui' object to the singleton is fishy,
@@ -2258,11 +2296,11 @@
return _progresssingleton
-def haveprogbar():
+def haveprogbar() -> bool:
return _progresssingleton is not None
-def _selectmsgdests(ui):
+def _selectmsgdests(ui: ui):
name = ui.config(b'ui', b'message-output')
if name == b'channel':
if ui.fmsg:
@@ -2278,7 +2316,7 @@
raise error.Abort(b'invalid ui.message-output destination: %s' % name)
-def _writemsgwith(write, dest, *args, **opts):
+def _writemsgwith(write, dest, *args: bytes, **opts: _MsgOpts) -> None:
"""Write ui message with the given ui._write*() function
The specified message type is translated to 'ui.<type>' label if the dest
--- a/mercurial/unionrepo.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/unionrepo.py Wed Jan 04 16:02:22 2023 +0100
@@ -257,8 +257,8 @@
def cancopy(self):
return False
- def peer(self):
- return unionpeer(self)
+ def peer(self, path=None):
+        return unionpeer(self, path=path)
def getcwd(self):
return encoding.getcwd() # always outside the repo
--- a/mercurial/util.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/util.py Wed Jan 04 16:02:22 2023 +0100
@@ -642,12 +642,12 @@
``read()`` and ``readline()``.
"""
- def _fillbuffer(self):
- res = super(observedbufferedinputpipe, self)._fillbuffer()
+ def _fillbuffer(self, size=_chunksize):
+ res = super(observedbufferedinputpipe, self)._fillbuffer(size=size)
fn = getattr(self._input._observer, 'osread', None)
if fn:
- fn(res, _chunksize)
+ fn(res, size)
return res
@@ -2542,6 +2542,7 @@
# delegated methods
self.read = self._fp.read
self.write = self._fp.write
+ self.writelines = self._fp.writelines
self.seek = self._fp.seek
self.tell = self._fp.tell
self.fileno = self._fp.fileno
@@ -3212,10 +3213,7 @@
The passed argument is anything that has a ``.read(N)`` method.
- >>> try:
- ... from StringIO import StringIO as BytesIO
- ... except ImportError:
- ... from io import BytesIO
+ >>> from io import BytesIO
>>> uvarintdecodestream(BytesIO(b'\\x00'))
0
>>> uvarintdecodestream(BytesIO(b'\\x01'))
--- a/mercurial/utils/procutil.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/utils/procutil.py Wed Jan 04 16:02:22 2023 +0100
@@ -18,6 +18,10 @@
import threading
import time
+from typing import (
+ BinaryIO,
+)
+
from ..i18n import _
from ..pycompat import (
getattr,
@@ -29,6 +33,7 @@
error,
policy,
pycompat,
+ typelib,
)
# Import like this to keep import-checker happy
@@ -118,8 +123,8 @@
return stream
-class WriteAllWrapper:
- def __init__(self, orig):
+class WriteAllWrapper(typelib.BinaryIO_Proxy):
+ def __init__(self, orig: BinaryIO):
self.orig = orig
def __getattr__(self, attr):
@@ -580,7 +585,7 @@
return _gethgcmd()
-def rundetached(args, condfn):
+def rundetached(args, condfn) -> int:
"""Execute the argument list in a detached process.
condfn is a callable which is called repeatedly and should return
@@ -616,6 +621,12 @@
if prevhandler is not None:
signal.signal(signal.SIGCHLD, prevhandler)
+ # pytype seems to get confused by not having a return in the finally
+ # block, and thinks the return value should be Optional[int] here. It
+ # appears to be https://github.com/google/pytype/issues/938, without
+ # the `with` clause.
+ pass # pytype: disable=bad-return-type
+
@contextlib.contextmanager
def uninterruptible(warn):
--- a/mercurial/utils/storageutil.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/utils/storageutil.py Wed Jan 04 16:02:22 2023 +0100
@@ -305,6 +305,7 @@
revisiondata=False,
assumehaveparentrevisions=False,
sidedata_helpers=None,
+ debug_info=None,
):
"""Generic implementation of ifiledata.emitrevisions().
@@ -370,6 +371,10 @@
``sidedata_helpers`` (optional)
If not None, means that sidedata should be included.
See `revlogutil.sidedata.get_sidedata_helpers`.
+
+    ``debug_info``
+       An optional dictionary to gather information about the bundling
+       process (if present, see config: debug.bundling.stats).
"""
fnode = store.node
@@ -407,31 +412,59 @@
if rev == nullrev:
continue
+ debug_delta_source = None
+ if debug_info is not None:
+ debug_info['revision-total'] += 1
+
node = fnode(rev)
p1rev, p2rev = parents(rev)
+ if debug_info is not None:
+ if p1rev != p2rev and p1rev != nullrev and p2rev != nullrev:
+ debug_info['merge-total'] += 1
+
if deltaparentfn:
deltaparentrev = deltaparentfn(rev)
+ if debug_info is not None:
+ if deltaparentrev == nullrev:
+ debug_info['available-full'] += 1
+ else:
+ debug_info['available-delta'] += 1
+
else:
deltaparentrev = nullrev
# Forced delta against previous mode.
if deltamode == repository.CG_DELTAMODE_PREV:
+ if debug_info is not None:
+ debug_delta_source = "prev"
baserev = prevrev
# We're instructed to send fulltext. Honor that.
elif deltamode == repository.CG_DELTAMODE_FULL:
+ if debug_info is not None:
+ debug_delta_source = "full"
baserev = nullrev
# We're instructed to use p1. Honor that
elif deltamode == repository.CG_DELTAMODE_P1:
+ if debug_info is not None:
+ debug_delta_source = "p1"
baserev = p1rev
# There is a delta in storage. We try to use that because it
# amounts to effectively copying data from storage and is
# therefore the fastest.
elif is_usable_base(deltaparentrev):
+ if debug_info is not None:
+ debug_delta_source = "storage"
+ baserev = deltaparentrev
+ elif deltaparentrev == nullrev:
+ if debug_info is not None:
+ debug_delta_source = "storage"
baserev = deltaparentrev
else:
+ if deltaparentrev != nullrev and debug_info is not None:
+ debug_info['denied-base-not-available'] += 1
# No guarantee the receiver has the delta parent, or Storage has a
# fulltext revision.
#
@@ -441,22 +474,37 @@
# be close to this revision content.
#
# note: we could optimize between p1 and p2 in merges cases.
- if is_usable_base(p1rev):
+ elif is_usable_base(p1rev):
+ if debug_info is not None:
+ debug_delta_source = "p1"
baserev = p1rev
# if p1 was not an option, try p2
elif is_usable_base(p2rev):
+ if debug_info is not None:
+ debug_delta_source = "p2"
baserev = p2rev
# Send delta against prev in despair
#
# using the closest available ancestors first might be better?
elif prevrev is not None:
+ if debug_info is not None:
+ debug_delta_source = "prev"
baserev = prevrev
else:
+ if debug_info is not None:
+ debug_delta_source = "full"
baserev = nullrev
# But we can't actually use our chosen delta base for whatever
# reason. Reset to fulltext.
- if baserev != nullrev and (candeltafn and not candeltafn(baserev, rev)):
+ if (
+ baserev != nullrev
+ and candeltafn is not None
+ and not candeltafn(baserev, rev)
+ ):
+ if debug_info is not None:
+ debug_delta_source = "full"
+ debug_info['denied-delta-candeltafn'] += 1
baserev = nullrev
revision = None
@@ -468,6 +516,9 @@
try:
revision = store.rawdata(node)
except error.CensoredNodeError as e:
+ if debug_info is not None:
+ debug_delta_source = "full"
+ debug_info['denied-delta-not-available'] += 1
revision = e.tombstone
if baserev != nullrev:
@@ -479,12 +530,46 @@
elif (
baserev == nullrev and deltamode != repository.CG_DELTAMODE_PREV
):
+ if debug_info is not None:
+ debug_info['computed-delta'] += 1 # close enough
+ debug_info['delta-full'] += 1
revision = store.rawdata(node)
emitted.add(rev)
else:
if revdifffn:
+ if debug_info is not None:
+ if debug_delta_source == "full":
+ debug_info['computed-delta'] += 1
+ debug_info['delta-full'] += 1
+ elif debug_delta_source == "prev":
+ debug_info['computed-delta'] += 1
+ debug_info['delta-against-prev'] += 1
+ elif debug_delta_source == "p1":
+ debug_info['computed-delta'] += 1
+ debug_info['delta-against-p1'] += 1
+ elif debug_delta_source == "storage":
+ debug_info['reused-storage-delta'] += 1
+ else:
+ assert False, 'unreachable'
+
delta = revdifffn(baserev, rev)
else:
+ if debug_info is not None:
+ if debug_delta_source == "full":
+ debug_info['computed-delta'] += 1
+ debug_info['delta-full'] += 1
+ elif debug_delta_source == "prev":
+ debug_info['computed-delta'] += 1
+ debug_info['delta-against-prev'] += 1
+ elif debug_delta_source == "p1":
+ debug_info['computed-delta'] += 1
+ debug_info['delta-against-p1'] += 1
+ elif debug_delta_source == "storage":
+                    # seems quite unlikely to happen
+ debug_info['computed-delta'] += 1
+ debug_info['reused-storage-delta'] += 1
+ else:
+ assert False, 'unreachable'
delta = mdiff.textdiff(
store.rawdata(baserev), store.rawdata(rev)
)
--- a/mercurial/utils/stringutil.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/utils/stringutil.py Wed Jan 04 16:02:22 2023 +0100
@@ -14,6 +14,11 @@
import textwrap
import types
+from typing import (
+ Optional,
+ overload,
+)
+
from ..i18n import _
from ..thirdparty import attr
@@ -30,6 +35,16 @@
regexbytesescapemap = {i: (b'\\' + i) for i in _respecial}
+@overload
+def reescape(pat: bytes) -> bytes:
+ ...
+
+
+@overload
+def reescape(pat: str) -> str:
+ ...
+
+
def reescape(pat):
"""Drop-in replacement for re.escape."""
# NOTE: it is intentional that this works on unicodes and not
@@ -45,12 +60,12 @@
return pat.encode('latin1')
-def pprint(o, bprefix=False, indent=0, level=0):
+def pprint(o, bprefix: bool = False, indent: int = 0, level: int = 0) -> bytes:
"""Pretty print an object."""
return b''.join(pprintgen(o, bprefix=bprefix, indent=indent, level=level))
-def pprintgen(o, bprefix=False, indent=0, level=0):
+def pprintgen(o, bprefix: bool = False, indent: int = 0, level: int = 0):
"""Pretty print an object to a generator of atoms.
``bprefix`` is a flag influencing whether bytestrings are preferred with
@@ -250,7 +265,7 @@
yield pycompat.byterepr(o)
-def prettyrepr(o):
+def prettyrepr(o) -> bytes:
"""Pretty print a representation of a possibly-nested object"""
lines = []
rs = pycompat.byterepr(o)
@@ -281,7 +296,7 @@
return b'\n'.join(b' ' * l + s for l, s in lines)
-def buildrepr(r):
+def buildrepr(r) -> bytes:
"""Format an optional printable representation from unexpanded bits
======== =================================
@@ -305,12 +320,12 @@
return pprint(r)
-def binary(s):
+def binary(s: bytes) -> bool:
"""return true if a string is binary data"""
return bool(s and b'\0' in s)
-def _splitpattern(pattern):
+def _splitpattern(pattern: bytes):
if pattern.startswith(b're:'):
return b're', pattern[3:]
elif pattern.startswith(b'literal:'):
@@ -318,7 +333,7 @@
return b'literal', pattern
-def stringmatcher(pattern, casesensitive=True):
+def stringmatcher(pattern: bytes, casesensitive: bool = True):
"""
accepts a string, possibly starting with 're:' or 'literal:' prefix.
returns the matcher name, pattern, and matcher function.
@@ -379,7 +394,7 @@
raise error.ProgrammingError(b'unhandled pattern kind: %s' % kind)
-def substringregexp(pattern, flags=0):
+def substringregexp(pattern: bytes, flags: int = 0):
"""Build a regexp object from a string pattern possibly starting with
're:' or 'literal:' prefix.
@@ -431,7 +446,7 @@
raise error.ProgrammingError(b'unhandled pattern kind: %s' % kind)
-def shortuser(user):
+def shortuser(user: bytes) -> bytes:
"""Return a short representation of a user name or email address."""
f = user.find(b'@')
if f >= 0:
@@ -448,7 +463,7 @@
return user
-def emailuser(user):
+def emailuser(user: bytes) -> bytes:
"""Return the user portion of an email address."""
f = user.find(b'@')
if f >= 0:
@@ -459,7 +474,7 @@
return user
-def email(author):
+def email(author: bytes) -> bytes:
'''get email of author.'''
r = author.find(b'>')
if r == -1:
@@ -467,7 +482,7 @@
return author[author.find(b'<') + 1 : r]
-def person(author):
+def person(author: bytes) -> bytes:
"""Returns the name before an email address,
interpreting it as per RFC 5322
@@ -612,7 +627,7 @@
return mailmap
-def mapname(mailmap, author):
+def mapname(mailmap, author: bytes) -> bytes:
"""Returns the author field according to the mailmap cache, or
the original author field.
@@ -663,7 +678,7 @@
_correctauthorformat = remod.compile(br'^[^<]+\s<[^<>]+@[^<>]+>$')
-def isauthorwellformed(author):
+def isauthorwellformed(author: bytes) -> bool:
"""Return True if the author field is well formed
(ie "Contributor Name <contrib@email.dom>")
@@ -685,7 +700,7 @@
return _correctauthorformat.match(author) is not None
-def firstline(text):
+def firstline(text: bytes) -> bytes:
"""Return the first line of the input"""
# Try to avoid running splitlines() on the whole string
i = text.find(b'\n')
@@ -697,21 +712,26 @@
return b''
-def ellipsis(text, maxlength=400):
+def ellipsis(text: bytes, maxlength: int = 400) -> bytes:
"""Trim string to at most maxlength (default: 400) columns in display."""
return encoding.trim(text, maxlength, ellipsis=b'...')
-def escapestr(s):
+def escapestr(s: bytes) -> bytes:
+ # "bytes" is also a typing shortcut for bytes, bytearray, and memoryview
if isinstance(s, memoryview):
s = bytes(s)
# call underlying function of s.encode('string_escape') directly for
# Python 3 compatibility
+ # pytype: disable=bad-return-type
return codecs.escape_encode(s)[0] # pytype: disable=module-attr
+ # pytype: enable=bad-return-type
-def unescapestr(s):
+def unescapestr(s: bytes) -> bytes:
+ # pytype: disable=bad-return-type
return codecs.escape_decode(s)[0] # pytype: disable=module-attr
+ # pytype: enable=bad-return-type
def forcebytestr(obj):
@@ -724,7 +744,7 @@
return pycompat.bytestr(encoding.strtolocal(str(obj)))
-def uirepr(s):
+def uirepr(s: bytes) -> bytes:
# Avoid double backslash in Windows path repr()
return pycompat.byterepr(pycompat.bytestr(s)).replace(b'\\\\', b'\\')
@@ -838,7 +858,9 @@
return tw(**kwargs)
-def wrap(line, width, initindent=b'', hangindent=b''):
+def wrap(
+ line: bytes, width: int, initindent: bytes = b'', hangindent: bytes = b''
+) -> bytes:
maxindent = max(len(hangindent), len(initindent))
if width <= maxindent:
# adjust for weird terminal size
@@ -875,7 +897,7 @@
}
-def parsebool(s):
+def parsebool(s: bytes) -> Optional[bool]:
"""Parse s into a boolean.
If s is not a valid boolean, returns None.
@@ -883,7 +905,8 @@
return _booleans.get(s.lower(), None)
-def parselist(value):
+# TODO: make arg mandatory (and fix code below?)
+def parselist(value: Optional[bytes]):
"""parse a configuration value as a list of comma/space separated strings
>>> parselist(b'this,is "a small" ,test')
@@ -973,7 +996,7 @@
return result or []
-def evalpythonliteral(s):
+def evalpythonliteral(s: bytes):
"""Evaluate a string containing a Python literal expression"""
# We could backport our tokenizer hack to rewrite '' to u'' if we want
return ast.literal_eval(s.decode('latin1'))
--- a/mercurial/utils/urlutil.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/utils/urlutil.py Wed Jan 04 16:02:22 2023 +0100
@@ -24,6 +24,10 @@
stringutil,
)
+from ..revlogutils import (
+ constants as revlog_constants,
+)
+
if pycompat.TYPE_CHECKING:
from typing import (
@@ -241,7 +245,7 @@
u.user = self.user
u.passwd = self.passwd
u.host = self.host
- u.path = self.path
+ u.port = self.port
u.query = self.query
u.fragment = self.fragment
u._localpath = self._localpath
@@ -480,10 +484,10 @@
if not dests:
if b'default-push' in ui.paths:
for p in ui.paths[b'default-push']:
- yield p
+ yield p.get_push_variant()
elif b'default' in ui.paths:
for p in ui.paths[b'default']:
- yield p
+ yield p.get_push_variant()
else:
raise error.ConfigError(
_(b'default repository not configured!'),
@@ -493,14 +497,14 @@
for dest in dests:
if dest in ui.paths:
for p in ui.paths[dest]:
- yield p
+ yield p.get_push_variant()
else:
path = try_path(ui, dest)
if path is None:
msg = _(b'repository %s does not exist')
msg %= dest
raise error.RepoError(msg)
- yield path
+ yield path.get_push_variant()
def get_pull_paths(repo, ui, sources):
@@ -522,8 +526,6 @@
This is useful for command and action that does not support multiple
destination (yet).
- Note that for now, we cannot get multiple destination so this function is "trivial".
-
The `action` parameter will be used for the error message.
"""
if dest is None:
@@ -544,80 +546,61 @@
return dests[0]
-def get_unique_pull_path(action, repo, ui, source=None, default_branches=()):
+def get_unique_pull_path_obj(action, ui, source=None):
"""return a unique `(path, branch)` or abort if multiple are found
This is useful for command and action that does not support multiple
destination (yet).
- Note that for now, we cannot get multiple destination so this function is "trivial".
+ The `action` parameter will be used for the error message.
- The `action` parameter will be used for the error message.
+ note: Ideally, this function would be called `get_unique_pull_path` to
+ mirror the `get_unique_push_path`, but the name was already taken.
"""
- urls = []
- if source is None:
- if b'default' in ui.paths:
- urls.extend(p.rawloc for p in ui.paths[b'default'])
- else:
- # XXX this is the historical default behavior, but that is not
- # great, consider breaking BC on this.
- urls.append(b'default')
- else:
- if source in ui.paths:
- urls.extend(p.rawloc for p in ui.paths[source])
- else:
- # Try to resolve as a local path or URI.
- path = try_path(ui, source)
- if path is not None:
- urls.append(path.rawloc)
- else:
- urls.append(source)
- if len(urls) != 1:
+ sources = []
+ if source is not None:
+ sources.append(source)
+
+ pull_paths = list(get_pull_paths(None, ui, sources=sources))
+ path_count = len(pull_paths)
+ if path_count != 1:
if source is None:
msg = _(
b"default path points to %d urls while %s only supports one"
)
- msg %= (len(urls), action)
+ msg %= (path_count, action)
else:
msg = _(b"path points to %d urls while %s only supports one: %s")
- msg %= (len(urls), action, source)
+ msg %= (path_count, action, source)
raise error.Abort(msg)
- return parseurl(urls[0], default_branches)
+ return pull_paths[0]
+
+
+def get_unique_pull_path(action, repo, ui, source=None, default_branches=()):
+ """return a unique `(url, branch)` or abort if multiple are found
+
+ See `get_unique_pull_path_obj` for details.
+ """
+ path = get_unique_pull_path_obj(action, ui, source=source)
+ return parseurl(path.rawloc, default_branches)
-def get_clone_path(ui, source, default_branches=()):
- """return the `(origsource, path, branch)` selected as clone source"""
- urls = []
- if source is None:
- if b'default' in ui.paths:
- urls.extend(p.rawloc for p in ui.paths[b'default'])
- else:
- # XXX this is the historical default behavior, but that is not
- # great, consider breaking BC on this.
- urls.append(b'default')
- else:
- if source in ui.paths:
- urls.extend(p.rawloc for p in ui.paths[source])
- else:
- # Try to resolve as a local path or URI.
- path = try_path(ui, source)
- if path is not None:
- urls.append(path.rawloc)
- else:
- urls.append(source)
- if len(urls) != 1:
- if source is None:
- msg = _(
- b"default path points to %d urls while only one is supported"
- )
- msg %= len(urls)
- else:
- msg = _(b"path points to %d urls while only one is supported: %s")
- msg %= (len(urls), source)
- raise error.Abort(msg)
- url = urls[0]
- clone_path, branch = parseurl(url, default_branches)
- return url, clone_path, branch
+def get_clone_path_obj(ui, source):
+ """return the `(origsource, url, branch)` selected as clone source"""
+ if source == b'':
+ return None
+ return get_unique_pull_path_obj(b'clone', ui, source=source)
+
+
+def get_clone_path(ui, source, default_branches=None):
+ """return the `(origsource, url, branch)` selected as clone source"""
+ path = get_clone_path_obj(ui, source)
+ if path is None:
+ return (b'', b'', (None, default_branches))
+ if default_branches is None:
+ default_branches = []
+ branches = (path.branch, default_branches)
+ return path.rawloc, path.loc, branches
def parseurl(path, branches=None):
@@ -673,43 +656,6 @@
new_paths.extend(_chain_path(p, ui, self))
self[name] = new_paths
- def getpath(self, ui, name, default=None):
- """Return a ``path`` from a string, falling back to default.
-
- ``name`` can be a named path or locations. Locations are filesystem
- paths or URIs.
-
- Returns None if ``name`` is not a registered path, a URI, or a local
- path to a repo.
- """
- msg = b'getpath is deprecated, use `get_*` functions from urlutil'
- ui.deprecwarn(msg, b'6.0')
- # Only fall back to default if no path was requested.
- if name is None:
- if not default:
- default = ()
- elif not isinstance(default, (tuple, list)):
- default = (default,)
- for k in default:
- try:
- return self[k][0]
- except KeyError:
- continue
- return None
-
- # Most likely empty string.
- # This may need to raise in the future.
- if not name:
- return None
- if name in self:
- return self[name][0]
- else:
- # Try to resolve as a local path or URI.
- path = try_path(ui, name)
- if path is None:
- raise error.RepoError(_(b'repository %s does not exist') % name)
- return path.rawloc
-
_pathsuboptions = {}
@@ -736,7 +682,7 @@
return register
-@pathsuboption(b'pushurl', b'pushloc')
+@pathsuboption(b'pushurl', b'_pushloc')
def pushurlpathoption(ui, path, value):
u = url(value)
# Actually require a URL.
@@ -788,6 +734,27 @@
return value
+DELTA_REUSE_POLICIES = {
+ b'default': None,
+ b'try-base': revlog_constants.DELTA_BASE_REUSE_TRY,
+ b'no-reuse': revlog_constants.DELTA_BASE_REUSE_NO,
+ b'forced': revlog_constants.DELTA_BASE_REUSE_FORCE,
+}
+
+
+@pathsuboption(b'delta-reuse-policy', b'delta_reuse_policy')
+def delta_reuse_policy(ui, path, value):
+ if value not in DELTA_REUSE_POLICIES:
+ path_name = path.name
+ if path_name is None:
+ # this is an "anonymous" path, config comes from the global one
+ path_name = b'*'
+ msg = _(b'(paths.%s:delta-reuse-policy has unknown value: "%s")\n')
+ msg %= (path_name, value)
+ ui.warn(msg)
+ return DELTA_REUSE_POLICIES.get(value)
+
+
@pathsuboption(b'multi-urls', b'multi_urls')
def multiurls_pathoption(ui, path, value):
res = stringutil.parsebool(value)
@@ -848,7 +815,8 @@
``ui`` is the ``ui`` instance the path is coming from.
``name`` is the symbolic name of the path.
``rawloc`` is the raw location, as defined in the config.
- ``pushloc`` is the raw locations pushes should be made to.
+ ``_pushloc`` is the raw locations pushes should be made to.
+ (see the `get_push_variant` method)
If ``name`` is not defined, we require that the location be a) a local
filesystem path with a .hg directory or b) a URL. If not,
@@ -864,21 +832,11 @@
if not rawloc:
raise ValueError(b'rawloc must be defined')
- # Locations may define branches via syntax <base>#<branch>.
- u = url(rawloc)
- branch = None
- if u.fragment:
- branch = u.fragment
- u.fragment = None
+ self.name = name
- self.url = u
- # the url from the config/command line before dealing with `path://`
- self.raw_url = u.copy()
- self.branch = branch
-
- self.name = name
- self.rawloc = rawloc
- self.loc = b'%s' % u
+ # set by path variant to point to their "non-push" version
+ self.main_path = None
+ self._setup_url(rawloc)
if validate_path:
self._validate_path()
@@ -892,16 +850,66 @@
self._apply_suboptions(ui, sub_opts)
- def copy(self):
- """make a copy of this path object"""
+ def _setup_url(self, rawloc):
+ # Locations may define branches via syntax <base>#<branch>.
+ u = url(rawloc)
+ branch = None
+ if u.fragment:
+ branch = u.fragment
+ u.fragment = None
+
+ self.url = u
+ # the url from the config/command line before dealing with `path://`
+ self.raw_url = u.copy()
+ self.branch = branch
+
+ self.rawloc = rawloc
+ self.loc = b'%s' % u
+
+ def copy(self, new_raw_location=None):
+ """make a copy of this path object
+
+ When `new_raw_location` is set, the new path will point to it.
+        This is used by the scheme extension to expand the scheme.
+ """
new = self.__class__()
for k, v in self.__dict__.items():
new_copy = getattr(v, 'copy', None)
if new_copy is not None:
v = new_copy()
new.__dict__[k] = v
+ if new_raw_location is not None:
+ new._setup_url(new_raw_location)
return new
+ @property
+ def is_push_variant(self):
+ """is this a path variant to be used for pushing"""
+ return self.main_path is not None
+
+ def get_push_variant(self):
+ """get a "copy" of the path, but suitable for pushing
+
+ This means using the value of the `pushurl` option (if any) as the url.
+
+ The original path is available in the `main_path` attribute.
+ """
+ if self.main_path:
+ return self
+ new = self.copy()
+ new.main_path = self
+ if self._pushloc:
+ new._setup_url(self._pushloc)
+ return new
+
+ def pushloc(self):
+ """compatibility layer for the deprecated attributes"""
+ from .. import util # avoid a cycle
+
+ msg = "don't use path.pushloc, use path.get_push_variant()"
+ util.nouideprecwarn(msg, b"6.5")
+ return self._pushloc
+
def _validate_path(self):
# When given a raw location but not a symbolic name, validate the
# location is valid.
--- a/mercurial/verify.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/verify.py Wed Jan 04 16:02:22 2023 +0100
@@ -15,6 +15,7 @@
from . import (
error,
pycompat,
+ requirements,
revlog,
util,
)
@@ -210,6 +211,12 @@
self._crosscheckfiles(filelinkrevs, filenodes)
totalfiles, filerevisions = self._verifyfiles(filenodes, filelinkrevs)
+ if self.errors:
+ ui.warn(_(b"not checking dirstate because of previous errors\n"))
+ dirstate_errors = 0
+ else:
+ dirstate_errors = self._verify_dirstate()
+
# final report
ui.status(
_(b"checked %d changesets with %d changes to %d files\n")
@@ -225,6 +232,11 @@
msg = _(b"(first damaged changeset appears to be %d)\n")
msg %= min(self.badrevs)
ui.warn(msg)
+ if dirstate_errors:
+ ui.warn(
+ _(b"dirstate inconsistent with current parent's manifest\n")
+ )
+ ui.warn(_(b"%d dirstate errors\n") % dirstate_errors)
return 1
return 0
@@ -585,3 +597,25 @@
self._warn(_(b"warning: orphan data file '%s'") % f)
return len(files), revisions
+
+ def _verify_dirstate(self):
+ """Check that the dirstate is consistent with the parent's manifest"""
+ repo = self.repo
+ ui = self.ui
+ ui.status(_(b"checking dirstate\n"))
+
+ parent1, parent2 = repo.dirstate.parents()
+ m1 = repo[parent1].manifest()
+ m2 = repo[parent2].manifest()
+ dirstate_errors = 0
+
+ is_narrow = requirements.NARROW_REQUIREMENT in repo.requirements
+ narrow_matcher = repo.narrowmatch() if is_narrow else None
+
+ for err in repo.dirstate.verify(m1, m2, parent1, narrow_matcher):
+ ui.error(err)
+ dirstate_errors += 1
+
+ if dirstate_errors:
+ self.errors += dirstate_errors
+ return dirstate_errors
--- a/mercurial/vfs.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/vfs.py Wed Jan 04 16:02:22 2023 +0100
@@ -11,6 +11,10 @@
import stat
import threading
+from typing import (
+ Optional,
+)
+
from .i18n import _
from .pycompat import (
delattr,
@@ -26,7 +30,7 @@
)
-def _avoidambig(path, oldstat):
+def _avoidambig(path: bytes, oldstat):
"""Avoid file stat ambiguity forcibly
This function causes copying ``path`` file, if it is owned by
@@ -60,16 +64,17 @@
'''Prevent instantiation; don't call this from subclasses.'''
raise NotImplementedError('attempted instantiating ' + str(type(self)))
- def __call__(self, path, mode=b'rb', **kwargs):
+ # TODO: type return, which is util.posixfile wrapped by a proxy
+ def __call__(self, path: bytes, mode: bytes = b'rb', **kwargs):
raise NotImplementedError
- def _auditpath(self, path, mode):
+ def _auditpath(self, path: bytes, mode: bytes):
raise NotImplementedError
- def join(self, path, *insidef):
+ def join(self, path: Optional[bytes], *insidef: bytes) -> bytes:
raise NotImplementedError
- def tryread(self, path):
+ def tryread(self, path: bytes) -> bytes:
'''gracefully return an empty string for missing files'''
try:
return self.read(path)
@@ -77,7 +82,7 @@
pass
return b""
- def tryreadlines(self, path, mode=b'rb'):
+ def tryreadlines(self, path: bytes, mode: bytes = b'rb'):
'''gracefully return an empty array for missing files'''
try:
return self.readlines(path, mode=mode)
@@ -95,57 +100,61 @@
"""
return self.__call__
- def read(self, path):
+ def read(self, path: bytes) -> bytes:
with self(path, b'rb') as fp:
return fp.read()
- def readlines(self, path, mode=b'rb'):
+ def readlines(self, path: bytes, mode: bytes = b'rb'):
with self(path, mode=mode) as fp:
return fp.readlines()
- def write(self, path, data, backgroundclose=False, **kwargs):
+ def write(
+ self, path: bytes, data: bytes, backgroundclose=False, **kwargs
+ ) -> int:
with self(path, b'wb', backgroundclose=backgroundclose, **kwargs) as fp:
return fp.write(data)
- def writelines(self, path, data, mode=b'wb', notindexed=False):
+ def writelines(
+ self, path: bytes, data: bytes, mode: bytes = b'wb', notindexed=False
+ ) -> None:
with self(path, mode=mode, notindexed=notindexed) as fp:
return fp.writelines(data)
- def append(self, path, data):
+ def append(self, path: bytes, data: bytes) -> int:
with self(path, b'ab') as fp:
return fp.write(data)
- def basename(self, path):
+ def basename(self, path: bytes) -> bytes:
"""return base element of a path (as os.path.basename would do)
This exists to allow handling of strange encoding if needed."""
return os.path.basename(path)
- def chmod(self, path, mode):
+ def chmod(self, path: bytes, mode: int) -> None:
return os.chmod(self.join(path), mode)
- def dirname(self, path):
+ def dirname(self, path: bytes) -> bytes:
"""return dirname element of a path (as os.path.dirname would do)
This exists to allow handling of strange encoding if needed."""
return os.path.dirname(path)
- def exists(self, path=None):
+ def exists(self, path: Optional[bytes] = None) -> bool:
return os.path.exists(self.join(path))
def fstat(self, fp):
return util.fstat(fp)
- def isdir(self, path=None):
+ def isdir(self, path: Optional[bytes] = None) -> bool:
return os.path.isdir(self.join(path))
- def isfile(self, path=None):
+ def isfile(self, path: Optional[bytes] = None) -> bool:
return os.path.isfile(self.join(path))
- def islink(self, path=None):
+ def islink(self, path: Optional[bytes] = None) -> bool:
return os.path.islink(self.join(path))
- def isfileorlink(self, path=None):
+ def isfileorlink(self, path: Optional[bytes] = None) -> bool:
"""return whether path is a regular file or a symlink
Unlike isfile, this doesn't follow symlinks."""
@@ -156,7 +165,7 @@
mode = st.st_mode
return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
- def _join(self, *paths):
+ def _join(self, *paths: bytes) -> bytes:
root_idx = 0
for idx, p in enumerate(paths):
if os.path.isabs(p) or p.startswith(self._dir_sep):
@@ -166,41 +175,48 @@
paths = [p for p in paths if p]
return self._dir_sep.join(paths)
- def reljoin(self, *paths):
+ def reljoin(self, *paths: bytes) -> bytes:
"""join various elements of a path together (as os.path.join would do)
The vfs base is not injected so that path stay relative. This exists
to allow handling of strange encoding if needed."""
return self._join(*paths)
- def split(self, path):
+ def split(self, path: bytes):
"""split top-most element of a path (as os.path.split would do)
This exists to allow handling of strange encoding if needed."""
return os.path.split(path)
- def lexists(self, path=None):
+ def lexists(self, path: Optional[bytes] = None) -> bool:
return os.path.lexists(self.join(path))
- def lstat(self, path=None):
+ def lstat(self, path: Optional[bytes] = None):
return os.lstat(self.join(path))
- def listdir(self, path=None):
+ def listdir(self, path: Optional[bytes] = None):
return os.listdir(self.join(path))
- def makedir(self, path=None, notindexed=True):
+ def makedir(self, path: Optional[bytes] = None, notindexed=True):
return util.makedir(self.join(path), notindexed)
- def makedirs(self, path=None, mode=None):
+ def makedirs(
+ self, path: Optional[bytes] = None, mode: Optional[int] = None
+ ):
return util.makedirs(self.join(path), mode)
- def makelock(self, info, path):
+ def makelock(self, info, path: bytes):
return util.makelock(info, self.join(path))
- def mkdir(self, path=None):
+ def mkdir(self, path: Optional[bytes] = None):
return os.mkdir(self.join(path))
- def mkstemp(self, suffix=b'', prefix=b'tmp', dir=None):
+ def mkstemp(
+ self,
+ suffix: bytes = b'',
+ prefix: bytes = b'tmp',
+ dir: Optional[bytes] = None,
+ ):
fd, name = pycompat.mkstemp(
suffix=suffix, prefix=prefix, dir=self.join(dir)
)
@@ -210,13 +226,13 @@
else:
return fd, fname
- def readdir(self, path=None, stat=None, skip=None):
+ def readdir(self, path: Optional[bytes] = None, stat=None, skip=None):
return util.listdir(self.join(path), stat, skip)
- def readlock(self, path):
+ def readlock(self, path: bytes) -> bytes:
return util.readlock(self.join(path))
- def rename(self, src, dst, checkambig=False):
+ def rename(self, src: bytes, dst: bytes, checkambig=False):
"""Rename from src to dst
checkambig argument is used with util.filestat, and is useful
@@ -238,18 +254,20 @@
return ret
return util.rename(srcpath, dstpath)
- def readlink(self, path):
+ def readlink(self, path: bytes) -> bytes:
return util.readlink(self.join(path))
- def removedirs(self, path=None):
+ def removedirs(self, path: Optional[bytes] = None):
"""Remove a leaf directory and all empty intermediate ones"""
return util.removedirs(self.join(path))
- def rmdir(self, path=None):
+ def rmdir(self, path: Optional[bytes] = None):
"""Remove an empty directory."""
return os.rmdir(self.join(path))
- def rmtree(self, path=None, ignore_errors=False, forcibly=False):
+ def rmtree(
+ self, path: Optional[bytes] = None, ignore_errors=False, forcibly=False
+ ):
"""Remove a directory tree recursively
If ``forcibly``, this tries to remove READ-ONLY files, too.
@@ -272,28 +290,30 @@
self.join(path), ignore_errors=ignore_errors, onerror=onerror
)
- def setflags(self, path, l, x):
+ def setflags(self, path: bytes, l: bool, x: bool):
return util.setflags(self.join(path), l, x)
- def stat(self, path=None):
+ def stat(self, path: Optional[bytes] = None):
return os.stat(self.join(path))
- def unlink(self, path=None):
+ def unlink(self, path: Optional[bytes] = None):
return util.unlink(self.join(path))
- def tryunlink(self, path=None):
+ def tryunlink(self, path: Optional[bytes] = None):
"""Attempt to remove a file, ignoring missing file errors."""
util.tryunlink(self.join(path))
- def unlinkpath(self, path=None, ignoremissing=False, rmdir=True):
+ def unlinkpath(
+ self, path: Optional[bytes] = None, ignoremissing=False, rmdir=True
+ ):
return util.unlinkpath(
self.join(path), ignoremissing=ignoremissing, rmdir=rmdir
)
- def utime(self, path=None, t=None):
+ def utime(self, path: Optional[bytes] = None, t=None):
return os.utime(self.join(path), t)
- def walk(self, path=None, onerror=None):
+ def walk(self, path: Optional[bytes] = None, onerror=None):
"""Yield (dirpath, dirs, files) tuple for each directories under path
``dirpath`` is relative one from the root of this vfs. This
@@ -360,7 +380,7 @@
def __init__(
self,
- base,
+ base: bytes,
audit=True,
cacheaudited=False,
expandpath=False,
@@ -381,7 +401,7 @@
self.options = {}
@util.propertycache
- def _cansymlink(self):
+ def _cansymlink(self) -> bool:
return util.checklink(self.base)
@util.propertycache
@@ -393,7 +413,7 @@
return
os.chmod(name, self.createmode & 0o666)
- def _auditpath(self, path, mode):
+ def _auditpath(self, path, mode) -> None:
if self._audit:
if os.path.isabs(path) and path.startswith(self.base):
path = os.path.relpath(path, self.base)
@@ -404,8 +424,8 @@
def __call__(
self,
- path,
- mode=b"r",
+ path: bytes,
+ mode: bytes = b"rb",
atomictemp=False,
notindexed=False,
backgroundclose=False,
@@ -518,7 +538,7 @@
return fp
- def symlink(self, src, dst):
+ def symlink(self, src: bytes, dst: bytes) -> None:
self.audit(dst)
linkname = self.join(dst)
util.tryunlink(linkname)
@@ -538,7 +558,7 @@
else:
self.write(dst, src)
- def join(self, path, *insidef):
+ def join(self, path: Optional[bytes], *insidef: bytes) -> bytes:
if path:
parts = [self.base, path]
parts.extend(insidef)
@@ -551,7 +571,7 @@
class proxyvfs(abstractvfs):
- def __init__(self, vfs):
+ def __init__(self, vfs: "vfs"):
self.vfs = vfs
def _auditpath(self, path, mode):
@@ -569,14 +589,14 @@
class filtervfs(proxyvfs, abstractvfs):
'''Wrapper vfs for filtering filenames with a function.'''
- def __init__(self, vfs, filter):
+ def __init__(self, vfs: "vfs", filter):
proxyvfs.__init__(self, vfs)
self._filter = filter
- def __call__(self, path, *args, **kwargs):
+ def __call__(self, path: bytes, *args, **kwargs):
return self.vfs(self._filter(path), *args, **kwargs)
- def join(self, path, *insidef):
+ def join(self, path: Optional[bytes], *insidef: bytes) -> bytes:
if path:
return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
else:
@@ -589,15 +609,15 @@
class readonlyvfs(proxyvfs):
'''Wrapper vfs preventing any writing.'''
- def __init__(self, vfs):
+ def __init__(self, vfs: "vfs"):
proxyvfs.__init__(self, vfs)
- def __call__(self, path, mode=b'r', *args, **kw):
+ def __call__(self, path: bytes, mode: bytes = b'rb', *args, **kw):
if mode not in (b'r', b'rb'):
raise error.Abort(_(b'this vfs is read only'))
return self.vfs(path, mode, *args, **kw)
- def join(self, path, *insidef):
+ def join(self, path: Optional[bytes], *insidef: bytes) -> bytes:
return self.vfs.join(path, *insidef)
--- a/mercurial/win32.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/win32.py Wed Jan 04 16:02:22 2023 +0100
@@ -14,6 +14,13 @@
import random
import subprocess
+from typing import (
+ List,
+ NoReturn,
+ Optional,
+ Tuple,
+)
+
from . import (
encoding,
pycompat,
@@ -356,7 +363,7 @@
_kernel32.PeekNamedPipe.restype = _BOOL
-def _raiseoserror(name):
+def _raiseoserror(name: bytes) -> NoReturn:
# Force the code to a signed int to avoid an 'int too large' error.
# See https://bugs.python.org/issue28474
code = _kernel32.GetLastError()
@@ -368,7 +375,7 @@
)
-def _getfileinfo(name):
+def _getfileinfo(name: bytes) -> _BY_HANDLE_FILE_INFORMATION:
fh = _kernel32.CreateFileA(
name,
0,
@@ -389,7 +396,7 @@
_kernel32.CloseHandle(fh)
-def checkcertificatechain(cert, build=True):
+def checkcertificatechain(cert: bytes, build: bool = True) -> bool:
"""Tests the given certificate to see if there is a complete chain to a
trusted root certificate. As a side effect, missing certificates are
downloaded and installed unless ``build=False``. True is returned if a
@@ -439,7 +446,7 @@
_crypt32.CertFreeCertificateContext(certctx)
-def oslink(src, dst):
+def oslink(src: bytes, dst: bytes) -> None:
try:
if not _kernel32.CreateHardLinkA(dst, src, None):
_raiseoserror(src)
@@ -447,12 +454,12 @@
_raiseoserror(src)
-def nlinks(name):
+def nlinks(name: bytes) -> int:
'''return number of hardlinks for the given file'''
return _getfileinfo(name).nNumberOfLinks
-def samefile(path1, path2):
+def samefile(path1: bytes, path2: bytes) -> bool:
'''Returns whether path1 and path2 refer to the same file or directory.'''
res1 = _getfileinfo(path1)
res2 = _getfileinfo(path2)
@@ -463,14 +470,14 @@
)
-def samedevice(path1, path2):
+def samedevice(path1: bytes, path2: bytes) -> bool:
'''Returns whether path1 and path2 are on the same device.'''
res1 = _getfileinfo(path1)
res2 = _getfileinfo(path2)
return res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber
-def peekpipe(pipe):
+def peekpipe(pipe) -> int:
handle = msvcrt.get_osfhandle(pipe.fileno()) # pytype: disable=module-attr
avail = _DWORD()
@@ -485,14 +492,14 @@
return avail.value
-def lasterrorwaspipeerror(err):
+def lasterrorwaspipeerror(err) -> bool:
if err.errno != errno.EINVAL:
return False
err = _kernel32.GetLastError()
return err == _ERROR_BROKEN_PIPE or err == _ERROR_NO_DATA
-def testpid(pid):
+def testpid(pid: int) -> bool:
"""return True if pid is still running or unable to
determine, False otherwise"""
h = _kernel32.OpenProcess(_PROCESS_QUERY_INFORMATION, False, pid)
@@ -506,7 +513,7 @@
return _kernel32.GetLastError() != _ERROR_INVALID_PARAMETER
-def executablepath():
+def executablepath() -> bytes:
'''return full path of hg.exe'''
size = 600
buf = ctypes.create_string_buffer(size + 1)
@@ -520,7 +527,7 @@
return buf.value
-def getvolumename(path):
+def getvolumename(path: bytes) -> Optional[bytes]:
"""Get the mount point of the filesystem from a directory or file
(best-effort)
@@ -541,7 +548,7 @@
return buf.value
-def getfstype(path):
+def getfstype(path: bytes) -> Optional[bytes]:
"""Get the filesystem type name from a directory or file (best-effort)
Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
@@ -572,7 +579,7 @@
return name.value
-def getuser():
+def getuser() -> bytes:
'''return name of current user'''
size = _DWORD(300)
buf = ctypes.create_string_buffer(size.value + 1)
@@ -581,10 +588,10 @@
return buf.value
-_signalhandler = []
+_signalhandler: List[_SIGNAL_HANDLER] = []
-def setsignalhandler():
+def setsignalhandler() -> None:
"""Register a termination handler for console events including
CTRL+C. python signal handlers do not work well with socket
operations.
@@ -601,7 +608,7 @@
raise ctypes.WinError() # pytype: disable=module-attr
-def hidewindow():
+def hidewindow() -> None:
def callback(hwnd, pid):
wpid = _DWORD()
_user32.GetWindowThreadProcessId(hwnd, ctypes.byref(wpid))
@@ -614,7 +621,7 @@
_user32.EnumWindows(_WNDENUMPROC(callback), pid)
-def termsize():
+def termsize() -> Tuple[int, int]:
# cmd.exe does not handle CR like a unix console, the CR is
# counted in the line length. On 80 columns consoles, if 80
# characters are written, the following CR won't apply on the
@@ -635,7 +642,7 @@
return width, height
-def enablevtmode():
+def enablevtmode() -> bool:
"""Enable virtual terminal mode for the associated console. Return True if
enabled, else False."""
@@ -661,7 +668,7 @@
return True
-def spawndetached(args):
+def spawndetached(args: List[bytes]) -> int:
# No standard library function really spawns a fully detached
# process under win32 because they allocate pipes or other objects
# to handle standard streams communications. Passing these objects
@@ -703,7 +710,7 @@
return pi.dwProcessId
-def unlink(f):
+def unlink(f: bytes) -> None:
'''try to implement POSIX' unlink semantics on Windows'''
if os.path.isdir(f):
@@ -758,7 +765,7 @@
pass
-def makedir(path, notindexed):
+def makedir(path: bytes, notindexed: bool) -> None:
os.mkdir(path)
if notindexed:
_kernel32.SetFileAttributesA(path, _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED)
--- a/mercurial/windows.py Wed Jan 04 12:06:07 2023 +0100
+++ b/mercurial/windows.py Wed Jan 04 16:02:22 2023 +0100
@@ -14,8 +14,24 @@
import stat
import string
import sys
+import typing
import winreg # pytype: disable=import-error
+from typing import (
+ AnyStr,
+ BinaryIO,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ NoReturn,
+ Optional,
+ Pattern,
+ Sequence,
+ Tuple,
+ Union,
+)
+
from .i18n import _
from .pycompat import getattr
from . import (
@@ -23,6 +39,7 @@
error,
policy,
pycompat,
+ typelib,
win32,
)
@@ -44,7 +61,19 @@
testpid = win32.testpid
unlink = win32.unlink
-umask = 0o022
+if typing.TYPE_CHECKING:
+ # Replace the various overloads that come along with aliasing stdlib methods
+ # with the narrow definition that we care about in the type checking phase
+ # only. This ensures that both Windows and POSIX see only the definition
+ # that is actually available.
+ #
+ # Note that if we check pycompat.TYPE_CHECKING here, it is always False, and
+ # the methods aren't replaced.
+ def split(p: bytes) -> Tuple[bytes, bytes]:
+ raise NotImplementedError
+
+
+umask: int = 0o022
class mixedfilemodewrapper:
@@ -178,15 +207,7 @@
listdir = osutil.listdir
-# copied from .utils.procutil, remove after Python 2 support was dropped
-def _isatty(fp):
- try:
- return fp.isatty()
- except AttributeError:
- return False
-
-
-def get_password():
+def get_password() -> bytes:
"""Prompt for password with echo off, using Windows getch().
This shouldn't be called directly- use ``ui.getpass()`` instead, which
@@ -208,7 +229,7 @@
return encoding.unitolocal(pw)
-class winstdout:
+class winstdout(typelib.BinaryIO_Proxy):
"""Some files on Windows misbehave.
When writing to a broken pipe, EINVAL instead of EPIPE may be raised.
@@ -217,7 +238,7 @@
error may happen. Python 3 already works around that.
"""
- def __init__(self, fp):
+ def __init__(self, fp: BinaryIO):
self.fp = fp
def __getattr__(self, key):
@@ -247,11 +268,11 @@
raise IOError(errno.EPIPE, 'Broken pipe')
-def openhardlinks():
+def openhardlinks() -> bool:
return True
-def parsepatchoutput(output_line):
+def parsepatchoutput(output_line: bytes) -> bytes:
"""parses the output produced by patch and returns the filename"""
pf = output_line[14:]
if pf[0] == b'`':
@@ -259,7 +280,9 @@
return pf
-def sshargs(sshcmd, host, user, port):
+def sshargs(
+ sshcmd: bytes, host: bytes, user: Optional[bytes], port: Optional[bytes]
+) -> bytes:
'''Build argument list for ssh or Plink'''
pflag = b'plink' in sshcmd.lower() and b'-P' or b'-p'
args = user and (b"%s@%s" % (user, host)) or host
@@ -274,23 +297,28 @@
return args
-def setflags(f, l, x):
- pass
-
-
-def copymode(src, dst, mode=None, enforcewritable=False):
+def setflags(f: bytes, l: bool, x: bool) -> None:
pass
-def checkexec(path):
+def copymode(
+ src: bytes,
+ dst: bytes,
+ mode: Optional[bytes] = None,
+ enforcewritable: bool = False,
+) -> None:
+ pass
+
+
+def checkexec(path: bytes) -> bool:
return False
-def checklink(path):
+def checklink(path: bytes) -> bool:
return False
-def setbinary(fd):
+def setbinary(fd) -> None:
# When run without console, pipes may expose invalid
# fileno(), usually set to -1.
fno = getattr(fd, 'fileno', None)
@@ -298,27 +326,28 @@
msvcrt.setmode(fno(), os.O_BINARY) # pytype: disable=module-attr
-def pconvert(path):
+def pconvert(path: bytes) -> bytes:
return path.replace(pycompat.ossep, b'/')
-def localpath(path):
+def localpath(path: bytes) -> bytes:
return path.replace(b'/', b'\\')
-def normpath(path):
+def normpath(path: bytes) -> bytes:
return pconvert(os.path.normpath(path))
-def normcase(path):
+def normcase(path: bytes) -> bytes:
return encoding.upper(path) # NTFS compares via upper()
-DRIVE_RE_B = re.compile(b'^[a-z]:')
-DRIVE_RE_S = re.compile('^[a-z]:')
+DRIVE_RE_B: Pattern[bytes] = re.compile(b'^[a-z]:')
+DRIVE_RE_S: Pattern[str] = re.compile('^[a-z]:')
-def abspath(path):
+# TODO: why is this accepting str?
+def abspath(path: AnyStr) -> AnyStr:
abs_path = os.path.abspath(path) # re-exports
# Python on Windows is inconsistent regarding the capitalization of drive
# letter and this cause issue with various path comparison along the way.
@@ -334,15 +363,15 @@
# see posix.py for definitions
-normcasespec = encoding.normcasespecs.upper
+normcasespec: int = encoding.normcasespecs.upper
normcasefallback = encoding.upperfallback
-def samestat(s1, s2):
+def samestat(s1: os.stat_result, s2: os.stat_result) -> bool:
return False
-def shelltocmdexe(path, env):
+def shelltocmdexe(path: bytes, env: Mapping[bytes, bytes]) -> bytes:
r"""Convert shell variables in the form $var and ${var} inside ``path``
to %var% form. Existing Windows style variables are left unchanged.
@@ -467,11 +496,11 @@
# the number of backslashes that precede double quotes and add another
# backslash before every double quote (being careful with the double
# quote we've appended to the end)
-_quotere = None
+_quotere: Optional[Pattern[bytes]] = None
_needsshellquote = None
-def shellquote(s):
+def shellquote(s: bytes) -> bytes:
r"""
>>> shellquote(br'C:\Users\xyz')
'"C:\\Users\\xyz"'
@@ -501,24 +530,24 @@
return b'"%s"' % _quotere.sub(br'\1\1\\\2', s)
-def _unquote(s):
+def _unquote(s: bytes) -> bytes:
if s.startswith(b'"') and s.endswith(b'"'):
return s[1:-1]
return s
-def shellsplit(s):
+def shellsplit(s: bytes) -> List[bytes]:
"""Parse a command string in cmd.exe way (best-effort)"""
return pycompat.maplist(_unquote, pycompat.shlexsplit(s, posix=False))
# if you change this stub into a real check, please try to implement the
# username and groupname functions above, too.
-def isowner(st):
+def isowner(st: os.stat_result) -> bool:
return True
-def findexe(command):
+def findexe(command: bytes) -> Optional[bytes]:
"""Find executable for command searching like cmd.exe does.
If command is a basename then PATH is searched for command.
PATH isn't searched if command is an absolute or relative path.
@@ -529,7 +558,7 @@
if os.path.splitext(command)[1].lower() in pathexts:
pathexts = [b'']
- def findexisting(pathcommand):
+ def findexisting(pathcommand: bytes) -> Optional[bytes]:
"""Will append extension (if needed) and return existing file"""
for ext in pathexts:
executable = pathcommand + ext
@@ -550,7 +579,7 @@
_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
-def statfiles(files):
+def statfiles(files: Sequence[bytes]) -> Iterator[Optional[os.stat_result]]:
"""Stat each file in files. Yield each stat, or None if a file
does not exist or has a type we don't care about.
@@ -576,7 +605,7 @@
yield cache.get(base, None)
-def username(uid=None):
+def username(uid: Optional[int] = None) -> Optional[bytes]:
"""Return the name of the user with the given uid.
If uid is None, return the name of the current user."""
@@ -591,14 +620,14 @@
return None
-def groupname(gid=None):
+def groupname(gid: Optional[int] = None) -> Optional[bytes]:
"""Return the name of the group with the given gid.
If gid is None, return the name of the current group."""
return None
-def readlink(pathname):
+def readlink(pathname: bytes) -> bytes:
path = pycompat.fsdecode(pathname)
try:
link = os.readlink(path)
@@ -611,7 +640,7 @@
return pycompat.fsencode(link)
-def removedirs(name):
+def removedirs(name: bytes) -> None:
"""special version of os.removedirs that does not remove symlinked
directories or junction points if they actually contain files"""
if listdir(name):
@@ -630,7 +659,7 @@
head, tail = os.path.split(head)
-def rename(src, dst):
+def rename(src: bytes, dst: bytes) -> None:
'''atomically rename file src to dst, replacing dst if it exists'''
try:
os.rename(src, dst)
@@ -639,28 +668,32 @@
os.rename(src, dst)
-def gethgcmd():
+def gethgcmd() -> List[bytes]:
return [encoding.strtolocal(arg) for arg in [sys.executable] + sys.argv[:1]]
-def groupmembers(name):
+def groupmembers(name: bytes) -> List[bytes]:
# Don't support groups on Windows for now
raise KeyError
-def isexec(f):
+def isexec(f: bytes) -> bool:
return False
class cachestat:
- def __init__(self, path):
+ def __init__(self, path: bytes) -> None:
pass
- def cacheable(self):
+ def cacheable(self) -> bool:
return False
-def lookupreg(key, valname=None, scope=None):
+def lookupreg(
+ key: bytes,
+ valname: Optional[bytes] = None,
+ scope: Optional[Union[int, Iterable[int]]] = None,
+) -> Optional[bytes]:
"""Look up a key/value name in the Windows registry.
valname: value name. If unspecified, the default value for the key
@@ -693,25 +726,25 @@
pass
-expandglobs = True
+expandglobs: bool = True
-def statislink(st):
+def statislink(st: Optional[os.stat_result]) -> bool:
'''check whether a stat result is a symlink'''
return False
-def statisexec(st):
+def statisexec(st: Optional[os.stat_result]) -> bool:
'''check whether a stat result is an executable file'''
return False
-def poll(fds):
+def poll(fds) -> List:
# see posix.py for description
raise NotImplementedError()
-def readpipe(pipe):
+def readpipe(pipe) -> bytes:
"""Read all available data from a pipe."""
chunks = []
while True:
@@ -727,5 +760,5 @@
return b''.join(chunks)
-def bindunixsocket(sock, path):
+def bindunixsocket(sock, path: bytes) -> NoReturn:
raise NotImplementedError('unsupported platform')
--- a/relnotes/next Wed Jan 04 12:06:07 2023 +0100
+++ b/relnotes/next Wed Jan 04 16:02:22 2023 +0100
@@ -16,3 +16,7 @@
== Internal API Changes ==
== Miscellaneous ==
+
+ * pullbundle support no longer requires setting a server-side option,
+ providing a .hg/pullbundles.manifest according to the syntax specified in
+ 'hg help -e clonebundles' is enough.
--- a/rust/Cargo.lock Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/Cargo.lock Wed Jan 04 16:02:22 2023 +0100
@@ -10,21 +10,26 @@
[[package]]
name = "adler"
-version = "0.2.3"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "ahash"
-version = "0.4.7"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "739f4a8db6605981345c5654f3a85b056ce52f37a39d34da03f25bf2151ea16e"
+checksum = "bf6ccdb167abbf410dcb915cabd428929d7f6a04980b54a11f26a39f1c7f7107"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "version_check",
+]
[[package]]
name = "aho-corasick"
-version = "0.7.18"
+version = "0.7.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
dependencies = [
"memchr",
]
@@ -36,12 +41,12 @@
checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
[[package]]
-name = "ansi_term"
-version = "0.12.1"
+name = "android_system_properties"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
dependencies = [
- "winapi",
+ "libc",
]
[[package]]
@@ -57,9 +62,9 @@
[[package]]
name = "autocfg"
-version = "1.0.1"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "bitflags"
@@ -87,14 +92,20 @@
[[package]]
name = "block-buffer"
-version = "0.10.2"
+version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324"
+checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
dependencies = [
"generic-array",
]
[[package]]
+name = "bumpalo"
+version = "3.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"
+
+[[package]]
name = "byteorder"
version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -111,9 +122,9 @@
[[package]]
name = "bytes-cast-derive"
-version = "0.1.0"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb936af9de38476664d6b58e529aff30d482e4ce1c5e150293d00730b0d81fdb"
+checksum = "b13e0e8ffc91021ba28dc98b2ea82099ba4ec07655279c21bfa3313ed96708fc"
dependencies = [
"proc-macro2",
"quote",
@@ -122,58 +133,80 @@
[[package]]
name = "cc"
-version = "1.0.66"
+version = "1.0.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
+checksum = "76a284da2e6fe2092f2353e51713435363112dfd60030e22add80be333fb928f"
dependencies = [
"jobserver",
]
[[package]]
name = "cfg-if"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
-
-[[package]]
-name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chrono"
-version = "0.4.19"
+version = "0.4.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
+checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f"
dependencies = [
- "libc",
+ "iana-time-zone",
+ "js-sys",
"num-integer",
"num-traits",
"time",
+ "wasm-bindgen",
"winapi",
]
[[package]]
name = "clap"
-version = "2.34.0"
+version = "4.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
+checksum = "60494cedb60cb47462c0ff7be53de32c0e42a6fc2c772184554fa12bd9489c03"
dependencies = [
- "ansi_term",
"atty",
"bitflags",
+ "clap_derive",
+ "clap_lex",
+ "once_cell",
"strsim",
- "textwrap",
- "unicode-width",
- "vec_map",
+ "termcolor",
]
[[package]]
-name = "const_fn"
-version = "0.4.4"
+name = "clap_derive"
+version = "4.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826"
+checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8"
+dependencies = [
+ "os_str_bytes",
+]
+
+[[package]]
+name = "codespan-reporting"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
+dependencies = [
+ "termcolor",
+ "unicode-width",
+]
[[package]]
name = "convert_case"
@@ -182,28 +215,25 @@
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
-name = "cpufeatures"
-version = "0.1.4"
+name = "core-foundation-sys"
+version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed00c67cb5d0a7d64a44f6ad2668db7e7530311dd53ea79bcd4fb022c64911c8"
-dependencies = [
- "libc",
-]
+checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
[[package]]
name = "cpufeatures"
-version = "0.2.1"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469"
+checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
dependencies = [
"libc",
]
[[package]]
name = "cpython"
-version = "0.7.0"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7d46ba8ace7f3a1d204ac5060a706d0a68de6b42eafb6a586cc08bebcffe664"
+checksum = "3052106c29da7390237bc2310c1928335733b286287754ea85e6093d2495280e"
dependencies = [
"libc",
"num-traits",
@@ -213,74 +243,116 @@
[[package]]
name = "crc32fast"
-version = "1.2.1"
+version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
]
[[package]]
name = "crossbeam-channel"
-version = "0.5.2"
+version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa"
+checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-deque"
-version = "0.8.0"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9"
+checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
-version = "0.9.1"
+version = "0.9.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d"
+checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
dependencies = [
- "cfg-if 1.0.0",
- "const_fn",
+ "autocfg",
+ "cfg-if",
"crossbeam-utils",
- "lazy_static",
"memoffset",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
-version = "0.8.1"
+version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
+checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
dependencies = [
- "autocfg",
- "cfg-if 1.0.0",
- "lazy_static",
+ "cfg-if",
]
[[package]]
name = "crypto-common"
-version = "0.1.2"
+version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4600d695eb3f6ce1cd44e6e291adceb2cc3ab12f20a33777ecd0bf6eba34e06"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
+ "typenum",
]
[[package]]
name = "ctor"
-version = "0.1.16"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
+dependencies = [
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "cxx"
+version = "1.0.81"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97abf9f0eca9e52b7f81b945524e76710e6cb2366aead23b7d4fbf72e281f888"
+dependencies = [
+ "cc",
+ "cxxbridge-flags",
+ "cxxbridge-macro",
+ "link-cplusplus",
+]
+
+[[package]]
+name = "cxx-build"
+version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484"
+checksum = "7cc32cc5fea1d894b77d269ddb9f192110069a8a9c1f1d441195fba90553dea3"
dependencies = [
+ "cc",
+ "codespan-reporting",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "scratch",
+ "syn",
+]
+
+[[package]]
+name = "cxxbridge-flags"
+version = "1.0.81"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ca220e4794c934dc6b1207c3b42856ad4c302f2df1712e9f8d2eec5afaacf1f"
+
+[[package]]
+name = "cxxbridge-macro"
+version = "1.0.81"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b846f081361125bfc8dc9d3940c84e1fd83ba54bbca7b17cd29483c828be0704"
+dependencies = [
+ "proc-macro2",
"quote",
"syn",
]
@@ -300,9 +372,9 @@
[[package]]
name = "diff"
-version = "0.1.12"
+version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499"
+checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
[[package]]
name = "digest"
@@ -315,25 +387,25 @@
[[package]]
name = "digest"
-version = "0.10.2"
+version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8cb780dce4f9a8f5c087362b3a4595936b2019e7c8b30f2c3e9a7e94e6ae9837"
+checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
dependencies = [
- "block-buffer 0.10.2",
+ "block-buffer 0.10.3",
"crypto-common",
]
[[package]]
name = "either"
-version = "1.6.1"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
[[package]]
name = "env_logger"
-version = "0.9.0"
+version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3"
+checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7"
dependencies = [
"atty",
"humantime",
@@ -344,22 +416,20 @@
[[package]]
name = "fastrand"
-version = "1.7.0"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf"
+checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
dependencies = [
"instant",
]
[[package]]
name = "flate2"
-version = "1.0.22"
+version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
+checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
dependencies = [
- "cfg-if 1.0.0",
"crc32fast",
- "libc",
"libz-sys",
"miniz_oxide",
]
@@ -386,9 +456,9 @@
[[package]]
name = "generic-array"
-version = "0.14.4"
+version = "0.14.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817"
+checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
dependencies = [
"typenum",
"version_check",
@@ -396,47 +466,47 @@
[[package]]
name = "getrandom"
-version = "0.1.15"
+version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
+checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
- "cfg-if 0.1.10",
+ "cfg-if",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
[[package]]
name = "getrandom"
-version = "0.2.4"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c"
+checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
"libc",
- "wasi 0.10.0+wasi-snapshot-preview1",
+ "wasi 0.11.0+wasi-snapshot-preview1",
]
[[package]]
-name = "glob"
-version = "0.3.0"
+name = "hashbrown"
+version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
-
-[[package]]
-name = "hashbrown"
-version = "0.9.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
+checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038"
dependencies = [
"ahash",
"rayon",
]
[[package]]
+name = "heck"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
+
+[[package]]
name = "hermit-abi"
-version = "0.1.17"
+version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
@@ -462,7 +532,7 @@
"hashbrown",
"home",
"im-rc",
- "itertools 0.10.3",
+ "itertools",
"lazy_static",
"libc",
"log",
@@ -500,9 +570,9 @@
[[package]]
name = "home"
-version = "0.5.3"
+version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654"
+checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
dependencies = [
"winapi",
]
@@ -514,13 +584,37 @@
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
+name = "iana-time-zone"
+version = "0.1.53"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765"
+dependencies = [
+ "android_system_properties",
+ "core-foundation-sys",
+ "iana-time-zone-haiku",
+ "js-sys",
+ "wasm-bindgen",
+ "winapi",
+]
+
+[[package]]
+name = "iana-time-zone-haiku"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
+dependencies = [
+ "cxx",
+ "cxx-build",
+]
+
+[[package]]
name = "im-rc"
-version = "15.0.0"
+version = "15.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f"
+checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe"
dependencies = [
"bitmaps",
- "rand_core 0.5.1",
+ "rand_core 0.6.4",
"rand_xoshiro",
"sized-chunks",
"typenum",
@@ -533,37 +627,37 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
]
[[package]]
name = "itertools"
-version = "0.9.0"
+version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
-dependencies = [
- "either",
-]
-
-[[package]]
-name = "itertools"
-version = "0.10.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
dependencies = [
"either",
]
[[package]]
name = "jobserver"
-version = "0.1.21"
+version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2"
+checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b"
dependencies = [
"libc",
]
[[package]]
+name = "js-sys"
+version = "0.3.60"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -571,21 +665,21 @@
[[package]]
name = "libc"
-version = "0.2.124"
+version = "0.2.137"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21a41fed9d98f27ab1c6d161da622a4fa35e8a54a8adc24bbf3ddd0ef70b0e50"
+checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
[[package]]
name = "libm"
-version = "0.2.1"
+version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7d73b3f436185384286bd8098d17ec07c9a7d2388a6599f824d8502b529702a"
+checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"
[[package]]
name = "libz-sys"
-version = "1.1.2"
+version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655"
+checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
dependencies = [
"cc",
"pkg-config",
@@ -593,25 +687,34 @@
]
[[package]]
-name = "log"
-version = "0.4.14"
+name = "link-cplusplus"
+version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
+checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369"
dependencies = [
- "cfg-if 1.0.0",
+ "cc",
+]
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
]
[[package]]
name = "memchr"
-version = "2.4.1"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memmap2"
-version = "0.5.7"
+version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498"
+checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc"
dependencies = [
"libc",
"stable_deref_trait",
@@ -619,9 +722,9 @@
[[package]]
name = "memoffset"
-version = "0.6.1"
+version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
+checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
dependencies = [
"autocfg",
]
@@ -650,19 +753,18 @@
[[package]]
name = "miniz_oxide"
-version = "0.4.3"
+version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
+checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
dependencies = [
"adler",
- "autocfg",
]
[[package]]
name = "num-integer"
-version = "0.1.44"
+version = "0.1.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
+checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
dependencies = [
"autocfg",
"num-traits",
@@ -670,9 +772,9 @@
[[package]]
name = "num-traits"
-version = "0.2.14"
+version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
dependencies = [
"autocfg",
"libm",
@@ -680,9 +782,9 @@
[[package]]
name = "num_cpus"
-version = "1.13.0"
+version = "1.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
+checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
dependencies = [
"hermit-abi",
"libc",
@@ -690,9 +792,9 @@
[[package]]
name = "once_cell"
-version = "1.14.0"
+version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0"
+checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860"
[[package]]
name = "opaque-debug"
@@ -701,21 +803,26 @@
checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
+name = "os_str_bytes"
+version = "6.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b5bf27447411e9ee3ff51186bf7a08e16c341efdde93f4d823e8844429bed7e"
+
+[[package]]
name = "ouroboros"
-version = "0.15.0"
+version = "0.15.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f31a3b678685b150cba82b702dcdc5e155893f63610cf388d30cd988d4ca2bf"
+checksum = "dfbb50b356159620db6ac971c6d5c9ab788c9cc38a6f49619fca2a27acb062ca"
dependencies = [
"aliasable",
"ouroboros_macro",
- "stable_deref_trait",
]
[[package]]
name = "ouroboros_macro"
-version = "0.15.0"
+version = "0.15.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "084fd65d5dd8b3772edccb5ffd1e4b7eba43897ecd0f9401e330e8c542959408"
+checksum = "4a0d9d1a6191c4f391f87219d1ea42b23f09ee84d64763cd05ee6ea88d9f384d"
dependencies = [
"Inflector",
"proc-macro-error",
@@ -726,41 +833,41 @@
[[package]]
name = "output_vt100"
-version = "0.1.2"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
+checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66"
dependencies = [
"winapi",
]
[[package]]
name = "paste"
-version = "1.0.5"
+version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58"
+checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
[[package]]
name = "pkg-config"
-version = "0.3.19"
+version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c"
+checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
[[package]]
name = "ppv-lite86"
-version = "0.2.10"
+version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "pretty_assertions"
-version = "1.1.0"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76d5b548b725018ab5496482b45cb8bef21e9fed1858a6d674e3a8a0f0bb5d50"
+checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
dependencies = [
- "ansi_term",
"ctor",
"diff",
"output_vt100",
+ "yansi",
]
[[package]]
@@ -789,18 +896,18 @@
[[package]]
name = "proc-macro2"
-version = "1.0.24"
+version = "1.0.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
dependencies = [
- "unicode-xid",
+ "unicode-ident",
]
[[package]]
name = "python3-sys"
-version = "0.7.0"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b18b32e64c103d5045f44644d7ddddd65336f7a0521f6fde673240a9ecceb77e"
+checksum = "49f8b50d72fb3015735aa403eebf19bbd72c093bfeeae24ee798be5f2f1aab52"
dependencies = [
"libc",
"regex",
@@ -808,9 +915,9 @@
[[package]]
name = "quote"
-version = "1.0.7"
+version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
+checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
dependencies = [
"proc-macro2",
]
@@ -821,7 +928,7 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
- "getrandom 0.1.15",
+ "getrandom 0.1.16",
"libc",
"rand_chacha 0.2.2",
"rand_core 0.5.1",
@@ -836,7 +943,7 @@
dependencies = [
"libc",
"rand_chacha 0.3.1",
- "rand_core 0.6.3",
+ "rand_core 0.6.4",
]
[[package]]
@@ -856,7 +963,7 @@
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
- "rand_core 0.6.3",
+ "rand_core 0.6.4",
]
[[package]]
@@ -865,16 +972,16 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
- "getrandom 0.1.15",
+ "getrandom 0.1.16",
]
[[package]]
name = "rand_core"
-version = "0.6.3"
+version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
- "getrandom 0.2.4",
+ "getrandom 0.2.8",
]
[[package]]
@@ -902,23 +1009,23 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e"
dependencies = [
- "rand_core 0.6.3",
+ "rand_core 0.6.4",
]
[[package]]
name = "rand_xoshiro"
-version = "0.4.0"
+version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004"
+checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
dependencies = [
- "rand_core 0.5.1",
+ "rand_core 0.6.4",
]
[[package]]
name = "rayon"
-version = "1.5.1"
+version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
+checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
dependencies = [
"autocfg",
"crossbeam-deque",
@@ -928,31 +1035,30 @@
[[package]]
name = "rayon-core"
-version = "1.9.1"
+version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
+checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
"crossbeam-utils",
- "lazy_static",
"num_cpus",
]
[[package]]
name = "redox_syscall"
-version = "0.2.11"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c"
+checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags",
]
[[package]]
name = "regex"
-version = "1.5.5"
+version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
+checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
dependencies = [
"aho-corasick",
"memchr",
@@ -961,9 +1067,9 @@
[[package]]
name = "regex-syntax"
-version = "0.6.25"
+version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
[[package]]
name = "remove_dir_all"
@@ -1020,20 +1126,26 @@
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
+name = "scratch"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898"
+
+[[package]]
name = "semver"
-version = "1.0.6"
+version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4a3381e03edd24287172047536f20cabde766e2cd3e65e6b00fb3af51c4f38d"
+checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
[[package]]
name = "sha-1"
-version = "0.9.6"
+version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c4cfa741c5832d0ef7fab46cabed29c2aae926db0b11bb2069edd8db5e64e16"
+checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6"
dependencies = [
"block-buffer 0.9.0",
- "cfg-if 1.0.0",
- "cpufeatures 0.1.4",
+ "cfg-if",
+ "cpufeatures",
"digest 0.9.0",
"opaque-debug",
]
@@ -1044,16 +1156,16 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
dependencies = [
- "cfg-if 1.0.0",
- "cpufeatures 0.2.1",
- "digest 0.10.2",
+ "cfg-if",
+ "cpufeatures",
+ "digest 0.10.5",
]
[[package]]
name = "sized-chunks"
-version = "0.6.2"
+version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ec31ceca5644fa6d444cc77548b88b67f46db6f7c71683b0f9336e671830d2f"
+checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
dependencies = [
"bitmaps",
"typenum",
@@ -1073,19 +1185,19 @@
[[package]]
name = "strsim"
-version = "0.8.0"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "syn"
-version = "1.0.54"
+version = "1.0.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44"
+checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d"
dependencies = [
"proc-macro2",
"quote",
- "unicode-xid",
+ "unicode-ident",
]
[[package]]
@@ -1094,7 +1206,7 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
"fastrand",
"libc",
"redox_syscall",
@@ -1104,23 +1216,14 @@
[[package]]
name = "termcolor"
-version = "1.1.2"
+version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
dependencies = [
"winapi-util",
]
[[package]]
-name = "textwrap"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
-dependencies = [
- "unicode-width",
-]
-
-[[package]]
name = "thread_local"
version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1142,32 +1245,32 @@
[[package]]
name = "twox-hash"
-version = "1.6.2"
+version = "1.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ee73e6e4924fe940354b8d4d98cad5231175d615cd855b758adc658c0aac6a0"
+checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
dependencies = [
- "cfg-if 1.0.0",
+ "cfg-if",
"rand 0.8.5",
"static_assertions",
]
[[package]]
name = "typenum"
-version = "1.12.0"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
+checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
[[package]]
name = "unicode-width"
-version = "0.1.9"
+version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
-
-[[package]]
-name = "unicode-xid"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
+checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
[[package]]
name = "users"
@@ -1181,9 +1284,9 @@
[[package]]
name = "vcpkg"
-version = "0.2.11"
+version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "vcsgraph"
@@ -1193,20 +1296,14 @@
dependencies = [
"hex",
"rand 0.7.3",
- "sha-1 0.9.6",
+ "sha-1 0.9.8",
]
[[package]]
-name = "vec_map"
-version = "0.8.2"
+name = "version_check"
+version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
-
-[[package]]
-name = "version_check"
-version = "0.9.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
@@ -1221,14 +1318,74 @@
checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f"
+
+[[package]]
name = "which"
-version = "4.2.5"
+version = "4.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c4fb54e6113b6a8772ee41c3404fb0301ac79604489467e0a9ce1f3e97c24ae"
+checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b"
dependencies = [
"either",
- "lazy_static",
"libc",
+ "once_cell",
]
[[package]]
@@ -1263,19 +1420,25 @@
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
+name = "yansi"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
+
+[[package]]
name = "zstd"
-version = "0.5.4+zstd.1.4.7"
+version = "0.11.2+zstd.1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69996ebdb1ba8b1517f61387a883857818a66c8a295f487b1ffd8fd9d2c82910"
+checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4"
dependencies = [
"zstd-safe",
]
[[package]]
name = "zstd-safe"
-version = "2.0.6+zstd.1.4.7"
+version = "5.0.2+zstd.1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98aa931fb69ecee256d44589d19754e61851ae4769bf963b385119b1cc37a49e"
+checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db"
dependencies = [
"libc",
"zstd-sys",
@@ -1283,12 +1446,10 @@
[[package]]
name = "zstd-sys"
-version = "1.4.18+zstd.1.4.7"
+version = "2.0.1+zstd.1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1e6e8778706838f43f771d80d37787cb2fe06dafe89dd3aebaf6721b9eaec81"
+checksum = "9fd07cbbc53846d9145dbffdf6dd09a7a0aa52be46741825f5c97bdd4f73f12b"
dependencies = [
"cc",
- "glob",
- "itertools 0.9.0",
"libc",
]
--- a/rust/README.rst Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/README.rst Wed Jan 04 16:02:22 2023 +0100
@@ -77,8 +77,8 @@
Developing Rust
===============
-The current version of Rust in use is ``1.48.0``, because it's what Debian
-stable has. You can use ``rustup override set 1.48.0`` at the root of the repo
+The current version of Rust in use is ``1.61.0``, because it's what Debian
+testing has. You can use ``rustup override set 1.61.0`` at the root of the repo
to make it easier on you.
Go to the ``hg-cpython`` folder::
--- a/rust/hg-core/Cargo.toml Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/Cargo.toml Wed Jan 04 16:02:22 2023 +0100
@@ -3,7 +3,7 @@
version = "0.1.0"
authors = ["Georges Racinet <gracinet@anybox.fr>"]
description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)"
-edition = "2018"
+edition = "2021"
[lib]
name = "hg"
@@ -13,40 +13,40 @@
bytes-cast = "0.2.0"
byteorder = "1.4.3"
derive_more = "0.99.17"
-hashbrown = { version = "0.9.1", features = ["rayon"] }
-home = "0.5.3"
-im-rc = "15.0"
-itertools = "0.10.3"
+hashbrown = { version = "0.13.1", features = ["rayon"] }
+home = "0.5.4"
+im-rc = "15.1.0"
+itertools = "0.10.5"
lazy_static = "1.4.0"
-libc = "0.2"
-ouroboros = "0.15.0"
-rand = "0.8.4"
+libc = "0.2.137"
+ouroboros = "0.15.5"
+rand = "0.8.5"
rand_pcg = "0.3.1"
rand_distr = "0.4.3"
-rayon = "1.5.1"
-regex = "1.5.5"
+rayon = "1.5.3"
+regex = "1.7.0"
sha-1 = "0.10.0"
-twox-hash = "1.6.2"
+twox-hash = "1.6.3"
same-file = "1.0.6"
-tempfile = "3.1.0"
+tempfile = "3.3.0"
thread_local = "1.1.4"
-crossbeam-channel = "0.5.0"
+crossbeam-channel = "0.5.6"
micro-timer = "0.4.0"
-log = "0.4.8"
-memmap2 = { version = "0.5.3", features = ["stable_deref_trait"] }
-zstd = "0.5.3"
+log = "0.4.17"
+memmap2 = { version = "0.5.8", features = ["stable_deref_trait"] }
+zstd = "0.11.2"
format-bytes = "0.3.0"
# once_cell 1.15 uses edition 2021, while the heptapod CI
# uses an old version of Cargo that doesn't support it.
-once_cell = "1.14.0"
+once_cell = "1.16.0"
# We don't use the `miniz-oxide` backend to not change rhg benchmarks and until
# we have a clearer view of which backend is the fastest.
[dependencies.flate2]
-version = "1.0.22"
+version = "1.0.24"
features = ["zlib"]
default-features = false
[dev-dependencies]
-clap = "2.34.0"
+clap = { version = "4.0.24", features = ["derive"] }
pretty_assertions = "1.1.0"
--- a/rust/hg-core/examples/nodemap/main.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/examples/nodemap/main.rs Wed Jan 04 16:02:22 2023 +0100
@@ -3,7 +3,6 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
-use clap::*;
use hg::revlog::node::*;
use hg::revlog::nodemap::*;
use hg::revlog::*;
@@ -13,7 +12,6 @@
use std::io;
use std::io::Write;
use std::path::{Path, PathBuf};
-use std::str::FromStr;
use std::time::Instant;
mod index;
@@ -86,61 +84,66 @@
}
fn main() {
- let matches = App::new("Nodemap pure Rust example")
- .arg(
- Arg::with_name("REPOSITORY")
- .help("Path to the repository, always necessary for its index")
- .required(true),
- )
- .arg(
- Arg::with_name("NODEMAP_FILE")
- .help("Path to the nodemap file, independent of REPOSITORY")
- .required(true),
- )
- .subcommand(
- SubCommand::with_name("create")
- .about("Create NODEMAP_FILE by scanning repository index"),
- )
- .subcommand(
- SubCommand::with_name("query")
- .about("Query NODEMAP_FILE for PREFIX")
- .arg(Arg::with_name("PREFIX").required(true)),
- )
- .subcommand(
- SubCommand::with_name("bench")
- .about(
- "Perform #QUERIES random successful queries on NODEMAP_FILE")
- .arg(Arg::with_name("QUERIES").required(true)),
- )
- .get_matches();
+ use clap::{Parser, Subcommand};
- let repo = matches.value_of("REPOSITORY").unwrap();
- let nm_path = matches.value_of("NODEMAP_FILE").unwrap();
-
- let index = mmap_index(&Path::new(repo));
+ #[derive(Parser)]
+ #[command()]
+ /// Nodemap pure Rust example
+ struct App {
+ // Path to the repository, always necessary for its index
+ #[arg(short, long)]
+ repository: PathBuf,
+ // Path to the nodemap file, independent of REPOSITORY
+ #[arg(short, long)]
+ nodemap_file: PathBuf,
+ #[command(subcommand)]
+ command: Command,
+ }
- if let Some(_) = matches.subcommand_matches("create") {
- println!("Creating nodemap file {} for repository {}", nm_path, repo);
- create(&index, &Path::new(nm_path)).unwrap();
- return;
+ #[derive(Subcommand)]
+ enum Command {
+ /// Create `NODEMAP_FILE` by scanning repository index
+ Create,
+ /// Query `NODEMAP_FILE` for `prefix`
+ Query { prefix: String },
+ /// Perform #`QUERIES` random successful queries on `NODEMAP_FILE`
+ Bench { queries: usize },
}
- let nm = mmap_nodemap(&Path::new(nm_path));
- if let Some(matches) = matches.subcommand_matches("query") {
- let prefix = matches.value_of("PREFIX").unwrap();
- println!(
- "Querying {} in nodemap file {} of repository {}",
- prefix, nm_path, repo
- );
- query(&index, &nm, prefix);
- }
- if let Some(matches) = matches.subcommand_matches("bench") {
- let queries =
- usize::from_str(matches.value_of("QUERIES").unwrap()).unwrap();
- println!(
- "Doing {} random queries in nodemap file {} of repository {}",
- queries, nm_path, repo
- );
- bench(&index, &nm, queries);
+ let app = App::parse();
+
+ let repo = &app.repository;
+ let nm_path = &app.nodemap_file;
+
+ let index = mmap_index(repo);
+ let nm = mmap_nodemap(nm_path);
+
+ match &app.command {
+ Command::Create => {
+ println!(
+ "Creating nodemap file {} for repository {}",
+ nm_path.display(),
+ repo.display()
+ );
+ create(&index, &Path::new(nm_path)).unwrap();
+ }
+ Command::Bench { queries } => {
+ println!(
+ "Doing {} random queries in nodemap file {} of repository {}",
+ queries,
+ nm_path.display(),
+ repo.display()
+ );
+ bench(&index, &nm, *queries);
+ }
+ Command::Query { prefix } => {
+ println!(
+ "Querying {} in nodemap file {} of repository {}",
+ prefix,
+ nm_path.display(),
+ repo.display()
+ );
+ query(&index, &nm, prefix);
+ }
}
}
--- a/rust/hg-core/src/ancestors.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/ancestors.rs Wed Jan 04 16:02:22 2023 +0100
@@ -357,7 +357,6 @@
use super::*;
use crate::testing::{SampleGraph, VecGraph};
- use std::iter::FromIterator;
fn list_ancestors<G: Graph>(
graph: G,
--- a/rust/hg-core/src/config/values.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/config/values.rs Wed Jan 04 16:02:22 2023 +0100
@@ -30,10 +30,8 @@
("b", 1 << 0), // Needs to be last
];
for &(unit, multiplier) in UNITS {
- // TODO: use `value.strip_suffix(unit)` when we require Rust 1.45+
- if value.ends_with(unit) {
- let value_before_unit = &value[..value.len() - unit.len()];
- let float: f64 = value_before_unit.trim().parse().ok()?;
+ if let Some(value) = value.strip_suffix(unit) {
+ let float: f64 = value.trim().parse().ok()?;
if float >= 0.0 {
return Some((float * multiplier as f64).round() as u64);
} else {
--- a/rust/hg-core/src/dirstate/entry.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/dirstate/entry.rs Wed Jan 04 16:02:22 2023 +0100
@@ -1,7 +1,6 @@
use crate::dirstate_tree::on_disk::DirstateV2ParseError;
use crate::errors::HgError;
use bitflags::bitflags;
-use std::convert::{TryFrom, TryInto};
use std::fs;
use std::io;
use std::time::{SystemTime, UNIX_EPOCH};
--- a/rust/hg-core/src/dirstate/parsers.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/dirstate/parsers.rs Wed Jan 04 16:02:22 2023 +0100
@@ -9,7 +9,6 @@
use byteorder::{BigEndian, WriteBytesExt};
use bytes_cast::{unaligned, BytesCast};
use micro_timer::timed;
-use std::convert::TryFrom;
/// Parents are stored in the dirstate as byte hashes.
pub const PARENT_SIZE: usize = 20;
--- a/rust/hg-core/src/dirstate_tree/on_disk.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/dirstate_tree/on_disk.rs Wed Jan 04 16:02:22 2023 +0100
@@ -17,7 +17,6 @@
use format_bytes::format_bytes;
use rand::Rng;
use std::borrow::Cow;
-use std::convert::{TryFrom, TryInto};
use std::fmt::Write;
/// Added at the start of `.hg/dirstate` when the "v2" format is used.
--- a/rust/hg-core/src/dirstate_tree/status.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/dirstate_tree/status.rs Wed Jan 04 16:02:22 2023 +0100
@@ -15,7 +15,6 @@
use crate::utils::hg_path::HgPath;
use crate::BadMatch;
use crate::DirstateStatus;
-use crate::HgPathBuf;
use crate::HgPathCow;
use crate::PatternFileWarning;
use crate::StatusError;
@@ -147,7 +146,6 @@
let hg_path = &BorrowedPath::OnDisk(HgPath::new(""));
let has_ignored_ancestor = HasIgnoredAncestor::create(None, hg_path);
let root_cached_mtime = None;
- let root_dir_metadata = None;
// If the path we have for the repository root is a symlink, do follow it.
// (As opposed to symlinks within the working directory which are not
// followed, using `std::fs::symlink_metadata`.)
@@ -155,8 +153,12 @@
&has_ignored_ancestor,
dmap.root.as_ref(),
hg_path,
- &root_dir,
- root_dir_metadata,
+ &DirEntry {
+ hg_path: Cow::Borrowed(HgPath::new(b"")),
+ fs_path: Cow::Borrowed(&root_dir),
+ symlink_metadata: None,
+ file_type: FakeFileType::Directory,
+ },
root_cached_mtime,
is_at_repo_root,
)?;
@@ -340,7 +342,7 @@
/// need to call `read_dir`.
fn can_skip_fs_readdir(
&self,
- directory_metadata: Option<&std::fs::Metadata>,
+ directory_entry: &DirEntry,
cached_directory_mtime: Option<TruncatedTimestamp>,
) -> bool {
if !self.options.list_unknown && !self.options.list_ignored {
@@ -356,9 +358,9 @@
// The dirstate contains a cached mtime for this directory, set
// by a previous run of the `status` algorithm which found this
// directory eligible for `read_dir` caching.
- if let Some(meta) = directory_metadata {
+ if let Ok(meta) = directory_entry.symlink_metadata() {
if cached_mtime
- .likely_equal_to_mtime_of(meta)
+ .likely_equal_to_mtime_of(&meta)
.unwrap_or(false)
{
// The mtime of that directory has not changed
@@ -379,26 +381,40 @@
has_ignored_ancestor: &'ancestor HasIgnoredAncestor<'ancestor>,
dirstate_nodes: ChildNodesRef<'tree, 'on_disk>,
directory_hg_path: &BorrowedPath<'tree, 'on_disk>,
- directory_fs_path: &Path,
- directory_metadata: Option<&std::fs::Metadata>,
+ directory_entry: &DirEntry,
cached_directory_mtime: Option<TruncatedTimestamp>,
is_at_repo_root: bool,
) -> Result<bool, DirstateV2ParseError> {
- if self.can_skip_fs_readdir(directory_metadata, cached_directory_mtime)
- {
+ if self.can_skip_fs_readdir(directory_entry, cached_directory_mtime) {
dirstate_nodes
.par_iter()
.map(|dirstate_node| {
- let fs_path = directory_fs_path.join(get_path_from_bytes(
+ let fs_path = &directory_entry.fs_path;
+ let fs_path = fs_path.join(get_path_from_bytes(
dirstate_node.base_name(self.dmap.on_disk)?.as_bytes(),
));
match std::fs::symlink_metadata(&fs_path) {
- Ok(fs_metadata) => self.traverse_fs_and_dirstate(
- &fs_path,
- &fs_metadata,
- dirstate_node,
- has_ignored_ancestor,
- ),
+ Ok(fs_metadata) => {
+ let file_type =
+ match fs_metadata.file_type().try_into() {
+ Ok(file_type) => file_type,
+ Err(_) => return Ok(()),
+ };
+ let entry = DirEntry {
+ hg_path: Cow::Borrowed(
+ dirstate_node
+ .full_path(&self.dmap.on_disk)?,
+ ),
+ fs_path: Cow::Borrowed(&fs_path),
+ symlink_metadata: Some(fs_metadata),
+ file_type,
+ };
+ self.traverse_fs_and_dirstate(
+ &entry,
+ dirstate_node,
+ has_ignored_ancestor,
+ )
+ }
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
self.traverse_dirstate_only(dirstate_node)
}
@@ -419,7 +435,7 @@
let mut fs_entries = if let Ok(entries) = self.read_dir(
directory_hg_path,
- directory_fs_path,
+ &directory_entry.fs_path,
is_at_repo_root,
) {
entries
@@ -435,7 +451,7 @@
let dirstate_nodes = dirstate_nodes.sorted();
// `sort_unstable_by_key` doesn’t allow keys borrowing from the value:
// https://github.com/rust-lang/rust/issues/34162
- fs_entries.sort_unstable_by(|e1, e2| e1.base_name.cmp(&e2.base_name));
+ fs_entries.sort_unstable_by(|e1, e2| e1.hg_path.cmp(&e2.hg_path));
// Propagate here any error that would happen inside the comparison
// callback below
@@ -451,7 +467,7 @@
dirstate_node
.base_name(self.dmap.on_disk)
.unwrap()
- .cmp(&fs_entry.base_name)
+ .cmp(&fs_entry.hg_path)
},
)
.par_bridge()
@@ -461,8 +477,7 @@
match pair {
Both(dirstate_node, fs_entry) => {
self.traverse_fs_and_dirstate(
- &fs_entry.full_path,
- &fs_entry.metadata,
+ &fs_entry,
dirstate_node,
has_ignored_ancestor,
)?;
@@ -487,23 +502,21 @@
fn traverse_fs_and_dirstate<'ancestor>(
&self,
- fs_path: &Path,
- fs_metadata: &std::fs::Metadata,
+ fs_entry: &DirEntry,
dirstate_node: NodeRef<'tree, 'on_disk>,
has_ignored_ancestor: &'ancestor HasIgnoredAncestor<'ancestor>,
) -> Result<(), DirstateV2ParseError> {
let outdated_dircache =
self.check_for_outdated_directory_cache(&dirstate_node)?;
let hg_path = &dirstate_node.full_path_borrowed(self.dmap.on_disk)?;
- let file_type = fs_metadata.file_type();
- let file_or_symlink = file_type.is_file() || file_type.is_symlink();
+ let file_or_symlink = fs_entry.is_file() || fs_entry.is_symlink();
if !file_or_symlink {
// If we previously had a file here, it was removed (with
// `hg rm` or similar) or deleted before it could be
// replaced by a directory or something else.
self.mark_removed_or_deleted_if_file(&dirstate_node)?;
}
- if file_type.is_dir() {
+ if fs_entry.is_dir() {
if self.options.collect_traversed_dirs {
self.outcome
.lock()
@@ -521,14 +534,13 @@
&is_ignored,
dirstate_node.children(self.dmap.on_disk)?,
hg_path,
- fs_path,
- Some(fs_metadata),
+ fs_entry,
dirstate_node.cached_directory_mtime()?,
is_at_repo_root,
)?;
self.maybe_save_directory_mtime(
children_all_have_dirstate_node_or_are_ignored,
- fs_metadata,
+ fs_entry,
dirstate_node,
outdated_dircache,
)?
@@ -550,7 +562,7 @@
} else if entry.modified() {
self.push_outcome(Outcome::Modified, &dirstate_node)?;
} else {
- self.handle_normal_file(&dirstate_node, fs_metadata)?;
+ self.handle_normal_file(&dirstate_node, fs_entry)?;
}
} else {
// `node.entry.is_none()` indicates a "directory"
@@ -578,7 +590,7 @@
fn maybe_save_directory_mtime(
&self,
children_all_have_dirstate_node_or_are_ignored: bool,
- directory_metadata: &std::fs::Metadata,
+ directory_entry: &DirEntry,
dirstate_node: NodeRef<'tree, 'on_disk>,
outdated_directory_cache: bool,
) -> Result<(), DirstateV2ParseError> {
@@ -605,11 +617,13 @@
// resolution based on the filesystem (for example ext3
// only stores integer seconds), kernel (see
// https://stackoverflow.com/a/14393315/1162888), etc.
+ let metadata = match directory_entry.symlink_metadata() {
+ Ok(meta) => meta,
+ Err(_) => return Ok(()),
+ };
let directory_mtime = if let Ok(option) =
- TruncatedTimestamp::for_reliable_mtime_of(
- directory_metadata,
- status_start,
- ) {
+ TruncatedTimestamp::for_reliable_mtime_of(&metadata, status_start)
+ {
if let Some(directory_mtime) = option {
directory_mtime
} else {
@@ -671,18 +685,23 @@
fn handle_normal_file(
&self,
dirstate_node: &NodeRef<'tree, 'on_disk>,
- fs_metadata: &std::fs::Metadata,
+ fs_entry: &DirEntry,
) -> Result<(), DirstateV2ParseError> {
// Keep the low 31 bits
fn truncate_u64(value: u64) -> i32 {
(value & 0x7FFF_FFFF) as i32
}
+ let fs_metadata = match fs_entry.symlink_metadata() {
+ Ok(meta) => meta,
+ Err(_) => return Ok(()),
+ };
+
let entry = dirstate_node
.entry()?
.expect("handle_normal_file called with entry-less node");
let mode_changed =
- || self.options.check_exec && entry.mode_changed(fs_metadata);
+ || self.options.check_exec && entry.mode_changed(&fs_metadata);
let size = entry.size();
let size_changed = size != truncate_u64(fs_metadata.len());
if size >= 0 && size_changed && fs_metadata.file_type().is_symlink() {
@@ -697,7 +716,7 @@
} else {
let mtime_looks_clean;
if let Some(dirstate_mtime) = entry.truncated_mtime() {
- let fs_mtime = TruncatedTimestamp::for_mtime_of(fs_metadata)
+ let fs_mtime = TruncatedTimestamp::for_mtime_of(&fs_metadata)
.expect("OS/libc does not support mtime?");
// There might be a change in the future if for example the
// internal clock become off while process run, but this is a
@@ -767,10 +786,9 @@
directory_hg_path: &HgPath,
fs_entry: &DirEntry,
) -> bool {
- let hg_path = directory_hg_path.join(&fs_entry.base_name);
- let file_type = fs_entry.metadata.file_type();
- let file_or_symlink = file_type.is_file() || file_type.is_symlink();
- if file_type.is_dir() {
+ let hg_path = directory_hg_path.join(&fs_entry.hg_path);
+ let file_or_symlink = fs_entry.is_file() || fs_entry.is_symlink();
+ if fs_entry.is_dir() {
let is_ignored =
has_ignored_ancestor || (self.ignore_fn)(&hg_path);
let traverse_children = if is_ignored {
@@ -783,11 +801,9 @@
};
if traverse_children {
let is_at_repo_root = false;
- if let Ok(children_fs_entries) = self.read_dir(
- &hg_path,
- &fs_entry.full_path,
- is_at_repo_root,
- ) {
+ if let Ok(children_fs_entries) =
+ self.read_dir(&hg_path, &fs_entry.fs_path, is_at_repo_root)
+ {
children_fs_entries.par_iter().for_each(|child_fs_entry| {
self.traverse_fs_only(
is_ignored,
@@ -850,15 +866,46 @@
}
}
-struct DirEntry {
- base_name: HgPathBuf,
- full_path: PathBuf,
- metadata: std::fs::Metadata,
+/// Since [`std::fs::FileType`] cannot be built directly, we emulate what we
+/// care about.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum FakeFileType {
+ File,
+ Directory,
+ Symlink,
}
-impl DirEntry {
- /// Returns **unsorted** entries in the given directory, with name and
- /// metadata.
+impl TryFrom<std::fs::FileType> for FakeFileType {
+ type Error = ();
+
+ fn try_from(f: std::fs::FileType) -> Result<Self, Self::Error> {
+ if f.is_dir() {
+ Ok(Self::Directory)
+ } else if f.is_file() {
+ Ok(Self::File)
+ } else if f.is_symlink() {
+ Ok(Self::Symlink)
+ } else {
+ // Things like FIFO etc.
+ Err(())
+ }
+ }
+}
+
+struct DirEntry<'a> {
+ /// Path as stored in the dirstate, or just the filename for optimization.
+ hg_path: HgPathCow<'a>,
+ /// Filesystem path
+ fs_path: Cow<'a, Path>,
+ /// Lazily computed
+ symlink_metadata: Option<std::fs::Metadata>,
+ /// Already computed for ergonomics.
+ file_type: FakeFileType,
+}
+
+impl<'a> DirEntry<'a> {
+ /// Returns **unsorted** entries in the given directory, with name,
+ /// metadata and file type.
///
/// If a `.hg` sub-directory is encountered:
///
@@ -872,7 +919,7 @@
let mut results = Vec::new();
for entry in read_dir_path.read_dir()? {
let entry = entry?;
- let metadata = match entry.metadata() {
+ let file_type = match entry.file_type() {
Ok(v) => v,
Err(e) => {
// race with file deletion?
@@ -889,7 +936,7 @@
if is_at_repo_root {
// Skip the repo’s own .hg (might be a symlink)
continue;
- } else if metadata.is_dir() {
+ } else if file_type.is_dir() {
// A .hg sub-directory at another location means a subrepo,
// skip it entirely.
return Ok(Vec::new());
@@ -900,15 +947,40 @@
} else {
entry.path()
};
- let base_name = get_bytes_from_os_string(file_name).into();
+ let filename =
+ Cow::Owned(get_bytes_from_os_string(file_name).into());
+ let file_type = match FakeFileType::try_from(file_type) {
+ Ok(file_type) => file_type,
+ Err(_) => continue,
+ };
results.push(DirEntry {
- base_name,
- full_path,
- metadata,
+ hg_path: filename,
+ fs_path: Cow::Owned(full_path.to_path_buf()),
+ symlink_metadata: None,
+ file_type,
})
}
Ok(results)
}
+
+ fn symlink_metadata(&self) -> Result<std::fs::Metadata, std::io::Error> {
+ match &self.symlink_metadata {
+ Some(meta) => Ok(meta.clone()),
+ None => std::fs::symlink_metadata(&self.fs_path),
+ }
+ }
+
+ fn is_dir(&self) -> bool {
+ self.file_type == FakeFileType::Directory
+ }
+
+ fn is_file(&self) -> bool {
+ self.file_type == FakeFileType::File
+ }
+
+ fn is_symlink(&self) -> bool {
+ self.file_type == FakeFileType::Symlink
+ }
}
/// Return the `mtime` of a temporary file newly-created in the `.hg` directory
--- a/rust/hg-core/src/lock.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/lock.rs Wed Jan 04 16:02:22 2023 +0100
@@ -2,7 +2,6 @@
use crate::errors::HgError;
use crate::errors::HgResultExt;
-use crate::utils::StrExt;
use crate::vfs::Vfs;
use std::io;
use std::io::ErrorKind;
@@ -107,7 +106,7 @@
/// running anymore.
fn lock_should_be_broken(data: &Option<String>) -> bool {
(|| -> Option<bool> {
- let (prefix, pid) = data.as_ref()?.split_2(':')?;
+ let (prefix, pid) = data.as_ref()?.split_once(':')?;
if prefix != &*LOCK_PREFIX {
return Some(false);
}
--- a/rust/hg-core/src/matchers.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/matchers.rs Wed Jan 04 16:02:22 2023 +0100
@@ -27,7 +27,6 @@
use std::borrow::ToOwned;
use std::collections::HashSet;
use std::fmt::{Display, Error, Formatter};
-use std::iter::FromIterator;
use std::ops::Deref;
use std::path::{Path, PathBuf};
--- a/rust/hg-core/src/revlog/index.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/revlog/index.rs Wed Jan 04 16:02:22 2023 +0100
@@ -1,4 +1,3 @@
-use std::convert::TryInto;
use std::ops::Deref;
use byteorder::{BigEndian, ByteOrder};
--- a/rust/hg-core/src/revlog/node.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/revlog/node.rs Wed Jan 04 16:02:22 2023 +0100
@@ -10,7 +10,6 @@
use crate::errors::HgError;
use bytes_cast::BytesCast;
-use std::convert::{TryFrom, TryInto};
use std::fmt;
/// The length in bytes of a `Node`
--- a/rust/hg-core/src/revlog/nodemap_docket.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/revlog/nodemap_docket.rs Wed Jan 04 16:02:22 2023 +0100
@@ -3,7 +3,6 @@
use memmap2::Mmap;
use std::path::{Path, PathBuf};
-use crate::utils::strip_suffix;
use crate::vfs::Vfs;
const ONDISK_VERSION: u8 = 1;
@@ -97,8 +96,9 @@
.expect("expected a base name")
.to_str()
.expect("expected an ASCII file name in the store");
- let prefix = strip_suffix(docket_name, ".n.a")
- .or_else(|| strip_suffix(docket_name, ".n"))
+ let prefix = docket_name
+ .strip_suffix(".n.a")
+ .or_else(|| docket_name.strip_suffix(".n"))
.expect("expected docket path in .n or .n.a");
let name = format!("{}-{}.nd", prefix, uid);
docket_path
--- a/rust/hg-core/src/revlog/revlog.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/revlog/revlog.rs Wed Jan 04 16:02:22 2023 +0100
@@ -1,5 +1,4 @@
use std::borrow::Cow;
-use std::convert::TryFrom;
use std::io::Read;
use std::ops::Deref;
use std::path::Path;
@@ -518,7 +517,7 @@
} else {
let cap = self.uncompressed_len.max(0) as usize;
let mut buf = vec![0; cap];
- let len = zstd::block::decompress_to_buffer(self.bytes, &mut buf)
+ let len = zstd::bulk::decompress_to_buffer(self.bytes, &mut buf)
.map_err(|e| corrupted(e.to_string()))?;
if len != self.uncompressed_len as usize {
Err(corrupted("uncompressed length does not match"))
--- a/rust/hg-core/src/utils.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/utils.rs Wed Jan 04 16:02:22 2023 +0100
@@ -145,21 +145,6 @@
}
}
-pub trait StrExt {
- // TODO: Use https://doc.rust-lang.org/nightly/std/primitive.str.html#method.split_once
- // once we require Rust 1.52+
- fn split_2(&self, separator: char) -> Option<(&str, &str)>;
-}
-
-impl StrExt for str {
- fn split_2(&self, separator: char) -> Option<(&str, &str)> {
- let mut iter = self.splitn(2, separator);
- let a = iter.next()?;
- let b = iter.next()?;
- Some((a, b))
- }
-}
-
pub trait Escaped {
/// Return bytes escaped for display to the user
fn escaped_bytes(&self) -> Vec<u8>;
@@ -211,28 +196,20 @@
}
}
-// TODO: use the str method when we require Rust 1.45
-pub(crate) fn strip_suffix<'a>(s: &'a str, suffix: &str) -> Option<&'a str> {
- if s.ends_with(suffix) {
- Some(&s[..s.len() - suffix.len()])
- } else {
- None
- }
-}
-
#[cfg(unix)]
pub fn shell_quote(value: &[u8]) -> Vec<u8> {
- // TODO: Use the `matches!` macro when we require Rust 1.42+
- if value.iter().all(|&byte| match byte {
- b'a'..=b'z'
- | b'A'..=b'Z'
- | b'0'..=b'9'
- | b'.'
- | b'_'
- | b'/'
- | b'+'
- | b'-' => true,
- _ => false,
+ if value.iter().all(|&byte| {
+ matches!(
+ byte,
+ b'a'..=b'z'
+ | b'A'..=b'Z'
+ | b'0'..=b'9'
+ | b'.'
+ | b'_'
+ | b'/'
+ | b'+'
+ | b'-'
+ )
}) {
value.to_owned()
} else {
--- a/rust/hg-core/src/utils/hg_path.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-core/src/utils/hg_path.rs Wed Jan 04 16:02:22 2023 +0100
@@ -8,7 +8,6 @@
use crate::utils::SliceExt;
use std::borrow::Borrow;
use std::borrow::Cow;
-use std::convert::TryFrom;
use std::ffi::{OsStr, OsString};
use std::fmt;
use std::ops::Deref;
--- a/rust/hg-cpython/Cargo.toml Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-cpython/Cargo.toml Wed Jan 04 16:02:22 2023 +0100
@@ -2,18 +2,18 @@
name = "hg-cpython"
version = "0.1.0"
authors = ["Georges Racinet <gracinet@anybox.fr>"]
-edition = "2018"
+edition = "2021"
[lib]
name='rusthg'
crate-type = ["cdylib"]
[dependencies]
-cpython = { version = "0.7.0", features = ["extension-module"] }
-crossbeam-channel = "0.5.2"
+cpython = { version = "0.7.1", features = ["extension-module"] }
+crossbeam-channel = "0.5.6"
hg-core = { path = "../hg-core"}
-libc = "0.2.119"
-log = "0.4.14"
-env_logger = "0.9.0"
+libc = "0.2.137"
+log = "0.4.17"
+env_logger = "0.9.3"
stable_deref_trait = "1.2.0"
vcsgraph = "0.2.0"
--- a/rust/hg-cpython/src/conversion.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-cpython/src/conversion.rs Wed Jan 04 16:02:22 2023 +0100
@@ -10,7 +10,6 @@
use cpython::{ObjectProtocol, PyObject, PyResult, Python};
use hg::Revision;
-use std::iter::FromIterator;
/// Utility function to convert a Python iterable into various collections
///
--- a/rust/hg-cpython/src/dirstate/dirstate_map.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs Wed Jan 04 16:02:22 2023 +0100
@@ -9,7 +9,6 @@
//! `hg-core` package.
use std::cell::{RefCell, RefMut};
-use std::convert::TryInto;
use cpython::{
exc, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList, PyNone, PyObject,
--- a/rust/hg-cpython/src/dirstate/item.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-cpython/src/dirstate/item.rs Wed Jan 04 16:02:22 2023 +0100
@@ -151,6 +151,10 @@
Ok(self.entry(py).get().added())
}
+ @property
+ def modified(&self) -> PyResult<bool> {
+ Ok(self.entry(py).get().modified())
+ }
@property
def p2_info(&self) -> PyResult<bool> {
--- a/rust/hg-cpython/src/utils.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/hg-cpython/src/utils.rs Wed Jan 04 16:02:22 2023 +0100
@@ -1,7 +1,6 @@
use cpython::exc::ValueError;
use cpython::{PyBytes, PyDict, PyErr, PyObject, PyResult, PyTuple, Python};
use hg::revlog::Node;
-use std::convert::TryFrom;
#[allow(unused)]
pub fn print_python_trace(py: Python) -> PyResult<PyObject> {
--- a/rust/rhg/Cargo.toml Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/Cargo.toml Wed Jan 04 16:02:22 2023 +0100
@@ -5,21 +5,21 @@
"Antoine Cezar <antoine.cezar@octobus.net>",
"Raphaël Gomès <raphael.gomes@octobus.net>",
]
-edition = "2018"
+edition = "2021"
[dependencies]
atty = "0.2.14"
hg-core = { path = "../hg-core"}
-chrono = "0.4.19"
-clap = "2.34.0"
+chrono = "0.4.23"
+clap = { version = "4.0.24", features = ["cargo"] }
derive_more = "0.99.17"
-home = "0.5.3"
+home = "0.5.4"
lazy_static = "1.4.0"
-log = "0.4.14"
+log = "0.4.17"
micro-timer = "0.4.0"
-regex = "1.5.5"
-env_logger = "0.9.0"
+regex = "1.7.0"
+env_logger = "0.9.3"
format-bytes = "0.3.0"
users = "0.11.0"
-which = "4.2.5"
-rayon = "1.5.1"
+which = "4.3.0"
+rayon = "1.5.3"
--- a/rust/rhg/src/commands/cat.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/commands/cat.rs Wed Jan 04 16:02:22 2023 +0100
@@ -4,28 +4,28 @@
use hg::operations::cat;
use hg::utils::hg_path::HgPathBuf;
use micro_timer::timed;
-use std::convert::TryFrom;
+use std::ffi::OsString;
+use std::os::unix::prelude::OsStrExt;
pub const HELP_TEXT: &str = "
Output the current or given revision of files
";
-pub fn args() -> clap::App<'static, 'static> {
- clap::SubCommand::with_name("cat")
+pub fn args() -> clap::Command {
+ clap::command!("cat")
.arg(
- Arg::with_name("rev")
+ Arg::new("rev")
.help("search the repository as it is in REV")
- .short("-r")
- .long("--rev")
- .value_name("REV")
- .takes_value(true),
+ .short('r')
+ .long("rev")
+ .value_name("REV"),
)
.arg(
- clap::Arg::with_name("files")
+ clap::Arg::new("files")
.required(true)
- .multiple(true)
- .empty_values(false)
+ .num_args(1..)
.value_name("FILE")
+ .value_parser(clap::value_parser!(std::ffi::OsString))
.help("Files to output"),
)
.about(HELP_TEXT)
@@ -42,11 +42,15 @@
));
}
- let rev = invocation.subcommand_args.value_of("rev");
- let file_args = match invocation.subcommand_args.values_of("files") {
- Some(files) => files.collect(),
- None => vec![],
- };
+ let rev = invocation.subcommand_args.get_one::<String>("rev");
+ let file_args =
+ match invocation.subcommand_args.get_many::<OsString>("files") {
+ Some(files) => files
+ .filter(|s| !s.is_empty())
+ .map(|s| s.as_os_str())
+ .collect(),
+ None => vec![],
+ };
let repo = invocation.repo?;
let cwd = hg::utils::current_dir()?;
@@ -54,8 +58,8 @@
let working_directory = cwd.join(working_directory); // Make it absolute
let mut files = vec![];
- for file in file_args.iter() {
- if file.starts_with("set:") {
+ for file in file_args {
+ if file.as_bytes().starts_with(b"set:") {
let message = "fileset";
return Err(CommandError::unsupported(message));
}
@@ -63,7 +67,7 @@
let normalized = cwd.join(&file);
// TODO: actually normalize `..` path segments etc?
let dotted = normalized.components().any(|c| c.as_os_str() == "..");
- if file == &"." || dotted {
+ if file.as_bytes() == b"." || dotted {
let message = "`..` or `.` path segment";
return Err(CommandError::unsupported(message));
}
@@ -75,7 +79,7 @@
.map_err(|_| {
CommandError::abort(format!(
"abort: {} not under root '{}'\n(consider using '--cwd {}')",
- file,
+ String::from_utf8_lossy(file.as_bytes()),
working_directory.display(),
relative_path.display(),
))
--- a/rust/rhg/src/commands/config.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/commands/config.rs Wed Jan 04 16:02:22 2023 +0100
@@ -8,14 +8,13 @@
With one argument of the form section.name, print just the value of that config item.
";
-pub fn args() -> clap::App<'static, 'static> {
- clap::SubCommand::with_name("config")
+pub fn args() -> clap::Command {
+ clap::command!("config")
.arg(
- Arg::with_name("name")
+ Arg::new("name")
.help("the section.name to print")
.value_name("NAME")
- .required(true)
- .takes_value(true),
+ .required(true),
)
.about(HELP_TEXT)
}
@@ -23,7 +22,7 @@
pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
let (section, name) = invocation
.subcommand_args
- .value_of("name")
+ .get_one::<String>("name")
.expect("missing required CLI argument")
.as_bytes()
.split_2(b'.')
--- a/rust/rhg/src/commands/debugdata.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/commands/debugdata.rs Wed Jan 04 16:02:22 2023 +0100
@@ -8,27 +8,27 @@
Dump the contents of a data file revision
";
-pub fn args() -> clap::App<'static, 'static> {
- clap::SubCommand::with_name("debugdata")
+pub fn args() -> clap::Command {
+ clap::command!("debugdata")
.arg(
- Arg::with_name("changelog")
+ Arg::new("changelog")
.help("open changelog")
- .short("-c")
- .long("--changelog"),
+ .short('c')
+ .action(clap::ArgAction::SetTrue),
)
.arg(
- Arg::with_name("manifest")
+ Arg::new("manifest")
.help("open manifest")
- .short("-m")
- .long("--manifest"),
+ .short('m')
+ .action(clap::ArgAction::SetTrue),
)
.group(
- ArgGroup::with_name("")
+ ArgGroup::new("revlog")
.args(&["changelog", "manifest"])
.required(true),
)
.arg(
- Arg::with_name("rev")
+ Arg::new("rev")
.help("revision")
.required(true)
.value_name("REV"),
@@ -40,19 +40,21 @@
pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
let args = invocation.subcommand_args;
let rev = args
- .value_of("rev")
+ .get_one::<String>("rev")
.expect("rev should be a required argument");
- let kind =
- match (args.is_present("changelog"), args.is_present("manifest")) {
- (true, false) => DebugDataKind::Changelog,
- (false, true) => DebugDataKind::Manifest,
- (true, true) => {
- unreachable!("Should not happen since options are exclusive")
- }
- (false, false) => {
- unreachable!("Should not happen since options are required")
- }
- };
+ let kind = match (
+ args.get_one::<bool>("changelog").unwrap(),
+ args.get_one::<bool>("manifest").unwrap(),
+ ) {
+ (true, false) => DebugDataKind::Changelog,
+ (false, true) => DebugDataKind::Manifest,
+ (true, true) => {
+ unreachable!("Should not happen since options are exclusive")
+ }
+ (false, false) => {
+ unreachable!("Should not happen since options are required")
+ }
+ };
let repo = invocation.repo?;
if repo.has_narrow() {
@@ -60,7 +62,7 @@
"support for ellipsis nodes is missing and repo has narrow enabled",
));
}
- let data = debug_data(repo, rev, kind).map_err(|e| (e, rev))?;
+ let data = debug_data(repo, rev, kind).map_err(|e| (e, rev.as_ref()))?;
let mut stdout = invocation.ui.stdout_buffer();
stdout.write_all(&data)?;
--- a/rust/rhg/src/commands/debugignorerhg.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/commands/debugignorerhg.rs Wed Jan 04 16:02:22 2023 +0100
@@ -1,5 +1,4 @@
use crate::error::CommandError;
-use clap::SubCommand;
use hg;
use hg::matchers::get_ignore_matcher;
use hg::StatusError;
@@ -13,8 +12,8 @@
Some options might be missing, check the list below.
";
-pub fn args() -> clap::App<'static, 'static> {
- SubCommand::with_name("debugignorerhg").about(HELP_TEXT)
+pub fn args() -> clap::Command {
+ clap::command!("debugignorerhg").about(HELP_TEXT)
}
pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
--- a/rust/rhg/src/commands/debugrequirements.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/commands/debugrequirements.rs Wed Jan 04 16:02:22 2023 +0100
@@ -4,8 +4,8 @@
Print the current repo requirements.
";
-pub fn args() -> clap::App<'static, 'static> {
- clap::SubCommand::with_name("debugrequirements").about(HELP_TEXT)
+pub fn args() -> clap::Command {
+ clap::command!("debugrequirements").about(HELP_TEXT)
}
pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
--- a/rust/rhg/src/commands/debugrhgsparse.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/commands/debugrhgsparse.rs Wed Jan 04 16:02:22 2023 +0100
@@ -1,19 +1,21 @@
-use std::os::unix::prelude::OsStrExt;
+use std::{
+ ffi::{OsStr, OsString},
+ os::unix::prelude::OsStrExt,
+};
use crate::error::CommandError;
-use clap::SubCommand;
use hg::{self, utils::hg_path::HgPath};
pub const HELP_TEXT: &str = "";
-pub fn args() -> clap::App<'static, 'static> {
- SubCommand::with_name("debugrhgsparse")
+pub fn args() -> clap::Command {
+ clap::command!("debugrhgsparse")
.arg(
- clap::Arg::with_name("files")
+ clap::Arg::new("files")
+ .value_name("FILES")
.required(true)
- .multiple(true)
- .empty_values(false)
- .value_name("FILES")
+ .num_args(1..)
+ .value_parser(clap::value_parser!(std::ffi::OsString))
.help("Files to check against sparse profile"),
)
.about(HELP_TEXT)
@@ -23,8 +25,12 @@
let repo = invocation.repo?;
let (matcher, _warnings) = hg::sparse::matcher(&repo).unwrap();
- let files = invocation.subcommand_args.values_of_os("files");
+ let files = invocation.subcommand_args.get_many::<OsString>("files");
if let Some(files) = files {
+ let files: Vec<&OsStr> = files
+ .filter(|s| !s.is_empty())
+ .map(|s| s.as_os_str())
+ .collect();
for file in files {
invocation.ui.write_stdout(b"matches: ")?;
invocation.ui.write_stdout(
--- a/rust/rhg/src/commands/files.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/commands/files.rs Wed Jan 04 16:02:22 2023 +0100
@@ -14,15 +14,14 @@
Returns 0 on success.
";
-pub fn args() -> clap::App<'static, 'static> {
- clap::SubCommand::with_name("files")
+pub fn args() -> clap::Command {
+ clap::command!("files")
.arg(
- Arg::with_name("rev")
+ Arg::new("rev")
.help("search the repository as it is in REV")
- .short("-r")
- .long("--revision")
- .value_name("REV")
- .takes_value(true),
+ .short('r')
+ .long("revision")
+ .value_name("REV"),
)
.about(HELP_TEXT)
}
@@ -35,7 +34,7 @@
));
}
- let rev = invocation.subcommand_args.value_of("rev");
+ let rev = invocation.subcommand_args.get_one::<String>("rev");
let repo = invocation.repo?;
@@ -57,7 +56,8 @@
"rhg files -r <rev> is not supported in narrow clones",
));
}
- let files = list_rev_tracked_files(repo, rev).map_err(|e| (e, rev))?;
+ let files = list_rev_tracked_files(repo, rev)
+ .map_err(|e| (e, rev.as_ref()))?;
display_files(invocation.ui, repo, files.iter())
} else {
// The dirstate always reflects the sparse narrowspec, so if
--- a/rust/rhg/src/commands/root.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/commands/root.rs Wed Jan 04 16:02:22 2023 +0100
@@ -9,8 +9,8 @@
Returns 0 on success.
";
-pub fn args() -> clap::App<'static, 'static> {
- clap::SubCommand::with_name("root").about(HELP_TEXT)
+pub fn args() -> clap::Command {
+ clap::command!("root").about(HELP_TEXT)
}
pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
--- a/rust/rhg/src/commands/status.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/commands/status.rs Wed Jan 04 16:02:22 2023 +0100
@@ -8,7 +8,7 @@
use crate::error::CommandError;
use crate::ui::Ui;
use crate::utils::path_utils::RelativizePaths;
-use clap::{Arg, SubCommand};
+use clap::Arg;
use format_bytes::format_bytes;
use hg::config::Config;
use hg::dirstate::has_exec_bit;
@@ -41,75 +41,86 @@
Some options might be missing, check the list below.
";
-pub fn args() -> clap::App<'static, 'static> {
- SubCommand::with_name("status")
+pub fn args() -> clap::Command {
+ clap::command!("status")
.alias("st")
.about(HELP_TEXT)
.arg(
- Arg::with_name("all")
+ Arg::new("all")
.help("show status of all files")
- .short("-A")
- .long("--all"),
+ .short('A')
+ .action(clap::ArgAction::SetTrue)
+ .long("all"),
)
.arg(
- Arg::with_name("modified")
+ Arg::new("modified")
.help("show only modified files")
- .short("-m")
- .long("--modified"),
+ .short('m')
+ .action(clap::ArgAction::SetTrue)
+ .long("modified"),
)
.arg(
- Arg::with_name("added")
+ Arg::new("added")
.help("show only added files")
- .short("-a")
- .long("--added"),
+ .short('a')
+ .action(clap::ArgAction::SetTrue)
+ .long("added"),
)
.arg(
- Arg::with_name("removed")
+ Arg::new("removed")
.help("show only removed files")
- .short("-r")
- .long("--removed"),
+ .short('r')
+ .action(clap::ArgAction::SetTrue)
+ .long("removed"),
)
.arg(
- Arg::with_name("clean")
+ Arg::new("clean")
.help("show only clean files")
- .short("-c")
- .long("--clean"),
+ .short('c')
+ .action(clap::ArgAction::SetTrue)
+ .long("clean"),
)
.arg(
- Arg::with_name("deleted")
+ Arg::new("deleted")
.help("show only deleted files")
- .short("-d")
- .long("--deleted"),
+ .short('d')
+ .action(clap::ArgAction::SetTrue)
+ .long("deleted"),
)
.arg(
- Arg::with_name("unknown")
+ Arg::new("unknown")
.help("show only unknown (not tracked) files")
- .short("-u")
- .long("--unknown"),
+ .short('u')
+ .action(clap::ArgAction::SetTrue)
+ .long("unknown"),
)
.arg(
- Arg::with_name("ignored")
+ Arg::new("ignored")
.help("show only ignored files")
- .short("-i")
- .long("--ignored"),
+ .short('i')
+ .action(clap::ArgAction::SetTrue)
+ .long("ignored"),
)
.arg(
- Arg::with_name("copies")
+ Arg::new("copies")
.help("show source of copied files (DEFAULT: ui.statuscopies)")
- .short("-C")
- .long("--copies"),
+ .short('C')
+ .action(clap::ArgAction::SetTrue)
+ .long("copies"),
)
.arg(
- Arg::with_name("no-status")
+ Arg::new("no-status")
.help("hide status prefix")
- .short("-n")
- .long("--no-status"),
+ .short('n')
+ .action(clap::ArgAction::SetTrue)
+ .long("no-status"),
)
.arg(
- Arg::with_name("verbose")
+ Arg::new("verbose")
.help("enable additional output")
- .short("-v")
- .long("--verbose"),
+ .short('v')
+ .action(clap::ArgAction::SetTrue)
+ .long("verbose"),
)
}
@@ -200,25 +211,25 @@
let config = invocation.config;
let args = invocation.subcommand_args;
- let verbose = !args.is_present("print0")
- && (args.is_present("verbose")
- || config.get_bool(b"ui", b"verbose")?
- || config.get_bool(b"commands", b"status.verbose")?);
+ // TODO add `!args.get_flag("print0") &&` when we support `print0`
+ let verbose = args.get_flag("verbose")
+ || config.get_bool(b"ui", b"verbose")?
+ || config.get_bool(b"commands", b"status.verbose")?;
- let all = args.is_present("all");
+ let all = args.get_flag("all");
let display_states = if all {
// TODO when implementing `--quiet`: it excludes clean files
// from `--all`
ALL_DISPLAY_STATES
} else {
let requested = DisplayStates {
- modified: args.is_present("modified"),
- added: args.is_present("added"),
- removed: args.is_present("removed"),
- clean: args.is_present("clean"),
- deleted: args.is_present("deleted"),
- unknown: args.is_present("unknown"),
- ignored: args.is_present("ignored"),
+ modified: args.get_flag("modified"),
+ added: args.get_flag("added"),
+ removed: args.get_flag("removed"),
+ clean: args.get_flag("clean"),
+ deleted: args.get_flag("deleted"),
+ unknown: args.get_flag("unknown"),
+ ignored: args.get_flag("ignored"),
};
if requested.is_empty() {
DEFAULT_DISPLAY_STATES
@@ -226,9 +237,9 @@
requested
}
};
- let no_status = args.is_present("no-status");
+ let no_status = args.get_flag("no-status");
let list_copies = all
- || args.is_present("copies")
+ || args.get_flag("copies")
|| config.get_bool(b"ui", b"statuscopies")?;
let repo = invocation.repo?;
--- a/rust/rhg/src/main.rs Wed Jan 04 12:06:07 2023 +0100
+++ b/rust/rhg/src/main.rs Wed Jan 04 16:02:22 2023 +0100
@@ -1,10 +1,7 @@
extern crate log;
use crate::error::CommandError;
use crate::ui::{local_to_utf8, Ui};
-use clap::App;
-use clap::AppSettings;
-use clap::Arg;
-use clap::ArgMatches;
+use clap::{command, Arg, ArgMatches};
use format_bytes::{format_bytes, join};
use hg::config::{Config, ConfigSource, PlainInfo};
use hg::repo::{Repo, RepoError};
@@ -35,55 +32,47 @@
) -> Result<(), CommandError> {
check_unsupported(config, repo)?;
- let app = App::new("rhg")
- .global_setting(AppSettings::AllowInvalidUtf8)
- .global_setting(AppSettings::DisableVersion)
- .setting(AppSettings::SubcommandRequired)
- .setting(AppSettings::VersionlessSubcommands)
+ let app = command!()
+ .subcommand_required(true)
.arg(
- Arg::with_name("repository")
+ Arg::new("repository")
.help("repository root directory")
- .short("-R")
- .long("--repository")
+ .short('R')
.value_name("REPO")
- .takes_value(true)
// Both ok: `hg -R ./foo log` or `hg log -R ./foo`
.global(true),
)
.arg(
- Arg::with_name("config")
+ Arg::new("config")
.help("set/override config option (use 'section.name=value')")
- .long("--config")
.value_name("CONFIG")
- .takes_value(true)
.global(true)
+ .long("config")
// Ok: `--config section.key1=val --config section.key2=val2`
- .multiple(true)
// Not ok: `--config section.key1=val section.key2=val2`
- .number_of_values(1),
+ .action(clap::ArgAction::Append),
)
.arg(
- Arg::with_name("cwd")
+ Arg::new("cwd")
.help("change working directory")
- .long("--cwd")
.value_name("DIR")
- .takes_value(true)
+ .long("cwd")
.global(true),
)
.arg(
- Arg::with_name("color")
+ Arg::new("color")
.help("when to colorize (boolean, always, auto, never, or debug)")
- .long("--color")
.value_name("TYPE")
- .takes_value(true)
+ .long("color")
.global(true),
)
.version("0.0.1");
let app = add_subcommand_args(app);
- let matches = app.clone().get_matches_from_safe(argv.iter())?;
+ let matches = app.clone().try_get_matches_from(argv.iter())?;
- let (subcommand_name, subcommand_matches) = matches.subcommand();
+ let (subcommand_name, subcommand_args) =
+ matches.subcommand().expect("subcommand required");
// Mercurial allows users to define "defaults" for commands, fallback
// if a default is detected for the current command
@@ -104,9 +93,7 @@
}
}
let run = subcommand_run_fn(subcommand_name)
- .expect("unknown subcommand name from clap despite AppSettings::SubcommandRequired");
- let subcommand_args = subcommand_matches
- .expect("no subcommand arguments from clap despite AppSettings::SubcommandRequired");
+ .expect("unknown subcommand name from clap despite Command::subcommand_required");
let invocation = CliInvocation {
ui,
@@ -535,7 +522,7 @@
)+
}
- fn add_subcommand_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> {
+ fn add_subcommand_args(app: clap::Command) -> clap::Command {
app
$(
.subcommand(commands::$command::args())
@@ -569,7 +556,7 @@
pub struct CliInvocation<'a> {
ui: &'a Ui,
- subcommand_args: &'a ArgMatches<'a>,
+ subcommand_args: &'a ArgMatches,
config: &'a Config,
/// References inside `Result` is a bit peculiar but allow
/// `invocation.repo?` to work out with `&CliInvocation` since this
--- a/setup.py Wed Jan 04 12:06:07 2023 +0100
+++ b/setup.py Wed Jan 04 16:02:22 2023 +0100
@@ -1607,6 +1607,10 @@
'mercurial.helptext.internals': [
'*.txt',
],
+ 'mercurial.thirdparty.attr': [
+ '*.pyi',
+ 'py.typed',
+ ],
}
--- a/tests/notcapable Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/notcapable Wed Jan 04 16:02:22 2023 +0100
@@ -15,10 +15,10 @@
if name in b'$CAP'.split(b' '):
return False
return orig(self, name, *args, **kwargs)
-def wrappeer(orig, self):
+def wrappeer(orig, self, path=None):
# Since we're disabling some newer features, we need to make sure local
# repos add in the legacy features again.
- return localrepo.locallegacypeer(self)
+ return localrepo.locallegacypeer(self, path=path)
EOF
echo '[extensions]' >> $HGRCPATH
--- a/tests/remotefilelog-getflogheads.py Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/remotefilelog-getflogheads.py Wed Jan 04 16:02:22 2023 +0100
@@ -19,7 +19,7 @@
Used for testing purpose
"""
- dest = urlutil.get_unique_pull_path(b'getflogheads', repo, ui)[0]
+ dest = urlutil.get_unique_pull_path_obj(b'getflogheads', ui)
peer = hg.peer(repo, {}, dest)
try:
--- a/tests/test-acl.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-acl.t Wed Jan 04 16:02:22 2023 +0100
@@ -116,11 +116,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -131,9 +131,9 @@
adding foo/Bar/file.txt revisions
adding foo/file.txt revisions
adding quux/file.py revisions
- bundle2-input-part: total payload size 1553
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
updating the branch cache
added 3 changesets with 3 changes to 3 files
@@ -182,11 +182,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -200,9 +200,9 @@
adding quux/file.py revisions
calling hook pretxnchangegroup.acl: hgext.acl.hook
acl: changes have source "push" - skipping
- bundle2-input-part: total payload size 1553
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
truncating cache/rbc-revs-v1 to 8
updating the branch cache
@@ -252,11 +252,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -280,9 +280,9 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- bundle2-input-part: total payload size 1553
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
truncating cache/rbc-revs-v1 to 8
updating the branch cache
@@ -332,11 +332,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -356,8 +356,8 @@
acl: acl.deny not enabled
acl: branch access granted: "ef1ea85a6374" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -403,11 +403,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -431,8 +431,8 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -478,11 +478,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -502,8 +502,8 @@
acl: acl.deny enabled, 0 entries for user barney
acl: branch access granted: "ef1ea85a6374" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -550,11 +550,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -578,8 +578,8 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -627,11 +627,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -653,8 +653,8 @@
acl: path access granted: "ef1ea85a6374"
acl: branch access granted: "f9cafe1212c8" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -701,11 +701,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -725,8 +725,8 @@
acl: acl.deny enabled, 0 entries for user barney
acl: branch access granted: "ef1ea85a6374" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -776,13 +776,13 @@
bundle2-output-part: "bookmarks" 37 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:bookmarks" supported
- bundle2-input-part: total payload size 37
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -798,11 +798,11 @@
acl: acl.deny enabled, 2 entries for user fred
acl: branch access granted: "ef1ea85a6374" on branch "default"
acl: path access granted: "ef1ea85a6374"
- bundle2-input-part: total payload size 520
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "bookmarks" supported
- bundle2-input-part: total payload size 37
+ bundle2-input-part: total payload size * (glob)
calling hook prepushkey.acl: hgext.acl.hook
acl: checking access for user "fred"
acl: acl.allow.bookmarks not enabled
@@ -865,13 +865,13 @@
bundle2-output-part: "bookmarks" 37 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:bookmarks" supported
- bundle2-input-part: total payload size 37
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -887,11 +887,11 @@
acl: acl.deny enabled, 2 entries for user fred
acl: branch access granted: "ef1ea85a6374" on branch "default"
acl: path access granted: "ef1ea85a6374"
- bundle2-input-part: total payload size 520
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "bookmarks" supported
- bundle2-input-part: total payload size 37
+ bundle2-input-part: total payload size * (glob)
calling hook prepushkey.acl: hgext.acl.hook
acl: checking access for user "fred"
acl: acl.allow.bookmarks not enabled
@@ -954,11 +954,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -982,9 +982,9 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- bundle2-input-part: total payload size 1553
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
updating the branch cache
added 3 changesets with 3 changes to 3 files
@@ -1040,11 +1040,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -1068,8 +1068,8 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -1124,11 +1124,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -1143,8 +1143,8 @@
calling hook pretxnchangegroup.acl: hgext.acl.hook
acl: checking access for user "barney"
error: pretxnchangegroup.acl hook raised an exception: [Errno *] * (glob)
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -1202,11 +1202,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -1230,8 +1230,8 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -1291,11 +1291,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -1319,9 +1319,9 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- bundle2-input-part: total payload size 1553
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
updating the branch cache
added 3 changesets with 3 changes to 3 files
@@ -1381,11 +1381,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -1409,9 +1409,9 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- bundle2-input-part: total payload size 1553
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
truncating cache/rbc-revs-v1 to 8
updating the branch cache
@@ -1468,11 +1468,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -1494,8 +1494,8 @@
acl: path access granted: "ef1ea85a6374"
acl: branch access granted: "f9cafe1212c8" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -1551,11 +1551,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -1580,9 +1580,9 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- bundle2-input-part: total payload size 1553
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
updating the branch cache
added 3 changesets with 3 changes to 3 files
@@ -1638,11 +1638,11 @@
bundle2-output-part: "phase-heads" 24 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 20
+ bundle2-input-part: total payload size * (glob)
invalid branch cache (served): tip differs
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
@@ -1666,8 +1666,8 @@
acl: path access granted: "ef1ea85a6374"
acl: branch access granted: "f9cafe1212c8" on branch "default"
error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
- bundle2-input-part: total payload size 1553
- bundle2-input-part: total payload size 24
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -1761,11 +1761,11 @@
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 40
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1792,9 +1792,9 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- bundle2-input-part: total payload size 2068
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
updating the branch cache
invalid branch cache (served.hidden): tip differs
@@ -1848,11 +1848,11 @@
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 40
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1878,8 +1878,8 @@
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
error: pretxnchangegroup.acl hook failed: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
- bundle2-input-part: total payload size 2068
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -1926,11 +1926,11 @@
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 40
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -1950,8 +1950,8 @@
acl: acl.allow not enabled
acl: acl.deny not enabled
error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
- bundle2-input-part: total payload size 2068
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -2000,11 +2000,11 @@
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 40
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2024,8 +2024,8 @@
acl: acl.allow not enabled
acl: acl.deny not enabled
error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
- bundle2-input-part: total payload size 2068
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -2068,11 +2068,11 @@
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 40
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2099,9 +2099,9 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- bundle2-input-part: total payload size 2068
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
updating the branch cache
invalid branch cache (served.hidden): tip differs
@@ -2160,11 +2160,11 @@
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 40
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2191,9 +2191,9 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- bundle2-input-part: total payload size 2068
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
updating the branch cache
invalid branch cache (served.hidden): tip differs
@@ -2251,11 +2251,11 @@
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 40
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2275,8 +2275,8 @@
acl: acl.allow not enabled
acl: acl.deny not enabled
error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
- bundle2-input-part: total payload size 2068
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
@@ -2324,11 +2324,11 @@
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 40
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2355,9 +2355,9 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- bundle2-input-part: total payload size 2068
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "phase-heads" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
updating the branch cache
invalid branch cache (served.hidden): tip differs
@@ -2409,11 +2409,11 @@
bundle2-output-part: "phase-heads" 48 bytes payload
bundle2-input-bundle: with-transaction
bundle2-input-part: "replycaps" supported
- bundle2-input-part: total payload size 207
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:phases" supported
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "check:updated-heads" supported
- bundle2-input-part: total payload size 40
+ bundle2-input-part: total payload size * (glob)
bundle2-input-part: "changegroup" (params: 1 mandatory) supported
adding changesets
add changeset ef1ea85a6374
@@ -2433,8 +2433,8 @@
acl: acl.allow not enabled
acl: acl.deny not enabled
error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
- bundle2-input-part: total payload size 2068
- bundle2-input-part: total payload size 48
+ bundle2-input-part: total payload size * (glob)
+ bundle2-input-part: total payload size * (glob)
bundle2-input-bundle: 5 parts total
transaction abort!
rollback completed
--- a/tests/test-alias.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-alias.t Wed Jan 04 16:02:22 2023 +0100
@@ -119,6 +119,7 @@
--close-branch mark a branch head as closed
--amend amend the parent of the working directory
-s --secret use the secret phase for committing
+ --draft use the draft phase for committing
-e --edit invoke editor on commit messages
-i --interactive use interactive mode
-I --include PATTERN [+] include names matching the given patterns
--- a/tests/test-amend-subrepo.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-amend-subrepo.t Wed Jan 04 16:02:22 2023 +0100
@@ -190,6 +190,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 5 changesets with 12 changes to 4 files
checking subrepo links
subrepo 't' not found in revision 04aa62396ec6
--- a/tests/test-amend.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-amend.t Wed Jan 04 16:02:22 2023 +0100
@@ -560,6 +560,12 @@
close=1
phase=secret
+`hg amend --draft` sets phase to draft
+
+ $ hg amend --draft -m declassified
+ $ hg log --limit 1 -T 'phase={phase}\n'
+ phase=draft
+
$ cd ..
Corner case of amend from issue6157:
--- a/tests/test-basic.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-basic.t Wed Jan 04 16:02:22 2023 +0100
@@ -121,6 +121,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 1 changesets with 1 changes to 1 files
Repository root:
--- a/tests/test-bundle-r.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-bundle-r.t Wed Jan 04 16:02:22 2023 +0100
@@ -17,7 +17,7 @@
> hg -R test bundle -r "$i" test-"$i".hg test-"$i"
> cd test-"$i"
> hg unbundle ../test-"$i".hg
- > hg verify
+ > hg verify -q
> hg tip -q
> cd ..
> done
@@ -29,11 +29,6 @@
added 1 changesets with 1 changes to 1 files
new changesets bfaf4b5cbf01 (1 drafts)
(run 'hg update' to get a working copy)
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
0:bfaf4b5cbf01
searching for changes
2 changesets found
@@ -43,11 +38,6 @@
added 2 changesets with 2 changes to 1 files
new changesets bfaf4b5cbf01:21f32785131f (2 drafts)
(run 'hg update' to get a working copy)
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
1:21f32785131f
searching for changes
3 changesets found
@@ -57,11 +47,6 @@
added 3 changesets with 3 changes to 1 files
new changesets bfaf4b5cbf01:4ce51a113780 (3 drafts)
(run 'hg update' to get a working copy)
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
2:4ce51a113780
searching for changes
4 changesets found
@@ -71,11 +56,6 @@
added 4 changesets with 4 changes to 1 files
new changesets bfaf4b5cbf01:93ee6ab32777 (4 drafts)
(run 'hg update' to get a working copy)
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 4 changes to 1 files
3:93ee6ab32777
searching for changes
2 changesets found
@@ -85,11 +65,6 @@
added 2 changesets with 2 changes to 1 files
new changesets bfaf4b5cbf01:c70afb1ee985 (2 drafts)
(run 'hg update' to get a working copy)
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
1:c70afb1ee985
searching for changes
3 changesets found
@@ -99,11 +74,6 @@
added 3 changesets with 3 changes to 1 files
new changesets bfaf4b5cbf01:f03ae5a9b979 (3 drafts)
(run 'hg update' to get a working copy)
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
2:f03ae5a9b979
searching for changes
4 changesets found
@@ -113,11 +83,6 @@
added 4 changesets with 5 changes to 2 files
new changesets bfaf4b5cbf01:095cb14b1b4d (4 drafts)
(run 'hg update' to get a working copy)
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 5 changes to 2 files
3:095cb14b1b4d
searching for changes
5 changesets found
@@ -127,11 +92,6 @@
added 5 changesets with 6 changes to 3 files
new changesets bfaf4b5cbf01:faa2e4234c7a (5 drafts)
(run 'hg update' to get a working copy)
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 6 changes to 3 files
4:faa2e4234c7a
searching for changes
5 changesets found
@@ -141,11 +101,6 @@
added 5 changesets with 5 changes to 2 files
new changesets bfaf4b5cbf01:916f1afdef90 (5 drafts)
(run 'hg update' to get a working copy)
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 2 files
4:916f1afdef90
$ cd test-8
$ hg pull ../test-7
@@ -158,12 +113,7 @@
new changesets c70afb1ee985:faa2e4234c7a
1 local changesets published
(run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ hg rollback
repository tip rolled back to revision 4 (undo pull)
$ cd ..
@@ -243,12 +193,7 @@
$ hg tip -q
8:916f1afdef90
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ hg rollback
repository tip rolled back to revision 2 (undo unbundle)
@@ -268,12 +213,7 @@
$ hg tip -q
4:916f1afdef90
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 2 files
+ $ hg verify -q
$ hg rollback
repository tip rolled back to revision 2 (undo unbundle)
$ hg unbundle ../test-bundle-branch2.hg
@@ -288,12 +228,7 @@
$ hg tip -q
6:faa2e4234c7a
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 7 changesets with 6 changes to 3 files
+ $ hg verify -q
$ hg rollback
repository tip rolled back to revision 2 (undo unbundle)
$ hg unbundle ../test-bundle-cset-7.hg
@@ -308,12 +243,7 @@
$ hg tip -q
4:916f1afdef90
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 2 files
+ $ hg verify -q
$ cd ../test
$ hg merge 7
@@ -342,11 +272,6 @@
$ hg tip -q
9:03fc0b0e347c
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 10 changesets with 7 changes to 4 files
+ $ hg verify -q
$ cd ..
--- a/tests/test-bundle.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-bundle.t Wed Jan 04 16:02:22 2023 +0100
@@ -28,12 +28,7 @@
1 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ hg mv afile anotherfile
$ hg commit -m "0.3m"
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ cd ..
$ hg init empty
@@ -70,12 +65,7 @@
$ hg -R empty heads
[1]
- $ hg -R empty verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 0 changesets with 0 changes to 0 files
+ $ hg -R empty verify -q
#if repobundlerepo
@@ -853,12 +843,7 @@
but, regular verify must continue to work
- $ hg -R orig verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 2 files
+ $ hg -R orig verify -q
#if repobundlerepo
diff against bundle
@@ -939,12 +924,7 @@
$ hg clone -q -r0 . part2
$ hg -q -R part2 pull bundle.hg
- $ hg -R part2 verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 5 changes to 4 files
+ $ hg -R part2 verify -q
#endif
== Test bundling no commits
@@ -1039,6 +1019,24 @@
$ hg bundle -a --config devel.bundle.delta=full ./full.hg
3 changesets found
+
+Test the debug statistic when building a bundle
+-----------------------------------------------
+
+ $ hg bundle -a ./default.hg --config debug.bundling-stats=yes
+ 3 changesets found
+ DEBUG-BUNDLING: revisions: 9
+ DEBUG-BUNDLING: changelog: 3
+ DEBUG-BUNDLING: manifest: 3
+ DEBUG-BUNDLING: files: 3 (for 3 revlogs)
+ DEBUG-BUNDLING: deltas:
+ DEBUG-BUNDLING: from-storage: 2 (100% of available 2)
+ DEBUG-BUNDLING: computed: 7
+ DEBUG-BUNDLING: full: 7 (100% of native 7)
+ DEBUG-BUNDLING: changelog: 3 (100% of native 3)
+ DEBUG-BUNDLING: manifests: 1 (100% of native 1)
+ DEBUG-BUNDLING: files: 3 (100% of native 3)
+
Test the debug output when applying delta
-----------------------------------------
@@ -1048,18 +1046,62 @@
> --config storage.revlog.reuse-external-delta=no \
> --config storage.revlog.reuse-external-delta-parent=no
adding changesets
- DBG-DELTAS: CHANGELOG: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
- DBG-DELTAS: CHANGELOG: rev=1: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
- DBG-DELTAS: CHANGELOG: rev=2: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: CHANGELOG: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: CHANGELOG: rev=1: delta-base=1 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: CHANGELOG: rev=2: delta-base=2 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
adding manifests
- DBG-DELTAS: MANIFESTLOG: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
- DBG-DELTAS: MANIFESTLOG: rev=1: search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
- DBG-DELTAS: MANIFESTLOG: rev=2: search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: MANIFESTLOG: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: MANIFESTLOG: rev=1: delta-base=0 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: MANIFESTLOG: rev=2: delta-base=1 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
adding file changes
- DBG-DELTAS: FILELOG:a: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
- DBG-DELTAS: FILELOG:b: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
- DBG-DELTAS: FILELOG:c: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: FILELOG:a: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: FILELOG:b: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: FILELOG:c: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
added 3 changesets with 3 changes to 3 files
new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
(run 'hg update' to get a working copy)
+
+Test the debug statistic when applying a bundle
+-----------------------------------------------
+
+ $ hg init bar
+ $ hg -R bar unbundle ./default.hg --config debug.unbundling-stats=yes
+ adding changesets
+ adding manifests
+ adding file changes
+ DEBUG-UNBUNDLING: revisions: 9
+ DEBUG-UNBUNDLING: changelog: 3 ( 33%)
+ DEBUG-UNBUNDLING: manifests: 3 ( 33%)
+ DEBUG-UNBUNDLING: files: 3 ( 33%)
+ DEBUG-UNBUNDLING: total-time: ?????????????? seconds (glob)
+ DEBUG-UNBUNDLING: changelog: ?????????????? seconds (???%) (glob)
+ DEBUG-UNBUNDLING: manifests: ?????????????? seconds (???%) (glob)
+ DEBUG-UNBUNDLING: files: ?????????????? seconds (???%) (glob)
+ DEBUG-UNBUNDLING: type-count:
+ DEBUG-UNBUNDLING: changelog:
+ DEBUG-UNBUNDLING: full: 3
+ DEBUG-UNBUNDLING: cached: 3 (100%)
+ DEBUG-UNBUNDLING: manifests:
+ DEBUG-UNBUNDLING: full: 1
+ DEBUG-UNBUNDLING: cached: 1 (100%)
+ DEBUG-UNBUNDLING: delta: 2
+ DEBUG-UNBUNDLING: cached: 2 (100%)
+ DEBUG-UNBUNDLING: files:
+ DEBUG-UNBUNDLING: full: 3
+ DEBUG-UNBUNDLING: cached: 3 (100%)
+ DEBUG-UNBUNDLING: type-time:
+ DEBUG-UNBUNDLING: changelog:
+ DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
+ DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
+ DEBUG-UNBUNDLING: manifests:
+ DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
+ DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
+ DEBUG-UNBUNDLING: delta: ?????????????? seconds (???% of total) (glob)
+ DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
+ DEBUG-UNBUNDLING: files:
+ DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
+ DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
+ added 3 changesets with 3 changes to 3 files
+ new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
+ (run 'hg update' to get a working copy)
--- a/tests/test-bundle2-exchange.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-bundle2-exchange.t Wed Jan 04 16:02:22 2023 +0100
@@ -739,12 +739,10 @@
$ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
pushing to ssh://user@dummy/other
searching for changes
- remote: Fail early! (no-py3 chg !)
remote: adding changesets
remote: adding manifests
remote: adding file changes
remote: Fail early! (py3 !)
- remote: Fail early! (no-py3 no-chg !)
remote: transaction abort!
remote: Cleaning up the mess...
remote: rollback completed
--- a/tests/test-censor.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-censor.t Wed Jan 04 16:02:22 2023 +0100
@@ -175,6 +175,7 @@
checking files
target@1: censored file data
target@2: censored file data
+ not checking dirstate because of previous errors
checked 5 changesets with 7 changes to 2 files
2 integrity errors encountered!
(first damaged changeset appears to be 1)
@@ -205,12 +206,7 @@
Repo passes verification with warnings with explicit config
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 7 changes to 2 files
+ $ hg verify -q
May update to revision with censored data with explicit config
@@ -330,24 +326,14 @@
$ hg cat -r $C1 target | head -n 10
$ hg cat -r 0 target | head -n 10
Initially untainted file
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 12 changesets with 13 changes to 2 files
+ $ hg verify -q
Repo cloned before tainted content introduced can pull censored nodes
$ cd ../rpull
$ hg cat -r tip target | head -n 10
Initially untainted file
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 2 changes to 2 files
+ $ hg verify -q
$ hg pull -r $H1 -r $H2
pulling from $TESTTMP/r
searching for changes
@@ -369,12 +355,7 @@
$ hg cat -r $C1 target | head -n 10
$ hg cat -r 0 target | head -n 10
Initially untainted file
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 12 changesets with 13 changes to 2 files
+ $ hg verify -q
Censored nodes can be pushed if they censor previously unexchanged nodes
@@ -429,12 +410,7 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat target | head -n 10
Re-sanitized; nothing to see here
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 14 changesets with 15 changes to 2 files
+ $ hg verify -q
Grepping only warns, doesn't error out
@@ -488,12 +464,7 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cat target | head -n 10
Re-sanitized; nothing to see here
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 14 changesets with 15 changes to 2 files
+ $ hg verify -q
$ cd ../r
Can import bundle where first revision of a file is censored
--- a/tests/test-clone-pull-corruption.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-clone-pull-corruption.t Wed Jan 04 16:02:22 2023 +0100
@@ -43,11 +43,6 @@
see what happened
$ wait
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
+ $ hg verify -q
$ cd ..
--- a/tests/test-clone-r.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-clone-r.t Wed Jan 04 16:02:22 2023 +0100
@@ -66,12 +66,7 @@
5 7 09bb521d218d de68e904d169 000000000000
6 8 1fde233dfb0f f54c32f13478 000000000000
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ cd ..
@@ -80,7 +75,7 @@
> echo ---- hg clone -r "$i" test test-"$i"
> hg clone -r "$i" test test-"$i"
> cd test-"$i"
- > hg verify
+ > hg verify -q
> cd ..
> done
@@ -92,11 +87,6 @@
new changesets f9ee2f85a263
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
---- hg clone -r 1 test test-1
adding changesets
@@ -106,11 +96,6 @@
new changesets f9ee2f85a263:34c2bf6b0626
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
---- hg clone -r 2 test test-2
adding changesets
@@ -120,11 +105,6 @@
new changesets f9ee2f85a263:e38ba6f5b7e0
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
---- hg clone -r 3 test test-3
adding changesets
@@ -134,11 +114,6 @@
new changesets f9ee2f85a263:eebf5a27f8ca
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 4 changes to 1 files
---- hg clone -r 4 test test-4
adding changesets
@@ -148,11 +123,6 @@
new changesets f9ee2f85a263:095197eb4973
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
---- hg clone -r 5 test test-5
adding changesets
@@ -162,11 +132,6 @@
new changesets f9ee2f85a263:1bb50a9436a7
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
---- hg clone -r 6 test test-6
adding changesets
@@ -176,11 +141,6 @@
new changesets f9ee2f85a263:7373c1169842
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 5 changes to 2 files
---- hg clone -r 7 test test-7
adding changesets
@@ -190,11 +150,6 @@
new changesets f9ee2f85a263:a6a34bfa0076
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 6 changes to 3 files
---- hg clone -r 8 test test-8
adding changesets
@@ -204,11 +159,6 @@
new changesets f9ee2f85a263:aa35859c02ea
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 2 files
$ cd test-8
$ hg pull ../test-7
@@ -220,12 +170,7 @@
added 4 changesets with 2 changes to 3 files (+1 heads)
new changesets 095197eb4973:a6a34bfa0076
(run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ cd ..
$ hg clone test test-9
--- a/tests/test-clone-stream-format.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-clone-stream-format.t Wed Jan 04 16:02:22 2023 +0100
@@ -110,12 +110,7 @@
new changesets 96ee1d7354c4:06ddac466af5
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R server-no-store
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg verify -R server-no-store -q
$ hg -R server serve -p $HGPORT -d --pid-file=hg-1.pid --error errors-1.txt
$ cat hg-1.pid > $DAEMON_PIDS
$ hg -R server-no-store serve -p $HGPORT2 -d --pid-file=hg-2.pid --error errors-2.txt
@@ -129,12 +124,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT clone-remove-store --config format.usestore=no
$ cat errors-1.txt
- $ hg -R clone-remove-store verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-remove-store verify -q
$ hg debugrequires -R clone-remove-store | grep store
[1]
@@ -143,12 +133,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT2 clone-add-store --config format.usestore=yes
$ cat errors-2.txt
- $ hg -R clone-add-store verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-add-store verify -q
$ hg debugrequires -R clone-add-store | grep store
store
@@ -171,12 +156,7 @@
new changesets 96ee1d7354c4:06ddac466af5
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R server-no-fncache
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg verify -R server-no-fncache -q
$ hg -R server serve -p $HGPORT -d --pid-file=hg-1.pid --error errors-1.txt
$ cat hg-1.pid > $DAEMON_PIDS
$ hg -R server-no-fncache serve -p $HGPORT2 -d --pid-file=hg-2.pid --error errors-2.txt
@@ -190,12 +170,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT clone-remove-fncache --config format.usefncache=no
$ cat errors-1.txt
- $ hg -R clone-remove-fncache verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-remove-fncache verify -q
$ hg debugrequires -R clone-remove-fncache | grep fncache
[1]
@@ -204,12 +179,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT2 clone-add-fncache --config format.usefncache=yes
$ cat errors-2.txt
- $ hg -R clone-add-fncache verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-add-fncache verify -q
$ hg debugrequires -R clone-add-fncache | grep fncache
fncache
@@ -231,12 +201,7 @@
new changesets 96ee1d7354c4:06ddac466af5
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R server-no-dotencode
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg verify -R server-no-dotencode -q
$ hg -R server serve -p $HGPORT -d --pid-file=hg-1.pid --error errors-1.txt
$ cat hg-1.pid > $DAEMON_PIDS
$ hg -R server-no-dotencode serve -p $HGPORT2 -d --pid-file=hg-2.pid --error errors-2.txt
@@ -250,12 +215,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT clone-remove-dotencode --config format.dotencode=no
$ cat errors-1.txt
- $ hg -R clone-remove-dotencode verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-remove-dotencode verify -q
$ hg debugrequires -R clone-remove-dotencode | grep dotencode
[1]
@@ -264,12 +224,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT2 clone-add-dotencode --config format.dotencode=yes
$ cat errors-2.txt
- $ hg -R clone-add-dotencode verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-add-dotencode verify -q
$ hg debugrequires -R clone-add-dotencode | grep dotencode
dotencode
@@ -289,12 +244,7 @@
$ cat hg-1.pid > $DAEMON_PIDS
$ hg clone --quiet --stream -U http://localhost:$HGPORT clone-from-share
- $ hg -R clone-from-share verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-from-share verify -q
$ hg debugrequires -R clone-from-share | egrep 'share$'
[1]
@@ -313,12 +263,7 @@
new changesets 96ee1d7354c4:06ddac466af5
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R server-no-share-safe
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg verify -R server-no-share-safe -q
$ hg -R server serve -p $HGPORT -d --pid-file=hg-1.pid --error errors-1.txt
$ cat hg-1.pid > $DAEMON_PIDS
$ hg -R server-no-share-safe serve -p $HGPORT2 -d --pid-file=hg-2.pid --error errors-2.txt
@@ -332,12 +277,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT clone-remove-share-safe --config format.use-share-safe=no
$ cat errors-1.txt
- $ hg -R clone-remove-share-safe verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-remove-share-safe verify -q
$ hg debugrequires -R clone-remove-share-safe | grep share-safe
[1]
@@ -346,12 +286,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT2 clone-add-share-safe --config format.use-share-safe=yes
$ cat errors-2.txt
- $ hg -R clone-add-share-safe verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-add-share-safe verify -q
$ hg debugrequires -R clone-add-share-safe | grep share-safe
share-safe
@@ -374,12 +309,7 @@
new changesets 96ee1d7354c4:06ddac466af5
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R server-no-persistent-nodemap
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg verify -R server-no-persistent-nodemap -q
$ hg -R server serve -p $HGPORT -d --pid-file=hg-1.pid --error errors-1.txt
$ cat hg-1.pid > $DAEMON_PIDS
$ hg -R server-no-persistent-nodemap serve -p $HGPORT2 -d --pid-file=hg-2.pid --error errors-2.txt
@@ -401,12 +331,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT clone-remove-persistent-nodemap --config format.use-persistent-nodemap=no
$ cat errors-1.txt
- $ hg -R clone-remove-persistent-nodemap verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-remove-persistent-nodemap verify -q
$ hg debugrequires -R clone-remove-persistent-nodemap | grep persistent-nodemap
[1]
@@ -421,12 +346,7 @@
$ hg clone --quiet --stream -U http://localhost:$HGPORT2 clone-add-persistent-nodemap --config format.use-persistent-nodemap=yes
$ cat errors-2.txt
- $ hg -R clone-add-persistent-nodemap verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5004 changesets with 1088 changes to 1088 files
+ $ hg -R clone-add-persistent-nodemap verify -q
$ hg debugrequires -R clone-add-persistent-nodemap | grep persistent-nodemap
persistent-nodemap
--- a/tests/test-clone-stream.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-clone-stream.t Wed Jan 04 16:02:22 2023 +0100
@@ -94,12 +94,7 @@
Check that the clone went well
- $ hg verify -R local-clone
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 1088 changes to 1088 files
+ $ hg verify -R local-clone -q
Check uncompressed
==================
@@ -651,12 +646,7 @@
updating to branch default
1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
#endif
- $ hg verify -R with-bookmarks
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 1088 changes to 1088 files
+ $ hg verify -R with-bookmarks -q
$ hg -R with-bookmarks bookmarks
some-bookmark 2:5223b5e3265f
@@ -692,12 +682,7 @@
updating to branch default
1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
#endif
- $ hg verify -R phase-publish
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 1088 changes to 1088 files
+ $ hg verify -R phase-publish -q
$ hg -R phase-publish phase -r 'all()'
0: public
1: public
@@ -747,12 +732,7 @@
1: draft
2: draft
#endif
- $ hg verify -R phase-no-publish
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 1088 changes to 1088 files
+ $ hg verify -R phase-no-publish -q
$ killdaemons.py
@@ -801,12 +781,7 @@
0: draft
$ hg debugobsolete -R with-obsolescence
8c206a663911c1f97f2f9d7382e417ae55872cfa 0 {5223b5e3265f0df40bb743da62249413d74ac70f} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- $ hg verify -R with-obsolescence
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 1089 changes to 1088 files
+ $ hg verify -R with-obsolescence -q
$ hg clone -U --stream --config experimental.evolution=0 http://localhost:$HGPORT with-obsolescence-no-evolution
streaming all changes
--- a/tests/test-clone.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-clone.t Wed Jan 04 16:02:22 2023 +0100
@@ -59,12 +59,7 @@
$ cat a
a
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 11 changesets with 11 changes to 2 files
+ $ hg verify -q
Invalid dest '' must abort:
@@ -122,12 +117,7 @@
$ cat a 2>/dev/null || echo "a not present"
a not present
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 11 changesets with 11 changes to 2 files
+ $ hg verify -q
Default destination:
@@ -167,12 +157,7 @@
new changesets acb14030fe0a:a7949464abda
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg -R g verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 11 changesets with 11 changes to 2 files
+ $ hg -R g verify -q
Invalid dest '' with --pull must abort (issue2528):
--- a/tests/test-commandserver.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-commandserver.t Wed Jan 04 16:02:22 2023 +0100
@@ -541,6 +541,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 2 changesets with 2 changes to 1 files
$ hg revert --no-backup -aq
@@ -825,6 +826,7 @@
message: '\xa6Ditem@Cpos\xf6EtopicMcrosscheckingEtotal\xf6DtypeHprogressDunit@'
message: '\xa2DdataOchecking files\nDtypeFstatus'
message: '\xa6Ditem@Cpos\xf6EtopicHcheckingEtotal\xf6DtypeHprogressDunit@'
+ message: '\xa2DdataRchecking dirstate\nDtypeFstatus'
message: '\xa2DdataX/checked 0 changesets with 0 changes to 0 files\nDtypeFstatus'
>>> from hgclient import checkwith, readchannel, runcommand, stringio
--- a/tests/test-commit-amend.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-commit-amend.t Wed Jan 04 16:02:22 2023 +0100
@@ -123,13 +123,13 @@
uncompressed size of bundle content:
254 (changelog)
163 (manifests)
- 131 a
+ 133 a
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/47343646fa3d-c2758885-amend.hg
1 changesets found
uncompressed size of bundle content:
250 (changelog)
163 (manifests)
- 131 a
+ 133 a
adding branch
adding changesets
adding manifests
@@ -267,13 +267,13 @@
uncompressed size of bundle content:
249 (changelog)
163 (manifests)
- 133 a
+ 135 a
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a9a13940fc03-7c2e8674-amend.hg
1 changesets found
uncompressed size of bundle content:
257 (changelog)
163 (manifests)
- 133 a
+ 135 a
adding branch
adding changesets
adding manifests
@@ -303,13 +303,13 @@
uncompressed size of bundle content:
257 (changelog)
163 (manifests)
- 133 a
+ 135 a
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/64a124ba1b44-10374b8f-amend.hg
1 changesets found
uncompressed size of bundle content:
257 (changelog)
163 (manifests)
- 135 a
+ 137 a
adding branch
adding changesets
adding manifests
--- a/tests/test-completion.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-completion.t Wed Jan 04 16:02:22 2023 +0100
@@ -77,6 +77,7 @@
debug-delta-find
debug-repair-issue6528
debug-revlog-index
+ debug-revlog-stats
debugancestor
debugantivirusrunning
debugapplystreamclonebundle
@@ -264,13 +265,14 @@
bundle: exact, force, rev, branch, base, all, type, ssh, remotecmd, insecure
cat: output, rev, decode, include, exclude, template
clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
- commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
+ commit: addremove, close-branch, amend, secret, draft, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
continue: dry-run
copy: forget, after, at-rev, force, include, exclude, dry-run
- debug-delta-find: changelog, manifest, dir, template
+ debug-delta-find: changelog, manifest, dir, template, source
debug-repair-issue6528: to-report, from-report, paranoid, dry-run
debug-revlog-index: changelog, manifest, dir, template
+ debug-revlog-stats: changelog, manifest, filelogs, template
debugancestor:
debugantivirusrunning:
debugapplystreamclonebundle:
--- a/tests/test-contrib-dumprevlog.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-contrib-dumprevlog.t Wed Jan 04 16:02:22 2023 +0100
@@ -14,12 +14,7 @@
$ echo adding more to file a >> a
$ hg commit -m third
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
+ $ hg verify -q
Dumping revlog of file a to stdout:
$ "$PYTHON" "$CONTRIBDIR/dumprevlog" .hg/store/data/a.i
@@ -79,12 +74,7 @@
Verify:
- $ hg -R repo-c verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
+ $ hg -R repo-c verify -q
Compare repos:
--- a/tests/test-convert-filemap.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-convert-filemap.t Wed Jan 04 16:02:22 2023 +0100
@@ -292,12 +292,12 @@
$ rm -rf source/.hg/store/data/dir/file4
#endif
$ hg -q convert --filemap renames.fmap --datesort source dummydest
- abort: data/dir/file3@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !)
+ abort: dir/file3@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !)
abort: data/dir/file3/index@e96dce0bc6a2: no node (reposimplestore !)
[50]
$ hg -q convert --filemap renames.fmap --datesort --config convert.hg.ignoreerrors=1 source renames.repo
- ignoring: data/dir/file3@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !)
- ignoring: data/dir/file4@6edd55f559cdce67132b12ca09e09cee08b60442: no match found (reporevlogstore !)
+ ignoring: dir/file3@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !)
+ ignoring: dir/file4@6edd55f559cdce67132b12ca09e09cee08b60442: no match found (reporevlogstore !)
ignoring: data/dir/file3/index@e96dce0bc6a2: no node (reposimplestore !)
ignoring: data/dir/file4/index@6edd55f559cd: no node (reposimplestore !)
$ hg up -q -R renames.repo
@@ -312,12 +312,7 @@
|
o 0 "0: add foo baz dir/" files: dir2/dir3/file dir2/dir3/subdir/file3 foo2
- $ hg -R renames.repo verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 7 changes to 4 files
+ $ hg -R renames.repo verify -q
$ hg -R renames.repo manifest --debug
d43feacba7a4f1f2080dde4a4b985bd8a0236d46 644 copied2
--- a/tests/test-convert-hg-source.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-convert-hg-source.t Wed Jan 04 16:02:22 2023 +0100
@@ -182,18 +182,13 @@
sorting...
converting...
4 init
- ignoring: data/b@1e88685f5ddec574a34c70af492f95b6debc8741: no match found (reporevlogstore !)
+ ignoring: b@1e88685f5ddec574a34c70af492f95b6debc8741: no match found (reporevlogstore !)
ignoring: data/b/index@1e88685f5dde: no node (reposimplestore !)
3 changeall
2 changebagain
1 merge
0 moveb
- $ hg -R fixed verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 3 files
+ $ hg -R fixed verify -q
manifest -r 0
--- a/tests/test-copy.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-copy.t Wed Jan 04 16:02:22 2023 +0100
@@ -96,12 +96,7 @@
$ hg cat a > asum
$ md5sum.py asum
60b725f10c9c85c70d97880dfe8191b3 asum
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 2 files
+ $ hg verify -q
$ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-debug-revlog-stats.t Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,77 @@
+Force revlog max inline value to be smaller than default
+
+ $ mkdir $TESTTMP/ext
+ $ cat << EOF > $TESTTMP/ext/small_inline.py
+ > from mercurial import revlog
+ > revlog._maxinline = 8
+ > EOF
+
+ $ cat << EOF >> $HGRCPATH
+ > [extensions]
+ > small_inline=$TESTTMP/ext/small_inline.py
+ > EOF
+
+ $ hg init repo
+ $ cd repo
+
+Try on an empty repository
+
+ $ hg debug-revlog-stats
+ rev-count data-size inl type target
+ 0 0 yes changelog
+ 0 0 yes manifest
+
+ $ mkdir folder
+ $ touch a b folder/c folder/d
+ $ hg commit -Aqm 0
+ $ echo "text" > a
+ $ hg rm b
+ $ echo "longer string" > folder/d
+ $ hg commit -Aqm 1
+
+Differences in data size observed with pure is due to different compression
+algorithms
+
+ $ hg debug-revlog-stats
+ rev-count data-size inl type target
+ 2 138 no changelog (no-pure !)
+ 2 137 no changelog (pure !)
+ 2 177 no manifest (no-pure !)
+ 2 168 no manifest (pure !)
+ 2 6 yes file a
+ 1 0 yes file b
+ 1 0 yes file folder/c
+ 2 15 no file folder/d
+
+Test 'changelog' command argument
+
+ $ hg debug-revlog-stats -c
+ rev-count data-size inl type target
+ 2 138 no changelog (no-pure !)
+ 2 137 no changelog (pure !)
+
+Test 'manifest' command argument
+
+ $ hg debug-revlog-stats -m
+ rev-count data-size inl type target
+ 2 177 no manifest (no-pure !)
+ 2 168 no manifest (pure !)
+
+Test 'file' command argument
+
+ $ hg debug-revlog-stats -f
+ rev-count data-size inl type target
+ 2 6 yes file a
+ 1 0 yes file b
+ 1 0 yes file folder/c
+ 2 15 no file folder/d
+
+Test multiple command arguments
+
+ $ hg debug-revlog-stats -cm
+ rev-count data-size inl type target
+ 2 138 no changelog (no-pure !)
+ 2 137 no changelog (pure !)
+ 2 177 no manifest (no-pure !)
+ 2 168 no manifest (pure !)
+
--- a/tests/test-debugcommands.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-debugcommands.t Wed Jan 04 16:02:22 2023 +0100
@@ -39,6 +39,9 @@
chunks size : 191
0x75 (u) : 191 (100.00%)
+
+ total-stored-content: 188 bytes
+
avg chain length : 0
max chain length : 0
max chain reach : 67
@@ -74,6 +77,9 @@
empty : 0 ( 0.00%)
0x75 (u) : 88 (100.00%)
+
+ total-stored-content: 86 bytes
+
avg chain length : 0
max chain length : 0
max chain reach : 44
@@ -107,6 +113,9 @@
chunks size : 3
0x75 (u) : 3 (100.00%)
+
+ total-stored-content: 2 bytes
+
avg chain length : 0
max chain length : 0
max chain reach : 3
--- a/tests/test-demandimport.py Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-demandimport.py Wed Jan 04 16:02:22 2023 +0100
@@ -234,3 +234,11 @@
zipfileimp = __import__('ftplib', globals(), locals(), ['unknownattr'])
assert f(zipfileimp) == "<module 'ftplib' from '?'>", f(zipfileimp)
assert not util.safehasattr(zipfileimp, 'unknownattr')
+
+
+# test deactivation for issue6725
+del sys.modules['telnetlib']
+with demandimport.deactivated():
+ import telnetlib
+assert telnetlib.__loader__ == telnetlib.__spec__.loader
+assert telnetlib.__loader__.get_resource_reader
--- a/tests/test-empty.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-empty.t Wed Jan 04 16:02:22 2023 +0100
@@ -9,12 +9,7 @@
$ hg grep wah
[1]
$ hg manifest
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 0 changesets with 0 changes to 0 files
+ $ hg verify -q
Check the basic files created:
@@ -37,12 +32,7 @@
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd b
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 0 changesets with 0 changes to 0 files
+ $ hg verify -q
$ ls .hg
00changelog.i
cache
--- a/tests/test-excessive-merge.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-excessive-merge.t Wed Jan 04 16:02:22 2023 +0100
@@ -93,9 +93,4 @@
0 0 2ed2a3912a0b 000000000000 000000000000
1 1 79d7492df40a 2ed2a3912a0b 000000000000
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 4 changes to 2 files
+ $ hg verify -q
--- a/tests/test-filebranch.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-filebranch.t Wed Jan 04 16:02:22 2023 +0100
@@ -135,11 +135,6 @@
$ hg status
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 10 changes to 4 files
+ $ hg verify -q
$ cd ..
--- a/tests/test-fncache.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-fncache.t Wed Jan 04 16:02:22 2023 +0100
@@ -49,12 +49,7 @@
Testing verify:
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
+ $ hg verify -q
$ rm .hg/store/fncache
@@ -66,6 +61,7 @@
warning: revlog 'data/a.i' not in fncache!
warning: revlog 'data/a.i.hg/c.i' not in fncache!
warning: revlog 'data/a.i/b.i' not in fncache!
+ checking dirstate
checked 3 changesets with 3 changes to 3 files
3 warnings encountered!
hint: run "hg debugrebuildfncache" to recover from corrupt fncache
@@ -78,12 +74,7 @@
adding data/a.i/b.i
3 items added, 0 removed from fncache
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
+ $ hg verify -q
$ cd ..
@@ -359,6 +350,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 1 changesets with 1 changes to 1 files
$ cat .hg/store/fncache
data/y.i
--- a/tests/test-hardlinks.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-hardlinks.t Wed Jan 04 16:02:22 2023 +0100
@@ -151,12 +151,7 @@
Push to repo r1 should break up most hardlinks in r2:
- $ hg -R r2 verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 2 files
+ $ hg -R r2 verify -q
$ cd r3
$ hg push
@@ -182,13 +177,7 @@
1 r2/.hg/store/fncache
#endif
- $ hg -R r2 verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 2 files
-
+ $ hg -R r2 verify -q
$ cd r1
$ hg up
--- a/tests/test-help.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-help.t Wed Jan 04 16:02:22 2023 +0100
@@ -985,6 +985,8 @@
details.
debug-revlog-index
dump index data for a revlog
+ debug-revlog-stats
+ display statistics about revlogs in the store
debugancestor
find the ancestor revision of two revisions in a given index
debugantivirusrunning
--- a/tests/test-http-bundle1.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-http-bundle1.t Wed Jan 04 16:02:22 2023 +0100
@@ -45,12 +45,7 @@
no changes found
updating to branch default
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R copy
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 4 changes to 4 files
+ $ hg verify -R copy -q
#endif
try to clone via stream, should use pull instead
@@ -99,12 +94,7 @@
new changesets 8b6053c928fe
updating to branch default
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R copy-pull
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 4 changes to 4 files
+ $ hg verify -R copy-pull -q
$ cd test
$ echo bar > bar
$ hg commit -A -d '1 0' -m 2
--- a/tests/test-http-clone-r.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-http-clone-r.t Wed Jan 04 16:02:22 2023 +0100
@@ -25,7 +25,7 @@
$ for i in 0 1 2 3 4 5 6 7 8; do
> hg clone -r "$i" http://localhost:$HGPORT/ test-"$i"
> if cd test-"$i"; then
- > hg verify
+ > hg verify -q
> cd ..
> fi
> done
@@ -36,11 +36,6 @@
new changesets bfaf4b5cbf01
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -48,11 +43,6 @@
new changesets bfaf4b5cbf01:21f32785131f
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -60,11 +50,6 @@
new changesets bfaf4b5cbf01:4ce51a113780
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -72,11 +57,6 @@
new changesets bfaf4b5cbf01:93ee6ab32777
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 4 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -84,11 +64,6 @@
new changesets bfaf4b5cbf01:c70afb1ee985
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -96,11 +71,6 @@
new changesets bfaf4b5cbf01:f03ae5a9b979
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -108,11 +78,6 @@
new changesets bfaf4b5cbf01:095cb14b1b4d
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 5 changes to 2 files
adding changesets
adding manifests
adding file changes
@@ -120,11 +85,6 @@
new changesets bfaf4b5cbf01:faa2e4234c7a
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 6 changes to 3 files
adding changesets
adding manifests
adding file changes
@@ -132,11 +92,6 @@
new changesets bfaf4b5cbf01:916f1afdef90
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 2 files
$ cd test-8
$ hg pull ../test-7
pulling from ../test-7
@@ -147,12 +102,7 @@
added 4 changesets with 2 changes to 3 files (+1 heads)
new changesets c70afb1ee985:faa2e4234c7a
(run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ cd ..
$ cd test-1
$ hg pull -r 4 http://localhost:$HGPORT/
@@ -164,12 +114,7 @@
added 1 changesets with 0 changes to 0 files (+1 heads)
new changesets c70afb1ee985
(run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 2 changes to 1 files
+ $ hg verify -q
$ hg pull http://localhost:$HGPORT/
pulling from http://localhost:$HGPORT/
searching for changes
@@ -190,12 +135,7 @@
added 2 changesets with 0 changes to 0 files (+1 heads)
new changesets c70afb1ee985:f03ae5a9b979
(run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 3 changes to 1 files
+ $ hg verify -q
$ hg pull http://localhost:$HGPORT/
pulling from http://localhost:$HGPORT/
searching for changes
@@ -205,12 +145,7 @@
added 4 changesets with 4 changes to 4 files
new changesets 93ee6ab32777:916f1afdef90
(run 'hg update' to get a working copy)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ cd ..
no default destination if url has no path:
--- a/tests/test-http-proxy.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-http-proxy.t Wed Jan 04 16:02:22 2023 +0100
@@ -22,12 +22,7 @@
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd b
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ cd ..
url for proxy, pull
@@ -42,12 +37,7 @@
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd b-pull
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ cd ..
host:port for proxy
--- a/tests/test-http.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-http.t Wed Jan 04 16:02:22 2023 +0100
@@ -34,12 +34,7 @@
transferred * bytes in * seconds (*/sec) (glob)
updating to branch default
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R copy
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 4 changes to 4 files
+ $ hg verify -R copy -q
#endif
try to clone via stream, should use pull instead
@@ -88,12 +83,7 @@
new changesets 8b6053c928fe
updating to branch default
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R copy-pull
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 4 changes to 4 files
+ $ hg verify -R copy-pull -q
$ cd test
$ echo bar > bar
$ hg commit -A -d '1 0' -m 2
--- a/tests/test-https.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-https.t Wed Jan 04 16:02:22 2023 +0100
@@ -137,12 +137,7 @@
new changesets 8b6053c928fe
updating to branch default
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg verify -R copy-pull
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 4 changes to 4 files
+ $ hg verify -R copy-pull -q
$ cd test
$ echo bar > bar
$ hg commit -A -d '1 0' -m 2
--- a/tests/test-import-merge.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-import-merge.t Wed Jan 04 16:02:22 2023 +0100
@@ -159,9 +159,4 @@
rollback completed
abort: patch is damaged or loses information
[255]
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
+ $ hg verify -q
--- a/tests/test-incoming-outgoing.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-incoming-outgoing.t Wed Jan 04 16:02:22 2023 +0100
@@ -7,12 +7,7 @@
> hg commit -A -m $i
> done
adding foo
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 9 changes to 1 files
+ $ hg verify -q
$ hg serve -p $HGPORT -d --pid-file=hg.pid
$ cat hg.pid >> $DAEMON_PIDS
$ cd ..
@@ -365,12 +360,7 @@
> echo $i >> foo
> hg commit -A -m $i
> done
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 14 changesets with 14 changes to 1 files
+ $ hg verify -q
$ cd ..
$ hg -R test-dev outgoing test
comparing with test
--- a/tests/test-infinitepush.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-infinitepush.t Wed Jan 04 16:02:22 2023 +0100
@@ -46,8 +46,8 @@
remote: bc22f9a30a82 multihead1
remote: ee4802bf6864 multihead2
$ scratchnodes
- bc22f9a30a821118244deacbd732e394ed0b686c ab1bc557aa090a9e4145512c734b6e8a828393a5
- ee4802bf6864326a6b3dcfff5a03abc2a0a69b8f ab1bc557aa090a9e4145512c734b6e8a828393a5
+ bc22f9a30a821118244deacbd732e394ed0b686c de1b7d132ba98f0172cd974e3e69dfa80faa335c
+ ee4802bf6864326a6b3dcfff5a03abc2a0a69b8f de1b7d132ba98f0172cd974e3e69dfa80faa335c
Create two new scratch bookmarks
$ hg up 0
--- a/tests/test-install.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-install.t Wed Jan 04 16:02:22 2023 +0100
@@ -238,42 +238,3 @@
checking username (test)
no problems detected
#endif
-
-#if virtualenv no-py3 network-io no-pyoxidizer
-
-Note: --no-site-packages is the default for all versions enabled by hghave
-
- $ "$PYTHON" -m virtualenv installenv >> pip.log
- DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
- DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
-
-Note: we use this weird path to run pip and hg to avoid platform differences,
-since it's bin on most platforms but Scripts on Windows.
- $ ./installenv/*/pip install $TESTDIR/.. >> pip.log
- DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
- DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
- DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained. pip 21.0 will drop support for Python 2.7 in January 2021. More details about Python 2 support in pip can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support pip 21.0 will remove support for this functionality. (?)
- $ ./installenv/*/hg debuginstall || cat pip.log
- checking encoding (ascii)...
- checking Python executable (*) (glob)
- checking Python implementation (*) (glob)
- checking Python version (2.*) (glob)
- checking Python lib (*)... (glob)
- checking Python security support (*) (glob)
- TLS 1.2 not supported by Python install; network connections lack modern security (?)
- SNI not supported by Python install; may have connectivity issues with some servers (?)
- checking Rust extensions \((installed|missing)\) (re)
- checking Mercurial version (*) (glob)
- checking Mercurial custom build (*) (glob)
- checking module policy (*) (glob)
- checking installed modules (*/mercurial)... (glob)
- checking registered compression engines (*) (glob)
- checking available compression engines (*) (glob)
- checking available compression engines for wire protocol (*) (glob)
- checking "re2" regexp engine \((available|missing)\) (re)
- checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
- checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
- checking commit editor... (*) (glob)
- checking username (test)
- no problems detected
-#endif
--- a/tests/test-issue1175.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-issue1175.t Wed Jan 04 16:02:22 2023 +0100
@@ -37,12 +37,7 @@
updating the branch cache
committed changeset 5:83a687e8a97c80992ba385bbfd766be181bfb1d1
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 6 changesets with 4 changes to 4 files
+ $ hg verify -q
$ hg export --git tip
# HG changeset patch
--- a/tests/test-journal-exists.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-journal-exists.t Wed Jan 04 16:02:22 2023 +0100
@@ -25,13 +25,7 @@
abort: abandoned transaction found
(run 'hg recover' to clean up transaction)
[255]
- $ hg recover --verify
- rolling back interrupted transaction
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg recover --verify -q
recover, no verify
--- a/tests/test-keyword.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-keyword.t Wed Jan 04 16:02:22 2023 +0100
@@ -838,12 +838,7 @@
$ hg status
? c
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 4 changes to 3 files
+ $ hg verify -q
$ cat a b
expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
do not process $Id:
--- a/tests/test-largefiles-wireproto.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-largefiles-wireproto.t Wed Jan 04 16:02:22 2023 +0100
@@ -151,14 +151,7 @@
$ hg commit -m "m2"
Invoking status precommit hook
A f2
- $ hg verify --large
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 2 files
- searching 1 changesets for largefiles
- verified existence of 1 revisions of 1 largefiles
+ $ hg verify --large -q
$ hg serve --config extensions.largefiles=! -R ../r6 -d -p $HGPORT --pid-file ../hg.pid
$ cat ../hg.pid >> $DAEMON_PIDS
$ hg push http://localhost:$HGPORT
@@ -249,6 +242,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 1 changesets with 1 changes to 1 files
searching 1 changesets for largefiles
changeset 0:cf03e5bb9936: f1 missing
@@ -280,14 +274,7 @@
$ [ ! -f http-clone/.hg/largefiles/02a439e5c31c526465ab1a0ca1f431f76b827b90 ]
$ [ ! -f http-clone/f1 ]
$ [ ! -f http-clone-usercache ]
- $ hg -R http-clone verify --large --lfc
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
- searching 1 changesets for largefiles
- verified contents of 1 revisions of 1 largefiles
+ $ hg -R http-clone verify --large --lfc -q
$ hg -R http-clone up -Cqr null
largefiles pulled on update - no server side problems:
@@ -343,14 +330,7 @@
adding file changes
added 2 changesets with 2 changes to 2 files
new changesets 567253b0f523:04d19c27a332 (2 drafts)
- $ hg -R batchverifyclone verify --large --lfa
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 2 files
- searching 2 changesets for largefiles
- verified existence of 2 revisions of 2 largefiles
+ $ hg -R batchverifyclone verify --large --lfa -q
$ tail -1 access.log
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=statlfile+sha%3D972a1a11f19934401291cc99117ec614933374ce%3Bstatlfile+sha%3Dc801c9cfe94400963fcb683246217d5db77f9a9a x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$ hg -R batchverifyclone update
@@ -381,14 +361,7 @@
added 1 changesets with 1 changes to 1 files
new changesets 6bba8cb6935d (1 drafts)
(run 'hg update' to get a working copy)
- $ hg -R batchverifyclone verify --lfa
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
- searching 3 changesets for largefiles
- verified existence of 3 revisions of 3 largefiles
+ $ hg -R batchverifyclone verify --lfa -q
$ tail -1 access.log
$LOCALIP - - [$LOGDATE$] "GET /?cmd=statlfile HTTP/1.1" 200 - x-hgarg-1:sha=c8559c3c9cfb42131794b7d8009230403b9b454c x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
--- a/tests/test-largefiles.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-largefiles.t Wed Jan 04 16:02:22 2023 +0100
@@ -1029,14 +1029,7 @@
2 largefiles updated, 0 removed
4 files updated, 0 files merged, 0 files removed, 0 files unresolved
8 additional largefiles cached
- $ hg -R a-clone1 verify --large --lfa --lfc
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 8 changesets with 24 changes to 10 files
- searching 8 changesets for largefiles
- verified contents of 13 revisions of 6 largefiles
+ $ hg -R a-clone1 verify --large --lfa --lfc -q
$ hg -R a-clone1 sum
parent: 1:ce8896473775
edit files
@@ -1122,7 +1115,7 @@
6 changesets found
uncompressed size of bundle content:
1389 (changelog)
- 1599 (manifests)
+ 1698 (manifests)
254 .hglf/large1
564 .hglf/large3
572 .hglf/sub/large4
@@ -1552,6 +1545,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 10 changesets with 28 changes to 10 files
searching 1 changesets for largefiles
verified existence of 3 revisions of 3 largefiles
@@ -1561,15 +1555,8 @@
$ mv $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 .
$ rm .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
- $ hg verify --large
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 10 changesets with 28 changes to 10 files
- searching 1 changesets for largefiles
+ $ hg verify --large -q
changeset 9:598410d3eb9a: sub/large4 references missing $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
- verified existence of 3 revisions of 3 largefiles
[1]
- introduce corruption and make sure that it is caught when checking content:
--- a/tests/test-lfconvert.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-lfconvert.t Wed Jan 04 16:02:22 2023 +0100
@@ -345,6 +345,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 8 changesets with 13 changes to 9 files
searching 7 changesets for largefiles
changeset 0:d4892ec57ce2: large references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/2e000fa7e85759c7f4c254d4d9c33ef481e459a7
--- a/tests/test-lfs.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-lfs.t Wed Jan 04 16:02:22 2023 +0100
@@ -787,8 +787,9 @@
checking manifests
crosschecking files in changesets and manifests
checking files
- l@1: unpacking 46a2f24864bc: integrity check failed on data/l:0
- large@0: unpacking 2c531e0992ff: integrity check failed on data/large:0
+ l@1: unpacking 46a2f24864bc: integrity check failed on l:0
+ large@0: unpacking 2c531e0992ff: integrity check failed on large:0
+ not checking dirstate because of previous errors
checked 5 changesets with 10 changes to 4 files
2 integrity errors encountered!
(first damaged changeset appears to be 0)
@@ -851,6 +852,7 @@
checking files
lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
lfs blob sha256:66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e renamed large -> l
+ checking dirstate
checked 5 changesets with 10 changes to 4 files
Verify will not try to download lfs blobs, if told not to by the config option
@@ -865,6 +867,7 @@
checking files
lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
lfs blob sha256:66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e renamed large -> l
+ checking dirstate
checked 5 changesets with 10 changes to 4 files
Verify will copy/link all lfs objects into the local store that aren't already
@@ -885,6 +888,7 @@
lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
lfs: adding b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c to the usercache
lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
+ checking dirstate
checked 5 changesets with 10 changes to 4 files
Verify will not copy/link a corrupted file from the usercache into the local
@@ -897,11 +901,12 @@
checking manifests
crosschecking files in changesets and manifests
checking files
- l@1: unpacking 46a2f24864bc: integrity check failed on data/l:0
+ l@1: unpacking 46a2f24864bc: integrity check failed on l:0
lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
- large@0: unpacking 2c531e0992ff: integrity check failed on data/large:0
+ large@0: unpacking 2c531e0992ff: integrity check failed on large:0
lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
+ not checking dirstate because of previous errors
checked 5 changesets with 10 changes to 4 files
2 integrity errors encountered!
(first damaged changeset appears to be 0)
@@ -917,6 +922,7 @@
lfs: found 66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e in the local lfs store
lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
+ checking dirstate
checked 5 changesets with 10 changes to 4 files
Damaging a file required by the update destination fails the update.
@@ -941,8 +947,9 @@
checking manifests
crosschecking files in changesets and manifests
checking files
- l@1: unpacking 46a2f24864bc: integrity check failed on data/l:0
- large@0: unpacking 2c531e0992ff: integrity check failed on data/large:0
+ l@1: unpacking 46a2f24864bc: integrity check failed on l:0
+ large@0: unpacking 2c531e0992ff: integrity check failed on large:0
+ not checking dirstate because of previous errors
checked 5 changesets with 10 changes to 4 files
2 integrity errors encountered!
(first damaged changeset appears to be 0)
@@ -967,11 +974,12 @@
checking manifests
crosschecking files in changesets and manifests
checking files
- l@1: unpacking 46a2f24864bc: integrity check failed on data/l:0
+ l@1: unpacking 46a2f24864bc: integrity check failed on l:0
lfs: found 22f66a3fc0b9bf3f012c814303995ec07099b3a9ce02a7af84b5970811074a3b in the local lfs store
- large@0: unpacking 2c531e0992ff: integrity check failed on data/large:0
+ large@0: unpacking 2c531e0992ff: integrity check failed on large:0
lfs: found 89b6070915a3d573ff3599d1cda305bc5e38549b15c4847ab034169da66e1ca8 in the local lfs store
lfs: found b1a6ea88da0017a0e77db139a54618986e9a2489bee24af9fe596de9daac498c in the local lfs store
+ not checking dirstate because of previous errors
checked 5 changesets with 10 changes to 4 files
2 integrity errors encountered!
(first damaged changeset appears to be 0)
@@ -987,7 +995,7 @@
Accessing a corrupt file will complain
$ hg --cwd fromcorrupt2 cat -r 0 large
- abort: integrity check failed on data/large:0
+ abort: integrity check failed on large:0
[50]
lfs -> normal -> lfs round trip conversions are possible. The 'none()'
--- a/tests/test-manifest.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-manifest.t Wed Jan 04 16:02:22 2023 +0100
@@ -246,12 +246,7 @@
$ hg up -qC .
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 8 changes to 8 files
+ $ hg verify -q
$ hg rollback -q --config ui.rollback=True
$ hg rm b.txt d.txt
@@ -270,12 +265,7 @@
ccc.txt\x00149da44f2a4e14f488b7bd4157945a9837408c00 (esc)
e.txt\x00149da44f2a4e14f488b7bd4157945a9837408c00 (esc)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 9 changes to 9 files
+ $ hg verify -q
$ cd ..
Test manifest cache interraction with shares
--- a/tests/test-narrow-clone-stream.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-narrow-clone-stream.t Wed Jan 04 16:02:22 2023 +0100
@@ -101,4 +101,5 @@
checking directory manifests (tree !)
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 40 changesets with 1 changes to 1 files
--- a/tests/test-narrow-exchange.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-narrow-exchange.t Wed Jan 04 16:02:22 2023 +0100
@@ -164,12 +164,7 @@
remote: adding file changes
remote: added 4 changesets with 4 changes to 2 files
$ cd ../master
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 8 changesets with 10 changes to 3 files
+ $ hg verify -q
Can not push to wider repo if change affects paths in wider repo that are
not also in narrower repo
@@ -218,8 +213,8 @@
remote: adding manifests
remote: adding file changes
remote: added 1 changesets with 0 changes to 0 files (no-lfs-on !)
- remote: error: pretxnchangegroup.lfs hook raised an exception: data/inside2/f@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !)
+ remote: error: pretxnchangegroup.lfs hook raised an exception: inside2/f@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !)
remote: transaction abort! (lfs-on !)
remote: rollback completed (lfs-on !)
- remote: abort: data/inside2/f@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !)
+ remote: abort: inside2/f@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !)
abort: stream ended unexpectedly (got 0 bytes, expected 4) (lfs-on !)
--- a/tests/test-narrow-share.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-narrow-share.t Wed Jan 04 16:02:22 2023 +0100
@@ -161,13 +161,7 @@
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd share-unshare
$ hg unshare
- $ hg verify
- checking changesets
- checking manifests
- checking directory manifests (tree !)
- crosschecking files in changesets and manifests
- checking files
- checked 11 changesets with 3 changes to 3 files
+ $ hg verify -q
$ cd ..
Dirstate should be left alone when upgrading from version of hg that didn't support narrow+share
--- a/tests/test-narrow-widen-no-ellipsis.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-narrow-widen-no-ellipsis.t Wed Jan 04 16:02:22 2023 +0100
@@ -274,13 +274,7 @@
I path:d3
I path:d6
I path:d9
- $ hg verify
- checking changesets
- checking manifests
- checking directory manifests (tree !)
- crosschecking files in changesets and manifests
- checking files
- checked 11 changesets with 4 changes to 4 files
+ $ hg verify -q
$ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n"
10: add d10/f
9: add d9/f
@@ -321,13 +315,7 @@
Verify shouldn't claim the repo is corrupt after a widen.
- $ hg verify
- checking changesets
- checking manifests
- checking directory manifests (tree !)
- crosschecking files in changesets and manifests
- checking files
- checked 11 changesets with 5 changes to 5 files
+ $ hg verify -q
Widening preserves parent of local commit
--- a/tests/test-narrow-widen.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-narrow-widen.t Wed Jan 04 16:02:22 2023 +0100
@@ -280,13 +280,7 @@
I path:d3
I path:d6
I path:d9
- $ hg verify
- checking changesets
- checking manifests
- checking directory manifests (tree !)
- crosschecking files in changesets and manifests
- checking files
- checked 8 changesets with 4 changes to 4 files
+ $ hg verify -q
$ hg l
@ ...7: add d10/f
|
@@ -340,13 +334,7 @@
Verify shouldn't claim the repo is corrupt after a widen.
- $ hg verify
- checking changesets
- checking manifests
- checking directory manifests (tree !)
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 5 changes to 5 files
+ $ hg verify -q
Widening preserves parent of local commit
--- a/tests/test-obsolete-changeset-exchange.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-obsolete-changeset-exchange.t Wed Jan 04 16:02:22 2023 +0100
@@ -47,12 +47,7 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
- $ hg -R ../other verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 2 files
+ $ hg -R ../other verify -q
Adding a changeset going extinct locally
------------------------------------------
--- a/tests/test-permissions.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-permissions.t Wed Jan 04 16:02:22 2023 +0100
@@ -19,31 +19,17 @@
$ hg commit -m "1"
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ chmod -r .hg/store/data/a.i
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
+ $ hg verify -q
abort: Permission denied: '$TESTTMP/t/.hg/store/data/a.i'
[255]
$ chmod +r .hg/store/data/a.i
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ chmod -w .hg/store/data/a.i
--- a/tests/test-phases.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-phases.t Wed Jan 04 16:02:22 2023 +0100
@@ -9,7 +9,7 @@
> txnclose-phase.test = sh $TESTTMP/hook.sh
> EOF
- $ hglog() { hg log --template "{rev} {phaseidx} {desc}\n" $*; }
+ $ hglog() { hg log -G --template "{rev} {phaseidx} {desc}\n" $*; }
$ mkcommit() {
> echo "$1" > "$1"
> hg add "$1"
@@ -36,7 +36,8 @@
New commit are draft by default
$ hglog
- 0 1 A
+ @ 0 1 A
+
Following commit are draft too
@@ -45,8 +46,10 @@
test-hook-close-phase: 27547f69f25460a52fff66ad004e58da7ad3fb56: -> draft
$ hglog
- 1 1 B
- 0 1 A
+ @ 1 1 B
+ |
+ o 0 1 A
+
Working directory phase is secret when its parent is secret.
@@ -103,8 +106,10 @@
$ hg phase
1: public
$ hglog
- 1 0 B
- 0 0 A
+ @ 1 0 B
+ |
+ o 0 0 A
+
$ mkcommit C
test-debug-phase: new rev 2: x -> 1
@@ -114,10 +119,14 @@
test-hook-close-phase: b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e: -> draft
$ hglog
- 3 1 D
- 2 1 C
- 1 0 B
- 0 0 A
+ @ 3 1 D
+ |
+ o 2 1 C
+ |
+ o 1 0 B
+ |
+ o 0 0 A
+
Test creating changeset as secret
@@ -125,11 +134,16 @@
test-debug-phase: new rev 4: x -> 2
test-hook-close-phase: a603bfb5a83e312131cebcd05353c217d4d21dde: -> secret
$ hglog
- 4 2 E
- 3 1 D
- 2 1 C
- 1 0 B
- 0 0 A
+ @ 4 2 E
+ |
+ o 3 1 D
+ |
+ o 2 1 C
+ |
+ o 1 0 B
+ |
+ o 0 0 A
+
Test the secret property is inherited
@@ -137,12 +151,18 @@
test-debug-phase: new rev 5: x -> 2
test-hook-close-phase: a030c6be5127abc010fcbff1851536552e6951a8: -> secret
$ hglog
- 5 2 H
- 4 2 E
- 3 1 D
- 2 1 C
- 1 0 B
- 0 0 A
+ @ 5 2 H
+ |
+ o 4 2 E
+ |
+ o 3 1 D
+ |
+ o 2 1 C
+ |
+ o 1 0 B
+ |
+ o 0 0 A
+
Even on merge
@@ -152,13 +172,20 @@
created new head
test-hook-close-phase: cf9fe039dfd67e829edf6522a45de057b5c86519: -> draft
$ hglog
- 6 1 B'
- 5 2 H
- 4 2 E
- 3 1 D
- 2 1 C
- 1 0 B
- 0 0 A
+ @ 6 1 B'
+ |
+ | o 5 2 H
+ | |
+ | o 4 2 E
+ | |
+ | o 3 1 D
+ | |
+ | o 2 1 C
+ |/
+ o 1 0 B
+ |
+ o 0 0 A
+
$ hg merge 4 # E
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -170,14 +197,22 @@
test-hook-close-phase: 17a481b3bccb796c0521ae97903d81c52bfee4af: -> secret
$ hglog
- 7 2 merge B' and E
- 6 1 B'
- 5 2 H
- 4 2 E
- 3 1 D
- 2 1 C
- 1 0 B
- 0 0 A
+ @ 7 2 merge B' and E
+ |\
+ | o 6 1 B'
+ | |
+ +---o 5 2 H
+ | |
+ o | 4 2 E
+ | |
+ o | 3 1 D
+ | |
+ o | 2 1 C
+ |/
+ o 1 0 B
+ |
+ o 0 0 A
+
Test secret changeset are not pushed
@@ -221,21 +256,34 @@
test-hook-close-phase: b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e: -> draft
test-hook-close-phase: cf9fe039dfd67e829edf6522a45de057b5c86519: -> draft
$ hglog
- 7 2 merge B' and E
- 6 1 B'
- 5 2 H
- 4 2 E
- 3 1 D
- 2 1 C
- 1 0 B
- 0 0 A
+ @ 7 2 merge B' and E
+ |\
+ | o 6 1 B'
+ | |
+ +---o 5 2 H
+ | |
+ o | 4 2 E
+ | |
+ o | 3 1 D
+ | |
+ o | 2 1 C
+ |/
+ o 1 0 B
+ |
+ o 0 0 A
+
$ cd ../push-dest
$ hglog
- 4 1 B'
- 3 1 D
- 2 1 C
- 1 0 B
- 0 0 A
+ o 4 1 B'
+ |
+ | o 3 1 D
+ | |
+ | o 2 1 C
+ |/
+ o 1 0 B
+ |
+ o 0 0 A
+
(Issue3303)
Check that remote secret changeset are ignore when checking creation of remote heads
@@ -328,11 +376,16 @@
test-hook-close-phase: cf9fe039dfd67e829edf6522a45de057b5c86519: -> public
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hglog
- 4 0 B'
- 3 0 D
- 2 0 C
- 1 0 B
- 0 0 A
+ o 4 0 B'
+ |
+ | o 3 0 D
+ | |
+ | o 2 0 C
+ |/
+ o 1 0 B
+ |
+ o 0 0 A
+
$ cd ..
But secret can still be bundled explicitly
@@ -357,11 +410,16 @@
test-hook-close-phase: b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e: -> public
test-hook-close-phase: cf9fe039dfd67e829edf6522a45de057b5c86519: -> public
$ hglog -R clone-dest
- 4 0 B'
- 3 0 D
- 2 0 C
- 1 0 B
- 0 0 A
+ o 4 0 B'
+ |
+ | o 3 0 D
+ | |
+ | o 2 0 C
+ |/
+ o 1 0 B
+ |
+ o 0 0 A
+
Test summary
@@ -385,16 +443,28 @@
$ cd initialrepo
$ hglog -r 'public()'
- 0 0 A
- 1 0 B
+ o 1 0 B
+ |
+ o 0 0 A
+
$ hglog -r 'draft()'
- 2 1 C
- 3 1 D
- 6 1 B'
+ o 6 1 B'
+ |
+ ~
+ o 3 1 D
+ |
+ o 2 1 C
+ |
+ ~
$ hglog -r 'secret()'
- 4 2 E
- 5 2 H
- 7 2 merge B' and E
+ @ 7 2 merge B' and E
+ |\
+ | ~
+ | o 5 2 H
+ |/
+ o 4 2 E
+ |
+ ~
test that phase are displayed in log at debug level
@@ -730,12 +800,7 @@
because repo.cancopy() is False
$ cd ../initialrepo
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 8 changesets with 7 changes to 7 files
+ $ hg verify -q
$ cd ..
@@ -1047,3 +1112,30 @@
$ hg up tip
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ cd ..
+
+Testing that command line flags override configuration
+
+ $ hg init commit-overrides
+ $ cd commit-overrides
+
+`hg commit --draft` overrides new-commit=secret
+
+ $ mkcommit A --config phases.new-commit='secret' --draft
+ test-debug-phase: new rev 0: x -> 1
+ test-hook-close-phase: 4a2df7238c3b48766b5e22fafbb8a2f506ec8256: -> draft
+ $ hglog
+ @ 0 1 A
+
+
+`hg commit --secret` overrides new-commit=draft
+
+ $ mkcommit B --config phases.new-commit='draft' --secret
+ test-debug-phase: new rev 1: x -> 2
+ test-hook-close-phase: 27547f69f25460a52fff66ad004e58da7ad3fb56: -> secret
+ $ hglog
+ @ 1 2 B
+ |
+ o 0 1 A
+
+
+ $ cd ..
--- a/tests/test-pull-bundle.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-pull-bundle.t Wed Jan 04 16:02:22 2023 +0100
@@ -33,8 +33,6 @@
$ cd repo
$ cat <<EOF > .hg/hgrc
- > [server]
- > pullbundle = True
> [experimental]
> evolution = True
> [extensions]
--- a/tests/test-pull-network.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-pull-network.t Wed Jan 04 16:02:22 2023 +0100
@@ -8,12 +8,7 @@
adding foo
$ hg commit -m 1
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ hg serve -p $HGPORT -d --pid-file=hg.pid
$ cat hg.pid >> $DAEMON_PIDS
@@ -30,12 +25,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd copy
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ hg co
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-pull-permission.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-pull-permission.t Wed Jan 04 16:02:22 2023 +0100
@@ -23,11 +23,6 @@
$ chmod +w a/.hg/store # let test clean up
$ cd b
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ cd ..
--- a/tests/test-pull-pull-corruption.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-pull-pull-corruption.t Wed Jan 04 16:02:22 2023 +0100
@@ -65,11 +65,6 @@
see the result
$ wait
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 11 changesets with 11 changes to 1 files
+ $ hg verify -q
$ cd ..
--- a/tests/test-push.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-push.t Wed Jan 04 16:02:22 2023 +0100
@@ -18,7 +18,7 @@
> echo
> hg init test-revflag-"$i"
> hg -R test-revflag push -r "$i" test-revflag-"$i"
- > hg -R test-revflag-"$i" verify
+ > hg -R test-revflag-"$i" verify -q
> done
pushing to test-revflag-0
@@ -27,11 +27,6 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
pushing to test-revflag-1
searching for changes
@@ -39,11 +34,6 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 1 files
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
pushing to test-revflag-2
searching for changes
@@ -51,11 +41,6 @@
adding manifests
adding file changes
added 3 changesets with 3 changes to 1 files
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
pushing to test-revflag-3
searching for changes
@@ -63,11 +48,6 @@
adding manifests
adding file changes
added 4 changesets with 4 changes to 1 files
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 4 changes to 1 files
pushing to test-revflag-4
searching for changes
@@ -75,11 +55,6 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 1 files
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
pushing to test-revflag-5
searching for changes
@@ -87,11 +62,6 @@
adding manifests
adding file changes
added 3 changesets with 3 changes to 1 files
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
pushing to test-revflag-6
searching for changes
@@ -99,11 +69,6 @@
adding manifests
adding file changes
added 4 changesets with 5 changes to 2 files
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 5 changes to 2 files
pushing to test-revflag-7
searching for changes
@@ -111,11 +76,6 @@
adding manifests
adding file changes
added 5 changesets with 6 changes to 3 files
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 6 changes to 3 files
pushing to test-revflag-8
searching for changes
@@ -123,11 +83,6 @@
adding manifests
adding file changes
added 5 changesets with 5 changes to 2 files
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 2 files
$ cd test-revflag-8
@@ -141,12 +96,7 @@
new changesets c70afb1ee985:faa2e4234c7a
(run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ cd ..
@@ -189,13 +139,9 @@
Expected to fail:
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
+ $ hg verify -q
beta@1: dddc47b3ba30 not in manifests
- checked 2 changesets with 4 changes to 2 files
+ not checking dirstate because of previous errors
1 integrity errors encountered!
(first damaged changeset appears to be 1)
[1]
@@ -224,13 +170,9 @@
Expected to fail:
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
+ $ hg verify -q
beta@1: manifest refers to unknown revision dddc47b3ba30
- checked 2 changesets with 2 changes to 2 files
+ not checking dirstate because of previous errors
1 integrity errors encountered!
(first damaged changeset appears to be 1)
[1]
--- a/tests/test-qrecord.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-qrecord.t Wed Jan 04 16:02:22 2023 +0100
@@ -68,6 +68,7 @@
--close-branch mark a branch head as closed
--amend amend the parent of the working directory
-s --secret use the secret phase for committing
+ --draft use the draft phase for committing
-e --edit invoke editor on commit messages
-I --include PATTERN [+] include names matching the given patterns
-X --exclude PATTERN [+] exclude names matching the given patterns
--- a/tests/test-rebase-conflicts.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-rebase-conflicts.t Wed Jan 04 16:02:22 2023 +0100
@@ -315,7 +315,7 @@
adding manifests
adding file changes
adding f1.txt revisions
- bundle2-input-part: total payload size 1686
+ bundle2-input-part: total payload size 1739
bundle2-input-part: "cache:rev-branch-cache" (advisory) supported
bundle2-input-part: total payload size 74
bundle2-input-part: "phase-heads" supported
--- a/tests/test-record.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-record.t Wed Jan 04 16:02:22 2023 +0100
@@ -51,6 +51,7 @@
--close-branch mark a branch head as closed
--amend amend the parent of the working directory
-s --secret use the secret phase for committing
+ --draft use the draft phase for committing
-e --edit invoke editor on commit messages
-I --include PATTERN [+] include names matching the given patterns
-X --exclude PATTERN [+] exclude names matching the given patterns
--- a/tests/test-repair-strip.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-repair-strip.t Wed Jan 04 16:02:22 2023 +0100
@@ -66,6 +66,7 @@
(expected 1)
b@?: 736c29771fba not in manifests
warning: orphan data file 'data/c.i'
+ not checking dirstate because of previous errors
checked 2 changesets with 3 changes to 2 files
2 warnings encountered!
2 integrity errors encountered!
@@ -79,6 +80,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 2 changesets with 2 changes to 2 files
$ teststrip 0 2 r .hg/store/data/b.i
% before update 0, strip 2
@@ -93,6 +95,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 4 changesets with 4 changes to 3 files
% journal contents
(no journal)
@@ -124,6 +127,7 @@
b@?: rev 1 points to nonexistent changeset 2
(expected 1)
c@?: rev 0 points to nonexistent changeset 3
+ not checking dirstate because of previous errors
checked 2 changesets with 4 changes to 3 files
1 warnings encountered!
7 integrity errors encountered!
@@ -138,6 +142,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 2 changesets with 2 changes to 2 files
$ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revlog-delta-find.t Wed Jan 04 16:02:22 2023 +0100
@@ -0,0 +1,333 @@
+==========================================================
+Test various things around delta computation within revlog
+==========================================================
+
+
+basic setup
+-----------
+
+ $ cat << EOF >> $HGRCPATH
+ > [debug]
+ > revlog.debug-delta=yes
+ > EOF
+ $ cat << EOF >> sha256line.py
+ > # a way to quickly produce a file of significant size and poorly compressible content.
+ > import hashlib
+ > import sys
+ > for line in sys.stdin:
+ > print(hashlib.sha256(line.encode('utf8')).hexdigest())
+ > EOF
+
+ $ hg init base-repo
+ $ cd base-repo
+
+create a "large" file
+
+ $ $TESTDIR/seq.py 1000 | $PYTHON $TESTTMP/sha256line.py > my-file.txt
+ $ hg add my-file.txt
+ $ hg commit -m initial-commit
+ DBG-DELTAS: FILELOG:my-file.txt: rev=0: delta-base=0 * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+
+Add more change at the end of the file
+
+ $ $TESTDIR/seq.py 1001 1200 | $PYTHON $TESTTMP/sha256line.py >> my-file.txt
+ $ hg commit -m "large-change"
+ DBG-DELTAS: FILELOG:my-file.txt: rev=1: delta-base=0 * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+
+Add small change at the start
+
+ $ hg up 'desc("initial-commit")' --quiet
+ $ mv my-file.txt foo
+ $ echo "small change at the start" > my-file.txt
+ $ cat foo >> my-file.txt
+ $ rm foo
+ $ hg commit -m "small-change"
+ DBG-DELTAS: FILELOG:my-file.txt: rev=2: delta-base=0 * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ created new head
+
+
+ $ hg log -r 'head()' -T '{node}\n' >> ../base-heads.nodes
+ $ hg log -r 'desc("initial-commit")' -T '{node}\n' >> ../initial.node
+ $ hg log -r 'desc("small-change")' -T '{node}\n' >> ../small.node
+ $ hg log -r 'desc("large-change")' -T '{node}\n' >> ../large.node
+ $ cd ..
+
+Check delta find policy and result for merge on commit
+======================================================
+
+Check that delta of merge picks the best of the two parents
+------------------------------------------------------
+
+As we check against both parents, the one with the largest change should
+produce the smallest delta and be picked.
+
+ $ hg clone base-repo test-parents --quiet
+ $ hg -R test-parents update 'nodefromfile("small.node")' --quiet
+ $ hg -R test-parents merge 'nodefromfile("large.node")' --quiet
+
+The delta base is the "large" revision as it produces a smaller delta.
+
+ $ hg -R test-parents commit -m "merge from small change"
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=1 * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+
+Check that the behavior tested above can be disabled
+----------------------------------------------------
+
+We disable the checking of both parents at the same time. The `small` change,
+which produces a less optimal delta, should be picked first as it is "closer" to
+the new commit.
+
+ $ hg clone base-repo test-no-parents --quiet
+ $ hg -R test-no-parents update 'nodefromfile("small.node")' --quiet
+ $ hg -R test-no-parents merge 'nodefromfile("large.node")' --quiet
+
+The delta base is the "small" revision, since checking both parents is disabled.
+
+ $ hg -R test-no-parents commit -m "merge from small change" \
+ > --config storage.revlog.optimize-delta-parent-choice=no
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=2 * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+
+
+Check delta-find policy and result when unbundling
+==================================================
+
+Build a bundle with all delta built against p1
+
+ $ hg bundle -R test-parents --all --config devel.bundle.delta=p1 all-p1.hg
+ 4 changesets found
+
+Default policy of trusting delta from the bundle
+------------------------------------------------
+
+Keeping the `p1` delta used in the bundle is sub-optimal for storage, but
+trusting the in-bundle delta is faster to apply.
+
+ $ hg init bundle-default
+ $ hg -R bundle-default unbundle all-p1.hg --quiet
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=0: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=1: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=2: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=2 * (glob)
+
+(confirm the file revisions are in the same order, 2 should be smaller than 1)
+
+ $ hg -R bundle-default debugdata my-file.txt 2 | wc -l
+ \s*1001 (re)
+ $ hg -R bundle-default debugdata my-file.txt 1 | wc -l
+ \s*1200 (re)
+
+explicitly enabled
+------------------
+
+Keeping the `p1` delta used in the bundle is sub-optimal for storage, but
+trusting the in-bundle delta is faster to apply.
+
+ $ hg init bundle-reuse-enabled
+ $ hg -R bundle-reuse-enabled unbundle all-p1.hg --quiet \
+ > --config storage.revlog.reuse-external-delta-parent=yes
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=0: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=1: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=2: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=2 * (glob)
+
+(confirm the file revisions are in the same order, 2 should be smaller than 1)
+
+ $ hg -R bundle-reuse-enabled debugdata my-file.txt 2 | wc -l
+ \s*1001 (re)
+ $ hg -R bundle-reuse-enabled debugdata my-file.txt 1 | wc -l
+ \s*1200 (re)
+
+explicitly disabled
+-------------------
+
+Not reusing the delta-base from the parent means the delta will be made
+against the "best" parent. (so not the same as the previous two)
+
+ $ hg init bundle-reuse-disabled
+ $ hg -R bundle-reuse-disabled unbundle all-p1.hg --quiet \
+ > --config storage.revlog.reuse-external-delta-parent=no
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=0: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=1: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=2: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=1 * (glob)
+
+(confirm the file revisions are in the same order, 2 should be smaller than 1)
+
+ $ hg -R bundle-reuse-disabled debugdata my-file.txt 2 | wc -l
+ \s*1001 (re)
+ $ hg -R bundle-reuse-disabled debugdata my-file.txt 1 | wc -l
+ \s*1200 (re)
+
+
+Check the path.*:delta-reuse-policy option
+==========================================
+
+Get a repository with the bad parent picked and a clone ready to pull the merge
+
+ $ cp -ar bundle-reuse-enabled peer-bad-delta
+ $ hg clone peer-bad-delta local-pre-pull --rev `cat large.node` --rev `cat small.node` --quiet
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=0: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=1: delta-base=0 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=2: delta-base=0 * (glob)
+
+Check the parent order for the file
+
+ $ hg -R local-pre-pull debugdata my-file.txt 2 | wc -l
+ \s*1001 (re)
+ $ hg -R local-pre-pull debugdata my-file.txt 1 | wc -l
+ \s*1200 (re)
+
+Pull with no value (so the default)
+-----------------------------------
+
+default is to reuse the (bad) delta
+
+ $ cp -ar local-pre-pull local-no-value
+ $ hg -R local-no-value pull --quiet
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=2 * (glob)
+
+Pull with explicitly the default
+--------------------------------
+
+default is to reuse the (bad) delta
+
+ $ cp -ar local-pre-pull local-default
+ $ hg -R local-default pull --quiet --config 'paths.default:delta-reuse-policy=default'
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=2 * (glob)
+
+Pull with no-reuse
+------------------
+
+We don't reuse the base, so we get a better delta
+
+ $ cp -ar local-pre-pull local-no-reuse
+ $ hg -R local-no-reuse pull --quiet --config 'paths.default:delta-reuse-policy=no-reuse'
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=1 * (glob)
+
+Pull with try-base
+------------------
+
+We requested to use the (bad) delta
+
+ $ cp -ar local-pre-pull local-try-base
+ $ hg -R local-try-base pull --quiet --config 'paths.default:delta-reuse-policy=try-base'
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=2 * (glob)
+
+Case where we force a "bad" delta to be applied
+===============================================
+
+We build a very different file content to force a full snapshot
+
+ $ cp -ar peer-bad-delta peer-bad-delta-with-full
+ $ cp -ar local-pre-pull local-pre-pull-full
+ $ echo '[paths]' >> local-pre-pull-full/.hg/hgrc
+ $ echo 'default=../peer-bad-delta-with-full' >> local-pre-pull-full/.hg/hgrc
+
+ $ hg -R peer-bad-delta-with-full update 'desc("merge")' --quiet
+ $ ($TESTDIR/seq.py 2000 2100; $TESTDIR/seq.py 500 510; $TESTDIR/seq.py 3000 3050) \
+ > | $PYTHON $TESTTMP/sha256line.py > peer-bad-delta-with-full/my-file.txt
+ $ hg -R peer-bad-delta-with-full commit -m 'trigger-full'
+ DBG-DELTAS: FILELOG:my-file.txt: rev=4: delta-base=4 * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+
+Check that "try-base" behavior challenges the delta
+--------------------------------------------------
+
+The bundling process creates a delta against the previous revision, however this
+is an invalid chain for the client, so it is not considered and we do a full
+snapshot again.
+
+ $ cp -ar local-pre-pull-full local-try-base-full
+ $ hg -R local-try-base-full pull --quiet \
+ > --config 'paths.default:delta-reuse-policy=try-base'
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=2 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=4: delta-base=4 * (glob)
+
+Check that "forced" behavior does not challenge the delta, even if it is full.
+---------------------------------------------------------------------------
+
+A full bundle should be accepted as full bundle without recomputation
+
+ $ cp -ar local-pre-pull-full local-forced-full
+ $ hg -R local-forced-full pull --quiet \
+ > --config 'paths.default:delta-reuse-policy=forced'
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=2 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=4: delta-base=4 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - * (glob)
+
+Check that "forced" behavior does not challenge the delta, even if it is bad.
+---------------------------------------------------------------------------
+
+The client does not challenge anything and applies the bizarre delta directly.
+
+Note: If the bundling process becomes smarter, this test might no longer work
+(as the server won't be sending "bad" deltas anymore) and might need something
+more subtle to test this behavior.
+
+ $ hg bundle -R peer-bad-delta-with-full --all --config devel.bundle.delta=p1 all-p1.hg
+ 5 changesets found
+ $ cp -ar local-pre-pull-full local-forced-full-p1
+ $ hg -R local-forced-full-p1 pull --quiet \
+ > --config 'paths.*:delta-reuse-policy=forced' all-p1.hg
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: CHANGELOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: MANIFESTLOG: * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=3: delta-base=2 * (glob)
+ DBG-DELTAS: FILELOG:my-file.txt: rev=4: delta-base=3 * (glob)
--- a/tests/test-revlog-raw.py Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-revlog-raw.py Wed Jan 04 16:02:22 2023 +0100
@@ -1,7 +1,6 @@
# test revlog interaction about raw data (flagprocessor)
-import collections
import hashlib
import sys
@@ -54,10 +53,6 @@
b'sparse-revlog': True,
}
-# The test wants to control whether to use delta explicitly, based on
-# "storedeltachains".
-revlog.revlog._isgooddeltainfo = lambda self, d, textlen: self._storedeltachains
-
def abort(msg):
print('abort: %s' % msg)
@@ -471,21 +466,21 @@
print(' got: %s' % result)
-snapshotmapall = {0: [6, 8, 11, 17, 19, 25], 8: [21], -1: [0, 30]}
-snapshotmap15 = {0: [17, 19, 25], 8: [21], -1: [30]}
+snapshotmapall = {0: {6, 8, 11, 17, 19, 25}, 8: {21}, -1: {0, 30}}
+snapshotmap15 = {0: {17, 19, 25}, 8: {21}, -1: {30}}
def findsnapshottest(rlog):
- resultall = collections.defaultdict(list)
- deltas._findsnapshots(rlog, resultall, 0)
- resultall = dict(resultall.items())
+ cache = deltas.SnapshotCache()
+ cache.update(rlog)
+ resultall = dict(cache.snapshots)
if resultall != snapshotmapall:
print('snapshot map differ:')
print(' expected: %s' % snapshotmapall)
print(' got: %s' % resultall)
- result15 = collections.defaultdict(list)
- deltas._findsnapshots(rlog, result15, 15)
- result15 = dict(result15.items())
+ cache15 = deltas.SnapshotCache()
+ cache15.update(rlog, 15)
+ result15 = dict(cache15.snapshots)
if result15 != snapshotmap15:
print('snapshot map differ:')
print(' expected: %s' % snapshotmap15)
--- a/tests/test-revlog-v2.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-revlog-v2.t Wed Jan 04 16:02:22 2023 +0100
@@ -117,16 +117,6 @@
hg verify should be happy
-------------------------
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
- $ hg verify -R ../cloned-repo
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -R ../cloned-repo -q
--- a/tests/test-rhg-sparse-narrow.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-rhg-sparse-narrow.t Wed Jan 04 16:02:22 2023 +0100
@@ -96,12 +96,7 @@
$ (cd ..; cp repo-sparse/.hg/store/data/hide.i repo-narrow/.hg/store/data/hide.i)
$ (cd ..; mkdir repo-narrow/.hg/store/data/dir2; cp repo-sparse/.hg/store/data/dir2/z.i repo-narrow/.hg/store/data/dir2/z.i)
- $ "$real_hg" verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 2 changes to 2 files
+ $ "$real_hg" verify -q
$ "$real_hg" files -r "$tip"
dir1/x
--- a/tests/test-rhg.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-rhg.t Wed Jan 04 16:02:22 2023 +0100
@@ -4,12 +4,11 @@
Unimplemented command
$ $NO_FALLBACK rhg unimplemented-command
- unsupported feature: error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context
+ unsupported feature: error: The subcommand 'unimplemented-command' wasn't recognized
- USAGE:
- rhg [OPTIONS] <SUBCOMMAND>
+ Usage: rhg [OPTIONS] <COMMAND>
- For more information try --help
+ For more information try '--help'
[252]
$ rhg unimplemented-command --config rhg.on-unsupported=abort-silent
@@ -159,10 +158,11 @@
$ $NO_FALLBACK rhg cat original --exclude="*.rs"
unsupported feature: error: Found argument '--exclude' which wasn't expected, or isn't valid in this context
- USAGE:
- rhg cat [OPTIONS] <FILE>...
+ If you tried to supply '--exclude' as a value rather than a flag, use '-- --exclude'
- For more information try --help
+ Usage: rhg cat <FILE>...
+
+ For more information try '--help'
[252]
$ rhg cat original --exclude="*.rs"
@@ -190,10 +190,11 @@
Blocking recursive fallback. The 'rhg.fallback-executable = rhg' config points to `rhg` itself.
unsupported feature: error: Found argument '--exclude' which wasn't expected, or isn't valid in this context
- USAGE:
- rhg cat [OPTIONS] <FILE>...
+ If you tried to supply '--exclude' as a value rather than a flag, use '-- --exclude'
- For more information try --help
+ Usage: rhg cat <FILE>...
+
+ For more information try '--help'
[252]
--- a/tests/test-rollback.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-rollback.t Wed Jan 04 16:02:22 2023 +0100
@@ -4,12 +4,7 @@
$ echo a > a
$ hg commit -Am'add a'
adding a
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ hg parents
changeset: 0:1f0dee641bb7
tag: tip
@@ -23,12 +18,7 @@
$ hg rollback
repository tip rolled back to revision -1 (undo commit)
working directory now based on revision -1
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 0 changesets with 0 changes to 0 files
+ $ hg verify -q
$ hg parents
$ hg status
A a
@@ -191,14 +181,8 @@
corrupt journal test
$ echo "foo" > .hg/store/journal
- $ hg recover --verify
- rolling back interrupted transaction
+ $ hg recover --verify -q
couldn't read journal entry 'foo\n'!
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
rollback disabled by config
$ cat >> $HGRCPATH <<EOF
@@ -433,12 +417,7 @@
abort: pretxncommit hook exited with status 1
[40]
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ cd ..
@@ -458,11 +437,6 @@
$ hg --config ui.ioerrors=pretxncommit,pretxnclose,txnclose,txnabort,msgabort,msgrollback commit -m 'multiple errors'
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
+ $ hg verify -q
$ cd ..
--- a/tests/test-shelve.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-shelve.t Wed Jan 04 16:02:22 2023 +0100
@@ -1600,6 +1600,7 @@
$ rm -r .hg/shelve*
#if phasebased
+ $ cp $HGRCPATH $TESTTMP/hgrc-saved
$ cat <<EOF >> $HGRCPATH
> [shelve]
> store = strip
@@ -1628,3 +1629,32 @@
#if stripbased
$ hg log --hidden --template '{user}\n'
#endif
+
+clean up
+
+#if phasebased
+ $ mv $TESTTMP/hgrc-saved $HGRCPATH
+#endif
+
+changed files should be reachable in all shelves
+
+create an extension that emits changed files
+
+ $ cat > shelve-changed-files.py << EOF
+ > """Command to emit changed files for a shelf"""
+ >
+ > from mercurial import registrar, shelve
+ >
+ > cmdtable = {}
+ > command = registrar.command(cmdtable)
+ >
+ >
+ > @command(b'shelve-changed-files')
+ > def shelve_changed_files(ui, repo, name):
+ > shelf = shelve.ShelfDir(repo).get(name)
+ > for file in shelf.changed_files(ui, repo):
+ > ui.write(file + b'\n')
+ > EOF
+
+ $ hg --config extensions.shelve-changed-files=shelve-changed-files.py shelve-changed-files default
+ somefile.py
--- a/tests/test-simple-update.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-simple-update.t Wed Jan 04 16:02:22 2023 +0100
@@ -5,12 +5,7 @@
adding foo
$ hg commit -m "1"
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ hg clone . ../branch
updating to branch default
@@ -34,12 +29,7 @@
1 local changesets published
(run 'hg update' to get a working copy)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
+ $ hg verify -q
$ hg co
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-sparse-revlog.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-sparse-revlog.t Wed Jan 04 16:02:22 2023 +0100
@@ -105,11 +105,11 @@
delta : 0 (100.00%)
snapshot : 383 ( 7.66%)
lvl-0 : 3 ( 0.06%)
- lvl-1 : 18 ( 0.36%)
- lvl-2 : 62 ( 1.24%)
- lvl-3 : 108 ( 2.16%)
- lvl-4 : 191 ( 3.82%)
- lvl-5 : 1 ( 0.02%)
+ lvl-1 : 18 ( 0.36%) non-ancestor-bases: 9 (50.00%)
+ lvl-2 : 62 ( 1.24%) non-ancestor-bases: 58 (93.55%)
+ lvl-3 : 108 ( 2.16%) non-ancestor-bases: 108 (100.00%)
+ lvl-4 : 191 ( 3.82%) non-ancestor-bases: 180 (94.24%)
+ lvl-5 : 1 ( 0.02%) non-ancestor-bases: 1 (100.00%)
deltas : 4618 (92.34%)
revision size : 58616973
snapshot : 9247844 (15.78%)
@@ -126,6 +126,9 @@
chunks size : 58616973
0x28 : 58616973 (100.00%)
+
+ total-stored-content: 1 732 705 361 bytes
+
avg chain length : 9
max chain length : 15
max chain reach : 27366701
@@ -144,9 +147,11 @@
deltas against prev : 3906 (84.58%)
where prev = p1 : 3906 (100.00%)
where prev = p2 : 0 ( 0.00%)
- other : 0 ( 0.00%)
+ other-ancestor : 0 ( 0.00%)
+ unrelated : 0 ( 0.00%)
deltas against p1 : 649 (14.05%)
deltas against p2 : 63 ( 1.36%)
+ deltas against ancs : 0 ( 0.00%)
deltas against other : 0 ( 0.00%)
@@ -159,7 +164,7 @@
4971 4970 -1 3 5 4930 snap 19179 346472 427596 1.23414 15994877 15567281 36.40652 427596 179288 1.00000 5
$ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971
DBG-DELTAS-SEARCH: SEARCH rev=4971
- DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
+ DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
DBG-DELTAS-SEARCH: type=snapshot-4
DBG-DELTAS-SEARCH: size=18296
@@ -167,11 +172,43 @@
DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
DBG-DELTAS-SEARCH: delta-search-time=* (glob)
DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
- DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
+ DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
+ DBG-DELTAS-SEARCH: type=snapshot-3
+ DBG-DELTAS-SEARCH: size=39228
+ DBG-DELTAS-SEARCH: base=4799
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
+ DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
+ DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
+ DBG-DELTAS-SEARCH: type=snapshot-2
+ DBG-DELTAS-SEARCH: size=50213
+ DBG-DELTAS-SEARCH: base=4623
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
+ DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+
+ $ cat << EOF >>.hg/hgrc
+ > [storage]
+ > revlog.optimize-delta-parent-choice = no
+ > revlog.reuse-external-delta = yes
+ > EOF
+
+ $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --quiet
+ DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+ $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source full
+ DBG-DELTAS-SEARCH: SEARCH rev=4971
+ DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
DBG-DELTAS-SEARCH: type=snapshot-4
- DBG-DELTAS-SEARCH: size=19179
+ DBG-DELTAS-SEARCH: size=18296
DBG-DELTAS-SEARCH: base=4930
- DBG-DELTAS-SEARCH: TOO-HIGH
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
DBG-DELTAS-SEARCH: type=snapshot-3
@@ -189,6 +226,101 @@
DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
DBG-DELTAS-SEARCH: delta-search-time=* (glob)
DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
- DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+ DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+ $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source storage
+ DBG-DELTAS-SEARCH: SEARCH rev=4971
+ DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - cached-delta
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
+ DBG-DELTAS-SEARCH: type=snapshot-3
+ DBG-DELTAS-SEARCH: size=39228
+ DBG-DELTAS-SEARCH: base=4799
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
+ DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=1 - search-rounds=1 try-count=1 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+ $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p1
+ DBG-DELTAS-SEARCH: SEARCH rev=4971
+ DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
+ DBG-DELTAS-SEARCH: type=snapshot-4
+ DBG-DELTAS-SEARCH: size=18296
+ DBG-DELTAS-SEARCH: base=4930
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
+ DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
+ DBG-DELTAS-SEARCH: type=snapshot-3
+ DBG-DELTAS-SEARCH: size=39228
+ DBG-DELTAS-SEARCH: base=4799
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
+ DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
+ DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
+ DBG-DELTAS-SEARCH: type=snapshot-2
+ DBG-DELTAS-SEARCH: size=50213
+ DBG-DELTAS-SEARCH: base=4623
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
+ DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+ $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p2
+ DBG-DELTAS-SEARCH: SEARCH rev=4971
+ DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
+ DBG-DELTAS-SEARCH: type=snapshot-4
+ DBG-DELTAS-SEARCH: size=18296
+ DBG-DELTAS-SEARCH: base=4930
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
+ DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
+ DBG-DELTAS-SEARCH: type=snapshot-3
+ DBG-DELTAS-SEARCH: size=39228
+ DBG-DELTAS-SEARCH: base=4799
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
+ DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
+ DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
+ DBG-DELTAS-SEARCH: type=snapshot-2
+ DBG-DELTAS-SEARCH: size=50213
+ DBG-DELTAS-SEARCH: base=4623
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
+ DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
+ $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source prev
+ DBG-DELTAS-SEARCH: SEARCH rev=4971
+ DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
+ DBG-DELTAS-SEARCH: type=snapshot-4
+ DBG-DELTAS-SEARCH: size=18296
+ DBG-DELTAS-SEARCH: base=4930
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
+ DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
+ DBG-DELTAS-SEARCH: type=snapshot-3
+ DBG-DELTAS-SEARCH: size=39228
+ DBG-DELTAS-SEARCH: base=4799
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
+ DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
+ DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
+ DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
+ DBG-DELTAS-SEARCH: type=snapshot-2
+ DBG-DELTAS-SEARCH: size=50213
+ DBG-DELTAS-SEARCH: base=4623
+ DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
+ DBG-DELTAS-SEARCH: delta-search-time=* (glob)
+ DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
+ DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
$ cd ..
--- a/tests/test-ssh-bundle1.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-ssh-bundle1.t Wed Jan 04 16:02:22 2023 +0100
@@ -71,12 +71,7 @@
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd local-stream
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 2 changes to 2 files
+ $ hg verify -q
$ hg branches
default 0:1160648e36ce
$ cd $TESTTMP
@@ -117,12 +112,7 @@
verify
$ cd local
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 2 changes to 2 files
+ $ hg verify -q
$ cat >> .hg/hgrc <<EOF
> [hooks]
> changegroup = sh -c "printenv.py --line changegroup-in-local 0 ../dummylog"
@@ -214,12 +204,7 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: add
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 3 changes to 2 files
+ $ hg verify -q
$ hg cat -r tip foo
bleah
$ echo z > z
@@ -293,9 +278,7 @@
remote: adding manifests
remote: adding file changes
remote: added 1 changesets with 1 changes to 1 files (py3 !)
- remote: added 1 changesets with 1 changes to 1 files (no-py3 no-chg !)
remote: KABOOM
- remote: added 1 changesets with 1 changes to 1 files (no-py3 chg !)
$ hg -R ../remote heads
changeset: 5:1383141674ec
tag: tip
@@ -463,9 +446,7 @@
remote: adding manifests
remote: adding file changes
remote: added 1 changesets with 1 changes to 1 files (py3 !)
- remote: added 1 changesets with 1 changes to 1 files (no-py3 no-chg !)
remote: KABOOM
- remote: added 1 changesets with 1 changes to 1 files (no-py3 chg !)
local stdout
debug output
--- a/tests/test-ssh-clone-r.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-ssh-clone-r.t Wed Jan 04 16:02:22 2023 +0100
@@ -20,7 +20,7 @@
$ for i in 0 1 2 3 4 5 6 7 8; do
> hg clone --stream -r "$i" ssh://user@dummy/remote test-"$i"
> if cd test-"$i"; then
- > hg verify
+ > hg verify -q
> cd ..
> fi
> done
@@ -31,11 +31,6 @@
new changesets bfaf4b5cbf01
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -43,11 +38,6 @@
new changesets bfaf4b5cbf01:21f32785131f
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -55,11 +45,6 @@
new changesets bfaf4b5cbf01:4ce51a113780
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -67,11 +52,6 @@
new changesets bfaf4b5cbf01:93ee6ab32777
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 4 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -79,11 +59,6 @@
new changesets bfaf4b5cbf01:c70afb1ee985
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -91,11 +66,6 @@
new changesets bfaf4b5cbf01:f03ae5a9b979
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 1 files
adding changesets
adding manifests
adding file changes
@@ -103,11 +73,6 @@
new changesets bfaf4b5cbf01:095cb14b1b4d
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 5 changes to 2 files
adding changesets
adding manifests
adding file changes
@@ -115,11 +80,6 @@
new changesets bfaf4b5cbf01:faa2e4234c7a
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 6 changes to 3 files
adding changesets
adding manifests
adding file changes
@@ -127,11 +87,6 @@
new changesets bfaf4b5cbf01:916f1afdef90
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 2 files
$ cd test-8
$ hg pull ../test-7
pulling from ../test-7
@@ -142,12 +97,7 @@
added 4 changesets with 2 changes to 3 files (+1 heads)
new changesets c70afb1ee985:faa2e4234c7a
(run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ cd ..
$ cd test-1
$ hg pull -r 4 ssh://user@dummy/remote
@@ -159,12 +109,7 @@
added 1 changesets with 0 changes to 0 files (+1 heads)
new changesets c70afb1ee985
(run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 2 changes to 1 files
+ $ hg verify -q
$ hg pull ssh://user@dummy/remote
pulling from ssh://user@dummy/remote
searching for changes
@@ -185,12 +130,7 @@
added 2 changesets with 0 changes to 0 files (+1 heads)
new changesets c70afb1ee985:f03ae5a9b979
(run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 3 changes to 1 files
+ $ hg verify -q
$ hg pull ssh://user@dummy/remote
pulling from ssh://user@dummy/remote
searching for changes
@@ -200,11 +140,6 @@
added 4 changesets with 4 changes to 4 files
new changesets 93ee6ab32777:916f1afdef90
(run 'hg update' to get a working copy)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 9 changesets with 7 changes to 4 files
+ $ hg verify -q
$ cd ..
--- a/tests/test-ssh.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-ssh.t Wed Jan 04 16:02:22 2023 +0100
@@ -61,12 +61,7 @@
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd local-stream
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 2 changes to 2 files
+ $ hg verify -q
$ hg branches
default 0:1160648e36ce
$ cd $TESTTMP
@@ -103,12 +98,7 @@
verify
$ cd local
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 2 changes to 2 files
+ $ hg verify -q
$ cat >> .hg/hgrc <<EOF
> [hooks]
> changegroup = sh -c "printenv.py changegroup-in-local 0 ../dummylog"
@@ -200,12 +190,7 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: add
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 3 changes to 2 files
+ $ hg verify -q
$ hg cat -r tip foo
bleah
$ echo z > z
@@ -290,10 +275,8 @@
remote: adding manifests
remote: adding file changes
remote: added 1 changesets with 1 changes to 1 files (py3 !)
- remote: added 1 changesets with 1 changes to 1 files (no-py3 no-chg !)
remote: KABOOM
remote: KABOOM IN PROCESS
- remote: added 1 changesets with 1 changes to 1 files (no-py3 chg !)
$ hg -R ../remote heads
changeset: 5:1383141674ec
tag: tip
@@ -515,10 +498,8 @@
remote: adding manifests
remote: adding file changes
remote: added 1 changesets with 1 changes to 1 files (py3 !)
- remote: added 1 changesets with 1 changes to 1 files (no-py3 no-chg !)
remote: KABOOM
remote: KABOOM IN PROCESS
- remote: added 1 changesets with 1 changes to 1 files (no-py3 chg !)
local stdout
debug output
--- a/tests/test-static-http.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-static-http.t Wed Jan 04 16:02:22 2023 +0100
@@ -38,12 +38,7 @@
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd local
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 2 changes to 2 files
+ $ hg verify -q
$ cat bar
foo
$ cd ../remote
@@ -134,13 +129,7 @@
new changesets be090ea66256:322ea90975df
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd local2
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 3 changes to 3 files
- checking subrepo links
+ $ hg verify -q
$ cat a
a
$ hg paths
@@ -155,12 +144,7 @@
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd local3
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 0 changesets with 0 changes to 0 files
+ $ hg verify -q
$ hg paths
default = static-http://localhost:$HGPORT/remotempty
--- a/tests/test-strip-cross.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-strip-cross.t Wed Jan 04 16:02:22 2023 +0100
@@ -80,35 +80,20 @@
> echo "% Trying to strip revision $i"
> hg --cwd $i strip $i
> echo "% Verifying"
- > hg --cwd $i verify
+ > hg --cwd $i verify -q
> echo
> done
% Trying to strip revision 0
saved backup bundle to $TESTTMP/files/0/.hg/strip-backup/cbb8c2f0a2e3-239800b9-backup.hg
% Verifying
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 12 changes to 6 files
% Trying to strip revision 1
saved backup bundle to $TESTTMP/files/1/.hg/strip-backup/124ecc0cbec9-6104543f-backup.hg
% Verifying
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 12 changes to 6 files
% Trying to strip revision 2
saved backup bundle to $TESTTMP/files/2/.hg/strip-backup/f6439b304a1a-c6505a5f-backup.hg
% Verifying
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 12 changes to 6 files
$ cd ..
@@ -139,26 +124,16 @@
> echo "% Trying to strip revision $i"
> hg --cwd $i strip $i
> echo "% Verifying"
- > hg --cwd $i verify
+ > hg --cwd $i verify -q
> echo
> done
% Trying to strip revision 2
saved backup bundle to $TESTTMP/manifests/2/.hg/strip-backup/f3015ad03c03-4d98bdc2-backup.hg
% Verifying
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 2 files
% Trying to strip revision 3
saved backup bundle to $TESTTMP/manifests/3/.hg/strip-backup/9632aa303aa4-69192e3f-backup.hg
% Verifying
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 2 files
$ cd ..
@@ -194,27 +169,16 @@
> echo "% Trying to strip revision $i"
> hg --cwd $i strip $i
> echo "% Verifying"
- > hg --cwd $i verify
+ > hg --cwd $i verify -q
> echo
> done
% Trying to strip revision 2
saved backup bundle to $TESTTMP/treemanifests/2/.hg/strip-backup/145f5c75f9ac-a105cfbe-backup.hg
% Verifying
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 4 changes to 3 files
% Trying to strip revision 3
saved backup bundle to $TESTTMP/treemanifests/3/.hg/strip-backup/e4e3de5c3cb2-f4c70376-backup.hg
% Verifying
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 4 changes to 3 files
+
$ cd ..
--- a/tests/test-subrepo-missing.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-subrepo-missing.t Wed Jan 04 16:02:22 2023 +0100
@@ -111,13 +111,7 @@
$ hg ci -m "amended subrepo (again)"
$ hg --config extensions.strip= --hidden strip -R subrepo -qr 'tip' --config devel.strip-obsmarkers=no
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 2 files
- checking subrepo links
+ $ hg verify -q
subrepo 'subrepo' is hidden in revision a66de08943b6
subrepo 'subrepo' is hidden in revision 674d05939c1e
subrepo 'subrepo' not found in revision a7d05d9055a4
@@ -125,13 +119,7 @@
verifying shouldn't init a new subrepo if the reference doesn't exist
$ mv subrepo b
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 5 changesets with 5 changes to 2 files
- checking subrepo links
+ $ hg verify -q
0: repository $TESTTMP/repo/subrepo not found
1: repository $TESTTMP/repo/subrepo not found
3: repository $TESTTMP/repo/subrepo not found
--- a/tests/test-treemanifest.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-treemanifest.t Wed Jan 04 16:02:22 2023 +0100
@@ -399,13 +399,7 @@
added 11 changesets with 15 changes to 10 files (+3 heads)
$ hg debugrequires -R clone | grep treemanifest
treemanifest
- $ hg -R clone verify
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 11 changesets with 15 changes to 10 files
+ $ hg -R clone verify -q
Create deeper repo with tree manifests.
@@ -567,13 +561,7 @@
$ hg ci -m troz
Verify works
- $ hg verify
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 18 changes to 8 files
+ $ hg verify -q
#if repofncache
Dirlogs are included in fncache
@@ -631,6 +619,7 @@
b/bar/orange/fly/housefly.txt@0: in changeset but not in manifest
b/foo/apple/bees/flower.py@0: in changeset but not in manifest
checking files
+ not checking dirstate because of previous errors
checked 4 changesets with 18 changes to 8 files
6 warnings encountered! (reporevlogstore !)
9 integrity errors encountered!
@@ -656,6 +645,7 @@
(expected None)
crosschecking files in changesets and manifests
checking files
+ not checking dirstate because of previous errors
checked 4 changesets with 18 changes to 8 files
2 warnings encountered!
8 integrity errors encountered!
@@ -707,13 +697,7 @@
deepclone/.hg/store/meta/~2e_a/00manifest.i (reporevlogstore !)
Verify passes.
$ cd deepclone
- $ hg verify
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 18 changes to 8 files
+ $ hg verify -q
$ cd ..
#if reporevlogstore
@@ -755,33 +739,15 @@
Local clone with basicstore
$ hg clone -U deeprepo-basicstore local-clone-basicstore
- $ hg -R local-clone-basicstore verify
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 18 changes to 8 files
+ $ hg -R local-clone-basicstore verify -q
Local clone with encodedstore
$ hg clone -U deeprepo-encodedstore local-clone-encodedstore
- $ hg -R local-clone-encodedstore verify
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 18 changes to 8 files
+ $ hg -R local-clone-encodedstore verify -q
Local clone with fncachestore
$ hg clone -U deeprepo local-clone-fncachestore
- $ hg -R local-clone-fncachestore verify
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 18 changes to 8 files
+ $ hg -R local-clone-fncachestore verify -q
Stream clone with basicstore
$ hg clone --config experimental.changegroup3=True --stream -U \
@@ -789,13 +755,7 @@
streaming all changes
28 files to transfer, * of data (glob)
transferred * in * seconds (*) (glob)
- $ hg -R stream-clone-basicstore verify
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 18 changes to 8 files
+ $ hg -R stream-clone-basicstore verify -q
Stream clone with encodedstore
$ hg clone --config experimental.changegroup3=True --stream -U \
@@ -803,13 +763,7 @@
streaming all changes
28 files to transfer, * of data (glob)
transferred * in * seconds (*) (glob)
- $ hg -R stream-clone-encodedstore verify
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 18 changes to 8 files
+ $ hg -R stream-clone-encodedstore verify -q
Stream clone with fncachestore
$ hg clone --config experimental.changegroup3=True --stream -U \
@@ -817,13 +771,7 @@
streaming all changes
22 files to transfer, * of data (glob)
transferred * in * seconds (*) (glob)
- $ hg -R stream-clone-fncachestore verify
- checking changesets
- checking manifests
- checking directory manifests
- crosschecking files in changesets and manifests
- checking files
- checked 4 changesets with 18 changes to 8 files
+ $ hg -R stream-clone-fncachestore verify -q
Packed bundle
$ hg -R deeprepo debugcreatestreamclonebundle repo-packed.hg
--- a/tests/test-unamend.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-unamend.t Wed Jan 04 16:02:22 2023 +0100
@@ -363,13 +363,7 @@
$ hg mv c wat
$ hg unamend
- $ hg verify -v
- repository uses revlog format 1
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 28 changesets with 16 changes to 11 files
+ $ hg verify -q
Retained copies in new prdecessor commit
--- a/tests/test-unionrepo.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-unionrepo.t Wed Jan 04 16:02:22 2023 +0100
@@ -133,12 +133,7 @@
$ hg -R repo3 paths
default = union:repo1+repo2
- $ hg -R repo3 verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 6 changesets with 11 changes to 6 files
+ $ hg -R repo3 verify -q
$ hg -R repo3 heads --template '{rev}:{node|short} {desc|firstline}\n'
5:2f0d178c469c repo2-3
--- a/tests/test-upgrade-repo.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-upgrade-repo.t Wed Jan 04 16:02:22 2023 +0100
@@ -853,12 +853,7 @@
verify should be happy
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
+ $ hg verify -q
old store should be backed up
@@ -995,12 +990,7 @@
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
+ $ hg verify -q
Check we can select negatively
@@ -1047,12 +1037,7 @@
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
+ $ hg verify -q
Check that we can select changelog only
@@ -1098,12 +1083,7 @@
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
+ $ hg verify -q
Check that we can select filelog only
@@ -1149,12 +1129,7 @@
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
+ $ hg verify -q
Check you can't skip revlog clone during important format downgrade
@@ -1224,12 +1199,7 @@
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
+ $ hg verify -q
Check you can't skip revlog clone during important format upgrade
@@ -1285,12 +1255,7 @@
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 3 changesets with 3 changes to 3 files
+ $ hg verify -q
$ cd ..
@@ -1413,12 +1378,7 @@
lfs
$ find .hg/store/lfs -type f
.hg/store/lfs/objects/d0/beab232adff5ba365880366ad30b1edb85c4c5372442b5d2fe27adc96d653f
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 2 changesets with 2 changes to 2 files
+ $ hg verify -q
$ hg debugdata lfs.bin 0
version https://git-lfs.github.com/spec/v1
oid sha256:d0beab232adff5ba365880366ad30b1edb85c4c5372442b5d2fe27adc96d653f
--- a/tests/test-util.py Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-util.py Wed Jan 04 16:02:22 2023 +0100
@@ -50,7 +50,7 @@
# attr.s default factory for util.timedstats.start binds the timer we
# need to mock out.
-_start_default = (util.timedcmstats.start.default, 'factory')
+_start_default = (util.timedcmstats.__attrs_attrs__.start.default, 'factory')
@contextlib.contextmanager
--- a/tests/test-verify.t Wed Jan 04 12:06:07 2023 +0100
+++ b/tests/test-verify.t Wed Jan 04 16:02:22 2023 +0100
@@ -20,6 +20,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 1 changesets with 3 changes to 3 files
verify with journal
@@ -31,6 +32,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 1 changesets with 3 changes to 3 files
$ rm .hg/store/journal
@@ -55,6 +57,7 @@
warning: revlog 'data/bar.txt.i' not in fncache!
0: empty or missing bar.txt
bar.txt@0: manifest refers to unknown revision 256559129457
+ not checking dirstate because of previous errors
checked 1 changesets with 0 changes to 3 files
3 warnings encountered!
hint: run "hg debugrebuildfncache" to recover from corrupt fncache
@@ -83,6 +86,7 @@
0: empty or missing changelog
manifest@0: d0b6632564d4 not in changesets
manifest@1: 941fc4534185 not in changesets
+ not checking dirstate because of previous errors
3 integrity errors encountered!
(first damaged changeset appears to be 0)
[1]
@@ -93,6 +97,7 @@
$ rm .hg/store/00manifest.*
$ hg verify -q
0: empty or missing manifest
+ not checking dirstate because of previous errors
1 integrity errors encountered!
(first damaged changeset appears to be 0)
[1]
@@ -106,6 +111,7 @@
0: empty or missing file
file@0: manifest refers to unknown revision 362fef284ce2
file@1: manifest refers to unknown revision c10f2164107d
+ not checking dirstate because of previous errors
1 warnings encountered!
hint: run "hg debugrebuildfncache" to recover from corrupt fncache
3 integrity errors encountered!
@@ -119,7 +125,13 @@
$ rm .hg/store/00manifest.*
$ hg verify -q
warning: orphan data file 'data/file.i'
+ warning: ignoring unknown working parent c5ddb05ab828!
+ file marked as tracked in p1 (000000000000) but not in manifest1
1 warnings encountered!
+ 1 integrity errors encountered!
+ dirstate inconsistent with current parent's manifest
+ 1 dirstate errors
+ [1]
$ cp -R .hg/store-full/. .hg/store
Entire changelog and filelog missing
@@ -134,6 +146,7 @@
?: empty or missing file
file@0: manifest refers to unknown revision 362fef284ce2
file@1: manifest refers to unknown revision c10f2164107d
+ not checking dirstate because of previous errors
1 warnings encountered!
hint: run "hg debugrebuildfncache" to recover from corrupt fncache
6 integrity errors encountered!
@@ -149,6 +162,7 @@
0: empty or missing manifest
warning: revlog 'data/file.i' not in fncache!
0: empty or missing file
+ not checking dirstate because of previous errors
1 warnings encountered!
hint: run "hg debugrebuildfncache" to recover from corrupt fncache
2 integrity errors encountered!
@@ -164,6 +178,7 @@
manifest@?: 941fc4534185 not in changesets
file@?: rev 1 points to nonexistent changeset 1
(expected 0)
+ not checking dirstate because of previous errors
1 warnings encountered!
3 integrity errors encountered!
[1]
@@ -175,6 +190,7 @@
$ hg verify -q
manifest@1: changeset refers to unknown revision 941fc4534185
file@1: c10f2164107d not in manifests
+ not checking dirstate because of previous errors
2 integrity errors encountered!
(first damaged changeset appears to be 1)
[1]
@@ -185,6 +201,7 @@
$ cp -f .hg/store-partial/data/file.* .hg/store/data
$ hg verify -q
file@1: manifest refers to unknown revision c10f2164107d
+ not checking dirstate because of previous errors
1 integrity errors encountered!
(first damaged changeset appears to be 1)
[1]
@@ -198,6 +215,7 @@
file@?: rev 1 points to nonexistent changeset 1
(expected 0)
file@?: c10f2164107d not in manifests
+ not checking dirstate because of previous errors
1 warnings encountered!
2 integrity errors encountered!
[1]
@@ -211,6 +229,7 @@
manifest@?: rev 1 points to nonexistent changeset 1
manifest@?: 941fc4534185 not in changesets
file@?: manifest refers to unknown revision c10f2164107d
+ not checking dirstate because of previous errors
3 integrity errors encountered!
[1]
$ cp -R .hg/store-full/. .hg/store
@@ -221,6 +240,7 @@
$ cp -f .hg/store-partial/data/file.* .hg/store/data
$ hg verify -q
manifest@1: changeset refers to unknown revision 941fc4534185
+ not checking dirstate because of previous errors
1 integrity errors encountered!
(first damaged changeset appears to be 1)
[1]
@@ -236,6 +256,7 @@
manifest@?: d0b6632564d4 not in changesets
file@?: rev 0 points to unexpected changeset 0
(expected 1)
+ not checking dirstate because of previous errors
1 warnings encountered!
4 integrity errors encountered!
(first damaged changeset appears to be 0)
@@ -249,6 +270,7 @@
$ hg verify -q
manifest@0: reading delta d0b6632564d4: * (glob)
file@0: 362fef284ce2 not in manifests
+ not checking dirstate because of previous errors
2 integrity errors encountered!
(first damaged changeset appears to be 0)
[1]
@@ -260,6 +282,7 @@
> 2> /dev/null
$ hg verify -q
file@0: unpacking 362fef284ce2: * (glob)
+ not checking dirstate because of previous errors
1 integrity errors encountered!
(first damaged changeset appears to be 0)
[1]
@@ -275,12 +298,7 @@
marked working directory as branch foo
(branches are permanent and global, did you want a bookmark?)
$ hg ci -m branchfoo
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 0 changes to 0 files
+ $ hg verify -q
test revlog corruption
@@ -292,14 +310,10 @@
$ dd if=.hg/store/data/a.i of=start bs=1 count=20 2>/dev/null
$ cat start b > .hg/store/data/a.i
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- a@1: broken revlog! (index data/a is corrupted)
+ $ hg verify -q
+ a@1: broken revlog! (index a is corrupted)
warning: orphan data file 'data/a.i'
- checked 2 changesets with 0 changes to 1 files
+ not checking dirstate because of previous errors
1 warnings encountered!
1 integrity errors encountered!
(first damaged changeset appears to be 1)
@@ -317,6 +331,7 @@
checking manifests
crosschecking files in changesets and manifests
checking files
+ checking dirstate
checked 1 changesets with 1 changes to 1 files
$ cd ..
@@ -330,12 +345,7 @@
> EOF
$ echo '[BASE64]content' > base64
$ hg commit -Aqm 'flag processor content' base64
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
$ cat >> $TESTTMP/break-base64.py <<EOF
> import base64
@@ -345,20 +355,11 @@
> breakbase64=$TESTTMP/break-base64.py
> EOF
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- base64@0: unpacking 794cee7777cb: integrity check failed on data/base64:0
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify -q
+ base64@0: unpacking 794cee7777cb: integrity check failed on base64:0
+ not checking dirstate because of previous errors
1 integrity errors encountered!
(first damaged changeset appears to be 0)
[1]
- $ hg verify --config verify.skipflags=2147483647
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- checked 1 changesets with 1 changes to 1 files
+ $ hg verify --config verify.skipflags=2147483647 -q