# HG changeset patch
# User Pierre-Yves David
# Date 1696982566 -7200
# Node ID 12c308c55e538415aaec1fa046c66deb9738ddb4
# Parent  752c5a5b73c687588ad0f52a7d9d3e0242709a06
# Parent  704c3d0878d93fc4e8fe0b72b2b1c9f0f3fe91a9
branching: merge stable into default

diff -r 704c3d0878d9 -r 12c308c55e53 contrib/benchmarks/__init__.py
--- a/contrib/benchmarks/__init__.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/contrib/benchmarks/__init__.py	Wed Oct 11 02:02:46 2023 +0200
@@ -40,7 +40,6 @@
     extensions,
     hg,
     ui as uimod,
-    util,
 )

 basedir = os.path.abspath(
@@ -66,7 +65,7 @@
     os.environ["HGRCPATH"] = os.environ.get("ASVHGRCPATH", "")
     # for "historical portability"
     # ui.load() has been available since d83ca85
-    if util.safehasattr(uimod.ui, "load"):
+    if hasattr(uimod.ui, "load"):
         ui = uimod.ui.load()
     else:
         ui = uimod.ui()
diff -r 704c3d0878d9 -r 12c308c55e53 contrib/byteify-strings.py
--- a/contrib/byteify-strings.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/contrib/byteify-strings.py	Wed Oct 11 02:02:46 2023 +0200
@@ -212,18 +212,14 @@
             fn = t.string

             # *attr() builtins don't accept byte strings to 2nd argument.
-            if (
-                fn
-                in (
-                    'getattr',
-                    'setattr',
-                    'hasattr',
-                    'safehasattr',
-                    'wrapfunction',
-                    'wrapclass',
-                    'addattr',
-                )
-                and (opts['allow-attr-methods'] or not _isop(i - 1, '.'))
+            if fn in (
+                'getattr',
+                'setattr',
+                'hasattr',
+                'safehasattr',
+                'wrapfunction',
+                'wrapclass',
+                'addattr',
             ):
                 arg1idx = _findargnofcall(1)
                 if arg1idx is not None:
@@ -312,12 +308,6 @@
         help='rewrite iteritems() and itervalues()',
     ),
     ap.add_argument(
-        '--allow-attr-methods',
-        action='store_true',
-        default=False,
-        help='also handle attr*() when they are methods',
-    ),
-    ap.add_argument(
         '--treat-as-kwargs',
         nargs="+",
         default=[],
@@ -328,7 +318,6 @@
     opts = {
         'dictiter': args.dictiter,
         'treat-as-kwargs': set(args.treat_as_kwargs),
-        'allow-attr-methods': args.allow_attr_methods,
    }
    for fname in args.files:
        fname = os.path.realpath(fname)
diff -r 704c3d0878d9 -r 12c308c55e53 contrib/check-code.py
--- a/contrib/check-code.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/contrib/check-code.py	Wed Oct 11 02:02:46 2023 +0200
@@ -383,12 +383,6 @@
             "use True/False for constant Boolean expression",
         ),
         (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'),
-        (
-            r'(?:(?<!def)\s+|\()hasattr\(',
-            'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) '
-            'instead',
-        ),
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/absorb.py
--- a/hgext/absorb.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/absorb.py	Wed Oct 11 02:02:46 2023 +0200
@@ -298,7 +298,7 @@
-    def __init__(self, fctxs, path, ui=None, opts=None):
+    def __init__(self, fctxs, path, ui=None, **opts):
         """([fctx], path, ui or None, {name: value}) -> None

         fctxs should be linear, and sorted by topo order - oldest first.
@@ -308,7 +308,7 @@
         self.fctxs = fctxs
         self.path = path
         self.ui = ui or nullui()
-        self.opts = opts or {}
+        self.opts = opts

         # following fields are built from fctxs. they exist for perf reason
         self.contents = [f.data() for f in fctxs]
@@ -375,7 +375,7 @@
                 % (short(self.fctxs[idx].node()), a1, a2, len(blines))
             )
         self.linelog.replacelines(rev, a1, a2, b1, b2)
-        if self.opts.get(b'edit_lines', False):
+        if self.opts.get('edit_lines', False):
             self.finalcontents = self._checkoutlinelogwithedits()
         else:
             self.finalcontents = self._checkoutlinelog()
@@ -668,7 +668,7 @@
     4. call commit, to commit changes to hg database
     """

-    def __init__(self, stack, ui=None, opts=None):
+    def __init__(self, stack, ui=None, **opts):
        """([ctx], ui or None) -> None

        stack: should be linear, and sorted by topo order - oldest first.
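The absorb hunks above and below all make the same change: internal helpers
stop taking a bytes-keyed `opts` dict and take native string-keyed `**opts`
instead, converting back with `pycompat.byteskwargs()` only at call sites
that still expect bytes keys. A minimal sketch of that pattern, using a
hypothetical `somecmd()` that is not part of this patch:

    from mercurial import pycompat, scmutil

    def somecmd(ui, repo, *pats, **opts):
        # native str keys are used everywhere inside the function ...
        if opts.get('dry_run'):
            ui.status(b'dry run\n')
        # ... and a bytes-keyed dict is rebuilt only for APIs that
        # still want one, such as scmutil.match()
        return scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))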
@@ -676,7 +676,7 @@
         """
         assert stack
         self.ui = ui or nullui()
-        self.opts = opts or {}
+        self.opts = opts

         self.stack = stack
         self.repo = stack[-1].repo().unfiltered()
@@ -696,7 +696,7 @@
         self.paths = []
         # but if --edit-lines is used, the user may want to edit files
         # even if they are not modified
-        editopt = self.opts.get(b'edit_lines')
+        editopt = self.opts.get('edit_lines')
         if not self.status.modified and editopt and match:
             interestingpaths = match.files()
         else:
@@ -720,7 +720,7 @@
                 continue
             seenfctxs.update(fctxs[1:])
             self.fctxmap[path] = ctx2fctx
-            fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
+            fstate = filefixupstate(fctxs, path, ui=self.ui, **self.opts)
             if fm is not None:
                 fm.startitem()
                 fm.plain(b'showing changes for ')
@@ -873,7 +873,7 @@
         # be slow. in absorb's case, no need to invalidate fsmonitorstate.
         noop = lambda: 0
         restore = noop
-        if util.safehasattr(dirstate, '_fsmonitorstate'):
+        if hasattr(dirstate, '_fsmonitorstate'):
             bak = dirstate._fsmonitorstate.invalidate

             def restore():
@@ -1009,7 +1009,7 @@
     return overlaycontext(memworkingcopy, ctx)


-def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
+def absorb(ui, repo, stack=None, targetctx=None, pats=None, **opts):
     """pick fixup chunks from targetctx, apply them to stack.

     if targetctx is None, the working copy context will be used.
@@ -1036,22 +1036,21 @@
         targetctx = repo[None]
     if pats is None:
         pats = ()
-    if opts is None:
-        opts = {}
-    state = fixupstate(stack, ui=ui, opts=opts)
-    matcher = scmutil.match(targetctx, pats, opts)
-    if opts.get(b'interactive'):
+
+    state = fixupstate(stack, ui=ui, **opts)
+    matcher = scmutil.match(targetctx, pats, pycompat.byteskwargs(opts))
+    if opts.get('interactive'):
         diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
         origchunks = patch.parsepatch(diff)
         chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
         targetctx = overlaydiffcontext(stack[-1], chunks)
-    if opts.get(b'edit_lines'):
+    if opts.get('edit_lines'):
         # If we're going to open the editor, don't ask the user to confirm
         # first
-        opts[b'apply_changes'] = True
+        opts['apply_changes'] = True
     fm = None
-    if opts.get(b'print_changes') or not opts.get(b'apply_changes'):
-        fm = ui.formatter(b'absorb', opts)
+    if opts.get('print_changes') or not opts.get('apply_changes'):
+        fm = ui.formatter(b'absorb', pycompat.byteskwargs(opts))
     state.diffwith(targetctx, matcher, fm)
     if fm is not None:
         fm.startitem()
@@ -1074,9 +1073,9 @@
             label=b'absorb.description',
         )
         fm.end()
-    if not opts.get(b'dry_run'):
+    if not opts.get('dry_run'):
         if (
-            not opts.get(b'apply_changes')
+            not opts.get('apply_changes')
             and state.ctxaffected
             and ui.promptchoice(
                 b"apply changes (y/N)? $$ &Yes $$ &No", default=1
@@ -1154,12 +1153,10 @@

    Returns 0 on success, 1 if all chunks were ignored and nothing amended.
""" - opts = pycompat.byteskwargs(opts) - with repo.wlock(), repo.lock(): - if not opts[b'dry_run']: + if not opts['dry_run']: cmdutil.checkunfinished(repo) - state = absorb(ui, repo, pats=pats, opts=opts) + state = absorb(ui, repo, pats=pats, **opts) if sum(s[0] for s in state.chunkstats.values()) == 0: return 1 diff -r 704c3d0878d9 -r 12c308c55e53 hgext/automv.py --- a/hgext/automv.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/automv.py Wed Oct 11 02:02:46 2023 +0200 @@ -56,9 +56,8 @@ def mvcheck(orig, ui, repo, *pats, **opts): """Hook to check for moves at commit time""" - opts = pycompat.byteskwargs(opts) renames = None - disabled = opts.pop(b'no_automv', False) + disabled = opts.pop('no_automv', False) with repo.wlock(): if not disabled: threshold = ui.configint(b'automv', b'similarity') @@ -67,7 +66,9 @@ _(b'automv.similarity must be between 0 and 100') ) if threshold > 0: - match = scmutil.match(repo[None], pats, opts) + match = scmutil.match( + repo[None], pats, pycompat.byteskwargs(opts) + ) added, removed = _interestingfiles(repo, match) uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) renames = _findrenames( @@ -82,7 +83,7 @@ # current extension structure, and this is not worse than what # happened before. scmutil._markchanges(repo, (), (), renames) - return orig(ui, repo, *pats, **pycompat.strkwargs(opts)) + return orig(ui, repo, *pats, **opts) def _interestingfiles(repo, matcher): diff -r 704c3d0878d9 -r 12c308c55e53 hgext/beautifygraph.py --- a/hgext/beautifygraph.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/beautifygraph.py Wed Oct 11 02:02:46 2023 +0200 @@ -103,5 +103,5 @@ ) return - extensions.wrapfunction(graphmod, b'outputgraph', outputprettygraph) - extensions.wrapfunction(templatekw, b'getgraphnode', getprettygraphnode) + extensions.wrapfunction(graphmod, 'outputgraph', outputprettygraph) + extensions.wrapfunction(templatekw, 'getgraphnode', getprettygraphnode) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/blackbox.py --- a/hgext/blackbox.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/blackbox.py Wed Oct 11 02:02:46 2023 +0200 @@ -67,48 +67,6 @@ cmdtable = {} command = registrar.command(cmdtable) -configtable = {} -configitem = registrar.configitem(configtable) - -configitem( - b'blackbox', - b'dirty', - default=False, -) -configitem( - b'blackbox', - b'maxsize', - default=b'1 MB', -) -configitem( - b'blackbox', - b'logsource', - default=False, -) -configitem( - b'blackbox', - b'maxfiles', - default=7, -) -configitem( - b'blackbox', - b'track', - default=lambda: [b'*'], -) -# Debug config option that also display the blackbox output on stderr -# (in addition to writing it to disk) -configitem( - b'blackbox', - b'debug.to-stderr', - default=False, -) -configitem( - b'blackbox', - b'ignore', - default=lambda: [b'chgserver', b'cmdserver', b'extension'], -) -configitem(b'blackbox', b'date-format', default=b'') - _lastlogger = loggingutil.proxylogger() diff -r 704c3d0878d9 -r 12c308c55e53 hgext/bookflow.py --- a/hgext/bookflow.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/bookflow.py Wed Oct 11 02:02:46 2023 +0200 @@ -117,8 +117,8 @@ def uisetup(ui): - extensions.wrapfunction(bookmarks, b'update', bookmarks_update) - extensions.wrapfunction(bookmarks, b'addbookmarks', bookmarks_addbookmarks) + extensions.wrapfunction(bookmarks, 'update', bookmarks_update) + extensions.wrapfunction(bookmarks, 'addbookmarks', bookmarks_addbookmarks) extensions.wrapcommand(commands.table, b'commit', commands_commit) extensions.wrapcommand(commands.table, b'pull', commands_pull) if 
not ui.configbool(MY_NAME, b'enable-branches'): diff -r 704c3d0878d9 -r 12c308c55e53 hgext/bugzilla.py --- a/hgext/bugzilla.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/bugzilla.py Wed Oct 11 02:02:46 2023 +0200 @@ -766,13 +766,13 @@ # inheritance with a new-style class. class cookietransport(cookietransportrequest, xmlrpclib.Transport): def __init__(self, use_datetime=0): - if util.safehasattr(xmlrpclib.Transport, "__init__"): + if hasattr(xmlrpclib.Transport, "__init__"): xmlrpclib.Transport.__init__(self, use_datetime) class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport): def __init__(self, use_datetime=0): - if util.safehasattr(xmlrpclib.Transport, "__init__"): + if hasattr(xmlrpclib.Transport, "__init__"): xmlrpclib.SafeTransport.__init__(self, use_datetime) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/children.py --- a/hgext/children.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/children.py Wed Oct 11 02:02:46 2023 +0200 @@ -67,8 +67,7 @@ See :hg:`help log` and :hg:`help revsets.children`. """ - opts = pycompat.byteskwargs(opts) - rev = opts.get(b'rev') + rev = opts.get('rev') ctx = logcmdutil.revsingle(repo, rev) if file_: fctx = repo.filectx(file_, changeid=ctx.rev()) @@ -76,7 +75,9 @@ else: childctxs = ctx.children() - displayer = logcmdutil.changesetdisplayer(ui, repo, opts) + displayer = logcmdutil.changesetdisplayer( + ui, repo, pycompat.byteskwargs(opts) + ) for cctx in childctxs: displayer.show(cctx) displayer.close() diff -r 704c3d0878d9 -r 12c308c55e53 hgext/churn.py --- a/hgext/churn.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/churn.py Wed Oct 11 02:02:46 2023 +0200 @@ -52,18 +52,17 @@ def countrate(ui, repo, amap, *pats, **opts): """Calculate stats""" - opts = pycompat.byteskwargs(opts) - if opts.get(b'dateformat'): + if opts.get('dateformat'): def getkey(ctx): t, tz = ctx.date() date = datetime.datetime(*time.gmtime(float(t) - tz)[:6]) return encoding.strtolocal( - date.strftime(encoding.strfromlocal(opts[b'dateformat'])) + date.strftime(encoding.strfromlocal(opts['dateformat'])) ) else: - tmpl = opts.get(b'oldtemplate') or opts.get(b'template') + tmpl = opts.get('oldtemplate') or opts.get('template') tmpl = logcmdutil.maketemplater(ui, repo, tmpl) def getkey(ctx): @@ -80,7 +79,7 @@ rev = ctx.rev() key = getkey(ctx).strip() key = amap.get(key, key) # alias remap - if opts.get(b'changesets'): + if opts.get('changesets'): rate[key] = (rate.get(key, (0,))[0] + 1, 0) else: parents = ctx.parents() @@ -96,11 +95,11 @@ wopts = logcmdutil.walkopts( pats=pats, - opts=opts, - revspec=opts[b'rev'], - date=opts[b'date'], - include_pats=opts[b'include'], - exclude_pats=opts[b'exclude'], + opts=pycompat.byteskwargs(opts), + revspec=opts['rev'], + date=opts['date'], + include_pats=opts['include'], + exclude_pats=opts['exclude'], ) revs, makefilematcher = logcmdutil.makewalker(repo, wopts) for ctx in scmutil.walkchangerevs(repo, revs, makefilematcher, prep): diff -r 704c3d0878d9 -r 12c308c55e53 hgext/clonebundles.py --- a/hgext/clonebundles.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/clonebundles.py Wed Oct 11 02:02:46 2023 +0200 @@ -349,7 +349,7 @@ def extsetup(ui): - extensions.wrapfunction(wireprotov1server, b'_capabilities', capabilities) + extensions.wrapfunction(wireprotov1server, '_capabilities', capabilities) # logic for bundle auto-generation @@ -987,7 +987,7 @@ @localrepo.unfilteredmethod def clonebundles_lock(self, wait=True): '''Lock the repository file related to clone bundles''' - if not util.safehasattr(self, '_cb_lock_ref'): + if not 
hasattr(self, '_cb_lock_ref'): self._cb_lock_ref = None l = self._currentlock(self._cb_lock_ref) if l is not None: diff -r 704c3d0878d9 -r 12c308c55e53 hgext/closehead.py --- a/hgext/closehead.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/closehead.py Wed Oct 11 02:02:46 2023 +0200 @@ -54,19 +54,16 @@ text=message, files=[], filectxfn=None, - user=opts.get(b'user'), - date=opts.get(b'date'), + user=opts.get('user'), + date=opts.get('date'), extra=extra, ) - tr = repo.transaction(b'commit') - ret = repo.commitctx(cctx, True) - bookmarks.update(repo, [rev, None], ret) - cctx.markcommitted(ret) - tr.close() + with repo.transaction(b'commit'): + ret = repo.commitctx(cctx, True) + bookmarks.update(repo, [rev, None], ret) + cctx.markcommitted(ret) - opts = pycompat.byteskwargs(opts) - - revs += tuple(opts.get(b'rev', [])) + revs += tuple(opts.get('rev', [])) revs = logcmdutil.revrange(repo, revs) if not revs: @@ -80,7 +77,7 @@ if rev not in heads: raise error.Abort(_(b'revision is not an open head: %d') % rev) - message = cmdutil.logmessage(ui, opts) + message = cmdutil.logmessage(ui, pycompat.byteskwargs(opts)) if not message: raise error.Abort(_(b"no commit message specified with -l or -m")) extra = {b'close': b'1'} diff -r 704c3d0878d9 -r 12c308c55e53 hgext/commitextras.py --- a/hgext/commitextras.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/commitextras.py Wed Oct 11 02:02:46 2023 +0200 @@ -16,7 +16,6 @@ error, extensions, registrar, - util, ) cmdtable = {} @@ -52,7 +51,7 @@ def _commit(orig, ui, repo, *pats, **opts): - if util.safehasattr(repo, 'unfiltered'): + if hasattr(repo, 'unfiltered'): repo = repo.unfiltered() class repoextra(repo.__class__): diff -r 704c3d0878d9 -r 12c308c55e53 hgext/convert/convcmd.py --- a/hgext/convert/convcmd.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/convert/convcmd.py Wed Oct 11 02:02:46 2023 +0200 @@ -435,7 +435,13 @@ """Sort revisions by date.""" def getdate(n): - return dateutil.parsedate(self.commitcache[n].date) + commit = self.commitcache[n] + # The other entries are here as tie breaker for stability + return ( + dateutil.parsedate(commit.date), + commit.rev, + commit.branch, + ) return keysorter(getdate) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/convert/cvs.py --- a/hgext/convert/cvs.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/convert/cvs.py Wed Oct 11 02:02:46 2023 +0200 @@ -12,7 +12,6 @@ from mercurial.i18n import _ from mercurial.pycompat import ( - getattr, open, ) from mercurial import ( diff -r 704c3d0878d9 -r 12c308c55e53 hgext/convert/cvsps.py --- a/hgext/convert/cvsps.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/convert/cvsps.py Wed Oct 11 02:02:46 2023 +0200 @@ -198,9 +198,9 @@ oldlog = pickle.load(open(cachefile, b'rb')) for e in oldlog: if not ( - util.safehasattr(e, b'branchpoints') - and util.safehasattr(e, b'commitid') - and util.safehasattr(e, b'mergepoint') + hasattr(e, b'branchpoints') + and hasattr(e, b'commitid') + and hasattr(e, b'mergepoint') ): ui.status(_(b'ignoring old cache\n')) oldlog = [] diff -r 704c3d0878d9 -r 12c308c55e53 hgext/convert/transport.py --- a/hgext/convert/transport.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/convert/transport.py Wed Oct 11 02:02:46 2023 +0200 @@ -27,9 +27,6 @@ Pool = svn.core.Pool SubversionException = svn.core.SubversionException -from mercurial.pycompat import getattr -from mercurial import util - # Some older versions of the Python bindings need to be # explicitly initialized. But what we want to do probably # won't work worth a darn against those libraries anyway! 
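The other change repeated across these files is replacing
`util.safehasattr(obj, name)` with the builtin `hasattr(obj, name)`. On
Python 3 the builtin no longer swallows arbitrary exceptions the way it did
on Python 2, so the wrapper (and the check-code rule that banned bare
`hasattr`) could be retired. A minimal sketch, with a stand-in class that is
not from this patch; note the attribute name must be a native str:

    class _clockresult:  # hypothetical stand-in object
        clock = b'c:12345'

    result = _clockresult()
    if hasattr(result, 'clock'):  # str name, not b'clock'
        print(result.clock)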
@@ -63,7 +60,7 @@
         if p:
             providers.append(p)
     else:
-        if util.safehasattr(svn.client, b'get_windows_simple_provider'):
+        if hasattr(svn.client, 'get_windows_simple_provider'):
             providers.append(svn.client.get_windows_simple_provider(pool))

     return svn.core.svn_auth_open(providers, pool)
@@ -85,7 +82,7 @@
             self.password = b''

         # Only Subversion 1.4 has reparent()
-        if ra is None or not util.safehasattr(svn.ra, b'reparent'):
+        if ra is None or not hasattr(svn.ra, 'reparent'):
             self.client = svn.client.create_context(self.pool)
             ab = _create_auth_baton(self.pool)
             self.client.auth_baton = ab
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/factotum.py
--- a/hgext/factotum.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/factotum.py	Wed Oct 11 02:02:46 2023 +0200
@@ -48,7 +48,6 @@
 import os

 from mercurial.i18n import _
-from mercurial.pycompat import setattr
 from mercurial.utils import procutil
 from mercurial import (
     error,
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/fastannotate/commands.py
--- a/hgext/fastannotate/commands.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/fastannotate/commands.py	Wed Oct 11 02:02:46 2023 +0200
@@ -19,7 +19,6 @@
     pycompat,
     registrar,
     scmutil,
-    util,
 )

 from . import (
@@ -218,7 +217,7 @@
     paths = list(_matchpaths(repo, rev, pats, opts, aopts))

     # for client, prefetch from the server
-    if util.safehasattr(repo, 'prefetchfastannotate'):
+    if hasattr(repo, 'prefetchfastannotate'):
         repo.prefetchfastannotate(paths)

     for path in paths:
@@ -273,7 +272,7 @@

     # check if we need to do prefetch (client-side)
     rev = opts.get('rev')
-    if util.safehasattr(repo, 'prefetchfastannotate') and rev is not None:
+    if hasattr(repo, 'prefetchfastannotate') and rev is not None:
         paths = list(_matchpaths(repo, rev, pats, pycompat.byteskwargs(opts)))
         repo.prefetchfastannotate(paths)

@@ -320,7 +319,7 @@
     ctx = logcmdutil.revsingle(repo, rev)
     m = scmutil.match(ctx, pats, opts)
     paths = list(ctx.walk(m))
-    if util.safehasattr(repo, 'prefetchfastannotate'):
+    if hasattr(repo, 'prefetchfastannotate'):
         # client
         if opts.get(b'REV'):
             raise error.Abort(_(b'--rev cannot be used for client'))
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/fastannotate/context.py
--- a/hgext/fastannotate/context.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/fastannotate/context.py	Wed Oct 11 02:02:46 2023 +0200
@@ -12,9 +12,7 @@

 from mercurial.i18n import _
 from mercurial.pycompat import (
-    getattr,
     open,
-    setattr,
 )
 from mercurial.node import (
     bin,
@@ -151,7 +149,10 @@

 def hashdiffopts(diffopts):
     diffoptstr = stringutil.pprint(
-        sorted((k, getattr(diffopts, k)) for k in mdiff.diffopts.defaults)
+        sorted(
+            (k, getattr(diffopts, pycompat.sysstr(k)))
+            for k in mdiff.diffopts.defaults
+        )
     )
     return hex(hashutil.sha1(diffoptstr).digest())[:6]

@@ -167,13 +168,12 @@
     """

     defaults = {
-        b'diffopts': None,
-        b'followrename': True,
-        b'followmerge': True,
+        'diffopts': None,
+        'followrename': True,
+        'followmerge': True,
    }

     def __init__(self, **opts):
-        opts = pycompat.byteskwargs(opts)
         for k, v in self.defaults.items():
             setattr(self, k, opts.get(k, v))

@@ -322,7 +322,7 @@
                 b'(resolved fctx: %s)\n'
                 % (
                     self.path,
-                    stringutil.pprint(util.safehasattr(revfctx, b'node')),
+                    stringutil.pprint(hasattr(revfctx, 'node')),
                 )
             )
             return self.annotatedirectly(revfctx, showpath, showlines)
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/fastannotate/protocol.py
--- a/hgext/fastannotate/protocol.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/fastannotate/protocol.py	Wed Oct 11 02:02:46 2023 +0200
@@ -101,7 +101,7 @@

 def serveruisetup(ui):
     _registerwireprotocommand()
-    extensions.wrapfunction(wireprotov1server, b'_capabilities', _capabilities)
+    extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)


 # client-side
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/fastannotate/support.py
--- a/hgext/fastannotate/support.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/fastannotate/support.py	Wed Oct 11 02:02:46 2023 +0200
@@ -6,7 +6,6 @@
 # GNU General Public License version 2 or any later version.


-from mercurial.pycompat import getattr
 from mercurial import (
     context as hgcontext,
     dagop,
@@ -129,8 +128,8 @@


 def replacehgwebannotate():
-    extensions.wrapfunction(hgweb.webutil, b'annotate', _hgwebannotate)
+    extensions.wrapfunction(hgweb.webutil, 'annotate', _hgwebannotate)


 def replacefctxannotate():
-    extensions.wrapfunction(hgcontext.basefilectx, b'annotate', _fctxannotate)
+    extensions.wrapfunction(hgcontext.basefilectx, 'annotate', _fctxannotate)
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/fastexport.py
--- a/hgext/fastexport.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/fastexport.py	Wed Oct 11 02:02:46 2023 +0200
@@ -15,7 +15,6 @@
 from mercurial import (
     error,
     logcmdutil,
-    pycompat,
     registrar,
     scmutil,
 )
@@ -176,22 +175,20 @@
     It can be piped into corresponding import routines like "git fast-import".
     Incremental dumps can be created by using marks files.
     """
-    opts = pycompat.byteskwargs(opts)
-
-    revs += tuple(opts.get(b"rev", []))
+    revs += tuple(opts.get("rev", []))
     if not revs:
         revs = scmutil.revrange(repo, [b":"])
     else:
         revs = logcmdutil.revrange(repo, revs)
     if not revs:
         raise error.Abort(_(b"no revisions matched"))
-    authorfile = opts.get(b"authormap")
+    authorfile = opts.get("authormap")
     if authorfile:
         authormap = convcmd.readauthormap(ui, authorfile)
     else:
         authormap = {}

-    import_marks = opts.get(b"import_marks")
+    import_marks = opts.get("import_marks")
     marks = {}
     if import_marks:
         with open(import_marks, "rb") as import_marks_file:
@@ -209,7 +206,7 @@
             export_commit(ui, repo, rev, marks, authormap)
             progress.increment()

-    export_marks = opts.get(b"export_marks")
+    export_marks = opts.get("export_marks")
     if export_marks:
         with open(export_marks, "wb") as export_marks_file:
             output_marks = [None] * len(marks)
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/fetch.py
--- a/hgext/fetch.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/fetch.py	Wed Oct 11 02:02:46 2023 +0200
@@ -74,10 +74,9 @@

     Returns 0 on success.
     """
-    opts = pycompat.byteskwargs(opts)
-    date = opts.get(b'date')
+    date = opts.get('date')
     if date:
-        opts[b'date'] = dateutil.parsedate(date)
+        opts['date'] = dateutil.parsedate(date)

     parent = repo.dirstate.p1()
     branch = repo.dirstate.branch()
@@ -109,12 +108,12 @@
     )

     path = urlutil.get_unique_pull_path_obj(b'fetch', ui, source)
-    other = hg.peer(repo, opts, path)
+    other = hg.peer(repo, pycompat.byteskwargs(opts), path)
     ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path.loc))
     revs = None
-    if opts[b'rev']:
+    if opts['rev']:
         try:
-            revs = [other.lookup(rev) for rev in opts[b'rev']]
+            revs = [other.lookup(rev) for rev in opts['rev']]
         except error.CapabilityError:
             err = _(
                 b"other repository doesn't support revision lookup, "
@@ -162,7 +161,7 @@
         # By default, we consider the repository we're pulling
         # *from* as authoritative, so we merge our changes into
         # theirs.
-        if opts[b'switch_parent']:
+        if opts['switch_parent']:
             firstparent, secondparent = newparent, newheads[0]
         else:
             firstparent, secondparent = newheads[0], newparent
@@ -179,14 +178,12 @@

     if not err:
         # we don't translate commit messages
-        message = cmdutil.logmessage(ui, opts) or (
+        message = cmdutil.logmessage(ui, pycompat.byteskwargs(opts)) or (
             b'Automated merge with %s' % urlutil.removeauth(other.url())
         )
-        editopt = opts.get(b'edit') or opts.get(b'force_editor')
+        editopt = opts.get('edit') or opts.get('force_editor')
         editor = cmdutil.getcommiteditor(edit=editopt, editform=b'fetch')
-        n = repo.commit(
-            message, opts[b'user'], opts[b'date'], editor=editor
-        )
+        n = repo.commit(message, opts['user'], opts['date'], editor=editor)
         ui.status(
             _(b'new changeset %d:%s merges remote changes with local\n')
             % (repo.changelog.rev(n), short(n))
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/fsmonitor/__init__.py
--- a/hgext/fsmonitor/__init__.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/fsmonitor/__init__.py	Wed Oct 11 02:02:46 2023 +0200
@@ -332,7 +332,7 @@
     matchfn = match.matchfn
     matchalways = match.always()
     dmap = self._map
-    if util.safehasattr(dmap, b'_map'):
+    if hasattr(dmap, b'_map'):
         # for better performance, directly access the inner dirstate map if the
         # standard dirstate implementation is in use.
         dmap = dmap._map
@@ -744,7 +744,7 @@
 def wrapdirstate(orig, self):
     ds = orig(self)
     # only override the dirstate when Watchman is available for the repo
-    if util.safehasattr(self, b'_fsmonitorstate'):
+    if hasattr(self, b'_fsmonitorstate'):
         makedirstate(self, ds)
     return ds

@@ -755,9 +755,9 @@
     )
     if pycompat.isdarwin:
         # An assist for avoiding the dangling-symlink fsevents bug
-        extensions.wrapfunction(os, b'symlink', wrapsymlink)
+        extensions.wrapfunction(os, 'symlink', wrapsymlink)

-    extensions.wrapfunction(merge, b'_update', wrapupdate)
+    extensions.wrapfunction(merge, '_update', wrapupdate)


 def wrapsymlink(orig, source, link_name):
@@ -811,7 +811,7 @@
         self.oldnode = self.repo[b'.'].node()

         if self.repo.currentwlock() is None:
-            if util.safehasattr(self.repo, b'wlocknostateupdate'):
+            if hasattr(self.repo, b'wlocknostateupdate'):
                 self._lock = self.repo.wlocknostateupdate()
             else:
                 self._lock = self.repo.wlock()
@@ -839,7 +839,7 @@
             self._lock.release()

     def _state(self, cmd, commithash, status=b'ok'):
-        if not util.safehasattr(self.repo, b'_watchmanclient'):
+        if not hasattr(self.repo, b'_watchmanclient'):
             return False
         try:
             self.repo._watchmanclient.command(
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/fsmonitor/watchmanclient.py
--- a/hgext/fsmonitor/watchmanclient.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/fsmonitor/watchmanclient.py	Wed Oct 11 02:02:46 2023 +0200
@@ -69,7 +69,7 @@

     def getcurrentclock(self):
         result = self.command(b'clock')
-        if not util.safehasattr(result, 'clock'):
+        if not hasattr(result, 'clock'):
             raise Unavailable(
                 b'clock result is missing clock value', invalidate=True
             )
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/git/__init__.py
--- a/hgext/git/__init__.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/git/__init__.py	Wed Oct 11 02:02:46 2023 +0200
@@ -342,8 +342,8 @@


 def extsetup(ui):
-    extensions.wrapfunction(localrepo, b'makestore', _makestore)
-    extensions.wrapfunction(localrepo, b'makefilestorage', _makefilestorage)
+    extensions.wrapfunction(localrepo, 'makestore', _makestore)
+    extensions.wrapfunction(localrepo, 'makefilestorage', _makefilestorage)
     # Inject --git flag for `hg init`
     entry = extensions.wrapcommand(commands.table, b'init', init)
     entry[1].extend(
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/git/dirstate.py
--- a/hgext/git/dirstate.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/git/dirstate.py	Wed Oct 11 02:02:46 2023 +0200
@@ -47,7 +47,7 @@
     return result, warnings


-extensions.wrapfunction(matchmod, b'readpatternfile', readpatternfile)
+extensions.wrapfunction(matchmod, 'readpatternfile', readpatternfile)


 _STATUS_MAP = {}
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/gpg.py
--- a/hgext/gpg.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/gpg.py	Wed Oct 11 02:02:46 2023 +0200
@@ -301,13 +301,13 @@

 def _dosign(ui, repo, *revs, **opts):
     mygpg = newgpg(ui, **opts)
-    opts = pycompat.byteskwargs(opts)
+
     sigver = b"0"
     sigmessage = b""

-    date = opts.get(b'date')
+    date = opts.get('date')
     if date:
-        opts[b'date'] = dateutil.parsedate(date)
+        opts['date'] = dateutil.parsedate(date)

     if revs:
         nodes = [repo.lookup(n) for n in revs]
@@ -335,42 +335,39 @@
         sigmessage += b"%s %s %s\n" % (hexnode, sigver, sig)

     # write it
-    if opts[b'local']:
+    if opts['local']:
         repo.vfs.append(b"localsigs", sigmessage)
         return

     msigs = match.exact([b'.hgsigs'])

-    if not opts[b"force"]:
+    if not opts["force"]:
         if any(repo.status(match=msigs, unknown=True, ignored=True)):
             raise error.Abort(
                 _(b"working copy of .hgsigs is changed "),
                 hint=_(b"please commit .hgsigs manually"),
             )

-    sigsfile = repo.wvfs(b".hgsigs", b"ab")
-    sigsfile.write(sigmessage)
-    sigsfile.close()
+    with repo.wvfs(b".hgsigs", b"ab") as sigsfile:
+        sigsfile.write(sigmessage)

     if b'.hgsigs' not in repo.dirstate:
         with repo.dirstate.changing_files(repo):
             repo[None].add([b".hgsigs"])

-    if opts[b"no_commit"]:
+    if opts["no_commit"]:
         return

-    message = opts[b'message']
+    message = opts['message']
     if not message:
         # we don't translate commit messages
         message = b"\n".join(
             [b"Added signature for changeset %s" % short(n) for n in nodes]
         )
     try:
-        editor = cmdutil.getcommiteditor(
-            editform=b'gpg.sign', **pycompat.strkwargs(opts)
-        )
+        editor = cmdutil.getcommiteditor(editform=b'gpg.sign', **opts)
         repo.commit(
-            message, opts[b'user'], opts[b'date'], match=msigs, editor=editor
+            message, opts['user'], opts['date'], match=msigs, editor=editor
         )
     except ValueError as inst:
         raise error.Abort(pycompat.bytestr(inst))
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/highlight/__init__.py
--- a/hgext/highlight/__init__.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/highlight/__init__.py	Wed Oct 11 02:02:46 2023 +0200
@@ -101,8 +101,8 @@
 def extsetup(ui):
     # monkeypatch in the new version
     extensions.wrapfunction(
-        webcommands, b'_filerevision', filerevision_highlight
+        webcommands, '_filerevision', filerevision_highlight
     )
-    extensions.wrapfunction(webcommands, b'annotate', annotate_highlight)
+    extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
     webcommands.highlightcss = generate_css
     webcommands.__all__.append(b'highlightcss')
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/histedit.py
--- a/hgext/histedit.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/histedit.py	Wed Oct 11 02:02:46 2023 +0200
@@ -207,7 +207,6 @@

 from mercurial.i18n import _
 from mercurial.pycompat import (
-    getattr,
     open,
 )
 from mercurial.node import (
@@ -2652,7 +2651,7 @@
         return orig(ui, repo, nodelist, *args, **kwargs)


-extensions.wrapfunction(repair, b'strip', stripwrapper)
+extensions.wrapfunction(repair, 'strip', stripwrapper)


 def summaryhook(ui, repo):
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/infinitepush/README
--- a/hgext/infinitepush/README	Tue Oct 10 18:29:04 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,23 +0,0 @@
-## What is it?
-
-This extension adds ability to save certain pushes to a remote blob store
-as bundles and to serve commits from remote blob store.
-The revisions are stored on disk or in everstore.
-The metadata are stored in sql or on disk.
-
-## Config options
-
-infinitepush.branchpattern: pattern to detect a scratchbranch, example
-                            're:scratch/.+'
-
-infinitepush.indextype: disk or sql for the metadata
-infinitepush.reponame: only relevant for sql metadata backend, reponame to put in
-                       sql
-
-infinitepush.indexpath: only relevant for ondisk metadata backend, the path to
-                        store the index on disk. If not set will be under .hg
-                        in a folder named filebundlestore
-
-infinitepush.storepath: only relevant for ondisk metadata backend, the path to
-                        store the bundles. If not set, it will be
-                        .hg/filebundlestore
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/infinitepush/__init__.py
--- a/hgext/infinitepush/__init__.py	Tue Oct 10 18:29:04 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1413 +0,0 @@
-# Infinite push
-#
-# Copyright 2016 Facebook, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-""" store some pushes in a remote blob store on the server (EXPERIMENTAL)
-
-IMPORTANT: if you use this extension, please contact
-mercurial-devel@mercurial-scm.org ASAP. This extension is believed to
-be unused and barring learning of users of this functionality, we will
-delete this code at the end of 2020.
-
-    [infinitepush]
-    # Server-side and client-side option. Pattern of the infinitepush bookmark
-    branchpattern = PATTERN
-
-    # Server or client
-    server = False
-
-    # Server-side option. Possible values: 'disk' or 'sql'. Fails if not set
-    indextype = disk
-
-    # Server-side option. Used only if indextype=sql.
-    # Format: 'IP:PORT:DB_NAME:USER:PASSWORD'
-    sqlhost = IP:PORT:DB_NAME:USER:PASSWORD
-
-    # Server-side option. Used only if indextype=disk.
-    # Filesystem path to the index store
-    indexpath = PATH
-
-    # Server-side option. Possible values: 'disk' or 'external'
-    # Fails if not set
-    storetype = disk
-
-    # Server-side option.
-    # Path to the binary that will save bundle to the bundlestore
-    # Formatted cmd line will be passed to it (see `put_args`)
-    put_binary = put
-
-    # Serser-side option. Used only if storetype=external.
-    # Format cmd-line string for put binary. Placeholder: {filename}
-    put_args = {filename}
-
-    # Server-side option.
-    # Path to the binary that get bundle from the bundlestore.
-    # Formatted cmd line will be passed to it (see `get_args`)
-    get_binary = get
-
-    # Serser-side option. Used only if storetype=external.
-    # Format cmd-line string for get binary. Placeholders: {filename} {handle}
-    get_args = {filename} {handle}
-
-    # Server-side option
-    logfile = FIlE
-
-    # Server-side option
-    loglevel = DEBUG
-
-    # Server-side option. Used only if indextype=sql.
-    # Sets mysql wait_timeout option.
-    waittimeout = 300
-
-    # Server-side option. Used only if indextype=sql.
-    # Sets mysql innodb_lock_wait_timeout option.
-    locktimeout = 120
-
-    # Server-side option. Used only if indextype=sql.
-    # Name of the repository
-    reponame = ''
-
-    # Client-side option. Used by --list-remote option. List of remote scratch
-    # patterns to list if no patterns are specified.
-    defaultremotepatterns = ['*']
-
-    # Instructs infinitepush to forward all received bundle2 parts to the
-    # bundle for storage. Defaults to False.
-    storeallparts = True
-
-    # routes each incoming push to the bundlestore. defaults to False
-    pushtobundlestore = True
-
-    [remotenames]
-    # Client-side option
-    # This option should be set only if remotenames extension is enabled.
-    # Whether remote bookmarks are tracked by remotenames extension.
-    bookmarks = True
-"""
-
-
-import collections
-import contextlib
-import functools
-import logging
-import os
-import random
-import re
-import socket
-import subprocess
-import time
-
-from mercurial.node import (
-    bin,
-    hex,
-)
-
-from mercurial.i18n import _
-
-from mercurial.pycompat import (
-    getattr,
-    open,
-)
-
-from mercurial.utils import (
-    procutil,
-    stringutil,
-    urlutil,
-)
-
-from mercurial import (
-    bundle2,
-    changegroup,
-    commands,
-    discovery,
-    encoding,
-    error,
-    exchange,
-    extensions,
-    hg,
-    localrepo,
-    phases,
-    pushkey,
-    pycompat,
-    registrar,
-    util,
-    wireprototypes,
-    wireprotov1peer,
-    wireprotov1server,
-)
-
-from . import (
-    bundleparts,
-    common,
-)
-
-# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
-# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
-# be specifying the version(s) of Mercurial they are tested with, or
-# leave the attribute unspecified.
-testedwith = b'ships-with-hg-core'
-
-configtable = {}
-configitem = registrar.configitem(configtable)
-
-configitem(
-    b'infinitepush',
-    b'deprecation-message',
-    default=True,
-)
-
-configitem(
-    b'infinitepush',
-    b'deprecation-abort',
-    default=True,
-)
-
-configitem(
-    b'infinitepush',
-    b'server',
-    default=False,
-)
-configitem(
-    b'infinitepush',
-    b'storetype',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'indextype',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'indexpath',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'storeallparts',
-    default=False,
-)
-configitem(
-    b'infinitepush',
-    b'reponame',
-    default=b'',
-)
-configitem(
-    b'scratchbranch',
-    b'storepath',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'branchpattern',
-    default=b'',
-)
-configitem(
-    b'infinitepush',
-    b'pushtobundlestore',
-    default=False,
-)
-configitem(
-    b'experimental',
-    b'server-bundlestore-bookmark',
-    default=b'',
-)
-configitem(
-    b'experimental',
-    b'infinitepush-scratchpush',
-    default=False,
-)
-
-experimental = b'experimental'
-configbookmark = b'server-bundlestore-bookmark'
-configscratchpush = b'infinitepush-scratchpush'
-
-scratchbranchparttype = bundleparts.scratchbranchparttype
-revsetpredicate = registrar.revsetpredicate()
-templatekeyword = registrar.templatekeyword()
-_scratchbranchmatcher = lambda x: False
-_maybehash = re.compile('^[a-f0-9]+$').search
-
-
-def _buildexternalbundlestore(ui):
-    put_args = ui.configlist(b'infinitepush', b'put_args', [])
-    put_binary = ui.config(b'infinitepush', b'put_binary')
-    if not put_binary:
-        raise error.Abort(b'put binary is not specified')
-    get_args = ui.configlist(b'infinitepush', b'get_args', [])
-    get_binary = ui.config(b'infinitepush', b'get_binary')
-    if not get_binary:
-        raise error.Abort(b'get binary is not specified')
-    from . import store
-
-    return store.externalbundlestore(put_binary, put_args, get_binary, get_args)
-
-
-def _buildsqlindex(ui):
-    sqlhost = ui.config(b'infinitepush', b'sqlhost')
-    if not sqlhost:
-        raise error.Abort(_(b'please set infinitepush.sqlhost'))
-    host, port, db, user, password = sqlhost.split(b':')
-    reponame = ui.config(b'infinitepush', b'reponame')
-    if not reponame:
-        raise error.Abort(_(b'please set infinitepush.reponame'))
-
-    logfile = ui.config(b'infinitepush', b'logfile', b'')
-    waittimeout = ui.configint(b'infinitepush', b'waittimeout', 300)
-    locktimeout = ui.configint(b'infinitepush', b'locktimeout', 120)
-    from . import sqlindexapi
-
-    return sqlindexapi.sqlindexapi(
-        reponame,
-        host,
-        port,
-        db,
-        user,
-        password,
-        logfile,
-        _getloglevel(ui),
-        waittimeout=waittimeout,
-        locktimeout=locktimeout,
-    )
-
-
-def _getloglevel(ui):
-    loglevel = ui.config(b'infinitepush', b'loglevel', b'DEBUG')
-    numeric_loglevel = getattr(logging, loglevel.upper(), None)
-    if not isinstance(numeric_loglevel, int):
-        raise error.Abort(_(b'invalid log level %s') % loglevel)
-    return numeric_loglevel
-
-
-def _tryhoist(ui, remotebookmark):
-    """returns a bookmarks with hoisted part removed
-
-    Remotenames extension has a 'hoist' config that allows to use remote
-    bookmarks without specifying remote path. For example, 'hg update master'
-    works as well as 'hg update remote/master'. We want to allow the same in
-    infinitepush.
-    """
-
-    if common.isremotebooksenabled(ui):
-        hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/'
-        if remotebookmark.startswith(hoist):
-            return remotebookmark[len(hoist) :]
-    return remotebookmark
-
-
-class bundlestore:
-    def __init__(self, repo):
-        self._repo = repo
-        storetype = self._repo.ui.config(b'infinitepush', b'storetype')
-        if storetype == b'disk':
-            from . import store
-
-            self.store = store.filebundlestore(self._repo.ui, self._repo)
-        elif storetype == b'external':
-            self.store = _buildexternalbundlestore(self._repo.ui)
-        else:
-            raise error.Abort(
-                _(b'unknown infinitepush store type specified %s') % storetype
-            )
-
-        indextype = self._repo.ui.config(b'infinitepush', b'indextype')
-        if indextype == b'disk':
-            from . import fileindexapi
-
-            self.index = fileindexapi.fileindexapi(self._repo)
-        elif indextype == b'sql':
-            self.index = _buildsqlindex(self._repo.ui)
-        else:
-            raise error.Abort(
-                _(b'unknown infinitepush index type specified %s') % indextype
-            )
-
-
-def _isserver(ui):
-    return ui.configbool(b'infinitepush', b'server')
-
-
-WARNING_MSG = b"""IMPORTANT: if you use this extension, please contact
-mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-unused and barring learning of users of this functionality, we drop this
-extension in Mercurial 6.6.
-""" - - -def reposetup(ui, repo): - if ui.configbool(b'infinitepush', b'deprecation-message'): - ui.write_err(WARNING_MSG) - if ui.configbool(b'infinitepush', b'deprecation-abort'): - msg = b"USING EXTENSION INFINITE PUSH DESPITE PENDING DROP" - hint = b"contact mercurial-devel@mercurial-scm.org" - raise error.Abort(msg, hint=hint) - if _isserver(ui) and repo.local(): - repo.bundlestore = bundlestore(repo) - - -def extsetup(ui): - commonsetup(ui) - if _isserver(ui): - serverextsetup(ui) - else: - clientextsetup(ui) - - -def uipopulate(ui): - if not ui.hasconfig(b"experimental", b"changegroup3"): - ui.setconfig(b"experimental", b"changegroup3", False, b"infinitepush") - - -def commonsetup(ui): - wireprotov1server.commands[b'listkeyspatterns'] = ( - wireprotolistkeyspatterns, - b'namespace patterns', - ) - scratchbranchpat = ui.config(b'infinitepush', b'branchpattern') - if scratchbranchpat: - global _scratchbranchmatcher - kind, pat, _scratchbranchmatcher = stringutil.stringmatcher( - scratchbranchpat - ) - - -def serverextsetup(ui): - origpushkeyhandler = bundle2.parthandlermapping[b'pushkey'] - - def newpushkeyhandler(*args, **kwargs): - bundle2pushkey(origpushkeyhandler, *args, **kwargs) - - newpushkeyhandler.params = origpushkeyhandler.params - bundle2.parthandlermapping[b'pushkey'] = newpushkeyhandler - - orighandlephasehandler = bundle2.parthandlermapping[b'phase-heads'] - newphaseheadshandler = lambda *args, **kwargs: bundle2handlephases( - orighandlephasehandler, *args, **kwargs - ) - newphaseheadshandler.params = orighandlephasehandler.params - bundle2.parthandlermapping[b'phase-heads'] = newphaseheadshandler - - extensions.wrapfunction( - localrepo.localrepository, b'listkeys', localrepolistkeys - ) - wireprotov1server.commands[b'lookup'] = ( - _lookupwrap(wireprotov1server.commands[b'lookup'][0]), - b'key', - ) - extensions.wrapfunction(exchange, b'getbundlechunks', getbundlechunks) - - extensions.wrapfunction(bundle2, b'processparts', processparts) - - -def clientextsetup(ui): - entry = extensions.wrapcommand(commands.table, b'push', _push) - - entry[1].append( - ( - b'', - b'bundle-store', - None, - _(b'force push to go to bundle store (EXPERIMENTAL)'), - ) - ) - - extensions.wrapcommand(commands.table, b'pull', _pull) - - extensions.wrapfunction(discovery, b'checkheads', _checkheads) - - wireprotov1peer.wirepeer.listkeyspatterns = listkeyspatterns - - partorder = exchange.b2partsgenorder - index = partorder.index(b'changeset') - partorder.insert( - index, partorder.pop(partorder.index(scratchbranchparttype)) - ) - - -def _checkheads(orig, pushop): - if pushop.ui.configbool(experimental, configscratchpush, False): - return - return orig(pushop) - - -def wireprotolistkeyspatterns(repo, proto, namespace, patterns): - patterns = wireprototypes.decodelist(patterns) - d = repo.listkeys(encoding.tolocal(namespace), patterns).items() - return pushkey.encodekeys(d) - - -def localrepolistkeys(orig, self, namespace, patterns=None): - if namespace == b'bookmarks' and patterns: - index = self.bundlestore.index - results = {} - bookmarks = orig(self, namespace) - for pattern in patterns: - results.update(index.getbookmarks(pattern)) - if pattern.endswith(b'*'): - pattern = b're:^' + pattern[:-1] + b'.*' - kind, pat, matcher = stringutil.stringmatcher(pattern) - for bookmark, node in bookmarks.items(): - if matcher(bookmark): - results[bookmark] = node - return results - else: - return orig(self, namespace) - - -@wireprotov1peer.batchable -def listkeyspatterns(self, namespace, patterns): - 
if not self.capable(b'pushkey'): - return {}, None - self.ui.debug(b'preparing listkeys for "%s"\n' % namespace) - - def decode(d): - self.ui.debug( - b'received listkey for "%s": %i bytes\n' % (namespace, len(d)) - ) - return pushkey.decodekeys(d) - - return { - b'namespace': encoding.fromlocal(namespace), - b'patterns': wireprototypes.encodelist(patterns), - }, decode - - -def _readbundlerevs(bundlerepo): - return list(bundlerepo.revs(b'bundle()')) - - -def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui): - """Tells remotefilelog to include all changed files to the changegroup - - By default remotefilelog doesn't include file content to the changegroup. - But we need to include it if we are fetching from bundlestore. - """ - changedfiles = set() - cl = bundlerepo.changelog - for r in bundlerevs: - # [3] means changed files - changedfiles.update(cl.read(r)[3]) - if not changedfiles: - return bundlecaps - - changedfiles = b'\0'.join(changedfiles) - newcaps = [] - appended = False - for cap in bundlecaps or []: - if cap.startswith(b'excludepattern='): - newcaps.append(b'\0'.join((cap, changedfiles))) - appended = True - else: - newcaps.append(cap) - if not appended: - # Not found excludepattern cap. Just append it - newcaps.append(b'excludepattern=' + changedfiles) - - return newcaps - - -def _rebundle(bundlerepo, bundleroots, unknownhead): - """ - Bundle may include more revision then user requested. For example, - if user asks for revision but bundle also consists its descendants. - This function will filter out all revision that user is not requested. - """ - parts = [] - - version = b'02' - outgoing = discovery.outgoing( - bundlerepo, commonheads=bundleroots, ancestorsof=[unknownhead] - ) - cgstream = changegroup.makestream(bundlerepo, outgoing, version, b'pull') - cgstream = util.chunkbuffer(cgstream).read() - cgpart = bundle2.bundlepart(b'changegroup', data=cgstream) - cgpart.addparam(b'version', version) - parts.append(cgpart) - - return parts - - -def _getbundleroots(oldrepo, bundlerepo, bundlerevs): - cl = bundlerepo.changelog - bundleroots = [] - for rev in bundlerevs: - node = cl.node(rev) - parents = cl.parents(node) - for parent in parents: - # include all revs that exist in the main repo - # to make sure that bundle may apply client-side - if parent in oldrepo: - bundleroots.append(parent) - return bundleroots - - -def _needsrebundling(head, bundlerepo): - bundleheads = list(bundlerepo.revs(b'heads(bundle())')) - return not ( - len(bundleheads) == 1 and bundlerepo[bundleheads[0]].node() == head - ) - - -def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile): - """generates bundle that will be send to the user - - returns tuple with raw bundle string and bundle type - """ - parts = [] - if not _needsrebundling(head, bundlerepo): - with util.posixfile(bundlefile, b"rb") as f: - unbundler = exchange.readbundle(bundlerepo.ui, f, bundlefile) - if isinstance(unbundler, changegroup.cg1unpacker): - part = bundle2.bundlepart( - b'changegroup', data=unbundler._stream.read() - ) - part.addparam(b'version', b'01') - parts.append(part) - elif isinstance(unbundler, bundle2.unbundle20): - haschangegroup = False - for part in unbundler.iterparts(): - if part.type == b'changegroup': - haschangegroup = True - newpart = bundle2.bundlepart(part.type, data=part.read()) - for key, value in part.params.items(): - newpart.addparam(key, value) - parts.append(newpart) - - if not haschangegroup: - raise error.Abort( - b'unexpected bundle without changegroup part, ' - + 
b'head: %s' % hex(head), - hint=b'report to administrator', - ) - else: - raise error.Abort(b'unknown bundle type') - else: - parts = _rebundle(bundlerepo, bundleroots, head) - - return parts - - -def getbundlechunks(orig, repo, source, heads=None, bundlecaps=None, **kwargs): - heads = heads or [] - # newheads are parents of roots of scratch bundles that were requested - newphases = {} - scratchbundles = [] - newheads = [] - scratchheads = [] - nodestobundle = {} - allbundlestocleanup = [] - try: - for head in heads: - if not repo.changelog.index.has_node(head): - if head not in nodestobundle: - newbundlefile = common.downloadbundle(repo, head) - bundlepath = b"bundle:%s+%s" % (repo.root, newbundlefile) - bundlerepo = hg.repository(repo.ui, bundlepath) - - allbundlestocleanup.append((bundlerepo, newbundlefile)) - bundlerevs = set(_readbundlerevs(bundlerepo)) - bundlecaps = _includefilelogstobundle( - bundlecaps, bundlerepo, bundlerevs, repo.ui - ) - cl = bundlerepo.changelog - bundleroots = _getbundleroots(repo, bundlerepo, bundlerevs) - for rev in bundlerevs: - node = cl.node(rev) - newphases[hex(node)] = str(phases.draft) - nodestobundle[node] = ( - bundlerepo, - bundleroots, - newbundlefile, - ) - - scratchbundles.append( - _generateoutputparts(head, *nodestobundle[head]) - ) - newheads.extend(bundleroots) - scratchheads.append(head) - finally: - for bundlerepo, bundlefile in allbundlestocleanup: - bundlerepo.close() - try: - os.unlink(bundlefile) - except (IOError, OSError): - # if we can't cleanup the file then just ignore the error, - # no need to fail - pass - - pullfrombundlestore = bool(scratchbundles) - wrappedchangegrouppart = False - wrappedlistkeys = False - oldchangegrouppart = exchange.getbundle2partsmapping[b'changegroup'] - try: - - def _changegrouppart(bundler, *args, **kwargs): - # Order is important here. First add non-scratch part - # and only then add parts with scratch bundles because - # non-scratch part contains parents of roots of scratch bundles. 
-            result = oldchangegrouppart(bundler, *args, **kwargs)
-            for bundle in scratchbundles:
-                for part in bundle:
-                    bundler.addpart(part)
-            return result
-
-        exchange.getbundle2partsmapping[b'changegroup'] = _changegrouppart
-        wrappedchangegrouppart = True
-
-        def _listkeys(orig, self, namespace):
-            origvalues = orig(self, namespace)
-            if namespace == b'phases' and pullfrombundlestore:
-                if origvalues.get(b'publishing') == b'True':
-                    # Make repo non-publishing to preserve draft phase
-                    del origvalues[b'publishing']
-                origvalues.update(newphases)
-            return origvalues
-
-        extensions.wrapfunction(
-            localrepo.localrepository, b'listkeys', _listkeys
-        )
-        wrappedlistkeys = True
-        heads = list((set(newheads) | set(heads)) - set(scratchheads))
-        result = orig(
-            repo, source, heads=heads, bundlecaps=bundlecaps, **kwargs
-        )
-    finally:
-        if wrappedchangegrouppart:
-            exchange.getbundle2partsmapping[b'changegroup'] = oldchangegrouppart
-        if wrappedlistkeys:
-            extensions.unwrapfunction(
-                localrepo.localrepository, b'listkeys', _listkeys
-            )
-    return result
-
-
-def _lookupwrap(orig):
-    def _lookup(repo, proto, key):
-        localkey = encoding.tolocal(key)
-
-        if isinstance(localkey, str) and _scratchbranchmatcher(localkey):
-            scratchnode = repo.bundlestore.index.getnode(localkey)
-            if scratchnode:
-                return b"%d %s\n" % (1, scratchnode)
-            else:
-                return b"%d %s\n" % (
-                    0,
-                    b'scratch branch %s not found' % localkey,
-                )
-        else:
-            try:
-                r = hex(repo.lookup(localkey))
-                return b"%d %s\n" % (1, r)
-            except Exception as inst:
-                if repo.bundlestore.index.getbundle(localkey):
-                    return b"%d %s\n" % (1, localkey)
-                else:
-                    r = stringutil.forcebytestr(inst)
-                    return b"%d %s\n" % (0, r)
-
-    return _lookup
-
-
-def _pull(orig, ui, repo, source=b"default", **opts):
-    opts = pycompat.byteskwargs(opts)
-    # Copy paste from `pull` command
-    path = urlutil.get_unique_pull_path_obj(
-        b"infinite-push's pull",
-        ui,
-        source,
-    )
-
-    scratchbookmarks = {}
-    unfi = repo.unfiltered()
-    unknownnodes = []
-    for rev in opts.get(b'rev', []):
-        if rev not in unfi:
-            unknownnodes.append(rev)
-    if opts.get(b'bookmark'):
-        bookmarks = []
-        revs = opts.get(b'rev') or []
-        for bookmark in opts.get(b'bookmark'):
-            if _scratchbranchmatcher(bookmark):
-                # rev is not known yet
-                # it will be fetched with listkeyspatterns next
-                scratchbookmarks[bookmark] = b'REVTOFETCH'
-            else:
-                bookmarks.append(bookmark)
-
-        if scratchbookmarks:
-            other = hg.peer(repo, opts, path)
-            try:
-                fetchedbookmarks = other.listkeyspatterns(
-                    b'bookmarks', patterns=scratchbookmarks
-                )
-                for bookmark in scratchbookmarks:
-                    if bookmark not in fetchedbookmarks:
-                        raise error.Abort(
-                            b'remote bookmark %s not found!' % bookmark
-                        )
-                    scratchbookmarks[bookmark] = fetchedbookmarks[bookmark]
-                    revs.append(fetchedbookmarks[bookmark])
-            finally:
-                other.close()
-        opts[b'bookmark'] = bookmarks
-        opts[b'rev'] = revs
-
-    if scratchbookmarks or unknownnodes:
-        # Set anyincoming to True
-        extensions.wrapfunction(
-            discovery, b'findcommonincoming', _findcommonincoming
-        )
-    try:
-        # Remote scratch bookmarks will be deleted because remotenames doesn't
-        # know about them. Let's save it before pull and restore after
-        remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, path.loc)
-        result = orig(ui, repo, path.loc, **pycompat.strkwargs(opts))
-        # TODO(stash): race condition is possible
-        # if scratch bookmarks was updated right after orig.
-        # But that's unlikely and shouldn't be harmful.
-        if common.isremotebooksenabled(ui):
-            remotescratchbookmarks.update(scratchbookmarks)
-            _saveremotebookmarks(repo, remotescratchbookmarks, path.loc)
-        else:
-            _savelocalbookmarks(repo, scratchbookmarks)
-        return result
-    finally:
-        if scratchbookmarks:
-            extensions.unwrapfunction(discovery, b'findcommonincoming')
-
-
-def _readscratchremotebookmarks(ui, repo, other):
-    if common.isremotebooksenabled(ui):
-        remotenamesext = extensions.find(b'remotenames')
-        remotepath = remotenamesext.activepath(repo.ui, other)
-        result = {}
-        # Let's refresh remotenames to make sure we have it up to date
-        # Seems that `repo.names['remotebookmarks']` may return stale bookmarks
-        # and it results in deleting scratch bookmarks. Our best guess how to
-        # fix it is to use `clearnames()`
-        repo._remotenames.clearnames()
-        for remotebookmark in repo.names[b'remotebookmarks'].listnames(repo):
-            path, bookname = remotenamesext.splitremotename(remotebookmark)
-            if path == remotepath and _scratchbranchmatcher(bookname):
-                nodes = repo.names[b'remotebookmarks'].nodes(
-                    repo, remotebookmark
-                )
-                if nodes:
-                    result[bookname] = hex(nodes[0])
-        return result
-    else:
-        return {}
-
-
-def _saveremotebookmarks(repo, newbookmarks, remote):
-    remotenamesext = extensions.find(b'remotenames')
-    remotepath = remotenamesext.activepath(repo.ui, remote)
-    branches = collections.defaultdict(list)
-    bookmarks = {}
-    remotenames = remotenamesext.readremotenames(repo)
-    for hexnode, nametype, remote, rname in remotenames:
-        if remote != remotepath:
-            continue
-        if nametype == b'bookmarks':
-            if rname in newbookmarks:
-                # It's possible if we have a normal bookmark that matches
-                # scratch branch pattern. In this case just use the current
-                # bookmark node
-                del newbookmarks[rname]
-            bookmarks[rname] = hexnode
-        elif nametype == b'branches':
-            # saveremotenames expects 20 byte binary nodes for branches
-            branches[rname].append(bin(hexnode))
-
-    for bookmark, hexnode in newbookmarks.items():
-        bookmarks[bookmark] = hexnode
-    remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks)
-
-
-def _savelocalbookmarks(repo, bookmarks):
-    if not bookmarks:
-        return
-    with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
-        changes = []
-        for scratchbook, node in bookmarks.items():
-            changectx = repo[node]
-            changes.append((scratchbook, changectx.node()))
-        repo._bookmarks.applychanges(repo, tr, changes)
-
-
-def _findcommonincoming(orig, *args, **kwargs):
-    common, inc, remoteheads = orig(*args, **kwargs)
-    return common, True, remoteheads
-
-
-def _push(orig, ui, repo, *dests, **opts):
-    opts = pycompat.byteskwargs(opts)
-    bookmark = opts.get(b'bookmark')
-    # we only support pushing one infinitepush bookmark at once
-    if len(bookmark) == 1:
-        bookmark = bookmark[0]
-    else:
-        bookmark = b''
-
-    oldphasemove = None
-    overrides = {(experimental, configbookmark): bookmark}
-
-    with ui.configoverride(overrides, b'infinitepush'):
-        scratchpush = opts.get(b'bundle_store')
-        if _scratchbranchmatcher(bookmark):
-            scratchpush = True
-            # bundle2 can be sent back after push (for example, bundle2
-            # containing `pushkey` part to update bookmarks)
-            ui.setconfig(experimental, b'bundle2.pushback', True)
-
-        if scratchpush:
-            # this is an infinitepush, we don't want the bookmark to be applied
-            # rather that should be stored in the bundlestore
-            opts[b'bookmark'] = []
-            ui.setconfig(experimental, configscratchpush, True)
-            oldphasemove = extensions.wrapfunction(
-                exchange, b'_localphasemove', _phasemove
-            )
-
-        paths = list(urlutil.get_push_paths(repo, ui, dests))
-        if len(paths) > 1:
-            msg = _(b'cannot push to multiple path with infinitepush')
-            raise error.Abort(msg)
-
-        path = paths[0]
-        destpath = path.loc
-        # Remote scratch bookmarks will be deleted because remotenames doesn't
-        # know about them. Let's save it before push and restore after
-        remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, destpath)
-        result = orig(ui, repo, *dests, **pycompat.strkwargs(opts))
-        if common.isremotebooksenabled(ui):
-            if bookmark and scratchpush:
-                other = hg.peer(repo, opts, path)
-                try:
-                    fetchedbookmarks = other.listkeyspatterns(
-                        b'bookmarks', patterns=[bookmark]
-                    )
-                    remotescratchbookmarks.update(fetchedbookmarks)
-                finally:
-                    other.close()
-            _saveremotebookmarks(repo, remotescratchbookmarks, destpath)
-    if oldphasemove:
-        exchange._localphasemove = oldphasemove
-    return result
-
-
-def _deleteinfinitepushbookmarks(ui, repo, path, names):
-    """Prune remote names by removing the bookmarks we don't want anymore,
-    then writing the result back to disk
-    """
-    remotenamesext = extensions.find(b'remotenames')
-
-    # remotename format is:
-    # (node, nametype ("branches" or "bookmarks"), remote, name)
-    nametype_idx = 1
-    remote_idx = 2
-    name_idx = 3
-    remotenames = [
-        remotename
-        for remotename in remotenamesext.readremotenames(repo)
-        if remotename[remote_idx] == path
-    ]
-    remote_bm_names = [
-        remotename[name_idx]
-        for remotename in remotenames
-        if remotename[nametype_idx] == b"bookmarks"
-    ]
-
-    for name in names:
-        if name not in remote_bm_names:
-            raise error.Abort(
-                _(
-                    b"infinitepush bookmark '{}' does not exist "
-                    b"in path '{}'"
-                ).format(name, path)
-            )
-
-    bookmarks = {}
-    branches = collections.defaultdict(list)
-    for node, nametype, remote, name in remotenames:
-        if nametype == b"bookmarks" and name not in names:
-            bookmarks[name] = node
-        elif nametype == b"branches":
-            # saveremotenames wants binary nodes for branches
-            branches[name].append(bin(node))
-
-    remotenamesext.saveremotenames(repo, path, branches, bookmarks)
-
-
-def _phasemove(orig, pushop, nodes, phase=phases.public):
-    """prevent commits from being marked public
-
-    Since these are going to a scratch branch, they aren't really being
-    published."""
-
-    if phase != phases.public:
-        orig(pushop, nodes, phase)
-
-
-@exchange.b2partsgenerator(scratchbranchparttype)
-def partgen(pushop, bundler):
-    bookmark = pushop.ui.config(experimental, configbookmark)
-    scratchpush = pushop.ui.configbool(experimental, configscratchpush)
-    if b'changesets' in pushop.stepsdone or not scratchpush:
-        return
-
-    if scratchbranchparttype not in bundle2.bundle2caps(pushop.remote):
-        return
-
-    pushop.stepsdone.add(b'changesets')
-    if not pushop.outgoing.missing:
-        pushop.ui.status(_(b'no changes found\n'))
-        pushop.cgresult = 0
-        return
-
-    # This parameter tells the server that the following bundle is an
-    # infinitepush. This let's it switch the part processing to our infinitepush
-    # code path.
- bundler.addparam(b"infinitepush", b"True") - - scratchparts = bundleparts.getscratchbranchparts( - pushop.repo, pushop.remote, pushop.outgoing, pushop.ui, bookmark - ) - - for scratchpart in scratchparts: - bundler.addpart(scratchpart) - - def handlereply(op): - # server either succeeds or aborts; no code to read - pushop.cgresult = 1 - - return handlereply - - -bundle2.capabilities[bundleparts.scratchbranchparttype] = () - - -def _getrevs(bundle, oldnode, force, bookmark): - b'extracts and validates the revs to be imported' - revs = [bundle[r] for r in bundle.revs(b'sort(bundle())')] - - # new bookmark - if oldnode is None: - return revs - - # Fast forward update - if oldnode in bundle and list(bundle.set(b'bundle() & %s::', oldnode)): - return revs - - return revs - - -@contextlib.contextmanager -def logservicecall(logger, service, **kwargs): - start = time.time() - logger(service, eventtype=b'start', **kwargs) - try: - yield - logger( - service, - eventtype=b'success', - elapsedms=(time.time() - start) * 1000, - **kwargs - ) - except Exception as e: - logger( - service, - eventtype=b'failure', - elapsedms=(time.time() - start) * 1000, - errormsg=stringutil.forcebytestr(e), - **kwargs - ) - raise - - -def _getorcreateinfinitepushlogger(op): - logger = op.records[b'infinitepushlogger'] - if not logger: - ui = op.repo.ui - try: - username = procutil.getuser() - except Exception: - username = b'unknown' - # Generate random request id to be able to find all logged entries - # for the same request. Since requestid is pseudo-generated it may - # not be unique, but we assume that (hostname, username, requestid) - # is unique. - random.seed() - requestid = random.randint(0, 2000000000) - hostname = socket.gethostname() - logger = functools.partial( - ui.log, - b'infinitepush', - user=username, - requestid=requestid, - hostname=hostname, - reponame=ui.config(b'infinitepush', b'reponame'), - ) - op.records.add(b'infinitepushlogger', logger) - else: - logger = logger[0] - return logger - - -def storetobundlestore(orig, repo, op, unbundler): - """stores the incoming bundle coming from push command to the bundlestore - instead of applying on the revlogs""" - - repo.ui.status(_(b"storing changesets on the bundlestore\n")) - bundler = bundle2.bundle20(repo.ui) - - # processing each part and storing it in bundler - with bundle2.partiterator(repo, op, unbundler) as parts: - for part in parts: - bundlepart = None - if part.type == b'replycaps': - # This configures the current operation to allow reply parts. 
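The logservicecall() context manager removed in this hunk was the extension's telemetry workhorse: it emits a 'start' event, then a 'success' or 'failure' event carrying the elapsed time in milliseconds. A self-contained sketch of the same pattern and how such a logger is used (the print-based logger is a stand-in for the functools.partial(ui.log, ...) assembled by _getorcreateinfinitepushlogger(); all names here are illustrative):

```python
import contextlib
import time


def logger(service, **kwargs):
    # stand-in for the functools.partial(ui.log, ...) logger built by
    # _getorcreateinfinitepushlogger() above
    print(service, kwargs)


@contextlib.contextmanager
def logged_call(log, service, **kwargs):
    # one 'start' event, then 'success' or 'failure' with elapsed ms,
    # mirroring logservicecall() above
    start = time.time()
    log(service, eventtype='start', **kwargs)
    try:
        yield
        log(service, eventtype='success',
            elapsedms=(time.time() - start) * 1000, **kwargs)
    except Exception as e:
        log(service, eventtype='failure',
            elapsedms=(time.time() - start) * 1000,
            errormsg=str(e), **kwargs)
        raise


with logged_call(logger, 'bundlestore', bundlesize=4096):
    pass  # ... write the bundle to the store ...
```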
- bundle2._processpart(op, part) - else: - bundlepart = bundle2.bundlepart(part.type, data=part.read()) - for key, value in part.params.items(): - bundlepart.addparam(key, value) - - # Certain parts require a response - if part.type in (b'pushkey', b'changegroup'): - if op.reply is not None: - rpart = op.reply.newpart(b'reply:%s' % part.type) - rpart.addparam( - b'in-reply-to', b'%d' % part.id, mandatory=False - ) - rpart.addparam(b'return', b'1', mandatory=False) - - op.records.add( - part.type, - { - b'return': 1, - }, - ) - if bundlepart: - bundler.addpart(bundlepart) - - # storing the bundle in the bundlestore - buf = util.chunkbuffer(bundler.getchunks()) - fd, bundlefile = pycompat.mkstemp() - try: - try: - fp = os.fdopen(fd, 'wb') - fp.write(buf.read()) - finally: - fp.close() - storebundle(op, {}, bundlefile) - finally: - try: - os.unlink(bundlefile) - except Exception: - # we would rather see the original exception - pass - - -def processparts(orig, repo, op, unbundler): - - # make sure we don't wrap processparts in case of `hg unbundle` - if op.source == b'unbundle': - return orig(repo, op, unbundler) - - # this server routes each push to bundle store - if repo.ui.configbool(b'infinitepush', b'pushtobundlestore'): - return storetobundlestore(orig, repo, op, unbundler) - - if unbundler.params.get(b'infinitepush') != b'True': - return orig(repo, op, unbundler) - - handleallparts = repo.ui.configbool(b'infinitepush', b'storeallparts') - - bundler = bundle2.bundle20(repo.ui) - cgparams = None - with bundle2.partiterator(repo, op, unbundler) as parts: - for part in parts: - bundlepart = None - if part.type == b'replycaps': - # This configures the current operation to allow reply parts. - bundle2._processpart(op, part) - elif part.type == bundleparts.scratchbranchparttype: - # Scratch branch parts need to be converted to normal - # changegroup parts, and the extra parameters stored for later - # when we upload to the store. Eventually those parameters will - # be put on the actual bundle instead of this part, then we can - # send a vanilla changegroup instead of the scratchbranch part. - cgversion = part.params.get(b'cgversion', b'01') - bundlepart = bundle2.bundlepart( - b'changegroup', data=part.read() - ) - bundlepart.addparam(b'version', cgversion) - cgparams = part.params - - # If we're not dumping all parts into the new bundle, we need to - # alert the future pushkey and phase-heads handler to skip - # the part. - if not handleallparts: - op.records.add( - scratchbranchparttype + b'_skippushkey', True - ) - op.records.add( - scratchbranchparttype + b'_skipphaseheads', True - ) - else: - if handleallparts: - # Ideally we would not process any parts, and instead just - # forward them to the bundle for storage, but since this - # differs from previous behavior, we need to put it behind a - # config flag for incremental rollout. 
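storetobundlestore() above ends with a spool-to-temp-file step that recurs throughout this module: write the assembled bundle to a mkstemp() file, hand the path to storebundle(), and unlink the file afterwards no matter what. The same discipline in isolation (standard library only; the function name is made up for this sketch):

```python
import os
import tempfile


def with_spooled_bundle(data, consume):
    # mkstemp() yields an open fd plus a path that can be handed to code
    # expecting a file name; the cleanup mirrors the finally blocks above
    fd, path = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'wb') as fp:
            fp.write(data)
        consume(path)
    finally:
        try:
            os.unlink(path)
        except OSError:
            pass  # prefer surfacing the original exception, as above


with_spooled_bundle(b'bundle bytes', lambda path: print('stored', path))
```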
- bundlepart = bundle2.bundlepart(part.type, data=part.read()) - for key, value in part.params.items(): - bundlepart.addparam(key, value) - - # Certain parts require a response - if part.type == b'pushkey': - if op.reply is not None: - rpart = op.reply.newpart(b'reply:pushkey') - rpart.addparam( - b'in-reply-to', str(part.id), mandatory=False - ) - rpart.addparam(b'return', b'1', mandatory=False) - else: - bundle2._processpart(op, part) - - if handleallparts: - op.records.add( - part.type, - { - b'return': 1, - }, - ) - if bundlepart: - bundler.addpart(bundlepart) - - # If commits were sent, store them - if cgparams: - buf = util.chunkbuffer(bundler.getchunks()) - fd, bundlefile = pycompat.mkstemp() - try: - try: - fp = os.fdopen(fd, 'wb') - fp.write(buf.read()) - finally: - fp.close() - storebundle(op, cgparams, bundlefile) - finally: - try: - os.unlink(bundlefile) - except Exception: - # we would rather see the original exception - pass - - -def storebundle(op, params, bundlefile): - log = _getorcreateinfinitepushlogger(op) - parthandlerstart = time.time() - log(scratchbranchparttype, eventtype=b'start') - index = op.repo.bundlestore.index - store = op.repo.bundlestore.store - op.records.add(scratchbranchparttype + b'_skippushkey', True) - - bundle = None - try: # guards bundle - bundlepath = b"bundle:%s+%s" % (op.repo.root, bundlefile) - bundle = hg.repository(op.repo.ui, bundlepath) - - bookmark = params.get(b'bookmark') - bookprevnode = params.get(b'bookprevnode', b'') - force = params.get(b'force') - - if bookmark: - oldnode = index.getnode(bookmark) - else: - oldnode = None - bundleheads = bundle.revs(b'heads(bundle())') - if bookmark and len(bundleheads) > 1: - raise error.Abort( - _(b'cannot push more than one head to a scratch branch') - ) - - revs = _getrevs(bundle, oldnode, force, bookmark) - - # Notify the user of what is being pushed - plural = b's' if len(revs) > 1 else b'' - op.repo.ui.warn(_(b"pushing %d commit%s:\n") % (len(revs), plural)) - maxoutput = 10 - for i in range(0, min(len(revs), maxoutput)): - firstline = bundle[revs[i]].description().split(b'\n')[0][:50] - op.repo.ui.warn(b" %s %s\n" % (revs[i], firstline)) - - if len(revs) > maxoutput + 1: - op.repo.ui.warn(b" ...\n") - firstline = bundle[revs[-1]].description().split(b'\n')[0][:50] - op.repo.ui.warn(b" %s %s\n" % (revs[-1], firstline)) - - nodesctx = [bundle[rev] for rev in revs] - inindex = lambda rev: bool(index.getbundle(bundle[rev].hex())) - if bundleheads: - newheadscount = sum(not inindex(rev) for rev in bundleheads) - else: - newheadscount = 0 - # If there's a bookmark specified, there should be only one head, - # so we choose the last node, which will be that head. - # If a bug or malicious client allows there to be a bookmark - # with multiple heads, we will place the bookmark on the last head. - bookmarknode = nodesctx[-1].hex() if nodesctx else None - key = None - if newheadscount: - with open(bundlefile, b'rb') as f: - bundledata = f.read() - with logservicecall( - log, b'bundlestore', bundlesize=len(bundledata) - ): - bundlesizelimit = 100 * 1024 * 1024 # 100 MB - if len(bundledata) > bundlesizelimit: - error_msg = ( - b'bundle is too big: %d bytes. 
'
-                        + b'max allowed size is 100 MB'
-                    )
-                    raise error.Abort(error_msg % (len(bundledata),))
-                key = store.write(bundledata)
-
-        with logservicecall(log, b'index', newheadscount=newheadscount), index:
-            if key:
-                index.addbundle(key, nodesctx)
-            if bookmark:
-                index.addbookmark(bookmark, bookmarknode)
-                _maybeaddpushbackpart(
-                    op, bookmark, bookmarknode, bookprevnode, params
-                )
-        log(
-            scratchbranchparttype,
-            eventtype=b'success',
-            elapsedms=(time.time() - parthandlerstart) * 1000,
-        )
-
-    except Exception as e:
-        log(
-            scratchbranchparttype,
-            eventtype=b'failure',
-            elapsedms=(time.time() - parthandlerstart) * 1000,
-            errormsg=stringutil.forcebytestr(e),
-        )
-        raise
-    finally:
-        if bundle:
-            bundle.close()
-
-
-@bundle2.parthandler(
-    scratchbranchparttype,
-    (
-        b'bookmark',
-        b'bookprevnode',
-        b'force',
-        b'pushbackbookmarks',
-        b'cgversion',
-    ),
-)
-def bundle2scratchbranch(op, part):
-    '''unbundle a bundle2 part containing a changegroup to store'''
-
-    bundler = bundle2.bundle20(op.repo.ui)
-    cgversion = part.params.get(b'cgversion', b'01')
-    cgpart = bundle2.bundlepart(b'changegroup', data=part.read())
-    cgpart.addparam(b'version', cgversion)
-    bundler.addpart(cgpart)
-    buf = util.chunkbuffer(bundler.getchunks())
-
-    fd, bundlefile = pycompat.mkstemp()
-    try:
-        try:
-            fp = os.fdopen(fd, 'wb')
-            fp.write(buf.read())
-        finally:
-            fp.close()
-        storebundle(op, part.params, bundlefile)
-    finally:
-        try:
-            os.unlink(bundlefile)
-        except FileNotFoundError:
-            pass
-
-    return 1
-
-
-def _maybeaddpushbackpart(op, bookmark, newnode, oldnode, params):
-    if params.get(b'pushbackbookmarks'):
-        if op.reply and b'pushback' in op.reply.capabilities:
-            params = {
-                b'namespace': b'bookmarks',
-                b'key': bookmark,
-                b'new': newnode,
-                b'old': oldnode,
-            }
-            op.reply.newpart(b'pushkey', mandatoryparams=params.items())
-
-
-def bundle2pushkey(orig, op, part):
-    """Wrapper of bundle2.handlepushkey()
-
-    The only goal is to skip calling the original function if the flag is
-    set. It's set if an infinitepush push is happening.
-    """
-    if op.records[scratchbranchparttype + b'_skippushkey']:
-        if op.reply is not None:
-            rpart = op.reply.newpart(b'reply:pushkey')
-            rpart.addparam(b'in-reply-to', str(part.id), mandatory=False)
-            rpart.addparam(b'return', b'1', mandatory=False)
-        return 1
-
-    return orig(op, part)
-
-
-def bundle2handlephases(orig, op, part):
-    """Wrapper of bundle2.handlephases()
-
-    The only goal is to skip calling the original function if the flag is
-    set. It's set if an infinitepush push is happening.
-    """
-
-    if op.records[scratchbranchparttype + b'_skipphaseheads']:
-        return
-
-    return orig(op, part)
-
-
-def _asyncsavemetadata(root, nodes):
-    """starts a separate process that fills metadata for the nodes
-
-    This function creates a separate process and doesn't wait for its
-    completion. This was done to avoid slowing down pushes.
-    """
-
-    maxnodes = 50
-    if len(nodes) > maxnodes:
-        return
-    nodesargs = []
-    for node in nodes:
-        nodesargs.append(b'--node')
-        nodesargs.append(node)
-    with open(os.devnull, b'w+b') as devnull:
-        cmdline = [
-            util.hgexecutable(),
-            b'debugfillinfinitepushmetadata',
-            b'-R',
-            root,
-        ] + nodesargs
-        # Process will run in background. We don't care about the return code
-        subprocess.Popen(
-            pycompat.rapply(procutil.tonativestr, cmdline),
-            close_fds=True,
-            shell=False,
-            stdin=devnull,
-            stdout=devnull,
-            stderr=devnull,
-        )
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/infinitepush/bundleparts.py
--- a/hgext/infinitepush/bundleparts.py	Tue Oct 10 18:29:04 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,126 +0,0 @@
-# Copyright 2017 Facebook, Inc.
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-
-from mercurial.i18n import _
-from mercurial.node import hex
-
-from mercurial import (
-    bundle2,
-    changegroup,
-    error,
-    extensions,
-    revsetlang,
-    util,
-)
-
-from . import common
-
-isremotebooksenabled = common.isremotebooksenabled
-
-scratchbranchparttype = b'b2x:infinitepush'
-
-
-def getscratchbranchparts(repo, peer, outgoing, ui, bookmark):
-    if not outgoing.missing:
-        raise error.Abort(_(b'no commits to push'))
-
-    if scratchbranchparttype not in bundle2.bundle2caps(peer):
-        raise error.Abort(
-            _(b'no server support for %r') % scratchbranchparttype
-        )
-
-    _validaterevset(
-        repo, revsetlang.formatspec(b'%ln', outgoing.missing), bookmark
-    )
-
-    supportedversions = changegroup.supportedoutgoingversions(repo)
-    # Explicitly avoid using '01' changegroup version in infinitepush to
-    # support general delta
-    supportedversions.discard(b'01')
-    cgversion = min(supportedversions)
-    _handlelfs(repo, outgoing.missing)
-    cg = changegroup.makestream(repo, outgoing, cgversion, b'push')
-
-    params = {}
-    params[b'cgversion'] = cgversion
-    if bookmark:
-        params[b'bookmark'] = bookmark
-        # 'bookprevnode' is necessary for the pushkey reply part
-        params[b'bookprevnode'] = b''
-        bookmarks = repo._bookmarks
-        if bookmark in bookmarks:
-            params[b'bookprevnode'] = hex(bookmarks[bookmark])
-
-    # Do not send pushback bundle2 part with bookmarks if remotenames extension
-    # is enabled.
It will be handled manually in `_push()` - if not isremotebooksenabled(ui): - params[b'pushbackbookmarks'] = b'1' - - parts = [] - - # .upper() marks this as a mandatory part: server will abort if there's no - # handler - parts.append( - bundle2.bundlepart( - scratchbranchparttype.upper(), - advisoryparams=params.items(), - data=cg, - ) - ) - - return parts - - -def _validaterevset(repo, revset, bookmark): - """Abort if the revs to be pushed aren't valid for a scratch branch.""" - if not repo.revs(revset): - raise error.Abort(_(b'nothing to push')) - if bookmark: - # Allow bundle with many heads only if no bookmark is specified - heads = repo.revs(b'heads(%r)', revset) - if len(heads) > 1: - raise error.Abort( - _(b'cannot push more than one head to a scratch branch') - ) - - -def _handlelfs(repo, missing): - """Special case if lfs is enabled - - If lfs is enabled then we need to call prepush hook - to make sure large files are uploaded to lfs - """ - try: - lfsmod = extensions.find(b'lfs') - lfsmod.wrapper.uploadblobsfromrevs(repo, missing) - except KeyError: - # Ignore if lfs extension is not enabled - return - - -class copiedpart: - """a copy of unbundlepart content that can be consumed later""" - - def __init__(self, part): - # copy "public properties" - self.type = part.type - self.id = part.id - self.mandatory = part.mandatory - self.mandatoryparams = part.mandatoryparams - self.advisoryparams = part.advisoryparams - self.params = part.params - self.mandatorykeys = part.mandatorykeys - # copy the buffer - self._io = util.stringio(part.read()) - - def consume(self): - return - - def read(self, size=None): - if size is None: - return self._io.read() - else: - return self._io.read(size) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/infinitepush/common.py --- a/hgext/infinitepush/common.py Tue Oct 10 18:29:04 2023 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,51 +0,0 @@ -# Copyright 2017 Facebook, Inc. -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. - - -import os - -from mercurial.node import hex - -from mercurial import ( - error, - extensions, - pycompat, -) - - -def isremotebooksenabled(ui): - return b'remotenames' in extensions._extensions and ui.configbool( - b'remotenames', b'bookmarks' - ) - - -def downloadbundle(repo, unknownbinhead): - index = repo.bundlestore.index - store = repo.bundlestore.store - bundleid = index.getbundle(hex(unknownbinhead)) - if bundleid is None: - raise error.Abort(b'%s head is not known' % hex(unknownbinhead)) - bundleraw = store.read(bundleid) - return _makebundlefromraw(bundleraw) - - -def _makebundlefromraw(data): - fp = None - fd, bundlefile = pycompat.mkstemp() - try: # guards bundlefile - try: # guards fp - fp = os.fdopen(fd, 'wb') - fp.write(data) - finally: - fp.close() - except Exception: - try: - os.unlink(bundlefile) - except Exception: - # we would rather see the original exception - pass - raise - - return bundlefile diff -r 704c3d0878d9 -r 12c308c55e53 hgext/infinitepush/fileindexapi.py --- a/hgext/infinitepush/fileindexapi.py Tue Oct 10 18:29:04 2023 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,110 +0,0 @@ -# Infinite push -# -# Copyright 2016 Facebook, Inc. -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. -""" - [infinitepush] - # Server-side option. Used only if indextype=disk. 
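The copiedpart class removed above exists so a bundle2 part's payload can be consumed now and re-read later, after the underlying stream is gone; the essence is buffering read() output in a BytesIO. A reduced sketch:

```python
import io


class bufferedpart:
    """Snapshot a one-shot stream so it can be re-read later
    (reduced sketch of copiedpart above)."""

    def __init__(self, stream):
        # drain the stream once, keep the bytes around
        self._io = io.BytesIO(stream.read())

    def read(self, size=None):
        if size is None:
            return self._io.read()
        return self._io.read(size)


part = bufferedpart(io.BytesIO(b'payload'))
assert part.read(3) == b'pay'
assert part.read() == b'load'
```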
- # Filesystem path to the index store - indexpath = PATH -""" - - -import os - -from mercurial import util - -from mercurial.utils import stringutil - -from . import indexapi - - -class fileindexapi(indexapi.indexapi): - def __init__(self, repo): - super(fileindexapi, self).__init__() - self._repo = repo - root = repo.ui.config(b'infinitepush', b'indexpath') - if not root: - root = os.path.join(b'scratchbranches', b'index') - - self._nodemap = os.path.join(root, b'nodemap') - self._bookmarkmap = os.path.join(root, b'bookmarkmap') - self._metadatamap = os.path.join(root, b'nodemetadatamap') - self._lock = None - - def __enter__(self): - self._lock = self._repo.wlock() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if self._lock: - self._lock.__exit__(exc_type, exc_val, exc_tb) - - def addbundle(self, bundleid, nodesctx): - for node in nodesctx: - nodepath = os.path.join(self._nodemap, node.hex()) - self._write(nodepath, bundleid) - - def addbookmark(self, bookmark, node): - bookmarkpath = os.path.join(self._bookmarkmap, bookmark) - self._write(bookmarkpath, node) - - def addmanybookmarks(self, bookmarks): - for bookmark, node in bookmarks.items(): - self.addbookmark(bookmark, node) - - def deletebookmarks(self, patterns): - for pattern in patterns: - for bookmark, _ in self._listbookmarks(pattern): - bookmarkpath = os.path.join(self._bookmarkmap, bookmark) - self._delete(bookmarkpath) - - def getbundle(self, node): - nodepath = os.path.join(self._nodemap, node) - return self._read(nodepath) - - def getnode(self, bookmark): - bookmarkpath = os.path.join(self._bookmarkmap, bookmark) - return self._read(bookmarkpath) - - def getbookmarks(self, query): - return dict(self._listbookmarks(query)) - - def saveoptionaljsonmetadata(self, node, jsonmetadata): - vfs = self._repo.vfs - vfs.write(os.path.join(self._metadatamap, node), jsonmetadata) - - def _listbookmarks(self, pattern): - if pattern.endswith(b'*'): - pattern = b're:^' + pattern[:-1] + b'.*' - kind, pat, matcher = stringutil.stringmatcher(pattern) - prefixlen = len(self._bookmarkmap) + 1 - for dirpath, _, books in self._repo.vfs.walk(self._bookmarkmap): - for book in books: - bookmark = os.path.join(dirpath, book)[prefixlen:] - bookmark = util.pconvert(bookmark) - if not matcher(bookmark): - continue - yield bookmark, self._read(os.path.join(dirpath, book)) - - def _write(self, path, value): - vfs = self._repo.vfs - dirname = vfs.dirname(path) - if not vfs.exists(dirname): - vfs.makedirs(dirname) - - vfs.write(path, value) - - def _read(self, path): - vfs = self._repo.vfs - if not vfs.exists(path): - return None - return vfs.read(path) - - def _delete(self, path): - vfs = self._repo.vfs - if not vfs.exists(path): - return - return vfs.unlink(path) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/infinitepush/indexapi.py --- a/hgext/infinitepush/indexapi.py Tue Oct 10 18:29:04 2023 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,69 +0,0 @@ -# Infinite push -# -# Copyright 2016 Facebook, Inc. -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. - - -class indexapi: - """Class that manages access to infinitepush index. - - This class is a context manager and all write operations (like - deletebookmarks, addbookmark etc) should use `with` statement: - - with index: - index.deletebookmarks(...) - ... 
- """ - - def __init__(self): - """Initializes the metadata store connection.""" - - def close(self): - """Cleans up the metadata store connection.""" - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - pass - - def addbundle(self, bundleid, nodesctx): - """Takes a bundleid and a list of node contexts for each node - in that bundle and records that.""" - raise NotImplementedError() - - def addbookmark(self, bookmark, node): - """Takes a bookmark name and hash, and records mapping in the metadata - store.""" - raise NotImplementedError() - - def addmanybookmarks(self, bookmarks): - """Takes a dict with mapping from bookmark to hash and records mapping - in the metadata store.""" - raise NotImplementedError() - - def deletebookmarks(self, patterns): - """Accepts list of bookmarks and deletes them.""" - raise NotImplementedError() - - def getbundle(self, node): - """Returns the bundleid for the bundle that contains the given node.""" - raise NotImplementedError() - - def getnode(self, bookmark): - """Returns the node for the given bookmark. None if it doesn't exist.""" - raise NotImplementedError() - - def getbookmarks(self, query): - """Returns bookmarks that match the query""" - raise NotImplementedError() - - def saveoptionaljsonmetadata(self, node, jsonmetadata): - """Saves optional metadata for a given node""" - raise NotImplementedError() - - -class indexexception(Exception): - pass diff -r 704c3d0878d9 -r 12c308c55e53 hgext/infinitepush/schema.sql --- a/hgext/infinitepush/schema.sql Tue Oct 10 18:29:04 2023 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,33 +0,0 @@ -CREATE TABLE `bookmarkstonode` ( - `node` varbinary(64) NOT NULL, - `bookmark` varbinary(512) NOT NULL, - `reponame` varbinary(255) NOT NULL, - PRIMARY KEY (`reponame`,`bookmark`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -CREATE TABLE `bundles` ( - `bundle` varbinary(512) NOT NULL, - `reponame` varbinary(255) NOT NULL, - PRIMARY KEY (`bundle`,`reponame`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -CREATE TABLE `nodestobundle` ( - `node` varbinary(64) NOT NULL, - `bundle` varbinary(512) NOT NULL, - `reponame` varbinary(255) NOT NULL, - PRIMARY KEY (`node`,`reponame`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -CREATE TABLE `nodesmetadata` ( - `node` varbinary(64) NOT NULL, - `message` mediumblob NOT NULL, - `p1` varbinary(64) NOT NULL, - `p2` varbinary(64) DEFAULT NULL, - `author` varbinary(255) NOT NULL, - `committer` varbinary(255) DEFAULT NULL, - `author_date` bigint(20) NOT NULL, - `committer_date` bigint(20) DEFAULT NULL, - `reponame` varbinary(255) NOT NULL, - `optional_json_metadata` mediumblob, - PRIMARY KEY (`reponame`,`node`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; diff -r 704c3d0878d9 -r 12c308c55e53 hgext/infinitepush/sqlindexapi.py --- a/hgext/infinitepush/sqlindexapi.py Tue Oct 10 18:29:04 2023 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,295 +0,0 @@ -# Infinite push -# -# Copyright 2016 Facebook, Inc. -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. - - -import logging -import os -import time - -import warnings -import mysql.connector - -from . import indexapi - - -def _convertbookmarkpattern(pattern): - pattern = pattern.replace(b'_', b'\\_') - pattern = pattern.replace(b'%', b'\\%') - if pattern.endswith(b'*'): - pattern = pattern[:-1] + b'%' - return pattern - - -class sqlindexapi(indexapi.indexapi): - """ - Sql backend for infinitepush index. 
See schema.sql - """ - - def __init__( - self, - reponame, - host, - port, - database, - user, - password, - logfile, - loglevel, - waittimeout=300, - locktimeout=120, - ): - super(sqlindexapi, self).__init__() - self.reponame = reponame - self.sqlargs = { - b'host': host, - b'port': port, - b'database': database, - b'user': user, - b'password': password, - } - self.sqlconn = None - self.sqlcursor = None - if not logfile: - logfile = os.devnull - logging.basicConfig(filename=logfile) - self.log = logging.getLogger() - self.log.setLevel(loglevel) - self._connected = False - self._waittimeout = waittimeout - self._locktimeout = locktimeout - - def sqlconnect(self): - if self.sqlconn: - raise indexapi.indexexception(b"SQL connection already open") - if self.sqlcursor: - raise indexapi.indexexception( - b"SQL cursor already open without connection" - ) - retry = 3 - while True: - try: - self.sqlconn = mysql.connector.connect(**self.sqlargs) - - # Code is copy-pasted from hgsql. Bug fixes need to be - # back-ported! - # The default behavior is to return byte arrays, when we - # need strings. This custom convert returns strings. - self.sqlconn.set_converter_class(CustomConverter) - self.sqlconn.autocommit = False - break - except mysql.connector.errors.Error: - # mysql can be flakey occasionally, so do some minimal - # retrying. - retry -= 1 - if retry == 0: - raise - time.sleep(0.2) - - waittimeout = self.sqlconn.converter.escape(b'%s' % self._waittimeout) - - self.sqlcursor = self.sqlconn.cursor() - self.sqlcursor.execute(b"SET wait_timeout=%s" % waittimeout) - self.sqlcursor.execute( - b"SET innodb_lock_wait_timeout=%s" % self._locktimeout - ) - self._connected = True - - def close(self): - """Cleans up the metadata store connection.""" - with warnings.catch_warnings(): - warnings.simplefilter(b"ignore") - self.sqlcursor.close() - self.sqlconn.close() - self.sqlcursor = None - self.sqlconn = None - - def __enter__(self): - if not self._connected: - self.sqlconnect() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is None: - self.sqlconn.commit() - else: - self.sqlconn.rollback() - - def addbundle(self, bundleid, nodesctx): - if not self._connected: - self.sqlconnect() - self.log.info(b"ADD BUNDLE %r %r" % (self.reponame, bundleid)) - self.sqlcursor.execute( - b"INSERT INTO bundles(bundle, reponame) VALUES (%s, %s)", - params=(bundleid, self.reponame), - ) - for ctx in nodesctx: - self.sqlcursor.execute( - b"INSERT INTO nodestobundle(node, bundle, reponame) " - b"VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE " - b"bundle=VALUES(bundle)", - params=(ctx.hex(), bundleid, self.reponame), - ) - - extra = ctx.extra() - author_name = ctx.user() - committer_name = extra.get(b'committer', ctx.user()) - author_date = int(ctx.date()[0]) - committer_date = int(extra.get(b'committer_date', author_date)) - self.sqlcursor.execute( - b"INSERT IGNORE INTO nodesmetadata(node, message, p1, p2, " - b"author, committer, author_date, committer_date, " - b"reponame) VALUES " - b"(%s, %s, %s, %s, %s, %s, %s, %s, %s)", - params=( - ctx.hex(), - ctx.description(), - ctx.p1().hex(), - ctx.p2().hex(), - author_name, - committer_name, - author_date, - committer_date, - self.reponame, - ), - ) - - def addbookmark(self, bookmark, node): - """Takes a bookmark name and hash, and records mapping in the metadata - store.""" - if not self._connected: - self.sqlconnect() - self.log.info( - b"ADD BOOKMARKS %r bookmark: %r node: %r" - % (self.reponame, bookmark, node) - ) - self.sqlcursor.execute( - 
b"INSERT INTO bookmarkstonode(bookmark, node, reponame) " - b"VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE node=VALUES(node)", - params=(bookmark, node, self.reponame), - ) - - def addmanybookmarks(self, bookmarks): - if not self._connected: - self.sqlconnect() - args = [] - values = [] - for bookmark, node in bookmarks.items(): - args.append(b'(%s, %s, %s)') - values.extend((bookmark, node, self.reponame)) - args = b','.join(args) - - self.sqlcursor.execute( - b"INSERT INTO bookmarkstonode(bookmark, node, reponame) " - b"VALUES %s ON DUPLICATE KEY UPDATE node=VALUES(node)" % args, - params=values, - ) - - def deletebookmarks(self, patterns): - """Accepts list of bookmark patterns and deletes them. - If `commit` is set then bookmark will actually be deleted. Otherwise - deletion will be delayed until the end of transaction. - """ - if not self._connected: - self.sqlconnect() - self.log.info(b"DELETE BOOKMARKS: %s" % patterns) - for pattern in patterns: - pattern = _convertbookmarkpattern(pattern) - self.sqlcursor.execute( - b"DELETE from bookmarkstonode WHERE bookmark LIKE (%s) " - b"and reponame = %s", - params=(pattern, self.reponame), - ) - - def getbundle(self, node): - """Returns the bundleid for the bundle that contains the given node.""" - if not self._connected: - self.sqlconnect() - self.log.info(b"GET BUNDLE %r %r" % (self.reponame, node)) - self.sqlcursor.execute( - b"SELECT bundle from nodestobundle " - b"WHERE node = %s AND reponame = %s", - params=(node, self.reponame), - ) - result = self.sqlcursor.fetchall() - if len(result) != 1 or len(result[0]) != 1: - self.log.info(b"No matching node") - return None - bundle = result[0][0] - self.log.info(b"Found bundle %r" % bundle) - return bundle - - def getnode(self, bookmark): - """Returns the node for the given bookmark. 
None if it doesn't exist.""" - if not self._connected: - self.sqlconnect() - self.log.info( - b"GET NODE reponame: %r bookmark: %r" % (self.reponame, bookmark) - ) - self.sqlcursor.execute( - b"SELECT node from bookmarkstonode WHERE " - b"bookmark = %s AND reponame = %s", - params=(bookmark, self.reponame), - ) - result = self.sqlcursor.fetchall() - if len(result) != 1 or len(result[0]) != 1: - self.log.info(b"No matching bookmark") - return None - node = result[0][0] - self.log.info(b"Found node %r" % node) - return node - - def getbookmarks(self, query): - if not self._connected: - self.sqlconnect() - self.log.info( - b"QUERY BOOKMARKS reponame: %r query: %r" % (self.reponame, query) - ) - query = _convertbookmarkpattern(query) - self.sqlcursor.execute( - b"SELECT bookmark, node from bookmarkstonode WHERE " - b"reponame = %s AND bookmark LIKE %s", - params=(self.reponame, query), - ) - result = self.sqlcursor.fetchall() - bookmarks = {} - for row in result: - if len(row) != 2: - self.log.info(b"Bad row returned: %s" % row) - continue - bookmarks[row[0]] = row[1] - return bookmarks - - def saveoptionaljsonmetadata(self, node, jsonmetadata): - if not self._connected: - self.sqlconnect() - self.log.info( - ( - b"INSERT METADATA, QUERY BOOKMARKS reponame: %r " - + b"node: %r, jsonmetadata: %s" - ) - % (self.reponame, node, jsonmetadata) - ) - - self.sqlcursor.execute( - b"UPDATE nodesmetadata SET optional_json_metadata=%s WHERE " - b"reponame=%s AND node=%s", - params=(jsonmetadata, self.reponame, node), - ) - - -class CustomConverter(mysql.connector.conversion.MySQLConverter): - """Ensure that all values being returned are returned as python string - (versus the default byte arrays).""" - - def _STRING_to_python(self, value, dsc=None): - return str(value) - - def _VAR_STRING_to_python(self, value, dsc=None): - return str(value) - - def _BLOB_to_python(self, value, dsc=None): - return str(value) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/infinitepush/store.py --- a/hgext/infinitepush/store.py Tue Oct 10 18:29:04 2023 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,194 +0,0 @@ -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. - -# based on bundleheads extension by Gregory Szorc - - -import abc -import os -import subprocess - -from mercurial.node import hex -from mercurial.pycompat import open -from mercurial import pycompat -from mercurial.utils import ( - hashutil, - procutil, -) - - -class BundleWriteException(Exception): - pass - - -class BundleReadException(Exception): - pass - - -class abstractbundlestore: # pytype: disable=ignored-metaclass - """Defines the interface for bundle stores. - - A bundle store is an entity that stores raw bundle data. It is a simple - key-value store. However, the keys are chosen by the store. The keys can - be any Python object understood by the corresponding bundle index (see - ``abstractbundleindex`` below). - """ - - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def write(self, data): - """Write bundle data to the store. - - This function receives the raw data to be written as a str. - Throws BundleWriteException - The key of the written data MUST be returned. - """ - - @abc.abstractmethod - def read(self, key): - """Obtain bundle data for a key. - - Returns None if the bundle isn't known. - Throws BundleReadException - The returned object should be a file object supporting read() - and close(). 
- """ - - -class filebundlestore: - """bundle store in filesystem - - meant for storing bundles somewhere on disk and on network filesystems - """ - - def __init__(self, ui, repo): - self.ui = ui - self.repo = repo - self.storepath = ui.configpath(b'scratchbranch', b'storepath') - if not self.storepath: - self.storepath = self.repo.vfs.join( - b"scratchbranches", b"filebundlestore" - ) - if not os.path.exists(self.storepath): - os.makedirs(self.storepath) - - def _dirpath(self, hashvalue): - """First two bytes of the hash are the name of the upper - level directory, next two bytes are the name of the - next level directory""" - return os.path.join(self.storepath, hashvalue[0:2], hashvalue[2:4]) - - def _filepath(self, filename): - return os.path.join(self._dirpath(filename), filename) - - def write(self, data): - filename = hex(hashutil.sha1(data).digest()) - dirpath = self._dirpath(filename) - - if not os.path.exists(dirpath): - os.makedirs(dirpath) - - with open(self._filepath(filename), b'wb') as f: - f.write(data) - - return filename - - def read(self, key): - try: - with open(self._filepath(key), b'rb') as f: - return f.read() - except IOError: - return None - - -def format_placeholders_args(args, filename=None, handle=None): - """Formats `args` with Infinitepush replacements. - - Hack to get `str.format()`-ed strings working in a BC way with - bytes. - """ - formatted_args = [] - for arg in args: - if filename and arg == b'{filename}': - formatted_args.append(filename) - elif handle and arg == b'{handle}': - formatted_args.append(handle) - else: - formatted_args.append(arg) - return formatted_args - - -class externalbundlestore(abstractbundlestore): - def __init__(self, put_binary, put_args, get_binary, get_args): - """ - `put_binary` - path to binary file which uploads bundle to external - storage and prints key to stdout - `put_args` - format string with additional args to `put_binary` - {filename} replacement field can be used. - `get_binary` - path to binary file which accepts filename and key - (in that order), downloads bundle from store and saves it to file - `get_args` - format string with additional args to `get_binary`. - {filename} and {handle} replacement field can be used. 
- """ - - self.put_args = put_args - self.get_args = get_args - self.put_binary = put_binary - self.get_binary = get_binary - - def _call_binary(self, args): - p = subprocess.Popen( - pycompat.rapply(procutil.tonativestr, args), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - close_fds=True, - ) - stdout, stderr = p.communicate() - returncode = p.returncode - return returncode, stdout, stderr - - def write(self, data): - # Won't work on windows because you can't open file second time without - # closing it - # TODO: rewrite without str.format() and replace NamedTemporaryFile() - # with pycompat.namedtempfile() - with pycompat.namedtempfile() as temp: - temp.write(data) - temp.flush() - temp.seek(0) - formatted_args = format_placeholders_args( - self.put_args, filename=temp.name - ) - returncode, stdout, stderr = self._call_binary( - [self.put_binary] + formatted_args - ) - - if returncode != 0: - raise BundleWriteException( - b'Failed to upload to external store: %s' % stderr - ) - stdout_lines = stdout.splitlines() - if len(stdout_lines) == 1: - return stdout_lines[0] - else: - raise BundleWriteException( - b'Bad output from %s: %s' % (self.put_binary, stdout) - ) - - def read(self, handle): - # Won't work on windows because you can't open file second time without - # closing it - with pycompat.namedtempfile() as temp: - formatted_args = format_placeholders_args( - self.get_args, filename=temp.name, handle=handle - ) - returncode, stdout, stderr = self._call_binary( - [self.get_binary] + formatted_args - ) - - if returncode != 0: - raise BundleReadException( - b'Failed to download from external store: %s' % stderr - ) - return temp.read() diff -r 704c3d0878d9 -r 12c308c55e53 hgext/journal.py --- a/hgext/journal.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/journal.py Wed Oct 11 02:02:46 2023 +0200 @@ -66,13 +66,13 @@ # Journal recording, register hooks and storage object def extsetup(ui): - extensions.wrapfunction(dispatch, b'runcommand', runcommand) - extensions.wrapfunction(bookmarks.bmstore, b'_write', recordbookmarks) + extensions.wrapfunction(dispatch, 'runcommand', runcommand) + extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks) extensions.wrapfilecache( localrepo.localrepository, b'dirstate', wrapdirstate ) - extensions.wrapfunction(hg, b'postshare', wrappostshare) - extensions.wrapfunction(hg, b'copystore', unsharejournal) + extensions.wrapfunction(hg, 'postshare', wrappostshare) + extensions.wrapfunction(hg, 'copystore', unsharejournal) def reposetup(ui, repo): @@ -103,7 +103,7 @@ def wrapdirstate(orig, repo): """Make journal storage available to the dirstate object""" dirstate = orig(repo) - if util.safehasattr(repo, 'journal'): + if hasattr(repo, 'journal'): _setupdirstate(repo, dirstate) return dirstate @@ -112,7 +112,7 @@ """Records all dirstate parent changes in the journal.""" old = list(old) new = list(new) - if util.safehasattr(dirstate, 'journalstorage'): + if hasattr(dirstate, 'journalstorage'): # only record two hashes if there was a merge oldhashes = old[:1] if old[1] == dirstate._nodeconstants.nullid else old newhashes = new[:1] if new[1] == dirstate._nodeconstants.nullid else new @@ -125,9 +125,12 @@ def recordbookmarks(orig, store, fp): """Records all bookmark changes in the journal.""" repo = store._repo - if util.safehasattr(repo, 'journal'): + if hasattr(repo, 'journal'): oldmarks = bookmarks.bmstore(repo) - for mark, value in store.items(): + all_marks = set(b for b, n in oldmarks.items()) + all_marks.update(b for b, n in store.items()) 
+ for mark in sorted(all_marks): + value = store.get(mark, repo.nullid) oldvalue = oldmarks.get(mark, repo.nullid) if value != oldvalue: repo.journal.record(bookmarktype, mark, oldvalue, value) @@ -182,11 +185,7 @@ def unsharejournal(orig, ui, repo, repopath): """Copy shared journal entries into this repo when unsharing""" - if ( - repo.path == repopath - and repo.shared() - and util.safehasattr(repo, 'journal') - ): + if repo.path == repopath and repo.shared() and hasattr(repo, 'journal'): sharedrepo = hg.sharedreposource(repo) sharedfeatures = _readsharedfeatures(repo) if sharedrepo and sharedfeatures > {b'journal'}: diff -r 704c3d0878d9 -r 12c308c55e53 hgext/keyword.py --- a/hgext/keyword.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/keyword.py Wed Oct 11 02:02:46 2023 +0200 @@ -88,7 +88,6 @@ import weakref from mercurial.i18n import _ -from mercurial.pycompat import getattr from mercurial.hgweb import webcommands from mercurial import ( @@ -131,7 +130,7 @@ ) # webcommands that do not act on keywords -nokwwebcommands = b'annotate changeset rev filediff diff comparison' +nokwwebcommands = 'annotate changeset rev filediff diff comparison' # hg commands that trigger expansion only when writing to working dir, # not when reading filelog, and unexpand when reading from working dir @@ -420,11 +419,10 @@ """Bails out if [keyword] configuration is not active. Returns status of working directory.""" if kwt: - opts = pycompat.byteskwargs(opts) return repo.status( - match=scmutil.match(wctx, pats, opts), + match=scmutil.match(wctx, pats, pycompat.byteskwargs(opts)), clean=True, - unknown=opts.get(b'unknown') or opts.get(b'all'), + unknown=opts.get('unknown') or opts.get('all'), ) if ui.configitems(b'keyword'): raise error.Abort(_(b'[keyword] patterns cannot match')) @@ -604,26 +602,26 @@ else: cwd = b'' files = [] - opts = pycompat.byteskwargs(opts) - if not opts.get(b'unknown') or opts.get(b'all'): + + if not opts.get('unknown') or opts.get('all'): files = sorted(status.modified + status.added + status.clean) kwfiles = kwt.iskwfile(files, wctx) kwdeleted = kwt.iskwfile(status.deleted, wctx) kwunknown = kwt.iskwfile(status.unknown, wctx) - if not opts.get(b'ignore') or opts.get(b'all'): + if not opts.get('ignore') or opts.get('all'): showfiles = kwfiles, kwdeleted, kwunknown else: showfiles = [], [], [] - if opts.get(b'all') or opts.get(b'ignore'): + if opts.get('all') or opts.get('ignore'): showfiles += ( [f for f in files if f not in kwfiles], [f for f in status.unknown if f not in kwunknown], ) kwlabels = b'enabled deleted enabledunknown ignored ignoredunknown'.split() kwstates = zip(kwlabels, pycompat.bytestr(b'K!kIi'), showfiles) - fm = ui.formatter(b'kwfiles', opts) + fm = ui.formatter(b'kwfiles', pycompat.byteskwargs(opts)) fmt = b'%.0s%s\n' - if opts.get(b'all') or ui.verbose: + if opts.get('all') or ui.verbose: fmt = b'%s %s\n' for kwstate, char, filenames in kwstates: label = b'kwfiles.' 
+ kwstate @@ -806,14 +804,14 @@ kwtools[b'hgcmd'] = cmd return cmd, func, args, options, cmdoptions - extensions.wrapfunction(dispatch, b'_parse', kwdispatch_parse) + extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse) - extensions.wrapfunction(context.filectx, b'cmp', kwfilectx_cmp) - extensions.wrapfunction(patch.patchfile, b'__init__', kwpatchfile_init) - extensions.wrapfunction(patch, b'diff', kwdiff) - extensions.wrapfunction(cmdutil, b'amend', kw_amend) - extensions.wrapfunction(cmdutil, b'copy', kw_copy) - extensions.wrapfunction(cmdutil, b'dorecord', kw_dorecord) + extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp) + extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init) + extensions.wrapfunction(patch, 'diff', kwdiff) + extensions.wrapfunction(cmdutil, 'amend', kw_amend) + extensions.wrapfunction(cmdutil, 'copy', kw_copy) + extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord) for c in nokwwebcommands.split(): extensions.wrapfunction(webcommands, c, kwweb_skip) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/largefiles/__init__.py --- a/hgext/largefiles/__init__.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/largefiles/__init__.py Wed Oct 11 02:02:46 2023 +0200 @@ -184,7 +184,7 @@ ) extensions.wrapfunction( - wireprotov1server.commands[b'heads'], b'func', proto.heads + wireprotov1server.commands[b'heads'], 'func', proto.heads ) # TODO also wrap wireproto.commandsv2 once heads is implemented there. @@ -193,7 +193,7 @@ if name == b'rebase': # TODO: teach exthelper to handle this extensions.wrapfunction( - module, b'rebase', overrides.overriderebasecmd + module, 'rebase', overrides.overriderebasecmd ) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/largefiles/lfcommands.py --- a/hgext/largefiles/lfcommands.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/largefiles/lfcommands.py Wed Oct 11 02:02:46 2023 +0200 @@ -27,7 +27,6 @@ lock, logcmdutil, match as matchmod, - pycompat, scmutil, util, ) @@ -87,12 +86,11 @@ Use --to-normal to convert largefiles back to normal files; after this, the DEST repository can be used without largefiles at all.""" - opts = pycompat.byteskwargs(opts) - if opts[b'to_normal']: + if opts['to_normal']: tolfile = False else: tolfile = True - size = lfutil.getminsize(ui, True, opts.get(b'size'), default=None) + size = lfutil.getminsize(ui, True, opts.get('size'), default=None) if not hg.islocal(src): raise error.Abort(_(b'%s is not a local Mercurial repo') % src) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/largefiles/lfutil.py --- a/hgext/largefiles/lfutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/largefiles/lfutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -814,7 +814,7 @@ Otherwise, this returns the function to always write out (or ignore if ``not forcibly``) status. """ - if forcibly is None and util.safehasattr(repo, b'_largefilesenabled'): + if forcibly is None and hasattr(repo, '_largefilesenabled'): return repo._lfstatuswriters[-1] else: if forcibly: diff -r 704c3d0878d9 -r 12c308c55e53 hgext/largefiles/overrides.py --- a/hgext/largefiles/overrides.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/largefiles/overrides.py Wed Oct 11 02:02:46 2023 +0200 @@ -243,7 +243,7 @@ # For overriding mercurial.hgweb.webcommands so that largefiles will # appear at their right place in the manifests. 
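The journal.py hunk earlier in this patch changes recordbookmarks() to iterate over the union of old and new bookmark names rather than only the new ones; otherwise a deleted bookmark would never show up in the journal. A sketch of the effect (nullid here is a stand-in for repo.nullid):

```python
# A mark present only on one side is journaled as a transition to or from
# the null node; unchanged marks are skipped.
nullid = b'\x00' * 20  # stand-in for repo.nullid
oldmarks = {b'keep': b'\xaa' * 20, b'dropped': b'\xbb' * 20}
newmarks = {b'keep': b'\xaa' * 20, b'added': b'\xcc' * 20}

for mark in sorted(set(oldmarks) | set(newmarks)):
    oldvalue = oldmarks.get(mark, nullid)
    value = newmarks.get(mark, nullid)
    if value != oldvalue:
        print(mark.decode(), oldvalue.hex(), '->', value.hex())
# prints 'dropped' (a deletion) and 'added'; 'keep' is unchanged and skipped
```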
-@eh.wrapfunction(webcommands, b'decodepath') +@eh.wrapfunction(webcommands, 'decodepath') def decodepath(orig, path): return lfutil.splitstandin(path) or path @@ -273,7 +273,7 @@ return orig(ui, repo, *pats, **opts) -@eh.wrapfunction(cmdutil, b'add') +@eh.wrapfunction(cmdutil, 'add') def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts): # The --normal flag short circuits this override if opts.get('normal'): @@ -289,7 +289,7 @@ return bad -@eh.wrapfunction(cmdutil, b'remove') +@eh.wrapfunction(cmdutil, 'remove') def cmdutilremove( orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun ): @@ -313,7 +313,7 @@ ) -@eh.wrapfunction(dirstate.dirstate, b'_changing') +@eh.wrapfunction(dirstate.dirstate, '_changing') @contextlib.contextmanager def _changing(orig, self, repo, change_type): pre = sub_dirstate = getattr(self, '_sub_dirstate', None) @@ -334,7 +334,7 @@ self._sub_dirstate = pre -@eh.wrapfunction(dirstate.dirstate, b'running_status') +@eh.wrapfunction(dirstate.dirstate, 'running_status') @contextlib.contextmanager def running_status(orig, self, repo): pre = sub_dirstate = getattr(self, '_sub_dirstate', None) @@ -355,7 +355,7 @@ self._sub_dirstate = pre -@eh.wrapfunction(subrepo.hgsubrepo, b'status') +@eh.wrapfunction(subrepo.hgsubrepo, 'status') def overridestatusfn(orig, repo, rev2, **opts): with lfstatus(repo._repo): return orig(repo, rev2, **opts) @@ -367,7 +367,7 @@ return orig(ui, repo, *pats, **opts) -@eh.wrapfunction(subrepo.hgsubrepo, b'dirty') +@eh.wrapfunction(subrepo.hgsubrepo, 'dirty') def overridedirty(orig, repo, ignoreupdate=False, missing=False): with lfstatus(repo._repo): return orig(repo, ignoreupdate=ignoreupdate, missing=missing) @@ -485,10 +485,10 @@ return lambda ctx: match wrappedmatchandpats = extensions.wrappedfunction( - scmutil, b'matchandpats', overridematchandpats + scmutil, 'matchandpats', overridematchandpats ) wrappedmakefilematcher = extensions.wrappedfunction( - logcmdutil, b'_makenofollowfilematcher', overridemakefilematcher + logcmdutil, '_makenofollowfilematcher', overridemakefilematcher ) with wrappedmatchandpats, wrappedmakefilematcher: return orig(ui, repo, *pats, **opts) @@ -554,7 +554,7 @@ # The overridden function filters the unknown files by removing any # largefiles. This makes the merge proceed and we can then handle this # case further in the overridden calculateupdates function below. -@eh.wrapfunction(merge, b'_checkunknownfile') +@eh.wrapfunction(merge, '_checkunknownfile') def overridecheckunknownfile( origfn, dirstate, wvfs, dircache, wctx, mctx, f, f2=None ): @@ -589,7 +589,7 @@ # Finally, the merge.applyupdates function will then take care of # writing the files into the working copy and lfcommands.updatelfiles # will update the largefiles. -@eh.wrapfunction(merge, b'calculateupdates') +@eh.wrapfunction(merge, 'calculateupdates') def overridecalculateupdates( origfn, repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs ): @@ -700,7 +700,7 @@ return mresult -@eh.wrapfunction(mergestatemod, b'recordupdates') +@eh.wrapfunction(mergestatemod, 'recordupdates') def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata): if MERGE_ACTION_LARGEFILE_MARK_REMOVED in actions: lfdirstate = lfutil.openlfdirstate(repo.ui, repo) @@ -716,7 +716,7 @@ # Override filemerge to prompt the user about how they wish to merge # largefiles. This will handle identical edits without prompting the user. 
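All of the wrapfunction hunks in this series change only the attribute name from bytes to str; the wrapper protocol itself is untouched. For reference, the general shape of a wrapper registered this way (a schematic, not any specific override from this file):

```python
def overridesomething(orig, *args, **kwargs):
    # a wrapper always receives the original callable as its first
    # argument and normally delegates to it
    # ... pre-processing ...
    result = orig(*args, **kwargs)
    # ... post-processing ...
    return result


# after this series the attribute name is a str, not bytes:
#   extensions.wrapfunction(module, 'something', overridesomething)
```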
-@eh.wrapfunction(filemerge, b'filemerge') +@eh.wrapfunction(filemerge, 'filemerge') def overridefilemerge( origfn, repo, wctx, mynode, orig, fcd, fco, fca, labels=None ): @@ -748,7 +748,7 @@ return 0, False -@eh.wrapfunction(copiesmod, b'pathcopies') +@eh.wrapfunction(copiesmod, 'pathcopies') def copiespathcopies(orig, ctx1, ctx2, match=None): copies = orig(ctx1, ctx2, match=match) updated = {} @@ -764,7 +764,7 @@ # checks if the destination largefile already exists. It also keeps a # list of copied files so that the largefiles can be copied and the # dirstate updated. -@eh.wrapfunction(cmdutil, b'copy') +@eh.wrapfunction(cmdutil, 'copy') def overridecopy(orig, ui, repo, pats, opts, rename=False): # doesn't remove largefile on rename if len(pats) < 2: @@ -793,7 +793,7 @@ match = orig(ctx, pats, opts, globbed, default, badfn=badfn) return composenormalfilematcher(match, manifest) - with extensions.wrappedfunction(scmutil, b'match', normalfilesmatchfn): + with extensions.wrappedfunction(scmutil, 'match', normalfilesmatchfn): try: result = orig(ui, repo, pats, opts, rename) except error.Abort as e: @@ -887,8 +887,8 @@ copiedfiles.append((src, dest)) orig(src, dest, *args, **kwargs) - with extensions.wrappedfunction(util, b'copyfile', overridecopyfile): - with extensions.wrappedfunction(scmutil, b'match', overridematch): + with extensions.wrappedfunction(util, 'copyfile', overridecopyfile): + with extensions.wrappedfunction(scmutil, 'match', overridematch): result += orig(ui, repo, listpats, opts, rename) lfdirstate = lfutil.openlfdirstate(ui, repo) @@ -936,7 +936,7 @@ # commits. Update the standins then run the original revert, changing # the matcher to hit standins instead of largefiles. Based on the # resulting standins update the largefiles. -@eh.wrapfunction(cmdutil, b'revert') +@eh.wrapfunction(cmdutil, 'revert') def overriderevert(orig, ui, repo, ctx, *pats, **opts): # Because we put the standins in a bad state (by updating them) # and then return them to a correct state we need to lock to @@ -999,7 +999,7 @@ m.matchfn = matchfn return m - with extensions.wrappedfunction(scmutil, b'match', overridematch): + with extensions.wrappedfunction(scmutil, 'match', overridematch): orig(ui, repo, ctx, *pats, **opts) newstandins = lfutil.getstandinsstate(repo) @@ -1079,7 +1079,7 @@ return orig(ui, repo, *args, **kwargs) -@eh.wrapfunction(exchange, b'pushoperation') +@eh.wrapfunction(exchange, 'pushoperation') def exchangepushoperation(orig, *args, **kwargs): """Override pushoperation constructor and store lfrevs parameter""" lfrevs = kwargs.pop('lfrevs', None) @@ -1139,7 +1139,7 @@ return orig(ui, source, dest, **opts) -@eh.wrapfunction(hg, b'clone') +@eh.wrapfunction(hg, 'clone') def hgclone(orig, ui, opts, *args, **kwargs): result = orig(ui, opts, *args, **kwargs) @@ -1167,7 +1167,7 @@ @eh.wrapcommand(b'rebase', extension=b'rebase') def overriderebasecmd(orig, ui, repo, **opts): - if not util.safehasattr(repo, b'_largefilesenabled'): + if not hasattr(repo, '_largefilesenabled'): return orig(ui, repo, **opts) resuming = opts.get('continue') @@ -1195,7 +1195,7 @@ kwargs['inmemory'] = False return orig(*args, **kwargs) - extensions.wrapfunction(rebase, b'_dorebase', _dorebase) + extensions.wrapfunction(rebase, '_dorebase', _dorebase) @eh.wrapcommand(b'archive') @@ -1204,13 +1204,13 @@ return orig(ui, repo.unfiltered(), dest, **opts) -@eh.wrapfunction(webcommands, b'archive') +@eh.wrapfunction(webcommands, 'archive') def hgwebarchive(orig, web): with lfstatus(web.repo): return orig(web) 
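Many of these overrides simply rerun the original function inside lfstatus(repo), which temporarily flips a flag on the repository so that status and archive code reports largefiles. A minimal sketch of such a flag-flipping context manager, assuming lfstatus() works roughly this way:

```python
import contextlib


@contextlib.contextmanager
def lf_status(repo, value=True):
    # set repo.lfstatus for the duration of the block, restoring the old
    # value even when the wrapped operation raises
    old = getattr(repo, 'lfstatus', False)
    repo.lfstatus = value
    try:
        yield
    finally:
        repo.lfstatus = old


class FakeRepo:
    lfstatus = False


repo = FakeRepo()
with lf_status(repo):
    assert repo.lfstatus
assert not repo.lfstatus
```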
-@eh.wrapfunction(archival, b'archive') +@eh.wrapfunction(archival, 'archive') def overridearchive( orig, repo, @@ -1298,7 +1298,7 @@ # allow only hgsubrepos to set this, instead of the current scheme # where the parent sets this for the child. with ( - util.safehasattr(sub, '_repo') + hasattr(sub, '_repo') and lfstatus(sub._repo) or util.nullcontextmanager() ): @@ -1307,9 +1307,9 @@ archiver.done() -@eh.wrapfunction(subrepo.hgsubrepo, b'archive') +@eh.wrapfunction(subrepo.hgsubrepo, 'archive') def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True): - lfenabled = util.safehasattr(repo._repo, b'_largefilesenabled') + lfenabled = hasattr(repo._repo, '_largefilesenabled') if not lfenabled or not repo._repo.lfstatus: return orig(repo, archiver, prefix, match, decode) @@ -1364,7 +1364,7 @@ # would allow only hgsubrepos to set this, instead of the current scheme # where the parent sets this for the child. with ( - util.safehasattr(sub, '_repo') + hasattr(sub, '_repo') and lfstatus(sub._repo) or util.nullcontextmanager() ): @@ -1375,7 +1375,7 @@ # standin until a commit. cmdutil.bailifchanged() raises an exception # if the repo has uncommitted changes. Wrap it to also check if # largefiles were changed. This is used by bisect, backout and fetch. -@eh.wrapfunction(cmdutil, b'bailifchanged') +@eh.wrapfunction(cmdutil, 'bailifchanged') def overridebailifchanged(orig, repo, *args, **kwargs): orig(repo, *args, **kwargs) with lfstatus(repo): @@ -1384,13 +1384,13 @@ raise error.Abort(_(b'uncommitted changes')) -@eh.wrapfunction(cmdutil, b'postcommitstatus') +@eh.wrapfunction(cmdutil, 'postcommitstatus') def postcommitstatus(orig, repo, *args, **kwargs): with lfstatus(repo): return orig(repo, *args, **kwargs) -@eh.wrapfunction(cmdutil, b'forget') +@eh.wrapfunction(cmdutil, 'forget') def cmdutilforget( orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive ): @@ -1559,7 +1559,7 @@ orig(ui, repo, *pats, **opts) -@eh.wrapfunction(scmutil, b'addremove') +@eh.wrapfunction(scmutil, 'addremove') def scmutiladdremove( orig, repo, @@ -1717,11 +1717,10 @@ @eh.wrapcommand(b'cat') def overridecat(orig, ui, repo, file1, *pats, **opts): - opts = pycompat.byteskwargs(opts) - ctx = logcmdutil.revsingle(repo, opts.get(b'rev')) + ctx = logcmdutil.revsingle(repo, opts.get('rev')) err = 1 notbad = set() - m = scmutil.match(ctx, (file1,) + pats, opts) + m = scmutil.match(ctx, (file1,) + pats, pycompat.byteskwargs(opts)) origmatchfn = m.matchfn def lfmatchfn(f): @@ -1758,12 +1757,12 @@ m.visitdir = lfvisitdirfn for f in ctx.walk(m): - with cmdutil.makefileobj(ctx, opts.get(b'output'), pathname=f) as fp: + with cmdutil.makefileobj(ctx, opts.get('output'), pathname=f) as fp: lf = lfutil.splitstandin(f) if lf is None or origmatchfn(f): # duplicating unreachable code from commands.cat data = ctx[f].data() - if opts.get(b'decode'): + if opts.get('decode'): data = repo.wwritedata(f, data) fp.write(data) else: @@ -1787,7 +1786,7 @@ return err -@eh.wrapfunction(merge, b'_update') +@eh.wrapfunction(merge, '_update') def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs): matcher = kwargs.get('matcher', None) # note if this is a partial update @@ -1880,7 +1879,7 @@ return result -@eh.wrapfunction(scmutil, b'marktouched') +@eh.wrapfunction(scmutil, 'marktouched') def scmutilmarktouched(orig, repo, files, *args, **kwargs): result = orig(repo, files, *args, **kwargs) @@ -1901,8 +1900,8 @@ return result -@eh.wrapfunction(upgrade_actions, b'preservedrequirements') 
-@eh.wrapfunction(upgrade_actions, b'supporteddestrequirements') +@eh.wrapfunction(upgrade_actions, 'preservedrequirements') +@eh.wrapfunction(upgrade_actions, 'supporteddestrequirements') def upgraderequirements(orig, repo): reqs = orig(repo) if b'largefiles' in repo.requirements: @@ -1913,7 +1912,7 @@ _lfscheme = b'largefile://' -@eh.wrapfunction(urlmod, b'open') +@eh.wrapfunction(urlmod, 'open') def openlargefile(orig, ui, url_, data=None, **kwargs): if url_.startswith(_lfscheme): if data: diff -r 704c3d0878d9 -r 12c308c55e53 hgext/largefiles/proto.py --- a/hgext/largefiles/proto.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/largefiles/proto.py Wed Oct 11 02:02:46 2023 +0200 @@ -200,7 +200,7 @@ # advertise the largefiles=serve capability -@eh.wrapfunction(wireprotov1server, b'_capabilities') +@eh.wrapfunction(wireprotov1server, '_capabilities') def _capabilities(orig, repo, proto): '''announce largefile server capability''' caps = orig(repo, proto) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/largefiles/storefactory.py --- a/hgext/largefiles/storefactory.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/largefiles/storefactory.py Wed Oct 11 02:02:46 2023 +0200 @@ -5,7 +5,6 @@ import re from mercurial.i18n import _ -from mercurial.pycompat import getattr from mercurial import ( error, hg, @@ -57,7 +56,7 @@ # The path could be a scheme so use Mercurial's normal functionality # to resolve the scheme to a repository and use its path - path = util.safehasattr(remote, b'url') and remote.url() or remote.path + path = hasattr(remote, 'url') and remote.url() or remote.path match = _scheme_re.match(path) if not match: # regular filesystem path diff -r 704c3d0878d9 -r 12c308c55e53 hgext/lfs/__init__.py --- a/hgext/lfs/__init__.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/lfs/__init__.py Wed Oct 11 02:02:46 2023 +0200 @@ -342,7 +342,7 @@ wrapfunction(filelog, 'size', wrapper.filelogsize) -@eh.wrapfunction(localrepo, b'resolverevlogstorevfsoptions') +@eh.wrapfunction(localrepo, 'resolverevlogstorevfsoptions') def _resolverevlogstorevfsoptions(orig, ui, requirements, features): opts = orig(ui, requirements, features) for name, module in extensions.extensions(ui): diff -r 704c3d0878d9 -r 12c308c55e53 hgext/lfs/blobstore.py --- a/hgext/lfs/blobstore.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/lfs/blobstore.py Wed Oct 11 02:02:46 2023 +0200 @@ -15,7 +15,6 @@ import socket from mercurial.i18n import _ -from mercurial.pycompat import getattr from mercurial.node import hex from mercurial import ( @@ -271,7 +270,7 @@ if isinstance(urlerror.reason, Exception): inst = urlerror.reason - if util.safehasattr(inst, b'reason'): + if hasattr(inst, 'reason'): try: # usually it is in the form (errno, strerror) reason = inst.reason.args[1] except (AttributeError, IndexError): @@ -751,7 +750,7 @@ if lfsurl is None: if remote: path = remote - elif util.safehasattr(repo, b'_subtoppath'): + elif hasattr(repo, '_subtoppath'): # The pull command sets this during the optional update phase, which # tells exactly where the pull originated, whether 'paths.default' # or explicit. 
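The util.safehasattr() → hasattr() sweep that runs through all of these files leans on a Python 3 guarantee: hasattr() only swallows AttributeError, while on Python 2 it suppressed every exception (which is why safehasattr() existed in the first place). A quick demonstration of the difference that matters here:

```python
class Repo:
    @property
    def journal(self):
        raise ValueError('broken property')


repo = Repo()
try:
    hasattr(repo, 'journal')
except ValueError:
    # on Python 3 the ValueError escapes instead of being silently
    # turned into "attribute missing", so hasattr() is now as safe as
    # util.safehasattr() used to be
    print('real errors are no longer hidden')

print(hasattr(repo, 'missing'))  # False: the plain AttributeError case
```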
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/lfs/wireprotolfsserver.py --- a/hgext/lfs/wireprotolfsserver.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/lfs/wireprotolfsserver.py Wed Oct 11 02:02:46 2023 +0200 @@ -16,7 +16,6 @@ from mercurial import ( exthelper, pycompat, - util, wireprotoserver, ) @@ -33,7 +32,7 @@ eh = exthelper.exthelper() -@eh.wrapfunction(wireprotoserver, b'handlewsgirequest') +@eh.wrapfunction(wireprotoserver, 'handlewsgirequest') def handlewsgirequest(orig, rctx, req, res, checkperm): """Wrap wireprotoserver.handlewsgirequest() to possibly process an LFS request if it is left unprocessed by the wrapped method. @@ -44,7 +43,7 @@ if not rctx.repo.ui.configbool(b'experimental', b'lfs.serve'): return False - if not util.safehasattr(rctx.repo.svfs, 'lfslocalblobstore'): + if not hasattr(rctx.repo.svfs, 'lfslocalblobstore'): return False if not req.dispatchpath: diff -r 704c3d0878d9 -r 12c308c55e53 hgext/lfs/wrapper.py --- a/hgext/lfs/wrapper.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/lfs/wrapper.py Wed Oct 11 02:02:46 2023 +0200 @@ -10,10 +10,6 @@ from mercurial.i18n import _ from mercurial.node import bin, hex, short -from mercurial.pycompat import ( - getattr, - setattr, -) from mercurial import ( bundle2, @@ -26,7 +22,6 @@ localrepo, revlog, scmutil, - util, vfs as vfsmod, wireprotov1server, ) @@ -53,7 +48,7 @@ eh = exthelper.exthelper() -@eh.wrapfunction(localrepo, b'makefilestorage') +@eh.wrapfunction(localrepo, 'makefilestorage') def localrepomakefilestorage(orig, requirements, features, **kwargs): if b'lfs' in requirements: features.add(repository.REPO_FEATURE_LFS) @@ -61,18 +56,18 @@ return orig(requirements=requirements, features=features, **kwargs) -@eh.wrapfunction(changegroup, b'allsupportedversions') +@eh.wrapfunction(changegroup, 'allsupportedversions') def allsupportedversions(orig, ui): versions = orig(ui) versions.add(b'03') return versions -@eh.wrapfunction(wireprotov1server, b'_capabilities') +@eh.wrapfunction(wireprotov1server, '_capabilities') def _capabilities(orig, repo, proto): '''Wrap server command to announce lfs server capability''' caps = orig(repo, proto) - if util.safehasattr(repo.svfs, b'lfslocalblobstore'): + if hasattr(repo.svfs, 'lfslocalblobstore'): # Advertise a slightly different capability when lfs is *required*, so # that the client knows it MUST load the extension. If lfs is not # required on the server, there's no reason to autoload the extension @@ -227,7 +222,7 @@ return orig(self, rev) -@eh.wrapfunction(revlog, b'_verify_revision') +@eh.wrapfunction(revlog, '_verify_revision') def _verify_revision(orig, rl, skipflags, state, node): if _islfs(rl, node=node): rawtext = rl.rawdata(node) @@ -246,7 +241,7 @@ orig(rl, skipflags, state, node) -@eh.wrapfunction(context.basefilectx, b'cmp') +@eh.wrapfunction(context.basefilectx, 'cmp') def filectxcmp(orig, self, fctx): """returns True if text is different than fctx""" # some fctx (ex. 
hg-git) is not based on basefilectx and do not have islfs @@ -258,7 +253,7 @@ return orig(self, fctx) -@eh.wrapfunction(context.basefilectx, b'isbinary') +@eh.wrapfunction(context.basefilectx, 'isbinary') def filectxisbinary(orig, self): if self.islfs(): # fast path: use lfs metadata to answer isbinary @@ -272,13 +267,13 @@ return _islfs(self.filelog()._revlog, self.filenode()) -@eh.wrapfunction(cmdutil, b'_updatecatformatter') +@eh.wrapfunction(cmdutil, '_updatecatformatter') def _updatecatformatter(orig, fm, ctx, matcher, path, decode): orig(fm, ctx, matcher, path, decode) fm.data(rawdata=ctx[path].rawdata()) -@eh.wrapfunction(scmutil, b'wrapconvertsink') +@eh.wrapfunction(scmutil, 'wrapconvertsink') def convertsink(orig, sink): sink = orig(sink) if sink.repotype == b'hg': @@ -325,7 +320,7 @@ # bundlerepo uses "vfsmod.readonlyvfs(othervfs)", we need to make sure lfs # options and blob stores are passed from othervfs to the new readonlyvfs. -@eh.wrapfunction(vfsmod.readonlyvfs, b'__init__') +@eh.wrapfunction(vfsmod.readonlyvfs, '__init__') def vfsinit(orig, self, othervfs): orig(self, othervfs) # copy lfs related options @@ -334,15 +329,15 @@ self.options[k] = v # also copy lfs blobstores. note: this can run before reposetup, so lfs # blobstore attributes are not always ready at this time. - for name in [b'lfslocalblobstore', b'lfsremoteblobstore']: - if util.safehasattr(othervfs, name): + for name in ['lfslocalblobstore', 'lfsremoteblobstore']: + if hasattr(othervfs, name): setattr(self, name, getattr(othervfs, name)) def _prefetchfiles(repo, revmatches): """Ensure that required LFS blobs are present, fetching them as a group if needed.""" - if not util.safehasattr(repo.svfs, b'lfslocalblobstore'): + if not hasattr(repo.svfs, 'lfslocalblobstore'): return pointers = [] @@ -366,7 +361,7 @@ def _canskipupload(repo): # Skip if this hasn't been passed to reposetup() - if not util.safehasattr(repo.svfs, b'lfsremoteblobstore'): + if not hasattr(repo.svfs, 'lfsremoteblobstore'): return True # if remotestore is a null store, upload is a no-op and can be skipped @@ -375,7 +370,7 @@ def candownload(repo): # Skip if this hasn't been passed to reposetup() - if not util.safehasattr(repo.svfs, b'lfsremoteblobstore'): + if not hasattr(repo.svfs, 'lfsremoteblobstore'): return False # if remotestore is a null store, downloads will lead to nothing @@ -383,10 +378,7 @@ def uploadblobsfromrevs(repo, revs): - """upload lfs blobs introduced by revs - - Note: also used by other extensions e. g. infinitepush. avoid renaming. 
- """ + """upload lfs blobs introduced by revs""" if _canskipupload(repo): return pointers = extractpointers(repo, revs) @@ -403,7 +395,7 @@ return uploadblobsfromrevs(pushop.repo, pushop.outgoing.missing) -@eh.wrapfunction(exchange, b'push') +@eh.wrapfunction(exchange, 'push') def push(orig, repo, remote, *args, **kwargs): """bail on push if the extension isn't enabled on remote when needed, and update the remote store based on the destination path.""" @@ -433,7 +425,7 @@ # when writing a bundle via "hg bundle" command, upload related LFS blobs -@eh.wrapfunction(bundle2, b'writenewbundle') +@eh.wrapfunction(bundle2, 'writenewbundle') def writenewbundle( orig, ui, repo, source, filename, bundletype, outgoing, *args, **kwargs ): @@ -522,14 +514,14 @@ remoteblob.writebatch(pointers, repo.svfs.lfslocalblobstore) -@eh.wrapfunction(upgrade_engine, b'finishdatamigration') +@eh.wrapfunction(upgrade_engine, 'finishdatamigration') def upgradefinishdatamigration(orig, ui, srcrepo, dstrepo, requirements): orig(ui, srcrepo, dstrepo, requirements) # Skip if this hasn't been passed to reposetup() - if util.safehasattr( - srcrepo.svfs, b'lfslocalblobstore' - ) and util.safehasattr(dstrepo.svfs, b'lfslocalblobstore'): + if hasattr(srcrepo.svfs, 'lfslocalblobstore') and hasattr( + dstrepo.svfs, 'lfslocalblobstore' + ): srclfsvfs = srcrepo.svfs.lfslocalblobstore.vfs dstlfsvfs = dstrepo.svfs.lfslocalblobstore.vfs @@ -539,8 +531,8 @@ lfutil.link(srclfsvfs.join(oid), dstlfsvfs.join(oid)) -@eh.wrapfunction(upgrade_actions, b'preservedrequirements') -@eh.wrapfunction(upgrade_actions, b'supporteddestrequirements') +@eh.wrapfunction(upgrade_actions, 'preservedrequirements') +@eh.wrapfunction(upgrade_actions, 'supporteddestrequirements') def upgraderequirements(orig, repo): reqs = orig(repo) if b'lfs' in repo.requirements: diff -r 704c3d0878d9 -r 12c308c55e53 hgext/mq.py --- a/hgext/mq.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/mq.py Wed Oct 11 02:02:46 2023 +0200 @@ -75,8 +75,6 @@ short, ) from mercurial.pycompat import ( - delattr, - getattr, open, ) from mercurial import ( @@ -4186,7 +4184,7 @@ def mqimport(orig, ui, repo, *args, **kwargs): - if util.safehasattr(repo, b'abortifwdirpatched') and not kwargs.get( + if hasattr(repo, 'abortifwdirpatched') and not kwargs.get( 'no_commit', False ): repo.abortifwdirpatched( diff -r 704c3d0878d9 -r 12c308c55e53 hgext/narrow/narrowbundle2.py --- a/hgext/narrow/narrowbundle2.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/narrow/narrowbundle2.py Wed Oct 11 02:02:46 2023 +0200 @@ -259,7 +259,7 @@ # will currently always be there when using the core+narrowhg server, but # other servers may include a changespec part even when not widening (e.g. # because we're deepening a shallow repo). 
- if util.safehasattr(repo, 'setnewnarrowpats'): + if hasattr(repo, 'setnewnarrowpats'): op.gettransaction() repo.setnewnarrowpats() @@ -333,9 +333,9 @@ def wrappedcghandler(op, inpart): origcghandler(op, inpart) - if util.safehasattr(op, '_widen_bundle'): + if hasattr(op, '_widen_bundle'): handlechangegroup_widen(op, inpart) - if util.safehasattr(op, '_bookmarksbackup'): + if hasattr(op, '_bookmarksbackup'): localrepo.localrepository._bookmarks.set( op.repo, op._bookmarksbackup ) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/narrow/narrowcommands.py --- a/hgext/narrow/narrowcommands.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/narrow/narrowcommands.py Wed Oct 11 02:02:46 2023 +0200 @@ -87,9 +87,8 @@ def clonenarrowcmd(orig, ui, repo, *args, **opts): """Wraps clone command, so 'hg clone' first wraps localrepo.clone().""" - opts = pycompat.byteskwargs(opts) wrappedextraprepare = util.nullcontextmanager() - narrowspecfile = opts[b'narrowspec'] + narrowspecfile = opts['narrowspec'] if narrowspecfile: filepath = os.path.join(encoding.getcwd(), narrowspecfile) @@ -115,24 +114,25 @@ narrowspec.validatepatterns(excludes) # narrowspec is passed so we should assume that user wants narrow clone - opts[b'narrow'] = True - opts[b'include'].extend(includes) - opts[b'exclude'].extend(excludes) + opts['narrow'] = True + opts['include'].extend(includes) + opts['exclude'].extend(excludes) - if opts[b'narrow']: + if opts['narrow']: def pullbundle2extraprepare_widen(orig, pullop, kwargs): orig(pullop, kwargs) - if opts.get(b'depth'): - kwargs[b'depth'] = opts[b'depth'] + if opts.get('depth'): + # TODO: fix exchange._pullbundle2extraprepare() + kwargs[b'depth'] = opts['depth'] wrappedextraprepare = extensions.wrappedfunction( - exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen + exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen ) with wrappedextraprepare: - return orig(ui, repo, *args, **pycompat.strkwargs(opts)) + return orig(ui, repo, *args, **opts) def pullnarrowcmd(orig, ui, repo, *args, **opts): @@ -146,7 +146,7 @@ kwargs[b'depth'] = opts['depth'] wrappedextraprepare = extensions.wrappedfunction( - exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen + exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen ) with wrappedextraprepare: @@ -201,7 +201,7 @@ extensions.wrapfunction( - exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare + exchange, '_pullbundle2extraprepare', pullbundle2extraprepare ) @@ -366,7 +366,7 @@ kwargs[b'excludepats'] = newexcludes wrappedextraprepare = extensions.wrappedfunction( - exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen + exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen ) # define a function that narrowbundle2 can call after creating the @@ -511,7 +511,6 @@ add --addinclude, --addexclude rules in bulk. Like the other include and exclude switches, the changes are applied immediately. """ - opts = pycompat.byteskwargs(opts) if requirements.NARROW_REQUIREMENT not in repo.requirements: raise error.InputError( _( @@ -522,14 +521,14 @@ # Before supporting, decide whether it "hg tracked --clear" should mean # tracking no paths or all paths. 
- if opts[b'clear']: + if opts['clear']: raise error.InputError(_(b'the --clear option is not yet supported')) # import rules from a file - newrules = opts.get(b'import_rules') + newrules = opts.get('import_rules') if newrules: + filepath = os.path.join(encoding.getcwd(), newrules) try: - filepath = os.path.join(encoding.getcwd(), newrules) fdata = util.readfile(filepath) except IOError as inst: raise error.StorageError( @@ -546,16 +545,16 @@ b"is not supported in narrowspec" ) ) - opts[b'addinclude'].extend(includepats) - opts[b'addexclude'].extend(excludepats) + opts['addinclude'].extend(includepats) + opts['addexclude'].extend(excludepats) - addedincludes = narrowspec.parsepatterns(opts[b'addinclude']) - removedincludes = narrowspec.parsepatterns(opts[b'removeinclude']) - addedexcludes = narrowspec.parsepatterns(opts[b'addexclude']) - removedexcludes = narrowspec.parsepatterns(opts[b'removeexclude']) - autoremoveincludes = opts[b'auto_remove_includes'] + addedincludes = narrowspec.parsepatterns(opts['addinclude']) + removedincludes = narrowspec.parsepatterns(opts['removeinclude']) + addedexcludes = narrowspec.parsepatterns(opts['addexclude']) + removedexcludes = narrowspec.parsepatterns(opts['removeexclude']) + autoremoveincludes = opts['auto_remove_includes'] - update_working_copy = opts[b'update_working_copy'] + update_working_copy = opts['update_working_copy'] only_show = not ( addedincludes or removedincludes @@ -570,7 +569,7 @@ if only_show: oldincludes, oldexcludes = repo.narrowpats ui.pager(b'tracked') - fm = ui.formatter(b'narrow', opts) + fm = ui.formatter(b'narrow', pycompat.byteskwargs(opts)) for i in sorted(oldincludes): fm.startitem() fm.write(b'status', b'%s ', b'I', label=b'narrow.included') @@ -614,7 +613,7 @@ # also define the set of revisions to update for widening. 
path = urlutil.get_unique_pull_path_obj(b'tracked', ui, remotepath) ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc)) - remote = hg.peer(repo, opts, path) + remote = hg.peer(repo, pycompat.byteskwargs(opts), path) try: # check narrow support before doing anything if widening needs to be @@ -670,8 +669,8 @@ oldexcludes, newincludes, newexcludes, - opts[b'force_delete_local_changes'], - opts[b'backup'], + opts['force_delete_local_changes'], + opts['backup'], ) # _narrow() updated the narrowspec and _widen() below needs to # use the updated values as its base (otherwise removed includes diff -r 704c3d0878d9 -r 12c308c55e53 hgext/narrow/narrowwirepeer.py --- a/hgext/narrow/narrowwirepeer.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/narrow/narrowwirepeer.py Wed Oct 11 02:02:46 2023 +0200 @@ -36,7 +36,7 @@ kwargs["excludepats"] = b','.join(exclude) return orig(cmd, *args, **kwargs) - extensions.wrapfunction(peer, b'_calltwowaystream', wrapped) + extensions.wrapfunction(peer, '_calltwowaystream', wrapped) hg.wirepeersetupfuncs.append(wirereposetup) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/pager.py --- a/hgext/pager.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/pager.py Wed Oct 11 02:02:46 2023 +0200 @@ -76,7 +76,7 @@ ui.disablepager() return orig(ui, options, cmd, cmdfunc) - extensions.wrapfunction(dispatch, b'_runcommand', pagecmd) + extensions.wrapfunction(dispatch, '_runcommand', pagecmd) attended = [b'annotate', b'cat', b'diff', b'export', b'glog', b'log', b'qdiff'] diff -r 704c3d0878d9 -r 12c308c55e53 hgext/phabricator.py --- a/hgext/phabricator.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/phabricator.py Wed Oct 11 02:02:46 2023 +0200 @@ -71,7 +71,6 @@ from mercurial.node import bin, short from mercurial.i18n import _ -from mercurial.pycompat import getattr from mercurial.thirdparty import attr from mercurial import ( cmdutil, diff -r 704c3d0878d9 -r 12c308c55e53 hgext/releasenotes.py --- a/hgext/releasenotes.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/releasenotes.py Wed Oct 11 02:02:46 2023 +0200 @@ -24,7 +24,6 @@ error, logcmdutil, minirst, - pycompat, registrar, util, ) @@ -665,17 +664,16 @@ admonitions (if any). """ - opts = pycompat.byteskwargs(opts) sections = releasenotessections(ui, repo) - cmdutil.check_incompatible_arguments(opts, b'list', [b'rev', b'check']) + cmdutil.check_incompatible_arguments(opts, 'list', ['rev', 'check']) - if opts.get(b'list'): + if opts.get('list'): return _getadmonitionlist(ui, sections) - rev = opts.get(b'rev') + rev = opts.get('rev') revs = logcmdutil.revrange(repo, [rev or b'not public()']) - if opts.get(b'check'): + if opts.get('check'): return checkadmonitions(ui, repo, sections.names(), revs) incoming = parsenotesfromrevisions(repo, sections.names(), revs) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/relink.py --- a/hgext/relink.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/relink.py Wed Oct 11 02:02:46 2023 +0200 @@ -60,9 +60,7 @@ command is running. (Both repositories will be locked against writes.) 
""" - if not util.safehasattr(util, b'samefile') or not util.safehasattr( - util, b'samedevice' - ): + if not hasattr(util, 'samefile') or not hasattr(util, 'samedevice'): raise error.Abort(_(b'hardlinks are not supported on this system')) if origin is None and b'default-relink' in ui.paths: diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotefilelog/__init__.py --- a/hgext/remotefilelog/__init__.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/remotefilelog/__init__.py Wed Oct 11 02:02:46 2023 +0200 @@ -317,31 +317,31 @@ changegroup.cgpacker = shallowbundle.shallowcg1packer extensions.wrapfunction( - changegroup, b'_addchangegroupfiles', shallowbundle.addchangegroupfiles + changegroup, '_addchangegroupfiles', shallowbundle.addchangegroupfiles ) extensions.wrapfunction( - changegroup, b'makechangegroup', shallowbundle.makechangegroup + changegroup, 'makechangegroup', shallowbundle.makechangegroup ) - extensions.wrapfunction(localrepo, b'makestore', storewrapper) - extensions.wrapfunction(exchange, b'pull', exchangepull) - extensions.wrapfunction(merge, b'applyupdates', applyupdates) - extensions.wrapfunction(merge, b'_checkunknownfiles', checkunknownfiles) - extensions.wrapfunction(context.workingctx, b'_checklookup', checklookup) - extensions.wrapfunction(scmutil, b'_findrenames', findrenames) + extensions.wrapfunction(localrepo, 'makestore', storewrapper) + extensions.wrapfunction(exchange, 'pull', exchangepull) + extensions.wrapfunction(merge, 'applyupdates', applyupdates) + extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles) + extensions.wrapfunction(context.workingctx, '_checklookup', checklookup) + extensions.wrapfunction(scmutil, '_findrenames', findrenames) extensions.wrapfunction( - copies, b'_computeforwardmissing', computeforwardmissing + copies, '_computeforwardmissing', computeforwardmissing ) - extensions.wrapfunction(dispatch, b'runcommand', runcommand) - extensions.wrapfunction(repair, b'_collectbrokencsets', _collectbrokencsets) - extensions.wrapfunction(context.changectx, b'filectx', filectx) - extensions.wrapfunction(context.workingctx, b'filectx', workingfilectx) - extensions.wrapfunction(patch, b'trydiff', trydiff) - extensions.wrapfunction(hg, b'verify', _verify) + extensions.wrapfunction(dispatch, 'runcommand', runcommand) + extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets) + extensions.wrapfunction(context.changectx, 'filectx', filectx) + extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx) + extensions.wrapfunction(patch, 'trydiff', trydiff) + extensions.wrapfunction(hg, 'verify', _verify) scmutil.fileprefetchhooks.add(b'remotefilelog', _fileprefetchhook) # disappointing hacks below - extensions.wrapfunction(scmutil, b'getrenamedfn', getrenamedfn) - extensions.wrapfunction(revset, b'filelog', filelogrevset) + extensions.wrapfunction(scmutil, 'getrenamedfn', getrenamedfn) + extensions.wrapfunction(revset, 'filelog', filelogrevset) revset.symbols[b'filelog'] = revset.filelog @@ -374,7 +374,7 @@ else: return orig(self, *args, **kwargs) - extensions.wrapfunction(exchange, b'pull', pull_shallow) + extensions.wrapfunction(exchange, 'pull', pull_shallow) # Wrap the stream logic to add requirements and to pass include/exclude # patterns around. 
@@ -393,14 +393,14 @@
         else:
             return orig()
 
-    extensions.wrapfunction(remote, b'stream_out', stream_out_shallow)
+    extensions.wrapfunction(remote, 'stream_out', stream_out_shallow)
 
     def stream_wrap(orig, op):
         setup_streamout(op.repo, op.remote)
         return orig(op)
 
     extensions.wrapfunction(
-        streamclone, b'maybeperformlegacystreamclone', stream_wrap
+        streamclone, 'maybeperformlegacystreamclone', stream_wrap
     )
 
     def canperformstreamclone(orig, pullop, bundle2=False):
@@ -417,7 +417,7 @@
         return supported, requirements
 
     extensions.wrapfunction(
-        streamclone, b'canperformstreamclone', canperformstreamclone
+        streamclone, 'canperformstreamclone', canperformstreamclone
     )
 
     try:
@@ -425,7 +425,7 @@
     finally:
         if opts.get('shallow'):
             for r in repos:
-                if util.safehasattr(r, b'fileservice'):
+                if hasattr(r, 'fileservice'):
                     r.fileservice.close()
@@ -721,7 +721,7 @@
     )
 
     extensions.wrapfunction(
-        remotefilelog.remotefilelog, b'addrawrevision', addrawrevision
+        remotefilelog.remotefilelog, 'addrawrevision', addrawrevision
     )
 
     def changelogadd(orig, self, *args, **kwargs):
@@ -749,7 +749,7 @@
         del pendingfilecommits[:]
         return node
 
-    extensions.wrapfunction(changelog.changelog, b'add', changelogadd)
+    extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
 
 
 def getrenamedfn(orig, repo, endrev=None):
@@ -904,7 +904,7 @@
         if not isenabled(repo):
             continue
 
-        if not util.safehasattr(repo, b'name'):
+        if not hasattr(repo, 'name'):
             ui.warn(
                 _(b"repo %s is a misconfigured remotefilelog repo\n") % path
             )
@@ -1034,7 +1034,7 @@
         bgprefetchrevs = revdatelimit(ui, bgprefetchrevs)
 
     def anon(unused_success):
-        if util.safehasattr(repo, b'ranprefetch') and repo.ranprefetch:
+        if hasattr(repo, 'ranprefetch') and repo.ranprefetch:
             return
         repo.ranprefetch = True
         repo.backgroundprefetch(bgprefetchrevs, repack=bgrepack)
@@ -1080,10 +1080,10 @@
         source, heads=heads, common=common, bundlecaps=bundlecaps, **kwargs
     )
 
-    if util.safehasattr(remote, b'_callstream'):
+    if hasattr(remote, '_callstream'):
         remote._localrepo = repo
-    elif util.safehasattr(remote, b'getbundle'):
-        extensions.wrapfunction(remote, b'getbundle', localgetbundle)
+    elif hasattr(remote, 'getbundle'):
+        extensions.wrapfunction(remote, 'getbundle', localgetbundle)
 
     return orig(repo, remote, *args, **kwargs)
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotefilelog/basepack.py
--- a/hgext/remotefilelog/basepack.py Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/remotefilelog/basepack.py Wed Oct 11 02:02:46 2023 +0200
@@ -7,7 +7,6 @@
 
 from mercurial.i18n import _
 from mercurial.pycompat import (
-    getattr,
     open,
 )
 from mercurial.node import hex
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotefilelog/basestore.py
--- a/hgext/remotefilelog/basestore.py Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/remotefilelog/basestore.py Wed Oct 11 02:02:46 2023 +0200
@@ -415,7 +415,7 @@
 
     def markforrefresh(self):
         for store in self.stores:
-            if util.safehasattr(store, b'markforrefresh'):
+            if hasattr(store, 'markforrefresh'):
                 store.markforrefresh()
 
     @staticmethod
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotefilelog/connectionpool.py
--- a/hgext/remotefilelog/connectionpool.py Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/remotefilelog/connectionpool.py Wed Oct 11 02:02:46 2023 +0200
@@ -9,7 +9,6 @@
 
 from mercurial import (
     hg,
     sshpeer,
-    util,
 )
 
 _sshv1peer = sshpeer.sshv1peer
@@ -41,14 +40,14 @@
 
         if conn is None:
             peer = hg.peer(self._repo.ui, {}, path)
-            if util.safehasattr(peer, '_cleanup'):
+            if hasattr(peer, '_cleanup'):
 
                 class mypeer(peer.__class__):
                     def _cleanup(self, warn=None):
                         # close pipee first so peer.cleanup reading it won't
                         # deadlock, if there are other processes with pipeo
                         # open (i.e. us).
-                        if util.safehasattr(self, 'pipee'):
+                        if hasattr(self, 'pipee'):
                             self.pipee.close()
                         return super(mypeer, self)._cleanup()
 
@@ -83,5 +82,5 @@
         self.close()
 
     def close(self):
-        if util.safehasattr(self.peer, 'cleanup'):
+        if hasattr(self.peer, 'cleanup'):
             self.peer.cleanup()
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotefilelog/contentstore.py
--- a/hgext/remotefilelog/contentstore.py Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/remotefilelog/contentstore.py Wed Oct 11 02:02:46 2023 +0200
@@ -4,7 +4,6 @@
     hex,
     sha1nodeconstants,
 )
-from mercurial.pycompat import getattr
 from mercurial import (
     mdiff,
     revlog,
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotefilelog/fileserverclient.py
--- a/hgext/remotefilelog/fileserverclient.py Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/remotefilelog/fileserverclient.py Wed Oct 11 02:02:46 2023 +0200
@@ -92,7 +92,7 @@
             not in self.capabilities()
         ):
             return
-        if not util.safehasattr(self, '_localrepo'):
+        if not hasattr(self, '_localrepo'):
             return
         if (
             constants.SHALLOWREPO_REQUIREMENT
@@ -132,7 +132,7 @@
     def _callstream(self, command, **opts):
         supertype = super(remotefilepeer, self)
-        if not util.safehasattr(supertype, '_sendrequest'):
+        if not hasattr(supertype, '_sendrequest'):
             self._updatecallstreamopts(command, pycompat.byteskwargs(opts))
         return super(remotefilepeer, self)._callstream(command, **opts)
@@ -641,9 +641,7 @@
             self._lfsprefetch(fileids)
 
     def _lfsprefetch(self, fileids):
-        if not _lfsmod or not util.safehasattr(
-            self.repo.svfs, b'lfslocalblobstore'
-        ):
+        if not _lfsmod or not hasattr(self.repo.svfs, 'lfslocalblobstore'):
             return
         if not _lfsmod.wrapper.candownload(self.repo):
             return
diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotefilelog/remotefilelogserver.py
--- a/hgext/remotefilelog/remotefilelogserver.py Tue Oct 10 18:29:04 2023 +0200
+++ b/hgext/remotefilelog/remotefilelogserver.py Wed Oct 11 02:02:46 2023 +0200
@@ -67,7 +67,7 @@
     )
 
     extensions.wrapfunction(
-        changegroup.cgpacker, b'generatefiles', generatefiles
+        changegroup.cgpacker, 'generatefiles', generatefiles
     )
@@ -207,7 +207,7 @@
         ):
             yield x
 
-    extensions.wrapfunction(streamclone, b'_walkstreamfiles', _walkstreamfiles)
+    extensions.wrapfunction(streamclone, '_walkstreamfiles', _walkstreamfiles)
 
     # expose remotefilelog capabilities
     def _capabilities(orig, repo, proto):
@@ -222,18 +222,18 @@
             caps.append(b'x_rfl_getfile')
         return caps
 
-    extensions.wrapfunction(wireprotov1server, b'_capabilities', _capabilities)
+    extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)
 
     def _adjustlinkrev(orig, self, *args, **kwargs):
        # When generating file blobs, taking the real path is too slow on large
        # repos, so force it to just return the linkrev directly.
repo = self._repo - if util.safehasattr(repo, b'forcelinkrev') and repo.forcelinkrev: + if hasattr(repo, 'forcelinkrev') and repo.forcelinkrev: return self._filelog.linkrev(self._filelog.rev(self._filenode)) return orig(self, *args, **kwargs) extensions.wrapfunction( - context.basefilectx, b'_adjustlinkrev', _adjustlinkrev + context.basefilectx, '_adjustlinkrev', _adjustlinkrev ) def _iscmd(orig, cmd): @@ -241,7 +241,7 @@ return False return orig(cmd) - extensions.wrapfunction(wireprotoserver, b'iscmd', _iscmd) + extensions.wrapfunction(wireprotoserver, 'iscmd', _iscmd) def _loadfileblob(repo, cachepath, path, node): diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotefilelog/repack.py --- a/hgext/remotefilelog/repack.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/remotefilelog/repack.py Wed Oct 11 02:02:46 2023 +0200 @@ -49,7 +49,7 @@ def fullrepack(repo, options=None): """If ``packsonly`` is True, stores creating only loose objects are skipped.""" - if util.safehasattr(repo, 'shareddatastores'): + if hasattr(repo, 'shareddatastores'): datasource = contentstore.unioncontentstore(*repo.shareddatastores) historysource = metadatastore.unionmetadatastore( *repo.sharedhistorystores, allowincomplete=True @@ -67,7 +67,7 @@ options=options, ) - if util.safehasattr(repo.manifestlog, 'datastore'): + if hasattr(repo.manifestlog, 'datastore'): localdata, shareddata = _getmanifeststores(repo) lpackpath, ldstores, lhstores = localdata spackpath, sdstores, shstores = shareddata @@ -107,7 +107,7 @@ """This repacks the repo by looking at the distribution of pack files in the repo and performing the most minimal repack to keep the repo in good shape. """ - if util.safehasattr(repo, 'shareddatastores'): + if hasattr(repo, 'shareddatastores'): packpath = shallowutil.getcachepackpath( repo, constants.FILEPACK_CATEGORY ) @@ -120,7 +120,7 @@ options=options, ) - if util.safehasattr(repo.manifestlog, 'datastore'): + if hasattr(repo.manifestlog, 'datastore'): localdata, shareddata = _getmanifeststores(repo) lpackpath, ldstores, lhstores = localdata spackpath, sdstores, shstores = shareddata @@ -895,7 +895,7 @@ def repacklockvfs(repo): - if util.safehasattr(repo, 'name'): + if hasattr(repo, 'name'): # Lock in the shared cache so repacks across multiple copies of the same # repo are coordinated. 
sharedcachepath = shallowutil.getcachepackpath( diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotefilelog/shallowrepo.py --- a/hgext/remotefilelog/shallowrepo.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/remotefilelog/shallowrepo.py Wed Oct 11 02:02:46 2023 +0200 @@ -340,7 +340,7 @@ repo.excludepattern = repo.ui.configlist( b"remotefilelog", b"excludepattern", None ) - if not util.safehasattr(repo, 'connectionpool'): + if not hasattr(repo, 'connectionpool'): repo.connectionpool = connectionpool.connectionpool(repo) if repo.includepattern or repo.excludepattern: diff -r 704c3d0878d9 -r 12c308c55e53 hgext/remotenames.py --- a/hgext/remotenames.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/remotenames.py Wed Oct 11 02:02:46 2023 +0200 @@ -255,7 +255,7 @@ def extsetup(ui): - extensions.wrapfunction(bookmarks, b'_printbookmarks', wrapprintbookmarks) + extensions.wrapfunction(bookmarks, '_printbookmarks', wrapprintbookmarks) def reposetup(ui, repo): diff -r 704c3d0878d9 -r 12c308c55e53 hgext/schemes.py --- a/hgext/schemes.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/schemes.py Wed Oct 11 02:02:46 2023 +0200 @@ -159,7 +159,7 @@ else: hg.repo_schemes[scheme] = ShortRepository(url, scheme, t) - extensions.wrapfunction(urlutil, b'hasdriveletter', hasdriveletter) + extensions.wrapfunction(urlutil, 'hasdriveletter', hasdriveletter) @command(b'debugexpandscheme', norepo=True) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/share.py --- a/hgext/share.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/share.py Wed Oct 11 02:02:46 2023 +0200 @@ -162,9 +162,9 @@ def extsetup(ui): - extensions.wrapfunction(bookmarks, b'_getbkfile', getbkfile) - extensions.wrapfunction(bookmarks.bmstore, b'_recordchange', recordchange) - extensions.wrapfunction(bookmarks.bmstore, b'_writerepo', writerepo) + extensions.wrapfunction(bookmarks, '_getbkfile', getbkfile) + extensions.wrapfunction(bookmarks.bmstore, '_recordchange', recordchange) + extensions.wrapfunction(bookmarks.bmstore, '_writerepo', writerepo) extensions.wrapcommand(commands.table, b'clone', clone) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/sparse.py --- a/hgext/sparse.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/sparse.py Wed Oct 11 02:02:46 2023 +0200 @@ -73,7 +73,6 @@ from mercurial.i18n import _ -from mercurial.pycompat import setattr from mercurial import ( cmdutil, commands, @@ -146,7 +145,7 @@ revs = revs.filter(ctxmatch) return revs - extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs) + extensions.wrapfunction(logcmdutil, '_initialrevs', _initialrevs) def _clonesparsecmd(orig, ui, repo, *args, **opts): @@ -170,7 +169,7 @@ ) return orig(ctx, *args, **kwargs) - extensions.wrapfunction(mergemod, b'update', clonesparse) + extensions.wrapfunction(mergemod, 'update', clonesparse) return orig(ui, repo, *args, **opts) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/split.py --- a/hgext/split.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/split.py Wed Oct 11 02:02:46 2023 +0200 @@ -22,7 +22,6 @@ error, hg, logcmdutil, - pycompat, registrar, revsetlang, rewriteutil, @@ -65,10 +64,9 @@ By default, rebase connected non-obsoleted descendants onto the new changeset. Use --no-rebase to avoid the rebase. 
""" - opts = pycompat.byteskwargs(opts) revlist = [] - if opts.get(b'rev'): - revlist.append(opts.get(b'rev')) + if opts.get('rev'): + revlist.append(opts.get('rev')) revlist.extend(revs) with repo.wlock(), repo.lock(): tr = repo.transaction(b'split') @@ -89,7 +87,7 @@ if ctx.node() is None: raise error.InputError(_(b'cannot split working directory')) - if opts.get(b'rebase'): + if opts.get('rebase'): # Skip obsoleted descendants and their descendants so the rebase # won't cause conflicts for sure. descendants = list(repo.revs(b'(%d::) - (%d)', rev, rev)) @@ -116,7 +114,7 @@ wnode = repo[b'.'].node() top = None try: - top = dosplit(ui, repo, tr, ctx, opts) + top = dosplit(ui, repo, tr, ctx, **opts) finally: # top is None: split failed, need update --clean recovery. # wnode == ctx.node(): wnode split, no need to update. @@ -128,7 +126,7 @@ dorebase(ui, repo, torebase, top) -def dosplit(ui, repo, tr, ctx, opts): +def dosplit(ui, repo, tr, ctx, **opts): committed = [] # [ctx] # Set working parent to ctx.p1(), and keep working copy as ctx's content @@ -166,13 +164,13 @@ ) % short(ctx.node()) opts.update( { - b'edit': True, - b'interactive': True, - b'message': header + ctx.description(), + 'edit': True, + 'interactive': True, + 'message': header + ctx.description(), } ) origctx = repo[b'.'] - commands.commit(ui, repo, **pycompat.strkwargs(opts)) + commands.commit(ui, repo, **opts) newctx = repo[b'.'] # Ensure user didn't do a "no-op" split (such as deselecting # everything). diff -r 704c3d0878d9 -r 12c308c55e53 hgext/sqlitestore.py --- a/hgext/sqlitestore.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/sqlitestore.py Wed Oct 11 02:02:46 2023 +0200 @@ -1330,11 +1330,11 @@ def extsetup(ui): localrepo.featuresetupfuncs.add(featuresetup) extensions.wrapfunction( - localrepo, b'newreporequirements', newreporequirements + localrepo, 'newreporequirements', newreporequirements ) - extensions.wrapfunction(localrepo, b'makefilestorage', makefilestorage) - extensions.wrapfunction(localrepo, b'makemain', makemain) - extensions.wrapfunction(verify.verifier, b'__init__', verifierinit) + extensions.wrapfunction(localrepo, 'makefilestorage', makefilestorage) + extensions.wrapfunction(localrepo, 'makemain', makemain) + extensions.wrapfunction(verify.verifier, '__init__', verifierinit) def reposetup(ui, repo): diff -r 704c3d0878d9 -r 12c308c55e53 hgext/uncommit.py --- a/hgext/uncommit.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/uncommit.py Wed Oct 11 02:02:46 2023 +0200 @@ -152,7 +152,6 @@ """ cmdutil.check_note_size(opts) cmdutil.resolve_commit_options(ui, opts) - opts = pycompat.byteskwargs(opts) with repo.wlock(), repo.lock(): @@ -160,7 +159,7 @@ m, a, r, d = st.modified, st.added, st.removed, st.deleted isdirtypath = any(set(m + a + r + d) & set(pats)) allowdirtywcopy = opts[ - b'allow_dirty_working_copy' + 'allow_dirty_working_copy' ] or repo.ui.configbool(b'experimental', b'uncommitondirtywdir') if not allowdirtywcopy and (not pats or isdirtypath): cmdutil.bailifchanged( @@ -172,7 +171,7 @@ if len(old.parents()) > 1: raise error.InputError(_(b"cannot uncommit merge changeset")) - match = scmutil.match(old, pats, opts) + match = scmutil.match(old, pats, pycompat.byteskwargs(opts)) # Check all explicitly given files; abort if there's a problem. 
if match.files(): @@ -203,14 +202,14 @@ ) with repo.transaction(b'uncommit'): - if not (opts[b'message'] or opts[b'logfile']): - opts[b'message'] = old.description() - message = cmdutil.logmessage(ui, opts) + if not (opts['message'] or opts['logfile']): + opts['message'] = old.description() + message = cmdutil.logmessage(ui, pycompat.byteskwargs(opts)) keepcommit = pats if not keepcommit: - if opts.get(b'keep') is not None: - keepcommit = opts.get(b'keep') + if opts.get('keep') is not None: + keepcommit = opts.get('keep') else: keepcommit = ui.configbool( b'experimental', b'uncommit.keep' @@ -221,8 +220,8 @@ match, keepcommit, message=message, - user=opts.get(b'user'), - date=opts.get(b'date'), + user=opts.get('user'), + date=opts.get('date'), ) if newid is None: ui.status(_(b"nothing to uncommit\n")) diff -r 704c3d0878d9 -r 12c308c55e53 hgext/win32mbcs.py --- a/hgext/win32mbcs.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/win32mbcs.py Wed Oct 11 02:02:46 2023 +0200 @@ -49,7 +49,6 @@ import sys from mercurial.i18n import _ -from mercurial.pycompat import getattr, setattr from mercurial import ( encoding, error, diff -r 704c3d0878d9 -r 12c308c55e53 hgext/zeroconf/__init__.py --- a/hgext/zeroconf/__init__.py Tue Oct 10 18:29:04 2023 +0200 +++ b/hgext/zeroconf/__init__.py Wed Oct 11 02:02:46 2023 +0200 @@ -233,10 +233,10 @@ server.close() -extensions.wrapfunction(dispatch, b'_runcommand', cleanupafterdispatch) +extensions.wrapfunction(dispatch, '_runcommand', cleanupafterdispatch) -extensions.wrapfunction(uimod.ui, b'config', config) -extensions.wrapfunction(uimod.ui, b'configitems', configitems) -extensions.wrapfunction(uimod.ui, b'configsuboptions', configsuboptions) -extensions.wrapfunction(hg, b'defaultdest', defaultdest) -extensions.wrapfunction(servermod, b'create_server', zc_create_server) +extensions.wrapfunction(uimod.ui, 'config', config) +extensions.wrapfunction(uimod.ui, 'configitems', configitems) +extensions.wrapfunction(uimod.ui, 'configsuboptions', configsuboptions) +extensions.wrapfunction(hg, 'defaultdest', defaultdest) +extensions.wrapfunction(servermod, 'create_server', zc_create_server) diff -r 704c3d0878d9 -r 12c308c55e53 i18n/ja.po --- a/i18n/ja.po Tue Oct 10 18:29:04 2023 +0200 +++ b/i18n/ja.po Wed Oct 11 02:02:46 2023 +0200 @@ -5875,13 +5875,6 @@ msgstr "共有元情報を相対パスで保持 (実験的実装)" msgid "" -" [infinitepush]\n" -" # Server-side and client-side option. Pattern of the infinitepush " -"bookmark\n" -" branchpattern = PATTERN" -msgstr "" - -msgid "" " # Server or client\n" " server = False" msgstr "" @@ -5973,12 +5966,6 @@ msgstr "" msgid "" -" # Instructs infinitepush to forward all received bundle2 parts to the\n" -" # bundle for storage. Defaults to False.\n" -" storeallparts = True" -msgstr "" - -msgid "" " # routes each incoming push to the bundlestore. 
defaults to False\n" " pushtobundlestore = True" msgstr "" @@ -5991,24 +5978,10 @@ " bookmarks = True\n" msgstr "" -msgid "please set infinitepush.sqlhost" -msgstr "" - -msgid "please set infinitepush.reponame" -msgstr "" - #, fuzzy, python-format msgid "invalid log level %s" msgstr "不正なローカルアドレス: %s" -#, fuzzy, python-format -msgid "unknown infinitepush store type specified %s" -msgstr "--type に未知のバンドル種別が指定されました" - -#, fuzzy, python-format -msgid "unknown infinitepush index type specified %s" -msgstr "--type に未知のバンドル種別が指定されました" - #, fuzzy msgid "force push to go to bundle store (EXPERIMENTAL)" msgstr "表示対象リビジョン" @@ -6019,10 +5992,6 @@ msgid "see 'hg help config.paths'" msgstr "詳細は 'hg help config.paths' 参照" -#, fuzzy -msgid "infinitepush bookmark '{}' does not exist in path '{}'" -msgstr "ブックマーク '%s' は存在しません" - msgid "no changes found\n" msgstr "差分はありません\n" diff -r 704c3d0878d9 -r 12c308c55e53 i18n/pt_BR.po --- a/i18n/pt_BR.po Tue Oct 10 18:29:04 2023 +0200 +++ b/i18n/pt_BR.po Wed Oct 11 02:02:46 2023 +0200 @@ -5940,12 +5940,6 @@ msgstr "" msgid "" -" [infinitepush]\n" -" # Server-side and client-side option. Pattern of the infinitepush bookmark\n" -" branchpattern = PATTERN" -msgstr "" - -msgid "" " # Server or client\n" " server = False" msgstr "" @@ -6034,12 +6028,6 @@ msgstr "" msgid "" -" # Instructs infinitepush to forward all received bundle2 parts to the\n" -" # bundle for storage. Defaults to False.\n" -" storeallparts = True" -msgstr "" - -msgid "" " # routes each incoming push to the bundlestore. defaults to False\n" " pushtobundlestore = True" msgstr "" @@ -6052,24 +6040,10 @@ " bookmarks = True\n" msgstr "" -msgid "please set infinitepush.sqlhost" -msgstr "" - -msgid "please set infinitepush.reponame" -msgstr "" - #, python-format msgid "invalid log level %s" msgstr "" -#, python-format -msgid "unknown infinitepush store type specified %s" -msgstr "" - -#, python-format -msgid "unknown infinitepush index type specified %s" -msgstr "" - msgid "force push to go to bundle store (EXPERIMENTAL)" msgstr "" @@ -6079,9 +6053,6 @@ msgid "see 'hg help config.paths'" msgstr "veja 'hg help config.paths'" -msgid "infinitepush bookmark '{}' does not exist in path '{}'" -msgstr "" - msgid "no changes found\n" msgstr "nenhuma alteração encontrada\n" diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/admin/__init__.py diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/admin/verify.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/admin/verify.py Wed Oct 11 02:02:46 2023 +0200 @@ -0,0 +1,341 @@ +# admin/verify.py - better repository integrity checking for Mercurial +# +# Copyright 2023 Octobus +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import collections +import copy +import functools + +from ..i18n import _ +from .. import error, pycompat, registrar, requirements +from ..utils import stringutil + + +verify_table = {} +verify_alias_table = {} +check = registrar.verify_check(verify_table, verify_alias_table) + + +# Use this to declare options/aliases in the middle of the hierarchy. +# Checks like these are not run themselves and cannot have a body. +# For an example, see the `revlogs` check. 
+def noop_func(*args, **kwargs):
+    return
+
+
+@check(b"working-copy.dirstate", alias=b"dirstate")
+def check_dirstate(ui, repo, **options):
+    ui.status(_(b"checking dirstate\n"))
+
+    parent1, parent2 = repo.dirstate.parents()
+    m1 = repo[parent1].manifest()
+    m2 = repo[parent2].manifest()
+    errors = 0
+
+    is_narrow = requirements.NARROW_REQUIREMENT in repo.requirements
+    narrow_matcher = repo.narrowmatch() if is_narrow else None
+
+    for err in repo.dirstate.verify(m1, m2, narrow_matcher):
+        ui.warn(err[0] % err[1:])
+        errors += 1
+
+    return errors
+
+
+# Tree of all checks and their associated functions
+pyramid = {}
+
+
+def build_pyramid(table, full_pyramid):
+    """Create a pyramid of the registered checks.
+    It is a name-based hierarchy that can be arbitrarily nested."""
+    for entry, func in sorted(table.items(), key=lambda x: x[0], reverse=True):
+        cursor = full_pyramid
+        levels = entry.split(b".")
+        for level in levels[:-1]:
+            current_node = cursor.setdefault(level, {})
+            cursor = current_node
+        if cursor.get(levels[-1]) is None:
+            cursor[levels[-1]] = (entry, func)
+        elif func is not noop_func:
+            m = b"intermediate checks need to use `verify.noop_func`"
+            raise error.ProgrammingError(m)
+
+
+def find_checks(name, table=None, alias_table=None, full_pyramid=None):
+    """Find all checks for a given name and return a dict of
+    (qualified_check_name, check_function)
+
+    # Examples
+
+    Using a fully qualified name:
+    "working-copy.dirstate" -> {
+        "working-copy.dirstate": CF,
+    }
+
+    Using a *prefix* of a qualified name:
+    "store.revlogs" -> {
+        "store.revlogs.changelog": CF,
+        "store.revlogs.manifestlog": CF,
+        "store.revlogs.filelog": CF,
+    }
+
+    Using a defined alias:
+    "revlogs" -> {
+        "store.revlogs.changelog": CF,
+        "store.revlogs.manifestlog": CF,
+        "store.revlogs.filelog": CF,
+    }
+
+    Using something that is none of the above will be an error.
+    """
+    if table is None:
+        table = verify_table
+    if alias_table is None:
+        alias_table = verify_alias_table
+
+    if name == b"full":
+        return table
+    checks = {}
+
+    # is it a full name?
+    check = table.get(name)
+
+    if check is None:
+        # is it an alias?
+        qualified_name = alias_table.get(name)
+        if qualified_name is not None:
+            name = qualified_name
+            check = table.get(name)
+        else:
+            split = name.split(b".", 1)
+            if len(split) == 2:
+                # split[0] can be an alias
+                qualified_name = alias_table.get(split[0])
+                if qualified_name is not None:
+                    name = b"%s.%s" % (qualified_name, split[1])
+                    check = table.get(name)
+            else:
+                qualified_name = name
+
+        # Maybe it's a subtree in the check hierarchy that does not
+        # have an explicit alias.
+        levels = name.split(b".")
+        if full_pyramid is not None:
+            if not full_pyramid:
+                build_pyramid(table, full_pyramid)
+
+            pyramid.clear()
+            pyramid.update(full_pyramid.items())
+        else:
+            build_pyramid(table, pyramid)
+
+        subtree = pyramid
+        # Find subtree
+        for level in levels:
+            subtree = subtree.get(level)
+            if subtree is None:
+                hint = error.getsimilar(list(alias_table) + list(table), name)
+                hint = error.similarity_hint(hint)
+
+                raise error.InputError(_(b"unknown check %s") % name, hint=hint)
+
+        # Get all checks in that subtree
+        if isinstance(subtree, dict):
+            stack = list(subtree.items())
+            while stack:
+                current_name, entry = stack.pop()
+                if isinstance(entry, dict):
+                    stack.extend(entry.items())
+                else:
+                    # (qualified_name, func)
+                    checks[entry[0]] = entry[1]
+    else:
+        checks[name] = check
+
+    return checks
+
+
+def pass_options(
+    ui,
+    checks,
+    options,
+    table=None,
+    alias_table=None,
+    full_pyramid=None,
+):
+    """Given a dict of checks (fully qualified name to function), and a list
+    of options as given by the user, pass each option down to the right check
+    function."""
+    ui.debug(b"passing options to check functions\n")
+    to_modify = collections.defaultdict(dict)
+
+    if not checks:
+        raise error.Error(_(b"`checks` required"))
+
+    for option in sorted(options):
+        split = option.split(b":")
+        hint = _(
+            b"syntax is 'check:option=value', "
+            b"eg. revlogs.changelog:copies=yes"
+        )
+        option_error = error.InputError(
+            _(b"invalid option '%s'") % option, hint=hint
+        )
+        if len(split) != 2:
+            raise option_error
+
+        check_name, option_value = split
+        if not option_value:
+            raise option_error
+
+        split = option_value.split(b"=")
+        if len(split) != 2:
+            raise option_error
+
+        option_name, value = split
+        if not value:
+            raise option_error
+
+        path = b"%s:%s" % (check_name, option_name)
+
+        matching_checks = find_checks(
+            check_name,
+            table=table,
+            alias_table=alias_table,
+            full_pyramid=full_pyramid,
+        )
+        for name in matching_checks:
+            check = checks.get(name)
+            if check is None:
+                msg = _(b"specified option '%s' for unselected check '%s'\n")
+                raise error.InputError(msg % (option_name, name))
+
+            assert hasattr(check, "func")  # help Pytype
+
+            if not hasattr(check.func, "options"):
+                raise error.InputError(
+                    _(b"check '%s' has no option '%s'") % (name, option_name)
+                )
+
+            try:
+                matching_option = next(
+                    (o for o in check.func.options if o[0] == option_name)
+                )
+            except StopIteration:
+                raise error.InputError(
+                    _(b"check '%s' has no option '%s'") % (name, option_name)
+                )
+
+            # transform the argument from cli string to the expected Python type
+            _name, typ, _docstring = matching_option
+
+            as_typed = None
+            if isinstance(typ, bool):
+                as_bool = stringutil.parsebool(value)
+                if as_bool is None:
+                    raise error.InputError(
+                        _(b"'%s' is not a boolean ('%s')") % (path, value)
+                    )
+                as_typed = as_bool
+            elif isinstance(typ, list):
+                as_list = stringutil.parselist(value)
+                if as_list is None:
+                    raise error.InputError(
+                        _(b"'%s' is not a list ('%s')") % (path, value)
+                    )
+                as_typed = as_list
+            else:
+                raise error.ProgrammingError(
+                    b"unsupported type %s" % type(typ)
+                )
+
+            if option_name in to_modify[name]:
+                raise error.InputError(
+                    _(b"duplicated option '%s' for '%s'") % (option_name, name)
+                )
+            else:
+                assert as_typed is not None
+                to_modify[name][option_name] = as_typed
+
+    # Handle the case where a check is selected without command line options;
+    # it will later get the default check option values
+    for name, f in checks.items():
+        if name not in to_modify:
+            to_modify[name] = {}
+
+    # Merge default options with command line options
+    for check_name, cmd_options in to_modify.items():
+        check = checks.get(check_name)
+        func = checks[check_name]
+        merged_options = {}
+        # help Pytype
+        assert check is not None
+        assert check.func is not None
+        assert hasattr(check.func, "options")
+
+        if check.func.options:
+            # copy the default value in case it's mutable (list, etc.)
+            merged_options = {
+                o[0]: copy.deepcopy(o[1]) for o in check.func.options
+            }
+            if cmd_options:
+                for k, v in cmd_options.items():
+                    merged_options[k] = v
+        options = pycompat.strkwargs(merged_options)
+        checks[check_name] = functools.partial(func, **options)
+        ui.debug(b"merged options for '%s': '%r'\n" % (check_name, options))
+
+    return checks
+
+
+def get_checks(
+    repo,
+    ui,
+    names=None,
+    options=None,
+    table=None,
+    alias_table=None,
+    full_pyramid=None,
+):
+    """Given a list of function names and optionally a list of
+    options, return matched checks with merged options (command line option
+    values take precedence over default ones)
+
+    It runs find_checks(), then resolves options and returns a dict of
+    matched functions with resolved options.
+    """
+    funcs = {}
+
+    if names is None:
+        names = []
+
+    if options is None:
+        options = []
+
+    # find checks
+    for name in names:
+        matched = find_checks(
+            name,
+            table=table,
+            alias_table=alias_table,
+            full_pyramid=full_pyramid,
+        )
+        matched_names = b", ".join(matched)
+        ui.debug(b"found checks '%s' for name '%s'\n" % (matched_names, name))
+        funcs.update(matched)
+
+    funcs = {n: functools.partial(f, ui, repo) for n, f in funcs.items()}
+
+    # resolve options
+    checks = pass_options(
+        ui,
+        funcs,
+        options,
+        table=table,
+        alias_table=alias_table,
+        full_pyramid=full_pyramid,
+    )
+
+    return checks
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/admin_commands.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/admin_commands.py Wed Oct 11 02:02:46 2023 +0200
@@ -0,0 +1,49 @@
+# admin_commands.py - command processing for admin* commands
+#
+# Copyright 2022 Mercurial Developers
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from .i18n import _
+from .admin import verify
+from . import error, registrar, transaction
+
+
+table = {}
+command = registrar.command(table)
+
+
+@command(
+    b'admin::verify',
+    [
+        (b'c', b'check', [], _(b'add a check'), _(b'CHECK')),
+        (b'o', b'option', [], _(b'pass an option to a check'), _(b'OPTION')),
+    ],
+    helpcategory=command.CATEGORY_MAINTENANCE,
+)
+def admin_verify(ui, repo, **opts):
+    """verify the integrity of the repository
+
+    Alternative UI to `hg verify` with a lot more control over the
+    verification process and better error reporting.
+ """ + + if not repo.url().startswith(b'file:'): + raise error.Abort(_(b"cannot verify bundle or remote repos")) + + if transaction.has_abandoned_transaction(repo): + ui.warn(_(b"abandoned transaction found - run hg recover\n")) + + checks = opts.get("check", []) + options = opts.get("option", []) + + funcs = verify.get_checks(repo, ui, names=checks, options=options) + + ui.status(_(b"running %d checks\n") % len(funcs)) + # Done in two times so the execution is separated from the resolving step + for name, func in sorted(funcs.items(), key=lambda x: x[0]): + ui.status(_(b"running %s\n") % name) + errors = func() + if errors: + ui.warn(_(b"found %d errors\n") % len(errors)) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/bookmarks.py --- a/mercurial/bookmarks.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/bookmarks.py Wed Oct 11 02:02:46 2023 +0200 @@ -14,7 +14,6 @@ hex, short, ) -from .pycompat import getattr from . import ( encoding, error, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/bundle2.py --- a/mercurial/bundle2.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/bundle2.py Wed Oct 11 02:02:46 2023 +0200 @@ -980,7 +980,7 @@ def close(self): """close underlying file""" - if util.safehasattr(self._fp, 'close'): + if hasattr(self._fp, 'close'): return self._fp.close() @@ -1068,7 +1068,7 @@ The new part have the very same content but no partid assigned yet. Parts with generated data cannot be copied.""" - assert not util.safehasattr(self.data, 'next') + assert not hasattr(self.data, 'next') return self.__class__( self.type, self._mandatoryparams, @@ -1137,9 +1137,7 @@ msg.append(b')') if not self.data: msg.append(b' empty payload') - elif util.safehasattr(self.data, 'next') or util.safehasattr( - self.data, b'__next__' - ): + elif hasattr(self.data, 'next') or hasattr(self.data, '__next__'): msg.append(b' streamed payload') else: msg.append(b' %i bytes payload' % len(self.data)) @@ -1233,9 +1231,7 @@ Exists to handle the different methods to provide data to a part.""" # we only support fixed size data now. # This will be improved in the future. - if util.safehasattr(self.data, 'next') or util.safehasattr( - self.data, '__next__' - ): + if hasattr(self.data, 'next') or hasattr(self.data, '__next__'): buff = util.chunkbuffer(self.data) chunk = buff.read(preferedchunksize) while chunk: @@ -1380,9 +1376,7 @@ def __init__(self, ui, header, fp): super(unbundlepart, self).__init__(fp) - self._seekable = util.safehasattr(fp, 'seek') and util.safehasattr( - fp, 'tell' - ) + self._seekable = hasattr(fp, 'seek') and hasattr(fp, 'tell') self.ui = ui # unbundle state attr self._headerdata = header diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/bundlerepo.py --- a/mercurial/bundlerepo.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/bundlerepo.py Wed Oct 11 02:02:46 2023 +0200 @@ -245,7 +245,7 @@ class bundlephasecache(phases.phasecache): def __init__(self, *args, **kwargs): super(bundlephasecache, self).__init__(*args, **kwargs) - if util.safehasattr(self, 'opener'): + if hasattr(self, 'opener'): self.opener = vfsmod.readonlyvfs(self.opener) def write(self): diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/changegroup.py --- a/mercurial/changegroup.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/changegroup.py Wed Oct 11 02:02:46 2023 +0200 @@ -1043,7 +1043,7 @@ return i # We failed to resolve a parent for this node, so # we crash the changegroup construction. 
- if util.safehasattr(store, 'target'): + if hasattr(store, 'target'): target = store.display_id else: # some revlog not actually a revlog diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/chgserver.py --- a/mercurial/chgserver.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/chgserver.py Wed Oct 11 02:02:46 2023 +0200 @@ -49,10 +49,6 @@ import time from .i18n import _ -from .pycompat import ( - getattr, - setattr, -) from .node import hex from . import ( @@ -236,7 +232,7 @@ # will behave differently (i.e. write to stdout). if ( out is not self.fout - or not util.safehasattr(self.fout, 'fileno') + or not hasattr(self.fout, 'fileno') or self.fout.fileno() != procutil.stdout.fileno() or self._finoutredirected ): @@ -260,9 +256,9 @@ from . import dispatch # avoid cycle newui = srcui.__class__.load() - for a in [b'fin', b'fout', b'ferr', b'environ']: + for a in ['fin', 'fout', 'ferr', 'environ']: setattr(newui, a, getattr(srcui, a)) - if util.safehasattr(srcui, '_csystem'): + if hasattr(srcui, '_csystem'): newui._csystem = srcui._csystem # command line args @@ -348,9 +344,9 @@ _iochannels = [ # server.ch, ui.fp, mode - (b'cin', b'fin', 'rb'), - (b'cout', b'fout', 'wb'), - (b'cerr', b'ferr', 'wb'), + ('cin', 'fin', 'rb'), + ('cout', 'fout', 'wb'), + ('cerr', 'ferr', 'wb'), ] @@ -603,7 +599,7 @@ } ) - if util.safehasattr(procutil, 'setprocname'): + if hasattr(procutil, 'setprocname'): def setprocname(self): """Change process title""" diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/cmdutil.py --- a/mercurial/cmdutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/cmdutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -28,9 +28,7 @@ short, ) from .pycompat import ( - getattr, open, - setattr, ) from .thirdparty import attr @@ -813,18 +811,17 @@ # creating a dirnode object for the root of the repo rootobj = dirnode(b'') pstatus = ( - b'modified', - b'added', - b'deleted', - b'clean', - b'unknown', - b'ignored', - b'removed', + ('modified', b'm'), + ('added', b'a'), + ('deleted', b'd'), + ('clean', b'c'), + ('unknown', b'u'), + ('ignored', b'i'), + ('removed', b'r'), ) tersedict = {} - for attrname in pstatus: - statuschar = attrname[0:1] + for attrname, statuschar in pstatus: for f in getattr(statuslist, attrname): rootobj.addfile(f, statuschar) tersedict[statuschar] = [] @@ -1007,7 +1004,7 @@ raise error.UnknownCommand(cmd, allcmds) -def changebranch(ui, repo, revs, label, opts): +def changebranch(ui, repo, revs, label, **opts): """Change the branch name of given revs to label""" with repo.wlock(), repo.lock(), repo.transaction(b'branches'): @@ -1026,7 +1023,7 @@ root = repo[roots.first()] rpb = {parent.branch() for parent in root.parents()} if ( - not opts.get(b'force') + not opts.get('force') and label not in rpb and label in repo.branchmap() ): @@ -1450,7 +1447,7 @@ if returnrevlog: if isinstance(r, revlog.revlog): pass - elif util.safehasattr(r, '_revlog'): + elif hasattr(r, '_revlog'): r = r._revlog # pytype: disable=attribute-error elif r is not None: raise error.InputError( @@ -3329,9 +3326,7 @@ return b"\n".join(edittext) -def commitstatus(repo, node, branch, bheads=None, tip=None, opts=None): - if opts is None: - opts = {} +def commitstatus(repo, node, branch, bheads=None, tip=None, **opts): ctx = repo[node] parents = ctx.parents() @@ -3341,7 +3336,7 @@ # for most instances repo.ui.warn(_(b"warning: commit already existed in the repository!\n")) elif ( - not opts.get(b'amend') + not opts.get('amend') and bheads and node not in bheads and not any( @@ -3378,7 +3373,7 @@ # # H H n head merge: head 
count decreases - if not opts.get(b'close_branch'): + if not opts.get('close_branch'): for r in parents: if r.closesbranch() and r.branch() == branch: repo.ui.status( diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/color.py --- a/mercurial/color.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/color.py Wed Oct 11 02:02:46 2023 +0200 @@ -9,7 +9,6 @@ import re from .i18n import _ -from .pycompat import getattr from . import ( encoding, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/commands.py --- a/mercurial/commands.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/commands.py Wed Oct 11 02:02:46 2023 +0200 @@ -18,8 +18,8 @@ short, wdirrev, ) -from .pycompat import open from . import ( + admin_commands as admin_commands_mod, archival, bookmarks, bundle2, @@ -76,6 +76,7 @@ table = {} table.update(debugcommandsmod.command._table) +table.update(admin_commands_mod.command._table) command = registrar.command(table) INTENT_READONLY = registrar.INTENT_READONLY @@ -646,8 +647,7 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) - rev = opts.get(b'rev') + rev = opts.get('rev') if rev: repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') ctx = logcmdutil.revsingle(repo, rev) @@ -660,8 +660,8 @@ if os.path.realpath(dest) == repo.root: raise error.InputError(_(b'repository root cannot be destination')) - kind = opts.get(b'type') or archival.guesskind(dest) or b'files' - prefix = opts.get(b'prefix') + kind = opts.get('type') or archival.guesskind(dest) or b'files' + prefix = opts.get('prefix') if dest == b'-': if kind == b'files': @@ -671,16 +671,16 @@ prefix = os.path.basename(repo.root) + b'-%h' prefix = cmdutil.makefilename(ctx, prefix) - match = scmutil.match(ctx, [], opts) + match = scmutil.match(ctx, [], pycompat.byteskwargs(opts)) archival.archive( repo, dest, node, kind, - not opts.get(b'no_decode'), + not opts.get('no_decode'), match, prefix, - subrepos=opts.get(b'subrepos'), + subrepos=opts.get('subrepos'), ) @@ -775,7 +775,6 @@ def _dobackout(ui, repo, node=None, rev=None, **opts): cmdutil.check_incompatible_arguments(opts, 'no_commit', ['commit', 'merge']) - opts = pycompat.byteskwargs(opts) if rev and node: raise error.InputError(_(b"please specify just one revision")) @@ -786,9 +785,9 @@ if not rev: raise error.InputError(_(b"please specify a revision to backout")) - date = opts.get(b'date') + date = opts.get('date') if date: - opts[b'date'] = dateutil.parsedate(date) + opts['date'] = dateutil.parsedate(date) cmdutil.checkunfinished(repo) cmdutil.bailifchanged(repo) @@ -805,16 +804,16 @@ if p1 == repo.nullid: raise error.InputError(_(b'cannot backout a change with no parents')) if p2 != repo.nullid: - if not opts.get(b'parent'): + if not opts.get('parent'): raise error.InputError(_(b'cannot backout a merge changeset')) - p = repo.lookup(opts[b'parent']) + p = repo.lookup(opts['parent']) if p not in (p1, p2): raise error.InputError( _(b'%s is not a parent of %s') % (short(p), short(node)) ) parent = p else: - if opts.get(b'parent'): + if opts.get('parent'): raise error.InputError( _(b'cannot use --parent on non-merge changeset') ) @@ -824,9 +823,9 @@ branch = repo.dirstate.branch() bheads = repo.branchheads(branch) rctx = scmutil.revsingle(repo, hex(parent)) - if not opts.get(b'merge') and op1 != node: + if not opts.get('merge') and op1 != node: with repo.transaction(b"backout"): - overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')} + overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')} with ui.configoverride(overrides, b'backout'): stats = 
mergemod.back_out(ctx, parent=repo[parent]) repo.setparents(op1, op2) @@ -841,7 +840,7 @@ repo.dirstate.setbranch(branch, repo.currenttransaction()) cmdutil.revert(ui, repo, rctx) - if opts.get(b'no_commit'): + if opts.get('no_commit'): msg = _(b"changeset %s backed out, don't forget to commit.\n") ui.status(msg % short(node)) return 0 @@ -862,7 +861,9 @@ # save to detect changes tip = repo.changelog.tip() - newnode = cmdutil.commit(ui, repo, commitfunc, [], opts) + newnode = cmdutil.commit( + ui, repo, commitfunc, [], pycompat.byteskwargs(opts) + ) if not newnode: ui.status(_(b"nothing changed\n")) return 1 @@ -875,10 +876,10 @@ _(b'changeset %s backs out changeset %s\n') % (nice(newnode), nice(node)) ) - if opts.get(b'merge') and op1 != node: + if opts.get('merge') and op1 != node: hg.clean(repo, op1, show_stats=False) ui.status(_(b'merging with changeset %s\n') % nice(newnode)) - overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')} + overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')} with ui.configoverride(overrides, b'backout'): return hg.merge(repo[b'tip']) return 0 @@ -1239,56 +1240,55 @@ hg book -ql . """ - opts = pycompat.byteskwargs(opts) - force = opts.get(b'force') - rev = opts.get(b'rev') - inactive = opts.get(b'inactive') # meaning add/rename to inactive bookmark - - action = cmdutil.check_at_most_one_arg(opts, b'delete', b'rename', b'list') + force = opts.get('force') + rev = opts.get('rev') + inactive = opts.get('inactive') # meaning add/rename to inactive bookmark + + action = cmdutil.check_at_most_one_arg(opts, 'delete', 'rename', 'list') if action: - cmdutil.check_incompatible_arguments(opts, action, [b'rev']) + cmdutil.check_incompatible_arguments(opts, action, ['rev']) elif names or rev: - action = b'add' + action = 'add' elif inactive: - action = b'inactive' # meaning deactivate + action = 'inactive' # meaning deactivate else: - action = b'list' - - cmdutil.check_incompatible_arguments( - opts, b'inactive', [b'delete', b'list'] - ) - if not names and action in {b'add', b'delete'}: + action = 'list' + + cmdutil.check_incompatible_arguments(opts, 'inactive', ['delete', 'list']) + if not names and action in {'add', 'delete'}: raise error.InputError(_(b"bookmark name required")) - if action in {b'add', b'delete', b'rename', b'inactive'}: + if action in {'add', 'delete', 'rename', 'inactive'}: with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr: - if action == b'delete': + if action == 'delete': names = pycompat.maplist(repo._bookmarks.expandname, names) bookmarks.delete(repo, tr, names) - elif action == b'rename': + elif action == 'rename': if not names: raise error.InputError(_(b"new bookmark name required")) elif len(names) > 1: raise error.InputError( _(b"only one new bookmark name allowed") ) - oldname = repo._bookmarks.expandname(opts[b'rename']) + oldname = repo._bookmarks.expandname(opts['rename']) bookmarks.rename(repo, tr, oldname, names[0], force, inactive) - elif action == b'add': + elif action == 'add': bookmarks.addbookmarks(repo, tr, names, rev, force, inactive) - elif action == b'inactive': + elif action == 'inactive': if len(repo._bookmarks) == 0: ui.status(_(b"no bookmarks set\n")) elif not repo._activebookmark: ui.status(_(b"no active bookmark\n")) else: bookmarks.deactivate(repo) - elif action == b'list': + elif action == 'list': names = pycompat.maplist(repo._bookmarks.expandname, names) - with ui.formatter(b'bookmarks', opts) as fm: + with ui.formatter(b'bookmarks', pycompat.byteskwargs(opts)) as fm: 
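
A recurring pattern in the hunks above: command options now arrive with native str keys, and pycompat.byteskwargs is applied only at the boundaries (ui.formatter, scmutil.match, cmdutil.commit) that still expect bytes-keyed dicts. A rough, self-contained sketch of the two pycompat helpers involved, assuming the latin-1 key round-trip; the real implementations live in mercurial/pycompat.py:

    def strkwargs(dic):
        # bytes keys -> native str keys, e.g. {b'rev': v} -> {'rev': v}
        return {k.decode('latin-1'): v for k, v in dic.items()}

    def byteskwargs(dic):
        # native str keys -> bytes keys, e.g. {'rev': v} -> {b'rev': v}
        return {k.encode('latin-1'): v for k, v in dic.items()}

    opts = {'rev': b'tip', 'force': False}
    assert byteskwargs(opts) == {b'rev': b'tip', b'force': False}
    assert strkwargs(byteskwargs(opts)) == opts
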
bookmarks.printbookmarks(ui, repo, fm, names) else: - raise error.ProgrammingError(b'invalid action: %s' % action) + raise error.ProgrammingError( + b'invalid action: %s' % pycompat.sysbytes(action) + ) @command( @@ -1340,12 +1340,11 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) - revs = opts.get(b'rev') + revs = opts.get('rev') if label: label = label.strip() - if not opts.get(b'clean') and not label: + if not opts.get('clean') and not label: if revs: raise error.InputError( _(b"no branch name specified for the revisions") @@ -1354,7 +1353,7 @@ return with repo.wlock(): - if opts.get(b'clean'): + if opts.get('clean'): label = repo[b'.'].branch() repo.dirstate.setbranch(label, repo.currenttransaction()) ui.status(_(b'reset working directory to branch %s\n') % label) @@ -1362,9 +1361,9 @@ scmutil.checknewlabel(repo, label, b'branch') if revs: - return cmdutil.changebranch(ui, repo, revs, label, opts) - - if not opts.get(b'force') and label in repo.branchmap(): + return cmdutil.changebranch(ui, repo, revs, label, **opts) + + if not opts.get('force') and label in repo.branchmap(): if label not in [p.branch() for p in repo[None].parents()]: raise error.InputError( _(b'a branch of the same name already exists'), @@ -1428,8 +1427,7 @@ Returns 0. """ - opts = pycompat.byteskwargs(opts) - revs = opts.get(b'rev') + revs = opts.get('rev') selectedbranches = None if revs: revs = logcmdutil.revrange(repo, revs) @@ -1437,7 +1435,7 @@ selectedbranches = {getbi(r)[0] for r in revs} ui.pager(b'branches') - fm = ui.formatter(b'branches', opts) + fm = ui.formatter(b'branches', pycompat.byteskwargs(opts)) hexfunc = fm.hexfunc allheads = set(repo.heads()) @@ -1568,16 +1566,15 @@ Returns 0 on success, 1 if no changes found. """ - opts = pycompat.byteskwargs(opts) revs = None - if b'rev' in opts: - revstrings = opts[b'rev'] + if 'rev' in opts: + revstrings = opts['rev'] revs = logcmdutil.revrange(repo, revstrings) if revstrings and not revs: raise error.InputError(_(b'no commits to bundle')) - bundletype = opts.get(b'type', b'bzip2').lower() + bundletype = opts.get('type', b'bzip2').lower() try: bundlespec = bundlecaches.parsebundlespec( repo, bundletype, strict=False @@ -1596,28 +1593,28 @@ hint=_(b"use 'hg debugcreatestreamclonebundle'"), ) - if opts.get(b'all'): + if opts.get('all'): if dests: raise error.InputError( _(b"--all is incompatible with specifying destinations") ) - if opts.get(b'base'): + if opts.get('base'): ui.warn(_(b"ignoring --base because --all was specified\n")) - if opts.get(b'exact'): + if opts.get('exact'): ui.warn(_(b"ignoring --exact because --all was specified\n")) base = [nullrev] - elif opts.get(b'exact'): + elif opts.get('exact'): if dests: raise error.InputError( _(b"--exact is incompatible with specifying destinations") ) - if opts.get(b'base'): + if opts.get('base'): ui.warn(_(b"ignoring --base because --exact was specified\n")) base = repo.revs(b'parents(%ld) - %ld', revs, revs) if not base: base = [nullrev] else: - base = logcmdutil.revrange(repo, opts.get(b'base')) + base = logcmdutil.revrange(repo, opts.get('base')) if cgversion not in changegroup.supportedoutgoingversions(repo): raise error.Abort( _(b"repository does not support bundle version %s") % cgversion @@ -1638,7 +1635,7 @@ missing = set() excluded = set() for path in urlutil.get_push_paths(repo, ui, dests): - other = hg.peer(repo, opts, path) + other = hg.peer(repo, pycompat.byteskwargs(opts), path) if revs is not None: hex_revs = [repo[r].hex() for r in revs] else: @@ -1656,7 +1653,7 @@ repo, 
other, onlyheads=heads, - force=opts.get(b'force'), + force=opts.get('force'), portable=True, ) missing.update(outgoing.missing) @@ -1794,25 +1791,22 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) - rev = opts.get(b'rev') + rev = opts.get('rev') if rev: repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') ctx = logcmdutil.revsingle(repo, rev) - m = scmutil.match(ctx, (file1,) + pats, opts) - fntemplate = opts.pop(b'output', b'') + m = scmutil.match(ctx, (file1,) + pats, pycompat.byteskwargs(opts)) + fntemplate = opts.pop('output', b'') if cmdutil.isstdiofilename(fntemplate): fntemplate = b'' if fntemplate: - fm = formatter.nullformatter(ui, b'cat', opts) + fm = formatter.nullformatter(ui, b'cat', pycompat.byteskwargs(opts)) else: ui.pager(b'cat') - fm = ui.formatter(b'cat', opts) + fm = ui.formatter(b'cat', pycompat.byteskwargs(opts)) with fm: - return cmdutil.cat( - ui, repo, ctx, m, fm, fntemplate, b'', **pycompat.strkwargs(opts) - ) + return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, b'', **opts) @command( @@ -1972,37 +1966,36 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) - cmdutil.check_at_most_one_arg(opts, b'noupdate', b'updaterev') + cmdutil.check_at_most_one_arg(opts, 'noupdate', 'updaterev') # --include/--exclude can come from narrow or sparse. includepats, excludepats = None, None # hg.clone() differentiates between None and an empty set. So make sure # patterns are sets if narrow is requested without patterns. - if opts.get(b'narrow'): + if opts.get('narrow'): includepats = set() excludepats = set() - if opts.get(b'include'): - includepats = narrowspec.parsepatterns(opts.get(b'include')) - if opts.get(b'exclude'): - excludepats = narrowspec.parsepatterns(opts.get(b'exclude')) + if opts.get('include'): + includepats = narrowspec.parsepatterns(opts.get('include')) + if opts.get('exclude'): + excludepats = narrowspec.parsepatterns(opts.get('exclude')) r = hg.clone( ui, - opts, + pycompat.byteskwargs(opts), source, dest, - pull=opts.get(b'pull'), - stream=opts.get(b'stream') or opts.get(b'uncompressed'), - revs=opts.get(b'rev'), - update=opts.get(b'updaterev') or not opts.get(b'noupdate'), - branch=opts.get(b'branch'), - shareopts=opts.get(b'shareopts'), + pull=opts.get('pull'), + stream=opts.get('stream') or opts.get('uncompressed'), + revs=opts.get('rev'), + update=opts.get('updaterev') or not opts.get('noupdate'), + branch=opts.get('branch'), + shareopts=opts.get('shareopts'), storeincludepats=includepats, storeexcludepats=excludepats, - depth=opts.get(b'depth') or None, + depth=opts.get('depth') or None, ) return r is None @@ -2178,7 +2171,6 @@ cmdutil.checkunfinished(repo) node = cmdutil.amend(ui, repo, old, extra, pats, opts) - opts = pycompat.byteskwargs(opts) if node == old.node(): ui.status(_(b"nothing changed\n")) return 1 @@ -2209,11 +2201,14 @@ extra=extra, ) - opts = pycompat.byteskwargs(opts) - node = cmdutil.commit(ui, repo, commitfunc, pats, opts) + node = cmdutil.commit( + ui, repo, commitfunc, pats, pycompat.byteskwargs(opts) + ) if not node: - stat = cmdutil.postcommitstatus(repo, pats, opts) + stat = cmdutil.postcommitstatus( + repo, pats, pycompat.byteskwargs(opts) + ) if stat.deleted: ui.status( _( @@ -2226,7 +2221,7 @@ ui.status(_(b"nothing changed\n")) return 1 - cmdutil.commitstatus(repo, node, branch, bheads, tip, opts) + cmdutil.commitstatus(repo, node, branch, bheads, tip, **opts) if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'): status( @@ -2237,7 +2232,7 @@ removed=True, deleted=True, 
unknown=True, - subrepos=opts.get(b'subrepos'), + subrepos=opts.get('subrepos'), ) @@ -2319,19 +2314,18 @@ """ - opts = pycompat.byteskwargs(opts) - editopts = (b'edit', b'local', b'global', b'shared', b'non_shared') + editopts = ('edit', 'local', 'global', 'shared', 'non_shared') if any(opts.get(o) for o in editopts): cmdutil.check_at_most_one_arg(opts, *editopts[1:]) - if opts.get(b'local'): + if opts.get('local'): if not repo: raise error.InputError( _(b"can't use --local outside a repository") ) paths = [repo.vfs.join(b'hgrc')] - elif opts.get(b'global'): + elif opts.get('global'): paths = rcutil.systemrcpath() - elif opts.get(b'shared'): + elif opts.get('shared'): if not repo.shared(): raise error.InputError( _(b"repository is not shared; can't use --shared") @@ -2344,7 +2338,7 @@ ) ) paths = [vfsmod.vfs(repo.sharedpath).join(b'hgrc')] - elif opts.get(b'non_shared'): + elif opts.get('non_shared'): paths = [repo.vfs.join(b'hgrc-not-shared')] else: paths = rcutil.userrcpath() @@ -2353,17 +2347,15 @@ if os.path.exists(f): break else: - if opts.get(b'global'): + if opts.get('global'): samplehgrc = uimod.samplehgrcs[b'global'] - elif opts.get(b'local'): + elif opts.get('local'): samplehgrc = uimod.samplehgrcs[b'local'] else: samplehgrc = uimod.samplehgrcs[b'user'] f = paths[0] - fp = open(f, b"wb") - fp.write(util.tonativeeol(samplehgrc)) - fp.close() + util.writefile(f, util.tonativeeol(samplehgrc)) editor = ui.geteditor() ui.system( @@ -2374,7 +2366,7 @@ ) return ui.pager(b'config') - fm = ui.formatter(b'config', opts) + fm = ui.formatter(b'config', pycompat.byteskwargs(opts)) for t, f in rcutil.rccomponents(): if t == b'path': ui.debug(b'read config from: %s\n' % f) @@ -2385,7 +2377,7 @@ pass else: raise error.ProgrammingError(b'unknown rctype: %s' % t) - untrusted = bool(opts.get(b'untrusted')) + untrusted = bool(opts.get('untrusted')) selsections = selentries = [] if values: @@ -2396,8 +2388,8 @@ selentries = set(selentries) matched = False - all_known = opts[b'exp_all_known'] - show_source = ui.debugflag or opts.get(b'source') + all_known = opts['exp_all_known'] + show_source = ui.debugflag or opts.get('source') entries = ui.walkconfig(untrusted=untrusted, all_known=all_known) for section, name, value in entries: source = ui.configsource(section, name, untrusted) @@ -2506,11 +2498,10 @@ Returns 0 on success, 1 if errors are encountered. """ - opts = pycompat.byteskwargs(opts) context = lambda repo: repo.dirstate.changing_files(repo) - rev = opts.get(b'at_rev') - ctx = None + rev = opts.get('at_rev') + if rev: ctx = logcmdutil.revsingle(repo, rev) if ctx.rev() is not None: @@ -2518,9 +2509,9 @@ def context(repo): return util.nullcontextmanager() - opts[b'at_rev'] = ctx.rev() + opts['at_rev'] = ctx.rev() with repo.wlock(), context(repo): - return cmdutil.copy(ui, repo, pats, opts) + return cmdutil.copy(ui, repo, pats, pycompat.byteskwargs(opts)) @command( @@ -2984,13 +2975,12 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) if not pats: raise error.InputError(_(b'no files specified')) with repo.wlock(), repo.dirstate.changing_files(repo): - m = scmutil.match(repo[None], pats, opts) - dryrun, interactive = opts.get(b'dry_run'), opts.get(b'interactive') + m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts)) + dryrun, interactive = opts.get('dry_run'), opts.get('interactive') uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) rejected = cmdutil.forget( ui, @@ -3755,19 +3745,18 @@ Returns 0 if matching heads are found, 1 if not. 
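
The config hunk above folds a manual open/write/close sequence into a single util.writefile call. A minimal sketch of what that helper amounts to, assuming the plain binary-write semantics the replaced lines suggest:

    def writefile(path, data):
        # open(path, 'wb') + write + close, with the file reliably
        # closed even if the write raises
        with open(path, 'wb') as fp:
            fp.write(data)

so the hunk's util.writefile(f, util.tonativeeol(samplehgrc)) writes the sample hgrc in one call.
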
""" - opts = pycompat.byteskwargs(opts) start = None - rev = opts.get(b'rev') + rev = opts.get('rev') if rev: repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') start = logcmdutil.revsingle(repo, rev, None).node() - if opts.get(b'topo'): + if opts.get('topo'): heads = [repo[h] for h in repo.heads(start)] else: heads = [] for branch in repo.branchmap(): - heads += repo.branchheads(branch, start, opts.get(b'closed')) + heads += repo.branchheads(branch, start, opts.get('closed')) heads = [repo[h] for h in heads] if branchrevs: @@ -3776,7 +3765,7 @@ } heads = [h for h in heads if h.branch() in branches] - if opts.get(b'active') and branchrevs: + if opts.get('active') and branchrevs: dagheads = repo.heads(start) heads = [h for h in heads if h.node() in dagheads] @@ -3785,8 +3774,8 @@ if branches - haveheads: headless = b', '.join(b for b in branches - haveheads) msg = _(b'no open branch heads found on branches %s') - if opts.get(b'rev'): - msg += _(b' (started at %s)') % opts[b'rev'] + if opts.get('rev'): + msg += _(b' (started at %s)') % opts['rev'] ui.warn((msg + b'\n') % headless) if not heads: @@ -3794,7 +3783,9 @@ ui.pager(b'heads') heads = sorted(heads, key=lambda x: -(x.rev())) - displayer = logcmdutil.changesetdisplayer(ui, repo, opts) + displayer = logcmdutil.changesetdisplayer( + ui, repo, pycompat.byteskwargs(opts) + ) for ctx in heads: displayer.show(ctx) displayer.close() @@ -4221,20 +4212,20 @@ opts, 'no_commit', ['bypass', 'secret'] ) cmdutil.check_incompatible_arguments(opts, 'exact', ['edit', 'prefix']) - opts = pycompat.byteskwargs(opts) + if not patch1: raise error.InputError(_(b'need at least one patch to import')) patches = (patch1,) + patches - date = opts.get(b'date') + date = opts.get('date') if date: - opts[b'date'] = dateutil.parsedate(date) - - exact = opts.get(b'exact') - update = not opts.get(b'bypass') + opts['date'] = dateutil.parsedate(date) + + exact = opts.get('exact') + update = not opts.get('bypass') try: - sim = float(opts.get(b'similarity') or 0) + sim = float(opts.get('similarity') or 0) except ValueError: raise error.InputError(_(b'similarity must be a number')) if sim < 0 or sim > 100: @@ -4242,17 +4233,17 @@ if sim and not update: raise error.InputError(_(b'cannot use --similarity with --bypass')) - base = opts[b"base"] + base = opts["base"] msgs = [] ret = 0 with repo.wlock(): if update: cmdutil.checkunfinished(repo) - if exact or not opts.get(b'force'): + if exact or not opts.get('force'): cmdutil.bailifchanged(repo) - if not opts.get(b'no_commit'): + if not opts.get('no_commit'): lock = repo.lock tr = lambda: repo.transaction(b'import') else: @@ -4274,7 +4265,13 @@ for hunk in patch.split(patchfile): with patch.extract(ui, hunk) as patchdata: msg, node, rej = cmdutil.tryimportone( - ui, repo, patchdata, parents, opts, msgs, hg.clean + ui, + repo, + patchdata, + parents, + pycompat.byteskwargs(opts), + msgs, + hg.clean, ) if msg: haspatch = True @@ -4502,16 +4499,19 @@ Returns 0 if a match is found, 1 otherwise. 
""" - opts = pycompat.byteskwargs(opts) - if opts.get(b'print0'): + if opts.get('print0'): end = b'\0' else: end = b'\n' - ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None) + ctx = logcmdutil.revsingle(repo, opts.get('rev'), None) ret = 1 m = scmutil.match( - ctx, pats, opts, default=b'relglob', badfn=lambda x, y: False + ctx, + pats, + pycompat.byteskwargs(opts), + default=b'relglob', + badfn=lambda x, y: False, ) ui.pager(b'locate') @@ -4523,7 +4523,7 @@ filesgen = ctx.matches(m) uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats)) for abs in filesgen: - if opts.get(b'fullpath'): + if opts.get('fullpath'): ui.write(repo.wjoin(abs), end) else: ui.write(uipathfn(abs), end) @@ -4823,10 +4823,9 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) - fm = ui.formatter(b'manifest', opts) - - if opts.get(b'all'): + fm = ui.formatter(b'manifest', pycompat.byteskwargs(opts)) + + if opts.get('all'): if rev or node: raise error.InputError(_(b"can't specify a revision with --all")) @@ -4917,11 +4916,10 @@ Returns 0 on success, 1 if there are unresolved files. """ - opts = pycompat.byteskwargs(opts) - abort = opts.get(b'abort') + abort = opts.get('abort') if abort and repo.dirstate.p2() == repo.nullid: cmdutil.wrongtooltocontinue(repo, _(b'merge')) - cmdutil.check_incompatible_arguments(opts, b'abort', [b'rev', b'preview']) + cmdutil.check_incompatible_arguments(opts, 'abort', ['rev', 'preview']) if abort: state = cmdutil.getunfinishedstate(repo) if state and state._opname != b'merge': @@ -4933,10 +4931,10 @@ raise error.InputError(_(b"cannot specify a node with --abort")) return hg.abortmerge(repo.ui, repo) - if opts.get(b'rev') and node: + if opts.get('rev') and node: raise error.InputError(_(b"please specify just one revision")) if not node: - node = opts.get(b'rev') + node = opts.get('rev') if node: ctx = logcmdutil.revsingle(repo, node) @@ -4955,22 +4953,24 @@ _(b'merging with the working copy has no effect') ) - if opts.get(b'preview'): + if opts.get('preview'): # find nodes that are ancestors of p2 but not of p1 p1 = repo[b'.'].node() p2 = ctx.node() nodes = repo.changelog.findmissing(common=[p1], heads=[p2]) - displayer = logcmdutil.changesetdisplayer(ui, repo, opts) + displayer = logcmdutil.changesetdisplayer( + ui, repo, pycompat.byteskwargs(opts) + ) for node in nodes: displayer.show(repo[node]) displayer.close() return 0 # ui.forcemerge is an internal variable, do not document - overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')} + overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')} with ui.configoverride(overrides, b'merge'): - force = opts.get(b'force') + force = opts.get('force') labels = [b'working copy', b'merge rev', b'common ancestor'] return hg.merge(ctx, force=force, labels=labels) @@ -5198,12 +5198,10 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) - pathitems = urlutil.list_paths(ui, search) ui.pager(b'paths') - fm = ui.formatter(b'paths', opts) + fm = ui.formatter(b'paths', pycompat.byteskwargs(opts)) if fm.isplain(): hidepassword = urlutil.hidepassword else: @@ -5457,33 +5455,37 @@ Returns 0 on success, 1 if an update had unresolved files. 
""" - opts = pycompat.byteskwargs(opts) - if ui.configbool(b'commands', b'update.requiredest') and opts.get( - b'update' - ): + if ui.configbool(b'commands', b'update.requiredest') and opts.get('update'): msg = _(b'update destination required by configuration') hint = _(b'use hg pull followed by hg update DEST') raise error.InputError(msg, hint=hint) + update_conflict = None + for path in urlutil.get_pull_paths(repo, ui, sources): ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path.loc)) ui.flush() - other = hg.peer(repo, opts, path, remotehidden=opts[b'remote_hidden']) + other = hg.peer( + repo, + pycompat.byteskwargs(opts), + path, + remotehidden=opts['remote_hidden'], + ) update_conflict = None try: - branches = (path.branch, opts.get(b'branch', [])) + branches = (path.branch, opts.get('branch', [])) revs, checkout = hg.addbranchrevs( repo, other, branches, - opts.get(b'rev'), - remotehidden=opts[b'remote_hidden'], + opts.get('rev'), + remotehidden=opts['remote_hidden'], ) pullopargs = {} nodes = None - if opts.get(b'bookmark') or revs: + if opts.get('bookmark') or revs: # The list of bookmark used here is the same used to actually update # the bookmark names, to avoid the race from issue 4689 and we do # all lookup and bookmark queries in one go so they see the same @@ -5506,7 +5508,7 @@ remotebookmarks = fremotebookmarks.result() remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks) pullopargs[b'remotebookmarks'] = remotebookmarks - for b in opts.get(b'bookmark', []): + for b in opts.get('bookmark', []): b = repo._bookmarks.expandname(b) if b not in remotebookmarks: raise error.InputError( @@ -5520,19 +5522,19 @@ checkout = node wlock = util.nullcontextmanager() - if opts.get(b'update'): + if opts.get('update'): wlock = repo.wlock() with wlock: - pullopargs.update(opts.get(b'opargs', {})) + pullopargs.update(opts.get('opargs', {})) modheads = exchange.pull( repo, other, path=path, heads=nodes, - force=opts.get(b'force'), - bookmarks=opts.get(b'bookmark', ()), + force=opts.get('force'), + bookmarks=opts.get('bookmark', ()), opargs=pullopargs, - confirm=opts.get(b'confirm'), + confirm=opts.get('confirm'), ).cgresult # brev is a name, which might be a bookmark to be activated at @@ -5546,10 +5548,10 @@ # order below depends on implementation of # hg.addbranchrevs(). opts['bookmark'] is ignored, # because 'checkout' is determined without it. - if opts.get(b'rev'): - brev = opts[b'rev'][0] - elif opts.get(b'branch'): - brev = opts[b'branch'][0] + if opts.get('rev'): + brev = opts['rev'][0] + elif opts.get('branch'): + brev = opts['branch'][0] else: brev = path.branch @@ -5559,7 +5561,7 @@ repo._subtoppath = path.loc try: update_conflict = postincoming( - ui, repo, modheads, opts.get(b'update'), checkout, brev + ui, repo, modheads, opts.get('update'), checkout, brev ) except error.FilteredRepoLookupError as exc: msg = _(b'cannot update to target: %s') % exc.args[0] @@ -5633,24 +5635,23 @@ list of files that this program would delete, use the --print option. 
""" - opts = pycompat.byteskwargs(opts) - cmdutil.check_at_most_one_arg(opts, b'all', b'ignored') - - act = not opts.get(b'print') + cmdutil.check_at_most_one_arg(opts, 'all', 'ignored') + + act = not opts.get('print') eol = b'\n' - if opts.get(b'print0'): + if opts.get('print0'): eol = b'\0' act = False # --print0 implies --print - if opts.get(b'all', False): + if opts.get('all', False): ignored = True unknown = True else: - ignored = opts.get(b'ignored', False) + ignored = opts.get('ignored', False) unknown = not ignored - removefiles = opts.get(b'files') - removedirs = opts.get(b'dirs') - confirm = opts.get(b'confirm') + removefiles = opts.get('files') + removedirs = opts.get('dirs') + confirm = opts.get('confirm') if confirm is None: try: extensions.find(b'purge') @@ -5662,7 +5663,7 @@ removefiles = True removedirs = True - match = scmutil.match(repo[None], dirs, opts) + match = scmutil.match(repo[None], dirs, pycompat.byteskwargs(opts)) paths = mergemod.purge( repo, @@ -5671,7 +5672,7 @@ ignored=ignored, removeemptydirs=removedirs, removefiles=removefiles, - abortonerror=opts.get(b'abort_on_err'), + abortonerror=opts.get('abort_on_err'), noop=not act, confirm=confirm, ) @@ -5974,15 +5975,14 @@ Returns 0 on success, 1 if any warnings encountered. """ - opts = pycompat.byteskwargs(opts) - after, force = opts.get(b'after'), opts.get(b'force') - dryrun = opts.get(b'dry_run') + after, force = opts.get('after'), opts.get('force') + dryrun = opts.get('dry_run') if not pats and not after: raise error.InputError(_(b'no files specified')) with repo.wlock(), repo.dirstate.changing_files(repo): - m = scmutil.match(repo[None], pats, opts) - subrepos = opts.get(b'subrepos') + m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts)) + subrepos = opts.get('subrepos') uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) return cmdutil.remove( ui, repo, m, b"", uipathfn, after, force, subrepos, dryrun=dryrun @@ -6034,10 +6034,9 @@ Returns 0 on success, 1 if errors are encountered. """ - opts = pycompat.byteskwargs(opts) context = lambda repo: repo.dirstate.changing_files(repo) - rev = opts.get(b'at_rev') - ctx = None + rev = opts.get('at_rev') + if rev: ctx = logcmdutil.revsingle(repo, rev) if ctx.rev() is not None: @@ -6045,9 +6044,11 @@ def context(repo): return util.nullcontextmanager() - opts[b'at_rev'] = ctx.rev() + opts['at_rev'] = ctx.rev() with repo.wlock(), context(repo): - return cmdutil.copy(ui, repo, pats, opts, rename=True) + return cmdutil.copy( + ui, repo, pats, pycompat.byteskwargs(opts), rename=True + ) @command( @@ -6398,30 +6399,29 @@ Returns 0 on success. 
""" - opts = pycompat.byteskwargs(opts) - if opts.get(b"date"): - cmdutil.check_incompatible_arguments(opts, b'date', [b'rev']) - opts[b"rev"] = cmdutil.finddate(ui, repo, opts[b"date"]) + if opts.get("date"): + cmdutil.check_incompatible_arguments(opts, 'date', ['rev']) + opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) parent, p2 = repo.dirstate.parents() - if not opts.get(b'rev') and p2 != repo.nullid: + if not opts.get('rev') and p2 != repo.nullid: # revert after merge is a trap for new users (issue2915) raise error.InputError( _(b'uncommitted merge with no revision specified'), hint=_(b"use 'hg update' or see 'hg help revert'"), ) - rev = opts.get(b'rev') + rev = opts.get('rev') if rev: repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') ctx = logcmdutil.revsingle(repo, rev) if not ( pats - or opts.get(b'include') - or opts.get(b'exclude') - or opts.get(b'all') - or opts.get(b'interactive') + or opts.get('include') + or opts.get('exclude') + or opts.get('all') + or opts.get('interactive') ): msg = _(b"no files or directories specified") if p2 != repo.nullid: @@ -6455,7 +6455,7 @@ hint = _(b"use --all to revert all files") raise error.InputError(msg, hint=hint) - return cmdutil.revert(ui, repo, ctx, *pats, **pycompat.strkwargs(opts)) + return cmdutil.revert(ui, repo, ctx, *pats, **opts) @command( @@ -7117,7 +7117,6 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) ui.pager(b'summary') ctx = repo[None] parents = ctx.parents() @@ -7306,11 +7305,13 @@ cmdutil.summaryhooks(ui, repo) - if opts.get(b'remote'): + if opts.get('remote'): needsincoming, needsoutgoing = True, True else: needsincoming, needsoutgoing = False, False - for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None): + for i, o in cmdutil.summaryremotehooks( + ui, repo, pycompat.byteskwargs(opts), None + ): if i: needsincoming = True if o: @@ -7327,7 +7328,7 @@ try: other = hg.peer(repo, {}, path) except error.RepoError: - if opts.get(b'remote'): + if opts.get('remote'): raise return path.loc, sbranch, None, None, None branches = (path.branch, []) @@ -7366,7 +7367,7 @@ try: dother = hg.peer(repo, {}, path if path is not None else dest) except error.RepoError: - if opts.get(b'remote'): + if opts.get('remote'): raise return dest, dbranch, None, None ui.debug(b'comparing with %s\n' % urlutil.hidepassword(dest)) @@ -7392,7 +7393,7 @@ else: dest = dbranch = dother = outgoing = None - if opts.get(b'remote'): + if opts.get('remote'): # Help pytype. --remote sets both `needsincoming` and `needsoutgoing`. # The former always sets `sother` (or raises an exception if it can't); # the latter always sets `outgoing`. @@ -7423,7 +7424,7 @@ cmdutil.summaryremotehooks( ui, repo, - opts, + pycompat.byteskwargs(opts), ( (source, sbranch, sother, commoninc), (dest, dbranch, dother, outgoing), @@ -7479,7 +7480,7 @@ Returns 0 on success. """ cmdutil.check_incompatible_arguments(opts, 'remove', ['rev']) - opts = pycompat.byteskwargs(opts) + with repo.wlock(), repo.lock(): rev_ = b"." 
names = [t.strip() for t in (name1,) + names] @@ -7491,11 +7492,11 @@ raise error.InputError( _(b'tag names cannot consist entirely of whitespace') ) - if opts.get(b'rev'): - rev_ = opts[b'rev'] - message = opts.get(b'message') - if opts.get(b'remove'): - if opts.get(b'local'): + if opts.get('rev'): + rev_ = opts['rev'] + message = opts.get('message') + if opts.get('remove'): + if opts.get('local'): expectedtype = b'local' else: expectedtype = b'global' @@ -7522,18 +7523,18 @@ if not message: # we don't translate commit messages message = b'Removed tag %s' % b', '.join(names) - elif not opts.get(b'force'): + elif not opts.get('force'): for n in names: if n in repo.tags(): raise error.InputError( _(b"tag '%s' already exists (use -f to force)") % n ) - if not opts.get(b'local'): + if not opts.get('local'): p1, p2 = repo.dirstate.parents() if p2 != repo.nullid: raise error.StateError(_(b'uncommitted merge')) bheads = repo.branchheads() - if not opts.get(b'force') and bheads and p1 not in bheads: + if not opts.get('force') and bheads and p1 not in bheads: raise error.InputError( _( b'working directory is not at a branch head ' @@ -7545,7 +7546,7 @@ # don't allow tagging the null rev or the working directory if node is None: raise error.InputError(_(b"cannot tag working directory")) - elif not opts.get(b'remove') and node == nullid: + elif not opts.get('remove') and node == nullid: raise error.InputError(_(b"cannot tag null revision")) if not message: @@ -7555,25 +7556,23 @@ short(node), ) - date = opts.get(b'date') + date = opts.get('date') if date: date = dateutil.parsedate(date) - if opts.get(b'remove'): + if opts.get('remove'): editform = b'tag.remove' else: editform = b'tag.add' - editor = cmdutil.getcommiteditor( - editform=editform, **pycompat.strkwargs(opts) - ) + editor = cmdutil.getcommiteditor(editform=editform, **opts) tagsmod.tag( repo, names, node, message, - opts.get(b'local'), - opts.get(b'user'), + opts.get('local'), + opts.get('user'), date, editor=editor, ) @@ -7606,9 +7605,8 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) ui.pager(b'tags') - fm = ui.formatter(b'tags', opts) + fm = ui.formatter(b'tags', pycompat.byteskwargs(opts)) hexfunc = fm.hexfunc for t, n in reversed(repo.tagslist()): @@ -7963,12 +7961,13 @@ for more information about recovery from corruption of the repository. + For an alternative UI with a lot more control over the verification + process and better error reporting, try `hg help admin::verify`. + Returns 0 on success, 1 if errors are encountered. """ - opts = pycompat.byteskwargs(opts) - level = None - if opts[b'full']: + if opts['full']: level = verifymod.VERIFY_FULL return hg.verify(repo, level) @@ -7998,10 +7997,9 @@ :bundled: Boolean. True if included in the release. :name: String. Extension name. """ - opts = pycompat.byteskwargs(opts) if ui.verbose: ui.pager(b'version') - fm = ui.formatter(b"version", opts) + fm = ui.formatter(b"version", pycompat.byteskwargs(opts)) fm.startitem() fm.write( b"ver", _(b"Mercurial Distributed SCM (version %s)\n"), util.version() diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/commandserver.py --- a/mercurial/commandserver.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/commandserver.py Wed Oct 11 02:02:46 2023 +0200 @@ -16,7 +16,6 @@ import traceback from .i18n import _ -from .pycompat import getattr from . import ( encoding, error, @@ -332,7 +331,7 @@ # any kind of interaction must use server channels, but chg may # replace channels by fully functional tty files. 
so nontty is # enforced only if cin is a channel. - if not util.safehasattr(self.cin, 'fileno'): + if not hasattr(self.cin, 'fileno'): ui.setconfig(b'ui', b'nontty', b'true', b'commandserver') req = dispatch.request( @@ -384,7 +383,7 @@ if self.cmsg: hellomsg += b'message-encoding: %s\n' % self.cmsg.encoding hellomsg += b'pid: %d' % procutil.getpid() - if util.safehasattr(os, 'getpgid'): + if hasattr(os, 'getpgid'): hellomsg += b'\n' hellomsg += b'pgid: %d' % os.getpgid(0) @@ -559,7 +558,7 @@ self.ui = ui self.repo = repo self.address = opts[b'address'] - if not util.safehasattr(socket, 'AF_UNIX'): + if not hasattr(socket, 'AF_UNIX'): raise error.Abort(_(b'unsupported platform')) if not self.address: raise error.Abort(_(b'no socket path specified with --address')) @@ -588,7 +587,7 @@ o = socket.socketpair(socket.AF_UNIX, socket.SOCK_DGRAM) self._mainipc, self._workeripc = o self._servicehandler.bindsocket(self._sock, self.address) - if util.safehasattr(procutil, 'unblocksignal'): + if hasattr(procutil, 'unblocksignal'): procutil.unblocksignal(signal.SIGCHLD) o = signal.signal(signal.SIGCHLD, self._sigchldhandler) self._oldsigchldhandler = o diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/config.py --- a/mercurial/config.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/config.py Wed Oct 11 02:02:46 2023 +0200 @@ -10,7 +10,6 @@ import os from .i18n import _ -from .pycompat import getattr from . import ( encoding, error, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/configitems.py --- a/mercurial/configitems.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/configitems.py Wed Oct 11 02:02:46 2023 +0200 @@ -9,11 +9,21 @@ import functools import re +from .utils import resourceutil + from . import ( encoding, error, ) +try: + import tomllib # pytype: disable=import-error + + tomllib.load # trigger lazy import +except ModuleNotFoundError: + # Python <3.11 compat + from .thirdparty import tomli as tomllib + def loadconfigtable(ui, extname, configtable): """update config item known to the ui with the extension ones""" @@ -22,7 +32,7 @@ knownkeys = set(knownitems) newkeys = set(items) for key in sorted(knownkeys & newkeys): - msg = b"extension '%s' overwrite config item '%s.%s'" + msg = b"extension '%s' overwrites config item '%s.%s'" msg %= (extname, section, key) ui.develwarn(msg, config=b'warn-config') @@ -48,15 +58,19 @@ generic=False, priority=0, experimental=False, + documentation="", + in_core_extension=None, ): self.section = section self.name = name self.default = default + self.documentation = documentation self.alias = list(alias) self.generic = generic self.priority = priority self.experimental = experimental self._re = None + self.in_core_extension = in_core_extension if generic: self._re = re.compile(self.name) @@ -102,6 +116,74 @@ return None +def sanitize_item(item): + """Apply the transformations that are encoded on top of the pure data""" + + # Set the special defaults + default_type_key = "default-type" + default_type = item.pop(default_type_key, None) + if default_type == "dynamic": + item["default"] = dynamicdefault + elif default_type == "list_type": + item["default"] = list + elif default_type == "lambda": + assert isinstance(item["default"], list) + default = [e.encode() for e in item["default"]] + item["default"] = lambda: default + elif default_type == "lazy_module": + item["default"] = lambda: encoding.encoding + else: + if default_type is not None: + msg = "invalid default config type %r for '%s.%s'" + msg %= (default_type, item["section"], item["name"]) + raise 
error.ProgrammingError(msg) + + # config expects bytes + alias = item.get("alias") + if alias: + item["alias"] = [(k.encode(), v.encode()) for (k, v) in alias] + if isinstance(item.get("default"), str): + item["default"] = item["default"].encode() + item["section"] = item["section"].encode() + item["name"] = item["name"].encode() + + +def read_configitems_file(): + """Returns the deserialized TOML structure from the configitems file""" + with resourceutil.open_resource(b"mercurial", b"configitems.toml") as fp: + return tomllib.load(fp) + + +def configitems_from_toml(items): + """Register the configitems from the *deserialized* toml file""" + for item in items["items"]: + sanitize_item(item) + coreconfigitem(**item) + + templates = items["templates"] + + for application in items["template-applications"]: + template_items = templates[application["template"]] + + for template_item in template_items: + item = template_item.copy() + prefix = application.get("prefix", "") + item["section"] = application["section"] + if prefix: + item["name"] = f'{prefix}.{item["suffix"]}' + else: + item["name"] = item["suffix"] + + sanitize_item(item) + item.pop("suffix", None) + coreconfigitem(**item) + + +def import_configitems_from_file(): + as_toml = read_configitems_file() + configitems_from_toml(as_toml) + + coreitems = {} @@ -129,2856 +211,4 @@ coreconfigitem = getitemregister(coreitems) - -def _registerdiffopts(section, configprefix=b''): - coreconfigitem( - section, - configprefix + b'nodates', - default=False, - ) - coreconfigitem( - section, - configprefix + b'showfunc', - default=False, - ) - coreconfigitem( - section, - configprefix + b'unified', - default=None, - ) - coreconfigitem( - section, - configprefix + b'git', - default=False, - ) - coreconfigitem( - section, - configprefix + b'ignorews', - default=False, - ) - coreconfigitem( - section, - configprefix + b'ignorewsamount', - default=False, - ) - coreconfigitem( - section, - configprefix + b'ignoreblanklines', - default=False, - ) - coreconfigitem( - section, - configprefix + b'ignorewseol', - default=False, - ) - coreconfigitem( - section, - configprefix + b'nobinary', - default=False, - ) - coreconfigitem( - section, - configprefix + b'noprefix', - default=False, - ) - coreconfigitem( - section, - configprefix + b'word-diff', - default=False, - ) - - -coreconfigitem( - b'alias', - b'.*', - default=dynamicdefault, - generic=True, -) -coreconfigitem( - b'auth', - b'cookiefile', - default=None, -) -_registerdiffopts(section=b'annotate') -# bookmarks.pushing: internal hack for discovery -coreconfigitem( - b'bookmarks', - b'pushing', - default=list, -) -# bundle.mainreporoot: internal hack for bundlerepo -coreconfigitem( - b'bundle', - b'mainreporoot', - default=b'', -) -coreconfigitem( - b'censor', - b'policy', - default=b'abort', - experimental=True, -) -coreconfigitem( - b'chgserver', - b'idletimeout', - default=3600, -) -coreconfigitem( - b'chgserver', - b'skiphash', - default=False, -) -coreconfigitem( - b'cmdserver', - b'log', - default=None, -) -coreconfigitem( - b'cmdserver', - b'max-log-files', - default=7, -) -coreconfigitem( - b'cmdserver', - b'max-log-size', - default=b'1 MB', -) -coreconfigitem( - b'cmdserver', - b'max-repo-cache', - default=0, - experimental=True, -) -coreconfigitem( - b'cmdserver', - b'message-encodings', - default=list, -) -coreconfigitem( - b'cmdserver', - b'track-log', - default=lambda: [b'chgserver', b'cmdserver', b'repocache'], -) -coreconfigitem( - b'cmdserver', - b'shutdown-on-interrupt', - default=True, 
-) -coreconfigitem( - b'color', - b'.*', - default=None, - generic=True, -) -coreconfigitem( - b'color', - b'mode', - default=b'auto', -) -coreconfigitem( - b'color', - b'pagermode', - default=dynamicdefault, -) -coreconfigitem( - b'command-templates', - b'graphnode', - default=None, - alias=[(b'ui', b'graphnodetemplate')], -) -coreconfigitem( - b'command-templates', - b'log', - default=None, - alias=[(b'ui', b'logtemplate')], -) -coreconfigitem( - b'command-templates', - b'mergemarker', - default=( - b'{node|short} ' - b'{ifeq(tags, "tip", "", ' - b'ifeq(tags, "", "", "{tags} "))}' - b'{if(bookmarks, "{bookmarks} ")}' - b'{ifeq(branch, "default", "", "{branch} ")}' - b'- {author|user}: {desc|firstline}' - ), - alias=[(b'ui', b'mergemarkertemplate')], -) -coreconfigitem( - b'command-templates', - b'pre-merge-tool-output', - default=None, - alias=[(b'ui', b'pre-merge-tool-output-template')], -) -coreconfigitem( - b'command-templates', - b'oneline-summary', - default=None, -) -coreconfigitem( - b'command-templates', - b'oneline-summary.*', - default=dynamicdefault, - generic=True, -) -_registerdiffopts(section=b'commands', configprefix=b'commit.interactive.') -coreconfigitem( - b'commands', - b'commit.post-status', - default=False, -) -coreconfigitem( - b'commands', - b'grep.all-files', - default=False, - experimental=True, -) -coreconfigitem( - b'commands', - b'merge.require-rev', - default=False, -) -coreconfigitem( - b'commands', - b'push.require-revs', - default=False, -) -coreconfigitem( - b'commands', - b'resolve.confirm', - default=False, -) -coreconfigitem( - b'commands', - b'resolve.explicit-re-merge', - default=False, -) -coreconfigitem( - b'commands', - b'resolve.mark-check', - default=b'none', -) -_registerdiffopts(section=b'commands', configprefix=b'revert.interactive.') -coreconfigitem( - b'commands', - b'show.aliasprefix', - default=list, -) -coreconfigitem( - b'commands', - b'status.relative', - default=False, -) -coreconfigitem( - b'commands', - b'status.skipstates', - default=[], - experimental=True, -) -coreconfigitem( - b'commands', - b'status.terse', - default=b'', -) -coreconfigitem( - b'commands', - b'status.verbose', - default=False, -) -coreconfigitem( - b'commands', - b'update.check', - default=None, -) -coreconfigitem( - b'commands', - b'update.requiredest', - default=False, -) -coreconfigitem( - b'committemplate', - b'.*', - default=None, - generic=True, -) -coreconfigitem( - b'convert', - b'bzr.saverev', - default=True, -) -coreconfigitem( - b'convert', - b'cvsps.cache', - default=True, -) -coreconfigitem( - b'convert', - b'cvsps.fuzz', - default=60, -) -coreconfigitem( - b'convert', - b'cvsps.logencoding', - default=None, -) -coreconfigitem( - b'convert', - b'cvsps.mergefrom', - default=None, -) -coreconfigitem( - b'convert', - b'cvsps.mergeto', - default=None, -) -coreconfigitem( - b'convert', - b'git.committeractions', - default=lambda: [b'messagedifferent'], -) -coreconfigitem( - b'convert', - b'git.extrakeys', - default=list, -) -coreconfigitem( - b'convert', - b'git.findcopiesharder', - default=False, -) -coreconfigitem( - b'convert', - b'git.remoteprefix', - default=b'remote', -) -coreconfigitem( - b'convert', - b'git.renamelimit', - default=400, -) -coreconfigitem( - b'convert', - b'git.saverev', - default=True, -) -coreconfigitem( - b'convert', - b'git.similarity', - default=50, -) -coreconfigitem( - b'convert', - b'git.skipsubmodules', - default=False, -) -coreconfigitem( - b'convert', - b'hg.clonebranches', - default=False, -) -coreconfigitem( - 
b'convert', - b'hg.ignoreerrors', - default=False, -) -coreconfigitem( - b'convert', - b'hg.preserve-hash', - default=False, -) -coreconfigitem( - b'convert', - b'hg.revs', - default=None, -) -coreconfigitem( - b'convert', - b'hg.saverev', - default=False, -) -coreconfigitem( - b'convert', - b'hg.sourcename', - default=None, -) -coreconfigitem( - b'convert', - b'hg.startrev', - default=None, -) -coreconfigitem( - b'convert', - b'hg.tagsbranch', - default=b'default', -) -coreconfigitem( - b'convert', - b'hg.usebranchnames', - default=True, -) -coreconfigitem( - b'convert', - b'ignoreancestorcheck', - default=False, - experimental=True, -) -coreconfigitem( - b'convert', - b'localtimezone', - default=False, -) -coreconfigitem( - b'convert', - b'p4.encoding', - default=dynamicdefault, -) -coreconfigitem( - b'convert', - b'p4.startrev', - default=0, -) -coreconfigitem( - b'convert', - b'skiptags', - default=False, -) -coreconfigitem( - b'convert', - b'svn.debugsvnlog', - default=True, -) -coreconfigitem( - b'convert', - b'svn.trunk', - default=None, -) -coreconfigitem( - b'convert', - b'svn.tags', - default=None, -) -coreconfigitem( - b'convert', - b'svn.branches', - default=None, -) -coreconfigitem( - b'convert', - b'svn.startrev', - default=0, -) -coreconfigitem( - b'convert', - b'svn.dangerous-set-commit-dates', - default=False, -) -coreconfigitem( - b'debug', - b'dirstate.delaywrite', - default=0, -) -coreconfigitem( - b'debug', - b'revlog.verifyposition.changelog', - default=b'', -) -coreconfigitem( - b'debug', - b'revlog.debug-delta', - default=False, -) -# display extra information about the bundling process -coreconfigitem( - b'debug', - b'bundling-stats', - default=False, -) -# display extra information about the unbundling process -coreconfigitem( - b'debug', - b'unbundling-stats', - default=False, -) -coreconfigitem( - b'defaults', - b'.*', - default=None, - generic=True, -) -coreconfigitem( - b'devel', - b'all-warnings', - default=False, -) -coreconfigitem( - b'devel', - b'bundle2.debug', - default=False, -) -# which kind of delta to put in the bundled changegroup. Possible value -# - '': use default behavior -# - p1: force to always use delta against p1 -# - full: force to always use full content -coreconfigitem( - b'devel', - b'bundle.delta', - default=b'', -) -coreconfigitem( - b'devel', - b'cache-vfs', - default=None, -) -coreconfigitem( - b'devel', - b'check-locks', - default=False, -) -coreconfigitem( - b'devel', - b'check-relroot', - default=False, -) -# Track copy information for all file, not just "added" one (very slow) -coreconfigitem( - b'devel', - b'copy-tracing.trace-all-files', - default=False, -) -coreconfigitem( - b'devel', - b'default-date', - default=None, -) -coreconfigitem( - b'devel', - b'deprec-warn', - default=False, -) -# possible values: -# - auto (the default) -# - force-append -# - force-new -coreconfigitem( - b'devel', - b'dirstate.v2.data_update_mode', - default="auto", -) -coreconfigitem( - b'devel', - b'disableloaddefaultcerts', - default=False, -) -coreconfigitem( - b'devel', - b'warn-empty-changegroup', - default=False, -) -coreconfigitem( - b'devel', - b'legacy.exchange', - default=list, -) -# When True, revlogs use a special reference version of the nodemap, that is not -# performant but is "known" to behave properly. -coreconfigitem( - b'devel', - b'persistent-nodemap', - default=False, -) -coreconfigitem( - b'devel', - b'servercafile', - default=b'', -) -# This config option is intended for use in tests only. 
It is a giant -# footgun to kill security. Don't define it. -coreconfigitem( - b'devel', - b'server-insecure-exact-protocol', - default=b'', -) -coreconfigitem( - b'devel', - b'serverrequirecert', - default=False, -) -# Makes the status algorithm wait for the existence of this file -# (or until a timeout of `devel.sync.status.pre-dirstate-write-file-timeout` -# seconds) before taking the lock and writing the dirstate. -# Status signals that it's ready to wait by creating a file -# with the same name + `.waiting`. -# Useful when testing race conditions. -coreconfigitem( - b'devel', - b'sync.status.pre-dirstate-write-file', - default=None, -) -coreconfigitem( - b'devel', - b'sync.status.pre-dirstate-write-file-timeout', - default=2, -) -coreconfigitem( - b'devel', - b'sync.dirstate.post-docket-read-file', - default=None, -) -coreconfigitem( - b'devel', - b'sync.dirstate.post-docket-read-file-timeout', - default=2, -) -coreconfigitem( - b'devel', - b'sync.dirstate.pre-read-file', - default=None, -) -coreconfigitem( - b'devel', - b'sync.dirstate.pre-read-file-timeout', - default=2, -) -coreconfigitem( - b'devel', - b'strip-obsmarkers', - default=True, -) -coreconfigitem( - b'devel', - b'warn-config', - default=None, -) -coreconfigitem( - b'devel', - b'warn-config-default', - default=None, -) -coreconfigitem( - b'devel', - b'user.obsmarker', - default=None, -) -coreconfigitem( - b'devel', - b'warn-config-unknown', - default=None, -) -coreconfigitem( - b'devel', - b'debug.copies', - default=False, -) -coreconfigitem( - b'devel', - b'copy-tracing.multi-thread', - default=True, -) -coreconfigitem( - b'devel', - b'debug.extensions', - default=False, -) -coreconfigitem( - b'devel', - b'debug.repo-filters', - default=False, -) -coreconfigitem( - b'devel', - b'debug.peer-request', - default=False, -) -# If discovery.exchange-heads is False, the discovery will not start with -# remote head fetching and local head querying. -coreconfigitem( - b'devel', - b'discovery.exchange-heads', - default=True, -) -# If devel.debug.abort-update is True, then any merge with the working copy, -# e.g. [hg update], will be aborted after figuring out what needs to be done, -# but before spawning the parallel worker -coreconfigitem( - b'devel', - b'debug.abort-update', - default=False, -) -# If discovery.grow-sample is False, the sample size used in set discovery will -# not be increased through the process -coreconfigitem( - b'devel', - b'discovery.grow-sample', - default=True, -) -# When discovery.grow-sample.dynamic is True, the default, the sample size is -# adapted to the shape of the undecided set (it is set to the max of: -# , len(roots(undecided)), len(heads(undecided) -coreconfigitem( - b'devel', - b'discovery.grow-sample.dynamic', - default=True, -) -# discovery.grow-sample.rate control the rate at which the sample grow -coreconfigitem( - b'devel', - b'discovery.grow-sample.rate', - default=1.05, -) -# If discovery.randomize is False, random sampling during discovery are -# deterministic. It is meant for integration tests. 
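
The registrations being removed from this module now live in a configitems.toml file that read_configitems_file() deserializes with tomllib, falling back to the vendored tomli on Python older than 3.11. A hypothetical entry mirroring one of the deleted items, parsed the same way:

    import tomllib  # Python >= 3.11

    sample = '''
    [[items]]
    section = "devel"
    name = "discovery.sample-size"
    default = 200
    '''
    item = tomllib.loads(sample)['items'][0]
    assert item['section'] == 'devel'
    assert item['default'] == 200
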
-coreconfigitem( - b'devel', - b'discovery.randomize', - default=True, -) -# Control the initial size of the discovery sample -coreconfigitem( - b'devel', - b'discovery.sample-size', - default=200, -) -# Control the initial size of the discovery for initial change -coreconfigitem( - b'devel', - b'discovery.sample-size.initial', - default=100, -) -_registerdiffopts(section=b'diff') -coreconfigitem( - b'diff', - b'merge', - default=False, - experimental=True, -) -coreconfigitem( - b'email', - b'bcc', - default=None, -) -coreconfigitem( - b'email', - b'cc', - default=None, -) -coreconfigitem( - b'email', - b'charsets', - default=list, -) -coreconfigitem( - b'email', - b'from', - default=None, -) -coreconfigitem( - b'email', - b'method', - default=b'smtp', -) -coreconfigitem( - b'email', - b'reply-to', - default=None, -) -coreconfigitem( - b'email', - b'to', - default=None, -) -coreconfigitem( - b'experimental', - b'archivemetatemplate', - default=dynamicdefault, -) -coreconfigitem( - b'experimental', - b'auto-publish', - default=b'publish', -) -coreconfigitem( - b'experimental', - b'bundle-phases', - default=False, -) -coreconfigitem( - b'experimental', - b'bundle2-advertise', - default=True, -) -coreconfigitem( - b'experimental', - b'bundle2-output-capture', - default=False, -) -coreconfigitem( - b'experimental', - b'bundle2.pushback', - default=False, -) -coreconfigitem( - b'experimental', - b'bundle2lazylocking', - default=False, -) -coreconfigitem( - b'experimental', - b'bundlecomplevel', - default=None, -) -coreconfigitem( - b'experimental', - b'bundlecomplevel.bzip2', - default=None, -) -coreconfigitem( - b'experimental', - b'bundlecomplevel.gzip', - default=None, -) -coreconfigitem( - b'experimental', - b'bundlecomplevel.none', - default=None, -) -coreconfigitem( - b'experimental', - b'bundlecomplevel.zstd', - default=None, -) -coreconfigitem( - b'experimental', - b'bundlecompthreads', - default=None, -) -coreconfigitem( - b'experimental', - b'bundlecompthreads.bzip2', - default=None, -) -coreconfigitem( - b'experimental', - b'bundlecompthreads.gzip', - default=None, -) -coreconfigitem( - b'experimental', - b'bundlecompthreads.none', - default=None, -) -coreconfigitem( - b'experimental', - b'bundlecompthreads.zstd', - default=None, -) -coreconfigitem( - b'experimental', - b'changegroup3', - default=True, -) -coreconfigitem( - b'experimental', - b'changegroup4', - default=False, -) - -# might remove rank configuration once the computation has no impact -coreconfigitem( - b'experimental', - b'changelog-v2.compute-rank', - default=True, -) -coreconfigitem( - b'experimental', - b'cleanup-as-archived', - default=False, -) -coreconfigitem( - b'experimental', - b'clientcompressionengines', - default=list, -) -coreconfigitem( - b'experimental', - b'copytrace', - default=b'on', -) -coreconfigitem( - b'experimental', - b'copytrace.movecandidateslimit', - default=100, -) -coreconfigitem( - b'experimental', - b'copytrace.sourcecommitlimit', - default=100, -) -coreconfigitem( - b'experimental', - b'copies.read-from', - default=b"filelog-only", -) -coreconfigitem( - b'experimental', - b'copies.write-to', - default=b'filelog-only', -) -coreconfigitem( - b'experimental', - b'crecordtest', - default=None, -) -coreconfigitem( - b'experimental', - b'directaccess', - default=False, -) -coreconfigitem( - b'experimental', - b'directaccess.revnums', - default=False, -) -coreconfigitem( - b'experimental', - b'editortmpinhg', - default=False, -) -coreconfigitem( - b'experimental', - b'evolution', - 
default=list, -) -coreconfigitem( - b'experimental', - b'evolution.allowdivergence', - default=False, - alias=[(b'experimental', b'allowdivergence')], -) -coreconfigitem( - b'experimental', - b'evolution.allowunstable', - default=None, -) -coreconfigitem( - b'experimental', - b'evolution.createmarkers', - default=None, -) -coreconfigitem( - b'experimental', - b'evolution.effect-flags', - default=True, - alias=[(b'experimental', b'effect-flags')], -) -coreconfigitem( - b'experimental', - b'evolution.exchange', - default=None, -) -coreconfigitem( - b'experimental', - b'evolution.bundle-obsmarker', - default=False, -) -coreconfigitem( - b'experimental', - b'evolution.bundle-obsmarker:mandatory', - default=True, -) -coreconfigitem( - b'experimental', - b'log.topo', - default=False, -) -coreconfigitem( - b'experimental', - b'evolution.report-instabilities', - default=True, -) -coreconfigitem( - b'experimental', - b'evolution.track-operation', - default=True, -) -# repo-level config to exclude a revset visibility -# -# The target use case is to use `share` to expose different subset of the same -# repository, especially server side. See also `server.view`. -coreconfigitem( - b'experimental', - b'extra-filter-revs', - default=None, -) -coreconfigitem( - b'experimental', - b'maxdeltachainspan', - default=-1, -) -# tracks files which were undeleted (merge might delete them but we explicitly -# kept/undeleted them) and creates new filenodes for them -coreconfigitem( - b'experimental', - b'merge-track-salvaged', - default=False, -) -coreconfigitem( - b'experimental', - b'mmapindexthreshold', - default=None, -) -coreconfigitem( - b'experimental', - b'narrow', - default=False, -) -coreconfigitem( - b'experimental', - b'nonnormalparanoidcheck', - default=False, -) -coreconfigitem( - b'experimental', - b'exportableenviron', - default=list, -) -coreconfigitem( - b'experimental', - b'extendedheader.index', - default=None, -) -coreconfigitem( - b'experimental', - b'extendedheader.similarity', - default=False, -) -coreconfigitem( - b'experimental', - b'graphshorten', - default=False, -) -coreconfigitem( - b'experimental', - b'graphstyle.parent', - default=dynamicdefault, -) -coreconfigitem( - b'experimental', - b'graphstyle.missing', - default=dynamicdefault, -) -coreconfigitem( - b'experimental', - b'graphstyle.grandparent', - default=dynamicdefault, -) -coreconfigitem( - b'experimental', - b'hook-track-tags', - default=False, -) -coreconfigitem( - b'experimental', - b'httppostargs', - default=False, -) -coreconfigitem(b'experimental', b'nointerrupt', default=False) -coreconfigitem(b'experimental', b'nointerrupt-interactiveonly', default=True) - -coreconfigitem( - b'experimental', - b'obsmarkers-exchange-debug', - default=False, -) -coreconfigitem( - b'experimental', - b'remotenames', - default=False, -) -coreconfigitem( - b'experimental', - b'removeemptydirs', - default=True, -) -coreconfigitem( - b'experimental', - b'revert.interactive.select-to-keep', - default=False, -) -coreconfigitem( - b'experimental', - b'revisions.prefixhexnode', - default=False, -) -# "out of experimental" todo list. -# -# * include management of a persistent nodemap in the main docket -# * enforce a "no-truncate" policy for mmap safety -# - for censoring operation -# - for stripping operation -# - for rollback operation -# * proper streaming (race free) of the docket file -# * track garbage data to evemtually allow rewriting -existing- sidedata. 
-# * Exchange-wise, we will also need to do something more efficient than -# keeping references to the affected revlogs, especially memory-wise when -# rewriting sidedata. -# * introduce a proper solution to reduce the number of filelog related files. -# * use caching for reading sidedata (similar to what we do for data). -# * no longer set offset=0 if sidedata_size=0 (simplify cutoff computation). -# * Improvement to consider -# - avoid compression header in chunk using the default compression? -# - forbid "inline" compression mode entirely? -# - split the data offset and flag field (the 2 bytes save are mostly trouble) -# - keep track of uncompressed -chunk- size (to preallocate memory better) -# - keep track of chain base or size (probably not that useful anymore) -coreconfigitem( - b'experimental', - b'revlogv2', - default=None, -) -coreconfigitem( - b'experimental', - b'revisions.disambiguatewithin', - default=None, -) -coreconfigitem( - b'experimental', - b'rust.index', - default=False, -) -coreconfigitem( - b'experimental', - b'server.allow-hidden-access', - default=list, -) -coreconfigitem( - b'experimental', - b'server.filesdata.recommended-batch-size', - default=50000, -) -coreconfigitem( - b'experimental', - b'server.manifestdata.recommended-batch-size', - default=100000, -) -coreconfigitem( - b'experimental', - b'server.stream-narrow-clones', - default=False, -) -coreconfigitem( - b'experimental', - b'single-head-per-branch', - default=False, -) -coreconfigitem( - b'experimental', - b'single-head-per-branch:account-closed-heads', - default=False, -) -coreconfigitem( - b'experimental', - b'single-head-per-branch:public-changes-only', - default=False, -) -coreconfigitem( - b'experimental', - b'sparse-read', - default=False, -) -coreconfigitem( - b'experimental', - b'sparse-read.density-threshold', - default=0.50, -) -coreconfigitem( - b'experimental', - b'sparse-read.min-gap-size', - default=b'65K', -) -coreconfigitem( - b'experimental', - b'stream-v3', - default=False, -) -coreconfigitem( - b'experimental', - b'treemanifest', - default=False, -) -coreconfigitem( - b'experimental', - b'update.atomic-file', - default=False, -) -coreconfigitem( - b'experimental', - b'web.full-garbage-collection-rate', - default=1, # still forcing a full collection on each request -) -coreconfigitem( - b'experimental', - b'worker.wdir-get-thread-safe', - default=False, -) -coreconfigitem( - b'experimental', - b'worker.repository-upgrade', - default=False, -) -coreconfigitem( - b'experimental', - b'xdiff', - default=False, -) -coreconfigitem( - b'extensions', - b'[^:]*', - default=None, - generic=True, -) -coreconfigitem( - b'extensions', - b'[^:]*:required', - default=False, - generic=True, -) -coreconfigitem( - b'extdata', - b'.*', - default=None, - generic=True, -) -coreconfigitem( - b'format', - b'bookmarks-in-store', - default=False, -) -coreconfigitem( - b'format', - b'chunkcachesize', - default=None, - experimental=True, -) -coreconfigitem( - # Enable this dirstate format *when creating a new repository*. 
- # Which format to use for existing repos is controlled by .hg/requires - b'format', - b'use-dirstate-v2', - default=False, - experimental=True, - alias=[(b'format', b'exp-rc-dirstate-v2')], -) -coreconfigitem( - b'format', - b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories', - default=False, - experimental=True, -) -coreconfigitem( - b'format', - b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet', - default=False, - experimental=True, -) -coreconfigitem( - b'format', - b'use-dirstate-tracked-hint', - default=False, - experimental=True, -) -coreconfigitem( - b'format', - b'use-dirstate-tracked-hint.version', - default=1, - experimental=True, -) -coreconfigitem( - b'format', - b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories', - default=False, - experimental=True, -) -coreconfigitem( - b'format', - b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet', - default=False, - experimental=True, -) -coreconfigitem( - b'format', - b'dotencode', - default=True, -) -coreconfigitem( - b'format', - b'generaldelta', - default=False, - experimental=True, -) -coreconfigitem( - b'format', - b'manifestcachesize', - default=None, - experimental=True, -) -coreconfigitem( - b'format', - b'maxchainlen', - default=dynamicdefault, - experimental=True, -) -coreconfigitem( - b'format', - b'obsstore-version', - default=None, -) -coreconfigitem( - b'format', - b'sparse-revlog', - default=True, -) -coreconfigitem( - b'format', - b'revlog-compression', - default=lambda: [b'zstd', b'zlib'], - alias=[(b'experimental', b'format.compression')], -) -# Experimental TODOs: -# -# * Same as for revlogv2 (but for the reduction of the number of files) -# * Actually computing the rank of changesets -# * Improvement to investigate -# - storing .hgtags fnode -# - storing branch related identifier - -coreconfigitem( - b'format', - b'exp-use-changelog-v2', - default=None, - experimental=True, -) -coreconfigitem( - b'format', - b'usefncache', - default=True, -) -coreconfigitem( - b'format', - b'usegeneraldelta', - default=True, -) -coreconfigitem( - b'format', - b'usestore', - default=True, -) - - -def _persistent_nodemap_default(): - """compute `use-persistent-nodemap` default value - - The feature is disabled unless a fast implementation is available. - """ - from . import policy - - return policy.importrust('revlog') is not None - - -coreconfigitem( - b'format', - b'use-persistent-nodemap', - default=_persistent_nodemap_default, -) -coreconfigitem( - b'format', - b'exp-use-copies-side-data-changeset', - default=False, - experimental=True, -) -coreconfigitem( - b'format', - b'use-share-safe', - default=True, -) -coreconfigitem( - b'format', - b'use-share-safe.automatic-upgrade-of-mismatching-repositories', - default=False, - experimental=True, -) -coreconfigitem( - b'format', - b'use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet', - default=False, - experimental=True, -) - -# Moving this on by default means we are confident about the scaling of phases. -# This is not garanteed to be the case at the time this message is written. -coreconfigitem( - b'format', - b'use-internal-phase', - default=False, - experimental=True, -) -# The interaction between the archived phase and obsolescence markers needs to -# be sorted out before wider usage of this are to be considered. -# -# At the time this message is written, behavior when archiving obsolete -# changeset differ significantly from stripping. 
As part of stripping, we also -# remove the obsolescence marker associated to the stripped changesets, -# revealing the precedecessors changesets when applicable. When archiving, we -# don't touch the obsolescence markers, keeping everything hidden. This can -# result in quite confusing situation for people combining exchanging draft -# with the archived phases. As some markers needed by others may be skipped -# during exchange. -coreconfigitem( - b'format', - b'exp-archived-phase', - default=False, - experimental=True, -) -coreconfigitem( - b'shelve', - b'store', - default=b'internal', - experimental=True, -) -coreconfigitem( - b'fsmonitor', - b'warn_when_unused', - default=True, -) -coreconfigitem( - b'fsmonitor', - b'warn_update_file_count', - default=50000, -) -coreconfigitem( - b'fsmonitor', - b'warn_update_file_count_rust', - default=400000, -) -coreconfigitem( - b'help', - br'hidden-command\..*', - default=False, - generic=True, -) -coreconfigitem( - b'help', - br'hidden-topic\..*', - default=False, - generic=True, -) -coreconfigitem( - b'hooks', - b'[^:]*', - default=dynamicdefault, - generic=True, -) -coreconfigitem( - b'hooks', - b'.*:run-with-plain', - default=True, - generic=True, -) -coreconfigitem( - b'hgweb-paths', - b'.*', - default=list, - generic=True, -) -coreconfigitem( - b'hostfingerprints', - b'.*', - default=list, - generic=True, -) -coreconfigitem( - b'hostsecurity', - b'ciphers', - default=None, -) -coreconfigitem( - b'hostsecurity', - b'minimumprotocol', - default=dynamicdefault, -) -coreconfigitem( - b'hostsecurity', - b'.*:minimumprotocol$', - default=dynamicdefault, - generic=True, -) -coreconfigitem( - b'hostsecurity', - b'.*:ciphers$', - default=dynamicdefault, - generic=True, -) -coreconfigitem( - b'hostsecurity', - b'.*:fingerprints$', - default=list, - generic=True, -) -coreconfigitem( - b'hostsecurity', - b'.*:verifycertsfile$', - default=None, - generic=True, -) - -coreconfigitem( - b'http_proxy', - b'always', - default=False, -) -coreconfigitem( - b'http_proxy', - b'host', - default=None, -) -coreconfigitem( - b'http_proxy', - b'no', - default=list, -) -coreconfigitem( - b'http_proxy', - b'passwd', - default=None, -) -coreconfigitem( - b'http_proxy', - b'user', - default=None, -) - -coreconfigitem( - b'http', - b'timeout', - default=None, -) - -coreconfigitem( - b'logtoprocess', - b'commandexception', - default=None, -) -coreconfigitem( - b'logtoprocess', - b'commandfinish', - default=None, -) -coreconfigitem( - b'logtoprocess', - b'command', - default=None, -) -coreconfigitem( - b'logtoprocess', - b'develwarn', - default=None, -) -coreconfigitem( - b'logtoprocess', - b'uiblocked', - default=None, -) -coreconfigitem( - b'merge', - b'checkunknown', - default=b'abort', -) -coreconfigitem( - b'merge', - b'checkignored', - default=b'abort', -) -coreconfigitem( - b'experimental', - b'merge.checkpathconflicts', - default=False, -) -coreconfigitem( - b'merge', - b'followcopies', - default=True, -) -coreconfigitem( - b'merge', - b'on-failure', - default=b'continue', -) -coreconfigitem( - b'merge', - b'preferancestor', - default=lambda: [b'*'], - experimental=True, -) -coreconfigitem( - b'merge', - b'strict-capability-check', - default=False, -) -coreconfigitem( - b'merge', - b'disable-partial-tools', - default=False, - experimental=True, -) -coreconfigitem( - b'partial-merge-tools', - b'.*', - default=None, - generic=True, - experimental=True, -) -coreconfigitem( - b'partial-merge-tools', - br'.*\.patterns', - default=dynamicdefault, - generic=True, - 
priority=-1, - experimental=True, -) -coreconfigitem( - b'partial-merge-tools', - br'.*\.executable$', - default=dynamicdefault, - generic=True, - priority=-1, - experimental=True, -) -coreconfigitem( - b'partial-merge-tools', - br'.*\.order', - default=0, - generic=True, - priority=-1, - experimental=True, -) -coreconfigitem( - b'partial-merge-tools', - br'.*\.args', - default=b"$local $base $other", - generic=True, - priority=-1, - experimental=True, -) -coreconfigitem( - b'partial-merge-tools', - br'.*\.disable', - default=False, - generic=True, - priority=-1, - experimental=True, -) -coreconfigitem( - b'merge-tools', - b'.*', - default=None, - generic=True, -) -coreconfigitem( - b'merge-tools', - br'.*\.args$', - default=b"$local $base $other", - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.binary$', - default=False, - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.check$', - default=list, - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.checkchanged$', - default=False, - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.executable$', - default=dynamicdefault, - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.fixeol$', - default=False, - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.gui$', - default=False, - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.mergemarkers$', - default=b'basic', - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.mergemarkertemplate$', - default=dynamicdefault, # take from command-templates.mergemarker - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.priority$', - default=0, - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.premerge$', - default=dynamicdefault, - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.regappend$', - default=b"", - generic=True, - priority=-1, -) -coreconfigitem( - b'merge-tools', - br'.*\.symlink$', - default=False, - generic=True, - priority=-1, -) -coreconfigitem( - b'pager', - b'attend-.*', - default=dynamicdefault, - generic=True, -) -coreconfigitem( - b'pager', - b'ignore', - default=list, -) -coreconfigitem( - b'pager', - b'pager', - default=dynamicdefault, -) -coreconfigitem( - b'patch', - b'eol', - default=b'strict', -) -coreconfigitem( - b'patch', - b'fuzz', - default=2, -) -coreconfigitem( - b'paths', - b'default', - default=None, -) -coreconfigitem( - b'paths', - b'default-push', - default=None, -) -coreconfigitem( - b'paths', - b'[^:]*', - default=None, - generic=True, -) -coreconfigitem( - b'paths', - b'.*:bookmarks.mode', - default='default', - generic=True, -) -coreconfigitem( - b'paths', - b'.*:multi-urls', - default=False, - generic=True, -) -coreconfigitem( - b'paths', - b'.*:pushrev', - default=None, - generic=True, -) -coreconfigitem( - b'paths', - b'.*:pushurl', - default=None, - generic=True, -) -coreconfigitem( - b'paths', - b'.*:pulled-delta-reuse-policy', - default=None, - generic=True, -) -coreconfigitem( - b'phases', - b'checksubrepos', - default=b'follow', -) -coreconfigitem( - b'phases', - b'new-commit', - default=b'draft', -) -coreconfigitem( - b'phases', - b'publish', - default=True, -) -coreconfigitem( - b'profiling', - b'enabled', - default=False, -) -coreconfigitem( - b'profiling', - b'format', - default=b'text', -) -coreconfigitem( - b'profiling', - b'freq', - default=1000, -) 
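The generic entries in this list (for example the `merge-tools` and `partial-merge-tools` patterns above, declared with `generic=True` and `priority=-1`) are regexes rather than literal names, so resolving a config key needs a defined lookup order. A condensed sketch of that order, modeled on the `itemregister` class in mercurial/configitems.py (simplified here; treat it as illustrative rather than the exact upstream code):

    import re
    from dataclasses import dataclass

    @dataclass(frozen=True)
    class _item:
        """Stand-in for a registered config item declaration."""
        name: str
        generic: bool = False
        priority: int = 0

    class itemregister(dict):
        """Exact names win; generic regex names are tried afterwards."""

        def __init__(self):
            super().__init__()
            self._generics = set()

        def __setitem__(self, key, item):
            super().__setitem__(key, item)
            if item.generic:
                self._generics.add(item)

        def get(self, key, default=None):
            baseitem = super().get(key)
            if baseitem is not None and not baseitem.generic:
                return baseitem
            # Lower `priority` sorts first, which is why the suffix patterns
            # above use priority=-1: they are tried before the catch-all '.*'
            # entry registered with the default priority of 0.
            for item in sorted(self._generics, key=lambda i: (i.priority, i.name)):
                if re.compile(item.name).match(key):
                    return item
            return default

    reg = itemregister()
    reg['.*'] = _item('.*', generic=True)
    reg[r'.*\.args$'] = _item(r'.*\.args$', generic=True, priority=-1)
    assert reg.get('mytool.args').name == r'.*\.args$'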
-coreconfigitem( - b'profiling', - b'limit', - default=30, -) -coreconfigitem( - b'profiling', - b'nested', - default=0, -) -coreconfigitem( - b'profiling', - b'output', - default=None, -) -coreconfigitem( - b'profiling', - b'showmax', - default=0.999, -) -coreconfigitem( - b'profiling', - b'showmin', - default=dynamicdefault, -) -coreconfigitem( - b'profiling', - b'showtime', - default=True, -) -coreconfigitem( - b'profiling', - b'sort', - default=b'inlinetime', -) -coreconfigitem( - b'profiling', - b'statformat', - default=b'hotpath', -) -coreconfigitem( - b'profiling', - b'time-track', - default=dynamicdefault, -) -coreconfigitem( - b'profiling', - b'type', - default=b'stat', -) -coreconfigitem( - b'progress', - b'assume-tty', - default=False, -) -coreconfigitem( - b'progress', - b'changedelay', - default=1, -) -coreconfigitem( - b'progress', - b'clear-complete', - default=True, -) -coreconfigitem( - b'progress', - b'debug', - default=False, -) -coreconfigitem( - b'progress', - b'delay', - default=3, -) -coreconfigitem( - b'progress', - b'disable', - default=False, -) -coreconfigitem( - b'progress', - b'estimateinterval', - default=60.0, -) -coreconfigitem( - b'progress', - b'format', - default=lambda: [b'topic', b'bar', b'number', b'estimate'], -) -coreconfigitem( - b'progress', - b'refresh', - default=0.1, -) -coreconfigitem( - b'progress', - b'width', - default=dynamicdefault, -) -coreconfigitem( - b'pull', - b'confirm', - default=False, -) -coreconfigitem( - b'push', - b'pushvars.server', - default=False, -) -coreconfigitem( - b'rewrite', - b'backup-bundle', - default=True, - alias=[(b'ui', b'history-editing-backup')], -) -coreconfigitem( - b'rewrite', - b'update-timestamp', - default=False, -) -coreconfigitem( - b'rewrite', - b'empty-successor', - default=b'skip', - experimental=True, -) -# experimental as long as format.use-dirstate-v2 is. -coreconfigitem( - b'storage', - b'dirstate-v2.slow-path', - default=b"abort", - experimental=True, -) -coreconfigitem( - b'storage', - b'new-repo-backend', - default=b'revlogv1', - experimental=True, -) -coreconfigitem( - b'storage', - b'revlog.optimize-delta-parent-choice', - default=True, - alias=[(b'format', b'aggressivemergedeltas')], -) -coreconfigitem( - b'storage', - b'revlog.delta-parent-search.candidate-group-chunk-size', - default=20, -) -coreconfigitem( - b'storage', - b'revlog.issue6528.fix-incoming', - default=True, -) -# experimental as long as rust is experimental (or a C version is implemented) -coreconfigitem( - b'storage', - b'revlog.persistent-nodemap.mmap', - default=True, -) -# experimental as long as format.use-persistent-nodemap is. -coreconfigitem( - b'storage', - b'revlog.persistent-nodemap.slow-path', - default=b"abort", -) - -coreconfigitem( - b'storage', - b'revlog.reuse-external-delta', - default=True, -) -# This option is True unless `format.generaldelta` is set. 
-coreconfigitem( - b'storage', - b'revlog.reuse-external-delta-parent', - default=None, -) -coreconfigitem( - b'storage', - b'revlog.zlib.level', - default=None, -) -coreconfigitem( - b'storage', - b'revlog.zstd.level', - default=None, -) -coreconfigitem( - b'server', - b'bookmarks-pushkey-compat', - default=True, -) -coreconfigitem( - b'server', - b'bundle1', - default=True, -) -coreconfigitem( - b'server', - b'bundle1gd', - default=None, -) -coreconfigitem( - b'server', - b'bundle1.pull', - default=None, -) -coreconfigitem( - b'server', - b'bundle1gd.pull', - default=None, -) -coreconfigitem( - b'server', - b'bundle1.push', - default=None, -) -coreconfigitem( - b'server', - b'bundle1gd.push', - default=None, -) -coreconfigitem( - b'server', - b'bundle2.stream', - default=True, - alias=[(b'experimental', b'bundle2.stream')], -) -coreconfigitem( - b'server', - b'compressionengines', - default=list, -) -coreconfigitem( - b'server', - b'concurrent-push-mode', - default=b'check-related', -) -coreconfigitem( - b'server', - b'disablefullbundle', - default=False, -) -coreconfigitem( - b'server', - b'maxhttpheaderlen', - default=1024, -) -coreconfigitem( - b'server', - b'pullbundle', - default=True, -) -coreconfigitem( - b'server', - b'preferuncompressed', - default=False, -) -coreconfigitem( - b'server', - b'streamunbundle', - default=False, -) -coreconfigitem( - b'server', - b'uncompressed', - default=True, -) -coreconfigitem( - b'server', - b'uncompressedallowsecret', - default=False, -) -coreconfigitem( - b'server', - b'view', - default=b'served', -) -coreconfigitem( - b'server', - b'validate', - default=False, -) -coreconfigitem( - b'server', - b'zliblevel', - default=-1, -) -coreconfigitem( - b'server', - b'zstdlevel', - default=3, -) -coreconfigitem( - b'share', - b'pool', - default=None, -) -coreconfigitem( - b'share', - b'poolnaming', - default=b'identity', -) -coreconfigitem( - b'share', - b'safe-mismatch.source-not-safe', - default=b'abort', -) -coreconfigitem( - b'share', - b'safe-mismatch.source-safe', - default=b'abort', -) -coreconfigitem( - b'share', - b'safe-mismatch.source-not-safe.warn', - default=True, -) -coreconfigitem( - b'share', - b'safe-mismatch.source-safe.warn', - default=True, -) -coreconfigitem( - b'share', - b'safe-mismatch.source-not-safe:verbose-upgrade', - default=True, -) -coreconfigitem( - b'share', - b'safe-mismatch.source-safe:verbose-upgrade', - default=True, -) -coreconfigitem( - b'shelve', - b'maxbackups', - default=10, -) -coreconfigitem( - b'smtp', - b'host', - default=None, -) -coreconfigitem( - b'smtp', - b'local_hostname', - default=None, -) -coreconfigitem( - b'smtp', - b'password', - default=None, -) -coreconfigitem( - b'smtp', - b'port', - default=dynamicdefault, -) -coreconfigitem( - b'smtp', - b'tls', - default=b'none', -) -coreconfigitem( - b'smtp', - b'username', - default=None, -) -coreconfigitem( - b'sparse', - b'missingwarning', - default=True, - experimental=True, -) -coreconfigitem( - b'subrepos', - b'allowed', - default=dynamicdefault, # to make backporting simpler -) -coreconfigitem( - b'subrepos', - b'hg:allowed', - default=dynamicdefault, -) -coreconfigitem( - b'subrepos', - b'git:allowed', - default=dynamicdefault, -) -coreconfigitem( - b'subrepos', - b'svn:allowed', - default=dynamicdefault, -) -coreconfigitem( - b'templates', - b'.*', - default=None, - generic=True, -) -coreconfigitem( - b'templateconfig', - b'.*', - default=dynamicdefault, - generic=True, -) -coreconfigitem( - b'trusted', - b'groups', - default=list, -) 
-coreconfigitem( - b'trusted', - b'users', - default=list, -) -coreconfigitem( - b'ui', - b'_usedassubrepo', - default=False, -) -coreconfigitem( - b'ui', - b'allowemptycommit', - default=False, -) -coreconfigitem( - b'ui', - b'archivemeta', - default=True, -) -coreconfigitem( - b'ui', - b'askusername', - default=False, -) -coreconfigitem( - b'ui', - b'available-memory', - default=None, -) - -coreconfigitem( - b'ui', - b'clonebundlefallback', - default=False, -) -coreconfigitem( - b'ui', - b'clonebundleprefers', - default=list, -) -coreconfigitem( - b'ui', - b'clonebundles', - default=True, -) -coreconfigitem( - b'ui', - b'color', - default=b'auto', -) -coreconfigitem( - b'ui', - b'commitsubrepos', - default=False, -) -coreconfigitem( - b'ui', - b'debug', - default=False, -) -coreconfigitem( - b'ui', - b'debugger', - default=None, -) -coreconfigitem( - b'ui', - b'editor', - default=dynamicdefault, -) -coreconfigitem( - b'ui', - b'detailed-exit-code', - default=False, - experimental=True, -) -coreconfigitem( - b'ui', - b'fallbackencoding', - default=None, -) -coreconfigitem( - b'ui', - b'forcecwd', - default=None, -) -coreconfigitem( - b'ui', - b'forcemerge', - default=None, -) -coreconfigitem( - b'ui', - b'formatdebug', - default=False, -) -coreconfigitem( - b'ui', - b'formatjson', - default=False, -) -coreconfigitem( - b'ui', - b'formatted', - default=None, -) -coreconfigitem( - b'ui', - b'interactive', - default=None, -) -coreconfigitem( - b'ui', - b'interface', - default=None, -) -coreconfigitem( - b'ui', - b'interface.chunkselector', - default=None, -) -coreconfigitem( - b'ui', - b'large-file-limit', - default=10 * (2 ** 20), -) -coreconfigitem( - b'ui', - b'logblockedtimes', - default=False, -) -coreconfigitem( - b'ui', - b'merge', - default=None, -) -coreconfigitem( - b'ui', - b'mergemarkers', - default=b'basic', -) -coreconfigitem( - b'ui', - b'message-output', - default=b'stdio', -) -coreconfigitem( - b'ui', - b'nontty', - default=False, -) -coreconfigitem( - b'ui', - b'origbackuppath', - default=None, -) -coreconfigitem( - b'ui', - b'paginate', - default=True, -) -coreconfigitem( - b'ui', - b'patch', - default=None, -) -coreconfigitem( - b'ui', - b'portablefilenames', - default=b'warn', -) -coreconfigitem( - b'ui', - b'promptecho', - default=False, -) -coreconfigitem( - b'ui', - b'quiet', - default=False, -) -coreconfigitem( - b'ui', - b'quietbookmarkmove', - default=False, -) -coreconfigitem( - b'ui', - b'relative-paths', - default=b'legacy', -) -coreconfigitem( - b'ui', - b'remotecmd', - default=b'hg', -) -coreconfigitem( - b'ui', - b'report_untrusted', - default=True, -) -coreconfigitem( - b'ui', - b'rollback', - default=True, -) -coreconfigitem( - b'ui', - b'signal-safe-lock', - default=True, -) -coreconfigitem( - b'ui', - b'slash', - default=False, -) -coreconfigitem( - b'ui', - b'ssh', - default=b'ssh', -) -coreconfigitem( - b'ui', - b'ssherrorhint', - default=None, -) -coreconfigitem( - b'ui', - b'statuscopies', - default=False, -) -coreconfigitem( - b'ui', - b'strict', - default=False, -) -coreconfigitem( - b'ui', - b'style', - default=b'', -) -coreconfigitem( - b'ui', - b'supportcontact', - default=None, -) -coreconfigitem( - b'ui', - b'textwidth', - default=78, -) -coreconfigitem( - b'ui', - b'timeout', - default=b'600', -) -coreconfigitem( - b'ui', - b'timeout.warn', - default=0, -) -coreconfigitem( - b'ui', - b'timestamp-output', - default=False, -) -coreconfigitem( - b'ui', - b'traceback', - default=False, -) -coreconfigitem( - b'ui', - b'tweakdefaults', - 
default=False, -) -coreconfigitem(b'ui', b'username', alias=[(b'ui', b'user')]) -coreconfigitem( - b'ui', - b'verbose', - default=False, -) -coreconfigitem( - b'verify', - b'skipflags', - default=0, -) -coreconfigitem( - b'web', - b'allowbz2', - default=False, -) -coreconfigitem( - b'web', - b'allowgz', - default=False, -) -coreconfigitem( - b'web', - b'allow-pull', - alias=[(b'web', b'allowpull')], - default=True, -) -coreconfigitem( - b'web', - b'allow-push', - alias=[(b'web', b'allow_push')], - default=list, -) -coreconfigitem( - b'web', - b'allowzip', - default=False, -) -coreconfigitem( - b'web', - b'archivesubrepos', - default=False, -) -coreconfigitem( - b'web', - b'cache', - default=True, -) -coreconfigitem( - b'web', - b'comparisoncontext', - default=5, -) -coreconfigitem( - b'web', - b'contact', - default=None, -) -coreconfigitem( - b'web', - b'deny_push', - default=list, -) -coreconfigitem( - b'web', - b'guessmime', - default=False, -) -coreconfigitem( - b'web', - b'hidden', - default=False, -) -coreconfigitem( - b'web', - b'labels', - default=list, -) -coreconfigitem( - b'web', - b'logoimg', - default=b'hglogo.png', -) -coreconfigitem( - b'web', - b'logourl', - default=b'https://mercurial-scm.org/', -) -coreconfigitem( - b'web', - b'accesslog', - default=b'-', -) -coreconfigitem( - b'web', - b'address', - default=b'', -) -coreconfigitem( - b'web', - b'allow-archive', - alias=[(b'web', b'allow_archive')], - default=list, -) -coreconfigitem( - b'web', - b'allow_read', - default=list, -) -coreconfigitem( - b'web', - b'baseurl', - default=None, -) -coreconfigitem( - b'web', - b'cacerts', - default=None, -) -coreconfigitem( - b'web', - b'certificate', - default=None, -) -coreconfigitem( - b'web', - b'collapse', - default=False, -) -coreconfigitem( - b'web', - b'csp', - default=None, -) -coreconfigitem( - b'web', - b'deny_read', - default=list, -) -coreconfigitem( - b'web', - b'descend', - default=True, -) -coreconfigitem( - b'web', - b'description', - default=b"", -) -coreconfigitem( - b'web', - b'encoding', - default=lambda: encoding.encoding, -) -coreconfigitem( - b'web', - b'errorlog', - default=b'-', -) -coreconfigitem( - b'web', - b'ipv6', - default=False, -) -coreconfigitem( - b'web', - b'maxchanges', - default=10, -) -coreconfigitem( - b'web', - b'maxfiles', - default=10, -) -coreconfigitem( - b'web', - b'maxshortchanges', - default=60, -) -coreconfigitem( - b'web', - b'motd', - default=b'', -) -coreconfigitem( - b'web', - b'name', - default=dynamicdefault, -) -coreconfigitem( - b'web', - b'port', - default=8000, -) -coreconfigitem( - b'web', - b'prefix', - default=b'', -) -coreconfigitem( - b'web', - b'push_ssl', - default=True, -) -coreconfigitem( - b'web', - b'refreshinterval', - default=20, -) -coreconfigitem( - b'web', - b'server-header', - default=None, -) -coreconfigitem( - b'web', - b'static', - default=None, -) -coreconfigitem( - b'web', - b'staticurl', - default=None, -) -coreconfigitem( - b'web', - b'stripes', - default=1, -) -coreconfigitem( - b'web', - b'style', - default=b'paper', -) -coreconfigitem( - b'web', - b'templates', - default=None, -) -coreconfigitem( - b'web', - b'view', - default=b'served', - experimental=True, -) -coreconfigitem( - b'worker', - b'backgroundclose', - default=dynamicdefault, -) -# Windows defaults to a limit of 512 open files. A buffer of 128 -# should give us enough headway. 
-coreconfigitem(
-    b'worker',
-    b'backgroundclosemaxqueue',
-    default=384,
-)
-coreconfigitem(
-    b'worker',
-    b'backgroundcloseminfilecount',
-    default=2048,
-)
-coreconfigitem(
-    b'worker',
-    b'backgroundclosethreadcount',
-    default=4,
-)
-coreconfigitem(
-    b'worker',
-    b'enabled',
-    default=True,
-)
-coreconfigitem(
-    b'worker',
-    b'numcpus',
-    default=None,
-)
-
-# Rebase related configuration moved to core because other extension are doing
-# strange things. For example, shelve import the extensions to reuse some bit
-# without formally loading it.
-coreconfigitem(
-    b'commands',
-    b'rebase.requiredest',
-    default=False,
-)
-coreconfigitem(
-    b'experimental',
-    b'rebaseskipobsolete',
-    default=True,
-)
-coreconfigitem(
-    b'rebase',
-    b'singletransaction',
-    default=False,
-)
-coreconfigitem(
-    b'rebase',
-    b'experimental.inmemory',
-    default=False,
-)
-
-# This setting controls creation of a rebase_source extra field
-# during rebase. When False, no such field is created. This is
-# useful eg for incrementally converting changesets and then
-# rebasing them onto an existing repo.
-# WARNING: this is an advanced setting reserved for people who know
-# exactly what they are doing. Misuse of this setting can easily
-# result in obsmarker cycles and a vivid headache.
-coreconfigitem(
-    b'rebase',
-    b'store-source',
-    default=True,
-    experimental=True,
-)
+import_configitems_from_file()
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/configitems.toml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/configitems.toml Wed Oct 11 02:02:46 2023 +0200
@@ -0,0 +1,2845 @@
+# configitems.toml - centralized declaration of configuration options
+#
+# This file contains declarations of the core Mercurial configuration options.
+#
+# # Structure
+#
+# items: array of config items
+# templates: mapping of template name to template declaration
+# template-applications: array of template applications
+#
+# # Elements
+#
+# ## Item
+#
+# Declares a core Mercurial option.
+#
+# - section: string (required)
+# - name: string (required)
+# - default-type: string, changes how `default` is read
+# - default: any
+# - generic: boolean
+# - priority: integer, only if `generic` is true
+# - alias: list of 2-tuples of strings
+# - experimental: boolean
+# - documentation: string
+# - in_core_extension: string
+#
+# ## Template
+#
+# Declares a group of options to be re-used for multiple sections.
+#
+# - all the same fields as `Item`, except `section` and `name`
+# - `suffix` (string, required)
+#
+# ## Template applications
+#
+# Uses a `Template` to instantiate its options in a given section.
+#
+# - template: string (required, must match a `Template` name)
+# - section: string (required)
+
+[[items]]
+section = "alias"
+name = ".*"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "auth"
+name = "cookiefile"
+
+# bookmarks.pushing: internal hack for discovery
+[[items]]
+section = "bookmarks"
+name = "pushing"
+default-type = "list_type"
+
+# bundle.mainreporoot: internal hack for bundlerepo
+[[items]]
+section = "bundle"
+name = "mainreporoot"
+default = ""
+
+[[items]]
+section = "censor"
+name = "policy"
+default = "abort"
+experimental = true
+
+[[items]]
+section = "chgserver"
+name = "idletimeout"
+default = 3600
+
+[[items]]
+section = "chgserver"
+name = "skiphash"
+default = false
+
+[[items]]
+section = "cmdserver"
+name = "log"
+
+[[items]]
+section = "cmdserver"
+name = "max-log-files"
+default = 7
+
+[[items]]
+section = "cmdserver"
+name = "max-log-size"
+default = "1 MB"
+
+[[items]]
+section = "cmdserver"
+name = "max-repo-cache"
+default = 0
+experimental = true
+
+[[items]]
+section = "cmdserver"
+name = "message-encodings"
+default-type = "list_type"
+
+[[items]]
+section = "cmdserver"
+name = "shutdown-on-interrupt"
+default = true
+
+[[items]]
+section = "cmdserver"
+name = "track-log"
+default-type = "lambda"
+default = [ "chgserver", "cmdserver", "repocache",]
+
+[[items]]
+section = "color"
+name = ".*"
+generic = true
+
+[[items]]
+section = "color"
+name = "mode"
+default = "auto"
+
+[[items]]
+section = "color"
+name = "pagermode"
+default-type = "dynamic"
+
+[[items]]
+section = "command-templates"
+name = "graphnode"
+alias = [["ui", "graphnodetemplate"]]
+
+[[items]]
+section = "command-templates"
+name = "log"
+alias = [["ui", "logtemplate"]]
+
+[[items]]
+section = "command-templates"
+name = "mergemarker"
+default = '{node|short} {ifeq(tags, "tip", "", ifeq(tags, "", "", "{tags} "))}{if(bookmarks, "{bookmarks} ")}{ifeq(branch, "default", "", "{branch} ")}- {author|user}: {desc|firstline}'
+alias = [["ui", "mergemarkertemplate"]]
+
+[[items]]
+section = "command-templates"
+name = "oneline-summary"
+
+[[items]]
+section = "command-templates"
+name = "oneline-summary.*"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "command-templates"
+name = "pre-merge-tool-output"
+alias = [["ui", "pre-merge-tool-output-template"]]
+
+[[items]]
+section = "commands"
+name = "commit.post-status"
+default = false
+
+[[items]]
+section = "commands"
+name = "grep.all-files"
+default = false
+experimental = true
+
+[[items]]
+section = "commands"
+name = "merge.require-rev"
+default = false
+
+[[items]]
+section = "commands"
+name = "push.require-revs"
+default = false
+
+# Rebase related configuration moved to core because other extensions are
+# doing strange things. For example, shelve imports the extension to reuse
+# some bits without formally loading it.
+[[items]] +section = "commands" +name = "rebase.requiredest" +default = false + +[[items]] +section = "commands" +name = "resolve.confirm" +default = false + +[[items]] +section = "commands" +name = "resolve.explicit-re-merge" +default = false + +[[items]] +section = "commands" +name = "resolve.mark-check" +default = "none" + +[[items]] +section = "commands" +name = "show.aliasprefix" +default-type = "list_type" + +[[items]] +section = "commands" +name = "status.relative" +default = false + +[[items]] +section = "commands" +name = "status.skipstates" +default = [] +experimental = true + +[[items]] +section = "commands" +name = "status.terse" +default = "" + +[[items]] +section = "commands" +name = "status.verbose" +default = false + +[[items]] +section = "commands" +name = "update.check" + +[[items]] +section = "commands" +name = "update.requiredest" +default = false + +[[items]] +section = "committemplate" +name = ".*" +generic = true + +[[items]] +section = "convert" +name = "bzr.saverev" +default = true + +[[items]] +section = "convert" +name = "cvsps.cache" +default = true + +[[items]] +section = "convert" +name = "cvsps.fuzz" +default = 60 + +[[items]] +section = "convert" +name = "cvsps.logencoding" + +[[items]] +section = "convert" +name = "cvsps.mergefrom" + +[[items]] +section = "convert" +name = "cvsps.mergeto" + +[[items]] +section = "convert" +name = "git.committeractions" +default-type = "lambda" +default = [ "messagedifferent",] + +[[items]] +section = "convert" +name = "git.extrakeys" +default-type = "list_type" + +[[items]] +section = "convert" +name = "git.findcopiesharder" +default = false + +[[items]] +section = "convert" +name = "git.remoteprefix" +default = "remote" + +[[items]] +section = "convert" +name = "git.renamelimit" +default = 400 + +[[items]] +section = "convert" +name = "git.saverev" +default = true + +[[items]] +section = "convert" +name = "git.similarity" +default = 50 + +[[items]] +section = "convert" +name = "git.skipsubmodules" +default = false + +[[items]] +section = "convert" +name = "hg.clonebranches" +default = false + +[[items]] +section = "convert" +name = "hg.ignoreerrors" +default = false + +[[items]] +section = "convert" +name = "hg.preserve-hash" +default = false + +[[items]] +section = "convert" +name = "hg.revs" + +[[items]] +section = "convert" +name = "hg.saverev" +default = false + +[[items]] +section = "convert" +name = "hg.sourcename" + +[[items]] +section = "convert" +name = "hg.startrev" + +[[items]] +section = "convert" +name = "hg.tagsbranch" +default = "default" + +[[items]] +section = "convert" +name = "hg.usebranchnames" +default = true + +[[items]] +section = "convert" +name = "ignoreancestorcheck" +default = false +experimental = true + +[[items]] +section = "convert" +name = "localtimezone" +default = false + +[[items]] +section = "convert" +name = "p4.encoding" +default-type = "dynamic" + +[[items]] +section = "convert" +name = "p4.startrev" +default = 0 + +[[items]] +section = "convert" +name = "skiptags" +default = false + +[[items]] +section = "convert" +name = "svn.branches" + +[[items]] +section = "convert" +name = "svn.dangerous-set-commit-dates" +default = false + +[[items]] +section = "convert" +name = "svn.debugsvnlog" +default = true + +[[items]] +section = "convert" +name = "svn.startrev" +default = 0 + +[[items]] +section = "convert" +name = "svn.tags" + +[[items]] +section = "convert" +name = "svn.trunk" + +[[items]] +section = "debug" +name = "bundling-stats" +default = false +documentation = "Display extra 
information about the bundling process." + +[[items]] +section = "debug" +name = "dirstate.delaywrite" +default = 0 + +[[items]] +section = "debug" +name = "revlog.debug-delta" +default = false + +[[items]] +section = "debug" +name = "revlog.verifyposition.changelog" +default = "" + +[[items]] +section = "debug" +name = "unbundling-stats" +default = false +documentation = "Display extra information about the unbundling process." + +[[items]] +section = "defaults" +name = ".*" +generic = true + +[[items]] +section = "devel" +name = "all-warnings" +default = false + +[[items]] +section = "devel" +name = "bundle.delta" +default = "" + +[[items]] +section = "devel" +name = "bundle2.debug" +default = false + +[[items]] +section = "devel" +name = "cache-vfs" + +[[items]] +section = "devel" +name = "check-locks" +default = false + +[[items]] +section = "devel" +name = "check-relroot" +default = false + +[[items]] +section = "devel" +name = "copy-tracing.multi-thread" +default = true + +# Track copy information for all files, not just "added" ones (very slow) +[[items]] +section = "devel" +name = "copy-tracing.trace-all-files" +default = false + +[[items]] +section = "devel" +name = "debug.abort-update" +default = false +documentation = """If true, then any merge with the working copy, \ +e.g. [hg update], will be aborted after figuring out what needs to be done, \ +but before spawning the parallel worker.""" + +[[items]] +section = "devel" +name = "debug.copies" +default = false + +[[items]] +section = "devel" +name = "debug.extensions" +default = false + +[[items]] +section = "devel" +name = "debug.peer-request" +default = false + +[[items]] +section = "devel" +name = "debug.repo-filters" +default = false + +[[items]] +section = "devel" +name = "default-date" + +[[items]] +section = "devel" +name = "deprec-warn" +default = false + +# possible values: +# - auto (the default) +# - force-append +# - force-new +[[items]] +section = "devel" +name = "dirstate.v2.data_update_mode" +default = "auto" + +[[items]] +section = "devel" +name = "disableloaddefaultcerts" +default = false + +[[items]] +section = "devel" +name = "discovery.exchange-heads" +default = true +documentation = """If false, the discovery will not start with remote \ +head fetching and local head querying.""" + +[[items]] +section = "devel" +name = "discovery.grow-sample" +default = true +documentation = """If false, the sample size used in set discovery \ +will not be increased through the process.""" + +[[items]] +section = "devel" +name = "discovery.grow-sample.dynamic" +default = true +documentation = """If true, the default, the sample size is adapted to the shape \ +of the undecided set. It is set to the max of: +`, len(roots(undecided)), len(heads(undecided))`""" + +[[items]] +section = "devel" +name = "discovery.grow-sample.rate" +default = 1.05 +documentation = "Controls the rate at which the sample grows." + +[[items]] +section = "devel" +name = "discovery.randomize" +default = true +documentation = """If false, random samplings during discovery are deterministic. \ +It is meant for integration tests.""" + +[[items]] +section = "devel" +name = "discovery.sample-size" +default = 200 +documentation = "Controls the initial size of the discovery sample." + +[[items]] +section = "devel" +name = "discovery.sample-size.initial" +default = 100 +documentation = "Controls the initial size of the discovery for initial change." 
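The header of this file describes `templates` and `template-applications`, but neither happens to appear in this excerpt. The following hypothetical snippet illustrates the syntax those two elements imply; the names are invented and the exact shape is inferred from the header's field list, so actual uses elsewhere in the file may differ:

    # A template declares a group of options once; each entry carries the
    # same fields as an item, minus `section`/`name`, plus a required suffix.
    [[templates.hypothetical-timeouts]]
    suffix = "connect-timeout"
    default = 10

    [[templates.hypothetical-timeouts]]
    suffix = "read-timeout"
    default = 60

    # An application stamps the whole group into one section, declaring
    # `http.connect-timeout` and `http.read-timeout` in a single step.
    [[template-applications]]
    template = "hypothetical-timeouts"
    section = "http"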
+ +[[items]] +section = "devel" +name = "legacy.exchange" +default-type = "list_type" + +[[items]] +section = "devel" +name = "persistent-nodemap" +default = false +documentation = """When true, revlogs use a special reference version of the \ +nodemap, that is not performant but is "known" to behave properly.""" + +[[items]] +section = "devel" +name = "server-insecure-exact-protocol" +default = "" + +[[items]] +section = "devel" +name = "servercafile" +default = "" + +[[items]] +section = "devel" +name = "serverexactprotocol" +default = "" + +[[items]] +section = "devel" +name = "serverrequirecert" +default = false + +[[items]] +section = "devel" +name = "strip-obsmarkers" +default = true + +[[items]] +section = 'devel' +name = 'sync.status.pre-dirstate-write-file' +documentation = """ +Makes the status algorithm wait for the existence of this file \ +(or until a timeout of `devel.sync.status.pre-dirstate-write-file-timeout` \ +seconds) before taking the lock and writing the dirstate. \ +Status signals that it's ready to wait by creating a file \ +with the same name + `.waiting`. \ +Useful when testing race conditions.""" + +[[items]] +section = 'devel' +name = 'sync.status.pre-dirstate-write-file-timeout' +default=2 + +[[items]] +section = 'devel' +name = 'sync.dirstate.post-docket-read-file' + +[[items]] +section = 'devel' +name = 'sync.dirstate.post-docket-read-file-timeout' +default=2 + +[[items]] +section = 'devel' +name = 'sync.dirstate.pre-read-file' + +[[items]] +section = 'devel' +name = 'sync.dirstate.pre-read-file-timeout' +default=2 + +[[items]] +section = "devel" +name = "user.obsmarker" + +[[items]] +section = "devel" +name = "warn-config" + +[[items]] +section = "devel" +name = "warn-config-default" + +[[items]] +section = "devel" +name = "warn-config-unknown" + +[[items]] +section = "devel" +name = "warn-empty-changegroup" +default = false + +[[items]] +section = "diff" +name = "merge" +default = false +experimental = true + +[[items]] +section = "email" +name = "bcc" + +[[items]] +section = "email" +name = "cc" + +[[items]] +section = "email" +name = "charsets" +default-type = "list_type" + +[[items]] +section = "email" +name = "from" + +[[items]] +section = "email" +name = "method" +default = "smtp" + +[[items]] +section = "email" +name = "reply-to" + +[[items]] +section = "email" +name = "to" + +[[items]] +section = "experimental" +name = "archivemetatemplate" +default-type = "dynamic" + +[[items]] +section = "experimental" +name = "auto-publish" +default = "publish" + +[[items]] +section = "experimental" +name = "bundle-phases" +default = false + +[[items]] +section = "experimental" +name = "bundle2-advertise" +default = true + +[[items]] +section = "experimental" +name = "bundle2-output-capture" +default = false + +[[items]] +section = "experimental" +name = "bundle2.pushback" +default = false + +[[items]] +section = "experimental" +name = "bundle2lazylocking" +default = false + +[[items]] +section = "experimental" +name = "bundlecomplevel" + +[[items]] +section = "experimental" +name = "bundlecomplevel.bzip2" + +[[items]] +section = "experimental" +name = "bundlecomplevel.gzip" + +[[items]] +section = "experimental" +name = "bundlecomplevel.none" + +[[items]] +section = "experimental" +name = "bundlecomplevel.zstd" + +[[items]] +section = "experimental" +name = "bundlecompthreads" + +[[items]] +section = "experimental" +name = "bundlecompthreads.bzip2" + +[[items]] +section = "experimental" +name = "bundlecompthreads.gzip" + +[[items]] +section = "experimental" +name 
= "bundlecompthreads.none" + +[[items]] +section = "experimental" +name = "bundlecompthreads.zstd" + +[[items]] +section = "experimental" +name = "changegroup3" +default = true + +[[items]] +section = "experimental" +name = "changegroup4" +default = false + +# might remove rank configuration once the computation has no impact +[[items]] +section = "experimental" +name = "changelog-v2.compute-rank" +default = true + +[[items]] +section = "experimental" +name = "cleanup-as-archived" +default = false + +[[items]] +section = "experimental" +name = "clientcompressionengines" +default-type = "list_type" + +[[items]] +section = "experimental" +name = "copies.read-from" +default = "filelog-only" + +[[items]] +section = "experimental" +name = "copies.write-to" +default = "filelog-only" + +[[items]] +section = "experimental" +name = "copytrace" +default = "on" + +[[items]] +section = "experimental" +name = "copytrace.movecandidateslimit" +default = 100 + +[[items]] +section = "experimental" +name = "copytrace.sourcecommitlimit" +default = 100 + +[[items]] +section = "experimental" +name = "crecordtest" + +[[items]] +section = "experimental" +name = "directaccess" +default = false + +[[items]] +section = "experimental" +name = "directaccess.revnums" +default = false + +[[items]] +section = "experimental" +name = "editortmpinhg" +default = false + +[[items]] +section = "experimental" +name = "evolution" +default-type = "list_type" + +[[items]] +section = "experimental" +name = "evolution.allowdivergence" +default = false +alias = [["experimental", "allowdivergence"]] + +[[items]] +section = "experimental" +name = "evolution.allowunstable" + +[[items]] +section = "experimental" +name = "evolution.bundle-obsmarker" +default = false + +[[items]] +section = "experimental" +name = "evolution.bundle-obsmarker:mandatory" +default = true + +[[items]] +section = "experimental" +name = "evolution.createmarkers" + +[[items]] +section = "experimental" +name = "evolution.effect-flags" +default = true +alias = [["experimental", "effect-flags"]] + +[[items]] +section = "experimental" +name = "evolution.exchange" + +[[items]] +section = "experimental" +name = "evolution.report-instabilities" +default = true + +[[items]] +section = "experimental" +name = "evolution.track-operation" +default = true + +[[items]] +section = "experimental" +name = "exportableenviron" +default-type = "list_type" + +[[items]] +section = "experimental" +name = "extendedheader.index" + +[[items]] +section = "experimental" +name = "extendedheader.similarity" +default = false + +[[items]] +section = "experimental" +name = "extra-filter-revs" +documentation = """Repo-level config to prevent a revset from being visible. +The target use case is to use `share` to expose different subsets of the same \ +repository, especially server side. 
See also `server.view`."""
+
+[[items]]
+section = "experimental"
+name = "graphshorten"
+default = false
+
+[[items]]
+section = "experimental"
+name = "graphstyle.grandparent"
+default-type = "dynamic"
+
+[[items]]
+section = "experimental"
+name = "graphstyle.missing"
+default-type = "dynamic"
+
+[[items]]
+section = "experimental"
+name = "graphstyle.parent"
+default-type = "dynamic"
+
+[[items]]
+section = "experimental"
+name = "hook-track-tags"
+default = false
+
+[[items]]
+section = "experimental"
+name = "httppostargs"
+default = false
+
+[[items]]
+section = "experimental"
+name = "log.topo"
+default = false
+
+[[items]]
+section = "experimental"
+name = "maxdeltachainspan"
+default = -1
+
+[[items]]
+section = "experimental"
+name = "merge-track-salvaged"
+default = false
+documentation = """Tracks files which were undeleted (merge might delete them \
+but we explicitly kept/undeleted them) and creates new filenodes for them."""
+
+[[items]]
+section = "experimental"
+name = "merge.checkpathconflicts"
+default = false
+
+[[items]]
+section = "experimental"
+name = "mmapindexthreshold"
+
+[[items]]
+section = "experimental"
+name = "narrow"
+default = false
+
+[[items]]
+section = "experimental"
+name = "nointerrupt"
+default = false
+
+[[items]]
+section = "experimental"
+name = "nointerrupt-interactiveonly"
+default = true
+
+[[items]]
+section = "experimental"
+name = "nonnormalparanoidcheck"
+default = false
+
+[[items]]
+section = "experimental"
+name = "obsmarkers-exchange-debug"
+default = false
+
+[[items]]
+section = "experimental"
+name = "rebaseskipobsolete"
+default = true
+
+[[items]]
+section = "experimental"
+name = "remotenames"
+default = false
+
+[[items]]
+section = "experimental"
+name = "removeemptydirs"
+default = true
+
+[[items]]
+section = "experimental"
+name = "revert.interactive.select-to-keep"
+default = false
+
+[[items]]
+section = "experimental"
+name = "revisions.disambiguatewithin"
+
+[[items]]
+section = "experimental"
+name = "revisions.prefixhexnode"
+default = false
+
+# "out of experimental" todo list.
+#
+# * include management of a persistent nodemap in the main docket
+# * enforce a "no-truncate" policy for mmap safety
+#   - for censoring operation
+#   - for stripping operation
+#   - for rollback operation
+# * proper streaming (race free) of the docket file
+# * track garbage data to eventually allow rewriting -existing- sidedata.
+# * Exchange-wise, we will also need to do something more efficient than
+#   keeping references to the affected revlogs, especially memory-wise when
+#   rewriting sidedata.
+# * introduce a proper solution to reduce the number of filelog related files.
+# * use caching for reading sidedata (similar to what we do for data).
+# * no longer set offset=0 if sidedata_size=0 (simplify cutoff computation).
+# * Improvement to consider
+#   - avoid compression header in chunk using the default compression?
+#   - forbid "inline" compression mode entirely?
+#   - split the data offset and flag field (the 2 bytes saved are mostly trouble)
+#   - keep track of uncompressed -chunk- size (to preallocate memory better)
+#   - keep track of chain base or size (probably not that useful anymore)
+[[items]]
+section = "experimental"
+name = "revlogv2"
+
+[[items]]
+section = "experimental"
+name = "rust.index"
+default = false
+
+[[items]]
+section = "experimental"
+name = "server.allow-hidden-access"
+default-type = "list_type"
+
+[[items]]
+section = "experimental"
+name = "server.filesdata.recommended-batch-size"
+default = 50000
+
+[[items]]
+section = "experimental"
+name = "server.manifestdata.recommended-batch-size"
+default = 100000
+
+[[items]]
+section = "experimental"
+name = "server.stream-narrow-clones"
+default = false
+
+[[items]]
+section = "experimental"
+name = "single-head-per-branch"
+default = false
+
+[[items]]
+section = "experimental"
+name = "single-head-per-branch:account-closed-heads"
+default = false
+
+[[items]]
+section = "experimental"
+name = "single-head-per-branch:public-changes-only"
+default = false
+
+[[items]]
+section = "experimental"
+name = "sparse-read"
+default = false
+
+[[items]]
+section = "experimental"
+name = "sparse-read.density-threshold"
+default = 0.5
+
+[[items]]
+section = "experimental"
+name = "sparse-read.min-gap-size"
+default = "65K"
+
+[[items]]
+section = "experimental"
+name = "stream-v3"
+default = false
+
+[[items]]
+section = "experimental"
+name = "treemanifest"
+default = false
+
+[[items]]
+section = "experimental"
+name = "update.atomic-file"
+default = false
+
+[[items]]
+section = "experimental"
+name = "web.full-garbage-collection-rate"
+default = 1 # still forcing a full collection on each request
+
+[[items]]
+section = "experimental"
+name = "worker.repository-upgrade"
+default = false
+
+[[items]]
+section = "experimental"
+name = "worker.wdir-get-thread-safe"
+default = false
+
+[[items]]
+section = "experimental"
+name = "xdiff"
+default = false
+
+[[items]]
+section = "extdata"
+name = ".*"
+generic = true
+
+[[items]]
+section = "extensions"
+name = "[^:]*"
+generic = true
+
+[[items]]
+section = "extensions"
+name = "[^:]*:required"
+default = false
+generic = true
+
+[[items]]
+section = "format"
+name = "bookmarks-in-store"
+default = false
+
+[[items]]
+section = "format"
+name = "chunkcachesize"
+experimental = true
+
+[[items]]
+section = "format"
+name = "dotencode"
+default = true
+
+# The interaction between the archived phase and obsolescence markers needs to
+# be sorted out before wider usage of this is to be considered.
+#
+# At the time this message is written, behavior when archiving obsolete
+# changesets differs significantly from stripping. As part of stripping, we
+# also remove the obsolescence markers associated with the stripped
+# changesets, revealing the predecessor changesets when applicable. When
+# archiving, we don't touch the obsolescence markers, keeping everything
+# hidden. This can result in quite confusing situations for people who
+# exchange draft changesets while also using the archived phase, as some
+# markers needed by others may be skipped during exchange.
+[[items]] +section = "format" +name = "exp-archived-phase" +default = false +experimental = true + +# Experimental TODOs: +# +# * Same as for revlogv2 (but for the reduction of the number of files) +# * Actually computing the rank of changesets +# * Improvement to investigate +# - storing .hgtags fnode +# - storing branch related identifier +[[items]] +section = "format" +name = "exp-use-changelog-v2" +experimental = true + +[[items]] +section = "format" +name = "exp-use-copies-side-data-changeset" +default = false +experimental = true + +[[items]] +section = "format" +name = "generaldelta" +default = false +experimental = true + +[[items]] +section = "format" +name = "manifestcachesize" +experimental = true + +[[items]] +section = "format" +name = "maxchainlen" +default-type = "dynamic" +experimental = true + +[[items]] +section = "format" +name = "obsstore-version" + +[[items]] +section = "format" +name = "revlog-compression" +default-type = "lambda" +alias = [["experimental", "format.compression"]] +default = [ "zstd", "zlib",] + +[[items]] +section = "format" +name = "sparse-revlog" +default = true + +[[items]] +section = "format" +name = "use-dirstate-tracked-hint" +default = false +experimental = true + +[[items]] +section = "format" +name = "use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories" +default = false +experimental = true + +[[items]] +section = "format" +name = "use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet" +default = false +experimental = true + +[[items]] +section = "format" +name = "use-dirstate-tracked-hint.version" +default = 1 +experimental = true + +[[items]] +section = "format" +name = "use-dirstate-v2" +default = false +alias = [["format", "exp-rc-dirstate-v2"]] +experimental = true +documentation = """Enables dirstate-v2 format *when creating a new repository*. +Which format to use for existing repos is controlled by `.hg/requires`.""" + +[[items]] +section = "format" +name = "use-dirstate-v2.automatic-upgrade-of-mismatching-repositories" +default = false +experimental = true + +[[items]] +section = "format" +name = "use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet" +default = false +experimental = true + +# Having this on by default means we are confident about the scaling of phases. +# This is not garanteed to be the case at the time this message is written. 
+[[items]] +section = "format" +name = "use-internal-phase" +default = false +experimental = true + +[[items]] +section = "format" +name = "use-persistent-nodemap" +default-type = "dynamic" + +[[items]] +section = "format" +name = "use-share-safe" +default = true + +[[items]] +section = "format" +name = "use-share-safe.automatic-upgrade-of-mismatching-repositories" +default = false +experimental = true + +[[items]] +section = "format" +name = "use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet" +default = false +experimental = true + +[[items]] +section = "format" +name = "usefncache" +default = true + +[[items]] +section = "format" +name = "usegeneraldelta" +default = true + +[[items]] +section = "format" +name = "usestore" +default = true + +[[items]] +section = "fsmonitor" +name = "warn_update_file_count" +default = 50000 + +[[items]] +section = "fsmonitor" +name = "warn_update_file_count_rust" +default = 400000 + +[[items]] +section = "fsmonitor" +name = "warn_when_unused" +default = true + +[[items]] +section = "help" +name = 'hidden-command\..*' +default = false +generic = true + +[[items]] +section = "help" +name = 'hidden-topic\..*' +default = false +generic = true + +[[items]] +section = "hgweb-paths" +name = ".*" +default-type = "list_type" +generic = true + +[[items]] +section = "hooks" +name = ".*:run-with-plain" +default = true +generic = true + +[[items]] +section = "hooks" +name = "[^:]*" +default-type = "dynamic" +generic = true + +[[items]] +section = "hostfingerprints" +name = ".*" +default-type = "list_type" +generic = true + +[[items]] +section = "hostsecurity" +name = ".*:ciphers$" +default-type = "dynamic" +generic = true + +[[items]] +section = "hostsecurity" +name = ".*:fingerprints$" +default-type = "list_type" +generic = true + +[[items]] +section = "hostsecurity" +name = ".*:minimumprotocol$" +default-type = "dynamic" +generic = true + +[[items]] +section = "hostsecurity" +name = ".*:verifycertsfile$" +generic = true + +[[items]] +section = "hostsecurity" +name = "ciphers" + +[[items]] +section = "hostsecurity" +name = "minimumprotocol" +default-type = "dynamic" + +[[items]] +section = "http" +name = "timeout" + +[[items]] +section = "http_proxy" +name = "always" +default = false + +[[items]] +section = "http_proxy" +name = "host" + +[[items]] +section = "http_proxy" +name = "no" +default-type = "list_type" + +[[items]] +section = "http_proxy" +name = "passwd" + +[[items]] +section = "http_proxy" +name = "user" + +[[items]] +section = "logtoprocess" +name = "command" + +[[items]] +section = "logtoprocess" +name = "commandexception" + +[[items]] +section = "logtoprocess" +name = "commandfinish" + +[[items]] +section = "logtoprocess" +name = "develwarn" + +[[items]] +section = "logtoprocess" +name = "uiblocked" + +[[items]] +section = "merge" +name = "checkignored" +default = "abort" + +[[items]] +section = "merge" +name = "checkunknown" +default = "abort" + +[[items]] +section = "merge" +name = "disable-partial-tools" +default = false +experimental = true + +[[items]] +section = "merge" +name = "followcopies" +default = true + +[[items]] +section = "merge" +name = "on-failure" +default = "continue" + +[[items]] +section = "merge" +name = "preferancestor" +default-type = "lambda" +default = ["*"] +experimental = true + +[[items]] +section = "merge" +name = "strict-capability-check" +default = false + +[[items]] +section = "merge-tools" +name = ".*" +generic = true + +[[items]] +section = "merge-tools" +name = '.*\.args$' +default = "$local $base 
$other" +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.binary$' +default = false +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.check$' +default-type = "list_type" +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.checkchanged$' +default = false +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.executable$' +default-type = "dynamic" +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.fixeol$' +default = false +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.gui$' +default = false +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.mergemarkers$' +default = "basic" +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.mergemarkertemplate$' # take from command-templates.mergemarker +default-type = "dynamic" +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.premerge$' +default-type = "dynamic" +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.priority$' +default = 0 +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.regappend$' +default = "" +generic = true +priority = -1 + +[[items]] +section = "merge-tools" +name = '.*\.symlink$' +default = false +generic = true +priority = -1 + +[[items]] +section = "pager" +name = "attend-.*" +default-type = "dynamic" +generic = true + +[[items]] +section = "pager" +name = "ignore" +default-type = "list_type" + +[[items]] +section = "pager" +name = "pager" +default-type = "dynamic" + +[[items]] +section = "partial-merge-tools" +name = ".*" +generic = true +experimental = true + +[[items]] +section = "partial-merge-tools" +name = '.*\.args' +default = "$local $base $other" +generic = true +priority = -1 +experimental = true + +[[items]] +section = "partial-merge-tools" +name = '.*\.disable' +default = false +generic = true +priority = -1 +experimental = true + +[[items]] +section = "partial-merge-tools" +name = '.*\.executable$' +default-type = "dynamic" +generic = true +priority = -1 +experimental = true + +[[items]] +section = "partial-merge-tools" +name = '.*\.order' +default = 0 +generic = true +priority = -1 +experimental = true + +[[items]] +section = "partial-merge-tools" +name = '.*\.patterns' +default-type = "dynamic" +generic = true +priority = -1 +experimental = true + +[[items]] +section = "patch" +name = "eol" +default = "strict" + +[[items]] +section = "patch" +name = "fuzz" +default = 2 + +[[items]] +section = "paths" +name = "[^:]*" +generic = true + +[[items]] +section = "paths" +name = ".*:bookmarks.mode" +default = "default" +generic = true + +[[items]] +section = "paths" +name = ".*:multi-urls" +default = false +generic = true + +[[items]] +section = "paths" +name = ".*:pulled-delta-reuse-policy" +generic = true + +[[items]] +section = "paths" +name = ".*:pushrev" +generic = true + +[[items]] +section = "paths" +name = ".*:pushurl" +generic = true + +[[items]] +section = "paths" +name = "default" + +[[items]] +section = "paths" +name = "default-push" + +[[items]] +section = "phases" +name = "checksubrepos" +default = "follow" + +[[items]] +section = "phases" +name = "new-commit" +default = "draft" + +[[items]] +section = "phases" +name = "publish" +default = true + +[[items]] +section = "profiling" +name = "enabled" +default = false + +[[items]] +section = "profiling" +name = "format" +default 
= "text" + +[[items]] +section = "profiling" +name = "freq" +default = 1000 + +[[items]] +section = "profiling" +name = "limit" +default = 30 + +[[items]] +section = "profiling" +name = "nested" +default = 0 + +[[items]] +section = "profiling" +name = "output" + +[[items]] +section = "profiling" +name = "showmax" +default = 0.999 + +[[items]] +section = "profiling" +name = "showmin" +default-type = "dynamic" + +[[items]] +section = "profiling" +name = "showtime" +default = true + +[[items]] +section = "profiling" +name = "sort" +default = "inlinetime" + +[[items]] +section = "profiling" +name = "statformat" +default = "hotpath" + +[[items]] +section = "profiling" +name = "time-track" +default-type = "dynamic" + +[[items]] +section = "profiling" +name = "type" +default = "stat" + +[[items]] +section = "progress" +name = "assume-tty" +default = false + +[[items]] +section = "progress" +name = "changedelay" +default = 1 + +[[items]] +section = "progress" +name = "clear-complete" +default = true + +[[items]] +section = "progress" +name = "debug" +default = false + +[[items]] +section = "progress" +name = "delay" +default = 3 + +[[items]] +section = "progress" +name = "disable" +default = false + +[[items]] +section = "progress" +name = "estimateinterval" +default = 60.0 + +[[items]] +section = "progress" +name = "format" +default-type = "lambda" +default = [ "topic", "bar", "number", "estimate",] + +[[items]] +section = "progress" +name = "refresh" +default = 0.1 + +[[items]] +section = "progress" +name = "width" +default-type = "dynamic" + +[[items]] +section = "pull" +name = "confirm" +default = false + +[[items]] +section = "push" +name = "pushvars.server" +default = false + +[[items]] +section = "rebase" +name = "experimental.inmemory" +default = false + +[[items]] +section = "rebase" +name = "singletransaction" +default = false + +[[items]] +section = "rebase" +name = "store-source" +default = true +experimental = true +documentation = """Controls creation of a `rebase_source` extra field during rebase. +When false, no such field is created. This is useful e.g. for incrementally \ +converting changesets and then rebasing them onto an existing repo. +WARNING: this is an advanced setting reserved for people who know \ +exactly what they are doing. Misuse of this setting can easily \ +result in obsmarker cycles and a vivid headache.""" + +[[items]] +section = "rewrite" +name = "backup-bundle" +default = true +alias = [["ui", "history-editing-backup"]] + +[[items]] +section = "rewrite" +name = "empty-successor" +default = "skip" +experimental = true + +[[items]] +section = "rewrite" +name = "update-timestamp" +default = false + +[[items]] +section = "rhg" +name = "cat" +default = true +experimental = true +documentation = """rhg cat has some quirks that need to be ironed out. 
\ +In particular, the `-r` argument accepts a partial hash, but does not \ +correctly resolve `abcdef` as a potential bookmark, tag or branch name.""" + +[[items]] +section = "rhg" +name = "fallback-executable" +experimental = true + +[[items]] +section = "rhg" +name = "fallback-immediately" +default = false +experimental = true + +[[items]] +section = "rhg" +name = "ignored-extensions" +default-type = "list_type" +experimental = true + +[[items]] +section = "rhg" +name = "on-unsupported" +default = "abort" +experimental = true + +[[items]] +section = "server" +name = "bookmarks-pushkey-compat" +default = true + +[[items]] +section = "server" +name = "bundle1" +default = true + +[[items]] +section = "server" +name = "bundle1.pull" + +[[items]] +section = "server" +name = "bundle1.push" + +[[items]] +section = "server" +name = "bundle1gd" + +[[items]] +section = "server" +name = "bundle1gd.pull" + +[[items]] +section = "server" +name = "bundle1gd.push" + +[[items]] +section = "server" +name = "bundle2.stream" +default = true +alias = [["experimental", "bundle2.stream"]] + +[[items]] +section = "server" +name = "compressionengines" +default-type = "list_type" + +[[items]] +section = "server" +name = "concurrent-push-mode" +default = "check-related" + +[[items]] +section = "server" +name = "disablefullbundle" +default = false + +[[items]] +section = "server" +name = "maxhttpheaderlen" +default = 1024 + +[[items]] +section = "server" +name = "preferuncompressed" +default = false + +[[items]] +section = "server" +name = "pullbundle" +default = true + +[[items]] +section = "server" +name = "streamunbundle" +default = false + +[[items]] +section = "server" +name = "uncompressed" +default = true + +[[items]] +section = "server" +name = "uncompressedallowsecret" +default = false + +[[items]] +section = "server" +name = "validate" +default = false + +[[items]] +section = "server" +name = "view" +default = "served" + +[[items]] +section = "server" +name = "zliblevel" +default = -1 + +[[items]] +section = "server" +name = "zstdlevel" +default = 3 + +[[items]] +section = "share" +name = "pool" + +[[items]] +section = "share" +name = "poolnaming" +default = "identity" + +[[items]] +section = "share" +name = "safe-mismatch.source-not-safe" +default = "abort" + +[[items]] +section = "share" +name = "safe-mismatch.source-not-safe.warn" +default = true + +[[items]] +section = "share" +name = "safe-mismatch.source-not-safe:verbose-upgrade" +default = true + +[[items]] +section = "share" +name = "safe-mismatch.source-safe" +default = "abort" + +[[items]] +section = "share" +name = "safe-mismatch.source-safe.warn" +default = true + +[[items]] +section = "share" +name = "safe-mismatch.source-safe:verbose-upgrade" +default = true + +[[items]] +section = "shelve" +name = "maxbackups" +default = 10 + +[[items]] +section = "shelve" +name = "store" +default = "internal" +experimental = true + +[[items]] +section = "smtp" +name = "host" + +[[items]] +section = "smtp" +name = "local_hostname" + +[[items]] +section = "smtp" +name = "password" + +[[items]] +section = "smtp" +name = "port" +default-type = "dynamic" + +[[items]] +section = "smtp" +name = "tls" +default = "none" + +[[items]] +section = "smtp" +name = "username" + +[[items]] +section = "sparse" +name = "missingwarning" +default = true +experimental = true + +[[items]] +section = "storage" +name = "dirstate-v2.slow-path" +default = "abort" +experimental = true # experimental as long as format.use-dirstate-v2 is.
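The `generic = true` entries in this file register a pattern rather than a literal key: `name` is a regular expression (for example `.*\.gui$` earlier in the file) matched against concrete keys such as `merge-tools.kdiff3.gui`, and `priority` orders the candidate patterns. A minimal extension-side sketch of the same mechanism, where `kdiff3` is a hypothetical tool name used only for illustration:

    # Sketch only: registering and reading an item covered by a
    # generic pattern.
    from mercurial import registrar

    configtable = {}
    configitem = registrar.configitem(configtable)
    configitem(b'merge-tools', br'.*\.gui$', default=False, generic=True)

    def tool_wants_gui(ui):
        # falls back to the generic item when no exact key is registered
        return ui.configbool(b'merge-tools', b'kdiff3.gui')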
+ +[[items]] +section = "storage" +name = "new-repo-backend" +default = "revlogv1" +experimental = true + +[[items]] +section = "storage" +name = "revlog.delta-parent-search.candidate-group-chunk-size" +default = 20 + +[[items]] +section = "storage" +name = "revlog.issue6528.fix-incoming" +default = true + +[[items]] +section = "storage" +name = "revlog.optimize-delta-parent-choice" +default = true +alias = [["format", "aggressivemergedeltas"]] + +[[items]] +section = "storage" +name = "revlog.persistent-nodemap.mmap" +default = true + +[[items]] +section = "storage" +name = "revlog.persistent-nodemap.slow-path" +default = "abort" + +[[items]] +section = "storage" +name = "revlog.reuse-external-delta" +default = true + +[[items]] +section = "storage" +name = "revlog.reuse-external-delta-parent" +documentation = """This option is true unless `format.generaldelta` is set.""" + +[[items]] +section = "storage" +name = "revlog.zlib.level" + +[[items]] +section = "storage" +name = "revlog.zstd.level" + +[[items]] +section = "subrepos" +name = "allowed" +default-type = "dynamic" # to make backporting simpler + +[[items]] +section = "subrepos" +name = "git:allowed" +default-type = "dynamic" + +[[items]] +section = "subrepos" +name = "hg:allowed" +default-type = "dynamic" + +[[items]] +section = "subrepos" +name = "svn:allowed" +default-type = "dynamic" + +[[items]] +section = "templateconfig" +name = ".*" +default-type = "dynamic" +generic = true + +[[items]] +section = "templates" +name = ".*" +generic = true + +[[items]] +section = "trusted" +name = "groups" +default-type = "list_type" + +[[items]] +section = "trusted" +name = "users" +default-type = "list_type" + +[[items]] +section = "ui" +name = "_usedassubrepo" +default = false + +[[items]] +section = "ui" +name = "allowemptycommit" +default = false + +[[items]] +section = "ui" +name = "archivemeta" +default = true + +[[items]] +section = "ui" +name = "askusername" +default = false + +[[items]] +section = "ui" +name = "available-memory" + +[[items]] +section = "ui" +name = "clonebundlefallback" +default = false + +[[items]] +section = "ui" +name = "clonebundleprefers" +default-type = "list_type" + +[[items]] +section = "ui" +name = "clonebundles" +default = true + +[[items]] +section = "ui" +name = "color" +default = "auto" + +[[items]] +section = "ui" +name = "commitsubrepos" +default = false + +[[items]] +section = "ui" +name = "debug" +default = false + +[[items]] +section = "ui" +name = "debugger" + +[[items]] +section = "ui" +name = "detailed-exit-code" +default = false +experimental = true + +[[items]] +section = "ui" +name = "editor" +default-type = "dynamic" + +[[items]] +section = "ui" +name = "fallbackencoding" + +[[items]] +section = "ui" +name = "forcecwd" + +[[items]] +section = "ui" +name = "forcemerge" + +[[items]] +section = "ui" +name = "formatdebug" +default = false + +[[items]] +section = "ui" +name = "formatjson" +default = false + +[[items]] +section = "ui" +name = "formatted" + +[[items]] +section = "ui" +name = "interactive" + +[[items]] +section = "ui" +name = "interface" + +[[items]] +section = "ui" +name = "interface.chunkselector" + +[[items]] +section = "ui" +name = "large-file-limit" +default = 10485760 + +[[items]] +section = "ui" +name = "logblockedtimes" +default = false + +[[items]] +section = "ui" +name = "merge" + +[[items]] +section = "ui" +name = "mergemarkers" +default = "basic" + +[[items]] +section = "ui" +name = "message-output" +default = "stdio" + +[[items]] +section = "ui" +name = "nontty" +default 
= false + +[[items]] +section = "ui" +name = "origbackuppath" + +[[items]] +section = "ui" +name = "paginate" +default = true + +[[items]] +section = "ui" +name = "patch" + +[[items]] +section = "ui" +name = "portablefilenames" +default = "warn" + +[[items]] +section = "ui" +name = "promptecho" +default = false + +[[items]] +section = "ui" +name = "quiet" +default = false + +[[items]] +section = "ui" +name = "quietbookmarkmove" +default = false + +[[items]] +section = "ui" +name = "relative-paths" +default = "legacy" + +[[items]] +section = "ui" +name = "remotecmd" +default = "hg" + +[[items]] +section = "ui" +name = "report_untrusted" +default = true + +[[items]] +section = "ui" +name = "rollback" +default = true + +[[items]] +section = "ui" +name = "signal-safe-lock" +default = true + +[[items]] +section = "ui" +name = "slash" +default = false + +[[items]] +section = "ui" +name = "ssh" +default = "ssh" + +[[items]] +section = "ui" +name = "ssherrorhint" + +[[items]] +section = "ui" +name = "statuscopies" +default = false + +[[items]] +section = "ui" +name = "strict" +default = false + +[[items]] +section = "ui" +name = "style" +default = "" + +[[items]] +section = "ui" +name = "supportcontact" + +[[items]] +section = "ui" +name = "textwidth" +default = 78 + +[[items]] +section = "ui" +name = "timeout" +default = "600" + +[[items]] +section = "ui" +name = "timeout.warn" +default = 0 + +[[items]] +section = "ui" +name = "timestamp-output" +default = false + +[[items]] +section = "ui" +name = "traceback" +default = false + +[[items]] +section = "ui" +name = "tweakdefaults" +default = false + +[[items]] +section = "ui" +name = "username" +alias = [["ui", "user"]] + +[[items]] +section = "ui" +name = "verbose" +default = false + +[[items]] +section = "verify" +name = "skipflags" +default = 0 + +[[items]] +section = "web" +name = "accesslog" +default = "-" + +[[items]] +section = "web" +name = "address" +default = "" + +[[items]] +section = "web" +name = "allow-archive" +default-type = "list_type" +alias = [["web", "allow_archive"]] + +[[items]] +section = "web" +name = "allow-pull" +default = true +alias = [["web", "allowpull"]] + +[[items]] +section = "web" +name = "allow-push" +default-type = "list_type" +alias = [["web", "allow_push"]] + +[[items]] +section = "web" +name = "allow_read" +default-type = "list_type" + +[[items]] +section = "web" +name = "allowbz2" +default = false + +[[items]] +section = "web" +name = "allowgz" +default = false + +[[items]] +section = "web" +name = "allowzip" +default = false + +[[items]] +section = "web" +name = "archivesubrepos" +default = false + +[[items]] +section = "web" +name = "baseurl" + +[[items]] +section = "web" +name = "cacerts" + +[[items]] +section = "web" +name = "cache" +default = true + +[[items]] +section = "web" +name = "certificate" + +[[items]] +section = "web" +name = "collapse" +default = false + +[[items]] +section = "web" +name = "comparisoncontext" +default = 5 + +[[items]] +section = "web" +name = "contact" + +[[items]] +section = "web" +name = "csp" + +[[items]] +section = "web" +name = "deny_push" +default-type = "list_type" + +[[items]] +section = "web" +name = "deny_read" +default-type = "list_type" + +[[items]] +section = "web" +name = "descend" +default = true + +[[items]] +section = "web" +name = "description" +default = "" + +[[items]] +section = "web" +name = "encoding" +default-type = "lazy_module" +default = "encoding.encoding" + +[[items]] +section = "web" +name = "errorlog" +default = "-" + +[[items]] +section = "web" 
+name = "guessmime" +default = false + +[[items]] +section = "web" +name = "hidden" +default = false + +[[items]] +section = "web" +name = "ipv6" +default = false + +[[items]] +section = "web" +name = "labels" +default-type = "list_type" + +[[items]] +section = "web" +name = "logoimg" +default = "hglogo.png" + +[[items]] +section = "web" +name = "logourl" +default = "https://mercurial-scm.org/" + +[[items]] +section = "web" +name = "maxchanges" +default = 10 + +[[items]] +section = "web" +name = "maxfiles" +default = 10 + +[[items]] +section = "web" +name = "maxshortchanges" +default = 60 + +[[items]] +section = "web" +name = "motd" +default = "" + +[[items]] +section = "web" +name = "name" +default-type = "dynamic" + +[[items]] +section = "web" +name = "port" +default = 8000 + +[[items]] +section = "web" +name = "prefix" +default = "" + +[[items]] +section = "web" +name = "push_ssl" +default = true + +[[items]] +section = "web" +name = "refreshinterval" +default = 20 + +[[items]] +section = "web" +name = "server-header" + +[[items]] +section = "web" +name = "static" + +[[items]] +section = "web" +name = "staticurl" + +[[items]] +section = "web" +name = "stripes" +default = 1 + +[[items]] +section = "web" +name = "style" +default = "paper" + +[[items]] +section = "web" +name = "templates" + +[[items]] +section = "web" +name = "view" +default = "served" +experimental = true + +[[items]] +section = "worker" +name = "backgroundclose" +default-type = "dynamic" + +[[items]] +section = "worker" +name = "backgroundclosemaxqueue" +# Windows defaults to a limit of 512 open files. A buffer of 128 +# should give us enough headway. +default = 384 + +[[items]] +section = "worker" +name = "backgroundcloseminfilecount" +default = 2048 + +[[items]] +section = "worker" +name = "backgroundclosethreadcount" +default = 4 + +[[items]] +section = "worker" +name = "enabled" +default = true + +[[items]] +section = "worker" +name = "numcpus" + +# Templates and template applications + +[[template-applications]] +template = "diff-options" +section = "annotate" + +[[template-applications]] +template = "diff-options" +section = "commands" +prefix = "commit.interactive" + +[[template-applications]] +template = "diff-options" +section = "commands" +prefix = "revert.interactive" + +[[template-applications]] +template = "diff-options" +section = "diff" + +[templates] +[[templates.diff-options]] +suffix = "nodates" +default = false + +[[templates.diff-options]] +suffix = "showfunc" +default = false + +[[templates.diff-options]] +suffix = "unified" + +[[templates.diff-options]] +suffix = "git" +default = false + +[[templates.diff-options]] +suffix = "ignorews" +default = false + +[[templates.diff-options]] +suffix = "ignorewsamount" +default = false + +[[templates.diff-options]] +suffix = "ignoreblanklines" +default = false + +[[templates.diff-options]] +suffix = "ignorewseol" +default = false + +[[templates.diff-options]] +suffix = "nobinary" +default = false + +[[templates.diff-options]] +suffix = "noprefix" +default = false + +[[templates.diff-options]] +suffix = "word-diff" +default = false + +# In-core extensions + +[[items]] +section = "blackbox" +name = "debug.to-stderr" +default = false +in_core_extension = "blackbox" + +[[items]] +section = "blackbox" +name = "dirty" +default = false +in_core_extension = "blackbox" + +[[items]] +section = "blackbox" +name = "maxsize" +default = "1 MB" +in_core_extension = "blackbox" + +[[items]] +section = "blackbox" +name = "logsource" +default = false +in_core_extension = 
"blackbox" + +[[items]] +section = "blackbox" +name = "maxfiles" +default = 7 +in_core_extension = "blackbox" + +[[items]] +section = "blackbox" +name = "track" +default-type = "lambda" +default = ["*"] +in_core_extension = "blackbox" + +[[items]] +section = "blackbox" +name = "ignore" +default-type = "lambda" +default = ["chgserver", "cmdserver", "extension"] +in_core_extension = "blackbox" + +[[items]] +section = "blackbox" +name = "date-format" +default = "" +in_core_extension = "blackbox" diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/context.py --- a/mercurial/context.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/context.py Wed Oct 11 02:02:46 2023 +0200 @@ -16,9 +16,6 @@ nullrev, short, ) -from .pycompat import ( - getattr, -) from . import ( dagop, encoding, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/crecord.py --- a/mercurial/crecord.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/crecord.py Wed Oct 11 02:02:46 2023 +0200 @@ -15,7 +15,6 @@ from .i18n import _ from .pycompat import ( - getattr, open, ) from . import ( @@ -573,7 +572,7 @@ ui.write(_(b'starting interactive selection\n')) chunkselector = curseschunkselector(headerlist, ui, operation) origsigtstp = sentinel = object() - if util.safehasattr(signal, 'SIGTSTP'): + if hasattr(signal, 'SIGTSTP'): origsigtstp = signal.getsignal(signal.SIGTSTP) try: with util.with_lc_ctype(): @@ -1944,7 +1943,7 @@ """ origsigwinch = sentinel = object() - if util.safehasattr(signal, 'SIGWINCH'): + if hasattr(signal, 'SIGWINCH'): origsigwinch = signal.signal(signal.SIGWINCH, self.sigwinchhandler) try: return self._main(stdscr) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/debugcommands.py --- a/mercurial/debugcommands.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/debugcommands.py Wed Oct 11 02:02:46 2023 +0200 @@ -33,7 +33,6 @@ short, ) from .pycompat import ( - getattr, open, ) from . 
import ( @@ -395,7 +394,6 @@ def _debugobsmarkers(ui, part, indent=0, **opts): """display version and markers contained in 'data'""" - opts = pycompat.byteskwargs(opts) data = part.read() indent_string = b' ' * indent try: @@ -408,7 +406,7 @@ msg = b"%sversion: %d (%d bytes)\n" msg %= indent_string, version, len(data) ui.write(msg) - fm = ui.formatter(b'debugobsolete', opts) + fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts)) for rawmarker in sorted(markers): m = obsutil.marker(None, rawmarker) fm.startitem() @@ -486,8 +484,7 @@ @command(b'debugcapabilities', [], _(b'PATH'), norepo=True) def debugcapabilities(ui, path, **opts): """lists the capabilities of a remote peer""" - opts = pycompat.byteskwargs(opts) - peer = hg.peer(ui, opts, path) + peer = hg.peer(ui, pycompat.byteskwargs(opts), path) try: caps = peer.capabilities() ui.writenoi18n(b'Main capabilities:\n') @@ -712,8 +709,7 @@ @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV')) def debugdata(ui, repo, file_, rev=None, **opts): """dump the contents of a data file revision""" - opts = pycompat.byteskwargs(opts) - if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'): + if opts.get('changelog') or opts.get('manifest') or opts.get('dir'): if rev is not None: raise error.InputError( _(b'cannot specify a revision with other arguments') @@ -721,7 +717,9 @@ file_, rev = None, file_ elif rev is None: raise error.InputError(_(b'please specify a revision')) - r = cmdutil.openstorage(repo, b'debugdata', file_, opts) + r = cmdutil.openstorage( + repo, b'debugdata', file_, pycompat.byteskwargs(opts) + ) try: ui.write(r.rawdata(r.lookup(rev))) except KeyError: @@ -801,8 +799,9 @@ The sparse read can be enabled with experimental.sparse-read = True """ - opts = pycompat.byteskwargs(opts) - r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts) + r = cmdutil.openrevlog( + repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts) + ) index = r.index start = r.start length = r.length @@ -892,7 +891,7 @@ return p1, p2, compsize, uncompsize, deltatype, chain, chain_size - fm = ui.formatter(b'debugdeltachain', opts) + fm = ui.formatter(b'debugdeltachain', pycompat.byteskwargs(opts)) fm.plain( b' rev p1 p2 chain# chainlen prev delta ' @@ -1027,7 +1026,6 @@ note: the process is initiated from a full text of the revision to store. 
""" - opts = pycompat.byteskwargs(opts) if arg_2 is None: file_ = None rev = arg_1 @@ -1037,7 +1035,9 @@ rev = int(rev) - revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts) + revlog = cmdutil.openrevlog( + repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts) + ) p1r, p2r = revlog.parentrevs(rev) if source == b'full': @@ -1234,22 +1234,21 @@ Control the initial size of the discovery for initial change """ - opts = pycompat.byteskwargs(opts) unfi = repo.unfiltered() # setup potential extra filtering - local_revs = opts[b"local_as_revs"] - remote_revs = opts[b"remote_as_revs"] + local_revs = opts["local_as_revs"] + remote_revs = opts["remote_as_revs"] # make sure tests are repeatable - random.seed(int(opts[b'seed'])) + random.seed(int(opts['seed'])) if not remote_revs: path = urlutil.get_unique_pull_path_obj( b'debugdiscovery', ui, remoteurl ) branches = (path.branch, []) - remote = hg.peer(repo, opts, path) + remote = hg.peer(repo, pycompat.byteskwargs(opts), path) ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc)) else: branches = (None, []) @@ -1279,10 +1278,10 @@ repo = repo.filtered(b'debug-discovery-local-filter') data = {} - if opts.get(b'old'): + if opts.get('old'): def doit(pushedrevs, remoteheads, remote=remote): - if not util.safehasattr(remote, 'branches'): + if not hasattr(remote, 'branches'): # enable in-client legacy support remote = localrepo.locallegacypeer(remote.local()) if remote_revs: @@ -1292,7 +1291,7 @@ repo, remote, force=True, audit=data ) common = set(common) - if not opts.get(b'nonheads'): + if not opts.get('nonheads'): ui.writenoi18n( b"unpruned common: %s\n" % b" ".join(sorted(short(n) for n in common)) @@ -1321,9 +1320,9 @@ return common, hds remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None) - localrevs = opts[b'rev'] - - fm = ui.formatter(b'debugdiscovery', opts) + localrevs = opts['rev'] + + fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts)) if fm.strict_format: @contextlib.contextmanager @@ -1474,15 +1473,14 @@ @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True) def debugextensions(ui, repo, **opts): '''show information about active extensions''' - opts = pycompat.byteskwargs(opts) exts = extensions.extensions(ui) hgver = util.version() - fm = ui.formatter(b'debugextensions', opts) + fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts)) for extname, extmod in sorted(exts, key=operator.itemgetter(0)): isinternal = extensions.ismoduleinternal(extmod) extsource = None - if util.safehasattr(extmod, '__file__'): + if hasattr(extmod, '__file__'): extsource = pycompat.fsencode(extmod.__file__) elif getattr(sys, 'oxidized', False): extsource = pycompat.sysexecutable @@ -1571,8 +1569,8 @@ from . 
import fileset fileset.symbols # force import of fileset so we have predicates to optimize - opts = pycompat.byteskwargs(opts) - ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None) + + ctx = logcmdutil.revsingle(repo, opts.get('rev'), None) stages = [ (b'parsed', pycompat.identity), @@ -1582,32 +1580,32 @@ stagenames = {n for n, f in stages} showalways = set() - if ui.verbose and not opts[b'show_stage']: + if ui.verbose and not opts['show_stage']: # show parsed tree by --verbose (deprecated) showalways.add(b'parsed') - if opts[b'show_stage'] == [b'all']: + if opts['show_stage'] == [b'all']: showalways.update(stagenames) else: - for n in opts[b'show_stage']: + for n in opts['show_stage']: if n not in stagenames: raise error.Abort(_(b'invalid stage name: %s') % n) - showalways.update(opts[b'show_stage']) + showalways.update(opts['show_stage']) tree = filesetlang.parse(expr) for n, f in stages: tree = f(tree) if n in showalways: - if opts[b'show_stage'] or n != b'parsed': + if opts['show_stage'] or n != b'parsed': ui.write(b"* %s:\n" % n) ui.write(filesetlang.prettyformat(tree), b"\n") files = set() - if opts[b'all_files']: + if opts['all_files']: for r in repo: c = repo[r] files.update(c.files()) files.update(c.substate) - if opts[b'all_files'] or ctx.rev() is None: + if opts['all_files'] or ctx.rev() is None: wctx = repo[None] files.update( repo.dirstate.walk( @@ -1623,7 +1621,7 @@ files.update(ctx.substate) m = ctx.matchfileset(repo.getcwd(), expr) - if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose): + if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose): ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n') for f in sorted(files): if not m(f): @@ -1711,18 +1709,17 @@ Use --verbose to get extra information about current config value and Mercurial default.""" - opts = pycompat.byteskwargs(opts) maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant) maxvariantlength = max(len(b'format-variant'), maxvariantlength) def makeformatname(name): return b'%s:' + (b' ' * (maxvariantlength - len(name))) - fm = ui.formatter(b'debugformat', opts) + fm = ui.formatter(b'debugformat', pycompat.byteskwargs(opts)) if fm.isplain(): def formatvalue(value): - if util.safehasattr(value, 'startswith'): + if hasattr(value, 'startswith'): return value if value: return b'yes' @@ -1823,8 +1820,7 @@ Every ID must be a full-length hex node id string. Saves the bundle to the given file. 
""" - opts = pycompat.byteskwargs(opts) - repo = hg.peer(ui, opts, repopath) + repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath) if not repo.capable(b'getbundle'): raise error.Abort(b"getbundle() not supported by target repository") args = {} @@ -1836,7 +1832,7 @@ args['bundlecaps'] = None bundle = repo.getbundle(b'debug', **args) - bundletype = opts.get(b'type', b'bzip2').lower() + bundletype = opts.get('type', b'bzip2').lower() btypes = { b'none': b'HG10UN', b'bzip2': b'HG10BZ', @@ -1930,8 +1926,9 @@ ) def debugindexdot(ui, repo, file_=None, **opts): """dump an index DAG as a graphviz dot file""" - opts = pycompat.byteskwargs(opts) - r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts) + r = cmdutil.openstorage( + repo, b'debugindexdot', file_, pycompat.byteskwargs(opts) + ) ui.writenoi18n(b"digraph G {\n") for i in r: node = r.node(i) @@ -1947,7 +1944,7 @@ """show stats related to the changelog index""" repo.changelog.shortest(repo.nullid, 1) index = repo.changelog.index - if not util.safehasattr(index, 'stats'): + if not hasattr(index, 'stats'): raise error.Abort(_(b'debugindexstats only works with native code')) for k, v in sorted(index.stats().items()): ui.write(b'%s: %d\n' % (k, v)) @@ -1959,11 +1956,9 @@ Returns 0 on success. """ - opts = pycompat.byteskwargs(opts) - problems = 0 - fm = ui.formatter(b'debuginstall', opts) + fm = ui.formatter(b'debuginstall', pycompat.byteskwargs(opts)) fm.startitem() # encoding might be unknown or wrong. don't translate these messages. @@ -1983,7 +1978,7 @@ # Python pythonlib = None - if util.safehasattr(os, '__file__'): + if hasattr(os, '__file__'): pythonlib = os.path.dirname(pycompat.fsencode(os.__file__)) elif getattr(sys, 'oxidized', False): pythonlib = pycompat.sysexecutable @@ -2065,7 +2060,7 @@ # compiled modules hgmodules = None - if util.safehasattr(sys.modules[__name__], '__file__'): + if hasattr(sys.modules[__name__], '__file__'): hgmodules = os.path.dirname(pycompat.fsencode(__file__)) elif getattr(sys, 'oxidized', False): hgmodules = pycompat.sysexecutable @@ -2260,8 +2255,7 @@ Every ID must be a full-length hex node id string. Returns a list of 0s and 1s indicating unknown/known. 
""" - opts = pycompat.byteskwargs(opts) - repo = hg.peer(ui, opts, repopath) + repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath) if not repo.capable(b'known'): raise error.Abort(b"known() not supported by target repository") flags = repo.known([bin(s) for s in ids]) @@ -2496,9 +2490,8 @@ else: ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n') - opts = pycompat.byteskwargs(opts) - if not opts[b'template']: - opts[b'template'] = ( + if not opts['template']: + opts['template'] = ( b'{if(commits, "", "no merge state found\n")}' b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}' b'{files % "file: {path} (state \\"{state}\\")\n' @@ -2518,7 +2511,7 @@ ms = mergestatemod.mergestate.read(repo) - fm = ui.formatter(b'debugmergestate', opts) + fm = ui.formatter(b'debugmergestate', pycompat.byteskwargs(opts)) fm.startitem() fm_commits = fm.nested(b'commits') @@ -2649,7 +2642,7 @@ if isinstance(r, (manifest.manifestrevlog, filelog.filelog)): r = r._revlog if opts['dump_new']: - if util.safehasattr(r.index, "nodemap_data_all"): + if hasattr(r.index, "nodemap_data_all"): data = r.index.nodemap_data_all() else: data = nodemap.persistent_data(r.index) @@ -2706,8 +2699,6 @@ With no arguments, displays the list of obsolescence markers.""" - opts = pycompat.byteskwargs(opts) - def parsenodeid(s): try: # We do not use revsingle/revrange functions here to accept @@ -2723,9 +2714,9 @@ b'node identifiers' ) - if opts.get(b'delete'): + if opts.get('delete'): indices = [] - for v in opts.get(b'delete'): + for v in opts.get('delete'): try: indices.append(int(v)) except ValueError: @@ -2746,25 +2737,25 @@ return if precursor is not None: - if opts[b'rev']: + if opts['rev']: raise error.InputError( b'cannot select revision when creating marker' ) metadata = {} - metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username()) + metadata[b'user'] = encoding.fromlocal(opts['user'] or ui.username()) succs = tuple(parsenodeid(succ) for succ in successors) l = repo.lock() try: tr = repo.transaction(b'debugobsolete') try: - date = opts.get(b'date') + date = opts.get('date') if date: date = dateutil.parsedate(date) else: date = None prec = parsenodeid(precursor) parents = None - if opts[b'record_parents']: + if opts['record_parents']: if prec not in repo.unfiltered(): raise error.Abort( b'cannot used --record-parents on ' @@ -2776,7 +2767,7 @@ tr, prec, succs, - opts[b'flags'], + opts['flags'], parents=parents, date=date, metadata=metadata, @@ -2792,12 +2783,12 @@ finally: l.release() else: - if opts[b'rev']: - revs = logcmdutil.revrange(repo, opts[b'rev']) + if opts['rev']: + revs = logcmdutil.revrange(repo, opts['rev']) nodes = [repo[r].node() for r in revs] markers = list( obsutil.getmarkers( - repo, nodes=nodes, exclusive=opts[b'exclusive'] + repo, nodes=nodes, exclusive=opts['exclusive'] ) ) markers.sort(key=lambda x: x._data) @@ -2806,12 +2797,12 @@ markerstoiter = markers isrelevant = lambda m: True - if opts.get(b'rev') and opts.get(b'index'): + if opts.get('rev') and opts.get('index'): markerstoiter = obsutil.getmarkers(repo) markerset = set(markers) isrelevant = lambda m: m in markerset - fm = ui.formatter(b'debugobsolete', opts) + fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts)) for i, m in enumerate(markerstoiter): if not isrelevant(m): # marker can be irrelevant when we're iterating over a set @@ -2823,7 +2814,7 @@ # are relevant to --rev value continue fm.startitem() - ind = i if opts.get(b'index') else None + ind = i if opts.get('index') else None 
cmdutil.showmarker(fm, m, index=ind) fm.end() @@ -2836,8 +2827,7 @@ def debugp1copies(ui, repo, **opts): """dump copy information compared to p1""" - opts = pycompat.byteskwargs(opts) - ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None) + ctx = scmutil.revsingle(repo, opts.get('rev'), default=None) for dst, src in ctx.p1copies().items(): ui.write(b'%s -> %s\n' % (src, dst)) @@ -2850,8 +2840,7 @@ def debugp2copies(ui, repo, **opts): """dump copy information compared to p2""" - opts = pycompat.byteskwargs(opts) - ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None) + ctx = scmutil.revsingle(repo, opts.get('rev'), default=None) for dst, src in ctx.p2copies().items(): ui.write(b'%s -> %s\n' % (src, dst)) @@ -3019,11 +3008,10 @@ information, even with --debug. In such case, information above is useful to know why a merge tool is chosen. """ - opts = pycompat.byteskwargs(opts) overrides = {} - if opts[b'tool']: - overrides[(b'ui', b'forcemerge')] = opts[b'tool'] - ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool']))) + if opts['tool']: + overrides[(b'ui', b'forcemerge')] = opts['tool'] + ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts['tool']))) with ui.configoverride(overrides, b'debugmergepatterns'): hgmerge = encoding.environ.get(b"HGMERGE") @@ -3033,9 +3021,9 @@ if uimerge: ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge))) - ctx = scmutil.revsingle(repo, opts.get(b'rev')) - m = scmutil.match(ctx, pats, opts) - changedelete = opts[b'changedelete'] + ctx = scmutil.revsingle(repo, opts.get('rev')) + m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts)) + changedelete = opts['changedelete'] for path in ctx.walk(m): fctx = ctx[path] with ui.silent( @@ -3184,8 +3172,7 @@ ) def debugrebuildfncache(ui, repo, **opts): """rebuild the fncache file""" - opts = pycompat.byteskwargs(opts) - repair.rebuildfncache(ui, repo, opts.get(b"only_data")) + repair.rebuildfncache(ui, repo, opts.get("only_data")) @command( @@ -3196,9 +3183,8 @@ def debugrename(ui, repo, *pats, **opts): """dump rename information""" - opts = pycompat.byteskwargs(opts) - ctx = scmutil.revsingle(repo, opts.get(b'rev')) - m = scmutil.match(ctx, pats, opts) + ctx = scmutil.revsingle(repo, opts.get('rev')) + m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts)) for abs in ctx.walk(m): fctx = ctx[abs] o = fctx.filelog().renamed(fctx.filenode()) @@ -3224,10 +3210,11 @@ ) def debugrevlog(ui, repo, file_=None, **opts): """show data and statistics about a revlog""" - opts = pycompat.byteskwargs(opts) - r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts) - - if opts.get(b"dump"): + r = cmdutil.openrevlog( + repo, b'debugrevlog', file_, pycompat.byteskwargs(opts) + ) + + if opts.get("dump"): revlog_debug.dump(ui, r) else: revlog_debug.debug_revlog(ui, r) @@ -3243,9 +3230,10 @@ ) def debugrevlogindex(ui, repo, file_=None, **opts): """dump the contents of a revlog index""" - opts = pycompat.byteskwargs(opts) - r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts) - format = opts.get(b'format', 0) + r = cmdutil.openrevlog( + repo, b'debugrevlogindex', file_, pycompat.byteskwargs(opts) + ) + format = opts.get('format', 0) if format not in (0, 1): raise error.Abort(_(b"unknown format %d") % format) @@ -3394,7 +3382,6 @@ Use --verify-optimized to compare the optimized result with the unoptimized one. Returns 1 if the optimized result differs. 
""" - opts = pycompat.byteskwargs(opts) aliases = ui.configitems(b'revsetalias') stages = [ (b'parsed', lambda tree: tree), @@ -3406,9 +3393,9 @@ (b'analyzed', revsetlang.analyze), (b'optimized', revsetlang.optimize), ] - if opts[b'no_optimized']: + if opts['no_optimized']: stages = stages[:-1] - if opts[b'verify_optimized'] and opts[b'no_optimized']: + if opts['verify_optimized'] and opts['no_optimized']: raise error.Abort( _(b'cannot use --verify-optimized with --no-optimized') ) @@ -3416,21 +3403,21 @@ showalways = set() showchanged = set() - if ui.verbose and not opts[b'show_stage']: + if ui.verbose and not opts['show_stage']: # show parsed tree by --verbose (deprecated) showalways.add(b'parsed') showchanged.update([b'expanded', b'concatenated']) - if opts[b'optimize']: + if opts['optimize']: showalways.add(b'optimized') - if opts[b'show_stage'] and opts[b'optimize']: + if opts['show_stage'] and opts['optimize']: raise error.Abort(_(b'cannot use --optimize with --show-stage')) - if opts[b'show_stage'] == [b'all']: + if opts['show_stage'] == [b'all']: showalways.update(stagenames) else: - for n in opts[b'show_stage']: + for n in opts['show_stage']: if n not in stagenames: raise error.Abort(_(b'invalid stage name: %s') % n) - showalways.update(opts[b'show_stage']) + showalways.update(opts['show_stage']) treebystage = {} printedtree = None @@ -3438,15 +3425,15 @@ for n, f in stages: treebystage[n] = tree = f(tree) if n in showalways or (n in showchanged and tree != printedtree): - if opts[b'show_stage'] or n != b'parsed': + if opts['show_stage'] or n != b'parsed': ui.write(b"* %s:\n" % n) ui.write(revsetlang.prettyformat(tree), b"\n") printedtree = tree - if opts[b'verify_optimized']: + if opts['verify_optimized']: arevs = revset.makematcher(treebystage[b'analyzed'])(repo) brevs = revset.makematcher(treebystage[b'optimized'])(repo) - if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose): + if opts['show_set'] or (opts['show_set'] is None and ui.verbose): ui.writenoi18n( b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n" ) @@ -3474,9 +3461,9 @@ func = revset.makematcher(tree) revs = func(repo) - if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose): + if opts['show_set'] or (opts['show_set'] is None and ui.verbose): ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n") - if not opts[b'show_revs']: + if not opts['show_revs']: return for c in revs: ui.write(b"%d\n" % c) @@ -3503,30 +3490,28 @@ workaround to the fact that ``hg serve --stdio`` must have specific arguments for security reasons. """ - opts = pycompat.byteskwargs(opts) - - if not opts[b'sshstdio']: + if not opts['sshstdio']: raise error.Abort(_(b'only --sshstdio is currently supported')) logfh = None - if opts[b'logiofd'] and opts[b'logiofile']: + if opts['logiofd'] and opts['logiofile']: raise error.Abort(_(b'cannot use both --logiofd and --logiofile')) - if opts[b'logiofd']: + if opts['logiofd']: # Ideally we would be line buffered. But line buffering in binary # mode isn't supported and emits a warning in Python 3.8+. Disabling # buffering could have performance impacts. But since this isn't # performance critical code, it should be fine. 
try: - logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0) + logfh = os.fdopen(int(opts['logiofd']), 'ab', 0) except OSError as e: if e.errno != errno.ESPIPE: raise # can't seek a pipe, so `ab` mode fails on py3 - logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0) - elif opts[b'logiofile']: - logfh = open(opts[b'logiofile'], b'ab', 0) + logfh = os.fdopen(int(opts['logiofd']), 'wb', 0) + elif opts['logiofile']: + logfh = open(opts['logiofile'], b'ab', 0) s = wireprotoserver.sshserver(ui, repo, logfh=logfh) s.serve_forever() @@ -3566,8 +3551,7 @@ """dump the side data for a cl/manifest/file revision Use --verbose to dump the sidedata content.""" - opts = pycompat.byteskwargs(opts) - if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'): + if opts.get('changelog') or opts.get('manifest') or opts.get('dir'): if rev is not None: raise error.InputError( _(b'cannot specify a revision with other arguments') @@ -3575,7 +3559,9 @@ file_, rev = None, file_ elif rev is None: raise error.InputError(_(b'please specify a revision')) - r = cmdutil.openstorage(repo, b'debugdata', file_, opts) + r = cmdutil.openstorage( + repo, b'debugdata', file_, pycompat.byteskwargs(opts) + ) r = getattr(r, '_revlog', r) try: sidedata = r.sidedata(r.lookup(rev)) @@ -3748,13 +3734,12 @@ ) backups.sort(key=lambda x: os.path.getmtime(x), reverse=True) - opts = pycompat.byteskwargs(opts) - opts[b"bundle"] = b"" - opts[b"force"] = None - limit = logcmdutil.getlimit(opts) + opts["bundle"] = b"" + opts["force"] = None + limit = logcmdutil.getlimit(pycompat.byteskwargs(opts)) def display(other, chlist, displayer): - if opts.get(b"newest_first"): + if opts.get("newest_first"): chlist.reverse() count = 0 for n in chlist: @@ -3763,12 +3748,12 @@ parents = [ True for p in other.changelog.parents(n) if p != repo.nullid ] - if opts.get(b"no_merges") and len(parents) == 2: + if opts.get("no_merges") and len(parents) == 2: continue count += 1 displayer.show(other[n]) - recovernode = opts.get(b"recover") + recovernode = opts.get("recover") if recovernode: if scmutil.isrevsymbol(repo, recovernode): ui.warn(_(b"%s already exists in the repo\n") % recovernode) @@ -3792,15 +3777,15 @@ source, ) try: - other = hg.peer(repo, opts, path) + other = hg.peer(repo, pycompat.byteskwargs(opts), path) except error.LookupError as ex: msg = _(b"\nwarning: unable to open bundle %s") % path.loc hint = _(b"\n(missing parent rev %s)\n") % short(ex.name) ui.warn(msg, hint=hint) continue - branches = (path.branch, opts.get(b'branch', [])) + branches = (path.branch, opts.get('branch', [])) revs, checkout = hg.addbranchrevs( - repo, other, branches, opts.get(b"rev") + repo, other, branches, opts.get("rev") ) if revs: @@ -3809,7 +3794,7 @@ with ui.silent(): try: other, chlist, cleanupfn = bundlerepo.getremotechanges( - ui, repo, other, revs, opts[b"bundle"], opts[b"force"] + ui, repo, other, revs, opts["bundle"], opts["force"] ) except error.LookupError: continue @@ -3846,10 +3831,10 @@ ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc)) else: opts[ - b"template" + "template" ] = b"{label('status.modified', node|short)} {desc|firstline}\n" displayer = logcmdutil.changesetdisplayer( - ui, other, opts, False + ui, other, pycompat.byteskwargs(opts), False ) display(other, chlist, displayer) displayer.close() @@ -3932,10 +3917,9 @@ ) def debug_revlog_stats(ui, repo, **opts): """display statistics about revlogs in the store""" - opts = pycompat.byteskwargs(opts) - changelog = opts[b"changelog"] - manifest = opts[b"manifest"] - filelogs = 
opts[b"filelogs"] + changelog = opts["changelog"] + manifest = opts["manifest"] + filelogs = opts["filelogs"] if changelog is None and manifest is None and filelogs is None: changelog = True @@ -3943,7 +3927,7 @@ filelogs = True repo = repo.unfiltered() - fm = ui.formatter(b'debug-revlog-stats', opts) + fm = ui.formatter(b'debug-revlog-stats', pycompat.byteskwargs(opts)) revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs) fm.end() @@ -4182,8 +4166,7 @@ ) def debugwalk(ui, repo, *pats, **opts): """show how files match on given patterns""" - opts = pycompat.byteskwargs(opts) - m = scmutil.match(repo[None], pats, opts) + m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts)) if ui.verbose: ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n') items = list(repo[None].walk(m)) @@ -4236,16 +4219,15 @@ norepo=True, ) def debugwireargs(ui, repopath, *vals, **opts): - opts = pycompat.byteskwargs(opts) - repo = hg.peer(ui, opts, repopath) + repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath) try: for opt in cmdutil.remoteopts: - del opts[opt[1]] + del opts[pycompat.sysstr(opt[1])] args = {} for k, v in opts.items(): if v: args[k] = v - args = pycompat.strkwargs(args) + # run twice to check that we don't mess up the stream for the next command res1 = repo.debugwireargs(*vals, **args) res2 = repo.debugwireargs(*vals, **args) @@ -4501,12 +4483,10 @@ resulting object is fed into a CBOR encoder. Otherwise it is interpreted as a Python byte string literal. """ - opts = pycompat.byteskwargs(opts) - - if opts[b'localssh'] and not repo: + if opts['localssh'] and not repo: raise error.Abort(_(b'--localssh requires a repository')) - if opts[b'peer'] and opts[b'peer'] not in ( + if opts['peer'] and opts['peer'] not in ( b'raw', b'ssh1', ): @@ -4515,7 +4495,7 @@ hint=_(b'valid values are "raw" and "ssh1"'), ) - if path and opts[b'localssh']: + if path and opts['localssh']: raise error.Abort(_(b'cannot specify --localssh with an explicit path')) if ui.interactive(): @@ -4529,7 +4509,7 @@ stderr = None opener = None - if opts[b'localssh']: + if opts['localssh']: # We start the SSH server in its own process so there is process # separation. This prevents a whole class of potential bugs around # shared state from interfering with server operation. @@ -4552,7 +4532,7 @@ stderr = proc.stderr # We turn the pipes into observers so we can log I/O. - if ui.verbose or opts[b'peer'] == b'raw': + if ui.verbose or opts['peer'] == b'raw': stdin = util.makeloggingfileobject( ui, proc.stdin, b'i', logdata=True ) @@ -4566,9 +4546,9 @@ # --localssh also implies the peer connection settings. url = b'ssh://localserver' - autoreadstderr = not opts[b'noreadstderr'] - - if opts[b'peer'] == b'ssh1': + autoreadstderr = not opts['noreadstderr'] + + if opts['peer'] == b'ssh1': ui.write(_(b'creating ssh peer for wire protocol version 1\n')) peer = sshpeer.sshv1peer( ui, @@ -4580,7 +4560,7 @@ None, autoreadstderr=autoreadstderr, ) - elif opts[b'peer'] == b'raw': + elif opts['peer'] == b'raw': ui.write(_(b'using raw connection to peer\n')) peer = None else: @@ -4627,17 +4607,17 @@ # Don't send default headers when in raw mode. This allows us to # bypass most of the behavior of our URL handling code so we can # have near complete control over what's sent on the wire. 
- if opts[b'peer'] == b'raw': + if opts['peer'] == b'raw': openerargs['sendaccept'] = False opener = urlmod.opener(ui, authinfo, **openerargs) - if opts[b'peer'] == b'raw': + if opts['peer'] == b'raw': ui.write(_(b'using raw connection to peer\n')) peer = None - elif opts[b'peer']: + elif opts['peer']: raise error.Abort( - _(b'--peer %s not supported with HTTP peers') % opts[b'peer'] + _(b'--peer %s not supported with HTTP peers') % opts['peer'] ) else: peer_path = urlutil.try_path(ui, path) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/dirstate.py --- a/mercurial/dirstate.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/dirstate.py Wed Oct 11 02:02:46 2023 +0200 @@ -13,7 +13,6 @@ import uuid from .i18n import _ -from .pycompat import delattr from hgdemandimport import tracing diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/dirstatemap.py --- a/mercurial/dirstatemap.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/dirstatemap.py Wed Oct 11 02:02:46 2023 +0200 @@ -377,7 +377,7 @@ return # TODO: adjust this estimate for dirstate-v2 - if util.safehasattr(parsers, 'dict_new_presized'): + if hasattr(parsers, 'dict_new_presized'): # Make an estimate of the number of files in the dirstate based on # its size. This trades wasting some memory for avoiding costly # resizes. Each entry have a prefix of 17 bytes followed by one or diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/dispatch.py --- a/mercurial/dispatch.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/dispatch.py Wed Oct 11 02:02:46 2023 +0200 @@ -18,7 +18,6 @@ from .i18n import _ -from .pycompat import getattr from hgdemandimport import tracing @@ -107,7 +106,7 @@ def _flushstdio(ui, err): status = None # In all cases we try to flush stdio streams. - if util.safehasattr(ui, 'fout'): + if hasattr(ui, 'fout'): assert ui is not None # help pytype assert ui.fout is not None # help pytype try: @@ -116,7 +115,7 @@ err = e status = -1 - if util.safehasattr(ui, 'ferr'): + if hasattr(ui, 'ferr'): assert ui is not None # help pytype assert ui.ferr is not None # help pytype try: @@ -170,7 +169,7 @@ "newline": "\n", "line_buffering": sys.stdout.line_buffering, } - if util.safehasattr(sys.stdout, "write_through"): + if hasattr(sys.stdout, "write_through"): # pytype: disable=attribute-error kwargs["write_through"] = sys.stdout.write_through # pytype: enable=attribute-error @@ -183,7 +182,7 @@ "newline": "\n", "line_buffering": sys.stderr.line_buffering, } - if util.safehasattr(sys.stderr, "write_through"): + if hasattr(sys.stderr, "write_through"): # pytype: disable=attribute-error kwargs["write_through"] = sys.stderr.write_through # pytype: enable=attribute-error @@ -520,7 +519,7 @@ def aliasargs(fn, givenargs): args = [] # only care about alias 'args', ignore 'args' set by extensions.wrapfunction - if not util.safehasattr(fn, '_origfunc'): + if not hasattr(fn, '_origfunc'): args = getattr(fn, 'args', args) if args: cmd = b' '.join(map(procutil.shellquote, args)) @@ -708,7 +707,7 @@ } if name not in adefaults: raise AttributeError(name) - if self.badalias or util.safehasattr(self, 'shell'): + if self.badalias or hasattr(self, 'shell'): return adefaults[name] return getattr(self.fn, name) @@ -734,7 +733,7 @@ self.name, self.definition, ) - if util.safehasattr(self, 'shell'): + if hasattr(self, 'shell'): return self.fn(ui, *args, **opts) else: try: @@ -1024,7 +1023,7 @@ cmd = aliases[0] fn = entry[0] - if cmd and util.safehasattr(fn, 'shell'): + if cmd and hasattr(fn, 'shell'): # shell alias shouldn't receive early options which are consumed by hg 
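For context on the `util.safehasattr()` removals throughout this series: the wrapper existed because Python 2's built-in `hasattr()` swallowed every exception raised while computing an attribute, whereas Python 3's only catches `AttributeError`. On Python 3 the builtin is therefore equivalent to the wrapper, which is essentially:

    # What util.safehasattr() boils down to (sketch).
    _notset = object()

    def safehasattr(thing, attr):
        # only a missing attribute yields the sentinel; any other
        # exception raised by a property propagates, exactly like
        # hasattr() on Python 3
        return getattr(thing, attr, _notset) is not _notset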
_earlyopts, args = _earlysplitopts(args) d = lambda: fn(ui, *args[1:]) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/encoding.py --- a/mercurial/encoding.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/encoding.py Wed Oct 11 02:02:46 2023 +0200 @@ -11,7 +11,6 @@ import re import unicodedata -from .pycompat import getattr from . import ( error, policy, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/extensions.py --- a/mercurial/extensions.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/extensions.py Wed Oct 11 02:02:46 2023 +0200 @@ -19,9 +19,7 @@ gettext, ) from .pycompat import ( - getattr, open, - setattr, ) from . import ( @@ -84,9 +82,8 @@ def loadpath(path, module_name): - module_name = module_name.replace(b'.', b'_') + module_name = module_name.replace('.', '_') path = util.normpath(util.expandpath(path)) - module_name = pycompat.fsdecode(module_name) path = pycompat.fsdecode(path) if os.path.isdir(path): # module/__init__.py style @@ -106,30 +103,31 @@ def _importh(name): """import and return the module""" - mod = __import__(pycompat.sysstr(name)) - components = name.split(b'.') + mod = __import__(name) + components = name.split('.') for comp in components[1:]: mod = getattr(mod, comp) return mod def _importext(name, path=None, reportfunc=None): + name = pycompat.fsdecode(name) if path: # the module will be loaded in sys.modules # choose an unique name so that it doesn't # conflicts with other modules - mod = loadpath(path, b'hgext.%s' % name) + mod = loadpath(path, 'hgext.%s' % name) else: try: - mod = _importh(b"hgext.%s" % name) + mod = _importh("hgext.%s" % name) except ImportError as err: if reportfunc: - reportfunc(err, b"hgext.%s" % name, b"hgext3rd.%s" % name) + reportfunc(err, "hgext.%s" % name, "hgext3rd.%s" % name) try: - mod = _importh(b"hgext3rd.%s" % name) + mod = _importh("hgext3rd.%s" % name) except ImportError as err: if reportfunc: - reportfunc(err, b"hgext3rd.%s" % name, name) + reportfunc(err, "hgext3rd.%s" % name, name) mod = _importh(name) return mod @@ -140,9 +138,9 @@ ui.log( b'extension', b' - could not import %s (%s): trying %s\n', - failed, + stringutil.forcebytestr(failed), stringutil.forcebytestr(err), - next, + stringutil.forcebytestr(next), ) if ui.debugflag and ui.configbool(b'devel', b'debug.extensions'): ui.traceback() @@ -155,42 +153,43 @@ elif isinstance(xs, dict): for k, v in xs.items(): _rejectunicode(name, k) - _rejectunicode(b'%s.%s' % (name, stringutil.forcebytestr(k)), v) - elif isinstance(xs, type(u'')): + k = pycompat.sysstr(k) + _rejectunicode('%s.%s' % (name, k), v) + elif isinstance(xs, str): raise error.ProgrammingError( - b"unicode %r found in %s" % (xs, name), + b"unicode %r found in %s" % (xs, stringutil.forcebytestr(name)), hint=b"use b'' to make it byte string", ) # attributes set by registrar.command -_cmdfuncattrs = (b'norepo', b'optionalrepo', b'inferrepo') +_cmdfuncattrs = ('norepo', 'optionalrepo', 'inferrepo') def _validatecmdtable(ui, cmdtable): """Check if extension commands have required attributes""" for c, e in cmdtable.items(): f = e[0] - missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)] + missing = [a for a in _cmdfuncattrs if not hasattr(f, a)] if not missing: continue - raise error.ProgrammingError( - b'missing attributes: %s' % b', '.join(missing), - hint=b"use @command decorator to register '%s'" % c, - ) + msg = b'missing attributes: %s' + msg %= b', '.join([stringutil.forcebytestr(m) for m in missing]) + hint = b"use @command decorator to register '%s'" % c + raise 
error.ProgrammingError(msg, hint=hint) def _validatetables(ui, mod): """Sanity check for loadable tables provided by extension module""" - for t in [b'cmdtable', b'colortable', b'configtable']: + for t in ['cmdtable', 'colortable', 'configtable']: _rejectunicode(t, getattr(mod, t, {})) for t in [ - b'filesetpredicate', - b'internalmerge', - b'revsetpredicate', - b'templatefilter', - b'templatefunc', - b'templatekeyword', + 'filesetpredicate', + 'internalmerge', + 'revsetpredicate', + 'templatefilter', + 'templatefunc', + 'templatekeyword', ]: o = getattr(mod, t, None) if o: @@ -349,7 +348,7 @@ # # This one is for the list of item that must be run before running any setup earlyextraloaders = [ - (b'configtable', configitems, b'loadconfigtable'), + ('configtable', configitems, 'loadconfigtable'), ] ui.log(b'extension', b'- loading configtable attributes\n') @@ -434,14 +433,14 @@ # which takes (ui, extensionname, extraobj) arguments ui.log(b'extension', b'- loading extension registration objects\n') extraloaders = [ - (b'cmdtable', commands, b'loadcmdtable'), - (b'colortable', color, b'loadcolortable'), - (b'filesetpredicate', fileset, b'loadpredicate'), - (b'internalmerge', filemerge, b'loadinternalmerge'), - (b'revsetpredicate', revset, b'loadpredicate'), - (b'templatefilter', templatefilters, b'loadfilter'), - (b'templatefunc', templatefuncs, b'loadfunction'), - (b'templatekeyword', templatekw, b'loadkeyword'), + ('cmdtable', commands, 'loadcmdtable'), + ('colortable', color, 'loadcolortable'), + ('filesetpredicate', fileset, 'loadpredicate'), + ('internalmerge', filemerge, 'loadinternalmerge'), + ('revsetpredicate', revset, 'loadpredicate'), + ('templatefilter', templatefilters, 'loadfilter'), + ('templatefunc', templatefuncs, 'loadfunction'), + ('templatekeyword', templatekw, 'loadkeyword'), ] with util.timedcm('load registration objects') as stats: _loadextra(ui, newindex, extraloaders) @@ -625,6 +624,10 @@ def __init__(self, container, funcname, wrapper): assert callable(wrapper) + if not isinstance(funcname, str): + msg = b"pass wrappedfunction target name as `str`, not `bytes`" + util.nouideprecwarn(msg, b"6.6", stacklevel=2) + funcname = pycompat.sysstr(funcname) self._container = container self._funcname = funcname self._wrapper = wrapper @@ -671,6 +674,11 @@ """ assert callable(wrapper) + if not isinstance(funcname, str): + msg = b"pass wrapfunction target name as `str`, not `bytes`" + util.nouideprecwarn(msg, b"6.6", stacklevel=2) + funcname = pycompat.sysstr(funcname) + origfn = getattr(container, funcname) assert callable(origfn) if inspect.ismodule(container): @@ -732,7 +740,7 @@ # The hgext might not have a __file__ attribute (e.g. in PyOxidizer) and # it might not be on a filesystem even if it does. - if util.safehasattr(hgext, '__file__'): + if hasattr(hgext, '__file__'): extpath = os.path.dirname( util.abspath(pycompat.fsencode(hgext.__file__)) ) @@ -847,7 +855,7 @@ # The extensions are filesystem based, so either an error occurred # or all are enabled. 
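The two deprecation warnings added above switch `wrapfunction()` and `wrappedfunction()` to taking the target attribute name as `str`; bytes names still work for now but warn, since `getattr()` requires a `str` on Python 3. A minimal sketch of the post-deprecation calling convention, where wrapping `util.makedirs` is just an arbitrary example target:

    # Sketch only; any module attribute could be wrapped this way.
    from mercurial import extensions, util

    def wrapped_makedirs(orig, *args, **kwargs):
        # forward to the original implementation unchanged
        return orig(*args, **kwargs)

    def uisetup(ui):
        extensions.wrapfunction(util, 'makedirs', wrapped_makedirs)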
- if util.safehasattr(hgext, '__file__'): + if hasattr(hgext, '__file__'): return if name in _order: # enabled @@ -977,13 +985,13 @@ def moduleversion(module): '''return version information from given module as a string''' - if util.safehasattr(module, b'getversion') and callable(module.getversion): + if hasattr(module, 'getversion') and callable(module.getversion): try: version = module.getversion() except Exception: version = b'unknown' - elif util.safehasattr(module, b'__version__'): + elif hasattr(module, '__version__'): version = module.__version__ else: version = b'' diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/exthelper.py --- a/mercurial/exthelper.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/exthelper.py Wed Oct 11 02:02:46 2023 +0200 @@ -325,7 +325,7 @@ # Required, otherwise the function will not be wrapped uisetup = eh.finaluisetup - @eh.wrapfunction(discovery, b'checkheads') + @eh.wrapfunction(discovery, 'checkheads') def wrapcheckheads(orig, *args, **kwargs): ui.note(b'His head smashed in and his heart cut out') return orig(*args, **kwargs) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/filemerge.py --- a/mercurial/filemerge.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/filemerge.py Wed Oct 11 02:02:46 2023 +0200 @@ -16,9 +16,6 @@ hex, short, ) -from .pycompat import ( - getattr, -) from . import ( encoding, @@ -834,12 +831,13 @@ # avoid cycle cmdutil->merge->filemerge->extensions->cmdutil from . import extensions - mod = extensions.loadpath(toolpath, b'hgmerge.%s' % tool) + mod_name = 'hgmerge.%s' % pycompat.sysstr(tool) + mod = extensions.loadpath(toolpath, mod_name) except Exception: raise error.Abort( _(b"loading python merge script failed: %s") % toolpath ) - mergefn = getattr(mod, scriptfn, None) + mergefn = getattr(mod, pycompat.sysstr(scriptfn), None) if mergefn is None: raise error.Abort( _(b"%s does not have function: %s") % (toolpath, scriptfn) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/fileset.py --- a/mercurial/fileset.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/fileset.py Wed Oct 11 02:02:46 2023 +0200 @@ -9,7 +9,6 @@ import re from .i18n import _ -from .pycompat import getattr from . import ( error, filesetlang, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/filesetlang.py --- a/mercurial/filesetlang.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/filesetlang.py Wed Oct 11 02:02:46 2023 +0200 @@ -7,7 +7,6 @@ from .i18n import _ -from .pycompat import getattr from . import ( error, parser, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/help.py --- a/mercurial/help.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/help.py Wed Oct 11 02:02:46 2023 +0200 @@ -26,7 +26,6 @@ _, gettext, ) -from .pycompat import getattr from . import ( cmdutil, encoding, @@ -43,7 +42,6 @@ templatefuncs, templatekw, ui as uimod, - util, ) from .hgweb import webcommands from .utils import ( @@ -810,7 +808,7 @@ doc = gettext(pycompat.getdoc(entry[0])) if not doc: doc = _(b"(no help text available)") - if util.safehasattr(entry[0], 'definition'): # aliased command + if hasattr(entry[0], 'definition'): # aliased command source = entry[0].source if entry[0].definition.startswith(b'!'): # shell alias doc = _(b'shell alias for: %s\n\n%s\n\ndefined by: %s\n') % ( diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/hg.py --- a/mercurial/hg.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/hg.py Wed Oct 11 02:02:46 2023 +0200 @@ -19,7 +19,6 @@ sha1nodeconstants, short, ) -from .pycompat import getattr from . 
import ( bookmarks, @@ -66,7 +65,7 @@ def addbranchrevs(lrepo, other, branches, revs, remotehidden=False): - if util.safehasattr(other, 'peer'): + if hasattr(other, 'peer'): # a courtesy to callers using a localrepo for other peer = other.peer(remotehidden=remotehidden) else: @@ -174,7 +173,7 @@ cls.instance # make sure we load the module else: cls = LocalFactory - if util.safehasattr(cls, 'islocal'): + if hasattr(cls, 'islocal'): return cls.islocal(repo) # pytype: disable=module-attr return False repo.ui.deprecwarn(b"use obj.local() instead of islocal(obj)", b"6.4") @@ -254,7 +253,7 @@ '''return a repository peer for the specified path''' ui = getattr(uiorrepo, 'ui', uiorrepo) rui = remoteui(uiorrepo, opts) - if util.safehasattr(path, 'url'): + if hasattr(path, 'url'): # this is already a urlutil.path object peer_path = path else: @@ -317,7 +316,7 @@ if repo.sharedpath == repo.path: return None - if util.safehasattr(repo, 'srcrepo') and repo.srcrepo: + if hasattr(repo, 'srcrepo') and repo.srcrepo: return repo.srcrepo # the sharedpath always ends in the .hg; we want the path to the repo @@ -340,7 +339,7 @@ '''create a shared repository''' not_local_msg = _(b'can only share local repositories') - if util.safehasattr(source, 'local'): + if hasattr(source, 'local'): if source.local() is None: raise error.Abort(not_local_msg) elif not islocal(source): @@ -729,7 +728,7 @@ branches = (src_path.branch, branch or []) source = src_path.loc else: - if util.safehasattr(source, 'peer'): + if hasattr(source, 'peer'): srcpeer = source.peer() # in case we were called with a localrepo else: srcpeer = source @@ -1567,7 +1566,7 @@ def remoteui(src, opts): """build a remote ui from ui or repo and opts""" - if util.safehasattr(src, 'baseui'): # looks like a repository + if hasattr(src, 'baseui'): # looks like a repository dst = src.baseui.copy() # drop repo-specific config src = src.ui # copy target options from repo else: # assume it's a global ui object @@ -1599,10 +1598,10 @@ # Used to check if the repository has changed looking at mtime and size of # these files. foi = [ - (b'spath', b'00changelog.i'), - (b'spath', b'phaseroots'), # ! phase can change content at the same size - (b'spath', b'obsstore'), - (b'path', b'bookmarks'), # ! bookmark can change content at the same size + ('spath', b'00changelog.i'), + ('spath', b'phaseroots'), # ! phase can change content at the same size + ('spath', b'obsstore'), + ('path', b'bookmarks'), # ! bookmark can change content at the same size ] diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/hgweb/common.py --- a/mercurial/hgweb/common.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/hgweb/common.py Wed Oct 11 02:02:46 2023 +0200 @@ -15,7 +15,6 @@ from ..i18n import _ from ..pycompat import ( - getattr, open, ) from .. import ( diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/hgweb/hgweb_mod.py --- a/mercurial/hgweb/hgweb_mod.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/hgweb/hgweb_mod.py Wed Oct 11 02:02:46 2023 +0200 @@ -17,7 +17,6 @@ permhooks, statusmessage, ) -from ..pycompat import getattr from .. import ( encoding, @@ -34,7 +33,6 @@ templater, templateutil, ui as uimod, - util, wireprotoserver, ) @@ -403,7 +401,7 @@ cmd = cmd[style + 1 :] # avoid accepting e.g. style parameter as command - if util.safehasattr(webcommands, cmd): + if hasattr(webcommands, pycompat.sysstr(cmd)): req.qsparams[b'cmd'] = cmd if cmd == b'static': @@ -474,11 +472,11 @@ # override easily enough. 
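The `util.safehasattr` to builtin `hasattr` conversions above (and throughout this changeset) rely on a Python 3 guarantee; a standalone sketch, independent of Mercurial, of the Python 2 behavior that originally motivated the helper:

```python
# On Python 3, hasattr() only swallows AttributeError, which is exactly
# the contract util.safehasattr() reimplemented for Python 2 (where
# hasattr() masked *every* exception raised during attribute lookup).
class Demo:
    @property
    def flaky(self):
        raise RuntimeError('lookup failed for a non-attribute reason')

try:
    hasattr(Demo(), 'flaky')  # the RuntimeError escapes on Python 3
except RuntimeError as exc:
    print('hasattr() no longer hides unrelated errors:', exc)
```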
res.status = b'200 Script output follows' res.headers[b'Content-Type'] = ctype - return getattr(webcommands, cmd)(rctx) + return getattr(webcommands, pycompat.sysstr(cmd))(rctx) except (error.LookupError, error.RepoLookupError) as err: msg = pycompat.bytestr(err) - if util.safehasattr(err, 'name') and not isinstance( + if hasattr(err, 'name') and not isinstance( err, error.ManifestLookupError ): msg = b'revision not found: %s' % err.name diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/hgweb/server.py --- a/mercurial/hgweb/server.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/hgweb/server.py Wed Oct 11 02:02:46 2023 +0200 @@ -16,7 +16,6 @@ from ..i18n import _ from ..pycompat import ( - getattr, open, ) @@ -100,7 +99,7 @@ def log_request(self, code='-', size='-'): xheaders = [] - if util.safehasattr(self, 'headers'): + if hasattr(self, 'headers'): xheaders = [ h for h in self.headers.items() if h[0].startswith('x-') ] @@ -214,7 +213,7 @@ env['wsgi.multithread'] = isinstance( self.server, socketserver.ThreadingMixIn ) - if util.safehasattr(socketserver, 'ForkingMixIn'): + if hasattr(socketserver, 'ForkingMixIn'): env['wsgi.multiprocess'] = isinstance( self.server, socketserver.ForkingMixIn ) @@ -344,7 +343,7 @@ threading.active_count() # silence pyflakes and bypass demandimport _mixin = socketserver.ThreadingMixIn except ImportError: - if util.safehasattr(os, "fork"): + if hasattr(os, "fork"): _mixin = socketserver.ForkingMixIn else: diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/hgweb/webcommands.py --- a/mercurial/hgweb/webcommands.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/hgweb/webcommands.py Wed Oct 11 02:02:46 2023 +0200 @@ -13,7 +13,6 @@ from ..i18n import _ from ..node import hex, short -from ..pycompat import getattr from .common import ( ErrorResponse, @@ -1050,7 +1049,9 @@ } diffopts = webutil.difffeatureopts(web.req, web.repo.ui, b'annotate') - diffopts = {k: getattr(diffopts, k) for k in diffopts.defaults} + diffopts = { + k: getattr(diffopts, pycompat.sysstr(k)) for k in diffopts.defaults + } return web.sendtemplate( b'fileannotate', diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/hgweb/webutil.py --- a/mercurial/hgweb/webutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/hgweb/webutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -14,7 +14,6 @@ from ..i18n import _ from ..node import hex, short -from ..pycompat import setattr from .common import ( ErrorResponse, @@ -211,7 +210,7 @@ b'description': s.description(), b'branch': s.branch(), } - if util.safehasattr(s, 'path'): + if hasattr(s, 'path'): d[b'file'] = s.path() yield d @@ -230,16 +229,16 @@ ui, untrusted=True, section=section, whitespace=True ) - for k in ( - b'ignorews', - b'ignorewsamount', - b'ignorewseol', - b'ignoreblanklines', + for kb, ks in ( + (b'ignorews', 'ignorews'), + (b'ignorewsamount', 'ignorewsamount'), + (b'ignorewseol', 'ignorewseol'), + (b'ignoreblanklines', 'ignoreblanklines'), ): - v = req.qsparams.get(k) + v = req.qsparams.get(kb) if v is not None: v = stringutil.parsebool(v) - setattr(diffopts, k, v if v is not None else True) + setattr(diffopts, ks, v if v is not None else True) return diffopts diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/hgweb/wsgicgi.py --- a/mercurial/hgweb/wsgicgi.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/hgweb/wsgicgi.py Wed Oct 11 02:02:46 2023 +0200 @@ -9,7 +9,6 @@ # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side -from ..pycompat import getattr from .. 
import encoding, pycompat from ..utils import procutil diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/hook.py --- a/mercurial/hook.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/hook.py Wed Oct 11 02:02:46 2023 +0200 @@ -12,7 +12,6 @@ import sys from .i18n import _ -from .pycompat import getattr from . import ( demandimport, encoding, @@ -40,13 +39,14 @@ if callable(funcname): obj = funcname - funcname = pycompat.sysbytes(obj.__module__ + "." + obj.__name__) + funcname = obj.__module__ + "." + obj.__name__ else: - d = funcname.rfind(b'.') + funcname = pycompat.sysstr(funcname) + d = funcname.rfind('.') if d == -1: raise error.HookLoadError( _(b'%s hook is invalid: "%s" not in a module') - % (hname, funcname) + % (hname, stringutil.forcebytestr(funcname)) ) modname = funcname[:d] oldpaths = sys.path @@ -89,27 +89,30 @@ ) else: tracebackhint = None - raise error.HookLoadError( - _(b'%s hook is invalid: import of "%s" failed') - % (hname, modname), - hint=tracebackhint, + msg = _(b'%s hook is invalid: import of "%s" failed') + msg %= ( + stringutil.forcebytestr(hname), + stringutil.forcebytestr(modname), ) + raise error.HookLoadError(msg, hint=tracebackhint) sys.path = oldpaths try: - for p in funcname.split(b'.')[1:]: + for p in funcname.split('.')[1:]: obj = getattr(obj, p) except AttributeError: raise error.HookLoadError( _(b'%s hook is invalid: "%s" is not defined') - % (hname, funcname) + % (hname, stringutil.forcebytestr(funcname)) ) if not callable(obj): raise error.HookLoadError( _(b'%s hook is invalid: "%s" is not callable') - % (hname, funcname) + % (hname, stringutil.forcebytestr(funcname)) ) - ui.note(_(b"calling hook %s: %s\n") % (hname, funcname)) + ui.note( + _(b"calling hook %s: %s\n") % (hname, stringutil.forcebytestr(funcname)) + ) starttime = util.timer() try: @@ -134,7 +137,7 @@ b'pythonhook', b'pythonhook-%s: %s finished in %0.2f seconds\n', htype, - funcname, + stringutil.forcebytestr(funcname), duration, ) if r: @@ -347,11 +350,12 @@ if repo: path = os.path.join(repo.root, path) try: - mod = extensions.loadpath(path, b'hghook.%s' % hname) + mod_name = 'hghook.%s' % pycompat.sysstr(hname) + mod = extensions.loadpath(path, mod_name) except Exception: ui.write(_(b"loading %s hook failed:\n") % hname) raise - hookfn = getattr(mod, cmd) + hookfn = getattr(mod, pycompat.sysstr(cmd)) else: hookfn = cmd[7:].strip() r, raised = pythonhook( diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/httppeer.py --- a/mercurial/httppeer.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/httppeer.py Wed Oct 11 02:02:46 2023 +0200 @@ -15,7 +15,6 @@ from concurrent import futures from .i18n import _ -from .pycompat import getattr from . import ( bundle2, error, @@ -65,7 +64,7 @@ class _multifile: def __init__(self, *fileobjs): for f in fileobjs: - if not util.safehasattr(f, 'length'): + if not hasattr(f, 'length'): raise ValueError( b'_multifile only supports file objects that ' b'have a length but this one does not:', @@ -180,7 +179,7 @@ qs = b'?%s' % urlreq.urlencode(q) cu = b"%s%s" % (repobaseurl, qs) size = 0 - if util.safehasattr(data, 'length'): + if hasattr(data, 'length'): size = data.length elif data is not None: size = len(data) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/i18n.py --- a/mercurial/i18n.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/i18n.py Wed Oct 11 02:02:46 2023 +0200 @@ -11,7 +11,6 @@ import os import sys -from .pycompat import getattr from .utils import resourceutil from . 
import ( encoding, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/keepalive.py --- a/mercurial/keepalive.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/keepalive.py Wed Oct 11 02:02:46 2023 +0200 @@ -90,7 +90,6 @@ import threading from .i18n import _ -from .pycompat import getattr from .node import hex from . import ( pycompat, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/localrepo.py --- a/mercurial/localrepo.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/localrepo.py Wed Oct 11 02:02:46 2023 +0200 @@ -28,10 +28,6 @@ sha1nodeconstants, short, ) -from .pycompat import ( - delattr, - getattr, -) from . import ( bookmarks, branchmap, @@ -58,6 +54,7 @@ obsolete, pathutil, phases, + policy, pushkey, pycompat, rcutil, @@ -419,7 +416,7 @@ try: bundle = exchange.readbundle(self.ui, bundle, None) ret = exchange.unbundle(self._repo, bundle, heads, b'push', url) - if util.safehasattr(ret, 'getchunks'): + if hasattr(ret, 'getchunks'): # This is a bundle20 object, turn it into an unbundler. # This little dance should be dropped eventually when the # API is finally improved. @@ -1460,7 +1457,7 @@ if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool( b'devel', b'check-locks' ): - if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs + if hasattr(self.svfs, 'vfs'): # this is filtervfs self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit) else: # standard vfs self.svfs.audit = self._getsvfsward(self.svfs.audit) @@ -1522,8 +1519,8 @@ repo = rref() if ( repo is None - or not util.safehasattr(repo, '_wlockref') - or not util.safehasattr(repo, '_lockref') + or not hasattr(repo, '_wlockref') + or not hasattr(repo, '_lockref') ): return if mode in (None, b'r', b'rb'): @@ -1571,7 +1568,7 @@ def checksvfs(path, mode=None): ret = origfunc(path, mode=mode) repo = rref() - if repo is None or not util.safehasattr(repo, '_lockref'): + if repo is None or not hasattr(repo, '_lockref'): return if mode in (None, b'r', b'rb'): return @@ -3027,7 +3024,11 @@ if clearfilecache: del self._filecache[k] try: - delattr(unfiltered, k) + # XXX ideally, the key would be a unicode string to match the + # fact it refers to an attribute name. However, changing this was + # a bit of scope creep compared to the series cleaning up + # del/set/getattr, so we kept things simple here. + delattr(unfiltered, pycompat.sysstr(k)) except AttributeError: pass self.invalidatecaches() @@ -3763,7 +3764,11 @@ if ui.configbool(b'format', b'bookmarks-in-store'): requirements.add(requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT) - if ui.configbool(b'format', b'use-persistent-nodemap'): + # The feature is disabled unless a fast implementation is available. + persistent_nodemap_default = policy.importrust('revlog') is not None + if ui.configbool( + b'format', b'use-persistent-nodemap', persistent_nodemap_default + ): requirements.add(requirementsmod.NODEMAP_REQUIREMENT) # if share-safe is enabled, let's create the new repository with the new diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/lock.py --- a/mercurial/lock.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/lock.py Wed Oct 11 02:02:46 2023 +0200 @@ -15,7 +15,6 @@ import warnings from .i18n import _ -from .pycompat import getattr from . 
import ( encoding, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/lsprof.py --- a/mercurial/lsprof.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/lsprof.py Wed Oct 11 02:02:46 2023 +0200 @@ -1,8 +1,6 @@ import _lsprof import sys -from .pycompat import getattr - Profiler = _lsprof.Profiler # PyPy doesn't expose profiler_entry from the module. diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/mail.py --- a/mercurial/mail.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/mail.py Wed Oct 11 02:02:46 2023 +0200 @@ -20,7 +20,6 @@ from .i18n import _ from .pycompat import ( - getattr, open, ) from . import ( diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/manifest.py --- a/mercurial/manifest.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/manifest.py Wed Oct 11 02:02:46 2023 +0200 @@ -17,7 +17,6 @@ hex, nullrev, ) -from .pycompat import getattr from . import ( encoding, error, @@ -1628,7 +1627,7 @@ def _setupmanifestcachehooks(self, repo): """Persist the manifestfulltextcache on lock release""" - if not util.safehasattr(repo, '_wlockref'): + if not hasattr(repo, '_wlockref'): return self._fulltextcache._opener = repo.wcachevfs diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/mdiff.py --- a/mercurial/mdiff.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/mdiff.py Wed Oct 11 02:02:46 2023 +0200 @@ -11,10 +11,6 @@ import zlib from .i18n import _ -from .pycompat import ( - getattr, - setattr, -) from . import ( diffhelper, encoding, @@ -78,7 +74,7 @@ v = opts.get(k) if v is None: v = self.defaults[k] - setattr(self, k, v) + setattr(self, pycompat.sysstr(k), v) try: self.context = int(self.context) @@ -89,14 +85,15 @@ ) def copy(self, **kwargs): - opts = {k: getattr(self, k) for k in self.defaults} + opts = {k: getattr(self, pycompat.sysstr(k)) for k in self.defaults} opts = pycompat.strkwargs(opts) opts.update(kwargs) return diffopts(**opts) def __bytes__(self): return b", ".join( - b"%s: %r" % (k, getattr(self, k)) for k in self.defaults + b"%s: %r" % (k, getattr(self, pycompat.sysstr(k))) + for k in self.defaults ) __str__ = encoding.strmethod(__bytes__) @@ -210,11 +207,7 @@ def chooseblocksfunc(opts=None): - if ( - opts is None - or not opts.xdiff - or not util.safehasattr(bdiff, 'xdiffblocks') - ): + if opts is None or not opts.xdiff or not hasattr(bdiff, 'xdiffblocks'): return bdiff.blocks else: return bdiff.xdiffblocks diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/narrowspec.py --- a/mercurial/narrowspec.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/narrowspec.py Wed Oct 11 02:02:46 2023 +0200 @@ -8,7 +8,6 @@ import weakref from .i18n import _ -from .pycompat import getattr from . 
import ( error, match as matchmod, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/obsolete.py --- a/mercurial/obsolete.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/obsolete.py Wed Oct 11 02:02:46 2023 +0200 @@ -73,7 +73,6 @@ import weakref from .i18n import _ -from .pycompat import getattr from .node import ( bin, hex, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/patch.py --- a/mercurial/patch.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/patch.py Wed Oct 11 02:02:46 2023 +0200 @@ -168,7 +168,7 @@ mimeheaders = [b'content-type'] - if not util.safehasattr(stream, 'next'): + if not hasattr(stream, 'next'): # http responses, for example, have readline but not next stream = fiter(stream) @@ -1703,7 +1703,7 @@ newhunks = [] for c in hunks: - if util.safehasattr(c, 'reversehunk'): + if hasattr(c, 'reversehunk'): c = c.reversehunk() newhunks.append(c) return newhunks diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/pathutil.py --- a/mercurial/pathutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/pathutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -377,7 +377,7 @@ return d in self._dirs -if util.safehasattr(parsers, 'dirs'): +if hasattr(parsers, 'dirs'): dirs = parsers.dirs if rustdirs is not None: diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/phases.py --- a/mercurial/phases.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/phases.py Wed Oct 11 02:02:46 2023 +0200 @@ -111,10 +111,6 @@ short, wdirrev, ) -from .pycompat import ( - getattr, - setattr, -) from . import ( error, pycompat, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/policy.py --- a/mercurial/policy.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/policy.py Wed Oct 11 02:02:46 2023 +0200 @@ -9,8 +9,6 @@ import os import sys -from .pycompat import getattr - # Rules for how modules can be loaded. Values are: # # c - require C extensions diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/posix.py --- a/mercurial/posix.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/posix.py Wed Oct 11 02:02:46 2023 +0200 @@ -36,7 +36,6 @@ from .i18n import _ from .pycompat import ( - getattr, open, ) from . import ( diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/profiling.py --- a/mercurial/profiling.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/profiling.py Wed Oct 11 02:02:46 2023 +0200 @@ -10,7 +10,6 @@ from .i18n import _ from .pycompat import ( - getattr, open, ) from . import ( diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/pure/osutil.py --- a/mercurial/pure/osutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/pure/osutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -11,7 +11,6 @@ import os import stat as statmod -from ..pycompat import getattr from .. import ( encoding, pycompat, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/pvec.py --- a/mercurial/pvec.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/pvec.py Wed Oct 11 02:02:46 2023 +0200 @@ -159,7 +159,7 @@ def ctxpvec(ctx): '''construct a pvec for ctx while filling in the cache''' r = ctx.repo() - if not util.safehasattr(r, "_pveccache"): + if not hasattr(r, "_pveccache"): r._pveccache = {} pvc = r._pveccache if ctx.rev() not in pvc: diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/pycompat.py --- a/mercurial/pycompat.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/pycompat.py Wed Oct 11 02:02:46 2023 +0200 @@ -355,6 +355,13 @@ def _wrapattrfunc(f): @functools.wraps(f) def w(object, name, *args): + if isinstance(name, bytes): + from . 
import util + + msg = b'function "%s" takes `str` as argument, not `bytes`' + fname = f.__name__.encode('ascii') + msg %= fname + util.nouideprecwarn(msg, b"6.6", stacklevel=2) return f(object, sysstr(name), *args) return w diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/registrar.py --- a/mercurial/registrar.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/registrar.py Wed Oct 11 02:02:46 2023 +0200 @@ -6,11 +6,11 @@ # GNU General Public License version 2 or any later version. +from typing import Any, List, Optional, Tuple from . import ( configitems, error, pycompat, - util, ) # unlike the other registered items, config options are neither functions nor @@ -64,7 +64,7 @@ msg = b'duplicate registration for name: "%s"' % name raise error.ProgrammingError(msg) - if func.__doc__ and not util.safehasattr(func, '_origdoc'): + if func.__doc__ and not hasattr(func, '_origdoc'): func._origdoc = func.__doc__.strip() doc = pycompat.sysbytes(func._origdoc) func.__doc__ = pycompat.sysstr(self._formatdoc(decl, doc)) @@ -534,3 +534,30 @@ # actual capabilities, which this internal merge tool has func.capabilities = {b"binary": binarycap, b"symlink": symlinkcap} + + +class verify_check(_funcregistrarbase): + """Decorator to register a check for admin::verify + + options is a list of (name, default value, help) to be passed to the check + """ + + def __init__(self, table=None, alias_table=None): + super().__init__(table) + if alias_table is None: + self._alias_table = {} + else: + self._alias_table = alias_table + + def _extrasetup( + self, + name, + func, + alias: Optional[bytes] = None, + options: Optional[List[Tuple[bytes, Any, bytes]]] = None, + ): + func.alias = alias + func.options = options + + if alias: + self._alias_table[alias] = name diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/repoview.py --- a/mercurial/repoview.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/repoview.py Wed Oct 11 02:02:46 2023 +0200 @@ -15,11 +15,6 @@ hex, nullrev, ) -from .pycompat import ( - delattr, - getattr, - setattr, -) from . import ( error, obsolete, @@ -296,13 +291,12 @@ This returns a version of 'revs' to be used thereafter by the caller. In particular, if revs is an iterator, it is converted into a set. 
""" - safehasattr = util.safehasattr - if safehasattr(revs, '__next__'): + if hasattr(revs, '__next__'): # Note that inspect.isgenerator() is not true for iterators, revs = set(revs) filteredrevs = self.filteredrevs - if safehasattr(revs, 'first'): # smartset + if hasattr(revs, 'first'): # smartset offenders = revs & filteredrevs else: offenders = filteredrevs.intersection(revs) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/revlog.py --- a/mercurial/revlog.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/revlog.py Wed Oct 11 02:02:46 2023 +0200 @@ -32,7 +32,6 @@ wdirrev, ) from .i18n import _ -from .pycompat import getattr from .revlogutils.constants import ( ALL_KINDS, CHANGELOGV2, @@ -167,7 +166,7 @@ # We also consider we have a "fast" implementation in "pure" python because # people using pure don't really have performance consideration (and a # wheelbarrow of other slowness source) -HAS_FAST_PERSISTENT_NODEMAP = rustrevlog is not None or util.safehasattr( +HAS_FAST_PERSISTENT_NODEMAP = rustrevlog is not None or hasattr( parsers, 'BaseIndexObject' ) @@ -214,7 +213,7 @@ return index, cache -if util.safehasattr(parsers, 'parse_index_devel_nodemap'): +if hasattr(parsers, 'parse_index_devel_nodemap'): def parse_index_v1_nodemap(data, inline): index, cache = parsers.parse_index_devel_nodemap(data, inline) @@ -730,7 +729,7 @@ use_nodemap = ( not self._inline and self._nodemap_file is not None - and util.safehasattr(index, 'update_nodemap_data') + and hasattr(index, 'update_nodemap_data') ) if use_nodemap: nodemap_data = nodemaputil.persisted_data(self) @@ -911,7 +910,7 @@ use_nodemap = ( not self._inline and self._nodemap_file is not None - and util.safehasattr(self.index, 'update_nodemap_data') + and hasattr(self.index, 'update_nodemap_data') ) if use_nodemap: nodemap_data = nodemaputil.persisted_data(self) @@ -1887,7 +1886,7 @@ """tells whether rev is a snapshot""" if not self._sparserevlog: return self.deltaparent(rev) == nullrev - elif util.safehasattr(self.index, 'issnapshot'): + elif hasattr(self.index, 'issnapshot'): # directly assign the method to cache the testing and access self.issnapshot = self.index.issnapshot return self.issnapshot(rev) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/revlogutils/debug.py --- a/mercurial/revlogutils/debug.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/revlogutils/debug.py Wed Oct 11 02:02:46 2023 +0200 @@ -13,7 +13,6 @@ mdiff, node as nodemod, revlogutils, - util, ) from . import ( @@ -409,7 +408,7 @@ numother_nad += 1 # Obtain data on the raw chunks in the revlog. 
- if util.safehasattr(r, '_getsegmentforrevs'): + if hasattr(r, '_getsegmentforrevs'): segment = r._getsegmentforrevs(rev, rev)[1] else: segment = r._revlog._getsegmentforrevs(rev, rev)[1] diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/revlogutils/deltas.py --- a/mercurial/revlogutils/deltas.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/revlogutils/deltas.py Wed Oct 11 02:02:46 2023 +0200 @@ -14,7 +14,6 @@ # import stuff from node for others to import from revlog from ..node import nullrev from ..i18n import _ -from ..pycompat import getattr from .constants import ( COMP_MODE_DEFAULT, @@ -1060,7 +1059,7 @@ end_rev < self._start_rev or end_rev > self._end_rev ), (self._start_rev, self._end_rev, start_rev, end_rev) cache = self.snapshots - if util.safehasattr(revlog.index, 'findsnapshots'): + if hasattr(revlog.index, 'findsnapshots'): revlog.index.findsnapshots(cache, start_rev, end_rev) else: deltaparent = revlog.deltaparent diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/revlogutils/nodemap.py --- a/mercurial/revlogutils/nodemap.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/revlogutils/nodemap.py Wed Oct 11 02:02:46 2023 +0200 @@ -174,9 +174,9 @@ msg = "calling persist nodemap on a revlog without the feature enabled" raise error.ProgrammingError(msg) - can_incremental = util.safehasattr(revlog.index, "nodemap_data_incremental") + can_incremental = hasattr(revlog.index, "nodemap_data_incremental") ondisk_docket = revlog._nodemap_docket - feed_data = util.safehasattr(revlog.index, "update_nodemap_data") + feed_data = hasattr(revlog.index, "update_nodemap_data") use_mmap = revlog.opener.options.get(b"persistent-nodemap.mmap") data = None @@ -216,7 +216,7 @@ # otherwise fallback to a full new export target_docket = NodeMapDocket() datafile = _rawdata_filepath(revlog, target_docket) - if util.safehasattr(revlog.index, "nodemap_data_all"): + if hasattr(revlog.index, "nodemap_data_all"): data = revlog.index.nodemap_data_all() else: data = persistent_data(revlog.index) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/revset.py --- a/mercurial/revset.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/revset.py Wed Oct 11 02:02:46 2023 +0200 @@ -12,7 +12,6 @@ import re from .i18n import _ -from .pycompat import getattr from .node import ( bin, nullrev, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/revsetlang.py --- a/mercurial/revsetlang.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/revsetlang.py Wed Oct 11 02:02:46 2023 +0200 @@ -9,7 +9,6 @@ import string from .i18n import _ -from .pycompat import getattr from .node import hex from . import ( error, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/rewriteutil.py --- a/mercurial/rewriteutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/rewriteutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -21,7 +21,6 @@ obsutil, revset, scmutil, - util, ) @@ -77,7 +76,7 @@ hint = _(b"no changeset checked out") raise error.InputError(msg, hint=hint) - if any(util.safehasattr(r, 'rev') for r in revs): + if any(hasattr(r, 'rev') for r in revs): repo.ui.develwarn(b"rewriteutil.precheck called with ctx not revs") revs = (r.rev() for r in revs) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/scmposix.py --- a/mercurial/scmposix.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/scmposix.py Wed Oct 11 02:02:46 2023 +0200 @@ -9,7 +9,6 @@ Tuple, ) -from .pycompat import getattr from . 
import ( encoding, pycompat, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/scmutil.py --- a/mercurial/scmutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/scmutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -23,7 +23,6 @@ short, wdirrev, ) -from .pycompat import getattr from .thirdparty import attr from . import ( copies as copiesmod, @@ -233,11 +232,7 @@ reason = encoding.unitolocal(reason) ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason)) except (IOError, OSError) as inst: - if ( - util.safehasattr(inst, "args") - and inst.args - and inst.args[0] == errno.EPIPE - ): + if hasattr(inst, "args") and inst.args and inst.args[0] == errno.EPIPE: pass elif getattr(inst, "strerror", None): # common IOError or OSError if getattr(inst, "filename", None) is not None: @@ -561,11 +556,11 @@ if cache is not None: nodetree = cache.get(b'disambiguationnodetree') if not nodetree: - if util.safehasattr(parsers, 'nodetree'): + if hasattr(parsers, 'nodetree'): # The CExt is the only implementation to provide a nodetree # class so far. index = cl.index - if util.safehasattr(index, 'get_cindex'): + if hasattr(index, 'get_cindex'): # the rust wrapper needs to give access to its internal index index = index.get_cindex() nodetree = parsers.nodetree(index, len(revs)) @@ -1066,7 +1061,7 @@ return # translate mapping's other forms - if not util.safehasattr(replacements, 'items'): + if not hasattr(replacements, 'items'): replacements = {(n,): () for n in replacements} else: # upgrading non tuple "source" to tuple ones for BC @@ -1692,6 +1687,10 @@ def __call__(self, func): self.func = func self.sname = func.__name__ + # XXX We should be using a unicode string instead of bytes for the main + # name (and the _filecache key). The fact we use bytes is a remnant + # from Python 2; since the name is derived from an attribute name, a + # `str` is a better fit now that we support Python 3 only. self.name = pycompat.sysbytes(self.sname) return self diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/shelve.py --- a/mercurial/shelve.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/shelve.py Wed Oct 11 02:02:46 2023 +0200 @@ -516,7 +516,7 @@ def getcommitfunc(extra, interactive, editor=False): def commitfunc(ui, repo, message, match, opts): - hasmq = util.safehasattr(repo, 'mq') + hasmq = hasattr(repo, 'mq') if hasmq: saved, repo.mq.checkapplied = repo.mq.checkapplied, False diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/smartset.py --- a/mercurial/smartset.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/smartset.py Wed Oct 11 02:02:46 2023 +0200 @@ -6,7 +6,6 @@ # GNU General Public License version 2 or any later version. -from .pycompat import getattr from . import ( encoding, error, @@ -137,7 +136,7 @@ This is part of the mandatory API for smartset.""" # builtin conditions cannot be cached, but they do not need to be - if cache and util.safehasattr(condition, '__code__'): + if cache and hasattr(condition, '__code__'): condition = util.cachefunc(condition) return filteredset(self, condition, condrepr) @@ -359,10 +358,10 @@ return s def __and__(self, other): - return self._fastsetop(other, b'__and__') + return self._fastsetop(other, '__and__') def __sub__(self, other): - return self._fastsetop(other, b'__sub__') + return self._fastsetop(other, '__sub__') def _slice(self, start, stop): # creating new list should be generally cheaper than iterating items @@ -1127,7 +1126,7 @@ This boldly assumes the other contains valid revs only. 
""" # other not a smartset, make is so - if not util.safehasattr(other, 'isascending'): + if not hasattr(other, 'isascending'): # filter out hidden revision # (this boldly assumes all smartset are pure) # diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/sshpeer.py --- a/mercurial/sshpeer.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/sshpeer.py Wed Oct 11 02:02:46 2023 +0200 @@ -10,7 +10,6 @@ import uuid from .i18n import _ -from .pycompat import getattr from . import ( error, pycompat, @@ -130,7 +129,7 @@ if sideready: _forwardoutput(self._ui, self._side) if mainready: - meth = getattr(self._main, methname) + meth = getattr(self._main, pycompat.sysstr(methname)) if data is None: return meth() else: diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/sslutil.py --- a/mercurial/sslutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/sslutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -15,7 +15,6 @@ import warnings from .i18n import _ -from .pycompat import getattr from .node import hex from . import ( encoding, @@ -50,11 +49,11 @@ # were defined only if compiled against a OpenSSL version with TLS 1.1 / 1.2 # support. At the mentioned commit, they were unconditionally defined. supportedprotocols = set() -if getattr(ssl, 'HAS_TLSv1', util.safehasattr(ssl, 'PROTOCOL_TLSv1')): +if getattr(ssl, 'HAS_TLSv1', hasattr(ssl, 'PROTOCOL_TLSv1')): supportedprotocols.add(b'tls1.0') -if getattr(ssl, 'HAS_TLSv1_1', util.safehasattr(ssl, 'PROTOCOL_TLSv1_1')): +if getattr(ssl, 'HAS_TLSv1_1', hasattr(ssl, 'PROTOCOL_TLSv1_1')): supportedprotocols.add(b'tls1.1') -if getattr(ssl, 'HAS_TLSv1_2', util.safehasattr(ssl, 'PROTOCOL_TLSv1_2')): +if getattr(ssl, 'HAS_TLSv1_2', hasattr(ssl, 'PROTOCOL_TLSv1_2')): supportedprotocols.add(b'tls1.2') @@ -312,7 +311,7 @@ # is loaded and contains that removed CA, you've just undone the user's # choice. - if util.safehasattr(ssl, 'TLSVersion'): + if hasattr(ssl, 'TLSVersion'): # python 3.7+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) minimumprotocol = settings[b'minimumprotocol'] @@ -419,7 +418,7 @@ pass # Try to print more helpful error messages for known failures. - if util.safehasattr(e, 'reason'): + if hasattr(e, 'reason'): # This error occurs when the client and server don't share a # common/supported SSL/TLS protocol. We've disabled SSLv2 and SSLv3 # outright. Hopefully the reason for this error is that we require @@ -546,7 +545,7 @@ _(b'referenced certificate file (%s) does not exist') % f ) - if util.safehasattr(ssl, 'TLSVersion'): + if hasattr(ssl, 'TLSVersion'): # python 3.7+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) sslcontext.options |= getattr(ssl, 'OP_NO_COMPRESSION', 0) @@ -628,7 +627,7 @@ # Otherwise, use the list of more secure ciphers if found in the ssl module. 
if exactprotocol: sslcontext.set_ciphers('DEFAULT:@SECLEVEL=0') - elif util.safehasattr(ssl, '_RESTRICTED_SERVER_CIPHERS'): + elif hasattr(ssl, '_RESTRICTED_SERVER_CIPHERS'): sslcontext.options |= getattr(ssl, 'OP_CIPHER_SERVER_PREFERENCE', 0) # pytype: disable=module-attr sslcontext.set_ciphers(ssl._RESTRICTED_SERVER_CIPHERS) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/statprof.py --- a/mercurial/statprof.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/statprof.py Wed Oct 11 02:02:46 2023 +0200 @@ -167,7 +167,7 @@ # a float if frequency: self.sample_interval = 1.0 / frequency - elif not pycompat.hasattr(self, 'sample_interval'): + elif not hasattr(self, 'sample_interval'): # default to 1000 Hz self.sample_interval = 1.0 / 1000.0 else: diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/store.py --- a/mercurial/store.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/store.py Wed Oct 11 02:02:46 2023 +0200 @@ -13,7 +13,6 @@ from typing import Generator, List from .i18n import _ -from .pycompat import getattr from .thirdparty import attr from .node import hex from .revlogutils.constants import ( diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/streamclone.py --- a/mercurial/streamclone.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/streamclone.py Wed Oct 11 02:02:46 2023 +0200 @@ -428,7 +428,7 @@ with repo.svfs.backgroundclosing(repo.ui, expectedcount=filecount): for i in range(filecount): # XXX doesn't support '\n' or '\r' in filenames - if util.safehasattr(fp, 'readline'): + if hasattr(fp, 'readline'): l = fp.readline() else: # inline clonebundles use a chunkbuffer, so no readline diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/strip.py --- a/mercurial/strip.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/strip.py Wed Oct 11 02:02:46 2023 +0200 @@ -1,5 +1,4 @@ from .i18n import _ -from .pycompat import getattr from . import ( bookmarks as bookmarksmod, cmdutil, @@ -12,7 +11,6 @@ registrar, repair, scmutil, - util, ) release = lockmod.release @@ -36,7 +34,7 @@ currentbranch = repo[None].branch() if ( - util.safehasattr(repo, 'mq') + hasattr(repo, 'mq') and p2 != repo.nullid and p2 in [x.node for x in repo.mq.applied] ): diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/subrepo.py --- a/mercurial/subrepo.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/subrepo.py Wed Oct 11 02:02:46 2023 +0200 @@ -1136,7 +1136,7 @@ # --non-interactive. if commands[0] in (b'update', b'checkout', b'commit'): cmd.append(b'--non-interactive') - if util.safehasattr(subprocess, 'CREATE_NO_WINDOW'): + if hasattr(subprocess, 'CREATE_NO_WINDOW'): # On Windows, prevent command prompts windows from popping up when # running in pythonw. extrakw['creationflags'] = getattr(subprocess, 'CREATE_NO_WINDOW') @@ -1511,7 +1511,7 @@ # the end of git diff arguments is used for paths commands.insert(1, b'--color') extrakw = {} - if util.safehasattr(subprocess, 'CREATE_NO_WINDOW'): + if hasattr(subprocess, 'CREATE_NO_WINDOW'): # On Windows, prevent command prompts windows from popping up when # running in pythonw. extrakw['creationflags'] = getattr(subprocess, 'CREATE_NO_WINDOW') diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/subrepoutil.py --- a/mercurial/subrepoutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/subrepoutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -11,7 +11,6 @@ import re from .i18n import _ -from .pycompat import getattr from . 
import ( config, error, @@ -384,7 +383,7 @@ Either absolute or relative the outermost repo""" parent = repo chunks = [] - while util.safehasattr(parent, '_subparent'): + while hasattr(parent, '_subparent'): source = urlutil.url(parent._subsource) chunks.append(bytes(source)) if source.isabs(): @@ -400,7 +399,7 @@ # type: (localrepo.localrepository) -> bytes """return path to this (sub)repo as seen from outermost repo""" parent = repo - while util.safehasattr(parent, '_subparent'): + while hasattr(parent, '_subparent'): parent = parent._subparent return repo.root[len(pathutil.normasprefix(parent.root)) :] @@ -415,7 +414,7 @@ # type: (localrepo.localrepository, bool, bool) -> Optional[bytes] """return pull/push path of repo - either based on parent repo .hgsub info or on the top repo config. Abort or return None if no source found.""" - if util.safehasattr(repo, '_subparent'): + if hasattr(repo, '_subparent'): source = urlutil.url(repo._subsource) if source.isabs(): return bytes(source) @@ -428,7 +427,7 @@ return bytes(parent) else: # recursion reached top repo path = None - if util.safehasattr(repo, '_subtoppath'): + if hasattr(repo, '_subtoppath'): path = repo._subtoppath elif push and repo.ui.config(b'paths', b'default-push'): path = repo.ui.config(b'paths', b'default-push') diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/templatefilters.py --- a/mercurial/templatefilters.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/templatefilters.py Wed Oct 11 02:02:46 2023 +0200 @@ -339,14 +339,14 @@ raise error.ProgrammingError( b'Mercurial only does output with bytes: %r' % obj ) - elif util.safehasattr(obj, 'keys'): + elif hasattr(obj, 'keys'): out = [ b'"%s": %s' % (encoding.jsonescape(k, paranoid=paranoid), json(v, paranoid)) for k, v in sorted(obj.items()) ] return b'{' + b', '.join(out) + b'}' - elif util.safehasattr(obj, '__iter__'): + elif hasattr(obj, '__iter__'): out = [json(i, paranoid) for i in obj] return b'[' + b', '.join(out) + b']' raise error.ProgrammingError(b'cannot encode %r' % obj) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/templater.py --- a/mercurial/templater.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/templater.py Wed Oct 11 02:02:46 2023 +0200 @@ -72,7 +72,6 @@ from .i18n import _ from .pycompat import ( FileNotFoundError, - getattr, ) from . import ( config, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/templateutil.py --- a/mercurial/templateutil.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/templateutil.py Wed Oct 11 02:02:46 2023 +0200 @@ -10,7 +10,6 @@ import types from .i18n import _ -from .pycompat import getattr from . import ( error, pycompat, @@ -281,7 +280,7 @@ def getmember(self, context, mapping, key): # TODO: maybe split hybrid list/dict types? 
- if not util.safehasattr(self._values, 'get'): + if not hasattr(self._values, 'get'): raise error.ParseError(_(b'not a dictionary')) key = unwrapastype(context, mapping, key, self._keytype) return self._wrapvalue(key, self._values.get(key)) @@ -301,13 +300,13 @@ def _wrapvalue(self, key, val): if val is None: return - if util.safehasattr(val, '_makemap'): + if hasattr(val, '_makemap'): # a nested hybrid list/dict, which has its own way of map operation return val return hybriditem(None, key, val, self._makemap) def filter(self, context, mapping, select): - if util.safehasattr(self._values, 'get'): + if hasattr(self._values, 'get'): values = { k: v for k, v in self._values.items() @@ -341,7 +340,7 @@ def tovalue(self, context, mapping): # TODO: make it non-recursive for trivial lists/dicts xs = self._values - if util.safehasattr(xs, 'get'): + if hasattr(xs, 'get'): return {k: unwrapvalue(context, mapping, v) for k, v in xs.items()} return [unwrapvalue(context, mapping, x) for x in xs] @@ -858,7 +857,7 @@ ) elif thing is None: pass - elif not util.safehasattr(thing, '__iter__'): + elif not hasattr(thing, '__iter__'): yield pycompat.bytestr(thing) else: for i in thing: @@ -868,7 +867,7 @@ yield i elif i is None: pass - elif not util.safehasattr(i, '__iter__'): + elif not hasattr(i, '__iter__'): yield pycompat.bytestr(i) else: for j in flatten(context, mapping, i): diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/testing/storage.py --- a/mercurial/testing/storage.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/testing/storage.py Wed Oct 11 02:02:46 2023 +0200 @@ -12,7 +12,6 @@ hex, nullrev, ) -from ..pycompat import getattr from .. import ( error, mdiff, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/thirdparty/tomli/LICENSE --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/thirdparty/tomli/LICENSE Wed Oct 11 02:02:46 2023 +0200 @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Taneli Hukkinen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
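A minimal sketch, assuming the vendored package path added by this changeset (`mercurial/thirdparty/tomli/`), of how the bundled parser is reachable from Mercurial code:

```python
# Sketch: importing the vendored TOML parser and parsing a tiny document.
from mercurial.thirdparty import tomli

doc = tomli.loads('answer = 42\n\n[section]\nname = "value"\n')
assert doc == {'answer': 42, 'section': {'name': 'value'}}
```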
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/thirdparty/tomli/README.md --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/thirdparty/tomli/README.md Wed Oct 11 02:02:46 2023 +0200 @@ -0,0 +1,182 @@ +[![Build Status](https://github.com/hukkin/tomli/workflows/Tests/badge.svg?branch=master)](https://github.com/hukkin/tomli/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush) +[![codecov.io](https://codecov.io/gh/hukkin/tomli/branch/master/graph/badge.svg)](https://codecov.io/gh/hukkin/tomli) +[![PyPI version](https://img.shields.io/pypi/v/tomli)](https://pypi.org/project/tomli) + +# Tomli + +> A lil' TOML parser + +**Table of Contents** *generated with [mdformat-toc](https://github.com/hukkin/mdformat-toc)* + + + +- [Intro](#intro) +- [Installation](#installation) +- [Usage](#usage) + - [Parse a TOML string](#parse-a-toml-string) + - [Parse a TOML file](#parse-a-toml-file) + - [Handle invalid TOML](#handle-invalid-toml) + - [Construct `decimal.Decimal`s from TOML floats](#construct-decimaldecimals-from-toml-floats) +- [FAQ](#faq) + - [Why this parser?](#why-this-parser) + - [Is comment preserving round-trip parsing supported?](#is-comment-preserving-round-trip-parsing-supported) + - [Is there a `dumps`, `write` or `encode` function?](#is-there-a-dumps-write-or-encode-function) + - [How do TOML types map into Python types?](#how-do-toml-types-map-into-python-types) +- [Performance](#performance) + + + +## Intro + +Tomli is a Python library for parsing [TOML](https://toml.io). +Tomli is fully compatible with [TOML v1.0.0](https://toml.io/en/v1.0.0). + +## Installation + +```bash +pip install tomli +``` + +## Usage + +### Parse a TOML string + +```python +import tomli + +toml_str = """ + gretzky = 99 + + [kurri] + jari = 17 + """ + +toml_dict = tomli.loads(toml_str) +assert toml_dict == {"gretzky": 99, "kurri": {"jari": 17}} +``` + +### Parse a TOML file + +```python +import tomli + +with open("path_to_file/conf.toml", "rb") as f: + toml_dict = tomli.load(f) +``` + +The file must be opened in binary mode (with the `"rb"` flag). +Binary mode will enforce decoding the file as UTF-8 with universal newlines disabled, +both of which are required to correctly parse TOML. +Support for text file objects is deprecated for removal in the next major release. + +### Handle invalid TOML + +```python +import tomli + +try: + toml_dict = tomli.loads("]] this is invalid TOML [[") +except tomli.TOMLDecodeError: + print("Yep, definitely not valid.") +``` + +Note that while the `TOMLDecodeError` type is public API, error messages of raised instances of it are not. +Error messages should not be assumed to stay constant across Tomli versions. + +### Construct `decimal.Decimal`s from TOML floats + +```python +from decimal import Decimal +import tomli + +toml_dict = tomli.loads("precision-matters = 0.982492", parse_float=Decimal) +assert toml_dict["precision-matters"] == Decimal("0.982492") +``` + +Note that `decimal.Decimal` can be replaced with another callable that converts a TOML float from string to a Python type. +The `decimal.Decimal` is, however, a practical choice for use cases where float inaccuracies can not be tolerated. + +Illegal types include `dict`, `list`, and anything that has the `append` attribute. +Parsing floats into an illegal type results in undefined behavior. + +## FAQ + +### Why this parser? 
+ +- it's lil' +- pure Python with zero dependencies +- the fastest pure Python parser [\*](#performance): + 15x as fast as [tomlkit](https://pypi.org/project/tomlkit/), + 2.4x as fast as [toml](https://pypi.org/project/toml/) +- outputs [basic data types](#how-do-toml-types-map-into-python-types) only +- 100% spec compliant: passes all tests in + [a test set](https://github.com/toml-lang/compliance/pull/8) + soon to be merged to the official + [compliance tests for TOML](https://github.com/toml-lang/compliance) + repository +- thoroughly tested: 100% branch coverage + +### Is comment preserving round-trip parsing supported? + +No. + +The `tomli.loads` function returns a plain `dict` that is populated with builtin types and types from the standard library only. +Preserving comments requires a custom type to be returned so will not be supported, +at least not by the `tomli.loads` and `tomli.load` functions. + +Look into [TOML Kit](https://github.com/sdispater/tomlkit) if preservation of style is what you need. + +### Is there a `dumps`, `write` or `encode` function? + +[Tomli-W](https://github.com/hukkin/tomli-w) is the write-only counterpart of Tomli, providing `dump` and `dumps` functions. + +The core library does not include write capability, as most TOML use cases are read-only, and Tomli intends to be minimal. + +### How do TOML types map into Python types? + +| TOML type | Python type | Details | +| ---------------- | ------------------- | ------------------------------------------------------------ | +| Document Root | `dict` | | +| Key | `str` | | +| String | `str` | | +| Integer | `int` | | +| Float | `float` | | +| Boolean | `bool` | | +| Offset Date-Time | `datetime.datetime` | `tzinfo` attribute set to an instance of `datetime.timezone` | +| Local Date-Time | `datetime.datetime` | `tzinfo` attribute set to `None` | +| Local Date | `datetime.date` | | +| Local Time | `datetime.time` | | +| Array | `list` | | +| Table | `dict` | | +| Inline Table | `dict` | | + +## Performance + +The `benchmark/` folder in this repository contains a performance benchmark for comparing the various Python TOML parsers. +The benchmark can be run with `tox -e benchmark-pypi`. +Running the benchmark on my personal computer output the following: + +```console +foo@bar:~/dev/tomli$ tox -e benchmark-pypi +benchmark-pypi installed: attrs==19.3.0,click==7.1.2,pytomlpp==1.0.2,qtoml==0.3.0,rtoml==0.7.0,toml==0.10.2,tomli==1.1.0,tomlkit==0.7.2 +benchmark-pypi run-test-pre: PYTHONHASHSEED='2658546909' +benchmark-pypi run-test: commands[0] | python -c 'import datetime; print(datetime.date.today())' +2021-07-23 +benchmark-pypi run-test: commands[1] | python --version +Python 3.8.10 +benchmark-pypi run-test: commands[2] | python benchmark/run.py +Parsing data.toml 5000 times: +------------------------------------------------------ + parser | exec time | performance (more is better) +-----------+------------+----------------------------- + rtoml | 0.901 s | baseline (100%) + pytomlpp | 1.08 s | 83.15% + tomli | 3.89 s | 23.15% + toml | 9.36 s | 9.63% + qtoml | 11.5 s | 7.82% + tomlkit | 56.8 s | 1.59% +``` + +The parsers are ordered from fastest to slowest, using the fastest parser as baseline. +Tomli performed the best out of all pure Python TOML parsers, +losing only to pytomlpp (wraps C++) and rtoml (wraps Rust). 
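The `__init__.py` that follows re-exports `loads`, `load`, and `TOMLDecodeError` and rewrites `TOMLDecodeError.__module__`; a small sketch of the effect on callers (the invalid input is illustrative, and the top-level `import tomli` matches the README usage above):

```python
# Sketch: TOMLDecodeError is defined in tomli._parser, but the
# __module__ reassignment makes tracebacks read
# "tomli.TOMLDecodeError" rather than "tomli._parser.TOMLDecodeError".
import tomli

try:
    tomli.loads('key =   # no value before the comment: invalid TOML')
except tomli.TOMLDecodeError as exc:
    print('rejected:', exc)
```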
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/thirdparty/tomli/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/thirdparty/tomli/__init__.py Wed Oct 11 02:02:46 2023 +0200 @@ -0,0 +1,9 @@ +"""A lil' TOML parser.""" + +__all__ = ("loads", "load", "TOMLDecodeError") +__version__ = "1.2.3" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT + +from ._parser import TOMLDecodeError, load, loads + +# Pretend this exception was created here. +TOMLDecodeError.__module__ = "tomli" diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/thirdparty/tomli/_parser.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/thirdparty/tomli/_parser.py Wed Oct 11 02:02:46 2023 +0200 @@ -0,0 +1,663 @@ +import string +from types import MappingProxyType +from typing import Any, BinaryIO, Dict, FrozenSet, Iterable, NamedTuple, Optional, Tuple +import warnings + +from ._re import ( + RE_DATETIME, + RE_LOCALTIME, + RE_NUMBER, + match_to_datetime, + match_to_localtime, + match_to_number, +) +from ._types import Key, ParseFloat, Pos + +ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) + +# Neither of these sets include quotation mark or backslash. They are +# currently handled as separate cases in the parser functions. +ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") +ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n") + +ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS +ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS + +ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS + +TOML_WS = frozenset(" \t") +TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") +BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") +KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") +HEXDIGIT_CHARS = frozenset(string.hexdigits) + +BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( + { + "\\b": "\u0008", # backspace + "\\t": "\u0009", # tab + "\\n": "\u000A", # linefeed + "\\f": "\u000C", # form feed + "\\r": "\u000D", # carriage return + '\\"': "\u0022", # quote + "\\\\": "\u005C", # backslash + } +) + + +class TOMLDecodeError(ValueError): + """An error raised if a document is not valid TOML.""" + + +def load(fp: BinaryIO, *, parse_float: ParseFloat = float) -> Dict[str, Any]: + """Parse TOML from a binary file object.""" + s_bytes = fp.read() + try: + s = s_bytes.decode() + except AttributeError: + warnings.warn( + "Text file object support is deprecated in favor of binary file objects." + ' Use `open("foo.toml", "rb")` to open the file in binary mode.', + DeprecationWarning, + stacklevel=2, + ) + s = s_bytes # type: ignore[assignment] + return loads(s, parse_float=parse_float) + + +def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa: C901 + """Parse TOML from a string.""" + + # The spec allows converting "\r\n" to "\n", even in string + # literals. Let's do so to simplify parsing. + src = s.replace("\r\n", "\n") + pos = 0 + out = Output(NestedDict(), Flags()) + header: Key = () + + # Parse one statement at a time + # (typically means one line in TOML source) + while True: + # 1. Skip line leading whitespace + pos = skip_chars(src, pos, TOML_WS) + + # 2. Parse rules. Expect one of the following: + # - end of file + # - end of line + # - comment + # - key/value pair + # - append dict to list (and move to its namespace) + # - create dict (and move to its namespace) + # Skip trailing whitespace when applicable. 
+ try: + char = src[pos] + except IndexError: + break + if char == "\n": + pos += 1 + continue + if char in KEY_INITIAL_CHARS: + pos = key_value_rule(src, pos, out, header, parse_float) + pos = skip_chars(src, pos, TOML_WS) + elif char == "[": + try: + second_char: Optional[str] = src[pos + 1] + except IndexError: + second_char = None + if second_char == "[": + pos, header = create_list_rule(src, pos, out) + else: + pos, header = create_dict_rule(src, pos, out) + pos = skip_chars(src, pos, TOML_WS) + elif char != "#": + raise suffixed_err(src, pos, "Invalid statement") + + # 3. Skip comment + pos = skip_comment(src, pos) + + # 4. Expect end of line or end of file + try: + char = src[pos] + except IndexError: + break + if char != "\n": + raise suffixed_err( + src, pos, "Expected newline or end of document after a statement" + ) + pos += 1 + + return out.data.dict + + +class Flags: + """Flags that map to parsed keys/namespaces.""" + + # Marks an immutable namespace (inline array or inline table). + FROZEN = 0 + # Marks a nest that has been explicitly created and can no longer + # be opened using the "[table]" syntax. + EXPLICIT_NEST = 1 + + def __init__(self) -> None: + self._flags: Dict[str, dict] = {} + + def unset_all(self, key: Key) -> None: + cont = self._flags + for k in key[:-1]: + if k not in cont: + return + cont = cont[k]["nested"] + cont.pop(key[-1], None) + + def set_for_relative_key(self, head_key: Key, rel_key: Key, flag: int) -> None: + cont = self._flags + for k in head_key: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + for k in rel_key: + if k in cont: + cont[k]["flags"].add(flag) + else: + cont[k] = {"flags": {flag}, "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + + def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 + cont = self._flags + key_parent, key_stem = key[:-1], key[-1] + for k in key_parent: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + if key_stem not in cont: + cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag) + + def is_(self, key: Key, flag: int) -> bool: + if not key: + return False # document root has no flags + cont = self._flags + for k in key[:-1]: + if k not in cont: + return False + inner_cont = cont[k] + if flag in inner_cont["recursive_flags"]: + return True + cont = inner_cont["nested"] + key_stem = key[-1] + if key_stem in cont: + cont = cont[key_stem] + return flag in cont["flags"] or flag in cont["recursive_flags"] + return False + + +class NestedDict: + def __init__(self) -> None: + # The parsed content of the TOML document + self.dict: Dict[str, Any] = {} + + def get_or_create_nest( + self, + key: Key, + *, + access_lists: bool = True, + ) -> dict: + cont: Any = self.dict + for k in key: + if k not in cont: + cont[k] = {} + cont = cont[k] + if access_lists and isinstance(cont, list): + cont = cont[-1] + if not isinstance(cont, dict): + raise KeyError("There is no nest behind this key") + return cont + + def append_nest_to_list(self, key: Key) -> None: + cont = self.get_or_create_nest(key[:-1]) + last_key = key[-1] + if last_key in cont: + list_ = cont[last_key] + try: + list_.append({}) + except AttributeError: + raise KeyError("An object other than list found behind this key") + else: + cont[last_key] = [{}] + + +class Output(NamedTuple): + data: NestedDict + 
flags: Flags + + +def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: + try: + while src[pos] in chars: + pos += 1 + except IndexError: + pass + return pos + + +def skip_until( + src: str, + pos: Pos, + expect: str, + *, + error_on: FrozenSet[str], + error_on_eof: bool, +) -> Pos: + try: + new_pos = src.index(expect, pos) + except ValueError: + new_pos = len(src) + if error_on_eof: + raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None + + if not error_on.isdisjoint(src[pos:new_pos]): + while src[pos] not in error_on: + pos += 1 + raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") + return new_pos + + +def skip_comment(src: str, pos: Pos) -> Pos: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char == "#": + return skip_until( + src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False + ) + return pos + + +def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: + while True: + pos_before_skip = pos + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + pos = skip_comment(src, pos) + if pos == pos_before_skip: + return pos + + +def create_dict_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]: + pos += 1 # Skip "[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Can not declare {key} twice") + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.get_or_create_nest(key) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") from None + + if not src.startswith("]", pos): + raise suffixed_err(src, pos, 'Expected "]" at the end of a table declaration') + return pos + 1, key + + +def create_list_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]: + pos += 2 # Skip "[[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") + # Free the namespace now that it points to another empty list item... 
+ out.flags.unset_all(key) + # ...but this key precisely is still prohibited from table declaration + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.append_nest_to_list(key) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") from None + + if not src.startswith("]]", pos): + raise suffixed_err(src, pos, 'Expected "]]" at the end of an array declaration') + return pos + 2, key + + +def key_value_rule( + src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat +) -> Pos: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + abs_key_parent = header + key_parent + + if out.flags.is_(abs_key_parent, Flags.FROZEN): + raise suffixed_err( + src, pos, f"Can not mutate immutable namespace {abs_key_parent}" + ) + # Containers in the relative path can't be opened with the table syntax after this + out.flags.set_for_relative_key(header, key, Flags.EXPLICIT_NEST) + try: + nest = out.data.get_or_create_nest(abs_key_parent) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, "Can not overwrite a value") + # Mark inline table and array namespaces recursively immutable + if isinstance(value, (dict, list)): + out.flags.set(header + key, Flags.FROZEN, recursive=True) + nest[key_stem] = value + return pos + + +def parse_key_value_pair( + src: str, pos: Pos, parse_float: ParseFloat +) -> Tuple[Pos, Key, Any]: + pos, key = parse_key(src, pos) + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char != "=": + raise suffixed_err(src, pos, 'Expected "=" after a key in a key/value pair') + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, value = parse_value(src, pos, parse_float) + return pos, key, value + + +def parse_key(src: str, pos: Pos) -> Tuple[Pos, Key]: + pos, key_part = parse_key_part(src, pos) + key: Key = (key_part,) + pos = skip_chars(src, pos, TOML_WS) + while True: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char != ".": + return pos, key + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, key_part = parse_key_part(src, pos) + key += (key_part,) + pos = skip_chars(src, pos, TOML_WS) + + +def parse_key_part(src: str, pos: Pos) -> Tuple[Pos, str]: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char in BARE_KEY_CHARS: + start_pos = pos + pos = skip_chars(src, pos, BARE_KEY_CHARS) + return pos, src[start_pos:pos] + if char == "'": + return parse_literal_str(src, pos) + if char == '"': + return parse_one_line_basic_str(src, pos) + raise suffixed_err(src, pos, "Invalid initial character for a key part") + + +def parse_one_line_basic_str(src: str, pos: Pos) -> Tuple[Pos, str]: + pos += 1 + return parse_basic_str(src, pos, multiline=False) + + +def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, list]: + pos += 1 + array: list = [] + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + while True: + pos, val = parse_value(src, pos, parse_float) + array.append(val) + pos = skip_comments_and_array_ws(src, pos) + + c = src[pos : pos + 1] + if c == "]": + return pos + 1, array + if c != ",": + raise suffixed_err(src, pos, "Unclosed array") + pos += 1 + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + + +def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> 
Tuple[Pos, dict]: + pos += 1 + nested_dict = NestedDict() + flags = Flags() + + pos = skip_chars(src, pos, TOML_WS) + if src.startswith("}", pos): + return pos + 1, nested_dict.dict + while True: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + if flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") + try: + nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") + nest[key_stem] = value + pos = skip_chars(src, pos, TOML_WS) + c = src[pos : pos + 1] + if c == "}": + return pos + 1, nested_dict.dict + if c != ",": + raise suffixed_err(src, pos, "Unclosed inline table") + if isinstance(value, (dict, list)): + flags.set(key, Flags.FROZEN, recursive=True) + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + + +def parse_basic_str_escape( # noqa: C901 + src: str, pos: Pos, *, multiline: bool = False +) -> Tuple[Pos, str]: + escape_id = src[pos : pos + 2] + pos += 2 + if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: + # Skip whitespace until next non-whitespace character or end of + # the doc. Error if non-whitespace is found before newline. + if escape_id != "\\\n": + pos = skip_chars(src, pos, TOML_WS) + try: + char = src[pos] + except IndexError: + return pos, "" + if char != "\n": + raise suffixed_err(src, pos, 'Unescaped "\\" in a string') + pos += 1 + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + return pos, "" + if escape_id == "\\u": + return parse_hex_char(src, pos, 4) + if escape_id == "\\U": + return parse_hex_char(src, pos, 8) + try: + return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] + except KeyError: + if len(escape_id) != 2: + raise suffixed_err(src, pos, "Unterminated string") from None + raise suffixed_err(src, pos, 'Unescaped "\\" in a string') from None + + +def parse_basic_str_escape_multiline(src: str, pos: Pos) -> Tuple[Pos, str]: + return parse_basic_str_escape(src, pos, multiline=True) + + +def parse_hex_char(src: str, pos: Pos, hex_len: int) -> Tuple[Pos, str]: + hex_str = src[pos : pos + hex_len] + if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): + raise suffixed_err(src, pos, "Invalid hex value") + pos += hex_len + hex_int = int(hex_str, 16) + if not is_unicode_scalar_value(hex_int): + raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") + return pos, chr(hex_int) + + +def parse_literal_str(src: str, pos: Pos) -> Tuple[Pos, str]: + pos += 1 # Skip starting apostrophe + start_pos = pos + pos = skip_until( + src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True + ) + return pos + 1, src[start_pos:pos] # Skip ending apostrophe + + +def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> Tuple[Pos, str]: + pos += 3 + if src.startswith("\n", pos): + pos += 1 + + if literal: + delim = "'" + end_pos = skip_until( + src, + pos, + "'''", + error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS, + error_on_eof=True, + ) + result = src[pos:end_pos] + pos = end_pos + 3 + else: + delim = '"' + pos, result = parse_basic_str(src, pos, multiline=True) + + # Add at maximum two extra apostrophes/quotes if the end sequence + # is 4 or 5 chars long instead of just 3. 
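+ # (e.g. the literal `'''x'''''` yields the content `x''`: three of the
+ # five closing apostrophes terminate the string, the other two are kept)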
+ if not src.startswith(delim, pos): + return pos, result + pos += 1 + if not src.startswith(delim, pos): + return pos, result + delim + pos += 1 + return pos, result + (delim * 2) + + +def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]: + if multiline: + error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape_multiline + else: + error_on = ILLEGAL_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape + result = "" + start_pos = pos + while True: + try: + char = src[pos] + except IndexError: + raise suffixed_err(src, pos, "Unterminated string") from None + if char == '"': + if not multiline: + return pos + 1, result + src[start_pos:pos] + if src.startswith('"""', pos): + return pos + 3, result + src[start_pos:pos] + pos += 1 + continue + if char == "\\": + result += src[start_pos:pos] + pos, parsed_escape = parse_escapes(src, pos) + result += parsed_escape + start_pos = pos + continue + if char in error_on: + raise suffixed_err(src, pos, f"Illegal character {char!r}") + pos += 1 + + +def parse_value( # noqa: C901 + src: str, pos: Pos, parse_float: ParseFloat +) -> Tuple[Pos, Any]: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + + # Basic strings + if char == '"': + if src.startswith('"""', pos): + return parse_multiline_str(src, pos, literal=False) + return parse_one_line_basic_str(src, pos) + + # Literal strings + if char == "'": + if src.startswith("'''", pos): + return parse_multiline_str(src, pos, literal=True) + return parse_literal_str(src, pos) + + # Booleans + if char == "t": + if src.startswith("true", pos): + return pos + 4, True + if char == "f": + if src.startswith("false", pos): + return pos + 5, False + + # Dates and times + datetime_match = RE_DATETIME.match(src, pos) + if datetime_match: + try: + datetime_obj = match_to_datetime(datetime_match) + except ValueError as e: + raise suffixed_err(src, pos, "Invalid date or datetime") from e + return datetime_match.end(), datetime_obj + localtime_match = RE_LOCALTIME.match(src, pos) + if localtime_match: + return localtime_match.end(), match_to_localtime(localtime_match) + + # Integers and "normal" floats. + # The regex will greedily match any type starting with a decimal + # char, so needs to be located after handling of dates and times. 
+ number_match = RE_NUMBER.match(src, pos)
+ if number_match:
+ return number_match.end(), match_to_number(number_match, parse_float)
+
+ # Arrays
+ if char == "[":
+ return parse_array(src, pos, parse_float)
+
+ # Inline tables
+ if char == "{":
+ return parse_inline_table(src, pos, parse_float)
+
+ # Special floats
+ first_three = src[pos : pos + 3]
+ if first_three in {"inf", "nan"}:
+ return pos + 3, parse_float(first_three)
+ first_four = src[pos : pos + 4]
+ if first_four in {"-inf", "+inf", "-nan", "+nan"}:
+ return pos + 4, parse_float(first_four)
+
+ raise suffixed_err(src, pos, "Invalid value")
+
+
+def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError:
+ """Return a `TOMLDecodeError` whose error message is suffixed with
+ coordinates in the source."""
+
+ def coord_repr(src: str, pos: Pos) -> str:
+ if pos >= len(src):
+ return "end of document"
+ line = src.count("\n", 0, pos) + 1
+ if line == 1:
+ column = pos + 1
+ else:
+ column = pos - src.rindex("\n", 0, pos)
+ return f"line {line}, column {column}"
+
+ return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})")
+
+
+def is_unicode_scalar_value(codepoint: int) -> bool:
+ return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111)
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/thirdparty/tomli/_re.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/tomli/_re.py Wed Oct 11 02:02:46 2023 +0200
@@ -0,0 +1,101 @@
+from datetime import date, datetime, time, timedelta, timezone, tzinfo
+from functools import lru_cache
+import re
+from typing import Any, Optional, Union
+
+from ._types import ParseFloat
+
+# E.g.
+# - 00:32:00.999999
+# - 00:32:00
+_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?"
+
+RE_NUMBER = re.compile(
+ r"""
+0
+(?:
+ x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex
+ |
+ b[01](?:_?[01])* # bin
+ |
+ o[0-7](?:_?[0-7])* # oct
+)
+|
+[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part
+(?P<floatpart>
+ (?:\.[0-9](?:_?[0-9])*)? # optional fractional part
+ (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part
+)
+""",
+ flags=re.VERBOSE,
+)
+RE_LOCALTIME = re.compile(_TIME_RE_STR)
+RE_DATETIME = re.compile(
+ fr"""
+([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27
+(?:
+ [Tt ]
+ {_TIME_RE_STR}
+ (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset
+)?
+""",
+ flags=re.VERBOSE,
+)
+
+
+def match_to_datetime(match: "re.Match") -> Union[datetime, date]:
+ """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.
+
+ Raises ValueError if the match does not correspond to a valid date
+ or datetime.
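+ For example, `1979-05-27T07:32:00Z` produces a timezone-aware
+ `datetime.datetime`, while a date-only match such as `1979-05-27`
+ produces a `datetime.date`.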
+ """ + ( + year_str, + month_str, + day_str, + hour_str, + minute_str, + sec_str, + micros_str, + zulu_time, + offset_sign_str, + offset_hour_str, + offset_minute_str, + ) = match.groups() + year, month, day = int(year_str), int(month_str), int(day_str) + if hour_str is None: + return date(year, month, day) + hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + if offset_sign_str: + tz: Optional[tzinfo] = cached_tz( + offset_hour_str, offset_minute_str, offset_sign_str + ) + elif zulu_time: + tz = timezone.utc + else: # local date-time + tz = None + return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz) + + +@lru_cache(maxsize=None) +def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: + sign = 1 if sign_str == "+" else -1 + return timezone( + timedelta( + hours=sign * int(hour_str), + minutes=sign * int(minute_str), + ) + ) + + +def match_to_localtime(match: "re.Match") -> time: + hour_str, minute_str, sec_str, micros_str = match.groups() + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + return time(int(hour_str), int(minute_str), int(sec_str), micros) + + +def match_to_number(match: "re.Match", parse_float: "ParseFloat") -> Any: + if match.group("floatpart"): + return parse_float(match.group()) + return int(match.group(), 0) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/thirdparty/tomli/_types.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/thirdparty/tomli/_types.py Wed Oct 11 02:02:46 2023 +0200 @@ -0,0 +1,6 @@ +from typing import Any, Callable, Tuple + +# Type annotations +ParseFloat = Callable[[str], Any] +Key = Tuple[str, ...] +Pos = int diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/thirdparty/tomli/py.typed --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/thirdparty/tomli/py.typed Wed Oct 11 02:02:46 2023 +0200 @@ -0,0 +1,1 @@ +# Marker file for PEP 561 diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/transaction.py --- a/mercurial/transaction.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/transaction.py Wed Oct 11 02:02:46 2023 +0200 @@ -59,6 +59,11 @@ ] +def has_abandoned_transaction(repo): + """Return True if the repo has an abandoned transaction""" + return os.path.exists(repo.sjoin(b"journal")) + + def cleanup_undo_files(report, vfsmap, undo_prefix=b'undo'): """remove "undo" files used by the rollback logic @@ -868,7 +873,7 @@ self._vfsmap, entries, self._backupentries, - False, + unlink=True, checkambigfiles=self._checkambigfiles, ) self._report(_(b"rollback completed\n")) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/ui.py --- a/mercurial/ui.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/ui.py Wed Oct 11 02:02:46 2023 +0200 @@ -37,7 +37,6 @@ from .i18n import _ from .node import hex from .pycompat import ( - getattr, open, ) @@ -47,6 +46,7 @@ configitems, encoding, error, + extensions, formatter, loggingutil, progress, @@ -659,6 +659,12 @@ item = self._knownconfig.get(section, {}).get(name) alternates = [(section, name)] + if item is not None and item.in_core_extension is not None: + # Only return the default for an in-core extension item if said + # extension is enabled + if item.in_core_extension in extensions.extensions(self): + item = None + if item is not None: alternates.extend(item.alias) if callable(item.default): @@ -1460,7 +1466,7 @@ self.flush() wasformatted = self.formatted() - if util.safehasattr(signal, b"SIGPIPE"): + if hasattr(signal, "SIGPIPE"): signal.signal(signal.SIGPIPE, _catchterm) if 
self._runpager(pagercmd, pagerenv): self.pageractive = True @@ -1540,7 +1546,7 @@ @self.atexit def killpager(): - if util.safehasattr(signal, b"SIGINT"): + if hasattr(signal, "SIGINT"): signal.signal(signal.SIGINT, signal.SIG_IGN) # restore original fds, closing pager.stdin copies in the process os.dup2(stdoutfd, procutil.stdout.fileno()) diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/unionrepo.py --- a/mercurial/unionrepo.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/unionrepo.py Wed Oct 11 02:02:46 2023 +0200 @@ -13,7 +13,6 @@ from .i18n import _ -from .pycompat import getattr from . import ( changelog, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/upgrade_utils/actions.py Wed Oct 11 02:02:46 2023 +0200 @@ -670,7 +670,7 @@ newactions = [] for d in format_upgrades: - if util.safehasattr(d, '_requirement'): + if hasattr(d, '_requirement'): name = d._requirement else: name = None diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/upgrade_utils/engine.py Wed Oct 11 02:02:46 2023 +0200 @@ -9,7 +9,6 @@ import stat from ..i18n import _ -from ..pycompat import getattr from .. import ( error, metadata, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/url.py --- a/mercurial/url.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/url.py Wed Oct 11 02:02:46 2023 +0200 @@ -190,7 +190,7 @@ return _sendfile -has_https = util.safehasattr(urlreq, 'httpshandler') +has_https = hasattr(urlreq, 'httpshandler') class httpconnection(keepalive.HTTPConnection): @@ -222,47 +222,6 @@ h.headers = None -def _generic_proxytunnel(self: "httpsconnection"): - headers = self.headers # pytype: disable=attribute-error - proxyheaders = { - pycompat.bytestr(x): pycompat.bytestr(headers[x]) - for x in headers - if x.lower().startswith('proxy-') - } - realhostport = self.realhostport # pytype: disable=attribute-error - self.send(b'CONNECT %s HTTP/1.0\r\n' % realhostport) - - for header in proxyheaders.items(): - self.send(b'%s: %s\r\n' % header) - self.send(b'\r\n') - - # majority of the following code is duplicated from - # httplib.HTTPConnection as there are no adequate places to - # override functions to provide the needed functionality. 
-
- # pytype: disable=attribute-error
- res = self.response_class(self.sock, method=self._method)
- # pytype: enable=attribute-error
-
- while True:
- # pytype: disable=attribute-error
- version, status, reason = res._read_status()
- # pytype: enable=attribute-error
- if status != httplib.CONTINUE:
- break
- # skip lines that are all whitespace
- list(iter(lambda: res.fp.readline().strip(), b''))
-
- if status == 200:
- # skip lines until we find a blank line
- list(iter(res.fp.readline, b'\r\n'))
- else:
- self.close()
- raise socket.error(
- "Tunnel connection failed: %d %s" % (status, reason.strip())
- )
-
-
class httphandler(keepalive.HTTPHandler):
def http_open(self, req):
return self.do_open(httpconnection, req)
@@ -306,6 +265,46 @@
if has_https:
+ def _generic_proxytunnel(self: "httpsconnection"):
+ headers = self.headers # pytype: disable=attribute-error
+ proxyheaders = {
+ pycompat.bytestr(x): pycompat.bytestr(headers[x])
+ for x in headers
+ if x.lower().startswith('proxy-')
+ }
+ realhostport = self.realhostport # pytype: disable=attribute-error
+ self.send(b'CONNECT %s HTTP/1.0\r\n' % realhostport)
+
+ for header in proxyheaders.items():
+ self.send(b'%s: %s\r\n' % header)
+ self.send(b'\r\n')
+
+ # majority of the following code is duplicated from
+ # httplib.HTTPConnection as there are no adequate places to
+ # override functions to provide the needed functionality.
+
+ # pytype: disable=attribute-error
+ res = self.response_class(self.sock, method=self._method)
+ # pytype: enable=attribute-error
+
+ while True:
+ # pytype: disable=attribute-error
+ version, status, reason = res._read_status()
+ # pytype: enable=attribute-error
+ if status != httplib.CONTINUE:
+ break
+ # skip lines that are all whitespace
+ list(iter(lambda: res.fp.readline().strip(), b''))
+
+ if status == 200:
+ # skip lines until we find a blank line
+ list(iter(res.fp.readline, b'\r\n'))
+ else:
+ self.close()
+ raise socket.error(
+ "Tunnel connection failed: %d %s" % (status, reason.strip())
+ )
+
class httpsconnection(keepalive.HTTPConnection):
response_class = keepalive.HTTPResponse
default_port = httplib.HTTPS_PORT
@@ -542,7 +541,10 @@
else:
handlers.append(httphandler(timeout=timeout))
if has_https:
- handlers.append(httpshandler(ui, timeout=timeout))
+ # pytype gets confused about the conditional existence of httpshandler here.
+ handlers.append(
+ httpshandler(ui, timeout=timeout) # pytype: disable=name-error
+ )
handlers.append(proxyhandler(ui))
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/urllibcompat.py
--- a/mercurial/urllibcompat.py Tue Oct 10 18:29:04 2023 +0200
+++ b/mercurial/urllibcompat.py Wed Oct 11 02:02:46 2023 +0200
@@ -11,7 +11,6 @@
import urllib.request
import urllib.response
-from .pycompat import getattr
from . import pycompat
_sysstr = pycompat.sysstr
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/util.py
--- a/mercurial/util.py Tue Oct 10 18:29:04 2023 +0200
+++ b/mercurial/util.py Wed Oct 11 02:02:46 2023 +0200
@@ -37,10 +37,7 @@
from .node import hex
from .thirdparty import attr
from .pycompat import (
- delattr,
- getattr,
open,
- setattr,
)
from hgdemandimport import tracing
from .
import (
@@ -2583,7 +2580,7 @@
self._fp.close()
def __del__(self):
- if safehasattr(self, '_fp'): # constructor actually did something
+ if hasattr(self, '_fp'): # constructor actually did something
self.discard()
def __enter__(self):
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/utils/compression.py
--- a/mercurial/utils/compression.py Tue Oct 10 18:29:04 2023 +0200
+++ b/mercurial/utils/compression.py Wed Oct 11 02:02:46 2023 +0200
@@ -8,7 +8,6 @@
import collections
import zlib
-from ..pycompat import getattr
from .. import (
error,
i18n,
@@ -16,8 +15,6 @@
)
from . import stringutil
-safehasattr = pycompat.safehasattr
-
_ = i18n._
@@ -185,7 +182,7 @@
"""
assert role in (SERVERROLE, CLIENTROLE)
- attr = b'serverpriority' if role == SERVERROLE else b'clientpriority'
+ attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
engines = [self._engines[e] for e in self._wiretypes.values()]
if onlyavailable:
@@ -340,7 +337,7 @@
class _CompressedStreamReader:
def __init__(self, fh):
- if safehasattr(fh, 'unbufferedread'):
+ if hasattr(fh, 'unbufferedread'):
self._reader = fh.unbufferedread
else:
self._reader = fh.read
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/utils/procutil.py
--- a/mercurial/utils/procutil.py Tue Oct 10 18:29:04 2023 +0200
+++ b/mercurial/utils/procutil.py Wed Oct 11 02:02:46 2023 +0200
@@ -24,7 +24,6 @@
from ..i18n import _
from ..pycompat import (
- getattr,
open,
)
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/utils/resourceutil.py
--- a/mercurial/utils/resourceutil.py Tue Oct 10 18:29:04 2023 +0200
+++ b/mercurial/utils/resourceutil.py Wed Oct 11 02:02:46 2023 +0200
@@ -22,8 +22,8 @@
(portable, not much used).
"""
return (
- pycompat.safehasattr(sys, "frozen") # new py2exe
- or pycompat.safehasattr(sys, "importers") # old py2exe
+ hasattr(sys, "frozen") # new py2exe
+ or hasattr(sys, "importers") # old py2exe
or _imp.is_frozen("__main__") # tools/freeze
)
@@ -59,7 +59,7 @@
from importlib import resources # pytype: disable=import-error
# Force loading of the resources module
- if pycompat.safehasattr(resources, 'files'):
+ if hasattr(resources, 'files'):
resources.files # pytype: disable=module-attr
else:
resources.open_binary # pytype: disable=module-attr
@@ -95,7 +95,7 @@
from .. import encoding
def open_resource(package, name):
- if pycompat.safehasattr(resources, 'files'):
+ if hasattr(resources, 'files'):
return (
resources.files( # pytype: disable=module-attr
pycompat.sysstr(package)
diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/utils/urlutil.py
--- a/mercurial/utils/urlutil.py Tue Oct 10 18:29:04 2023 +0200
+++ b/mercurial/utils/urlutil.py Wed Oct 11 02:02:46 2023 +0200
@@ -9,15 +9,12 @@
import socket
from ..i18n import _
-from ..pycompat import (
- getattr,
- setattr,
-)
from .. import (
encoding,
error,
pycompat,
urllibcompat,
+ util,
)
from . import (
@@ -257,18 +254,20 @@
def __repr__(self):
attrs = []
for a in (
- b'scheme',
- b'user',
- b'passwd',
- b'host',
- b'port',
- b'path',
- b'query',
- b'fragment',
+ 'scheme',
+ 'user',
+ 'passwd',
+ 'host',
+ 'port',
+ 'path',
+ 'query',
+ 'fragment',
):
v = getattr(self, a)
if v is not None:
- attrs.append(b'%s: %r' % (a, pycompat.bytestr(v)))
+ line = b'%s: %r'
+ line %= (pycompat.bytestr(a), pycompat.bytestr(v))
+ attrs.append(line)
return b'<url %s>' % b', '.join(attrs)
def __bytes__(self):
@@ -679,6 +678,10 @@
This decorator can be used to perform additional verification of
sub-options and to change the type of sub-options.
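For example, the registration `@pathsuboption(b'pushurl', '_pushloc')`
below maps the `pushurl` sub-option onto the path attribute `_pushloc`.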
""" + if isinstance(attr, bytes): + msg = b'pathsuboption take `str` as "attr" argument, not `bytes`' + util.nouideprecwarn(msg, b"6.6", stacklevel=2) + attr = attr.decode('ascii') def register(func): _pathsuboptions[option] = (attr, func) @@ -693,7 +696,7 @@ return b'yes' if value else b'no' -@pathsuboption(b'pushurl', b'_pushloc') +@pathsuboption(b'pushurl', '_pushloc') def pushurlpathoption(ui, path, value): u = url(value) # Actually require a URL. @@ -718,7 +721,7 @@ return bytes(u) -@pathsuboption(b'pushrev', b'pushrev') +@pathsuboption(b'pushrev', 'pushrev') def pushrevpathoption(ui, path, value): return value @@ -730,7 +733,7 @@ } -@pathsuboption(b'bookmarks.mode', b'bookmarks_mode') +@pathsuboption(b'bookmarks.mode', 'bookmarks_mode') def bookmarks_mode_option(ui, path, value): if value not in SUPPORTED_BOOKMARKS_MODES: path_name = path.name @@ -756,7 +759,7 @@ @pathsuboption( b'pulled-delta-reuse-policy', - b'delta_reuse_policy', + 'delta_reuse_policy', display=DELTA_REUSE_POLICIES_NAME.get, ) def delta_reuse_policy(ui, path, value): @@ -773,7 +776,7 @@ return DELTA_REUSE_POLICIES.get(value) -@pathsuboption(b'multi-urls', b'multi_urls', display=display_bool) +@pathsuboption(b'multi-urls', 'multi_urls', display=display_bool) def multiurls_pathoption(ui, path, value): res = stringutil.parsebool(value) if res is None: diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/verify.py --- a/mercurial/verify.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/verify.py Wed Oct 11 02:02:46 2023 +0200 @@ -17,6 +17,7 @@ pycompat, requirements, revlog, + transaction, util, ) @@ -195,7 +196,7 @@ if not repo.url().startswith(b'file:'): raise error.Abort(_(b"cannot verify bundle or remote repos")) - if os.path.exists(repo.sjoin(b"journal")): + if transaction.has_abandoned_transaction(repo): ui.warn(_(b"abandoned transaction found - run hg recover\n")) if ui.verbose or not self.revlogv1: diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/vfs.py --- a/mercurial/vfs.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/vfs.py Wed Oct 11 02:02:46 2023 +0200 @@ -16,11 +16,6 @@ ) from .i18n import _ -from .pycompat import ( - delattr, - getattr, - setattr, -) from . import ( encoding, error, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/windows.py --- a/mercurial/windows.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/windows.py Wed Oct 11 02:02:46 2023 +0200 @@ -33,7 +33,6 @@ ) from .i18n import _ -from .pycompat import getattr from . import ( encoding, error, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/wireprotoframing.py --- a/mercurial/wireprotoframing.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/wireprotoframing.py Wed Oct 11 02:02:46 2023 +0200 @@ -14,7 +14,6 @@ import struct from .i18n import _ -from .pycompat import getattr from .thirdparty import attr from . import ( encoding, diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/wireprototypes.py --- a/mercurial/wireprototypes.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/wireprototypes.py Wed Oct 11 02:02:46 2023 +0200 @@ -9,7 +9,6 @@ hex, ) from .i18n import _ -from .pycompat import getattr from .thirdparty import attr from . import ( error, @@ -367,9 +366,7 @@ # No explicit config. Filter out the ones that aren't supposed to be # advertised and return default ordering. 
if not configengines: - attr = ( - b'serverpriority' if role == util.SERVERROLE else b'clientpriority' - ) + attr = 'serverpriority' if role == util.SERVERROLE else 'clientpriority' return [ e for e in compengines if getattr(e.wireprotosupport(), attr) > 0 ] diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/wireprotov1peer.py --- a/mercurial/wireprotov1peer.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/wireprotov1peer.py Wed Oct 11 02:02:46 2023 +0200 @@ -12,10 +12,6 @@ from concurrent import futures from .i18n import _ from .node import bin -from .pycompat import ( - getattr, - setattr, -) from . import ( bundle2, changegroup as changegroupmod, @@ -499,7 +495,7 @@ else: heads = wireprototypes.encodelist(heads) - if util.safehasattr(bundle, 'deltaheader'): + if hasattr(bundle, 'deltaheader'): # this a bundle10, do the old style call sequence ret, output = self._callpush(b"unbundle", bundle, heads=heads) if ret == b"": diff -r 704c3d0878d9 -r 12c308c55e53 mercurial/wireprotov1server.py --- a/mercurial/wireprotov1server.py Tue Oct 10 18:29:04 2023 +0200 +++ b/mercurial/wireprotov1server.py Wed Oct 11 02:02:46 2023 +0200 @@ -11,7 +11,6 @@ from .i18n import _ from .node import hex -from .pycompat import getattr from . import ( bundle2, @@ -721,7 +720,7 @@ r = exchange.unbundle( repo, gen, their_heads, b'serve', proto.client() ) - if util.safehasattr(r, 'addpart'): + if hasattr(r, 'addpart'): # The return looks streamable, we are in the bundle2 case # and should return a stream. return wireprototypes.streamreslegacy(gen=r.getchunks()) diff -r 704c3d0878d9 -r 12c308c55e53 relnotes/next --- a/relnotes/next Tue Oct 10 18:29:04 2023 +0200 +++ b/relnotes/next Wed Oct 11 02:02:46 2023 +0200 @@ -13,6 +13,8 @@ == Backwards Compatibility Changes == +* remove the experimental infinite push extension + == Internal API Changes == == Miscellaneous == diff -r 704c3d0878d9 -r 12c308c55e53 rust/Cargo.lock --- a/rust/Cargo.lock Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/Cargo.lock Wed Oct 11 02:02:46 2023 +0200 @@ -476,6 +476,12 @@ [[package]] name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038" @@ -517,7 +523,7 @@ "derive_more", "flate2", "format-bytes", - "hashbrown", + "hashbrown 0.13.1", "home", "im-rc", "itertools", @@ -535,9 +541,11 @@ "regex", "same-file", "self_cell", + "serde", "sha-1 0.10.0", "tempfile", "thread_local", + "toml", "twox-hash", "zstd", ] @@ -610,6 +618,16 @@ ] [[package]] +name = "indexmap" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] name = "instant" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -749,6 +767,15 @@ ] [[package]] +name = "nom8" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" +dependencies = [ + "memchr", +] + +[[package]] name = "num-integer" version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1107,6 +1134,35 @@ checksum = 
"e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" [[package]] +name = "serde" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_spanned" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4" +dependencies = [ + "serde", +] + +[[package]] name = "sha-1" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1160,9 +1216,9 @@ [[package]] name = "syn" -version = "1.0.103" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", @@ -1213,6 +1269,40 @@ ] [[package]] +name = "toml" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb9d890e4dc9298b70f740f615f2e05b9db37dce531f6b24fb77ac993f9f217" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b" +dependencies = [ + "indexmap", + "nom8", + "serde", + "serde_spanned", + "toml_datetime", +] + +[[package]] name = "twox-hash" version = "1.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/Cargo.toml Wed Oct 11 02:02:46 2023 +0200 @@ -26,10 +26,12 @@ rayon = "1.7.0" regex = "1.7.0" self_cell = "1.0" +serde = { version = "1.0", features = ["derive"] } sha-1 = "0.10.0" twox-hash = "1.6.3" same-file = "1.0.6" tempfile = "3.3.0" +toml = "0.6" thread_local = "1.1.4" crossbeam-channel = "0.5.6" log = "0.4.17" @@ -46,5 +48,5 @@ default-features = false [dev-dependencies] -clap = { version = "4.0.24", features = ["derive"] } +clap = { version = "~4.0", features = ["derive"] } pretty_assertions = "1.1.0" diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/examples/nodemap/index.rs --- a/rust/hg-core/examples/nodemap/index.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/examples/nodemap/index.rs Wed Oct 11 02:02:46 2023 +0200 @@ -29,7 +29,7 @@ impl IndexEntry { fn parents(&self) -> [Revision; 2] { - [Revision::from_be(self.p1), Revision::from_be(self.p1)] + [self.p1, self.p2] } } @@ -42,23 +42,18 @@ if rev == NULL_REVISION { return None; } - let i = rev as usize; - if i >= self.len() { - None - } else { - Some(&self.data[i].node) - } + Some(&self.data[rev.0 as usize].node) } } impl Graph for &Index { fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { - let 
[p1, p2] = (*self).data[rev as usize].parents();
+ let [p1, p2] = self.data[rev.0 as usize].parents();
let len = (*self).len();
if p1 < NULL_REVISION
|| p2 < NULL_REVISION
- || p1 as usize >= len
- || p2 as usize >= len
+ || p1.0 as usize >= len
+ || p2.0 as usize >= len
{
return Err(GraphError::ParentOutOfRange(rev));
}
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/examples/nodemap/main.rs
--- a/rust/hg-core/examples/nodemap/main.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/examples/nodemap/main.rs Wed Oct 11 02:02:46 2023 +0200
@@ -36,7 +36,7 @@
let start = Instant::now();
let mut nm = NodeTree::default();
for rev in 0..index.len() {
- let rev = rev as Revision;
+ let rev = Revision(rev as BaseRevision);
nm.insert(index, index.node(rev).unwrap(), rev).unwrap();
}
eprintln!("Nodemap constructed in RAM in {:?}", start.elapsed());
@@ -55,7 +55,11 @@
let len = index.len() as u32;
let mut rng = rand::thread_rng();
let nodes: Vec<Node> = (0..queries)
- .map(|_| *index.node((rng.gen::<u32>() % len) as Revision).unwrap())
+ .map(|_| {
+ *index
+ .node(Revision((rng.gen::<u32>() % len) as BaseRevision))
+ .unwrap()
+ })
+ .collect();
if queries < 10 {
let nodes_hex: Vec<String> =
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/ancestors.rs
--- a/rust/hg-core/src/ancestors.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/ancestors.rs Wed Oct 11 02:02:46 2023 +0200
@@ -247,7 +249,9 @@
revs.remove(&curr);
self.add_parents(curr)?;
}
- curr -= 1;
+ // We know this revision is safe because we've checked the bounds
+ // before.
+ curr = Revision(curr.0 - 1);
}
Ok(())
}
@@ -297,14 +299,14 @@
// TODO heuristics for with_capacity()?
let mut missing: Vec<Revision> = Vec::new();
- for curr in (0..=start).rev() {
+ for curr in (0..=start.0).rev() {
if revs_visit.is_empty() {
break;
}
- if both_visit.remove(&curr) {
+ if both_visit.remove(&Revision(curr)) {
// curr's parents might have made it into revs_visit through
// another path
- for p in self.graph.parents(curr)?.iter().cloned() {
+ for p in self.graph.parents(Revision(curr))?.iter().cloned() {
if p == NULL_REVISION {
continue;
}
@@ -312,9 +314,9 @@
bases_visit.insert(p);
both_visit.insert(p);
}
- } else if revs_visit.remove(&curr) {
- missing.push(curr);
- for p in self.graph.parents(curr)?.iter().cloned() {
+ } else if revs_visit.remove(&Revision(curr)) {
+ missing.push(Revision(curr));
+ for p in self.graph.parents(Revision(curr))?.iter().cloned() {
if p == NULL_REVISION {
continue;
}
@@ -331,8 +333,8 @@
revs_visit.insert(p);
}
}
- } else if bases_visit.contains(&curr) {
- for p in self.graph.parents(curr)?.iter().cloned() {
+ } else if bases_visit.contains(&Revision(curr)) {
+ for p in self.graph.parents(Revision(curr))?.iter().cloned() {
if p == NULL_REVISION {
continue;
}
@@ -356,7 +358,41 @@
mod tests {
use super::*;
- use crate::testing::{SampleGraph, VecGraph};
+ use crate::{
+ testing::{SampleGraph, VecGraph},
+ BaseRevision,
+ };
+
+ impl From<BaseRevision> for Revision {
+ fn from(value: BaseRevision) -> Self {
+ if !cfg!(test) {
+ panic!("should only be used in tests")
+ }
+ Revision(value)
+ }
+ }
+
+ impl PartialEq<BaseRevision> for Revision {
+ fn eq(&self, other: &BaseRevision) -> bool {
+ if !cfg!(test) {
+ panic!("should only be used in tests")
+ }
+ self.0.eq(other)
+ }
+ }
+
+ impl PartialEq<u32> for Revision {
+ fn eq(&self, other: &u32) -> bool {
+ if !cfg!(test) {
+ panic!("should only be used in tests")
+ }
+ let check: Result<u32, _> = self.0.try_into();
+ match check {
+ Ok(value) => value.eq(other),
+ Err(_) => false,
+ }
+ }
+ }
fn list_ancestors<G: Graph>(
graph: G,
@@ -374,37 +410,80
@@
/// Same tests as test-ancestor.py, without membership
/// (see also test-ancestor.py.out)
fn test_list_ancestor() {
- assert_eq!(list_ancestors(SampleGraph, vec![], 0, false), vec![]);
+ assert_eq!(
+ list_ancestors(SampleGraph, vec![], 0.into(), false),
+ Vec::<Revision>::new()
+ );
assert_eq!(
- list_ancestors(SampleGraph, vec![11, 13], 0, false),
+ list_ancestors(
+ SampleGraph,
+ vec![11.into(), 13.into()],
+ 0.into(),
+ false
+ ),
vec![8, 7, 4, 3, 2, 1, 0]
);
assert_eq!(
- list_ancestors(SampleGraph, vec![1, 3], 0, false),
+ list_ancestors(
+ SampleGraph,
+ vec![1.into(), 3.into()],
+ 0.into(),
+ false
+ ),
vec![1, 0]
);
assert_eq!(
- list_ancestors(SampleGraph, vec![11, 13], 0, true),
+ list_ancestors(
+ SampleGraph,
+ vec![11.into(), 13.into()],
+ 0.into(),
+ true
+ ),
vec![13, 11, 8, 7, 4, 3, 2, 1, 0]
);
assert_eq!(
- list_ancestors(SampleGraph, vec![11, 13], 6, false),
+ list_ancestors(
+ SampleGraph,
+ vec![11.into(), 13.into()],
+ 6.into(),
+ false
+ ),
vec![8, 7]
);
assert_eq!(
- list_ancestors(SampleGraph, vec![11, 13], 6, true),
+ list_ancestors(
+ SampleGraph,
+ vec![11.into(), 13.into()],
+ 6.into(),
+ true
+ ),
vec![13, 11, 8, 7]
);
assert_eq!(
- list_ancestors(SampleGraph, vec![11, 13], 11, true),
+ list_ancestors(
+ SampleGraph,
+ vec![11.into(), 13.into()],
+ 11.into(),
+ true
+ ),
vec![13, 11]
);
assert_eq!(
- list_ancestors(SampleGraph, vec![11, 13], 12, true),
+ list_ancestors(
+ SampleGraph,
+ vec![11.into(), 13.into()],
+ 12.into(),
+ true
+ ),
vec![13]
);
assert_eq!(
- list_ancestors(SampleGraph, vec![10, 1], 0, true),
+ list_ancestors(
+ SampleGraph,
+ vec![10.into(), 1.into()],
+ 0.into(),
+ true
+ ),
vec![10, 5, 4, 2, 1, 0]
);
}
@@ -415,33 +494,53 @@
/// suite.
/// For instance, run tests/test-obsolete-checkheads.t
fn test_nullrev_input() {
- let mut iter =
- AncestorsIterator::new(SampleGraph, vec![-1], 0, false).unwrap();
+ let mut iter = AncestorsIterator::new(
+ SampleGraph,
+ vec![Revision(-1)],
+ 0.into(),
+ false,
+ )
+ .unwrap();
assert_eq!(iter.next(), None)
}
#[test]
fn test_contains() {
- let mut lazy =
- AncestorsIterator::new(SampleGraph, vec![10, 1], 0, true).unwrap();
- assert!(lazy.contains(1).unwrap());
- assert!(!lazy.contains(3).unwrap());
+ let mut lazy = AncestorsIterator::new(
+ SampleGraph,
+ vec![10.into(), 1.into()],
+ 0.into(),
+ true,
+ )
+ .unwrap();
+ assert!(lazy.contains(1.into()).unwrap());
+ assert!(!lazy.contains(3.into()).unwrap());
- let mut lazy =
- AncestorsIterator::new(SampleGraph, vec![0], 0, false).unwrap();
+ let mut lazy = AncestorsIterator::new(
+ SampleGraph,
+ vec![0.into()],
+ 0.into(),
+ false,
+ )
+ .unwrap();
assert!(!lazy.contains(NULL_REVISION).unwrap());
}
#[test]
fn test_peek() {
- let mut iter =
- AncestorsIterator::new(SampleGraph, vec![10], 0, true).unwrap();
+ let mut iter = AncestorsIterator::new(
+ SampleGraph,
+ vec![10.into()],
+ 0.into(),
+ true,
+ )
+ .unwrap();
// peek() gives us the next value
- assert_eq!(iter.peek(), Some(10));
+ assert_eq!(iter.peek(), Some(10.into()));
// but it's not been consumed
- assert_eq!(iter.next(), Some(Ok(10)));
+ assert_eq!(iter.next(), Some(Ok(10.into())));
// and iteration resumes normally
- assert_eq!(iter.next(), Some(Ok(5)));
+ assert_eq!(iter.next(), Some(Ok(5.into())));
// let's drain the iterator to test peek() at the end
while iter.next().is_some() {}
@@ -450,19 +549,29 @@
#[test]
fn test_empty() {
- let mut iter =
- AncestorsIterator::new(SampleGraph, vec![10], 0, true).unwrap();
+ let mut iter = AncestorsIterator::new(
+ SampleGraph,
+ vec![10.into()],
+
0.into(),
+ true,
+ )
+ .unwrap();
assert!(!iter.is_empty());
while iter.next().is_some() {}
assert!(!iter.is_empty());
- let iter =
- AncestorsIterator::new(SampleGraph, vec![], 0, true).unwrap();
+ let iter = AncestorsIterator::new(SampleGraph, vec![], 0.into(), true)
+ .unwrap();
assert!(iter.is_empty());
// case where iter.seen == {NULL_REVISION}
- let iter =
- AncestorsIterator::new(SampleGraph, vec![0], 0, false).unwrap();
+ let iter = AncestorsIterator::new(
+ SampleGraph,
+ vec![0.into()],
+ 0.into(),
+ false,
+ )
+ .unwrap();
assert!(iter.is_empty());
}
@@ -471,9 +580,11 @@
struct Corrupted;
impl Graph for Corrupted {
+ // FIXME what to do about this? Are we just not supposed to get them
+ // anymore?
fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
match rev {
- 1 => Ok([0, -1]),
+ Revision(1) => Ok([0.into(), (-1).into()]),
r => Err(GraphError::ParentOutOfRange(r)),
}
}
@@ -482,9 +593,14 @@
#[test]
fn test_initrev_out_of_range() {
// inclusive=false looks up initrev's parents right away
- match AncestorsIterator::new(SampleGraph, vec![25], 0, false) {
+ match AncestorsIterator::new(
+ SampleGraph,
+ vec![25.into()],
+ 0.into(),
+ false,
+ ) {
Ok(_) => panic!("Should have been ParentOutOfRange"),
- Err(e) => assert_eq!(e, GraphError::ParentOutOfRange(25)),
+ Err(e) => assert_eq!(e, GraphError::ParentOutOfRange(25.into())),
}
}
@@ -492,22 +608,29 @@
fn test_next_out_of_range() {
// inclusive=false looks up initrev's parents right away
let mut iter =
- AncestorsIterator::new(Corrupted, vec![1], 0, false).unwrap();
- assert_eq!(iter.next(), Some(Err(GraphError::ParentOutOfRange(0))));
+ AncestorsIterator::new(Corrupted, vec![1.into()], 0.into(), false)
+ .unwrap();
+ assert_eq!(
+ iter.next(),
+ Some(Err(GraphError::ParentOutOfRange(0.into())))
+ );
}
#[test]
/// Test constructor, add/get bases and heads
fn test_missing_bases() -> Result<(), GraphError> {
- let mut missing_ancestors =
- MissingAncestors::new(SampleGraph, [5, 3, 1, 3].iter().cloned());
+ let mut missing_ancestors = MissingAncestors::new(
+ SampleGraph,
+ [5.into(), 3.into(), 1.into(), 3.into()].iter().cloned(),
+ );
let mut as_vec: Vec<Revision> =
missing_ancestors.get_bases().iter().cloned().collect();
as_vec.sort_unstable();
assert_eq!(as_vec, [1, 3, 5]);
assert_eq!(missing_ancestors.max_base, 5);
- missing_ancestors.add_bases([3, 7, 8].iter().cloned());
+ missing_ancestors
+ .add_bases([3.into(), 7.into(), 8.into()].iter().cloned());
as_vec = missing_ancestors.get_bases().iter().cloned().collect();
as_vec.sort_unstable();
assert_eq!(as_vec, [1, 3, 5, 7, 8]);
@@ -520,13 +643,16 @@
}
fn assert_missing_remove(
- bases: &[Revision],
- revs: &[Revision],
- expected: &[Revision],
+ bases: &[BaseRevision],
+ revs: &[BaseRevision],
+ expected: &[BaseRevision],
) {
- let mut missing_ancestors =
- MissingAncestors::new(SampleGraph, bases.iter().cloned());
- let mut revset: HashSet<Revision> = revs.iter().cloned().collect();
+ let mut missing_ancestors = MissingAncestors::new(
+ SampleGraph,
+ bases.iter().map(|r| Revision(*r)),
+ );
+ let mut revset: HashSet<Revision> =
+ revs.iter().map(|r| Revision(*r)).collect();
missing_ancestors
.remove_ancestors_from(&mut revset)
.unwrap();
@@ -547,14 +673,16 @@
}
fn assert_missing_ancestors(
- bases: &[Revision],
- revs: &[Revision],
- expected: &[Revision],
+ bases: &[BaseRevision],
+ revs: &[BaseRevision],
+ expected: &[BaseRevision],
) {
- let mut missing_ancestors =
- MissingAncestors::new(SampleGraph, bases.iter().cloned());
+ let mut missing_ancestors = MissingAncestors::new(
+
SampleGraph, + bases.iter().map(|r| Revision(*r)), + ); let missing = missing_ancestors - .missing_ancestors(revs.iter().cloned()) + .missing_ancestors(revs.iter().map(|r| Revision(*r))) .unwrap(); assert_eq!(missing.as_slice(), expected); } @@ -575,110 +703,115 @@ #[allow(clippy::unnecessary_cast)] #[test] fn test_remove_ancestors_from_case1() { + const FAKE_NULL_REVISION: BaseRevision = -1; + assert_eq!(FAKE_NULL_REVISION, NULL_REVISION.0); let graph: VecGraph = vec![ - [NULL_REVISION, NULL_REVISION], - [0, NULL_REVISION], + [FAKE_NULL_REVISION, FAKE_NULL_REVISION], + [0, FAKE_NULL_REVISION], [1, 0], [2, 1], - [3, NULL_REVISION], - [4, NULL_REVISION], + [3, FAKE_NULL_REVISION], + [4, FAKE_NULL_REVISION], [5, 1], - [2, NULL_REVISION], - [7, NULL_REVISION], - [8, NULL_REVISION], - [9, NULL_REVISION], + [2, FAKE_NULL_REVISION], + [7, FAKE_NULL_REVISION], + [8, FAKE_NULL_REVISION], + [9, FAKE_NULL_REVISION], [10, 1], - [3, NULL_REVISION], - [12, NULL_REVISION], - [13, NULL_REVISION], - [14, NULL_REVISION], - [4, NULL_REVISION], - [16, NULL_REVISION], - [17, NULL_REVISION], - [18, NULL_REVISION], + [3, FAKE_NULL_REVISION], + [12, FAKE_NULL_REVISION], + [13, FAKE_NULL_REVISION], + [14, FAKE_NULL_REVISION], + [4, FAKE_NULL_REVISION], + [16, FAKE_NULL_REVISION], + [17, FAKE_NULL_REVISION], + [18, FAKE_NULL_REVISION], [19, 11], - [20, NULL_REVISION], - [21, NULL_REVISION], - [22, NULL_REVISION], - [23, NULL_REVISION], - [2, NULL_REVISION], - [3, NULL_REVISION], + [20, FAKE_NULL_REVISION], + [21, FAKE_NULL_REVISION], + [22, FAKE_NULL_REVISION], + [23, FAKE_NULL_REVISION], + [2, FAKE_NULL_REVISION], + [3, FAKE_NULL_REVISION], [26, 24], - [27, NULL_REVISION], - [28, NULL_REVISION], - [12, NULL_REVISION], - [1, NULL_REVISION], + [27, FAKE_NULL_REVISION], + [28, FAKE_NULL_REVISION], + [12, FAKE_NULL_REVISION], + [1, FAKE_NULL_REVISION], [1, 9], - [32, NULL_REVISION], - [33, NULL_REVISION], + [32, FAKE_NULL_REVISION], + [33, FAKE_NULL_REVISION], [34, 31], - [35, NULL_REVISION], + [35, FAKE_NULL_REVISION], [36, 26], - [37, NULL_REVISION], - [38, NULL_REVISION], - [39, NULL_REVISION], - [40, NULL_REVISION], - [41, NULL_REVISION], + [37, FAKE_NULL_REVISION], + [38, FAKE_NULL_REVISION], + [39, FAKE_NULL_REVISION], + [40, FAKE_NULL_REVISION], + [41, FAKE_NULL_REVISION], [42, 26], - [0, NULL_REVISION], - [44, NULL_REVISION], + [0, FAKE_NULL_REVISION], + [44, FAKE_NULL_REVISION], [45, 4], - [40, NULL_REVISION], - [47, NULL_REVISION], + [40, FAKE_NULL_REVISION], + [47, FAKE_NULL_REVISION], [36, 0], - [49, NULL_REVISION], - [NULL_REVISION, NULL_REVISION], - [51, NULL_REVISION], - [52, NULL_REVISION], - [53, NULL_REVISION], - [14, NULL_REVISION], - [55, NULL_REVISION], - [15, NULL_REVISION], - [23, NULL_REVISION], - [58, NULL_REVISION], - [59, NULL_REVISION], - [2, NULL_REVISION], + [49, FAKE_NULL_REVISION], + [FAKE_NULL_REVISION, FAKE_NULL_REVISION], + [51, FAKE_NULL_REVISION], + [52, FAKE_NULL_REVISION], + [53, FAKE_NULL_REVISION], + [14, FAKE_NULL_REVISION], + [55, FAKE_NULL_REVISION], + [15, FAKE_NULL_REVISION], + [23, FAKE_NULL_REVISION], + [58, FAKE_NULL_REVISION], + [59, FAKE_NULL_REVISION], + [2, FAKE_NULL_REVISION], [61, 59], - [62, NULL_REVISION], - [63, NULL_REVISION], - [NULL_REVISION, NULL_REVISION], - [65, NULL_REVISION], - [66, NULL_REVISION], - [67, NULL_REVISION], - [68, NULL_REVISION], + [62, FAKE_NULL_REVISION], + [63, FAKE_NULL_REVISION], + [FAKE_NULL_REVISION, FAKE_NULL_REVISION], + [65, FAKE_NULL_REVISION], + [66, FAKE_NULL_REVISION], + [67, FAKE_NULL_REVISION], + [68, FAKE_NULL_REVISION], 
[37, 28],
[69, 25],
+ [71, FAKE_NULL_REVISION],
+ [72, FAKE_NULL_REVISION],
[50, 2],
+ [74, FAKE_NULL_REVISION],
+ [12, FAKE_NULL_REVISION],
+ [18, FAKE_NULL_REVISION],
+ [77, FAKE_NULL_REVISION],
+ [78, FAKE_NULL_REVISION],
+ [79, FAKE_NULL_REVISION],
[43, 33],
+ [81, FAKE_NULL_REVISION],
+ [82, FAKE_NULL_REVISION],
+ [83, FAKE_NULL_REVISION],
[84, 45],
+ [85, FAKE_NULL_REVISION],
+ [86, FAKE_NULL_REVISION],
+ [FAKE_NULL_REVISION, FAKE_NULL_REVISION],
+ [88, FAKE_NULL_REVISION],
+ [FAKE_NULL_REVISION, FAKE_NULL_REVISION],
[76, 83],
+ [44, FAKE_NULL_REVISION],
+ [92, FAKE_NULL_REVISION],
+ [93, FAKE_NULL_REVISION],
+ [9, FAKE_NULL_REVISION],
[95, 67],
+ [96, FAKE_NULL_REVISION],
+ [97, FAKE_NULL_REVISION],
+ [FAKE_NULL_REVISION, FAKE_NULL_REVISION],
+ ]
+ .into_iter()
+ .map(|[a, b]| [Revision(a), Revision(b)])
+ .collect();
+ let problem_rev = 28.into();
+ let problem_base = 70.into();
// making the problem obvious: problem_rev is a parent of problem_base
assert_eq!(graph.parents(problem_base).unwrap()[1], problem_rev);
@@ -687,14 +820,14 @@
graph,
[60, 26, 70, 3, 96, 19, 98, 49, 97, 47, 1, 6]
.iter()
- .cloned(),
+ .map(|r| Revision(*r)),
);
assert!(missing_ancestors.bases.contains(&problem_base));
let mut revs: HashSet<Revision> =
[4, 12, 41, 28, 68, 38, 1, 30, 56, 44]
.iter()
- .cloned()
+ .map(|r| Revision(*r))
.collect();
missing_ancestors.remove_ancestors_from(&mut revs).unwrap();
assert!(!revs.contains(&problem_rev));
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/config/config.rs
--- a/rust/hg-core/src/config/config.rs Tue Oct 10 18:29:04 2023 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/config/config_items.rs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/config/config_items.rs Wed Oct 11 02:02:46 2023 +0200
@@ -0,0 +1,725 @@
+//! Code for parsing default Mercurial config items.
+use itertools::Itertools;
+use serde::Deserialize;
+
+use crate::{errors::HgError, exit_codes, FastHashMap};
+
+/// Corresponds to the structure of `mercurial/configitems.toml`.
+#[derive(Debug, Deserialize)]
+pub struct ConfigItems {
+ items: Vec<DefaultConfigItem>,
+ templates: FastHashMap<String, Vec<TemplateItem>>,
+ #[serde(rename = "template-applications")]
+ template_applications: Vec<TemplateApplication>,
+}
+
+/// Corresponds to a config item declaration in `mercurial/configitems.toml`.
+#[derive(Clone, Debug, PartialEq, Deserialize)]
+#[serde(try_from = "RawDefaultConfigItem")]
+pub struct DefaultConfigItem {
+ /// Section of the config the item is in (e.g. `[merge-tools]`)
+ section: String,
+ /// Name of the item (e.g. `meld.gui`)
+ name: String,
+ /// Default value (can be dynamic, see [`DefaultConfigItemType`])
+ default: Option<DefaultConfigItemType>,
+ /// If the config option is generic (e.g. `merge-tools.*`), defines
+ /// the priority of this item relative to other generic items.
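+ /// Lower values are tried first: `DefaultConfig::get` below sorts a
+ /// section's generic items by ascending `(priority, name)` before
+ /// matching.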
+ /// If we're looking for a given item, then all generic items within the
+ /// same section will be sorted by order of priority, and the first regex
+ /// match against `name` is returned.
+ #[serde(default)]
+ priority: Option<isize>,
+ /// Aliases, if any. Each alias is a tuple of `(section, name)` for each
+ /// option that is aliased to this one.
+ #[serde(default)]
+ alias: Vec<(String, String)>,
+ /// Whether the config item is marked as experimental
+ #[serde(default)]
+ experimental: bool,
+ /// The (possibly empty) docstring for the item
+ #[serde(default)]
+ documentation: String,
+ /// Whether the item is part of an in-core extension. This allows us to
+ /// hide them if the extension is not enabled, to preserve legacy
+ /// behavior.
+ #[serde(default)]
+ in_core_extension: Option<String>,
+}
+
+/// Corresponds to the raw (i.e. on disk) structure of config items. Used as
+/// an intermediate step in deserialization.
+#[derive(Clone, Debug, Deserialize)]
+struct RawDefaultConfigItem {
+ section: String,
+ name: String,
+ default: Option<toml::Value>,
+ #[serde(rename = "default-type")]
+ default_type: Option<String>,
+ #[serde(default)]
+ priority: isize,
+ #[serde(default)]
+ generic: bool,
+ #[serde(default)]
+ alias: Vec<(String, String)>,
+ #[serde(default)]
+ experimental: bool,
+ #[serde(default)]
+ documentation: String,
+ #[serde(default)]
+ in_core_extension: Option<String>,
+}
+
+impl TryFrom<RawDefaultConfigItem> for DefaultConfigItem {
+ type Error = HgError;
+
+ fn try_from(value: RawDefaultConfigItem) -> Result<Self, Self::Error> {
+ Ok(Self {
+ section: value.section,
+ name: value.name,
+ default: raw_default_to_concrete(
+ value.default_type,
+ value.default,
+ )?,
+ priority: if value.generic {
+ Some(value.priority)
+ } else {
+ None
+ },
+ alias: value.alias,
+ experimental: value.experimental,
+ documentation: value.documentation,
+ in_core_extension: value.in_core_extension,
+ })
+ }
+}
+
+impl DefaultConfigItem {
+ fn is_generic(&self) -> bool {
+ self.priority.is_some()
+ }
+
+ pub fn in_core_extension(&self) -> Option<&str> {
+ self.in_core_extension.as_deref()
+ }
+
+ pub fn section(&self) -> &str {
+ self.section.as_ref()
+ }
+}
+
+impl<'a> TryFrom<&'a DefaultConfigItem> for Option<&'a str> {
+ type Error = HgError;
+
+ fn try_from(
+ value: &'a DefaultConfigItem,
+ ) -> Result<Option<&'a str>, Self::Error> {
+ match &value.default {
+ Some(default) => {
+ let err = HgError::abort(
+ format!(
+ "programming error: wrong query on config item '{}.{}'",
+ value.section,
+ value.name
+ ),
+ exit_codes::ABORT,
+ Some(format!(
+ "asked for '&str', type of default is '{}'",
+ default.type_str()
+ )),
+ );
+ match default {
+ DefaultConfigItemType::Primitive(toml::Value::String(
+ s,
+ )) => Ok(Some(s)),
+ _ => Err(err),
+ }
+ }
+ None => Ok(None),
+ }
+ }
+}
+
+impl<'a> TryFrom<&'a DefaultConfigItem> for Option<&'a [u8]> {
+ type Error = HgError;
+
+ fn try_from(
+ value: &'a DefaultConfigItem,
+ ) -> Result<Option<&'a [u8]>, Self::Error> {
+ match &value.default {
+ Some(default) => {
+ let err = HgError::abort(
+ format!(
+ "programming error: wrong query on config item '{}.{}'",
+ value.section,
+ value.name
+ ),
+ exit_codes::ABORT,
+ Some(format!(
+ "asked for bytes, type of default is '{}', \
+ which cannot be interpreted as bytes",
+ default.type_str()
+ )),
+ );
+ match default {
+ DefaultConfigItemType::Primitive(p) => {
+ Ok(p.as_str().map(str::as_bytes))
+ }
+ _ => Err(err),
+ }
+ }
+ None => Ok(None),
+ }
+ }
+}
+
+impl TryFrom<&DefaultConfigItem> for Option<bool> {
+ type Error = HgError;
+
+ fn try_from(value: &DefaultConfigItem) -> Result<Self, Self::Error> {
+ match &value.default {
Some(default) => {
+ let err = HgError::abort(
+ format!(
+ "programming error: wrong query on config item '{}.{}'",
+ value.section,
+ value.name
+ ),
+ exit_codes::ABORT,
+ Some(format!(
+ "asked for 'bool', type of default is '{}'",
+ default.type_str()
+ )),
+ );
+ match default {
+ DefaultConfigItemType::Primitive(
+ toml::Value::Boolean(b),
+ ) => Ok(Some(*b)),
+ _ => Err(err),
+ }
+ }
+ None => Ok(None),
+ }
+ }
+}
+
+impl TryFrom<&DefaultConfigItem> for Option<u32> {
+ type Error = HgError;
+
+ fn try_from(value: &DefaultConfigItem) -> Result<Self, Self::Error> {
+ match &value.default {
+ Some(default) => {
+ let err = HgError::abort(
+ format!(
+ "programming error: wrong query on config item '{}.{}'",
+ value.section,
+ value.name
+ ),
+ exit_codes::ABORT,
+ Some(format!(
+ "asked for 'u32', type of default is '{}'",
+ default.type_str()
+ )),
+ );
+ match default {
+ DefaultConfigItemType::Primitive(
+ toml::Value::Integer(b),
+ ) => {
+ Ok(Some((*b).try_into().expect("TOML integer to u32")))
+ }
+ _ => Err(err),
+ }
+ }
+ None => Ok(None),
+ }
+ }
+}
+
+impl TryFrom<&DefaultConfigItem> for Option<u64> {
+ type Error = HgError;
+
+ fn try_from(value: &DefaultConfigItem) -> Result<Self, Self::Error> {
+ match &value.default {
+ Some(default) => {
+ let err = HgError::abort(
+ format!(
+ "programming error: wrong query on config item '{}.{}'",
+ value.section,
+ value.name
+ ),
+ exit_codes::ABORT,
+ Some(format!(
+ "asked for 'u64', type of default is '{}'",
+ default.type_str()
+ )),
+ );
+ match default {
+ DefaultConfigItemType::Primitive(
+ toml::Value::Integer(b),
+ ) => {
+ Ok(Some((*b).try_into().expect("TOML integer to u64")))
+ }
+ _ => Err(err),
+ }
+ }
+ None => Ok(None),
+ }
+ }
+}
+
+/// Allows abstracting over more complex default values than just primitives.
+/// The former `configitems.py` contained some dynamic code that is encoded
+/// in this enum.
+#[derive(Debug, PartialEq, Clone, Deserialize)]
+pub enum DefaultConfigItemType {
+ /// Some primitive type (string, integer, boolean)
+ Primitive(toml::Value),
+ /// A dynamic value that will be given by the code at runtime
+ Dynamic,
+ /// A lazily-returned array (possibly only relevant in the Python impl)
+ /// Example: `lambda: [b"zstd", b"zlib"]`
+ Lambda(Vec<String>),
+ /// For now, a special case for `web.encoding` that points to the
+ /// `encoding.encoding` module in the Python impl so that local encoding
+ /// is correctly resolved at runtime
+ LazyModule(String),
+ ListType,
+}
+
+impl DefaultConfigItemType {
+ pub fn type_str(&self) -> &str {
+ match self {
+ DefaultConfigItemType::Primitive(primitive) => {
+ primitive.type_str()
+ }
+ DefaultConfigItemType::Dynamic => "dynamic",
+ DefaultConfigItemType::Lambda(_) => "lambda",
+ DefaultConfigItemType::LazyModule(_) => "lazy_module",
+ DefaultConfigItemType::ListType => "list_type",
+ }
+ }
+}
+
+/// Most of the fields are shared with [`DefaultConfigItem`].
+#[derive(Debug, Clone, Deserialize)]
+#[serde(try_from = "RawTemplateItem")]
+struct TemplateItem {
+ suffix: String,
+ default: Option<DefaultConfigItemType>,
+ priority: Option<isize>,
+ #[serde(default)]
+ alias: Vec<(String, String)>,
+ #[serde(default)]
+ experimental: bool,
+ #[serde(default)]
+ documentation: String,
+}
+
+/// Corresponds to the raw (i.e. on disk) representation of a template item.
+/// Used as an intermediate step in deserialization.
+#[derive(Clone, Debug, Deserialize)]
+struct RawTemplateItem {
+    suffix: String,
+    default: Option<toml::Value>,
+    #[serde(rename = "default-type")]
+    default_type: Option<String>,
+    #[serde(default)]
+    priority: isize,
+    #[serde(default)]
+    generic: bool,
+    #[serde(default)]
+    alias: Vec<(String, String)>,
+    #[serde(default)]
+    experimental: bool,
+    #[serde(default)]
+    documentation: String,
+}
+
+impl TemplateItem {
+    fn into_default_item(
+        self,
+        application: TemplateApplication,
+    ) -> DefaultConfigItem {
+        DefaultConfigItem {
+            section: application.section,
+            name: application
+                .prefix
+                .map(|prefix| format!("{}.{}", prefix, self.suffix))
+                .unwrap_or(self.suffix),
+            default: self.default,
+            priority: self.priority,
+            alias: self.alias,
+            experimental: self.experimental,
+            documentation: self.documentation,
+            in_core_extension: None,
+        }
+    }
+}
+
+impl TryFrom<RawTemplateItem> for TemplateItem {
+    type Error = HgError;
+
+    fn try_from(value: RawTemplateItem) -> Result<Self, Self::Error> {
+        Ok(Self {
+            suffix: value.suffix,
+            default: raw_default_to_concrete(
+                value.default_type,
+                value.default,
+            )?,
+            priority: if value.generic {
+                Some(value.priority)
+            } else {
+                None
+            },
+            alias: value.alias,
+            experimental: value.experimental,
+            documentation: value.documentation,
+        })
+    }
+}
+
+/// Transforms the on-disk string-based representation of complex default
+/// types to the concrete [`DefaultConfigItemType`].
+fn raw_default_to_concrete(
+    default_type: Option<String>,
+    default: Option<toml::Value>,
+) -> Result<Option<DefaultConfigItemType>, HgError> {
+    Ok(match default_type.as_deref() {
+        None => default.as_ref().map(|default| {
+            DefaultConfigItemType::Primitive(default.to_owned())
+        }),
+        Some("dynamic") => Some(DefaultConfigItemType::Dynamic),
+        Some("list_type") => Some(DefaultConfigItemType::ListType),
+        Some("lambda") => match &default {
+            Some(default) => Some(DefaultConfigItemType::Lambda(
+                default.to_owned().try_into().map_err(|e| {
+                    HgError::abort(
+                        e.to_string(),
+                        exit_codes::ABORT,
+                        Some("Check 'mercurial/configitems.toml'".into()),
+                    )
+                })?,
+            )),
+            None => {
+                return Err(HgError::abort(
+                    "lambda defined with no return value".to_string(),
+                    exit_codes::ABORT,
+                    Some("Check 'mercurial/configitems.toml'".into()),
+                ))
+            }
+        },
+        Some("lazy_module") => match &default {
+            Some(default) => {
+                Some(DefaultConfigItemType::LazyModule(match default {
+                    toml::Value::String(module) => module.to_owned(),
+                    _ => {
+                        return Err(HgError::abort(
+                            "lazy_module module name should be a string"
+                                .to_string(),
+                            exit_codes::ABORT,
+                            Some("Check 'mercurial/configitems.toml'".into()),
+                        ))
+                    }
+                }))
+            }
+            None => {
+                return Err(HgError::abort(
+                    "lazy_module should have a default value".to_string(),
+                    exit_codes::ABORT,
+                    Some("Check 'mercurial/configitems.toml'".into()),
+                ))
+            }
+        },
+        Some(invalid) => {
+            return Err(HgError::abort(
+                format!("invalid default_type '{}'", invalid),
+                exit_codes::ABORT,
+                Some("Check 'mercurial/configitems.toml'".into()),
+            ))
+        }
+    })
+}
+
+#[derive(Debug, Clone, Deserialize)]
+struct TemplateApplication {
+    template: String,
+    section: String,
+    #[serde(default)]
+    prefix: Option<String>,
+}
+
+/// Represents the (dynamic) set of default core Mercurial config items from
+/// `mercurial/configitems.toml`.
+#[derive(Clone, Debug, Default)]
+pub struct DefaultConfig {
+    /// Mapping of section -> (mapping of name -> item)
+    items: FastHashMap<String, FastHashMap<String, DefaultConfigItem>>,
+}
+
+impl DefaultConfig {
+    pub fn empty() -> DefaultConfig {
+        Self {
+            items: Default::default(),
+        }
+    }
+
+    /// Returns `Self`, given the contents of `mercurial/configitems.toml`
+    #[logging_timer::time("trace")]
+    pub fn from_contents(contents: &str) -> Result<Self, HgError> {
+        let mut from_file: ConfigItems =
+            toml::from_str(contents).map_err(|e| {
+                HgError::abort(
+                    e.to_string(),
+                    exit_codes::ABORT,
+                    Some("Check 'mercurial/configitems.toml'".into()),
+                )
+            })?;
+
+        let mut flat_items = from_file.items;
+
+        for application in from_file.template_applications.drain(..) {
+            match from_file.templates.get(&application.template) {
+                None => return Err(
+                    HgError::abort(
+                        format!(
+                            "template application refers to undefined template '{}'",
+                            application.template
+                        ),
+                        exit_codes::ABORT,
+                        Some("Check 'mercurial/configitems.toml'".into())
+                    )
+                ),
+                Some(template_items) => {
+                    for template_item in template_items {
+                        flat_items.push(
+                            template_item
+                                .clone()
+                                .into_default_item(application.clone()),
+                        )
+                    }
+                }
+            };
+        }
+
+        let items = flat_items.into_iter().fold(
+            FastHashMap::default(),
+            |mut acc, item| {
+                acc.entry(item.section.to_owned())
+                    .or_insert_with(|| {
+                        let mut section = FastHashMap::default();
+                        section.insert(item.name.to_owned(), item.to_owned());
+                        section
+                    })
+                    .insert(item.name.to_owned(), item);
+                acc
+            },
+        );
+
+        Ok(Self { items })
+    }
+
+    /// Return the default config item that matches `section` and `item`.
+    pub fn get(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Option<&DefaultConfigItem> {
+        // Core items must be valid UTF-8
+        let section = String::from_utf8_lossy(section);
+        let section_map = self.items.get(section.as_ref())?;
+        let item_name_lossy = String::from_utf8_lossy(item);
+        match section_map.get(item_name_lossy.as_ref()) {
+            Some(item) => Some(item),
+            None => {
+                for generic_item in section_map
+                    .values()
+                    .filter(|item| item.is_generic())
+                    .sorted_by_key(|item| match item.priority {
+                        Some(priority) => (priority, &item.name),
+                        _ => unreachable!(),
+                    })
+                {
+                    if regex::bytes::Regex::new(&generic_item.name)
+                        .expect("invalid regex in configitems")
+                        .is_match(item)
+                    {
+                        return Some(generic_item);
+                    }
+                }
+                None
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::config::config_items::{
+        DefaultConfigItem, DefaultConfigItemType,
+    };
+
+    use super::DefaultConfig;
+
+    #[test]
+    fn test_config_read() {
+        let contents = r#"
+[[items]]
+section = "alias"
+name = "abcd.*"
+default = 3
+generic = true
+priority = -1
+
+[[items]]
+section = "alias"
+name = ".*"
+default-type = "dynamic"
+generic = true
+
+[[items]]
+section = "cmdserver"
+name = "track-log"
+default-type = "lambda"
+default = [ "chgserver", "cmdserver", "repocache",]
+
+[[items]]
+section = "chgserver"
+name = "idletimeout"
+default = 3600
+
+[[items]]
+section = "cmdserver"
+name = "message-encodings"
+default-type = "list_type"
+
+[[items]]
+section = "web"
+name = "encoding"
+default-type = "lazy_module"
+default = "encoding.encoding"
+
+[[items]]
+section = "command-templates"
+name = "graphnode"
+alias = [["ui", "graphnodetemplate"]]
+documentation = """This is a docstring.
+This is another line \
+but this is not."""
+
+[[items]]
+section = "censor"
+name = "policy"
+default = "abort"
+experimental = true
+
+[[template-applications]]
+template = "diff-options"
+section = "commands"
+prefix = "revert.interactive"
+
+[[template-applications]]
+template = "diff-options"
+section = "diff"
+
+[templates]
+[[templates.diff-options]]
+suffix = "nodates"
+default = false
+
+[[templates.diff-options]]
+suffix = "showfunc"
+default = false
+
+[[templates.diff-options]]
+suffix = "unified"
+"#;
+        let res = DefaultConfig::from_contents(contents);
+        let config = match res {
+            Ok(config) => config,
+            Err(e) => panic!("{}", e),
+        };
+        let expected = DefaultConfigItem {
+            section: "censor".into(),
+            name: "policy".into(),
+            default: Some(DefaultConfigItemType::Primitive("abort".into())),
+            priority: None,
+            alias: vec![],
+            experimental: true,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"censor", b"policy"), Some(&expected));
+
+        // Test generic priority. The `.*` pattern is wider than `abcd.*`,
+        // but `abcd.*` has priority, so it should match first...
+        let expected = DefaultConfigItem {
+            section: "alias".into(),
+            name: "abcd.*".into(),
+            default: Some(DefaultConfigItemType::Primitive(3.into())),
+            priority: Some(-1),
+            alias: vec![],
+            experimental: false,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"alias", b"abcdsomething"), Some(&expected));
+
+        // ...but if it doesn't match, we should fall back to `.*`
+        let expected = DefaultConfigItem {
+            section: "alias".into(),
+            name: ".*".into(),
+            default: Some(DefaultConfigItemType::Dynamic),
+            priority: Some(0),
+            alias: vec![],
+            experimental: false,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"alias", b"something"), Some(&expected));
+
+        let expected = DefaultConfigItem {
+            section: "chgserver".into(),
+            name: "idletimeout".into(),
+            default: Some(DefaultConfigItemType::Primitive(3600.into())),
+            priority: None,
+            alias: vec![],
+            experimental: false,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"chgserver", b"idletimeout"), Some(&expected));
+
+        let expected = DefaultConfigItem {
+            section: "cmdserver".into(),
+            name: "track-log".into(),
+            default: Some(DefaultConfigItemType::Lambda(vec![
+                "chgserver".into(),
+                "cmdserver".into(),
+                "repocache".into(),
+            ])),
+            priority: None,
+            alias: vec![],
+            experimental: false,
+            documentation: "".into(),
+            in_core_extension: None,
+        };
+        assert_eq!(config.get(b"cmdserver", b"track-log"), Some(&expected));
+
+        let expected = DefaultConfigItem {
+            section: "command-templates".into(),
+            name: "graphnode".into(),
+            default: None,
+            priority: None,
+            alias: vec![("ui".into(), "graphnodetemplate".into())],
+            experimental: false,
+            documentation:
+                "This is a docstring.\nThis is another line but this is not."
+                    .into(),
+            in_core_extension: None,
+        };
+        assert_eq!(
+            config.get(b"command-templates", b"graphnode"),
+            Some(&expected)
+        );
+    }
+}
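The `RawDefaultConfigItem` -> `DefaultConfigItem` pair above is serde's `try_from` validation idiom: deserialize into a permissive raw shape, then enforce invariants in a single `TryFrom`. A minimal self-contained sketch of that pattern, with hypothetical `RawItem`/`Item` types and the `serde`/`toml` crates assumed as dependencies:

```rust
use serde::Deserialize;

/// Raw on-disk shape; every field is optional or loosely typed.
#[derive(Debug, Deserialize)]
struct RawItem {
    name: String,
    #[serde(default)]
    generic: bool,
    #[serde(default)]
    priority: isize,
}

/// Validated shape the rest of the code consumes.
#[derive(Debug, Deserialize)]
#[serde(try_from = "RawItem")]
struct Item {
    name: String,
    /// Only generic items carry a priority, mirroring the patch's invariant.
    priority: Option<isize>,
}

impl TryFrom<RawItem> for Item {
    type Error = String;

    fn try_from(raw: RawItem) -> Result<Self, Self::Error> {
        // Validation happens exactly once, at deserialization time.
        if !raw.generic && raw.priority != 0 {
            return Err(format!("'{}': priority requires generic = true", raw.name));
        }
        Ok(Item {
            name: raw.name,
            priority: raw.generic.then_some(raw.priority),
        })
    }
}

fn main() {
    let item: Item =
        toml::from_str("name = \"abcd.*\"\ngeneric = true\npriority = -1")
            .expect("valid item");
    println!("{:?}", item);
}
```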
      */
 }
@@ -323,6 +324,9 @@
             ConfigOrigin::Tweakdefaults => {
                 write_bytes!(out, b"ui.tweakdefaults")
             }
+            ConfigOrigin::Defaults => {
+                write_bytes!(out, b"configitems.toml")
+            }
         }
     }
 }
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/config/mod.rs
--- a/rust/hg-core/src/config/mod.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/config/mod.rs Wed Oct 11 02:02:46 2023 +0200
@@ -9,14 +9,19 @@
 
 //! Mercurial config parsing and interfaces.
 
+pub mod config_items;
 mod layer;
 mod plain_info;
 mod values;
 pub use layer::{ConfigError, ConfigOrigin, ConfigParseError};
+use lazy_static::lazy_static;
 pub use plain_info::PlainInfo;
+use self::config_items::DefaultConfig;
+use self::config_items::DefaultConfigItem;
 use self::layer::ConfigLayer;
 use self::layer::ConfigValue;
+use crate::errors::HgError;
 use crate::errors::{HgResultExt, IoResultExt};
 use crate::utils::files::get_bytes_from_os_str;
 use format_bytes::{write_bytes, DisplayBytes};
@@ -26,6 +31,14 @@
 use std::path::{Path, PathBuf};
 use std::str;
 
+lazy_static! {
+    static ref DEFAULT_CONFIG: Result<DefaultConfig, HgError> = {
+        DefaultConfig::from_contents(include_str!(
+            "../../../../mercurial/configitems.toml"
+        ))
+    };
+}
+
 /// Holds the config values for the current repository
 /// TODO update this docstring once we support more sources
 #[derive(Clone)]
@@ -347,13 +360,50 @@
         self.plain = plain;
     }
 
+    /// Returns the default value for the given config item, if any.
+    pub fn get_default(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<Option<&DefaultConfigItem>, HgError> {
+        let default_config = DEFAULT_CONFIG.as_ref().map_err(|e| {
+            HgError::abort(
+                e.to_string(),
+                crate::exit_codes::ABORT,
+                Some("`mercurial/configitems.toml` is not valid".into()),
+            )
+        })?;
+        let default_opt = default_config.get(section, item);
+        Ok(default_opt.filter(|default| {
+            default
+                .in_core_extension()
+                .map(|extension| {
+                    // Only return the default for an in-core extension item
+                    // if said extension is enabled
+                    self.is_extension_enabled(extension.as_bytes())
+                })
+                .unwrap_or(true)
+        }))
+    }
+
+    /// Returns the config value for a section + item pair, parsed from the
+    /// raw bytes into the expected type by the given function (the type is
+    /// passed as a string only to make debugging easier).
+    /// Used by higher-level methods like `get_bool`.
+    ///
+    /// `fallback_to_default` controls whether the default value (if any) is
+    /// returned when nothing is found in the user configuration.
     fn get_parse<'config, T: 'config>(
         &'config self,
         section: &[u8],
         item: &[u8],
         expected_type: &'static str,
         parse: impl Fn(&'config [u8]) -> Option<T>,
-    ) -> Result<Option<T>, ConfigValueParseError> {
+        fallback_to_default: bool,
+    ) -> Result<Option<T>, HgError>
+    where
+        Option<T>: TryFrom<&'config DefaultConfigItem, Error = HgError>,
+    {
         match self.get_inner(section, item) {
             Some((layer, v)) => match parse(&v.bytes) {
                 Some(b) => Ok(Some(b)),
@@ -364,22 +414,105 @@
                     section: section.to_owned(),
                     item: item.to_owned(),
                     expected_type,
-                })),
+                })
+                .into()),
             },
-            None => Ok(None),
+            None => {
+                if !fallback_to_default {
+                    return Ok(None);
+                }
+                match self.get_default(section, item)? {
+                    Some(default) => {
+                        // Defaults are TOML values, so they're not in the same
+                        // shape as in the config files.
+                        // First try to convert directly to the expected type
+                        let as_t = default.try_into();
+                        match as_t {
+                            Ok(t) => Ok(t),
+                            Err(e) => {
+                                // If it fails, it means that the default is
+                                // kept in its raw (string) form, in which
+                                // case...
+                                let as_bytes: Result<Option<&[u8]>, _> =
+                                    default.try_into();
+                                match as_bytes {
+                                    Ok(bytes_opt) => {
+                                        if let Some(bytes) = bytes_opt {
+                                            // ...we should be able to parse it
+                                            return Ok(parse(bytes));
+                                        }
+                                        Err(e)
+                                    }
+                                    Err(_) => Err(e),
+                                }
+                            }
+                        }
+                    }
+                    None => {
+                        self.print_devel_warning(section, item)?;
+                        Ok(None)
+                    }
+                }
+            }
         }
     }
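The `where Option<T>: TryFrom<&DefaultConfigItem, ...>` bound on `get_parse` is what lets a single generic getter serve every value type. A standalone sketch of the same trick, with toy stand-in types (`ItemDefault` and `get_or_default` are hypothetical, not hg-core APIs):

```rust
/// Toy default holder; hg-core's real item is much richer.
#[derive(Debug)]
struct ItemDefault(Option<i64>);

// `Option<u32>` knows how to read itself out of an `&ItemDefault`,
// failing loudly on a mismatch (here: integer overflow).
impl TryFrom<&ItemDefault> for Option<u32> {
    type Error = String;

    fn try_from(value: &ItemDefault) -> Result<Self, Self::Error> {
        value
            .0
            .map(|i| u32::try_from(i).map_err(|e| e.to_string()))
            .transpose()
    }
}

// The single generic entry point: a user-provided value wins; otherwise the
// `TryFrom` bound extracts the default.
fn get_or_default<'a, T>(
    user_value: Option<T>,
    default: &'a ItemDefault,
) -> Result<Option<T>, String>
where
    Option<T>: TryFrom<&'a ItemDefault, Error = String>,
{
    match user_value {
        Some(v) => Ok(Some(v)),
        None => default.try_into(),
    }
}

fn main() {
    let default = ItemDefault(Some(42));
    let got: Option<u32> = get_or_default(None, &default).unwrap();
    assert_eq!(got, Some(42));
}
```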
+    fn print_devel_warning(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<(), HgError> {
+        let warn_all = self.get_bool(b"devel", b"all-warnings")?;
+        let warn_specific = self.get_bool(b"devel", b"warn-config-unknown")?;
+        if warn_all || warn_specific {
+            // We technically shouldn't print anything here since it's not
+            // the concern of `hg-core`.
+            //
+            // We're printing directly to stderr since development warnings
+            // are not on by default and surfacing this to consumer crates
+            // (like `rhg`) would be more difficult, probably requiring
+            // something à la `log` crate.
+            //
+            // TODO maybe figure out a way of exposing a "warnings" channel
+            // that consumer crates can hook into. It would be useful for
+            // all other warnings that `hg-core` could expose.
+            eprintln!(
+                "devel-warn: accessing unregistered config item: '{}.{}'",
+                String::from_utf8_lossy(section),
+                String::from_utf8_lossy(item),
+            );
+        }
+        Ok(())
+    }
+
     /// Returns an `Err` if the first value found is not a valid UTF-8 string.
     /// Otherwise, returns an `Ok(value)` if found, or `None`.
     pub fn get_str(
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<Option<&str>, ConfigValueParseError> {
-        self.get_parse(section, item, "ASCII or UTF-8 string", |value| {
-            str::from_utf8(value).ok()
-        })
+    ) -> Result<Option<&str>, HgError> {
+        self.get_parse(
+            section,
+            item,
+            "ASCII or UTF-8 string",
+            |value| str::from_utf8(value).ok(),
+            true,
+        )
+    }
+
+    /// Same as `get_str`, but doesn't fall back to the default `configitem`
+    /// if not defined in the user config.
+    pub fn get_str_no_default(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<Option<&str>, HgError> {
+        self.get_parse(
+            section,
+            item,
+            "ASCII or UTF-8 string",
+            |value| str::from_utf8(value).ok(),
+            false,
+        )
     }
 
     /// Returns an `Err` if the first value found is not a valid unsigned
@@ -388,10 +521,14 @@
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<Option<u32>, ConfigValueParseError> {
-        self.get_parse(section, item, "valid integer", |value| {
-            str::from_utf8(value).ok()?.parse().ok()
-        })
+    ) -> Result<Option<u32>, HgError> {
+        self.get_parse(
+            section,
+            item,
+            "valid integer",
+            |value| str::from_utf8(value).ok()?.parse().ok(),
+            true,
+        )
     }
 
     /// Returns an `Err` if the first value found is not a valid file size
@@ -401,8 +538,14 @@
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<Option<u64>, ConfigValueParseError> {
-        self.get_parse(section, item, "byte quantity", values::parse_byte_size)
+    ) -> Result<Option<u64>, HgError> {
+        self.get_parse(
+            section,
+            item,
+            "byte quantity",
+            values::parse_byte_size,
+            true,
+        )
     }
 
     /// Returns an `Err` if the first value found is not a valid boolean.
@@ -412,8 +555,18 @@
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<Option<bool>, ConfigValueParseError> {
-        self.get_parse(section, item, "boolean", values::parse_bool)
+    ) -> Result<Option<bool>, HgError> {
+        self.get_parse(section, item, "boolean", values::parse_bool, true)
+    }
+
+    /// Same as `get_option`, but doesn't fall back to the default `configitem`
+    /// if not defined in the user config.
+    pub fn get_option_no_default(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<Option<bool>, HgError> {
+        self.get_parse(section, item, "boolean", values::parse_bool, false)
     }
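`get_byte_size` expects a human-readable byte quantity. The real parsing lives in `values::parse_byte_size`, whose implementation is not shown in this patch; the sketch below is only an illustrative approximation of such a parser, with assumed suffix semantics:

```rust
/// Illustrative only: parse "10", "4 kb", "1.5m"-style byte quantities.
/// The accepted suffixes and rounding here are assumptions for this sketch,
/// not a copy of `values::parse_byte_size`.
fn parse_byte_size_sketch(value: &str) -> Option<u64> {
    let value = value.trim().to_ascii_lowercase();
    // Longer suffixes first, so "kb" is not mistaken for a trailing "b".
    for (suffix, multiplier) in [
        ("gb", 1u64 << 30),
        ("mb", 1 << 20),
        ("kb", 1 << 10),
        ("g", 1 << 30),
        ("m", 1 << 20),
        ("k", 1 << 10),
        ("b", 1),
    ] {
        if let Some(number) = value.strip_suffix(suffix) {
            let number: f64 = number.trim().parse().ok()?;
            return Some((number * multiplier as f64) as u64);
        }
    }
    value.parse().ok()
}

fn main() {
    assert_eq!(parse_byte_size_sketch("4 kb"), Some(4096));
    assert_eq!(parse_byte_size_sketch("1.5m"), Some(1_572_864));
    assert_eq!(parse_byte_size_sketch("42"), Some(42));
}
```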
     /// Returns the corresponding boolean in the config. Returns `Ok(false)`
@@ -422,10 +575,20 @@
         &self,
         section: &[u8],
         item: &[u8],
-    ) -> Result<bool, ConfigValueParseError> {
+    ) -> Result<bool, HgError> {
         Ok(self.get_option(section, item)?.unwrap_or(false))
     }
 
+    /// Same as `get_bool`, but doesn't fall back to the default `configitem`
+    /// if not defined in the user config.
+    pub fn get_bool_no_default(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<bool, HgError> {
+        Ok(self.get_option_no_default(section, item)?.unwrap_or(false))
+    }
+
     /// Returns `true` if the extension is enabled, `false` otherwise
     pub fn is_extension_enabled(&self, extension: &[u8]) -> bool {
         let value = self.get(b"extensions", extension);
@@ -633,4 +796,15 @@
         assert!(config.get_u32(b"section2", b"not-count").is_err());
         assert!(config.get_byte_size(b"section2", b"not-size").is_err());
     }
+
+    #[test]
+    fn test_default_parse() {
+        let config = Config::load_from_explicit_sources(vec![])
+            .expect("expected valid config");
+        let ret = config.get_byte_size(b"cmdserver", b"max-log-size");
+        assert!(ret.is_ok(), "{:?}", ret);
+
+        let ret = config.get_byte_size(b"ui", b"formatted");
+        assert!(ret.unwrap().is_none());
+    }
 }
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/copy_tracing/tests.rs
--- a/rust/hg-core/src/copy_tracing/tests.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/copy_tracing/tests.rs Wed Oct 11 02:02:46 2023 +0200
@@ -1,5 +1,12 @@
 use super::*;
 
+/// Shorthand to reduce boilerplate when creating [`Revision`] for testing
+macro_rules! R {
+    ($revision:literal) => {
+        Revision($revision)
+    };
+}
+
 /// Unit tests for:
 ///
 /// ```ignore
@@ -27,7 +34,12 @@
     use MergePick::*;
 
     assert_eq!(
-        compare_value!(1, Normal, (1, None, { 1 }), (1, None, { 1 })),
+        compare_value!(
+            R!(1),
+            Normal,
+            (R!(1), None, { R!(1) }),
+            (R!(1), None, { R!(1) })
+        ),
         (Any, false)
     );
 }
@@ -70,12 +82,12 @@
 
     assert_eq!(
        merge_copies_dict!(
-            1,
-            {"foo" => (1, None, {})},
+            R!(1),
+            {"foo" => (R!(1), None, {})},
            {},
            {"foo" => Merged}
        ),
-        internal_path_copies!("foo" => (1, None, {}))
+        internal_path_copies!("foo" => (R!(1), None, {}))
    );
 }
 
@@ -124,17 +136,29 @@
 
     assert_eq!(
         combine_changeset_copies!(
-            { 1 => 1, 2 => 1 },
+            { R!(1) => 1, R!(2) => 1 },
             [
-                { rev: 1, p1: NULL, p2: NULL, actions: [], merge_cases: {}, },
-                { rev: 2, p1: NULL, p2: NULL, actions: [], merge_cases: {}, },
+                {
+                    rev: R!(1),
+                    p1: NULL,
+                    p2: NULL,
+                    actions: [],
+                    merge_cases: {},
+                },
                 {
-                    rev: 3, p1: 1, p2: 2,
+                    rev: R!(2),
+                    p1: NULL,
+                    p2: NULL,
+                    actions: [],
+                    merge_cases: {},
+                },
+                {
+                    rev: R!(3), p1: R!(1), p2: R!(2),
                     actions: [CopiedFromP1("destination.txt", "source.txt")],
                     merge_cases: {"destination.txt" => Merged},
                 },
             ],
-            3,
+            R!(3),
         ),
         path_copies!("destination.txt" => "source.txt")
     );
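The `R!` shorthand above is ordinary `macro_rules!` sugar for wrapping integer literals in the `Revision` newtype. The same pattern in a self-contained form (toy `Revision`/`BaseRevision` definitions assumed, not hg-core's):

```rust
/// Toy stand-ins for this sketch; the real types live in `hg-core`.
type BaseRevision = i32;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Revision(BaseRevision);

/// Shorthand so tests can write `R!(5)` instead of `Revision(5)`.
macro_rules! R {
    ($revision:literal) => {
        Revision($revision)
    };
}

fn main() {
    // The macro only saves typing, but it keeps long test fixtures readable
    // once every bare integer has to become a `Revision`.
    let heads = vec![R!(10), R!(11), R!(12), R!(13)];
    assert_eq!(heads[0], Revision(10));
}
```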
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/dagops.rs
--- a/rust/hg-core/src/dagops.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/dagops.rs Wed Oct 11 02:02:46 2023 +0200
@@ -171,14 +171,15 @@
 mod tests {
 
     use super::*;
-    use crate::testing::SampleGraph;
+    use crate::{testing::SampleGraph, BaseRevision};
 
     /// Apply `retain_heads()` to the given slice and return as a sorted `Vec`
     fn retain_heads_sorted(
         graph: &impl Graph,
-        revs: &[Revision],
+        revs: &[BaseRevision],
     ) -> Result<Vec<Revision>, GraphError> {
-        let mut revs: HashSet<Revision> = revs.iter().cloned().collect();
+        let mut revs: HashSet<Revision> =
+            revs.iter().cloned().map(Revision).collect();
         retain_heads(graph, &mut revs)?;
         let mut as_vec: Vec<Revision> = revs.iter().cloned().collect();
         as_vec.sort_unstable();
@@ -202,9 +203,11 @@
 
     /// Apply `heads()` to the given slice and return as a sorted `Vec`
     fn heads_sorted(
         graph: &impl Graph,
-        revs: &[Revision],
+        revs: &[BaseRevision],
     ) -> Result<Vec<Revision>, GraphError> {
-        let heads = heads(graph, revs.iter())?;
+        let iter_revs: Vec<_> =
+            revs.into_iter().cloned().map(Revision).collect();
+        let heads = heads(graph, iter_revs.iter())?;
         let mut as_vec: Vec<Revision> = heads.iter().cloned().collect();
         as_vec.sort_unstable();
         Ok(as_vec)
@@ -227,9 +230,9 @@
     /// Apply `roots()` and sort the result for easier comparison
     fn roots_sorted(
         graph: &impl Graph,
-        revs: &[Revision],
+        revs: &[BaseRevision],
     ) -> Result<Vec<Revision>, GraphError> {
-        let set: HashSet<_> = revs.iter().cloned().collect();
+        let set: HashSet<_> = revs.iter().cloned().map(Revision).collect();
         let mut as_vec = roots(graph, &set)?;
         as_vec.sort_unstable();
         Ok(as_vec)
@@ -252,17 +255,24 @@
     /// Apply `range()` and convert the result into a Vec for easier comparison
     fn range_vec(
         graph: impl Graph + Clone,
-        roots: &[Revision],
-        heads: &[Revision],
+        roots: &[BaseRevision],
+        heads: &[BaseRevision],
     ) -> Result<Vec<Revision>, GraphError> {
-        range(&graph, roots.iter().cloned(), heads.iter().cloned())
-            .map(|bs| bs.into_iter().collect())
+        range(
+            &graph,
+            roots.iter().cloned().map(Revision),
+            heads.iter().cloned().map(Revision),
+        )
+        .map(|bs| bs.into_iter().collect())
     }
 
     #[test]
     fn test_range() -> Result<(), GraphError> {
         assert_eq!(range_vec(SampleGraph, &[0], &[4])?, vec![0, 1, 2, 4]);
-        assert_eq!(range_vec(SampleGraph, &[0], &[8])?, vec![]);
+        assert_eq!(
+            range_vec(SampleGraph, &[0], &[8])?,
+            Vec::<Revision>::new()
+        );
         assert_eq!(
             range_vec(SampleGraph, &[5, 6], &[10, 11, 13])?,
             vec![5, 10]
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/dirstate/dirs_multiset.rs
--- a/rust/hg-core/src/dirstate/dirs_multiset.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/dirstate/dirs_multiset.rs Wed Oct 11 02:02:46 2023 +0200
@@ -62,7 +62,7 @@
     /// Initializes the multiset from a manifest.
     pub fn from_manifest(
         manifest: &[impl AsRef<HgPath>],
-    ) -> Result<Self, DirstateMapError> {
+    ) -> Result<Self, HgPathError> {
         let mut multiset = DirsMultiset {
             inner: FastHashMap::default(),
         };
@@ -80,19 +80,17 @@
     pub fn add_path(
         &mut self,
         path: impl AsRef<HgPath>,
-    ) -> Result<(), DirstateMapError> {
+    ) -> Result<(), HgPathError> {
         for subpath in files::find_dirs(path.as_ref()) {
             if subpath.as_bytes().last() == Some(&b'/') {
                 // TODO Remove this once PathAuditor is certified
                 // as the only entrypoint for path data
                 let second_slash_index = subpath.len() - 1;
 
-                return Err(DirstateMapError::InvalidPath(
-                    HgPathError::ConsecutiveSlashes {
-                        bytes: path.as_ref().as_bytes().to_owned(),
-                        second_slash_index,
-                    },
-                ));
+                return Err(HgPathError::ConsecutiveSlashes {
+                    bytes: path.as_ref().as_bytes().to_owned(),
+                    second_slash_index,
+                });
             }
             if let Some(val) = self.inner.get_mut(subpath) {
                 *val += 1;
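Dropping the `DirstateMapError::InvalidPath` wrapper works because `?` applies `From` conversions at call sites that still return the wider error (the matching `From<HgPathError>` impls appear in `lib.rs` further down this patch). A toy sketch of the mechanism:

```rust
/// Toy error types for this sketch only.
#[derive(Debug)]
struct PathError(String);

#[derive(Debug)]
enum MapError {
    InvalidPath(PathError),
}

// With this conversion in place, a function returning `MapError` can call a
// function returning `PathError` and just use `?`.
impl From<PathError> for MapError {
    fn from(error: PathError) -> Self {
        Self::InvalidPath(error)
    }
}

fn check_path(path: &str) -> Result<(), PathError> {
    if path.contains("//") {
        return Err(PathError(format!("consecutive slashes in {:?}", path)));
    }
    Ok(())
}

fn add_path(path: &str) -> Result<(), MapError> {
    // `?` inserts the `From<PathError> for MapError` conversion.
    check_path(path)?;
    Ok(())
}

fn main() {
    assert!(add_path("a/b").is_ok());
    assert!(matches!(add_path("a//b"), Err(MapError::InvalidPath(_))));
}
```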
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/dirstate/status.rs
--- a/rust/hg-core/src/dirstate/status.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/dirstate/status.rs Wed Oct 11 02:02:46 2023 +0200
@@ -20,7 +20,7 @@
 
 /// Wrong type of file from a `BadMatch`
 /// Note: a lot of those don't exist on all platforms.
-#[derive(Debug, Copy, Clone)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
 pub enum BadType {
     CharacterDevice,
     BlockDevice,
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/dirstate_tree/dirstate_map.rs
--- a/rust/hg-core/src/dirstate_tree/dirstate_map.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/dirstate_tree/dirstate_map.rs Wed Oct 11 02:02:46 2023 +0200
@@ -579,6 +579,14 @@
         }
     }
 
+    pub fn has_node(
+        &self,
+        path: &HgPath,
+    ) -> Result<bool, DirstateV2ParseError> {
+        let node = self.get_node(path)?;
+        Ok(node.is_some())
+    }
+
     /// Returns a mutable reference to the node at `path` if it exists
     ///
     /// `each_ancestor` is a callback that is called for each ancestor node
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/dirstate_tree/status.rs
--- a/rust/hg-core/src/dirstate_tree/status.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/dirstate_tree/status.rs Wed Oct 11 02:02:46 2023 +0200
@@ -8,12 +8,14 @@
 use crate::dirstate_tree::dirstate_map::NodeRef;
 use crate::dirstate_tree::on_disk::DirstateV2ParseError;
 use crate::matchers::get_ignore_function;
-use crate::matchers::Matcher;
+use crate::matchers::{Matcher, VisitChildrenSet};
 use crate::utils::files::get_bytes_from_os_string;
 use crate::utils::files::get_bytes_from_path;
 use crate::utils::files::get_path_from_bytes;
+use crate::utils::hg_path::hg_path_to_path_buf;
 use crate::utils::hg_path::HgPath;
 use crate::BadMatch;
+use crate::BadType;
 use crate::DirstateStatus;
 use crate::HgPathCow;
 use crate::PatternFileWarning;
@@ -24,6 +26,7 @@
 use sha1::{Digest, Sha1};
 use std::borrow::Cow;
 use std::io;
+use std::os::unix::prelude::FileTypeExt;
 use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Mutex;
@@ -155,6 +158,18 @@
         root_cached_mtime,
         is_at_repo_root,
     )?;
+    if let Some(file_set) = common.matcher.file_set() {
+        for file in file_set {
+            if !file.is_empty() && !dmap.has_node(file)? {
+                let path = hg_path_to_path_buf(file)?;
+                if let io::Result::Err(error) =
+                    root_dir.join(path).symlink_metadata()
+                {
+                    common.io_error(error, file)
+                }
+            }
+        }
+    }
     let mut outcome = common.outcome.into_inner().unwrap();
     let new_cacheable = common.new_cacheable_directories.into_inner().unwrap();
     let outdated = common.outdated_cached_directories.into_inner().unwrap();
@@ -367,6 +382,16 @@
         false
     }
 
+    fn should_visit(set: &VisitChildrenSet, basename: &HgPath) -> bool {
+        match set {
+            VisitChildrenSet::This | VisitChildrenSet::Recursive => true,
+            VisitChildrenSet::Empty => false,
+            VisitChildrenSet::Set(children_to_visit) => {
+                children_to_visit.contains(basename)
+            }
+        }
+    }
+
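`should_visit` is the hook that lets a matcher prune whole directories during the status walk. A simplified self-contained sketch of the idea (plain strings instead of `HgPath`, toy enum mirroring `VisitChildrenSet`):

```rust
use std::collections::HashSet;

/// Simplified version of the matcher's answer for one directory.
enum Visit {
    /// Look at every child of this directory.
    This,
    /// Look at every child, recursively; no more matcher queries needed.
    Recursive,
    /// Nothing under this directory can match: prune it entirely.
    Empty,
    /// Only these named children can match.
    Set(HashSet<String>),
}

fn should_visit(set: &Visit, basename: &str) -> bool {
    match set {
        Visit::This | Visit::Recursive => true,
        Visit::Empty => false,
        Visit::Set(children) => children.contains(basename),
    }
}

fn main() {
    let only_src = Visit::Set(HashSet::from(["src".to_owned()]));
    // A status walk would descend into "src" but skip "target" without
    // even calling `read_dir` on it.
    assert!(should_visit(&only_src, "src"));
    assert!(!should_visit(&only_src, "target"));
}
```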
     /// Returns whether all child entries of the filesystem directory have a
     /// corresponding dirstate node or are ignored.
     fn traverse_fs_directory_and_dirstate<'ancestor>(
@@ -378,21 +403,27 @@
         cached_directory_mtime: Option<TruncatedTimestamp>,
         is_at_repo_root: bool,
     ) -> Result<bool, DirstateV2ParseError> {
+        let children_set = self.matcher.visit_children_set(directory_hg_path);
+        if let VisitChildrenSet::Empty = children_set {
+            return Ok(false);
+        }
         if self.can_skip_fs_readdir(directory_entry, cached_directory_mtime) {
             dirstate_nodes
                 .par_iter()
                 .map(|dirstate_node| {
                     let fs_path = &directory_entry.fs_path;
-                    let fs_path = fs_path.join(get_path_from_bytes(
-                        dirstate_node.base_name(self.dmap.on_disk)?.as_bytes(),
-                    ));
+                    let basename =
+                        dirstate_node.base_name(self.dmap.on_disk)?.as_bytes();
+                    let fs_path = fs_path.join(get_path_from_bytes(basename));
+                    if !Self::should_visit(
+                        &children_set,
+                        HgPath::new(basename),
+                    ) {
+                        return Ok(());
+                    }
                     match std::fs::symlink_metadata(&fs_path) {
                         Ok(fs_metadata) => {
-                            let file_type =
-                                match fs_metadata.file_type().try_into() {
-                                    Ok(file_type) => file_type,
-                                    Err(_) => return Ok(()),
-                                };
+                            let file_type = fs_metadata.file_type().into();
                             let entry = DirEntry {
                                 hg_path: Cow::Borrowed(
                                     dirstate_node
@@ -472,6 +503,15 @@
             .par_bridge()
             .map(|pair| {
                 use itertools::EitherOrBoth::*;
+                let basename = match &pair {
+                    Left(dirstate_node) | Both(dirstate_node, _) => HgPath::new(
+                        dirstate_node.base_name(self.dmap.on_disk)?.as_bytes(),
+                    ),
+                    Right(fs_entry) => &fs_entry.hg_path,
+                };
+                if !Self::should_visit(&children_set, basename) {
+                    return Ok(false);
+                }
                 let has_dirstate_node_or_is_ignored = match pair {
                     Both(dirstate_node, fs_entry) => {
                         self.traverse_fs_and_dirstate(
@@ -513,6 +553,15 @@
                 // replaced by a directory or something else.
                 self.mark_removed_or_deleted_if_file(&dirstate_node)?;
             }
+            if let Some(bad_type) = fs_entry.is_bad() {
+                if self.matcher.exact_match(hg_path) {
+                    let path = dirstate_node.full_path(self.dmap.on_disk)?;
+                    self.outcome.lock().unwrap().bad.push((
+                        path.to_owned().into(),
+                        BadMatch::BadType(bad_type),
+                    ))
+                }
+            }
             if fs_entry.is_dir() {
                 if self.options.collect_traversed_dirs {
                     self.outcome
@@ -866,21 +915,27 @@
     File,
     Directory,
     Symlink,
+    BadType(BadType),
 }
 
-impl TryFrom<std::fs::FileType> for FakeFileType {
-    type Error = ();
-
-    fn try_from(f: std::fs::FileType) -> Result<Self, Self::Error> {
+impl From<std::fs::FileType> for FakeFileType {
+    fn from(f: std::fs::FileType) -> Self {
         if f.is_dir() {
-            Ok(Self::Directory)
+            Self::Directory
         } else if f.is_file() {
-            Ok(Self::File)
+            Self::File
        } else if f.is_symlink() {
-            Ok(Self::Symlink)
+            Self::Symlink
+        } else if f.is_fifo() {
+            Self::BadType(BadType::FIFO)
+        } else if f.is_block_device() {
+            Self::BadType(BadType::BlockDevice)
+        } else if f.is_char_device() {
+            Self::BadType(BadType::CharacterDevice)
+        } else if f.is_socket() {
+            Self::BadType(BadType::Socket)
         } else {
-            // Things like FIFO etc.
-            Err(())
+            Self::BadType(BadType::Unknown)
         }
     }
 }
@@ -942,10 +997,7 @@
         };
         let filename =
             Cow::Owned(get_bytes_from_os_string(file_name).into());
-        let file_type = match FakeFileType::try_from(file_type) {
-            Ok(file_type) => file_type,
-            Err(_) => continue,
-        };
+        let file_type = FakeFileType::from(file_type);
         results.push(DirEntry {
             hg_path: filename,
             fs_path: Cow::Owned(full_path.to_path_buf()),
@@ -974,6 +1026,13 @@
     fn is_symlink(&self) -> bool {
         self.file_type == FakeFileType::Symlink
     }
+
+    fn is_bad(&self) -> Option<BadType> {
+        match self.file_type {
+            FakeFileType::BadType(ty) => Some(ty),
+            _ => None,
+        }
+    }
 }
 
 /// Return the `mtime` of a temporary file newly-created in the `.hg` directory
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/discovery.rs
--- a/rust/hg-core/src/discovery.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/discovery.rs Wed Oct 11 02:02:46 2023 +0200
@@ -481,6 +481,13 @@
     use super::*;
     use crate::testing::SampleGraph;
 
+    /// Shorthand to reduce boilerplate when creating [`Revision`] for testing
+    macro_rules! R {
+        ($revision:literal) => {
+            Revision($revision)
+        };
+    }
+
     /// A PartialDiscovery as for pushing all the heads of `SampleGraph`
     ///
     /// To avoid actual randomness in these tests, we give it a fixed
@@ -488,7 +495,7 @@
     fn full_disco() -> PartialDiscovery<SampleGraph> {
         PartialDiscovery::new_with_seed(
             SampleGraph,
-            vec![10, 11, 12, 13],
+            vec![R!(10), R!(11), R!(12), R!(13)],
             [0; 16],
             true,
             true,
@@ -501,7 +508,7 @@
     fn disco12() -> PartialDiscovery<SampleGraph> {
         PartialDiscovery::new_with_seed(
             SampleGraph,
-            vec![12],
+            vec![R!(12)],
             [0; 16],
             true,
             true,
@@ -540,7 +547,7 @@
         assert!(!disco.has_info());
         assert_eq!(disco.stats().undecided, None);
 
-        disco.add_common_revisions(vec![11, 12])?;
+        disco.add_common_revisions(vec![R!(11), R!(12)])?;
         assert!(disco.has_info());
         assert!(!disco.is_complete());
         assert!(disco.missing.is_empty());
@@ -559,14 +566,14 @@
     #[test]
     fn test_discovery() -> Result<(), GraphError> {
         let mut disco = full_disco();
-        disco.add_common_revisions(vec![11, 12])?;
-        disco.add_missing_revisions(vec![8, 10])?;
+        disco.add_common_revisions(vec![R!(11), R!(12)])?;
+        disco.add_missing_revisions(vec![R!(8), R!(10)])?;
         assert_eq!(sorted_undecided(&disco), vec![5]);
         assert_eq!(sorted_missing(&disco), vec![8, 10, 13]);
         assert!(!disco.is_complete());
 
-        disco.add_common_revisions(vec![5])?;
-        assert_eq!(sorted_undecided(&disco), vec![]);
+        disco.add_common_revisions(vec![R!(5)])?;
+        assert_eq!(sorted_undecided(&disco), Vec::<Revision>::new());
         assert_eq!(sorted_missing(&disco), vec![8, 10, 13]);
         assert!(disco.is_complete());
         assert_eq!(sorted_common_heads(&disco)?, vec![5, 11, 12]);
@@ -577,12 +584,12 @@
     fn test_add_missing_early_continue() -> Result<(), GraphError> {
         eprintln!("test_add_missing_early_stop");
         let mut disco = full_disco();
-        disco.add_common_revisions(vec![13, 3, 4])?;
+        disco.add_common_revisions(vec![R!(13), R!(3), R!(4)])?;
         disco.ensure_children_cache()?;
         // 12 is grand-child of 6 through 9
         // passing them in this order maximizes the chances of the
         // early continue to do the wrong thing
-        disco.add_missing_revisions(vec![6, 9, 12])?;
+        disco.add_missing_revisions(vec![R!(6), R!(9), R!(12)])?;
         assert_eq!(sorted_undecided(&disco), vec![5, 7, 10, 11]);
         assert_eq!(sorted_missing(&disco), vec![6, 9, 12]);
         assert!(!disco.is_complete());
@@ -591,18 +598,24 @@
 
     #[test]
     fn test_limit_sample_no_need_to() {
-        let sample = vec![1, 2, 3, 4];
+        let sample = vec![R!(1), R!(2), R!(3), R!(4)];
         assert_eq!(full_disco().limit_sample(sample, 10), vec![1, 2, 3, 4]);
     }
 
     #[test]
     fn test_limit_sample_less_than_half() {
-        assert_eq!(full_disco().limit_sample((1..6).collect(), 2), vec![2, 5]);
+        assert_eq!(
+            full_disco().limit_sample((1..6).map(Revision).collect(), 2),
+            vec![2, 5]
+        );
     }
 
     #[test]
     fn test_limit_sample_more_than_half() {
-        assert_eq!(full_disco().limit_sample((1..4).collect(), 2), vec![1, 2]);
+        assert_eq!(
+            full_disco().limit_sample((1..4).map(Revision).collect(), 2),
+            vec![1, 2]
+        );
     }
 
     #[test]
@@ -610,7 +623,10 @@
         let mut disco = full_disco();
         disco.randomize = false;
         assert_eq!(
-            disco.limit_sample(vec![1, 8, 13, 5, 7, 3], 4),
+            disco.limit_sample(
+                vec![R!(1), R!(8), R!(13), R!(5), R!(7), R!(3)],
+                4
+            ),
             vec![1, 3, 5, 7]
         );
     }
@@ -618,7 +634,7 @@
     #[test]
     fn test_quick_sample_enough_undecided_heads() -> Result<(), GraphError> {
         let mut disco = full_disco();
-        disco.undecided = Some((1..=13).collect());
+        disco.undecided = Some((1..=13).map(Revision).collect());
 
         let mut sample_vec = disco.take_quick_sample(vec![], 4)?;
         sample_vec.sort_unstable();
@@ -631,7 +647,7 @@
         let mut disco = disco12();
         disco.ensure_undecided()?;
 
-        let mut sample_vec = disco.take_quick_sample(vec![12], 4)?;
+        let mut sample_vec = disco.take_quick_sample(vec![R!(12)], 4)?;
         sample_vec.sort_unstable();
         // r12's only parent is r9, whose unique grand-parent through the
         // diamond shape is r4. This ends there because the distance from r4
@@ -646,16 +662,16 @@
         disco.ensure_children_cache()?;
 
         let cache = disco.children_cache.unwrap();
-        assert_eq!(cache.get(&2).cloned(), Some(vec![4]));
-        assert_eq!(cache.get(&10).cloned(), None);
+        assert_eq!(cache.get(&R!(2)).cloned(), Some(vec![R!(4)]));
+        assert_eq!(cache.get(&R!(10)).cloned(), None);
 
-        let mut children_4 = cache.get(&4).cloned().unwrap();
+        let mut children_4 = cache.get(&R!(4)).cloned().unwrap();
         children_4.sort_unstable();
-        assert_eq!(children_4, vec![5, 6, 7]);
+        assert_eq!(children_4, vec![R!(5), R!(6), R!(7)]);
 
-        let mut children_7 = cache.get(&7).cloned().unwrap();
+        let mut children_7 = cache.get(&R!(7)).cloned().unwrap();
         children_7.sort_unstable();
-        assert_eq!(children_7, vec![9, 11]);
+        assert_eq!(children_7, vec![R!(9), R!(11)]);
         Ok(())
     }
 
@@ -664,14 +680,14 @@
     fn test_complete_sample() {
         let mut disco = full_disco();
         let undecided: HashSet<Revision> =
-            [4, 7, 9, 2, 3].iter().cloned().collect();
+            [4, 7, 9, 2, 3].iter().cloned().map(Revision).collect();
         disco.undecided = Some(undecided);
 
-        let mut sample = vec![0];
+        let mut sample = vec![R!(0)];
         disco.random_complete_sample(&mut sample, 3);
         assert_eq!(sample.len(), 3);
 
-        let mut sample = vec![2, 4, 7];
+        let mut sample = vec![R!(2), R!(4), R!(7)];
         disco.random_complete_sample(&mut sample, 1);
         assert_eq!(sample.len(), 3);
     }
 
@@ -679,7 +695,7 @@
     #[test]
     fn test_bidirectional_sample() -> Result<(), GraphError> {
         let mut disco = full_disco();
-        disco.undecided = Some((0..=13).into_iter().collect());
+        disco.undecided = Some((0..=13).into_iter().map(Revision).collect());
 
         let (sample_set, size) = disco.bidirectional_sample(7)?;
         assert_eq!(size, 7);
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/filepatterns.rs
--- a/rust/hg-core/src/filepatterns.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/filepatterns.rs Wed Oct 11 02:02:46 2023 +0200
@@ -24,7 +24,7 @@
 lazy_static! {
     static ref RE_ESCAPE: Vec<Vec<u8>> = {
         let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect();
-        let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c";
+        let to_escape = b"()[]{}?*+-|^$\\.&~#\t\n\r\x0b\x0c";
         for byte in to_escape {
             v[*byte as usize].insert(0, b'\\');
         }
@@ -36,9 +36,6 @@
 const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] =
     &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")];
 
-/// Appended to the regexp of globs
-const GLOB_SUFFIX: &[u8; 7] = b"(?:/|$)";
-
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum PatternSyntax {
     /// A regular expression
@@ -181,7 +178,7 @@
 /// Builds the regex that corresponds to the given pattern.
 /// If within a `syntax: regexp` context, returns the pattern,
 /// otherwise, returns the corresponding regex.
-fn _build_single_regex(entry: &IgnorePattern) -> Vec<u8> {
+fn _build_single_regex(entry: &IgnorePattern, glob_suffix: &[u8]) -> Vec<u8> {
     let IgnorePattern {
         syntax, pattern, ..
     } = entry;
@@ -245,13 +242,13 @@
         PatternSyntax::RelGlob => {
             let glob_re = glob_to_re(pattern);
             if let Some(rest) = glob_re.drop_prefix(b"[^/]*") {
-                [b".*", rest, GLOB_SUFFIX].concat()
+                [b".*", rest, glob_suffix].concat()
             } else {
-                [b"(?:.*/)?", glob_re.as_slice(), GLOB_SUFFIX].concat()
+                [b"(?:.*/)?", glob_re.as_slice(), glob_suffix].concat()
             }
         }
         PatternSyntax::Glob | PatternSyntax::RootGlob => {
-            [glob_to_re(pattern).as_slice(), GLOB_SUFFIX].concat()
+            [glob_to_re(pattern).as_slice(), glob_suffix].concat()
         }
         PatternSyntax::Include
         | PatternSyntax::SubInclude
@@ -309,6 +306,7 @@
 /// that don't need to be transformed into a regex.
 pub fn build_single_regex(
     entry: &IgnorePattern,
+    glob_suffix: &[u8],
 ) -> Result<Option<Vec<u8>>, PatternError> {
     let IgnorePattern {
         pattern, syntax, ..
@@ -317,6 +315,7 @@
         PatternSyntax::RootGlob
         | PatternSyntax::Path
        | PatternSyntax::RelGlob
+        | PatternSyntax::RelPath
        | PatternSyntax::RootFiles => normalize_path_bytes(pattern),
        PatternSyntax::Include | PatternSyntax::SubInclude => {
            return Err(PatternError::NonRegexPattern(entry.clone()))
@@ -330,22 +329,27 @@
     } else {
         let mut entry = entry.clone();
         entry.pattern = pattern;
-        Ok(Some(_build_single_regex(&entry)))
+        Ok(Some(_build_single_regex(&entry, glob_suffix)))
     }
 }
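Threading `glob_suffix` through instead of the fixed `GLOB_SUFFIX` constant changes how translated globs anchor: `(?:/|$)` matches a path or anything under it, while `PatternMatcher` (later in this patch) passes `b"$"` for exact-path semantics. A small demonstration using the `regex` crate, with hand-written pattern bytes mirroring what glob translation produces for `*.o`:

```rust
use regex::bytes::Regex;

fn main() {
    // With the directory-or-end suffix `(?:/|$)`, used for ignore-style
    // matching, `*.o` also matches everything under a directory `x.o/`.
    let dir_or_end = Regex::new(r"^[^/]*\.o(?:/|$)").unwrap();
    assert!(dir_or_end.is_match(b"x.o"));
    assert!(dir_or_end.is_match(b"x.o/sub/file"));

    // With a bare `$` suffix, the pattern only matches the path itself.
    let end_only = Regex::new(r"^[^/]*\.o$").unwrap();
    assert!(end_only.is_match(b"x.o"));
    assert!(!end_only.is_match(b"x.o/sub/file"));
}
```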
 lazy_static! {
-    static ref SYNTAXES: FastHashMap<&'static [u8], &'static [u8]> = {
+    static ref SYNTAXES: FastHashMap<&'static [u8], PatternSyntax> = {
         let mut m = FastHashMap::default();
 
-        m.insert(b"re".as_ref(), b"relre:".as_ref());
-        m.insert(b"regexp".as_ref(), b"relre:".as_ref());
-        m.insert(b"glob".as_ref(), b"relglob:".as_ref());
-        m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
-        m.insert(b"include".as_ref(), b"include:".as_ref());
-        m.insert(b"subinclude".as_ref(), b"subinclude:".as_ref());
-        m.insert(b"path".as_ref(), b"path:".as_ref());
-        m.insert(b"rootfilesin".as_ref(), b"rootfilesin:".as_ref());
+        m.insert(b"re:".as_ref(), PatternSyntax::Regexp);
+        m.insert(b"regexp:".as_ref(), PatternSyntax::Regexp);
+        m.insert(b"path:".as_ref(), PatternSyntax::Path);
+        m.insert(b"filepath:".as_ref(), PatternSyntax::FilePath);
+        m.insert(b"relpath:".as_ref(), PatternSyntax::RelPath);
+        m.insert(b"rootfilesin:".as_ref(), PatternSyntax::RootFiles);
+        m.insert(b"relglob:".as_ref(), PatternSyntax::RelGlob);
+        m.insert(b"relre:".as_ref(), PatternSyntax::RelRegexp);
+        m.insert(b"glob:".as_ref(), PatternSyntax::Glob);
+        m.insert(b"rootglob:".as_ref(), PatternSyntax::RootGlob);
+        m.insert(b"include:".as_ref(), PatternSyntax::Include);
+        m.insert(b"subinclude:".as_ref(), PatternSyntax::SubInclude);
+
         m
     };
 }
@@ -358,11 +362,50 @@
     NoSuchFile(PathBuf),
 }
 
+pub fn parse_one_pattern(
+    pattern: &[u8],
+    source: &Path,
+    default: PatternSyntax,
+    normalize: bool,
+) -> IgnorePattern {
+    let mut pattern_bytes: &[u8] = pattern;
+    let mut syntax = default;
+
+    for (s, val) in SYNTAXES.iter() {
+        if let Some(rest) = pattern_bytes.drop_prefix(s) {
+            syntax = val.clone();
+            pattern_bytes = rest;
+            break;
+        }
+    }
+
+    let pattern = match syntax {
+        PatternSyntax::RootGlob
+        | PatternSyntax::Path
+        | PatternSyntax::Glob
+        | PatternSyntax::RelGlob
+        | PatternSyntax::RelPath
+        | PatternSyntax::RootFiles
+            if normalize =>
+        {
+            normalize_path_bytes(pattern_bytes)
+        }
+        _ => pattern_bytes.to_vec(),
+    };
+
+    IgnorePattern {
+        syntax,
+        pattern,
+        source: source.to_owned(),
+    }
+}
+
 pub fn parse_pattern_file_contents(
     lines: &[u8],
     file_path: &Path,
-    default_syntax_override: Option<&[u8]>,
+    default_syntax_override: Option<PatternSyntax>,
     warn: bool,
+    relativize: bool,
 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
     let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
 
@@ -372,11 +415,9 @@
     let mut warnings: Vec<PatternFileWarning> = vec![];
 
     let mut current_syntax =
-        default_syntax_override.unwrap_or_else(|| b"relre:".as_ref());
+        default_syntax_override.unwrap_or(PatternSyntax::RelRegexp);
 
-    for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
-        let line_number = line_number + 1;
-
+    for mut line in lines.split(|c| *c == b'\n') {
         let line_buf;
         if line.contains(&b'#') {
             if let Some(cap) = comment_regex.captures(line) {
@@ -386,7 +427,7 @@
             line = &line_buf;
         }
 
-        let mut line = line.trim_end();
+        let line = line.trim_end();
 
         if line.is_empty() {
             continue;
@@ -395,50 +436,62 @@
 
         if let Some(syntax) = line.drop_prefix(b"syntax:") {
             let syntax = syntax.trim();
 
-            if let Some(rel_syntax) = SYNTAXES.get(syntax) {
-                current_syntax = rel_syntax;
+            if let Some(parsed) =
+                SYNTAXES.get([syntax, &b":"[..]].concat().as_slice())
+            {
+                current_syntax = parsed.clone();
             } else if warn {
                 warnings.push(PatternFileWarning::InvalidSyntax(
                     file_path.to_owned(),
                     syntax.to_owned(),
                 ));
             }
-            continue;
+        } else {
+            let pattern = parse_one_pattern(
+                line,
+                file_path,
+                current_syntax.clone(),
+                false,
+            );
+            inputs.push(if relativize {
+                pattern.to_relative()
+            } else {
+                pattern
+            })
         }
-
-        let mut line_syntax: &[u8] = current_syntax;
-
-        for (s, rels) in SYNTAXES.iter() {
-            if let Some(rest) = line.drop_prefix(rels) {
-                line_syntax = rels;
-                line = rest;
-                break;
-            }
-            if let Some(rest) = line.drop_prefix(&[s, &b":"[..]].concat()) {
-                line_syntax = rels;
-                line = rest;
-                break;
-            }
-        }
-
-        inputs.push(IgnorePattern::new(
-            parse_pattern_syntax(line_syntax).map_err(|e| match e {
-                PatternError::UnsupportedSyntax(syntax) => {
-                    PatternError::UnsupportedSyntaxInFile(
-                        syntax,
-                        file_path.to_string_lossy().into(),
-                        line_number,
-                    )
-                }
-                _ => e,
-            })?,
-            line,
-            file_path,
-        ));
     }
 
     Ok((inputs, warnings))
 }
 
+pub fn parse_pattern_args(
+    patterns: Vec<Vec<u8>>,
+    cwd: &Path,
+    root: &Path,
+) -> Result<Vec<IgnorePattern>, HgPathError> {
+    let mut ignore_patterns: Vec<IgnorePattern> = Vec::new();
+    for pattern in patterns {
+        let pattern = parse_one_pattern(
+            &pattern,
+            Path::new(""),
+            PatternSyntax::RelPath,
+            true,
+        );
+        match pattern.syntax {
+            PatternSyntax::RelGlob | PatternSyntax::RelPath => {
+                let name = get_path_from_bytes(&pattern.pattern);
+                let canon = canonical_path(root, cwd, name)?;
+                ignore_patterns.push(IgnorePattern {
+                    syntax: pattern.syntax,
+                    pattern: get_bytes_from_path(canon),
+                    source: pattern.source,
+                })
+            }
+            _ => ignore_patterns.push(pattern.to_owned()),
+        };
+    }
+    Ok(ignore_patterns)
+}
+
 pub fn read_pattern_file(
     file_path: &Path,
     warn: bool,
@@ -447,7 +500,7 @@
     match std::fs::read(file_path) {
         Ok(contents) => {
             inspect_pattern_bytes(file_path, &contents);
-            parse_pattern_file_contents(&contents, file_path, None, warn)
+            parse_pattern_file_contents(&contents, file_path, None, warn, true)
         }
         Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok((
             vec![],
@@ -473,6 +526,23 @@
             source: source.to_owned(),
         }
     }
+
+    pub fn to_relative(self) -> Self {
+        let Self {
+            syntax,
+            pattern,
+            source,
+        } = self;
+        Self {
+            syntax: match syntax {
+                PatternSyntax::Regexp => PatternSyntax::RelRegexp,
+                PatternSyntax::Glob => PatternSyntax::RelGlob,
+                x => x,
+            },
+            pattern,
+            source,
+        }
+    }
 }
 
 pub type PatternResult<T> = Result<T, PatternError>;
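`parse_one_pattern` reduces syntax selection to a first-match prefix strip over the `SYNTAXES` table, falling back to the caller's default. The same logic as a standalone sketch (string keys and a toy `Syntax` enum instead of the byte-oriented originals):

```rust
/// Toy syntax kinds for this sketch; the real table maps byte prefixes
/// like `b"rootglob:"` to `PatternSyntax` variants.
#[derive(Clone, Debug, PartialEq)]
enum Syntax {
    Regexp,
    Glob,
    RootGlob,
}

/// Strip the first matching `kind:` prefix; fall back to `default`.
fn parse_one(pattern: &str, default: Syntax) -> (Syntax, String) {
    let table = [
        ("re:", Syntax::Regexp),
        ("glob:", Syntax::Glob),
        ("rootglob:", Syntax::RootGlob),
    ];
    for (prefix, syntax) in table {
        if let Some(rest) = pattern.strip_prefix(prefix) {
            return (syntax, rest.to_owned());
        }
    }
    (default, pattern.to_owned())
}

fn main() {
    assert_eq!(
        parse_one("rootglob:*.o", Syntax::Glob),
        (Syntax::RootGlob, "*.o".to_owned())
    );
    // No recognized prefix: the caller-provided default syntax applies.
    assert_eq!(
        parse_one("src/**", Syntax::Glob),
        (Syntax::Glob, "src/**".to_owned())
    );
}
```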
@@ -612,8 +682,8 @@
         assert_eq!(escape_pattern(untouched), untouched.to_vec());
         // All escape codes
         assert_eq!(
-            escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#),
-            br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"#
+            escape_pattern(br#"()[]{}?*+-|^$\\.&~#\t\n\r\v\f"#),
+            br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\\t\\n\\r\\v\\f"#
                 .to_vec()
         );
     }
@@ -639,7 +709,8 @@
                 lines,
                 Path::new("file_path"),
                 None,
-                false
+                false,
+                true,
             )
             .unwrap()
             .0,
                 lines,
                 Path::new("file_path"),
                 None,
-                false
+                false,
+                true,
             )
             .unwrap()
             .0,
                 lines,
                 Path::new("file_path"),
                 None,
-                false
+                false,
+                true,
             )
             .unwrap()
             .0,
     #[test]
     fn test_build_single_regex() {
         assert_eq!(
-            build_single_regex(&IgnorePattern::new(
-                PatternSyntax::RelGlob,
-                b"rust/target/",
-                Path::new("")
-            ))
+            build_single_regex(
+                &IgnorePattern::new(
+                    PatternSyntax::RelGlob,
+                    b"rust/target/",
+                    Path::new("")
+                ),
+                b"(?:/|$)"
+            )
             .unwrap(),
             Some(br"(?:.*/)?rust/target(?:/|$)".to_vec()),
         );
         assert_eq!(
-            build_single_regex(&IgnorePattern::new(
-                PatternSyntax::Regexp,
-                br"rust/target/\d+",
-                Path::new("")
-            ))
+            build_single_regex(
+                &IgnorePattern::new(
+                    PatternSyntax::Regexp,
+                    br"rust/target/\d+",
+                    Path::new("")
+                ),
+                b"(?:/|$)"
+            )
             .unwrap(),
             Some(br"rust/target/\d+".to_vec()),
         );
     }
 
     #[test]
     fn test_build_single_regex_shortcut() {
         assert_eq!(
-            build_single_regex(&IgnorePattern::new(
-                PatternSyntax::RootGlob,
-                b"",
-                Path::new("")
-            ))
+            build_single_regex(
+                &IgnorePattern::new(
+                    PatternSyntax::RootGlob,
+                    b"",
+                    Path::new("")
+                ),
+                b"(?:/|$)"
+            )
             .unwrap(),
             None,
         );
         assert_eq!(
-            build_single_regex(&IgnorePattern::new(
-                PatternSyntax::RootGlob,
-                b"whatever",
-                Path::new("")
-            ))
+            build_single_regex(
+                &IgnorePattern::new(
+                    PatternSyntax::RootGlob,
+                    b"whatever",
+                    Path::new("")
+                ),
+                b"(?:/|$)"
+            )
             .unwrap(),
             None,
         );
         assert_eq!(
-            build_single_regex(&IgnorePattern::new(
-                PatternSyntax::RootGlob,
-                b"*.o",
-                Path::new("")
-            ))
+            build_single_regex(
+                &IgnorePattern::new(
+                    PatternSyntax::RootGlob,
+                    b"*.o",
+                    Path::new("")
+                ),
+                b"(?:/|$)"
+            )
             .unwrap(),
             Some(br"[^/]*\.o(?:/|$)".to_vec()),
         );
     }
 
     #[test]
     fn test_build_single_relregex() {
         assert_eq!(
-            build_single_regex(&IgnorePattern::new(
-                PatternSyntax::RelRegexp,
-                b"^ba{2}r",
-                Path::new("")
-            ))
+            build_single_regex(
+                &IgnorePattern::new(
+                    PatternSyntax::RelRegexp,
+                    b"^ba{2}r",
+                    Path::new("")
+                ),
+                b"(?:/|$)"
+            )
             .unwrap(),
             Some(b"^ba{2}r".to_vec()),
         );
         assert_eq!(
-            build_single_regex(&IgnorePattern::new(
-                PatternSyntax::RelRegexp,
-                b"ba{2}r",
-                Path::new("")
-            ))
+            build_single_regex(
+                &IgnorePattern::new(
+                    PatternSyntax::RelRegexp,
+                    b"ba{2}r",
+                    Path::new("")
+                ),
+                b"(?:/|$)"
+            )
             .unwrap(),
             Some(b".*ba{2}r".to_vec()),
         );
         assert_eq!(
-            build_single_regex(&IgnorePattern::new(
-                PatternSyntax::RelRegexp,
-                b"(?ia)ba{2}r",
-                Path::new("")
-            ))
+            build_single_regex(
+                &IgnorePattern::new(
+                    PatternSyntax::RelRegexp,
+                    b"(?ia)ba{2}r",
+                    Path::new("")
+                ),
+                b"(?:/|$)"
+            )
             .unwrap(),
             Some(b"(?ia:.*ba{2}r)".to_vec()),
         );
         assert_eq!(
-            build_single_regex(&IgnorePattern::new(
-                PatternSyntax::RelRegexp,
-                b"(?ia)^ba{2}r",
-                Path::new("")
-            ))
+            build_single_regex(
+                &IgnorePattern::new(
+                    PatternSyntax::RelRegexp,
+                    b"(?ia)^ba{2}r",
+                    Path::new("")
+                ),
+                b"(?:/|$)"
+            )
             .unwrap(),
             Some(b"(?ia:^ba{2}r)".to_vec()),
         );
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/lib.rs
--- a/rust/hg-core/src/lib.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/lib.rs Wed Oct 11 02:02:46 2023 +0200
@@ -25,7 +25,7 @@
     DirstateEntry, DirstateParents, EntryState,
 };
 pub mod copy_tracing;
-mod filepatterns;
+pub mod filepatterns;
 pub mod matchers;
 pub mod repo;
 pub mod revlog;
@@ -66,6 +66,12 @@
     InvalidPath(HgPathError),
 }
 
+impl From<HgPathError> for DirstateMapError {
+    fn from(error: HgPathError) -> Self {
+        Self::InvalidPath(error)
+    }
+}
+
 impl fmt::Display for DirstateMapError {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
@@ -83,6 +89,12 @@
     Common(errors::HgError),
 }
 
+impl From<HgPathError> for DirstateError {
+    fn from(error: HgPathError) -> Self {
+        Self::Map(DirstateMapError::InvalidPath(error))
+    }
+}
+
 impl fmt::Display for DirstateError {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/matchers.rs
--- a/rust/hg-core/src/matchers.rs Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-core/src/matchers.rs Wed Oct 11 02:02:46 2023 +0200
@@ -15,11 +15,10 @@
     },
     utils::{
         files::find_dirs,
-        hg_path::{HgPath, HgPathBuf},
+        hg_path::{HgPath, HgPathBuf, HgPathError},
         Escaped,
     },
-    DirsMultiset, DirstateMapError, FastHashMap, IgnorePattern, PatternError,
-    PatternSyntax,
+    DirsMultiset, FastHashMap, IgnorePattern, PatternError, PatternSyntax,
 };
 
 use crate::dirstate::status::IgnoreFnType;
@@ -177,7 +176,7 @@
 }
 
 impl FileMatcher {
-    pub fn new(files: Vec<HgPathBuf>) -> Result<Self, DirstateMapError> {
+    pub fn new(files: Vec<HgPathBuf>) -> Result<Self, HgPathError> {
         let dirs = DirsMultiset::from_manifest(&files)?;
         Ok(Self {
             files: HashSet::from_iter(files.into_iter()),
@@ -251,6 +250,118 @@
     }
 }
 
+/// Matches a set of (kind, pat, source) against a 'root' directory.
+/// (Currently the 'root' directory is effectively always empty)
+/// ```
+/// use hg::{
+///     matchers::{PatternMatcher, Matcher},
+///     IgnorePattern,
+///     PatternSyntax,
+///     utils::hg_path::{HgPath, HgPathBuf}
+/// };
+/// use std::collections::HashSet;
+/// use std::path::Path;
+///
+/// let ignore_patterns : Vec<IgnorePattern> =
+/// vec![IgnorePattern::new(PatternSyntax::Regexp, br".*\.c$", Path::new("")),
+///      IgnorePattern::new(PatternSyntax::Path, b"foo/a", Path::new("")),
+///      IgnorePattern::new(PatternSyntax::RelPath, b"b", Path::new("")),
+///      IgnorePattern::new(PatternSyntax::Glob, b"*.h", Path::new("")),
+/// ];
+/// let matcher = PatternMatcher::new(ignore_patterns).unwrap();
+///
+/// assert_eq!(matcher.matches(HgPath::new(b"main.c")), true); // matches re:.*\.c$
+/// assert_eq!(matcher.matches(HgPath::new(b"b.txt")), false);
+/// assert_eq!(matcher.matches(HgPath::new(b"foo/a")), true); // matches path:foo/a
+/// assert_eq!(matcher.matches(HgPath::new(b"a")), false); // does not match path:b, since 'root' is 'foo'
+/// assert_eq!(matcher.matches(HgPath::new(b"b")), true); // matches relpath:b, since 'root' is 'foo'
+/// assert_eq!(matcher.matches(HgPath::new(b"lib.h")), true); // matches glob:*.h
+/// assert_eq!(matcher.file_set().unwrap(),
+///            &HashSet::from([HgPathBuf::from_bytes(b""), HgPathBuf::from_bytes(b"foo/a"),
+///                            HgPathBuf::from_bytes(b""), HgPathBuf::from_bytes(b"b")]));
+/// assert_eq!(matcher.exact_match(HgPath::new(b"foo/a")), true);
+/// assert_eq!(matcher.exact_match(HgPath::new(b"b")), true);
+/// assert_eq!(matcher.exact_match(HgPath::new(b"lib.h")), false); // exact matches are for (rel)path kinds
+/// ```
+pub struct PatternMatcher<'a> {
+    patterns: Vec<u8>,
+    match_fn: IgnoreFnType<'a>,
+    /// Whether all the patterns match a prefix (i.e. recursively)
+    prefix: bool,
+    files: HashSet<HgPathBuf>,
+    dirs: DirsMultiset,
+}
+
+impl core::fmt::Debug for PatternMatcher<'_> {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("PatternMatcher")
+            .field("patterns", &String::from_utf8_lossy(&self.patterns))
+            .field("prefix", &self.prefix)
+            .field("files", &self.files)
+            .field("dirs", &self.dirs)
+            .finish()
+    }
+}
+
+impl<'a> PatternMatcher<'a> {
+    pub fn new(ignore_patterns: Vec<IgnorePattern>) -> PatternResult<Self> {
+        let (files, _) = roots_and_dirs(&ignore_patterns);
+        let dirs = DirsMultiset::from_manifest(&files)?;
+        let files: HashSet<HgPathBuf> = HashSet::from_iter(files.into_iter());
+
+        let prefix = ignore_patterns.iter().all(|k| {
+            matches!(k.syntax, PatternSyntax::Path | PatternSyntax::RelPath)
+        });
+        let (patterns, match_fn) = build_match(ignore_patterns, b"$")?;
+
+        Ok(Self {
+            patterns,
+            match_fn,
+            prefix,
+            files,
+            dirs,
+        })
+    }
+}
+
+impl<'a> Matcher for PatternMatcher<'a> {
+    fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
+        Some(&self.files)
+    }
+
+    fn exact_match(&self, filename: &HgPath) -> bool {
+        self.files.contains(filename)
+    }
+
+    fn matches(&self, filename: &HgPath) -> bool {
+        if self.files.contains(filename) {
+            return true;
+        }
+        (self.match_fn)(filename)
+    }
+
+    fn visit_children_set(&self, directory: &HgPath) -> VisitChildrenSet {
+        if self.prefix && self.files.contains(directory) {
+            return VisitChildrenSet::Recursive;
+        }
+        let path_or_parents_in_set = find_dirs(directory)
+            .any(|parent_dir| self.files.contains(parent_dir));
+        if self.dirs.contains(directory) || path_or_parents_in_set {
+            VisitChildrenSet::This
+        } else {
+            VisitChildrenSet::Empty
+        }
+    }
+
+    fn matches_everything(&self) -> bool {
+        false
+    }
+
+    fn is_exact(&self) -> bool {
+        false
+    }
+}
+
 /// Matches files that are included in the ignore rules.
 /// ```
 /// use hg::{
@@ -479,7 +590,13 @@
             m1_files.iter().cloned().filter(|f| m2.matches(f)).collect()
         })
     } else {
-        None
+        // without exact input file sets, we can't do an exact
+        // intersection, so we must over-approximate by
+        // unioning instead
+        m1.file_set().map(|m1_files| match m2.file_set() {
+            Some(m2_files) => m1_files.union(m2_files).cloned().collect(),
+            None => m1_files.iter().cloned().collect(),
+        })
     };
     Self { m1, m2, files }
 }
@@ -649,12 +766,13 @@
 /// said regex formed by the given ignore patterns.
 fn build_regex_match<'a, 'b>(
     ignore_patterns: &'a [IgnorePattern],
+    glob_suffix: &[u8],
 ) -> PatternResult<(Vec<u8>, IgnoreFnType<'b>)> {
     let mut regexps = vec![];
     let mut exact_set = HashSet::new();
 
     for pattern in ignore_patterns {
-        if let Some(re) = build_single_regex(pattern)? {
+        if let Some(re) = build_single_regex(pattern, glob_suffix)? {
             regexps.push(re);
         } else {
             let exact = normalize_path_bytes(&pattern.pattern);
@@ -754,20 +872,12 @@
     let mut parents = HashSet::new();
 
     parents.extend(
-        DirsMultiset::from_manifest(&dirs)
-            .map_err(|e| match e {
-                DirstateMapError::InvalidPath(e) => e,
-                _ => unreachable!(),
-            })?
+        DirsMultiset::from_manifest(&dirs)?
             .iter()
             .map(ToOwned::to_owned),
     );
     parents.extend(
-        DirsMultiset::from_manifest(&roots)
-            .map_err(|e| match e {
-                DirstateMapError::InvalidPath(e) => e,
-                _ => unreachable!(),
-            })?
+        DirsMultiset::from_manifest(&roots)?
            .iter()
            .map(ToOwned::to_owned),
    );
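The union fallback above is a deliberate over-approximation: a union is always a superset of the true intersection, so no candidate path is lost, and the exact per-file `matches` check still filters correctly afterwards. A toy illustration with plain string sets:

```rust
use std::collections::HashSet;

/// Toy version of the file_set fallback: when the matchers are not exact,
/// over-approximate the intersection with a union so nothing that could
/// match is dropped; precise filtering happens later, per file.
fn intersection_file_set(
    m1_files: Option<&HashSet<String>>,
    m2_files: Option<&HashSet<String>>,
) -> Option<HashSet<String>> {
    m1_files.map(|m1| match m2_files {
        Some(m2) => m1.union(m2).cloned().collect(),
        None => m1.clone(),
    })
}

fn main() {
    let m1: HashSet<String> = ["a".to_owned(), "b".to_owned()].into();
    let m2: HashSet<String> = ["b".to_owned(), "c".to_owned()].into();
    let approx = intersection_file_set(Some(&m1), Some(&m2)).unwrap();
    // The union {a, b, c} contains the true intersection {b}; files that
    // don't actually match both matchers are rejected by `matches()` later.
    assert!(approx.contains("b") && approx.len() == 3);
}
```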
@@ -783,6 +893,7 @@
 /// should be matched.
 fn build_match<'a>(
     ignore_patterns: Vec<IgnorePattern>,
+    glob_suffix: &[u8],
 ) -> PatternResult<(Vec<u8>, IgnoreFnType<'a>)> {
     let mut match_funcs: Vec<IgnoreFnType<'a>> = vec![];
     // For debugging and printing
@@ -846,7 +957,8 @@
             dirs_vec.sort();
             patterns.extend(dirs_vec.escaped_bytes());
         } else {
-            let (new_re, match_func) = build_regex_match(&ignore_patterns)?;
+            let (new_re, match_func) =
+                build_regex_match(&ignore_patterns, glob_suffix)?;
             patterns = new_re;
             match_funcs.push(match_func)
         }
@@ -925,7 +1037,7 @@
         let prefix = ignore_patterns.iter().all(|k| {
             matches!(k.syntax, PatternSyntax::Path | PatternSyntax::RelPath)
         });
-        let (patterns, match_fn) = build_match(ignore_patterns)?;
+        let (patterns, match_fn) = build_match(ignore_patterns, b"(?:/|$)")?;
 
         Ok(Self {
             patterns,
@@ -1122,6 +1234,242 @@
     }
 
     #[test]
+    fn test_patternmatcher() {
+        // VisitdirPrefix
+        let m = PatternMatcher::new(vec![IgnorePattern::new(
+            PatternSyntax::Path,
+            b"dir/subdir",
+            Path::new(""),
+        )])
+        .unwrap();
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"")),
+            VisitChildrenSet::This
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir")),
+            VisitChildrenSet::This
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir/subdir")),
+            VisitChildrenSet::Recursive
+        );
+        // OPT: This should probably be Recursive if its parent is?
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir/subdir/x")),
+            VisitChildrenSet::This
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"folder")),
+            VisitChildrenSet::Empty
+        );
+
+        // VisitchildrensetPrefix
+        let m = PatternMatcher::new(vec![IgnorePattern::new(
+            PatternSyntax::Path,
+            b"dir/subdir",
+            Path::new(""),
+        )])
+        .unwrap();
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"")),
+            VisitChildrenSet::This
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir")),
+            VisitChildrenSet::This
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir/subdir")),
+            VisitChildrenSet::Recursive
+        );
+        // OPT: This should probably be Recursive if its parent is?
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir/subdir/x")),
+            VisitChildrenSet::This
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"folder")),
+            VisitChildrenSet::Empty
+        );
+
+        // VisitdirRootfilesin
+        let m = PatternMatcher::new(vec![IgnorePattern::new(
+            PatternSyntax::RootFiles,
+            b"dir/subdir",
+            Path::new(""),
+        )])
+        .unwrap();
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir/subdir/x")),
+            VisitChildrenSet::Empty
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"folder")),
+            VisitChildrenSet::Empty
+        );
+        // FIXME: These should probably be This.
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"")),
+            VisitChildrenSet::Empty
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir")),
+            VisitChildrenSet::Empty
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir/subdir")),
+            VisitChildrenSet::Empty
+        );
+
+        // VisitchildrensetRootfilesin
+        let m = PatternMatcher::new(vec![IgnorePattern::new(
+            PatternSyntax::RootFiles,
+            b"dir/subdir",
+            Path::new(""),
+        )])
+        .unwrap();
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"dir/subdir/x")),
+            VisitChildrenSet::Empty
+        );
+        assert_eq!(
+            m.visit_children_set(HgPath::new(b"folder")),
+            VisitChildrenSet::Empty
+        );
+        // FIXME: These should probably be {'dir'}, {'subdir'} and This,
+        // respectively, or at least This for all three.
+ assert_eq!( + m.visit_children_set(HgPath::new(b"")), + VisitChildrenSet::Empty + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir")), + VisitChildrenSet::Empty + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir/subdir")), + VisitChildrenSet::Empty + ); + + // VisitdirGlob + let m = PatternMatcher::new(vec![IgnorePattern::new( + PatternSyntax::Glob, + b"dir/z*", + Path::new(""), + )]) + .unwrap(); + assert_eq!( + m.visit_children_set(HgPath::new(b"")), + VisitChildrenSet::This + ); + // FIXME: This probably should be This + assert_eq!( + m.visit_children_set(HgPath::new(b"dir")), + VisitChildrenSet::Empty + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"folder")), + VisitChildrenSet::Empty + ); + // OPT: these should probably be False. + assert_eq!( + m.visit_children_set(HgPath::new(b"dir/subdir")), + VisitChildrenSet::This + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir/subdir/x")), + VisitChildrenSet::This + ); + + // VisitchildrensetGlob + let m = PatternMatcher::new(vec![IgnorePattern::new( + PatternSyntax::Glob, + b"dir/z*", + Path::new(""), + )]) + .unwrap(); + assert_eq!( + m.visit_children_set(HgPath::new(b"")), + VisitChildrenSet::This + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"folder")), + VisitChildrenSet::Empty + ); + // FIXME: This probably should be This + assert_eq!( + m.visit_children_set(HgPath::new(b"dir")), + VisitChildrenSet::Empty + ); + // OPT: these should probably be Empty + assert_eq!( + m.visit_children_set(HgPath::new(b"dir/subdir")), + VisitChildrenSet::This + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir/subdir/x")), + VisitChildrenSet::This + ); + + // VisitdirFilepath + let m = PatternMatcher::new(vec![IgnorePattern::new( + PatternSyntax::FilePath, + b"dir/z", + Path::new(""), + )]) + .unwrap(); + assert_eq!( + m.visit_children_set(HgPath::new(b"")), + VisitChildrenSet::This + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir")), + VisitChildrenSet::This + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"folder")), + VisitChildrenSet::Empty + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir/subdir")), + VisitChildrenSet::Empty + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir/subdir/x")), + VisitChildrenSet::Empty + ); + + // VisitchildrensetFilepath + let m = PatternMatcher::new(vec![IgnorePattern::new( + PatternSyntax::FilePath, + b"dir/z", + Path::new(""), + )]) + .unwrap(); + assert_eq!( + m.visit_children_set(HgPath::new(b"")), + VisitChildrenSet::This + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"folder")), + VisitChildrenSet::Empty + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir")), + VisitChildrenSet::This + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir/subdir")), + VisitChildrenSet::Empty + ); + assert_eq!( + m.visit_children_set(HgPath::new(b"dir/subdir/x")), + VisitChildrenSet::Empty + ); + } + + #[test] fn test_includematcher() { // VisitchildrensetPrefix let matcher = IncludeMatcher::new(vec![IgnorePattern::new( diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/narrow.rs --- a/rust/hg-core/src/narrow.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/narrow.rs Wed Oct 11 02:02:46 2023 +0200 @@ -74,6 +74,7 @@ Path::new(""), None, false, + true, )?; warnings.extend(subwarnings.into_iter().map(From::from)); @@ -85,6 +86,7 @@ Path::new(""), None, false, + true, )?; if !patterns.is_empty() { warnings.extend(subwarnings.into_iter().map(From::from)); diff -r 704c3d0878d9 -r 12c308c55e53 
rust/hg-core/src/operations/cat.rs --- a/rust/hg-core/src/operations/cat.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/operations/cat.rs Wed Oct 11 02:02:46 2023 +0200 @@ -84,10 +84,10 @@ mut files: Vec<&'a HgPath>, ) -> Result, RevlogError> { let rev = crate::revset::resolve_single(revset, repo)?; - let manifest = repo.manifest_for_rev(rev)?; + let manifest = repo.manifest_for_rev(rev.into())?; let node = *repo .changelog()? - .node_from_rev(rev) + .node_from_rev(rev.into()) .expect("should succeed when repo.manifest did"); let mut results: Vec<(&'a HgPath, Vec)> = vec![]; let mut found_any = false; diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/operations/debugdata.rs --- a/rust/hg-core/src/operations/debugdata.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/operations/debugdata.rs Wed Oct 11 02:02:46 2023 +0200 @@ -33,6 +33,6 @@ Revlog::open(&repo.store_vfs(), index_file, None, use_nodemap)?; let rev = crate::revset::resolve_rev_number_or_hex_prefix(revset, &revlog)?; - let data = revlog.get_rev_data(rev)?; + let data = revlog.get_rev_data_for_checked_rev(rev)?; Ok(data.into_owned()) } diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/operations/list_tracked_files.rs --- a/rust/hg-core/src/operations/list_tracked_files.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/operations/list_tracked_files.rs Wed Oct 11 02:02:46 2023 +0200 @@ -21,7 +21,7 @@ ) -> Result { let rev = crate::revset::resolve_single(revset, repo)?; Ok(FilesForRev { - manifest: repo.manifest_for_rev(rev)?, + manifest: repo.manifest_for_rev(rev.into())?, narrow_matcher, }) } diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/repo.rs Wed Oct 11 02:02:46 2023 +0200 @@ -15,8 +15,8 @@ use crate::utils::hg_path::HgPath; use crate::utils::SliceExt; use crate::vfs::{is_dir, is_file, Vfs}; -use crate::{requirements, NodePrefix}; -use crate::{DirstateError, Revision}; +use crate::DirstateError; +use crate::{requirements, NodePrefix, UncheckedRevision}; use std::cell::{Ref, RefCell, RefMut}; use std::collections::HashSet; use std::io::Seek; @@ -562,7 +562,7 @@ /// Returns the manifest of the *changeset* with the given revision number pub fn manifest_for_rev( &self, - revision: Revision, + revision: UncheckedRevision, ) -> Result { self.manifestlog()?.data_for_node( self.changelog()? diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/revlog/changelog.rs --- a/rust/hg-core/src/revlog/changelog.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/revlog/changelog.rs Wed Oct 11 02:02:46 2023 +0200 @@ -4,6 +4,7 @@ use crate::revlog::{Revlog, RevlogEntry, RevlogError}; use crate::utils::hg_path::HgPath; use crate::vfs::Vfs; +use crate::{Graph, GraphError, UncheckedRevision}; use itertools::Itertools; use std::ascii::escape_default; use std::borrow::Cow; @@ -29,15 +30,24 @@ node: NodePrefix, ) -> Result { let rev = self.revlog.rev_from_node(node)?; - self.data_for_rev(rev) + self.entry_for_checked_rev(rev)?.data() } /// Return the [`ChangelogEntry`] for the given revision number. pub fn entry_for_rev( &self, + rev: UncheckedRevision, + ) -> Result { + let revlog_entry = self.revlog.get_entry(rev)?; + Ok(ChangelogEntry { revlog_entry }) + } + + /// Same as [`Self::entry_for_rev`] for checked revisions. 
+ fn entry_for_checked_rev( + &self, rev: Revision, ) -> Result { - let revlog_entry = self.revlog.get_entry(rev)?; + let revlog_entry = self.revlog.get_entry_for_checked_rev(rev)?; Ok(ChangelogEntry { revlog_entry }) } @@ -49,12 +59,12 @@ /// [entry_for_rev](`Self::entry_for_rev`) and doing everything from there. pub fn data_for_rev( &self, - rev: Revision, + rev: UncheckedRevision, ) -> Result { self.entry_for_rev(rev)?.data() } - pub fn node_from_rev(&self, rev: Revision) -> Option<&Node> { + pub fn node_from_rev(&self, rev: UncheckedRevision) -> Option<&Node> { self.revlog.node_from_rev(rev) } @@ -66,6 +76,12 @@ } } +impl Graph for Changelog { + fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { + self.revlog.parents(rev) + } +} + /// A specialized `RevlogEntry` for `changelog` data format /// /// This is a `RevlogEntry` with the added semantics that the associated @@ -330,12 +346,12 @@ let changelog = Changelog { revlog }; assert_eq!( - changelog.data_for_rev(NULL_REVISION)?, + changelog.data_for_rev(NULL_REVISION.into())?, ChangelogRevisionData::null() ); // same with the intermediate entry object assert_eq!( - changelog.entry_for_rev(NULL_REVISION)?.data()?, + changelog.entry_for_rev(NULL_REVISION.into())?.data()?, ChangelogRevisionData::null() ); Ok(()) diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/revlog/filelog.rs --- a/rust/hg-core/src/revlog/filelog.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/revlog/filelog.rs Wed Oct 11 02:02:46 2023 +0200 @@ -1,4 +1,5 @@ use crate::errors::HgError; +use crate::exit_codes; use crate::repo::Repo; use crate::revlog::path_encode::path_encode; use crate::revlog::NodePrefix; @@ -8,6 +9,9 @@ use crate::utils::files::get_path_from_bytes; use crate::utils::hg_path::HgPath; use crate::utils::SliceExt; +use crate::Graph; +use crate::GraphError; +use crate::UncheckedRevision; use std::path::PathBuf; /// A specialized `Revlog` to work with file data logs. @@ -16,6 +20,12 @@ revlog: Revlog, } +impl Graph for Filelog { + fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { + self.revlog.parents(rev) + } +} + impl Filelog { pub fn open_vfs( store_vfs: &crate::vfs::Vfs<'_>, @@ -39,14 +49,14 @@ file_node: impl Into, ) -> Result { let file_rev = self.revlog.rev_from_node(file_node.into())?; - self.data_for_rev(file_rev) + self.data_for_rev(file_rev.into()) } /// The given revision is that of the file as found in a filelog, not of a /// changeset. pub fn data_for_rev( &self, - file_rev: Revision, + file_rev: UncheckedRevision, ) -> Result { let data: Vec = self.revlog.get_rev_data(file_rev)?.into_owned(); Ok(FilelogRevisionData(data)) @@ -59,16 +69,25 @@ file_node: impl Into, ) -> Result { let file_rev = self.revlog.rev_from_node(file_node.into())?; - self.entry_for_rev(file_rev) + self.entry_for_checked_rev(file_rev) } /// The given revision is that of the file as found in a filelog, not of a /// changeset. 
pub fn entry_for_rev( &self, + file_rev: UncheckedRevision, + ) -> Result { + Ok(FilelogEntry(self.revlog.get_entry(file_rev)?)) + } + + fn entry_for_checked_rev( + &self, file_rev: Revision, ) -> Result { - Ok(FilelogEntry(self.revlog.get_entry(file_rev)?)) + Ok(FilelogEntry( + self.revlog.get_entry_for_checked_rev(file_rev)?, + )) } } @@ -165,7 +184,19 @@ } pub fn data(&self) -> Result { - Ok(FilelogRevisionData(self.0.data()?.into_owned())) + let data = self.0.data(); + match data { + Ok(data) => Ok(FilelogRevisionData(data.into_owned())), + // Errors other than `HgError` should not happen at this point + Err(e) => match e { + RevlogError::Other(hg_error) => Err(hg_error), + revlog_error => Err(HgError::abort( + revlog_error.to_string(), + exit_codes::ABORT, + None, + )), + }, + } } } diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/revlog/index.rs --- a/rust/hg-core/src/revlog/index.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/revlog/index.rs Wed Oct 11 02:02:46 2023 +0200 @@ -1,3 +1,4 @@ +use std::fmt::Debug; use std::ops::Deref; use byteorder::{BigEndian, ByteOrder}; @@ -5,6 +6,7 @@ use crate::errors::HgError; use crate::revlog::node::Node; use crate::revlog::{Revision, NULL_REVISION}; +use crate::{Graph, GraphError, RevlogIndex, UncheckedRevision}; pub const INDEX_ENTRY_SIZE: usize = 64; @@ -86,6 +88,32 @@ uses_generaldelta: bool, } +impl Debug for Index { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Index") + .field("offsets", &self.offsets) + .field("uses_generaldelta", &self.uses_generaldelta) + .finish() + } +} + +impl Graph for Index { + fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { + let err = || GraphError::ParentOutOfRange(rev); + match self.get_entry(rev) { + Some(entry) => { + // The C implementation checks that the parents are valid + // before returning + Ok([ + self.check_revision(entry.p1()).ok_or_else(err)?, + self.check_revision(entry.p2()).ok_or_else(err)?, + ]) + } + None => Ok([NULL_REVISION, NULL_REVISION]), + } + } +} + impl Index { /// Create an index from bytes. /// Calculate the start of each entry when is_inline is true. @@ -175,48 +203,44 @@ if rev == NULL_REVISION { return None; } - if let Some(offsets) = &self.offsets { + Some(if let Some(offsets) = &self.offsets { self.get_entry_inline(rev, offsets) } else { self.get_entry_separated(rev) - } + }) } fn get_entry_inline( &self, rev: Revision, offsets: &[usize], - ) -> Option { - let start = *offsets.get(rev as usize)?; - let end = start.checked_add(INDEX_ENTRY_SIZE)?; + ) -> IndexEntry { + let start = offsets[rev.0 as usize]; + let end = start + INDEX_ENTRY_SIZE; let bytes = &self.bytes[start..end]; // See IndexEntry for an explanation of this override. 
let offset_override = Some(end); - Some(IndexEntry { + IndexEntry { bytes, offset_override, - }) + } } - fn get_entry_separated(&self, rev: Revision) -> Option { - let max_rev = self.bytes.len() / INDEX_ENTRY_SIZE; - if rev as usize >= max_rev { - return None; - } - let start = rev as usize * INDEX_ENTRY_SIZE; + fn get_entry_separated(&self, rev: Revision) -> IndexEntry { + let start = rev.0 as usize * INDEX_ENTRY_SIZE; let end = start + INDEX_ENTRY_SIZE; let bytes = &self.bytes[start..end]; // Override the offset of the first revision as its bytes are used // for the index's metadata (saving space because it is always 0) - let offset_override = if rev == 0 { Some(0) } else { None }; + let offset_override = if rev == Revision(0) { Some(0) } else { None }; - Some(IndexEntry { + IndexEntry { bytes, offset_override, - }) + } } } @@ -273,23 +297,23 @@ } /// Return the revision upon which the data has been derived. - pub fn base_revision_or_base_of_delta_chain(&self) -> Revision { + pub fn base_revision_or_base_of_delta_chain(&self) -> UncheckedRevision { // TODO Maybe return an Option when base_revision == rev? // Requires to add rev to IndexEntry - BigEndian::read_i32(&self.bytes[16..]) + BigEndian::read_i32(&self.bytes[16..]).into() } - pub fn link_revision(&self) -> Revision { - BigEndian::read_i32(&self.bytes[20..]) + pub fn link_revision(&self) -> UncheckedRevision { + BigEndian::read_i32(&self.bytes[20..]).into() } - pub fn p1(&self) -> Revision { - BigEndian::read_i32(&self.bytes[24..]) + pub fn p1(&self) -> UncheckedRevision { + BigEndian::read_i32(&self.bytes[24..]).into() } - pub fn p2(&self) -> Revision { - BigEndian::read_i32(&self.bytes[28..]) + pub fn p2(&self) -> UncheckedRevision { + BigEndian::read_i32(&self.bytes[28..]).into() } /// Return the hash of revision's full text. 
@@ -335,8 +359,8 @@ offset: 0, compressed_len: 0, uncompressed_len: 0, - base_revision_or_base_of_delta_chain: 0, - link_revision: 0, + base_revision_or_base_of_delta_chain: Revision(0), + link_revision: Revision(0), p1: NULL_REVISION, p2: NULL_REVISION, node: NULL_NODE, @@ -426,11 +450,11 @@ bytes.extend(&(self.compressed_len as u32).to_be_bytes()); bytes.extend(&(self.uncompressed_len as u32).to_be_bytes()); bytes.extend( - &self.base_revision_or_base_of_delta_chain.to_be_bytes(), + &self.base_revision_or_base_of_delta_chain.0.to_be_bytes(), ); - bytes.extend(&self.link_revision.to_be_bytes()); - bytes.extend(&self.p1.to_be_bytes()); - bytes.extend(&self.p2.to_be_bytes()); + bytes.extend(&self.link_revision.0.to_be_bytes()); + bytes.extend(&self.p1.0.to_be_bytes()); + bytes.extend(&self.p2.0.to_be_bytes()); bytes.extend(self.node.as_bytes()); bytes.extend(vec![0u8; 12]); bytes @@ -540,50 +564,52 @@ #[test] fn test_base_revision_or_base_of_delta_chain() { let bytes = IndexEntryBuilder::new() - .with_base_revision_or_base_of_delta_chain(1) + .with_base_revision_or_base_of_delta_chain(Revision(1)) .build(); let entry = IndexEntry { bytes: &bytes, offset_override: None, }; - assert_eq!(entry.base_revision_or_base_of_delta_chain(), 1) + assert_eq!(entry.base_revision_or_base_of_delta_chain(), 1.into()) } #[test] fn link_revision_test() { - let bytes = IndexEntryBuilder::new().with_link_revision(123).build(); + let bytes = IndexEntryBuilder::new() + .with_link_revision(Revision(123)) + .build(); let entry = IndexEntry { bytes: &bytes, offset_override: None, }; - assert_eq!(entry.link_revision(), 123); + assert_eq!(entry.link_revision(), 123.into()); } #[test] fn p1_test() { - let bytes = IndexEntryBuilder::new().with_p1(123).build(); + let bytes = IndexEntryBuilder::new().with_p1(Revision(123)).build(); let entry = IndexEntry { bytes: &bytes, offset_override: None, }; - assert_eq!(entry.p1(), 123); + assert_eq!(entry.p1(), 123.into()); } #[test] fn p2_test() { - let bytes = IndexEntryBuilder::new().with_p2(123).build(); + let bytes = IndexEntryBuilder::new().with_p2(Revision(123)).build(); let entry = IndexEntry { bytes: &bytes, offset_override: None, }; - assert_eq!(entry.p2(), 123); + assert_eq!(entry.p2(), 123.into()); } #[test] diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/revlog/manifest.rs --- a/rust/hg-core/src/revlog/manifest.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/revlog/manifest.rs Wed Oct 11 02:02:46 2023 +0200 @@ -1,10 +1,10 @@ use crate::errors::HgError; -use crate::revlog::Revision; use crate::revlog::{Node, NodePrefix}; use crate::revlog::{Revlog, RevlogError}; use crate::utils::hg_path::HgPath; use crate::utils::SliceExt; use crate::vfs::Vfs; +use crate::{Graph, GraphError, Revision, UncheckedRevision}; /// A specialized `Revlog` to work with `manifest` data format. pub struct Manifestlog { @@ -12,6 +12,12 @@ revlog: Revlog, } +impl Graph for Manifestlog { + fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { + self.revlog.parents(rev) + } +} + impl Manifestlog { /// Open the `manifest` of a repository given by its root. pub fn open(store_vfs: &Vfs, use_nodemap: bool) -> Result { @@ -32,7 +38,7 @@ node: NodePrefix, ) -> Result { let rev = self.revlog.rev_from_node(node)?; - self.data_for_rev(rev) + self.data_for_checked_rev(rev) } /// Return the `Manifest` of a given revision number. 
@@ -43,9 +49,18 @@ /// See also `Repo::manifest_for_rev` pub fn data_for_rev( &self, + rev: UncheckedRevision, + ) -> Result { + let bytes = self.revlog.get_rev_data(rev)?.into_owned(); + Ok(Manifest { bytes }) + } + + pub fn data_for_checked_rev( + &self, rev: Revision, ) -> Result { - let bytes = self.revlog.get_rev_data(rev)?.into_owned(); + let bytes = + self.revlog.get_rev_data_for_checked_rev(rev)?.into_owned(); Ok(Manifest { bytes }) } } diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/revlog/mod.rs --- a/rust/hg-core/src/revlog/mod.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/revlog/mod.rs Wed Oct 11 02:02:46 2023 +0200 @@ -33,24 +33,88 @@ use crate::errors::HgError; use crate::vfs::Vfs; -/// Mercurial revision numbers -/// /// As noted in revlog.c, revision numbers are actually encoded in /// 4 bytes, and are liberally converted to ints, whence the i32 -pub type Revision = i32; +pub type BaseRevision = i32; + +/// Mercurial revision numbers +/// In contrast to the more general [`UncheckedRevision`], these are "checked" +/// in the sense that they should only be used for revisions that are +/// valid for a given index (i.e. in bounds). +#[derive( + Debug, + derive_more::Display, + Clone, + Copy, + Hash, + PartialEq, + Eq, + PartialOrd, + Ord, +)] +pub struct Revision(pub BaseRevision); + +impl format_bytes::DisplayBytes for Revision { + fn display_bytes( + &self, + output: &mut dyn std::io::Write, + ) -> std::io::Result<()> { + self.0.display_bytes(output) + } +} + +/// Unchecked Mercurial revision numbers. +/// +/// Values of this type have no guarantee of being a valid revision number +/// in any context. Use method `check_revision` to get a valid revision within +/// the appropriate index object. +#[derive( + Debug, + derive_more::Display, + Clone, + Copy, + Hash, + PartialEq, + Eq, + PartialOrd, + Ord, +)] +pub struct UncheckedRevision(pub BaseRevision); + +impl format_bytes::DisplayBytes for UncheckedRevision { + fn display_bytes( + &self, + output: &mut dyn std::io::Write, + ) -> std::io::Result<()> { + self.0.display_bytes(output) + } +} + +impl From for UncheckedRevision { + fn from(value: Revision) -> Self { + Self(value.0) + } +} + +impl From for UncheckedRevision { + fn from(value: BaseRevision) -> Self { + Self(value) + } +} /// Marker expressing the absence of a parent /// /// Independently of the actual representation, `NULL_REVISION` is guaranteed /// to be smaller than all existing revisions. -pub const NULL_REVISION: Revision = -1; +pub const NULL_REVISION: Revision = Revision(-1); /// Same as `mercurial.node.wdirrev` /// /// This is also equal to `i32::max_value()`, but it's better to spell /// it out explicitely, same as in `mercurial.node` #[allow(clippy::unreadable_literal)] -pub const WORKING_DIRECTORY_REVISION: Revision = 0x7fffffff; +pub const WORKING_DIRECTORY_REVISION: UncheckedRevision = + UncheckedRevision(0x7fffffff); pub const WORKING_DIRECTORY_HEX: &str = "ffffffffffffffffffffffffffffffffffffffff"; @@ -66,7 +130,6 @@ #[derive(Clone, Debug, PartialEq)] pub enum GraphError { ParentOutOfRange(Revision), - WorkingDirectoryUnsupported, } /// The Mercurial Revlog Index @@ -81,10 +144,21 @@ self.len() == 0 } - /// Return a reference to the Node or `None` if rev is out of bounds - /// - /// `NULL_REVISION` is not considered to be out of bounds. 
+ /// Return a reference to the Node or `None` for `NULL_REVISION` fn node(&self, rev: Revision) -> Option<&Node>; + + /// Return a [`Revision`] if `rev` is a valid revision number for this + /// index + fn check_revision(&self, rev: UncheckedRevision) -> Option { + let rev = rev.0; + + if rev == NULL_REVISION.0 || (rev >= 0 && (rev as usize) < self.len()) + { + Some(Revision(rev)) + } else { + None + } + } } const REVISION_FLAG_CENSORED: u16 = 1 << 15; @@ -101,7 +175,7 @@ const NULL_REVLOG_ENTRY_FLAGS: u16 = 0; -#[derive(Debug, derive_more::From)] +#[derive(Debug, derive_more::From, derive_more::Display)] pub enum RevlogError { InvalidRevision, /// Working directory is not supported @@ -145,6 +219,12 @@ nodemap: Option, } +impl Graph for Revlog { + fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { + self.index.parents(rev) + } +} + impl Revlog { /// Open a revlog index file. /// @@ -224,10 +304,11 @@ /// Returns the node ID for the given revision number, if it exists in this /// revlog - pub fn node_from_rev(&self, rev: Revision) -> Option<&Node> { - if rev == NULL_REVISION { + pub fn node_from_rev(&self, rev: UncheckedRevision) -> Option<&Node> { + if rev == NULL_REVISION.into() { return Some(&NULL_NODE); } + let rev = self.index.check_revision(rev)?; Some(self.index.get_entry(rev)?.hash()) } @@ -259,8 +340,9 @@ // TODO: consider building a non-persistent nodemap in memory to // optimize these cases. let mut found_by_prefix = None; - for rev in (-1..self.len() as Revision).rev() { - let candidate_node = if rev == -1 { + for rev in (-1..self.len() as BaseRevision).rev() { + let rev = Revision(rev as BaseRevision); + let candidate_node = if rev == Revision(-1) { NULL_NODE } else { let index_entry = @@ -285,8 +367,8 @@ } /// Returns whether the given revision exists in this revlog. - pub fn has_rev(&self, rev: Revision) -> bool { - self.index.get_entry(rev).is_some() + pub fn has_rev(&self, rev: UncheckedRevision) -> bool { + self.index.check_revision(rev).is_some() } /// Return the full data associated to a revision. @@ -296,12 +378,23 @@ /// snapshot to rebuild the final data. pub fn get_rev_data( &self, + rev: UncheckedRevision, + ) -> Result, RevlogError> { + if rev == NULL_REVISION.into() { + return Ok(Cow::Borrowed(&[])); + }; + self.get_entry(rev)?.data() + } + + /// [`Self::get_rev_data`] for checked revisions. + pub fn get_rev_data_for_checked_rev( + &self, rev: Revision, ) -> Result, RevlogError> { if rev == NULL_REVISION { return Ok(Cow::Borrowed(&[])); }; - Ok(self.get_entry(rev)?.data()?) + self.get_entry_for_checked_rev(rev)?.data() } /// Check the hash of some given data against the recorded hash. @@ -369,8 +462,7 @@ } } - /// Get an entry of the revlog. 
- pub fn get_entry( + fn get_entry_for_checked_rev( &self, rev: Revision, ) -> Result { @@ -388,36 +480,60 @@ } else { &self.data()[start..end] }; + let base_rev = self + .index + .check_revision(index_entry.base_revision_or_base_of_delta_chain()) + .ok_or_else(|| { + RevlogError::corrupted(format!( + "base revision for rev {} is invalid", + rev + )) + })?; + let p1 = + self.index.check_revision(index_entry.p1()).ok_or_else(|| { + RevlogError::corrupted(format!( + "p1 for rev {} is invalid", + rev + )) + })?; + let p2 = + self.index.check_revision(index_entry.p2()).ok_or_else(|| { + RevlogError::corrupted(format!( + "p2 for rev {} is invalid", + rev + )) + })?; let entry = RevlogEntry { revlog: self, rev, bytes: data, compressed_len: index_entry.compressed_len(), uncompressed_len: index_entry.uncompressed_len(), - base_rev_or_base_of_delta_chain: if index_entry - .base_revision_or_base_of_delta_chain() - == rev - { + base_rev_or_base_of_delta_chain: if base_rev == rev { None } else { - Some(index_entry.base_revision_or_base_of_delta_chain()) + Some(base_rev) }, - p1: index_entry.p1(), - p2: index_entry.p2(), + p1, + p2, flags: index_entry.flags(), hash: *index_entry.hash(), }; Ok(entry) } - /// when resolving internal references within revlog, any errors - /// should be reported as corruption, instead of e.g. "invalid revision" - fn get_entry_internal( + /// Get an entry of the revlog. + pub fn get_entry( &self, - rev: Revision, - ) -> Result { - self.get_entry(rev) - .map_err(|_| corrupted(format!("revision {} out of range", rev))) + rev: UncheckedRevision, + ) -> Result { + if rev == NULL_REVISION.into() { + return Ok(self.make_null_entry()); + } + let rev = self.index.check_revision(rev).ok_or_else(|| { + RevlogError::corrupted(format!("rev {} is invalid", rev)) + })?; + self.get_entry_for_checked_rev(rev) } } @@ -475,7 +591,7 @@ if self.p1 == NULL_REVISION { Ok(None) } else { - Ok(Some(self.revlog.get_entry(self.p1)?)) + Ok(Some(self.revlog.get_entry_for_checked_rev(self.p1)?)) } } @@ -485,7 +601,7 @@ if self.p2 == NULL_REVISION { Ok(None) } else { - Ok(Some(self.revlog.get_entry(self.p2)?)) + Ok(Some(self.revlog.get_entry_for_checked_rev(self.p2)?)) } } @@ -516,7 +632,7 @@ } /// The data for this entry, after resolving deltas if any. - pub fn rawdata(&self) -> Result, HgError> { + pub fn rawdata(&self) -> Result, RevlogError> { let mut entry = self.clone(); let mut delta_chain = vec![]; @@ -526,13 +642,14 @@ // [_chaininfo] and in [index_deltachain]. let uses_generaldelta = self.revlog.index.uses_generaldelta(); while let Some(base_rev) = entry.base_rev_or_base_of_delta_chain { - let base_rev = if uses_generaldelta { - base_rev + entry = if uses_generaldelta { + delta_chain.push(entry); + self.revlog.get_entry_for_checked_rev(base_rev)? } else { - entry.rev - 1 + let base_rev = UncheckedRevision(entry.rev.0 - 1); + delta_chain.push(entry); + self.revlog.get_entry(base_rev)? 
}; - delta_chain.push(entry); - entry = self.revlog.get_entry_internal(base_rev)?; } let data = if delta_chain.is_empty() { @@ -547,7 +664,7 @@ fn check_data( &self, data: Cow<'revlog, [u8]>, - ) -> Result, HgError> { + ) -> Result, RevlogError> { if self.revlog.check_hash( self.p1, self.p2, @@ -559,22 +676,24 @@ if (self.flags & REVISION_FLAG_ELLIPSIS) != 0 { return Err(HgError::unsupported( "ellipsis revisions are not supported by rhg", - )); + ) + .into()); } Err(corrupted(format!( "hash check failed for revision {}", self.rev - ))) + )) + .into()) } } - pub fn data(&self) -> Result, HgError> { + pub fn data(&self) -> Result, RevlogError> { let data = self.rawdata()?; if self.rev == NULL_REVISION { return Ok(data); } if self.is_censored() { - return Err(HgError::CensoredNodeError); + return Err(HgError::CensoredNodeError.into()); } self.check_data(data) } @@ -693,13 +812,13 @@ let revlog = Revlog::open(&vfs, "foo.i", None, false).unwrap(); assert!(revlog.is_empty()); assert_eq!(revlog.len(), 0); - assert!(revlog.get_entry(0).is_err()); - assert!(!revlog.has_rev(0)); + assert!(revlog.get_entry(0.into()).is_err()); + assert!(!revlog.has_rev(0.into())); assert_eq!( revlog.rev_from_node(NULL_NODE.into()).unwrap(), NULL_REVISION ); - let null_entry = revlog.get_entry(NULL_REVISION).ok().unwrap(); + let null_entry = revlog.get_entry(NULL_REVISION.into()).ok().unwrap(); assert_eq!(null_entry.revision(), NULL_REVISION); assert!(null_entry.data().unwrap().is_empty()); } @@ -727,8 +846,8 @@ .build(); let entry2_bytes = IndexEntryBuilder::new() .with_offset(INDEX_ENTRY_SIZE) - .with_p1(0) - .with_p2(1) + .with_p1(Revision(0)) + .with_p2(Revision(1)) .with_node(node2) .build(); let contents = vec![entry0_bytes, entry1_bytes, entry2_bytes] @@ -738,8 +857,8 @@ std::fs::write(temp.path().join("foo.i"), contents).unwrap(); let revlog = Revlog::open(&vfs, "foo.i", None, false).unwrap(); - let entry0 = revlog.get_entry(0).ok().unwrap(); - assert_eq!(entry0.revision(), 0); + let entry0 = revlog.get_entry(0.into()).ok().unwrap(); + assert_eq!(entry0.revision(), Revision(0)); assert_eq!(*entry0.node(), node0); assert!(!entry0.has_p1()); assert_eq!(entry0.p1(), None); @@ -749,8 +868,8 @@ let p2_entry = entry0.p2_entry().unwrap(); assert!(p2_entry.is_none()); - let entry1 = revlog.get_entry(1).ok().unwrap(); - assert_eq!(entry1.revision(), 1); + let entry1 = revlog.get_entry(1.into()).ok().unwrap(); + assert_eq!(entry1.revision(), Revision(1)); assert_eq!(*entry1.node(), node1); assert!(!entry1.has_p1()); assert_eq!(entry1.p1(), None); @@ -760,18 +879,18 @@ let p2_entry = entry1.p2_entry().unwrap(); assert!(p2_entry.is_none()); - let entry2 = revlog.get_entry(2).ok().unwrap(); - assert_eq!(entry2.revision(), 2); + let entry2 = revlog.get_entry(2.into()).ok().unwrap(); + assert_eq!(entry2.revision(), Revision(2)); assert_eq!(*entry2.node(), node2); assert!(entry2.has_p1()); - assert_eq!(entry2.p1(), Some(0)); - assert_eq!(entry2.p2(), Some(1)); + assert_eq!(entry2.p1(), Some(Revision(0))); + assert_eq!(entry2.p2(), Some(Revision(1))); let p1_entry = entry2.p1_entry().unwrap(); assert!(p1_entry.is_some()); - assert_eq!(p1_entry.unwrap().revision(), 0); + assert_eq!(p1_entry.unwrap().revision(), Revision(0)); let p2_entry = entry2.p2_entry().unwrap(); assert!(p2_entry.is_some()); - assert_eq!(p2_entry.unwrap().revision(), 1); + assert_eq!(p2_entry.unwrap().revision(), Revision(1)); } #[test] @@ -804,29 +923,32 @@ std::fs::write(temp.path().join("foo.i"), contents).unwrap(); let mut idx = 
nodemap::tests::TestNtIndex::new(); - idx.insert_node(0, node0).unwrap(); - idx.insert_node(1, node1).unwrap(); + idx.insert_node(Revision(0), node0).unwrap(); + idx.insert_node(Revision(1), node1).unwrap(); let revlog = Revlog::open_gen(&vfs, "foo.i", None, true, Some(idx.nt)).unwrap(); // accessing the data shows the corruption - revlog.get_entry(0).unwrap().data().unwrap_err(); + revlog.get_entry(0.into()).unwrap().data().unwrap_err(); - assert_eq!(revlog.rev_from_node(NULL_NODE.into()).unwrap(), -1); - assert_eq!(revlog.rev_from_node(node0.into()).unwrap(), 0); - assert_eq!(revlog.rev_from_node(node1.into()).unwrap(), 1); + assert_eq!( + revlog.rev_from_node(NULL_NODE.into()).unwrap(), + Revision(-1) + ); + assert_eq!(revlog.rev_from_node(node0.into()).unwrap(), Revision(0)); + assert_eq!(revlog.rev_from_node(node1.into()).unwrap(), Revision(1)); assert_eq!( revlog .rev_from_node(NodePrefix::from_hex("000").unwrap()) .unwrap(), - -1 + Revision(-1) ); assert_eq!( revlog .rev_from_node(NodePrefix::from_hex("b00").unwrap()) .unwrap(), - 1 + Revision(1) ); // RevlogError does not implement PartialEq // (ultimately because io::Error does not) diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/revlog/nodemap.rs --- a/rust/hg-core/src/revlog/nodemap.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/revlog/nodemap.rs Wed Oct 11 02:02:46 2023 +0200 @@ -12,6 +12,8 @@ //! Following existing implicit conventions, the "nodemap" terminology //! is used in a more abstract context. +use crate::UncheckedRevision; + use super::{ node::NULL_NODE, Node, NodePrefix, Revision, RevlogIndex, NULL_REVISION, }; @@ -30,7 +32,7 @@ /// This can be returned by methods meant for (at most) one match. MultipleResults, /// A `Revision` stored in the nodemap could not be found in the index - RevisionNotInIndex(Revision), + RevisionNotInIndex(UncheckedRevision), } /// Mapping system from Mercurial nodes to revision numbers. @@ -125,7 +127,9 @@ /// use. #[derive(Clone, Debug, Eq, PartialEq)] enum Element { - Rev(Revision), + // This is not a Mercurial revision. It's a `i32` because this is the + // right type for this structure. + Rev(i32), Block(usize), None, } @@ -245,17 +249,21 @@ fn has_prefix_or_none( idx: &impl RevlogIndex, prefix: NodePrefix, - rev: Revision, + rev: UncheckedRevision, ) -> Result, NodeMapError> { - idx.node(rev) - .ok_or(NodeMapError::RevisionNotInIndex(rev)) - .map(|node| { - if prefix.is_prefix_of(node) { - Some(rev) - } else { - None - } - }) + match idx.check_revision(rev) { + Some(checked) => idx + .node(checked) + .ok_or(NodeMapError::RevisionNotInIndex(rev)) + .map(|node| { + if prefix.is_prefix_of(node) { + Some(checked) + } else { + None + } + }), + None => Err(NodeMapError::RevisionNotInIndex(rev)), + } } /// validate that the candidate's node starts indeed with given prefix, @@ -266,7 +274,7 @@ fn validate_candidate( idx: &impl RevlogIndex, prefix: NodePrefix, - candidate: (Option, usize), + candidate: (Option, usize), ) -> Result<(Option, usize), NodeMapError> { let (rev, steps) = candidate; if let Some(nz_nybble) = prefix.first_different_nybble(&NULL_NODE) { @@ -384,6 +392,8 @@ /// be inferred from /// the `NodeTree` data is that `rev` is the revision with the longest /// common node prefix with the given prefix. + /// We return an [`UncheckedRevision`] because we have no guarantee that + /// the revision we found is valid for the index. /// /// The second returned value is the size of the smallest subprefix /// of `prefix` that would give the same result, i.e. 
not the @@ -392,7 +402,7 @@ fn lookup( &self, prefix: NodePrefix, - ) -> Result<(Option, usize), NodeMapError> { + ) -> Result<(Option, usize), NodeMapError> { for (i, visit_item) in self.visit(prefix).enumerate() { if let Some(opt) = visit_item.final_revision() { return Ok((opt, i + 1)); @@ -465,8 +475,11 @@ if let Element::Rev(old_rev) = deepest.element { let old_node = index - .node(old_rev) - .ok_or(NodeMapError::RevisionNotInIndex(old_rev))?; + .check_revision(old_rev.into()) + .and_then(|rev| index.node(rev)) + .ok_or_else(|| { + NodeMapError::RevisionNotInIndex(old_rev.into()) + })?; if old_node == node { return Ok(()); // avoid creating lots of useless blocks } @@ -490,14 +503,14 @@ } else { let mut new_block = Block::new(); new_block.set(old_nybble, Element::Rev(old_rev)); - new_block.set(new_nybble, Element::Rev(rev)); + new_block.set(new_nybble, Element::Rev(rev.0)); self.growable.push(new_block); break; } } } else { // Free slot in the deepest block: no splitting has to be done - block.set(deepest.nybble, Element::Rev(rev)); + block.set(deepest.nybble, Element::Rev(rev.0)); } // Backtrack over visit steps to update references @@ -623,13 +636,13 @@ impl NodeTreeVisitItem { // Return `Some(opt)` if this item is final, with `opt` being the - // `Revision` that it may represent. + // `UncheckedRevision` that it may represent. // // If the item is not terminal, return `None` - fn final_revision(&self) -> Option> { + fn final_revision(&self) -> Option> { match self.element { Element::Block(_) => None, - Element::Rev(r) => Some(Some(r)), + Element::Rev(r) => Some(Some(r.into())), Element::None => Some(None), } } @@ -697,6 +710,13 @@ ) } + /// Shorthand to reduce boilerplate when creating [`Revision`] for testing + macro_rules! R { + ($revision:literal) => { + Revision($revision) + }; + } + #[test] fn test_block_debug() { let mut block = Block::new(); @@ -733,16 +753,20 @@ assert_eq!(block.get(4), Element::Rev(1)); } - type TestIndex = HashMap; + type TestIndex = HashMap; impl RevlogIndex for TestIndex { fn node(&self, rev: Revision) -> Option<&Node> { - self.get(&rev) + self.get(&rev.into()) } fn len(&self) -> usize { self.len() } + + fn check_revision(&self, rev: UncheckedRevision) -> Option { + self.get(&rev).map(|_| Revision(rev.0)) + } } /// Pad hexadecimal Node prefix with zeros on the right @@ -756,7 +780,7 @@ /// Pad hexadecimal Node prefix with zeros on the right, then insert fn pad_insert(idx: &mut TestIndex, rev: Revision, hex: &str) { - idx.insert(rev, pad_node(hex)); + idx.insert(rev.into(), pad_node(hex)); } fn sample_nodetree() -> NodeTree { @@ -786,17 +810,20 @@ #[test] fn test_immutable_find_simplest() -> Result<(), NodeMapError> { let mut idx: TestIndex = HashMap::new(); - pad_insert(&mut idx, 1, "1234deadcafe"); + pad_insert(&mut idx, R!(1), "1234deadcafe"); let nt = NodeTree::from(vec![block! 
{1: Rev(1)}]); - assert_eq!(nt.find_bin(&idx, hex("1"))?, Some(1)); - assert_eq!(nt.find_bin(&idx, hex("12"))?, Some(1)); - assert_eq!(nt.find_bin(&idx, hex("1234de"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("1"))?, Some(R!(1))); + assert_eq!(nt.find_bin(&idx, hex("12"))?, Some(R!(1))); + assert_eq!(nt.find_bin(&idx, hex("1234de"))?, Some(R!(1))); assert_eq!(nt.find_bin(&idx, hex("1a"))?, None); assert_eq!(nt.find_bin(&idx, hex("ab"))?, None); // and with full binary Nodes - assert_eq!(nt.find_node(&idx, idx.get(&1).unwrap())?, Some(1)); + assert_eq!( + nt.find_node(&idx, idx.get(&1.into()).unwrap())?, + Some(R!(1)) + ); let unknown = Node::from_hex(&hex_pad_right("3d")).unwrap(); assert_eq!(nt.find_node(&idx, &unknown)?, None); Ok(()) @@ -805,15 +832,15 @@ #[test] fn test_immutable_find_one_jump() { let mut idx = TestIndex::new(); - pad_insert(&mut idx, 9, "012"); - pad_insert(&mut idx, 0, "00a"); + pad_insert(&mut idx, R!(9), "012"); + pad_insert(&mut idx, R!(0), "00a"); let nt = sample_nodetree(); assert_eq!(nt.find_bin(&idx, hex("0")), Err(MultipleResults)); - assert_eq!(nt.find_bin(&idx, hex("01")), Ok(Some(9))); + assert_eq!(nt.find_bin(&idx, hex("01")), Ok(Some(R!(9)))); assert_eq!(nt.find_bin(&idx, hex("00")), Err(MultipleResults)); - assert_eq!(nt.find_bin(&idx, hex("00a")), Ok(Some(0))); + assert_eq!(nt.find_bin(&idx, hex("00a")), Ok(Some(R!(0)))); assert_eq!(nt.unique_prefix_len_bin(&idx, hex("00a")), Ok(Some(3))); assert_eq!(nt.find_bin(&idx, hex("000")), Ok(Some(NULL_REVISION))); } @@ -821,11 +848,11 @@ #[test] fn test_mutated_find() -> Result<(), NodeMapError> { let mut idx = TestIndex::new(); - pad_insert(&mut idx, 9, "012"); - pad_insert(&mut idx, 0, "00a"); - pad_insert(&mut idx, 2, "cafe"); - pad_insert(&mut idx, 3, "15"); - pad_insert(&mut idx, 1, "10"); + pad_insert(&mut idx, R!(9), "012"); + pad_insert(&mut idx, R!(0), "00a"); + pad_insert(&mut idx, R!(2), "cafe"); + pad_insert(&mut idx, R!(3), "15"); + pad_insert(&mut idx, R!(1), "10"); let nt = NodeTree { readonly: sample_nodetree().readonly, @@ -833,13 +860,13 @@ root: block![0: Block(1), 1:Block(3), 12: Rev(2)], masked_inner_blocks: 1, }; - assert_eq!(nt.find_bin(&idx, hex("10"))?, Some(1)); - assert_eq!(nt.find_bin(&idx, hex("c"))?, Some(2)); + assert_eq!(nt.find_bin(&idx, hex("10"))?, Some(R!(1))); + assert_eq!(nt.find_bin(&idx, hex("c"))?, Some(R!(2))); assert_eq!(nt.unique_prefix_len_bin(&idx, hex("c"))?, Some(1)); assert_eq!(nt.find_bin(&idx, hex("00")), Err(MultipleResults)); assert_eq!(nt.find_bin(&idx, hex("000"))?, Some(NULL_REVISION)); assert_eq!(nt.unique_prefix_len_bin(&idx, hex("000"))?, Some(3)); - assert_eq!(nt.find_bin(&idx, hex("01"))?, Some(9)); + assert_eq!(nt.find_bin(&idx, hex("01"))?, Some(R!(9))); assert_eq!(nt.masked_readonly_blocks(), 2); Ok(()) } @@ -862,7 +889,7 @@ rev: Revision, node: Node, ) -> Result<(), NodeMapError> { - self.index.insert(rev, node); + self.index.insert(rev.into(), node); self.nt.insert(&self.index, &node, rev)?; Ok(()) } @@ -872,7 +899,8 @@ rev: Revision, hex: &str, ) -> Result<(), NodeMapError> { - return self.insert_node(rev, pad_node(hex)); + let node = pad_node(hex); + return self.insert_node(rev, node); } fn find_hex( @@ -906,35 +934,35 @@ #[test] fn test_insert_full_mutable() -> Result<(), NodeMapError> { let mut idx = TestNtIndex::new(); - idx.insert(0, "1234")?; - assert_eq!(idx.find_hex("1")?, Some(0)); - assert_eq!(idx.find_hex("12")?, Some(0)); + idx.insert(Revision(0), "1234")?; + assert_eq!(idx.find_hex("1")?, Some(R!(0))); + 
assert_eq!(idx.find_hex("12")?, Some(R!(0))); // let's trigger a simple split - idx.insert(1, "1a34")?; + idx.insert(Revision(1), "1a34")?; assert_eq!(idx.nt.growable.len(), 1); - assert_eq!(idx.find_hex("12")?, Some(0)); - assert_eq!(idx.find_hex("1a")?, Some(1)); + assert_eq!(idx.find_hex("12")?, Some(R!(0))); + assert_eq!(idx.find_hex("1a")?, Some(R!(1))); // reinserting is a no_op - idx.insert(1, "1a34")?; + idx.insert(Revision(1), "1a34")?; assert_eq!(idx.nt.growable.len(), 1); - assert_eq!(idx.find_hex("12")?, Some(0)); - assert_eq!(idx.find_hex("1a")?, Some(1)); + assert_eq!(idx.find_hex("12")?, Some(R!(0))); + assert_eq!(idx.find_hex("1a")?, Some(R!(1))); - idx.insert(2, "1a01")?; + idx.insert(Revision(2), "1a01")?; assert_eq!(idx.nt.growable.len(), 2); assert_eq!(idx.find_hex("1a"), Err(NodeMapError::MultipleResults)); - assert_eq!(idx.find_hex("12")?, Some(0)); - assert_eq!(idx.find_hex("1a3")?, Some(1)); - assert_eq!(idx.find_hex("1a0")?, Some(2)); + assert_eq!(idx.find_hex("12")?, Some(R!(0))); + assert_eq!(idx.find_hex("1a3")?, Some(R!(1))); + assert_eq!(idx.find_hex("1a0")?, Some(R!(2))); assert_eq!(idx.find_hex("1a12")?, None); // now let's make it split and create more than one additional block - idx.insert(3, "1a345")?; + idx.insert(Revision(3), "1a345")?; assert_eq!(idx.nt.growable.len(), 4); - assert_eq!(idx.find_hex("1a340")?, Some(1)); - assert_eq!(idx.find_hex("1a345")?, Some(3)); + assert_eq!(idx.find_hex("1a340")?, Some(R!(1))); + assert_eq!(idx.find_hex("1a345")?, Some(R!(3))); assert_eq!(idx.find_hex("1a341")?, None); // there's no readonly block to mask @@ -945,7 +973,7 @@ #[test] fn test_unique_prefix_len_zero_prefix() { let mut idx = TestNtIndex::new(); - idx.insert(0, "00000abcd").unwrap(); + idx.insert(Revision(0), "00000abcd").unwrap(); assert_eq!(idx.find_hex("000"), Err(NodeMapError::MultipleResults)); // in the nodetree proper, this will be found at the first nybble @@ -955,7 +983,7 @@ assert_eq!(idx.unique_prefix_len_hex("00000ab"), Ok(Some(6))); // same with odd result - idx.insert(1, "00123").unwrap(); + idx.insert(Revision(1), "00123").unwrap(); assert_eq!(idx.unique_prefix_len_hex("001"), Ok(Some(3))); assert_eq!(idx.unique_prefix_len_hex("0012"), Ok(Some(3))); @@ -978,46 +1006,46 @@ let node0 = Node::from_hex(&node0_hex).unwrap(); let node1 = Node::from_hex(&node1_hex).unwrap(); - idx.insert(0, node0); - nt.insert(idx, &node0, 0)?; - idx.insert(1, node1); - nt.insert(idx, &node1, 1)?; + idx.insert(0.into(), node0); + nt.insert(idx, &node0, R!(0))?; + idx.insert(1.into(), node1); + nt.insert(idx, &node1, R!(1))?; - assert_eq!(nt.find_bin(idx, (&node0).into())?, Some(0)); - assert_eq!(nt.find_bin(idx, (&node1).into())?, Some(1)); + assert_eq!(nt.find_bin(idx, (&node0).into())?, Some(R!(0))); + assert_eq!(nt.find_bin(idx, (&node1).into())?, Some(R!(1))); Ok(()) } #[test] fn test_insert_partly_immutable() -> Result<(), NodeMapError> { let mut idx = TestNtIndex::new(); - idx.insert(0, "1234")?; - idx.insert(1, "1235")?; - idx.insert(2, "131")?; - idx.insert(3, "cafe")?; + idx.insert(Revision(0), "1234")?; + idx.insert(Revision(1), "1235")?; + idx.insert(Revision(2), "131")?; + idx.insert(Revision(3), "cafe")?; let mut idx = idx.commit(); - assert_eq!(idx.find_hex("1234")?, Some(0)); - assert_eq!(idx.find_hex("1235")?, Some(1)); - assert_eq!(idx.find_hex("131")?, Some(2)); - assert_eq!(idx.find_hex("cafe")?, Some(3)); + assert_eq!(idx.find_hex("1234")?, Some(R!(0))); + assert_eq!(idx.find_hex("1235")?, Some(R!(1))); + assert_eq!(idx.find_hex("131")?, 
Some(R!(2))); + assert_eq!(idx.find_hex("cafe")?, Some(R!(3))); // we did not add anything since init from readonly assert_eq!(idx.nt.masked_readonly_blocks(), 0); - idx.insert(4, "123A")?; - assert_eq!(idx.find_hex("1234")?, Some(0)); - assert_eq!(idx.find_hex("1235")?, Some(1)); - assert_eq!(idx.find_hex("131")?, Some(2)); - assert_eq!(idx.find_hex("cafe")?, Some(3)); - assert_eq!(idx.find_hex("123A")?, Some(4)); + idx.insert(Revision(4), "123A")?; + assert_eq!(idx.find_hex("1234")?, Some(R!(0))); + assert_eq!(idx.find_hex("1235")?, Some(R!(1))); + assert_eq!(idx.find_hex("131")?, Some(R!(2))); + assert_eq!(idx.find_hex("cafe")?, Some(R!(3))); + assert_eq!(idx.find_hex("123A")?, Some(R!(4))); // we masked blocks for all prefixes of "123", including the root assert_eq!(idx.nt.masked_readonly_blocks(), 4); eprintln!("{:?}", idx.nt); - idx.insert(5, "c0")?; - assert_eq!(idx.find_hex("cafe")?, Some(3)); - assert_eq!(idx.find_hex("c0")?, Some(5)); + idx.insert(Revision(5), "c0")?; + assert_eq!(idx.find_hex("cafe")?, Some(R!(3))); + assert_eq!(idx.find_hex("c0")?, Some(R!(5))); assert_eq!(idx.find_hex("c1")?, None); - assert_eq!(idx.find_hex("1234")?, Some(0)); + assert_eq!(idx.find_hex("1234")?, Some(R!(0))); // inserting "c0" is just splitting the 'c' slot of the mutable root, // it doesn't mask anything assert_eq!(idx.nt.masked_readonly_blocks(), 4); @@ -1028,10 +1056,10 @@ #[test] fn test_invalidate_all() -> Result<(), NodeMapError> { let mut idx = TestNtIndex::new(); - idx.insert(0, "1234")?; - idx.insert(1, "1235")?; - idx.insert(2, "131")?; - idx.insert(3, "cafe")?; + idx.insert(Revision(0), "1234")?; + idx.insert(Revision(1), "1235")?; + idx.insert(Revision(2), "131")?; + idx.insert(Revision(3), "cafe")?; let mut idx = idx.commit(); idx.nt.invalidate_all(); @@ -1058,9 +1086,9 @@ #[test] fn test_into_added_bytes() -> Result<(), NodeMapError> { let mut idx = TestNtIndex::new(); - idx.insert(0, "1234")?; + idx.insert(Revision(0), "1234")?; let mut idx = idx.commit(); - idx.insert(4, "cafe")?; + idx.insert(Revision(4), "cafe")?; let (_, bytes) = idx.nt.into_readonly_and_added_bytes(); // only the root block has been changed diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/revset.rs --- a/rust/hg-core/src/revset.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/revset.rs Wed Oct 11 02:02:46 2023 +0200 @@ -53,9 +53,11 @@ if let Ok(integer) = input.parse::() { if integer.to_string() == input && integer >= 0 - && revlog.has_rev(integer) + && revlog.has_rev(integer.into()) { - return Ok(integer); + // This is fine because we've just checked that the revision is + // valid for the given revlog. 
+ return Ok(Revision(integer)); } } if let Ok(prefix) = NodePrefix::from_hex(input) { diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/sparse.rs --- a/rust/hg-core/src/sparse.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/sparse.rs Wed Oct 11 02:02:46 2023 +0200 @@ -282,7 +282,8 @@ let (patterns, subwarnings) = parse_pattern_file_contents( &config.includes, Path::new(""), - Some(b"glob:".as_ref()), + Some(PatternSyntax::Glob), + false, false, )?; warnings.extend(subwarnings.into_iter().map(From::from)); @@ -292,7 +293,8 @@ let (patterns, subwarnings) = parse_pattern_file_contents( &config.excludes, Path::new(""), - Some(b"glob:".as_ref()), + Some(PatternSyntax::Glob), + false, false, )?; warnings.extend(subwarnings.into_iter().map(From::from)); diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/src/testing.rs --- a/rust/hg-core/src/testing.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/src/testing.rs Wed Oct 11 02:02:46 2023 +0200 @@ -41,22 +41,27 @@ impl Graph for SampleGraph { fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { - match rev { - 0 => Ok([NULL_REVISION, NULL_REVISION]), - 1 => Ok([0, NULL_REVISION]), - 2 => Ok([1, NULL_REVISION]), - 3 => Ok([1, NULL_REVISION]), - 4 => Ok([2, NULL_REVISION]), - 5 => Ok([4, NULL_REVISION]), - 6 => Ok([4, NULL_REVISION]), - 7 => Ok([4, NULL_REVISION]), - 8 => Ok([NULL_REVISION, NULL_REVISION]), + let null_rev = NULL_REVISION.0; + let res = match rev.0 { + 0 => Ok([null_rev, null_rev]), + 1 => Ok([0, null_rev]), + 2 => Ok([1, null_rev]), + 3 => Ok([1, null_rev]), + 4 => Ok([2, null_rev]), + 5 => Ok([4, null_rev]), + 6 => Ok([4, null_rev]), + 7 => Ok([4, null_rev]), + 8 => Ok([null_rev, null_rev]), 9 => Ok([6, 7]), - 10 => Ok([5, NULL_REVISION]), + 10 => Ok([5, null_rev]), 11 => Ok([3, 7]), - 12 => Ok([9, NULL_REVISION]), - 13 => Ok([8, NULL_REVISION]), - r => Err(GraphError::ParentOutOfRange(r)), + 12 => Ok([9, null_rev]), + 13 => Ok([8, null_rev]), + r => Err(GraphError::ParentOutOfRange(Revision(r))), + }; + match res { + Ok([a, b]) => Ok([Revision(a), Revision(b)]), + Err(e) => Err(e), } } } @@ -67,6 +72,6 @@ impl Graph for VecGraph { fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { - Ok(self[rev as usize]) + Ok(self[rev.0 as usize]) } } diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-core/tests/test_missing_ancestors.rs --- a/rust/hg-core/tests/test_missing_ancestors.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-core/tests/test_missing_ancestors.rs Wed Oct 11 02:02:46 2023 +0200 @@ -26,25 +26,28 @@ if i == 0 || rng.gen_bool(rootprob) { vg.push([NULL_REVISION, NULL_REVISION]) } else if i == 1 { - vg.push([0, NULL_REVISION]) + vg.push([Revision(0), NULL_REVISION]) } else if rng.gen_bool(mergeprob) { let p1 = { if i == 2 || rng.gen_bool(prevprob) { - (i - 1) as Revision + Revision((i - 1) as BaseRevision) } else { - rng.gen_range(0..i - 1) as Revision + Revision(rng.gen_range(0..i - 1) as BaseRevision) } }; // p2 is a random revision lower than i and different from p1 - let mut p2 = rng.gen_range(0..i - 1) as Revision; + let mut p2 = Revision(rng.gen_range(0..i - 1) as BaseRevision); if p2 >= p1 { - p2 += 1; + p2.0 += 1; } vg.push([p1, p2]); } else if rng.gen_bool(prevprob) { - vg.push([(i - 1) as Revision, NULL_REVISION]) + vg.push([Revision((i - 1) as BaseRevision), NULL_REVISION]) } else { - vg.push([rng.gen_range(0..i - 1) as Revision, NULL_REVISION]) + vg.push([ + Revision(rng.gen_range(0..i - 1) as BaseRevision), + NULL_REVISION, + ]) } } vg @@ -55,10 +58,10 @@ let mut ancs: 
Vec> = Vec::new(); (0..vg.len()).for_each(|i| { let mut ancs_i = HashSet::new(); - ancs_i.insert(i as Revision); + ancs_i.insert(Revision(i as BaseRevision)); for p in vg[i].iter().cloned() { if p != NULL_REVISION { - ancs_i.extend(&ancs[p as usize]); + ancs_i.extend(&ancs[p.0 as usize]); } } ancs.push(ancs_i); @@ -115,7 +118,7 @@ .push(MissingAncestorsAction::RemoveAncestorsFrom(revs.clone())); for base in self.bases.iter().cloned() { if base != NULL_REVISION { - for rev in &self.ancestors_sets[base as usize] { + for rev in &self.ancestors_sets[base.0 as usize] { revs.remove(rev); } } @@ -131,7 +134,7 @@ let mut missing: HashSet = HashSet::new(); for rev in revs_as_set.iter().cloned() { if rev != NULL_REVISION { - missing.extend(&self.ancestors_sets[rev as usize]) + missing.extend(&self.ancestors_sets[rev.0 as usize]) } } self.history @@ -139,7 +142,7 @@ for base in self.bases.iter().cloned() { if base != NULL_REVISION { - for rev in &self.ancestors_sets[base as usize] { + for rev in &self.ancestors_sets[base.0 as usize] { missing.remove(rev); } } @@ -193,10 +196,10 @@ let sigma = sigma_opt.unwrap_or(0.8); let log_normal = LogNormal::new(mu, sigma).unwrap(); - let nb = min(maxrev as usize, log_normal.sample(rng).floor() as usize); + let nb = min(maxrev.0 as usize, log_normal.sample(rng).floor() as usize); - let dist = Uniform::from(NULL_REVISION..maxrev); - rng.sample_iter(&dist).take(nb).collect() + let dist = Uniform::from(NULL_REVISION.0..maxrev.0); + rng.sample_iter(&dist).take(nb).map(Revision).collect() } /// Produces the hexadecimal representation of a slice of bytes @@ -294,7 +297,7 @@ eprintln!("Tested with {} graphs", g); } let graph = build_random_graph(None, None, None, None); - let graph_len = graph.len() as Revision; + let graph_len = Revision(graph.len() as BaseRevision); let ancestors_sets = ancestors_sets(&graph); for _testno in 0..testcount { let bases: HashSet = diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-cpython/src/ancestors.rs --- a/rust/hg-cpython/src/ancestors.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-cpython/src/ancestors.rs Wed Oct 11 02:02:46 2023 +0200 @@ -35,6 +35,7 @@ //! [`MissingAncestors`]: struct.MissingAncestors.html //! 
[`AncestorsIterator`]: struct.AncestorsIterator.html use crate::revlog::pyindex_to_graph; +use crate::PyRevision; use crate::{ cindex::Index, conversion::rev_pyiter_collect, exceptions::GraphError, }; @@ -54,16 +55,16 @@ py_class!(pub class AncestorsIterator |py| { data inner: RefCell>>; - def __next__(&self) -> PyResult> { + def __next__(&self) -> PyResult> { match self.inner(py).borrow_mut().next() { Some(Err(e)) => Err(GraphError::pynew_from_vcsgraph(py, e)), None => Ok(None), - Some(Ok(r)) => Ok(Some(r)), + Some(Ok(r)) => Ok(Some(PyRevision(r))), } } - def __contains__(&self, rev: Revision) -> PyResult { - self.inner(py).borrow_mut().contains(rev) + def __contains__(&self, rev: PyRevision) -> PyResult { + self.inner(py).borrow_mut().contains(rev.0) .map_err(|e| GraphError::pynew_from_vcsgraph(py, e)) } @@ -71,13 +72,19 @@ Ok(self.clone_ref(py)) } - def __new__(_cls, index: PyObject, initrevs: PyObject, stoprev: Revision, - inclusive: bool) -> PyResult { - let initvec: Vec = rev_pyiter_collect(py, &initrevs)?; + def __new__( + _cls, + index: PyObject, + initrevs: PyObject, + stoprev: PyRevision, + inclusive: bool + ) -> PyResult { + let index = pyindex_to_graph(py, index)?; + let initvec: Vec<_> = rev_pyiter_collect(py, &initrevs, &index)?; let ait = VCGAncestorsIterator::new( - pyindex_to_graph(py, index)?, - initvec, - stoprev, + index, + initvec.into_iter().map(|r| r.0), + stoprev.0, inclusive, ) .map_err(|e| GraphError::pynew_from_vcsgraph(py, e))?; @@ -98,10 +105,10 @@ py_class!(pub class LazyAncestors |py| { data inner: RefCell>>; - def __contains__(&self, rev: Revision) -> PyResult { + def __contains__(&self, rev: PyRevision) -> PyResult { self.inner(py) .borrow_mut() - .contains(rev) + .contains(rev.0) .map_err(|e| GraphError::pynew_from_vcsgraph(py, e)) } @@ -113,14 +120,24 @@ Ok(!self.inner(py).borrow().is_empty()) } - def __new__(_cls, index: PyObject, initrevs: PyObject, stoprev: Revision, - inclusive: bool) -> PyResult { - let initvec: Vec = rev_pyiter_collect(py, &initrevs)?; + def __new__( + _cls, + index: PyObject, + initrevs: PyObject, + stoprev: PyRevision, + inclusive: bool + ) -> PyResult { + let index = pyindex_to_graph(py, index)?; + let initvec: Vec<_> = rev_pyiter_collect(py, &initrevs, &index)?; let lazy = - VCGLazyAncestors::new(pyindex_to_graph(py, index)?, - initvec, stoprev, inclusive) - .map_err(|e| GraphError::pynew_from_vcsgraph(py, e))?; + VCGLazyAncestors::new( + index, + initvec.into_iter().map(|r| r.0), + stoprev.0, + inclusive + ) + .map_err(|e| GraphError::pynew_from_vcsgraph(py, e))?; Self::create_instance(py, RefCell::new(Box::new(lazy))) } @@ -129,6 +146,7 @@ py_class!(pub class MissingAncestors |py| { data inner: RefCell>>; + data index: RefCell; def __new__( _cls, @@ -136,9 +154,15 @@ bases: PyObject ) -> PyResult { - let bases_vec: Vec = rev_pyiter_collect(py, &bases)?; - let inner = CoreMissing::new(pyindex_to_graph(py, index)?, bases_vec); - MissingAncestors::create_instance(py, RefCell::new(Box::new(inner))) + let index = pyindex_to_graph(py, index)?; + let bases_vec: Vec<_> = rev_pyiter_collect(py, &bases, &index)?; + + let inner = CoreMissing::new(index.clone_ref(py), bases_vec); + MissingAncestors::create_instance( + py, + RefCell::new(Box::new(inner)), + RefCell::new(index) + ) } def hasbases(&self) -> PyResult { @@ -146,8 +170,9 @@ } def addbases(&self, bases: PyObject) -> PyResult { + let index = self.index(py).borrow(); + let bases_vec: Vec<_> = rev_pyiter_collect(py, &bases, &*index)?; let mut inner = self.inner(py).borrow_mut(); - 
let bases_vec: Vec = rev_pyiter_collect(py, &bases)?; inner.add_bases(bases_vec); // cpython doc has examples with PyResult<()> but this gives me // the trait `cpython::ToPyObject` is not implemented for `()` @@ -155,17 +180,31 @@ Ok(py.None()) } - def bases(&self) -> PyResult> { - Ok(self.inner(py).borrow().get_bases().clone()) + def bases(&self) -> PyResult> { + Ok( + self.inner(py) + .borrow() + .get_bases() + .iter() + .map(|r| PyRevision(r.0)) + .collect() + ) } - def basesheads(&self) -> PyResult> { + def basesheads(&self) -> PyResult> { let inner = self.inner(py).borrow(); - inner.bases_heads().map_err(|e| GraphError::pynew(py, e)) + Ok( + inner + .bases_heads() + .map_err(|e| GraphError::pynew(py, e))? + .into_iter() + .map(|r| PyRevision(r.0)) + .collect() + ) } def removeancestorsfrom(&self, revs: PyObject) -> PyResult { - let mut inner = self.inner(py).borrow_mut(); + let index = self.index(py).borrow(); // this is very lame: we convert to a Rust set, update it in place // and then convert back to Python, only to have Python remove the // excess (thankfully, Python is happy with a list or even an iterator) @@ -174,7 +213,10 @@ // discard // - define a trait for sets of revisions in the core and implement // it for a Python set rewrapped with the GIL marker - let mut revs_pyset: HashSet = rev_pyiter_collect(py, &revs)?; + let mut revs_pyset: HashSet = rev_pyiter_collect( + py, &revs, &*index + )?; + let mut inner = self.inner(py).borrow_mut(); inner.remove_ancestors_from(&mut revs_pyset) .map_err(|e| GraphError::pynew(py, e))?; @@ -182,15 +224,19 @@ let mut remaining_pyint_vec: Vec = Vec::with_capacity( revs_pyset.len()); for rev in revs_pyset { - remaining_pyint_vec.push(rev.to_py_object(py).into_object()); + remaining_pyint_vec.push( + PyRevision(rev.0).to_py_object(py).into_object() + ); } let remaining_pylist = PyList::new(py, remaining_pyint_vec.as_slice()); revs.call_method(py, "intersection_update", (remaining_pylist, ), None) } def missingancestors(&self, revs: PyObject) -> PyResult { + let index = self.index(py).borrow(); + let revs_vec: Vec = rev_pyiter_collect(py, &revs, &*index)?; + let mut inner = self.inner(py).borrow_mut(); - let revs_vec: Vec = rev_pyiter_collect(py, &revs)?; let missing_vec = match inner.missing_ancestors(revs_vec) { Ok(missing) => missing, Err(e) => { @@ -201,7 +247,9 @@ let mut missing_pyint_vec: Vec = Vec::with_capacity( missing_vec.len()); for rev in missing_vec { - missing_pyint_vec.push(rev.to_py_object(py).into_object()); + missing_pyint_vec.push( + PyRevision(rev.0).to_py_object(py).into_object() + ); } Ok(PyList::new(py, missing_pyint_vec.as_slice())) } diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-cpython/src/cindex.rs --- a/rust/hg-cpython/src/cindex.rs Tue Oct 10 18:29:04 2023 +0200 +++ b/rust/hg-cpython/src/cindex.rs Wed Oct 11 02:02:46 2023 +0200 @@ -15,7 +15,7 @@ PyObject, PyResult, PyTuple, Python, PythonObject, }; use hg::revlog::{Node, RevlogIndex}; -use hg::{Graph, GraphError, Revision, WORKING_DIRECTORY_REVISION}; +use hg::{BaseRevision, Graph, GraphError, Revision}; use libc::{c_int, ssize_t}; const REVLOG_CABI_VERSION: c_int = 3; @@ -141,19 +141,16 @@ impl Graph for Index { /// wrap a call to the C extern parents function fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { - if rev == WORKING_DIRECTORY_REVISION { - return Err(GraphError::WorkingDirectoryUnsupported); - } let mut res: [c_int; 2] = [0; 2]; let code = unsafe { (self.capi.index_parents)( self.index.as_ptr(), - rev as c_int, + rev.0 as c_int, 
                 &mut res as *mut [c_int; 2],
             )
         };
         match code {
-            0 => Ok(res),
+            0 => Ok([Revision(res[0]), Revision(res[1])]),
             _ => Err(GraphError::ParentOutOfRange(rev)),
         }
     }
@@ -162,17 +159,18 @@
 impl vcsgraph::graph::Graph for Index {
     fn parents(
         &self,
-        rev: Revision,
+        rev: BaseRevision,
     ) -> Result<vcsgraph::graph::Parents, vcsgraph::graph::GraphReadError> {
-        match Graph::parents(self, rev) {
-            Ok(parents) => Ok(vcsgraph::graph::Parents(parents)),
+        // FIXME This trait should be reworked to decide between Revision
+        // and UncheckedRevision, get better errors names, etc.
+        match Graph::parents(self, Revision(rev)) {
+            Ok(parents) => {
+                Ok(vcsgraph::graph::Parents([parents[0].0, parents[1].0]))
+            }
             Err(GraphError::ParentOutOfRange(rev)) => {
-                Err(vcsgraph::graph::GraphReadError::KeyedInvalidKey(rev))
+                Err(vcsgraph::graph::GraphReadError::KeyedInvalidKey(rev.0))
             }
-            Err(GraphError::WorkingDirectoryUnsupported) => Err(
-                vcsgraph::graph::GraphReadError::WorkingDirectoryUnsupported,
-            ),
         }
     }
 }
@@ -180,7 +178,7 @@
 impl vcsgraph::graph::RankedGraph for Index {
     fn rank(
         &self,
-        rev: Revision,
+        rev: BaseRevision,
     ) -> Result<vcsgraph::graph::Rank, vcsgraph::graph::GraphReadError> {
         match unsafe {
             (self.capi.fast_rank)(self.index.as_ptr(), rev as ssize_t)
@@ -200,7 +198,7 @@
     fn node(&self, rev: Revision) -> Option<&Node> {
         let raw = unsafe {
-            (self.capi.index_node)(self.index.as_ptr(), rev as ssize_t)
+            (self.capi.index_node)(self.index.as_ptr(), rev.0 as ssize_t)
         };
         if raw.is_null() {
             None
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-cpython/src/conversion.rs
--- a/rust/hg-cpython/src/conversion.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-cpython/src/conversion.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -8,8 +8,10 @@
 //! Bindings for the hg::ancestors module provided by the
 //! `hg-core` crate. From Python, this will be seen as `rustext.ancestor`

-use cpython::{ObjectProtocol, PyObject, PyResult, Python};
-use hg::Revision;
+use cpython::{ObjectProtocol, PyErr, PyObject, PyResult, Python};
+use hg::{Revision, RevlogIndex, UncheckedRevision};
+
+use crate::{exceptions::GraphError, PyRevision};

 /// Utility function to convert a Python iterable into various collections
 ///
@@ -17,11 +19,28 @@
 /// with `impl IntoIterator<Item=Revision>` arguments, because
 /// a `PyErr` can arise at each step of iteration, whereas these methods
 /// expect iterables over `Revision`, not over some `Result<Revision, PyErr>`
-pub fn rev_pyiter_collect<C>(py: Python, revs: &PyObject) -> PyResult<C>
+pub fn rev_pyiter_collect<C, I>(
+    py: Python,
+    revs: &PyObject,
+    index: &I,
+) -> PyResult<C>
 where
     C: FromIterator<Revision>,
+    I: RevlogIndex,
 {
     revs.iter(py)?
-        .map(|r| r.and_then(|o| o.extract::<Revision>(py)))
+        .map(|r| {
+            r.and_then(|o| match o.extract::<PyRevision>(py) {
+                Ok(r) => index
+                    .check_revision(UncheckedRevision(r.0))
+                    .ok_or_else(|| {
+                        PyErr::new::<GraphError, _>(
+                            py,
+                            ("InvalidRevision", r.0),
+                        )
+                    }),
+                Err(e) => Err(e),
+            })
+        })
         .collect()
 }
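The reworked `rev_pyiter_collect` is the central guard in this series: every integer received from Python is treated as an `UncheckedRevision` and must pass `check_revision` against an index before it may enter a collection of `Revision`. A self-contained sketch of the same shape, with simplified stand-ins for the index and error types:

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    struct Revision(i32);
    #[derive(Debug, Clone, Copy)]
    struct UncheckedRevision(i32);

    trait RevlogIndex {
        fn len(&self) -> usize;
        // Turn an untrusted revision into a checked one, if in range.
        fn check_revision(&self, rev: UncheckedRevision) -> Option<Revision> {
            let i = rev.0;
            if i >= 0 && (i as usize) < self.len() {
                Some(Revision(i))
            } else {
                None
            }
        }
    }

    struct DummyIndex(usize);
    impl RevlogIndex for DummyIndex {
        fn len(&self) -> usize {
            self.0
        }
    }

    // Same shape as `rev_pyiter_collect`: each incoming integer is checked
    // against the index before it may enter the target collection.
    fn rev_iter_collect<C, I>(
        raw: impl IntoIterator<Item = i32>,
        index: &I,
    ) -> Result<C, String>
    where
        C: FromIterator<Revision>,
        I: RevlogIndex,
    {
        raw.into_iter()
            .map(|r| {
                index
                    .check_revision(UncheckedRevision(r))
                    .ok_or_else(|| format!("InvalidRevision: {}", r))
            })
            .collect()
    }

    fn main() {
        let index = DummyIndex(4);
        let ok: Vec<Revision> = rev_iter_collect(vec![0, 2, 3], &index).unwrap();
        assert_eq!(ok.len(), 3);
        let err: Result<Vec<Revision>, String> =
            rev_iter_collect(vec![0, 9], &index);
        assert!(err.is_err());
    }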
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-cpython/src/copy_tracing.rs
--- a/rust/hg-cpython/src/copy_tracing.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-cpython/src/copy_tracing.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -14,6 +14,7 @@
 use hg::Revision;

 use crate::pybytes_deref::PyBytesDeref;
+use crate::PyRevision;

 /// Combines copies information contained into revision `revs` to build a copy
 /// map.
@@ -23,14 +24,17 @@
     py: Python,
     revs: PyList,
     children_count: PyDict,
-    target_rev: Revision,
+    target_rev: PyRevision,
     rev_info: PyObject,
     multi_thread: bool,
 ) -> PyResult<PyBytes> {
+    let target_rev = Revision(target_rev.0);
     let children_count = children_count
         .items(py)
         .iter()
-        .map(|(k, v)| Ok((k.extract(py)?, v.extract(py)?)))
+        .map(|(k, v)| {
+            Ok((Revision(k.extract::<PyRevision>(py)?.0), v.extract(py)?))
+        })
        .collect::<PyResult<_>>()?;

     /// (Revision number, parent 1, parent 2, copy data for this revision)
@@ -38,11 +42,13 @@
     let revs_info =
         revs.iter(py).map(|rev_py| -> PyResult<RevInfo<PyBytes>> {
-            let rev = rev_py.extract(py)?;
+            let rev = Revision(rev_py.extract::<PyRevision>(py)?.0);
             let tuple: PyTuple =
                 rev_info.call(py, (rev_py,), None)?.cast_into(py)?;
-            let p1 = tuple.get_item(py, 0).extract(py)?;
-            let p2 = tuple.get_item(py, 1).extract(py)?;
+            let p1 =
+                Revision(tuple.get_item(py, 0).extract::<PyRevision>(py)?.0);
+            let p2 =
+                Revision(tuple.get_item(py, 1).extract::<PyRevision>(py)?.0);
             let opt_bytes = tuple.get_item(py, 2).extract(py)?;
             Ok((rev, p1, p2, opt_bytes))
         });
@@ -179,7 +185,7 @@
     combine_changeset_copies_wrapper(
         revs: PyList,
         children: PyDict,
-        target_rev: Revision,
+        target_rev: PyRevision,
         rev_info: PyObject,
         multi_thread: bool
     )
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-cpython/src/dagops.rs
--- a/rust/hg-cpython/src/dagops.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-cpython/src/dagops.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -9,6 +9,7 @@
 //! `hg-core` package.
 //!
 //! From Python, this will be seen as `mercurial.rustext.dagop`
+use crate::PyRevision;
 use crate::{conversion::rev_pyiter_collect, exceptions::GraphError};
 use cpython::{PyDict, PyModule, PyObject, PyResult, Python};
 use hg::dagops;
@@ -26,11 +27,12 @@
     py: Python,
     index: PyObject,
     revs: PyObject,
-) -> PyResult<HashSet<Revision>> {
-    let mut as_set: HashSet<Revision> = rev_pyiter_collect(py, &revs)?;
-    dagops::retain_heads(&pyindex_to_graph(py, index)?, &mut as_set)
+) -> PyResult<HashSet<PyRevision>> {
+    let index = pyindex_to_graph(py, index)?;
+    let mut as_set: HashSet<Revision> = rev_pyiter_collect(py, &revs, &index)?;
+    dagops::retain_heads(&index, &mut as_set)
         .map_err(|e| GraphError::pynew(py, e))?;
-    Ok(as_set)
+    Ok(as_set.into_iter().map(Into::into).collect())
 }

 /// Computes the rank, i.e. the number of ancestors including itself,
@@ -38,10 +40,10 @@
 pub fn rank(
     py: Python,
     index: PyObject,
-    p1r: Revision,
-    p2r: Revision,
+    p1r: PyRevision,
+    p2r: PyRevision,
 ) -> PyResult<Rank> {
-    node_rank(&pyindex_to_graph(py, index)?, &Parents([p1r, p2r]))
+    node_rank(&pyindex_to_graph(py, index)?, &Parents([p1r.0, p2r.0]))
         .map_err(|e| GraphError::pynew_from_vcsgraph(py, e))
 }

@@ -59,7 +61,7 @@
     m.add(
         py,
         "rank",
-        py_fn!(py, rank(index: PyObject, p1r: Revision, p2r: Revision)),
+        py_fn!(py, rank(index: PyObject, p1r: PyRevision, p2r: PyRevision)),
     )?;

     let sys = PyModule::import(py, "sys")?;
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-cpython/src/discovery.rs
--- a/rust/hg-cpython/src/discovery.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-cpython/src/discovery.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -12,12 +12,13 @@
 //! - [`PartialDiscover`] is the Rust implementation of
 //!   `mercurial.setdiscovery.partialdiscovery`.
+use crate::PyRevision;
 use crate::{
     cindex::Index, conversion::rev_pyiter_collect, exceptions::GraphError,
 };
 use cpython::{
-    ObjectProtocol, PyDict, PyModule, PyObject, PyResult, PyTuple, Python,
-    PythonObject, ToPyObject,
+    ObjectProtocol, PyClone, PyDict, PyModule, PyObject, PyResult, PyTuple,
+    Python, PythonObject, ToPyObject,
 };
 use hg::discovery::PartialDiscovery as CorePartialDiscovery;
 use hg::Revision;
@@ -29,6 +30,7 @@

 py_class!(pub class PartialDiscovery |py| {
     data inner: RefCell<Box<CorePartialDiscovery<Index>>>;
+    data index: RefCell<Index>;

     // `_respectsize` is currently only here to replicate the Python API and
     // will be used in future patches inside methods that are yet to be
@@ -41,28 +43,33 @@
         randomize: bool = true
     ) -> PyResult<PartialDiscovery> {
         let index = repo.getattr(py, "changelog")?.getattr(py, "index")?;
+        let index = pyindex_to_graph(py, index)?;
+        let target_heads = rev_pyiter_collect(py, &targetheads, &index)?;
         Self::create_instance(
             py,
             RefCell::new(Box::new(CorePartialDiscovery::new(
-                pyindex_to_graph(py, index)?,
-                rev_pyiter_collect(py, &targetheads)?,
+                index.clone_ref(py),
+                target_heads,
                 respectsize,
                 randomize,
-            )))
+            ))),
+            RefCell::new(index),
         )
     }

     def addcommons(&self, commons: PyObject) -> PyResult<PyObject> {
+        let index = self.index(py).borrow();
+        let commons_vec: Vec<_> = rev_pyiter_collect(py, &commons, &*index)?;
         let mut inner = self.inner(py).borrow_mut();
-        let commons_vec: Vec<Revision> = rev_pyiter_collect(py, &commons)?;
         inner.add_common_revisions(commons_vec)
-            .map_err(|e| GraphError::pynew(py, e))?;
-        Ok(py.None())
-    }
+        .map_err(|e| GraphError::pynew(py, e))?;
+        Ok(py.None())
+}

     def addmissings(&self, missings: PyObject) -> PyResult<PyObject> {
+        let index = self.index(py).borrow();
+        let missings_vec: Vec<_> = rev_pyiter_collect(py, &missings, &*index)?;
         let mut inner = self.inner(py).borrow_mut();
-        let missings_vec: Vec<Revision> = rev_pyiter_collect(py, &missings)?;
         inner.add_missing_revisions(missings_vec)
             .map_err(|e| GraphError::pynew(py, e))?;
         Ok(py.None())
     }
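`addcommons` and `addmissings` above both follow the same two-step shape: borrow the stored index and validate the incoming revisions first, then take the mutable borrow of the core state and apply them. A small standalone sketch of that `RefCell` layout (all names are illustrative stand-ins, not the real bindings):

    use std::cell::RefCell;

    struct Index {
        len: i32,
    }

    struct Discovery {
        common: Vec<i32>,
    }

    // The wrapper keeps two RefCell slots, like the py_class! above: one for
    // the core state, one for the index used to validate incoming revisions.
    struct Wrapper {
        inner: RefCell<Discovery>,
        index: RefCell<Index>,
    }

    impl Wrapper {
        fn add_commons(&self, raw: &[i32]) -> Result<(), String> {
            // Borrow the index first and validate everything...
            let index = self.index.borrow();
            let checked: Vec<i32> = raw
                .iter()
                .copied()
                .map(|r| {
                    if r >= 0 && r < index.len {
                        Ok(r)
                    } else {
                        Err(format!("InvalidRevision {}", r))
                    }
                })
                .collect::<Result<_, _>>()?;
            // ...then take the mutable borrow of the core state.
            let mut inner = self.inner.borrow_mut();
            inner.common.extend(checked);
            Ok(())
        }
    }

    fn main() {
        let w = Wrapper {
            inner: RefCell::new(Discovery { common: vec![] }),
            index: RefCell::new(Index { len: 10 }),
        };
        assert!(w.add_commons(&[1, 2, 3]).is_ok());
        assert_eq!(w.inner.borrow().common, vec![1, 2, 3]);
    }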
@@ -73,7 +80,10 @@
         let mut common: Vec<Revision> = Vec::new();
         for info in sample.iter(py)? {
             // info is a pair (Revision, bool)
             let mut revknown = info?.iter(py)?;
-            let rev: Revision = revknown.next().unwrap()?.extract(py)?;
+            let rev: PyRevision = revknown.next().unwrap()?.extract(py)?;
+            // This is fine since we're just using revisions as integers
+            // for the purposes of discovery
+            let rev = Revision(rev.0);
             let known: bool = revknown.next().unwrap()?.extract(py)?;
             if known {
                 common.push(rev);
@@ -107,9 +117,10 @@
         Ok(as_dict)
     }

-    def commonheads(&self) -> PyResult<HashSet<Revision>> {
-        self.inner(py).borrow().common_heads()
-            .map_err(|e| GraphError::pynew(py, e))
+    def commonheads(&self) -> PyResult<HashSet<PyRevision>> {
+        let res = self.inner(py).borrow().common_heads()
+            .map_err(|e| GraphError::pynew(py, e))?;
+        Ok(res.into_iter().map(Into::into).collect())
     }

     def takefullsample(&self, _headrevs: PyObject,
@@ -119,20 +130,21 @@
             .map_err(|e| GraphError::pynew(py, e))?;
         let as_vec: Vec<PyObject> = sample
             .iter()
-            .map(|rev| rev.to_py_object(py).into_object())
+            .map(|rev| PyRevision(rev.0).to_py_object(py).into_object())
             .collect();
         Ok(PyTuple::new(py, as_vec.as_slice()).into_object())
     }

     def takequicksample(&self, headrevs: PyObject,
                         size: usize) -> PyResult<PyObject> {
+        let index = self.index(py).borrow();
         let mut inner = self.inner(py).borrow_mut();
-        let revsvec: Vec<Revision> = rev_pyiter_collect(py, &headrevs)?;
+        let revsvec: Vec<_> = rev_pyiter_collect(py, &headrevs, &*index)?;
         let sample = inner.take_quick_sample(revsvec, size)
             .map_err(|e| GraphError::pynew(py, e))?;
         let as_vec: Vec<PyObject> = sample
             .iter()
-            .map(|rev| rev.to_py_object(py).into_object())
+            .map(|rev| PyRevision(rev.0).to_py_object(py).into_object())
             .collect();
         Ok(PyTuple::new(py, as_vec.as_slice()).into_object())
     }
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-cpython/src/exceptions.rs
--- a/rust/hg-cpython/src/exceptions.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-cpython/src/exceptions.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -18,22 +18,15 @@
 };
 use hg;

+use crate::PyRevision;
+
 py_exception!(rustext, GraphError, ValueError);

 impl GraphError {
     pub fn pynew(py: Python, inner: hg::GraphError) -> PyErr {
         match inner {
             hg::GraphError::ParentOutOfRange(r) => {
-                GraphError::new(py, ("ParentOutOfRange", r))
-            }
-            hg::GraphError::WorkingDirectoryUnsupported => {
-                match py
-                    .import("mercurial.error")
-                    .and_then(|m| m.get(py, "WdirUnsupported"))
-                {
-                    Err(e) => e,
-                    Ok(cls) => PyErr::from_instance(py, cls),
-                }
+                GraphError::new(py, ("ParentOutOfRange", PyRevision(r.0)))
             }
         }
     }
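With `WorkingDirectoryUnsupported` gone, `GraphError::pynew` maps the one remaining core error onto a Python exception payload, unwrapping the checked revision into its integer form on the way out. A sketch of that mapping with local stand-in types:

    #[derive(Debug)]
    struct Revision(i32);

    #[derive(Debug)]
    enum GraphError {
        ParentOutOfRange(Revision),
    }

    // Stands in for what would become the Python exception arguments.
    fn to_py_payload(e: GraphError) -> (&'static str, i32) {
        match e {
            // Unwrap the checked revision into its raw integer form.
            GraphError::ParentOutOfRange(r) => ("ParentOutOfRange", r.0),
        }
    }

    fn main() {
        let payload = to_py_payload(GraphError::ParentOutOfRange(Revision(12)));
        assert_eq!(payload, ("ParentOutOfRange", 12));
    }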
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-cpython/src/lib.rs
--- a/rust/hg-cpython/src/lib.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-cpython/src/lib.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -24,6 +24,9 @@
 #![allow(clippy::manual_strip)] // rust-cpython macros
 #![allow(clippy::type_complexity)] // rust-cpython macros

+use cpython::{FromPyObject, PyInt, Python, ToPyObject};
+use hg::{BaseRevision, Revision};
+
 /// This crate uses nested private macros, `extern crate` is still needed in
 /// 2018 edition.
 #[macro_use]
@@ -44,6 +47,40 @@
 pub mod revlog;
 pub mod utils;

+/// Revision as exposed to/from the Python layer.
+///
+/// We need this indirection because of the orphan rule, meaning we can't
+/// implement a foreign trait (like [`cpython::ToPyObject`])
+/// for a foreign type (like [`hg::UncheckedRevision`]).
+///
+/// This also acts as a deterrent against blindly trusting Python to send
+/// us valid revision numbers.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct PyRevision(BaseRevision);
+
+impl From<Revision> for PyRevision {
+    fn from(r: Revision) -> Self {
+        PyRevision(r.0)
+    }
+}
+
+impl<'s> FromPyObject<'s> for PyRevision {
+    fn extract(
+        py: Python,
+        obj: &'s cpython::PyObject,
+    ) -> cpython::PyResult<Self> {
+        Ok(Self(obj.extract::<BaseRevision>(py)?))
+    }
+}
+
+impl ToPyObject for PyRevision {
+    type ObjectType = PyInt;
+
+    fn to_py_object(&self, py: Python) -> Self::ObjectType {
+        self.0.to_py_object(py)
+    }
+}
+
 py_module_initializer!(rustext, initrustext, PyInit_rustext, |py, m| {
     m.add(
         py,
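The `PyRevision` newtype added above exists because of Rust's orphan rule: `ToPyObject` is a foreign trait and `Revision` a foreign type, so neither crate can host the impl directly, but a crate-local wrapper can. A compact illustration of the rule, using a stand-in trait instead of the real `cpython::ToPyObject`:

    // Stand-in for `cpython::ToPyObject` (a trait we would not own).
    trait ToPyObjectLike {
        fn to_py(&self) -> i64;
    }

    mod foreign {
        // Stand-in for the `hg-core` type we also would not own.
        pub struct Revision(pub i32);
    }

    // Local newtype: implementing the foreign-like trait here is allowed.
    struct PyRevision(i32);

    impl From<foreign::Revision> for PyRevision {
        fn from(r: foreign::Revision) -> Self {
            PyRevision(r.0)
        }
    }

    impl ToPyObjectLike for PyRevision {
        fn to_py(&self) -> i64 {
            self.0 as i64
        }
    }

    fn main() {
        let py_rev: PyRevision = foreign::Revision(42).into();
        assert_eq!(py_rev.to_py(), 42);
    }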
diff -r 704c3d0878d9 -r 12c308c55e53 rust/hg-cpython/src/revlog.rs
--- a/rust/hg-cpython/src/revlog.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/hg-cpython/src/revlog.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -8,6 +8,7 @@
 use crate::{
     cindex,
     utils::{node_from_py_bytes, node_from_py_object},
+    PyRevision,
 };
 use cpython::{
     buffer::{Element, PyBuffer},
@@ -18,7 +19,7 @@
 use hg::{
     nodemap::{Block, NodeMapError, NodeTree},
     revlog::{nodemap::NodeMap, NodePrefix, RevlogIndex},
-    Revision,
+    BaseRevision, Revision, UncheckedRevision,
 };
 use std::cell::RefCell;

@@ -59,12 +60,13 @@
     /// Return Revision if found, raises a bare `error.RevlogError`
     /// in case of ambiguity, same as C version does
-    def get_rev(&self, node: PyBytes) -> PyResult<Option<Revision>> {
+    def get_rev(&self, node: PyBytes) -> PyResult<Option<PyRevision>> {
         let opt = self.get_nodetree(py)?.borrow();
         let nt = opt.as_ref().unwrap();
         let idx = &*self.cindex(py).borrow();
         let node = node_from_py_bytes(py, &node)?;
-        nt.find_bin(idx, node.into()).map_err(|e| nodemap_error(py, e))
+        let res = nt.find_bin(idx, node.into());
+        Ok(res.map_err(|e| nodemap_error(py, e))?.map(Into::into))
     }

     /// same as `get_rev()` but raises a bare `error.RevlogError` if node
@@ -72,7 +74,7 @@
     ///
     /// No need to repeat `node` in the exception, `mercurial/revlog.py`
     /// will catch and rewrap with it
-    def rev(&self, node: PyBytes) -> PyResult<Revision> {
+    def rev(&self, node: PyBytes) -> PyResult<PyRevision> {
         self.get_rev(py, node)?.ok_or_else(|| revlog_error(py))
     }

@@ -131,9 +133,11 @@
         let node = node_from_py_object(py, &node_bytes)?;

         let mut idx = self.cindex(py).borrow_mut();
-        let rev = idx.len() as Revision;

+        // This is ok since we will just add the revision to the index
+        let rev = Revision(idx.len() as BaseRevision);
         idx.append(py, tup)?;
+
         self.get_nodetree(py)?.borrow_mut().as_mut().unwrap()
             .insert(&*idx, &node, rev)
             .map_err(|e| nodemap_error(py, e))?;
@@ -252,7 +256,7 @@
         // Note that we don't seem to have a direct way to call
         // PySequence_GetItem (does the job), which would possibly be better
         // for performance
-        let key = match key.extract::<Revision>(py) {
+        let key = match key.extract::<BaseRevision>(py) {
             Ok(rev) => rev.to_py_object(py).into_object(),
             Err(_) => key,
         };
@@ -268,9 +272,9 @@
         // this is an equivalent implementation of the index_contains()
         // defined in revlog.c
         let cindex = self.cindex(py).borrow();
-        match item.extract::<Revision>(py) {
+        match item.extract::<BaseRevision>(py) {
             Ok(rev) => {
-                Ok(rev >= -1 && rev < cindex.inner().len(py)? as Revision)
+                Ok(rev >= -1 && rev < cindex.inner().len(py)? as BaseRevision)
             }
             Err(_) => {
                 cindex.inner().call_method(
@@ -331,7 +335,7 @@
     ) -> PyResult<PyObject> {
         let index = self.cindex(py).borrow();
         for r in 0..index.len() {
-            let rev = r as Revision;
+            let rev = Revision(r as BaseRevision);
             // in this case node() won't ever return None
             nt.insert(&*index, index.node(rev).unwrap(), rev)
                 .map_err(|e| nodemap_error(py, e))?
@@ -447,14 +451,19 @@
         let mut nt = NodeTree::load_bytes(Box::new(bytes), len);

-        let data_tip =
-            docket.getattr(py, "tip_rev")?.extract::<Revision>(py)?;
+        let data_tip = docket
+            .getattr(py, "tip_rev")?
+            .extract::<BaseRevision>(py)?
+            .into();
         self.docket(py).borrow_mut().replace(docket.clone_ref(py));
         let idx = self.cindex(py).borrow();
+        let data_tip = idx.check_revision(data_tip).ok_or_else(|| {
+            nodemap_error(py, NodeMapError::RevisionNotInIndex(data_tip))
+        })?;
         let current_tip = idx.len();

-        for r in (data_tip + 1)..current_tip as Revision {
-            let rev = r as Revision;
+        for r in (data_tip.0 + 1)..current_tip as BaseRevision {
+            let rev = Revision(r);
             // in this case node() won't ever return None
             nt.insert(&*idx, idx.node(rev).unwrap(), rev)
                 .map_err(|e| nodemap_error(py, e))?
@@ -479,7 +488,7 @@
     }
 }

-fn rev_not_in_index(py: Python, rev: Revision) -> PyErr {
+fn rev_not_in_index(py: Python, rev: UncheckedRevision) -> PyErr {
     PyErr::new::<ValueError, _>(
         py,
         format!(
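The nodemap hunk above shows the new discipline well: `tip_rev` read from the persisted docket is only an `UncheckedRevision` until `check_revision` vouches for it, and only then is the re-indexing range computed. A standalone sketch of that validation step (types are local stand-ins for the hg-core ones):

    #[derive(Debug, Clone, Copy)]
    struct UncheckedRevision(i32);
    #[derive(Debug, Clone, Copy)]
    struct Revision(i32);

    // A revision number from disk is untrusted until checked against the
    // live index.
    fn check_revision(index_len: i32, rev: UncheckedRevision) -> Option<Revision> {
        (rev.0 >= 0 && rev.0 < index_len).then(|| Revision(rev.0))
    }

    fn main() {
        let index_len = 100;
        // Pretend this came from the persisted nodemap docket.
        let data_tip = UncheckedRevision(57);
        let data_tip = check_revision(index_len, data_tip)
            .expect("persisted tip should be in the index");
        // Only the revisions after the persisted tip need re-indexing.
        let to_reindex: Vec<Revision> =
            ((data_tip.0 + 1)..index_len).map(Revision).collect();
        assert_eq!(to_reindex.len(), 42);
    }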
-                        .unwrap_or(DEFAULT_DATE_FORMAT),
+                        .map(|f| {
+                            if f.is_empty() {
+                                DEFAULT_DATE_FORMAT
+                            } else {
+                                f
+                            }
+                        })
+                        .expect(
+                            "blackbox.date-format should have a default value",
+                        ),
                 })
             }
         } else {
diff -r 704c3d0878d9 -r 12c308c55e53 rust/rhg/src/commands/cat.rs
--- a/rust/rhg/src/commands/cat.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/rhg/src/commands/cat.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -32,9 +32,8 @@

 #[logging_timer::time("trace")]
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
-    let cat_enabled_default = true;
-    let cat_enabled = invocation.config.get_option(b"rhg", b"cat")?;
-    if !cat_enabled.unwrap_or(cat_enabled_default) {
+    let cat_enabled = invocation.config.get_bool(b"rhg", b"cat")?;
+    if !cat_enabled {
         return Err(CommandError::unsupported(
             "cat is disabled in rhg (enable it with 'rhg.cat = true' \
             or enable fallback with 'rhg.on-unsupported = fallback')",
diff -r 704c3d0878d9 -r 12c308c55e53 rust/rhg/src/commands/files.rs
--- a/rust/rhg/src/commands/files.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/rhg/src/commands/files.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -4,9 +4,12 @@
 };
 use crate::utils::path_utils::RelativizePaths;
 use clap::Arg;
+use hg::filepatterns::parse_pattern_args;
+use hg::matchers::IntersectionMatcher;
 use hg::narrow;
 use hg::operations::list_rev_tracked_files;
 use hg::repo::Repo;
+use hg::utils::files::get_bytes_from_os_str;
 use hg::utils::filter_map_results;
 use hg::utils::hg_path::HgPath;
 use rayon::prelude::*;
@@ -26,6 +29,12 @@
                 .long("revision")
                 .value_name("REV"),
         )
+        .arg(
+            Arg::new("file")
+                .value_parser(clap::value_parser!(std::ffi::OsString))
+                .help("show only these files")
+                .action(clap::ArgAction::Append),
+        )
         .about(HELP_TEXT)
 }

@@ -35,7 +44,8 @@
         RelativePaths::Bool(v) => v,
     };

-    let rev = invocation.subcommand_args.get_one::<String>("rev");
+    let args = invocation.subcommand_args;
+    let rev = args.get_one::<String>("rev");

     let repo = invocation.repo?;

@@ -51,11 +61,34 @@
         ));
     }

-    let (narrow_matcher, narrow_warnings) = narrow::matcher(repo)?;
+    let (matcher, narrow_warnings) = narrow::matcher(repo)?;
     print_narrow_sparse_warnings(&narrow_warnings, &[], invocation.ui, repo)?;
+    let matcher = match args.get_many::<std::ffi::OsString>("file") {
+        None => matcher,
+        Some(files) => {
+            let patterns: Vec<Vec<u8>> = files
+                .filter(|s| !s.is_empty())
+                .map(get_bytes_from_os_str)
+                .collect();
+            for file in &patterns {
+                if file.starts_with(b"set:") {
+                    return Err(CommandError::unsupported("fileset"));
+                }
+            }
+            let cwd = hg::utils::current_dir()?;
+            let root = repo.working_directory_path();
+            let ignore_patterns = parse_pattern_args(patterns, &cwd, root)?;
+            let files_matcher =
+                hg::matchers::PatternMatcher::new(ignore_patterns)?;
+            Box::new(IntersectionMatcher::new(
+                Box::new(files_matcher),
+                matcher,
+            ))
+        }
+    };

     if let Some(rev) = rev {
-        let files = list_rev_tracked_files(repo, rev, narrow_matcher)
+        let files = list_rev_tracked_files(repo, rev, matcher)
             .map_err(|e| (e, rev.as_ref()))?;
         display_files(invocation.ui, repo, relative_paths, files.iter())
     } else {
@@ -63,7 +96,7 @@
         let dirstate = repo.dirstate_map()?;
         let files_res: Result<Vec<_>, _> =
             filter_map_results(dirstate.iter(), |(path, entry)| {
-                Ok(if entry.tracked() && narrow_matcher.matches(path) {
+                Ok(if entry.tracked() && matcher.matches(path) {
                     Some(path)
                 } else {
                     None
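Both `rhg files` and `rhg status` (next diff) gain a positional `FILE` argument by intersecting the existing narrow/sparse matcher with one built from the command-line patterns: a path is kept only if both accept it. A toy sketch of the intersection idea, with hypothetical prefix/suffix matchers standing in for the real pattern machinery:

    trait Matcher {
        fn matches(&self, path: &str) -> bool;
    }

    // A path must satisfy both inner matchers.
    struct IntersectionMatcher<A, B> {
        m1: A,
        m2: B,
    }

    impl<A: Matcher, B: Matcher> Matcher for IntersectionMatcher<A, B> {
        fn matches(&self, path: &str) -> bool {
            self.m1.matches(path) && self.m2.matches(path)
        }
    }

    struct Prefix(&'static str);
    impl Matcher for Prefix {
        fn matches(&self, path: &str) -> bool {
            path.starts_with(self.0)
        }
    }

    struct Suffix(&'static str);
    impl Matcher for Suffix {
        fn matches(&self, path: &str) -> bool {
            path.ends_with(self.0)
        }
    }

    fn main() {
        // "narrow" config keeps us under rust/, the CLI pattern asks for *.rs
        let m = IntersectionMatcher { m1: Prefix("rust/"), m2: Suffix(".rs") };
        assert!(m.matches("rust/rhg/src/main.rs"));
        assert!(!m.matches("rust/README.md"));
        assert!(!m.matches("mercurial/util.py"));
    }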
diff -r 704c3d0878d9 -r 12c308c55e53 rust/rhg/src/commands/status.rs
--- a/rust/rhg/src/commands/status.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/rhg/src/commands/status.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -7,7 +7,8 @@
 use crate::error::CommandError;
 use crate::ui::{
-    format_pattern_file_warning, print_narrow_sparse_warnings, Ui,
+    format_pattern_file_warning, print_narrow_sparse_warnings, relative_paths,
+    RelativePaths, Ui,
 };
 use crate::utils::path_utils::RelativizePaths;
 use clap::Arg;
@@ -17,13 +18,15 @@
 use hg::dirstate::status::StatusPath;
 use hg::dirstate::TruncatedTimestamp;
 use hg::errors::{HgError, IoResultExt};
+use hg::filepatterns::parse_pattern_args;
 use hg::lock::LockError;
 use hg::manifest::Manifest;
 use hg::matchers::{AlwaysMatcher, IntersectionMatcher};
 use hg::repo::Repo;
 use hg::utils::debug::debug_wait_for_file;
-use hg::utils::files::get_bytes_from_os_string;
-use hg::utils::files::get_path_from_bytes;
+use hg::utils::files::{
+    get_bytes_from_os_str, get_bytes_from_os_string, get_path_from_bytes,
+};
 use hg::utils::hg_path::{hg_path_to_path_buf, HgPath};
 use hg::DirstateStatus;
 use hg::PatternFileWarning;
@@ -48,6 +51,12 @@
         .alias("st")
         .about(HELP_TEXT)
         .arg(
+            Arg::new("file")
+                .value_parser(clap::value_parser!(std::ffi::OsString))
+                .help("show only these files")
+                .action(clap::ArgAction::Append),
+        )
+        .arg(
             Arg::new("all")
                 .help("show status of all files")
                 .short('A')
@@ -360,13 +369,24 @@
             }
         }
     }
-    let relative_paths = config
+
+    let relative_status = config
         .get_option(b"commands", b"status.relative")?
-        .unwrap_or(config.get_bool(b"ui", b"relative-paths")?);
+        .expect("commands.status.relative should have a default value");
+
+    let relativize_paths = relative_status || {
+        // See in Python code with `getuipathfn` usage in `commands.py`.
+        let legacy_relative_behavior = args.contains_id("file");
+        match relative_paths(invocation.config)? {
+            RelativePaths::Legacy => legacy_relative_behavior,
+            RelativePaths::Bool(v) => v,
+        }
+    };
+
     let output = DisplayStatusPaths {
         ui,
         no_status,
-        relativize: if relative_paths {
+        relativize: if relativize_paths {
             Some(RelativizePaths::new(repo)?)
         } else {
             None
@@ -415,6 +435,29 @@
         (false, true) => sparse_matcher,
         (false, false) => Box::new(AlwaysMatcher),
     };
+    let matcher = match args.get_many::<std::ffi::OsString>("file") {
+        None => matcher,
+        Some(files) => {
+            let patterns: Vec<Vec<u8>> = files
+                .filter(|s| !s.is_empty())
+                .map(get_bytes_from_os_str)
+                .collect();
+            for file in &patterns {
+                if file.starts_with(b"set:") {
+                    return Err(CommandError::unsupported("fileset"));
+                }
+            }
+            let cwd = hg::utils::current_dir()?;
+            let root = repo.working_directory_path();
+            let ignore_patterns = parse_pattern_args(patterns, &cwd, root)?;
+            let files_matcher =
+                hg::matchers::PatternMatcher::new(ignore_patterns)?;
+            Box::new(IntersectionMatcher::new(
+                Box::new(files_matcher),
+                matcher,
+            ))
+        }
+    };

     print_narrow_sparse_warnings(
         &narrow_warnings,
diff -r 704c3d0878d9 -r 12c308c55e53 rust/rhg/src/error.rs
--- a/rust/rhg/src/error.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/rhg/src/error.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -10,7 +10,8 @@
 use hg::revlog::RevlogError;
 use hg::sparse::SparseConfigError;
 use hg::utils::files::get_bytes_from_path;
-use hg::{DirstateError, DirstateMapError, StatusError};
+use hg::utils::hg_path::HgPathError;
+use hg::{DirstateError, DirstateMapError, PatternError, StatusError};
 use std::convert::From;

 /// The kind of command error
@@ -230,6 +231,18 @@
     }
 }

+impl From<HgPathError> for CommandError {
+    fn from(error: HgPathError) -> Self {
+        CommandError::unsupported(format!("{}", error))
+    }
+}
+
+impl From<PatternError> for CommandError {
+    fn from(error: PatternError) -> Self {
+        CommandError::unsupported(format!("{}", error))
+    }
+}
+
 impl From<DirstateMapError> for CommandError {
     fn from(error: DirstateMapError) -> Self {
         CommandError::abort(format!("{}", error))
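The two new `From` impls let `?` funnel `HgPathError` and `PatternError` straight into `CommandError::unsupported`, which is what keeps the matcher-building code in files.rs and status.rs linear. A minimal sketch of that conversion chain with stand-in types:

    #[derive(Debug)]
    struct PatternError(String);

    #[derive(Debug)]
    enum CommandError {
        Unsupported(String),
    }

    // The From impl is what makes `?` work across error types.
    impl From<PatternError> for CommandError {
        fn from(e: PatternError) -> Self {
            CommandError::Unsupported(format!("pattern error: {}", e.0))
        }
    }

    fn parse_patterns(raw: &str) -> Result<Vec<String>, PatternError> {
        if raw.starts_with("set:") {
            return Err(PatternError("filesets are not supported".into()));
        }
        Ok(raw.split(',').map(str::to_owned).collect())
    }

    fn run(raw: &str) -> Result<(), CommandError> {
        let _patterns = parse_patterns(raw)?; // PatternError -> CommandError
        Ok(())
    }

    fn main() {
        assert!(run("src/a.rs,src/b.rs").is_ok());
        assert!(matches!(run("set:added()"), Err(CommandError::Unsupported(_))));
    }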
diff -r 704c3d0878d9 -r 12c308c55e53 rust/rhg/src/main.rs
--- a/rust/rhg/src/main.rs	Tue Oct 10 18:29:04 2023 +0200
+++ b/rust/rhg/src/main.rs	Wed Oct 11 02:02:46 2023 +0200
@@ -76,17 +76,23 @@

     // Mercurial allows users to define "defaults" for commands, fallback
     // if a default is detected for the current command
-    let defaults = config.get_str(b"defaults", subcommand_name.as_bytes());
-    if defaults?.is_some() {
-        let msg = "`defaults` config set";
-        return Err(CommandError::unsupported(msg));
+    let defaults = config.get_str(b"defaults", subcommand_name.as_bytes())?;
+    match defaults {
+        // Programmatic usage might set defaults to an empty string to unset
+        // it; allow that
+        None | Some("") => {}
+        Some(_) => {
+            let msg = "`defaults` config set";
+            return Err(CommandError::unsupported(msg));
+        }
     }

     for prefix in ["pre", "post", "fail"].iter() {
         // Mercurial allows users to define generic hooks for commands,
         // fallback if any are detected
         let item = format!("{}-{}", prefix, subcommand_name);
-        let hook_for_command = config.get_str(b"hooks", item.as_bytes())?;
+        let hook_for_command =
+            config.get_str_no_default(b"hooks", item.as_bytes())?;
         if hook_for_command.is_some() {
             let msg = format!("{}-{} hook defined", prefix, subcommand_name);
             return Err(CommandError::unsupported(msg));
         }
@@ -349,11 +355,7 @@
         &argv,
         &initial_current_dir,
         &ui,
-        OnUnsupported::Fallback {
-            executable: config
-                .get(b"rhg", b"fallback-executable")
-                .map(ToOwned::to_owned),
-        },
+        OnUnsupported::fallback(config),
         Err(CommandError::unsupported(
             "`rhg.fallback-immediately is true`",
         )),
@@ -662,6 +664,18 @@
 impl OnUnsupported {
     const DEFAULT: Self = OnUnsupported::Abort;

+    fn fallback_executable(config: &Config) -> Option<Vec<u8>> {
+        config
+            .get(b"rhg", b"fallback-executable")
+            .map(|x| x.to_owned())
+    }
+
+    fn fallback(config: &Config) -> Self {
+        OnUnsupported::Fallback {
+            executable: Self::fallback_executable(config),
+        }
+    }
+
     fn from_config(config: &Config) -> Self {
         match config
             .get(b"rhg", b"on-unsupported")
@@ -670,11 +684,7 @@
         {
             Some(b"abort") => OnUnsupported::Abort,
             Some(b"abort-silent") => OnUnsupported::AbortSilent,
-            Some(b"fallback") => OnUnsupported::Fallback {
-                executable: config
-                    .get(b"rhg", b"fallback-executable")
-                    .map(|x| x.to_owned()),
-            },
+            Some(b"fallback") => Self::fallback(config),
             None => Self::DEFAULT,
             Some(_) => {
                 // TODO: warn about unknown config value
diff -r 704c3d0878d9 -r 12c308c55e53 setup.py
--- a/setup.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/setup.py	Wed Oct 11 02:02:46 2023 +0200
@@ -1309,6 +1309,7 @@
 packages = [
     'mercurial',
+    'mercurial.admin',
     'mercurial.cext',
     'mercurial.cffi',
     'mercurial.defaultrc',
@@ -1322,6 +1323,7 @@
     'mercurial.templates',
     'mercurial.thirdparty',
     'mercurial.thirdparty.attr',
+    'mercurial.thirdparty.tomli',
     'mercurial.thirdparty.zope',
     'mercurial.thirdparty.zope.interface',
     'mercurial.upgrade_utils',
@@ -1336,7 +1338,6 @@
     'hgext.git',
     'hgext.highlight',
     'hgext.hooklib',
-    'hgext.infinitepush',
     'hgext.largefiles',
     'hgext.lfs',
     'hgext.narrow',
@@ -1659,6 +1660,7 @@
 packagedata = {
     'mercurial': [
+        'configitems.toml',
         'locale/*/LC_MESSAGES/hg.mo',
         'dummycert.pem',
     ],
diff -r 704c3d0878d9 -r 12c308c55e53 tests/library-infinitepush.sh
--- a/tests/library-infinitepush.sh	Tue Oct 10 18:29:04 2023 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,33 +0,0 @@
-scratchnodes() {
-  for node in `find ../repo/.hg/scratchbranches/index/nodemap/* | sort`; do
-    echo ${node##*/} `cat $node`
-  done
-}
-
-scratchbookmarks() {
-  for bookmark in `find ../repo/.hg/scratchbranches/index/bookmarkmap/* -type f | sort`; do
-    echo "${bookmark##*/bookmarkmap/} `cat $bookmark`"
-  done
-}
-
-setupcommon() {
-  cat >> $HGRCPATH << EOF
-[extensions]
-infinitepush=
-[infinitepush]
-branchpattern=re:scratch/.*
-deprecation-abort=no
-deprecation-message=yes
-
-EOF
-}
-
-setupserver() {
-cat >> .hg/hgrc << EOF
-[infinitepush]
-server=yes
-indextype=disk
-storetype=disk
-reponame=babar
-EOF
-}
diff -r 704c3d0878d9 -r 12c308c55e53 tests/run-tests.py
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-admin-commands.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-admin-commands.py	Wed Oct 11 02:02:46 2023 +0200
@@ -0,0 +1,399 @@
+# Test admin commands
+
+import functools
+import unittest
+from mercurial.i18n import _
+from mercurial import error, ui as uimod
+from mercurial import registrar
+from mercurial.admin import verify
+
+
+class TestAdminVerifyFindChecks(unittest.TestCase):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.ui = uimod.ui.load()
+        self.repo = b"fake-repo"
+
+        def cleanup_table(self):
+            self.table = {}
+            self.alias_table = {}
+            self.pyramid = {}
+
+        self.addCleanup(cleanup_table, self)
+
+    def setUp(self):
+        self.table = {}
+        self.alias_table = {}
+        self.pyramid = {}
+        check = registrar.verify_check(self.table, self.alias_table)
+
+        # mock some fake check method for tests purpose
+        @check(
+            b"test.dummy",
+            alias=b"dummy",
+            options=[],
+        )
+        def check_dummy(ui, repo, **options):
+            return options
+
+        @check(
+            b"test.fake",
+            alias=b"fake",
+            options=[
+                (b'a', False, _(b'a boolean value (default: False)')),
+                (b'b', True, _(b'a boolean value (default: True)')),
+                (b'c', [], _(b'a list')),
+            ],
+        )
+        def check_fake(ui, repo, **options):
+            return options
+
+        # alias in the middle of a hierarchy
+        check(
+            b"test.noop",
alias=b"noop", + options=[], + )(verify.noop_func) + + @check( + b"test.noop.deeper", + alias=b"deeper", + options=[ + (b'y', True, _(b'a boolean value (default: True)')), + (b'z', [], _(b'a list')), + ], + ) + def check_noop_deeper(ui, repo, **options): + return options + + # args wrapper utilities + def find_checks(self, name): + return verify.find_checks( + name=name, + table=self.table, + alias_table=self.alias_table, + full_pyramid=self.pyramid, + ) + + def pass_options(self, checks, options): + return verify.pass_options( + self.ui, + checks, + options, + table=self.table, + alias_table=self.alias_table, + full_pyramid=self.pyramid, + ) + + def get_checks(self, names, options): + return verify.get_checks( + self.repo, + self.ui, + names=names, + options=options, + table=self.table, + alias_table=self.alias_table, + full_pyramid=self.pyramid, + ) + + # tests find_checks + def test_find_checks_empty_name(self): + with self.assertRaises(error.InputError): + self.find_checks(name=b"") + + def test_find_checks_wrong_name(self): + with self.assertRaises(error.InputError): + self.find_checks(name=b"unknown") + + def test_find_checks_dummy(self): + name = b"test.dummy" + found = self.find_checks(name=name) + self.assertEqual(len(found), 1) + self.assertIn(name, found) + meth = found[name] + self.assertTrue(callable(meth)) + self.assertEqual(len(meth.options), 0) + + def test_find_checks_fake(self): + name = b"test.fake" + found = self.find_checks(name=name) + self.assertEqual(len(found), 1) + self.assertIn(name, found) + meth = found[name] + self.assertTrue(callable(meth)) + self.assertEqual(len(meth.options), 3) + + def test_find_checks_noop(self): + name = b"test.noop.deeper" + found = self.find_checks(name=name) + self.assertEqual(len(found), 1) + self.assertIn(name, found) + meth = found[name] + self.assertTrue(callable(meth)) + self.assertEqual(len(meth.options), 2) + + def test_find_checks_from_aliases(self): + found = self.find_checks(name=b"dummy") + self.assertEqual(len(found), 1) + self.assertIn(b"test.dummy", found) + + found = self.find_checks(name=b"fake") + self.assertEqual(len(found), 1) + self.assertIn(b"test.fake", found) + + found = self.find_checks(name=b"deeper") + self.assertEqual(len(found), 1) + self.assertIn(b"test.noop.deeper", found) + + def test_find_checks_from_root(self): + found = self.find_checks(name=b"test") + self.assertEqual(len(found), 3) + self.assertIn(b"test.dummy", found) + self.assertIn(b"test.fake", found) + self.assertIn(b"test.noop.deeper", found) + + def test_find_checks_from_intermediate(self): + found = self.find_checks(name=b"test.noop") + self.assertEqual(len(found), 1) + self.assertIn(b"test.noop.deeper", found) + + def test_find_checks_from_parent_dot_name(self): + found = self.find_checks(name=b"noop.deeper") + self.assertEqual(len(found), 1) + self.assertIn(b"test.noop.deeper", found) + + # tests pass_options + def test_pass_options_no_checks_no_options(self): + checks = {} + options = [] + + with self.assertRaises(error.Error): + self.pass_options(checks=checks, options=options) + + def test_pass_options_fake_empty_options(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + options = [] + # should end with default options + expected_options = {"a": False, "b": True, "c": []} + func = self.pass_options(checks=funcs, options=options) + + self.assertDictEqual(func[b"test.fake"].keywords, expected_options) + + def 
test_pass_options_fake_non_existing_options(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + + with self.assertRaises(error.InputError): + options = [b"test.fake:boom=yes"] + self.pass_options(checks=funcs, options=options) + + def test_pass_options_fake_unrelated_options(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + options = [b"test.noop.deeper:y=yes"] + + with self.assertRaises(error.InputError): + self.pass_options(checks=funcs, options=options) + + def test_pass_options_fake_set_option(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + options = [b"test.fake:a=yes"] + expected_options = {"a": True, "b": True, "c": []} + func = self.pass_options(checks=funcs, options=options) + + self.assertDictEqual(func[b"test.fake"].keywords, expected_options) + + def test_pass_options_fake_set_option_with_alias(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + options = [b"fake:a=yes"] + expected_options = {"a": True, "b": True, "c": []} + func = self.pass_options(checks=funcs, options=options) + + self.assertDictEqual(func[b"test.fake"].keywords, expected_options) + + def test_pass_options_fake_set_all_option(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + options = [b"test.fake:a=yes", b"test.fake:b=no", b"test.fake:c=0,1,2"] + expected_options = {"a": True, "b": False, "c": [b"0", b"1", b"2"]} + func = self.pass_options(checks=funcs, options=options) + + self.assertDictEqual(func[b"test.fake"].keywords, expected_options) + + def test_pass_options_fake_set_all_option_plus_unexisting(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + options = [ + b"test.fake:a=yes", + b"test.fake:b=no", + b"test.fake:c=0,1,2", + b"test.fake:d=0", + ] + + with self.assertRaises(error.InputError): + self.pass_options(checks=funcs, options=options) + + def test_pass_options_fake_duplicate_option(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + options = [ + b"test.fake:a=yes", + b"test.fake:a=no", + ] + + with self.assertRaises(error.InputError): + self.pass_options(checks=funcs, options=options) + + def test_pass_options_fake_set_malformed_option(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + options = [ + b"test.fake:ayes", + b"test.fake:b==no", + b"test.fake=", + b"test.fake:", + b"test.fa=ke:d=0", + b"test.fa=ke:d=0", + ] + + for opt in options: + with self.assertRaises(error.InputError): + self.pass_options(checks=funcs, options=[opt]) + + def test_pass_options_types(self): + checks = self.find_checks(name=b"test.fake") + funcs = { + n: functools.partial(f, self.ui, self.repo) + for n, f in checks.items() + } + # boolean, yes/no + options = [b"test.fake:a=yes", b"test.fake:b=no"] + expected_options = {"a": True, "b": False, "c": []} + func = self.pass_options(checks=funcs, options=options) + + 
self.assertDictEqual(func[b"test.fake"].keywords, expected_options) + + # boolean, 0/1 + options = [b"test.fake:a=1", b"test.fake:b=0"] + expected_options = {"a": True, "b": False, "c": []} + func = self.pass_options(checks=funcs, options=options) + + self.assertDictEqual(func[b"test.fake"].keywords, expected_options) + + # boolean, true/false + options = [b"test.fake:a=true", b"test.fake:b=false"] + expected_options = {"a": True, "b": False, "c": []} + func = self.pass_options(checks=funcs, options=options) + + self.assertDictEqual(func[b"test.fake"].keywords, expected_options) + + # boolean, wrong type + options = [b"test.fake:a=si"] + with self.assertRaises(error.InputError): + self.pass_options(checks=funcs, options=options) + + # lists + options = [b"test.fake:c=0,1,2"] + expected_options = {"a": False, "b": True, "c": [b"0", b"1", b"2"]} + func = self.pass_options(checks=funcs, options=options) + + self.assertDictEqual(func[b"test.fake"].keywords, expected_options) + + options = [b"test.fake:c=x,y,z"] + expected_options = {"a": False, "b": True, "c": [b"x", b"y", b"z"]} + func = self.pass_options(checks=funcs, options=options) + + self.assertDictEqual(func[b"test.fake"].keywords, expected_options) + + # tests get_checks + def test_get_checks_fake(self): + funcs = self.get_checks( + names=[b"test.fake"], options=[b"test.fake:a=yes"] + ) + options = funcs.get(b"test.fake").keywords + expected_options = {"a": True, "b": True, "c": []} + self.assertDictEqual(options, expected_options) + + def test_get_checks_multiple_mixed_with_defaults(self): + funcs = self.get_checks( + names=[b"test.fake", b"test.noop.deeper", b"test.dummy"], + options=[ + b"test.noop.deeper:y=no", + b"test.noop.deeper:z=-1,0,1", + ], + ) + options = funcs.get(b"test.fake").keywords + expected_options = {"a": False, "b": True, "c": []} + self.assertDictEqual(options, expected_options) + + options = funcs.get(b"test.noop.deeper").keywords + expected_options = {"y": False, "z": [b"-1", b"0", b"1"]} + self.assertDictEqual(options, expected_options) + + options = funcs.get(b"test.dummy").keywords + expected_options = {} + self.assertDictEqual(options, expected_options) + + def test_broken_pyramid(self): + """Check that we detect pyramids that can't resolve""" + table = {} + alias_table = {} + pyramid = {} + check = registrar.verify_check(table, alias_table) + + # Create two checks that clash + @check(b"test.wrong.intermediate") + def check_dummy(ui, repo, **options): + return options + + @check(b"test.wrong.intermediate.thing") + def check_fake(ui, repo, **options): + return options + + with self.assertRaises(error.ProgrammingError) as e: + verify.get_checks( + self.repo, + self.ui, + names=[b"test.wrong.intermediate"], + options=[], + table=table, + alias_table=alias_table, + full_pyramid=pyramid, + ) + assert "`verify.noop_func`" in str(e.exception), str(e.exception) + + +if __name__ == '__main__': + import silenttestrunner + + silenttestrunner.main(__name__) diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-admin-commands.t --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-admin-commands.t Wed Oct 11 02:02:46 2023 +0200 @@ -0,0 +1,49 @@ +Test admin::verify + + $ hg init admin-verify + $ cd admin-verify + +Test normal output + + $ hg admin::verify -c dirstate + running 1 checks + running working-copy.dirstate + checking dirstate + +Quiet works + + $ hg admin::verify -c dirstate --quiet + +Test no check no options + + $ hg admin::verify + abort: `checks` required + [255] + +Test single check without options + + 
$ hg admin::verify -c working-copy.dirstate + running 1 checks + running working-copy.dirstate + checking dirstate + +Test single check (alias) without options + + $ hg admin::verify -c dirstate + running 1 checks + running working-copy.dirstate + checking dirstate + +Test wrong check name without options + + $ hg admin::verify -c working-copy.dir + abort: unknown check working-copy.dir + (did you mean working-copy.dirstate?) + [10] + +Test wrong alias without options + + $ hg admin::verify -c dir + abort: unknown check dir + [10] + diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-alias.t --- a/tests/test-alias.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-alias.t Wed Oct 11 02:02:46 2023 +0200 @@ -68,17 +68,17 @@ help $ hg help -c | grep myinit - myinit This is my documented alias for init. + myinit This is my documented alias for init. $ hg help -c | grep mycommit - mycommit This is my alias with only doc. + mycommit This is my alias with only doc. $ hg help -c | grep cleanstatus [1] $ hg help -c | grep lognull - lognull Logs the null rev + lognull Logs the null rev $ hg help -c | grep dln [1] $ hg help -c | grep recursivedoc - recursivedoc Logs the null rev in debug mode + recursivedoc Logs the null rev in debug mode $ hg help myinit hg myinit [OPTIONS] [BLA] [BLE] @@ -603,7 +603,7 @@ help for a shell alias $ hg help -c | grep rebate - rebate This is my alias which just prints something. + rebate This is my alias which just prints something. $ hg help rebate hg rebate [MYARGS] diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-ancestor.py --- a/tests/test-ancestor.py Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-ancestor.py Wed Oct 11 02:02:46 2023 +0200 @@ -12,7 +12,6 @@ debugcommands, hg, ui as uimod, - util, ) @@ -416,7 +415,7 @@ for i, (dag, tests) in enumerate(dagtests): repo = hg.repository(u, b'gca%d' % i, create=1) cl = repo.changelog - if not util.safehasattr(cl.index, 'ancestors'): + if not hasattr(cl.index, 'ancestors'): # C version not available return diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-bundle2-exchange.t --- a/tests/test-bundle2-exchange.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-bundle2-exchange.t Wed Oct 11 02:02:46 2023 +0200 @@ -917,7 +917,7 @@ > raise error.Abort(b"Lock should not be taken") > return orig(repo, *args, **kwargs) > def extsetup(ui): - > extensions.wrapfunction(bundle2, b'processbundle', checklock) + > extensions.wrapfunction(bundle2, 'processbundle', checklock) > EOF $ hg init lazylock diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-byteify-strings.t --- a/tests/test-byteify-strings.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-byteify-strings.t Wed Oct 11 02:02:46 2023 +0200 @@ -110,19 +110,6 @@ > def f(): > pass > EOF - $ byteify_strings testfile.py --allow-attr-methods - setattr(o, 'a', 1) - util.setattr(o, 'ae', 1) - util.getattr(o, 'alksjdf', b'default') - util.addattr(o, 'asdf') - util.hasattr(o, 'lksjdf', b'default') - util.safehasattr(o, 'lksjdf', b'default') - @eh.wrapfunction(func, 'lksjdf') - def f(): - pass - @eh.wrapclass(klass, 'lksjdf') - def f(): - pass Test without attr*() as methods @@ -142,15 +129,15 @@ > EOF $ byteify_strings testfile.py setattr(o, 'a', 1) - util.setattr(o, b'ae', 1) - util.getattr(o, b'alksjdf', b'default') - util.addattr(o, b'asdf') - util.hasattr(o, b'lksjdf', b'default') - util.safehasattr(o, b'lksjdf', b'default') - @eh.wrapfunction(func, b'lksjdf') + util.setattr(o, 'ae', 1) + util.getattr(o, 'alksjdf', b'default') + util.addattr(o, 'asdf') + util.hasattr(o, 'lksjdf', b'default') + 
util.safehasattr(o, 'lksjdf', b'default') + @eh.wrapfunction(func, 'lksjdf') def f(): pass - @eh.wrapclass(klass, b'lksjdf') + @eh.wrapclass(klass, 'lksjdf') def f(): pass diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-check-py3-compat.t --- a/tests/test-check-py3-compat.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-check-py3-compat.t Wed Oct 11 02:02:46 2023 +0200 @@ -10,7 +10,6 @@ > | sed 's|\\|/|g' | xargs "$PYTHON" contrib/check-py3-compat.py \ > | sed 's/[0-9][0-9]*)$/*)/' hgext/convert/transport.py: error importing: <*Error> No module named 'svn.client' (error at transport.py:*) (glob) (?) - hgext/infinitepush/sqlindexapi.py: error importing: <*Error> No module named 'mysql' (error at sqlindexapi.py:*) (glob) (?) mercurial/scmwindows.py: error importing: _type_ 'v' not supported (error at win32.py:*) (no-windows !) mercurial/win32.py: error importing: _type_ 'v' not supported (error at win32.py:*) (no-windows !) mercurial/windows.py: error importing: <*Error> No module named 'msvcrt' (error at windows.py:*) (glob) (no-windows !) diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-commandserver.t --- a/tests/test-commandserver.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-commandserver.t Wed Oct 11 02:02:46 2023 +0200 @@ -923,7 +923,7 @@ > raise Exception('crash') > return orig(ui, repo, conn, createcmdserver, prereposetups) > def extsetup(ui): - > extensions.wrapfunction(commandserver, b'_serverequest', _serverequest) + > extensions.wrapfunction(commandserver, '_serverequest', _serverequest) > EOF $ cat <> .hg/hgrc > [extensions] diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-completion.t --- a/tests/test-completion.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-completion.t Wed Oct 11 02:02:46 2023 +0200 @@ -3,6 +3,7 @@ abort add addremove + admin::verify annotate archive backout @@ -65,6 +66,7 @@ abort add addremove + admin::verify annotate archive @@ -257,6 +259,7 @@ abort: dry-run add: include, exclude, subrepos, dry-run addremove: similarity, subrepos, include, exclude, dry-run + admin::verify: check, option annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template archive: no-decode, prefix, rev, type, subrepos, include, exclude backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-contrib-perf.t --- a/tests/test-contrib-perf.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-contrib-perf.t Wed Oct 11 02:02:46 2023 +0200 @@ -301,23 +301,38 @@ $ hg perfparents --config perf.stub=no --config perf.run-limits='0.000000001-15' ! wall * comb * user * sys * (best of 15) (glob) + ! wall * comb * user * sys * (max of 15) (glob) + ! wall * comb * user * sys * (avg of 15) (glob) + ! wall * comb * user * sys * (median of 15) (glob) Multiple entries $ hg perfparents --config perf.stub=no --config perf.run-limits='500000-1, 0.000000001-50' ! wall * comb * user * sys * (best of 50) (glob) + ! wall * comb * user * sys 0.000000 (max of 50) (glob) + ! wall * comb * user * sys 0.000000 (avg of 50) (glob) + ! wall * comb * user * sys 0.000000 (median of 50) (glob) error case are ignored $ hg perfparents --config perf.stub=no --config perf.run-limits='500, 0.000000001-50' malformatted run limit entry, missing "-": 500 ! wall * comb * user * sys * (best of 50) (glob) + ! wall * comb * user * sys * (max of 50) (glob) + ! 
wall * comb * user * sys * (avg of 50) (glob) + ! wall * comb * user * sys * (median of 50) (glob) $ hg perfparents --config perf.stub=no --config perf.run-limits='aaa-120, 0.000000001-50' malformatted run limit entry, could not convert string to float: 'aaa': aaa-120 ! wall * comb * user * sys * (best of 50) (glob) + ! wall * comb * user * sys * (max of 50) (glob) + ! wall * comb * user * sys * (avg of 50) (glob) + ! wall * comb * user * sys * (median of 50) (glob) $ hg perfparents --config perf.stub=no --config perf.run-limits='120-aaaaaa, 0.000000001-50' malformatted run limit entry, invalid literal for int() with base 10: 'aaaaaa': 120-aaaaaa ! wall * comb * user * sys * (best of 50) (glob) + ! wall * comb * user * sys * (max of 50) (glob) + ! wall * comb * user * sys * (avg of 50) (glob) + ! wall * comb * user * sys * (median of 50) (glob) test actual output ------------------ @@ -326,6 +341,9 @@ $ hg perfheads --config perf.stub=no ! wall * comb * user * sys * (best of *) (glob) + ! wall * comb * user * sys * (max of *) (glob) + ! wall * comb * user * sys * (avg of *) (glob) + ! wall * comb * user * sys * (median of *) (glob) detailed output: @@ -343,8 +361,23 @@ $ hg perfheads --template json --config perf.stub=no [ { + "avg.comb": *, (glob) + "avg.count": *, (glob) + "avg.sys": *, (glob) + "avg.user": *, (glob) + "avg.wall": *, (glob) "comb": *, (glob) "count": *, (glob) + "max.comb": *, (glob) + "max.count": *, (glob) + "max.sys": *, (glob) + "max.user": *, (glob) + "max.wall": *, (glob) + "median.comb": *, (glob) + "median.count": *, (glob) + "median.sys": *, (glob) + "median.user": *, (glob) + "median.wall": *, (glob) "sys": *, (glob) "user": *, (glob) "wall": * (glob) @@ -386,13 +419,22 @@ $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.pre-run=0 ! wall * comb * user * sys * (best of 1) (glob) + ! wall * comb * user * sys * (max of 1) (glob) + ! wall * comb * user * sys * (avg of 1) (glob) + ! wall * comb * user * sys * (median of 1) (glob) searching for changes $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.pre-run=1 ! wall * comb * user * sys * (best of 1) (glob) + ! wall * comb * user * sys * (max of 1) (glob) + ! wall * comb * user * sys * (avg of 1) (glob) + ! wall * comb * user * sys * (median of 1) (glob) searching for changes searching for changes $ hg perfdiscovery . --config perf.stub=no --config perf.run-limits='0.000000001-1' --config perf.pre-run=3 ! wall * comb * user * sys * (best of 1) (glob) + ! wall * comb * user * sys * (max of 1) (glob) + ! wall * comb * user * sys * (avg of 1) (glob) + ! 
wall * comb * user * sys * (median of 1) (glob) searching for changes searching for changes searching for changes diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-demandimport.py --- a/tests/test-demandimport.py Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-demandimport.py Wed Oct 11 02:02:46 2023 +0200 @@ -179,15 +179,13 @@ 'cannot import name unknownattr' ) -from mercurial import util - # Unlike the import statement, __import__() function should not raise # ImportError even if fromlist has an unknown item # (see Python/import.c:import_module_level() and ensure_fromlist()) assert 'ftplib' not in sys.modules zipfileimp = __import__('ftplib', globals(), locals(), ['unknownattr']) assert f(zipfileimp) == "", f(zipfileimp) -assert not util.safehasattr(zipfileimp, 'unknownattr') +assert not hasattr(zipfileimp, 'unknownattr') # test deactivation for issue6725 diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-devel-warnings.t --- a/tests/test-devel-warnings.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-devel-warnings.t Wed Oct 11 02:02:46 2023 +0200 @@ -455,10 +455,10 @@ > EOF $ hg --config "extensions.buggyconfig=${TESTTMP}/buggyconfig.py" buggyconfig - devel-warn: extension 'buggyconfig' overwrite config item 'ui.interactive' at: */mercurial/extensions.py:* (_loadextra) (glob) (no-pyoxidizer !) - devel-warn: extension 'buggyconfig' overwrite config item 'ui.quiet' at: */mercurial/extensions.py:* (_loadextra) (glob) (no-pyoxidizer !) - devel-warn: extension 'buggyconfig' overwrite config item 'ui.interactive' at: mercurial.extensions:* (_loadextra) (glob) (pyoxidizer !) - devel-warn: extension 'buggyconfig' overwrite config item 'ui.quiet' at: mercurial.extensions:* (_loadextra) (glob) (pyoxidizer !) + devel-warn: extension 'buggyconfig' overwrites config item 'ui.interactive' at: */mercurial/extensions.py:* (_loadextra) (glob) (no-pyoxidizer !) + devel-warn: extension 'buggyconfig' overwrites config item 'ui.quiet' at: */mercurial/extensions.py:* (_loadextra) (glob) (no-pyoxidizer !) + devel-warn: extension 'buggyconfig' overwrites config item 'ui.interactive' at: mercurial.extensions:* (_loadextra) (glob) (pyoxidizer !) + devel-warn: extension 'buggyconfig' overwrites config item 'ui.quiet' at: mercurial.extensions:* (_loadextra) (glob) (pyoxidizer !) 
devel-warn: specifying a mismatched default value for a registered config item: 'ui.quiet' 'True' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob) devel-warn: specifying a mismatched default value for a registered config item: 'ui.interactive' 'False' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob) devel-warn: specifying a mismatched default value for a registered config item: 'test.some' 'bar' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob) diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-fncache.t --- a/tests/test-fncache.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-fncache.t Wed Oct 11 02:02:46 2023 +0200 @@ -275,7 +275,7 @@ > > def uisetup(ui): > extensions.wrapfunction( - > localrepo.localrepository, b'transaction', wrapper) + > localrepo.localrepository, 'transaction', wrapper) > > cmdtable = {} > diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-globalopts.t --- a/tests/test-globalopts.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-globalopts.t Wed Oct 11 02:02:46 2023 +0200 @@ -378,6 +378,8 @@ Repository maintenance: + admin::verify + verify the integrity of the repository manifest output the current or given revision of the project manifest recover roll back an interrupted transaction verify verify the integrity of the repository @@ -513,6 +515,8 @@ Repository maintenance: + admin::verify + verify the integrity of the repository manifest output the current or given revision of the project manifest recover roll back an interrupted transaction verify verify the integrity of the repository diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-help-hide.t --- a/tests/test-help-hide.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-help-hide.t Wed Oct 11 02:02:46 2023 +0200 @@ -77,6 +77,8 @@ Repository maintenance: + admin::verify + verify the integrity of the repository manifest output the current or given revision of the project manifest recover roll back an interrupted transaction verify verify the integrity of the repository @@ -216,6 +218,8 @@ Repository maintenance: + admin::verify + verify the integrity of the repository manifest output the current or given revision of the project manifest recover roll back an interrupted transaction verify verify the integrity of the repository diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-help.t --- a/tests/test-help.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-help.t Wed Oct 11 02:02:46 2023 +0200 @@ -129,6 +129,8 @@ Repository maintenance: + admin::verify + verify the integrity of the repository manifest output the current or given revision of the project manifest recover roll back an interrupted transaction verify verify the integrity of the repository @@ -260,6 +262,8 @@ Repository maintenance: + admin::verify + verify the integrity of the repository manifest output the current or given revision of the project manifest recover roll back an interrupted transaction verify verify the integrity of the repository @@ -604,9 +608,16 @@ $ hg help ad list of commands: + Working directory management: + add add the specified files on the next commit addremove add all new files, delete all missing files + Repository maintenance: + + admin::verify + verify the integrity of the repository + (use 'hg help -v ad' to show built-in aliases and global options) Test command without options @@ -626,6 +637,9 @@ Please see https://mercurial-scm.org/wiki/RepositoryCorruption for more information about recovery from corruption of the repository. 
+ For an alternative UI with a lot more control over the verification + process and better error reporting, try 'hg help admin::verify'. + Returns 0 on success, 1 if errors are encountered. options: @@ -2650,6 +2664,13 @@ add all new files, delete all missing files + + admin::verify + + + verify the integrity of the repository + + archive diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-hgweb-json.t --- a/tests/test-hgweb-json.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-hgweb-json.t Wed Oct 11 02:02:46 2023 +0200 @@ -2112,6 +2112,10 @@ "topic": "addremove" }, { + "summary": "verify the integrity of the repository", + "topic": "admin::verify" + }, + { "summary": "create an unversioned archive of a repository revision", "topic": "archive" }, diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-hgweb.t --- a/tests/test-hgweb.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-hgweb.t Wed Oct 11 02:02:46 2023 +0200 @@ -876,7 +876,7 @@ > except ValueError: > raise error.Abort(b'signal.signal() called in thread?') > def uisetup(ui): - > extensions.wrapfunction(signal, b'signal', disabledsig) + > extensions.wrapfunction(signal, 'signal', disabledsig) > EOF by default, signal interrupt should be disabled while making a lock file diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-hook.t --- a/tests/test-hook.t Tue Oct 10 18:29:04 2023 +0200 +++ b/tests/test-hook.t Wed Oct 11 02:02:46 2023 +0200 @@ -991,7 +991,7 @@ Traceback (most recent call last): ModuleNotFoundError: No module named 'hgext_syntaxerror' Traceback (most recent call last): - raise error.HookLoadError( (py38 !) + raise error.HookLoadError(msg, hint=tracebackhint) (py37 !) mercurial.error.HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed @@ -1156,7 +1156,7 @@ Traceback (most recent call last): ModuleNotFoundError: No module named 'hgext_importfail' Traceback (most recent call last): - raise error.HookLoadError( (py38 !) + raise error.HookLoadError(msg, hint=tracebackhint) (py37 !) mercurial.error.HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed abort: precommit.importfail hook is invalid: import of "importfail" failed diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-infinitepush-bundlestore.t --- a/tests/test-infinitepush-bundlestore.t Tue Oct 10 18:29:04 2023 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,755 +0,0 @@ -#require no-reposimplestore no-chg - -XXX-CHG this test hangs if `hg` is really `chg`. This was hidden by the use of -`alias hg=chg` by run-tests.py. With such alias removed, this test is revealed -buggy. This need to be resolved sooner than later. - - -Testing infinipush extension and the confi options provided by it - -Create an ondisk bundlestore in .hg/scratchbranches - $ . "$TESTDIR/library-infinitepush.sh" - $ cp $HGRCPATH $TESTTMP/defaulthgrc - $ setupcommon - $ mkcommit() { - > echo "$1" > "$1" - > hg add "$1" - > hg ci -m "$1" - > } - $ hg init repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ cd repo - -Check that we can send a scratch on the server and it does not show there in -the history but is stored on disk - $ setupserver - $ cd .. 
- $ hg clone ssh://user@dummy/repo client -q - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ cd client - $ mkcommit initialcommit - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg push -r . - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: adding changesets - remote: adding manifests - remote: adding file changes - remote: added 1 changesets with 1 changes to 1 files - $ mkcommit scratchcommit - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg push -r . -B scratch/mybranch - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. 
This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - remote: pushing 1 commit: - remote: 20759b6926ce scratchcommit - $ hg log -G - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - @ changeset: 1:20759b6926ce - | bookmark: scratch/mybranch - | tag: tip - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: scratchcommit - | - o changeset: 0:67145f466344 - user: test - date: Thu Jan 01 00:00:00 1970 +0000 - summary: initialcommit - - $ hg log -G -R ../repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o changeset: 0:67145f466344 - tag: tip - user: test - date: Thu Jan 01 00:00:00 1970 +0000 - summary: initialcommit - - $ find ../repo/.hg/scratchbranches | sort - ../repo/.hg/scratchbranches - ../repo/.hg/scratchbranches/filebundlestore - ../repo/.hg/scratchbranches/filebundlestore/b9 - ../repo/.hg/scratchbranches/filebundlestore/b9/e1 - ../repo/.hg/scratchbranches/filebundlestore/b9/e1/b9e1ee5f93fb6d7c42496fc176c09839639dd9cc - ../repo/.hg/scratchbranches/index - ../repo/.hg/scratchbranches/index/bookmarkmap - ../repo/.hg/scratchbranches/index/bookmarkmap/scratch - ../repo/.hg/scratchbranches/index/bookmarkmap/scratch/mybranch - ../repo/.hg/scratchbranches/index/nodemap - ../repo/.hg/scratchbranches/index/nodemap/20759b6926ce827d5a8c73eb1fa9726d6f7defb2 - -From another client we can get the scratchbranch if we ask for it explicitely - - $ cd .. - $ hg clone ssh://user@dummy/repo client2 -q - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ cd client2 - $ hg pull -B scratch/mybranch --traceback - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. 
- IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pulling from ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - adding changesets - adding manifests - adding file changes - added 1 changesets with 1 changes to 1 files - new changesets 20759b6926ce (1 drafts) - (run 'hg update' to get a working copy) - $ hg log -G - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o changeset: 1:20759b6926ce - | bookmark: scratch/mybranch - | tag: tip - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: scratchcommit - | - @ changeset: 0:67145f466344 - user: test - date: Thu Jan 01 00:00:00 1970 +0000 - summary: initialcommit - - $ cd .. - -Push to non-scratch bookmark - - $ cd client - $ hg up 0 - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ mkcommit newcommit - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - created new head - $ hg push -r . - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. 
- searching for changes - remote: adding changesets - remote: adding manifests - remote: adding file changes - remote: added 1 changesets with 1 changes to 1 files - $ hg log -G -T '{desc} {phase} {bookmarks}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - @ newcommit public - | - | o scratchcommit draft scratch/mybranch - |/ - o initialcommit public - - -Push to scratch branch - $ cd ../client2 - $ hg up -q scratch/mybranch - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ mkcommit 'new scratch commit' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg push -r . -B scratch/mybranch - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - remote: pushing 2 commits: - remote: 20759b6926ce scratchcommit - remote: 1de1d7d92f89 new scratch commit - $ hg log -G -T '{desc} {phase} {bookmarks}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - @ new scratch commit draft scratch/mybranch - | - o scratchcommit draft - | - o initialcommit public - - $ scratchnodes - 1de1d7d92f8965260391d0513fe8a8d5973d3042 bed63daed3beba97fff2e819a148cf415c217a85 - 20759b6926ce827d5a8c73eb1fa9726d6f7defb2 bed63daed3beba97fff2e819a148cf415c217a85 - - $ scratchbookmarks - scratch/mybranch 1de1d7d92f8965260391d0513fe8a8d5973d3042 - -Push scratch bookmark with no new revs - $ hg push -r . -B scratch/anotherbranch - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. 
This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - remote: pushing 2 commits: - remote: 20759b6926ce scratchcommit - remote: 1de1d7d92f89 new scratch commit - $ hg log -G -T '{desc} {phase} {bookmarks}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - @ new scratch commit draft scratch/anotherbranch scratch/mybranch - | - o scratchcommit draft - | - o initialcommit public - - $ scratchbookmarks - scratch/anotherbranch 1de1d7d92f8965260391d0513fe8a8d5973d3042 - scratch/mybranch 1de1d7d92f8965260391d0513fe8a8d5973d3042 - -Pull scratch and non-scratch bookmark at the same time - - $ hg -R ../repo book newbook - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ cd ../client - $ hg pull -B newbook -B scratch/mybranch --traceback - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pulling from ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. 
This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - adding changesets - adding manifests - adding file changes - adding remote bookmark newbook - added 1 changesets with 1 changes to 2 files - new changesets 1de1d7d92f89 (1 drafts) - (run 'hg update' to get a working copy) - $ hg log -G -T '{desc} {phase} {bookmarks}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o new scratch commit draft scratch/mybranch - | - | @ newcommit public - | | - o | scratchcommit draft - |/ - o initialcommit public - - -Push scratch revision without bookmark with --bundle-store - - $ hg up -q tip - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ mkcommit scratchcommitnobook - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg log -G -T '{desc} {phase} {bookmarks}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - @ scratchcommitnobook draft - | - o new scratch commit draft scratch/mybranch - | - | o newcommit public - | | - o | scratchcommit draft - |/ - o initialcommit public - - $ hg push -r . --bundle-store - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - remote: pushing 3 commits: - remote: 20759b6926ce scratchcommit - remote: 1de1d7d92f89 new scratch commit - remote: 2b5d271c7e0d scratchcommitnobook - $ hg -R ../repo log -G -T '{desc} {phase}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. 
This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o newcommit public - | - o initialcommit public - - - $ scratchnodes - 1de1d7d92f8965260391d0513fe8a8d5973d3042 66fa08ff107451320512817bed42b7f467a1bec3 - 20759b6926ce827d5a8c73eb1fa9726d6f7defb2 66fa08ff107451320512817bed42b7f467a1bec3 - 2b5d271c7e0d25d811359a314d413ebcc75c9524 66fa08ff107451320512817bed42b7f467a1bec3 - -Test with pushrebase - $ mkcommit scratchcommitwithpushrebase - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg push -r . -B scratch/mybranch - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - remote: pushing 4 commits: - remote: 20759b6926ce scratchcommit - remote: 1de1d7d92f89 new scratch commit - remote: 2b5d271c7e0d scratchcommitnobook - remote: d8c4f54ab678 scratchcommitwithpushrebase - $ hg -R ../repo log -G -T '{desc} {phase}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o newcommit public - | - o initialcommit public - - $ scratchnodes - 1de1d7d92f8965260391d0513fe8a8d5973d3042 e3cb2ac50f9e1e6a5ead3217fc21236c84af4397 - 20759b6926ce827d5a8c73eb1fa9726d6f7defb2 e3cb2ac50f9e1e6a5ead3217fc21236c84af4397 - 2b5d271c7e0d25d811359a314d413ebcc75c9524 e3cb2ac50f9e1e6a5ead3217fc21236c84af4397 - d8c4f54ab678fd67cb90bb3f272a2dc6513a59a7 e3cb2ac50f9e1e6a5ead3217fc21236c84af4397 - -Change the order of pushrebase and infinitepush - $ mkcommit scratchcommitwithpushrebase2 - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. 
This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg push -r . -B scratch/mybranch - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - remote: pushing 5 commits: - remote: 20759b6926ce scratchcommit - remote: 1de1d7d92f89 new scratch commit - remote: 2b5d271c7e0d scratchcommitnobook - remote: d8c4f54ab678 scratchcommitwithpushrebase - remote: 6c10d49fe927 scratchcommitwithpushrebase2 - $ hg -R ../repo log -G -T '{desc} {phase}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o newcommit public - | - o initialcommit public - - $ scratchnodes - 1de1d7d92f8965260391d0513fe8a8d5973d3042 cd0586065eaf8b483698518f5fc32531e36fd8e0 - 20759b6926ce827d5a8c73eb1fa9726d6f7defb2 cd0586065eaf8b483698518f5fc32531e36fd8e0 - 2b5d271c7e0d25d811359a314d413ebcc75c9524 cd0586065eaf8b483698518f5fc32531e36fd8e0 - 6c10d49fe92751666c40263f96721b918170d3da cd0586065eaf8b483698518f5fc32531e36fd8e0 - d8c4f54ab678fd67cb90bb3f272a2dc6513a59a7 cd0586065eaf8b483698518f5fc32531e36fd8e0 - -Non-fastforward scratch bookmark push - - $ hg log -GT "{rev}:{node} {desc}\n" - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - @ 6:6c10d49fe92751666c40263f96721b918170d3da scratchcommitwithpushrebase2 - | - o 5:d8c4f54ab678fd67cb90bb3f272a2dc6513a59a7 scratchcommitwithpushrebase - | - o 4:2b5d271c7e0d25d811359a314d413ebcc75c9524 scratchcommitnobook - | - o 3:1de1d7d92f8965260391d0513fe8a8d5973d3042 new scratch commit - | - | o 2:91894e11e8255bf41aa5434b7b98e8b2aa2786eb newcommit - | | - o | 1:20759b6926ce827d5a8c73eb1fa9726d6f7defb2 scratchcommit - |/ - o 0:67145f4663446a9580364f70034fea6e21293b6f initialcommit - - $ hg up 6c10d49fe927 - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. 
- 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ echo 1 > amend - $ hg add amend - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg ci --amend -m 'scratch amended commit' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - saved backup bundle to $TESTTMP/client/.hg/strip-backup/6c10d49fe927-c99ffec5-amend.hg - $ hg log -G -T '{desc} {phase} {bookmarks}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - @ scratch amended commit draft scratch/mybranch - | - o scratchcommitwithpushrebase draft - | - o scratchcommitnobook draft - | - o new scratch commit draft - | - | o newcommit public - | | - o | scratchcommit draft - |/ - o initialcommit public - - - $ scratchbookmarks - scratch/anotherbranch 1de1d7d92f8965260391d0513fe8a8d5973d3042 - scratch/mybranch 6c10d49fe92751666c40263f96721b918170d3da - $ hg push -r . -B scratch/mybranch - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - remote: pushing 5 commits: - remote: 20759b6926ce scratchcommit - remote: 1de1d7d92f89 new scratch commit - remote: 2b5d271c7e0d scratchcommitnobook - remote: d8c4f54ab678 scratchcommitwithpushrebase - remote: 8872775dd97a scratch amended commit - $ scratchbookmarks - scratch/anotherbranch 1de1d7d92f8965260391d0513fe8a8d5973d3042 - scratch/mybranch 8872775dd97a750e1533dc1fbbca665644b32547 - $ hg log -G -T '{desc} {phase} {bookmarks}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - @ scratch amended commit draft scratch/mybranch - | - o scratchcommitwithpushrebase draft - | - o scratchcommitnobook draft - | - o new scratch commit draft - | - | o newcommit public - | | - o | scratchcommit draft - |/ - o initialcommit public - -Check that push path is not ignored. 
Add new path to the hgrc - $ cat >> .hg/hgrc << EOF - > [paths] - > peer=ssh://user@dummy/client2 - > EOF - -Checkout last non-scrath commit - $ hg up 91894e11e8255 - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - 1 files updated, 0 files merged, 6 files removed, 0 files unresolved - $ mkcommit peercommit - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. -Use --force because this push creates new head - $ hg push peer -r . -f - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing to ssh://user@dummy/client2 - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - remote: adding changesets - remote: adding manifests - remote: adding file changes - remote: added 2 changesets with 2 changes to 2 files (+1 heads) - $ hg -R ../repo log -G -T '{desc} {phase} {bookmarks}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o newcommit public - | - o initialcommit public - - $ hg -R ../client2 log -G -T '{desc} {phase} {bookmarks}' - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o peercommit public - | - o newcommit public - | - | @ new scratch commit draft scratch/anotherbranch scratch/mybranch - | | - | o scratchcommit draft - |/ - o initialcommit public - diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-infinitepush-ci.t --- a/tests/test-infinitepush-ci.t Tue Oct 10 18:29:04 2023 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,579 +0,0 @@ -#require no-reposimplestore - -Testing the case when there is no infinitepush extension present on the client -side and the server routes each push to bundlestore. This case is very much -similar to CI use case. - -Setup ------ - - $ . 
"$TESTDIR/library-infinitepush.sh" - $ cat >> $HGRCPATH <<EOF - > [alias] - > glog = log -GT "{rev}:{node|short} {desc}\n{phase}" - > EOF - $ cp $HGRCPATH $TESTTMP/defaulthgrc - $ hg init repo - $ cd repo - $ setupserver - $ echo "pushtobundlestore = True" >> .hg/hgrc - $ echo "[extensions]" >> .hg/hgrc - $ echo "infinitepush=" >> .hg/hgrc - $ echo "[infinitepush]" >> .hg/hgrc - $ echo "deprecation-abort=no" >> .hg/hgrc - $ echo initialcommit > initialcommit - $ hg ci -Aqm "initialcommit" - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact (chg !) - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be (chg !) - unused and barring learning of users of this functionality, we drop this (chg !) - extension in Mercurial 6.6. (chg !) - $ hg phase --public . - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - - $ cd .. - $ hg clone repo client -q - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg clone repo client2 -q - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg clone ssh://user@dummy/repo client3 -q - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - $ cd client - -Pushing a new commit from the client to the server ------------------------------------------------------ - - $ echo foobar > a - $ hg ci -Aqm "added a" - $ hg glog - @ 1:6cb0989601f1 added a - | draft - o 0:67145f466344 initialcommit - public - - $ hg push - pushing to $TESTTMP/repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - storing changesets on the bundlestore - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6.
- pushing 1 commit: - 6cb0989601f1 added a - - $ scratchnodes - 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3 - -Understanding how data is stored on the bundlestore in server -------------------------------------------------------------- - -There are two things, filebundlestore and index - $ ls ../repo/.hg/scratchbranches - filebundlestore - index - -filebundlestore stores the bundles - $ ls ../repo/.hg/scratchbranches/filebundlestore/3b/41/ - 3b414252ff8acab801318445d88ff48faf4a28c3 - -index/nodemap stores a map of node id and file in which bundle is stored in filebundlestore - $ ls ../repo/.hg/scratchbranches/index/ - nodemap - $ ls ../repo/.hg/scratchbranches/index/nodemap/ - 6cb0989601f1fb5805238edfb16f3606713d9a0b - - $ cd ../repo - -Checking that the commit was not applied to revlog on the server ------------------------------------------------------------------- - - $ hg glog - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - @ 0:67145f466344 initialcommit - public - -Applying the changeset from the bundlestore --------------------------------------------- - - $ hg unbundle .hg/scratchbranches/filebundlestore/3b/41/3b414252ff8acab801318445d88ff48faf4a28c3 - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - adding changesets - adding manifests - adding file changes - added 1 changesets with 1 changes to 1 files - new changesets 6cb0989601f1 - (run 'hg update' to get a working copy) - - $ hg glog - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o 1:6cb0989601f1 added a - | public - @ 0:67145f466344 initialcommit - public - -Pushing more changesets from the local repo --------------------------------------------- - - $ cd ../client - $ echo b > b - $ hg ci -Aqm "added b" - $ echo c > c - $ hg ci -Aqm "added c" - $ hg glog - @ 3:bf8a6e3011b3 added c - | draft - o 2:eaba929e866c added b - | draft - o 1:6cb0989601f1 added a - | public - o 0:67145f466344 initialcommit - public - - $ hg push - pushing to $TESTTMP/repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - storing changesets on the bundlestore - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing 2 commits: - eaba929e866c added b - bf8a6e3011b3 added c - -Checking that changesets are not applied on the server ------------------------------------------------------- - - $ hg glog -R ../repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. 
This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o 1:6cb0989601f1 added a - | public - @ 0:67145f466344 initialcommit - public - -Both of the new changesets are stored in a single bundle-file - $ scratchnodes - 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3 - bf8a6e3011b345146bbbedbcb1ebd4837571492a 239585f5e61f0c09ce7106bdc1097bff731738f4 - eaba929e866c59bc9a6aada5a9dd2f6990db83c0 239585f5e61f0c09ce7106bdc1097bff731738f4 - -Pushing more changesets to the server -------------------------------------- - - $ echo d > d - $ hg ci -Aqm "added d" - $ echo e > e - $ hg ci -Aqm "added e" - -XXX: we should have pushed only the parts which are not in bundlestore - $ hg push - pushing to $TESTTMP/repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - storing changesets on the bundlestore - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing 4 commits: - eaba929e866c added b - bf8a6e3011b3 added c - 1bb96358eda2 added d - b4e4bce66051 added e - -Sneak peek into the bundlestore at the server - $ scratchnodes - 1bb96358eda285b536c6d1c66846a7cdb2336cea 98fbae0016662521b0007da1b7bc349cd3caacd1 - 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3 - b4e4bce660512ad3e71189e14588a70ac8e31fef 98fbae0016662521b0007da1b7bc349cd3caacd1 - bf8a6e3011b345146bbbedbcb1ebd4837571492a 98fbae0016662521b0007da1b7bc349cd3caacd1 - eaba929e866c59bc9a6aada5a9dd2f6990db83c0 98fbae0016662521b0007da1b7bc349cd3caacd1 - -Checking if `hg pull` pulls something or `hg incoming` shows something ----------------------------------------------------------------------- - - $ hg incoming - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - comparing with $TESTTMP/repo - searching for changes - no changes found - [1] - - $ hg pull - pulling from $TESTTMP/repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - no changes found - -Pulling from second client which is a localpeer to test `hg pull -r <rev>` --------------------------------------------------------------------------- - -Pulling the revision which is applied - - $ cd ../client2 - $ hg pull -r 6cb0989601f1 - pulling from $TESTTMP/repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. 
- searching for changes - adding changesets - adding manifests - adding file changes - added 1 changesets with 1 changes to 1 files - new changesets 6cb0989601f1 - (run 'hg update' to get a working copy) - $ hg glog - o 1:6cb0989601f1 added a - | public - @ 0:67145f466344 initialcommit - public - -Pulling the revision which is in bundlestore -XXX: we should support pulling revisions from a local peers bundlestore without -client side wrapping - - $ hg pull -r b4e4bce660512ad3e71189e14588a70ac8e31fef - pulling from $TESTTMP/repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - abort: unknown revision 'b4e4bce660512ad3e71189e14588a70ac8e31fef' - [10] - $ hg glog - o 1:6cb0989601f1 added a - | public - @ 0:67145f466344 initialcommit - public - - $ cd ../client - -Pulling from third client which is not a localpeer ---------------------------------------------------- - -Pulling the revision which is applied - - $ cd ../client3 - $ hg pull -r 6cb0989601f1 - pulling from ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - searching for changes - adding changesets - adding manifests - adding file changes - added 1 changesets with 1 changes to 1 files - new changesets 6cb0989601f1 - (run 'hg update' to get a working copy) - $ hg glog - o 1:6cb0989601f1 added a - | public - @ 0:67145f466344 initialcommit - public - -Pulling the revision which is in bundlestore - -Trying to specify short hash -XXX: we should support this - $ hg pull -r b4e4bce660512 - pulling from ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - abort: unknown revision 'b4e4bce660512' - [255] - -XXX: we should show better message when the pull is happening from bundlestore - $ hg pull -r b4e4bce660512ad3e71189e14588a70ac8e31fef - pulling from ssh://user@dummy/repo - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - searching for changes - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. 
- adding changesets - adding manifests - adding file changes - added 4 changesets with 4 changes to 4 files - new changesets eaba929e866c:b4e4bce66051 - (run 'hg update' to get a working copy) - $ hg glog - o 5:b4e4bce66051 added e - | public - o 4:1bb96358eda2 added d - | public - o 3:bf8a6e3011b3 added c - | public - o 2:eaba929e866c added b - | public - o 1:6cb0989601f1 added a - | public - @ 0:67145f466344 initialcommit - public - - $ cd ../client - -Checking storage of phase information with the bundle on bundlestore ---------------------------------------------------------------------- - -creating a draft commit - $ cat >> $HGRCPATH <<EOF - > [phases] - > publish = False - > EOF - $ echo f > f - $ hg ci -Aqm "added f" - $ hg glog -r '.^::' - @ 6:9b42578d4447 added f - | draft - o 5:b4e4bce66051 added e - | public - ~ - - $ hg push - pushing to $TESTTMP/repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - storing changesets on the bundlestore - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing 5 commits: - eaba929e866c added b - bf8a6e3011b3 added c - 1bb96358eda2 added d - b4e4bce66051 added e - 9b42578d4447 added f - -XXX: the phase of 9b42578d4447 should not be changed here - $ hg glog -r . - @ 6:9b42578d4447 added f - | public - ~ - -applying the bundle on the server to check preservation of phase-information - - $ cd ../repo - $ scratchnodes - 1bb96358eda285b536c6d1c66846a7cdb2336cea 280a46a259a268f0e740c81c5a7751bdbfaec85f - 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3 - 9b42578d44473575994109161430d65dd147d16d 280a46a259a268f0e740c81c5a7751bdbfaec85f - b4e4bce660512ad3e71189e14588a70ac8e31fef 280a46a259a268f0e740c81c5a7751bdbfaec85f - bf8a6e3011b345146bbbedbcb1ebd4837571492a 280a46a259a268f0e740c81c5a7751bdbfaec85f - eaba929e866c59bc9a6aada5a9dd2f6990db83c0 280a46a259a268f0e740c81c5a7751bdbfaec85f - - $ hg unbundle .hg/scratchbranches/filebundlestore/28/0a/280a46a259a268f0e740c81c5a7751bdbfaec85f - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - adding changesets - adding manifests - adding file changes - added 5 changesets with 5 changes to 5 files - new changesets eaba929e866c:9b42578d4447 (1 drafts) - (run 'hg update' to get a working copy) - - $ hg glog - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. 
- o 6:9b42578d4447 added f - | draft - o 5:b4e4bce66051 added e - | public - o 4:1bb96358eda2 added d - | public - o 3:bf8a6e3011b3 added c - | public - o 2:eaba929e866c added b - | public - o 1:6cb0989601f1 added a - | public - @ 0:67145f466344 initialcommit - public - -Checking storage of obsmarkers in the bundlestore --------------------------------------------------- - -enabling obsmarkers and rebase extension - - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution = all - > [extensions] - > rebase = - > EOF - - $ cd ../client - - $ hg phase -r . --draft --force - $ hg rebase -r 6 -d 3 - rebasing 6:9b42578d4447 tip "added f" - - $ hg glog - @ 7:99949238d9ac added f - | draft - | o 5:b4e4bce66051 added e - | | public - | o 4:1bb96358eda2 added d - |/ public - o 3:bf8a6e3011b3 added c - | public - o 2:eaba929e866c added b - | public - o 1:6cb0989601f1 added a - | public - o 0:67145f466344 initialcommit - public - - $ hg push -f - pushing to $TESTTMP/repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - searching for changes - storing changesets on the bundlestore - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - pushing 1 commit: - 99949238d9ac added f - -XXX: the phase should not have changed here - $ hg glog -r . - @ 7:99949238d9ac added f - | public - ~ - -Unbundling on server to see obsmarkers being applied - - $ cd ../repo - - $ scratchnodes - 1bb96358eda285b536c6d1c66846a7cdb2336cea 280a46a259a268f0e740c81c5a7751bdbfaec85f - 6cb0989601f1fb5805238edfb16f3606713d9a0b 3b414252ff8acab801318445d88ff48faf4a28c3 - 99949238d9ac7f2424a33a46dface6f866afd059 090a24fe63f31d3b4bee714447f835c8c362ff57 - 9b42578d44473575994109161430d65dd147d16d 280a46a259a268f0e740c81c5a7751bdbfaec85f - b4e4bce660512ad3e71189e14588a70ac8e31fef 280a46a259a268f0e740c81c5a7751bdbfaec85f - bf8a6e3011b345146bbbedbcb1ebd4837571492a 280a46a259a268f0e740c81c5a7751bdbfaec85f - eaba929e866c59bc9a6aada5a9dd2f6990db83c0 280a46a259a268f0e740c81c5a7751bdbfaec85f - - $ hg glog - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact (chg !) - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be (chg !) - unused and barring learning of users of this functionality, we drop this (chg !) - extension in Mercurial 6.6. (chg !) - o 6:9b42578d4447 added f - | draft - o 5:b4e4bce66051 added e - | public - o 4:1bb96358eda2 added d - | public - o 3:bf8a6e3011b3 added c - | public - o 2:eaba929e866c added b - | public - o 1:6cb0989601f1 added a - | public - @ 0:67145f466344 initialcommit - public - - $ hg unbundle .hg/scratchbranches/filebundlestore/09/0a/090a24fe63f31d3b4bee714447f835c8c362ff57 - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. 
- adding changesets - adding manifests - adding file changes - added 1 changesets with 0 changes to 1 files (+1 heads) - 1 new obsolescence markers - obsoleted 1 changesets - new changesets 99949238d9ac (1 drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - - $ hg glog - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - o 7:99949238d9ac added f - | draft - | o 5:b4e4bce66051 added e - | | public - | o 4:1bb96358eda2 added d - |/ public - o 3:bf8a6e3011b3 added c - | public - o 2:eaba929e866c added b - | public - o 1:6cb0989601f1 added a - | public - @ 0:67145f466344 initialcommit - public diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-infinitepush.t --- a/tests/test-infinitepush.t Tue Oct 10 18:29:04 2023 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,472 +0,0 @@ -#require no-reposimplestore no-chg - -XXX-CHG this test hangs if `hg` is really `chg`. This was hidden by the use of -`alias hg=chg` by run-tests.py. With such alias removed, this test is revealed -buggy. This need to be resolved sooner than later. - - -Testing infinipush extension and the confi options provided by it - -Setup - - $ . "$TESTDIR/library-infinitepush.sh" - $ cp $HGRCPATH $TESTTMP/defaulthgrc - $ setupcommon - $ hg init repo - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ cd repo - $ setupserver - $ echo initialcommit > initialcommit - $ hg ci -Aqm "initialcommit" - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - $ hg phase --public . - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - - $ cd .. - $ hg clone ssh://user@dummy/repo client -q - remote: IMPORTANT: if you use this extension, please contact - remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - remote: unused and barring learning of users of this functionality, we drop this - remote: extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. - -Create two heads. Push first head alone, then two heads together. Make sure that -multihead push works. - $ cd client - $ echo multihead1 > multihead1 - $ hg add multihead1 - IMPORTANT: if you use this extension, please contact - mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be - unused and barring learning of users of this functionality, we drop this - extension in Mercurial 6.6. 
-  $ hg ci -m "multihead1"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg up null
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  $ echo multihead2 > multihead2
-  $ hg ci -Am "multihead2"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding multihead2
-  created new head
-  $ hg push -r . --bundle-store
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 1 commit:
-  remote:     ee4802bf6864  multihead2
-  $ hg push -r '1:2' --bundle-store
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 2 commits:
-  remote:     bc22f9a30a82  multihead1
-  remote:     ee4802bf6864  multihead2
-  $ scratchnodes
-  bc22f9a30a821118244deacbd732e394ed0b686c de1b7d132ba98f0172cd974e3e69dfa80faa335c
-  ee4802bf6864326a6b3dcfff5a03abc2a0a69b8f de1b7d132ba98f0172cd974e3e69dfa80faa335c
-
-Create two new scratch bookmarks
-  $ hg up 0
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  $ echo scratchfirstpart > scratchfirstpart
-  $ hg ci -Am "scratchfirstpart"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding scratchfirstpart
-  created new head
-  $ hg push -r . -B scratch/firstpart
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 1 commit:
-  remote:     176993b87e39  scratchfirstpart
-  $ hg up 0
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  $ echo scratchsecondpart > scratchsecondpart
-  $ hg ci -Am "scratchsecondpart"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding scratchsecondpart
-  created new head
-  $ hg push -r . -B scratch/secondpart
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 1 commit:
-  remote:     8db3891c220e  scratchsecondpart
-
-Pull two bookmarks from the second client
-  $ cd ..
-  $ hg clone ssh://user@dummy/repo client2 -q
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd client2
-  $ hg pull -B scratch/firstpart -B scratch/secondpart
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 2 changesets with 2 changes to 2 files (+1 heads)
-  new changesets * (glob)
-  (run 'hg heads' to see heads, 'hg merge' to merge)
-  $ hg log -r scratch/secondpart -T '{node}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  8db3891c220e216f6da214e8254bd4371f55efca (no-eol)
-  $ hg log -r scratch/firstpart -T '{node}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  176993b87e39bd88d66a2cccadabe33f0b346339 (no-eol)
-Make two commits to the scratch branch
-
-  $ echo testpullbycommithash1 > testpullbycommithash1
-  $ hg ci -Am "testpullbycommithash1"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  adding testpullbycommithash1
-  created new head
-  $ hg log -r '.' -T '{node}\n' > ../testpullbycommithash1
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ echo testpullbycommithash2 > testpullbycommithash2
-  $ hg ci -Aqm "testpullbycommithash2"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg push -r . -B scratch/mybranch -q
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-
-Create third client and pull by commit hash.
-Make sure testpullbycommithash2 has not fetched
-  $ cd ..
-  $ hg clone ssh://user@dummy/repo client3 -q
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd client3
-  $ hg pull -r `cat ../testpullbycommithash1`
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets 33910bfe6ffe (1 drafts)
-  (run 'hg update' to get a working copy)
-  $ hg log -G -T '{desc} {phase} {bookmarks}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  testpullbycommithash1 draft
-  |
-  @  initialcommit public
-
-Make public commit in the repo and pull it.
-Make sure phase on the client is public.
-  $ cd ../repo
-  $ echo publiccommit > publiccommit
-  $ hg ci -Aqm "publiccommit"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg phase --public .
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ cd ../client3
-  $ hg pull
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files (+1 heads)
-  new changesets a79b6597f322
-  (run 'hg heads' to see heads, 'hg merge' to merge)
-  $ hg log -G -T '{desc} {phase} {bookmarks} {node|short}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  o  publiccommit public  a79b6597f322
-  |
-  | o  testpullbycommithash1 draft  33910bfe6ffe
-  |/
-  @  initialcommit public  67145f466344
-
-  $ hg up a79b6597f322
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ echo scratchontopofpublic > scratchontopofpublic
-  $ hg ci -Aqm "scratchontopofpublic"
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  $ hg push -r . -B scratch/scratchontopofpublic
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pushing to ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  remote: pushing 1 commit:
-  remote:     c70aee6da07d  scratchontopofpublic
-  $ cd ../client2
-  $ hg pull -B scratch/scratchontopofpublic
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  pulling from ssh://user@dummy/repo
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  searching for changes
-  remote: IMPORTANT: if you use this extension, please contact
-  remote: mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  remote: unused and barring learning of users of this functionality, we drop this
-  remote: extension in Mercurial 6.6.
-  adding changesets
-  adding manifests
-  adding file changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 2 changesets with 2 changes to 2 files (+1 heads)
-  new changesets a79b6597f322:c70aee6da07d (1 drafts)
-  (run 'hg heads .' to see heads, 'hg merge' to merge)
-  $ hg log -r scratch/scratchontopofpublic -T '{phase}'
-  IMPORTANT: if you use this extension, please contact
-  mercurial-devel@mercurial-scm.org IMMEDIATELY. This extension is believed to be
-  unused and barring learning of users of this functionality, we drop this
-  extension in Mercurial 6.6.
-  draft (no-eol)
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-journal.t
--- a/tests/test-journal.t	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-journal.t	Wed Oct 11 02:02:46 2023 +0200
@@ -84,6 +84,17 @@
   cb9a9f314b8b  book -f bar
   1e6c11564562  book -r tip bar
 
+Test that we track bookmark deletion
+
+  $ hg book -r . babar
+  $ hg book -f -r .~1 babar
+  $ hg book -d babar
+  $ hg journal babar
+  previous locations of 'babar':
+  000000000000  book -d babar
+  cb9a9f314b8b  book -f -r '.~1' babar
+  1e6c11564562  book -r . babar
+
 Test that bookmarks and working copy tracking is not mixed
 
   $ hg journal
@@ -99,6 +110,9 @@
   $ hg journal --all
   previous locations of the working copy and bookmarks:
   1e6c11564562  baz       book -r tip baz
+  000000000000  babar     book -d babar
+  cb9a9f314b8b  babar     book -f -r '.~1' babar
+  1e6c11564562  babar     book -r . babar
   1e6c11564562  bar       up
   1e6c11564562  .         up
   cb9a9f314b8b  bar       book -f bar
@@ -127,6 +141,9 @@
   $ hg journal "re:ba."
   previous locations of 're:ba.':
   1e6c11564562  baz       book -r tip baz
+  000000000000  babar     book -d babar
+  cb9a9f314b8b  babar     book -f -r '.~1' babar
+  1e6c11564562  babar     book -r . babar
   1e6c11564562  bar       up
   cb9a9f314b8b  bar       book -f bar
   1e6c11564562  bar       book -r tip bar
@@ -136,6 +153,9 @@
   $ hg journal --verbose --all
   previous locations of the working copy and bookmarks:
   000000000000 -> 1e6c11564562 foobar baz       1970-01-01 00:00 +0000  book -r tip baz
+  cb9a9f314b8b -> 000000000000 foobar babar     1970-01-01 00:00 +0000  book -d babar
+  1e6c11564562 -> cb9a9f314b8b foobar babar     1970-01-01 00:00 +0000  book -f -r '.~1' babar
+  000000000000 -> 1e6c11564562 foobar babar     1970-01-01 00:00 +0000  book -r . babar
   cb9a9f314b8b -> 1e6c11564562 foobar bar       1970-01-01 00:00 +0000  up
   cb9a9f314b8b -> 1e6c11564562 foobar .         1970-01-01 00:00 +0000  up
   1e6c11564562 -> cb9a9f314b8b foobar bar       1970-01-01 00:00 +0000  book -f bar
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-lfs.t
--- a/tests/test-lfs.t	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-lfs.t	Wed Oct 11 02:02:46 2023 +0200
@@ -829,7 +829,7 @@
   > eh = exthelper.exthelper()
   > uisetup = eh.finaluisetup
   >
-  > @eh.wrapfunction(wrapper, b'filelogrenamed')
+  > @eh.wrapfunction(wrapper, 'filelogrenamed')
   > def filelogrenamed(orig, orig1, self, node):
   >     ret = orig(orig1, self, node)
   >     if wrapper._islfs(self._revlog, node) and ret:
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-narrow-expanddirstate.t
--- a/tests/test-narrow-expanddirstate.t	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-narrow-expanddirstate.t	Wed Oct 11 02:02:46 2023 +0200
@@ -99,7 +99,7 @@
   >     expandnarrowspec(ui, repo, encoding.environ.get(b'PATCHINCLUDES'))
   >     return orig(ui, repo, *args, **kwargs)
   >
-  > extensions.wrapfunction(patch, b'patch', overridepatch)
+  > extensions.wrapfunction(patch, 'patch', overridepatch)
   > EOF
   $ cat >> ".hg/hgrc" <<EOF
   > [extensions]
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-push-race.t
--- a/tests/test-push-race.t	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-push-race.t	Wed Oct 11 02:02:46 2023 +0200
@@ -76,7 +76,7 @@
   >             return orig(pushop)
   >
   >     def uisetup(ui):
-  >         extensions.wrapfunction(exchange, b'_pushbundle2', delaypush)
+  >         extensions.wrapfunction(exchange, '_pushbundle2', delaypush)
   > EOF
 
   $ waiton () {
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-remotefilelog-bundle2-legacy.t
--- a/tests/test-remotefilelog-bundle2-legacy.t	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-remotefilelog-bundle2-legacy.t	Wed Oct 11 02:02:46 2023 +0200
@@ -11,7 +11,7 @@
   > command = registrar.command(cmdtable)
   > @command('testcg2', norepo=True)
   > def testcg2(ui):
-  >     if not util.safehasattr(changegroup, 'cg2packer'):
+  >     if not hasattr(changegroup, 'cg2packer'):
   >         sys.exit(80)
   > EOF
   $ cat >> $HGRCPATH << EOF
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-rust-ancestor.py
--- a/tests/test-rust-ancestor.py	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-rust-ancestor.py	Wed Oct 11 02:02:46 2023 +0200
@@ -2,7 +2,6 @@
 import unittest
 
 from mercurial.node import wdirrev
-from mercurial import error
 
 from mercurial.testing import revlog as revlogtesting
 
@@ -144,11 +143,15 @@
 
     def testwdirunsupported(self):
         # trying to access ancestors of the working directory raises
-        # WdirUnsupported directly
        idx = self.parseindex()
-        with self.assertRaises(error.WdirUnsupported):
+        with self.assertRaises(rustext.GraphError) as arc:
             list(AncestorsIterator(idx, [wdirrev], -1, False))
 
+        exc = arc.exception
+        self.assertIsInstance(exc, ValueError)
+        # rust-cpython issues appropriate str instances for Python 2 and 3
+        self.assertEqual(exc.args, ('InvalidRevision', wdirrev))
+
     def testheadrevs(self):
         idx = self.parseindex()
         self.assertEqual(dagop.headrevs(idx, [1, 2, 3]), {3})
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-ssh-bundle1.t
--- a/tests/test-ssh-bundle1.t	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-ssh-bundle1.t	Wed Oct 11 02:02:46 2023 +0200
@@ -427,7 +427,7 @@
   >     return res
   >
   > def extsetup(ui):
-  >     extensions.wrapfunction(exchange, b'push', wrappedpush)
+  >     extensions.wrapfunction(exchange, 'push', wrappedpush)
   > EOF
 
   $ cat >> .hg/hgrc << EOF
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-ssh.t
--- a/tests/test-ssh.t	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-ssh.t	Wed Oct 11 02:02:46 2023 +0200
@@ -479,7 +479,7 @@
   >     return res
   >
   > def extsetup(ui):
-  >     extensions.wrapfunction(exchange, b'push', wrappedpush)
+  >     extensions.wrapfunction(exchange, 'push', wrappedpush)
   > EOF
 
   $ cat >> .hg/hgrc << EOF
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-strip.t
--- a/tests/test-strip.t	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-strip.t	Wed Oct 11 02:02:46 2023 +0200
@@ -970,7 +970,7 @@
   >     transaction.addpostclose(b"phase invalidation test", test)
   >     return transaction
   > def extsetup(ui):
-  >     extensions.wrapfunction(localrepo.localrepository, b"transaction",
+  >     extensions.wrapfunction(localrepo.localrepository, "transaction",
   >                             transactioncallback)
   > EOF
   $ hg up -C 2
diff -r 704c3d0878d9 -r 12c308c55e53 tests/test-transaction-rollback-on-revlog-split.t
--- a/tests/test-transaction-rollback-on-revlog-split.t	Tue Oct 10 18:29:04 2023 +0200
+++ b/tests/test-transaction-rollback-on-revlog-split.t	Wed Oct 11 02:02:46 2023 +0200
@@ -400,7 +400,6 @@
 The split was rollback
 
   $ f -s .hg/store/data*/file*
-  .hg/store/data/file.d: size=0
   .hg/store/data/file.i: size=1174
 
   $ hg tip
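
Note on the recurring mechanical change in the test hunks above: `extensions.wrapfunction` now takes the attribute name as a native `str` (e.g. `'push'`) instead of `bytes` (`b'push'`), matching the parallel `util.safehasattr` to built-in `hasattr` cleanups, since Python 3 attribute names are always `str`. A minimal sketch of the wrapping pattern these tests exercise, written as a standalone extension module; the status messages are illustrative, not output asserted anywhere in this patch:

```python
# sketch of the wrapfunction pattern used by the tests above,
# assuming a Mercurial recent enough to expect str attribute names
from mercurial import exchange, extensions


def wrappedpush(orig, repo, *args, **kwargs):
    # run code around the real exchange.push; orig is the unwrapped function
    repo.ui.status(b'wrapped push starting\n')
    res = orig(repo, *args, **kwargs)
    repo.ui.status(b'wrapped push done\n')
    return res


def extsetup(ui):
    # the attribute name is a native str ('push'), not b'push'
    extensions.wrapfunction(exchange, 'push', wrappedpush)
```

The same shape appears in the test-ssh.t, test-ssh-bundle1.t, test-push-race.t, and test-strip.t hunks; only the wrapped module and attribute differ.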