evolve: changeset 6775:6adcc5c7c1f1

branching: merge with stable
author:    Anton Shestakov <av6@dwimlabs.net>
date:      Sun, 14 Apr 2024 00:41:49 -0300
parents:   c6ff8ae8a752 (diff), 564e3d3d9799 (current diff)
children:  8275a04db4ae
files:     .gitlab-ci.yml CHANGELOG hgext3rd/evolve/metadata.py hgext3rd/topic/__init__.py hgext3rd/topic/flow.py tests/test-namespaces.t
diffstat:  39 files changed, 344 insertions(+), 1237 deletions(-)
--- a/.gitlab-ci.yml Sun Apr 14 00:26:33 2024 -0300
+++ b/.gitlab-ci.yml Sun Apr 14 00:41:49 2024 -0300
@@ -7,12 +7,16 @@
       - echo testing with mercurial branch="$hg_branch", revision="$hg_rev"
 
 pytype:
-    image: registry.heptapod.net/mercurial/ci-images/py3-hgext3rd
+    image: registry.heptapod.net/mercurial/ci-images/py3-hgext3rd:v2.0
     script:
       - *prepare_hg
+      - hg clone /ci/repos/mercurial/ /tmp/mercurial-ci/ --noupdate --config phases.publish=no
+      - hg -R /tmp/mercurial-ci/ update "$hg_rev"
+      - make -C /tmp/mercurial-ci/ local PYTHON=$PYTHON
+      - $PYTHON -m pip install --user --break-system-packages --upgrade pytype==2024.1.5
+      - (cd /tmp/mercurial-ci/ && ./contrib/setup-pytype.sh)
       - pytype --version
-      - jobs=$(python3 -c 'import multiprocessing; print(multiprocessing.cpu_count())')
-      - pytype -P /ci/repos/mercurial/:hgext3rd -k hgext3rd -x hgext3rd/evolve/thirdparty -j $jobs || true
+      - pytype --keep-going --jobs auto -P /tmp/mercurial-ci/:hgext3rd hgext3rd
     when: manual
     variables:
 
@@ -34,6 +38,7 @@
     <<: *runtests
     variables:
         RUNTEST_ARGS: "--test-list /tmp/check-tests.txt"
+        CI_CLEVER_CLOUD_FLAVOR: S
 
 tests-py3-cext:
     <<: *runtests
@@ -58,12 +63,13 @@
     allow_failure: true
 
 doc:
-    image: registry.heptapod.net/mercurial/ci-images/py3-evolve-doc
+    image: registry.heptapod.net/mercurial/ci-images/py3-evolve-doc:v2.0
     script:
       - make doc
     variables:
         LANG: en_US.UTF-8
        PYTHONPATH: "/ci/repos/mercurial:$PYTHONPATH"
+        CI_CLEVER_CLOUD_FLAVOR: S
     artifacts:
         paths:
           - html/*
--- a/CHANGELOG Sun Apr 14 00:26:33 2024 -0300
+++ b/CHANGELOG Sun Apr 14 00:41:49 2024 -0300
@@ -1,6 +1,12 @@
 Changelog
 =========
 
+11.2.0 - in progress
+--------------------
+
+  * evolve: remove legacy.py that could be used to "detect and convert
+    prehistoric format of obsolete markers" (older than Mercurial 2.3)
+
 11.1.3 -- 2024-04-12
 --------------------
 
--- a/MANIFEST.in Sun Apr 14 00:26:33 2024 -0300
+++ b/MANIFEST.in Sun Apr 14 00:41:49 2024 -0300
@@ -19,11 +19,9 @@
 exclude docs/tutorial/.netlify
 exclude .gitlab-ci.yml
-exclude hgext3rd/evolve/legacy.py
 exclude .hg-format-source
 exclude Makefile
 exclude tests/test-drop.t
-exclude tests/test-oldconvert.t
 
 prune contrib
 prune debian
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/docker/pytype/Dockerfile Sun Apr 14 00:41:49 2024 -0300
@@ -0,0 +1,14 @@
+FROM registry.heptapod.net/mercurial/ci-images/py3-hgext3rd:v2.0
+
+USER ci-runner
+
+ENV PATH=/home/ci-runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ENV PYTHONPATH=/home/ci-runner/.local/lib/python3.11/site-packages
+
+RUN python3 -m pip install --user --break-system-packages --upgrade pytype==2024.1.5
+
+ADD --chown=ci-runner entrypoint.sh /home/ci-runner/entrypoint.sh
+
+RUN chmod -R a=rwX /home/ci-runner/.local/ /home/ci-runner/entrypoint.sh
+
+CMD /home/ci-runner/entrypoint.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/docker/pytype/README.rst Sun Apr 14 00:41:49 2024 -0300
@@ -0,0 +1,10 @@
+Pytype in Docker
+================
+
+To build the container::
+
+  $ docker build --tag evolve-pytype-checker "$(hg root)/contrib/docker/pytype/"
+
+To check the code with pytype::
+
+  $ docker run --rm -it -u "$(id -u):$(id -g)" -v "$(hg root):/tmp/evolve-ci" -v "/path/to/mercurial-repo:/tmp/mercurial-ci" evolve-pytype-checker
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/docker/pytype/entrypoint.sh Sun Apr 14 00:41:49 2024 -0300
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+cd /tmp/mercurial-ci/
+make local
+./contrib/setup-pytype.sh
+
+cd /tmp/evolve-ci/
+pytype --version
+pytype --keep-going --jobs auto -P /tmp/mercurial-ci/:hgext3rd hgext3rd
--- a/debian/control Sun Apr 14 00:26:33 2024 -0300
+++ b/debian/control Sun Apr 14 00:41:49 2024 -0300
@@ -7,7 +7,7 @@
  Pierre-Yves David <pierre-yves.david@logilab.fr>,
 Standards-Version: 4.3.0
 Build-Depends:
- mercurial (>= 4.9),
+ mercurial (>= 5.6),
  python3,
  debhelper (>= 10),
  dh-python,
@@ -26,7 +26,7 @@
  ${python3:Depends},
  ${misc:Depends},
  ${sphinxdoc:Depends},
- mercurial (>= 4.9),
+ mercurial (>= 5.6),
 Built-Using: ${sphinxdoc:Built-Using}
 Description: evolve extension for Mercurial
  This package provides the experimental "evolve" extension for the Mercurial
--- a/debian/source/options Sun Apr 14 00:26:33 2024 -0300
+++ b/debian/source/options Sun Apr 14 00:41:49 2024 -0300
@@ -1,1 +1,1 @@
-extend-diff-ignore = "^(contrib/|\.gitlab/|\.gitlab-ci.yml|\.hg-format-source|MANIFEST|Makefile|docs/tutorial/\.netlify|hgext3rd/evolve/hack/|tests/\.testtimes|tests/test-drop\.t|tests/test-oldconvert\.t)"
+extend-diff-ignore = "^(contrib/|\.gitlab/|\.gitlab-ci.yml|\.hg-format-source|MANIFEST|Makefile|docs/tutorial/\.netlify|hgext3rd/evolve/hack/|tests/\.testtimes|tests/test-drop\.t)"
--- a/docs/conf.py Sun Apr 14 00:26:33 2024 -0300
+++ b/docs/conf.py Sun Apr 14 00:41:49 2024 -0300
@@ -98,7 +98,7 @@
 # The name of an image file (within the static path) to use as favicon of the
 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-html_favicon = 'logo-evolve.ico'
+html_favicon = 'static/logo-evolve.ico'
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
--- a/hgext3rd/evolve/__init__.py Sun Apr 14 00:26:33 2024 -0300 +++ b/hgext3rd/evolve/__init__.py Sun Apr 14 00:41:49 2024 -0300 @@ -30,7 +30,7 @@ backported to older version of Mercurial by this extension. Some older experimental protocols are also supported for a longer time in the extension to help people transitioning. (The extension is currently compatible down to -Mercurial version 4.9). +Mercurial version 5.6). New Config:: @@ -296,6 +296,7 @@ obsolete, pycompat, registrar, + state as statemod, util, ) @@ -317,7 +318,6 @@ obshistory, revset, rewind, - safeguard, templatekw, utility, ) @@ -358,7 +358,6 @@ eh.merge(evolvecmd.eh) eh.merge(obsexchange.eh) eh.merge(obshashtree.eh) -eh.merge(safeguard.eh) eh.merge(obscache.eh) eh.merge(obshistory.eh) eh.merge(templatekw.eh) @@ -1145,65 +1144,26 @@ evolvestateversion = 0 -def _evolvemessage(): +@eh.uisetup +def setupevolveunfinished(ui): _msg = _(b'To continue: hg evolve --continue\n' b'To abort: hg evolve --abort\n' b'To stop: hg evolve --stop\n' b'(also see `hg help evolve.interrupted`)') - return cmdutil._commentlines(_msg) - -def _fixupmessage(): - _msg = _(b'To continue: hg fixup --continue\n' - b'To abort: hg fixup --abort\n') - return cmdutil._commentlines(_msg) - -@eh.uisetup -def setupevolveunfinished(ui): - if not util.safehasattr(cmdutil, 'unfinishedstates'): - from mercurial import state as statemod - _msg = _(b'To continue: hg evolve --continue\n' - b'To abort: hg evolve --abort\n' - b'To stop: hg evolve --stop\n' - b'(also see `hg help evolve.interrupted`)') - statemod.addunfinished(b'evolve', fname=b'evolvestate', - continueflag=True, stopflag=True, - statushint=_msg, - abortfunc=evolvecmd.hgabortevolve, - continuefunc=evolvecmd.hgcontinueevolve) - statemod.addunfinished(b'pick', fname=b'pickstate', continueflag=True, - abortfunc=cmdrewrite.hgabortpick, - continuefunc=cmdrewrite.hgcontinuepick) - _fixup_msg = _(b'To continue: hg fixup --continue\n' - b'To abort: hg fixup --abort\n') - statemod.addunfinished(b'fixup', fname=b'fixup-state', - continueflag=True, statushint=_fixup_msg, - abortfunc=cmdrewrite.hgabortfixup, - continuefunc=cmdrewrite.hgcontinuefixup) - else: - # hg <= 5.0 (5f2f6912c9e6) - estate = (b'evolvestate', False, False, _(b'evolve in progress'), - _(b"use 'hg evolve --continue' or 'hg evolve --abort' to abort")) - cmdutil.unfinishedstates.append(estate) - pstate = (b'pickstate', False, False, _(b'pick in progress'), - _(b"use 'hg pick --continue' or 'hg pick --abort' to abort")) - cmdutil.unfinishedstates.append(pstate) - fstate = (b'fixup-state', False, False, _(b'fixup in progress'), - _(b"use 'hg fixup --continue' or 'hg fixup --abort' to abort")) - cmdutil.unfinishedstates.append(fstate) - - afterresolved = (b'evolvestate', _(b'hg evolve --continue')) - pickresolved = (b'pickstate', _(b'hg pick --continue')) - fixupresolved = (b'fixup-state', _(b'hg fixup --continue')) - cmdutil.afterresolvedstates.append(afterresolved) - cmdutil.afterresolvedstates.append(pickresolved) - cmdutil.afterresolvedstates.append(fixupresolved) - - # hg <= 5.0 (12243f15d53e) - if util.safehasattr(cmdutil, 'STATES'): - cmdutil.STATES = ( - (b'evolve', cmdutil.fileexistspredicate(b'evolvestate'), _evolvemessage), - (b'fixup', cmdutil.fileexistspredicate(b'fixup-state'), _fixupmessage), - ) + cmdutil.STATES + statemod.addunfinished(b'evolve', fname=b'evolvestate', + continueflag=True, stopflag=True, + statushint=_msg, + abortfunc=evolvecmd.hgabortevolve, + continuefunc=evolvecmd.hgcontinueevolve) + 
statemod.addunfinished(b'pick', fname=b'pickstate', continueflag=True, + abortfunc=cmdrewrite.hgabortpick, + continuefunc=cmdrewrite.hgcontinuepick) + _fixup_msg = _(b'To continue: hg fixup --continue\n' + b'To abort: hg fixup --abort\n') + statemod.addunfinished(b'fixup', fname=b'fixup-state', + continueflag=True, statushint=_fixup_msg, + abortfunc=cmdrewrite.hgabortfixup, + continuefunc=cmdrewrite.hgcontinuefixup) @eh.wrapfunction(hg, 'clean') def clean(orig, repo, *args, **kwargs):
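The simplified ``setupevolveunfinished`` above now registers interrupted-command states directly with ``mercurial.state.addunfinished()``, which always exists on the supported range (hg >= 5.6). A minimal sketch of that registration pattern for a hypothetical third-party command; the ``frobnicate`` name, the ``frobnicatestate`` file and the no-op callbacks are illustrative only, not part of evolve::

  from mercurial import state as statemod
  from mercurial.i18n import _

  def uisetup(ui):
      # fname names the file under .hg/ whose presence marks the operation as
      # unfinished; continuefunc/abortfunc back `hg frobnicate --continue/--abort`
      statemod.addunfinished(
          b'frobnicate',                      # hypothetical operation name
          fname=b'frobnicatestate',           # hypothetical state file
          continueflag=True,
          statushint=_(b'To continue: hg frobnicate --continue\n'
                       b'To abort: hg frobnicate --abort'),
          abortfunc=lambda ui, repo: None,    # placeholder callbacks
          continuefunc=lambda ui, repo: None,
      )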
--- a/hgext3rd/evolve/cmdrewrite.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/cmdrewrite.py Sun Apr 14 00:41:49 2024 -0300
@@ -530,7 +530,7 @@
     except TypeError:
         # hg <= 4.9 (db72f9f6580e)
         chunks, opts = cmdutil.recordfilter(repo.ui, originalchunks,
-                                            operation=b'discard')
+                                            operation=b'discard')  # pytype: disable=missing-parameter
     if not chunks:
         raise error.Abort(_(b"nothing selected to uncommit"))
     fp = stringio()
--- a/hgext3rd/evolve/dagutil.py Sun Apr 14 00:26:33 2024 -0300 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,290 +0,0 @@ -# dagutil.py - dag utilities for mercurial -# -# Copyright 2010 Benoit Boissinot <bboissin@gmail.com> -# and Peter Arrenbrecht <peter@arrenbrecht.ch> -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. -# -# Imported from Mercurial code at cee9043c7dba - -from __future__ import absolute_import - -from mercurial.i18n import _ -from mercurial.node import nullrev - -from . import compat - -class basedag(object): - '''generic interface for DAGs - - terms: - "ix" (short for index) identifies a nodes internally, - "id" identifies one externally. - - All params are ixs unless explicitly suffixed otherwise. - Pluralized params are lists or sets. - ''' - - def __init__(self): - self._inverse = None - - def nodeset(self): - '''set of all node ixs''' - raise NotImplementedError - - def heads(self): - '''list of head ixs''' - raise NotImplementedError - - def parents(self, ix): - '''list of parents ixs of ix''' - raise NotImplementedError - - def inverse(self): - '''inverse DAG, where parents becomes children, etc.''' - raise NotImplementedError - - def ancestorset(self, starts, stops=None): - ''' - set of all ancestors of starts (incl), but stop walk at stops (excl) - ''' - raise NotImplementedError - - def descendantset(self, starts, stops=None): - ''' - set of all descendants of starts (incl), but stop walk at stops (excl) - ''' - return self.inverse().ancestorset(starts, stops) - - def headsetofconnecteds(self, ixs): - ''' - subset of connected list of ixs so that no node has a descendant in it - - By "connected list" we mean that if an ancestor and a descendant are in - the list, then so is at least one path connecting them. 
- ''' - raise NotImplementedError - - def externalize(self, ix): - '''return a node id''' - return self._externalize(ix) - - def externalizeall(self, ixs): - '''return a list of (or set if given a set) of node ids''' - ids = self._externalizeall(ixs) - if isinstance(ixs, set): - return set(ids) - return list(ids) - - def internalize(self, id): - '''return a node ix''' - return self._internalize(id) - - def internalizeall(self, ids, filterunknown=False): - '''return a list of (or set if given a set) of node ixs''' - ixs = self._internalizeall(ids, filterunknown) - if isinstance(ids, set): - return set(ixs) - return list(ixs) - -class genericdag(basedag): - '''generic implementations for DAGs''' - - def ancestorset(self, starts, stops=None): - if stops: - stops = set(stops) - else: - stops = set() - seen = set() - pending = list(starts) - while pending: - n = pending.pop() - if n not in seen and n not in stops: - seen.add(n) - pending.extend(self.parents(n)) - return seen - - def headsetofconnecteds(self, ixs): - hds = set(ixs) - if not hds: - return hds - for n in ixs: - for p in self.parents(n): - hds.discard(p) - assert hds - return hds - -class revlogbaseddag(basedag): - '''generic dag interface to a revlog''' - - def __init__(self, revlog, nodeset): - basedag.__init__(self) - self._revlog = revlog - self._heads = None - self._nodeset = nodeset - - def nodeset(self): - return self._nodeset - - def heads(self): - if self._heads is None: - self._heads = self._getheads() - return self._heads - - def _externalize(self, ix): - return self._revlog.index[ix][7] - - def _externalizeall(self, ixs): - idx = self._revlog.index - return [idx[i][7] for i in ixs] - - def _internalize(self, id): - ix = self._revlog.rev(id) - if ix == nullrev: - raise LookupError(id, self._revlog.indexfile, _(b'nullid')) - return ix - - def _internalizeall(self, ids, filterunknown): - rl = self._revlog - getrev = compat.getgetrev(rl) - if filterunknown: - return [r for r in map(getrev, ids) - if (r is not None - and r != nullrev - and r not in rl.filteredrevs)] - return [self._internalize(i) for i in ids] - -class revlogdag(revlogbaseddag): - '''dag interface to a revlog''' - - def __init__(self, revlog, localsubset=None): - revlogbaseddag.__init__(self, revlog, set(revlog)) - self._heads = localsubset - - def _getheads(self): - return [r for r in self._revlog.headrevs() if r != nullrev] - - def parents(self, ix): - rlog = self._revlog - idx = rlog.index - revdata = idx[ix] - prev = revdata[5] - if prev != nullrev: - prev2 = revdata[6] - if prev2 == nullrev: - return [prev] - return [prev, prev2] - prev2 = revdata[6] - if prev2 != nullrev: - return [prev2] - return [] - - def inverse(self): - if self._inverse is None: - self._inverse = inverserevlogdag(self) - return self._inverse - - def ancestorset(self, starts, stops=None): - rlog = self._revlog - idx = rlog.index - if stops: - stops = set(stops) - else: - stops = set() - seen = set() - pending = list(starts) - while pending: - rev = pending.pop() - if rev not in seen and rev not in stops: - seen.add(rev) - revdata = idx[rev] - for i in [5, 6]: - prev = revdata[i] - if prev != nullrev: - pending.append(prev) - return seen - - def headsetofconnecteds(self, ixs): - if not ixs: - return set() - rlog = self._revlog - idx = rlog.index - headrevs = set(ixs) - for rev in ixs: - revdata = idx[rev] - for i in [5, 6]: - prev = revdata[i] - if prev != nullrev: - headrevs.discard(prev) - assert headrevs - return headrevs - - def linearize(self, ixs): - '''linearize and 
topologically sort a list of revisions - - The linearization process tries to create long runs of revs where - a child rev comes immediately after its first parent. This is done by - visiting the heads of the given revs in inverse topological order, - and for each visited rev, visiting its second parent, then its first - parent, then adding the rev itself to the output list. - ''' - sorted = [] - visit = list(self.headsetofconnecteds(ixs)) - visit.sort(reverse=True) - finished = set() - - while visit: - cur = visit.pop() - if cur < 0: - cur = -cur - 1 - if cur not in finished: - sorted.append(cur) - finished.add(cur) - else: - visit.append(-cur - 1) - visit += [p for p in self.parents(cur) - if p in ixs and p not in finished] - assert len(sorted) == len(ixs) - return sorted - -class inverserevlogdag(revlogbaseddag, genericdag): - '''inverse of an existing revlog dag; see revlogdag.inverse()''' - - def __init__(self, orig): - revlogbaseddag.__init__(self, orig._revlog, orig._nodeset) - self._orig = orig - self._children = {} - self._roots = [] - self._walkfrom = len(self._revlog) - 1 - - def _walkto(self, walkto): - rev = self._walkfrom - cs = self._children - roots = self._roots - idx = self._revlog.index - while rev >= walkto: - data = idx[rev] - isroot = True - for prev in [data[5], data[6]]: # parent revs - if prev != nullrev: - cs.setdefault(prev, []).append(rev) - isroot = False - if isroot: - roots.append(rev) - rev -= 1 - self._walkfrom = rev - - def _getheads(self): - self._walkto(nullrev) - return self._roots - - def parents(self, ix): - if ix is None: - return [] - if ix <= self._walkfrom: - self._walkto(ix) - return self._children.get(ix, []) - - def inverse(self): - return self._orig
--- a/hgext3rd/evolve/evolvecmd.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/evolvecmd.py Sun Apr 14 00:41:49 2024 -0300
@@ -822,8 +822,8 @@
     else:
         # hg <= 6.0 (12ac4401ff7d)
         kwargs['base_marker'] = b'|||||||'
 
-    desc = b''.join(merger.merge_lines(**kwargs))
-    conflicts = merger.conflicts
+    desc = b''.join(merger.merge_lines(**kwargs))  # pytype: disable=attribute-error
+    conflicts = merger.conflicts  # pytype: disable=attribute-error
 
     if conflicts:
@@ -911,13 +911,6 @@
     return commitmsg
 
 def use_in_memory_merge(repo):
-    try:
-        from mercurial import mergestate as mergestatemod
-        mergestatemod.memmergestate
-    except (AttributeError, ImportError):
-        # no in-memory evolve if Mercurial lacks the required code
-        # hg <= 5.5 (19590b126764)
-        return False
     config_value = repo.ui.config(b'experimental', b'evolution.in-memory')
     if config_value == b'force':
         return True
--- a/hgext3rd/evolve/legacy.py Sun Apr 14 00:26:33 2024 -0300 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,204 +0,0 @@ -# Copyright 2011 Pierre-Yves David <pierre-yves.david@ens-lyon.org> -# Logilab SA <contact@logilab.fr> -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. -"""Deprecated extension that formerly introduced "Changeset Obsolescence". - -This concept is now partially in Mercurial core (starting with Mercurial 2.3). -The remaining logic has been grouped with the evolve extension. - -Some code remains in this extensions to detect and convert prehistoric format -of obsolete marker than early user may have create. Keep it enabled if you -were such user. -""" - -from mercurial import error - -import sys -import json - -from mercurial.i18n import _ -from mercurial import lock as lockmod -from mercurial.node import bin, nullid -from mercurial import registrar -from mercurial import util - -try: - from mercurial.utils.dateutil import makedate -except ImportError: - # compat with hg < 4.6 - from mercurial.util import makedate - -commandfunc = registrar.command - -##################################################################### -### Older format management ### -##################################################################### - -# Code related to detection and management of older legacy format never -# handled by core - - -def reposetup(ui, repo): - """Detect that a repo still contains some old obsolete format - """ - if not repo.local(): - return - evolveopts = ui.configlist(b'experimental', b'evolution') - if not evolveopts: - evolveopts = b'all' - ui.setconfig(b'experimental', b'evolution', evolveopts) - for arg in sys.argv: - if r'debugc' in arg: - break - else: - data = repo.vfs.tryread(b'obsolete-relations') - if not data: - data = repo.svfs.tryread(b'obsoletemarkers') - if data: - raise error.Abort(b'old format of obsolete marker detected!\n' - b'run `hg debugconvertobsolete` once.') - -def _obsdeserialize(flike): - """read a file like object serialized with _obsserialize - - this deserialize into a {subject -> objects} mapping - - this was the very first format ever.""" - rels = {} - for line in flike: - subhex, objhex = line.split() - subnode = bin(subhex) - if subnode == nullid: - subnode = None - rels.setdefault(subnode, set()).add(bin(objhex)) - return rels - -cmdtable = {} -command = commandfunc(cmdtable) -@command(b'debugconvertobsolete', [], b'') -def cmddebugconvertobsolete(ui, repo): - """import markers from an .hg/obsolete-relations file""" - cnt = 0 - err = 0 - lock = repo.lock() - some = False - try: - unlink = [] - tr = repo.transaction(b'convert-obsolete') - try: - repo._importoldobsolete = True - store = repo.obsstore - ### very first format - try: - f = repo.vfs(b'obsolete-relations') - try: - some = True - for line in f: - subhex, objhex = line.split() - suc = bin(subhex) - prec = bin(objhex) - sucs = (suc == nullid) and [] or [suc] - meta = { - b'date': b'%i %i' % makedate(), - b'user': ui.username(), - } - try: - store.create(tr, prec, sucs, 0, metadata=meta) - cnt += 1 - except ValueError: - repo.ui.write_err(b"invalid old marker line: %s" - % (line)) - err += 1 - finally: - f.close() - unlink.append(repo.vfs.join(b'obsolete-relations')) - except IOError: - pass - ### second (json) format - data = repo.svfs.tryread(b'obsoletemarkers') - if data: - some = True - for oldmark in json.loads(data): - del oldmark[r'id'] # dropped for now - del oldmark[r'reason'] 
# unused until then - oldobject = str(oldmark.pop(r'object')) - oldsubjects = [str(s) for s in oldmark.pop(r'subjects', [])] - lookup_errors = (error.RepoLookupError, error.LookupError) - if len(oldobject) != 40: - try: - oldobject = repo[oldobject].node() - except lookup_errors: - pass - if any(len(s) != 40 for s in oldsubjects): - try: - oldsubjects = [repo[s].node() for s in oldsubjects] - except lookup_errors: - pass - - oldmark[r'date'] = r'%i %i' % tuple(oldmark[r'date']) - meta = dict((k.encode('utf-8'), v.encode('utf-8')) - for k, v in oldmark.items()) - try: - succs = [bin(n) for n in oldsubjects] - succs = [n for n in succs if n != nullid] - store.create(tr, bin(oldobject), succs, - 0, metadata=meta) - cnt += 1 - except ValueError: - msg = b"invalid marker %s -> %s\n" - msg %= (oldobject, oldsubjects) - repo.ui.write_err(msg) - err += 1 - unlink.append(repo.svfs.join(b'obsoletemarkers')) - tr.close() - for path in unlink: - util.unlink(path) - finally: - tr.release() - finally: - del repo._importoldobsolete - lock.release() - if not some: - ui.warn(_(b'nothing to do\n')) - ui.status(b'%i obsolete marker converted\n' % cnt) - if err: - ui.write_err(b'%i conversion failed. check you graph!\n' % err) - -@command(b'debugrecordpruneparents', [], b'') -def cmddebugrecordpruneparents(ui, repo): - """add parent data to prune markers when possible - - This command searches the repo for prune markers without parent information. - If the pruned node is locally known, it creates a new marker with parent - data. - """ - pgop = b'reading markers' - - # lock from the beginning to prevent race - wlock = lock = tr = None - try: - wlock = repo.wlock() - lock = repo.lock() - tr = repo.transaction(b'recordpruneparents') - unfi = repo.unfiltered() - nm = unfi.changelog.nodemap - store = repo.obsstore - pgtotal = len(store._all) - for idx, mark in enumerate(list(store._all)): - if not mark[1]: - rev = nm.get(mark[0]) - if rev is not None: - ctx = unfi[rev] - parents = tuple(p.node() for p in ctx.parents()) - before = len(store._all) - store.create(tr, prec=mark[0], succs=mark[1], flag=mark[2], - metadata=dict(mark[3]), parents=parents) - if len(store._all) - before: - ui.write(_(b'created new markers for %i\n') % rev) - ui.progress(pgop, idx, total=pgtotal) - tr.close() - ui.progress(pgop, None) - finally: - lockmod.release(tr, lock, wlock)
--- a/hgext3rd/evolve/metadata.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/metadata.py Sun Apr 14 00:41:49 2024 -0300
@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-__version__ = b'11.1.4.dev0'
-testedwith = b'4.9 5.0 5.1 5.2 5.3 5.4 5.5 5.6 5.7 5.8 5.9 6.0 6.1 6.2 6.3 6.4 6.5 6.6 6.7'
-minimumhgversion = b'4.9'
+__version__ = b'11.2.0.dev0'
+testedwith = b'5.6 5.7 5.8 5.9 6.0 6.1 6.2 6.3 6.4 6.5 6.6 6.7'
+minimumhgversion = b'5.6'
 buglink = b'https://bz.mercurial-scm.org/'
--- a/hgext3rd/evolve/obsexchange.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/obsexchange.py Sun Apr 14 00:41:49 2024 -0300
@@ -15,7 +15,6 @@
     node,
     obsolete,
     pushkey,
-    util,
     wireprototypes,
     wireprotov1server
 )
@@ -112,10 +111,7 @@
         subset = [c.node() for c in repo.unfiltered().set(b'only(%ln, %ln)', heads, common)]
         subset += kwargs['evo_missing_nodes']
         markers = repo.obsstore.relevantmarkers(subset)
-        if util.safehasattr(bundle2, 'buildobsmarkerspart'):
-            bundle2.buildobsmarkerspart(bundler, markers)
-        else:
-            exchange.buildobsmarkerspart(bundler, markers)
+        bundle2.buildobsmarkerspart(bundler, markers)
 
 def _obscommon_capabilities(orig, repo, proto):
     """wrapper to advertise new capability"""
--- a/hgext3rd/evolve/obshistory.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/obshistory.py Sun Apr 14 00:41:49 2024 -0300
@@ -265,6 +265,7 @@
         super(obsmarker_printer, self).__init__(ui, repo, *args, **kwargs)
         diffopts = kwargs.get('diffopts', {})
+        assert isinstance(diffopts, dict)  # help pytype
         self._includediff = diffopts and diffopts.get(b'patch')
         self.template = diffopts and diffopts.get(b'template')
         self.filter = diffopts and diffopts.get(b'filternonlocal')
@@ -827,6 +828,7 @@
     if all(ef1):
         combined = 0
         for ef in ef1:
+            assert ef is not None  # help pytype
             combined |= int(ef)
 
         # Combined will be in VERBMAPPING only if one bit is set
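The two ``assert`` statements added above exist only to narrow types for pytype, which, like other Python type checkers, treats an assert as a type guard. An illustrative sketch of the same pattern with made-up names::

  def combine_flags(flags):
      """OR together a list of flag values that may contain None."""
      combined = 0
      for flag in flags:
          assert flag is not None  # tells the type checker flag is an int here
          combined |= int(flag)
      return combined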
--- a/hgext3rd/evolve/rewind.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/rewind.py Sun Apr 14 00:41:49 2024 -0300
@@ -335,8 +335,8 @@
     """
     unfi = repo.unfiltered()
     targets = set()
-    revsto = opts.get('to')
-    revsfrom = opts.get('from')
+    revsto = opts['to']
+    revsfrom = opts['from']
     if not (revsto or revsfrom):
         revsfrom.append(b'.')
     if revsto:
--- a/hgext3rd/evolve/rewriteutil.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/rewriteutil.py Sun Apr 14 00:41:49 2024 -0300
@@ -21,7 +21,6 @@
     error,
     hg,
     lock as lockmod,
-    mergeutil,
     node,
     obsolete,
     obsutil,
@@ -233,28 +232,12 @@
     revs = sorted(revs)
     return repomarks, revs
 
-try:
-    from mercurial import mergestate
-    mergestate.memmergestate
-    hasmemmergestate = True
-except (ImportError, AttributeError):
-    # hg <= 5.5 (19590b126764)
-    hasmemmergestate = False
-
 def rewrite(repo, old, head, newbases, commitopts):
     """Return (nodeid, created) where nodeid is the identifier of the
     changeset generated by the rewrite process, and created is True if
     nodeid was actually created. If created is False, nodeid references
     a changeset existing before the rewrite call.
     """
-    # Until there was memmergestate, in-memory would clear the on-disk
-    # mergestate and use that. We don't want that to happen, so we'll require
-    # users of old Mercurial versions to run `hg touch` etc without
-    # mergestate.
-    if not hasmemmergestate:
-        ms = compat.mergestate.read(repo)
-        mergeutil.checkunresolved(ms)
-
     wlock = lock = tr = None
     try:
         wlock = repo.wlock()
--- a/hgext3rd/evolve/safeguard.py Sun Apr 14 00:26:33 2024 -0300 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,64 +0,0 @@ -# Code dedicated to adding various "safeguard" around evolution -# -# Some of these will be pollished and upstream when mature. Some other will be -# replaced by better alternative later. -# -# Copyright 2017 Pierre-Yves David <pierre-yves.david@ens-lyon.org> -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. - -from mercurial.i18n import _ - -from mercurial import ( - configitems, - error, -) - -from . import exthelper - -eh = exthelper.exthelper() - -# hg <= 4.8 (33d30fb1e4ae) -if b'auto-publish' not in configitems.coreitems.get(b'experimental', {}): - - eh.configitem(b'experimental', b'auto-publish', b'publish') - - def _checkpublish(pushop): - repo = pushop.repo - ui = repo.ui - behavior = ui.config(b'experimental', b'auto-publish') - nocheck = behavior not in (b'warn', b'abort') - if nocheck or getattr(pushop, 'publish', False): - return - remotephases = pushop.remote.listkeys(b'phases') - publishing = remotephases.get(b'publishing', False) - if publishing: - if pushop.revs is None: - published = repo.filtered(b'served').revs(b"not public()") - else: - published = repo.revs(b"::%ln - public()", pushop.revs) - # we want to use pushop.revs in the revset even if they - # themselves are secret, but we don't want to have anything - # that the server won't see in the result of this expression - published &= repo.filtered(b'served') - if published: - if behavior == b'warn': - ui.warn(_(b'%i changesets about to be published\n') - % len(published)) - elif behavior == b'abort': - msg = _(b'push would publish 1 changesets') - hint = _(b"behavior controlled by " - b"'experimental.auto-publish' config") - raise error.Abort(msg, hint=hint) - - @eh.reposetup - def setuppublishprevention(ui, repo): - - class noautopublishrepo(repo.__class__): - - def checkpush(self, pushop): - super(noautopublishrepo, self).checkpush(pushop) - _checkpublish(pushop) - - repo.__class__ = noautopublishrepo
--- a/hgext3rd/evolve/stablerange.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/stablerange.py Sun Apr 14 00:41:49 2024 -0300
@@ -493,7 +493,7 @@
         else:
             ui.status(b'%s - %s\n' % (rstr, subsstr))
 
-class abstractstablerange(object):
+class abstractstablerange(object):  # pytype: disable=ignored-metaclass
     """The official API for a stablerange"""
 
     __metaclass__ = abc.ABCMeta
@@ -541,7 +541,8 @@
         assert standard_start < rangedepth
         slicepoint = standard_start
         return slicepoint
-class stablerangebasic(abstractstablerange):
+
+class stablerangebasic(abstractstablerange):  # pytype: disable=ignored-metaclass
    """a very dummy implementation of stablerange

    the implementation is here to lay down the basic algorithm in the stable
@@ -628,7 +629,7 @@
     def _sortfunction(self, repo, headrev):
         return stablesort.stablesort_mergepoint_head_basic(repo, [headrev])
 
-class stablerangecached(abstractstablerange):
+class stablerangecached(abstractstablerange):  # pytype: disable=ignored-metaclass
     """an implementation of stablerange using caching"""
 
     __metaclass__ = abc.ABCMeta
@@ -646,6 +647,9 @@
         headrev, index = rangeid[0], rangeid[1]
         return self.depthrev(repo, headrev) - index
 
+    def _subranges(self, repo, rangeid):
+        raise NotImplementedError()
+
     def subranges(self, repo, rangeid):
         assert 0 <= rangeid[1] <= rangeid[0], rangeid
         cached = self._getsub(rangeid)
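Because the py2-style ``__metaclass__ = abc.ABCMeta`` attribute is invisible to pytype, these classes are not seen as abstract; declaring ``_subranges()`` with a ``NotImplementedError`` body gives the checker (and readers) a concrete signature to resolve. A small sketch of the idea, with illustrative names::

  import abc

  class cachedbase(object):  # pytype: disable=ignored-metaclass
      __metaclass__ = abc.ABCMeta  # py2-style metaclass, kept for compatibility

      def _subranges(self, repo, rangeid):
          # concrete subclasses override this; the placeholder lets pytype
          # resolve self._subranges in subranges() below
          raise NotImplementedError()

      def subranges(self, repo, rangeid):
          return self._subranges(repo, rangeid)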
--- a/hgext3rd/evolve/stablerangecache.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/stablerangecache.py Sun Apr 14 00:41:49 2024 -0300
@@ -168,12 +168,14 @@
     body = r' OR '.join(_querysuperrangesbody % r for r in ranges)
     return _querysuperrangesmain % body
 
-class stablerangesqlbase(stablerange.stablerangecached):
+class stablerangesqlbase(stablerange.stablerangecached):  # pytype: disable=ignored-metaclass
     """class that can handle all the bits needed to store range into sql
     """
 
     __metaclass__ = abc.ABCMeta
 
+    _tiprev = None
+    _tipnode = None
     _schemaversion = None
     _cachefile = None
 
@@ -368,8 +370,6 @@
 class stablerangesql(stablerangesqlbase, stablerangeondiskbase):
     """base clase able to preserve data to disk as sql"""
 
-    __metaclass__ = abc.ABCMeta
-
     # self._cachekey = (tiprev, tipnode)
 
     @property
--- a/hgext3rd/evolve/state.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/evolve/state.py Sun Apr 14 00:41:49 2024 -0300
@@ -26,7 +26,7 @@
     from .thirdparty import cbor
     cbor.__doc__  # trigger ImportError immediately
 except ImportError:
-    import cbor
+    import cbor  # pytype: disable=import-error
 
 from mercurial import (
     error,
--- a/hgext3rd/evolve/thirdparty/cbor.py Sun Apr 14 00:26:33 2024 -0300 +++ b/hgext3rd/evolve/thirdparty/cbor.py Sun Apr 14 00:41:49 2024 -0300 @@ -79,23 +79,23 @@ CBOR_TAG_MIME = 36 # following text is MIME message, headers, separators and all CBOR_TAG_CBOR_FILEHEADER = 55799 # can open a file with 0xd9d9f7 -_CBOR_TAG_BIGNUM_BYTES = struct.pack(b'B', CBOR_TAG | CBOR_TAG_BIGNUM) +_CBOR_TAG_BIGNUM_BYTES = struct.pack('B', CBOR_TAG | CBOR_TAG_BIGNUM) def dumps_int(val): - b"return bytes representing int val in CBOR" + "return bytes representing int val in CBOR" if val >= 0: # CBOR_UINT is 0, so I'm lazy/efficient about not OR-ing it in. if val <= 23: - return struct.pack(b'B', val) + return struct.pack('B', val) if val <= 0x0ff: - return struct.pack(b'BB', CBOR_UINT8_FOLLOWS, val) + return struct.pack('BB', CBOR_UINT8_FOLLOWS, val) if val <= 0x0ffff: - return struct.pack(b'!BH', CBOR_UINT16_FOLLOWS, val) + return struct.pack('!BH', CBOR_UINT16_FOLLOWS, val) if val <= 0x0ffffffff: - return struct.pack(b'!BI', CBOR_UINT32_FOLLOWS, val) + return struct.pack('!BI', CBOR_UINT32_FOLLOWS, val) if val <= 0x0ffffffffffffffff: - return struct.pack(b'!BQ', CBOR_UINT64_FOLLOWS, val) + return struct.pack('!BQ', CBOR_UINT64_FOLLOWS, val) outb = _dumps_bignum_to_bytearray(val) return _CBOR_TAG_BIGNUM_BYTES + _encode_type_num(CBOR_BYTES, len(outb)) + outb val = -1 - val @@ -119,28 +119,28 @@ def dumps_float(val): - return struct.pack(b"!Bd", CBOR_FLOAT64, val) + return struct.pack("!Bd", CBOR_FLOAT64, val) -_CBOR_TAG_NEGBIGNUM_BYTES = struct.pack(b'B', CBOR_TAG | CBOR_TAG_NEGBIGNUM) +_CBOR_TAG_NEGBIGNUM_BYTES = struct.pack('B', CBOR_TAG | CBOR_TAG_NEGBIGNUM) def _encode_type_num(cbor_type, val): """For some CBOR primary type [0..7] and an auxiliary unsigned number, return CBOR encoded bytes""" assert val >= 0 if val <= 23: - return struct.pack(b'B', cbor_type | val) + return struct.pack('B', cbor_type | val) if val <= 0x0ff: - return struct.pack(b'BB', cbor_type | CBOR_UINT8_FOLLOWS, val) + return struct.pack('BB', cbor_type | CBOR_UINT8_FOLLOWS, val) if val <= 0x0ffff: - return struct.pack(b'!BH', cbor_type | CBOR_UINT16_FOLLOWS, val) + return struct.pack('!BH', cbor_type | CBOR_UINT16_FOLLOWS, val) if val <= 0x0ffffffff: - return struct.pack(b'!BI', cbor_type | CBOR_UINT32_FOLLOWS, val) + return struct.pack('!BI', cbor_type | CBOR_UINT32_FOLLOWS, val) if (((cbor_type == CBOR_NEGINT) and (val <= 0x07fffffffffffffff)) or ((cbor_type != CBOR_NEGINT) and (val <= 0x0ffffffffffffffff))): - return struct.pack(b'!BQ', cbor_type | CBOR_UINT64_FOLLOWS, val) + return struct.pack('!BQ', cbor_type | CBOR_UINT64_FOLLOWS, val) if cbor_type != CBOR_NEGINT: - raise Exception(b"value too big for CBOR unsigned number: {0!r}".format(val)) + raise Exception("value too big for CBOR unsigned number: {0!r}".format(val)) outb = _dumps_bignum_to_bytearray(val) return _CBOR_TAG_NEGBIGNUM_BYTES + _encode_type_num(CBOR_BYTES, len(outb)) + outb @@ -201,8 +201,8 @@ def dumps_bool(b): if b: - return struct.pack(b'B', CBOR_TRUE) - return struct.pack(b'B', CBOR_FALSE) + return struct.pack('B', CBOR_TRUE) + return struct.pack('B', CBOR_FALSE) def dumps_tag(t, sort_keys=False): @@ -223,7 +223,7 @@ def dumps(ob, sort_keys=False): if ob is None: - return struct.pack(b'B', CBOR_NULL) + return struct.pack('B', CBOR_NULL) if isinstance(ob, bool): return dumps_bool(ob) if _is_stringish(ob): @@ -239,7 +239,7 @@ return dumps_int(ob) if isinstance(ob, Tag): return dumps_tag(ob, sort_keys=sort_keys) - raise Exception(b"don't know how to cbor 
serialize object of type %s", type(ob)) + raise Exception("don't know how to cbor serialize object of type %s", type(ob)) # same basic signature as json.dump, but with no options (yet) @@ -260,7 +260,7 @@ self.value = value def __repr__(self): - return b"Tag({0!r}, {1!r})".format(self.tag, self.value) + return "Tag({0!r}, {1!r})".format(self.tag, self.value) def __eq__(self, other): if not isinstance(other, Tag): @@ -273,7 +273,7 @@ Parse CBOR bytes and return Python objects. """ if data is None: - raise ValueError(b"got None for buffer to decode in loads") + raise ValueError("got None for buffer to decode in loads") fp = StringIO(data) return _loads(fp)[0] @@ -296,22 +296,22 @@ aux = tag_aux elif tag_aux == CBOR_UINT8_FOLLOWS: data = fp.read(1) - aux = struct.unpack_from(b"!B", data, 0)[0] + aux = struct.unpack_from("!B", data, 0)[0] bytes_read += 1 elif tag_aux == CBOR_UINT16_FOLLOWS: data = fp.read(2) - aux = struct.unpack_from(b"!H", data, 0)[0] + aux = struct.unpack_from("!H", data, 0)[0] bytes_read += 2 elif tag_aux == CBOR_UINT32_FOLLOWS: data = fp.read(4) - aux = struct.unpack_from(b"!I", data, 0)[0] + aux = struct.unpack_from("!I", data, 0)[0] bytes_read += 4 elif tag_aux == CBOR_UINT64_FOLLOWS: data = fp.read(8) - aux = struct.unpack_from(b"!Q", data, 0)[0] + aux = struct.unpack_from("!Q", data, 0)[0] bytes_read += 8 else: - assert tag_aux == CBOR_VAR_FOLLOWS, b"bogus tag {0:02x}".format(tb) + assert tag_aux == CBOR_VAR_FOLLOWS, "bogus tag {0:02x}".format(tb) aux = None return tag, tag_aux, aux, bytes_read @@ -385,9 +385,9 @@ return ob, bytes_read def _loads(fp, limit=None, depth=0, returntags=False): - b"return (object, bytes read)" + "return (object, bytes read)" if depth > _MAX_DEPTH: - raise Exception(b"hit CBOR loads recursion depth limit") + raise Exception("hit CBOR loads recursion depth limit") tb = _read_byte(fp) @@ -397,16 +397,16 @@ # Some special cases of CBOR_7 best handled by special struct.unpack logic here if tb == CBOR_FLOAT16: data = fp.read(2) - hibyte, lowbyte = struct.unpack_from(b"BB", data, 0) + hibyte, lowbyte = struct.unpack_from("BB", data, 0) exp = (hibyte >> 2) & 0x1F mant = ((hibyte & 0x03) << 8) | lowbyte if exp == 0: val = mant * (2.0 ** -24) elif exp == 31: if mant == 0: - val = float(b'Inf') + val = float('Inf') else: - val = float(b'NaN') + val = float('NaN') else: val = (mant + 1024.0) * (2 ** (exp - 25)) if hibyte & 0x80: @@ -414,11 +414,11 @@ return (val, 3) elif tb == CBOR_FLOAT32: data = fp.read(4) - pf = struct.unpack_from(b"!f", data, 0) + pf = struct.unpack_from("!f", data, 0) return (pf[0], 5) elif tb == CBOR_FLOAT64: data = fp.read(8) - pf = struct.unpack_from(b"!d", data, 0) + pf = struct.unpack_from("!d", data, 0) return (pf[0], 9) tag, tag_aux, aux, bytes_read = _tag_aux(fp, tb) @@ -461,7 +461,7 @@ return (None, bytes_read) if tb == CBOR_UNDEFINED: return (None, bytes_read) - raise ValueError(b"unknown cbor tag 7 byte: {:02x}".format(tb)) + raise ValueError("unknown cbor tag 7 byte: {:02x}".format(tb)) def loads_bytes(fp, aux, btag=CBOR_BYTES): @@ -481,7 +481,7 @@ total_bytes_read += 1 break tag, tag_aux, aux, bytes_read = _tag_aux(fp, tb) - assert tag == btag, b'variable length value contains unexpected component' + assert tag == btag, 'variable length value contains unexpected component' ob = fp.read(aux) chunklist.append(ob) total_bytes_read += bytes_read + aux
--- a/hgext3rd/pullbundle.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/pullbundle.py Sun Apr 14 00:41:49 2024 -0300
@@ -383,7 +383,7 @@
     if r'ancestorsof' in discovery.outgoing.__init__.__code__.co_varnames:
         return discovery.outgoing(repo, missingroots=nodes, ancestorsof=nodes)
     else:
-        return discovery.outgoing(repo, missingroots=nodes, missingheads=nodes)
+        return discovery.outgoing(repo, missingroots=nodes, missingheads=nodes)  # pytype: disable=wrong-keyword-args
 
 
 # changegroup part construction
--- a/hgext3rd/topic/__init__.py Sun Apr 14 00:26:33 2024 -0300 +++ b/hgext3rd/topic/__init__.py Sun Apr 14 00:41:49 2024 -0300 @@ -80,32 +80,32 @@ Publishing behavior =================== -Topic vanish when changeset move to the public phases. Moving to the public -phase usually happens on push, but it is possible to update that behavior. The +Topics vanish when changesets move to the public phase. Moving to the public +phase usually happens on push, but it is possible to modify this behavior. The server needs to have specific config for this. -* everything pushed become public (the default):: +* everything pushed becomes public (the default):: [phases] publish = yes -* nothing push turned public:: +* nothing pushed turns public:: [phases] publish = no -* topic branches are not published, changeset without topic are:: +* topic branches are not published, changesets without topic are:: [phases] publish = no [experimental] topic.publish-bare-branch = yes -In addition, the topic extension adds a ``--publish`` flag on :hg:`push`. When -used, the pushed revisions are published if the push succeeds. It also applies -to common revisions selected by the push. +In addition, :hg:`push` command has a ``--publish`` flag. When used, the pushed +revisions are published if the push succeeds. It also applies to common +revisions selected by the push. -One can prevent any publishing to happens in a repository using:: +One can prevent any publishing from happening in a repository using:: [experimental] topic.allow-publish = no @@ -238,10 +238,10 @@ b'log.topic': b'green_background', } -__version__ = b'1.1.4.dev0' +__version__ = b'1.2.0.dev0' -testedwith = b'4.9 5.0 5.1 5.2 5.3 5.4 5.5 5.6 5.7 5.8 5.9 6.0 6.1 6.2 6.3 6.4 6.5 6.6 6.7' -minimumhgversion = b'4.9' +testedwith = b'5.6 5.7 5.8 5.9 6.0 6.1 6.2 6.3 6.4 6.5 6.6 6.7' +minimumhgversion = b'5.6' buglink = b'https://bz.mercurial-scm.org/' configtable = {} @@ -486,11 +486,7 @@ # - forcefully making changesets draft again # - turning secret changesets draft and making them visible to peers tnsphases = (phases.secret, phases.draft) - phasechanges = tr.changes[b'phases'] - if isinstance(phasechanges, dict): - # hg <= 5.3 (fdc802f29b2c) - phasechanges = [((k,), v) for k, v in phasechanges.items()] - for revs, (old, new) in phasechanges: + for revs, (old, new) in tr.changes[b'phases']: if old not in tnsphases and new not in tnsphases: # Skip phase movement if there is no phase (old or new) that has # visible topic namespace (i.e. 
draft and secret) @@ -559,8 +555,6 @@ extensions.afterloaded(b'rebase', _fixrebase) - flow.installpushflag(ui) - entry = extensions.wrapcommand(commands.table, b'commit', commitwrap) entry[1].append((b't', b'topic', b'', _(b"use specified topic"), _(b'TOPIC'))) @@ -606,17 +600,8 @@ # Wrap changelog.add to drop empty topic extensions.wrapfunction(changelog.changelog, 'add', wrapadd) # Make exchange._checkpublish handle experimental.topic.publish-bare-branch - if util.safehasattr(exchange, '_checkpublish'): - extensions.wrapfunction(exchange, '_checkpublish', - flow.replacecheckpublish) - else: - # hg <= 4.8 (33d30fb1e4ae) - try: - evolve = extensions.find(b'evolve') - extensions.wrapfunction(evolve.safeguard, '_checkpublish', - flow.replacecheckpublish) - except (KeyError, AttributeError): - pass + extensions.wrapfunction(exchange, '_checkpublish', + flow.replacecheckpublish) try: histedit = extensions.find(b'histedit') @@ -769,14 +754,13 @@ return super(topicrepo, self).branchmap() bm = self.filtered(topicfilter).branchmap() if convertbm: - entries = compat.bcentries(bm) - for key in list(entries): + for key in list(bm._entries): branch, tns, topic = common.parsefqbn(key) if topic: - value = entries.pop(key) + value = bm._entries.pop(key) # we lose namespace when converting to ":" format key = b'%s:%s' % (branch, topic) - entries[key] = value + bm._entries[key] = value return bm def branchmaptns(self, topic=None): @@ -790,7 +774,11 @@ def branchheads(self, branch=None, start=None, closed=False): if branch is None: - branch = self[None].branch() + # using dirstate.branch() instead of self[None].branch() + # because we wrap context.branch method to return branch + # already in FQBN format, and we can't give it to formatfqbn() + # again directly + branch = self.dirstate.branch() branch = common.formatfqbn(branch, self.currenttns, self.currenttopic) return super(topicrepo, self).branchheads(branch=branch, start=start, @@ -811,6 +799,9 @@ peer = super(topicrepo, self).peer(*args, **kwargs) if getattr(peer, '_repo', None) is not None: # localpeer class topicpeer(peer.__class__): + def commandexecutor(self): + return server.topiccommandexecutor(self) + def branchmap(self): usetopic = not self._repo.publishing() return self._repo.branchmap(topic=usetopic, convertbm=usetopic) @@ -826,7 +817,7 @@ else: # only changesets in the selected topic namespaces are visible h = [] - entries = compat.bcentries(self._repo.branchmaptns()) + entries = self._repo.branchmaptns()._entries for branch, nodes in compat.branchmapitems(entries): namedbranch, tns, topic = common.parsefqbn(branch) if tns == b'none' or tns in namespaces: @@ -843,23 +834,11 @@ reporef = weakref.ref(self) if self.ui.configbool(b'experimental', b'enforce-single-head'): - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - origvalidator_single_head = tr._validator - def _validate_single_head(tr2): repo = reporef() flow.enforcesinglehead(repo, tr2) - def validator(tr2): - _validate_single_head(tr2) - return origvalidator_single_head(tr2) - - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - tr._validator = validator - else: - tr.addvalidator(b'000-enforce-single-head', _validate_single_head) + tr.addvalidator(b'000-enforce-single-head', _validate_single_head) topicmodeserver = self.ui.config(b'experimental', b'topic-mode.server', b'ignore') @@ -867,23 +846,11 @@ b'topic.publish-bare-branch') ispush = desc.startswith((b'push', b'serve')) if (topicmodeserver != b'ignore' and ispush): - if util.safehasattr(tr, 
'_validator'): - # hg <= 5.3 (36f08ae87ef6) - origvalidator_untopiced = tr._validator - def _validate_untopiced(tr2): repo = reporef() flow.rejectuntopicedchangeset(repo, tr2) - def validator(tr2): - _validate_untopiced(tr2) - return origvalidator_untopiced(tr2) - - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - tr._validator = validator - else: - tr.addvalidator(b'000-reject-untopiced', _validate_untopiced) + tr.addvalidator(b'000-reject-untopiced', _validate_untopiced) elif publishbare and ispush: origclose = tr.close @@ -899,61 +866,25 @@ b'topic.allow-publish', True) if not allow_publish: - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - origvalidator_publish = tr._validator - def _validate_publish(tr2): repo = reporef() flow.reject_publish(repo, tr2) - def validator(tr2): - _validate_publish(tr2) - return origvalidator_publish(tr2) - - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - tr._validator = validator - else: - tr.addvalidator(b'000-reject-publish', _validate_publish) + tr.addvalidator(b'000-reject-publish', _validate_publish) if self.ui.configbool(b'experimental', b'tns-reject-push'): - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - origvalidator_publish = tr._validator - def _validate_csets_with_tns(tr2): repo = reporef() flow.reject_csets_with_tns(repo, tr2) - def validator(tr2): - _validate_csets_with_tns(tr2) - return origvalidator_publish(tr2) - - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - tr._validator = validator - else: - tr.addvalidator(b'000-reject-csets-with-tns', _validate_csets_with_tns) - - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - origvalidator_affected_tns = tr._validator + tr.addvalidator(b'000-reject-csets-with-tns', _validate_csets_with_tns) def _validate_affected_tns(tr2): repo = reporef() + assert repo is not None # help pytype find_affected_tns(repo, tr2) - def validator(tr2): - result = origvalidator_affected_tns(tr2) - _validate_affected_tns(tr2) - return result - - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - tr._validator = validator - else: - tr.addvalidator(b'999-find-affected-tns', _validate_affected_tns) + tr.addvalidator(b'999-find-affected-tns', _validate_affected_tns) # real transaction start ct = self.currenttopic @@ -1040,8 +971,7 @@ self._extra[constants.extrakey] = b'' def wrapadd(orig, cl, manifest, files, desc, transaction, p1, p2, user, - date=None, extra=None, p1copies=None, p2copies=None, - filesadded=None, filesremoved=None): + date=None, extra=None): if b'topic-namespace' in extra and extra[b'topic-namespace'] == b'none': extra = extra.copy() del extra[b'topic-namespace'] @@ -1052,19 +982,8 @@ # if topic is not in extra, drop namespace as well extra = extra.copy() del extra[b'topic-namespace'] - # hg <= 4.9 (0e41f40b01cc) - kwargs = {} - if p1copies is not None: - kwargs['p1copies'] = p1copies - if p2copies is not None: - kwargs['p2copies'] = p2copies - # hg <= 5.0 (f385ba70e4af) - if filesadded is not None: - kwargs['filesadded'] = filesadded - if filesremoved is not None: - kwargs['filesremoved'] = filesremoved return orig(cl, manifest, files, desc, transaction, p1, p2, user, - date=date, extra=extra, **kwargs) + date=date, extra=extra) def applychangewrap(orig, self): orig(self) @@ -1161,7 +1080,7 @@ utopic = encoding.unifromlocal(topic) except error.Abort: # Maybe we should allow these topic names as well, as long as they - # don't break any other rules + # don't 
break any other rules. utopic = '' rmatch = re.match(r'[-_.\w]+', utopic, re.UNICODE) if not utopic or not rmatch or rmatch.group(0) != utopic: @@ -1334,7 +1253,7 @@ _applyconvertbmarktopic(ui, repo, targetrevs, revnum, bmark, tr) tr.close() finally: - tr.release() + lockmod.release(tr) finally: lockmod.release(lock, wlock) @@ -1890,7 +1809,7 @@ """set or show the current topic namespace""" if opts.get('clear'): if tns: - raise error.Abort(_(b"cannot use --clear when setting a topic namespace")) + raise compat.InputError(_(b"cannot use --clear when setting a topic namespace")) tns = b'none' elif not tns: ui.write(b'%s\n' % repo.currenttns) @@ -1898,9 +1817,9 @@ if tns: tns = tns.strip() if not tns: - raise error.Abort(_(b"topic namespace cannot consist entirely of whitespace")) + raise compat.InputError(_(b"topic namespace cannot consist entirely of whitespace")) if b'/' in tns: - raise error.Abort(_(b"topic namespace cannot contain '/' character")) + raise compat.InputError(_(b"topic namespace cannot contain '/' character")) scmutil.checknewlabel(repo, tns, b'topic namespace') helptxt = _(b"topic namespace names can only consist of alphanumeric, " @@ -1908,8 +1827,8 @@ try: utns = encoding.unifromlocal(tns) except error.Abort: - # Maybe we should allow these topic names as well, as long as they - # don't break any other rules + # Maybe we should allow these topic namespace names as well, as + # long as they don't break any other rules. utns = '' rmatch = re.match(r'[-_.\w]+', utns, re.UNICODE) if not utns or not rmatch or rmatch.group(0) != utns:
--- a/hgext3rd/topic/compat.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/topic/compat.py Sun Apr 14 00:41:49 2024 -0300
@@ -25,13 +25,6 @@
         return branchmap.iteritems()
     # py3-transform: on
 
-def bcentries(branchcache):
-    if util.safehasattr(branchcache, '_entries'):
-        return branchcache._entries
-    else:
-        # hg <= 4.9 (624d6683c705+b137a6793c51)
-        return branchcache
-
 # nodemap.get and index.[has_node|rev|get_rev]
 # hg <= 5.2 (02802fa87b74)
 def getgetrev(cl):
--- a/hgext3rd/topic/discovery.py Sun Apr 14 00:26:33 2024 -0300 +++ b/hgext3rd/topic/discovery.py Sun Apr 14 00:41:49 2024 -0300 @@ -83,7 +83,6 @@ remote = pushop.remote publishedset = () - remotebranchmap = None if remote.capable(b'topics-namespaces'): origremotebranchmap = remote.branchmaptns else: @@ -137,8 +136,7 @@ with ctxoverride, configoverride: try: - if remotebranchmap is not None: - remote.branchmap = remotebranchmap + remote.branchmap = remotebranchmap unxx = repo.filtered(b'unfiltered-topic') repo.unfiltered = lambda: unxx pushop.repo = repo @@ -152,8 +150,7 @@ finally: if r'unfiltered' in vars(repo): del repo.unfiltered - if remotebranchmap is not None: - remote.branchmap = origremotebranchmap + remote.branchmap = origremotebranchmap def wireprotobranchmap(orig, repo, proto): if not common.hastopicext(repo): @@ -185,7 +182,7 @@ if not common.hastopicext(repo): return wireprotov1server.branchmap(repo, proto) heads = [] - entries = compat.bcentries(repo.branchmaptns()) + entries = repo.branchmaptns()._entries for branch, nodes in compat.branchmapitems(entries): branchname = urlreq.quote(encoding.fromlocal(branch)) branchnodes = wireprototypes.encodelist(nodes) @@ -266,9 +263,6 @@ return tr._prepushheads = _nbheads(op.repo) reporef = weakref.ref(op.repo) - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - oldvalidator = tr._validator def _validate(tr): repo = reporef() @@ -298,15 +292,7 @@ b"pushing new heads") raise compat.StateError(msg, hint=hint) - def validator(tr): - _validate(tr) - return oldvalidator(tr) - - if util.safehasattr(tr, '_validator'): - # hg <= 5.3 (36f08ae87ef6) - tr._validator = validator - else: - tr.addvalidator(b'000-new-head-check', _validate) + tr.addvalidator(b'000-new-head-check', _validate) handlecheckheads.params = frozenset() @@ -325,11 +311,9 @@ caps.append(b'topics-namespaces') return caps -# **kwargs is for accommodating an optional changelog argument -# hg <= 4.8 (5e5c8f2a1eb5) -def wrapbranchinfo(orig, self, rev, **kwargs): +def wrapbranchinfo(orig, self, rev): # NOTE: orig can be either branchinfo() or _branchinfo()! - b, close = orig(self, rev, **kwargs) + b, close = orig(self, rev) if common.hastopicext(self._repo): if self._repo.ui.configbool(b'_internal', b'tns-disable-fqbn'): # the config option prevents this function from doing anything, @@ -344,16 +328,14 @@ b = ctx.fqbn() return b, close -# **kwargs is for accommodating an optional changelog argument -# hg <= 4.8 (5e5c8f2a1eb5) -def wrapslowbranchinfo(orig, self, rev, **kwargs): +def wrapslowbranchinfo(orig, self, rev): if self.branchinfo == self._branchinfo: # _branchinfo() gets called directly and needs to do the conversion - return wrapbranchinfo(orig, self, rev, **kwargs) + return wrapbranchinfo(orig, self, rev) else: # _branchinfo() gets called through branchinfo(), the latter will need # to do the conversion - return orig(self, rev, **kwargs) + return orig(self, rev) def wrapaddpartrevbranchcache(orig, repo, bundler, outgoing): """making sure we send rev-branch-cache that only has bare branches"""
--- a/hgext3rd/topic/flow.py Sun Apr 14 00:26:33 2024 -0300
+++ b/hgext3rd/topic/flow.py Sun Apr 14 00:41:49 2024 -0300
@@ -1,13 +1,10 @@
 from __future__ import absolute_import
 
 from mercurial import (
-    commands,
     error,
     exchange,
-    extensions,
     node,
     phases,
-    util,
 )
 
 from mercurial.i18n import _
@@ -64,18 +61,11 @@
 
 def reject_publish(repo, tr):
     """prevent a transaction to be publish anything"""
-    if util.safehasattr(tr.changes[b'phases'], 'items'):
-        # hg <= 5.3 (fdc802f29b2c)
-        published = {
-            r for r, (o, n) in tr.changes[b'phases'].items()
-            if n == phases.public
-        }
-    else:
-        revranges = [
-            r for r, (o, n) in tr.changes[b'phases']
-            if n == phases.public
-        ]
-        published = {r for revrange in revranges for r in revrange}
+    revranges = [
+        r for r, (o, n) in tr.changes[b'phases']
+        if n == phases.public
+    ]
+    published = {r for revrange in revranges for r in revrange}
     if published:
         r = min(published)
         msg = b"rejecting publishing of changeset %s" % repo[r]
@@ -106,43 +96,6 @@
         fullmsg = _(b"rejecting draft changesets with topic namespace: %s")
         raise error.Abort(fullmsg % msg)
 
-def wrappush(orig, repo, remote, *args, **kwargs):
-    """interpret the --publish flag and pass it to the push operation"""
-    newargs = kwargs.copy()
-    if kwargs.pop('publish', False):
-        opargs = kwargs.get('opargs')
-        if opargs is None:
-            opargs = {}
-        newargs[r'opargs'] = opargs.copy()
-        newargs[r'opargs'][b'publish'] = True
-    return orig(repo, remote, *args, **newargs)
-
-def extendpushoperation(orig, self, *args, **kwargs):
-    publish = kwargs.pop('publish', False)
-    orig(self, *args, **kwargs)
-    self.publish = publish
-
-def wrapphasediscovery(orig, pushop):
-    orig(pushop)
-    if getattr(pushop, 'publish', False):
-        if not pushop.remotephases.publishing:
-            unfi = pushop.repo.unfiltered()
-            droots = pushop.remotephases.draftroots
-            revset = b'%ln and (not public() or %ln::)'
-            future = list(unfi.set(revset, pushop.futureheads, droots))
-            pushop.outdatedphases = future
-
-def installpushflag(ui):
-    entry = extensions.wrapcommand(commands.table, b'push', wrappush)
-    if not any(opt for opt in entry[1] if opt[1] == b'publish'):
-        # hg <= 4.8 (9b8d1ad851f8)
-        entry[1].append((b'', b'publish', False,
-                         _(b'push the changeset as public')))
-    extensions.wrapfunction(exchange.pushoperation, '__init__',
-                            extendpushoperation)
-    extensions.wrapfunction(exchange, '_pushdiscoveryphase', wrapphasediscovery)
-    exchange.pushdiscoverymapping[b'phase'] = exchange._pushdiscoveryphase
-
 def replacecheckpublish(orig, pushop):
     listkeys = exchange.listkeys
     repo = pushop.repo
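With the hg <= 5.3 branch gone, ``tr.changes[b'phases']`` is always a list of ``(revrange, (oldphase, newphase))`` pairs rather than a dict keyed by revision. A sketch of collecting the revisions a transaction publishes, mirroring the simplified ``reject_publish`` above (the helper name is illustrative)::

  from mercurial import phases

  def published_revs(tr):
      # each entry covers a range of revisions moving from old to new phase
      revranges = [
          revrange
          for revrange, (old, new) in tr.changes[b'phases']
          if new == phases.public
      ]
      return {rev for revrange in revranges for rev in revrange}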
--- a/hgext3rd/topic/revset.py Sun Apr 14 00:26:33 2024 -0300 +++ b/hgext3rd/topic/revset.py Sun Apr 14 00:41:49 2024 -0300 @@ -4,7 +4,6 @@ error, registrar, revset, - util, ) from mercurial.utils import stringutil @@ -147,80 +146,66 @@ branch = repo[None].branch() return revset.baseset(stack.stack(repo, branch=branch, topic=topic)[1:]) & subset -# x#y[z] revset operator support (no support for older version) -# hg <= 4.8 (e54bfde922f2) -if util.safehasattr(revset, 'subscriptrelations'): - def stacksubrel(repo, subset, x, rel, z, order): - """This is a revset-flavored implementation of stack aliases. +def stacksubrel(repo, subset, x, rel, z, order): + """This is a revset-flavored implementation of stack aliases. - The syntax is: rev#stack[n] or rev#s[n]. Plenty of logic is borrowed - from topic._namemap, but unlike that function, which prefers to abort - (e.g. when stack index is too high), this returns empty set to be more - revset-friendly. - """ - # hg 4.9 provides a number or None, hg 5.0 provides a tuple of tokens - if isinstance(z, tuple): - a, b = revset.getintrange( - z, - b'relation subscript must be an integer or a range', - b'relation subscript bounds must be integers', - None, None) - else: - # hg <= 4.9 (431cf2c8c839+13f7a6a4f0db) - a = b = z + The syntax is: rev#stack[n] or rev#s[n]. Plenty of logic is borrowed + from topic._namemap, but unlike that function, which prefers to abort + (e.g. when stack index is too high), this returns empty set to be more + revset-friendly. + """ + a, b = revset.getintrange( + z, + b'relation subscript must be an integer or a range', + b'relation subscript bounds must be integers', + None, None) - s = revset.getset(repo, revset.fullreposet(repo), x) - if not s: - return revset.baseset() + s = revset.getset(repo, revset.fullreposet(repo), x) + if not s: + return revset.baseset() - def getrange(st, a, b): - start = 1 if a is None else a - end = len(st.revs) if b is None else b + 1 - return range(start, end) + def getrange(st, a, b): + start = 1 if a is None else a + end = len(st.revs) if b is None else b + 1 + return range(start, end) - revs = [] - for r in s: - topic = repo[r].topic() - if topic: - st = stack.stack(repo, topic=topic) - else: - st = stack.stack(repo, branch=repo[r].branch()) - for n in getrange(st, a, b): - if abs(n) >= len(st.revs): - # also means stack base is not accessible with n < 0, which - # is by design - continue - if n == 0 and b != 0 and a != 0: - # quirk: we don't want stack base unless specifically asked - # for it (at least one of the indices is 0) - continue - rev = st.revs[n] - if rev == -1 and n == 0: - continue - if rev not in revs: - revs.append(rev) + revs = [] + for r in s: + topic = repo[r].topic() + if topic: + st = stack.stack(repo, topic=topic) + else: + st = stack.stack(repo, branch=repo[r].branch()) + for n in getrange(st, a, b): + if abs(n) >= len(st.revs): + # also means stack base is not accessible with n < 0, which + # is by design + continue + if n == 0 and b != 0 and a != 0: + # quirk: we don't want stack base unless specifically asked + # for it (at least one of the indices is 0) + continue + rev = st.revs[n] + if rev == -1 and n == 0: + continue + if rev not in revs: + revs.append(rev) - return subset & revset.baseset(revs) - - revset.subscriptrelations[b'stack'] = stacksubrel - revset.subscriptrelations[b's'] = stacksubrel + return subset & revset.baseset(revs) - def topicsubrel(repo, subset, x, *args): - subset &= topicset(repo, subset, x) - # not using revset.generationssubrel directly 
because it was renamed - # hg <= 5.3 (8859de3e83dc) - generationssubrel = revset.subscriptrelations[b'generations'] - return generationssubrel(repo, subset, x, *args) +revset.subscriptrelations[b'stack'] = stacksubrel +revset.subscriptrelations[b's'] = stacksubrel + +def topicsubrel(repo, subset, x, *args): + subset &= topicset(repo, subset, x) + return revset.generationssubrel(repo, subset, x, *args) - revset.subscriptrelations[b'topic'] = topicsubrel - revset.subscriptrelations[b't'] = topicsubrel +revset.subscriptrelations[b'topic'] = topicsubrel +revset.subscriptrelations[b't'] = topicsubrel - # x#y revset operator support (no support for older version) - # hg <= 5.3 (eca82eb9d777) - if util.safehasattr(revset, 'relations'): - def stackrel(repo, subset, x, rel, order): - z = (b'rangeall', None) - return stacksubrel(repo, subset, x, rel, z, order) +def stackrel(repo, subset, x, rel, order): + z = (b'rangeall', None) + return stacksubrel(repo, subset, x, rel, z, order) - revset.relations[b'stack'] = stackrel - revset.relations[b's'] = stackrel +revset.relations[b'stack'] = stackrel +revset.relations[b's'] = stackrel
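With only the modern revset API supported, the stack/s and topic/t relations are registered unconditionally. A rough usage sketch, assuming a repo object obtained through Mercurial's Python API (the expressions follow the docstring of stacksubrel above)::

    # s1 and s2 of the stack the working directory parent belongs to
    repo.revs(b'.#stack[1:2]')

    # index 0 explicitly asks for the stack base, which is otherwise skipped
    repo.revs(b'.#s[0]')

    # without a subscript, "x#stack" covers the whole stack except its base
    repo.revs(b'.#stack')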
--- a/hgext3rd/topic/server.py Sun Apr 14 00:26:33 2024 -0300 +++ b/hgext3rd/topic/server.py Sun Apr 14 00:41:49 2024 -0300 @@ -5,7 +5,6 @@ from mercurial.i18n import _ from mercurial import ( - branchmap, error, extensions, localrepo, @@ -16,6 +15,8 @@ wireprotov1server, ) +from mercurial.utils import repoviewutil + from . import ( common, compat, @@ -76,7 +77,7 @@ else: # only changesets in the selected topic namespaces are visible h = [] - entries = compat.bcentries(repo.branchmaptns()) + entries = repo.branchmaptns()._entries for branch, nodes in compat.branchmapitems(entries): namedbranch, tns, topic = common.parsefqbn(branch) if tns == b'none' or tns in namespaces: @@ -98,6 +99,34 @@ caps.append(b'ext-topics-tns-heads') return caps +class topiccommandexecutor(localrepo.localcommandexecutor): + def callcommand(self, command, args): + if command == b'heads': + if self._peer.capable(b'ext-topics-tns-heads'): + command = b'tns_heads' + if self._peer.ui.configbool(b'_internal', b'tns-explicit-target', False): + args[b'namespaces'] = [b'*'] + else: + args[b'namespaces'] = self._peer.ui.configlist(b'experimental', b'tns-default-pull-namespaces', [b'*']) + s = super(topiccommandexecutor, self) + return s.callcommand(command, args) + +class topicpeerexecutor(wireprotov1peer.peerexecutor): + def callcommand(self, command, args): + if command == b'heads': + if self._peer.capable(b'ext-topics-tns-heads'): + command = b'tns_heads' + if self._peer.ui.configbool(b'_internal', b'tns-explicit-target', False): + args[b'namespaces'] = [b'*'] + else: + args[b'namespaces'] = self._peer.ui.configlist(b'experimental', b'tns-default-pull-namespaces', [b'*']) + elif self._peer.capable(b'_exttopics_heads'): + command = b'_exttopics_heads' + if getattr(self._peer, '_exttopics_heads', None) is None: + self._peer._exttopics_heads = self._peer.heads + s = super(topicpeerexecutor, self) + return s.callcommand(command, args) + def setupserver(ui): extensions.wrapfunction(wireprotov1server, 'heads', wrapheads) wireprotov1server.commands.pop(b'heads') @@ -110,7 +139,7 @@ # hg <= 5.9 (c424ff4807e6) @wireprotov1peer.batchable def wp_tns_heads(self, namespaces): - f = wireprotov1peer.future() + f = wireprotov1peer.future() # pytype: disable=module-attr yield {b'namespaces': wireprototypes.encodelist(namespaces)}, f d = f.value try: @@ -130,41 +159,12 @@ wireprotov1peer.wirepeer.tns_heads = wp_tns_heads - class topicpeerexecutor(wireprotov1peer.peerexecutor): + def wp_commandexecutor(self): + return topicpeerexecutor(self) - def callcommand(self, command, args): - if command == b'heads': - if self._peer.capable(b'ext-topics-tns-heads'): - command = b'tns_heads' - if self._peer.ui.configbool(b'_internal', b'tns-explicit-target', False): - args[b'namespaces'] = [b'*'] - else: - args[b'namespaces'] = self._peer.ui.configlist(b'experimental', b'tns-default-pull-namespaces', [b'*']) - elif self._peer.capable(b'_exttopics_heads'): - command = b'_exttopics_heads' - if getattr(self._peer, '_exttopics_heads', None) is None: - self._peer._exttopics_heads = self._peer.heads - s = super(topicpeerexecutor, self) - return s.callcommand(command, args) - - wireprotov1peer.peerexecutor = topicpeerexecutor - - class topiccommandexecutor(localrepo.localcommandexecutor): - def callcommand(self, command, args): - if command == b'heads': - if self._peer.capable(b'ext-topics-tns-heads'): - command = b'tns_heads' - if self._peer.ui.configbool(b'_internal', b'tns-explicit-target', False): - args[b'namespaces'] = [b'*'] - else: - 
args[b'namespaces'] = self._peer.ui.configlist(b'experimental', b'tns-default-pull-namespaces', [b'*']) - s = super(topiccommandexecutor, self) - return s.callcommand(command, args) - - localrepo.localcommandexecutor = topiccommandexecutor + wireprotov1peer.wirepeer.commandexecutor = wp_commandexecutor if FILTERNAME not in repoview.filtertable: repoview.filtertable[FILTERNAME] = computeunservedtopic - # hg <= 4.9 (caebe5e7f4bd) - branchmap.subsettable[FILTERNAME] = b'immutable' - branchmap.subsettable[b'served'] = FILTERNAME + repoviewutil.subsettable[FILTERNAME] = b'immutable' + repoviewutil.subsettable[b'served'] = FILTERNAME
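Both executors are now defined at module level and share the same logic for choosing which topic namespaces to request before rewriting heads into tns_heads. That selection boils down to the following, shown here as a standalone helper with an illustrative name (the extension keeps it inline)::

    def _pull_namespaces(ui):
        # an explicit internal target requests every namespace; otherwise the
        # user-configurable default list applies, falling back to "all"
        if ui.configbool(b'_internal', b'tns-explicit-target', False):
            return [b'*']
        return ui.configlist(b'experimental',
                             b'tns-default-pull-namespaces', [b'*'])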
--- a/hgext3rd/topic/topicmap.py Sun Apr 14 00:26:33 2024 -0300 +++ b/hgext3rd/topic/topicmap.py Sun Apr 14 00:41:49 2024 -0300 @@ -1,4 +1,4 @@ -import contextlib +import functools import hashlib from mercurial.i18n import _ @@ -11,6 +11,8 @@ util, ) +from mercurial.utils import repoviewutil + from . import ( common, compat, @@ -44,13 +46,9 @@ def _setuptopicfilter(ui): """extend the filter related mapping with topic related one""" - funcmap = repoview.filtertable - # hg <= 4.9 (caebe5e7f4bd) - partialmap = branchmap.subsettable - # filter level not affected by topic that we should not override - for plainname in list(funcmap): + for plainname in list(repoview.filtertable): newfilter = topicfilter(plainname) if newfilter == plainname: continue @@ -58,13 +56,13 @@ def revsfunc(repo, name=plainname): return repoview.filterrevs(repo, name) - base = topicfilter(partialmap[plainname]) + base = topicfilter(repoviewutil.subsettable[plainname]) - if newfilter not in funcmap: - funcmap[newfilter] = revsfunc - partialmap[newfilter] = base - funcmap[b'unfiltered-topic'] = lambda repo: frozenset() - partialmap[b'unfiltered-topic'] = b'visible-topic' + if newfilter not in repoview.filtertable: + repoview.filtertable[newfilter] = revsfunc + repoviewutil.subsettable[newfilter] = base + repoview.filtertable[b'unfiltered-topic'] = lambda repo: frozenset() + repoviewutil.subsettable[b'unfiltered-topic'] = b'visible-topic' def _phaseshash(repo, maxrev): """uniq ID for a phase matching a set of rev""" @@ -126,22 +124,14 @@ pass branchmap.branchcache = topiccache - try: - # Mercurial 5.0 - class remotetopiccache(_topiccache, branchmap.remotebranchcache): - pass - branchmap.remotebranchcache = remotetopiccache + class remotetopiccache(_topiccache, branchmap.remotebranchcache): + pass + branchmap.remotebranchcache = remotetopiccache - def _wrapupdatebmcachemethod(orig, self, repo): - # pass in the bound method as the original - return _wrapupdatebmcache(orig.__get__(self), repo) - extensions.wrapfunction(branchmap.BranchMapCache, 'updatecache', _wrapupdatebmcachemethod) - except AttributeError: - # hg <= 4.9 (3461814417f3) - extensions.wrapfunction(branchmap, 'updatecache', _wrapupdatebmcache) - # branchcache in hg <= 4.9 doesn't have load method, instead there's a - # module-level function to read on-disk cache and return a branchcache - extensions.wrapfunction(branchmap, 'read', _wrapbmread) + def _wrapupdatebmcachemethod(orig, self, repo): + # pass in the bound method as the original + return _wrapupdatebmcache(functools.partial(orig, self), repo) + extensions.wrapfunction(branchmap.BranchMapCache, 'updatecache', _wrapupdatebmcachemethod) def _wrapupdatebmcache(orig, repo): previous = getattr(repo, '_autobranchmaptopic', False) @@ -151,31 +141,16 @@ finally: repo._autobranchmaptopic = previous -# needed to prevent reference used for 'super()' call using in branchmap.py to -# no go into cycle. 
(yes, URG) -_oldbranchmap = branchmap.branchcache - -@contextlib.contextmanager -def oldbranchmap(): - previous = branchmap.branchcache - try: - branchmap.branchcache = _oldbranchmap - yield - finally: - branchmap.branchcache = previous - class _topiccache(object): # combine me with branchmap.branchcache def __init__(self, *args, **kwargs): - # super() call may fail otherwise - with oldbranchmap(): - super(_topiccache, self).__init__(*args, **kwargs) + super(_topiccache, self).__init__(*args, **kwargs) self.phaseshash = None def copy(self): """return an deep copy of the branchcache object""" - entries = compat.bcentries(self) - args = (entries, self.tipnode, self.tiprev, self.filteredhash, + assert isinstance(self, (branchmap.branchcache, branchmap.remotebranchcache)) # help pytype + args = (self._entries, self.tipnode, self.tiprev, self.filteredhash, self._closednodes) if util.safehasattr(self, '_repo'): # hg <= 5.7 (6266d19556ad) @@ -188,19 +163,20 @@ """call branchmap.load(), and then transform branch names to be in the new "//" format """ + assert isinstance(self, branchmap.branchcache) # help pytype super(_topiccache, self).load(repo, lineiter) - entries = compat.bcentries(self) - for branch in tuple(entries): + for branch in tuple(self._entries): formatted = common.formatfqbn(branch=branch) if branch != formatted: - entries[formatted] = entries.pop(branch) + self._entries[formatted] = self._entries.pop(branch) def validfor(self, repo): """Is the cache content valid regarding a repo - False when cached tipnode is unknown or if we detect a strip. - True when cache is up to date or a subset of current repo.""" + assert isinstance(self, (branchmap.branchcache, branchmap.remotebranchcache)) # help pytype valid = super(_topiccache, self).validfor(repo) if not valid: return False @@ -223,12 +199,11 @@ # the time will be fast enough if not istopicfilter(repo.filtername): cache = self.copy() - entries = compat.bcentries(cache) - for formatted in tuple(entries): + for formatted in tuple(cache._entries): branch, tns, topic = common.parsefqbn(formatted) if branch != formatted: - entries[branch] = entries.pop(formatted) + cache._entries[branch] = cache._entries.pop(formatted) super(_topiccache, cache).write(repo) @@ -237,6 +212,7 @@ missing heads, and a generator of nodes that are strictly a superset of heads missing, this function updates self to be correct. """ + assert isinstance(self, (branchmap.branchcache, branchmap.remotebranchcache)) # help pytype if not istopicfilter(repo.filtername): return super(_topiccache, self).update(repo, revgen) @@ -250,21 +226,3 @@ super(_topiccache, self).update(repo, revgen) self.phaseshash = _phaseshash(repo, self.tiprev) - -def _wrapbmread(orig, repo): - """call branchmap.read(), and then transform branch names to be in the - new "//" format - """ - partial = orig(repo) - if partial is None: - # because of IOError or OSError - return partial - - entries = compat.bcentries(partial) - - for branch in tuple(entries): - formatted = common.formatfqbn(branch=branch) - if branch != formatted: - entries[formatted] = entries.pop(branch) - - return partial
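The updatecache wrapper now binds the original method with functools.partial(orig, self) instead of orig.__get__(self). Both forms yield an equivalent one-argument callable, as this generic sketch shows (the class and values are made up)::

    import functools

    class Cache(object):
        def updatecache(self, repo):
            return (self, repo)

    cache = Cache()
    func = Cache.updatecache                    # plain function, self unbound
    bound_old = func.__get__(cache)             # previous descriptor-based bind
    bound_new = functools.partial(func, cache)  # form used in topicmap.py now
    assert bound_old('repo') == bound_new('repo') == (cache, 'repo')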
--- a/tests/test-check-sdist.t Sun Apr 14 00:26:33 2024 -0300 +++ b/tests/test-check-sdist.t Sun Apr 14 00:41:49 2024 -0300 @@ -34,31 +34,29 @@ hg-evolve-*.tar.gz (glob) $ tar -tzf hg-evolve-*.tar.gz | sed 's|^hg-evolve-[^/]*/||' | sort > files - $ egrep '^tests/test-.*\.(t|py)$' files > test-files - $ egrep -v '^tests/test-.*\.(t|py)$' files > other-files + $ grep -E '^tests/test-.*\.(t|py)$' files > test-files + $ grep -E -v '^tests/test-.*\.(t|py)$' files > other-files $ wc -l other-files - 148 other-files + 146 other-files $ wc -l test-files ??? test-files (glob) - $ fgrep debian files + $ grep -F debian files tests/test-check-debian.t - $ fgrep __init__.py files + $ grep -F __init__.py files hgext3rd/__init__.py hgext3rd/evolve/__init__.py hgext3rd/evolve/thirdparty/__init__.py hgext3rd/topic/__init__.py - $ fgrep common.sh files + $ grep -F common.sh files docs/tutorial/testlib/common.sh tests/testlib/common.sh - $ fgrep README files + $ grep -F README files README.rst docs/README docs/tutorial/README.rst hgext3rd/topic/README - $ egrep '(gitlab|contrib|hack|format-source)' files + $ grep -E '(gitlab|contrib|hack|format-source)' files [1] - $ fgrep legacy.py files + $ grep -F netlify files [1] - $ fgrep netlify files - [1]
--- a/tests/test-check-tag.t Sun Apr 14 00:26:33 2024 -0300 +++ b/tests/test-check-tag.t Sun Apr 14 00:41:49 2024 -0300 @@ -26,14 +26,14 @@ > if hg grep --rev $node '^__version__ = .*\.dev' hgext3rd/evolve/ hgext3rd/topic/; then > echo "Versions should not end with .dev at tagged revision $node" > fi - > entry=`hg cat --rev $node CHANGELOG | fgrep "$tags"` + > entry=`hg cat --rev $node CHANGELOG | grep -F "$tags"` > if [ -z "$entry" ]; then > echo "Revision $node has no CHANGELOG entry for $tags" > fi - > if echo "$entry" | egrep -vq ' -- [0-9]{4}-[0-9]{2}-[0-9]{2}'; then + > if echo "$entry" | grep -E -vq ' -- [0-9]{4}-[0-9]{2}-[0-9]{2}'; then > echo "CHANGELOG entry for $tags should have a date in YYYY-MM-DD format: $entry" > fi - > entry=`hg cat --rev $node debian/changelog | fgrep "$tags"` + > entry=`hg cat --rev $node debian/changelog | grep -F "$tags"` > if [ -z "$entry" ]; then > echo "Revision $node has no debian/changelog entry for $tags" > fi
--- a/tests/test-evolve-public-content-divergent-discard.t Sun Apr 14 00:26:33 2024 -0300 +++ b/tests/test-evolve-public-content-divergent-discard.t Sun Apr 14 00:41:49 2024 -0300 @@ -681,12 +681,12 @@ marked working directory as topic: topic-2 (make other divergent a closed branch head) - $ hg ci --amend -m "closing branch double//slash" --close-branch + $ hg ci --amend -m "closing default branch" --close-branch active topic 'topic-2' grew its first changeset (see 'hg help topics' for more information) $ hg glog - @ 6:fe5d55b4e488 closing branch double//slash + @ 6:638eff2d31b7 closing default branch | draft content-divergent | | o 5:bde8ac1c636a added d @@ -706,11 +706,11 @@ $ hg evolve --content-divergent merge:[5] added d - with: [6] closing branch double//slash + with: [6] closing default branch base: [3] added d 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - other divergent changeset fe5d55b4e488 is a closed branch head and differs from local bde8ac1c636a by "branch, description" only, discarding fe5d55b4e488 - content divergence resolution between bde8ac1c636a (public) and fe5d55b4e488 has same content as bde8ac1c636a, discarding fe5d55b4e488 + other divergent changeset 638eff2d31b7 is a closed branch head and differs from local bde8ac1c636a by "branch, description" only, discarding 638eff2d31b7 + content divergence resolution between bde8ac1c636a (public) and 638eff2d31b7 has same content as bde8ac1c636a, discarding 638eff2d31b7 active topic 'topic-2' is now empty working directory is now at bde8ac1c636a
--- a/tests/test-extension-isolation.t Sun Apr 14 00:26:33 2024 -0300 +++ b/tests/test-extension-isolation.t Sun Apr 14 00:41:49 2024 -0300 @@ -117,34 +117,34 @@ Check evolve isolation ----------------------- - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | grep -E 'topics|evoext' [1] - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-evo | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-evo | grep -E 'topics|evoext' _evoext_getbundle_obscommon _evoext_obshashrange_v1 - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | grep -E 'topics|evoext' [1] Check topic isolation --------------------- - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | grep -E 'topics|evoext' [1] - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-topic | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-topic | grep -E 'topics|evoext' _exttopics_heads ext-topics-publish=all ext-topics-tns-heads topics topics-namespaces - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | grep -E 'topics|evoext' [1] Check coupled isolation ----------------------- - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | grep -E 'topics|evoext' [1] - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-both | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-both | grep -E 'topics|evoext' _evoext_getbundle_obscommon _evoext_obshashrange_v1 _exttopics_heads @@ -152,19 +152,19 @@ ext-topics-tns-heads topics topics-namespaces - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-evo | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-evo | grep -E 'topics|evoext' _evoext_getbundle_obscommon _evoext_obshashrange_v1 - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-topic | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-topic | grep -E 'topics|evoext' _exttopics_heads ext-topics-publish=all ext-topics-tns-heads topics topics-namespaces - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-evo | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-evo | grep -E 'topics|evoext' _evoext_getbundle_obscommon _evoext_obshashrange_v1 - $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | egrep 'topics|evoext' + $ hg debugcapabilities http://$LOCALIP:$HGPORT/repo-no-ext | grep -E 'topics|evoext' [1] Final cleanup
--- a/tests/test-namespaces.t Sun Apr 14 00:26:33 2024 -0300 +++ b/tests/test-namespaces.t Sun Apr 14 00:41:49 2024 -0300 @@ -70,7 +70,7 @@ $ hg debugtopicnamespace --clear nonsense abort: cannot use --clear when setting a topic namespace - [255] + [10] $ hg branch stable marked working directory as branch stable
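The expected exit status changes from 255 to 10, the detailed exit code Mercurial assigns to input/usage errors; presumably the command now raises error.InputError instead of a generic error.Abort. A hedged illustration, not the extension's actual code::

    from mercurial import error
    from mercurial.i18n import _

    # a plain Abort still exits with 255, while InputError maps to the
    # detailed exit code 10 seen in the test above
    raise error.InputError(_(b'cannot use --clear when setting a topic namespace'))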
--- a/tests/test-oldconvert.t Sun Apr 14 00:26:33 2024 -0300 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,106 +0,0 @@ - $ cat >> $HGRCPATH <<EOF - > [extensions] - > EOF - $ mkcommit() { - > echo "$1" > "$1" - > hg add "$1" - > hg ci -m "add $1" - > } - -create commit - - $ hg init repo - $ cd repo - $ mkcommit a - $ mkcommit b - $ hg up -q 0 - $ mkcommit c - created new head - -forge old style relation files - - $ hg log -r 2 --template='{node} ' > .hg/obsolete-relations - $ hg log -r 1 --template='{node}' >> .hg/obsolete-relations - -enable the extensions - - $ echo "obsolete=$(echo $(dirname $TESTDIR))/hgext3rd/evolve/legacy.py" >> $HGRCPATH - - $ hg log -G - abort: old format of obsolete marker detected! - run `hg debugconvertobsolete` once. - [255] - $ hg debugconvertobsolete --traceback - 1 obsolete marker converted - $ hg log -G - @ changeset: 2:d67cd0334eee - | tag: tip - | parent: 0:1f0dee641bb7 - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: add c - | - o changeset: 0:1f0dee641bb7 - user: test - date: Thu Jan 01 00:00:00 1970 +0000 - summary: add a - - $ hg debugobsolete - 7c3bad9141dcb46ff89abf5f61856facd56e476c d67cd0334eeecfded222fed9009f0db4beb57585 0 (*) {'user': 'test'} (glob) - $ hg debugconvertobsolete - nothing to do - 0 obsolete marker converted - -Convert json - - $ cat > .hg/store/obsoletemarkers << EOF - > [ - > { - > "reason": "import from older format.", - > "subjects": [ - > "3218406b50ed13480765e7c260669620f37fba6e" - > ], - > "user": "Pierre-Yves David <pierre-yves.david@ens-lyon.org>", - > "date": [ - > 1336503323.9768269, - > -7200 - > ], - > "object": "3e03d82708d4da97a92158558dd13386d8f09ad5", - > "id": "4743f676eaf3923cb98c921ee06b2e91052c365b" - > }, - > { - > "reason": "import from older format.", - > "user": "Pierre-Yves David <pierre-yves.david@logilab.fr>", - > "date": [ - > 1336557472.7875929, - > -7200 - > ], - > "object": "5c722672795c3a2cb94d0cc9a821c394c1475f87", - > "id": "1fd90a84b7225d2e3062b7e1b3100aa2e060fc72" - > }, - > { - > "reason": "import from older format.", - > "subjects": [ - > "0000000000000000000000000000000000000000" - > ], - > "user": "Pierre-Yves David <pierre-yves.david@logilab.fr>", - > "date": [ - > 1336557472.784307, - > -7200 - > ], - > "object": "2c3784e102bb34ccc93862af5bd6d609ee30c577", - > "id": "7d940c5ee1f886c8a6c0d805b43e522cb3ef7a15" - > } - > ] - > EOF - $ hg log -G - abort: old format of obsolete marker detected! - run `hg debugconvertobsolete` once. - [255] - $ hg debugconvertobsolete --traceback - 3 obsolete marker converted - $ hg debugobsolete - 7c3bad9141dcb46ff89abf5f61856facd56e476c d67cd0334eeecfded222fed9009f0db4beb57585 0 (*) {'user': 'test'} (glob) - 3e03d82708d4da97a92158558dd13386d8f09ad5 3218406b50ed13480765e7c260669620f37fba6e 0 (Tue May 08 20:55:23 2012 +0200) {'user': 'Pierre-Yves David <pierre-yves.david@ens-lyon.org>'} - 5c722672795c3a2cb94d0cc9a821c394c1475f87 0 (Wed May 09 11:57:52 2012 +0200) {'user': 'Pierre-Yves David <pierre-yves.david@logilab.fr>'} - 2c3784e102bb34ccc93862af5bd6d609ee30c577 0 (Wed May 09 11:57:52 2012 +0200) {'user': 'Pierre-Yves David <pierre-yves.david@logilab.fr>'}
--- a/tests/test-topic.t Sun Apr 14 00:26:33 2024 -0300 +++ b/tests/test-topic.t Sun Apr 14 00:41:49 2024 -0300 @@ -85,32 +85,32 @@ Publishing behavior =================== - Topic vanish when changeset move to the public phases. Moving to the public - phase usually happens on push, but it is possible to update that behavior. The + Topics vanish when changesets move to the public phase. Moving to the public + phase usually happens on push, but it is possible to modify this behavior. The server needs to have specific config for this. - * everything pushed become public (the default): + * everything pushed becomes public (the default): [phases] publish = yes - * nothing push turned public: + * nothing pushed turns public: [phases] publish = no - * topic branches are not published, changeset without topic are: + * topic branches are not published, changesets without topic are: [phases] publish = no [experimental] topic.publish-bare-branch = yes - In addition, the topic extension adds a "--publish" flag on 'hg push'. When - used, the pushed revisions are published if the push succeeds. It also applies - to common revisions selected by the push. + In addition, 'hg push' command has a "--publish" flag. When used, the pushed + revisions are published if the push succeeds. It also applies to common + revisions selected by the push. - One can prevent any publishing to happens in a repository using: + One can prevent any publishing from happening in a repository using: [experimental] topic.allow-publish = no