evolve: changeset 6635:6940272bc07d
branching: merge with stable
author: Anton Shestakov <av6@dwimlabs.net>
date: Wed, 03 Jan 2024 13:56:27 -0300
parents: 991cbf0f66f2 (diff) c8779efa4fb4 (current diff)
children: 7f95546f584e
files:
diffstat: 23 files changed, 105 insertions(+), 384 deletions(-)
--- a/.gitlab-ci.yml	Sun Dec 31 16:22:15 2023 -0300
+++ b/.gitlab-ci.yml	Wed Jan 03 13:56:27 2024 -0300
@@ -58,7 +58,7 @@
     allow_failure: true
 
 doc:
-    image: registry.heptapod.net/mercurial/ci-images/py3-evolve-doc
+    image: registry.heptapod.net/mercurial/ci-images/py3-evolve-doc:v2.0
     script:
         - make doc
     variables:
--- a/debian/control	Sun Dec 31 16:22:15 2023 -0300
+++ b/debian/control	Wed Jan 03 13:56:27 2024 -0300
@@ -7,7 +7,7 @@
  Pierre-Yves David <pierre-yves.david@logilab.fr>,
 Standards-Version: 4.3.0
 Build-Depends:
- mercurial (>= 4.9),
+ mercurial (>= 5.6),
 python3,
 debhelper (>= 10),
 dh-python,
@@ -26,7 +26,7 @@
  ${python3:Depends},
  ${misc:Depends},
  ${sphinxdoc:Depends},
- mercurial (>= 4.9),
+ mercurial (>= 5.6),
 Built-Using: ${sphinxdoc:Built-Using}
 Description: evolve extension for Mercurial
  This package provides the experimental "evolve" extension for the Mercurial
--- a/docs/conf.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/docs/conf.py	Wed Jan 03 13:56:27 2024 -0300
@@ -98,7 +98,7 @@
 # The name of an image file (within the static path) to use as favicon of the
 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-html_favicon = 'logo-evolve.ico'
+html_favicon = 'static/logo-evolve.ico'
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
--- a/hgext3rd/evolve/__init__.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/__init__.py	Wed Jan 03 13:56:27 2024 -0300
@@ -30,7 +30,7 @@
 backported to older version of Mercurial by this extension. Some older
 experimental protocols are also supported for a longer time in the extension
 to help people transitioning. (The extension is currently compatible down to
-Mercurial version 4.9).
+Mercurial version 5.6).
 
 New Config::
 
@@ -1181,6 +1181,7 @@
                                  continuefunc=cmdrewrite.hgcontinuefixup)
 else:
     # hg <= 5.0 (5f2f6912c9e6)
+    # pytype: disable=module-attr
     estate = (b'evolvestate', False, False, _(b'evolve in progress'),
               _(b"use 'hg evolve --continue' or 'hg evolve --abort' to abort"))
     cmdutil.unfinishedstates.append(estate)
@@ -1197,13 +1198,16 @@
     cmdutil.afterresolvedstates.append(afterresolved)
     cmdutil.afterresolvedstates.append(pickresolved)
     cmdutil.afterresolvedstates.append(fixupresolved)
+    # pytype: enable=module-attr
 
 # hg <= 5.0 (12243f15d53e)
 if util.safehasattr(cmdutil, 'STATES'):
+    # pytype: disable=module-attr
     cmdutil.STATES = (
         (b'evolve', cmdutil.fileexistspredicate(b'evolvestate'), _evolvemessage),
         (b'fixup', cmdutil.fileexistspredicate(b'fixup-state'), _fixupmessage),
     ) + cmdutil.STATES
+    # pytype: enable=module-attr
 
 @eh.wrapfunction(hg, 'clean')
 def clean(orig, repo, *args, **kwargs):
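The pytype directives added above bracket code that touches cmdutil attributes which only exist on some Mercurial releases, so the checker stays quiet on versions where they are absent while the runtime guard still decides what actually runs. A minimal, self-contained sketch of that pattern (the `legacy_api` namespace and its attribute are illustrative, not part of this changeset):

```python
import types

# Stand-in for a module whose API differs between releases; purely illustrative.
legacy_api = types.SimpleNamespace(unfinishedstates=[])

def register_state(entry):
    # Runtime guard: only touch the attribute when this version actually has it.
    if hasattr(legacy_api, 'unfinishedstates'):
        # pytype: disable=module-attr
        legacy_api.unfinishedstates.append(entry)
        # pytype: enable=module-attr

register_state(('evolvestate', False, False, 'evolve in progress'))
print(legacy_api.unfinishedstates)
```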
--- a/hgext3rd/evolve/cmdrewrite.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/cmdrewrite.py	Wed Jan 03 13:56:27 2024 -0300
@@ -530,7 +530,7 @@
         except TypeError:
             # hg <= 4.9 (db72f9f6580e)
             chunks, opts = cmdutil.recordfilter(repo.ui, originalchunks,
-                                                operation=b'discard')
+                                                operation=b'discard')  # pytype: disable=missing-parameter
         if not chunks:
             raise error.Abort(_(b"nothing selected to uncommit"))
         fp = stringio()
--- a/hgext3rd/evolve/dagutil.py	Sun Dec 31 16:22:15 2023 -0300
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,290 +0,0 @@
-# dagutil.py - dag utilities for mercurial
-#
-# Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
-# and Peter Arrenbrecht <peter@arrenbrecht.ch>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-#
-# Imported from Mercurial code at cee9043c7dba
-
-from __future__ import absolute_import
-
-from mercurial.i18n import _
-from mercurial.node import nullrev
-
-from . import compat
-
-class basedag(object):
-    '''generic interface for DAGs
-
-    terms:
-    "ix" (short for index) identifies a nodes internally,
-    "id" identifies one externally.
-
-    All params are ixs unless explicitly suffixed otherwise.
-    Pluralized params are lists or sets.
-    '''
-
-    def __init__(self):
-        self._inverse = None
-
-    def nodeset(self):
-        '''set of all node ixs'''
-        raise NotImplementedError
-
-    def heads(self):
-        '''list of head ixs'''
-        raise NotImplementedError
-
-    def parents(self, ix):
-        '''list of parents ixs of ix'''
-        raise NotImplementedError
-
-    def inverse(self):
-        '''inverse DAG, where parents becomes children, etc.'''
-        raise NotImplementedError
-
-    def ancestorset(self, starts, stops=None):
-        '''
-        set of all ancestors of starts (incl), but stop walk at stops (excl)
-        '''
-        raise NotImplementedError
-
-    def descendantset(self, starts, stops=None):
-        '''
-        set of all descendants of starts (incl), but stop walk at stops (excl)
-        '''
-        return self.inverse().ancestorset(starts, stops)
-
-    def headsetofconnecteds(self, ixs):
-        '''
-        subset of connected list of ixs so that no node has a descendant in it
-
-        By "connected list" we mean that if an ancestor and a descendant are in
-        the list, then so is at least one path connecting them.
-        '''
-        raise NotImplementedError
-
-    def externalize(self, ix):
-        '''return a node id'''
-        return self._externalize(ix)
-
-    def externalizeall(self, ixs):
-        '''return a list of (or set if given a set) of node ids'''
-        ids = self._externalizeall(ixs)
-        if isinstance(ixs, set):
-            return set(ids)
-        return list(ids)
-
-    def internalize(self, id):
-        '''return a node ix'''
-        return self._internalize(id)
-
-    def internalizeall(self, ids, filterunknown=False):
-        '''return a list of (or set if given a set) of node ixs'''
-        ixs = self._internalizeall(ids, filterunknown)
-        if isinstance(ids, set):
-            return set(ixs)
-        return list(ixs)
-
-class genericdag(basedag):
-    '''generic implementations for DAGs'''
-
-    def ancestorset(self, starts, stops=None):
-        if stops:
-            stops = set(stops)
-        else:
-            stops = set()
-        seen = set()
-        pending = list(starts)
-        while pending:
-            n = pending.pop()
-            if n not in seen and n not in stops:
-                seen.add(n)
-                pending.extend(self.parents(n))
-        return seen
-
-    def headsetofconnecteds(self, ixs):
-        hds = set(ixs)
-        if not hds:
-            return hds
-        for n in ixs:
-            for p in self.parents(n):
-                hds.discard(p)
-        assert hds
-        return hds
-
-class revlogbaseddag(basedag):
-    '''generic dag interface to a revlog'''
-
-    def __init__(self, revlog, nodeset):
-        basedag.__init__(self)
-        self._revlog = revlog
-        self._heads = None
-        self._nodeset = nodeset
-
-    def nodeset(self):
-        return self._nodeset
-
-    def heads(self):
-        if self._heads is None:
-            self._heads = self._getheads()
-        return self._heads
-
-    def _externalize(self, ix):
-        return self._revlog.index[ix][7]
-
-    def _externalizeall(self, ixs):
-        idx = self._revlog.index
-        return [idx[i][7] for i in ixs]
-
-    def _internalize(self, id):
-        ix = self._revlog.rev(id)
-        if ix == nullrev:
-            raise LookupError(id, self._revlog.indexfile, _(b'nullid'))
-        return ix
-
-    def _internalizeall(self, ids, filterunknown):
-        rl = self._revlog
-        getrev = compat.getgetrev(rl)
-        if filterunknown:
-            return [r for r in map(getrev, ids)
-                    if (r is not None
-                        and r != nullrev
-                        and r not in rl.filteredrevs)]
-        return [self._internalize(i) for i in ids]
-
-class revlogdag(revlogbaseddag):
-    '''dag interface to a revlog'''
-
-    def __init__(self, revlog, localsubset=None):
-        revlogbaseddag.__init__(self, revlog, set(revlog))
-        self._heads = localsubset
-
-    def _getheads(self):
-        return [r for r in self._revlog.headrevs() if r != nullrev]
-
-    def parents(self, ix):
-        rlog = self._revlog
-        idx = rlog.index
-        revdata = idx[ix]
-        prev = revdata[5]
-        if prev != nullrev:
-            prev2 = revdata[6]
-            if prev2 == nullrev:
-                return [prev]
-            return [prev, prev2]
-        prev2 = revdata[6]
-        if prev2 != nullrev:
-            return [prev2]
-        return []
-
-    def inverse(self):
-        if self._inverse is None:
-            self._inverse = inverserevlogdag(self)
-        return self._inverse
-
-    def ancestorset(self, starts, stops=None):
-        rlog = self._revlog
-        idx = rlog.index
-        if stops:
-            stops = set(stops)
-        else:
-            stops = set()
-        seen = set()
-        pending = list(starts)
-        while pending:
-            rev = pending.pop()
-            if rev not in seen and rev not in stops:
-                seen.add(rev)
-                revdata = idx[rev]
-                for i in [5, 6]:
-                    prev = revdata[i]
-                    if prev != nullrev:
-                        pending.append(prev)
-        return seen
-
-    def headsetofconnecteds(self, ixs):
-        if not ixs:
-            return set()
-        rlog = self._revlog
-        idx = rlog.index
-        headrevs = set(ixs)
-        for rev in ixs:
-            revdata = idx[rev]
-            for i in [5, 6]:
-                prev = revdata[i]
-                if prev != nullrev:
-                    headrevs.discard(prev)
-        assert headrevs
-        return headrevs
-
-    def linearize(self, ixs):
-        '''linearize and topologically sort a list of revisions
-
-        The linearization process tries to create long runs of revs where
-        a child rev comes immediately after its first parent. This is done by
-        visiting the heads of the given revs in inverse topological order,
-        and for each visited rev, visiting its second parent, then its first
-        parent, then adding the rev itself to the output list.
-        '''
-        sorted = []
-        visit = list(self.headsetofconnecteds(ixs))
-        visit.sort(reverse=True)
-        finished = set()
-
-        while visit:
-            cur = visit.pop()
-            if cur < 0:
-                cur = -cur - 1
-                if cur not in finished:
-                    sorted.append(cur)
-                    finished.add(cur)
-            else:
-                visit.append(-cur - 1)
-                visit += [p for p in self.parents(cur)
-                          if p in ixs and p not in finished]
-        assert len(sorted) == len(ixs)
-        return sorted
-
-class inverserevlogdag(revlogbaseddag, genericdag):
-    '''inverse of an existing revlog dag; see revlogdag.inverse()'''
-
-    def __init__(self, orig):
-        revlogbaseddag.__init__(self, orig._revlog, orig._nodeset)
-        self._orig = orig
-        self._children = {}
-        self._roots = []
-        self._walkfrom = len(self._revlog) - 1
-
-    def _walkto(self, walkto):
-        rev = self._walkfrom
-        cs = self._children
-        roots = self._roots
-        idx = self._revlog.index
-        while rev >= walkto:
-            data = idx[rev]
-            isroot = True
-            for prev in [data[5], data[6]]: # parent revs
-                if prev != nullrev:
-                    cs.setdefault(prev, []).append(rev)
-                    isroot = False
-            if isroot:
-                roots.append(rev)
-            rev -= 1
-        self._walkfrom = rev
-
-    def _getheads(self):
-        self._walkto(nullrev)
-        return self._roots
-
-    def parents(self, ix):
-        if ix is None:
-            return []
-        if ix <= self._walkfrom:
-            self._walkto(ix)
-        return self._children.get(ix, [])
-
-    def inverse(self):
-        return self._orig
--- a/hgext3rd/evolve/evolvecmd.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/evolvecmd.py	Wed Jan 03 13:56:27 2024 -0300
@@ -821,8 +821,8 @@
         else:
             # hg <= 6.0 (12ac4401ff7d)
             kwargs['base_marker'] = b'|||||||'
-        desc = b''.join(merger.merge_lines(**kwargs))
-        conflicts = merger.conflicts
+        desc = b''.join(merger.merge_lines(**kwargs))  # pytype: disable=attribute-error
+        conflicts = merger.conflicts  # pytype: disable=attribute-error
 
         if conflicts:
--- a/hgext3rd/evolve/metadata.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/metadata.py	Wed Jan 03 13:56:27 2024 -0300
@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-__version__ = b'11.1.1.dev0'
-testedwith = b'4.9 5.0 5.1 5.2 5.3 5.4 5.5 5.6 5.7 5.8 5.9 6.0 6.1 6.2 6.3 6.4 6.5'
-minimumhgversion = b'4.9'
+__version__ = b'11.2.0.dev0'
+testedwith = b'5.6 5.7 5.8 5.9 6.0 6.1 6.2 6.3 6.4 6.5'
+minimumhgversion = b'5.6'
 buglink = b'https://bz.mercurial-scm.org/'
--- a/hgext3rd/evolve/obsexchange.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/obsexchange.py	Wed Jan 03 13:56:27 2024 -0300
@@ -15,7 +15,6 @@
     node,
     obsolete,
     pushkey,
-    util,
     wireprototypes,
     wireprotov1server
 )
@@ -112,10 +111,7 @@
     subset = [c.node() for c in repo.unfiltered().set(b'only(%ln, %ln)', heads, common)]
     subset += kwargs['evo_missing_nodes']
     markers = repo.obsstore.relevantmarkers(subset)
-    if util.safehasattr(bundle2, 'buildobsmarkerspart'):
-        bundle2.buildobsmarkerspart(bundler, markers)
-    else:
-        exchange.buildobsmarkerspart(bundler, markers)
+    bundle2.buildobsmarkerspart(bundler, markers)
 
 def _obscommon_capabilities(orig, repo, proto):
     """wrapper to advertise new capability"""
--- a/hgext3rd/evolve/obshistory.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/obshistory.py	Wed Jan 03 13:56:27 2024 -0300
@@ -265,6 +265,7 @@
         super(obsmarker_printer, self).__init__(ui, repo, *args, **kwargs)
         diffopts = kwargs.get('diffopts', {})
 
+        assert isinstance(diffopts, dict)  # help pytype
         self._includediff = diffopts and diffopts.get(b'patch')
         self.template = diffopts and diffopts.get(b'template')
         self.filter = diffopts and diffopts.get(b'filternonlocal')
--- a/hgext3rd/evolve/rewind.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/rewind.py	Wed Jan 03 13:56:27 2024 -0300
@@ -335,8 +335,8 @@
     """
     unfi = repo.unfiltered()
    targets = set()
-    revsto = opts.get('to')
-    revsfrom = opts.get('from')
+    revsto = opts['to']
+    revsfrom = opts['from']
    if not (revsto or revsfrom):
        revsfrom.append(b'.')
    if revsto:
--- a/hgext3rd/evolve/stablerange.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/stablerange.py	Wed Jan 03 13:56:27 2024 -0300
@@ -493,7 +493,7 @@
         else:
             ui.status(b'%s - %s\n' % (rstr, subsstr))
 
-class abstractstablerange(object):
+class abstractstablerange(object):  # pytype: disable=ignored-metaclass
     """The official API for a stablerange"""
 
     __metaclass__ = abc.ABCMeta
@@ -541,7 +541,8 @@
             assert standard_start < rangedepth
             slicepoint = standard_start
         return slicepoint
-class stablerangebasic(abstractstablerange):
+
+class stablerangebasic(abstractstablerange):  # pytype: disable=ignored-metaclass
     """a very dummy implementation of stablerange
 
     the implementation is here to lay down the basic algorithm in the stable
@@ -628,7 +629,7 @@
     def _sortfunction(self, repo, headrev):
         return stablesort.stablesort_mergepoint_head_basic(repo, [headrev])
 
-class stablerangecached(abstractstablerange):
+class stablerangecached(abstractstablerange):  # pytype: disable=ignored-metaclass
     """an implementation of stablerange using caching"""
 
     __metaclass__ = abc.ABCMeta
@@ -646,6 +647,9 @@
         headrev, index = rangeid[0], rangeid[1]
         return self.depthrev(repo, headrev) - index
 
+    def _subranges(self, repo, rangeid):
+        raise NotImplementedError()
+
     def subranges(self, repo, rangeid):
         assert 0 <= rangeid[1] <= rangeid[0], rangeid
         cached = self._getsub(rangeid)
--- a/hgext3rd/evolve/stablerangecache.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/stablerangecache.py	Wed Jan 03 13:56:27 2024 -0300
@@ -168,12 +168,14 @@
     body = r' OR '.join(_querysuperrangesbody % r for r in ranges)
     return _querysuperrangesmain % body
 
-class stablerangesqlbase(stablerange.stablerangecached):
+class stablerangesqlbase(stablerange.stablerangecached):  # pytype: disable=ignored-metaclass
     """class that can handle all the bits needed to store range into sql
     """
 
     __metaclass__ = abc.ABCMeta
 
+    _tiprev = None
+    _tipnode = None
     _schemaversion = None
     _cachefile = None
@@ -368,8 +370,6 @@
 class stablerangesql(stablerangesqlbase, stablerangeondiskbase):
     """base clase able to preserve data to disk as sql"""
 
-    __metaclass__ = abc.ABCMeta
-
     # self._cachekey = (tiprev, tipnode)
 
     @property
--- a/hgext3rd/evolve/state.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/state.py	Wed Jan 03 13:56:27 2024 -0300
@@ -26,7 +26,7 @@
     from .thirdparty import cbor
     cbor.__doc__  # trigger ImportError immediately
 except ImportError:
-    import cbor
+    import cbor  # pytype: disable=import-error
 
 from mercurial import (
     error,
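The state.py hunk keeps the vendored-or-system import fallback and only silences pytype on the branch whose module the checker cannot resolve. A reduced sketch of that import pattern, using the standard-library json module plus a hypothetical `fastcbor` package as the unresolvable fallback (both names here are illustrative, not from this changeset):

```python
try:
    import json as serializer  # always-available backend
except ImportError:
    # Hypothetical faster drop-in; pytype cannot resolve it, so the
    # directive keeps the checker quiet without changing runtime behaviour.
    import fastcbor as serializer  # pytype: disable=import-error

print(serializer.dumps({"ok": True}))
```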
--- a/hgext3rd/evolve/thirdparty/cbor.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/evolve/thirdparty/cbor.py	Wed Jan 03 13:56:27 2024 -0300
@@ -79,23 +79,23 @@
 CBOR_TAG_MIME = 36 # following text is MIME message, headers, separators and all
 CBOR_TAG_CBOR_FILEHEADER = 55799 # can open a file with 0xd9d9f7
 
-_CBOR_TAG_BIGNUM_BYTES = struct.pack(b'B', CBOR_TAG | CBOR_TAG_BIGNUM)
+_CBOR_TAG_BIGNUM_BYTES = struct.pack('B', CBOR_TAG | CBOR_TAG_BIGNUM)
 
 
 def dumps_int(val):
-    b"return bytes representing int val in CBOR"
+    "return bytes representing int val in CBOR"
     if val >= 0:
         # CBOR_UINT is 0, so I'm lazy/efficient about not OR-ing it in.
         if val <= 23:
-            return struct.pack(b'B', val)
+            return struct.pack('B', val)
         if val <= 0x0ff:
-            return struct.pack(b'BB', CBOR_UINT8_FOLLOWS, val)
+            return struct.pack('BB', CBOR_UINT8_FOLLOWS, val)
         if val <= 0x0ffff:
-            return struct.pack(b'!BH', CBOR_UINT16_FOLLOWS, val)
+            return struct.pack('!BH', CBOR_UINT16_FOLLOWS, val)
         if val <= 0x0ffffffff:
-            return struct.pack(b'!BI', CBOR_UINT32_FOLLOWS, val)
+            return struct.pack('!BI', CBOR_UINT32_FOLLOWS, val)
         if val <= 0x0ffffffffffffffff:
-            return struct.pack(b'!BQ', CBOR_UINT64_FOLLOWS, val)
+            return struct.pack('!BQ', CBOR_UINT64_FOLLOWS, val)
         outb = _dumps_bignum_to_bytearray(val)
         return _CBOR_TAG_BIGNUM_BYTES + _encode_type_num(CBOR_BYTES, len(outb)) + outb
     val = -1 - val
@@ -119,28 +119,28 @@
 def dumps_float(val):
-    return struct.pack(b"!Bd", CBOR_FLOAT64, val)
+    return struct.pack("!Bd", CBOR_FLOAT64, val)
 
 
-_CBOR_TAG_NEGBIGNUM_BYTES = struct.pack(b'B', CBOR_TAG | CBOR_TAG_NEGBIGNUM)
+_CBOR_TAG_NEGBIGNUM_BYTES = struct.pack('B', CBOR_TAG | CBOR_TAG_NEGBIGNUM)
 
 
 def _encode_type_num(cbor_type, val):
     """For some CBOR primary type [0..7] and an auxiliary unsigned number,
     return CBOR encoded bytes"""
     assert val >= 0
     if val <= 23:
-        return struct.pack(b'B', cbor_type | val)
+        return struct.pack('B', cbor_type | val)
     if val <= 0x0ff:
-        return struct.pack(b'BB', cbor_type | CBOR_UINT8_FOLLOWS, val)
+        return struct.pack('BB', cbor_type | CBOR_UINT8_FOLLOWS, val)
     if val <= 0x0ffff:
-        return struct.pack(b'!BH', cbor_type | CBOR_UINT16_FOLLOWS, val)
+        return struct.pack('!BH', cbor_type | CBOR_UINT16_FOLLOWS, val)
     if val <= 0x0ffffffff:
-        return struct.pack(b'!BI', cbor_type | CBOR_UINT32_FOLLOWS, val)
+        return struct.pack('!BI', cbor_type | CBOR_UINT32_FOLLOWS, val)
     if (((cbor_type == CBOR_NEGINT) and (val <= 0x07fffffffffffffff)) or
             ((cbor_type != CBOR_NEGINT) and (val <= 0x0ffffffffffffffff))):
-        return struct.pack(b'!BQ', cbor_type | CBOR_UINT64_FOLLOWS, val)
+        return struct.pack('!BQ', cbor_type | CBOR_UINT64_FOLLOWS, val)
     if cbor_type != CBOR_NEGINT:
-        raise Exception(b"value too big for CBOR unsigned number: {0!r}".format(val))
+        raise Exception("value too big for CBOR unsigned number: {0!r}".format(val))
     outb = _dumps_bignum_to_bytearray(val)
     return _CBOR_TAG_NEGBIGNUM_BYTES + _encode_type_num(CBOR_BYTES, len(outb)) + outb
@@ -201,8 +201,8 @@
 def dumps_bool(b):
     if b:
-        return struct.pack(b'B', CBOR_TRUE)
-    return struct.pack(b'B', CBOR_FALSE)
+        return struct.pack('B', CBOR_TRUE)
+    return struct.pack('B', CBOR_FALSE)
 
 
 def dumps_tag(t, sort_keys=False):
@@ -223,7 +223,7 @@
 def dumps(ob, sort_keys=False):
     if ob is None:
-        return struct.pack(b'B', CBOR_NULL)
+        return struct.pack('B', CBOR_NULL)
     if isinstance(ob, bool):
         return dumps_bool(ob)
     if _is_stringish(ob):
@@ -239,7 +239,7 @@
         return dumps_int(ob)
     if isinstance(ob, Tag):
         return dumps_tag(ob, sort_keys=sort_keys)
-    raise Exception(b"don't know how to cbor serialize object of type %s", type(ob))
+    raise Exception("don't know how to cbor serialize object of type %s", type(ob))
 
 
 # same basic signature as json.dump, but with no options (yet)
@@ -260,7 +260,7 @@
         self.value = value
 
     def __repr__(self):
-        return b"Tag({0!r}, {1!r})".format(self.tag, self.value)
+        return "Tag({0!r}, {1!r})".format(self.tag, self.value)
 
     def __eq__(self, other):
         if not isinstance(other, Tag):
@@ -273,7 +273,7 @@
     Parse CBOR bytes and return Python objects.
     """
     if data is None:
-        raise ValueError(b"got None for buffer to decode in loads")
+        raise ValueError("got None for buffer to decode in loads")
     fp = StringIO(data)
     return _loads(fp)[0]
@@ -296,22 +296,22 @@
         aux = tag_aux
     elif tag_aux == CBOR_UINT8_FOLLOWS:
         data = fp.read(1)
-        aux = struct.unpack_from(b"!B", data, 0)[0]
+        aux = struct.unpack_from("!B", data, 0)[0]
         bytes_read += 1
     elif tag_aux == CBOR_UINT16_FOLLOWS:
         data = fp.read(2)
-        aux = struct.unpack_from(b"!H", data, 0)[0]
+        aux = struct.unpack_from("!H", data, 0)[0]
         bytes_read += 2
     elif tag_aux == CBOR_UINT32_FOLLOWS:
         data = fp.read(4)
-        aux = struct.unpack_from(b"!I", data, 0)[0]
+        aux = struct.unpack_from("!I", data, 0)[0]
         bytes_read += 4
     elif tag_aux == CBOR_UINT64_FOLLOWS:
         data = fp.read(8)
-        aux = struct.unpack_from(b"!Q", data, 0)[0]
+        aux = struct.unpack_from("!Q", data, 0)[0]
         bytes_read += 8
     else:
-        assert tag_aux == CBOR_VAR_FOLLOWS, b"bogus tag {0:02x}".format(tb)
+        assert tag_aux == CBOR_VAR_FOLLOWS, "bogus tag {0:02x}".format(tb)
         aux = None
     return tag, tag_aux, aux, bytes_read
@@ -385,9 +385,9 @@
     return ob, bytes_read
 
 def _loads(fp, limit=None, depth=0, returntags=False):
-    b"return (object, bytes read)"
+    "return (object, bytes read)"
     if depth > _MAX_DEPTH:
-        raise Exception(b"hit CBOR loads recursion depth limit")
+        raise Exception("hit CBOR loads recursion depth limit")
 
     tb = _read_byte(fp)
@@ -397,16 +397,16 @@
     # Some special cases of CBOR_7 best handled by special struct.unpack logic here
     if tb == CBOR_FLOAT16:
         data = fp.read(2)
-        hibyte, lowbyte = struct.unpack_from(b"BB", data, 0)
+        hibyte, lowbyte = struct.unpack_from("BB", data, 0)
         exp = (hibyte >> 2) & 0x1F
         mant = ((hibyte & 0x03) << 8) | lowbyte
         if exp == 0:
             val = mant * (2.0 ** -24)
         elif exp == 31:
             if mant == 0:
-                val = float(b'Inf')
+                val = float('Inf')
             else:
-                val = float(b'NaN')
+                val = float('NaN')
         else:
             val = (mant + 1024.0) * (2 ** (exp - 25))
         if hibyte & 0x80:
@@ -414,11 +414,11 @@
         return (val, 3)
     elif tb == CBOR_FLOAT32:
         data = fp.read(4)
-        pf = struct.unpack_from(b"!f", data, 0)
+        pf = struct.unpack_from("!f", data, 0)
         return (pf[0], 5)
     elif tb == CBOR_FLOAT64:
         data = fp.read(8)
-        pf = struct.unpack_from(b"!d", data, 0)
+        pf = struct.unpack_from("!d", data, 0)
         return (pf[0], 9)
 
     tag, tag_aux, aux, bytes_read = _tag_aux(fp, tb)
@@ -461,7 +461,7 @@
         return (None, bytes_read)
     if tb == CBOR_UNDEFINED:
         return (None, bytes_read)
-    raise ValueError(b"unknown cbor tag 7 byte: {:02x}".format(tb))
+    raise ValueError("unknown cbor tag 7 byte: {:02x}".format(tb))
 
 
 def loads_bytes(fp, aux, btag=CBOR_BYTES):
@@ -481,7 +481,7 @@
             total_bytes_read += 1
             break
         tag, tag_aux, aux, bytes_read = _tag_aux(fp, tb)
-        assert tag == btag, b'variable length value contains unexpected component'
+        assert tag == btag, 'variable length value contains unexpected component'
         ob = fp.read(aux)
         chunklist.append(ob)
         total_bytes_read += bytes_read + aux
--- a/hgext3rd/pullbundle.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/pullbundle.py	Wed Jan 03 13:56:27 2024 -0300
@@ -383,7 +383,7 @@
     if r'ancestorsof' in discovery.outgoing.__init__.__code__.co_varnames:
         return discovery.outgoing(repo, missingroots=nodes, ancestorsof=nodes)
     else:
-        return discovery.outgoing(repo, missingroots=nodes, missingheads=nodes)
+        return discovery.outgoing(repo, missingroots=nodes, missingheads=nodes)  # pytype: disable=wrong-keyword-args
 
 # changegroup part construction
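The pullbundle hunk leaves the existing `co_varnames` probe in place and merely annotates the legacy call for pytype. A stand-alone sketch of that keyword-argument dispatch, with throwaway functions standing in for `discovery.outgoing` (the names here are illustrative only):

```python
def new_api(repo, missingroots=None, ancestorsof=None):
    return ('new', ancestorsof)

def old_api(repo, missingroots=None, missingheads=None):
    return ('old', missingheads)

def call_outgoing(func, repo, nodes):
    # Probe the callable's parameter names before calling it, mirroring
    # the co_varnames check used in pullbundle.py.
    if 'ancestorsof' in func.__code__.co_varnames:
        return func(repo, missingroots=nodes, ancestorsof=nodes)
    return func(repo, missingroots=nodes, missingheads=nodes)

print(call_outgoing(new_api, object(), ['n1']))
print(call_outgoing(old_api, object(), ['n1']))
```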
--- a/hgext3rd/topic/__init__.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/topic/__init__.py	Wed Jan 03 13:56:27 2024 -0300
@@ -236,10 +236,10 @@
     b'log.topic': b'green_background',
 }
 
-__version__ = b'1.1.1.dev0'
+__version__ = b'1.2.0.dev0'
 
-testedwith = b'4.9 5.0 5.1 5.2 5.3 5.4 5.5 5.6 5.7 5.8 5.9 6.0 6.1 6.2 6.3 6.4 6.5'
-minimumhgversion = b'4.9'
+testedwith = b'5.6 5.7 5.8 5.9 6.0 6.1 6.2 6.3 6.4 6.5'
+minimumhgversion = b'5.6'
 buglink = b'https://bz.mercurial-scm.org/'
 
 configtable = {}
@@ -748,7 +748,11 @@
         def branchheads(self, branch=None, start=None, closed=False):
             if branch is None:
-                branch = self[None].branch()
+                # using dirstate.branch() instead of self[None].branch()
+                # because we wrap context.branch method to return branch
+                # already in FQBN format, and we can't give it to formatfqbn()
+                # again directly
+                branch = self.dirstate.branch()
             branch = common.formatfqbn(branch, self.currenttns,
                                        self.currenttopic)
             return super(topicrepo, self).branchheads(branch=branch,
                                                       start=start,
@@ -881,6 +885,7 @@
         def _validate_affected_tns(tr2):
             repo = reporef()
+            assert repo is not None  # help pytype
             find_affected_tns(repo, tr2)
 
         def validator(tr2):
@@ -1100,7 +1105,7 @@
         utopic = encoding.unifromlocal(topic)
     except error.Abort:
         # Maybe we should allow these topic names as well, as long as they
-        # don't break any other rules
+        # don't break any other rules.
         utopic = ''
     rmatch = re.match(r'[-_.\w]+', utopic, re.UNICODE)
     if not utopic or not rmatch or rmatch.group(0) != utopic:
@@ -1273,7 +1278,7 @@
                 _applyconvertbmarktopic(ui, repo, targetrevs, revnum, bmark, tr)
             tr.close()
         finally:
-            tr.release()
+            lockmod.release(tr)
     finally:
         lockmod.release(lock, wlock)
@@ -1829,7 +1834,7 @@
     """set or show the current topic namespace"""
     if opts.get('clear'):
         if tns:
-            raise error.Abort(_(b"cannot use --clear when setting a topic namespace"))
+            raise compat.InputError(_(b"cannot use --clear when setting a topic namespace"))
         tns = b'none'
     elif not tns:
         ui.write(b'%s\n' % repo.currenttns)
@@ -1837,9 +1842,9 @@
     if tns:
         tns = tns.strip()
         if not tns:
-            raise error.Abort(_(b"topic namespace cannot consist entirely of whitespace"))
+            raise compat.InputError(_(b"topic namespace cannot consist entirely of whitespace"))
         if b'/' in tns:
-            raise error.Abort(_(b"topic namespace cannot contain '/' character"))
+            raise compat.InputError(_(b"topic namespace cannot contain '/' character"))
         scmutil.checknewlabel(repo, tns, b'topic namespace')
 
         helptxt = _(b"topic namespace names can only consist of alphanumeric, "
@@ -1847,8 +1852,8 @@
         try:
             utns = encoding.unifromlocal(tns)
         except error.Abort:
-            # Maybe we should allow these topic names as well, as long as they
-            # don't break any other rules
+            # Maybe we should allow these topic namespace names as well, as
+            # long as they don't break any other rules.
            utns = ''
         rmatch = re.match(r'[-_.\w]+', utns, re.UNICODE)
         if not utns or not rmatch or rmatch.group(0) != utns:
--- a/hgext3rd/topic/discovery.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/topic/discovery.py	Wed Jan 03 13:56:27 2024 -0300
@@ -325,11 +325,9 @@
         caps.append(b'topics-namespaces')
     return caps
 
-# **kwargs is for accommodating an optional changelog argument
-# hg <= 4.8 (5e5c8f2a1eb5)
-def wrapbranchinfo(orig, self, rev, **kwargs):
+def wrapbranchinfo(orig, self, rev):
     # NOTE: orig can be either branchinfo() or _branchinfo()!
-    b, close = orig(self, rev, **kwargs)
+    b, close = orig(self, rev)
     if common.hastopicext(self._repo):
         if self._repo.ui.configbool(b'_internal', b'tns-disable-fqbn'):
             # the config option prevents this function from doing anything,
@@ -344,16 +342,14 @@
             b = ctx.fqbn()
     return b, close
 
-# **kwargs is for accommodating an optional changelog argument
-# hg <= 4.8 (5e5c8f2a1eb5)
-def wrapslowbranchinfo(orig, self, rev, **kwargs):
+def wrapslowbranchinfo(orig, self, rev):
     if self.branchinfo == self._branchinfo:
         # _branchinfo() gets called directly and needs to do the conversion
-        return wrapbranchinfo(orig, self, rev, **kwargs)
+        return wrapbranchinfo(orig, self, rev)
     else:
         # _branchinfo() gets called through branchinfo(), the latter will need
         # to do the conversion
-        return orig(self, rev, **kwargs)
+        return orig(self, rev)
 
 def wrapaddpartrevbranchcache(orig, repo, bundler, outgoing):
     """making sure we send rev-branch-cache that only has bare branches"""
--- a/hgext3rd/topic/server.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/topic/server.py	Wed Jan 03 13:56:27 2024 -0300
@@ -5,7 +5,6 @@
 from mercurial.i18n import _
 
 from mercurial import (
-    branchmap,
     error,
     extensions,
     localrepo,
@@ -16,6 +15,8 @@
     wireprotov1server,
 )
 
+from mercurial.utils import repoviewutil
+
 from . import (
     common,
     compat,
@@ -112,7 +113,7 @@
     """ wirepeer that uses `future` class from before c424ff4807e6 """
     @wireprotov1peer.batchable
     def tns_heads(self, namespaces):
-        f = wireprotov1peer.future()
+        f = wireprotov1peer.future()  # pytype: disable=module-attr
         yield {b'namespaces': wireprototypes.encodelist(namespaces)}, f
         d = f.value
         try:
@@ -169,6 +170,5 @@
 
     if FILTERNAME not in repoview.filtertable:
         repoview.filtertable[FILTERNAME] = computeunservedtopic
-        # hg <= 4.9 (caebe5e7f4bd)
-        branchmap.subsettable[FILTERNAME] = b'immutable'
-        branchmap.subsettable[b'served'] = FILTERNAME
+        repoviewutil.subsettable[FILTERNAME] = b'immutable'
+        repoviewutil.subsettable[b'served'] = FILTERNAME
--- a/hgext3rd/topic/topicmap.py	Sun Dec 31 16:22:15 2023 -0300
+++ b/hgext3rd/topic/topicmap.py	Wed Jan 03 13:56:27 2024 -0300
@@ -1,4 +1,5 @@
 import contextlib
+import functools
 import hashlib
 
 from mercurial.i18n import _
@@ -134,7 +135,7 @@
     def _wrapupdatebmcachemethod(orig, self, repo):
         # pass in the bound method as the original
-        return _wrapupdatebmcache(orig.__get__(self), repo)
+        return _wrapupdatebmcache(functools.partial(orig, self), repo)
     extensions.wrapfunction(branchmap.BranchMapCache, 'updatecache', _wrapupdatebmcachemethod)
 except AttributeError:
     # hg <= 4.9 (3461814417f3)
@@ -175,6 +176,7 @@
     def copy(self):
         """return an deep copy of the branchcache object"""
         entries = compat.bcentries(self)
+        assert isinstance(self, _oldbranchmap)  # help pytype
         args = (entries, self.tipnode, self.tiprev,
                 self.filteredhash, self._closednodes)
         if util.safehasattr(self, '_repo'):
@@ -188,6 +190,7 @@
         """call branchmap.load(), and then transform branch names to be in
         the new "//" format
         """
+        assert isinstance(self, _oldbranchmap)  # help pytype
         super(_topiccache, self).load(repo, lineiter)
         entries = compat.bcentries(self)
@@ -201,6 +204,7 @@
         - False when cached tipnode is unknown or if we detect a strip.
         - True when cache is up to date or a subset of current repo."""
 
+        assert isinstance(self, _oldbranchmap)  # help pytype
         valid = super(_topiccache, self).validfor(repo)
         if not valid:
             return False
@@ -237,6 +241,7 @@
         missing heads, and a generator of nodes that are strictly a superset of
         heads missing, this function updates self to be correct.
         """
+        assert isinstance(self, _oldbranchmap)  # help pytype
         if not istopicfilter(repo.filtername):
             return super(_topiccache, self).update(repo, revgen)
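The topicmap change swaps `orig.__get__(self)` for `functools.partial(orig, self)` when pre-binding the wrapped `updatecache` method. Both produce a callable with `self` already filled in; `partial` presumably has the advantage of not depending on the descriptor protocol of plain functions. A tiny illustration of the equivalence, with dummy names that are not from this changeset:

```python
import functools

class Cache(object):
    def updatecache(self, repo):
        return ('updated', repo)

def orig(self, repo):
    # stands in for the wrapped method body
    return self.updatecache(repo)

cache = Cache()

bound_via_descriptor = orig.__get__(cache)          # binds self like a method
bound_via_partial = functools.partial(orig, cache)  # pre-fills self explicitly

assert bound_via_descriptor('repo-A') == bound_via_partial('repo-A')
print(bound_via_partial('repo-A'))
```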
--- a/tests/test-check-sdist.t	Sun Dec 31 16:22:15 2023 -0300
+++ b/tests/test-check-sdist.t	Wed Jan 03 13:56:27 2024 -0300
@@ -37,7 +37,7 @@
   $ egrep '^tests/test-.*\.(t|py)$' files > test-files
   $ egrep -v '^tests/test-.*\.(t|py)$' files > other-files
   $ wc -l other-files
-  148 other-files
+  147 other-files
   $ wc -l test-files
   ??? test-files (glob)
   $ fgrep debian files
--- a/tests/test-evolve-public-content-divergent-discard.t	Sun Dec 31 16:22:15 2023 -0300
+++ b/tests/test-evolve-public-content-divergent-discard.t	Wed Jan 03 13:56:27 2024 -0300
@@ -681,12 +681,12 @@
   marked working directory as topic: topic-2
 
 (make other divergent a closed branch head)
-  $ hg ci --amend -m "closing branch double//slash" --close-branch
+  $ hg ci --amend -m "closing default branch" --close-branch
   active topic 'topic-2' grew its first changeset
   (see 'hg help topics' for more information)
 
   $ hg glog
-  @  6:fe5d55b4e488 closing branch double//slash
+  @  6:638eff2d31b7 closing default branch
   |   draft content-divergent
   |
   | o  5:bde8ac1c636a added d
@@ -706,11 +706,11 @@
   $ hg evolve --content-divergent
   merge:[5] added d
-  with: [6] closing branch double//slash
+  with: [6] closing default branch
   base: [3] added d
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  other divergent changeset fe5d55b4e488 is a closed branch head and differs from local bde8ac1c636a by "branch, description" only, discarding fe5d55b4e488
-  content divergence resolution between bde8ac1c636a (public) and fe5d55b4e488 has same content as bde8ac1c636a, discarding fe5d55b4e488
+  other divergent changeset 638eff2d31b7 is a closed branch head and differs from local bde8ac1c636a by "branch, description" only, discarding 638eff2d31b7
+  content divergence resolution between bde8ac1c636a (public) and 638eff2d31b7 has same content as bde8ac1c636a, discarding 638eff2d31b7
   active topic 'topic-2' is now empty
   working directory is now at bde8ac1c636a
--- a/tests/test-namespaces.t	Sun Dec 31 16:22:15 2023 -0300
+++ b/tests/test-namespaces.t	Wed Jan 03 13:56:27 2024 -0300
@@ -64,7 +64,7 @@
 
   $ hg debugtopicnamespace --clear nonsense
   abort: cannot use --clear when setting a topic namespace
-  [255]
+  [10]
 
   $ hg branch stable
   marked working directory as branch stable