view mercurial/ancestor.py @ 43299:83bb1e89ab9b
copies: compute the exact set of revisions to walk
This change makes the code clearer by removing the revision queue. It comes
without any noticeable performance impact, but the simpler code will be
easier to update in later changesets.
revision: large amount; added files: large amount; rename: small amount; c3b14617fbd7 9ba6ab77fd29
before: ! wall 1.430082 comb 1.430000 user 1.390000 sys 0.040000 (median of 10)
after: ! wall 1.405192 comb 1.410000 user 1.390000 sys 0.020000 (median of 10)
revision: large amount; added files: small amount; rename: small amount; c3b14617fbd7 f650a9b140d2
before: ! wall 1.971366 comb 1.970000 user 1.950000 sys 0.020000 (median of 10)
after: ! wall 1.892541 comb 1.890000 user 1.870000 sys 0.020000 (median of 10)
revision: large amount; added files: large amount; rename: large amount; 08ea3258278e d9fa043f30c0
before: ! wall 0.252594 comb 0.250000 user 0.240000 sys 0.010000 (median of 38)
after: ! wall 0.240075 comb 0.240000 user 0.240000 sys 0.000000 (median of 40)
revision: small amount; added files: large amount; rename: large amount; df6f7a526b60 a83dc6a2d56f
before: ! wall 0.013100 comb 0.010000 user 0.010000 sys 0.000000 (median of 226)
after: ! wall 0.013247 comb 0.010000 user 0.010000 sys 0.000000 (median of 223)
revision: small amount; added files: large amount; rename: small amount; 4aa4e1f8e19a 169138063d63
before: ! wall 0.001633 comb 0.000000 user 0.000000 sys 0.000000 (median of 1000)
after: ! wall 0.001670 comb 0.000000 user 0.000000 sys 0.000000 (median of 1000)
revision: small amount; added files: small amount; rename: small amount; 4bc173b045a6 964879152e2e
before: ! wall 0.000078 comb 0.000000 user 0.000000 sys 0.000000 (median of 11984)
after: ! wall 0.000119 comb 0.000000 user 0.000000 sys 0.000000 (median of 7982)
revision: medium amount; added files: large amount; rename: medium amount; c95f1ced15f2 2c68e87c3efe
before: ! wall 0.207093 comb 0.210000 user 0.210000 sys 0.000000 (median of 47)
after: ! wall 0.201551 comb 0.200000 user 0.200000 sys 0.000000 (median of 48)
revision: medium amount; added files: medium amount; rename: small amount; d343da0c55a8 d7746d32bf9d
before: ! wall 0.038462 comb 0.040000 user 0.040000 sys 0.000000 (median of 100)
after: ! wall 0.036578 comb 0.030000 user 0.030000 sys 0.000000 (median of 100)
Differential Revision: https://phab.mercurial-scm.org/D7076
| author   | Pierre-Yves David <pierre-yves.david@octobus.net> |
|----------|---------------------------------------------------|
| date     | Sat, 12 Oct 2019 18:35:14 +0200                   |
| parents  | 2372284d9457                                      |
| children | 9f70512ae2cf                                      |
# ancestor.py - generic DAG ancestor algorithm for mercurial
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import heapq

from .node import nullrev
from . import (
    dagop,
    policy,
    pycompat,
)

parsers = policy.importmod(r'parsers')


def commonancestorsheads(pfunc, *nodes):
    """Returns a set with the heads of all common ancestors of all nodes,
    heads(::nodes[0] and ::nodes[1] and ...) .

    pfunc must return a list of parent vertices for a given vertex.
    """
    if not isinstance(nodes, set):
        nodes = set(nodes)
    if nullrev in nodes:
        return set()
    if len(nodes) <= 1:
        return nodes

    allseen = (1 << len(nodes)) - 1
    seen = [0] * (max(nodes) + 1)
    for i, n in enumerate(nodes):
        seen[n] = 1 << i
    poison = 1 << (i + 1)

    gca = set()
    interesting = len(nodes)
    nv = len(seen) - 1
    while nv >= 0 and interesting:
        v = nv
        nv -= 1
        if not seen[v]:
            continue
        sv = seen[v]
        if sv < poison:
            interesting -= 1
            if sv == allseen:
                gca.add(v)
                sv |= poison
                if v in nodes:
                    # history is linear
                    return {v}
        if sv < poison:
            for p in pfunc(v):
                sp = seen[p]
                if p == nullrev:
                    continue
                if sp == 0:
                    seen[p] = sv
                    interesting += 1
                elif sp != sv:
                    seen[p] |= sv
        else:
            for p in pfunc(v):
                if p == nullrev:
                    continue
                sp = seen[p]
                if sp and sp < poison:
                    interesting -= 1
                seen[p] = sv
    return gca


def ancestors(pfunc, *orignodes):
    """
    Returns the common ancestors of a and b that are furthest from a
    root (as measured by longest path).

    pfunc must return a list of parent vertices for a given vertex.
    """

    def deepest(nodes):
        interesting = {}
        count = max(nodes) + 1
        depth = [0] * count
        seen = [0] * count
        mapping = []
        for (i, n) in enumerate(sorted(nodes)):
            depth[n] = 1
            b = 1 << i
            seen[n] = b
            interesting[b] = 1
            mapping.append((b, n))
        nv = count - 1
        while nv >= 0 and len(interesting) > 1:
            v = nv
            nv -= 1
            dv = depth[v]
            if dv == 0:
                continue
            sv = seen[v]
            for p in pfunc(v):
                if p == nullrev:
                    continue
                dp = depth[p]
                nsp = sp = seen[p]
                if dp <= dv:
                    depth[p] = dv + 1
                    if sp != sv:
                        interesting[sv] += 1
                        nsp = seen[p] = sv
                        if sp:
                            interesting[sp] -= 1
                            if interesting[sp] == 0:
                                del interesting[sp]
                elif dv == dp - 1:
                    nsp = sp | sv
                    if nsp == sp:
                        continue
                    seen[p] = nsp
                    interesting.setdefault(nsp, 0)
                    interesting[nsp] += 1
                    interesting[sp] -= 1
                    if interesting[sp] == 0:
                        del interesting[sp]
            interesting[sv] -= 1
            if interesting[sv] == 0:
                del interesting[sv]

        if len(interesting) != 1:
            return []

        k = 0
        for i in interesting:
            k |= i
        return set(n for (i, n) in mapping if k & i)

    gca = commonancestorsheads(pfunc, *orignodes)

    if len(gca) <= 1:
        return gca
    return deepest(gca)


class incrementalmissingancestors(object):
    '''persistent state used to calculate missing ancestors incrementally

    Although similar in spirit to lazyancestors below, this is a separate class
    because trying to support contains and missingancestors operations with the
    same internal data structures adds needless complexity.'''

    def __init__(self, pfunc, bases):
        self.bases = set(bases)
        if not self.bases:
            self.bases.add(nullrev)
        self.pfunc = pfunc

    def hasbases(self):
        '''whether the common set has any non-trivial bases'''
        return self.bases and self.bases != {nullrev}

    def addbases(self, newbases):
        '''grow the ancestor set by adding new bases'''
        self.bases.update(newbases)

    def basesheads(self):
        return dagop.headrevs(self.bases, self.pfunc)

    def removeancestorsfrom(self, revs):
        '''remove all ancestors of bases from the set revs (in place)'''
        bases = self.bases
        pfunc = self.pfunc
        revs.difference_update(bases)
        # nullrev is always an ancestor
        revs.discard(nullrev)
        if not revs:
            return
        # anything in revs > start is definitely not an ancestor of bases
        # revs <= start needs to be investigated
        start = max(bases)
        keepcount = sum(1 for r in revs if r > start)
        if len(revs) == keepcount:
            # no revs to consider
            return

        for curr in pycompat.xrange(start, min(revs) - 1, -1):
            if curr not in bases:
                continue
            revs.discard(curr)
            bases.update(pfunc(curr))
            if len(revs) == keepcount:
                # no more potential revs to discard
                break

    def missingancestors(self, revs):
        '''return all the ancestors of revs that are not ancestors of
        self.bases

        This may include elements from revs.

        Equivalent to the revset (::revs - ::self.bases). Revs are returned in
        revision number order, which is a topological order.'''
        revsvisit = set(revs)
        basesvisit = self.bases
        pfunc = self.pfunc
        bothvisit = revsvisit.intersection(basesvisit)
        revsvisit.difference_update(bothvisit)
        if not revsvisit:
            return []

        start = max(max(revsvisit), max(basesvisit))
        # At this point, we hold the invariants that:
        # - revsvisit is the set of nodes we know are an ancestor of at least
        #   one of the nodes in revs
        # - basesvisit is the same for bases
        # - bothvisit is the set of nodes we know are ancestors of at least one
        #   of the nodes in revs and one of the nodes in bases. bothvisit and
        #   revsvisit are mutually exclusive, but bothvisit is a subset of
        #   basesvisit.
        # Now we walk down in reverse topo order, adding parents of nodes
        # already visited to the sets while maintaining the invariants. When a
        # node is found in both revsvisit and basesvisit, it is removed from
        # revsvisit and added to bothvisit. When revsvisit becomes empty, there
        # are no more ancestors of revs that aren't also ancestors of bases, so
        # exit.

        missing = []
        for curr in pycompat.xrange(start, nullrev, -1):
            if not revsvisit:
                break

            if curr in bothvisit:
                bothvisit.remove(curr)
                # curr's parents might have made it into revsvisit through
                # another path
                for p in pfunc(curr):
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                continue

            if curr in revsvisit:
                missing.append(curr)
                revsvisit.remove(curr)
                thisvisit = revsvisit
                othervisit = basesvisit
            elif curr in basesvisit:
                thisvisit = basesvisit
                othervisit = revsvisit
            else:
                # not an ancestor of revs or bases: ignore
                continue

            for p in pfunc(curr):
                if p == nullrev:
                    pass
                elif p in othervisit or p in bothvisit:
                    # p is implicitly in thisvisit. This means p is or should
                    # be in bothvisit
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                else:
                    # visit later
                    thisvisit.add(p)

        missing.reverse()
        return missing


# Extracted from lazyancestors.__iter__ to avoid a reference cycle
def _lazyancestorsiter(parentrevs, initrevs, stoprev, inclusive):
    seen = {nullrev}
    heappush = heapq.heappush
    heappop = heapq.heappop
    heapreplace = heapq.heapreplace
    see = seen.add

    if inclusive:
        visit = [-r for r in initrevs]
        seen.update(initrevs)
        heapq.heapify(visit)
    else:
        visit = []
        heapq.heapify(visit)
        for r in initrevs:
            p1, p2 = parentrevs(r)
            if p1 not in seen:
                heappush(visit, -p1)
                see(p1)
            if p2 not in seen:
                heappush(visit, -p2)
                see(p2)

    while visit:
        current = -visit[0]
        if current < stoprev:
            break
        yield current
        # optimize out heapq operation if p1 is known to be the next highest
        # revision, which is quite common in linear history.
        p1, p2 = parentrevs(current)
        if p1 not in seen:
            if current - p1 == 1:
                visit[0] = -p1
            else:
                heapreplace(visit, -p1)
            see(p1)
        else:
            heappop(visit)
        if p2 not in seen:
            heappush(visit, -p2)
            see(p2)


class lazyancestors(object):
    def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
        """Create a new object generating ancestors for the given revs. Does
        not generate revs lower than stoprev.

        This is computed lazily starting from revs. The object supports
        iteration and membership.

        cl should be a changelog and revs should be an iterable. inclusive is
        a boolean that indicates whether revs should be included. Revs lower
        than stoprev will not be generated.

        Result does not include the null revision."""
        self._parentrevs = pfunc
        self._initrevs = revs = [r for r in revs if r >= stoprev]
        self._stoprev = stoprev
        self._inclusive = inclusive

        self._containsseen = set()
        self._containsiter = _lazyancestorsiter(
            self._parentrevs, self._initrevs, self._stoprev, self._inclusive
        )

    def __nonzero__(self):
        """False if the set is empty, True otherwise."""
        try:
            next(iter(self))
            return True
        except StopIteration:
            return False

    __bool__ = __nonzero__

    def __iter__(self):
        """Generate the ancestors of _initrevs in reverse topological order.

        If inclusive is False, yield a sequence of revision numbers starting
        with the parents of each revision in revs, i.e., each revision is
        *not* considered an ancestor of itself. Results are emitted in reverse
        revision number order. That order is also topological: a child is
        always emitted before its parent.

        If inclusive is True, the source revisions are also yielded. The
        reverse revision number order is still enforced."""
        return _lazyancestorsiter(
            self._parentrevs, self._initrevs, self._stoprev, self._inclusive
        )

    def __contains__(self, target):
        """Test whether target is an ancestor of self._initrevs."""
        seen = self._containsseen
        if target in seen:
            return True
        iter = self._containsiter
        if iter is None:
            # Iterator exhausted
            return False
        # Only integer target is valid, but some callers expect 'None in self'
        # to be False. So we explicitly allow it.
        if target is None:
            return False

        see = seen.add
        try:
            while True:
                rev = next(iter)
                see(rev)
                if rev == target:
                    return True
                if rev < target:
                    return False
        except StopIteration:
            # Set to None to indicate fast-path can be used next time, and to
            # free up memory.
            self._containsiter = None
            return False


class rustlazyancestors(object):
    def __init__(self, index, revs, stoprev=0, inclusive=False):
        self._index = index
        self._stoprev = stoprev
        self._inclusive = inclusive
        # no need to prefilter out init revs that are smaller than stoprev,
        # it's done by the rustlazyancestors constructor.
        # we need to convert to a list, because our rustlazyancestors
        # constructor (from C code) doesn't understand anything else yet
        self._initrevs = initrevs = list(revs)

        self._containsiter = parsers.rustlazyancestors(
            index, initrevs, stoprev, inclusive
        )

    def __nonzero__(self):
        """False if the set is empty, True otherwise.

        It's better to duplicate this essentially trivial method than
        to subclass lazyancestors
        """
        try:
            next(iter(self))
            return True
        except StopIteration:
            return False

    def __iter__(self):
        return parsers.rustlazyancestors(
            self._index, self._initrevs, self._stoprev, self._inclusive
        )

    def __contains__(self, target):
        return target in self._containsiter
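The module above depends only on a few Mercurial helpers (`dagop`, `policy`, `pycompat`), so its main entry points can be exercised directly against any function that maps a revision number to its parents. Below is a minimal usage sketch, assuming a Mercurial install is importable; the toy parent map, revision numbers, and expected results in the comments are illustrative assumptions worked out by hand, not data from this repository.

```python
# Illustrative sketch: drive mercurial/ancestor.py's pure-Python entry points
# with a hand-built DAG. The graph and expected outputs are assumptions made
# for this example only.
from mercurial import ancestor
from mercurial.node import nullrev

# Toy history:
#   0 -- 1 -- 2 -- 4
#         \
#          3 --------- 5   (5 is a merge of 3 and 4)
parentmap = {
    0: [nullrev, nullrev],
    1: [0, nullrev],
    2: [1, nullrev],
    3: [1, nullrev],
    4: [2, nullrev],
    5: [3, 4],
}
pfunc = parentmap.__getitem__  # pfunc(rev) -> list of parent revisions

# Heads of the common ancestors of 3 and 4.
print(ancestor.commonancestorsheads(pfunc, 3, 4))  # {1}

# Deepest common ancestors (same answer on this simple graph).
print(ancestor.ancestors(pfunc, 3, 4))  # {1}

# Equivalent of the revset "::revs - ::bases", in revision-number order.
inc = ancestor.incrementalmissingancestors(pfunc, [2])
print(inc.missingancestors([5]))  # [3, 4, 5]

# Lazy ancestor set supporting iteration and membership tests.
lazy = ancestor.lazyancestors(pfunc, [5])
print(3 in lazy)   # True
print(list(lazy))  # [4, 3, 2, 1, 0]
```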