mercurial/treediscovery.py @ 45047:40120de810ba (stable)
demandimport: ignore `lzma` module for demandimport
This makes importing the module fail if the `_lzma` module is not present.
This makes e.g. tarfile correctly recognize if LZMA support is not present. It
changes the exception
File "/usr/lib/python3.6/tarfile.py", line 1694, in xzopen
fileobj = lzma.LZMAFile(fileobj or name, mode, preset=preset)
AttributeError: module 'lzma' has no attribute 'LZMAFile'
to the more correct exception
File "/usr/lib/python3.6/tarfile.py", line 1692, in xzopen
raise CompressionError("lzma module is not available")
tarfile.CompressionError: lzma module is not available
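For reference, a simplified sketch of the guard in CPython's tarfile that produces this clearer message (the helper name below is made up and the exact code varies between Python versions). With `lzma` exempted from demandimport, a missing `_lzma` surfaces as an ImportError at import time and is converted here:

from tarfile import CompressionError

def _open_xz(fileobj, mode="r"):
    # Simplified sketch of tarfile.TarFile.xzopen's import guard.  With
    # `lzma` no longer lazily imported, a missing `_lzma` raises
    # ImportError right here instead of an AttributeError later on.
    try:
        import lzma
    except ImportError:
        raise CompressionError("lzma module is not available")
    return lzma.LZMAFile(fileobj, mode)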
Also, it prevents the error "abort: No module named '_lzma'!" from being shown when
a development warning is about to be displayed. The reason that happened is that, to
show the warning, we get information about the stack frames from the inspect
module, which accesses the `__file__` attribute of all modules in `sys.modules`
to build a cache, causing all modules (including `lzma`) to be imported.
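A rough sketch of that pattern, assuming only what inspect.getmodule() publicly does (walk sys.modules and read each module's `__file__` to build a file-to-module cache); the helper below is illustrative, not Mercurial code:

import sys

def _modules_by_file():
    # Illustrative version of the cache-building walk in inspect.getmodule():
    # it reads __file__ on every module in sys.modules.  Under a lazy
    # importer such as hgdemandimport, that attribute access forces the
    # deferred import of any not-yet-loaded module (previously `lzma` too).
    cache = {}
    for module in list(sys.modules.values()):
        filename = getattr(module, '__file__', None)  # may trigger the import
        if filename:
            cache[filename] = module.__name__
    return cache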
author    Manuel Jacob <me@manueljacob.de>
date      Wed, 08 Jul 2020 08:25:30 +0200
parents   687b865b95ad
children  89a2afe31e82
line source
# discovery.py - protocol changeset discovery functions
#
# Copyright 2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections

from .i18n import _
from .node import (
    nullid,
    short,
)
from . import (
    error,
    pycompat,
)


def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, fetch, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "fetch" is a list of roots of the nodes that would be incoming, to be
      supplied to changegroupsubset.
    "heads" is either the supplied heads, or else the remote's heads.
    """

    knownnode = repo.changelog.hasnode
    search = []
    fetch = set()
    seen = set()
    seenbranch = set()
    base = set()

    if not heads:
        with remote.commandexecutor() as e:
            heads = e.callcommand(b'heads', {}).result()

    if repo.changelog.tip() == nullid:
        base.add(nullid)
        if heads != [nullid]:
            return [nullid], [nullid], list(heads)
        return [nullid], [], heads

    # assume we're closer to the tip than the root
    # and start by examining the heads
    repo.ui.status(_(b"searching for changes\n"))

    unknown = []
    for h in heads:
        if not knownnode(h):
            unknown.append(h)
        else:
            base.add(h)

    if not unknown:
        return list(base), [], list(heads)

    req = set(unknown)
    reqcnt = 0
    progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    with remote.commandexecutor() as e:
        branches = e.callcommand(b'branches', {b'nodes': unknown}).result()

    unknown = collections.deque(branches)
    while unknown:
        r = []
        while unknown:
            n = unknown.popleft()
            if n[0] in seen:
                continue

            repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
            if n[0] == nullid:  # found the end of the branch
                pass
            elif n in seenbranch:
                repo.ui.debug(b"branch already found\n")
                continue
            elif n[1] and knownnode(n[1]):  # do we know the base?
                repo.ui.debug(
                    b"found incomplete branch %s:%s\n"
                    % (short(n[0]), short(n[1]))
                )
                search.append(n[0:2])  # schedule branch range for scanning
                seenbranch.add(n)
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if knownnode(n[2]) and knownnode(n[3]):
                        repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
                        fetch.add(n[1])  # earliest unknown
                    for p in n[2:4]:
                        if knownnode(p):
                            base.add(p)  # latest known

                for p in n[2:4]:
                    if p not in req and not knownnode(p):
                        r.append(p)
                        req.add(p)
            seen.add(n[0])

        if r:
            reqcnt += 1
            progress.increment()
            repo.ui.debug(
                b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
            )
            for p in pycompat.xrange(0, len(r), 10):
                with remote.commandexecutor() as e:
                    branches = e.callcommand(
                        b'branches', {b'nodes': r[p : p + 10],}
                    ).result()

                for b in branches:
                    repo.ui.debug(
                        b"received %s:%s\n" % (short(b[0]), short(b[1]))
                    )
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        newsearch = []
        reqcnt += 1
        progress.increment()

        with remote.commandexecutor() as e:
            between = e.callcommand(b'between', {b'pairs': search}).result()

        for n, l in zip(search, between):
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if knownnode(i):
                    if f <= 2:
                        repo.ui.debug(
                            b"found new branch changeset %s\n" % short(p)
                        )
                        fetch.add(p)
                        base.add(i)
                    else:
                        repo.ui.debug(
                            b"narrowed branch search to %s:%s\n"
                            % (short(p), short(i))
                        )
                        newsearch.append((p, i))
                    break
                p, f = i, f * 2
            search = newsearch

    # sanity check our fetch list
    for f in fetch:
        if knownnode(f):
            raise error.RepoError(_(b"already have changeset ") + short(f[:4]))

    base = list(base)
    if base == [nullid]:
        if force:
            repo.ui.warn(_(b"warning: repository is unrelated\n"))
        else:
            raise error.Abort(_(b"repository is unrelated"))

    repo.ui.debug(
        b"found new changesets starting at "
        + b" ".join([short(f) for f in fetch])
        + b"\n"
    )

    progress.complete()
    repo.ui.debug(b"%d total queries\n" % reqcnt)

    return base, list(fetch), heads
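A minimal usage sketch (not part of this file): the repository path and peer URL below are hypothetical, and these are internal Mercurial APIs that take bytes arguments and may change between releases; the point is only to show where the returned (base, fetch, heads) tuple comes from for a peer speaking the legacy discovery commands:

from mercurial import hg, treediscovery, ui as uimod

ui = uimod.ui.load()
repo = hg.repository(ui, b'.')                         # local repo (hypothetical path)
remote = hg.peer(ui, {}, b'https://example.com/repo')  # hypothetical peer URL

base, fetch, heads = treediscovery.findcommonincoming(repo, remote)
# base:  (at least) the heads of the common subset
# fetch: roots of the changesets that would be incoming
# heads: the remote's heads, or the heads passed in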