view mercurial/setdiscovery.py @ 16719:e7bf09acd410

localrepo: add branchtip() method for faster single-branch lookups

For the PyPy repo with 744 branches and 843 branch heads, this brings
hg log -r default over NFS from:

CallCount  Recursive  Total(ms)  Inline(ms)  module:lineno(function)
3249       0          1.3222     1.3222      <open>
3244       0          0.6211     0.6211      <method 'close' of 'file' objects>
3243       0          0.0800     0.0800      <method 'read' of 'file' objects>
3241       0          0.0660     0.0660      <method 'seek' of 'file' objects>
3905       0          0.0476     0.0476      <zlib.decompress>
3281       0          2.6756     0.0472      mercurial.changelog:182(read)
+3281      0          2.5256     0.0453      +mercurial.revlog:881(revision)
+3276      0          0.0389     0.0196      +mercurial.changelog:28(decodeextra)
+6562      0          0.0123     0.0123      +<method 'split' of 'str' objects>
+6562      0          0.0408     0.0073      +mercurial.encoding:61(tolocal)
+3281      0          0.0054     0.0054      +<method 'index' of 'str' objects>
3241       0          2.2464     0.0456      mercurial.revlog:818(_loadchunk)
+3241      0          0.6205     0.6205      +<method 'close' of 'file' objects>
+3241      0          0.0765     0.0765      +<method 'read' of 'file' objects>
+3241      0          0.0660     0.0660      +<method 'seek' of 'file' objects>
+3241      0          1.4209     0.0135      +mercurial.store:374(__call__)
+3241      0          0.0122     0.0107      +mercurial.revlog:810(_addchunk)
3281       0          2.5256     0.0453      mercurial.revlog:881(revision)
+3280      0          0.0175     0.0175      +mercurial.revlog:305(rev)
+3281      0          2.2819     0.0119      +mercurial.revlog:847(_chunkraw)
+3281      0          0.0603     0.0083      +mercurial.revlog:945(_checkhash)
+3281      0          0.0051     0.0051      +mercurial.revlog:349(flags)
+3281      0          0.0040     0.0040      +<mercurial.mpatch.patches>
13682      0          0.0479     0.0248      <method 'decode' of 'str' objects>
+7418      0          0.0228     0.0076      +encodings.utf_8:15(decode)
+1         0          0.0003     0.0000      +encodings:71(search_function)
3248       0          1.3995     0.0246      mercurial.scmutil:218(__call__)
+3248      0          1.3222     1.3222      +<open>
+3248      0          0.0235     0.0184      +os.path:80(split)
+3248      0          0.0084     0.0068      +mercurial.scmutil:92(__call__)

Time: real 2.750 secs (user 0.680+0.000 sys 0.360+0.000)

down to:

CallCount  Recursive  Total(ms)  Inline(ms)  module:lineno(function)
55         31         0.0197     0.0163      <__import__>
+1         0          0.0006     0.0002      +mercurial.context:8(<module>)
+1         0          0.0042     0.0001      +mercurial.revlog:12(<module>)
+1         0          0.0002     0.0001      +mercurial.match:8(<module>)
+1         0          0.0003     0.0001      +mercurial.dirstate:7(<module>)
+1         0          0.0057     0.0001      +mercurial.changelog:8(<module>)
1          0          0.0117     0.0032      mercurial.localrepo:525(_readbranchcache)
+844       0          0.0015     0.0015      +<binascii.unhexlify>
+845       0          0.0010     0.0010      +<method 'split' of 'str' objects>
+843       0          0.0045     0.0009      +mercurial.encoding:61(tolocal)
+843       0          0.0004     0.0004      +<method 'setdefault' of 'dict' objects>
+1         0          0.0003     0.0003      +<method 'close' of 'file' objects>
3          0          0.0029     0.0029      <method 'read' of 'file' objects>
9          0          0.0018     0.0018      <open>
990        0          0.0017     0.0017      <binascii.unhexlify>
53         0          0.0016     0.0016      mercurial.demandimport:43(__init__)
862        0          0.0015     0.0015      <_codecs.utf_8_decode>
862        0          0.0037     0.0014      <method 'decode' of 'str' objects>
+862       0          0.0023     0.0008      +encodings.utf_8:15(decode)
981        0          0.0011     0.0011      <method 'split' of 'str' objects>
861        0          0.0046     0.0009      mercurial.encoding:61(tolocal)
+861       0          0.0037     0.0014      +<method 'decode' of 'str' objects>
862        0          0.0023     0.0008      encodings.utf_8:15(decode)
+862       0          0.0015     0.0015      +<_codecs.utf_8_decode>
4          0          0.0008     0.0008      <method 'close' of 'file' objects>
179        154        0.0202     0.0004      mercurial.demandimport:83(__getattribute__)
+36        11         0.0199     0.0003      +mercurial.demandimport:55(_load)
+72        0          0.0001     0.0001      +mercurial.demandimport:83(__getattribute__)
+36        0          0.0000     0.0000      +<getattr>
1          0          0.0015     0.0004      mercurial.tags:148(_readtagcache)

Time: real 0.060 secs (user 0.030+0.000 sys 0.010+0.000)
author Brodie Rao <brodie@sf.io>
date Sun, 13 May 2012 14:04:04 +0200
parents 525fdb738975
children cafd8a8fb713
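
The description above attributes the speedup to answering a single-branch
lookup from the branch cache (one small file read) instead of decompressing
thousands of changelog revisions. A minimal sketch of that idea, assuming
localrepo's branchmap() maps each branch name to its list of head nodes
(hypothetical helper shown here, not the changeset's actual code):

    from mercurial import error

    def branchtip(repo, branch):
        # the branch cache is read once; no changelog revisions are
        # decompressed for the lookup
        heads = repo.branchmap().get(branch)
        if not heads:
            raise error.RepoLookupError("unknown branch '%s'" % branch)
        # take the last head as the branch tip (ignoring closed heads
        # for brevity)
        return heads[-1]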

# setdiscovery.py - improved discovery of common nodeset for mercurial
#
# Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
# and Peter Arrenbrecht <peter@arrenbrecht.ch>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import nullid
from i18n import _
import random, collections, util, dagutil
import phases

def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
    # if nodes is empty we scan the entire graph
    if nodes:
        heads = dag.headsetofconnecteds(nodes)
    else:
        heads = dag.heads()
    dist = {}
    visit = collections.deque(heads)
    seen = set()
    factor = 1
    while visit:
        curr = visit.popleft()
        if curr in seen:
            continue
        d = dist.setdefault(curr, 1)
        if d > factor:
            factor *= 2
        if d == factor:
            if curr not in always: # need this check for the early exit below
                sample.add(curr)
                if quicksamplesize and (len(sample) >= quicksamplesize):
                    return
        seen.add(curr)
        for p in dag.parents(curr):
            if not nodes or p in nodes:
                dist.setdefault(p, d + 1)
                visit.append(p)
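
# Descriptive note (not in the original module): `dist` starts at 1 for the
# heads and `factor` doubles whenever the current distance passes it, so the
# nodes added to `sample` are roughly those whose distance from a head is a
# power of two (1, 2, 4, 8, ...); samples thin out exponentially the farther
# the walk gets from the heads.  Assuming a revlogdag over a changelog:
#
#   dag = dagutil.revlogdag(repo.changelog)
#   sample = set()
#   _updatesample(dag, None, sample, always=set())
#   # with quicksamplesize=N the walk stops after N nodes are sampled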

def _setupsample(dag, nodes, size):
    if len(nodes) <= size:
        return set(nodes), None, 0
    always = dag.headsetofconnecteds(nodes)
    desiredlen = size - len(always)
    if desiredlen <= 0:
        # This could be bad if there are very many heads, all unknown to the
        # server. We're counting on long request support here.
        return always, None, desiredlen
    return always, set(), desiredlen
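
# Descriptive note (not in the original module): the three return shapes are
#   (set(nodes), None, 0)          -- everything fits, no sampling needed
#   (always, None, desiredlen<=0)  -- the heads alone exhaust the budget
#   (always, set(), desiredlen)    -- room for desiredlen samples beyond heads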

def _takequicksample(dag, nodes, size, initial):
    always, sample, desiredlen = _setupsample(dag, nodes, size)
    if sample is None:
        return always
    if initial:
        fromset = None
    else:
        fromset = nodes
    _updatesample(dag, fromset, sample, always, quicksamplesize=desiredlen)
    sample.update(always)
    return sample

def _takefullsample(dag, nodes, size):
    always, sample, desiredlen = _setupsample(dag, nodes, size)
    if sample is None:
        return always
    # update from heads
    _updatesample(dag, nodes, sample, always)
    # update from roots
    _updatesample(dag.inverse(), nodes, sample, always)
    assert sample
    if len(sample) > desiredlen:
        sample = set(random.sample(sample, desiredlen))
    elif len(sample) < desiredlen:
        more = desiredlen - len(sample)
        sample.update(random.sample(list(nodes - sample - always), more))
    sample.update(always)
    return sample
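
# Descriptive comparison (not in the original module): a quick sample only
# walks down from the heads of `nodes`, while a full sample also walks the
# inverted dag up from its roots, then trims or pads the result with random
# picks so it lands on the requested size:
#
#   sample = _takequicksample(dag, undecided, size=100, initial=True)
#   sample = _takefullsample(dag, undecided, size=200)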

def findcommonheads(ui, local, remote,
                    initialsamplesize=100,
                    fullsamplesize=200,
                    abortwhenunrelated=True):
    '''Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.
    '''
    roundtrips = 0
    cl = local.changelog
    dag = dagutil.revlogdag(cl)

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    ownheads = dag.heads()
    sample = ownheads
    if remote.local():
        # stopgap until we have a proper localpeer that supports batch()
        srvheadhashes = phases.visibleheads(remote)
        yesno = remote.known(dag.externalizeall(sample))
    elif remote.capable('batch'):
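        # a batch-capable peer lets the heads and known queries share a
        # single round trip over the wire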
        batch = remote.batch()
        srvheadhashesref = batch.heads()
        yesnoref = batch.known(dag.externalizeall(sample))
        batch.submit()
        srvheadhashes = srvheadhashesref.value
        yesno = yesnoref.value
    else:
        # compatibility with pre-batch, but post-known remotes during 1.9 devel
        srvheadhashes = remote.heads()
        sample = []

    if cl.tip() == nullid:
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
    if len(srvheads) == len(srvheadhashes):
        ui.debug("all remote heads known locally\n")
        return (srvheadhashes, False, srvheadhashes,)

    if sample and util.all(yesno):
        ui.note(_("all local heads known remotely\n"))
        ownheadhashes = dag.externalizeall(ownheads)
        return (ownheadhashes, True, srvheadhashes,)

    # full blown discovery

    # own nodes where I don't know if remote knows them
    undecided = dag.nodeset()
    # own nodes I know we both know
    common = set()
    # own nodes I know remote lacks
    missing = set()

    # treat remote heads (and maybe own heads) as a first implicit sample
    # response
    common.update(dag.ancestorset(srvheads))
    undecided.difference_update(common)

    full = False
    while undecided:

        if sample:
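            # a "yes" for a sampled node means the remote has it and hence
            # all of its ancestors; a "no" means it also lacks every
            # descendant of that node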
            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
            common.update(dag.ancestorset(commoninsample, common))

            missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
            missing.update(dag.descendantset(missinginsample, missing))

            undecided.difference_update(missing)
            undecided.difference_update(common)

        if not undecided:
            break

        if full:
            ui.note(_("sampling from both directions\n"))
            sample = _takefullsample(dag, undecided, size=fullsamplesize)
        elif common:
            # use cheapish initial sample
            ui.debug("taking initial sample\n")
            sample = _takefullsample(dag, undecided, size=fullsamplesize)
        else:
            # use even cheaper initial sample
            ui.debug("taking quick initial sample\n")
            sample = _takequicksample(dag, undecided, size=initialsamplesize,
                                      initial=True)

        roundtrips += 1
        ui.progress(_('searching'), roundtrips, unit=_('queries'))
        ui.debug("query %i; still undecided: %i, sample size is: %i\n"
                 % (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)
        yesno = remote.known(dag.externalizeall(sample))
        full = True

    result = dag.headsetofconnecteds(common)
    ui.progress(_('searching'), None)
    ui.debug("%d total queries\n" % roundtrips)

    if not result and srvheadhashes != [nullid]:
        if abortwhenunrelated:
            raise util.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
        return (set([nullid]), True, srvheadhashes,)

    anyincoming = (srvheadhashes != [nullid])
    return dag.externalizeall(result), anyincoming, srvheadhashes
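
# Typical call site (illustrative; this mirrors how the discovery code is
# expected to drive the module, with `remote` a peer repository object):
#
#   common, anyinc, srvheads = findcommonheads(repo.ui, repo, remote)
#   # common:   hashes of the heads of the set of nodes both sides have
#   # anyinc:   True if the remote has changesets the local repo lacks
#   # srvheads: the head hashes advertised by the remote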