# peer.py - repository base classes for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

from .i18n import _
from . import (
    error,
    util,
)

# abstract batching support

class future(object):
    '''placeholder for a value to be set later'''
    def set(self, value):
        if util.safehasattr(self, 'value'):
            raise error.RepoError("future is already set")
        self.value = value
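
# Illustrative sketch (not part of the original module): a future is a
# write-once container. The result is read through the plain 'value'
# attribute; a second set() raises error.RepoError.
#
#   f = future()
#   f.set('result')
#   f.value          # -> 'result'
#   f.set('again')   # raises error.RepoError("future is already set")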

class batcher(object):
    '''base class for batches of commands submittable in a single request

    All methods invoked on instances of this class are simply queued and
    return a future for the result. Once you call submit(), all the queued
    calls are performed and the results set in their respective futures.
    '''
    def __init__(self):
        self.calls = []
    def __getattr__(self, name):
        def call(*args, **opts):
            resref = future()
            self.calls.append((name, args, opts, resref,))
            return resref
        return call
    def submit(self):
        raise NotImplementedError()
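
# Illustrative sketch: attribute access on a batcher only records the call
# and hands back a future; nothing runs until a subclass's submit().
#
#   b = localbatch(somepeer)     # 'somepeer' is a hypothetical peer object
#   fut = b.listkeys('bookmarks')
#   b.calls                      # -> [('listkeys', ('bookmarks',), {}, fut)]
#   b.submit()                   # performs the call and fills fut.value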

class iterbatcher(batcher):

    def submit(self):
        raise NotImplementedError()

    def results(self):
        raise NotImplementedError()

class localbatch(batcher):
    '''performs the queued calls directly'''
    def __init__(self, local):
        batcher.__init__(self)
        self.local = local
    def submit(self):
        for name, args, opts, resref in self.calls:
            resref.set(getattr(self.local, name)(*args, **opts))
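
# Illustrative sketch: localbatch is what peerrepository.batch() returns, so
# on a local peer the queued calls all run at submit() time.
#
#   b = peer.batch()             # 'peer' is a hypothetical peerrepository
#   heads = b.heads()
#   keys = b.listkeys('bookmarks')
#   b.submit()                   # both calls run now, in queue order
#   heads.value, keys.value      # results are now available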

class localiterbatcher(iterbatcher):
    def __init__(self, local):
        super(localiterbatcher, self).__init__()
        self.local = local

    def submit(self):
        # submit for a local iterbatcher is a no-op
        pass

    def results(self):
        for name, args, opts, resref in self.calls:
            yield getattr(self.local, name)(*args, **opts)
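
# Illustrative sketch: for the local iterbatcher, submit() does nothing and
# the queued calls execute lazily, one per iteration of results(), in the
# order they were queued.
#
#   b = localiterbatcher(somepeer)   # 'somepeer' is a hypothetical peer
#   b.heads()
#   b.listkeys('bookmarks')
#   b.submit()
#   for res in b.results():          # heads() result first, then listkeys()
#       ...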

def batchable(f):
    '''annotation for batchable methods

    Such methods must implement a coroutine as follows:

    @batchable
    def sample(self, one, two=None):
        # Handle locally computable results first:
        if not one:
            yield "a local result", None
        # Build list of encoded arguments suitable for your wire protocol:
        encargs = [('one', encode(one),), ('two', encode(two),)]
        # Create future for injection of encoded result:
        encresref = future()
        # Return encoded arguments and future:
        yield encargs, encresref
        # The future is now assumed to hold the result from the batched
        # request. Decode it:
        yield decode(encresref.value)

    The decorator returns a function which wraps this coroutine as a plain
    method, but adds the original method as an attribute called "batchable",
    which is used by remotebatch to split the call into separate encoding and
    decoding phases.
    '''
    def plain(*args, **opts):
        batchable = f(*args, **opts)
        encargsorres, encresref = next(batchable)
        if not encresref:
            return encargsorres # a local result in this case
        self = args[0]
        encresref.set(self._submitone(f.func_name, encargsorres))
        return next(batchable)
    setattr(plain, 'batchable', f)
    return plain
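
# Illustrative sketch: a hypothetical batchable method on a peer that
# implements _submitone() (as wire protocol peers do). Called directly,
# plain() runs the coroutine up to its first yield, submits the encoded
# arguments, injects the encoded reply into the future, and resumes the
# coroutine to decode it.
#
#   class examplepeer(peerrepository):
#       @batchable
#       def lookup(self, key):
#           encargs = [('key', key)]
#           encresref = future()
#           yield encargs, encresref
#           yield decodenode(encresref.value)   # decodenode() is hypothetical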

class peerrepository(object):

    def batch(self):
        return localbatch(self)

    def iterbatch(self):
        """Batch requests but allow iterating over the results.

        This is to allow interleaving responses with things like
        progress updates for clients.
        """
        return localiterbatcher(self)
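
    # Illustrative sketch: typical iterbatch() usage. Consuming results()
    # one response at a time lets a client interleave its own work (for
    # example progress output) with the replies.
    #
    #   b = peer.iterbatch()
    #   b.heads()
    #   b.listkeys('bookmarks')
    #   b.submit()
    #   for res in b.results():
    #       ...                      # handle each reply as it arrives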

    def capable(self, name):
        '''tell whether repo supports named capability.
        return False if not supported.
        if boolean capability, return True.
        if string capability, return string.'''
        caps = self._capabilities()
        if name in caps:
            return True
        name_eq = name + '='
        for cap in caps:
            if cap.startswith(name_eq):
                return cap[len(name_eq):]
        return False
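
    # Illustrative sketch: given a hypothetical capability set
    #   caps = set(['lookup', 'bundle2=HG20%0Achangegroup%3D01%2C02'])
    # capable('lookup') returns True (boolean capability),
    # capable('bundle2') returns 'HG20%0Achangegroup%3D01%2C02' (string
    # capability), and capable('unbundle') returns False.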

    def requirecap(self, name, purpose):
        '''raise an exception if the given capability is not present'''
        if not self.capable(name):
            raise error.CapabilityError(
                _('cannot %s; remote repository does not '
                  'support the %r capability') % (purpose, name))
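
    # Illustrative sketch: a caller that needs a capability can fail early
    # with a descriptive error, e.g.
    #   peer.requirecap('pushkey', _('exchange bookmarks'))
    # which raises error.CapabilityError if 'pushkey' is not advertised.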

    def local(self):
        '''return peer as a localrepo, or None'''
        return None

    def peer(self):
        return self

    def canpush(self):
        return True

    def close(self):
        pass