# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''remote largefile store; the base class for wirestore'''
from __future__ import absolute_import

from mercurial.i18n import _

from mercurial import (
    error,
    util,
)

from . import (
    basestore,
    lfutil,
    localstore,
)

urlerr = util.urlerr
urlreq = util.urlreq

class remotestore(basestore.basestore):
    '''a largefile store accessed over a network'''
    def __init__(self, ui, repo, url):
        super(remotestore, self).__init__(ui, repo, url)
        self._lstore = localstore.localstore(self.ui, self.repo, self.repo)

    def put(self, source, hash):
        '''Copy the largefile at path `source` to the remote store under
        `hash`; abort if the upload fails.'''
        if self.sendfile(source, hash):
            raise error.Abort(
                _('remotestore: could not put %s to remote store %s')
                % (source, util.hidepassword(self.url)))
        self.ui.debug(
            _('remotestore: put %s to remote store %s\n')
            % (source, util.hidepassword(self.url)))

    def exists(self, hashes):
        '''Map each hash to a boolean indicating whether the remote store
        has the corresponding largefile (a _stat status of 0 means
        available).'''
        return {h: s == 0 for h, s in self._stat(hashes).iteritems()}

    def sendfile(self, filename, hash):
        self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
        try:
            with lfutil.httpsendfile(self.ui, filename) as fd:
                return self._put(hash, fd)
        except IOError as e:
            raise error.Abort(
                _('remotestore: could not open file %s: %s')
                % (filename, str(e)))

    def _getfile(self, tmpfile, filename, hash):
        '''Fetch the content for `hash` from the remote store and copy it
        into `tmpfile`, translating transport errors into StoreError or
        Abort as appropriate.'''
        try:
            chunks = self._get(hash)
        except urlerr.httperror as e:
            # 401s get converted to error.Abort; any other HTTP error is
            # fine to report as a StoreError
            raise basestore.StoreError(filename, hash, self.url, str(e))
        except urlerr.urlerror as e:
            # This usually indicates a connection problem, so don't
            # keep trying with the other files... they will probably
            # all fail too.
            raise error.Abort('%s: %s' %
                             (util.hidepassword(self.url), e.reason))
        except IOError as e:
            raise basestore.StoreError(filename, hash, self.url, str(e))

        return lfutil.copyandhash(chunks, tmpfile)

    def _hashesavailablelocally(self, hashes):
        '''Return the subset of `hashes` already present in the local
        store.'''
        existslocallymap = self._lstore.exists(hashes)
        localhashes = [hash for hash in hashes if existslocallymap[hash]]
        return localhashes

    def _verifyfiles(self, contents, filestocheck):
        '''Verify the given (cset, filename, expectedhash) triples, using
        the local store for hashes available locally and _stat results
        from the remote store for the rest. Return True if any file
        failed verification.'''
        failed = False
        expectedhashes = [expectedhash
                          for cset, filename, expectedhash in filestocheck]
        localhashes = self._hashesavailablelocally(expectedhashes)
        stats = self._stat([expectedhash for expectedhash in expectedhashes
                            if expectedhash not in localhashes])

        for cset, filename, expectedhash in filestocheck:
            if expectedhash in localhashes:
                filetocheck = (cset, filename, expectedhash)
                verifyresult = self._lstore._verifyfiles(contents,
                                                         [filetocheck])
                if verifyresult:
                    failed = True
            else:
                stat = stats[expectedhash]
                if stat:
                    if stat == 1:
                        self.ui.warn(
                            _('changeset %s: %s: contents differ\n')
                            % (cset, filename))
                        failed = True
                    elif stat == 2:
                        self.ui.warn(
                            _('changeset %s: %s missing\n')
                            % (cset, filename))
                        failed = True
                    else:
                        raise RuntimeError('verify failed: unexpected response '
                                           'from statlfile (%r)' % stat)
        return failed

    def _put(self, hash, fd):
        '''Put the file with the given hash into the remote store.'''
        raise NotImplementedError('abstract method')

    def _get(self, hash):
        '''Get an iterator over the content with the given hash.'''
        raise NotImplementedError('abstract method')

    def _stat(self, hashes):
        '''Get information about the availability of the files specified
        by `hashes` in the remote store. Return a dictionary mapping each
        hash to a status code, where 0 means the file is available and
        non-zero values (interpreted in _verifyfiles above as 1: contents
        differ, 2: missing) mean it is not.'''
        raise NotImplementedError('abstract method')
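
# The sketch below is illustrative only, not part of largefiles: it shows,
# under assumed names, how a concrete subclass might satisfy the
# _put/_get/_stat contract defined above. A hypothetical in-memory dict
# stands in for the network transport that a real subclass such as
# wirestore talks to.
class _dictstore(remotestore):
    '''hypothetical store backed by a plain dict mapping hash -> bytes'''
    def __init__(self, ui, repo, url, data=None):
        super(_dictstore, self).__init__(ui, repo, url)
        self._data = data if data is not None else {}

    def _put(self, hash, fd):
        # store the uploaded content; a false return value means success,
        # since put() above treats any true value as a failure
        self._data[hash] = fd.read()
        return 0

    def _get(self, hash):
        # return an iterator of chunks, as _getfile() expects
        return iter([self._data[hash]])

    def _stat(self, hashes):
        # 0 = available, 2 = missing, matching the codes used by exists()
        # and _verifyfiles() above
        return {h: 0 if h in self._data else 2 for h in hashes}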