hgext/largefiles/basestore.py

# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''base class for store implementations and store-related utility code'''

import re

from mercurial import util, node, hg
from mercurial.i18n import _

import lfutil

class StoreError(Exception):
    '''Raised when there is a problem getting files from or putting
    files to a central store.'''
    def __init__(self, filename, hash, url, detail):
        self.filename = filename
        self.hash = hash
        self.url = url
        self.detail = detail

    def longmessage(self):
        return (_("error getting id %s from url %s for file %s: %s\n") %
                 (self.hash, util.hidepassword(self.url), self.filename,
                  self.detail))

    def __str__(self):
        return "%s: %s" % (util.hidepassword(self.url), self.detail)

class basestore(object):
    def __init__(self, ui, repo, url):
        self.ui = ui
        self.repo = repo
        self.url = url

    def put(self, source, hash):
        '''Put source file into the store so it can be retrieved by hash.'''
        raise NotImplementedError('abstract method')

    def exists(self, hashes):
        '''Check whether the store contains the given hashes. Given an
        iterable of hashes, return a mapping from hash to bool.'''
        raise NotImplementedError('abstract method')
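
    # Illustrative return shape for exists() (hashes shortened and made up):
    #
    #     store.exists(set(['d2a5...', 'ffff...']))
    #     -> {'d2a5...': True, 'ffff...': False}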

    def get(self, files):
        '''Get the specified largefiles from the store and write to local
        files under repo.root.  files is a list of (filename, hash)
        tuples.  Return (success, missing), lists of files successfully
        downloaded and those not found in the store.  success is a list
        of (filename, hash) tuples; missing is a list of filenames that
        we could not get.  (The detailed error message will already have
        been presented to the user, so missing is just supplied as a
        summary.)'''
        success = []
        missing = []
        ui = self.ui

        at = 0
        available = self.exists(set(hash for (_filename, hash) in files))
        for filename, hash in files:
            ui.progress(_('getting largefiles'), at, unit='lfile',
                total=len(files))
            at += 1
            ui.note(_('getting %s:%s\n') % (filename, hash))

            if not available.get(hash):
                ui.warn(_('%s: largefile %s not available from %s\n')
                        % (filename, hash, util.hidepassword(self.url)))
                missing.append(filename)
                continue

            if self._gethash(filename, hash):
                success.append((filename, hash))
            else:
                missing.append(filename)

        ui.progress(_('getting largefiles'), None)
        return (success, missing)
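
    # Typical call pattern (sketch; 'store' and 'wanted' are illustrative
    # names, not APIs defined in this module):
    #
    #     success, missing = store.get(wanted)   # wanted: [(filename, hash)]
    #     if missing:
    #         ...  # per-file errors were already reported by get() itself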

    def _gethash(self, filename, hash):
        """Get file with the provided hash and store it in the local repo's
        store and in the usercache.
        filename is for informational messages only.
        """
        util.makedirs(lfutil.storepath(self.repo, ''))
        storefilename = lfutil.storepath(self.repo, hash)

        tmpname = storefilename + '.tmp'
        tmpfile = util.atomictempfile(tmpname,
                                      createmode=self.repo.store.createmode)

        try:
            gothash = self._getfile(tmpfile, filename, hash)
        except StoreError as err:
            self.ui.warn(err.longmessage())
            gothash = ""
        tmpfile.close()

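        # An empty gothash means the download itself failed and the warning
        # was already printed above; a non-empty mismatch means the data we
        # received does not hash to what the standin expects (corruption).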
        if gothash != hash:
            if gothash != "":
                self.ui.warn(_('%s: data corruption (expected %s, got %s)\n')
                             % (filename, hash, gothash))
            util.unlink(tmpname)
            return False

        util.rename(tmpname, storefilename)
        lfutil.linktousercache(self.repo, hash)
        return True

    def verify(self, revs, contents=False):
        '''Verify the existence (and, optionally, contents) of every big
        file revision referenced by every changeset in revs.
        Return 0 if all is well, non-zero on any errors.'''
        failed = False

        self.ui.status(_('searching %d changesets for largefiles\n') %
                       len(revs))
        verified = set()                # set of (filename, filenode) tuples

        for rev in revs:
            cctx = self.repo[rev]
            cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))

            for standin in cctx:
                if self._verifyfile(cctx, cset, contents, standin, verified):
                    failed = True

        numrevs = len(verified)
        numlfiles = len(set(fname for (fname, fnode) in verified))
        if contents:
            self.ui.status(
                _('verified contents of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        else:
            self.ui.status(
                _('verified existence of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        return int(failed)
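
    # Sketch of how a caller might drive verify() (names are illustrative):
    #
    #     revs = [repo[r].rev() for r in revstocheck]
    #     if store.verify(revs, contents=True):
    #         ...  # at least one largefile revision was missing or corrupt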

    def _getfile(self, tmpfile, filename, hash):
        '''Fetch one revision of one file from the store and write it
        to tmpfile.  Compute the hash of the file on-the-fly as it
        downloads and return the hash.  Close tmpfile.  Raise
        StoreError if unable to download the file (e.g. it does not
        exist in the store).'''
        raise NotImplementedError('abstract method')

    def _verifyfile(self, cctx, cset, contents, standin, verified):
        '''Perform the actual verification of a file in the store.
        'cset' is only used in warnings.
        'contents' controls verification of content hash.
        'standin' is the standin path of the largefile to verify.
        'verified' is maintained as a set of already verified files.
        Note the inverted convention: return True if the file is a standin
        and any problem is found (that is, True means failure).
        '''
        raise NotImplementedError('abstract method')
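
# A minimal sketch of a concrete store (hypothetical; largefiles itself ships
# localstore and wirestore, imported below).  A subclass only needs to supply
# put(), exists(), _getfile() and _verifyfile(); the flat directory layout and
# the class name here are assumptions made purely for illustration, and the
# sketch leans on lfutil.copyandhash() to hash the data while copying it into
# tmpfile, as the bundled stores do.
#
#   import os, shutil
#
#   class dirstore(basestore):
#       '''Keep each largefile as a plain file named after its hash.'''
#       def put(self, source, hash):
#           util.makedirs(self.url)
#           shutil.copy(source, os.path.join(self.url, hash))
#
#       def exists(self, hashes):
#           return dict((h, os.path.exists(os.path.join(self.url, h)))
#                       for h in hashes)
#
#       def _getfile(self, tmpfile, filename, hash):
#           path = os.path.join(self.url, hash)
#           if not os.path.exists(path):
#               raise StoreError(filename, hash, self.url,
#                                _('file not found'))
#           fd = open(path, 'rb')
#           try:
#               # write the stream to tmpfile, returning the hash computed
#               # along the way
#               return lfutil.copyandhash(fd, tmpfile)
#           finally:
#               fd.close()
#
#       def _verifyfile(self, cctx, cset, contents, standin, verified):
#           return False    # inverted convention: False means "no problem"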

import localstore, wirestore

_storeprovider = {
    'file':  [localstore.localstore],
    'http':  [wirestore.wirestore],
    'https': [wirestore.wirestore],
    'ssh':   [wirestore.wirestore],
    }
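
# Illustrative only: a third-party extension could register a store class for
# an additional scheme by appending to this table, e.g. (hypothetical names):
#
#     _storeprovider.setdefault('s3', []).append(s3store.s3store)
#
# _openstore() tries the classes registered for a scheme in order and skips
# any that raise lfutil.storeprotonotcapable.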

_scheme_re = re.compile(r'^([a-zA-Z0-9+.-]+)://')
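# e.g. 'https://example.com/repo' yields scheme 'https'; a plain filesystem
# path contains no '://', does not match, and is treated as 'file' below.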

# During clone this function is passed the src's ui object
# but it needs the dest's ui object so it can read out of
# the config file. Use repo.ui instead.
def _openstore(repo, remote=None, put=False):
    ui = repo.ui

    if not remote:
        lfpullsource = getattr(repo, 'lfpullsource', None)
        if lfpullsource:
            path = ui.expandpath(lfpullsource)
        else:
            path = ui.expandpath('default-push', 'default')

        # ui.expandpath() leaves 'default-push' and 'default' alone if
        # they cannot be expanded: fall back to the empty string,
        # meaning the current directory.
        if path == 'default-push' or path == 'default':
            path = ''
            remote = repo
        else:
            path, _branches = hg.parseurl(path)
            remote = hg.peer(repo, {}, path)

    # The path could be a scheme so use Mercurial's normal functionality
    # to resolve the scheme to a repository and use its path
    path = remote.url() if util.safehasattr(remote, 'url') else remote.path

    match = _scheme_re.match(path)
    if not match:                       # regular filesystem path
        scheme = 'file'
    else:
        scheme = match.group(1)

    try:
        storeproviders = _storeprovider[scheme]
    except KeyError:
        raise util.Abort(_('unsupported URL scheme %r') % scheme)

    for classobj in storeproviders:
        try:
            return classobj(ui, repo, remote)
        except lfutil.storeprotonotcapable:
            pass

    raise util.Abort(_('%s does not appear to be a largefile store') %
                     util.hidepassword(path))
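
# Sketch of typical use (illustrative):
#
#     store = _openstore(repo)               # resolve default-push/default
#     store = _openstore(repo, remote=peer)  # or talk to an explicit peer
#
# Note that the 'put' argument is accepted but not consulted here when
# choosing a store class.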