view hgext/largefiles/remotestore.py @ 33649:377e8ddaebef stable
pathauditor: disable cache of audited paths by default (issue5628)
The initial attempt was to discard the cache when appropriate, but that turned
out to be error-prone. We had to carefully inspect every place where audit() is
called, e.g. without actually updating the filesystem, before removing files
and directories, etc.
So, this patch disables the cache of audited paths by default, and enables
it only for the following cases:
- short-lived auditor objects
- repo.vfs, repo.svfs, and repo.cachevfs, which are managed directories
and considered sort of append-only (a file/directory would never be
replaced with a symlink)
There would be more cacheable vfs objects (e.g. mq.queue.opener), but I
decided not to inspect all of them in this patch; caching can be enabled for
them later.
Benchmark results:
- using an old clone of http://selenic.com/repo/linux-2.6/ (38319 files)
- on tmpfs
- run HGRCPATH=/dev/null hg up -q --time tip && hg up -q null
- run 4 times and take the last three results
original:
real 7.480 secs (user 1.140+22.760 sys 0.150+1.690)
real 8.010 secs (user 1.070+22.280 sys 0.170+2.120)
real 7.470 secs (user 1.120+22.390 sys 0.120+1.910)
clearcache (the other series):
real 7.680 secs (user 1.120+23.420 sys 0.140+1.970)
real 7.670 secs (user 1.110+23.620 sys 0.130+1.810)
real 7.740 secs (user 1.090+23.510 sys 0.160+1.940)
enable cache only for vfs and svfs (this series):
real 8.730 secs (user 1.500+25.190 sys 0.260+2.260)
real 8.750 secs (user 1.490+25.170 sys 0.250+2.340)
real 9.010 secs (user 1.680+25.340 sys 0.280+2.540)
remove cache function entirely (for reference):
real 9.620 secs (user 1.440+27.120 sys 0.250+2.980)
real 9.420 secs (user 1.400+26.940 sys 0.320+3.130)
real 9.760 secs (user 1.530+27.270 sys 0.250+2.970)
author    Yuya Nishihara <yuya@tcha.org>
date      Wed, 26 Jul 2017 22:10:15 +0900
parents   736f92c44656
children  dcdc17551653
line source
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''remote largefile store; the base class for wirestore'''
from __future__ import absolute_import

from mercurial.i18n import _

from mercurial import (
    error,
    util,
    wireproto,
)

from . import (
    basestore,
    lfutil,
    localstore,
)

urlerr = util.urlerr
urlreq = util.urlreq

class remotestore(basestore.basestore):
    '''a largefile store accessed over a network'''
    def __init__(self, ui, repo, url):
        super(remotestore, self).__init__(ui, repo, url)
        self._lstore = localstore.localstore(self.ui, self.repo, self.repo)

    def put(self, source, hash):
        if self.sendfile(source, hash):
            raise error.Abort(
                _('remotestore: could not put %s to remote store %s')
                % (source, util.hidepassword(self.url)))
        self.ui.debug(
            _('remotestore: put %s to remote store %s\n')
            % (source, util.hidepassword(self.url)))

    def exists(self, hashes):
        return dict((h, s == 0) for (h, s) in # dict-from-generator
                    self._stat(hashes).iteritems())

    def sendfile(self, filename, hash):
        self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
        try:
            with lfutil.httpsendfile(self.ui, filename) as fd:
                return self._put(hash, fd)
        except IOError as e:
            raise error.Abort(
                _('remotestore: could not open file %s: %s')
                % (filename, str(e)))

    def _getfile(self, tmpfile, filename, hash):
        try:
            chunks = self._get(hash)
        except urlerr.httperror as e:
            # 401s get converted to error.Aborts; everything else is fine being
            # turned into a StoreError
            raise basestore.StoreError(filename, hash, self.url, str(e))
        except urlerr.urlerror as e:
            # This usually indicates a connection problem, so don't
            # keep trying with the other files... they will probably
            # all fail too.
            raise error.Abort('%s: %s'
                              % (util.hidepassword(self.url), e.reason))
        except IOError as e:
            raise basestore.StoreError(filename, hash, self.url, str(e))

        return lfutil.copyandhash(chunks, tmpfile)

    def _hashesavailablelocally(self, hashes):
        existslocallymap = self._lstore.exists(hashes)
        localhashes = [hash for hash in hashes
                       if existslocallymap[hash]]
        return localhashes

    def _verifyfiles(self, contents, filestocheck):
        failed = False
        expectedhashes = [expectedhash
                          for cset, filename, expectedhash in filestocheck]
        localhashes = self._hashesavailablelocally(expectedhashes)
        stats = self._stat([expectedhash for expectedhash in expectedhashes
                            if expectedhash not in localhashes])

        for cset, filename, expectedhash in filestocheck:
            if expectedhash in localhashes:
                filetocheck = (cset, filename, expectedhash)
                verifyresult = self._lstore._verifyfiles(contents,
                                                         [filetocheck])
                if verifyresult:
                    failed = True
            else:
                stat = stats[expectedhash]
                if stat:
                    if stat == 1:
                        self.ui.warn(
                            _('changeset %s: %s: contents differ\n')
                            % (cset, filename))
                        failed = True
                    elif stat == 2:
                        self.ui.warn(
                            _('changeset %s: %s missing\n')
                            % (cset, filename))
                        failed = True
                    else:
                        raise RuntimeError('verify failed: unexpected response '
                                           'from statlfile (%r)' % stat)
        return failed

    def batch(self):
        '''Support for remote batching.'''
        return wireproto.remotebatch(self)

    def _put(self, hash, fd):
        '''Put file with the given hash in the remote store.'''
        raise NotImplementedError('abstract method')

    def _get(self, hash):
        '''Get an iterator for content with the given hash.'''
        raise NotImplementedError('abstract method')

    def _stat(self, hashes):
        '''Get information about availability of files specified by
        hashes in the remote store. Return dictionary mapping hashes
        to return code where 0 means that file is available, other
        values if not.'''
        raise NotImplementedError('abstract method')
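For reference, here is a hedged sketch of what a concrete subclass has to
provide. The class name memorystore, its _blobs dictionary, and the use of
hashlib are invented for illustration; the real concrete implementation is
wirestore, which talks to a server over the largefiles wire protocol. The
sketch only shows the contracts the base class relies on: _put() returning a
falsy value on success (put() aborts otherwise), _get() returning an iterable
of byte chunks that _getfile() re-hashes via lfutil.copyandhash(), and _stat()
returning the 0/1/2 codes that _verifyfiles() and exists() interpret above.

    import hashlib

    class memorystore(remotestore):
        '''toy store keeping largefile contents in an in-memory dict'''
        def __init__(self, ui, repo, url):
            super(memorystore, self).__init__(ui, repo, url)
            self._blobs = {}  # hash (hex sha1) -> file contents

        def _put(self, hash, fd):
            # return 0 on success; put() raises error.Abort on a truthy result
            self._blobs[hash] = fd.read()
            return 0

        def _get(self, hash):
            # return an iterable of byte chunks; raising IOError here lets
            # _getfile() turn the failure into a basestore.StoreError
            if hash not in self._blobs:
                raise IOError(2, 'no such largefile: %s' % hash)
            return iter([self._blobs[hash]])

        def _stat(self, hashes):
            # 0: available, 1: contents differ, 2: missing -- the same codes
            # _verifyfiles() warns about above
            stats = {}
            for hash in hashes:
                data = self._blobs.get(hash)
                if data is None:
                    stats[hash] = 2
                elif hashlib.sha1(data).hexdigest() != hash:
                    stats[hash] = 1
                else:
                    stats[hash] = 0
            return stats

Because exists() maps each hash to (status == 0), such a subclass
automatically gets a correct exists() and _verifyfiles() for free once _stat()
honours these codes.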