changeset 3687:d5dd0a2a44bc
Handle locking exceptions if streaming clone can't lock the repo. (Issue324)
| author   | Thomas Arendsen Hein <thomas@intevation.de> |
|----------|---------------------------------------------|
| date     | Mon, 20 Nov 2006 19:41:49 +0100 |
| parents  | 4308f4cdc07b |
| children | d92dad355000 |
| files    | mercurial/localrepo.py mercurial/streamclone.py |
| diffstat | 2 files changed, 15 insertions(+), 6 deletions(-) |
```diff
--- a/mercurial/localrepo.py	Mon Nov 20 19:36:28 2006 +0100
+++ b/mercurial/localrepo.py	Mon Nov 20 19:41:49 2006 +0100
@@ -1825,8 +1825,12 @@
         except ValueError:
             raise util.UnexpectedOutput(
                 _('Unexpected response from remote server:'), l)
-        if resp != 0:
+        if resp == 1:
             raise util.Abort(_('operation forbidden by server'))
+        elif resp == 2:
+            raise util.Abort(_('locking the remote repository failed'))
+        elif resp != 0:
+            raise util.Abort(_('the server sent an unknown error code'))
         self.ui.status(_('streaming all changes\n'))
         l = fp.readline()
         try:
```
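For context, the client-side change above distinguishes the numeric status codes the streaming server writes on a single line before any data: 0 means the stream follows, 1 means streaming is forbidden, and the new code 2 means the server could not lock its repository. A minimal, standalone sketch of that decoding step is below; the function name, the `fp` argument, and the local `Abort` class are illustrative stand-ins, not Mercurial's actual API.

```python
# Illustrative sketch only: decode the one-line status code a streaming
# server sends before any file data, mirroring the logic added above.
# Abort here is a placeholder for mercurial's util.Abort.

class Abort(Exception):
    """Stand-in for util.Abort in this sketch."""

def check_stream_response(fp):
    line = fp.readline()
    try:
        resp = int(line)
    except ValueError:
        raise Abort('unexpected response from remote server: %r' % line)
    if resp == 1:
        raise Abort('operation forbidden by server')
    elif resp == 2:
        raise Abort('locking the remote repository failed')
    elif resp != 0:
        raise Abort('the server sent an unknown error code')
    # resp == 0: the server holds its lock and the file stream follows.
```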
```diff
--- a/mercurial/streamclone.py	Mon Nov 20 19:36:28 2006 +0100
+++ b/mercurial/streamclone.py	Mon Nov 20 19:41:49 2006 +0100
@@ -7,7 +7,7 @@
 
 from demandload import demandload
 from i18n import gettext as _
-demandload(globals(), "os stat util")
+demandload(globals(), "os stat util lock")
 
 # if server supports streaming clone, it advertises "stream"
 # capability with value that is version+flags of repo it is serving.
@@ -65,18 +65,23 @@
         fileobj.write('1\n')
         return
 
-    fileobj.write('0\n')
-
     # get consistent snapshot of repo. lock during scan so lock not
     # needed while we stream, and commits can happen.
-    lock = repo.lock()
+    try:
+        repolock = repo.lock()
+    except (lock.LockHeld, lock.LockUnavailable), inst:
+        repo.ui.warn('locking the repository failed: %s\n' % (inst,))
+        fileobj.write('2\n')
+        return
+
+    fileobj.write('0\n')
     repo.ui.debug('scanning\n')
    entries = []
     total_bytes = 0
     for name, size in walkrepo(repo.path):
         entries.append((name, size))
         total_bytes += size
-    lock.release()
+    repolock.release()
 
     repo.ui.debug('%d files, %d bytes to transfer\n' %
                   (len(entries), total_bytes))
```
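The server-side pattern above is: try to take the repository lock first, report the distinct error code `2\n` if that fails, and only write the success code `0\n` once the lock is held. A self-contained sketch of that pattern follows; the `acquire_lock` and `warn` callables and the local exception classes are placeholders for `repo.lock()`, `repo.ui.warn`, and the `mercurial.lock` exceptions, and the sketch uses modern `except ... as` syntax rather than the Python 2 form in the diff.

```python
# Illustrative sketch of the server-side behaviour after this change:
# acquire the lock before announcing success, and send '2\n' if the
# lock cannot be taken. The names below are assumptions for the sketch.

class LockHeld(Exception):
    pass

class LockUnavailable(Exception):
    pass

def announce_stream(fileobj, acquire_lock, warn):
    try:
        repolock = acquire_lock()
    except (LockHeld, LockUnavailable) as inst:
        warn('locking the repository failed: %s\n' % (inst,))
        fileobj.write('2\n')   # tell the client the lock could not be taken
        return None
    fileobj.write('0\n')       # lock held; safe to scan and stream
    return repolock            # caller releases it once the scan is done
```

The ordering matters: before this change the server wrote `0\n` unconditionally and only then tried to lock, so a locking failure surfaced to the client as a broken stream rather than a clear error message.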