progress: make the progress helper a context manager
This lets us simplify the use site in streamclone.
Differential Revision: https://phab.mercurial-scm.org/D3775
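For illustration, a minimal sketch of the new usage pattern, reusing names from the
streamclone hunk below (here "chunks" stands in for any iterable of byte strings);
the with block replaces the explicit try/finally that previously guaranteed
progress.complete() ran:

    progress = repo.ui.makeprogress(_('bundle'), total=totalfilesize,
                                    unit=_('bytes'))
    seen = 0
    with progress:
        # __exit__ calls progress.complete(), even if the loop raises
        for chunk in chunks:
            seen += len(chunk)
            progress.update(seen)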
--- a/mercurial/scmutil.py Sun Jun 17 22:13:41 2018 -0700
+++ b/mercurial/scmutil.py Sun Jun 17 13:48:58 2018 -0700
@@ -1293,6 +1293,12 @@
self.unit = unit
self.total = total
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, exc_tb):
+ self.complete()
+
def update(self, pos, item="", total=None):
if total:
self.total = total
--- a/mercurial/streamclone.py Sun Jun 17 22:13:41 2018 -0700
+++ b/mercurial/streamclone.py Sun Jun 17 13:48:58 2018 -0700
@@ -495,38 +495,35 @@
progress = repo.ui.makeprogress(_('bundle'), total=totalfilesize,
unit=_('bytes'))
progress.update(0)
- with maketempcopies() as copy:
- try:
- # copy is delayed until we are in the try
- entries = [_filterfull(e, copy, vfsmap) for e in entries]
- yield None # this release the lock on the repository
- seen = 0
+ with maketempcopies() as copy, progress:
+ # copy is delayed until we are in the with block
+ entries = [_filterfull(e, copy, vfsmap) for e in entries]
+ yield None # this releases the lock on the repository
+ seen = 0
- for src, name, ftype, data in entries:
- vfs = vfsmap[src]
- yield src
- yield util.uvarintencode(len(name))
- if ftype == _fileappend:
- fp = vfs(name)
- size = data
- elif ftype == _filefull:
- fp = open(data, 'rb')
- size = util.fstat(fp).st_size
- try:
- yield util.uvarintencode(size)
- yield name
- if size <= 65536:
- chunks = (fp.read(size),)
- else:
- chunks = util.filechunkiter(fp, limit=size)
- for chunk in chunks:
- seen += len(chunk)
- progress.update(seen)
- yield chunk
- finally:
- fp.close()
- finally:
- progress.complete()
+ for src, name, ftype, data in entries:
+ vfs = vfsmap[src]
+ yield src
+ yield util.uvarintencode(len(name))
+ if ftype == _fileappend:
+ fp = vfs(name)
+ size = data
+ elif ftype == _filefull:
+ fp = open(data, 'rb')
+ size = util.fstat(fp).st_size
+ try:
+ yield util.uvarintencode(size)
+ yield name
+ if size <= 65536:
+ chunks = (fp.read(size),)
+ else:
+ chunks = util.filechunkiter(fp, limit=size)
+ for chunk in chunks:
+ seen += len(chunk)
+ progress.update(seen)
+ yield chunk
+ finally:
+ fp.close()
def generatev2(repo):
"""Emit content for version 2 of a streaming clone.