# HG changeset patch
# User Martin von Zweigbergk
# Date 1529268538 25200
# Node ID 800f5a2c869ef36a9d73c42d9cb032cf70667798
# Parent  ef692614e601e9773a4831483b82bddb683339c1
progress: make the progress helper a context manager

This lets us simplify the use site in streamclone.

Differential Revision: https://phab.mercurial-scm.org/D3775

diff -r ef692614e601 -r 800f5a2c869e mercurial/scmutil.py
--- a/mercurial/scmutil.py	Sun Jun 17 22:13:41 2018 -0700
+++ b/mercurial/scmutil.py	Sun Jun 17 13:48:58 2018 -0700
@@ -1293,6 +1293,12 @@
         self.unit = unit
         self.total = total
 
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_value, exc_tb):
+        self.complete()
+
     def update(self, pos, item="", total=None):
         if total:
             self.total = total
diff -r ef692614e601 -r 800f5a2c869e mercurial/streamclone.py
--- a/mercurial/streamclone.py	Sun Jun 17 22:13:41 2018 -0700
+++ b/mercurial/streamclone.py	Sun Jun 17 13:48:58 2018 -0700
@@ -495,38 +495,35 @@
     progress = repo.ui.makeprogress(_('bundle'), total=totalfilesize,
                                     unit=_('bytes'))
     progress.update(0)
-    with maketempcopies() as copy:
-        try:
-            # copy is delayed until we are in the try
-            entries = [_filterfull(e, copy, vfsmap) for e in entries]
-            yield None # this release the lock on the repository
-            seen = 0
+    with maketempcopies() as copy, progress:
+        # copy is delayed until we are in the try
+        entries = [_filterfull(e, copy, vfsmap) for e in entries]
+        yield None # this release the lock on the repository
+        seen = 0
 
-            for src, name, ftype, data in entries:
-                vfs = vfsmap[src]
-                yield src
-                yield util.uvarintencode(len(name))
-                if ftype == _fileappend:
-                    fp = vfs(name)
-                    size = data
-                elif ftype == _filefull:
-                    fp = open(data, 'rb')
-                    size = util.fstat(fp).st_size
-                try:
-                    yield util.uvarintencode(size)
-                    yield name
-                    if size <= 65536:
-                        chunks = (fp.read(size),)
-                    else:
-                        chunks = util.filechunkiter(fp, limit=size)
-                    for chunk in chunks:
-                        seen += len(chunk)
-                        progress.update(seen)
-                        yield chunk
-                finally:
-                    fp.close()
-        finally:
-            progress.complete()
+        for src, name, ftype, data in entries:
+            vfs = vfsmap[src]
+            yield src
+            yield util.uvarintencode(len(name))
+            if ftype == _fileappend:
+                fp = vfs(name)
+                size = data
+            elif ftype == _filefull:
+                fp = open(data, 'rb')
+                size = util.fstat(fp).st_size
+            try:
+                yield util.uvarintencode(size)
+                yield name
+                if size <= 65536:
+                    chunks = (fp.read(size),)
+                else:
+                    chunks = util.filechunkiter(fp, limit=size)
+                for chunk in chunks:
+                    seen += len(chunk)
+                    progress.update(seen)
+                    yield chunk
+            finally:
+                fp.close()
 
 def generatev2(repo):
     """Emit content for version 2 of a streaming clone.
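
Illustration (not part of the patch): a minimal, self-contained sketch of the
pattern this change introduces. The class below is hypothetical and only
mirrors the shape of the real helper in mercurial/scmutil.py. As in the patch,
__enter__ returns nothing, so callers keep using the object they already
created rather than rebinding it with "as", and __exit__ guarantees that
complete() runs even if the body raises, which is what lets the streamclone
call site drop its try/finally.

    class progresshelper(object):
        """Hypothetical stand-in for the scmutil progress helper."""
        def __init__(self, topic, unit="", total=None):
            self.topic = topic
            self.unit = unit
            self.total = total

        def __enter__(self):
            # Intentionally returns nothing; callers reuse the existing object.
            pass

        def __exit__(self, exc_type, exc_value, exc_tb):
            # Cleanup runs on both normal exit and exceptions.
            self.complete()

        def update(self, pos, item="", total=None):
            if total:
                self.total = total
            print("%s: %d/%s %s" % (self.topic, pos, self.total, self.unit))

        def complete(self):
            print("%s: done" % self.topic)

    # Usage mirroring the new streamclone call site: create the helper first,
    # report an initial position, then enter it as a context manager so the
    # final complete() call is automatic.
    progress = progresshelper('bundle', unit='bytes', total=3)
    progress.update(0)
    with progress:
        for seen in (1, 2, 3):
            progress.update(seen)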