changeset 50634:1415e17ea20f

stream-clone: pre-indent some code This makes the next changeset clearer.
author Pierre-Yves David <pierre-yves.david@octobus.net>
date Mon, 29 May 2023 04:12:30 +0200
parents f2ae815ae34c
children 3416b46320dc
files mercurial/streamclone.py
diffstat 1 files changed, 30 insertions(+), 31 deletions(-) [+]
line wrap: on
line diff
--- a/mercurial/streamclone.py	Sun May 28 04:12:10 2023 +0200
+++ b/mercurial/streamclone.py	Mon May 29 04:12:30 2023 +0200
@@ -644,40 +644,39 @@
         totalbytecount = 0
 
         for src, name, ftype, data in entries:
-            vfs = vfsmap[src]
-            yield src
-            yield util.uvarintencode(len(name))
-            if ftype == _fileappend:
-                fp = vfs(name)
-                size = data
-            elif ftype == _filefull:
-                fp = open(data, b'rb')
-                size = util.fstat(fp).st_size
-            bytecount = 0
-            try:
-                yield util.uvarintencode(size)
-                yield name
-                if size <= 65536:
-                    chunks = (fp.read(size),)
-                else:
-                    chunks = util.filechunkiter(fp, limit=size)
-                for chunk in chunks:
-                    bytecount += len(chunk)
-                    totalbytecount += len(chunk)
-                    progress.update(totalbytecount)
-                    yield chunk
-                if bytecount != size:
-                    # Would most likely be caused by a race due to `hg strip` or
-                    # a revlog split
-                    raise error.Abort(
-                        _(
+            if True:
+                vfs = vfsmap[src]
+                yield src
+                yield util.uvarintencode(len(name))
+                if ftype == _fileappend:
+                    fp = vfs(name)
+                    size = data
+                elif ftype == _filefull:
+                    fp = open(data, b'rb')
+                    size = util.fstat(fp).st_size
+                bytecount = 0
+                try:
+                    yield util.uvarintencode(size)
+                    yield name
+                    if size <= 65536:
+                        chunks = (fp.read(size),)
+                    else:
+                        chunks = util.filechunkiter(fp, limit=size)
+                    for chunk in chunks:
+                        bytecount += len(chunk)
+                        totalbytecount += len(chunk)
+                        progress.update(totalbytecount)
+                        yield chunk
+                    if bytecount != size:
+                        # Would most likely be caused by a race due to `hg
+                        # strip` or a revlog split
+                        msg = _(
                             b'clone could only read %d bytes from %s, but '
                             b'expected %d bytes'
                         )
-                        % (bytecount, name, size)
-                    )
-            finally:
-                fp.close()
+                        raise error.Abort(msg % (bytecount, name, size))
+                finally:
+                    fp.close()
 
 
 def _test_sync_point_walk_1(repo):