chunkiter: handle large reads more efficiently
- for large reads, don't attempt to read more than necessary
- if we've gathered the exact number of bytes needed, avoid a string copy
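To illustrate the first point: the old computation rounds l up to a multiple of targetsize (and overshoots by a full extra chunk when l is already a multiple), while the new one gathers only what the read needs. A quick sketch, assuming the default targetsize of 65536:

    targetsize = 65536
    l = 1000000                                  # a large read request
    old = targetsize * ((l // targetsize) + 1)   # 1048576 bytes gathered
    new = max(l, targetsize)                     # 1000000 bytes gathered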
--- a/mercurial/util.py Thu Oct 11 00:46:51 2007 -0500
+++ b/mercurial/util.py Thu Oct 11 00:46:52 2007 -0500
@@ -1408,7 +1408,7 @@
         Returns less than L bytes if the iterator runs dry."""
         if l > len(self.buf) and self.iter:
-            # Clamp to a multiple of self.targetsize
-            targetsize = self.targetsize * ((l // self.targetsize) + 1)
+            # gather at least l bytes, but no less than one full chunk
+            targetsize = max(l, self.targetsize)
             collector = cStringIO.StringIO()
             collector.write(self.buf)
             collected = len(self.buf)
@@ -1420,7 +1420,10 @@
             if collected < targetsize:
                 self.iter = False
             self.buf = collector.getvalue()
-        s, self.buf = self.buf[:l], buffer(self.buf, l)
+        if len(self.buf) == l:
+            s, self.buf = self.buf, ''
+        else:
+            s, self.buf = self.buf[:l], buffer(self.buf, l)
         return s
 
 def filechunkiter(f, size=65536, limit=None):
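For context, a minimal sketch of the new exact-size path (Python 2; the constructor call is an assumption based on how chunkbuffer is used elsewhere in util.py):

    from mercurial import util

    cb = util.chunkbuffer(iter(['aaaa', 'bbbb']))
    s = cb.read(8)   # buffered data is exactly 8 bytes, so the new branch
                     # hands back self.buf itself and resets it to ''

A side benefit of the exact-size branch is that it drops the reference to the gathered string immediately, rather than keeping it alive through a buffer() view until the next read.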