--- a/mercurial/httprepo.py Wed Aug 04 13:21:11 2010 -0500
+++ b/mercurial/httprepo.py Fri Aug 06 12:59:13 2010 -0500
@@ -17,7 +17,9 @@
zd = zlib.decompressobj()
try:
for chunk in util.filechunkiter(f):
- yield zd.decompress(chunk)
+ while chunk:
+ yield zd.decompress(chunk, 2**18)
+ chunk = zd.unconsumed_tail
except httplib.HTTPException:
raise IOError(None, _('connection ended unexpectedly'))
yield zd.flush()
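
The httprepo hunk above caps how much decompressed data one zlib call may produce: decompress() is passed a max_length of 2**18 bytes and is looped over unconsumed_tail until the incoming chunk is drained, so a highly compressible response body can no longer balloon into a single enormous string. Below is a minimal standalone sketch of the same pattern; the function name, the max_out parameter, and the sample payload are illustrative, not part of the patch.

import zlib

def decompress_stream(chunks, max_out=2**18):
    # One decompressobj for the whole stream; never ask it for more than
    # max_out bytes of output at a time. Input it has not consumed yet is
    # kept in zd.unconsumed_tail for the next pass of the inner loop.
    zd = zlib.decompressobj()
    for chunk in chunks:
        while chunk:
            yield zd.decompress(chunk, max_out)
            chunk = zd.unconsumed_tail
    yield zd.flush()

# 16 MB of zero bytes compresses to a few KB; without the max_length cap,
# one decompress() call would materialize all 16 MB at once.
payload = zlib.compress(b'\0' * (16 * 1024 * 1024))
assert sum(len(p) for p in decompress_stream([payload])) == 16 * 1024 * 1024
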
--- a/mercurial/merge.py Wed Aug 04 13:21:11 2010 -0500
+++ b/mercurial/merge.py Fri Aug 06 12:59:13 2010 -0500
@@ -323,6 +323,7 @@
repo.ui.note(_("getting %s\n") % f)
t = mctx.filectx(f).data()
repo.wwrite(f, t, flags)
+ t = None
updated += 1
if f == '.hgsubstate': # subrepo states need updating
subrepo.submerge(repo, wctx, mctx, wctx)
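
In the merge.py hunk, t holds the entire contents of the file that was just written; rebinding it to None right after wwrite() drops the last reference, so CPython's reference counting can release that buffer before the next file in the loop is read. Without it, the previous file's data stays alive until t is reassigned on the following iteration, which roughly doubles peak memory when two large files are updated back to back. A minimal sketch of the effect, with made-up helper names:

def update_files(names, read_new, write_working_copy):
    # Same shape as the "getting %s" loop above, reduced to the essentials.
    for name in names:
        data = read_new(name)           # may be hundreds of MB
        write_working_copy(name, data)
        data = None                     # free the buffer now rather than at
                                        # the next rebinding of data
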
--- a/mercurial/revlog.py Wed Aug 04 13:21:11 2010 -0500
+++ b/mercurial/revlog.py Fri Aug 06 12:59:13 2010 -0500
@@ -1041,6 +1041,9 @@
base = self._cache[1]
text = self._cache[2]
+ # drop cache to save memory
+ self._cache = None
+
self._loadindex(base, rev + 1)
self._chunkraw(base, rev)
if text is None:
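
In the revlog.py hunk, the cached base text has already been copied into the local text variable, so clearing self._cache loses nothing; it only stops the cache from pinning a full revision text while the delta chain is applied on top of it, where otherwise the old cached text and the newly reconstructed text would coexist in memory. A simplified sketch of the idea follows; it is not the real revlog code, and apply_delta stands in for the actual patching routine.

class revcache(object):
    # Toy cache holding a (node, rev, text) tuple, nothing more.
    def __init__(self, node, rev, text):
        self._cache = (node, rev, text)

    def rebuild(self, deltas, apply_delta):
        text = self._cache[2]
        # Drop the cache so the old full text is reachable only through the
        # local name and can be freed once the first delta replaces it.
        self._cache = None
        for delta in deltas:
            text = apply_delta(text, delta)
        return text
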
--- a/mercurial/util.py Wed Aug 04 13:21:11 2010 -0500
+++ b/mercurial/util.py Fri Aug 06 12:59:13 2010 -0500
@@ -925,30 +925,36 @@
else:
yield chunk
self.iter = splitbig(in_iter)
- self.buf = ''
+ self._queue = []
def read(self, l):
"""Read L bytes of data from the iterator of chunks of data.
Returns less than L bytes if the iterator runs dry."""
- if l > len(self.buf) and self.iter:
- # Clamp to a multiple of 2**16
- targetsize = max(l, 2**16)
- collector = [str(self.buf)]
- collected = len(self.buf)
- for chunk in self.iter:
- collector.append(chunk)
- collected += len(chunk)
- if collected >= targetsize:
+ left = l
+ buf = ''
+ queue = self._queue
+ while left > 0:
+ # refill the queue
+ if not queue:
+ target = 2**18
+ for chunk in self.iter:
+ queue.append(chunk)
+ target -= len(chunk)
+ if target <= 0:
+ break
+ if not queue:
break
+
+ chunk = queue.pop(0)
+ left -= len(chunk)
+ if left < 0:
+ queue.insert(0, chunk[left:])
+ buf += chunk[:left]
else:
- self.iter = False
- self.buf = ''.join(collector)
- if len(self.buf) == l:
- s, self.buf = str(self.buf), ''
- else:
- s, self.buf = self.buf[:l], buffer(self.buf, l)
- return s
+ buf += chunk
+ return buf
+
def filechunkiter(f, size=65536, limit=None):
"""Create a generator that produces the data in the file size
(default 65536) bytes at a time, up to optional limit (default is
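
The chunkbuffer rewrite above replaces the join-and-slice buffer with a plain FIFO of chunks: read(l) refills the queue roughly 2**18 bytes at a time, pops whole chunks until the request is satisfied, and when the last chunk overshoots, left goes negative, so chunk[:left] is exactly the prefix still needed and chunk[left:] is the surplus pushed back for the next call. The old code copied the leftover buffer on every refill (str() plus ''.join() plus slicing), which adds up quickly when a large stream is consumed in small reads. A rough usage sketch, assuming mercurial.util is importable and using a made-up input stream:

from mercurial import util

def stream():
    # 1 GB of data delivered in 64 KB pieces.
    for _ in xrange(16384):
        yield 'x' * 65536

cb = util.chunkbuffer(stream())
total = 0
while True:
    block = cb.read(65536)    # returns '' once the iterator runs dry
    if not block:
        break
    total += len(block)
assert total == 1024 * 1024 * 1024
# At any moment only about 2**18 bytes of pending chunks sit in the queue,
# instead of a buffer that keeps being copied as the stream is drained.
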
--- a/mercurial/verify.py Wed Aug 04 13:21:11 2010 -0500
+++ b/mercurial/verify.py Fri Aug 06 12:59:13 2010 -0500
@@ -48,6 +48,8 @@
if isinstance(inst, KeyboardInterrupt):
ui.warn(_("interrupted"))
raise
+ if not str(inst):
+ inst = repr(inst)
err(linkrev, "%s: %s" % (msg, inst), filename)
def warn(msg):
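
The first verify.py hunk deals with exceptions whose string form is empty, for example str(IOError()) == '' on Python 2, which previously produced error lines ending in a bare colon with no message; falling back to repr() at least preserves the exception type. The same check in isolation, with a made-up helper name:

def describe(inst):
    # Mirrors the fallback above: prefer the message, else the repr.
    if not str(inst):
        inst = repr(inst)
    return "%s" % inst

describe(IOError("integrity check failed"))   # 'integrity check failed'
describe(IOError())                           # 'IOError()'
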
@@ -229,6 +231,7 @@
checklog(fl, f, lr)
seen = {}
+ rp = None
for i in fl:
revisions += 1
n = fl.node(i)
@@ -241,12 +244,12 @@
# verify contents
try:
- t = fl.read(n)
+ l = len(fl.read(n))
rp = fl.renamed(n)
- if len(t) != fl.size(i):
+ if l != fl.size(i):
if len(fl.revision(n)) != fl.size(i):
err(lr, _("unpacked size is %s, %s expected") %
- (len(t), fl.size(i)), f)
+ (l, fl.size(i)), f)
except Exception, inst:
exc(lr, _("unpacking %s") % short(n), inst, f)
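
The remaining verify.py hunks trim memory during the per-revision size check: len(fl.read(n)) measures the text and lets the temporary string become garbage as soon as len() returns, whereas binding it to t kept the full text referenced for the rest of the iteration. Initializing rp = None before the loop also keeps later uses of rp (the rename source recorded here) well defined when an exception fires before it is assigned. A minimal sketch of the first point; read_revision and expected are stand-ins, not verify.py API:

def findbadsizes(read_revision, expected):
    # expected maps revision index to the size the index claims.
    bad = []
    for i in range(len(expected)):
        l = len(read_revision(i))   # the text is garbage once len() returns
        if l != expected[i]:
            bad.append(i)
    return bad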