largefiles: drop limitreader, use filechunkiter limit
limitreader.close was a noop, so nothing is lost by dropping the explicit close call.
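
For context, util.filechunkiter(f, size, limit) yields chunks of at most `size`
bytes and stops once `limit` bytes have been consumed, which is why the wrapper
class is no longer needed. A minimal sketch of the limit semantics (an
illustration, not the Mercurial implementation):

    import io

    def chunks_with_limit(f, size=65536, limit=None):
        # Yield chunks of up to `size` bytes; stop once `limit` bytes
        # have been read, or at EOF when no limit is given.
        while True:
            nbytes = size if limit is None else min(limit, size)
            if nbytes == 0:
                break
            s = f.read(nbytes)
            if not s:
                break
            if limit is not None:
                limit -= len(s)
            yield s

    # Reading 10 bytes of a 20 byte stream in 4 byte chunks stops at the
    # limit and leaves the rest unread -- the property the SSH transport
    # relies on, since the peer keeps the connection open.
    stream = io.BytesIO(b'x' * 20)
    assert sum(len(c) for c in chunks_with_limit(stream, 4, 10)) == 10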
--- a/hgext/largefiles/lfutil.py Tue Apr 16 01:46:39 2013 +0200
+++ b/hgext/largefiles/lfutil.py Tue Apr 16 01:55:57 2013 +0200
@@ -309,21 +309,6 @@
fd.close()
return hasher.hexdigest()
-class limitreader(object):
- def __init__(self, f, limit):
- self.f = f
- self.limit = limit
-
- def read(self, length):
- if self.limit == 0:
- return ''
- length = length > self.limit and self.limit or length
- self.limit -= length
- return self.f.read(length)
-
- def close(self):
- pass
-
def writehash(hash, filename, executable):
util.makedirs(os.path.dirname(filename))
util.writefile(filename, hash + '\n')
--- a/hgext/largefiles/proto.py Tue Apr 16 01:46:39 2013 +0200
+++ b/hgext/largefiles/proto.py Tue Apr 16 01:55:57 2013 +0200
@@ -123,11 +123,9 @@
self._abort(error.ResponseError(_("unexpected response:"),
length))
- # Mercurial doesn't close SSH connections after writing a stream
- infile = lfutil.limitreader(stream, length)
- for chunk in util.filechunkiter(infile, 128 * 1024):
+ # SSH streams will block if reading more than length
+ for chunk in util.filechunkiter(stream, 128 * 1024, length):
yield chunk
- infile.close()
@batchable
def statlfile(self, sha):
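
The comment added in proto.py states the key constraint: an SSH peer keeps the
connection open after sending the largefile payload, so a reader that asks for
more than `length` bytes blocks waiting for data that never arrives instead of
hitting EOF. A hypothetical sketch of that consuming pattern (the names and the
length-line framing are illustrative, not the largefiles wire protocol code):

    import io

    def read_exactly(stream, length, chunksize=128 * 1024):
        # Read exactly `length` bytes from a stream that stays open
        # afterwards; asking for more would block on a live SSH pipe
        # instead of returning EOF.
        remaining = length
        while remaining > 0:
            chunk = stream.read(min(chunksize, remaining))
            if not chunk:
                raise EOFError('stream ended %d bytes early' % remaining)
            remaining -= len(chunk)
            yield chunk

    # Stand-in for an SSH response: a length line, the payload, then
    # whatever the peer sends next on the still-open connection.
    payload = b'largefile contents'
    stream = io.BytesIO(b'%d\n' % len(payload) + payload + b'next response')
    length = int(stream.readline())
    assert b''.join(read_exactly(stream, length)) == payload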