largefiles: write .hg/largefiles/ files atomically
Before, it was possible to create a
.hg/largefiles/hash
file with truncated content, i.e., content where
SHA-1(content) != hash
This breaks the fundamental invariant in largefiles that the file
content for files in .hg/largefiles hashes to the filename.
--- a/hgext/largefiles/lfutil.py Thu Nov 24 18:11:43 2011 +0100
+++ b/hgext/largefiles/lfutil.py Thu Nov 24 18:12:13 2011 +0100
@@ -228,8 +228,11 @@
if inusercache(repo.ui, hash):
link(usercachepath(repo.ui, hash), storepath(repo, hash))
else:
- shutil.copyfile(file, storepath(repo, hash))
- os.chmod(storepath(repo, hash), os.stat(file).st_mode)
+ dst = util.atomictempfile(storepath(repo, hash))
+ for chunk in util.filechunkiter(open(file)):
+ dst.write(chunk)
+ dst.close()
+ util.copymode(file, storepath(repo, hash))
linktousercache(repo, hash)
def linktousercache(repo, hash):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-largefiles-small-disk.t Thu Nov 24 18:12:13 2011 +0100
@@ -0,0 +1,39 @@
+Test how largefiles abort in case the disk runs full
+
+ $ cat > criple.py <<EOF
+ > import os, errno, shutil
+ > from mercurial import util
+ > #
+ > # this makes the original largefiles code abort:
+ > def copyfileobj(fsrc, fdst, length=16*1024):
+ > fdst.write(fsrc.read(4))
+ > raise IOError(errno.ENOSPC, os.strerror(errno.ENOSPC))
+ > shutil.copyfileobj = copyfileobj
+ > #
+ > # this makes the rewritten code abort:
+ > def filechunkiter(f, size=65536, limit=None):
+ > yield f.read(4)
+ > raise IOError(errno.ENOSPC, os.strerror(errno.ENOSPC))
+ > util.filechunkiter = filechunkiter
+ > EOF
+
+ $ echo "[extensions]" >> $HGRCPATH
+ $ echo "largefiles =" >> $HGRCPATH
+
+ $ hg init alice
+ $ cd alice
+ $ echo "this is a very big file" > big
+ $ hg add --large big
+ $ hg commit --config extensions.criple=$TESTTMP/criple.py -m big
+ abort: No space left on device
+ [255]
+
+The largefile is not created in .hg/largefiles:
+
+ $ ls .hg/largefiles
+ dirstate
+
+The user cache is not even created:
+
+ >>> import os; os.path.exists("$HOME/.cache/largefiles/")
+ False