changeset 26823:45e8bd2f36f0 stable

largefiles: check hash of files in the store before copying to working dir

If the store somehow got corrupted, users could end up in weird situations that were very hard to recover from or that could lead to propagation of the corruption. Instead, spend the extra time checking the hash when copying to the working directory. If it doesn't match, emit a warning and don't put the wrong content in the working directory.
author Mads Kiilerich <madski@unity3d.com>
date Fri, 23 Oct 2015 21:27:29 +0200
parents d881c072050a
children 30db531e4146
files hgext/largefiles/lfutil.py tests/test-largefiles-cache.t
diffstat 2 files changed, 12 insertions(+), 6 deletions(-)
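
The change below replaces the plain shutil.copy() with a copy that hashes the data as it is written, using the extension's copyandhash helper. As a rough standalone illustration of that copy-and-verify pattern, here is a minimal sketch; copy_and_check(), its chunk size, and its arguments are illustrative only and not Mercurial API, and it assumes (as largefiles does) that store entries are named after the SHA-1 of their content:

    import hashlib
    import os

    def copy_and_check(srcpath, destpath, expectedhash):
        """Copy srcpath to destpath, hashing the content on the fly.

        Returns True if the copied content matches expectedhash; otherwise
        the partially written destination is removed and False is returned.
        (Hypothetical helper for illustration, not part of lfutil.)"""
        hasher = hashlib.sha1()
        with open(srcpath, 'rb') as src, open(destpath, 'wb') as dest:
            while True:
                chunk = src.read(128 * 1024)
                if not chunk:
                    break
                hasher.update(chunk)
                dest.write(chunk)
        if hasher.hexdigest() != expectedhash:
            os.unlink(destpath)
            return False
        return True
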
--- a/hgext/largefiles/lfutil.py	Fri Oct 23 21:27:29 2015 +0200
+++ b/hgext/largefiles/lfutil.py	Fri Oct 23 21:27:29 2015 +0200
@@ -10,7 +10,6 @@
 
 import os
 import platform
-import shutil
 import stat
 import copy
 
@@ -207,7 +206,15 @@
     util.makedirs(os.path.dirname(repo.wjoin(filename)))
     # The write may fail before the file is fully written, but we
     # don't use atomic writes in the working copy.
-    shutil.copy(path, repo.wjoin(filename))
+    dest = repo.wjoin(filename)
+    with open(path, 'rb') as srcfd:
+        with open(dest, 'wb') as destfd:
+            gothash = copyandhash(srcfd, destfd)
+    if gothash != hash:
+        repo.ui.warn(_('%s: data corruption in %s with hash %s\n')
+                     % (filename, path, gothash))
+        util.unlink(dest)
+        return False
     return True
 
 def copytostore(repo, rev, file, uploaded=False):
--- a/tests/test-largefiles-cache.t	Fri Oct 23 21:27:29 2015 +0200
+++ b/tests/test-largefiles-cache.t	Fri Oct 23 21:27:29 2015 +0200
@@ -191,13 +191,12 @@
   e2fb5f2139d086ded2cb600d5a91a196e76bf020
   $ mv .hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 ..
   $ echo corruption > .hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020
-(the following update will put the corrupted file into the working directory
-where it will show up as a change)
   $ hg up -C
   getting changed largefiles
-  1 largefiles updated, 0 removed
+  large: data corruption in $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 with hash 6a7bb2556144babe3899b25e5428123735bb1e27
+  0 largefiles updated, 0 removed
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg st
-  M large
+  ! large
   ? z
   $ rm .hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020