largefiles: make caching largefiles message more explicit
author     Na'Tosha Bard <natosha@unity3d.com>
date       Sat, 09 Feb 2013 15:08:21 +0000
changeset 18600 8ba520003ae0
parent 18599 5cd1dbf4c5d2
child 18601 ce844e8e8af2
child 18603 2251b3184e6e
largefiles: make caching largefiles message more explicit

In some cases, caching largefiles may take a long time (if the user has
pulled a lot of new heads). This patch makes it clearer what is happening by
showing the number of heads we are caching largefiles for.
hgext/largefiles/overrides.py
tests/test-largefiles.t
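
The change itself boils down to a set difference: remember the heads before the pull, compare against the heads afterwards, and only print the message when new heads actually arrived. A minimal standalone sketch of that idea in plain Python (illustrative names only, not Mercurial's lfutil/lfcommands APIs):

    def report_new_heads(oldheads, currentheads):
        """Return the heads added by a pull and the status message for them."""
        newheads = set(currentheads) - set(oldheads)
        message = None
        if newheads:
            # Mirrors the wording added by this patch.
            message = "caching largefiles for %s heads\n" % len(newheads)
        return newheads, message

    # Example: one new head appeared after the pull.
    newheads, msg = report_new_heads(["a1b2c3"], ["a1b2c3", "d4e5f6"])
    assert msg == "caching largefiles for 1 heads\n"
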
--- a/hgext/largefiles/overrides.py	Sat Feb 09 15:25:46 2013 +0000
+++ b/hgext/largefiles/overrides.py	Sat Feb 09 15:08:21 2013 +0000
@@ -735,10 +735,11 @@
         # will run into a problem later if we try to merge or rebase with one of
         # these heads, so cache the largefiles now directly into the system
         # cache.
-        ui.status(_("caching new largefiles\n"))
         numcached = 0
         heads = lfutil.getcurrentheads(repo)
         newheads = set(heads).difference(set(oldheads))
+        if len(newheads) > 0:
+            ui.status(_("caching largefiles for %s heads\n") % len(newheads))
         for head in newheads:
             (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
             numcached += len(cached)
--- a/tests/test-largefiles.t	Sat Feb 09 15:25:46 2013 +0000
+++ b/tests/test-largefiles.t	Sat Feb 09 15:08:21 2013 +0000
@@ -883,7 +883,7 @@
   adding file changes
   added 6 changesets with 16 changes to 8 files
   (run 'hg update' to get a working copy)
-  caching new largefiles
+  caching largefiles for 1 heads
   3 largefiles cached
   3 additional largefiles cached
   $ cd ..
@@ -974,7 +974,7 @@
   adding file changes
   added 1 changesets with 2 changes to 2 files (+1 heads)
   (run 'hg heads' to see heads, 'hg merge' to merge)
-  caching new largefiles
+  caching largefiles for 1 heads
   0 largefiles cached
   $ hg rebase
   Invoking status precommit hook
@@ -1273,7 +1273,7 @@
   adding file changes
   added 2 changesets with 4 changes to 4 files (+1 heads)
   (run 'hg heads' to see heads, 'hg merge' to merge)
-  caching new largefiles
+  caching largefiles for 1 heads
   2 largefiles cached
   $ hg merge
   merging sub/large4