changeset 18705:a8204cef4c5a

merge with main
author Kevin Bullock <kbullock@ringworld.org>
date Tue, 19 Feb 2013 13:35:25 -0600
parents 4921b5c2aeed (current diff) d69585a5c5c0 (diff)
children f17680992123
diffstat 5 files changed, 32 insertions(+), 29 deletions(-)

--- a/hgext/largefiles/__init__.py	Sun Feb 17 14:34:53 2013 -0600
+++ b/hgext/largefiles/__init__.py	Tue Feb 19 13:35:25 2013 -0600
@@ -41,13 +41,17 @@
 enabled for this to work.
 
 When you pull a changeset that affects largefiles from a remote
-repository, the largefiles for the changeset usually won't be
-pulled down until you update to the revision (there is one exception
-to this case).  However, when you update to such a revision, any
-largefiles needed by that revision are downloaded and cached (if
-they have never been downloaded before).  This means that network
-access may be required to update to changesets you have no
-previously updated to.
+repository, the largefiles for the changeset won't be pulled down.
+Instead, when you later update to such a revision, any largefiles
+needed by that revision are downloaded and cached (if they have
+never been downloaded before).  This means that network access may
+be required to update to changesets you have not previously updated to.
+
+If you know you are pulling from a non-default location and want to
+ensure that you will have the largefiles needed to merge or rebase
+with the new heads, you can pull with the --cache-largefiles flag to
+pre-emptively download any largefiles that are new in the heads you
+are pulling.
 
 The one exception to the "largefiles won't be pulled until you update
 to a revision that changes them" rule is when you pull new heads.
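
The lazy-download behaviour described in the new help text ultimately goes
through the largefiles caching helper. A minimal sketch of that step,
assuming a hypothetical ensure_largefiles() wrapper (lfcommands.cachelfiles
is the real helper, used by overrides.py below):

    from hgext.largefiles import lfcommands

    def ensure_largefiles(ui, repo, rev):
        # cachelfiles() downloads and caches any largefiles needed by rev
        # that are not already in the system-wide cache; it returns the
        # largefiles it fetched and those it could not find.
        cached, missing = lfcommands.cachelfiles(ui, repo, rev)
        if missing:
            ui.warn("%d largefiles failed to download\n" % len(missing))
        return cached

This is the same call the --cache-largefiles code path below makes for
every newly pulled head.
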
--- a/hgext/largefiles/overrides.py	Sun Feb 17 14:34:53 2013 -0600
+++ b/hgext/largefiles/overrides.py	Tue Feb 19 13:35:25 2013 -0600
@@ -733,19 +733,21 @@
         repo.lfpullsource = source
         oldheads = lfutil.getcurrentheads(repo)
         result = orig(ui, repo, source, **opts)
-        # If we do not have the new largefiles for any new heads we pulled, we
-        # will run into a problem later if we try to merge or rebase with one of
-        # these heads, so cache the largefiles now directly into the system
-        # cache.
-        numcached = 0
-        heads = lfutil.getcurrentheads(repo)
-        newheads = set(heads).difference(set(oldheads))
-        if len(newheads) > 0:
-            ui.status(_("caching largefiles for %s heads\n") % len(newheads))
-        for head in newheads:
-            (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
-            numcached += len(cached)
-        ui.status(_("%d largefiles cached\n") % numcached)
+        if opts.get('cache_largefiles'):
+            # If you are pulling from a remote location that is not your
+            # default location, you may want to cache largefiles for new heads
+            # that have been pulled, so you can easily merge or rebase with
+            # them later
+            numcached = 0
+            heads = lfutil.getcurrentheads(repo)
+            newheads = set(heads).difference(set(oldheads))
+            if len(newheads) > 0:
+                ui.status(_("caching largefiles for %s heads\n") %
+                          len(newheads))
+            for head in newheads:
+                (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
+                numcached += len(cached)
+            ui.status(_("%d largefiles cached\n") % numcached)
     if opts.get('all_largefiles'):
         revspostpull = len(repo)
         revs = []
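
The guarded block added above boils down to a set difference over heads:
the heads recorded before calling the original pull are subtracted from the
heads present afterwards, and largefiles are cached for each head that
appeared. Expressed as a hypothetical standalone helper (illustrative only;
no such function exists in the extension):

    from mercurial.i18n import _
    from hgext.largefiles import lfcommands, lfutil

    def cachenewheads(ui, repo, oldheads):
        # Heads that exist after the pull but were not present before it.
        newheads = set(lfutil.getcurrentheads(repo)) - set(oldheads)
        if newheads:
            ui.status(_("caching largefiles for %s heads\n") % len(newheads))
        numcached = 0
        for head in newheads:
            # Download and cache every largefile referenced by this head.
            cached, missing = lfcommands.cachelfiles(ui, repo, head)
            numcached += len(cached)
        ui.status(_("%d largefiles cached\n") % numcached)

Keeping this work behind opts.get('cache_largefiles') preserves the new
default of not touching the network on a plain pull, which is what the test
updates below reflect.
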
--- a/hgext/largefiles/uisetup.py	Sun Feb 17 14:34:53 2013 -0600
+++ b/hgext/largefiles/uisetup.py	Tue Feb 19 13:35:25 2013 -0600
@@ -79,7 +79,9 @@
     entry = extensions.wrapcommand(commands.table, 'pull',
                                    overrides.overridepull)
     pullopt = [('', 'all-largefiles', None,
-                 _('download all pulled versions of largefiles'))]
+                 _('download all pulled versions of largefiles')),
+               ('', 'cache-largefiles', None,
+                 _('cache new largefiles in all pulled heads'))]
     entry[1].extend(pullopt)
     entry = extensions.wrapcommand(commands.table, 'clone',
                                    overrides.overrideclone)
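
extensions.wrapcommand() returns the command-table entry for 'pull', and
entry[1] is its list of option tuples (short flag, long flag, default, help
text), so extending it is how the new flag becomes visible on the command
line. A stripped-down sketch of the same pattern, with placeholder names
(the mypull and uisetup bodies here are illustrative, not part of this
changeset); note that '--cache-largefiles' reaches the wrapped function as
opts['cache_largefiles'], with the dash mapped to an underscore, which is
what overrides.py checks above:

    from mercurial import commands, extensions

    def mypull(orig, ui, repo, source="default", **opts):
        # Run the real pull first, then decide whether to pre-fetch.
        result = orig(ui, repo, source, **opts)
        if opts.get('cache_largefiles'):
            ui.status("would cache largefiles for newly pulled heads\n")
        return result

    def uisetup(ui):
        entry = extensions.wrapcommand(commands.table, 'pull', mypull)
        entry[1].append(('', 'cache-largefiles', None,
                         'cache new largefiles in all pulled heads'))
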
--- a/tests/test-largefiles-cache.t	Sun Feb 17 14:34:53 2013 -0600
+++ b/tests/test-largefiles-cache.t	Tue Feb 19 13:35:25 2013 -0600
@@ -37,8 +37,6 @@
   adding file changes
   added 1 changesets with 1 changes to 1 files
   (run 'hg update' to get a working copy)
-  caching largefiles for 1 heads
-  0 largefiles cached
 
 Update working directory to "tip", which requires largefile("large"),
 but there is no cache file for it.  So, hg must treat it as
--- a/tests/test-largefiles.t	Sun Feb 17 14:34:53 2013 -0600
+++ b/tests/test-largefiles.t	Tue Feb 19 13:35:25 2013 -0600
@@ -883,9 +883,7 @@
   adding file changes
   added 6 changesets with 16 changes to 8 files
   (run 'hg update' to get a working copy)
-  caching largefiles for 1 heads
-  3 largefiles cached
-  3 additional largefiles cached
+  6 additional largefiles cached
   $ cd ..
 
 Rebasing between two repositories does not revert largefiles to old
@@ -974,8 +972,6 @@
   adding file changes
   added 1 changesets with 2 changes to 2 files (+1 heads)
   (run 'hg heads' to see heads, 'hg merge' to merge)
-  caching largefiles for 1 heads
-  0 largefiles cached
   $ hg rebase
   Invoking status precommit hook
   M sub/normal4
@@ -1265,7 +1261,8 @@
   $ hg commit -m "Modify large4 to test merge"
   Invoking status precommit hook
   M sub/large4
-  $ hg pull ../e
+Test --cache-largefiles flag
+  $ hg pull --cache-largefiles ../e
   pulling from ../e
   searching for changes
   adding changesets