largefiles: drop --cache-largefiles again

This goes a step further than d69585a5c5c0 and backs out the unreleased
--cache-largefiles option. The same can be achieved with
--lfrev 'heads(pulled())', and we shouldn't introduce unnecessary
command line options.
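For example, to pre-fetch largefiles for any new heads at pull time (as the
updated test below does), one can run something like:

  $ hg pull --lfrev 'heads(pulled())'

Here pulled() expands to the changesets brought in by this pull, so
heads(pulled()) selects the new heads whose largefiles the dropped option
used to cache.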
--- a/hgext/largefiles/__init__.py Mon Apr 15 01:59:04 2013 +0200
+++ b/hgext/largefiles/__init__.py Mon Apr 15 01:59:11 2013 +0200
@@ -59,8 +59,6 @@
If you just want to ensure that you will have the largefiles needed to
merge or rebase with new heads that you are pulling, then you can pull
with the `--lfrev "heads(pulled())"` flag to pre-emptively download any largefiles
-that are new in the heads you are pulling. You can also pull with the
-`--cache-largefiles` flag to pre-emptively download any largefiles
that are new in the heads you are pulling.
Keep in mind that network access may now be required to update to
--- a/hgext/largefiles/lfutil.py Mon Apr 15 01:59:04 2013 +0200
+++ b/hgext/largefiles/lfutil.py Mon Apr 15 01:59:11 2013 +0200
@@ -398,14 +398,6 @@
def __init__(self, storetypes):
self.storetypes = storetypes
-def getcurrentheads(repo):
- branches = repo.branchmap()
- heads = []
- for branch in branches:
- newheads = repo.branchheads(branch)
- heads = heads + newheads
- return heads
-
def getstandinsstate(repo):
standins = []
matcher = getstandinmatcher(repo)
--- a/hgext/largefiles/overrides.py Mon Apr 15 01:59:04 2013 +0200
+++ b/hgext/largefiles/overrides.py Mon Apr 15 01:59:11 2013 +0200
@@ -730,23 +730,7 @@
finally:
repo._isrebasing = False
else:
- oldheads = lfutil.getcurrentheads(repo)
result = orig(ui, repo, source, **opts)
- if opts.get('cache_largefiles'):
- # If you are pulling from a remote location that is not your
- # default location, you may want to cache largefiles for new heads
- # that have been pulled, so you can easily merge or rebase with
- # them later
- numcached = 0
- heads = lfutil.getcurrentheads(repo)
- newheads = set(heads).difference(set(oldheads))
- if len(newheads) > 0:
- ui.status(_("caching largefiles for %s heads\n") %
- len(newheads))
- for head in newheads:
- (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
- numcached += len(cached)
- ui.status(_("%d largefiles cached\n") % numcached)
revspostpull = len(repo)
if opts.get('all_largefiles'):
revs = []
--- a/hgext/largefiles/uisetup.py Mon Apr 15 01:59:04 2013 +0200
+++ b/hgext/largefiles/uisetup.py Mon Apr 15 01:59:11 2013 +0200
@@ -80,8 +80,6 @@
overrides.overridepull)
pullopt = [('', 'all-largefiles', None,
_('download all pulled versions of largefiles')),
- ('', 'cache-largefiles', None,
- _('caches new largefiles in all pulled heads')),
('', 'lfrev', [],
_('download largefiles for these revisions'), _('REV'))]
entry[1].extend(pullopt)
--- a/tests/test-largefiles.t Mon Apr 15 01:59:04 2013 +0200
+++ b/tests/test-largefiles.t Mon Apr 15 01:59:11 2013 +0200
@@ -1391,7 +1391,7 @@
Invoking status precommit hook
M sub/large4
- # Test --cache-largefiles flag
- $ hg pull --cache-largefiles ../e
+ # Test pulling largefiles for new heads with --lfrev 'heads(pulled())'
+ $ hg pull --lfrev 'heads(pulled())' ../e
pulling from ../e
searching for changes
adding changesets
@@ -1399,7 +1399,6 @@
adding file changes
added 2 changesets with 4 changes to 4 files (+1 heads)
(run 'hg heads' to see heads, 'hg merge' to merge)
- caching largefiles for 1 heads
2 largefiles cached
$ hg merge
merging sub/large4