hgext/remotefilelog/__init__.py
# __init__.py - remotefilelog extension
#
# Copyright 2013 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
     7 """remotefilelog causes Mercurial to lazilly fetch file contents (EXPERIMENTAL)
       

Configs:

    ``packs.maxchainlen`` specifies the maximum delta chain length in pack files
    ``packs.maxpacksize`` specifies the maximum pack file size
    ``packs.maxpackfilecount`` specifies the maximum number of packs in the
      shared cache (trees only for now)
    ``remotefilelog.backgroundprefetch`` runs prefetch in background when True
    ``remotefilelog.bgprefetchrevs`` specifies revisions to fetch on commit and
      update, and on other commands that use them. Different from pullprefetch.
    ``remotefilelog.gcrepack`` does garbage collection during repack when True
    ``remotefilelog.nodettl`` specifies maximum TTL of a node in seconds before
      it is garbage collected
    ``remotefilelog.repackonhggc`` runs repack on hg gc when True
    ``remotefilelog.prefetchdays`` specifies the maximum age of a commit in
      days after which it is no longer prefetched.
    ``remotefilelog.prefetchdelay`` specifies delay between background
      prefetches in seconds after operations that change the working copy parent
    ``remotefilelog.data.gencountlimit`` constrains the minimum number of data
      pack files required to be considered part of a generation. In particular,
      minimum number of pack files > gencountlimit.
    ``remotefilelog.data.generations`` list for specifying the lower bound of
      each generation of the data pack files. For example, list ['100MB',
      '1MB'] or ['1MB', '100MB'] will lead to three generations: [0, 1MB),
      [1MB, 100MB) and [100MB, infinity).
    ``remotefilelog.data.maxrepackpacks`` the maximum number of pack files to
      include in an incremental data repack.
    ``remotefilelog.data.repackmaxpacksize`` the maximum size of a pack file for
      it to be considered for an incremental data repack.
    ``remotefilelog.data.repacksizelimit`` the maximum total size of pack files
      to include in an incremental data repack.
    ``remotefilelog.history.gencountlimit`` constrains the minimum number of
      history pack files required to be considered part of a generation. In
      particular, minimum number of pack files > gencountlimit.
    ``remotefilelog.history.generations`` list for specifying the lower bound of
      each generation of the history pack files. For example, list ['100MB',
      '1MB'] or ['1MB', '100MB'] will lead to three generations: [0, 1MB),
      [1MB, 100MB) and [100MB, infinity).
    ``remotefilelog.history.maxrepackpacks`` the maximum number of pack files to
      include in an incremental history repack.
    ``remotefilelog.history.repackmaxpacksize`` the maximum size of a pack file
      for it to be considered for an incremental history repack.
    ``remotefilelog.history.repacksizelimit`` the maximum total size of pack
      files to include in an incremental history repack.
    ``remotefilelog.backgroundrepack`` automatically consolidate packs in the
      background
    ``remotefilelog.cachepath`` path to cache
    ``remotefilelog.cachegroup`` if set, make cache directory sgid to this
      group
    ``remotefilelog.cacheprocess`` binary to invoke for fetching file data
    ``remotefilelog.debug`` turn on remotefilelog-specific debug output
    ``remotefilelog.excludepattern`` pattern of files to exclude from pulls
    ``remotefilelog.includepattern`` pattern of files to include in pulls
    ``remotefilelog.fetchpacks`` if set, fetch pre-packed files from the server
    ``remotefilelog.fetchwarning`` message to print when too many
      single-file fetches occur
    ``remotefilelog.getfilesstep`` number of files to request in a single RPC
    ``remotefilelog.getfilestype`` if set to 'threaded' use threads to fetch
      files, otherwise use optimistic fetching
    ``remotefilelog.pullprefetch`` revset for selecting files that should be
      eagerly downloaded rather than lazily
    ``remotefilelog.reponame`` name of the repo. If set, used to partition
      data from other repos in a shared store.
    ``remotefilelog.server`` if true, enable server-side functionality
    ``remotefilelog.servercachepath`` path for caching blobs on the server
    ``remotefilelog.serverexpiration`` number of days to keep cached server
      blobs
    ``remotefilelog.validatecache`` if set, check cache entries for corruption
      before returning blobs
    ``remotefilelog.validatecachelog`` if set, check cache entries for
      corruption before returning metadata

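An example client configuration combining several of the options above
(the paths and values are illustrative, not recommendations)::

    [remotefilelog]
    cachepath = /path/to/shared/cache
    reponame = myrepo
    pullprefetch = master
    backgroundrepack = True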
       
"""
from __future__ import absolute_import

import os
import time
import traceback

from mercurial.node import hex
from mercurial.i18n import _
from mercurial import (
    changegroup,
    changelog,
    cmdutil,
    commands,
    configitems,
    context,
    copies,
    debugcommands as hgdebugcommands,
    dispatch,
    error,
    exchange,
    extensions,
    hg,
    localrepo,
    match,
    merge,
    node as nodemod,
    patch,
    registrar,
    repair,
    repoview,
    revset,
    scmutil,
    smartset,
    templatekw,
    util,
)
from . import (
    debugcommands,
    fileserverclient,
    remotefilectx,
    remotefilelog,
    remotefilelogserver,
    repack as repackmod,
    shallowbundle,
    shallowrepo,
    shallowstore,
    shallowutil,
    shallowverifier,
)

# ensures debug commands are registered
hgdebugcommands.command

try:
    from mercurial import streamclone
    streamclone._walkstreamfiles
    hasstreamclone = True
except Exception:
    hasstreamclone = False

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

configitem('remotefilelog', 'debug', default=False)

configitem('remotefilelog', 'reponame', default='')
configitem('remotefilelog', 'cachepath', default=None)
configitem('remotefilelog', 'cachegroup', default=None)
configitem('remotefilelog', 'cacheprocess', default=None)
configitem('remotefilelog', 'cacheprocess.includepath', default=None)
configitem("remotefilelog", "cachelimit", default="1000 GB")

configitem('remotefilelog', 'fetchpacks', default=False)
configitem('remotefilelog', 'fallbackpath', default=configitems.dynamicdefault,
           alias=[('remotefilelog', 'fallbackrepo')])

configitem('remotefilelog', 'validatecachelog', default=None)
configitem('remotefilelog', 'validatecache', default='on')
configitem('remotefilelog', 'server', default=None)
configitem('remotefilelog', 'servercachepath', default=None)
configitem("remotefilelog", "serverexpiration", default=30)
configitem('remotefilelog', 'backgroundrepack', default=False)
configitem('remotefilelog', 'bgprefetchrevs', default=None)
configitem('remotefilelog', 'pullprefetch', default=None)
configitem('remotefilelog', 'backgroundprefetch', default=False)
configitem('remotefilelog', 'prefetchdelay', default=120)
configitem('remotefilelog', 'prefetchdays', default=14)

configitem('remotefilelog', 'getfilesstep', default=10000)
configitem('remotefilelog', 'getfilestype', default='optimistic')
configitem('remotefilelog', 'batchsize', configitems.dynamicdefault)
configitem('remotefilelog', 'fetchwarning', default='')

configitem('remotefilelog', 'includepattern', default=None)
configitem('remotefilelog', 'excludepattern', default=None)

configitem('remotefilelog', 'gcrepack', default=False)
configitem('remotefilelog', 'repackonhggc', default=False)
configitem('remotefilelog', 'datapackversion', default=0)
configitem('repack', 'chainorphansbysize', default=True)

configitem('packs', 'maxpacksize', default=0)
configitem('packs', 'maxchainlen', default=1000)

configitem('remotefilelog', 'historypackv1', default=False)
# default TTL limit is 30 days
_defaultlimit = 60 * 60 * 24 * 30
configitem('remotefilelog', 'nodettl', default=_defaultlimit)

configitem('remotefilelog', 'data.gencountlimit', default=2)
configitem('remotefilelog', 'data.generations',
           default=['1GB', '100MB', '1MB'])
configitem('remotefilelog', 'data.maxrepackpacks', default=50)
configitem('remotefilelog', 'data.repackmaxpacksize', default='4GB')
configitem('remotefilelog', 'data.repacksizelimit', default='100MB')

configitem('remotefilelog', 'history.gencountlimit', default=2)
configitem('remotefilelog', 'history.generations', default=['100MB'])
configitem('remotefilelog', 'history.maxrepackpacks', default=50)
configitem('remotefilelog', 'history.repackmaxpacksize', default='400MB')
configitem('remotefilelog', 'history.repacksizelimit', default='100MB')

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

repoclass = localrepo.localrepository
repoclass._basesupported.add(shallowrepo.requirement)

def uisetup(ui):
    """Wraps user-facing Mercurial commands to swap them out with shallow
    versions.
    """
    hg.wirepeersetupfuncs.append(fileserverclient.peersetup)

    entry = extensions.wrapcommand(commands.table, 'clone', cloneshallow)
    entry[1].append(('', 'shallow', None,
                     _("create a shallow clone which uses remote file "
                       "history")))

    extensions.wrapcommand(commands.table, 'debugindex',
        debugcommands.debugindex)
    extensions.wrapcommand(commands.table, 'debugindexdot',
        debugcommands.debugindexdot)
    extensions.wrapcommand(commands.table, 'log', log)
    extensions.wrapcommand(commands.table, 'pull', pull)

    # Prevent 'hg manifest --all'
    def _manifest(orig, ui, repo, *args, **opts):
        if shallowrepo.requirement in repo.requirements and opts.get('all'):
            raise error.Abort(_("--all is not supported in a shallow repo"))

        return orig(ui, repo, *args, **opts)
    extensions.wrapcommand(commands.table, "manifest", _manifest)

    # Wrap remotefilelog with lfs code
    def _lfsloaded(loaded=False):
        lfsmod = None
        try:
            lfsmod = extensions.find('lfs')
        except KeyError:
            pass
        if lfsmod:
            lfsmod.wrapfilelog(remotefilelog.remotefilelog)
            fileserverclient._lfsmod = lfsmod
    extensions.afterloaded('lfs', _lfsloaded)

    # debugdata needs remotefilelog.len to work
    extensions.wrapcommand(commands.table, 'debugdata', debugdatashallow)

def cloneshallow(orig, ui, repo, *args, **opts):
    if opts.get('shallow'):
        repos = []
        def pull_shallow(orig, self, *args, **kwargs):
            if shallowrepo.requirement not in self.requirements:
                repos.append(self.unfiltered())
                # set up the client hooks so the post-clone update works
                setupclient(self.ui, self.unfiltered())

                # setupclient fixed the class on the repo itself
                # but we also need to fix it on the repoview
                if isinstance(self, repoview.repoview):
                    self.__class__.__bases__ = (self.__class__.__bases__[0],
                                                self.unfiltered().__class__)
                self.requirements.add(shallowrepo.requirement)
                self._writerequirements()

                # Since setupclient hadn't been called, exchange.pull was not
                # wrapped. So we need to manually invoke our version of it.
                return exchangepull(orig, self, *args, **kwargs)
            else:
                return orig(self, *args, **kwargs)
        extensions.wrapfunction(exchange, 'pull', pull_shallow)

        # Wrap the stream logic to add requirements and to pass include/exclude
        # patterns around.
        def setup_streamout(repo, remote):
            # Replace remote.stream_out with a version that sends file
            # patterns.
            def stream_out_shallow(orig):
                caps = remote.capabilities()
                if shallowrepo.requirement in caps:
                    opts = {}
                    if repo.includepattern:
                        opts['includepattern'] = '\0'.join(repo.includepattern)
                    if repo.excludepattern:
                        opts['excludepattern'] = '\0'.join(repo.excludepattern)
                    return remote._callstream('stream_out_shallow', **opts)
                else:
                    return orig()
            extensions.wrapfunction(remote, 'stream_out', stream_out_shallow)
        if hasstreamclone:
            def stream_wrap(orig, op):
                setup_streamout(op.repo, op.remote)
                return orig(op)
            extensions.wrapfunction(
                streamclone, 'maybeperformlegacystreamclone', stream_wrap)

            def canperformstreamclone(orig, pullop, bundle2=False):
                # remotefilelog is currently incompatible with the
                # bundle2 flavor of streamclones, so force us to use
                # v1 instead.
                if 'v2' in pullop.remotebundle2caps.get('stream', []):
                    pullop.remotebundle2caps['stream'] = [
                        c for c in pullop.remotebundle2caps['stream']
                        if c != 'v2']
                if bundle2:
                    return False, None
                supported, requirements = orig(pullop, bundle2=bundle2)
                if requirements is not None:
                    requirements.add(shallowrepo.requirement)
                return supported, requirements
            extensions.wrapfunction(
                streamclone, 'canperformstreamclone', canperformstreamclone)
        else:
            def stream_in_shallow(orig, repo, remote, requirements):
                setup_streamout(repo, remote)
                requirements.add(shallowrepo.requirement)
                return orig(repo, remote, requirements)
            extensions.wrapfunction(
                localrepo.localrepository, 'stream_in', stream_in_shallow)

    try:
        orig(ui, repo, *args, **opts)
    finally:
        if opts.get('shallow'):
            for r in repos:
                if util.safehasattr(r, 'fileservice'):
                    r.fileservice.close()

def debugdatashallow(orig, *args, **kwds):
    oldlen = remotefilelog.remotefilelog.__len__
    try:
        remotefilelog.remotefilelog.__len__ = lambda x: 1
        return orig(*args, **kwds)
    finally:
        remotefilelog.remotefilelog.__len__ = oldlen

def reposetup(ui, repo):
    if not isinstance(repo, localrepo.localrepository):
        return

    # put here intentionally because this doesn't work in uisetup
    ui.setconfig('hooks', 'update.prefetch', wcpprefetch)
    ui.setconfig('hooks', 'commit.prefetch', wcpprefetch)

    isserverenabled = ui.configbool('remotefilelog', 'server')
    isshallowclient = shallowrepo.requirement in repo.requirements

    if isserverenabled and isshallowclient:
        raise RuntimeError("Cannot be both a server and shallow client.")

    if isshallowclient:
        setupclient(ui, repo)

    if isserverenabled:
        remotefilelogserver.setupserver(ui, repo)

def setupclient(ui, repo):
    if not isinstance(repo, localrepo.localrepository):
        return

    # Even clients get the server setup since they need to have the
    # wireprotocol endpoints registered.
    remotefilelogserver.onetimesetup(ui)
    onetimeclientsetup(ui)

    shallowrepo.wraprepo(repo)
    repo.store = shallowstore.wrapstore(repo.store)

clientonetime = False
def onetimeclientsetup(ui):
    global clientonetime
    if clientonetime:
        return
    clientonetime = True

    changegroup.cgpacker = shallowbundle.shallowcg1packer

    extensions.wrapfunction(changegroup, '_addchangegroupfiles',
                            shallowbundle.addchangegroupfiles)
    extensions.wrapfunction(
        changegroup, 'makechangegroup', shallowbundle.makechangegroup)

    def storewrapper(orig, requirements, path, vfstype):
        s = orig(requirements, path, vfstype)
        if shallowrepo.requirement in requirements:
            s = shallowstore.wrapstore(s)

        return s
    extensions.wrapfunction(localrepo, 'makestore', storewrapper)

    extensions.wrapfunction(exchange, 'pull', exchangepull)

    # prefetch files before update
    def applyupdates(orig, repo, actions, wctx, mctx, overwrite, labels=None):
        if shallowrepo.requirement in repo.requirements:
            manifest = mctx.manifest()
            files = []
            for f, args, msg in actions['g']:
                files.append((f, hex(manifest[f])))
            # batch fetch the needed files from the server
            repo.fileservice.prefetch(files)
        return orig(repo, actions, wctx, mctx, overwrite, labels=labels)
    extensions.wrapfunction(merge, 'applyupdates', applyupdates)

    # Prefetch merge checkunknownfiles
    def checkunknownfiles(orig, repo, wctx, mctx, force, actions,
                          *args, **kwargs):
        if shallowrepo.requirement in repo.requirements:
            files = []
            sparsematch = repo.maybesparsematch(mctx.rev())
            for f, (m, actionargs, msg) in actions.iteritems():
                if sparsematch and not sparsematch(f):
                    continue
                if m in ('c', 'dc', 'cm'):
                    files.append((f, hex(mctx.filenode(f))))
                elif m == 'dg':
                    f2 = actionargs[0]
                    files.append((f2, hex(mctx.filenode(f2))))
            # batch fetch the needed files from the server
            repo.fileservice.prefetch(files)
        return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
    extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)

    # Prefetch files before status attempts to look at their size and contents
    def checklookup(orig, self, files):
        repo = self._repo
        if shallowrepo.requirement in repo.requirements:
            prefetchfiles = []
            for parent in self._parents:
                for f in files:
                    if f in parent:
                        prefetchfiles.append((f, hex(parent.filenode(f))))
            # batch fetch the needed files from the server
            repo.fileservice.prefetch(prefetchfiles)
        return orig(self, files)
    extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)

    # Prefetch the logic that compares added and removed files for renames
    def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
        if shallowrepo.requirement in repo.requirements:
            files = []
            parentctx = repo['.']
            for f in removed:
                files.append((f, hex(parentctx.filenode(f))))
            # batch fetch the needed files from the server
            repo.fileservice.prefetch(files)
        return orig(repo, matcher, added, removed, *args, **kwargs)
    extensions.wrapfunction(scmutil, '_findrenames', findrenames)

    # prefetch files before mergecopies check
    def computenonoverlap(orig, repo, c1, c2, *args, **kwargs):
        u1, u2 = orig(repo, c1, c2, *args, **kwargs)
        if shallowrepo.requirement in repo.requirements:
            m1 = c1.manifest()
            m2 = c2.manifest()
            files = []

            sparsematch1 = repo.maybesparsematch(c1.rev())
            if sparsematch1:
                sparseu1 = []
                for f in u1:
                    if sparsematch1(f):
                        files.append((f, hex(m1[f])))
                        sparseu1.append(f)
                u1 = sparseu1

            sparsematch2 = repo.maybesparsematch(c2.rev())
            if sparsematch2:
                sparseu2 = []
                for f in u2:
                    if sparsematch2(f):
                        files.append((f, hex(m2[f])))
                        sparseu2.append(f)
                u2 = sparseu2

            # batch fetch the needed files from the server
            repo.fileservice.prefetch(files)
        return u1, u2
    extensions.wrapfunction(copies, '_computenonoverlap', computenonoverlap)

    # prefetch files before pathcopies check
    def computeforwardmissing(orig, a, b, match=None):
        missing = list(orig(a, b, match=match))
        repo = a._repo
        if shallowrepo.requirement in repo.requirements:
            mb = b.manifest()

            files = []
            sparsematch = repo.maybesparsematch(b.rev())
            if sparsematch:
                sparsemissing = []
                for f in missing:
                    if sparsematch(f):
                        files.append((f, hex(mb[f])))
                        sparsemissing.append(f)
                missing = sparsemissing

            # batch fetch the needed files from the server
            repo.fileservice.prefetch(files)
        return missing
    extensions.wrapfunction(copies, '_computeforwardmissing',
                            computeforwardmissing)

    # close cache miss server connection after the command has finished
    def runcommand(orig, lui, repo, *args, **kwargs):
        try:
            return orig(lui, repo, *args, **kwargs)
        finally:
            # repo can be None when running in chg:
            # - at startup, reposetup was called because serve is not norepo
            # - a norepo command like "help" is called
            if repo and shallowrepo.requirement in repo.requirements:
                repo.fileservice.close()
    extensions.wrapfunction(dispatch, 'runcommand', runcommand)

    # disappointing hacks below
    templatekw.getrenamedfn = getrenamedfn
    extensions.wrapfunction(revset, 'filelog', filelogrevset)
    revset.symbols['filelog'] = revset.filelog
    extensions.wrapfunction(cmdutil, 'walkfilerevs', walkfilerevs)

    # prevent strip from stripping remotefilelogs
    def _collectbrokencsets(orig, repo, files, striprev):
        if shallowrepo.requirement in repo.requirements:
            files = [f for f in files if not repo.shallowmatch(f)]
        return orig(repo, files, striprev)
    extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)

    # Don't commit filelogs until we know the commit hash, since the hash
    # is present in the filelog blob.
    # This violates Mercurial's filelog->manifest->changelog write order,
    # but is generally fine for client repos.
    pendingfilecommits = []
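    # Each entry in pendingfilecommits is the full argument tuple of a
    # deferred addrawrevision() call whose linkrev was still an integer;
    # changelogadd() below replays the write once the changelog entry
    # exists and the real linknode is known.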
       
    def addrawrevision(orig, self, rawtext, transaction, link, p1, p2, node,
                       flags, cachedelta=None, _metatuple=None):
        if isinstance(link, int):
            pendingfilecommits.append(
                (self, rawtext, transaction, link, p1, p2, node, flags,
                 cachedelta, _metatuple))
            return node
        else:
            return orig(self, rawtext, transaction, link, p1, p2, node, flags,
                        cachedelta, _metatuple=_metatuple)
    extensions.wrapfunction(
        remotefilelog.remotefilelog, 'addrawrevision', addrawrevision)

    def changelogadd(orig, self, *args):
        oldlen = len(self)
        node = orig(self, *args)
        newlen = len(self)
        if oldlen != newlen:
            for oldargs in pendingfilecommits:
                log, rt, tr, link, p1, p2, n, fl, c, m = oldargs
                linknode = self.node(link)
                if linknode == node:
                    log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
                else:
                    raise error.ProgrammingError(
                        'pending multiple integer revisions are not supported')
        else:
            # "link" is actually wrong here (it is set to len(changelog))
            # if changelog remains unchanged, skip writing file revisions
            # but still do a sanity check about pending multiple revisions
            if len(set(x[3] for x in pendingfilecommits)) > 1:
                raise error.ProgrammingError(
                    'pending multiple integer revisions are not supported')
        del pendingfilecommits[:]
        return node
    extensions.wrapfunction(changelog.changelog, 'add', changelogadd)

    # changectx wrappers
    def filectx(orig, self, path, fileid=None, filelog=None):
        if fileid is None:
            fileid = self.filenode(path)
        if (shallowrepo.requirement in self._repo.requirements and
            self._repo.shallowmatch(path)):
            return remotefilectx.remotefilectx(self._repo, path,
                fileid=fileid, changectx=self, filelog=filelog)
        return orig(self, path, fileid=fileid, filelog=filelog)
    extensions.wrapfunction(context.changectx, 'filectx', filectx)

    def workingfilectx(orig, self, path, filelog=None):
        if (shallowrepo.requirement in self._repo.requirements and
            self._repo.shallowmatch(path)):
            return remotefilectx.remoteworkingfilectx(self._repo,
                path, workingctx=self, filelog=filelog)
        return orig(self, path, filelog=filelog)
    extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)

    # prefetch required revisions before a diff
    def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
                copy, getfilectx, *args, **kwargs):
        if shallowrepo.requirement in repo.requirements:
            prefetch = []
            mf1 = ctx1.manifest()
            for fname in modified + added + removed:
                if fname in mf1:
                    fnode = getfilectx(fname, ctx1).filenode()
                    # fnode can be None if it's an edited working ctx file
                    if fnode:
                        prefetch.append((fname, hex(fnode)))
                if fname not in removed:
                    fnode = getfilectx(fname, ctx2).filenode()
                    if fnode:
                        prefetch.append((fname, hex(fnode)))

            repo.fileservice.prefetch(prefetch)

        return orig(repo, revs, ctx1, ctx2, modified, added, removed,
            copy, getfilectx, *args, **kwargs)
    extensions.wrapfunction(patch, 'trydiff', trydiff)

    # Prevent verify from processing files
    # a stub for mercurial.hg.verify()
    def _verify(orig, repo):
        lock = repo.lock()
        try:
            return shallowverifier.shallowverifier(repo).verify()
        finally:
            lock.release()

    extensions.wrapfunction(hg, 'verify', _verify)

    scmutil.fileprefetchhooks.add('remotefilelog', _fileprefetchhook)

def getrenamedfn(repo, endrev=None):
    rcache = {}

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if rev in rcache.setdefault(fn, {}):
            return rcache[fn][rev]

        try:
            fctx = repo[rev].filectx(fn)
            for ancestor in fctx.ancestors():
                if ancestor.path() == fn:
                    renamed = ancestor.renamed()
                    rcache[fn][ancestor.rev()] = renamed

            return fctx.renamed()
        except error.LookupError:
            return None

    return getrenamed

def walkfilerevs(orig, repo, match, follow, revs, fncache):
    if shallowrepo.requirement not in repo.requirements:
        return orig(repo, match, follow, revs, fncache)

    # remotefilelogs can't be walked in rev order, so throw.
    # The caller will see the exception and walk the commit tree instead.
    if not follow:
        raise cmdutil.FileWalkError("Cannot walk via filelog")

    wanted = set()
    minrev, maxrev = min(revs), max(revs)

    pctx = repo['.']
    for filename in match.files():
        if filename not in pctx:
            raise error.Abort(_('cannot follow file not in parent '
                               'revision: "%s"') % filename)
        fctx = pctx[filename]

        linkrev = fctx.linkrev()
        if minrev <= linkrev <= maxrev:
            fncache.setdefault(linkrev, []).append(filename)
            wanted.add(linkrev)

        for ancestor in fctx.ancestors():
            linkrev = ancestor.linkrev()
            if minrev <= linkrev <= maxrev:
                fncache.setdefault(linkrev, []).append(ancestor.path())
                wanted.add(linkrev)

    return wanted

def filelogrevset(orig, repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, ``filelog()`` does not show every changeset
    that affects the requested file(s). See :hg:`help log` for details. For
    a slower, more accurate result, use ``file()``.
    """

    if shallowrepo.requirement not in repo.requirements:
        return orig(repo, subset, x)

    # i18n: "filelog" is a keyword
    pat = revset.getstring(x, _("filelog requires a pattern"))
    m = match.match(repo.root, repo.getcwd(), [pat], default='relpath',
                       ctx=repo[None])
    s = set()

    if not match.patkind(pat):
        # slow
        for r in subset:
            ctx = repo[r]
            cfiles = ctx.files()
            for f in m.files():
                if f in cfiles:
                    s.add(ctx.rev())
                    break
    else:
        # partial
        files = (f for f in repo[None] if m(f))
        for f in files:
            fctx = repo[None].filectx(f)
            s.add(fctx.linkrev())
            for actx in fctx.ancestors():
                s.add(actx.linkrev())

    return smartset.baseset([r for r in subset if r in s])

@command('gc', [], _('hg gc [REPO...]'), norepo=True)
def gc(ui, *args, **opts):
    '''garbage collect the client and server filelog caches'''
    cachepaths = set()

    # get the system client cache
    systemcache = shallowutil.getcachepath(ui, allowempty=True)
    if systemcache:
        cachepaths.add(systemcache)

    # get repo client and server cache
    repopaths = []
    pwd = ui.environ.get('PWD')
    if pwd:
        repopaths.append(pwd)

    repopaths.extend(args)
    repos = []
    for repopath in repopaths:
        try:
            repo = hg.peer(ui, {}, repopath)
            repos.append(repo)

            repocache = shallowutil.getcachepath(repo.ui, allowempty=True)
            if repocache:
                cachepaths.add(repocache)
        except error.RepoError:
            pass

    # gc client cache
    for cachepath in cachepaths:
        gcclient(ui, cachepath)

    # gc server cache
    for repo in repos:
        remotefilelogserver.gcserver(ui, repo._repo)

def gcclient(ui, cachepath):
    # get list of repos that use this cache
    repospath = os.path.join(cachepath, 'repos')
    if not os.path.exists(repospath):
        ui.warn(_("no known cache at %s\n") % cachepath)
        return

    reposfile = open(repospath, 'r')
    repos = set([r[:-1] for r in reposfile.readlines()])
    reposfile.close()

    # build list of useful files
    validrepos = []
    keepkeys = set()

    _analyzing = _("analyzing repositories")

    sharedcache = None
    filesrepacked = False

    count = 0
    for path in repos:
        ui.progress(_analyzing, count, unit="repos", total=len(repos))
        count += 1
        try:
            path = ui.expandpath(os.path.normpath(path))
        except TypeError as e:
            ui.warn(_("warning: malformed path: %r:%s\n") % (path, e))
            traceback.print_exc()
            continue
        try:
            peer = hg.peer(ui, {}, path)
            repo = peer._repo
        except error.RepoError:
            continue

        validrepos.append(path)

        # Protect against any repo or config changes that have happened since
        # this repo was added to the repos file. We'd rather the loop succeed
        # and delete too much than fail and delete nothing.
        if shallowrepo.requirement not in repo.requirements:
            continue

        if not util.safehasattr(repo, 'name'):
            ui.warn(_("repo %s is a misconfigured remotefilelog repo\n") % path)
            continue

        # If garbage collection on repack and repack on hg gc are enabled
        # then loose files are repacked and garbage collected.
        # Otherwise regular garbage collection is performed.
        repackonhggc = repo.ui.configbool('remotefilelog', 'repackonhggc')
        gcrepack = repo.ui.configbool('remotefilelog', 'gcrepack')
        if repackonhggc and gcrepack:
            try:
                repackmod.incrementalrepack(repo)
                filesrepacked = True
                continue
            except (IOError, repackmod.RepackAlreadyRunning):
                # If repack cannot be performed due to not enough disk space
                # continue doing garbage collection of loose files w/o repack
                pass

        reponame = repo.name
        if not sharedcache:
            sharedcache = repo.sharedstore

        # Compute a keepset which is not garbage collected
        def keyfn(fname, fnode):
            return fileserverclient.getcachekey(reponame, fname, hex(fnode))
        keepkeys = repackmod.keepset(repo, keyfn=keyfn, lastkeepkeys=keepkeys)

    ui.progress(_analyzing, None)

    # write list of valid repos back
    oldumask = os.umask(0o002)
    try:
        reposfile = open(repospath, 'w')
        reposfile.writelines([("%s\n" % r) for r in validrepos])
        reposfile.close()
    finally:
        os.umask(oldumask)

    # prune cache
    if sharedcache is not None:
        sharedcache.gc(keepkeys)
    elif not filesrepacked:
        ui.warn(_("warning: no valid repos in repofile\n"))

def log(orig, ui, repo, *pats, **opts):
    if shallowrepo.requirement not in repo.requirements:
        return orig(ui, repo, *pats, **opts)

    follow = opts.get('follow')
    revs = opts.get('rev')
    if pats:
        # Force slowpath for non-follow patterns and follows that start from
        # non-working-copy-parent revs.
        if not follow or revs:
            # This forces the slowpath
            opts['removed'] = True

        # If this is a non-follow log without any revs specified, recommend that
        # the user add -f to speed it up.
        if not follow and not revs:
            match, pats = scmutil.matchandpats(repo['.'], pats, opts)
            isfile = not match.anypats()
            if isfile:
                for file in match.files():
                    if not os.path.isfile(repo.wjoin(file)):
                        isfile = False
                        break

            if isfile:
                ui.warn(_("warning: file log can be slow on large repos - "
                          "use -f to speed it up\n"))

    return orig(ui, repo, *pats, **opts)

def revdatelimit(ui, revset):
    """Update revset so that only changesets no older than 'prefetchdays' days
    are included. The default value is set to 14 days. If 'prefetchdays' is set
    to zero or a negative value, the date restriction is not applied.
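
    For example, with the default of 14 days, a revset such as 'draft()'
    (illustrative) becomes '(draft()) & date(-14)'.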
       
    """
    days = ui.configint('remotefilelog', 'prefetchdays')
    if days > 0:
        revset = '(%s) & date(-%s)' % (revset, days)
    return revset

def readytofetch(repo):
    """Check that enough time has passed since the last background prefetch.
    This only relates to prefetches after operations that change the working
    copy parent. Default delay between background prefetches is 2 minutes.
    """
    timeout = repo.ui.configint('remotefilelog', 'prefetchdelay')
    fname = repo.vfs.join('lastprefetch')

    ready = False
    with open(fname, 'a'):
        # the with construct above is used to avoid race conditions
        modtime = os.path.getmtime(fname)
        if (time.time() - modtime) > timeout:
            os.utime(fname, None)
            ready = True

    return ready

def wcpprefetch(ui, repo, **kwargs):
    """Prefetches, in the background, the revisions specified by the
    bgprefetchrevs revset. Does a background repack if the backgroundrepack
    config flag is set.
    """
    shallow = shallowrepo.requirement in repo.requirements
    bgprefetchrevs = ui.config('remotefilelog', 'bgprefetchrevs')
    isready = readytofetch(repo)

    if not (shallow and bgprefetchrevs and isready):
        return

    bgrepack = repo.ui.configbool('remotefilelog', 'backgroundrepack')
    # update a revset with a date limit
    bgprefetchrevs = revdatelimit(ui, bgprefetchrevs)

    def anon():
        if util.safehasattr(repo, 'ranprefetch') and repo.ranprefetch:
            return
        repo.ranprefetch = True
        repo.backgroundprefetch(bgprefetchrevs, repack=bgrepack)

    repo._afterlock(anon)

def pull(orig, ui, repo, *pats, **opts):
    result = orig(ui, repo, *pats, **opts)

    if shallowrepo.requirement in repo.requirements:
        # prefetch if it's configured
        prefetchrevset = ui.config('remotefilelog', 'pullprefetch')
        bgrepack = repo.ui.configbool('remotefilelog', 'backgroundrepack')
        bgprefetch = repo.ui.configbool('remotefilelog', 'backgroundprefetch')

        if prefetchrevset:
            ui.status(_("prefetching file contents\n"))
            revs = scmutil.revrange(repo, [prefetchrevset])
            base = repo['.'].rev()
            if bgprefetch:
                repo.backgroundprefetch(prefetchrevset, repack=bgrepack)
            else:
                repo.prefetch(revs, base=base)
                if bgrepack:
                    repackmod.backgroundrepack(repo, incremental=True)
        elif bgrepack:
            repackmod.backgroundrepack(repo, incremental=True)

    return result

def exchangepull(orig, repo, remote, *args, **kwargs):
    # Hook into the callstream/getbundle to insert bundle capabilities
    # during a pull.
    def localgetbundle(orig, source, heads=None, common=None, bundlecaps=None,
                       **kwargs):
        if not bundlecaps:
            bundlecaps = set()
        bundlecaps.add('remotefilelog')
        return orig(source, heads=heads, common=common, bundlecaps=bundlecaps,
                    **kwargs)

    if util.safehasattr(remote, '_callstream'):
        remote._localrepo = repo
    elif util.safehasattr(remote, 'getbundle'):
        extensions.wrapfunction(remote, 'getbundle', localgetbundle)

    return orig(repo, remote, *args, **kwargs)

def _fileprefetchhook(repo, revs, match):
    if shallowrepo.requirement in repo.requirements:
        allfiles = []
        for rev in revs:
            if rev == nodemod.wdirrev or rev is None:
                continue
            ctx = repo[rev]
            mf = ctx.manifest()
            sparsematch = repo.maybesparsematch(ctx.rev())
            for path in ctx.walk(match):
                if path.endswith('/'):
                    # Tree manifest that's being excluded as part of narrow
                    continue
                if (not sparsematch or sparsematch(path)) and path in mf:
                    allfiles.append((path, hex(mf[path])))
        repo.fileservice.prefetch(allfiles)

@command('debugremotefilelog', [
    ('d', 'decompress', None, _('decompress the filelog first')),
    ], _('hg debugremotefilelog <path>'), norepo=True)
def debugremotefilelog(ui, path, **opts):
    return debugcommands.debugremotefilelog(ui, path, **opts)

@command('verifyremotefilelog', [
    ('d', 'decompress', None, _('decompress the filelogs first')),
    ], _('hg verifyremotefilelog <directory>'), norepo=True)
def verifyremotefilelog(ui, path, **opts):
    return debugcommands.verifyremotefilelog(ui, path, **opts)

@command('debugdatapack', [
    ('', 'long', None, _('print the long hashes')),
    ('', 'node', '', _('dump the contents of node'), 'NODE'),
    ], _('hg debugdatapack <paths>'), norepo=True)
def debugdatapack(ui, *paths, **opts):
    return debugcommands.debugdatapack(ui, *paths, **opts)

@command('debughistorypack', [
    ], _('hg debughistorypack <path>'), norepo=True)
def debughistorypack(ui, path, **opts):
    return debugcommands.debughistorypack(ui, path)

@command('debugkeepset', [
    ], _('hg debugkeepset'))
def debugkeepset(ui, repo, **opts):
    # The command is used to measure keepset computation time
    def keyfn(fname, fnode):
        return fileserverclient.getcachekey(repo.name, fname, hex(fnode))
    repackmod.keepset(repo, keyfn)
    return

@command('debugwaitonrepack', [
    ], _('hg debugwaitonrepack'))
def debugwaitonrepack(ui, repo, **opts):
    return debugcommands.debugwaitonrepack(repo)

@command('debugwaitonprefetch', [
    ], _('hg debugwaitonprefetch'))
def debugwaitonprefetch(ui, repo, **opts):
    return debugcommands.debugwaitonprefetch(repo)

def resolveprefetchopts(ui, opts):
    if not opts.get('rev'):
        revset = ['.', 'draft()']

        prefetchrevset = ui.config('remotefilelog', 'pullprefetch', None)
        if prefetchrevset:
            revset.append('(%s)' % prefetchrevset)
        bgprefetchrevs = ui.config('remotefilelog', 'bgprefetchrevs', None)
        if bgprefetchrevs:
            revset.append('(%s)' % bgprefetchrevs)
        revset = '+'.join(revset)
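        # e.g. with pullprefetch set to 'master' (an illustrative value) and
        # bgprefetchrevs unset, revset is now '.+draft()+(master)'; the date
        # limit is applied below.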
       
        # update a revset with a date limit
        revset = revdatelimit(ui, revset)

        opts['rev'] = [revset]

    if not opts.get('base'):
        opts['base'] = None

    return opts

@command('prefetch', [
    ('r', 'rev', [], _('prefetch the specified revisions'), _('REV')),
    ('', 'repack', False, _('run repack after prefetch')),
    ('b', 'base', '', _("rev that is assumed to already be local")),
    ] + commands.walkopts, _('hg prefetch [OPTIONS] [FILE...]'))
def prefetch(ui, repo, *pats, **opts):
    """prefetch file revisions from the server

    Prefetches file revisions for the specified revs and stores them in the
    local remotefilelog cache.  If no rev is specified, the default rev is
    used, which is the union of dot, draft, pullprefetch and bgprefetchrevs.
    File names or patterns can be used to limit which files are downloaded.

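    For example, ``hg prefetch -r 'draft()' --repack`` (an illustrative
    invocation) downloads the file contents for all draft commits and then
    kicks off a background repack of the local packs.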
       
    Return 0 on success.
    """
    if shallowrepo.requirement not in repo.requirements:
        raise error.Abort(_("repo is not shallow"))

    opts = resolveprefetchopts(ui, opts)
    revs = scmutil.revrange(repo, opts.get('rev'))
    repo.prefetch(revs, opts.get('base'), pats, opts)

    # Run repack in background
    if opts.get('repack'):
        repackmod.backgroundrepack(repo, incremental=True)

@command('repack', [
    ('', 'background', None, _('run in a background process'), None),
    ('', 'incremental', None, _('do an incremental repack'), None),
    ('', 'packsonly', None, _('only repack packs (skip loose objects)'), None),
    ], _('hg repack [OPTIONS]'))
def repack_(ui, repo, *pats, **opts):
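    # --background runs the repack in a separate process, --incremental
    # rewrites only a subset of the pack files, and --packsonly skips
    # repacking loose objects (see the option table above).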
       
    if opts.get('background'):
        repackmod.backgroundrepack(repo, incremental=opts.get('incremental'),
                                   packsonly=opts.get('packsonly', False))
        return

    options = {'packsonly': opts.get('packsonly')}

    try:
        if opts.get('incremental'):
            repackmod.incrementalrepack(repo, options=options)
        else:
            repackmod.fullrepack(repo, options=options)
    except repackmod.RepackAlreadyRunning as ex:
        # Don't propagate the exception if the repack is already in
        # progress, since we want the command to exit 0.
        repo.ui.warn('%s\n' % ex)