comparison hgext/remotefilelog/__init__.py @ 43076:2372284d9457

formatting: blacken the codebase

This is using my patch to black (https://github.com/psf/black/pull/826) so we
don't un-wrap collection literals.

Done with:

hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' | xargs black -S

# skip-blame mass-reformatting only

# no-check-commit reformats foo_bar functions

Differential Revision: https://phab.mercurial-scm.org/D6971
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:45:02 -0400
parents 2c74337e6483
children 687b865b95ad
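The black patch referenced in the description (psf/black#826) is what later shipped as black's "magic trailing comma": a call or collection literal that already ends in a trailing comma stays exploded, one element per line, instead of being collapsed back onto a single line. That is why the multi-line configitem(...) and @command(...) tables in the diff below keep their vertical layout. Here is a minimal sketch of the effect through black's format_str API, assuming a modern black release where this behavior is built in; the -S flag from the command above corresponds to string_normalization=False:

import black

# The trailing comma after the last entry marks this dict as deliberately
# exploded; with the magic trailing comma, black keeps it one key per line
# even though it would fit on a single line.
SRC = "config = {\n    'cachelimit': '1000 GB',\n}\n"

# string_normalization=False is the API spelling of the `black -S` flag
# used in the commit message above.
mode = black.FileMode(string_normalization=False)
print(black.format_str(SRC, mode=mode))

Dropping the trailing comma from SRC makes black collapse the dict onto one line, which is exactly the un-wrapping the patch was written to avoid.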
comparing 43075:57875cf423c9 with 43076:2372284d9457
@@ -192,12 +192,16 @@
 configitem('remotefilelog', 'cachegroup', default=None)
 configitem('remotefilelog', 'cacheprocess', default=None)
 configitem('remotefilelog', 'cacheprocess.includepath', default=None)
 configitem("remotefilelog", "cachelimit", default="1000 GB")
 
-configitem('remotefilelog', 'fallbackpath', default=configitems.dynamicdefault,
-           alias=[('remotefilelog', 'fallbackrepo')])
+configitem(
+    'remotefilelog',
+    'fallbackpath',
+    default=configitems.dynamicdefault,
+    alias=[('remotefilelog', 'fallbackrepo')],
+)
 
 configitem('remotefilelog', 'validatecachelog', default=None)
 configitem('remotefilelog', 'validatecache', default='on')
 configitem('remotefilelog', 'server', default=None)
 configitem('remotefilelog', 'servercachepath', default=None)
@@ -229,12 +233,11 @@
 # default TTL limit is 30 days
 _defaultlimit = 60 * 60 * 24 * 30
 configitem('remotefilelog', 'nodettl', default=_defaultlimit)
 
 configitem('remotefilelog', 'data.gencountlimit', default=2),
-configitem('remotefilelog', 'data.generations',
-           default=['1GB', '100MB', '1MB'])
+configitem('remotefilelog', 'data.generations', default=['1GB', '100MB', '1MB'])
 configitem('remotefilelog', 'data.maxrepackpacks', default=50)
 configitem('remotefilelog', 'data.repackmaxpacksize', default='4GB')
 configitem('remotefilelog', 'data.repacksizelimit', default='100MB')
 
 configitem('remotefilelog', 'history.gencountlimit', default=2),
@@ -252,34 +255,43 @@
 repoclass = localrepo.localrepository
 repoclass._basesupported.add(constants.SHALLOWREPO_REQUIREMENT)
 
 isenabled = shallowutil.isenabled
 
+
 def uisetup(ui):
     """Wraps user facing Mercurial commands to swap them out with shallow
     versions.
     """
     hg.wirepeersetupfuncs.append(fileserverclient.peersetup)
 
     entry = extensions.wrapcommand(commands.table, 'clone', cloneshallow)
-    entry[1].append(('', 'shallow', None,
-                     _("create a shallow clone which uses remote file "
-                       "history")))
+    entry[1].append(
+        (
+            '',
+            'shallow',
+            None,
+            _("create a shallow clone which uses remote file " "history"),
+        )
+    )
 
-    extensions.wrapcommand(commands.table, 'debugindex',
-                           debugcommands.debugindex)
-    extensions.wrapcommand(commands.table, 'debugindexdot',
-                           debugcommands.debugindexdot)
+    extensions.wrapcommand(
+        commands.table, 'debugindex', debugcommands.debugindex
+    )
+    extensions.wrapcommand(
+        commands.table, 'debugindexdot', debugcommands.debugindexdot
+    )
     extensions.wrapcommand(commands.table, 'log', log)
     extensions.wrapcommand(commands.table, 'pull', pull)
 
     # Prevent 'hg manifest --all'
     def _manifest(orig, ui, repo, *args, **opts):
-        if (isenabled(repo) and opts.get(r'all')):
+        if isenabled(repo) and opts.get(r'all'):
             raise error.Abort(_("--all is not supported in a shallow repo"))
 
         return orig(ui, repo, *args, **opts)
+
     extensions.wrapcommand(commands.table, "manifest", _manifest)
 
     # Wrap remotefilelog with lfs code
     def _lfsloaded(loaded=False):
         lfsmod = None
@@ -288,29 +300,33 @@
         except KeyError:
             pass
         if lfsmod:
             lfsmod.wrapfilelog(remotefilelog.remotefilelog)
             fileserverclient._lfsmod = lfsmod
+
     extensions.afterloaded('lfs', _lfsloaded)
 
     # debugdata needs remotefilelog.len to work
     extensions.wrapcommand(commands.table, 'debugdata', debugdatashallow)
 
     changegroup.cgpacker = shallowbundle.shallowcg1packer
 
-    extensions.wrapfunction(changegroup, '_addchangegroupfiles',
-                            shallowbundle.addchangegroupfiles)
-    extensions.wrapfunction(
-        changegroup, 'makechangegroup', shallowbundle.makechangegroup)
+    extensions.wrapfunction(
+        changegroup, '_addchangegroupfiles', shallowbundle.addchangegroupfiles
+    )
+    extensions.wrapfunction(
+        changegroup, 'makechangegroup', shallowbundle.makechangegroup
+    )
     extensions.wrapfunction(localrepo, 'makestore', storewrapper)
     extensions.wrapfunction(exchange, 'pull', exchangepull)
     extensions.wrapfunction(merge, 'applyupdates', applyupdates)
     extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
     extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
     extensions.wrapfunction(scmutil, '_findrenames', findrenames)
-    extensions.wrapfunction(copies, '_computeforwardmissing',
-                            computeforwardmissing)
+    extensions.wrapfunction(
+        copies, '_computeforwardmissing', computeforwardmissing
+    )
     extensions.wrapfunction(dispatch, 'runcommand', runcommand)
     extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
     extensions.wrapfunction(context.changectx, 'filectx', filectx)
     extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
     extensions.wrapfunction(patch, 'trydiff', trydiff)
@@ -325,29 +341,33 @@
 
 
 def cloneshallow(orig, ui, repo, *args, **opts):
     if opts.get(r'shallow'):
         repos = []
+
         def pull_shallow(orig, self, *args, **kwargs):
             if not isenabled(self):
                 repos.append(self.unfiltered())
                 # set up the client hooks so the post-clone update works
                 setupclient(self.ui, self.unfiltered())
 
                 # setupclient fixed the class on the repo itself
                 # but we also need to fix it on the repoview
                 if isinstance(self, repoview.repoview):
-                    self.__class__.__bases__ = (self.__class__.__bases__[0],
-                                                self.unfiltered().__class__)
+                    self.__class__.__bases__ = (
+                        self.__class__.__bases__[0],
+                        self.unfiltered().__class__,
+                    )
                 self.requirements.add(constants.SHALLOWREPO_REQUIREMENT)
                 self._writerequirements()
 
                 # Since setupclient hadn't been called, exchange.pull was not
                 # wrapped. So we need to manually invoke our version of it.
                 return exchangepull(orig, self, *args, **kwargs)
             else:
                 return orig(self, *args, **kwargs)
+
         extensions.wrapfunction(exchange, 'pull', pull_shallow)
 
         # Wrap the stream logic to add requirements and to pass include/exclude
         # patterns around.
         def setup_streamout(repo, remote):
@@ -362,50 +382,58 @@
                     if repo.excludepattern:
                         opts[r'excludepattern'] = '\0'.join(repo.excludepattern)
                     return remote._callstream('stream_out_shallow', **opts)
                 else:
                     return orig()
+
             extensions.wrapfunction(remote, 'stream_out', stream_out_shallow)
+
         def stream_wrap(orig, op):
             setup_streamout(op.repo, op.remote)
             return orig(op)
+
         extensions.wrapfunction(
-            streamclone, 'maybeperformlegacystreamclone', stream_wrap)
+            streamclone, 'maybeperformlegacystreamclone', stream_wrap
+        )
 
         def canperformstreamclone(orig, pullop, bundle2=False):
             # remotefilelog is currently incompatible with the
             # bundle2 flavor of streamclones, so force us to use
             # v1 instead.
             if 'v2' in pullop.remotebundle2caps.get('stream', []):
                 pullop.remotebundle2caps['stream'] = [
-                    c for c in pullop.remotebundle2caps['stream']
-                    if c != 'v2']
+                    c for c in pullop.remotebundle2caps['stream'] if c != 'v2'
+                ]
             if bundle2:
                 return False, None
             supported, requirements = orig(pullop, bundle2=bundle2)
             if requirements is not None:
                 requirements.add(constants.SHALLOWREPO_REQUIREMENT)
             return supported, requirements
+
         extensions.wrapfunction(
-            streamclone, 'canperformstreamclone', canperformstreamclone)
+            streamclone, 'canperformstreamclone', canperformstreamclone
+        )
 
     try:
         orig(ui, repo, *args, **opts)
     finally:
         if opts.get(r'shallow'):
            for r in repos:
                if util.safehasattr(r, 'fileservice'):
                    r.fileservice.close()
 
+
 def debugdatashallow(orig, *args, **kwds):
     oldlen = remotefilelog.remotefilelog.__len__
     try:
         remotefilelog.remotefilelog.__len__ = lambda x: 1
         return orig(*args, **kwds)
     finally:
         remotefilelog.remotefilelog.__len__ = oldlen
 
+
 def reposetup(ui, repo):
     if not repo.local():
         return
 
     # put here intentionally bc doesnt work in uisetup
@@ -421,10 +449,11 @@
     if isshallowclient:
         setupclient(ui, repo)
 
     if isserverenabled:
         remotefilelogserver.setupserver(ui, repo)
+
 
 def setupclient(ui, repo):
     if not isinstance(repo, localrepo.localrepository):
         return
 
@@ -434,33 +463,37 @@
     onetimeclientsetup(ui)
 
     shallowrepo.wraprepo(repo)
     repo.store = shallowstore.wrapstore(repo.store)
 
+
 def storewrapper(orig, requirements, path, vfstype):
     s = orig(requirements, path, vfstype)
     if constants.SHALLOWREPO_REQUIREMENT in requirements:
         s = shallowstore.wrapstore(s)
 
     return s
 
+
 # prefetch files before update
-def applyupdates(orig, repo, actions, wctx, mctx, overwrite, wantfiledata,
-                 labels=None):
+def applyupdates(
+    orig, repo, actions, wctx, mctx, overwrite, wantfiledata, labels=None
+):
     if isenabled(repo):
         manifest = mctx.manifest()
         files = []
         for f, args, msg in actions['g']:
             files.append((f, hex(manifest[f])))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(files)
-    return orig(repo, actions, wctx, mctx, overwrite, wantfiledata,
-                labels=labels)
+    return orig(
+        repo, actions, wctx, mctx, overwrite, wantfiledata, labels=labels
+    )
+
 
 # Prefetch merge checkunknownfiles
-def checkunknownfiles(orig, repo, wctx, mctx, force, actions,
-                      *args, **kwargs):
+def checkunknownfiles(orig, repo, wctx, mctx, force, actions, *args, **kwargs):
     if isenabled(repo):
         files = []
         sparsematch = repo.maybesparsematch(mctx.rev())
         for f, (m, actionargs, msg) in actions.iteritems():
             if sparsematch and not sparsematch(f):
@@ -472,10 +505,11 @@
                 files.append((f2, hex(mctx.filenode(f2))))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(files)
     return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
 
+
 # Prefetch files before status attempts to look at their size and contents
 def checklookup(orig, self, files):
     repo = self._repo
     if isenabled(repo):
         prefetchfiles = []
@@ -485,10 +519,11 @@
                     prefetchfiles.append((f, hex(parent.filenode(f))))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(prefetchfiles)
     return orig(self, files)
 
+
 # Prefetch the logic that compares added and removed files for renames
 def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
     if isenabled(repo):
         files = []
         pmf = repo['.'].manifest()
@@ -497,10 +532,11 @@
                 files.append((f, hex(pmf[f])))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(files)
     return orig(repo, matcher, added, removed, *args, **kwargs)
 
+
 # prefetch files before pathcopies check
 def computeforwardmissing(orig, a, b, match=None):
     missing = orig(a, b, match=match)
     repo = a._repo
     if isenabled(repo):
@@ -517,10 +553,11 @@
             missing = sparsemissing
 
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(files)
     return missing
+
 
 # close cache miss server connection after the command has finished
 def runcommand(orig, lui, repo, *args, **kwargs):
     fileservice = None
     # repo can be None when running in chg:
@@ -532,35 +569,52 @@
         return orig(lui, repo, *args, **kwargs)
     finally:
         if fileservice:
             fileservice.close()
 
+
 # prevent strip from stripping remotefilelogs
 def _collectbrokencsets(orig, repo, files, striprev):
     if isenabled(repo):
         files = list([f for f in files if not repo.shallowmatch(f)])
     return orig(repo, files, striprev)
 
+
 # changectx wrappers
 def filectx(orig, self, path, fileid=None, filelog=None):
     if fileid is None:
         fileid = self.filenode(path)
-    if (isenabled(self._repo) and self._repo.shallowmatch(path)):
-        return remotefilectx.remotefilectx(self._repo, path, fileid=fileid,
-                                           changectx=self, filelog=filelog)
+    if isenabled(self._repo) and self._repo.shallowmatch(path):
+        return remotefilectx.remotefilectx(
+            self._repo, path, fileid=fileid, changectx=self, filelog=filelog
+        )
     return orig(self, path, fileid=fileid, filelog=filelog)
 
+
 def workingfilectx(orig, self, path, filelog=None):
-    if (isenabled(self._repo) and self._repo.shallowmatch(path)):
-        return remotefilectx.remoteworkingfilectx(self._repo, path,
-                                                  workingctx=self,
-                                                  filelog=filelog)
+    if isenabled(self._repo) and self._repo.shallowmatch(path):
+        return remotefilectx.remoteworkingfilectx(
+            self._repo, path, workingctx=self, filelog=filelog
+        )
     return orig(self, path, filelog=filelog)
 
+
 # prefetch required revisions before a diff
-def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
-            copy, getfilectx, *args, **kwargs):
+def trydiff(
+    orig,
+    repo,
+    revs,
+    ctx1,
+    ctx2,
+    modified,
+    added,
+    removed,
+    copy,
+    getfilectx,
+    *args,
+    **kwargs
+):
     if isenabled(repo):
         prefetch = []
         mf1 = ctx1.manifest()
         for fname in modified + added + removed:
             if fname in mf1:
@@ -573,12 +627,24 @@
                 if fnode:
                     prefetch.append((fname, hex(fnode)))
 
         repo.fileservice.prefetch(prefetch)
 
-    return orig(repo, revs, ctx1, ctx2, modified, added, removed, copy,
-                getfilectx, *args, **kwargs)
+    return orig(
+        repo,
+        revs,
+        ctx1,
+        ctx2,
+        modified,
+        added,
+        removed,
+        copy,
+        getfilectx,
+        *args,
+        **kwargs
+    )
+
 
 # Prevent verify from processing files
 # a stub for mercurial.hg.verify()
 def _verify(orig, repo, level=None):
     lock = repo.lock()
@@ -587,10 +653,12 @@
     finally:
         lock.release()
 
 
 clientonetime = False
+
+
 def onetimeclientsetup(ui):
     global clientonetime
     if clientonetime:
         return
     clientonetime = True
@@ -598,22 +666,57 @@
     # Don't commit filelogs until we know the commit hash, since the hash
     # is present in the filelog blob.
     # This violates Mercurial's filelog->manifest->changelog write order,
     # but is generally fine for client repos.
     pendingfilecommits = []
-    def addrawrevision(orig, self, rawtext, transaction, link, p1, p2, node,
-                       flags, cachedelta=None, _metatuple=None):
+
+    def addrawrevision(
+        orig,
+        self,
+        rawtext,
+        transaction,
+        link,
+        p1,
+        p2,
+        node,
+        flags,
+        cachedelta=None,
+        _metatuple=None,
+    ):
         if isinstance(link, int):
             pendingfilecommits.append(
-                (self, rawtext, transaction, link, p1, p2, node, flags,
-                 cachedelta, _metatuple))
+                (
+                    self,
+                    rawtext,
+                    transaction,
+                    link,
+                    p1,
+                    p2,
+                    node,
+                    flags,
+                    cachedelta,
+                    _metatuple,
+                )
+            )
             return node
         else:
-            return orig(self, rawtext, transaction, link, p1, p2, node, flags,
-                        cachedelta, _metatuple=_metatuple)
+            return orig(
+                self,
+                rawtext,
+                transaction,
+                link,
+                p1,
+                p2,
+                node,
+                flags,
+                cachedelta,
+                _metatuple=_metatuple,
+            )
+
     extensions.wrapfunction(
-        remotefilelog.remotefilelog, 'addrawrevision', addrawrevision)
+        remotefilelog.remotefilelog, 'addrawrevision', addrawrevision
+    )
 
     def changelogadd(orig, self, *args):
         oldlen = len(self)
         node = orig(self, *args)
         newlen = len(self)
@@ -623,21 +726,25 @@
                 linknode = self.node(link)
                 if linknode == node:
                     log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
                 else:
                     raise error.ProgrammingError(
-                        'pending multiple integer revisions are not supported')
+                        'pending multiple integer revisions are not supported'
+                    )
         else:
             # "link" is actually wrong here (it is set to len(changelog))
             # if changelog remains unchanged, skip writing file revisions
             # but still do a sanity check about pending multiple revisions
             if len(set(x[3] for x in pendingfilecommits)) > 1:
                 raise error.ProgrammingError(
-                    'pending multiple integer revisions are not supported')
+                    'pending multiple integer revisions are not supported'
+                )
         del pendingfilecommits[:]
         return node
+
     extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
+
 
 def getrenamedfn(orig, repo, endrev=None):
     if not isenabled(repo) or copies.usechangesetcentricalgo(repo):
         return orig(repo, endrev)
 
@@ -663,10 +770,11 @@
         except error.LookupError:
             return None
 
     return getrenamed
 
+
 def walkfilerevs(orig, repo, match, follow, revs, fncache):
     if not isenabled(repo):
         return orig(repo, match, follow, revs, fncache)
 
     # remotefilelog's can't be walked in rev order, so throw.
@@ -678,12 +786,14 @@
     minrev, maxrev = min(revs), max(revs)
 
     pctx = repo['.']
     for filename in match.files():
         if filename not in pctx:
-            raise error.Abort(_('cannot follow file not in parent '
-                                'revision: "%s"') % filename)
+            raise error.Abort(
+                _('cannot follow file not in parent ' 'revision: "%s"')
+                % filename
+            )
         fctx = pctx[filename]
 
         linkrev = fctx.linkrev()
         if linkrev >= minrev and linkrev <= maxrev:
             fncache.setdefault(linkrev, []).append(filename)
@@ -695,10 +805,11 @@
             fncache.setdefault(linkrev, []).append(ancestor.path())
             wanted.add(linkrev)
 
     return wanted
 
+
 def filelogrevset(orig, repo, subset, x):
     """``filelog(pattern)``
     Changesets connected to the specified filelog.
 
     For performance reasons, ``filelog()`` does not show every changeset
@@ -709,12 +820,13 @@
     if not isenabled(repo):
         return orig(repo, subset, x)
 
     # i18n: "filelog" is a keyword
     pat = revset.getstring(x, _("filelog requires a pattern"))
-    m = match.match(repo.root, repo.getcwd(), [pat], default='relpath',
-                    ctx=repo[None])
+    m = match.match(
+        repo.root, repo.getcwd(), [pat], default='relpath', ctx=repo[None]
+    )
     s = set()
 
     if not match.patkind(pat):
         # slow
         for r in subset:
@@ -733,10 +845,11 @@
         for actx in fctx.ancestors():
             s.add(actx.linkrev())
 
     return smartset.baseset([r for r in subset if r in s])
 
+
 @command('gc', [], _('hg gc [REPO...]'), norepo=True)
 def gc(ui, *args, **opts):
     '''garbage collect the client and server filelog caches
     '''
     cachepaths = set()
@@ -771,10 +884,11 @@
 
     # gc server cache
     for repo in repos:
         remotefilelogserver.gcserver(ui, repo._repo)
 
+
 def gcclient(ui, cachepath):
     # get list of repos that use this cache
     repospath = os.path.join(cachepath, 'repos')
     if not os.path.exists(repospath):
         ui.warn(_("no known cache at %s\n") % cachepath)
@@ -790,12 +904,13 @@
 
     sharedcache = None
     filesrepacked = False
 
     count = 0
-    progress = ui.makeprogress(_("analyzing repositories"), unit="repos",
-                               total=len(repos))
+    progress = ui.makeprogress(
+        _("analyzing repositories"), unit="repos", total=len(repos)
+    )
     for path in repos:
         progress.update(count)
         count += 1
         try:
             path = ui.expandpath(os.path.normpath(path))
@@ -841,10 +956,11 @@
         sharedcache = repo.sharedstore
 
         # Compute a keepset which is not garbage collected
        def keyfn(fname, fnode):
            return fileserverclient.getcachekey(reponame, fname, hex(fnode))
+
         keepkeys = repackmod.keepset(repo, keyfn=keyfn, lastkeepkeys=keepkeys)
 
     progress.complete()
 
     # write list of valid repos back
@@ -859,10 +975,11 @@
     # prune cache
     if sharedcache is not None:
         sharedcache.gc(keepkeys)
     elif not filesrepacked:
         ui.warn(_("warning: no valid repos in repofile\n"))
+
 
 def log(orig, ui, repo, *pats, **opts):
     if not isenabled(repo):
         return orig(ui, repo, *pats, **opts)
 
@@ -885,14 +1002,19 @@
             if not os.path.isfile(repo.wjoin(file)):
                 isfile = False
                 break
 
         if isfile:
-            ui.warn(_("warning: file log can be slow on large repos - " +
-                      "use -f to speed it up\n"))
+            ui.warn(
+                _(
+                    "warning: file log can be slow on large repos - "
+                    + "use -f to speed it up\n"
+                )
+            )
 
     return orig(ui, repo, *pats, **opts)
+
 
 def revdatelimit(ui, revset):
     """Update revset so that only changesets no older than 'prefetchdays' days
     are included. The default value is set to 14 days. If 'prefetchdays' is set
     to zero or negative value then date restriction is not applied.
@@ -900,10 +1022,11 @@
     days = ui.configint('remotefilelog', 'prefetchdays')
     if days > 0:
         revset = '(%s) & date(-%s)' % (revset, days)
     return revset
 
+
 def readytofetch(repo):
     """Check that enough time has passed since the last background prefetch.
     This only relates to prefetches after operations that change the working
     copy parent. Default delay between background prefetches is 2 minutes.
     """
@@ -918,10 +1041,11 @@
         os.utime(fname, None)
         ready = True
 
     return ready
 
+
 def wcpprefetch(ui, repo, **kwargs):
     """Prefetches in background revisions specified by bgprefetchrevs revset.
     Does background repack if backgroundrepack flag is set in config.
     """
     shallow = isenabled(repo)
@@ -941,10 +1065,11 @@
         repo.ranprefetch = True
         repo.backgroundprefetch(bgprefetchrevs, repack=bgrepack)
 
     repo._afterlock(anon)
 
+
 def pull(orig, ui, repo, *pats, **opts):
     result = orig(ui, repo, *pats, **opts)
 
     if isenabled(repo):
         # prefetch if it's configured
@@ -956,40 +1081,47 @@
         if prefetchrevset:
             ui.status(_("prefetching file contents\n"))
             revs = scmutil.revrange(repo, [prefetchrevset])
             base = repo['.'].rev()
             if bgprefetch:
-                repo.backgroundprefetch(prefetchrevset, repack=bgrepack,
-                                        ensurestart=ensurestart)
+                repo.backgroundprefetch(
+                    prefetchrevset, repack=bgrepack, ensurestart=ensurestart
+                )
             else:
                 repo.prefetch(revs, base=base)
                 if bgrepack:
-                    repackmod.backgroundrepack(repo, incremental=True,
-                                               ensurestart=ensurestart)
+                    repackmod.backgroundrepack(
+                        repo, incremental=True, ensurestart=ensurestart
+                    )
         elif bgrepack:
-            repackmod.backgroundrepack(repo, incremental=True,
-                                       ensurestart=ensurestart)
+            repackmod.backgroundrepack(
+                repo, incremental=True, ensurestart=ensurestart
+            )
 
     return result
+
 
 def exchangepull(orig, repo, remote, *args, **kwargs):
     # Hook into the callstream/getbundle to insert bundle capabilities
     # during a pull.
-    def localgetbundle(orig, source, heads=None, common=None, bundlecaps=None,
-                       **kwargs):
+    def localgetbundle(
+        orig, source, heads=None, common=None, bundlecaps=None, **kwargs
+    ):
         if not bundlecaps:
             bundlecaps = set()
         bundlecaps.add(constants.BUNDLE2_CAPABLITY)
-        return orig(source, heads=heads, common=common, bundlecaps=bundlecaps,
-                    **kwargs)
+        return orig(
+            source, heads=heads, common=common, bundlecaps=bundlecaps, **kwargs
+        )
 
     if util.safehasattr(remote, '_callstream'):
         remote._localrepo = repo
     elif util.safehasattr(remote, 'getbundle'):
         extensions.wrapfunction(remote, 'getbundle', localgetbundle)
 
     return orig(repo, remote, *args, **kwargs)
+
 
 def _fileprefetchhook(repo, revs, match):
     if isenabled(repo):
         allfiles = []
         for rev in revs:
@@ -1001,52 +1133,68 @@
             for path in ctx.walk(match):
                 if (not sparsematch or sparsematch(path)) and path in mf:
                     allfiles.append((path, hex(mf[path])))
         repo.fileservice.prefetch(allfiles)
 
-@command('debugremotefilelog', [
-    ('d', 'decompress', None, _('decompress the filelog first')),
-    ], _('hg debugremotefilelog <path>'), norepo=True)
+
+@command(
+    'debugremotefilelog',
+    [('d', 'decompress', None, _('decompress the filelog first')),],
+    _('hg debugremotefilelog <path>'),
+    norepo=True,
+)
 def debugremotefilelog(ui, path, **opts):
     return debugcommands.debugremotefilelog(ui, path, **opts)
 
-@command('verifyremotefilelog', [
-    ('d', 'decompress', None, _('decompress the filelogs first')),
-    ], _('hg verifyremotefilelogs <directory>'), norepo=True)
+
+@command(
+    'verifyremotefilelog',
+    [('d', 'decompress', None, _('decompress the filelogs first')),],
+    _('hg verifyremotefilelogs <directory>'),
+    norepo=True,
+)
 def verifyremotefilelog(ui, path, **opts):
     return debugcommands.verifyremotefilelog(ui, path, **opts)
 
-@command('debugdatapack', [
-    ('', 'long', None, _('print the long hashes')),
-    ('', 'node', '', _('dump the contents of node'), 'NODE'),
-    ], _('hg debugdatapack <paths>'), norepo=True)
+
+@command(
+    'debugdatapack',
+    [
+        ('', 'long', None, _('print the long hashes')),
+        ('', 'node', '', _('dump the contents of node'), 'NODE'),
+    ],
+    _('hg debugdatapack <paths>'),
+    norepo=True,
+)
 def debugdatapack(ui, *paths, **opts):
     return debugcommands.debugdatapack(ui, *paths, **opts)
 
-@command('debughistorypack', [
-    ], _('hg debughistorypack <path>'), norepo=True)
+
+@command('debughistorypack', [], _('hg debughistorypack <path>'), norepo=True)
 def debughistorypack(ui, path, **opts):
     return debugcommands.debughistorypack(ui, path)
 
-@command('debugkeepset', [
-    ], _('hg debugkeepset'))
+
+@command('debugkeepset', [], _('hg debugkeepset'))
 def debugkeepset(ui, repo, **opts):
     # The command is used to measure keepset computation time
     def keyfn(fname, fnode):
         return fileserverclient.getcachekey(repo.name, fname, hex(fnode))
+
     repackmod.keepset(repo, keyfn)
     return
 
-@command('debugwaitonrepack', [
-    ], _('hg debugwaitonrepack'))
+
+@command('debugwaitonrepack', [], _('hg debugwaitonrepack'))
 def debugwaitonrepack(ui, repo, **opts):
     return debugcommands.debugwaitonrepack(repo)
 
-@command('debugwaitonprefetch', [
-    ], _('hg debugwaitonprefetch'))
+
+@command('debugwaitonprefetch', [], _('hg debugwaitonprefetch'))
 def debugwaitonprefetch(ui, repo, **opts):
     return debugcommands.debugwaitonprefetch(repo)
+
 
 def resolveprefetchopts(ui, opts):
     if not opts.get('rev'):
         revset = ['.', 'draft()']
 
@@ -1066,15 +1214,21 @@
     if not opts.get('base'):
         opts['base'] = None
 
     return opts
 
-@command('prefetch', [
-    ('r', 'rev', [], _('prefetch the specified revisions'), _('REV')),
-    ('', 'repack', False, _('run repack after prefetch')),
-    ('b', 'base', '', _("rev that is assumed to already be local")),
-    ] + commands.walkopts, _('hg prefetch [OPTIONS] [FILE...]'))
+
+@command(
+    'prefetch',
+    [
+        ('r', 'rev', [], _('prefetch the specified revisions'), _('REV')),
+        ('', 'repack', False, _('run repack after prefetch')),
+        ('b', 'base', '', _("rev that is assumed to already be local")),
+    ]
+    + commands.walkopts,
+    _('hg prefetch [OPTIONS] [FILE...]'),
+)
 def prefetch(ui, repo, *pats, **opts):
     """prefetch file revisions from the server
 
     Prefetchs file revisions for the specified revs and stores them in the
     local remotefilelog cache. If no rev is specified, the default rev is
@@ -1093,24 +1247,39 @@
 
     ensurestart = repo.ui.configbool('devel', 'remotefilelog.ensurestart')
 
     # Run repack in background
     if opts.get('repack'):
-        repackmod.backgroundrepack(repo, incremental=True,
-                                   ensurestart=ensurestart)
+        repackmod.backgroundrepack(
+            repo, incremental=True, ensurestart=ensurestart
+        )
 
-@command('repack', [
-    ('', 'background', None, _('run in a background process'), None),
-    ('', 'incremental', None, _('do an incremental repack'), None),
-    ('', 'packsonly', None, _('only repack packs (skip loose objects)'), None),
-    ], _('hg repack [OPTIONS]'))
+
+@command(
+    'repack',
+    [
+        ('', 'background', None, _('run in a background process'), None),
+        ('', 'incremental', None, _('do an incremental repack'), None),
+        (
+            '',
+            'packsonly',
+            None,
+            _('only repack packs (skip loose objects)'),
+            None,
+        ),
+    ],
+    _('hg repack [OPTIONS]'),
+)
 def repack_(ui, repo, *pats, **opts):
     if opts.get(r'background'):
         ensurestart = repo.ui.configbool('devel', 'remotefilelog.ensurestart')
-        repackmod.backgroundrepack(repo, incremental=opts.get(r'incremental'),
-                                   packsonly=opts.get(r'packsonly', False),
-                                   ensurestart=ensurestart)
+        repackmod.backgroundrepack(
+            repo,
+            incremental=opts.get(r'incremental'),
+            packsonly=opts.get(r'packsonly', False),
+            ensurestart=ensurestart,
+        )
         return
 
     options = {'packsonly': opts.get(r'packsonly')}
 
     try: