comparison hgext/remotefilelog/__init__.py @ 43077:687b865b95ad

formatting: byteify all mercurial/ and hgext/ string literals

Done with

python3.7 contrib/byteify-strings.py -i $(hg files 'set:mercurial/**.py - mercurial/thirdparty/** + hgext/**.py - hgext/fsmonitor/pywatchman/** - mercurial/__init__.py')
black -l 80 -t py33 -S $(hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**" - hgext/fsmonitor/pywatchman/**')

# skip-blame mass-reformatting only

Differential Revision: https://phab.mercurial-scm.org/D6972
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:48:39 -0400
parents 2372284d9457
children eef9a2d67051
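The whole change is mechanical: every unadorned string literal gains a b prefix, and black then re-wraps anything that no longer fits in 80 columns. As a rough illustration of what such a byteify pass does, here is a minimal token-level sketch — this is not the real contrib/byteify-strings.py, which additionally preserves column alignment and special-cases docstrings, kwargs keys, and opt-out comments; untokenizing from (type, string) pairs, as below, may also alter spacing:

# A minimal byteify sketch, NOT the real contrib/byteify-strings.py.
import io
import tokenize


def byteify(source):
    """Return source with every unprefixed string literal made a bytes
    literal (hypothetical helper for illustration only)."""
    toks = []
    for toktype, tokstr, _, _, _ in tokenize.generate_tokens(
        io.StringIO(source).readline
    ):
        # Only rewrite literals that start with a bare quote; b'', r'',
        # f'' and friends are left alone in this sketch.
        if toktype == tokenize.STRING and tokstr[0] in "'\"":
            tokstr = 'b' + tokstr
        toks.append((toktype, tokstr))
    return tokenize.untokenize(toks)


print(byteify("configitem('remotefilelog', 'debug', default=False)\n"))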
diff --git a/hgext/remotefilelog/__init__.py b/hgext/remotefilelog/__init__.py
--- a/hgext/remotefilelog/__init__.py	43076:2372284d9457
+++ b/hgext/remotefilelog/__init__.py	43077:687b865b95ad
@@ -183,76 +183,78 @@
 command = registrar.command(cmdtable)
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('remotefilelog', 'debug', default=False)
+configitem(b'remotefilelog', b'debug', default=False)
 
-configitem('remotefilelog', 'reponame', default='')
-configitem('remotefilelog', 'cachepath', default=None)
-configitem('remotefilelog', 'cachegroup', default=None)
-configitem('remotefilelog', 'cacheprocess', default=None)
-configitem('remotefilelog', 'cacheprocess.includepath', default=None)
-configitem("remotefilelog", "cachelimit", default="1000 GB")
+configitem(b'remotefilelog', b'reponame', default=b'')
+configitem(b'remotefilelog', b'cachepath', default=None)
+configitem(b'remotefilelog', b'cachegroup', default=None)
+configitem(b'remotefilelog', b'cacheprocess', default=None)
+configitem(b'remotefilelog', b'cacheprocess.includepath', default=None)
+configitem(b"remotefilelog", b"cachelimit", default=b"1000 GB")
 
 configitem(
-    'remotefilelog',
-    'fallbackpath',
+    b'remotefilelog',
+    b'fallbackpath',
     default=configitems.dynamicdefault,
-    alias=[('remotefilelog', 'fallbackrepo')],
+    alias=[(b'remotefilelog', b'fallbackrepo')],
 )
 
-configitem('remotefilelog', 'validatecachelog', default=None)
-configitem('remotefilelog', 'validatecache', default='on')
-configitem('remotefilelog', 'server', default=None)
-configitem('remotefilelog', 'servercachepath', default=None)
-configitem("remotefilelog", "serverexpiration", default=30)
-configitem('remotefilelog', 'backgroundrepack', default=False)
-configitem('remotefilelog', 'bgprefetchrevs', default=None)
-configitem('remotefilelog', 'pullprefetch', default=None)
-configitem('remotefilelog', 'backgroundprefetch', default=False)
-configitem('remotefilelog', 'prefetchdelay', default=120)
-configitem('remotefilelog', 'prefetchdays', default=14)
+configitem(b'remotefilelog', b'validatecachelog', default=None)
+configitem(b'remotefilelog', b'validatecache', default=b'on')
+configitem(b'remotefilelog', b'server', default=None)
+configitem(b'remotefilelog', b'servercachepath', default=None)
+configitem(b"remotefilelog", b"serverexpiration", default=30)
+configitem(b'remotefilelog', b'backgroundrepack', default=False)
+configitem(b'remotefilelog', b'bgprefetchrevs', default=None)
+configitem(b'remotefilelog', b'pullprefetch', default=None)
+configitem(b'remotefilelog', b'backgroundprefetch', default=False)
+configitem(b'remotefilelog', b'prefetchdelay', default=120)
+configitem(b'remotefilelog', b'prefetchdays', default=14)
 
-configitem('remotefilelog', 'getfilesstep', default=10000)
-configitem('remotefilelog', 'getfilestype', default='optimistic')
-configitem('remotefilelog', 'batchsize', configitems.dynamicdefault)
-configitem('remotefilelog', 'fetchwarning', default='')
+configitem(b'remotefilelog', b'getfilesstep', default=10000)
+configitem(b'remotefilelog', b'getfilestype', default=b'optimistic')
+configitem(b'remotefilelog', b'batchsize', configitems.dynamicdefault)
+configitem(b'remotefilelog', b'fetchwarning', default=b'')
 
-configitem('remotefilelog', 'includepattern', default=None)
-configitem('remotefilelog', 'excludepattern', default=None)
+configitem(b'remotefilelog', b'includepattern', default=None)
+configitem(b'remotefilelog', b'excludepattern', default=None)
 
-configitem('remotefilelog', 'gcrepack', default=False)
-configitem('remotefilelog', 'repackonhggc', default=False)
-configitem('repack', 'chainorphansbysize', default=True, experimental=True)
+configitem(b'remotefilelog', b'gcrepack', default=False)
+configitem(b'remotefilelog', b'repackonhggc', default=False)
+configitem(b'repack', b'chainorphansbysize', default=True, experimental=True)
 
-configitem('packs', 'maxpacksize', default=0)
-configitem('packs', 'maxchainlen', default=1000)
+configitem(b'packs', b'maxpacksize', default=0)
+configitem(b'packs', b'maxchainlen', default=1000)
 
-configitem('devel', 'remotefilelog.ensurestart', default=False)
+configitem(b'devel', b'remotefilelog.ensurestart', default=False)
 
 # default TTL limit is 30 days
 _defaultlimit = 60 * 60 * 24 * 30
-configitem('remotefilelog', 'nodettl', default=_defaultlimit)
+configitem(b'remotefilelog', b'nodettl', default=_defaultlimit)
 
-configitem('remotefilelog', 'data.gencountlimit', default=2),
-configitem('remotefilelog', 'data.generations', default=['1GB', '100MB', '1MB'])
-configitem('remotefilelog', 'data.maxrepackpacks', default=50)
-configitem('remotefilelog', 'data.repackmaxpacksize', default='4GB')
-configitem('remotefilelog', 'data.repacksizelimit', default='100MB')
+configitem(b'remotefilelog', b'data.gencountlimit', default=2),
+configitem(
+    b'remotefilelog', b'data.generations', default=[b'1GB', b'100MB', b'1MB']
+)
+configitem(b'remotefilelog', b'data.maxrepackpacks', default=50)
+configitem(b'remotefilelog', b'data.repackmaxpacksize', default=b'4GB')
+configitem(b'remotefilelog', b'data.repacksizelimit', default=b'100MB')
 
-configitem('remotefilelog', 'history.gencountlimit', default=2),
-configitem('remotefilelog', 'history.generations', default=['100MB'])
-configitem('remotefilelog', 'history.maxrepackpacks', default=50)
-configitem('remotefilelog', 'history.repackmaxpacksize', default='400MB')
-configitem('remotefilelog', 'history.repacksizelimit', default='100MB')
+configitem(b'remotefilelog', b'history.gencountlimit', default=2),
+configitem(b'remotefilelog', b'history.generations', default=[b'100MB'])
+configitem(b'remotefilelog', b'history.maxrepackpacks', default=50)
+configitem(b'remotefilelog', b'history.repackmaxpacksize', default=b'400MB')
+configitem(b'remotefilelog', b'history.repacksizelimit', default=b'100MB')
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 repoclass = localrepo.localrepository
 repoclass._basesupported.add(constants.SHALLOWREPO_REQUIREMENT)
 
 isenabled = shallowutil.isenabled
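The motivation for the hunk above, shown standalone (plain Python, not Mercurial code): on Python 3, str and bytes are distinct types that never compare equal, so a config table keyed by bytes tuples is invisible to str lookups — hence every section and key literal gains a b prefix.

# Python 3 str/bytes semantics motivating the b prefixes above.
table = {(b'remotefilelog', b'debug'): False}
print(('remotefilelog', 'debug') in table)    # False: str keys never match
print((b'remotefilelog', b'debug') in table)  # True
print('debug' == b'debug')                    # False: distinct types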
262 """Wraps user facing Mercurial commands to swap them out with shallow 264 """Wraps user facing Mercurial commands to swap them out with shallow
263 versions. 265 versions.
264 """ 266 """
265 hg.wirepeersetupfuncs.append(fileserverclient.peersetup) 267 hg.wirepeersetupfuncs.append(fileserverclient.peersetup)
266 268
267 entry = extensions.wrapcommand(commands.table, 'clone', cloneshallow) 269 entry = extensions.wrapcommand(commands.table, b'clone', cloneshallow)
268 entry[1].append( 270 entry[1].append(
269 ( 271 (
270 '', 272 b'',
271 'shallow', 273 b'shallow',
272 None, 274 None,
273 _("create a shallow clone which uses remote file " "history"), 275 _(b"create a shallow clone which uses remote file " b"history"),
274 ) 276 )
275 ) 277 )
276 278
277 extensions.wrapcommand( 279 extensions.wrapcommand(
278 commands.table, 'debugindex', debugcommands.debugindex 280 commands.table, b'debugindex', debugcommands.debugindex
279 ) 281 )
280 extensions.wrapcommand( 282 extensions.wrapcommand(
281 commands.table, 'debugindexdot', debugcommands.debugindexdot 283 commands.table, b'debugindexdot', debugcommands.debugindexdot
282 ) 284 )
283 extensions.wrapcommand(commands.table, 'log', log) 285 extensions.wrapcommand(commands.table, b'log', log)
284 extensions.wrapcommand(commands.table, 'pull', pull) 286 extensions.wrapcommand(commands.table, b'pull', pull)
285 287
286 # Prevent 'hg manifest --all' 288 # Prevent 'hg manifest --all'
287 def _manifest(orig, ui, repo, *args, **opts): 289 def _manifest(orig, ui, repo, *args, **opts):
288 if isenabled(repo) and opts.get(r'all'): 290 if isenabled(repo) and opts.get(r'all'):
289 raise error.Abort(_("--all is not supported in a shallow repo")) 291 raise error.Abort(_(b"--all is not supported in a shallow repo"))
290 292
291 return orig(ui, repo, *args, **opts) 293 return orig(ui, repo, *args, **opts)
292 294
293 extensions.wrapcommand(commands.table, "manifest", _manifest) 295 extensions.wrapcommand(commands.table, b"manifest", _manifest)
294 296
295 # Wrap remotefilelog with lfs code 297 # Wrap remotefilelog with lfs code
296 def _lfsloaded(loaded=False): 298 def _lfsloaded(loaded=False):
297 lfsmod = None 299 lfsmod = None
298 try: 300 try:
299 lfsmod = extensions.find('lfs') 301 lfsmod = extensions.find(b'lfs')
300 except KeyError: 302 except KeyError:
301 pass 303 pass
302 if lfsmod: 304 if lfsmod:
303 lfsmod.wrapfilelog(remotefilelog.remotefilelog) 305 lfsmod.wrapfilelog(remotefilelog.remotefilelog)
304 fileserverclient._lfsmod = lfsmod 306 fileserverclient._lfsmod = lfsmod
305 307
306 extensions.afterloaded('lfs', _lfsloaded) 308 extensions.afterloaded(b'lfs', _lfsloaded)
307 309
308 # debugdata needs remotefilelog.len to work 310 # debugdata needs remotefilelog.len to work
309 extensions.wrapcommand(commands.table, 'debugdata', debugdatashallow) 311 extensions.wrapcommand(commands.table, b'debugdata', debugdatashallow)
310 312
311 changegroup.cgpacker = shallowbundle.shallowcg1packer 313 changegroup.cgpacker = shallowbundle.shallowcg1packer
312 314
313 extensions.wrapfunction( 315 extensions.wrapfunction(
314 changegroup, '_addchangegroupfiles', shallowbundle.addchangegroupfiles 316 changegroup, b'_addchangegroupfiles', shallowbundle.addchangegroupfiles
315 ) 317 )
316 extensions.wrapfunction( 318 extensions.wrapfunction(
317 changegroup, 'makechangegroup', shallowbundle.makechangegroup 319 changegroup, b'makechangegroup', shallowbundle.makechangegroup
318 ) 320 )
319 extensions.wrapfunction(localrepo, 'makestore', storewrapper) 321 extensions.wrapfunction(localrepo, b'makestore', storewrapper)
320 extensions.wrapfunction(exchange, 'pull', exchangepull) 322 extensions.wrapfunction(exchange, b'pull', exchangepull)
321 extensions.wrapfunction(merge, 'applyupdates', applyupdates) 323 extensions.wrapfunction(merge, b'applyupdates', applyupdates)
322 extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles) 324 extensions.wrapfunction(merge, b'_checkunknownfiles', checkunknownfiles)
323 extensions.wrapfunction(context.workingctx, '_checklookup', checklookup) 325 extensions.wrapfunction(context.workingctx, b'_checklookup', checklookup)
324 extensions.wrapfunction(scmutil, '_findrenames', findrenames) 326 extensions.wrapfunction(scmutil, b'_findrenames', findrenames)
325 extensions.wrapfunction( 327 extensions.wrapfunction(
326 copies, '_computeforwardmissing', computeforwardmissing 328 copies, b'_computeforwardmissing', computeforwardmissing
327 ) 329 )
328 extensions.wrapfunction(dispatch, 'runcommand', runcommand) 330 extensions.wrapfunction(dispatch, b'runcommand', runcommand)
329 extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets) 331 extensions.wrapfunction(repair, b'_collectbrokencsets', _collectbrokencsets)
330 extensions.wrapfunction(context.changectx, 'filectx', filectx) 332 extensions.wrapfunction(context.changectx, b'filectx', filectx)
331 extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx) 333 extensions.wrapfunction(context.workingctx, b'filectx', workingfilectx)
332 extensions.wrapfunction(patch, 'trydiff', trydiff) 334 extensions.wrapfunction(patch, b'trydiff', trydiff)
333 extensions.wrapfunction(hg, 'verify', _verify) 335 extensions.wrapfunction(hg, b'verify', _verify)
334 scmutil.fileprefetchhooks.add('remotefilelog', _fileprefetchhook) 336 scmutil.fileprefetchhooks.add(b'remotefilelog', _fileprefetchhook)
335 337
336 # disappointing hacks below 338 # disappointing hacks below
337 extensions.wrapfunction(scmutil, 'getrenamedfn', getrenamedfn) 339 extensions.wrapfunction(scmutil, b'getrenamedfn', getrenamedfn)
338 extensions.wrapfunction(revset, 'filelog', filelogrevset) 340 extensions.wrapfunction(revset, b'filelog', filelogrevset)
339 revset.symbols['filelog'] = revset.filelog 341 revset.symbols[b'filelog'] = revset.filelog
340 extensions.wrapfunction(cmdutil, 'walkfilerevs', walkfilerevs) 342 extensions.wrapfunction(cmdutil, b'walkfilerevs', walkfilerevs)
341 343
342 344
343 def cloneshallow(orig, ui, repo, *args, **opts): 345 def cloneshallow(orig, ui, repo, *args, **opts):
344 if opts.get(r'shallow'): 346 if opts.get(r'shallow'):
345 repos = [] 347 repos = []
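The uisetup hunk above is one long application of the wrap-and-delegate pattern: the replacement receives the original callable as its first argument and decides when to call through, exactly as pull_shallow does with orig in the hunk below. A plain-Python sketch of the idea — not Mercurial's actual extensions.wrapfunction, which also handles unwrapping and bound methods; the names exchange_like and noisy_pull are invented for illustration:

# Sketch of wrap-and-delegate (not Mercurial's implementation).
def wrapfunction(container, name, wrapper):
    origfn = getattr(container, name)

    def wrapped(*args, **kwargs):
        # Hand the original to the wrapper so it can delegate.
        return wrapper(origfn, *args, **kwargs)

    setattr(container, name, wrapped)


class exchange_like:
    @staticmethod
    def pull(repo):
        return 'pulled %s' % repo


def noisy_pull(orig, repo):
    print('about to pull', repo)
    return orig(repo)


wrapfunction(exchange_like, 'pull', noisy_pull)
print(exchange_like.pull('repo1'))  # prints the note, then 'pulled repo1'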
@@ -364,11 +366,11 @@
                 # wrapped. So we need to manually invoke our version of it.
                 return exchangepull(orig, self, *args, **kwargs)
             else:
                 return orig(self, *args, **kwargs)
 
         extensions.wrapfunction(exchange, 'pull', pull_shallow)
 
         # Wrap the stream logic to add requirements and to pass include/exclude
         # patterns around.
         def setup_streamout(repo, remote):
             # Replace remote.stream_out with a version that sends file
@@ -376,52 +378,56 @@
             def stream_out_shallow(orig):
                 caps = remote.capabilities()
                 if constants.NETWORK_CAP_LEGACY_SSH_GETFILES in caps:
                     opts = {}
                     if repo.includepattern:
-                        opts[r'includepattern'] = '\0'.join(repo.includepattern)
+                        opts[r'includepattern'] = b'\0'.join(
+                            repo.includepattern
+                        )
                     if repo.excludepattern:
-                        opts[r'excludepattern'] = '\0'.join(repo.excludepattern)
-                    return remote._callstream('stream_out_shallow', **opts)
+                        opts[r'excludepattern'] = b'\0'.join(
+                            repo.excludepattern
+                        )
+                    return remote._callstream(b'stream_out_shallow', **opts)
                 else:
                     return orig()
 
-            extensions.wrapfunction(remote, 'stream_out', stream_out_shallow)
+            extensions.wrapfunction(remote, b'stream_out', stream_out_shallow)
 
         def stream_wrap(orig, op):
             setup_streamout(op.repo, op.remote)
             return orig(op)
 
         extensions.wrapfunction(
-            streamclone, 'maybeperformlegacystreamclone', stream_wrap
+            streamclone, b'maybeperformlegacystreamclone', stream_wrap
        )
 
        def canperformstreamclone(orig, pullop, bundle2=False):
            # remotefilelog is currently incompatible with the
            # bundle2 flavor of streamclones, so force us to use
            # v1 instead.
-            if 'v2' in pullop.remotebundle2caps.get('stream', []):
-                pullop.remotebundle2caps['stream'] = [
-                    c for c in pullop.remotebundle2caps['stream'] if c != 'v2'
+            if b'v2' in pullop.remotebundle2caps.get(b'stream', []):
+                pullop.remotebundle2caps[b'stream'] = [
+                    c for c in pullop.remotebundle2caps[b'stream'] if c != b'v2'
                ]
            if bundle2:
                return False, None
            supported, requirements = orig(pullop, bundle2=bundle2)
            if requirements is not None:
                requirements.add(constants.SHALLOWREPO_REQUIREMENT)
            return supported, requirements
 
        extensions.wrapfunction(
-            streamclone, 'canperformstreamclone', canperformstreamclone
+            streamclone, b'canperformstreamclone', canperformstreamclone
        )
 
    try:
        orig(ui, repo, *args, **opts)
    finally:
        if opts.get(r'shallow'):
            for r in repos:
-                if util.safehasattr(r, 'fileservice'):
+                if util.safehasattr(r, b'fileservice'):
                    r.fileservice.close()
 
 
 def debugdatashallow(orig, *args, **kwds):
     oldlen = remotefilelog.remotefilelog.__len__
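One wire-format detail in the stream-clone hunk above: include/exclude pattern lists travel as a single NUL-separated bytes value, which round-trips losslessly as long as no pattern contains a NUL byte:

# Round-trip of the b'\0'.join(...) encoding used by stream_out_shallow.
includepattern = [b'foo/**', b'bar/*.py']
packed = b'\0'.join(includepattern)
print(packed)               # b'foo/**\x00bar/*.py'
print(packed.split(b'\0'))  # back to [b'foo/**', b'bar/*.py']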
@@ -435,18 +441,18 @@
 def reposetup(ui, repo):
     if not repo.local():
         return
 
     # put here intentionally bc doesn't work in uisetup
-    ui.setconfig('hooks', 'update.prefetch', wcpprefetch)
-    ui.setconfig('hooks', 'commit.prefetch', wcpprefetch)
+    ui.setconfig(b'hooks', b'update.prefetch', wcpprefetch)
+    ui.setconfig(b'hooks', b'commit.prefetch', wcpprefetch)
 
-    isserverenabled = ui.configbool('remotefilelog', 'server')
+    isserverenabled = ui.configbool(b'remotefilelog', b'server')
     isshallowclient = isenabled(repo)
 
     if isserverenabled and isshallowclient:
-        raise RuntimeError("Cannot be both a server and shallow client.")
+        raise RuntimeError(b"Cannot be both a server and shallow client.")
 
     if isshallowclient:
         setupclient(ui, repo)
 
     if isserverenabled:
@@ -479,11 +485,11 @@
     orig, repo, actions, wctx, mctx, overwrite, wantfiledata, labels=None
 ):
     if isenabled(repo):
         manifest = mctx.manifest()
         files = []
-        for f, args, msg in actions['g']:
+        for f, args, msg in actions[b'g']:
             files.append((f, hex(manifest[f])))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(files)
     return orig(
         repo, actions, wctx, mctx, overwrite, wantfiledata, labels=labels
@@ -496,13 +502,13 @@
         files = []
         sparsematch = repo.maybesparsematch(mctx.rev())
         for f, (m, actionargs, msg) in actions.iteritems():
             if sparsematch and not sparsematch(f):
                 continue
-            if m in ('c', 'dc', 'cm'):
+            if m in (b'c', b'dc', b'cm'):
                 files.append((f, hex(mctx.filenode(f))))
-            elif m == 'dg':
+            elif m == b'dg':
                 f2 = actionargs[0]
                 files.append((f2, hex(mctx.filenode(f2))))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(files)
     return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
@@ -524,11 +530,11 @@
 
 # Prefetch the logic that compares added and removed files for renames
 def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
     if isenabled(repo):
         files = []
-        pmf = repo['.'].manifest()
+        pmf = repo[b'.'].manifest()
         for f in removed:
             if f in pmf:
                 files.append((f, hex(pmf[f])))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(files)
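All of these prefetch helpers batch (path, hex-node) pairs before a single fileservice call. The hex here is mercurial.node.hex, which at the time of this commit is believed to be a straight alias of binascii.hexlify, so a standalone equivalent of what gets queued looks like this (the alias claim is our assumption, not shown in this diff):

# Assumed equivalent of hex(pmf[f]) above: a 20-byte nodeid becomes a
# 40-byte hex bytestring.
import binascii

nodeid = bytes(range(20))             # stand-in for a 20-byte nodeid
print(binascii.hexlify(nodeid))       # b'000102...10111213'
print(len(binascii.hexlify(nodeid)))  # 40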
@@ -711,11 +717,11 @@
             cachedelta,
             _metatuple=_metatuple,
         )
 
     extensions.wrapfunction(
-        remotefilelog.remotefilelog, 'addrawrevision', addrawrevision
+        remotefilelog.remotefilelog, b'addrawrevision', addrawrevision
     )
 
     def changelogadd(orig, self, *args):
         oldlen = len(self)
         node = orig(self, *args)
@@ -726,24 +732,24 @@
                 linknode = self.node(link)
                 if linknode == node:
                     log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
                 else:
                     raise error.ProgrammingError(
-                        'pending multiple integer revisions are not supported'
+                        b'pending multiple integer revisions are not supported'
                     )
         else:
             # "link" is actually wrong here (it is set to len(changelog))
             # if changelog remains unchanged, skip writing file revisions
             # but still do a sanity check about pending multiple revisions
             if len(set(x[3] for x in pendingfilecommits)) > 1:
                 raise error.ProgrammingError(
-                    'pending multiple integer revisions are not supported'
+                    b'pending multiple integer revisions are not supported'
                 )
         del pendingfilecommits[:]
         return node
 
-    extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
+    extensions.wrapfunction(changelog.changelog, b'add', changelogadd)
 
 
 def getrenamedfn(orig, repo, endrev=None):
     if not isenabled(repo) or copies.usechangesetcentricalgo(repo):
         return orig(repo, endrev)
@@ -778,20 +784,20 @@
         return orig(repo, match, follow, revs, fncache)
 
     # remotefilelogs can't be walked in rev order, so throw.
     # The caller will see the exception and walk the commit tree instead.
     if not follow:
-        raise cmdutil.FileWalkError("Cannot walk via filelog")
+        raise cmdutil.FileWalkError(b"Cannot walk via filelog")
 
     wanted = set()
     minrev, maxrev = min(revs), max(revs)
 
-    pctx = repo['.']
+    pctx = repo[b'.']
     for filename in match.files():
         if filename not in pctx:
             raise error.Abort(
-                _('cannot follow file not in parent ' 'revision: "%s"')
+                _(b'cannot follow file not in parent ' b'revision: "%s"')
                 % filename
             )
         fctx = pctx[filename]
 
         linkrev = fctx.linkrev()
@@ -819,13 +825,13 @@
 
     if not isenabled(repo):
         return orig(repo, subset, x)
 
     # i18n: "filelog" is a keyword
-    pat = revset.getstring(x, _("filelog requires a pattern"))
+    pat = revset.getstring(x, _(b"filelog requires a pattern"))
     m = match.match(
-        repo.root, repo.getcwd(), [pat], default='relpath', ctx=repo[None]
+        repo.root, repo.getcwd(), [pat], default=b'relpath', ctx=repo[None]
     )
     s = set()
 
     if not match.patkind(pat):
         # slow
@@ -846,11 +852,11 @@
                 s.add(actx.linkrev())
 
     return smartset.baseset([r for r in subset if r in s])
 
 
-@command('gc', [], _('hg gc [REPO...]'), norepo=True)
+@command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
 def gc(ui, *args, **opts):
     '''garbage collect the client and server filelog caches
     '''
     cachepaths = set()
 
@@ -859,11 +865,11 @@
     if systemcache:
         cachepaths.add(systemcache)
 
     # get repo client and server cache
     repopaths = []
-    pwd = ui.environ.get('PWD')
+    pwd = ui.environ.get(b'PWD')
     if pwd:
         repopaths.append(pwd)
 
     repopaths.extend(args)
     repos = []
@@ -887,16 +893,16 @@
         remotefilelogserver.gcserver(ui, repo._repo)
 
 
 def gcclient(ui, cachepath):
     # get list of repos that use this cache
-    repospath = os.path.join(cachepath, 'repos')
+    repospath = os.path.join(cachepath, b'repos')
     if not os.path.exists(repospath):
-        ui.warn(_("no known cache at %s\n") % cachepath)
+        ui.warn(_(b"no known cache at %s\n") % cachepath)
         return
 
-    reposfile = open(repospath, 'rb')
+    reposfile = open(repospath, b'rb')
     repos = {r[:-1] for r in reposfile.readlines()}
     reposfile.close()
 
     # build list of useful files
     validrepos = []
@@ -905,19 +911,19 @@
     sharedcache = None
     filesrepacked = False
 
     count = 0
     progress = ui.makeprogress(
-        _("analyzing repositories"), unit="repos", total=len(repos)
+        _(b"analyzing repositories"), unit=b"repos", total=len(repos)
     )
     for path in repos:
         progress.update(count)
         count += 1
         try:
             path = ui.expandpath(os.path.normpath(path))
         except TypeError as e:
-            ui.warn(_("warning: malformed path: %r:%s\n") % (path, e))
+            ui.warn(_(b"warning: malformed path: %r:%s\n") % (path, e))
             traceback.print_exc()
             continue
         try:
             peer = hg.peer(ui, {}, path)
             repo = peer._repo
@@ -930,19 +936,21 @@
         # this repo was added to the repos file. We'd rather this loop succeed
         # and too much be deleted, than the loop fail and nothing gets deleted.
         if not isenabled(repo):
             continue
 
-        if not util.safehasattr(repo, 'name'):
-            ui.warn(_("repo %s is a misconfigured remotefilelog repo\n") % path)
+        if not util.safehasattr(repo, b'name'):
+            ui.warn(
+                _(b"repo %s is a misconfigured remotefilelog repo\n") % path
+            )
             continue
 
         # If garbage collection on repack and repack on hg gc are enabled
         # then loose files are repacked and garbage collected.
         # Otherwise regular garbage collection is performed.
-        repackonhggc = repo.ui.configbool('remotefilelog', 'repackonhggc')
-        gcrepack = repo.ui.configbool('remotefilelog', 'gcrepack')
+        repackonhggc = repo.ui.configbool(b'remotefilelog', b'repackonhggc')
+        gcrepack = repo.ui.configbool(b'remotefilelog', b'gcrepack')
         if repackonhggc and gcrepack:
             try:
                 repackmod.incrementalrepack(repo)
                 filesrepacked = True
                 continue
@@ -964,21 +972,21 @@
     progress.complete()
 
     # write list of valid repos back
     oldumask = os.umask(0o002)
     try:
-        reposfile = open(repospath, 'wb')
-        reposfile.writelines([("%s\n" % r) for r in validrepos])
+        reposfile = open(repospath, b'wb')
+        reposfile.writelines([(b"%s\n" % r) for r in validrepos])
         reposfile.close()
     finally:
         os.umask(oldumask)
 
     # prune cache
     if sharedcache is not None:
         sharedcache.gc(keepkeys)
     elif not filesrepacked:
-        ui.warn(_("warning: no valid repos in repofile\n"))
+        ui.warn(_(b"warning: no valid repos in repofile\n"))
 
 
 def log(orig, ui, repo, *pats, **opts):
     if not isenabled(repo):
         return orig(ui, repo, *pats, **opts)
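A side effect of byteification visible in the hunk above: the rewritten writelines() line now percent-formats a bytes pattern, which only exists on Python 3.5+ (PEP 461) and accepts bytes arguments but not, for example, bare ints via %s. A standalone check of what the new line computes:

# PEP 461 %-formatting on bytes (Python 3.5+); b'%s' % 14 would raise
# TypeError, but bytes arguments format fine.
validrepos = [b'/cache/repo-a', b'/cache/repo-b']
print([b"%s\n" % r for r in validrepos])
# [b'/cache/repo-a\n', b'/cache/repo-b\n']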
@@ -993,23 +1001,23 @@
     opts[r'removed'] = True
 
     # If this is a non-follow log without any revs specified, recommend that
     # the user add -f to speed it up.
     if not follow and not revs:
-        match = scmutil.match(repo['.'], pats, pycompat.byteskwargs(opts))
+        match = scmutil.match(repo[b'.'], pats, pycompat.byteskwargs(opts))
         isfile = not match.anypats()
         if isfile:
             for file in match.files():
                 if not os.path.isfile(repo.wjoin(file)):
                     isfile = False
                     break
 
         if isfile:
             ui.warn(
                 _(
-                    "warning: file log can be slow on large repos - "
-                    + "use -f to speed it up\n"
+                    b"warning: file log can be slow on large repos - "
+                    + b"use -f to speed it up\n"
                 )
             )
 
     return orig(ui, repo, *pats, **opts)
 
@@ -1017,26 +1025,26 @@
 def revdatelimit(ui, revset):
     """Update revset so that only changesets no older than 'prefetchdays' days
     are included. The default value is set to 14 days. If 'prefetchdays' is set
     to zero or a negative value, the date restriction is not applied.
     """
-    days = ui.configint('remotefilelog', 'prefetchdays')
+    days = ui.configint(b'remotefilelog', b'prefetchdays')
     if days > 0:
-        revset = '(%s) & date(-%s)' % (revset, days)
+        revset = b'(%s) & date(-%s)' % (revset, days)
     return revset
 
 
 def readytofetch(repo):
     """Check that enough time has passed since the last background prefetch.
     This only relates to prefetches after operations that change the working
     copy parent. Default delay between background prefetches is 2 minutes.
     """
-    timeout = repo.ui.configint('remotefilelog', 'prefetchdelay')
-    fname = repo.vfs.join('lastprefetch')
+    timeout = repo.ui.configint(b'remotefilelog', b'prefetchdelay')
+    fname = repo.vfs.join(b'lastprefetch')
 
     ready = False
-    with open(fname, 'a'):
+    with open(fname, b'a'):
         # the with construct above is used to avoid race conditions
         modtime = os.path.getmtime(fname)
         if (time.time() - modtime) > timeout:
             os.utime(fname, None)
             ready = True
@@ -1047,22 +1055,22 @@
 def wcpprefetch(ui, repo, **kwargs):
     """Prefetches in background revisions specified by bgprefetchrevs revset.
     Does background repack if backgroundrepack flag is set in config.
     """
     shallow = isenabled(repo)
-    bgprefetchrevs = ui.config('remotefilelog', 'bgprefetchrevs')
+    bgprefetchrevs = ui.config(b'remotefilelog', b'bgprefetchrevs')
     isready = readytofetch(repo)
 
     if not (shallow and bgprefetchrevs and isready):
         return
 
-    bgrepack = repo.ui.configbool('remotefilelog', 'backgroundrepack')
+    bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
     # update a revset with a date limit
     bgprefetchrevs = revdatelimit(ui, bgprefetchrevs)
 
     def anon():
-        if util.safehasattr(repo, 'ranprefetch') and repo.ranprefetch:
+        if util.safehasattr(repo, b'ranprefetch') and repo.ranprefetch:
             return
         repo.ranprefetch = True
         repo.backgroundprefetch(bgprefetchrevs, repack=bgrepack)
 
     repo._afterlock(anon)
@@ -1071,19 +1079,19 @@
 def pull(orig, ui, repo, *pats, **opts):
     result = orig(ui, repo, *pats, **opts)
 
     if isenabled(repo):
         # prefetch if it's configured
-        prefetchrevset = ui.config('remotefilelog', 'pullprefetch')
-        bgrepack = repo.ui.configbool('remotefilelog', 'backgroundrepack')
-        bgprefetch = repo.ui.configbool('remotefilelog', 'backgroundprefetch')
-        ensurestart = repo.ui.configbool('devel', 'remotefilelog.ensurestart')
+        prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch')
+        bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
+        bgprefetch = repo.ui.configbool(b'remotefilelog', b'backgroundprefetch')
+        ensurestart = repo.ui.configbool(b'devel', b'remotefilelog.ensurestart')
 
         if prefetchrevset:
-            ui.status(_("prefetching file contents\n"))
+            ui.status(_(b"prefetching file contents\n"))
             revs = scmutil.revrange(repo, [prefetchrevset])
-            base = repo['.'].rev()
+            base = repo[b'.'].rev()
             if bgprefetch:
                 repo.backgroundprefetch(
                     prefetchrevset, repack=bgrepack, ensurestart=ensurestart
                 )
             else:
@@ -1111,14 +1119,14 @@
         bundlecaps.add(constants.BUNDLE2_CAPABLITY)
         return orig(
             source, heads=heads, common=common, bundlecaps=bundlecaps, **kwargs
         )
 
-    if util.safehasattr(remote, '_callstream'):
+    if util.safehasattr(remote, b'_callstream'):
         remote._localrepo = repo
-    elif util.safehasattr(remote, 'getbundle'):
-        extensions.wrapfunction(remote, 'getbundle', localgetbundle)
+    elif util.safehasattr(remote, b'getbundle'):
+        extensions.wrapfunction(remote, b'getbundle', localgetbundle)
 
     return orig(repo, remote, *args, **kwargs)
 
 
 def _fileprefetchhook(repo, revs, match):
@@ -1135,99 +1143,99 @@
                     allfiles.append((path, hex(mf[path])))
         repo.fileservice.prefetch(allfiles)
 
 
 @command(
-    'debugremotefilelog',
-    [('d', 'decompress', None, _('decompress the filelog first')),],
-    _('hg debugremotefilelog <path>'),
+    b'debugremotefilelog',
+    [(b'd', b'decompress', None, _(b'decompress the filelog first')),],
+    _(b'hg debugremotefilelog <path>'),
     norepo=True,
 )
 def debugremotefilelog(ui, path, **opts):
     return debugcommands.debugremotefilelog(ui, path, **opts)
 
 
 @command(
-    'verifyremotefilelog',
-    [('d', 'decompress', None, _('decompress the filelogs first')),],
-    _('hg verifyremotefilelogs <directory>'),
+    b'verifyremotefilelog',
+    [(b'd', b'decompress', None, _(b'decompress the filelogs first')),],
+    _(b'hg verifyremotefilelogs <directory>'),
     norepo=True,
 )
 def verifyremotefilelog(ui, path, **opts):
     return debugcommands.verifyremotefilelog(ui, path, **opts)
 
 
 @command(
-    'debugdatapack',
+    b'debugdatapack',
     [
-        ('', 'long', None, _('print the long hashes')),
-        ('', 'node', '', _('dump the contents of node'), 'NODE'),
+        (b'', b'long', None, _(b'print the long hashes')),
+        (b'', b'node', b'', _(b'dump the contents of node'), b'NODE'),
     ],
-    _('hg debugdatapack <paths>'),
+    _(b'hg debugdatapack <paths>'),
     norepo=True,
 )
 def debugdatapack(ui, *paths, **opts):
     return debugcommands.debugdatapack(ui, *paths, **opts)
 
 
-@command('debughistorypack', [], _('hg debughistorypack <path>'), norepo=True)
+@command(b'debughistorypack', [], _(b'hg debughistorypack <path>'), norepo=True)
 def debughistorypack(ui, path, **opts):
     return debugcommands.debughistorypack(ui, path)
 
 
-@command('debugkeepset', [], _('hg debugkeepset'))
+@command(b'debugkeepset', [], _(b'hg debugkeepset'))
 def debugkeepset(ui, repo, **opts):
     # The command is used to measure keepset computation time
     def keyfn(fname, fnode):
         return fileserverclient.getcachekey(repo.name, fname, hex(fnode))
 
     repackmod.keepset(repo, keyfn)
     return
 
 
-@command('debugwaitonrepack', [], _('hg debugwaitonrepack'))
+@command(b'debugwaitonrepack', [], _(b'hg debugwaitonrepack'))
 def debugwaitonrepack(ui, repo, **opts):
     return debugcommands.debugwaitonrepack(repo)
 
 
-@command('debugwaitonprefetch', [], _('hg debugwaitonprefetch'))
+@command(b'debugwaitonprefetch', [], _(b'hg debugwaitonprefetch'))
 def debugwaitonprefetch(ui, repo, **opts):
     return debugcommands.debugwaitonprefetch(repo)
 
 
 def resolveprefetchopts(ui, opts):
-    if not opts.get('rev'):
-        revset = ['.', 'draft()']
+    if not opts.get(b'rev'):
+        revset = [b'.', b'draft()']
 
-        prefetchrevset = ui.config('remotefilelog', 'pullprefetch', None)
+        prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch', None)
         if prefetchrevset:
-            revset.append('(%s)' % prefetchrevset)
-        bgprefetchrevs = ui.config('remotefilelog', 'bgprefetchrevs', None)
+            revset.append(b'(%s)' % prefetchrevset)
+        bgprefetchrevs = ui.config(b'remotefilelog', b'bgprefetchrevs', None)
         if bgprefetchrevs:
-            revset.append('(%s)' % bgprefetchrevs)
-        revset = '+'.join(revset)
+            revset.append(b'(%s)' % bgprefetchrevs)
+        revset = b'+'.join(revset)
 
         # update a revset with a date limit
         revset = revdatelimit(ui, revset)
 
-        opts['rev'] = [revset]
+        opts[b'rev'] = [revset]
 
-    if not opts.get('base'):
-        opts['base'] = None
+    if not opts.get(b'base'):
+        opts[b'base'] = None
 
     return opts
 
 
 @command(
-    'prefetch',
+    b'prefetch',
     [
-        ('r', 'rev', [], _('prefetch the specified revisions'), _('REV')),
-        ('', 'repack', False, _('run repack after prefetch')),
-        ('b', 'base', '', _("rev that is assumed to already be local")),
+        (b'r', b'rev', [], _(b'prefetch the specified revisions'), _(b'REV')),
+        (b'', b'repack', False, _(b'run repack after prefetch')),
+        (b'b', b'base', b'', _(b"rev that is assumed to already be local")),
     ]
     + commands.walkopts,
-    _('hg prefetch [OPTIONS] [FILE...]'),
+    _(b'hg prefetch [OPTIONS] [FILE...]'),
 )
 def prefetch(ui, repo, *pats, **opts):
     """prefetch file revisions from the server
 
     Prefetches file revisions for the specified revs and stores them in the
@@ -1237,57 +1245,57 @@
 
     Return 0 on success.
     """
     opts = pycompat.byteskwargs(opts)
     if not isenabled(repo):
-        raise error.Abort(_("repo is not shallow"))
+        raise error.Abort(_(b"repo is not shallow"))
 
     opts = resolveprefetchopts(ui, opts)
-    revs = scmutil.revrange(repo, opts.get('rev'))
-    repo.prefetch(revs, opts.get('base'), pats, opts)
+    revs = scmutil.revrange(repo, opts.get(b'rev'))
+    repo.prefetch(revs, opts.get(b'base'), pats, opts)
 
-    ensurestart = repo.ui.configbool('devel', 'remotefilelog.ensurestart')
+    ensurestart = repo.ui.configbool(b'devel', b'remotefilelog.ensurestart')
 
     # Run repack in background
-    if opts.get('repack'):
+    if opts.get(b'repack'):
         repackmod.backgroundrepack(
             repo, incremental=True, ensurestart=ensurestart
         )
 
 
 @command(
-    'repack',
+    b'repack',
     [
-        ('', 'background', None, _('run in a background process'), None),
-        ('', 'incremental', None, _('do an incremental repack'), None),
+        (b'', b'background', None, _(b'run in a background process'), None),
+        (b'', b'incremental', None, _(b'do an incremental repack'), None),
         (
-            '',
-            'packsonly',
+            b'',
+            b'packsonly',
             None,
-            _('only repack packs (skip loose objects)'),
+            _(b'only repack packs (skip loose objects)'),
             None,
         ),
     ],
-    _('hg repack [OPTIONS]'),
+    _(b'hg repack [OPTIONS]'),
 )
 def repack_(ui, repo, *pats, **opts):
     if opts.get(r'background'):
-        ensurestart = repo.ui.configbool('devel', 'remotefilelog.ensurestart')
+        ensurestart = repo.ui.configbool(b'devel', b'remotefilelog.ensurestart')
         repackmod.backgroundrepack(
             repo,
             incremental=opts.get(r'incremental'),
             packsonly=opts.get(r'packsonly', False),
             ensurestart=ensurestart,
        )
        return
 
-    options = {'packsonly': opts.get(r'packsonly')}
+    options = {b'packsonly': opts.get(r'packsonly')}
 
    try:
        if opts.get(r'incremental'):
            repackmod.incrementalrepack(repo, options=options)
        else:
            repackmod.fullrepack(repo, options=options)
    except repackmod.RepackAlreadyRunning as ex:
        # Don't propagate the exception if the repack is already in
        # progress, since we want the command to exit 0.
-        repo.ui.warn('%s\n' % ex)
+        repo.ui.warn(b'%s\n' % ex)
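Finally, every @command block in this file flows through the registrar set up near the top of the diff (command = registrar.command(cmdtable)). A plain-Python sketch of that decorator-factory pattern — Mercurial's real registrar does considerably more (option validation, norepo/optionalrepo flags, help wiring), and the cmdtable value shape below is invented for illustration:

# Sketch of the decorator-factory registration pattern (not Mercurial's
# registrar.command).
cmdtable = {}


def command(name, options=(), synopsis=b'', norepo=False):
    def register(func):
        # Record the function plus its options under the command name.
        cmdtable[name] = (func, list(options), synopsis, norepo)
        return func

    return register


@command(b'gc', [], b'hg gc [REPO...]', norepo=True)
def gc(ui, *args, **opts):
    """(stub) garbage collect the client and server filelog caches"""


print(sorted(cmdtable))  # [b'gc']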