comparison mercurial/exchange.py @ 43076:2372284d9457

formatting: blacken the codebase This is using my patch to black (https://github.com/psf/black/pull/826) so we don't un-wrap collection literals. Done with: hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' | xargs black -S # skip-blame mass-reformatting only # no-check-commit reformats foo_bar functions Differential Revision: https://phab.mercurial-scm.org/D6971
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:45:02 -0400
parents 9fd7710d9ae2
children 687b865b95ad
comparison
equal deleted inserted replaced
43075:57875cf423c9 43076:2372284d9457
14 from .node import ( 14 from .node import (
15 hex, 15 hex,
16 nullid, 16 nullid,
17 nullrev, 17 nullrev,
18 ) 18 )
19 from .thirdparty import ( 19 from .thirdparty import attr
20 attr,
21 )
22 from . import ( 20 from . import (
23 bookmarks as bookmod, 21 bookmarks as bookmod,
24 bundle2, 22 bundle2,
25 changegroup, 23 changegroup,
26 discovery, 24 discovery,
38 streamclone, 36 streamclone,
39 url as urlmod, 37 url as urlmod,
40 util, 38 util,
41 wireprototypes, 39 wireprototypes,
42 ) 40 )
43 from .interfaces import ( 41 from .interfaces import repository
44 repository, 42 from .utils import stringutil
45 )
46 from .utils import (
47 stringutil,
48 )
49 43
50 urlerr = util.urlerr 44 urlerr = util.urlerr
51 urlreq = util.urlreq 45 urlreq = util.urlreq
52 46
53 _NARROWACL_SECTION = 'narrowacl' 47 _NARROWACL_SECTION = 'narrowacl'
54 48
55 # Maps bundle version human names to changegroup versions. 49 # Maps bundle version human names to changegroup versions.
56 _bundlespeccgversions = {'v1': '01', 50 _bundlespeccgversions = {
57 'v2': '02', 51 'v1': '01',
58 'packed1': 's1', 52 'v2': '02',
59 'bundle2': '02', #legacy 53 'packed1': 's1',
60 } 54 'bundle2': '02', # legacy
55 }
61 56
62 # Maps bundle version with content opts to choose which part to bundle 57 # Maps bundle version with content opts to choose which part to bundle
63 _bundlespeccontentopts = { 58 _bundlespeccontentopts = {
64 'v1': { 59 'v1': {
65 'changegroup': True, 60 'changegroup': True,
66 'cg.version': '01', 61 'cg.version': '01',
67 'obsolescence': False, 62 'obsolescence': False,
68 'phases': False, 63 'phases': False,
69 'tagsfnodescache': False, 64 'tagsfnodescache': False,
70 'revbranchcache': False 65 'revbranchcache': False,
71 }, 66 },
72 'v2': { 67 'v2': {
73 'changegroup': True, 68 'changegroup': True,
74 'cg.version': '02', 69 'cg.version': '02',
75 'obsolescence': False, 70 'obsolescence': False,
76 'phases': False, 71 'phases': False,
77 'tagsfnodescache': True, 72 'tagsfnodescache': True,
78 'revbranchcache': True 73 'revbranchcache': True,
79 }, 74 },
80 'packed1' : { 75 'packed1': {'cg.version': 's1'},
81 'cg.version': 's1' 76 }
77 _bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
78
79 _bundlespecvariants = {
80 "streamv2": {
81 "changegroup": False,
82 "streamv2": True,
83 "tagsfnodescache": False,
84 "revbranchcache": False,
82 } 85 }
83 } 86 }
84 _bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
85
86 _bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
87 "tagsfnodescache": False,
88 "revbranchcache": False}}
89 87
90 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE. 88 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
91 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'} 89 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
90
92 91
93 @attr.s 92 @attr.s
94 class bundlespec(object): 93 class bundlespec(object):
95 compression = attr.ib() 94 compression = attr.ib()
96 wirecompression = attr.ib() 95 wirecompression = attr.ib()
97 version = attr.ib() 96 version = attr.ib()
98 wireversion = attr.ib() 97 wireversion = attr.ib()
99 params = attr.ib() 98 params = attr.ib()
100 contentopts = attr.ib() 99 contentopts = attr.ib()
101 100
101
102 def parsebundlespec(repo, spec, strict=True): 102 def parsebundlespec(repo, spec, strict=True):
103 """Parse a bundle string specification into parts. 103 """Parse a bundle string specification into parts.
104 104
105 Bundle specifications denote a well-defined bundle/exchange format. 105 Bundle specifications denote a well-defined bundle/exchange format.
106 The content of a given specification should not change over time in 106 The content of a given specification should not change over time in
130 bundle type/version is not recognized. 130 bundle type/version is not recognized.
131 131
132 Note: this function will likely eventually return a more complex data 132 Note: this function will likely eventually return a more complex data
133 structure, including bundle2 part information. 133 structure, including bundle2 part information.
134 """ 134 """
135
135 def parseparams(s): 136 def parseparams(s):
136 if ';' not in s: 137 if ';' not in s:
137 return s, {} 138 return s, {}
138 139
139 params = {} 140 params = {}
140 version, paramstr = s.split(';', 1) 141 version, paramstr = s.split(';', 1)
141 142
142 for p in paramstr.split(';'): 143 for p in paramstr.split(';'):
143 if '=' not in p: 144 if '=' not in p:
144 raise error.InvalidBundleSpecification( 145 raise error.InvalidBundleSpecification(
145 _('invalid bundle specification: ' 146 _(
146 'missing "=" in parameter: %s') % p) 147 'invalid bundle specification: '
148 'missing "=" in parameter: %s'
149 )
150 % p
151 )
147 152
148 key, value = p.split('=', 1) 153 key, value = p.split('=', 1)
149 key = urlreq.unquote(key) 154 key = urlreq.unquote(key)
150 value = urlreq.unquote(value) 155 value = urlreq.unquote(value)
151 params[key] = value 156 params[key] = value
152 157
153 return version, params 158 return version, params
154 159
155
156 if strict and '-' not in spec: 160 if strict and '-' not in spec:
157 raise error.InvalidBundleSpecification( 161 raise error.InvalidBundleSpecification(
158 _('invalid bundle specification; ' 162 _(
159 'must be prefixed with compression: %s') % spec) 163 'invalid bundle specification; '
164 'must be prefixed with compression: %s'
165 )
166 % spec
167 )
160 168
161 if '-' in spec: 169 if '-' in spec:
162 compression, version = spec.split('-', 1) 170 compression, version = spec.split('-', 1)
163 171
164 if compression not in util.compengines.supportedbundlenames: 172 if compression not in util.compengines.supportedbundlenames:
165 raise error.UnsupportedBundleSpecification( 173 raise error.UnsupportedBundleSpecification(
166 _('%s compression is not supported') % compression) 174 _('%s compression is not supported') % compression
175 )
167 176
168 version, params = parseparams(version) 177 version, params = parseparams(version)
169 178
170 if version not in _bundlespeccgversions: 179 if version not in _bundlespeccgversions:
171 raise error.UnsupportedBundleSpecification( 180 raise error.UnsupportedBundleSpecification(
172 _('%s is not a recognized bundle version') % version) 181 _('%s is not a recognized bundle version') % version
182 )
173 else: 183 else:
174 # Value could be just the compression or just the version, in which 184 # Value could be just the compression or just the version, in which
175 # case some defaults are assumed (but only when not in strict mode). 185 # case some defaults are assumed (but only when not in strict mode).
176 assert not strict 186 assert not strict
177 187
192 else: 202 else:
193 compression = 'bzip2' 203 compression = 'bzip2'
194 version = spec 204 version = spec
195 else: 205 else:
196 raise error.UnsupportedBundleSpecification( 206 raise error.UnsupportedBundleSpecification(
197 _('%s is not a recognized bundle specification') % spec) 207 _('%s is not a recognized bundle specification') % spec
208 )
198 209
199 # Bundle version 1 only supports a known set of compression engines. 210 # Bundle version 1 only supports a known set of compression engines.
200 if version == 'v1' and compression not in _bundlespecv1compengines: 211 if version == 'v1' and compression not in _bundlespecv1compengines:
201 raise error.UnsupportedBundleSpecification( 212 raise error.UnsupportedBundleSpecification(
202 _('compression engine %s is not supported on v1 bundles') % 213 _('compression engine %s is not supported on v1 bundles')
203 compression) 214 % compression
215 )
204 216
205 # The specification for packed1 can optionally declare the data formats 217 # The specification for packed1 can optionally declare the data formats
206 # required to apply it. If we see this metadata, compare against what the 218 # required to apply it. If we see this metadata, compare against what the
207 # repo supports and error if the bundle isn't compatible. 219 # repo supports and error if the bundle isn't compatible.
208 if version == 'packed1' and 'requirements' in params: 220 if version == 'packed1' and 'requirements' in params:
209 requirements = set(params['requirements'].split(',')) 221 requirements = set(params['requirements'].split(','))
210 missingreqs = requirements - repo.supportedformats 222 missingreqs = requirements - repo.supportedformats
211 if missingreqs: 223 if missingreqs:
212 raise error.UnsupportedBundleSpecification( 224 raise error.UnsupportedBundleSpecification(
213 _('missing support for repository features: %s') % 225 _('missing support for repository features: %s')
214 ', '.join(sorted(missingreqs))) 226 % ', '.join(sorted(missingreqs))
227 )
215 228
216 # Compute contentopts based on the version 229 # Compute contentopts based on the version
217 contentopts = _bundlespeccontentopts.get(version, {}).copy() 230 contentopts = _bundlespeccontentopts.get(version, {}).copy()
218 231
219 # Process the variants 232 # Process the variants
223 236
224 engine = util.compengines.forbundlename(compression) 237 engine = util.compengines.forbundlename(compression)
225 compression, wirecompression = engine.bundletype() 238 compression, wirecompression = engine.bundletype()
226 wireversion = _bundlespeccgversions[version] 239 wireversion = _bundlespeccgversions[version]
227 240
228 return bundlespec(compression, wirecompression, version, wireversion, 241 return bundlespec(
229 params, contentopts) 242 compression, wirecompression, version, wireversion, params, contentopts
243 )
244
230 245
231 def readbundle(ui, fh, fname, vfs=None): 246 def readbundle(ui, fh, fname, vfs=None):
232 header = changegroup.readexactly(fh, 4) 247 header = changegroup.readexactly(fh, 4)
233 248
234 alg = None 249 alg = None
254 elif version == 'S1': 269 elif version == 'S1':
255 return streamclone.streamcloneapplier(fh) 270 return streamclone.streamcloneapplier(fh)
256 else: 271 else:
257 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version)) 272 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
258 273
274
259 def getbundlespec(ui, fh): 275 def getbundlespec(ui, fh):
260 """Infer the bundlespec from a bundle file handle. 276 """Infer the bundlespec from a bundle file handle.
261 277
262 The input file handle is seeked and the original seek position is not 278 The input file handle is seeked and the original seek position is not
263 restored. 279 restored.
264 """ 280 """
281
265 def speccompression(alg): 282 def speccompression(alg):
266 try: 283 try:
267 return util.compengines.forbundletype(alg).bundletype()[0] 284 return util.compengines.forbundletype(alg).bundletype()[0]
268 except KeyError: 285 except KeyError:
269 return None 286 return None
290 if part.type == 'changegroup': 307 if part.type == 'changegroup':
291 version = part.params['version'] 308 version = part.params['version']
292 if version in ('01', '02'): 309 if version in ('01', '02'):
293 version = 'v2' 310 version = 'v2'
294 else: 311 else:
295 raise error.Abort(_('changegroup version %s does not have ' 312 raise error.Abort(
296 'a known bundlespec') % version, 313 _(
297 hint=_('try upgrading your Mercurial ' 314 'changegroup version %s does not have '
298 'client')) 315 'a known bundlespec'
316 )
317 % version,
318 hint=_('try upgrading your Mercurial ' 'client'),
319 )
299 elif part.type == 'stream2' and version is None: 320 elif part.type == 'stream2' and version is None:
300 # A stream2 part requires to be part of a v2 bundle 321 # A stream2 part requires to be part of a v2 bundle
301 requirements = urlreq.unquote(part.params['requirements']) 322 requirements = urlreq.unquote(part.params['requirements'])
302 splitted = requirements.split() 323 splitted = requirements.split()
303 params = bundle2._formatrequirementsparams(splitted) 324 params = bundle2._formatrequirementsparams(splitted)
304 return 'none-v2;stream=v2;%s' % params 325 return 'none-v2;stream=v2;%s' % params
305 326
306 if not version: 327 if not version:
307 raise error.Abort(_('could not identify changegroup version in ' 328 raise error.Abort(
308 'bundle')) 329 _('could not identify changegroup version in ' 'bundle')
330 )
309 331
310 return '%s-%s' % (comp, version) 332 return '%s-%s' % (comp, version)
311 elif isinstance(b, streamclone.streamcloneapplier): 333 elif isinstance(b, streamclone.streamcloneapplier):
312 requirements = streamclone.readbundle1header(fh)[2] 334 requirements = streamclone.readbundle1header(fh)[2]
313 formatted = bundle2._formatrequirementsparams(requirements) 335 formatted = bundle2._formatrequirementsparams(requirements)
314 return 'none-packed1;%s' % formatted 336 return 'none-packed1;%s' % formatted
315 else: 337 else:
316 raise error.Abort(_('unknown bundle type: %s') % b) 338 raise error.Abort(_('unknown bundle type: %s') % b)
339
317 340
318 def _computeoutgoing(repo, heads, common): 341 def _computeoutgoing(repo, heads, common):
319 """Computes which revs are outgoing given a set of common 342 """Computes which revs are outgoing given a set of common
320 and a set of heads. 343 and a set of heads.
321 344
332 common = [nullid] 355 common = [nullid]
333 if not heads: 356 if not heads:
334 heads = cl.heads() 357 heads = cl.heads()
335 return discovery.outgoing(repo, common, heads) 358 return discovery.outgoing(repo, common, heads)
336 359
360
337 def _checkpublish(pushop): 361 def _checkpublish(pushop):
338 repo = pushop.repo 362 repo = pushop.repo
339 ui = repo.ui 363 ui = repo.ui
340 behavior = ui.config('experimental', 'auto-publish') 364 behavior = ui.config('experimental', 'auto-publish')
341 if pushop.publish or behavior not in ('warn', 'confirm', 'abort'): 365 if pushop.publish or behavior not in ('warn', 'confirm', 'abort'):
348 published = repo.filtered('served').revs('not public()') 372 published = repo.filtered('served').revs('not public()')
349 else: 373 else:
350 published = repo.revs('::%ln - public()', pushop.revs) 374 published = repo.revs('::%ln - public()', pushop.revs)
351 if published: 375 if published:
352 if behavior == 'warn': 376 if behavior == 'warn':
353 ui.warn(_('%i changesets about to be published\n') 377 ui.warn(_('%i changesets about to be published\n') % len(published))
354 % len(published))
355 elif behavior == 'confirm': 378 elif behavior == 'confirm':
356 if ui.promptchoice(_('push and publish %i changesets (yn)?' 379 if ui.promptchoice(
357 '$$ &Yes $$ &No') % len(published)): 380 _('push and publish %i changesets (yn)?' '$$ &Yes $$ &No')
381 % len(published)
382 ):
358 raise error.Abort(_('user quit')) 383 raise error.Abort(_('user quit'))
359 elif behavior == 'abort': 384 elif behavior == 'abort':
360 msg = _('push would publish %i changesets') % len(published) 385 msg = _('push would publish %i changesets') % len(published)
361 hint = _("use --publish or adjust 'experimental.auto-publish'" 386 hint = _(
362 " config") 387 "use --publish or adjust 'experimental.auto-publish'" " config"
388 )
363 raise error.Abort(msg, hint=hint) 389 raise error.Abort(msg, hint=hint)
390
364 391
365 def _forcebundle1(op): 392 def _forcebundle1(op):
366 """return true if a pull/push must use bundle1 393 """return true if a pull/push must use bundle1
367 394
368 This function is used to allow testing of the older bundle version""" 395 This function is used to allow testing of the older bundle version"""
375 # developer config: devel.legacy.exchange 402 # developer config: devel.legacy.exchange
376 exchange = ui.configlist('devel', 'legacy.exchange') 403 exchange = ui.configlist('devel', 'legacy.exchange')
377 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange 404 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
378 return forcebundle1 or not op.remote.capable('bundle2') 405 return forcebundle1 or not op.remote.capable('bundle2')
379 406
407
380 class pushoperation(object): 408 class pushoperation(object):
381 """A object that represent a single push operation 409 """A object that represent a single push operation
382 410
383 Its purpose is to carry push related state and very common operations. 411 Its purpose is to carry push related state and very common operations.
384 412
385 A new pushoperation should be created at the beginning of each push and 413 A new pushoperation should be created at the beginning of each push and
386 discarded afterward. 414 discarded afterward.
387 """ 415 """
388 416
389 def __init__(self, repo, remote, force=False, revs=None, newbranch=False, 417 def __init__(
390 bookmarks=(), publish=False, pushvars=None): 418 self,
419 repo,
420 remote,
421 force=False,
422 revs=None,
423 newbranch=False,
424 bookmarks=(),
425 publish=False,
426 pushvars=None,
427 ):
391 # repo we push from 428 # repo we push from
392 self.repo = repo 429 self.repo = repo
393 self.ui = repo.ui 430 self.ui = repo.ui
394 # repo we push to 431 # repo we push to
395 self.remote = remote 432 self.remote = remote
481 common = self.outgoing.common 518 common = self.outgoing.common
482 nm = self.repo.changelog.nodemap 519 nm = self.repo.changelog.nodemap
483 cheads = [node for node in self.revs if nm[node] in common] 520 cheads = [node for node in self.revs if nm[node] in common]
484 # and 521 # and
485 # * commonheads parents on missing 522 # * commonheads parents on missing
486 revset = unfi.set('%ln and parents(roots(%ln))', 523 revset = unfi.set(
487 self.outgoing.commonheads, 524 '%ln and parents(roots(%ln))',
488 self.outgoing.missing) 525 self.outgoing.commonheads,
526 self.outgoing.missing,
527 )
489 cheads.extend(c.node() for c in revset) 528 cheads.extend(c.node() for c in revset)
490 return cheads 529 return cheads
491 530
492 @property 531 @property
493 def commonheads(self): 532 def commonheads(self):
495 if self.cgresult: 534 if self.cgresult:
496 return self.futureheads 535 return self.futureheads
497 else: 536 else:
498 return self.fallbackheads 537 return self.fallbackheads
499 538
539
500 # mapping of message used when pushing bookmark 540 # mapping of message used when pushing bookmark
501 bookmsgmap = {'update': (_("updating bookmark %s\n"), 541 bookmsgmap = {
502 _('updating bookmark %s failed!\n')), 542 'update': (
503 'export': (_("exporting bookmark %s\n"), 543 _("updating bookmark %s\n"),
504 _('exporting bookmark %s failed!\n')), 544 _('updating bookmark %s failed!\n'),
505 'delete': (_("deleting remote bookmark %s\n"), 545 ),
506 _('deleting remote bookmark %s failed!\n')), 546 'export': (
507 } 547 _("exporting bookmark %s\n"),
508 548 _('exporting bookmark %s failed!\n'),
509 549 ),
510 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(), 550 'delete': (
511 publish=False, opargs=None): 551 _("deleting remote bookmark %s\n"),
552 _('deleting remote bookmark %s failed!\n'),
553 ),
554 }
555
556
557 def push(
558 repo,
559 remote,
560 force=False,
561 revs=None,
562 newbranch=False,
563 bookmarks=(),
564 publish=False,
565 opargs=None,
566 ):
512 '''Push outgoing changesets (limited by revs) from a local 567 '''Push outgoing changesets (limited by revs) from a local
513 repository to remote. Return an integer: 568 repository to remote. Return an integer:
514 - None means nothing to push 569 - None means nothing to push
515 - 0 means HTTP error 570 - 0 means HTTP error
516 - 1 means we pushed and remote head count is unchanged *or* 571 - 1 means we pushed and remote head count is unchanged *or*
517 we have outgoing changesets but refused to push 572 we have outgoing changesets but refused to push
518 - other values as described by addchangegroup() 573 - other values as described by addchangegroup()
519 ''' 574 '''
520 if opargs is None: 575 if opargs is None:
521 opargs = {} 576 opargs = {}
522 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks, 577 pushop = pushoperation(
523 publish, **pycompat.strkwargs(opargs)) 578 repo,
579 remote,
580 force,
581 revs,
582 newbranch,
583 bookmarks,
584 publish,
585 **pycompat.strkwargs(opargs)
586 )
524 if pushop.remote.local(): 587 if pushop.remote.local():
525 missing = (set(pushop.repo.requirements) 588 missing = (
526 - pushop.remote.local().supported) 589 set(pushop.repo.requirements) - pushop.remote.local().supported
590 )
527 if missing: 591 if missing:
528 msg = _("required features are not" 592 msg = _(
529 " supported in the destination:" 593 "required features are not"
530 " %s") % (', '.join(sorted(missing))) 594 " supported in the destination:"
595 " %s"
596 ) % (', '.join(sorted(missing)))
531 raise error.Abort(msg) 597 raise error.Abort(msg)
532 598
533 if not pushop.remote.canpush(): 599 if not pushop.remote.canpush():
534 raise error.Abort(_("destination does not support push")) 600 raise error.Abort(_("destination does not support push"))
535 601
536 if not pushop.remote.capable('unbundle'): 602 if not pushop.remote.capable('unbundle'):
537 raise error.Abort(_('cannot push: destination does not support the ' 603 raise error.Abort(
538 'unbundle wire protocol command')) 604 _(
605 'cannot push: destination does not support the '
606 'unbundle wire protocol command'
607 )
608 )
539 609
540 # get lock as we might write phase data 610 # get lock as we might write phase data
541 wlock = lock = None 611 wlock = lock = None
542 try: 612 try:
543 # bundle2 push may receive a reply bundle touching bookmarks 613 # bundle2 push may receive a reply bundle touching bookmarks
544 # requiring the wlock. Take it now to ensure proper ordering. 614 # requiring the wlock. Take it now to ensure proper ordering.
545 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback') 615 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
546 if ((not _forcebundle1(pushop)) and 616 if (
547 maypushback and 617 (not _forcebundle1(pushop))
548 not bookmod.bookmarksinstore(repo)): 618 and maypushback
619 and not bookmod.bookmarksinstore(repo)
620 ):
549 wlock = pushop.repo.wlock() 621 wlock = pushop.repo.wlock()
550 lock = pushop.repo.lock() 622 lock = pushop.repo.lock()
551 pushop.trmanager = transactionmanager(pushop.repo, 623 pushop.trmanager = transactionmanager(
552 'push-response', 624 pushop.repo, 'push-response', pushop.remote.url()
553 pushop.remote.url()) 625 )
554 except error.LockUnavailable as err: 626 except error.LockUnavailable as err:
555 # source repo cannot be locked. 627 # source repo cannot be locked.
556 # We do not abort the push, but just disable the local phase 628 # We do not abort the push, but just disable the local phase
557 # synchronisation. 629 # synchronisation.
558 msg = ('cannot lock source repository: %s\n' 630 msg = 'cannot lock source repository: %s\n' % stringutil.forcebytestr(
559 % stringutil.forcebytestr(err)) 631 err
632 )
560 pushop.ui.debug(msg) 633 pushop.ui.debug(msg)
561 634
562 with wlock or util.nullcontextmanager(): 635 with wlock or util.nullcontextmanager():
563 with lock or util.nullcontextmanager(): 636 with lock or util.nullcontextmanager():
564 with pushop.trmanager or util.nullcontextmanager(): 637 with pushop.trmanager or util.nullcontextmanager():
575 if repo.ui.configbool('experimental', 'remotenames'): 648 if repo.ui.configbool('experimental', 'remotenames'):
576 logexchange.pullremotenames(repo, remote) 649 logexchange.pullremotenames(repo, remote)
577 650
578 return pushop 651 return pushop
579 652
653
580 # list of steps to perform discovery before push 654 # list of steps to perform discovery before push
581 pushdiscoveryorder = [] 655 pushdiscoveryorder = []
582 656
583 # Mapping between step name and function 657 # Mapping between step name and function
584 # 658 #
585 # This exists to help extensions wrap steps if necessary 659 # This exists to help extensions wrap steps if necessary
586 pushdiscoverymapping = {} 660 pushdiscoverymapping = {}
587 661
662
588 def pushdiscovery(stepname): 663 def pushdiscovery(stepname):
589 """decorator for function performing discovery before push 664 """decorator for function performing discovery before push
590 665
591 The function is added to the step -> function mapping and appended to the 666 The function is added to the step -> function mapping and appended to the
592 list of steps. Beware that decorated function will be added in order (this 667 list of steps. Beware that decorated function will be added in order (this
593 may matter). 668 may matter).
594 669
595 You can only use this decorator for a new step, if you want to wrap a step 670 You can only use this decorator for a new step, if you want to wrap a step
596 from an extension, change the pushdiscovery dictionary directly.""" 671 from an extension, change the pushdiscovery dictionary directly."""
672
597 def dec(func): 673 def dec(func):
598 assert stepname not in pushdiscoverymapping 674 assert stepname not in pushdiscoverymapping
599 pushdiscoverymapping[stepname] = func 675 pushdiscoverymapping[stepname] = func
600 pushdiscoveryorder.append(stepname) 676 pushdiscoveryorder.append(stepname)
601 return func 677 return func
678
602 return dec 679 return dec
680
603 681
604 def _pushdiscovery(pushop): 682 def _pushdiscovery(pushop):
605 """Run all discovery steps""" 683 """Run all discovery steps"""
606 for stepname in pushdiscoveryorder: 684 for stepname in pushdiscoveryorder:
607 step = pushdiscoverymapping[stepname] 685 step = pushdiscoverymapping[stepname]
608 step(pushop) 686 step(pushop)
609 687
688
610 @pushdiscovery('changeset') 689 @pushdiscovery('changeset')
611 def _pushdiscoverychangeset(pushop): 690 def _pushdiscoverychangeset(pushop):
612 """discover the changeset that need to be pushed""" 691 """discover the changeset that need to be pushed"""
613 fci = discovery.findcommonincoming 692 fci = discovery.findcommonincoming
614 if pushop.revs: 693 if pushop.revs:
615 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force, 694 commoninc = fci(
616 ancestorsof=pushop.revs) 695 pushop.repo,
696 pushop.remote,
697 force=pushop.force,
698 ancestorsof=pushop.revs,
699 )
617 else: 700 else:
618 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force) 701 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
619 common, inc, remoteheads = commoninc 702 common, inc, remoteheads = commoninc
620 fco = discovery.findcommonoutgoing 703 fco = discovery.findcommonoutgoing
621 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs, 704 outgoing = fco(
622 commoninc=commoninc, force=pushop.force) 705 pushop.repo,
706 pushop.remote,
707 onlyheads=pushop.revs,
708 commoninc=commoninc,
709 force=pushop.force,
710 )
623 pushop.outgoing = outgoing 711 pushop.outgoing = outgoing
624 pushop.remoteheads = remoteheads 712 pushop.remoteheads = remoteheads
625 pushop.incoming = inc 713 pushop.incoming = inc
714
626 715
627 @pushdiscovery('phase') 716 @pushdiscovery('phase')
628 def _pushdiscoveryphase(pushop): 717 def _pushdiscoveryphase(pushop):
629 """discover the phase that needs to be pushed 718 """discover the phase that needs to be pushed
630 719
631 (computed for both success and failure case for changesets push)""" 720 (computed for both success and failure case for changesets push)"""
632 outgoing = pushop.outgoing 721 outgoing = pushop.outgoing
633 unfi = pushop.repo.unfiltered() 722 unfi = pushop.repo.unfiltered()
634 remotephases = listkeys(pushop.remote, 'phases') 723 remotephases = listkeys(pushop.remote, 'phases')
635 724
636 if (pushop.ui.configbool('ui', '_usedassubrepo') 725 if (
637 and remotephases # server supports phases 726 pushop.ui.configbool('ui', '_usedassubrepo')
638 and not pushop.outgoing.missing # no changesets to be pushed 727 and remotephases # server supports phases
639 and remotephases.get('publishing', False)): 728 and not pushop.outgoing.missing # no changesets to be pushed
729 and remotephases.get('publishing', False)
730 ):
640 # When: 731 # When:
641 # - this is a subrepo push 732 # - this is a subrepo push
642 # - and remote support phase 733 # - and remote support phase
643 # - and no changeset are to be pushed 734 # - and no changeset are to be pushed
644 # - and remote is publishing 735 # - and remote is publishing
648 # on the remote. 739 # on the remote.
649 pushop.outdatedphases = [] 740 pushop.outdatedphases = []
650 pushop.fallbackoutdatedphases = [] 741 pushop.fallbackoutdatedphases = []
651 return 742 return
652 743
653 pushop.remotephases = phases.remotephasessummary(pushop.repo, 744 pushop.remotephases = phases.remotephasessummary(
654 pushop.fallbackheads, 745 pushop.repo, pushop.fallbackheads, remotephases
655 remotephases) 746 )
656 droots = pushop.remotephases.draftroots 747 droots = pushop.remotephases.draftroots
657 748
658 extracond = '' 749 extracond = ''
659 if not pushop.remotephases.publishing: 750 if not pushop.remotephases.publishing:
660 extracond = ' and public()' 751 extracond = ' and public()'
662 # Get the list of all revs draft on remote by public here. 753 # Get the list of all revs draft on remote by public here.
663 # XXX Beware that revset break if droots is not strictly 754 # XXX Beware that revset break if droots is not strictly
664 # XXX root we may want to ensure it is but it is costly 755 # XXX root we may want to ensure it is but it is costly
665 fallback = list(unfi.set(revset, droots, pushop.fallbackheads)) 756 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
666 if not pushop.remotephases.publishing and pushop.publish: 757 if not pushop.remotephases.publishing and pushop.publish:
667 future = list(unfi.set('%ln and (not public() or %ln::)', 758 future = list(
668 pushop.futureheads, droots)) 759 unfi.set(
760 '%ln and (not public() or %ln::)', pushop.futureheads, droots
761 )
762 )
669 elif not outgoing.missing: 763 elif not outgoing.missing:
670 future = fallback 764 future = fallback
671 else: 765 else:
672 # adds changeset we are going to push as draft 766 # adds changeset we are going to push as draft
673 # 767 #
674 # should not be necessary for publishing server, but because of an 768 # should not be necessary for publishing server, but because of an
675 # issue fixed in xxxxx we have to do it anyway. 769 # issue fixed in xxxxx we have to do it anyway.
676 fdroots = list(unfi.set('roots(%ln + %ln::)', 770 fdroots = list(
677 outgoing.missing, droots)) 771 unfi.set('roots(%ln + %ln::)', outgoing.missing, droots)
772 )
678 fdroots = [f.node() for f in fdroots] 773 fdroots = [f.node() for f in fdroots]
679 future = list(unfi.set(revset, fdroots, pushop.futureheads)) 774 future = list(unfi.set(revset, fdroots, pushop.futureheads))
680 pushop.outdatedphases = future 775 pushop.outdatedphases = future
681 pushop.fallbackoutdatedphases = fallback 776 pushop.fallbackoutdatedphases = fallback
777
682 778
683 @pushdiscovery('obsmarker') 779 @pushdiscovery('obsmarker')
684 def _pushdiscoveryobsmarkers(pushop): 780 def _pushdiscoveryobsmarkers(pushop):
685 if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt): 781 if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
686 return 782 return
694 repo = pushop.repo 790 repo = pushop.repo
695 # very naive computation, that can be quite expensive on big repo. 791 # very naive computation, that can be quite expensive on big repo.
696 # However: evolution is currently slow on them anyway. 792 # However: evolution is currently slow on them anyway.
697 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads)) 793 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
698 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes) 794 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
795
699 796
700 @pushdiscovery('bookmarks') 797 @pushdiscovery('bookmarks')
701 def _pushdiscoverybookmarks(pushop): 798 def _pushdiscoverybookmarks(pushop):
702 ui = pushop.ui 799 ui = pushop.ui
703 repo = pushop.repo.unfiltered() 800 repo = pushop.repo.unfiltered()
708 revnums = pycompat.maplist(repo.changelog.rev, pushop.revs) 805 revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
709 ancestors = repo.changelog.ancestors(revnums, inclusive=True) 806 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
710 807
711 remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, 'bookmarks')) 808 remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, 'bookmarks'))
712 809
713 explicit = {repo._bookmarks.expandname(bookmark) 810 explicit = {
714 for bookmark in pushop.bookmarks} 811 repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
812 }
715 813
716 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark) 814 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
717 return _processcompared(pushop, ancestors, explicit, remotebookmark, comp) 815 return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
816
718 817
719 def _processcompared(pushop, pushed, explicit, remotebms, comp): 818 def _processcompared(pushop, pushed, explicit, remotebms, comp):
720 """take decision on bookmarks to push to the remote repo 819 """take decision on bookmarks to push to the remote repo
721 820
722 Exists to help extensions alter this behavior. 821 Exists to help extensions alter this behavior.
752 explicit.remove(b) 851 explicit.remove(b)
753 852
754 if explicit: 853 if explicit:
755 explicit = sorted(explicit) 854 explicit = sorted(explicit)
756 # we should probably list all of them 855 # we should probably list all of them
757 pushop.ui.warn(_('bookmark %s does not exist on the local ' 856 pushop.ui.warn(
758 'or remote repository!\n') % explicit[0]) 857 _(
858 'bookmark %s does not exist on the local '
859 'or remote repository!\n'
860 )
861 % explicit[0]
862 )
759 pushop.bkresult = 2 863 pushop.bkresult = 2
760 864
761 pushop.outbookmarks.sort() 865 pushop.outbookmarks.sort()
866
762 867
763 def _pushcheckoutgoing(pushop): 868 def _pushcheckoutgoing(pushop):
764 outgoing = pushop.outgoing 869 outgoing = pushop.outgoing
765 unfi = pushop.repo.unfiltered() 870 unfi = pushop.repo.unfiltered()
766 if not outgoing.missing: 871 if not outgoing.missing:
774 if unfi.obsstore: 879 if unfi.obsstore:
775 # this message are here for 80 char limit reason 880 # this message are here for 80 char limit reason
776 mso = _("push includes obsolete changeset: %s!") 881 mso = _("push includes obsolete changeset: %s!")
777 mspd = _("push includes phase-divergent changeset: %s!") 882 mspd = _("push includes phase-divergent changeset: %s!")
778 mscd = _("push includes content-divergent changeset: %s!") 883 mscd = _("push includes content-divergent changeset: %s!")
779 mst = {"orphan": _("push includes orphan changeset: %s!"), 884 mst = {
780 "phase-divergent": mspd, 885 "orphan": _("push includes orphan changeset: %s!"),
781 "content-divergent": mscd} 886 "phase-divergent": mspd,
887 "content-divergent": mscd,
888 }
782 # If we are to push if there is at least one 889 # If we are to push if there is at least one
783 # obsolete or unstable changeset in missing, at 890 # obsolete or unstable changeset in missing, at
784 # least one of the missinghead will be obsolete or 891 # least one of the missinghead will be obsolete or
785 # unstable. So checking heads only is ok 892 # unstable. So checking heads only is ok
786 for node in outgoing.missingheads: 893 for node in outgoing.missingheads:
793 raise error.Abort(mst[ctx.instabilities()[0]] % ctx) 900 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
794 901
795 discovery.checkheads(pushop) 902 discovery.checkheads(pushop)
796 return True 903 return True
797 904
905
798 # List of names of steps to perform for an outgoing bundle2, order matters. 906 # List of names of steps to perform for an outgoing bundle2, order matters.
799 b2partsgenorder = [] 907 b2partsgenorder = []
800 908
801 # Mapping between step name and function 909 # Mapping between step name and function
802 # 910 #
803 # This exists to help extensions wrap steps if necessary 911 # This exists to help extensions wrap steps if necessary
804 b2partsgenmapping = {} 912 b2partsgenmapping = {}
805 913
914
806 def b2partsgenerator(stepname, idx=None): 915 def b2partsgenerator(stepname, idx=None):
807 """decorator for function generating bundle2 part 916 """decorator for function generating bundle2 part
808 917
809 The function is added to the step -> function mapping and appended to the 918 The function is added to the step -> function mapping and appended to the
810 list of steps. Beware that decorated functions will be added in order 919 list of steps. Beware that decorated functions will be added in order
811 (this may matter). 920 (this may matter).
812 921
813 You can only use this decorator for new steps, if you want to wrap a step 922 You can only use this decorator for new steps, if you want to wrap a step
814 from an extension, attack the b2partsgenmapping dictionary directly.""" 923 from an extension, attack the b2partsgenmapping dictionary directly."""
924
815 def dec(func): 925 def dec(func):
816 assert stepname not in b2partsgenmapping 926 assert stepname not in b2partsgenmapping
817 b2partsgenmapping[stepname] = func 927 b2partsgenmapping[stepname] = func
818 if idx is None: 928 if idx is None:
819 b2partsgenorder.append(stepname) 929 b2partsgenorder.append(stepname)
820 else: 930 else:
821 b2partsgenorder.insert(idx, stepname) 931 b2partsgenorder.insert(idx, stepname)
822 return func 932 return func
933
823 return dec 934 return dec
935
824 936
825 def _pushb2ctxcheckheads(pushop, bundler): 937 def _pushb2ctxcheckheads(pushop, bundler):
826 """Generate race condition checking parts 938 """Generate race condition checking parts
827 939
828 Exists as an independent function to aid extensions 940 Exists as an independent function to aid extensions
844 affected |= remote - set(newheads) 956 affected |= remote - set(newheads)
845 if affected: 957 if affected:
846 data = iter(sorted(affected)) 958 data = iter(sorted(affected))
847 bundler.newpart('check:updated-heads', data=data) 959 bundler.newpart('check:updated-heads', data=data)
848 960
961
849 def _pushing(pushop): 962 def _pushing(pushop):
850 """return True if we are pushing anything""" 963 """return True if we are pushing anything"""
851 return bool(pushop.outgoing.missing 964 return bool(
852 or pushop.outdatedphases 965 pushop.outgoing.missing
853 or pushop.outobsmarkers 966 or pushop.outdatedphases
854 or pushop.outbookmarks) 967 or pushop.outobsmarkers
968 or pushop.outbookmarks
969 )
970
855 971
856 @b2partsgenerator('check-bookmarks') 972 @b2partsgenerator('check-bookmarks')
857 def _pushb2checkbookmarks(pushop, bundler): 973 def _pushb2checkbookmarks(pushop, bundler):
858 """insert bookmark move checking""" 974 """insert bookmark move checking"""
859 if not _pushing(pushop) or pushop.force: 975 if not _pushing(pushop) or pushop.force:
865 data = [] 981 data = []
866 for book, old, new in pushop.outbookmarks: 982 for book, old, new in pushop.outbookmarks:
867 data.append((book, old)) 983 data.append((book, old))
868 checkdata = bookmod.binaryencode(data) 984 checkdata = bookmod.binaryencode(data)
869 bundler.newpart('check:bookmarks', data=checkdata) 985 bundler.newpart('check:bookmarks', data=checkdata)
986
870 987
871 @b2partsgenerator('check-phases') 988 @b2partsgenerator('check-phases')
872 def _pushb2checkphases(pushop, bundler): 989 def _pushb2checkphases(pushop, bundler):
873 """insert phase move checking""" 990 """insert phase move checking"""
874 if not _pushing(pushop) or pushop.force: 991 if not _pushing(pushop) or pushop.force:
884 for nodes in checks: 1001 for nodes in checks:
885 nodes.sort() 1002 nodes.sort()
886 checkdata = phases.binaryencode(checks) 1003 checkdata = phases.binaryencode(checks)
887 bundler.newpart('check:phases', data=checkdata) 1004 bundler.newpart('check:phases', data=checkdata)
888 1005
1006
889 @b2partsgenerator('changeset') 1007 @b2partsgenerator('changeset')
890 def _pushb2ctx(pushop, bundler): 1008 def _pushb2ctx(pushop, bundler):
891 """handle changegroup push through bundle2 1009 """handle changegroup push through bundle2
892 1010
893 addchangegroup result is stored in the ``pushop.cgresult`` attribute. 1011 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
904 1022
905 b2caps = bundle2.bundle2caps(pushop.remote) 1023 b2caps = bundle2.bundle2caps(pushop.remote)
906 version = '01' 1024 version = '01'
907 cgversions = b2caps.get('changegroup') 1025 cgversions = b2caps.get('changegroup')
908 if cgversions: # 3.1 and 3.2 ship with an empty value 1026 if cgversions: # 3.1 and 3.2 ship with an empty value
909 cgversions = [v for v in cgversions 1027 cgversions = [
910 if v in changegroup.supportedoutgoingversions( 1028 v
911 pushop.repo)] 1029 for v in cgversions
1030 if v in changegroup.supportedoutgoingversions(pushop.repo)
1031 ]
912 if not cgversions: 1032 if not cgversions:
913 raise error.Abort(_('no common changegroup version')) 1033 raise error.Abort(_('no common changegroup version'))
914 version = max(cgversions) 1034 version = max(cgversions)
915 cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version, 1035 cgstream = changegroup.makestream(
916 'push') 1036 pushop.repo, pushop.outgoing, version, 'push'
1037 )
917 cgpart = bundler.newpart('changegroup', data=cgstream) 1038 cgpart = bundler.newpart('changegroup', data=cgstream)
918 if cgversions: 1039 if cgversions:
919 cgpart.addparam('version', version) 1040 cgpart.addparam('version', version)
920 if 'treemanifest' in pushop.repo.requirements: 1041 if 'treemanifest' in pushop.repo.requirements:
921 cgpart.addparam('treemanifest', '1') 1042 cgpart.addparam('treemanifest', '1')
1043
922 def handlereply(op): 1044 def handlereply(op):
923 """extract addchangegroup returns from server reply""" 1045 """extract addchangegroup returns from server reply"""
924 cgreplies = op.records.getreplies(cgpart.id) 1046 cgreplies = op.records.getreplies(cgpart.id)
925 assert len(cgreplies['changegroup']) == 1 1047 assert len(cgreplies['changegroup']) == 1
926 pushop.cgresult = cgreplies['changegroup'][0]['return'] 1048 pushop.cgresult = cgreplies['changegroup'][0]['return']
1049
927 return handlereply 1050 return handlereply
1051
928 1052
929 @b2partsgenerator('phase') 1053 @b2partsgenerator('phase')
930 def _pushb2phases(pushop, bundler): 1054 def _pushb2phases(pushop, bundler):
931 """handle phase push through bundle2""" 1055 """handle phase push through bundle2"""
932 if 'phases' in pushop.stepsdone: 1056 if 'phases' in pushop.stepsdone:
940 1064
941 if hasphaseheads and not legacyphase: 1065 if hasphaseheads and not legacyphase:
942 return _pushb2phaseheads(pushop, bundler) 1066 return _pushb2phaseheads(pushop, bundler)
943 elif haspushkey: 1067 elif haspushkey:
944 return _pushb2phasespushkey(pushop, bundler) 1068 return _pushb2phasespushkey(pushop, bundler)
1069
945 1070
946 def _pushb2phaseheads(pushop, bundler): 1071 def _pushb2phaseheads(pushop, bundler):
947 """push phase information through a bundle2 - binary part""" 1072 """push phase information through a bundle2 - binary part"""
948 pushop.stepsdone.add('phases') 1073 pushop.stepsdone.add('phases')
949 if pushop.outdatedphases: 1074 if pushop.outdatedphases:
950 updates = [[] for p in phases.allphases] 1075 updates = [[] for p in phases.allphases]
951 updates[0].extend(h.node() for h in pushop.outdatedphases) 1076 updates[0].extend(h.node() for h in pushop.outdatedphases)
952 phasedata = phases.binaryencode(updates) 1077 phasedata = phases.binaryencode(updates)
953 bundler.newpart('phase-heads', data=phasedata) 1078 bundler.newpart('phase-heads', data=phasedata)
1079
954 1080
955 def _pushb2phasespushkey(pushop, bundler): 1081 def _pushb2phasespushkey(pushop, bundler):
956 """push phase information through a bundle2 - pushkey part""" 1082 """push phase information through a bundle2 - pushkey part"""
957 pushop.stepsdone.add('phases') 1083 pushop.stepsdone.add('phases')
958 part2node = [] 1084 part2node = []
983 msg = _('server ignored update of %s to public!\n') % node 1109 msg = _('server ignored update of %s to public!\n') % node
984 elif not int(results[0]['return']): 1110 elif not int(results[0]['return']):
985 msg = _('updating %s to public failed!\n') % node 1111 msg = _('updating %s to public failed!\n') % node
986 if msg is not None: 1112 if msg is not None:
987 pushop.ui.warn(msg) 1113 pushop.ui.warn(msg)
1114
988 return handlereply 1115 return handlereply
1116
989 1117
990 @b2partsgenerator('obsmarkers') 1118 @b2partsgenerator('obsmarkers')
991 def _pushb2obsmarkers(pushop, bundler): 1119 def _pushb2obsmarkers(pushop, bundler):
992 if 'obsmarkers' in pushop.stepsdone: 1120 if 'obsmarkers' in pushop.stepsdone:
993 return 1121 return
997 pushop.stepsdone.add('obsmarkers') 1125 pushop.stepsdone.add('obsmarkers')
998 if pushop.outobsmarkers: 1126 if pushop.outobsmarkers:
999 markers = sorted(pushop.outobsmarkers) 1127 markers = sorted(pushop.outobsmarkers)
1000 bundle2.buildobsmarkerspart(bundler, markers) 1128 bundle2.buildobsmarkerspart(bundler, markers)
1001 1129
1130
1002 @b2partsgenerator('bookmarks') 1131 @b2partsgenerator('bookmarks')
1003 def _pushb2bookmarks(pushop, bundler): 1132 def _pushb2bookmarks(pushop, bundler):
1004 """handle bookmark push through bundle2""" 1133 """handle bookmark push through bundle2"""
1005 if 'bookmarks' in pushop.stepsdone: 1134 if 'bookmarks' in pushop.stepsdone:
1006 return 1135 return
1011 1140
1012 if not legacybooks and 'bookmarks' in b2caps: 1141 if not legacybooks and 'bookmarks' in b2caps:
1013 return _pushb2bookmarkspart(pushop, bundler) 1142 return _pushb2bookmarkspart(pushop, bundler)
1014 elif 'pushkey' in b2caps: 1143 elif 'pushkey' in b2caps:
1015 return _pushb2bookmarkspushkey(pushop, bundler) 1144 return _pushb2bookmarkspushkey(pushop, bundler)
1145
1016 1146
1017 def _bmaction(old, new): 1147 def _bmaction(old, new):
1018 """small utility for bookmark pushing""" 1148 """small utility for bookmark pushing"""
1019 if not old: 1149 if not old:
1020 return 'export' 1150 return 'export'
1021 elif not new: 1151 elif not new:
1022 return 'delete' 1152 return 'delete'
1023 return 'update' 1153 return 'update'
1024 1154
1155
1025 def _abortonsecretctx(pushop, node, b): 1156 def _abortonsecretctx(pushop, node, b):
1026 """abort if a given bookmark points to a secret changeset""" 1157 """abort if a given bookmark points to a secret changeset"""
1027 if node and pushop.repo[node].phase() == phases.secret: 1158 if node and pushop.repo[node].phase() == phases.secret:
1028 raise error.Abort(_('cannot push bookmark %s as it points to a secret' 1159 raise error.Abort(
1029 ' changeset') % b) 1160 _('cannot push bookmark %s as it points to a secret' ' changeset')
1161 % b
1162 )
1163
1030 1164
1031 def _pushb2bookmarkspart(pushop, bundler): 1165 def _pushb2bookmarkspart(pushop, bundler):
1032 pushop.stepsdone.add('bookmarks') 1166 pushop.stepsdone.add('bookmarks')
1033 if not pushop.outbookmarks: 1167 if not pushop.outbookmarks:
1034 return 1168 return
1047 # if success 1181 # if success
1048 for book, action in allactions: 1182 for book, action in allactions:
1049 ui.status(bookmsgmap[action][0] % book) 1183 ui.status(bookmsgmap[action][0] % book)
1050 1184
1051 return handlereply 1185 return handlereply
1186
1052 1187
1053 def _pushb2bookmarkspushkey(pushop, bundler): 1188 def _pushb2bookmarkspushkey(pushop, bundler):
1054 pushop.stepsdone.add('bookmarks') 1189 pushop.stepsdone.add('bookmarks')
1055 part2book = [] 1190 part2book = []
1056 enc = pushkey.encode 1191 enc = pushkey.encode
1092 ui.status(bookmsgmap[action][0] % book) 1227 ui.status(bookmsgmap[action][0] % book)
1093 else: 1228 else:
1094 ui.warn(bookmsgmap[action][1] % book) 1229 ui.warn(bookmsgmap[action][1] % book)
1095 if pushop.bkresult is not None: 1230 if pushop.bkresult is not None:
1096 pushop.bkresult = 1 1231 pushop.bkresult = 1
1232
1097 return handlereply 1233 return handlereply
1234
1098 1235
1099 @b2partsgenerator('pushvars', idx=0) 1236 @b2partsgenerator('pushvars', idx=0)
1100 def _getbundlesendvars(pushop, bundler): 1237 def _getbundlesendvars(pushop, bundler):
1101 '''send shellvars via bundle2''' 1238 '''send shellvars via bundle2'''
1102 pushvars = pushop.pushvars 1239 pushvars = pushop.pushvars
1103 if pushvars: 1240 if pushvars:
1104 shellvars = {} 1241 shellvars = {}
1105 for raw in pushvars: 1242 for raw in pushvars:
1106 if '=' not in raw: 1243 if '=' not in raw:
1107 msg = ("unable to parse variable '%s', should follow " 1244 msg = (
1108 "'KEY=VALUE' or 'KEY=' format") 1245 "unable to parse variable '%s', should follow "
1246 "'KEY=VALUE' or 'KEY=' format"
1247 )
1109 raise error.Abort(msg % raw) 1248 raise error.Abort(msg % raw)
1110 k, v = raw.split('=', 1) 1249 k, v = raw.split('=', 1)
1111 shellvars[k] = v 1250 shellvars[k] = v
1112 1251
1113 part = bundler.newpart('pushvars') 1252 part = bundler.newpart('pushvars')
1114 1253
1115 for key, value in shellvars.iteritems(): 1254 for key, value in shellvars.iteritems():
1116 part.addparam(key, value, mandatory=False) 1255 part.addparam(key, value, mandatory=False)
1117 1256
1257
1118 def _pushbundle2(pushop): 1258 def _pushbundle2(pushop):
1119 """push data to the remote using bundle2 1259 """push data to the remote using bundle2
1120 1260
1121 The only currently supported type of data is changegroup but this will 1261 The only currently supported type of data is changegroup but this will
1122 evolve in the future.""" 1262 evolve in the future."""
1123 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote)) 1263 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1124 pushback = (pushop.trmanager 1264 pushback = pushop.trmanager and pushop.ui.configbool(
1125 and pushop.ui.configbool('experimental', 'bundle2.pushback')) 1265 'experimental', 'bundle2.pushback'
1266 )
1126 1267
1127 # create reply capability 1268 # create reply capability
1128 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo, 1269 capsblob = bundle2.encodecaps(
1129 allowpushback=pushback, 1270 bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role='client')
1130 role='client')) 1271 )
1131 bundler.newpart('replycaps', data=capsblob) 1272 bundler.newpart('replycaps', data=capsblob)
1132 replyhandlers = [] 1273 replyhandlers = []
1133 for partgenname in b2partsgenorder: 1274 for partgenname in b2partsgenorder:
1134 partgen = b2partsgenmapping[partgenname] 1275 partgen = b2partsgenmapping[partgenname]
1135 ret = partgen(pushop, bundler) 1276 ret = partgen(pushop, bundler)
1140 return 1281 return
1141 stream = util.chunkbuffer(bundler.getchunks()) 1282 stream = util.chunkbuffer(bundler.getchunks())
1142 try: 1283 try:
1143 try: 1284 try:
1144 with pushop.remote.commandexecutor() as e: 1285 with pushop.remote.commandexecutor() as e:
1145 reply = e.callcommand('unbundle', { 1286 reply = e.callcommand(
1146 'bundle': stream, 1287 'unbundle',
1147 'heads': ['force'], 1288 {
1148 'url': pushop.remote.url(), 1289 'bundle': stream,
1149 }).result() 1290 'heads': ['force'],
1291 'url': pushop.remote.url(),
1292 },
1293 ).result()
1150 except error.BundleValueError as exc: 1294 except error.BundleValueError as exc:
1151 raise error.Abort(_('missing support for %s') % exc) 1295 raise error.Abort(_('missing support for %s') % exc)
1152 try: 1296 try:
1153 trgetter = None 1297 trgetter = None
1154 if pushback: 1298 if pushback:
1167 raise 1311 raise
1168 pushop.pkfailcb[partid](pushop, exc) 1312 pushop.pkfailcb[partid](pushop, exc)
1169 for rephand in replyhandlers: 1313 for rephand in replyhandlers:
1170 rephand(op) 1314 rephand(op)
1171 1315
1316
1172 def _pushchangeset(pushop): 1317 def _pushchangeset(pushop):
1173 """Make the actual push of changeset bundle to remote repo""" 1318 """Make the actual push of changeset bundle to remote repo"""
1174 if 'changesets' in pushop.stepsdone: 1319 if 'changesets' in pushop.stepsdone:
1175 return 1320 return
1176 pushop.stepsdone.add('changesets') 1321 pushop.stepsdone.add('changesets')
1183 pushop.repo.prepushoutgoinghooks(pushop) 1328 pushop.repo.prepushoutgoinghooks(pushop)
1184 outgoing = pushop.outgoing 1329 outgoing = pushop.outgoing
1185 # TODO: get bundlecaps from remote 1330 # TODO: get bundlecaps from remote
1186 bundlecaps = None 1331 bundlecaps = None
1187 # create a changegroup from local 1332 # create a changegroup from local
1188 if pushop.revs is None and not (outgoing.excluded 1333 if pushop.revs is None and not (
1189 or pushop.repo.changelog.filteredrevs): 1334 outgoing.excluded or pushop.repo.changelog.filteredrevs
1335 ):
1190 # push everything, 1336 # push everything,
1191 # use the fast path, no race possible on push 1337 # use the fast path, no race possible on push
1192 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push', 1338 cg = changegroup.makechangegroup(
1193 fastpath=True, bundlecaps=bundlecaps) 1339 pushop.repo,
1340 outgoing,
1341 '01',
1342 'push',
1343 fastpath=True,
1344 bundlecaps=bundlecaps,
1345 )
1194 else: 1346 else:
1195 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 1347 cg = changegroup.makechangegroup(
1196 'push', bundlecaps=bundlecaps) 1348 pushop.repo, outgoing, '01', 'push', bundlecaps=bundlecaps
1349 )
1197 1350
1198 # apply changegroup to remote 1351 # apply changegroup to remote
1199 # local repo finds heads on server, finds out what 1352 # local repo finds heads on server, finds out what
1200 # revs it must push. once revs transferred, if server 1353 # revs it must push. once revs transferred, if server
1201 # finds it has different heads (someone else won 1354 # finds it has different heads (someone else won
1204 remoteheads = ['force'] 1357 remoteheads = ['force']
1205 else: 1358 else:
1206 remoteheads = pushop.remoteheads 1359 remoteheads = pushop.remoteheads
1207 # ssh: return remote's addchangegroup() 1360 # ssh: return remote's addchangegroup()
1208 # http: return remote's addchangegroup() or 0 for error 1361 # http: return remote's addchangegroup() or 0 for error
1209 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, 1362 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1210 pushop.repo.url()) 1363
1211 1364
1212 def _pushsyncphase(pushop): 1365 def _pushsyncphase(pushop):
1213 """synchronise phase information locally and remotely""" 1366 """synchronise phase information locally and remotely"""
1214 cheads = pushop.commonheads 1367 cheads = pushop.commonheads
1215 # even when we don't push, exchanging phase data is useful 1368 # even when we don't push, exchanging phase data is useful
1216 remotephases = listkeys(pushop.remote, 'phases') 1369 remotephases = listkeys(pushop.remote, 'phases')
1217 if (pushop.ui.configbool('ui', '_usedassubrepo') 1370 if (
1218 and remotephases # server supports phases 1371 pushop.ui.configbool('ui', '_usedassubrepo')
1219 and pushop.cgresult is None # nothing was pushed 1372 and remotephases # server supports phases
1220 and remotephases.get('publishing', False)): 1373 and pushop.cgresult is None # nothing was pushed
1374 and remotephases.get('publishing', False)
1375 ):
1221 # When: 1376 # When:
1222 # - this is a subrepo push 1377 # - this is a subrepo push
1223 # - and remote support phase 1378 # - and remote support phase
1224 # - and no changeset was pushed 1379 # - and no changeset was pushed
1225 # - and remote is publishing 1380 # - and remote is publishing
1226 # We may be in issue 3871 case! 1381 # We may be in issue 3871 case!
1227 # We drop the possible phase synchronisation done by 1382 # We drop the possible phase synchronisation done by
1228 # courtesy to publish changesets possibly locally draft 1383 # courtesy to publish changesets possibly locally draft
1229 # on the remote. 1384 # on the remote.
1230 remotephases = {'publishing': 'True'} 1385 remotephases = {'publishing': 'True'}
1231 if not remotephases: # old server or public only reply from non-publishing 1386 if not remotephases: # old server or public only reply from non-publishing
1232 _localphasemove(pushop, cheads) 1387 _localphasemove(pushop, cheads)
1233 # don't push any phase data as there is nothing to push 1388 # don't push any phase data as there is nothing to push
1234 else: 1389 else:
1235 ana = phases.analyzeremotephases(pushop.repo, cheads, 1390 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1236 remotephases)
1237 pheads, droots = ana 1391 pheads, droots = ana
1238 ### Apply remote phase on local 1392 ### Apply remote phase on local
1239 if remotephases.get('publishing', False): 1393 if remotephases.get('publishing', False):
1240 _localphasemove(pushop, cheads) 1394 _localphasemove(pushop, cheads)
1241 else: # publish = False 1395 else: # publish = False
1242 _localphasemove(pushop, pheads) 1396 _localphasemove(pushop, pheads)
1243 _localphasemove(pushop, cheads, phases.draft) 1397 _localphasemove(pushop, cheads, phases.draft)
1244 ### Apply local phase on remote 1398 ### Apply local phase on remote
1245 1399
1246 if pushop.cgresult: 1400 if pushop.cgresult:
1256 # filter heads already turned public by the push 1410 # filter heads already turned public by the push
1257 outdated = [c for c in outdated if c.node() not in pheads] 1411 outdated = [c for c in outdated if c.node() not in pheads]
1258 # fallback to independent pushkey command 1412 # fallback to independent pushkey command
1259 for newremotehead in outdated: 1413 for newremotehead in outdated:
1260 with pushop.remote.commandexecutor() as e: 1414 with pushop.remote.commandexecutor() as e:
1261 r = e.callcommand('pushkey', { 1415 r = e.callcommand(
1262 'namespace': 'phases', 1416 'pushkey',
1263 'key': newremotehead.hex(), 1417 {
1264 'old': '%d' % phases.draft, 1418 'namespace': 'phases',
1265 'new': '%d' % phases.public 1419 'key': newremotehead.hex(),
1266 }).result() 1420 'old': '%d' % phases.draft,
1421 'new': '%d' % phases.public,
1422 },
1423 ).result()
1267 1424
1268 if not r: 1425 if not r:
1269 pushop.ui.warn(_('updating %s to public failed!\n') 1426 pushop.ui.warn(
1270 % newremotehead) 1427 _('updating %s to public failed!\n') % newremotehead
1428 )
1429
1271 1430
1272 def _localphasemove(pushop, nodes, phase=phases.public): 1431 def _localphasemove(pushop, nodes, phase=phases.public):
1273 """move <nodes> to <phase> in the local source repo""" 1432 """move <nodes> to <phase> in the local source repo"""
1274 if pushop.trmanager: 1433 if pushop.trmanager:
1275 phases.advanceboundary(pushop.repo, 1434 phases.advanceboundary(
1276 pushop.trmanager.transaction(), 1435 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1277 phase, 1436 )
1278 nodes)
1279 else: 1437 else:
1280 # repo is not locked, do not change any phases! 1438 # repo is not locked, do not change any phases!
1281 # Informs the user that phases should have been moved when 1439 # Informs the user that phases should have been moved when
1282 # applicable. 1440 # applicable.
1283 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()] 1441 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1284 phasestr = phases.phasenames[phase] 1442 phasestr = phases.phasenames[phase]
1285 if actualmoves: 1443 if actualmoves:
1286 pushop.ui.status(_('cannot lock source repo, skipping ' 1444 pushop.ui.status(
1287 'local %s phase update\n') % phasestr) 1445 _(
1446 'cannot lock source repo, skipping '
1447 'local %s phase update\n'
1448 )
1449 % phasestr
1450 )
1451
1288 1452
1289 def _pushobsolete(pushop): 1453 def _pushobsolete(pushop):
1290 """utility function to push obsolete markers to a remote""" 1454 """utility function to push obsolete markers to a remote"""
1291 if 'obsmarkers' in pushop.stepsdone: 1455 if 'obsmarkers' in pushop.stepsdone:
1292 return 1456 return
1303 rslts.append(remote.pushkey('obsolete', key, '', data)) 1467 rslts.append(remote.pushkey('obsolete', key, '', data))
1304 if [r for r in rslts if not r]: 1468 if [r for r in rslts if not r]:
1305 msg = _('failed to push some obsolete markers!\n') 1469 msg = _('failed to push some obsolete markers!\n')
1306 repo.ui.warn(msg) 1470 repo.ui.warn(msg)
1307 1471
1472
1308 def _pushbookmark(pushop): 1473 def _pushbookmark(pushop):
1309 """Update bookmark position on remote""" 1474 """Update bookmark position on remote"""
1310 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone: 1475 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1311 return 1476 return
1312 pushop.stepsdone.add('bookmarks') 1477 pushop.stepsdone.add('bookmarks')
1319 action = 'export' 1484 action = 'export'
1320 elif not new: 1485 elif not new:
1321 action = 'delete' 1486 action = 'delete'
1322 1487
1323 with remote.commandexecutor() as e: 1488 with remote.commandexecutor() as e:
1324 r = e.callcommand('pushkey', { 1489 r = e.callcommand(
1325 'namespace': 'bookmarks', 1490 'pushkey',
1326 'key': b, 1491 {
1327 'old': hex(old), 1492 'namespace': 'bookmarks',
1328 'new': hex(new), 1493 'key': b,
1329 }).result() 1494 'old': hex(old),
1495 'new': hex(new),
1496 },
1497 ).result()
1330 1498
1331 if r: 1499 if r:
1332 ui.status(bookmsgmap[action][0] % b) 1500 ui.status(bookmsgmap[action][0] % b)
1333 else: 1501 else:
1334 ui.warn(bookmsgmap[action][1] % b) 1502 ui.warn(bookmsgmap[action][1] % b)
1335 # discovery can have set the value form invalid entry 1503 # discovery can have set the value form invalid entry
1336 if pushop.bkresult is not None: 1504 if pushop.bkresult is not None:
1337 pushop.bkresult = 1 1505 pushop.bkresult = 1
1338 1506
1507
1339 class pulloperation(object): 1508 class pulloperation(object):
1340 """A object that represent a single pull operation 1509 """A object that represent a single pull operation
1341 1510
1342 It purpose is to carry pull related state and very common operation. 1511 It purpose is to carry pull related state and very common operation.
1343 1512
1344 A new should be created at the beginning of each pull and discarded 1513 A new should be created at the beginning of each pull and discarded
1345 afterward. 1514 afterward.
1346 """ 1515 """
1347 1516
1348 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(), 1517 def __init__(
1349 remotebookmarks=None, streamclonerequested=None, 1518 self,
1350 includepats=None, excludepats=None, depth=None): 1519 repo,
1520 remote,
1521 heads=None,
1522 force=False,
1523 bookmarks=(),
1524 remotebookmarks=None,
1525 streamclonerequested=None,
1526 includepats=None,
1527 excludepats=None,
1528 depth=None,
1529 ):
1351 # repo we pull into 1530 # repo we pull into
1352 self.repo = repo 1531 self.repo = repo
1353 # repo we pull from 1532 # repo we pull from
1354 self.remote = remote 1533 self.remote = remote
1355 # revision we try to pull (None is "all") 1534 # revision we try to pull (None is "all")
1356 self.heads = heads 1535 self.heads = heads
1357 # bookmark pulled explicitly 1536 # bookmark pulled explicitly
1358 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark) 1537 self.explicitbookmarks = [
1359 for bookmark in bookmarks] 1538 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1539 ]
1360 # do we force pull? 1540 # do we force pull?
1361 self.force = force 1541 self.force = force
1362 # whether a streaming clone was requested 1542 # whether a streaming clone was requested
1363 self.streamclonerequested = streamclonerequested 1543 self.streamclonerequested = streamclonerequested
1364 # transaction manager 1544 # transaction manager
1412 1592
    def gettransaction(self):
        """return the transaction for this pull (deprecated accessor)

        Kept for backward compatibility; callers should use
        ``pullop.trmanager.transaction()`` directly instead.
        """
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1416 1596
1597
1417 class transactionmanager(util.transactional): 1598 class transactionmanager(util.transactional):
1418 """An object to manage the life cycle of a transaction 1599 """An object to manage the life cycle of a transaction
1419 1600
1420 It creates the transaction on demand and calls the appropriate hooks when 1601 It creates the transaction on demand and calls the appropriate hooks when
1421 closing the transaction.""" 1602 closing the transaction."""
1603
    def __init__(self, repo, source, url):
        # repository the transaction will be opened on
        self.repo = repo
        # name of the operation creating the transaction (e.g. 'pull')
        self.source = source
        # URL of the remote peer involved in the exchange
        self.url = url
        # underlying transaction; created on demand (see class docstring)
        self._tr = None
    def release(self):
        """release transaction if created

        No-op when no transaction was ever opened (``_tr`` still None).
        """
        if self._tr is not None:
            self._tr.release()
1446 1628
1629
def listkeys(remote, namespace):
    """return the pushkey listing of ``namespace`` on ``remote``"""
    with remote.commandexecutor() as executor:
        future = executor.callcommand('listkeys', {'namespace': namespace})
        return future.result()
1633
1450 1634
1451 def _fullpullbundle2(repo, pullop): 1635 def _fullpullbundle2(repo, pullop):
1452 # The server may send a partial reply, i.e. when inlining 1636 # The server may send a partial reply, i.e. when inlining
1453 # pre-computed bundles. In that case, update the common 1637 # pre-computed bundles. In that case, update the common
1454 # set based on the results and pull another bundle. 1638 # set based on the results and pull another bundle.
1458 # - all remote heads are known locally. 1642 # - all remote heads are known locally.
1459 # The head check must use the unfiltered view as obsoletion 1643 # The head check must use the unfiltered view as obsoletion
1460 # markers can hide heads. 1644 # markers can hide heads.
1461 unfi = repo.unfiltered() 1645 unfi = repo.unfiltered()
1462 unficl = unfi.changelog 1646 unficl = unfi.changelog
1647
1463 def headsofdiff(h1, h2): 1648 def headsofdiff(h1, h2):
1464 """Returns heads(h1 % h2)""" 1649 """Returns heads(h1 % h2)"""
1465 res = unfi.set('heads(%ln %% %ln)', h1, h2) 1650 res = unfi.set('heads(%ln %% %ln)', h1, h2)
1466 return set(ctx.node() for ctx in res) 1651 return set(ctx.node() for ctx in res)
1652
1467 def headsofunion(h1, h2): 1653 def headsofunion(h1, h2):
1468 """Returns heads((h1 + h2) - null)""" 1654 """Returns heads((h1 + h2) - null)"""
1469 res = unfi.set('heads((%ln + %ln - null))', h1, h2) 1655 res = unfi.set('heads((%ln + %ln - null))', h1, h2)
1470 return set(ctx.node() for ctx in res) 1656 return set(ctx.node() for ctx in res)
1657
1471 while True: 1658 while True:
1472 old_heads = unficl.heads() 1659 old_heads = unficl.heads()
1473 clstart = len(unficl) 1660 clstart = len(unficl)
1474 _pullbundle2(pullop) 1661 _pullbundle2(pullop)
1475 if repository.NARROW_REQUIREMENT in repo.requirements: 1662 if repository.NARROW_REQUIREMENT in repo.requirements:
1484 break 1671 break
1485 new_heads = headsofdiff(unficl.heads(), old_heads) 1672 new_heads = headsofdiff(unficl.heads(), old_heads)
1486 pullop.common = headsofunion(new_heads, pullop.common) 1673 pullop.common = headsofunion(new_heads, pullop.common)
1487 pullop.rheads = set(pullop.rheads) - pullop.common 1674 pullop.rheads = set(pullop.rheads) - pullop.common
1488 1675
1489 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None, 1676
1490 streamclonerequested=None, includepats=None, excludepats=None, 1677 def pull(
1491 depth=None): 1678 repo,
1679 remote,
1680 heads=None,
1681 force=False,
1682 bookmarks=(),
1683 opargs=None,
1684 streamclonerequested=None,
1685 includepats=None,
1686 excludepats=None,
1687 depth=None,
1688 ):
1492 """Fetch repository data from a remote. 1689 """Fetch repository data from a remote.
1493 1690
1494 This is the main function used to retrieve data from a remote repository. 1691 This is the main function used to retrieve data from a remote repository.
1495 1692
1496 ``repo`` is the local repository to clone into. 1693 ``repo`` is the local repository to clone into.
1527 includepats, excludepats = repo.narrowpats 1724 includepats, excludepats = repo.narrowpats
1528 1725
1529 narrowspec.validatepatterns(includepats) 1726 narrowspec.validatepatterns(includepats)
1530 narrowspec.validatepatterns(excludepats) 1727 narrowspec.validatepatterns(excludepats)
1531 1728
1532 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks, 1729 pullop = pulloperation(
1533 streamclonerequested=streamclonerequested, 1730 repo,
1534 includepats=includepats, excludepats=excludepats, 1731 remote,
1535 depth=depth, 1732 heads,
1536 **pycompat.strkwargs(opargs)) 1733 force,
1734 bookmarks=bookmarks,
1735 streamclonerequested=streamclonerequested,
1736 includepats=includepats,
1737 excludepats=excludepats,
1738 depth=depth,
1739 **pycompat.strkwargs(opargs)
1740 )
1537 1741
1538 peerlocal = pullop.remote.local() 1742 peerlocal = pullop.remote.local()
1539 if peerlocal: 1743 if peerlocal:
1540 missing = set(peerlocal.requirements) - pullop.repo.supported 1744 missing = set(peerlocal.requirements) - pullop.repo.supported
1541 if missing: 1745 if missing:
1542 msg = _("required features are not" 1746 msg = _(
1543 " supported in the destination:" 1747 "required features are not"
1544 " %s") % (', '.join(sorted(missing))) 1748 " supported in the destination:"
1749 " %s"
1750 ) % (', '.join(sorted(missing)))
1545 raise error.Abort(msg) 1751 raise error.Abort(msg)
1546 1752
1547 pullop.trmanager = transactionmanager(repo, 'pull', remote.url()) 1753 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1548 wlock = util.nullcontextmanager() 1754 wlock = util.nullcontextmanager()
1549 if not bookmod.bookmarksinstore(repo): 1755 if not bookmod.bookmarksinstore(repo):
1569 if repo.ui.configbool('experimental', 'remotenames'): 1775 if repo.ui.configbool('experimental', 'remotenames'):
1570 logexchange.pullremotenames(repo, remote) 1776 logexchange.pullremotenames(repo, remote)
1571 1777
1572 return pullop 1778 return pullop
1573 1779
1780
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}


def pulldiscovery(stepname):
    """decorator registering a discovery step to run before pull

    The decorated function is recorded in the step -> function mapping and
    the step name is appended to the ordered list of steps, so registration
    order matters.

    Only use this decorator for brand new steps; to wrap a step from an
    extension, update the pulldiscovery dictionary directly."""

    def register(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func

    return register
1807
1597 1808
def _pulldiscovery(pullop):
    """Run every registered discovery step, in registration order"""
    for stepname in pulldiscoveryorder:
        pulldiscoverymapping[stepname](pullop)
1603 1814
1815
1604 @pulldiscovery('b1:bookmarks') 1816 @pulldiscovery('b1:bookmarks')
1605 def _pullbookmarkbundle1(pullop): 1817 def _pullbookmarkbundle1(pullop):
1606 """fetch bookmark data in bundle1 case 1818 """fetch bookmark data in bundle1 case
1607 1819
1608 If not using bundle2, we have to fetch bookmarks before changeset 1820 If not using bundle2, we have to fetch bookmarks before changeset
1621 def _pulldiscoverychangegroup(pullop): 1833 def _pulldiscoverychangegroup(pullop):
1622 """discovery phase for the pull 1834 """discovery phase for the pull
1623 1835
1624 Current handle changeset discovery only, will change handle all discovery 1836 Current handle changeset discovery only, will change handle all discovery
1625 at some point.""" 1837 at some point."""
1626 tmp = discovery.findcommonincoming(pullop.repo, 1838 tmp = discovery.findcommonincoming(
1627 pullop.remote, 1839 pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
1628 heads=pullop.heads, 1840 )
1629 force=pullop.force)
1630 common, fetch, rheads = tmp 1841 common, fetch, rheads = tmp
1631 nm = pullop.repo.unfiltered().changelog.nodemap 1842 nm = pullop.repo.unfiltered().changelog.nodemap
1632 if fetch and rheads: 1843 if fetch and rheads:
1633 # If a remote heads is filtered locally, put in back in common. 1844 # If a remote heads is filtered locally, put in back in common.
1634 # 1845 #
1648 fetch = [] 1859 fetch = []
1649 pullop.common = common 1860 pullop.common = common
1650 pullop.fetch = fetch 1861 pullop.fetch = fetch
1651 pullop.rheads = rheads 1862 pullop.rheads = rheads
1652 1863
1864
1653 def _pullbundle2(pullop): 1865 def _pullbundle2(pullop):
1654 """pull data using bundle2 1866 """pull data using bundle2
1655 1867
1656 For now, the only supported data are changegroup.""" 1868 For now, the only supported data are changegroup."""
1657 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')} 1869 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1686 1898
1687 kwargs['cg'] = pullop.fetch 1899 kwargs['cg'] = pullop.fetch
1688 1900
1689 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange') 1901 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1690 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ()) 1902 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1691 if (not legacyphase and hasbinaryphase): 1903 if not legacyphase and hasbinaryphase:
1692 kwargs['phases'] = True 1904 kwargs['phases'] = True
1693 pullop.stepsdone.add('phases') 1905 pullop.stepsdone.add('phases')
1694 1906
1695 if 'listkeys' in pullop.remotebundle2caps: 1907 if 'listkeys' in pullop.remotebundle2caps:
1696 if 'phases' not in pullop.stepsdone: 1908 if 'phases' not in pullop.stepsdone:
1701 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps 1913 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1702 1914
1703 if pullop.remotebookmarks is not None: 1915 if pullop.remotebookmarks is not None:
1704 pullop.stepsdone.add('request-bookmarks') 1916 pullop.stepsdone.add('request-bookmarks')
1705 1917
1706 if ('request-bookmarks' not in pullop.stepsdone 1918 if (
1919 'request-bookmarks' not in pullop.stepsdone
1707 and pullop.remotebookmarks is None 1920 and pullop.remotebookmarks is None
1708 and not legacybookmark and hasbinarybook): 1921 and not legacybookmark
1922 and hasbinarybook
1923 ):
1709 kwargs['bookmarks'] = True 1924 kwargs['bookmarks'] = True
1710 bookmarksrequested = True 1925 bookmarksrequested = True
1711 1926
1712 if 'listkeys' in pullop.remotebundle2caps: 1927 if 'listkeys' in pullop.remotebundle2caps:
1713 if 'request-bookmarks' not in pullop.stepsdone: 1928 if 'request-bookmarks' not in pullop.stepsdone:
1719 # If this is a full pull / clone and the server supports the clone bundles 1934 # If this is a full pull / clone and the server supports the clone bundles
1720 # feature, tell the server whether we attempted a clone bundle. The 1935 # feature, tell the server whether we attempted a clone bundle. The
1721 # presence of this flag indicates the client supports clone bundles. This 1936 # presence of this flag indicates the client supports clone bundles. This
1722 # will enable the server to treat clients that support clone bundles 1937 # will enable the server to treat clients that support clone bundles
1723 # differently from those that don't. 1938 # differently from those that don't.
1724 if (pullop.remote.capable('clonebundles') 1939 if (
1725 and pullop.heads is None and list(pullop.common) == [nullid]): 1940 pullop.remote.capable('clonebundles')
1941 and pullop.heads is None
1942 and list(pullop.common) == [nullid]
1943 ):
1726 kwargs['cbattempted'] = pullop.clonebundleattempted 1944 kwargs['cbattempted'] = pullop.clonebundleattempted
1727 1945
1728 if streaming: 1946 if streaming:
1729 pullop.repo.ui.status(_('streaming all changes\n')) 1947 pullop.repo.ui.status(_('streaming all changes\n'))
1730 elif not pullop.fetch: 1948 elif not pullop.fetch:
1744 args = dict(kwargs) 1962 args = dict(kwargs)
1745 args['source'] = 'pull' 1963 args['source'] = 'pull'
1746 bundle = e.callcommand('getbundle', args).result() 1964 bundle = e.callcommand('getbundle', args).result()
1747 1965
1748 try: 1966 try:
1749 op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction, 1967 op = bundle2.bundleoperation(
1750 source='pull') 1968 pullop.repo, pullop.gettransaction, source='pull'
1969 )
1751 op.modes['bookmarks'] = 'records' 1970 op.modes['bookmarks'] = 'records'
1752 bundle2.processbundle(pullop.repo, bundle, op=op) 1971 bundle2.processbundle(pullop.repo, bundle, op=op)
1753 except bundle2.AbortFromPart as exc: 1972 except bundle2.AbortFromPart as exc:
1754 pullop.repo.ui.status(_('remote: abort: %s\n') % exc) 1973 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1755 raise error.Abort(_('pull failed on remote'), hint=exc.hint) 1974 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1777 1996
1778 # bookmark data were either already there or pulled in the bundle 1997 # bookmark data were either already there or pulled in the bundle
1779 if pullop.remotebookmarks is not None: 1998 if pullop.remotebookmarks is not None:
1780 _pullbookmarks(pullop) 1999 _pullbookmarks(pullop)
1781 2000
2001
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call"""
    # Intentionally empty: extensions wrap this function to add or adjust
    # arguments in ``kwargs`` before the getbundle request is issued.
1784 2005
1785 def _pullchangeset(pullop): 2006 def _pullchangeset(pullop):
1786 """pull changeset from unbundle into the local repo""" 2007 """pull changeset from unbundle into the local repo"""
1787 # We delay the open of the transaction as late as possible so we 2008 # We delay the open of the transaction as late as possible so we
1788 # don't open transaction for nothing or you break future useful 2009 # don't open transaction for nothing or you break future useful
1801 # issue1320, avoid a race if remote changed after discovery 2022 # issue1320, avoid a race if remote changed after discovery
1802 pullop.heads = pullop.rheads 2023 pullop.heads = pullop.rheads
1803 2024
1804 if pullop.remote.capable('getbundle'): 2025 if pullop.remote.capable('getbundle'):
1805 # TODO: get bundlecaps from remote 2026 # TODO: get bundlecaps from remote
1806 cg = pullop.remote.getbundle('pull', common=pullop.common, 2027 cg = pullop.remote.getbundle(
1807 heads=pullop.heads or pullop.rheads) 2028 'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
2029 )
1808 elif pullop.heads is None: 2030 elif pullop.heads is None:
1809 with pullop.remote.commandexecutor() as e: 2031 with pullop.remote.commandexecutor() as e:
1810 cg = e.callcommand('changegroup', { 2032 cg = e.callcommand(
1811 'nodes': pullop.fetch, 2033 'changegroup', {'nodes': pullop.fetch, 'source': 'pull',}
1812 'source': 'pull', 2034 ).result()
1813 }).result()
1814 2035
1815 elif not pullop.remote.capable('changegroupsubset'): 2036 elif not pullop.remote.capable('changegroupsubset'):
1816 raise error.Abort(_("partial pull cannot be done because " 2037 raise error.Abort(
1817 "other repository doesn't support " 2038 _(
1818 "changegroupsubset.")) 2039 "partial pull cannot be done because "
2040 "other repository doesn't support "
2041 "changegroupsubset."
2042 )
2043 )
1819 else: 2044 else:
1820 with pullop.remote.commandexecutor() as e: 2045 with pullop.remote.commandexecutor() as e:
1821 cg = e.callcommand('changegroupsubset', { 2046 cg = e.callcommand(
1822 'bases': pullop.fetch, 2047 'changegroupsubset',
1823 'heads': pullop.heads, 2048 {
1824 'source': 'pull', 2049 'bases': pullop.fetch,
1825 }).result() 2050 'heads': pullop.heads,
1826 2051 'source': 'pull',
1827 bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull', 2052 },
1828 pullop.remote.url()) 2053 ).result()
2054
2055 bundleop = bundle2.applybundle(
2056 pullop.repo, cg, tr, 'pull', pullop.remote.url()
2057 )
1829 pullop.cgresult = bundle2.combinechangegroupresults(bundleop) 2058 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
2059
1830 2060
def _pullphase(pullop):
    """fetch remote phase information and apply it locally"""
    # _pullapplyphases re-checks and records 'phases' in stepsdone itself,
    # but checking here first avoids a useless listkeys round-trip.
    if 'phases' in pullop.stepsdone:
        return
    _pullapplyphases(pullop, listkeys(pullop.remote, 'phases'))
2067
1837 2068
1838 def _pullapplyphases(pullop, remotephases): 2069 def _pullapplyphases(pullop, remotephases):
1839 """apply phase movement from observed remote state""" 2070 """apply phase movement from observed remote state"""
1840 if 'phases' in pullop.stepsdone: 2071 if 'phases' in pullop.stepsdone:
1841 return 2072 return
1842 pullop.stepsdone.add('phases') 2073 pullop.stepsdone.add('phases')
1843 publishing = bool(remotephases.get('publishing', False)) 2074 publishing = bool(remotephases.get('publishing', False))
1844 if remotephases and not publishing: 2075 if remotephases and not publishing:
1845 # remote is new and non-publishing 2076 # remote is new and non-publishing
1846 pheads, _dr = phases.analyzeremotephases(pullop.repo, 2077 pheads, _dr = phases.analyzeremotephases(
1847 pullop.pulledsubset, 2078 pullop.repo, pullop.pulledsubset, remotephases
1848 remotephases) 2079 )
1849 dheads = pullop.pulledsubset 2080 dheads = pullop.pulledsubset
1850 else: 2081 else:
1851 # Remote is old or publishing all common changesets 2082 # Remote is old or publishing all common changesets
1852 # should be seen as public 2083 # should be seen as public
1853 pheads = pullop.pulledsubset 2084 pheads = pullop.pulledsubset
1868 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft] 2099 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1869 if dheads: 2100 if dheads:
1870 tr = pullop.gettransaction() 2101 tr = pullop.gettransaction()
1871 phases.advanceboundary(pullop.repo, tr, draft, dheads) 2102 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1872 2103
2104
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    # merge the remote view into the local bookmark store, honoring the
    # bookmarks the user asked for explicitly
    bookmod.updatefromremote(
        repo.ui,
        repo,
        pullop.remotebookmarks,
        pullop.remote.url(),
        pullop.gettransaction,
        explicit=pullop.explicitbookmarks,
    )
1884 2121
1885 def _pullobsolete(pullop): 2122 def _pullobsolete(pullop):
1886 """utility function to pull obsolete markers from a remote 2123 """utility function to pull obsolete markers from a remote
1887 2124
1888 The `gettransaction` is function that return the pull transaction, creating 2125 The `gettransaction` is function that return the pull transaction, creating
1908 if markers: 2145 if markers:
1909 pullop.repo.obsstore.add(tr, markers) 2146 pullop.repo.obsstore.add(tr, markers)
1910 pullop.repo.invalidatevolatilesets() 2147 pullop.repo.invalidatevolatilesets()
1911 return tr 2148 return tr
1912 2149
2150
def applynarrowacl(repo, kwargs):
    """Apply narrow fetch access control.

    Rewrites the named arguments of a getbundle wire protocol command so
    the requested include/exclude patterns are restricted to what the
    requesting user may access.  Raises ``error.Abort`` when the user's
    ACL is empty or the request reaches outside of it.
    """
    ui = repo.ui
    # TODO this assumes existence of HTTP and is a layering violation.
    username = ui.shortuser(ui.environ.get('REMOTE_USER') or ui.username())

    def readacl(suffix):
        # per-user configuration, falling back to the 'default.*' entries
        fallback = ui.configlist(_NARROWACL_SECTION, 'default' + suffix)
        return ui.configlist(_NARROWACL_SECTION, username + suffix, fallback)

    user_includes = readacl('.includes')
    user_excludes = readacl('.excludes')
    if not user_includes:
        raise error.Abort(
            _("{} configuration for user {} is empty").format(
                _NARROWACL_SECTION, username
            )
        )

    def normalize(pats):
        # '*' grants everything; anything else becomes a rooted path pattern
        return ['path:.' if p == '*' else 'path:' + p for p in pats]

    user_includes = normalize(user_includes)
    user_excludes = normalize(user_excludes)

    req_includes = set(kwargs.get(r'includepats', []))
    req_excludes = set(kwargs.get(r'excludepats', []))

    req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
        req_includes, req_excludes, user_includes, user_excludes
    )

    if invalid_includes:
        raise error.Abort(
            _("The following includes are not accessible for {}: {}").format(
                username, invalid_includes
            )
        )

    # hand back a massaged copy of the arguments; r'' keys keep the
    # native-str spelling expected by the wire protocol layer
    new_args = dict(kwargs)
    new_args[r'narrow'] = True
    new_args[r'narrow_acl'] = True
    new_args[r'includepats'] = req_includes
    if req_excludes:
        new_args[r'excludepats'] = req_excludes

    return new_args
2207
1957 2208
1958 def _computeellipsis(repo, common, heads, known, match, depth=None): 2209 def _computeellipsis(repo, common, heads, known, match, depth=None):
1959 """Compute the shape of a narrowed DAG. 2210 """Compute the shape of a narrowed DAG.
1960 2211
1961 Args: 2212 Args:
2011 ellipsisroots[child].discard(child) 2262 ellipsisroots[child].discard(child)
2012 2263
2013 def splithead(head): 2264 def splithead(head):
2014 r1, r2, r3 = sorted(ellipsisroots[head]) 2265 r1, r2, r3 = sorted(ellipsisroots[head])
2015 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)): 2266 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
2016 mid = repo.revs('sort(merge() & %d::%d & %d::%d, -rev)', 2267 mid = repo.revs(
2017 nr1, head, nr2, head) 2268 'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
2269 )
2018 for j in mid: 2270 for j in mid:
2019 if j == nr2: 2271 if j == nr2:
2020 return nr2, (nr1, nr2) 2272 return nr2, (nr1, nr2)
2021 if j not in ellipsisroots or len(ellipsisroots[j]) < 2: 2273 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
2022 return j, (nr1, nr2) 2274 return j, (nr1, nr2)
2023 raise error.Abort(_('Failed to split up ellipsis node! head: %d, ' 2275 raise error.Abort(
2024 'roots: %d %d %d') % (head, r1, r2, r3)) 2276 _('Failed to split up ellipsis node! head: %d, ' 'roots: %d %d %d')
2277 % (head, r1, r2, r3)
2278 )
2025 2279
2026 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs)) 2280 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
2027 visit = reversed(missing) 2281 visit = reversed(missing)
2028 relevant_nodes = set() 2282 relevant_nodes = set()
2029 visitnodes = [cl.node(m) for m in missing] 2283 visitnodes = [cl.node(m) for m in missing]
2079 for c in commonrevs: 2333 for c in commonrevs:
2080 for head in ellipsisheads[c]: 2334 for head in ellipsisheads[c]:
2081 addroot(head, c) 2335 addroot(head, c)
2082 return visitnodes, relevant_nodes, ellipsisroots 2336 return visitnodes, relevant_nodes, ellipsisroots
2083 2337
2338
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
2090 2345
2346
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}


def getbundle2partsgenerator(stepname, idx=None):
    """decorator registering a bundle2 part generator used by getbundle

    The decorated function is stored in the step -> function mapping and the
    step name is inserted into the ordered step list, at position ``idx``
    when given and at the end otherwise, so registration order matters.

    Only use this decorator for brand new steps; to wrap a step from an
    extension, update the getbundle2partsmapping dictionary directly."""

    def register(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        # inserting at len() is equivalent to appending
        position = len(getbundle2partsorder) if idx is None else idx
        getbundle2partsorder.insert(position, stepname)
        return func

    return register
2376
2117 2377
def bundle2requested(bundlecaps):
    """return True when the client capabilities request a bundle2 stream"""
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
2122 2382
2123 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None, 2383
2124 **kwargs): 2384 def getbundlechunks(
2385 repo, source, heads=None, common=None, bundlecaps=None, **kwargs
2386 ):
2125 """Return chunks constituting a bundle's raw data. 2387 """Return chunks constituting a bundle's raw data.
2126 2388
2127 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps 2389 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
2128 passed. 2390 passed.
2129 2391
2137 if not usebundle2: 2399 if not usebundle2:
2138 if bundlecaps and not kwargs.get('cg', True): 2400 if bundlecaps and not kwargs.get('cg', True):
2139 raise ValueError(_('request for bundle10 must include changegroup')) 2401 raise ValueError(_('request for bundle10 must include changegroup'))
2140 2402
2141 if kwargs: 2403 if kwargs:
2142 raise ValueError(_('unsupported getbundle arguments: %s') 2404 raise ValueError(
2143 % ', '.join(sorted(kwargs.keys()))) 2405 _('unsupported getbundle arguments: %s')
2406 % ', '.join(sorted(kwargs.keys()))
2407 )
2144 outgoing = _computeoutgoing(repo, heads, common) 2408 outgoing = _computeoutgoing(repo, heads, common)
2145 info['bundleversion'] = 1 2409 info['bundleversion'] = 1
2146 return info, changegroup.makestream(repo, outgoing, '01', source, 2410 return (
2147 bundlecaps=bundlecaps) 2411 info,
2412 changegroup.makestream(
2413 repo, outgoing, '01', source, bundlecaps=bundlecaps
2414 ),
2415 )
2148 2416
2149 # bundle20 case 2417 # bundle20 case
2150 info['bundleversion'] = 2 2418 info['bundleversion'] = 2
2151 b2caps = {} 2419 b2caps = {}
2152 for bcaps in bundlecaps: 2420 for bcaps in bundlecaps:
2153 if bcaps.startswith('bundle2='): 2421 if bcaps.startswith('bundle2='):
2154 blob = urlreq.unquote(bcaps[len('bundle2='):]) 2422 blob = urlreq.unquote(bcaps[len('bundle2=') :])
2155 b2caps.update(bundle2.decodecaps(blob)) 2423 b2caps.update(bundle2.decodecaps(blob))
2156 bundler = bundle2.bundle20(repo.ui, b2caps) 2424 bundler = bundle2.bundle20(repo.ui, b2caps)
2157 2425
2158 kwargs['heads'] = heads 2426 kwargs['heads'] = heads
2159 kwargs['common'] = common 2427 kwargs['common'] = common
2160 2428
2161 for name in getbundle2partsorder: 2429 for name in getbundle2partsorder:
2162 func = getbundle2partsmapping[name] 2430 func = getbundle2partsmapping[name]
2163 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps, 2431 func(
2164 **pycompat.strkwargs(kwargs)) 2432 bundler,
2433 repo,
2434 source,
2435 bundlecaps=bundlecaps,
2436 b2caps=b2caps,
2437 **pycompat.strkwargs(kwargs)
2438 )
2165 2439
2166 info['prefercompressed'] = bundler.prefercompressed 2440 info['prefercompressed'] = bundler.prefercompressed
2167 2441
2168 return info, bundler.getchunks() 2442 return info, bundler.getchunks()
2443
2169 2444
@getbundle2partsgenerator('stream2')
def _getbundlestream2(bundler, repo, *args, **kwargs):
    """Delegate generation of the 'stream2' part to the bundle2 module."""
    return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2173 2448
2449
2174 @getbundle2partsgenerator('changegroup') 2450 @getbundle2partsgenerator('changegroup')
2175 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None, 2451 def _getbundlechangegrouppart(
2176 b2caps=None, heads=None, common=None, **kwargs): 2452 bundler,
2453 repo,
2454 source,
2455 bundlecaps=None,
2456 b2caps=None,
2457 heads=None,
2458 common=None,
2459 **kwargs
2460 ):
2177 """add a changegroup part to the requested bundle""" 2461 """add a changegroup part to the requested bundle"""
2178 if not kwargs.get(r'cg', True): 2462 if not kwargs.get(r'cg', True):
2179 return 2463 return
2180 2464
2181 version = '01' 2465 version = '01'
2182 cgversions = b2caps.get('changegroup') 2466 cgversions = b2caps.get('changegroup')
2183 if cgversions: # 3.1 and 3.2 ship with an empty value 2467 if cgversions: # 3.1 and 3.2 ship with an empty value
2184 cgversions = [v for v in cgversions 2468 cgversions = [
2185 if v in changegroup.supportedoutgoingversions(repo)] 2469 v
2470 for v in cgversions
2471 if v in changegroup.supportedoutgoingversions(repo)
2472 ]
2186 if not cgversions: 2473 if not cgversions:
2187 raise error.Abort(_('no common changegroup version')) 2474 raise error.Abort(_('no common changegroup version'))
2188 version = max(cgversions) 2475 version = max(cgversions)
2189 2476
2190 outgoing = _computeoutgoing(repo, heads, common) 2477 outgoing = _computeoutgoing(repo, heads, common)
2196 exclude = sorted(filter(bool, kwargs.get(r'excludepats', []))) 2483 exclude = sorted(filter(bool, kwargs.get(r'excludepats', [])))
2197 matcher = narrowspec.match(repo.root, include=include, exclude=exclude) 2484 matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
2198 else: 2485 else:
2199 matcher = None 2486 matcher = None
2200 2487
2201 cgstream = changegroup.makestream(repo, outgoing, version, source, 2488 cgstream = changegroup.makestream(
2202 bundlecaps=bundlecaps, matcher=matcher) 2489 repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
2490 )
2203 2491
2204 part = bundler.newpart('changegroup', data=cgstream) 2492 part = bundler.newpart('changegroup', data=cgstream)
2205 if cgversions: 2493 if cgversions:
2206 part.addparam('version', version) 2494 part.addparam('version', version)
2207 2495
2208 part.addparam('nbchanges', '%d' % len(outgoing.missing), 2496 part.addparam('nbchanges', '%d' % len(outgoing.missing), mandatory=False)
2209 mandatory=False)
2210 2497
2211 if 'treemanifest' in repo.requirements: 2498 if 'treemanifest' in repo.requirements:
2212 part.addparam('treemanifest', '1') 2499 part.addparam('treemanifest', '1')
2213 2500
2214 if (kwargs.get(r'narrow', False) and kwargs.get(r'narrow_acl', False) 2501 if (
2215 and (include or exclude)): 2502 kwargs.get(r'narrow', False)
2503 and kwargs.get(r'narrow_acl', False)
2504 and (include or exclude)
2505 ):
2216 # this is mandatory because otherwise ACL clients won't work 2506 # this is mandatory because otherwise ACL clients won't work
2217 narrowspecpart = bundler.newpart('Narrow:responsespec') 2507 narrowspecpart = bundler.newpart('Narrow:responsespec')
2218 narrowspecpart.data = '%s\0%s' % ('\n'.join(include), 2508 narrowspecpart.data = '%s\0%s' % (
2219 '\n'.join(exclude)) 2509 '\n'.join(include),
2510 '\n'.join(exclude),
2511 )
2512
2220 2513
@getbundle2partsgenerator('bookmarks')
def _getbundlebookmarkpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add a bookmark part to the requested bundle"""
    # Nothing to do unless the client explicitly asked for bookmarks.
    if not kwargs.get(r'bookmarks', False):
        return
    if 'bookmarks' not in b2caps:
        raise error.Abort(_('no common bookmarks exchange method'))
    # Encode the full binary bookmark listing; omit the part when empty.
    payload = bookmod.binaryencode(bookmod.listbinbookmarks(repo))
    if payload:
        bundler.newpart('bookmarks', data=payload)
2233 2527
2528
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add parts containing listkeys namespaces to the requested bundle"""
    # One 'listkeys' part is emitted per namespace the client requested.
    for ns in kwargs.get(r'listkeys', ()):
        newpart = bundler.newpart('listkeys')
        newpart.addparam('namespace', ns)
        newpart.data = pushkey.encodekeys(repo.listkeys(ns).items())
2244 2540
2541
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get(r'obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # Only markers relevant to ancestors of the selected heads are sent.
    ancestors = [ctx.node() for ctx in repo.set('::%ln', heads)]
    relevant = sorted(repo.obsstore.relevantmarkers(ancestors))
    bundle2.buildobsmarkerspart(bundler, relevant)
2256 2554
2555
2257 @getbundle2partsgenerator('phases') 2556 @getbundle2partsgenerator('phases')
2258 def _getbundlephasespart(bundler, repo, source, bundlecaps=None, 2557 def _getbundlephasespart(
2259 b2caps=None, heads=None, **kwargs): 2558 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2559 ):
2260 """add phase heads part to the requested bundle""" 2560 """add phase heads part to the requested bundle"""
2261 if kwargs.get(r'phases', False): 2561 if kwargs.get(r'phases', False):
2262 if not 'heads' in b2caps.get('phases'): 2562 if not 'heads' in b2caps.get('phases'):
2263 raise error.Abort(_('no common phases exchange method')) 2563 raise error.Abort(_('no common phases exchange method'))
2264 if heads is None: 2564 if heads is None:
2299 2599
2300 # generate the actual part 2600 # generate the actual part
2301 phasedata = phases.binaryencode(phasemapping) 2601 phasedata = phases.binaryencode(phasemapping)
2302 bundler.newpart('phase-heads', data=phasedata) 2602 bundler.newpart('phase-heads', data=phasedata)
2303 2603
2604
2304 @getbundle2partsgenerator('hgtagsfnodes') 2605 @getbundle2partsgenerator('hgtagsfnodes')
2305 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None, 2606 def _getbundletagsfnodes(
2306 b2caps=None, heads=None, common=None, 2607 bundler,
2307 **kwargs): 2608 repo,
2609 source,
2610 bundlecaps=None,
2611 b2caps=None,
2612 heads=None,
2613 common=None,
2614 **kwargs
2615 ):
2308 """Transfer the .hgtags filenodes mapping. 2616 """Transfer the .hgtags filenodes mapping.
2309 2617
2310 Only values for heads in this bundle will be transferred. 2618 Only values for heads in this bundle will be transferred.
2311 2619
2312 The part data consists of pairs of 20 byte changeset node and .hgtags 2620 The part data consists of pairs of 20 byte changeset node and .hgtags
2319 return 2627 return
2320 2628
2321 outgoing = _computeoutgoing(repo, heads, common) 2629 outgoing = _computeoutgoing(repo, heads, common)
2322 bundle2.addparttagsfnodescache(repo, bundler, outgoing) 2630 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2323 2631
2632
2324 @getbundle2partsgenerator('cache:rev-branch-cache') 2633 @getbundle2partsgenerator('cache:rev-branch-cache')
2325 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None, 2634 def _getbundlerevbranchcache(
2326 b2caps=None, heads=None, common=None, 2635 bundler,
2327 **kwargs): 2636 repo,
2637 source,
2638 bundlecaps=None,
2639 b2caps=None,
2640 heads=None,
2641 common=None,
2642 **kwargs
2643 ):
2328 """Transfer the rev-branch-cache mapping 2644 """Transfer the rev-branch-cache mapping
2329 2645
2330 The payload is a series of data related to each branch 2646 The payload is a series of data related to each branch
2331 2647
2332 1) branch name length 2648 1) branch name length
2337 """ 2653 """
2338 # Don't send unless: 2654 # Don't send unless:
2339 # - changeset are being exchanged, 2655 # - changeset are being exchanged,
2340 # - the client supports it. 2656 # - the client supports it.
2341 # - narrow bundle isn't in play (not currently compatible). 2657 # - narrow bundle isn't in play (not currently compatible).
2342 if (not kwargs.get(r'cg', True) 2658 if (
2659 not kwargs.get(r'cg', True)
2343 or 'rev-branch-cache' not in b2caps 2660 or 'rev-branch-cache' not in b2caps
2344 or kwargs.get(r'narrow', False) 2661 or kwargs.get(r'narrow', False)
2345 or repo.ui.has_section(_NARROWACL_SECTION)): 2662 or repo.ui.has_section(_NARROWACL_SECTION)
2663 ):
2346 return 2664 return
2347 2665
2348 outgoing = _computeoutgoing(repo, heads, common) 2666 outgoing = _computeoutgoing(repo, heads, common)
2349 bundle2.addpartrevbranchcache(repo, bundler, outgoing) 2667 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2668
2350 2669
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    current = repo.heads()
    # Hash of the sorted local heads; the remote may send either the raw
    # head list or this digest (tagged 'hashed') for comparison.
    current_hash = hashlib.sha1(''.join(sorted(current))).digest()
    unchanged = (
        their_heads == ['force']
        or their_heads == current
        or their_heads == ['hashed', current_hash]
    )
    if not unchanged:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced(
            'repository changed while %s - ' 'please try again' % context
        )
2687
2364 2688
2365 def unbundle(repo, cg, heads, source, url): 2689 def unbundle(repo, cg, heads, source, url):
2366 """Apply a bundle to a repo. 2690 """Apply a bundle to a repo.
2367 2691
2368 this function makes sure the repo is locked during the application and have 2692 this function makes sure the repo is locked during the application and have
2391 op = bundle2.applybundle(repo, cg, tr, source, url) 2715 op = bundle2.applybundle(repo, cg, tr, source, url)
2392 r = bundle2.combinechangegroupresults(op) 2716 r = bundle2.combinechangegroupresults(op)
2393 else: 2717 else:
2394 r = None 2718 r = None
2395 try: 2719 try:
2720
2396 def gettransaction(): 2721 def gettransaction():
2397 if not lockandtr[2]: 2722 if not lockandtr[2]:
2398 if not bookmod.bookmarksinstore(repo): 2723 if not bookmod.bookmarksinstore(repo):
2399 lockandtr[0] = repo.wlock() 2724 lockandtr[0] = repo.wlock()
2400 lockandtr[1] = repo.lock() 2725 lockandtr[1] = repo.lock()
2407 # Do greedy locking by default until we're satisfied with lazy 2732 # Do greedy locking by default until we're satisfied with lazy
2408 # locking. 2733 # locking.
2409 if not repo.ui.configbool('experimental', 'bundle2lazylocking'): 2734 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2410 gettransaction() 2735 gettransaction()
2411 2736
2412 op = bundle2.bundleoperation(repo, gettransaction, 2737 op = bundle2.bundleoperation(
2413 captureoutput=captureoutput, 2738 repo,
2414 source='push') 2739 gettransaction,
2740 captureoutput=captureoutput,
2741 source='push',
2742 )
2415 try: 2743 try:
2416 op = bundle2.processbundle(repo, cg, op=op) 2744 op = bundle2.processbundle(repo, cg, op=op)
2417 finally: 2745 finally:
2418 r = op.reply 2746 r = op.reply
2419 if captureoutput and r is not None: 2747 if captureoutput and r is not None:
2420 repo.ui.pushbuffer(error=True, subproc=True) 2748 repo.ui.pushbuffer(error=True, subproc=True)
2749
2421 def recordout(output): 2750 def recordout(output):
2422 r.newpart('output', data=output, mandatory=False) 2751 r.newpart('output', data=output, mandatory=False)
2752
2423 if lockandtr[2] is not None: 2753 if lockandtr[2] is not None:
2424 lockandtr[2].close() 2754 lockandtr[2].close()
2425 except BaseException as exc: 2755 except BaseException as exc:
2426 exc.duringunbundle2 = True 2756 exc.duringunbundle2 = True
2427 if captureoutput and r is not None: 2757 if captureoutput and r is not None:
2428 parts = exc._bundle2salvagedoutput = r.salvageoutput() 2758 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2759
2429 def recordout(output): 2760 def recordout(output):
2430 part = bundle2.bundlepart('output', data=output, 2761 part = bundle2.bundlepart(
2431 mandatory=False) 2762 'output', data=output, mandatory=False
2763 )
2432 parts.append(part) 2764 parts.append(part)
2765
2433 raise 2766 raise
2434 finally: 2767 finally:
2435 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0]) 2768 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2436 if recordout is not None: 2769 if recordout is not None:
2437 recordout(repo.ui.popbuffer()) 2770 recordout(repo.ui.popbuffer())
2438 return r 2771 return r
2439 2772
2773
2440 def _maybeapplyclonebundle(pullop): 2774 def _maybeapplyclonebundle(pullop):
2441 """Apply a clone bundle from a remote, if possible.""" 2775 """Apply a clone bundle from a remote, if possible."""
2442 2776
2443 repo = pullop.repo 2777 repo = pullop.repo
2444 remote = pullop.remote 2778 remote = pullop.remote
2463 # attempt. 2797 # attempt.
2464 pullop.clonebundleattempted = True 2798 pullop.clonebundleattempted = True
2465 2799
2466 entries = parseclonebundlesmanifest(repo, res) 2800 entries = parseclonebundlesmanifest(repo, res)
2467 if not entries: 2801 if not entries:
2468 repo.ui.note(_('no clone bundles available on remote; ' 2802 repo.ui.note(
2469 'falling back to regular clone\n')) 2803 _(
2804 'no clone bundles available on remote; '
2805 'falling back to regular clone\n'
2806 )
2807 )
2470 return 2808 return
2471 2809
2472 entries = filterclonebundleentries( 2810 entries = filterclonebundleentries(
2473 repo, entries, streamclonerequested=pullop.streamclonerequested) 2811 repo, entries, streamclonerequested=pullop.streamclonerequested
2812 )
2474 2813
2475 if not entries: 2814 if not entries:
2476 # There is a thundering herd concern here. However, if a server 2815 # There is a thundering herd concern here. However, if a server
2477 # operator doesn't advertise bundles appropriate for its clients, 2816 # operator doesn't advertise bundles appropriate for its clients,
2478 # they deserve what's coming. Furthermore, from a client's 2817 # they deserve what's coming. Furthermore, from a client's
2479 # perspective, no automatic fallback would mean not being able to 2818 # perspective, no automatic fallback would mean not being able to
2480 # clone! 2819 # clone!
2481 repo.ui.warn(_('no compatible clone bundles available on server; ' 2820 repo.ui.warn(
2482 'falling back to regular clone\n')) 2821 _(
2483 repo.ui.warn(_('(you may want to report this to the server ' 2822 'no compatible clone bundles available on server; '
2484 'operator)\n')) 2823 'falling back to regular clone\n'
2824 )
2825 )
2826 repo.ui.warn(
2827 _('(you may want to report this to the server ' 'operator)\n')
2828 )
2485 return 2829 return
2486 2830
2487 entries = sortclonebundleentries(repo.ui, entries) 2831 entries = sortclonebundleentries(repo.ui, entries)
2488 2832
2489 url = entries[0]['URL'] 2833 url = entries[0]['URL']
2496 # clients flooding a server that was expecting expensive 2840 # clients flooding a server that was expecting expensive
2497 # clone load to be offloaded. 2841 # clone load to be offloaded.
2498 elif repo.ui.configbool('ui', 'clonebundlefallback'): 2842 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2499 repo.ui.warn(_('falling back to normal clone\n')) 2843 repo.ui.warn(_('falling back to normal clone\n'))
2500 else: 2844 else:
2501 raise error.Abort(_('error applying bundle'), 2845 raise error.Abort(
2502 hint=_('if this error persists, consider contacting ' 2846 _('error applying bundle'),
2503 'the server operator or disable clone ' 2847 hint=_(
2504 'bundles via ' 2848 'if this error persists, consider contacting '
2505 '"--config ui.clonebundles=false"')) 2849 'the server operator or disable clone '
2850 'bundles via '
2851 '"--config ui.clonebundles=false"'
2852 ),
2853 )
2854
2506 2855
2507 def parseclonebundlesmanifest(repo, s): 2856 def parseclonebundlesmanifest(repo, s):
2508 """Parses the raw text of a clone bundles manifest. 2857 """Parses the raw text of a clone bundles manifest.
2509 2858
2510 Returns a list of dicts. The dicts have a ``URL`` key corresponding 2859 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2537 2886
2538 m.append(attrs) 2887 m.append(attrs)
2539 2888
2540 return m 2889 return m
2541 2890
2891
def isstreamclonespec(bundlespec):
    """Return True if *bundlespec* describes a stream clone (v1 or v2)."""
    # Both stream clone flavours are uncompressed on the wire.
    if bundlespec.wirecompression != 'UN':
        return False

    # Stream clone v1
    if bundlespec.wireversion == 's1':
        return True

    # Stream clone v2
    if bundlespec.wireversion == '02' and bundlespec.contentopts.get(
        'streamv2'
    ):
        return True

    return False
2906
2554 2907
2555 def filterclonebundleentries(repo, entries, streamclonerequested=False): 2908 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2556 """Remove incompatible clone bundle manifest entries. 2909 """Remove incompatible clone bundle manifest entries.
2557 2910
2558 Accepts a list of entries parsed with ``parseclonebundlesmanifest`` 2911 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2570 bundlespec = parsebundlespec(repo, spec, strict=True) 2923 bundlespec = parsebundlespec(repo, spec, strict=True)
2571 2924
2572 # If a stream clone was requested, filter out non-streamclone 2925 # If a stream clone was requested, filter out non-streamclone
2573 # entries. 2926 # entries.
2574 if streamclonerequested and not isstreamclonespec(bundlespec): 2927 if streamclonerequested and not isstreamclonespec(bundlespec):
2575 repo.ui.debug('filtering %s because not a stream clone\n' % 2928 repo.ui.debug(
2576 entry['URL']) 2929 'filtering %s because not a stream clone\n'
2930 % entry['URL']
2931 )
2577 continue 2932 continue
2578 2933
2579 except error.InvalidBundleSpecification as e: 2934 except error.InvalidBundleSpecification as e:
2580 repo.ui.debug(stringutil.forcebytestr(e) + '\n') 2935 repo.ui.debug(stringutil.forcebytestr(e) + '\n')
2581 continue 2936 continue
2582 except error.UnsupportedBundleSpecification as e: 2937 except error.UnsupportedBundleSpecification as e:
2583 repo.ui.debug('filtering %s because unsupported bundle ' 2938 repo.ui.debug(
2584 'spec: %s\n' % ( 2939 'filtering %s because unsupported bundle '
2585 entry['URL'], stringutil.forcebytestr(e))) 2940 'spec: %s\n' % (entry['URL'], stringutil.forcebytestr(e))
2941 )
2586 continue 2942 continue
2587 # If we don't have a spec and requested a stream clone, we don't know 2943 # If we don't have a spec and requested a stream clone, we don't know
2588 # what the entry is so don't attempt to apply it. 2944 # what the entry is so don't attempt to apply it.
2589 elif streamclonerequested: 2945 elif streamclonerequested:
2590 repo.ui.debug('filtering %s because cannot determine if a stream ' 2946 repo.ui.debug(
2591 'clone bundle\n' % entry['URL']) 2947 'filtering %s because cannot determine if a stream '
2948 'clone bundle\n' % entry['URL']
2949 )
2592 continue 2950 continue
2593 2951
2594 if 'REQUIRESNI' in entry and not sslutil.hassni: 2952 if 'REQUIRESNI' in entry and not sslutil.hassni:
2595 repo.ui.debug('filtering %s because SNI not supported\n' % 2953 repo.ui.debug(
2596 entry['URL']) 2954 'filtering %s because SNI not supported\n' % entry['URL']
2955 )
2597 continue 2956 continue
2598 2957
2599 newentries.append(entry) 2958 newentries.append(entry)
2600 2959
2601 return newentries 2960 return newentries
2961
2602 2962
2603 class clonebundleentry(object): 2963 class clonebundleentry(object):
2604 """Represents an item in a clone bundles manifest. 2964 """Represents an item in a clone bundles manifest.
2605 2965
2606 This rich class is needed to support sorting since sorted() in Python 3 2966 This rich class is needed to support sorting since sorted() in Python 3
2662 return self._cmp(other) >= 0 3022 return self._cmp(other) >= 0
2663 3023
2664 def __ne__(self, other): 3024 def __ne__(self, other):
2665 return self._cmp(other) != 0 3025 return self._cmp(other) != 0
2666 3026
3027
def sortclonebundleentries(ui, entries):
    """Order clone bundle manifest entries by the user's preferences."""
    preferences = ui.configlist('ui', 'clonebundleprefers')
    if not preferences:
        # No preferences configured: keep the server-provided order.
        return list(entries)

    # Each preference is an 'ATTR=VALUE' pair; split once on '='.
    split_prefs = [pref.split('=', 1) for pref in preferences]

    ranked = sorted(clonebundleentry(entry, split_prefs) for entry in entries)
    return [wrapper.value for wrapper in ranked]
3037
2676 3038
2677 def trypullbundlefromurl(ui, repo, url): 3039 def trypullbundlefromurl(ui, repo, url):
2678 """Attempt to apply a bundle from a URL.""" 3040 """Attempt to apply a bundle from a URL."""
2679 with repo.lock(), repo.transaction('bundleurl') as tr: 3041 with repo.lock(), repo.transaction('bundleurl') as tr:
2680 try: 3042 try:
2685 cg.apply(repo) 3047 cg.apply(repo)
2686 else: 3048 else:
2687 bundle2.applybundle(repo, cg, tr, 'clonebundles', url) 3049 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2688 return True 3050 return True
2689 except urlerr.httperror as e: 3051 except urlerr.httperror as e:
2690 ui.warn(_('HTTP error fetching bundle: %s\n') % 3052 ui.warn(
2691 stringutil.forcebytestr(e)) 3053 _('HTTP error fetching bundle: %s\n')
3054 % stringutil.forcebytestr(e)
3055 )
2692 except urlerr.urlerror as e: 3056 except urlerr.urlerror as e:
2693 ui.warn(_('error fetching bundle: %s\n') % 3057 ui.warn(
2694 stringutil.forcebytestr(e.reason)) 3058 _('error fetching bundle: %s\n')
3059 % stringutil.forcebytestr(e.reason)
3060 )
2695 3061
2696 return False 3062 return False