mercurial/bundle2.py
changeset 43077 687b865b95ad
parent 43076 2372284d9457
child 43080 86e4daa2d54c
   177 urlreq = util.urlreq
   177 urlreq = util.urlreq
   178 
   178 
   179 _pack = struct.pack
   179 _pack = struct.pack
   180 _unpack = struct.unpack
   180 _unpack = struct.unpack
   181 
   181 
   182 _fstreamparamsize = '>i'
   182 _fstreamparamsize = b'>i'
   183 _fpartheadersize = '>i'
   183 _fpartheadersize = b'>i'
   184 _fparttypesize = '>B'
   184 _fparttypesize = b'>B'
   185 _fpartid = '>I'
   185 _fpartid = b'>I'
   186 _fpayloadsize = '>i'
   186 _fpayloadsize = b'>i'
   187 _fpartparamcount = '>BB'
   187 _fpartparamcount = b'>BB'
   188 
   188 
   189 preferedchunksize = 32768
   189 preferedchunksize = 32768
   190 
   190 
   191 _parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
   191 _parttypeforbidden = re.compile(b'[^a-zA-Z0-9_:-]')
   192 
   192 
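A minimal standalone sketch of how these framing constants are used: struct packs and unpacks them as fixed-width big-endian integers (illustrative only, not part of the changeset).

    import struct

    # A part-header size travels as a big-endian signed 32-bit int (b'>i'),
    # a part id as an unsigned 32-bit int (b'>I').
    sizechunk = struct.pack(b'>i', 32768)              # b'\x00\x00\x80\x00'
    assert struct.unpack(b'>i', sizechunk)[0] == 32768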
   193 
   193 
   194 def outdebug(ui, message):
   194 def outdebug(ui, message):
   195     """debug regarding output stream (bundling)"""
   195     """debug regarding output stream (bundling)"""
   196     if ui.configbool('devel', 'bundle2.debug'):
   196     if ui.configbool(b'devel', b'bundle2.debug'):
   197         ui.debug('bundle2-output: %s\n' % message)
   197         ui.debug(b'bundle2-output: %s\n' % message)
   198 
   198 
   199 
   199 
   200 def indebug(ui, message):
   200 def indebug(ui, message):
   201     """debug on input stream (unbundling)"""
   201     """debug on input stream (unbundling)"""
   202     if ui.configbool('devel', 'bundle2.debug'):
   202     if ui.configbool(b'devel', b'bundle2.debug'):
   203         ui.debug('bundle2-input: %s\n' % message)
   203         ui.debug(b'bundle2-input: %s\n' % message)
   204 
   204 
   205 
   205 
   206 def validateparttype(parttype):
   206 def validateparttype(parttype):
   207     """raise ValueError if a parttype contains invalid character"""
   207     """raise ValueError if a parttype contains invalid character"""
   208     if _parttypeforbidden.search(parttype):
   208     if _parttypeforbidden.search(parttype):
   213     """return a struct format to read part parameter sizes
   213     """return a struct format to read part parameter sizes
   214 
   214 
   215     The number of parameters is variable, so we need to build that format
   215     The number of parameters is variable, so we need to build that format
   216     dynamically.
   216     dynamically.
   217     """
   217     """
   218     return '>' + ('BB' * nbparams)
   218     return b'>' + (b'BB' * nbparams)
   219 
   219 
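For illustration, a sketch of what _makefpartparamsizes(2) returns and how such a format is consumed; the packed bytes below are made up.

    import struct

    fmt = b'>' + (b'BB' * 2)                           # i.e. b'>BBBB'
    # two (key size, value size) pairs, one per parameter
    assert struct.unpack(fmt, b'\x03\x05\x04\x08') == (3, 5, 4, 8)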
   220 
   220 
   221 parthandlermapping = {}
   221 parthandlermapping = {}
   222 
   222 
   223 
   223 
   305     * a way to retrieve a transaction to add changes to the repo,
   305     * a way to retrieve a transaction to add changes to the repo,
   306     * a way to record the result of processing each part,
   306     * a way to record the result of processing each part,
   307     * a way to construct a bundle response when applicable.
   307     * a way to construct a bundle response when applicable.
   308     """
   308     """
   309 
   309 
   310     def __init__(self, repo, transactiongetter, captureoutput=True, source=''):
   310     def __init__(self, repo, transactiongetter, captureoutput=True, source=b''):
   311         self.repo = repo
   311         self.repo = repo
   312         self.ui = repo.ui
   312         self.ui = repo.ui
   313         self.records = unbundlerecords()
   313         self.records = unbundlerecords()
   314         self.reply = None
   314         self.reply = None
   315         self.captureoutput = captureoutput
   315         self.captureoutput = captureoutput
   335         return transaction
   335         return transaction
   336 
   336 
   337     def addhookargs(self, hookargs):
   337     def addhookargs(self, hookargs):
   338         if self.hookargs is None:
   338         if self.hookargs is None:
   339             raise error.ProgrammingError(
   339             raise error.ProgrammingError(
   340                 'attempted to add hookargs to '
   340                 b'attempted to add hookargs to '
   341                 'operation after transaction started'
   341                 b'operation after transaction started'
   342             )
   342             )
   343         self.hookargs.update(hookargs)
   343         self.hookargs.update(hookargs)
   344 
   344 
   345 
   345 
   346 class TransactionUnavailable(RuntimeError):
   346 class TransactionUnavailable(RuntimeError):
   356 
   356 
   357 
   357 
   358 def applybundle(repo, unbundler, tr, source, url=None, **kwargs):
   358 def applybundle(repo, unbundler, tr, source, url=None, **kwargs):
   359     # transform me into unbundler.apply() as soon as the freeze is lifted
   359     # transform me into unbundler.apply() as soon as the freeze is lifted
   360     if isinstance(unbundler, unbundle20):
   360     if isinstance(unbundler, unbundle20):
   361         tr.hookargs['bundle2'] = '1'
   361         tr.hookargs[b'bundle2'] = b'1'
   362         if source is not None and 'source' not in tr.hookargs:
   362         if source is not None and b'source' not in tr.hookargs:
   363             tr.hookargs['source'] = source
   363             tr.hookargs[b'source'] = source
   364         if url is not None and 'url' not in tr.hookargs:
   364         if url is not None and b'url' not in tr.hookargs:
   365             tr.hookargs['url'] = url
   365             tr.hookargs[b'url'] = url
   366         return processbundle(repo, unbundler, lambda: tr, source=source)
   366         return processbundle(repo, unbundler, lambda: tr, source=source)
   367     else:
   367     else:
   368         # the transactiongetter won't be used, but we might as well set it
   368         # the transactiongetter won't be used, but we might as well set it
   369         op = bundleoperation(repo, lambda: tr, source=source)
   369         op = bundleoperation(repo, lambda: tr, source=source)
   370         _processchangegroup(op, unbundler, tr, source, url, **kwargs)
   370         _processchangegroup(op, unbundler, tr, source, url, **kwargs)
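For reference, a sketch of how the hookargs set above typically surface to hooks, as HG_-prefixed environment variables (the concrete values are illustrative):

    # HG_BUNDLE2=1           # set for any bundle2 unbundle
    # HG_SOURCE=push         # only set when not already present in tr.hookargs
    # HG_URL=ssh://...       # likewise, only when a url was passed in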
   436             # that form if we need to.
   436             # that form if we need to.
   437             if seekerror:
   437             if seekerror:
   438                 raise exc
   438                 raise exc
   439 
   439 
   440         self.repo.ui.debug(
   440         self.repo.ui.debug(
   441             'bundle2-input-bundle: %i parts total\n' % self.count
   441             b'bundle2-input-bundle: %i parts total\n' % self.count
   442         )
   442         )
   443 
   443 
   444 
   444 
   445 def processbundle(repo, unbundler, transactiongetter=None, op=None, source=''):
   445 def processbundle(repo, unbundler, transactiongetter=None, op=None, source=b''):
   446     """This function processes a bundle, applying effects to/from a repo
   446     """This function processes a bundle, applying effects to/from a repo
   447 
   447 
   448     It iterates over each part then searches for and uses the proper handling
   448     It iterates over each part then searches for and uses the proper handling
   449     code to process the part. Parts are processed in order.
   449     code to process the part. Parts are processed in order.
   450 
   450 
   462     # todo:
   462     # todo:
   463     # - replace this with an init function soon.
   463     # - replace this with an init function soon.
   464     # - exception catching
   464     # - exception catching
   465     unbundler.params
   465     unbundler.params
   466     if repo.ui.debugflag:
   466     if repo.ui.debugflag:
   467         msg = ['bundle2-input-bundle:']
   467         msg = [b'bundle2-input-bundle:']
   468         if unbundler.params:
   468         if unbundler.params:
   469             msg.append(' %i params' % len(unbundler.params))
   469             msg.append(b' %i params' % len(unbundler.params))
   470         if op._gettransaction is None or op._gettransaction is _notransaction:
   470         if op._gettransaction is None or op._gettransaction is _notransaction:
   471             msg.append(' no-transaction')
   471             msg.append(b' no-transaction')
   472         else:
   472         else:
   473             msg.append(' with-transaction')
   473             msg.append(b' with-transaction')
   474         msg.append('\n')
   474         msg.append(b'\n')
   475         repo.ui.debug(''.join(msg))
   475         repo.ui.debug(b''.join(msg))
   476 
   476 
   477     processparts(repo, op, unbundler)
   477     processparts(repo, op, unbundler)
   478 
   478 
   479     return op
   479     return op
   480 
   480 
   485             _processpart(op, part)
   485             _processpart(op, part)
   486 
   486 
   487 
   487 
   488 def _processchangegroup(op, cg, tr, source, url, **kwargs):
   488 def _processchangegroup(op, cg, tr, source, url, **kwargs):
   489     ret = cg.apply(op.repo, tr, source, url, **kwargs)
   489     ret = cg.apply(op.repo, tr, source, url, **kwargs)
   490     op.records.add('changegroup', {'return': ret,})
   490     op.records.add(b'changegroup', {b'return': ret,})
   491     return ret
   491     return ret
   492 
   492 
   493 
   493 
   494 def _gethandler(op, part):
   494 def _gethandler(op, part):
   495     status = 'unknown'  # used by debug output
   495     status = b'unknown'  # used by debug output
   496     try:
   496     try:
   497         handler = parthandlermapping.get(part.type)
   497         handler = parthandlermapping.get(part.type)
   498         if handler is None:
   498         if handler is None:
   499             status = 'unsupported-type'
   499             status = b'unsupported-type'
   500             raise error.BundleUnknownFeatureError(parttype=part.type)
   500             raise error.BundleUnknownFeatureError(parttype=part.type)
   501         indebug(op.ui, 'found a handler for part %s' % part.type)
   501         indebug(op.ui, b'found a handler for part %s' % part.type)
   502         unknownparams = part.mandatorykeys - handler.params
   502         unknownparams = part.mandatorykeys - handler.params
   503         if unknownparams:
   503         if unknownparams:
   504             unknownparams = list(unknownparams)
   504             unknownparams = list(unknownparams)
   505             unknownparams.sort()
   505             unknownparams.sort()
   506             status = 'unsupported-params (%s)' % ', '.join(unknownparams)
   506             status = b'unsupported-params (%s)' % b', '.join(unknownparams)
   507             raise error.BundleUnknownFeatureError(
   507             raise error.BundleUnknownFeatureError(
   508                 parttype=part.type, params=unknownparams
   508                 parttype=part.type, params=unknownparams
   509             )
   509             )
   510         status = 'supported'
   510         status = b'supported'
   511     except error.BundleUnknownFeatureError as exc:
   511     except error.BundleUnknownFeatureError as exc:
   512         if part.mandatory:  # mandatory parts
   512         if part.mandatory:  # mandatory parts
   513             raise
   513             raise
   514         indebug(op.ui, 'ignoring unsupported advisory part %s' % exc)
   514         indebug(op.ui, b'ignoring unsupported advisory part %s' % exc)
   515         return  # skip to part processing
   515         return  # skip to part processing
   516     finally:
   516     finally:
   517         if op.ui.debugflag:
   517         if op.ui.debugflag:
   518             msg = ['bundle2-input-part: "%s"' % part.type]
   518             msg = [b'bundle2-input-part: "%s"' % part.type]
   519             if not part.mandatory:
   519             if not part.mandatory:
   520                 msg.append(' (advisory)')
   520                 msg.append(b' (advisory)')
   521             nbmp = len(part.mandatorykeys)
   521             nbmp = len(part.mandatorykeys)
   522             nbap = len(part.params) - nbmp
   522             nbap = len(part.params) - nbmp
   523             if nbmp or nbap:
   523             if nbmp or nbap:
   524                 msg.append(' (params:')
   524                 msg.append(b' (params:')
   525                 if nbmp:
   525                 if nbmp:
   526                     msg.append(' %i mandatory' % nbmp)
   526                     msg.append(b' %i mandatory' % nbmp)
   527                 if nbap:
   527                 if nbap:
   528                     msg.append(' %i advisory' % nbap)
   528                     msg.append(b' %i advisory' % nbap)
   529                 msg.append(')')
   529                 msg.append(b')')
   530             msg.append(' %s\n' % status)
   530             msg.append(b' %s\n' % status)
   531             op.ui.debug(''.join(msg))
   531             op.ui.debug(b''.join(msg))
   532 
   532 
   533     return handler
   533     return handler
   534 
   534 
   535 
   535 
   536 def _processpart(op, part):
   536 def _processpart(op, part):
   547     # parthandlermapping lookup (any KeyError raised by handler()
   547     # parthandlermapping lookup (any KeyError raised by handler()
   548     # itself represents a defect of a different variety).
   548     # itself represents a defect of a different variety).
   549     output = None
   549     output = None
   550     if op.captureoutput and op.reply is not None:
   550     if op.captureoutput and op.reply is not None:
   551         op.ui.pushbuffer(error=True, subproc=True)
   551         op.ui.pushbuffer(error=True, subproc=True)
   552         output = ''
   552         output = b''
   553     try:
   553     try:
   554         handler(op, part)
   554         handler(op, part)
   555     finally:
   555     finally:
   556         if output is not None:
   556         if output is not None:
   557             output = op.ui.popbuffer()
   557             output = op.ui.popbuffer()
   558         if output:
   558         if output:
   559             outpart = op.reply.newpart('output', data=output, mandatory=False)
   559             outpart = op.reply.newpart(b'output', data=output, mandatory=False)
   560             outpart.addparam(
   560             outpart.addparam(
   561                 'in-reply-to', pycompat.bytestr(part.id), mandatory=False
   561                 b'in-reply-to', pycompat.bytestr(part.id), mandatory=False
   562             )
   562             )
   563 
   563 
   564 
   564 
   565 def decodecaps(blob):
   565 def decodecaps(blob):
   566     """decode a bundle2 caps bytes blob into a dictionary
   566     """decode a bundle2 caps bytes blob into a dictionary
   573     The values are always a list."""
   573     The values are always a list."""
   574     caps = {}
   574     caps = {}
   575     for line in blob.splitlines():
   575     for line in blob.splitlines():
   576         if not line:
   576         if not line:
   577             continue
   577             continue
   578         if '=' not in line:
   578         if b'=' not in line:
   579             key, vals = line, ()
   579             key, vals = line, ()
   580         else:
   580         else:
   581             key, vals = line.split('=', 1)
   581             key, vals = line.split(b'=', 1)
   582             vals = vals.split(',')
   582             vals = vals.split(b',')
   583         key = urlreq.unquote(key)
   583         key = urlreq.unquote(key)
   584         vals = [urlreq.unquote(v) for v in vals]
   584         vals = [urlreq.unquote(v) for v in vals]
   585         caps[key] = vals
   585         caps[key] = vals
   586     return caps
   586     return caps
   587 
   587 
   592     for ca in sorted(caps):
   592     for ca in sorted(caps):
   593         vals = caps[ca]
   593         vals = caps[ca]
   594         ca = urlreq.quote(ca)
   594         ca = urlreq.quote(ca)
   595         vals = [urlreq.quote(v) for v in vals]
   595         vals = [urlreq.quote(v) for v in vals]
   596         if vals:
   596         if vals:
   597             ca = "%s=%s" % (ca, ','.join(vals))
   597             ca = b"%s=%s" % (ca, b','.join(vals))
   598         chunks.append(ca)
   598         chunks.append(ca)
   599     return '\n'.join(chunks)
   599     return b'\n'.join(chunks)
   600 
   600 
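A quick round-trip sketch of the caps blob format handled by decodecaps and by the encoder whose body ends just above (encodecaps in this module); the capability set itself is hypothetical.

    from mercurial import bundle2

    caps = {b'HG20': [], b'changegroup': [b'01', b'02']}
    blob = bundle2.encodecaps(caps)     # expected: b'HG20\nchangegroup=01,02'
    assert bundle2.decodecaps(blob) == caps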
   601 
   601 
   602 bundletypes = {
   602 bundletypes = {
   603     "": ("", 'UN'),  # only when using unbundle on ssh and old http servers
   603     b"": (b"", b'UN'),  # only when using unbundle on ssh and old http servers
   604     # since the unification ssh accepts a header but there
   604     # since the unification ssh accepts a header but there
   605     # is no capability signaling it.
   605     # is no capability signaling it.
   606     "HG20": (),  # special-cased below
   606     b"HG20": (),  # special-cased below
   607     "HG10UN": ("HG10UN", 'UN'),
   607     b"HG10UN": (b"HG10UN", b'UN'),
   608     "HG10BZ": ("HG10", 'BZ'),
   608     b"HG10BZ": (b"HG10", b'BZ'),
   609     "HG10GZ": ("HG10GZ", 'GZ'),
   609     b"HG10GZ": (b"HG10GZ", b'GZ'),
   610 }
   610 }
   611 
   611 
   612 # hgweb uses this list to communicate its preferred type
   612 # hgweb uses this list to communicate its preferred type
   613 bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
   613 bundlepriority = [b'HG10GZ', b'HG10BZ', b'HG10UN']
   614 
   614 
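A small sketch of how these tables fit together: pick the first type a peer supports in hgweb's priority order, then map it to its on-the-wire header and compression engine name (the supported set below is hypothetical).

    from mercurial import bundle2

    supported = {b'HG10UN', b'HG10GZ'}
    choice = next(t for t in bundle2.bundlepriority if t in supported)  # b'HG10GZ'
    header, comp = bundle2.bundletypes[choice]                          # (b'HG10GZ', b'GZ')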
   615 
   615 
   616 class bundle20(object):
   616 class bundle20(object):
   617     """represent an outgoing bundle2 container
   617     """represent an outgoing bundle2 container
   618 
   618 
   619     Use the `addparam` method to add a stream-level parameter, and `newpart` to
   619     Use the `addparam` method to add a stream-level parameter, and `newpart` to
   620     populate it. Then call `getchunks` to retrieve all the binary chunks of
   620     populate it. Then call `getchunks` to retrieve all the binary chunks of
   621     data that compose the bundle2 container."""
   621     data that compose the bundle2 container."""
   622 
   622 
   623     _magicstring = 'HG20'
   623     _magicstring = b'HG20'
   624 
   624 
   625     def __init__(self, ui, capabilities=()):
   625     def __init__(self, ui, capabilities=()):
   626         self.ui = ui
   626         self.ui = ui
   627         self._params = []
   627         self._params = []
   628         self._parts = []
   628         self._parts = []
   629         self.capabilities = dict(capabilities)
   629         self.capabilities = dict(capabilities)
   630         self._compengine = util.compengines.forbundletype('UN')
   630         self._compengine = util.compengines.forbundletype(b'UN')
   631         self._compopts = None
   631         self._compopts = None
   632         # If compression is being handled by a consumer of the raw
   632         # If compression is being handled by a consumer of the raw
   633         # data (e.g. the wire protocol), unsetting this flag tells
   633         # data (e.g. the wire protocol), unsetting this flag tells
   634         # consumers that the bundle is best left uncompressed.
   634         # consumers that the bundle is best left uncompressed.
   635         self.prefercompressed = True
   635         self.prefercompressed = True
   636 
   636 
   637     def setcompression(self, alg, compopts=None):
   637     def setcompression(self, alg, compopts=None):
   638         """setup core part compression to <alg>"""
   638         """setup core part compression to <alg>"""
   639         if alg in (None, 'UN'):
   639         if alg in (None, b'UN'):
   640             return
   640             return
   641         assert not any(n.lower() == 'compression' for n, v in self._params)
   641         assert not any(n.lower() == b'compression' for n, v in self._params)
   642         self.addparam('Compression', alg)
   642         self.addparam(b'Compression', alg)
   643         self._compengine = util.compengines.forbundletype(alg)
   643         self._compengine = util.compengines.forbundletype(alg)
   644         self._compopts = compopts
   644         self._compopts = compopts
   645 
   645 
   646     @property
   646     @property
   647     def nbparts(self):
   647     def nbparts(self):
   681         return part
   681         return part
   682 
   682 
   683     # methods used to generate the bundle2 stream
   683     # methods used to generate the bundle2 stream
   684     def getchunks(self):
   684     def getchunks(self):
   685         if self.ui.debugflag:
   685         if self.ui.debugflag:
   686             msg = ['bundle2-output-bundle: "%s",' % self._magicstring]
   686             msg = [b'bundle2-output-bundle: "%s",' % self._magicstring]
   687             if self._params:
   687             if self._params:
   688                 msg.append(' (%i params)' % len(self._params))
   688                 msg.append(b' (%i params)' % len(self._params))
   689             msg.append(' %i parts total\n' % len(self._parts))
   689             msg.append(b' %i parts total\n' % len(self._parts))
   690             self.ui.debug(''.join(msg))
   690             self.ui.debug(b''.join(msg))
   691         outdebug(self.ui, 'start emission of %s stream' % self._magicstring)
   691         outdebug(self.ui, b'start emission of %s stream' % self._magicstring)
   692         yield self._magicstring
   692         yield self._magicstring
   693         param = self._paramchunk()
   693         param = self._paramchunk()
   694         outdebug(self.ui, 'bundle parameter: %s' % param)
   694         outdebug(self.ui, b'bundle parameter: %s' % param)
   695         yield _pack(_fstreamparamsize, len(param))
   695         yield _pack(_fstreamparamsize, len(param))
   696         if param:
   696         if param:
   697             yield param
   697             yield param
   698         for chunk in self._compengine.compressstream(
   698         for chunk in self._compengine.compressstream(
   699             self._getcorechunk(), self._compopts
   699             self._getcorechunk(), self._compopts
   705         blocks = []
   705         blocks = []
   706         for par, value in self._params:
   706         for par, value in self._params:
   707             par = urlreq.quote(par)
   707             par = urlreq.quote(par)
   708             if value is not None:
   708             if value is not None:
   709                 value = urlreq.quote(value)
   709                 value = urlreq.quote(value)
   710                 par = '%s=%s' % (par, value)
   710                 par = b'%s=%s' % (par, value)
   711             blocks.append(par)
   711             blocks.append(par)
   712         return ' '.join(blocks)
   712         return b' '.join(blocks)
   713 
   713 
   714     def _getcorechunk(self):
   714     def _getcorechunk(self):
   715         """yield chunk for the core part of the bundle
   715         """yield chunk for the core part of the bundle
   716 
   716 
   717         (all but headers and parameters)"""
   717         (all but headers and parameters)"""
   718         outdebug(self.ui, 'start of parts')
   718         outdebug(self.ui, b'start of parts')
   719         for part in self._parts:
   719         for part in self._parts:
   720             outdebug(self.ui, 'bundle part: "%s"' % part.type)
   720             outdebug(self.ui, b'bundle part: "%s"' % part.type)
   721             for chunk in part.getchunks(ui=self.ui):
   721             for chunk in part.getchunks(ui=self.ui):
   722                 yield chunk
   722                 yield chunk
   723         outdebug(self.ui, 'end of bundle')
   723         outdebug(self.ui, b'end of bundle')
   724         yield _pack(_fpartheadersize, 0)
   724         yield _pack(_fpartheadersize, 0)
   725 
   725 
   726     def salvageoutput(self):
   726     def salvageoutput(self):
   727         """return a list with a copy of all output parts in the bundle
   727         """return a list with a copy of all output parts in the bundle
   728 
   728 
   729         This is meant to be used during error handling to make sure we preserve
   729         This is meant to be used during error handling to make sure we preserve
   730         server output"""
   730         server output"""
   731         salvaged = []
   731         salvaged = []
   732         for part in self._parts:
   732         for part in self._parts:
   733             if part.type.startswith('output'):
   733             if part.type.startswith(b'output'):
   734                 salvaged.append(part.copy())
   734                 salvaged.append(part.copy())
   735         return salvaged
   735         return salvaged
   736 
   736 
   737 
   737 
   738 class unpackermixin(object):
   738 class unpackermixin(object):
   766 def getunbundler(ui, fp, magicstring=None):
   766 def getunbundler(ui, fp, magicstring=None):
   767     """return a valid unbundler object for a given magicstring"""
   767     """return a valid unbundler object for a given magicstring"""
   768     if magicstring is None:
   768     if magicstring is None:
   769         magicstring = changegroup.readexactly(fp, 4)
   769         magicstring = changegroup.readexactly(fp, 4)
   770     magic, version = magicstring[0:2], magicstring[2:4]
   770     magic, version = magicstring[0:2], magicstring[2:4]
   771     if magic != 'HG':
   771     if magic != b'HG':
   772         ui.debug(
   772         ui.debug(
   773             "error: invalid magic: %r (version %r), should be 'HG'\n"
   773             b"error: invalid magic: %r (version %r), should be 'HG'\n"
   774             % (magic, version)
   774             % (magic, version)
   775         )
   775         )
   776         raise error.Abort(_('not a Mercurial bundle'))
   776         raise error.Abort(_(b'not a Mercurial bundle'))
   777     unbundlerclass = formatmap.get(version)
   777     unbundlerclass = formatmap.get(version)
   778     if unbundlerclass is None:
   778     if unbundlerclass is None:
   779         raise error.Abort(_('unknown bundle version %s') % version)
   779         raise error.Abort(_(b'unknown bundle version %s') % version)
   780     unbundler = unbundlerclass(ui, fp)
   780     unbundler = unbundlerclass(ui, fp)
   781     indebug(ui, 'start processing of %s stream' % magicstring)
   781     indebug(ui, b'start processing of %s stream' % magicstring)
   782     return unbundler
   782     return unbundler
   783 
   783 
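A sketch of the magic/version split getunbundler performs; the version half then selects an unbundler class from the formatmap registry defined further down.

    magicstring = b'HG20'
    magic, version = magicstring[0:2], magicstring[2:4]
    assert (magic, version) == (b'HG', b'20')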
   784 
   784 
   785 class unbundle20(unpackermixin):
   785 class unbundle20(unpackermixin):
   786     """interpret a bundle2 stream
   786     """interpret a bundle2 stream
   787 
   787 
   788     This class is fed with a binary stream and yields parts through its
   788     This class is fed with a binary stream and yields parts through its
   789     `iterparts` method."""
   789     `iterparts` method."""
   790 
   790 
   791     _magicstring = 'HG20'
   791     _magicstring = b'HG20'
   792 
   792 
   793     def __init__(self, ui, fp):
   793     def __init__(self, ui, fp):
   794         """If header is specified, we do not read it out of the stream."""
   794         """If header is specified, we do not read it out of the stream."""
   795         self.ui = ui
   795         self.ui = ui
   796         self._compengine = util.compengines.forbundletype('UN')
   796         self._compengine = util.compengines.forbundletype(b'UN')
   797         self._compressed = None
   797         self._compressed = None
   798         super(unbundle20, self).__init__(fp)
   798         super(unbundle20, self).__init__(fp)
   799 
   799 
   800     @util.propertycache
   800     @util.propertycache
   801     def params(self):
   801     def params(self):
   802         """dictionary of stream level parameters"""
   802         """dictionary of stream level parameters"""
   803         indebug(self.ui, 'reading bundle2 stream parameters')
   803         indebug(self.ui, b'reading bundle2 stream parameters')
   804         params = {}
   804         params = {}
   805         paramssize = self._unpack(_fstreamparamsize)[0]
   805         paramssize = self._unpack(_fstreamparamsize)[0]
   806         if paramssize < 0:
   806         if paramssize < 0:
   807             raise error.BundleValueError(
   807             raise error.BundleValueError(
   808                 'negative bundle param size: %i' % paramssize
   808                 b'negative bundle param size: %i' % paramssize
   809             )
   809             )
   810         if paramssize:
   810         if paramssize:
   811             params = self._readexact(paramssize)
   811             params = self._readexact(paramssize)
   812             params = self._processallparams(params)
   812             params = self._processallparams(params)
   813         return params
   813         return params
   814 
   814 
   815     def _processallparams(self, paramsblock):
   815     def _processallparams(self, paramsblock):
   816         """"""
   816         """"""
   817         params = util.sortdict()
   817         params = util.sortdict()
   818         for p in paramsblock.split(' '):
   818         for p in paramsblock.split(b' '):
   819             p = p.split('=', 1)
   819             p = p.split(b'=', 1)
   820             p = [urlreq.unquote(i) for i in p]
   820             p = [urlreq.unquote(i) for i in p]
   821             if len(p) < 2:
   821             if len(p) < 2:
   822                 p.append(None)
   822                 p.append(None)
   823             self._processparam(*p)
   823             self._processparam(*p)
   824             params[p[0]] = p[1]
   824             params[p[0]] = p[1]
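For illustration, a hypothetical stream-parameter block as _processallparams receives it: space-separated entries, each b'name' or b'name=value', URL-quoted; each name is then dispatched through b2streamparamsmap after lower-casing.

    paramsblock = b'Compression=BZ someparam=two%20words'   # someparam is made up
    entries = [p.split(b'=', 1) for p in paramsblock.split(b' ')]
    # -> [[b'Compression', b'BZ'], [b'someparam', b'two%20words']]
    # after unquoting, the resulting sortdict is roughly
    #   {b'Compression': b'BZ', b'someparam': b'two words'}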
   840             raise ValueError(r'non letter first character: %s' % name)
   840             raise ValueError(r'non letter first character: %s' % name)
   841         try:
   841         try:
   842             handler = b2streamparamsmap[name.lower()]
   842             handler = b2streamparamsmap[name.lower()]
   843         except KeyError:
   843         except KeyError:
   844             if name[0:1].islower():
   844             if name[0:1].islower():
   845                 indebug(self.ui, "ignoring unknown parameter %s" % name)
   845                 indebug(self.ui, b"ignoring unknown parameter %s" % name)
   846             else:
   846             else:
   847                 raise error.BundleUnknownFeatureError(params=(name,))
   847                 raise error.BundleUnknownFeatureError(params=(name,))
   848         else:
   848         else:
   849             handler(self, name, value)
   849             handler(self, name, value)
   850 
   850 
   855         have no way to know when the reply ends, relying on the bundle to be
   855         have no way to know when the reply ends, relying on the bundle to be
   856         interpreted to know its end. This is terrible and we are sorry, but we
   856         interpreted to know its end. This is terrible and we are sorry, but we
   857         needed to move forward to get general delta enabled.
   857         needed to move forward to get general delta enabled.
   858         """
   858         """
   859         yield self._magicstring
   859         yield self._magicstring
   860         assert 'params' not in vars(self)
   860         assert b'params' not in vars(self)
   861         paramssize = self._unpack(_fstreamparamsize)[0]
   861         paramssize = self._unpack(_fstreamparamsize)[0]
   862         if paramssize < 0:
   862         if paramssize < 0:
   863             raise error.BundleValueError(
   863             raise error.BundleValueError(
   864                 'negative bundle param size: %i' % paramssize
   864                 b'negative bundle param size: %i' % paramssize
   865             )
   865             )
   866         if paramssize:
   866         if paramssize:
   867             params = self._readexact(paramssize)
   867             params = self._readexact(paramssize)
   868             self._processallparams(params)
   868             self._processallparams(params)
   869             # The payload itself is decompressed below, so drop
   869             # The payload itself is decompressed below, so drop
   870             # the compression parameter passed down to compensate.
   870             # the compression parameter passed down to compensate.
   871             outparams = []
   871             outparams = []
   872             for p in params.split(' '):
   872             for p in params.split(b' '):
   873                 k, v = p.split('=', 1)
   873                 k, v = p.split(b'=', 1)
   874                 if k.lower() != 'compression':
   874                 if k.lower() != b'compression':
   875                     outparams.append(p)
   875                     outparams.append(p)
   876             outparams = ' '.join(outparams)
   876             outparams = b' '.join(outparams)
   877             yield _pack(_fstreamparamsize, len(outparams))
   877             yield _pack(_fstreamparamsize, len(outparams))
   878             yield outparams
   878             yield outparams
   879         else:
   879         else:
   880             yield _pack(_fstreamparamsize, paramssize)
   880             yield _pack(_fstreamparamsize, paramssize)
   881         # From there, payload might need to be decompressed
   881         # From there, payload might need to be decompressed
   892                 emptycount += 1
   892                 emptycount += 1
   893                 continue
   893                 continue
   894             if size == flaginterrupt:
   894             if size == flaginterrupt:
   895                 continue
   895                 continue
   896             elif size < 0:
   896             elif size < 0:
   897                 raise error.BundleValueError('negative chunk size: %i' % size)
   897                 raise error.BundleValueError(b'negative chunk size: %i' % size)
   898             yield self._readexact(size)
   898             yield self._readexact(size)
   899 
   899 
   900     def iterparts(self, seekable=False):
   900     def iterparts(self, seekable=False):
   901         """yield all parts contained in the stream"""
   901         """yield all parts contained in the stream"""
   902         cls = seekableunbundlepart if seekable else unbundlepart
   902         cls = seekableunbundlepart if seekable else unbundlepart
   903         # make sure params have been loaded
   903         # make sure params have been loaded
   904         self.params
   904         self.params
   905         # From there, the payload needs to be decompressed
   905         # From there, the payload needs to be decompressed
   906         self._fp = self._compengine.decompressorreader(self._fp)
   906         self._fp = self._compengine.decompressorreader(self._fp)
   907         indebug(self.ui, 'start extraction of bundle2 parts')
   907         indebug(self.ui, b'start extraction of bundle2 parts')
   908         headerblock = self._readpartheader()
   908         headerblock = self._readpartheader()
   909         while headerblock is not None:
   909         while headerblock is not None:
   910             part = cls(self.ui, headerblock, self._fp)
   910             part = cls(self.ui, headerblock, self._fp)
   911             yield part
   911             yield part
   912             # Ensure part is fully consumed so we can start reading the next
   912             # Ensure part is fully consumed so we can start reading the next
   913             # part.
   913             # part.
   914             part.consume()
   914             part.consume()
   915 
   915 
   916             headerblock = self._readpartheader()
   916             headerblock = self._readpartheader()
   917         indebug(self.ui, 'end of bundle2 stream')
   917         indebug(self.ui, b'end of bundle2 stream')
   918 
   918 
   919     def _readpartheader(self):
   919     def _readpartheader(self):
   920         """reads a part header size and return the bytes blob
   920         """reads a part header size and return the bytes blob
   921 
   921 
   922         returns None if empty"""
   922         returns None if empty"""
   923         headersize = self._unpack(_fpartheadersize)[0]
   923         headersize = self._unpack(_fpartheadersize)[0]
   924         if headersize < 0:
   924         if headersize < 0:
   925             raise error.BundleValueError(
   925             raise error.BundleValueError(
   926                 'negative part header size: %i' % headersize
   926                 b'negative part header size: %i' % headersize
   927             )
   927             )
   928         indebug(self.ui, 'part header size: %i' % headersize)
   928         indebug(self.ui, b'part header size: %i' % headersize)
   929         if headersize:
   929         if headersize:
   930             return self._readexact(headersize)
   930             return self._readexact(headersize)
   931         return None
   931         return None
   932 
   932 
   933     def compressed(self):
   933     def compressed(self):
   934         self.params  # load params
   934         self.params  # load params
   935         return self._compressed
   935         return self._compressed
   936 
   936 
   937     def close(self):
   937     def close(self):
   938         """close underlying file"""
   938         """close underlying file"""
   939         if util.safehasattr(self._fp, 'close'):
   939         if util.safehasattr(self._fp, b'close'):
   940             return self._fp.close()
   940             return self._fp.close()
   941 
   941 
   942 
   942 
   943 formatmap = {'20': unbundle20}
   943 formatmap = {b'20': unbundle20}
   944 
   944 
   945 b2streamparamsmap = {}
   945 b2streamparamsmap = {}
   946 
   946 
   947 
   947 
   948 def b2streamparamhandler(name):
   948 def b2streamparamhandler(name):
   954         return func
   954         return func
   955 
   955 
   956     return decorator
   956     return decorator
   957 
   957 
   958 
   958 
   959 @b2streamparamhandler('compression')
   959 @b2streamparamhandler(b'compression')
   960 def processcompression(unbundler, param, value):
   960 def processcompression(unbundler, param, value):
   961     """read compression parameter and install payload decompression"""
   961     """read compression parameter and install payload decompression"""
   962     if value not in util.compengines.supportedbundletypes:
   962     if value not in util.compengines.supportedbundletypes:
   963         raise error.BundleUnknownFeatureError(params=(param,), values=(value,))
   963         raise error.BundleUnknownFeatureError(params=(param,), values=(value,))
   964     unbundler._compengine = util.compengines.forbundletype(value)
   964     unbundler._compengine = util.compengines.forbundletype(value)
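A sketch of how another stream-level parameter could be handled with the same b2streamparamhandler decorator; the parameter name b'exampleparam' is made up, and its lower-case spelling marks it as advisory.

    @b2streamparamhandler(b'exampleparam')
    def processexampleparam(unbundler, param, value):
        # 'value' is None when the parameter arrives without '=value'
        unbundler.ui.debug(b'bundle2: stream param %s=%s\n' % (param, value or b''))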
   985     def __init__(
   985     def __init__(
   986         self,
   986         self,
   987         parttype,
   987         parttype,
   988         mandatoryparams=(),
   988         mandatoryparams=(),
   989         advisoryparams=(),
   989         advisoryparams=(),
   990         data='',
   990         data=b'',
   991         mandatory=True,
   991         mandatory=True,
   992     ):
   992     ):
   993         validateparttype(parttype)
   993         validateparttype(parttype)
   994         self.id = None
   994         self.id = None
   995         self.type = parttype
   995         self.type = parttype
   998         self._advisoryparams = list(advisoryparams)
   998         self._advisoryparams = list(advisoryparams)
   999         # checking for duplicated entries
   999         # checking for duplicated entries
  1000         self._seenparams = set()
  1000         self._seenparams = set()
  1001         for pname, __ in self._mandatoryparams + self._advisoryparams:
  1001         for pname, __ in self._mandatoryparams + self._advisoryparams:
  1002             if pname in self._seenparams:
  1002             if pname in self._seenparams:
  1003                 raise error.ProgrammingError('duplicated params: %s' % pname)
  1003                 raise error.ProgrammingError(b'duplicated params: %s' % pname)
  1004             self._seenparams.add(pname)
  1004             self._seenparams.add(pname)
  1005         # status of the part's generation:
  1005         # status of the part's generation:
  1006         # - None: not started,
  1006         # - None: not started,
  1007         # - False: currently generated,
  1007         # - False: currently generated,
  1008         # - True: generation done.
  1008         # - True: generation done.
  1009         self._generated = None
  1009         self._generated = None
  1010         self.mandatory = mandatory
  1010         self.mandatory = mandatory
  1011 
  1011 
  1012     def __repr__(self):
  1012     def __repr__(self):
  1013         cls = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
  1013         cls = b"%s.%s" % (self.__class__.__module__, self.__class__.__name__)
  1014         return '<%s object at %x; id: %s; type: %s; mandatory: %s>' % (
  1014         return b'<%s object at %x; id: %s; type: %s; mandatory: %s>' % (
  1015             cls,
  1015             cls,
  1016             id(self),
  1016             id(self),
  1017             self.id,
  1017             self.id,
  1018             self.type,
  1018             self.type,
  1019             self.mandatory,
  1019             self.mandatory,
  1022     def copy(self):
  1022     def copy(self):
  1023         """return a copy of the part
  1023         """return a copy of the part
  1024 
  1024 
   1025         The new part has the very same content but no partid assigned yet.
   1025         The new part has the very same content but no partid assigned yet.
  1026         Parts with generated data cannot be copied."""
  1026         Parts with generated data cannot be copied."""
  1027         assert not util.safehasattr(self.data, 'next')
  1027         assert not util.safehasattr(self.data, b'next')
  1028         return self.__class__(
  1028         return self.__class__(
  1029             self.type,
  1029             self.type,
  1030             self._mandatoryparams,
  1030             self._mandatoryparams,
  1031             self._advisoryparams,
  1031             self._advisoryparams,
  1032             self._data,
  1032             self._data,
  1039         return self._data
  1039         return self._data
  1040 
  1040 
  1041     @data.setter
  1041     @data.setter
  1042     def data(self, data):
  1042     def data(self, data):
  1043         if self._generated is not None:
  1043         if self._generated is not None:
  1044             raise error.ReadOnlyPartError('part is being generated')
  1044             raise error.ReadOnlyPartError(b'part is being generated')
  1045         self._data = data
  1045         self._data = data
  1046 
  1046 
  1047     @property
  1047     @property
  1048     def mandatoryparams(self):
  1048     def mandatoryparams(self):
  1049         # make it an immutable tuple to force people through ``addparam``
  1049         # make it an immutable tuple to force people through ``addparam``
  1052     @property
  1052     @property
  1053     def advisoryparams(self):
  1053     def advisoryparams(self):
  1054         # make it an immutable tuple to force people through ``addparam``
  1054         # make it an immutable tuple to force people through ``addparam``
  1055         return tuple(self._advisoryparams)
  1055         return tuple(self._advisoryparams)
  1056 
  1056 
  1057     def addparam(self, name, value='', mandatory=True):
  1057     def addparam(self, name, value=b'', mandatory=True):
  1058         """add a parameter to the part
  1058         """add a parameter to the part
  1059 
  1059 
  1060         If 'mandatory' is set to True, the remote handler must claim support
  1060         If 'mandatory' is set to True, the remote handler must claim support
  1061         for this parameter or the unbundling will be aborted.
  1061         for this parameter or the unbundling will be aborted.
  1062 
  1062 
  1063         The 'name' and 'value' cannot exceed 255 bytes each.
  1063         The 'name' and 'value' cannot exceed 255 bytes each.
  1064         """
  1064         """
  1065         if self._generated is not None:
  1065         if self._generated is not None:
  1066             raise error.ReadOnlyPartError('part is being generated')
  1066             raise error.ReadOnlyPartError(b'part is being generated')
  1067         if name in self._seenparams:
  1067         if name in self._seenparams:
  1068             raise ValueError('duplicated params: %s' % name)
  1068             raise ValueError(b'duplicated params: %s' % name)
  1069         self._seenparams.add(name)
  1069         self._seenparams.add(name)
  1070         params = self._advisoryparams
  1070         params = self._advisoryparams
  1071         if mandatory:
  1071         if mandatory:
  1072             params = self._mandatoryparams
  1072             params = self._mandatoryparams
  1073         params.append((name, value))
  1073         params.append((name, value))
  1074 
  1074 
   1075     # methods used to generate the bundle2 stream
   1075     # methods used to generate the bundle2 stream
  1076     def getchunks(self, ui):
  1076     def getchunks(self, ui):
  1077         if self._generated is not None:
  1077         if self._generated is not None:
  1078             raise error.ProgrammingError('part can only be consumed once')
  1078             raise error.ProgrammingError(b'part can only be consumed once')
  1079         self._generated = False
  1079         self._generated = False
  1080 
  1080 
  1081         if ui.debugflag:
  1081         if ui.debugflag:
  1082             msg = ['bundle2-output-part: "%s"' % self.type]
  1082             msg = [b'bundle2-output-part: "%s"' % self.type]
  1083             if not self.mandatory:
  1083             if not self.mandatory:
  1084                 msg.append(' (advisory)')
  1084                 msg.append(b' (advisory)')
  1085             nbmp = len(self.mandatoryparams)
  1085             nbmp = len(self.mandatoryparams)
  1086             nbap = len(self.advisoryparams)
  1086             nbap = len(self.advisoryparams)
  1087             if nbmp or nbap:
  1087             if nbmp or nbap:
  1088                 msg.append(' (params:')
  1088                 msg.append(b' (params:')
  1089                 if nbmp:
  1089                 if nbmp:
  1090                     msg.append(' %i mandatory' % nbmp)
  1090                     msg.append(b' %i mandatory' % nbmp)
  1091                 if nbap:
  1091                 if nbap:
   1092                     msg.append(' %i advisory' % nbap)
   1092                     msg.append(b' %i advisory' % nbap)
  1093                 msg.append(')')
  1093                 msg.append(b')')
  1094             if not self.data:
  1094             if not self.data:
  1095                 msg.append(' empty payload')
  1095                 msg.append(b' empty payload')
  1096             elif util.safehasattr(self.data, 'next') or util.safehasattr(
  1096             elif util.safehasattr(self.data, b'next') or util.safehasattr(
  1097                 self.data, '__next__'
  1097                 self.data, b'__next__'
  1098             ):
  1098             ):
  1099                 msg.append(' streamed payload')
  1099                 msg.append(b' streamed payload')
  1100             else:
  1100             else:
  1101                 msg.append(' %i bytes payload' % len(self.data))
  1101                 msg.append(b' %i bytes payload' % len(self.data))
  1102             msg.append('\n')
  1102             msg.append(b'\n')
  1103             ui.debug(''.join(msg))
  1103             ui.debug(b''.join(msg))
  1104 
  1104 
  1105         #### header
  1105         #### header
  1106         if self.mandatory:
  1106         if self.mandatory:
  1107             parttype = self.type.upper()
  1107             parttype = self.type.upper()
  1108         else:
  1108         else:
  1109             parttype = self.type.lower()
  1109             parttype = self.type.lower()
  1110         outdebug(ui, 'part %s: "%s"' % (pycompat.bytestr(self.id), parttype))
  1110         outdebug(ui, b'part %s: "%s"' % (pycompat.bytestr(self.id), parttype))
  1111         ## parttype
  1111         ## parttype
  1112         header = [
  1112         header = [
  1113             _pack(_fparttypesize, len(parttype)),
  1113             _pack(_fparttypesize, len(parttype)),
  1114             parttype,
  1114             parttype,
  1115             _pack(_fpartid, self.id),
  1115             _pack(_fpartid, self.id),
  1136         for key, value in advpar:
  1136         for key, value in advpar:
  1137             header.append(key)
  1137             header.append(key)
  1138             header.append(value)
  1138             header.append(value)
  1139         ## finalize header
  1139         ## finalize header
  1140         try:
  1140         try:
  1141             headerchunk = ''.join(header)
  1141             headerchunk = b''.join(header)
  1142         except TypeError:
  1142         except TypeError:
  1143             raise TypeError(
  1143             raise TypeError(
  1144                 r'Found a non-bytes trying to '
  1144                 r'Found a non-bytes trying to '
  1145                 r'build bundle part header: %r' % header
  1145                 r'build bundle part header: %r' % header
  1146             )
  1146             )
  1147         outdebug(ui, 'header chunk size: %i' % len(headerchunk))
  1147         outdebug(ui, b'header chunk size: %i' % len(headerchunk))
  1148         yield _pack(_fpartheadersize, len(headerchunk))
  1148         yield _pack(_fpartheadersize, len(headerchunk))
  1149         yield headerchunk
  1149         yield headerchunk
  1150         ## payload
  1150         ## payload
  1151         try:
  1151         try:
  1152             for chunk in self._payloadchunks():
  1152             for chunk in self._payloadchunks():
  1153                 outdebug(ui, 'payload chunk size: %i' % len(chunk))
  1153                 outdebug(ui, b'payload chunk size: %i' % len(chunk))
  1154                 yield _pack(_fpayloadsize, len(chunk))
  1154                 yield _pack(_fpayloadsize, len(chunk))
  1155                 yield chunk
  1155                 yield chunk
  1156         except GeneratorExit:
  1156         except GeneratorExit:
  1157             # GeneratorExit means that nobody is listening for our
  1157             # GeneratorExit means that nobody is listening for our
  1158             # results anyway, so just bail quickly rather than trying
  1158             # results anyway, so just bail quickly rather than trying
  1159             # to produce an error part.
  1159             # to produce an error part.
  1160             ui.debug('bundle2-generatorexit\n')
  1160             ui.debug(b'bundle2-generatorexit\n')
  1161             raise
  1161             raise
  1162         except BaseException as exc:
  1162         except BaseException as exc:
  1163             bexc = stringutil.forcebytestr(exc)
  1163             bexc = stringutil.forcebytestr(exc)
  1164             # backup exception data for later
  1164             # backup exception data for later
  1165             ui.debug(
  1165             ui.debug(
  1166                 'bundle2-input-stream-interrupt: encoding exception %s' % bexc
  1166                 b'bundle2-input-stream-interrupt: encoding exception %s' % bexc
  1167             )
  1167             )
  1168             tb = sys.exc_info()[2]
  1168             tb = sys.exc_info()[2]
  1169             msg = 'unexpected error: %s' % bexc
  1169             msg = b'unexpected error: %s' % bexc
  1170             interpart = bundlepart(
  1170             interpart = bundlepart(
  1171                 'error:abort', [('message', msg)], mandatory=False
  1171                 b'error:abort', [(b'message', msg)], mandatory=False
  1172             )
  1172             )
  1173             interpart.id = 0
  1173             interpart.id = 0
  1174             yield _pack(_fpayloadsize, -1)
  1174             yield _pack(_fpayloadsize, -1)
  1175             for chunk in interpart.getchunks(ui=ui):
  1175             for chunk in interpart.getchunks(ui=ui):
  1176                 yield chunk
  1176                 yield chunk
  1177             outdebug(ui, 'closing payload chunk')
  1177             outdebug(ui, b'closing payload chunk')
  1178             # abort current part payload
  1178             # abort current part payload
  1179             yield _pack(_fpayloadsize, 0)
  1179             yield _pack(_fpayloadsize, 0)
  1180             pycompat.raisewithtb(exc, tb)
  1180             pycompat.raisewithtb(exc, tb)
  1181         # end of payload
  1181         # end of payload
  1182         outdebug(ui, 'closing payload chunk')
  1182         outdebug(ui, b'closing payload chunk')
  1183         yield _pack(_fpayloadsize, 0)
  1183         yield _pack(_fpayloadsize, 0)
  1184         self._generated = True
  1184         self._generated = True
  1185 
  1185 
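A rough sketch of the byte stream getchunks() emits for one part, reconstructed from the packing calls above (not a normative description):

    # >i   size of the header chunk                 (_fpartheadersize)
    #      the header chunk itself:
    #        >B  length of the part type name       (_fparttypesize)
    #            the type, upper-cased when the part is mandatory
    #        >I  numeric part id                    (_fpartid)
    #        >BB mandatory / advisory param counts  (_fpartparamcount)
    #            then the param size table and the param key/value blocks
    # then repeatedly:
    # >i   payload chunk length, followed by that many bytes
    # >i   0 closes the payload; -1 announces an interruption part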
  1186     def _payloadchunks(self):
  1186     def _payloadchunks(self):
   1187         """yield chunks of the part payload
   1187         """yield chunks of the part payload
  1188 
  1188 
  1189         Exists to handle the different methods to provide data to a part."""
  1189         Exists to handle the different methods to provide data to a part."""
  1190         # we only support fixed size data now.
  1190         # we only support fixed size data now.
  1191         # This will be improved in the future.
  1191         # This will be improved in the future.
  1192         if util.safehasattr(self.data, 'next') or util.safehasattr(
  1192         if util.safehasattr(self.data, b'next') or util.safehasattr(
  1193             self.data, '__next__'
  1193             self.data, b'__next__'
  1194         ):
  1194         ):
  1195             buff = util.chunkbuffer(self.data)
  1195             buff = util.chunkbuffer(self.data)
  1196             chunk = buff.read(preferedchunksize)
  1196             chunk = buff.read(preferedchunksize)
  1197             while chunk:
  1197             while chunk:
  1198                 yield chunk
  1198                 yield chunk
  1221 
  1221 
  1222         returns None if empty"""
  1222         returns None if empty"""
  1223         headersize = self._unpack(_fpartheadersize)[0]
  1223         headersize = self._unpack(_fpartheadersize)[0]
  1224         if headersize < 0:
  1224         if headersize < 0:
  1225             raise error.BundleValueError(
  1225             raise error.BundleValueError(
  1226                 'negative part header size: %i' % headersize
  1226                 b'negative part header size: %i' % headersize
  1227             )
  1227             )
  1228         indebug(self.ui, 'part header size: %i\n' % headersize)
  1228         indebug(self.ui, b'part header size: %i\n' % headersize)
  1229         if headersize:
  1229         if headersize:
  1230             return self._readexact(headersize)
  1230             return self._readexact(headersize)
  1231         return None
  1231         return None
  1232 
  1232 
  1233     def __call__(self):
  1233     def __call__(self):
  1234 
  1234 
  1235         self.ui.debug(
  1235         self.ui.debug(
  1236             'bundle2-input-stream-interrupt:' ' opening out of band context\n'
  1236             b'bundle2-input-stream-interrupt:' b' opening out of band context\n'
  1237         )
  1237         )
  1238         indebug(self.ui, 'bundle2 stream interruption, looking for a part.')
  1238         indebug(self.ui, b'bundle2 stream interruption, looking for a part.')
  1239         headerblock = self._readpartheader()
  1239         headerblock = self._readpartheader()
  1240         if headerblock is None:
  1240         if headerblock is None:
  1241             indebug(self.ui, 'no part found during interruption.')
  1241             indebug(self.ui, b'no part found during interruption.')
  1242             return
  1242             return
  1243         part = unbundlepart(self.ui, headerblock, self._fp)
  1243         part = unbundlepart(self.ui, headerblock, self._fp)
  1244         op = interruptoperation(self.ui)
  1244         op = interruptoperation(self.ui)
  1245         hardabort = False
  1245         hardabort = False
  1246         try:
  1246         try:
  1250             raise
  1250             raise
  1251         finally:
  1251         finally:
  1252             if not hardabort:
  1252             if not hardabort:
  1253                 part.consume()
  1253                 part.consume()
  1254         self.ui.debug(
  1254         self.ui.debug(
  1255             'bundle2-input-stream-interrupt:' ' closing out of band context\n'
  1255             b'bundle2-input-stream-interrupt:' b' closing out of band context\n'
  1256         )
  1256         )
  1257 
  1257 
  1258 
  1258 
  1259 class interruptoperation(object):
  1259 class interruptoperation(object):
  1260     """A limited operation to be used by part handlers during interruption
  1260     """A limited operation to be used by part handlers during interruption
  1267         self.reply = None
  1267         self.reply = None
  1268         self.captureoutput = False
  1268         self.captureoutput = False
  1269 
  1269 
  1270     @property
  1270     @property
  1271     def repo(self):
  1271     def repo(self):
  1272         raise error.ProgrammingError('no repo access from stream interruption')
  1272         raise error.ProgrammingError(b'no repo access from stream interruption')
  1273 
  1273 
  1274     def gettransaction(self):
  1274     def gettransaction(self):
  1275         raise TransactionUnavailable('no repo access from stream interruption')
  1275         raise TransactionUnavailable(b'no repo access from stream interruption')
  1276 
  1276 
  1277 
  1277 
  1278 def decodepayloadchunks(ui, fh):
  1278 def decodepayloadchunks(ui, fh):
  1279     """Reads bundle2 part payload data into chunks.
  1279     """Reads bundle2 part payload data into chunks.
  1280 
  1280 
  1281     Part payload data consists of framed chunks. This function takes
  1281     Part payload data consists of framed chunks. This function takes
  1282     a file handle and emits those chunks.
  1282     a file handle and emits those chunks.
  1283     """
  1283     """
  1284     dolog = ui.configbool('devel', 'bundle2.debug')
  1284     dolog = ui.configbool(b'devel', b'bundle2.debug')
  1285     debug = ui.debug
  1285     debug = ui.debug
  1286 
  1286 
  1287     headerstruct = struct.Struct(_fpayloadsize)
  1287     headerstruct = struct.Struct(_fpayloadsize)
  1288     headersize = headerstruct.size
  1288     headersize = headerstruct.size
  1289     unpack = headerstruct.unpack
  1289     unpack = headerstruct.unpack
  1290 
  1290 
  1291     readexactly = changegroup.readexactly
  1291     readexactly = changegroup.readexactly
  1292     read = fh.read
  1292     read = fh.read
  1293 
  1293 
  1294     chunksize = unpack(readexactly(fh, headersize))[0]
  1294     chunksize = unpack(readexactly(fh, headersize))[0]
  1295     indebug(ui, 'payload chunk size: %i' % chunksize)
  1295     indebug(ui, b'payload chunk size: %i' % chunksize)
  1296 
  1296 
  1297     # changegroup.readexactly() is inlined below for performance.
  1297     # changegroup.readexactly() is inlined below for performance.
  1298     while chunksize:
  1298     while chunksize:
  1299         if chunksize >= 0:
  1299         if chunksize >= 0:
  1300             s = read(chunksize)
  1300             s = read(chunksize)
  1301             if len(s) < chunksize:
  1301             if len(s) < chunksize:
  1302                 raise error.Abort(
  1302                 raise error.Abort(
  1303                     _(
  1303                     _(
  1304                         'stream ended unexpectedly '
  1304                         b'stream ended unexpectedly '
  1305                         ' (got %d bytes, expected %d)'
  1305                         b' (got %d bytes, expected %d)'
  1306                     )
  1306                     )
  1307                     % (len(s), chunksize)
  1307                     % (len(s), chunksize)
  1308                 )
  1308                 )
  1309 
  1309 
  1310             yield s
  1310             yield s
  1312             # Interrupt "signal" detected. The regular stream is interrupted
  1312             # Interrupt "signal" detected. The regular stream is interrupted
  1313             # and a bundle2 part follows. Consume it.
  1313             # and a bundle2 part follows. Consume it.
  1314             interrupthandler(ui, fh)()
  1314             interrupthandler(ui, fh)()
  1315         else:
  1315         else:
  1316             raise error.BundleValueError(
  1316             raise error.BundleValueError(
  1317                 'negative payload chunk size: %s' % chunksize
  1317                 b'negative payload chunk size: %s' % chunksize
  1318             )
  1318             )
  1319 
  1319 
  1320         s = read(headersize)
  1320         s = read(headersize)
  1321         if len(s) < headersize:
  1321         if len(s) < headersize:
  1322             raise error.Abort(
  1322             raise error.Abort(
  1323                 _('stream ended unexpectedly ' ' (got %d bytes, expected %d)')
  1323                 _(b'stream ended unexpectedly ' b' (got %d bytes, expected %d)')
  1324                 % (len(s), chunksize)
  1324                 % (len(s), chunksize)
  1325             )
  1325             )
  1326 
  1326 
  1327         chunksize = unpack(s)[0]
  1327         chunksize = unpack(s)[0]
  1328 
  1328 
  1329         # indebug() inlined for performance.
  1329         # indebug() inlined for performance.
  1330         if dolog:
  1330         if dolog:
  1331             debug('bundle2-input: payload chunk size: %i\n' % chunksize)
  1331             debug(b'bundle2-input: payload chunk size: %i\n' % chunksize)
  1332 
  1332 
  1333 
  1333 
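As a companion to decodepayloadchunks() above: a part payload is a sequence of chunks, each prefixed by a big-endian signed 32-bit length ('>i'), and a zero length terminates the payload; negative lengths are reserved for flags such as the out-of-band interrupt consumed by interrupthandler(). A minimal self-contained sketch of that framing, with helper names invented here and interrupt handling deliberately omitted:

import io
import struct

_fpayloadsize = '>i'  # big-endian signed 32-bit chunk length, as above

def encodepayload(chunks):
    # Frame each chunk with its length and close with a zero-length marker.
    out = []
    for chunk in chunks:
        out.append(struct.pack(_fpayloadsize, len(chunk)))
        out.append(chunk)
    out.append(struct.pack(_fpayloadsize, 0))
    return b''.join(out)

def decodepayload(fh):
    # Yield the framed chunks back; no interrupt (negative length) support here.
    headersize = struct.calcsize(_fpayloadsize)
    size = struct.unpack(_fpayloadsize, fh.read(headersize))[0]
    while size:
        if size < 0:
            raise ValueError('negative payload chunk size: %d' % size)
        yield fh.read(size)
        size = struct.unpack(_fpayloadsize, fh.read(headersize))[0]

framed = encodepayload([b'abc', b'defg'])
assert list(decodepayload(io.BytesIO(framed))) == [b'abc', b'defg']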
  1334 class unbundlepart(unpackermixin):
  1334 class unbundlepart(unpackermixin):
  1335     """a bundle part read from a bundle"""
  1335     """a bundle part read from a bundle"""
  1336 
  1336 
  1337     def __init__(self, ui, header, fp):
  1337     def __init__(self, ui, header, fp):
  1338         super(unbundlepart, self).__init__(fp)
  1338         super(unbundlepart, self).__init__(fp)
  1339         self._seekable = util.safehasattr(fp, 'seek') and util.safehasattr(
  1339         self._seekable = util.safehasattr(fp, b'seek') and util.safehasattr(
  1340             fp, 'tell'
  1340             fp, b'tell'
  1341         )
  1341         )
  1342         self.ui = ui
  1342         self.ui = ui
  1343         # unbundle state attr
  1343         # unbundle state attr
  1344         self._headerdata = header
  1344         self._headerdata = header
  1345         self._headeroffset = 0
  1345         self._headeroffset = 0
  1382 
  1382 
  1383     def _readheader(self):
  1383     def _readheader(self):
  1384         """read the header and setup the object"""
  1384         """read the header and setup the object"""
  1385         typesize = self._unpackheader(_fparttypesize)[0]
  1385         typesize = self._unpackheader(_fparttypesize)[0]
  1386         self.type = self._fromheader(typesize)
  1386         self.type = self._fromheader(typesize)
  1387         indebug(self.ui, 'part type: "%s"' % self.type)
  1387         indebug(self.ui, b'part type: "%s"' % self.type)
  1388         self.id = self._unpackheader(_fpartid)[0]
  1388         self.id = self._unpackheader(_fpartid)[0]
  1389         indebug(self.ui, 'part id: "%s"' % pycompat.bytestr(self.id))
  1389         indebug(self.ui, b'part id: "%s"' % pycompat.bytestr(self.id))
  1390         # extract mandatory bit from type
  1390         # extract mandatory bit from type
  1391         self.mandatory = self.type != self.type.lower()
  1391         self.mandatory = self.type != self.type.lower()
  1392         self.type = self.type.lower()
  1392         self.type = self.type.lower()
  1393         ## reading parameters
  1393         ## reading parameters
  1394         # param count
  1394         # param count
  1395         mancount, advcount = self._unpackheader(_fpartparamcount)
  1395         mancount, advcount = self._unpackheader(_fpartparamcount)
  1396         indebug(self.ui, 'part parameters: %i' % (mancount + advcount))
  1396         indebug(self.ui, b'part parameters: %i' % (mancount + advcount))
  1397         # param size
  1397         # param size
  1398         fparamsizes = _makefpartparamsizes(mancount + advcount)
  1398         fparamsizes = _makefpartparamsizes(mancount + advcount)
  1399         paramsizes = self._unpackheader(fparamsizes)
  1399         paramsizes = self._unpackheader(fparamsizes)
  1400         # make it a list of couple again
  1400         # make it a list of couple again
  1401         paramsizes = list(zip(paramsizes[::2], paramsizes[1::2]))
  1401         paramsizes = list(zip(paramsizes[::2], paramsizes[1::2]))
  1443             data = self._payloadstream.read(size)
  1443             data = self._payloadstream.read(size)
  1444         self._pos += len(data)
  1444         self._pos += len(data)
  1445         if size is None or len(data) < size:
  1445         if size is None or len(data) < size:
  1446             if not self.consumed and self._pos:
  1446             if not self.consumed and self._pos:
  1447                 self.ui.debug(
  1447                 self.ui.debug(
  1448                     'bundle2-input-part: total payload size %i\n' % self._pos
  1448                     b'bundle2-input-part: total payload size %i\n' % self._pos
  1449                 )
  1449                 )
  1450             self.consumed = True
  1450             self.consumed = True
  1451         return data
  1451         return data
  1452 
  1452 
  1453 
  1453 
  1476         super(seekableunbundlepart, self).__init__(ui, header, fp)
  1476         super(seekableunbundlepart, self).__init__(ui, header, fp)
  1477 
  1477 
  1478     def _payloadchunks(self, chunknum=0):
  1478     def _payloadchunks(self, chunknum=0):
  1479         '''seek to specified chunk and start yielding data'''
  1479         '''seek to specified chunk and start yielding data'''
  1480         if len(self._chunkindex) == 0:
  1480         if len(self._chunkindex) == 0:
  1481             assert chunknum == 0, 'Must start with chunk 0'
  1481             assert chunknum == 0, b'Must start with chunk 0'
  1482             self._chunkindex.append((0, self._tellfp()))
  1482             self._chunkindex.append((0, self._tellfp()))
  1483         else:
  1483         else:
  1484             assert chunknum < len(self._chunkindex), (
  1484             assert chunknum < len(self._chunkindex), (
  1485                 'Unknown chunk %d' % chunknum
  1485                 b'Unknown chunk %d' % chunknum
  1486             )
  1486             )
  1487             self._seekfp(self._chunkindex[chunknum][1])
  1487             self._seekfp(self._chunkindex[chunknum][1])
  1488 
  1488 
  1489         pos = self._chunkindex[chunknum][0]
  1489         pos = self._chunkindex[chunknum][0]
  1490 
  1490 
  1501         for chunk, (ppos, fpos) in enumerate(self._chunkindex):
  1501         for chunk, (ppos, fpos) in enumerate(self._chunkindex):
  1502             if ppos == pos:
  1502             if ppos == pos:
  1503                 return chunk, 0
  1503                 return chunk, 0
  1504             elif ppos > pos:
  1504             elif ppos > pos:
  1505                 return chunk - 1, pos - self._chunkindex[chunk - 1][0]
  1505                 return chunk - 1, pos - self._chunkindex[chunk - 1][0]
  1506         raise ValueError('Unknown chunk')
  1506         raise ValueError(b'Unknown chunk')
  1507 
  1507 
  1508     def tell(self):
  1508     def tell(self):
  1509         return self._pos
  1509         return self._pos
  1510 
  1510 
  1511     def seek(self, offset, whence=os.SEEK_SET):
  1511     def seek(self, offset, whence=os.SEEK_SET):
  1519                 chunk = self.read(32768)
  1519                 chunk = self.read(32768)
  1520                 while chunk:
  1520                 while chunk:
  1521                     chunk = self.read(32768)
  1521                     chunk = self.read(32768)
  1522             newpos = self._chunkindex[-1][0] - offset
  1522             newpos = self._chunkindex[-1][0] - offset
  1523         else:
  1523         else:
  1524             raise ValueError('Unknown whence value: %r' % (whence,))
  1524             raise ValueError(b'Unknown whence value: %r' % (whence,))
  1525 
  1525 
  1526         if newpos > self._chunkindex[-1][0] and not self.consumed:
  1526         if newpos > self._chunkindex[-1][0] and not self.consumed:
  1527             # Can't use self.consume() here because it advances self._pos.
  1527             # Can't use self.consume() here because it advances self._pos.
  1528             chunk = self.read(32768)
  1528             chunk = self.read(32768)
  1529             while chunk:
  1529             while chunk:
  1530                 chunk = self.read(32768)
  1530                 chunk = self.read(32768)
  1531 
  1531 
  1532         if not 0 <= newpos <= self._chunkindex[-1][0]:
  1532         if not 0 <= newpos <= self._chunkindex[-1][0]:
  1533             raise ValueError('Offset out of range')
  1533             raise ValueError(b'Offset out of range')
  1534 
  1534 
  1535         if self._pos != newpos:
  1535         if self._pos != newpos:
  1536             chunk, internaloffset = self._findchunk(newpos)
  1536             chunk, internaloffset = self._findchunk(newpos)
  1537             self._payloadstream = util.chunkbuffer(self._payloadchunks(chunk))
  1537             self._payloadstream = util.chunkbuffer(self._payloadchunks(chunk))
  1538             adjust = self.read(internaloffset)
  1538             adjust = self.read(internaloffset)
  1539             if len(adjust) != internaloffset:
  1539             if len(adjust) != internaloffset:
  1540                 raise error.Abort(_('Seek failed\n'))
  1540                 raise error.Abort(_(b'Seek failed\n'))
  1541             self._pos = newpos
  1541             self._pos = newpos
  1542 
  1542 
  1543     def _seekfp(self, offset, whence=0):
  1543     def _seekfp(self, offset, whence=0):
  1544         """move the underlying file pointer
  1544         """move the underlying file pointer
  1545 
  1545 
  1549 
  1549 
  1550         Do not use it to implement higher-level logic or methods."""
  1550         Do not use it to implement higher-level logic or methods."""
  1551         if self._seekable:
  1551         if self._seekable:
  1552             return self._fp.seek(offset, whence)
  1552             return self._fp.seek(offset, whence)
  1553         else:
  1553         else:
  1554             raise NotImplementedError(_('File pointer is not seekable'))
  1554             raise NotImplementedError(_(b'File pointer is not seekable'))
  1555 
  1555 
  1556     def _tellfp(self):
  1556     def _tellfp(self):
  1557         """return the file offset, or None if file is not seekable
  1557         """return the file offset, or None if file is not seekable
  1558 
  1558 
  1559         This method is meant for internal usage by the bundle2 protocol only.
  1559         This method is meant for internal usage by the bundle2 protocol only.
  1573 
  1573 
  1574 
  1574 
  1575 # These are only the static capabilities.
  1575 # These are only the static capabilities.
  1576 # Check the 'getrepocaps' function for the rest.
  1576 # Check the 'getrepocaps' function for the rest.
  1577 capabilities = {
  1577 capabilities = {
  1578     'HG20': (),
  1578     b'HG20': (),
  1579     'bookmarks': (),
  1579     b'bookmarks': (),
  1580     'error': ('abort', 'unsupportedcontent', 'pushraced', 'pushkey'),
  1580     b'error': (b'abort', b'unsupportedcontent', b'pushraced', b'pushkey'),
  1581     'listkeys': (),
  1581     b'listkeys': (),
  1582     'pushkey': (),
  1582     b'pushkey': (),
  1583     'digests': tuple(sorted(util.DIGESTS.keys())),
  1583     b'digests': tuple(sorted(util.DIGESTS.keys())),
  1584     'remote-changegroup': ('http', 'https'),
  1584     b'remote-changegroup': (b'http', b'https'),
  1585     'hgtagsfnodes': (),
  1585     b'hgtagsfnodes': (),
  1586     'rev-branch-cache': (),
  1586     b'rev-branch-cache': (),
  1587     'phases': ('heads',),
  1587     b'phases': (b'heads',),
  1588     'stream': ('v2',),
  1588     b'stream': (b'v2',),
  1589 }
  1589 }
  1590 
  1590 
  1591 
  1591 
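Before these capabilities travel over the wire they are flattened into a newline-separated, urlquoted blob (see encodecaps()/decodecaps() elsewhere in this module; bundle2caps() below unquotes and decodes it). A rough sketch of that flattening, using str and the stdlib quote for brevity where the real code stays in bytes, so details may differ from the real encoder:

from urllib.parse import quote

def encodecaps_sketch(caps):
    # One line per capability, 'name=value1,value2' when values are present.
    lines = []
    for name in sorted(caps):
        entry = quote(name)
        values = caps[name]
        if values:
            entry += '=' + ','.join(quote(v) for v in values)
        lines.append(entry)
    return '\n'.join(lines)

print(encodecaps_sketch({'HG20': (), 'phases': ('heads',), 'stream': ('v2',)}))
# HG20
# phases=heads
# stream=v2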
  1592 def getrepocaps(repo, allowpushback=False, role=None):
  1592 def getrepocaps(repo, allowpushback=False, role=None):
  1593     """return the bundle2 capabilities for a given repo
  1593     """return the bundle2 capabilities for a given repo
  1596 
  1596 
  1597     The returned value is used for servers advertising their capabilities as
  1597     The returned value is used for servers advertising their capabilities as
  1598     well as clients advertising their capabilities to servers as part of
  1598     well as clients advertising their capabilities to servers as part of
  1599     bundle2 requests. The ``role`` argument specifies which is which.
  1599     bundle2 requests. The ``role`` argument specifies which is which.
  1600     """
  1600     """
  1601     if role not in ('client', 'server'):
  1601     if role not in (b'client', b'server'):
  1602         raise error.ProgrammingError('role argument must be client or server')
  1602         raise error.ProgrammingError(b'role argument must be client or server')
  1603 
  1603 
  1604     caps = capabilities.copy()
  1604     caps = capabilities.copy()
  1605     caps['changegroup'] = tuple(
  1605     caps[b'changegroup'] = tuple(
  1606         sorted(changegroup.supportedincomingversions(repo))
  1606         sorted(changegroup.supportedincomingversions(repo))
  1607     )
  1607     )
  1608     if obsolete.isenabled(repo, obsolete.exchangeopt):
  1608     if obsolete.isenabled(repo, obsolete.exchangeopt):
  1609         supportedformat = tuple('V%i' % v for v in obsolete.formats)
  1609         supportedformat = tuple(b'V%i' % v for v in obsolete.formats)
  1610         caps['obsmarkers'] = supportedformat
  1610         caps[b'obsmarkers'] = supportedformat
  1611     if allowpushback:
  1611     if allowpushback:
  1612         caps['pushback'] = ()
  1612         caps[b'pushback'] = ()
  1613     cpmode = repo.ui.config('server', 'concurrent-push-mode')
  1613     cpmode = repo.ui.config(b'server', b'concurrent-push-mode')
  1614     if cpmode == 'check-related':
  1614     if cpmode == b'check-related':
  1615         caps['checkheads'] = ('related',)
  1615         caps[b'checkheads'] = (b'related',)
  1616     if 'phases' in repo.ui.configlist('devel', 'legacy.exchange'):
  1616     if b'phases' in repo.ui.configlist(b'devel', b'legacy.exchange'):
  1617         caps.pop('phases')
  1617         caps.pop(b'phases')
  1618 
  1618 
  1619     # Don't advertise stream clone support in server mode if not configured.
  1619     # Don't advertise stream clone support in server mode if not configured.
  1620     if role == 'server':
  1620     if role == b'server':
  1621         streamsupported = repo.ui.configbool(
  1621         streamsupported = repo.ui.configbool(
  1622             'server', 'uncompressed', untrusted=True
  1622             b'server', b'uncompressed', untrusted=True
  1623         )
  1623         )
  1624         featuresupported = repo.ui.configbool('server', 'bundle2.stream')
  1624         featuresupported = repo.ui.configbool(b'server', b'bundle2.stream')
  1625 
  1625 
  1626         if not streamsupported or not featuresupported:
  1626         if not streamsupported or not featuresupported:
  1627             caps.pop('stream')
  1627             caps.pop(b'stream')
  1628     # Else always advertise support on client, because payload support
  1628     # Else always advertise support on client, because payload support
  1629     # should always be advertised.
  1629     # should always be advertised.
  1630 
  1630 
  1631     return caps
  1631     return caps
  1632 
  1632 
  1633 
  1633 
  1634 def bundle2caps(remote):
  1634 def bundle2caps(remote):
  1635     """return the bundle capabilities of a peer as dict"""
  1635     """return the bundle capabilities of a peer as dict"""
  1636     raw = remote.capable('bundle2')
  1636     raw = remote.capable(b'bundle2')
  1637     if not raw and raw != '':
  1637     if not raw and raw != b'':
  1638         return {}
  1638         return {}
  1639     capsblob = urlreq.unquote(remote.capable('bundle2'))
  1639     capsblob = urlreq.unquote(remote.capable(b'bundle2'))
  1640     return decodecaps(capsblob)
  1640     return decodecaps(capsblob)
  1641 
  1641 
  1642 
  1642 
  1643 def obsmarkersversion(caps):
  1643 def obsmarkersversion(caps):
  1644     """extract the list of supported obsmarkers versions from a bundle2caps dict
  1644     """extract the list of supported obsmarkers versions from a bundle2caps dict
  1645     """
  1645     """
  1646     obscaps = caps.get('obsmarkers', ())
  1646     obscaps = caps.get(b'obsmarkers', ())
  1647     return [int(c[1:]) for c in obscaps if c.startswith('V')]
  1647     return [int(c[1:]) for c in obscaps if c.startswith(b'V')]
  1648 
  1648 
  1649 
  1649 
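A quick worked example of the extraction above, inlined so it runs standalone (the b'V%i' values mirror what getrepocaps() advertises when obsolescence exchange is enabled):

caps = {b'obsmarkers': (b'V0', b'V1')}
obscaps = caps.get(b'obsmarkers', ())
assert [int(c[1:]) for c in obscaps if c.startswith(b'V')] == [0, 1]
assert [int(c[1:]) for c in {}.get(b'obsmarkers', ()) if c.startswith(b'V')] == []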
  1650 def writenewbundle(
  1650 def writenewbundle(
  1651     ui,
  1651     ui,
  1652     repo,
  1652     repo,
  1657     opts,
  1657     opts,
  1658     vfs=None,
  1658     vfs=None,
  1659     compression=None,
  1659     compression=None,
  1660     compopts=None,
  1660     compopts=None,
  1661 ):
  1661 ):
  1662     if bundletype.startswith('HG10'):
  1662     if bundletype.startswith(b'HG10'):
  1663         cg = changegroup.makechangegroup(repo, outgoing, '01', source)
  1663         cg = changegroup.makechangegroup(repo, outgoing, b'01', source)
  1664         return writebundle(
  1664         return writebundle(
  1665             ui,
  1665             ui,
  1666             cg,
  1666             cg,
  1667             filename,
  1667             filename,
  1668             bundletype,
  1668             bundletype,
  1669             vfs=vfs,
  1669             vfs=vfs,
  1670             compression=compression,
  1670             compression=compression,
  1671             compopts=compopts,
  1671             compopts=compopts,
  1672         )
  1672         )
  1673     elif not bundletype.startswith('HG20'):
  1673     elif not bundletype.startswith(b'HG20'):
  1674         raise error.ProgrammingError('unknown bundle type: %s' % bundletype)
  1674         raise error.ProgrammingError(b'unknown bundle type: %s' % bundletype)
  1675 
  1675 
  1676     caps = {}
  1676     caps = {}
  1677     if 'obsolescence' in opts:
  1677     if b'obsolescence' in opts:
  1678         caps['obsmarkers'] = ('V1',)
  1678         caps[b'obsmarkers'] = (b'V1',)
  1679     bundle = bundle20(ui, caps)
  1679     bundle = bundle20(ui, caps)
  1680     bundle.setcompression(compression, compopts)
  1680     bundle.setcompression(compression, compopts)
  1681     _addpartsfromopts(ui, repo, bundle, source, outgoing, opts)
  1681     _addpartsfromopts(ui, repo, bundle, source, outgoing, opts)
  1682     chunkiter = bundle.getchunks()
  1682     chunkiter = bundle.getchunks()
  1683 
  1683 
  1692     # different right now. So we keep them separated for now for the sake of
  1692     # different right now. So we keep them separated for now for the sake of
  1693     # simplicity.
  1693     # simplicity.
  1694 
  1694 
  1695     # we might not always want a changegroup in such bundle, for example in
  1695     # we might not always want a changegroup in such bundle, for example in
  1696     # stream bundles
  1696     # stream bundles
  1697     if opts.get('changegroup', True):
  1697     if opts.get(b'changegroup', True):
  1698         cgversion = opts.get('cg.version')
  1698         cgversion = opts.get(b'cg.version')
  1699         if cgversion is None:
  1699         if cgversion is None:
  1700             cgversion = changegroup.safeversion(repo)
  1700             cgversion = changegroup.safeversion(repo)
  1701         cg = changegroup.makechangegroup(repo, outgoing, cgversion, source)
  1701         cg = changegroup.makechangegroup(repo, outgoing, cgversion, source)
  1702         part = bundler.newpart('changegroup', data=cg.getchunks())
  1702         part = bundler.newpart(b'changegroup', data=cg.getchunks())
  1703         part.addparam('version', cg.version)
  1703         part.addparam(b'version', cg.version)
  1704         if 'clcount' in cg.extras:
  1704         if b'clcount' in cg.extras:
  1705             part.addparam(
  1705             part.addparam(
  1706                 'nbchanges', '%d' % cg.extras['clcount'], mandatory=False
  1706                 b'nbchanges', b'%d' % cg.extras[b'clcount'], mandatory=False
  1707             )
  1707             )
  1708         if opts.get('phases') and repo.revs(
  1708         if opts.get(b'phases') and repo.revs(
  1709             '%ln and secret()', outgoing.missingheads
  1709             b'%ln and secret()', outgoing.missingheads
  1710         ):
  1710         ):
  1711             part.addparam('targetphase', '%d' % phases.secret, mandatory=False)
  1711             part.addparam(
  1712 
  1712                 b'targetphase', b'%d' % phases.secret, mandatory=False
  1713     if opts.get('streamv2', False):
  1713             )
       
  1714 
       
  1715     if opts.get(b'streamv2', False):
  1714         addpartbundlestream2(bundler, repo, stream=True)
  1716         addpartbundlestream2(bundler, repo, stream=True)
  1715 
  1717 
  1716     if opts.get('tagsfnodescache', True):
  1718     if opts.get(b'tagsfnodescache', True):
  1717         addparttagsfnodescache(repo, bundler, outgoing)
  1719         addparttagsfnodescache(repo, bundler, outgoing)
  1718 
  1720 
  1719     if opts.get('revbranchcache', True):
  1721     if opts.get(b'revbranchcache', True):
  1720         addpartrevbranchcache(repo, bundler, outgoing)
  1722         addpartrevbranchcache(repo, bundler, outgoing)
  1721 
  1723 
  1722     if opts.get('obsolescence', False):
  1724     if opts.get(b'obsolescence', False):
  1723         obsmarkers = repo.obsstore.relevantmarkers(outgoing.missing)
  1725         obsmarkers = repo.obsstore.relevantmarkers(outgoing.missing)
  1724         buildobsmarkerspart(bundler, obsmarkers)
  1726         buildobsmarkerspart(bundler, obsmarkers)
  1725 
  1727 
  1726     if opts.get('phases', False):
  1728     if opts.get(b'phases', False):
  1727         headsbyphase = phases.subsetphaseheads(repo, outgoing.missing)
  1729         headsbyphase = phases.subsetphaseheads(repo, outgoing.missing)
  1728         phasedata = phases.binaryencode(headsbyphase)
  1730         phasedata = phases.binaryencode(headsbyphase)
  1729         bundler.newpart('phase-heads', data=phasedata)
  1731         bundler.newpart(b'phase-heads', data=phasedata)
  1730 
  1732 
  1731 
  1733 
  1732 def addparttagsfnodescache(repo, bundler, outgoing):
  1734 def addparttagsfnodescache(repo, bundler, outgoing):
  1733     # we include the tags fnode cache for the bundle changeset
  1735     # we include the tags fnode cache for the bundle changeset
  1734     # (as an optional part)
  1736     # (as an optional part)
  1749         fnode = cache.getfnode(node, computemissing=False)
  1751         fnode = cache.getfnode(node, computemissing=False)
  1750         if fnode is not None:
  1752         if fnode is not None:
  1751             chunks.extend([node, fnode])
  1753             chunks.extend([node, fnode])
  1752 
  1754 
  1753     if chunks:
  1755     if chunks:
  1754         bundler.newpart('hgtagsfnodes', data=''.join(chunks))
  1756         bundler.newpart(b'hgtagsfnodes', data=b''.join(chunks))
  1755 
  1757 
  1756 
  1758 
  1757 def addpartrevbranchcache(repo, bundler, outgoing):
  1759 def addpartrevbranchcache(repo, bundler, outgoing):
  1758     # we include the rev branch cache for the bundle changeset
  1760     # we include the rev branch cache for the bundle changeset
  1759     # (as an optional part)
  1761     # (as an optional part)
  1772             for n in sorted(nodes):
  1774             for n in sorted(nodes):
  1773                 yield n
  1775                 yield n
  1774             for n in sorted(closed):
  1776             for n in sorted(closed):
  1775                 yield n
  1777                 yield n
  1776 
  1778 
  1777     bundler.newpart('cache:rev-branch-cache', data=generate(), mandatory=False)
  1779     bundler.newpart(b'cache:rev-branch-cache', data=generate(), mandatory=False)
  1778 
  1780 
  1779 
  1781 
  1780 def _formatrequirementsspec(requirements):
  1782 def _formatrequirementsspec(requirements):
  1781     requirements = [req for req in requirements if req != "shared"]
  1783     requirements = [req for req in requirements if req != b"shared"]
  1782     return urlreq.quote(','.join(sorted(requirements)))
  1784     return urlreq.quote(b','.join(sorted(requirements)))
  1783 
  1785 
  1784 
  1786 
  1785 def _formatrequirementsparams(requirements):
  1787 def _formatrequirementsparams(requirements):
  1786     requirements = _formatrequirementsspec(requirements)
  1788     requirements = _formatrequirementsspec(requirements)
  1787     params = "%s%s" % (urlreq.quote("requirements="), requirements)
  1789     params = b"%s%s" % (urlreq.quote(b"requirements="), requirements)
  1788     return params
  1790     return params
  1789 
  1791 
  1790 
  1792 
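A worked example of the two helpers above, calling the stdlib quote directly for illustration (mercurial's urlreq wrapper keeps everything in bytes, but the escaping is the same):

from urllib.parse import quote

reqs = [b'store', b'revlogv1', b'shared']
spec = quote(b','.join(sorted(r for r in reqs if r != b'shared')))
assert spec == 'revlogv1%2Cstore'
params = '%s%s' % (quote(b'requirements='), spec)
assert params == 'requirements%3Drevlogv1%2Cstore'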
  1791 def addpartbundlestream2(bundler, repo, **kwargs):
  1793 def addpartbundlestream2(bundler, repo, **kwargs):
  1792     if not kwargs.get(r'stream', False):
  1794     if not kwargs.get(r'stream', False):
  1793         return
  1795         return
  1794 
  1796 
  1795     if not streamclone.allowservergeneration(repo):
  1797     if not streamclone.allowservergeneration(repo):
  1796         raise error.Abort(
  1798         raise error.Abort(
  1797             _(
  1799             _(
  1798                 'stream data requested but server does not allow '
  1800                 b'stream data requested but server does not allow '
  1799                 'this feature'
  1801                 b'this feature'
  1800             ),
  1802             ),
  1801             hint=_(
  1803             hint=_(
  1802                 'well-behaved clients should not be '
  1804                 b'well-behaved clients should not be '
  1803                 'requesting stream data from servers not '
  1805                 b'requesting stream data from servers not '
  1804                 'advertising it; the client may be buggy'
  1806                 b'advertising it; the client may be buggy'
  1805             ),
  1807             ),
  1806         )
  1808         )
  1807 
  1809 
  1808     # Stream clones don't compress well. And compression undermines a
  1810     # Stream clones don't compress well. And compression undermines a
  1809     # goal of stream clones, which is to be fast. Communicate the desire
  1811     # goal of stream clones, which is to be fast. Communicate the desire
  1813     # get the includes and excludes
  1815     # get the includes and excludes
  1814     includepats = kwargs.get(r'includepats')
  1816     includepats = kwargs.get(r'includepats')
  1815     excludepats = kwargs.get(r'excludepats')
  1817     excludepats = kwargs.get(r'excludepats')
  1816 
  1818 
  1817     narrowstream = repo.ui.configbool(
  1819     narrowstream = repo.ui.configbool(
  1818         'experimental', 'server.stream-narrow-clones'
  1820         b'experimental', b'server.stream-narrow-clones'
  1819     )
  1821     )
  1820 
  1822 
  1821     if (includepats or excludepats) and not narrowstream:
  1823     if (includepats or excludepats) and not narrowstream:
  1822         raise error.Abort(_('server does not support narrow stream clones'))
  1824         raise error.Abort(_(b'server does not support narrow stream clones'))
  1823 
  1825 
  1824     includeobsmarkers = False
  1826     includeobsmarkers = False
  1825     if repo.obsstore:
  1827     if repo.obsstore:
  1826         remoteversions = obsmarkersversion(bundler.capabilities)
  1828         remoteversions = obsmarkersversion(bundler.capabilities)
  1827         if not remoteversions:
  1829         if not remoteversions:
  1828             raise error.Abort(
  1830             raise error.Abort(
  1829                 _(
  1831                 _(
  1830                     'server has obsolescence markers, but client '
  1832                     b'server has obsolescence markers, but client '
  1831                     'cannot receive them via stream clone'
  1833                     b'cannot receive them via stream clone'
  1832                 )
  1834                 )
  1833             )
  1835             )
  1834         elif repo.obsstore._version in remoteversions:
  1836         elif repo.obsstore._version in remoteversions:
  1835             includeobsmarkers = True
  1837             includeobsmarkers = True
  1836 
  1838 
  1837     filecount, bytecount, it = streamclone.generatev2(
  1839     filecount, bytecount, it = streamclone.generatev2(
  1838         repo, includepats, excludepats, includeobsmarkers
  1840         repo, includepats, excludepats, includeobsmarkers
  1839     )
  1841     )
  1840     requirements = _formatrequirementsspec(repo.requirements)
  1842     requirements = _formatrequirementsspec(repo.requirements)
  1841     part = bundler.newpart('stream2', data=it)
  1843     part = bundler.newpart(b'stream2', data=it)
  1842     part.addparam('bytecount', '%d' % bytecount, mandatory=True)
  1844     part.addparam(b'bytecount', b'%d' % bytecount, mandatory=True)
  1843     part.addparam('filecount', '%d' % filecount, mandatory=True)
  1845     part.addparam(b'filecount', b'%d' % filecount, mandatory=True)
  1844     part.addparam('requirements', requirements, mandatory=True)
  1846     part.addparam(b'requirements', requirements, mandatory=True)
  1845 
  1847 
  1846 
  1848 
  1847 def buildobsmarkerspart(bundler, markers):
  1849 def buildobsmarkerspart(bundler, markers):
  1848     """add an obsmarker part to the bundler with <markers>
  1850     """add an obsmarker part to the bundler with <markers>
  1849 
  1851 
  1854         return None
  1856         return None
  1855 
  1857 
  1856     remoteversions = obsmarkersversion(bundler.capabilities)
  1858     remoteversions = obsmarkersversion(bundler.capabilities)
  1857     version = obsolete.commonversion(remoteversions)
  1859     version = obsolete.commonversion(remoteversions)
  1858     if version is None:
  1860     if version is None:
  1859         raise ValueError('bundler does not support common obsmarker format')
  1861         raise ValueError(b'bundler does not support common obsmarker format')
  1860     stream = obsolete.encodemarkers(markers, True, version=version)
  1862     stream = obsolete.encodemarkers(markers, True, version=version)
  1861     return bundler.newpart('obsmarkers', data=stream)
  1863     return bundler.newpart(b'obsmarkers', data=stream)
  1862 
  1864 
  1863 
  1865 
  1864 def writebundle(
  1866 def writebundle(
  1865     ui, cg, filename, bundletype, vfs=None, compression=None, compopts=None
  1867     ui, cg, filename, bundletype, vfs=None, compression=None, compopts=None
  1866 ):
  1868 ):
  1870     If no filename is specified, a temporary file is created.
  1872     If no filename is specified, a temporary file is created.
  1871     bz2 compression can be turned off.
  1873     bz2 compression can be turned off.
  1872     The bundle file will be deleted in case of errors.
  1874     The bundle file will be deleted in case of errors.
  1873     """
  1875     """
  1874 
  1876 
  1875     if bundletype == "HG20":
  1877     if bundletype == b"HG20":
  1876         bundle = bundle20(ui)
  1878         bundle = bundle20(ui)
  1877         bundle.setcompression(compression, compopts)
  1879         bundle.setcompression(compression, compopts)
  1878         part = bundle.newpart('changegroup', data=cg.getchunks())
  1880         part = bundle.newpart(b'changegroup', data=cg.getchunks())
  1879         part.addparam('version', cg.version)
  1881         part.addparam(b'version', cg.version)
  1880         if 'clcount' in cg.extras:
  1882         if b'clcount' in cg.extras:
  1881             part.addparam(
  1883             part.addparam(
  1882                 'nbchanges', '%d' % cg.extras['clcount'], mandatory=False
  1884                 b'nbchanges', b'%d' % cg.extras[b'clcount'], mandatory=False
  1883             )
  1885             )
  1884         chunkiter = bundle.getchunks()
  1886         chunkiter = bundle.getchunks()
  1885     else:
  1887     else:
  1886         # compression argument is only for the bundle2 case
  1888         # compression argument is only for the bundle2 case
  1887         assert compression is None
  1889         assert compression is None
  1888         if cg.version != '01':
  1890         if cg.version != b'01':
  1889             raise error.Abort(
  1891             raise error.Abort(
  1890                 _('old bundle types only supports v1 ' 'changegroups')
  1892                 _(b'old bundle types only supports v1 ' b'changegroups')
  1891             )
  1893             )
  1892         header, comp = bundletypes[bundletype]
  1894         header, comp = bundletypes[bundletype]
  1893         if comp not in util.compengines.supportedbundletypes:
  1895         if comp not in util.compengines.supportedbundletypes:
  1894             raise error.Abort(_('unknown stream compression type: %s') % comp)
  1896             raise error.Abort(_(b'unknown stream compression type: %s') % comp)
  1895         compengine = util.compengines.forbundletype(comp)
  1897         compengine = util.compengines.forbundletype(comp)
  1896 
  1898 
  1897         def chunkiter():
  1899         def chunkiter():
  1898             yield header
  1900             yield header
  1899             for chunk in compengine.compressstream(cg.getchunks(), compopts):
  1901             for chunk in compengine.compressstream(cg.getchunks(), compopts):
  1906     return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
  1908     return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
  1907 
  1909 
  1908 
  1910 
  1909 def combinechangegroupresults(op):
  1911 def combinechangegroupresults(op):
  1910     """logic to combine 0 or more addchangegroup results into one"""
  1912     """logic to combine 0 or more addchangegroup results into one"""
  1911     results = [r.get('return', 0) for r in op.records['changegroup']]
  1913     results = [r.get(b'return', 0) for r in op.records[b'changegroup']]
  1912     changedheads = 0
  1914     changedheads = 0
  1913     result = 1
  1915     result = 1
  1914     for ret in results:
  1916     for ret in results:
  1915         # If any changegroup result is 0, return 0
  1917         # If any changegroup result is 0, return 0
  1916         if ret == 0:
  1918         if ret == 0:
  1926         result = -1 + changedheads
  1928         result = -1 + changedheads
  1927     return result
  1929     return result
  1928 
  1930 
  1929 
  1931 
  1930 @parthandler(
  1932 @parthandler(
  1931     'changegroup', ('version', 'nbchanges', 'treemanifest', 'targetphase')
  1933     b'changegroup', (b'version', b'nbchanges', b'treemanifest', b'targetphase')
  1932 )
  1934 )
  1933 def handlechangegroup(op, inpart):
  1935 def handlechangegroup(op, inpart):
  1934     """apply a changegroup part on the repo
  1936     """apply a changegroup part on the repo
  1935 
  1937 
  1936     This is a very early implementation that will be massively reworked before being
  1938     This is a very early implementation that will be massively reworked before being
  1937     inflicted on any end-user.
  1939     inflicted on any end-user.
  1938     """
  1940     """
  1939     from . import localrepo
  1941     from . import localrepo
  1940 
  1942 
  1941     tr = op.gettransaction()
  1943     tr = op.gettransaction()
  1942     unpackerversion = inpart.params.get('version', '01')
  1944     unpackerversion = inpart.params.get(b'version', b'01')
  1943     # We should raise an appropriate exception here
  1945     # We should raise an appropriate exception here
  1944     cg = changegroup.getunbundler(unpackerversion, inpart, None)
  1946     cg = changegroup.getunbundler(unpackerversion, inpart, None)
  1945     # the source and url passed here are overwritten by the one contained in
  1947     # the source and url passed here are overwritten by the one contained in
  1946     # the transaction.hookargs argument. So 'bundle2' is a placeholder
  1948     # the transaction.hookargs argument. So 'bundle2' is a placeholder
  1947     nbchangesets = None
  1949     nbchangesets = None
  1948     if 'nbchanges' in inpart.params:
  1950     if b'nbchanges' in inpart.params:
  1949         nbchangesets = int(inpart.params.get('nbchanges'))
  1951         nbchangesets = int(inpart.params.get(b'nbchanges'))
  1950     if (
  1952     if (
  1951         'treemanifest' in inpart.params
  1953         b'treemanifest' in inpart.params
  1952         and 'treemanifest' not in op.repo.requirements
  1954         and b'treemanifest' not in op.repo.requirements
  1953     ):
  1955     ):
  1954         if len(op.repo.changelog) != 0:
  1956         if len(op.repo.changelog) != 0:
  1955             raise error.Abort(
  1957             raise error.Abort(
  1956                 _(
  1958                 _(
  1957                     "bundle contains tree manifests, but local repo is "
  1959                     b"bundle contains tree manifests, but local repo is "
  1958                     "non-empty and does not use tree manifests"
  1960                     b"non-empty and does not use tree manifests"
  1959                 )
  1961                 )
  1960             )
  1962             )
  1961         op.repo.requirements.add('treemanifest')
  1963         op.repo.requirements.add(b'treemanifest')
  1962         op.repo.svfs.options = localrepo.resolvestorevfsoptions(
  1964         op.repo.svfs.options = localrepo.resolvestorevfsoptions(
  1963             op.repo.ui, op.repo.requirements, op.repo.features
  1965             op.repo.ui, op.repo.requirements, op.repo.features
  1964         )
  1966         )
  1965         op.repo._writerequirements()
  1967         op.repo._writerequirements()
  1966     extrakwargs = {}
  1968     extrakwargs = {}
  1967     targetphase = inpart.params.get('targetphase')
  1969     targetphase = inpart.params.get(b'targetphase')
  1968     if targetphase is not None:
  1970     if targetphase is not None:
  1969         extrakwargs[r'targetphase'] = int(targetphase)
  1971         extrakwargs[r'targetphase'] = int(targetphase)
  1970     ret = _processchangegroup(
  1972     ret = _processchangegroup(
  1971         op,
  1973         op,
  1972         cg,
  1974         cg,
  1973         tr,
  1975         tr,
  1974         'bundle2',
  1976         b'bundle2',
  1975         'bundle2',
  1977         b'bundle2',
  1976         expectedtotal=nbchangesets,
  1978         expectedtotal=nbchangesets,
  1977         **extrakwargs
  1979         **extrakwargs
  1978     )
  1980     )
  1979     if op.reply is not None:
  1981     if op.reply is not None:
  1980         # This is definitely not the final form of this
  1982         # This is definitely not the final form of this
  1981         # return. But one needs to start somewhere.
  1983         # return. But one needs to start somewhere.
  1982         part = op.reply.newpart('reply:changegroup', mandatory=False)
  1984         part = op.reply.newpart(b'reply:changegroup', mandatory=False)
  1983         part.addparam(
  1985         part.addparam(
  1984             'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
  1986             b'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
  1985         )
  1987         )
  1986         part.addparam('return', '%i' % ret, mandatory=False)
  1988         part.addparam(b'return', b'%i' % ret, mandatory=False)
  1987     assert not inpart.read()
  1989     assert not inpart.read()
  1988 
  1990 
  1989 
  1991 
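The reply flow above follows a simple convention: the answer is a 'reply:changegroup' part whose 'in-reply-to' parameter echoes the id of the part being answered and whose 'return' parameter carries the changegroup result; handlereplychangegroup() further down parses those two parameters back into op.records. A toy round trip with plain dicts standing in for bundle2 parts (all names and values here are illustrative):

inpart_id = 7    # assumed id of the changegroup part being answered
ret = 1          # assumed result of applying the changegroup

reply_params = {b'in-reply-to': b'%d' % inpart_id, b'return': b'%d' % ret}

# Roughly what the reply handler records on the sending side:
records = {b'changegroup': []}
records[b'changegroup'].append(
    ({b'return': int(reply_params[b'return'])}, int(reply_params[b'in-reply-to']))
)
assert records[b'changegroup'][0] == ({b'return': 1}, 7)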
  1990 _remotechangegroupparams = tuple(
  1992 _remotechangegroupparams = tuple(
  1991     ['url', 'size', 'digests'] + ['digest:%s' % k for k in util.DIGESTS.keys()]
  1993     [b'url', b'size', b'digests']
       
  1994     + [b'digest:%s' % k for k in util.DIGESTS.keys()]
  1992 )
  1995 )
  1993 
  1996 
  1994 
  1997 
  1995 @parthandler('remote-changegroup', _remotechangegroupparams)
  1998 @parthandler(b'remote-changegroup', _remotechangegroupparams)
  1996 def handleremotechangegroup(op, inpart):
  1999 def handleremotechangegroup(op, inpart):
  1997     """apply a bundle10 on the repo, given an url and validation information
  2000     """apply a bundle10 on the repo, given an url and validation information
  1998 
  2001 
  1999     All the information about the remote bundle to import are given as
  2002     All the information about the remote bundle to import are given as
  2000     parameters. The parameters include:
  2003     parameters. The parameters include:
  2008         the client matches what the server knows about the bundle.
  2011         the client matches what the server knows about the bundle.
  2009 
  2012 
  2010     When multiple digest types are given, all of them are checked.
  2013     When multiple digest types are given, all of them are checked.
  2011     """
  2014     """
  2012     try:
  2015     try:
  2013         raw_url = inpart.params['url']
  2016         raw_url = inpart.params[b'url']
  2014     except KeyError:
  2017     except KeyError:
  2015         raise error.Abort(_('remote-changegroup: missing "%s" param') % 'url')
  2018         raise error.Abort(_(b'remote-changegroup: missing "%s" param') % b'url')
  2016     parsed_url = util.url(raw_url)
  2019     parsed_url = util.url(raw_url)
  2017     if parsed_url.scheme not in capabilities['remote-changegroup']:
  2020     if parsed_url.scheme not in capabilities[b'remote-changegroup']:
  2018         raise error.Abort(
  2021         raise error.Abort(
  2019             _('remote-changegroup does not support %s urls') % parsed_url.scheme
  2022             _(b'remote-changegroup does not support %s urls')
       
  2023             % parsed_url.scheme
  2020         )
  2024         )
  2021 
  2025 
  2022     try:
  2026     try:
  2023         size = int(inpart.params['size'])
  2027         size = int(inpart.params[b'size'])
  2024     except ValueError:
  2028     except ValueError:
  2025         raise error.Abort(
  2029         raise error.Abort(
  2026             _('remote-changegroup: invalid value for param "%s"') % 'size'
  2030             _(b'remote-changegroup: invalid value for param "%s"') % b'size'
  2027         )
  2031         )
  2028     except KeyError:
  2032     except KeyError:
  2029         raise error.Abort(_('remote-changegroup: missing "%s" param') % 'size')
  2033         raise error.Abort(
       
  2034             _(b'remote-changegroup: missing "%s" param') % b'size'
       
  2035         )
  2030 
  2036 
  2031     digests = {}
  2037     digests = {}
  2032     for typ in inpart.params.get('digests', '').split():
  2038     for typ in inpart.params.get(b'digests', b'').split():
  2033         param = 'digest:%s' % typ
  2039         param = b'digest:%s' % typ
  2034         try:
  2040         try:
  2035             value = inpart.params[param]
  2041             value = inpart.params[param]
  2036         except KeyError:
  2042         except KeyError:
  2037             raise error.Abort(
  2043             raise error.Abort(
  2038                 _('remote-changegroup: missing "%s" param') % param
  2044                 _(b'remote-changegroup: missing "%s" param') % param
  2039             )
  2045             )
  2040         digests[typ] = value
  2046         digests[typ] = value
  2041 
  2047 
  2042     real_part = util.digestchecker(url.open(op.ui, raw_url), size, digests)
  2048     real_part = util.digestchecker(url.open(op.ui, raw_url), size, digests)
  2043 
  2049 
  2045     from . import exchange
  2051     from . import exchange
  2046 
  2052 
  2047     cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
  2053     cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
  2048     if not isinstance(cg, changegroup.cg1unpacker):
  2054     if not isinstance(cg, changegroup.cg1unpacker):
  2049         raise error.Abort(
  2055         raise error.Abort(
  2050             _('%s: not a bundle version 1.0') % util.hidepassword(raw_url)
  2056             _(b'%s: not a bundle version 1.0') % util.hidepassword(raw_url)
  2051         )
  2057         )
  2052     ret = _processchangegroup(op, cg, tr, 'bundle2', 'bundle2')
  2058     ret = _processchangegroup(op, cg, tr, b'bundle2', b'bundle2')
  2053     if op.reply is not None:
  2059     if op.reply is not None:
  2054         # This is definitely not the final form of this
  2060         # This is definitely not the final form of this
  2055         # return. But one needs to start somewhere.
  2061         # return. But one needs to start somewhere.
  2056         part = op.reply.newpart('reply:changegroup')
  2062         part = op.reply.newpart(b'reply:changegroup')
  2057         part.addparam(
  2063         part.addparam(
  2058             'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
  2064             b'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
  2059         )
  2065         )
  2060         part.addparam('return', '%i' % ret, mandatory=False)
  2066         part.addparam(b'return', b'%i' % ret, mandatory=False)
  2061     try:
  2067     try:
  2062         real_part.validate()
  2068         real_part.validate()
  2063     except error.Abort as e:
  2069     except error.Abort as e:
  2064         raise error.Abort(
  2070         raise error.Abort(
  2065             _('bundle at %s is corrupted:\n%s')
  2071             _(b'bundle at %s is corrupted:\n%s')
  2066             % (util.hidepassword(raw_url), bytes(e))
  2072             % (util.hidepassword(raw_url), bytes(e))
  2067         )
  2073         )
  2068     assert not inpart.read()
  2074     assert not inpart.read()
  2069 
  2075 
  2070 
  2076 
  2071 @parthandler('reply:changegroup', ('return', 'in-reply-to'))
  2077 @parthandler(b'reply:changegroup', (b'return', b'in-reply-to'))
  2072 def handlereplychangegroup(op, inpart):
  2078 def handlereplychangegroup(op, inpart):
  2073     ret = int(inpart.params['return'])
  2079     ret = int(inpart.params[b'return'])
  2074     replyto = int(inpart.params['in-reply-to'])
  2080     replyto = int(inpart.params[b'in-reply-to'])
  2075     op.records.add('changegroup', {'return': ret}, replyto)
  2081     op.records.add(b'changegroup', {b'return': ret}, replyto)
  2076 
  2082 
  2077 
  2083 
  2078 @parthandler('check:bookmarks')
  2084 @parthandler(b'check:bookmarks')
  2079 def handlecheckbookmarks(op, inpart):
  2085 def handlecheckbookmarks(op, inpart):
  2080     """check location of bookmarks
  2086     """check location of bookmarks
  2081 
  2087 
  2082     This part is used to detect push races on bookmarks: it
  2088     This part is used to detect push races on bookmarks: it
  2083     contains binary encoded (bookmark, node) tuples. If the local state does
  2089     contains binary encoded (bookmark, node) tuples. If the local state does
  2084     not match the one in the part, a PushRaced exception is raised
  2090     not match the one in the part, a PushRaced exception is raised
  2085     """
  2091     """
  2086     bookdata = bookmarks.binarydecode(inpart)
  2092     bookdata = bookmarks.binarydecode(inpart)
  2087 
  2093 
  2088     msgstandard = (
  2094     msgstandard = (
  2089         'remote repository changed while pushing - please try again '
  2095         b'remote repository changed while pushing - please try again '
  2090         '(bookmark "%s" move from %s to %s)'
  2096         b'(bookmark "%s" move from %s to %s)'
  2091     )
  2097     )
  2092     msgmissing = (
  2098     msgmissing = (
  2093         'remote repository changed while pushing - please try again '
  2099         b'remote repository changed while pushing - please try again '
  2094         '(bookmark "%s" is missing, expected %s)'
  2100         b'(bookmark "%s" is missing, expected %s)'
  2095     )
  2101     )
  2096     msgexist = (
  2102     msgexist = (
  2097         'remote repository changed while pushing - please try again '
  2103         b'remote repository changed while pushing - please try again '
  2098         '(bookmark "%s" set on %s, expected missing)'
  2104         b'(bookmark "%s" set on %s, expected missing)'
  2099     )
  2105     )
  2100     for book, node in bookdata:
  2106     for book, node in bookdata:
  2101         currentnode = op.repo._bookmarks.get(book)
  2107         currentnode = op.repo._bookmarks.get(book)
  2102         if currentnode != node:
  2108         if currentnode != node:
  2103             if node is None:
  2109             if node is None:
  2111                     nodemod.short(currentnode),
  2117                     nodemod.short(currentnode),
  2112                 )
  2118                 )
  2113             raise error.PushRaced(finalmsg)
  2119             raise error.PushRaced(finalmsg)
  2114 
  2120 
  2115 
  2121 
  2116 @parthandler('check:heads')
  2122 @parthandler(b'check:heads')
  2117 def handlecheckheads(op, inpart):
  2123 def handlecheckheads(op, inpart):
  2118     """check that head of the repo did not change
  2124     """check that head of the repo did not change
  2119 
  2125 
  2120     This is used to detect a push race when using unbundle.
  2126     This is used to detect a push race when using unbundle.
  2121     This replaces the "heads" argument of unbundle."""
  2127     This replaces the "heads" argument of unbundle."""
  2124     while len(h) == 20:
  2130     while len(h) == 20:
  2125         heads.append(h)
  2131         heads.append(h)
  2126         h = inpart.read(20)
  2132         h = inpart.read(20)
  2127     assert not h
  2133     assert not h
  2128     # Trigger a transaction so that we are guaranteed to have the lock now.
  2134     # Trigger a transaction so that we are guaranteed to have the lock now.
  2129     if op.ui.configbool('experimental', 'bundle2lazylocking'):
  2135     if op.ui.configbool(b'experimental', b'bundle2lazylocking'):
  2130         op.gettransaction()
  2136         op.gettransaction()
  2131     if sorted(heads) != sorted(op.repo.heads()):
  2137     if sorted(heads) != sorted(op.repo.heads()):
  2132         raise error.PushRaced(
  2138         raise error.PushRaced(
  2133             'remote repository changed while pushing - ' 'please try again'
  2139             b'remote repository changed while pushing - ' b'please try again'
  2134         )
  2140         )
  2135 
  2141 
  2136 
  2142 
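For reference, the body of a 'check:heads' part carries no framing of its own: it is simply a run of 20-byte binary node hashes, which is why the loop above reads in fixed 20-byte steps until the stream runs dry. The same loop exercised in isolation against an in-memory stream:

import io

body = io.BytesIO(b'\x11' * 20 + b'\x22' * 20)  # two fake binary nodes
heads = []
h = body.read(20)
while len(h) == 20:
    heads.append(h)
    h = body.read(20)
assert heads == [b'\x11' * 20, b'\x22' * 20]
assert not h  # stream fully consumed, no trailing bytes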
  2137 @parthandler('check:updated-heads')
  2143 @parthandler(b'check:updated-heads')
  2138 def handlecheckupdatedheads(op, inpart):
  2144 def handlecheckupdatedheads(op, inpart):
  2139     """check for race on the heads touched by a push
  2145     """check for race on the heads touched by a push
  2140 
  2146 
  2141     This is similar to 'check:heads' but focuses on the heads actually updated
  2147     This is similar to 'check:heads' but focuses on the heads actually updated
  2142     during the push. If other activities happen on unrelated heads, it is
  2148     during the push. If other activities happen on unrelated heads, it is
  2149     while len(h) == 20:
  2155     while len(h) == 20:
  2150         heads.append(h)
  2156         heads.append(h)
  2151         h = inpart.read(20)
  2157         h = inpart.read(20)
  2152     assert not h
  2158     assert not h
  2153     # trigger a transaction so that we are guaranteed to have the lock now.
  2159     # trigger a transaction so that we are guaranteed to have the lock now.
  2154     if op.ui.configbool('experimental', 'bundle2lazylocking'):
  2160     if op.ui.configbool(b'experimental', b'bundle2lazylocking'):
  2155         op.gettransaction()
  2161         op.gettransaction()
  2156 
  2162 
  2157     currentheads = set()
  2163     currentheads = set()
  2158     for ls in op.repo.branchmap().iterheads():
  2164     for ls in op.repo.branchmap().iterheads():
  2159         currentheads.update(ls)
  2165         currentheads.update(ls)
  2160 
  2166 
  2161     for h in heads:
  2167     for h in heads:
  2162         if h not in currentheads:
  2168         if h not in currentheads:
  2163             raise error.PushRaced(
  2169             raise error.PushRaced(
  2164                 'remote repository changed while pushing - ' 'please try again'
  2170                 b'remote repository changed while pushing - '
       
  2171                 b'please try again'
  2165             )
  2172             )
  2166 
  2173 
  2167 
  2174 
  2168 @parthandler('check:phases')
  2175 @parthandler(b'check:phases')
  2169 def handlecheckphases(op, inpart):
  2176 def handlecheckphases(op, inpart):
  2170     """check that phase boundaries of the repository did not change
  2177     """check that phase boundaries of the repository did not change
  2171 
  2178 
  2172     This is used to detect a push race.
  2179     This is used to detect a push race.
  2173     """
  2180     """
  2174     phasetonodes = phases.binarydecode(inpart)
  2181     phasetonodes = phases.binarydecode(inpart)
  2175     unfi = op.repo.unfiltered()
  2182     unfi = op.repo.unfiltered()
  2176     cl = unfi.changelog
  2183     cl = unfi.changelog
  2177     phasecache = unfi._phasecache
  2184     phasecache = unfi._phasecache
  2178     msg = (
  2185     msg = (
  2179         'remote repository changed while pushing - please try again '
  2186         b'remote repository changed while pushing - please try again '
  2180         '(%s is %s expected %s)'
  2187         b'(%s is %s expected %s)'
  2181     )
  2188     )
  2182     for expectedphase, nodes in enumerate(phasetonodes):
  2189     for expectedphase, nodes in enumerate(phasetonodes):
  2183         for n in nodes:
  2190         for n in nodes:
  2184             actualphase = phasecache.phase(unfi, cl.rev(n))
  2191             actualphase = phasecache.phase(unfi, cl.rev(n))
  2185             if actualphase != expectedphase:
  2192             if actualphase != expectedphase:
  2189                     phases.phasenames[expectedphase],
  2196                     phases.phasenames[expectedphase],
  2190                 )
  2197                 )
  2191                 raise error.PushRaced(finalmsg)
  2198                 raise error.PushRaced(finalmsg)
  2192 
  2199 
  2193 
  2200 
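check:phases decodes the part into a list indexed by phase number and verifies each node still carries the phase the pusher observed. A rough sketch of that comparison with plain data structures; the phase names and the node-to-phase lookup are simplified assumptions, not the phasecache API:

PHASENAMES = ('public', 'draft', 'secret')

class PushRaced(Exception):
    pass

def check_phases(phasetonodes, actualphases):
    """phasetonodes: list where index i holds nodes expected in phase i;
    actualphases: mapping of node -> phase index as the repo sees it now."""
    for expectedphase, nodes in enumerate(phasetonodes):
        for n in nodes:
            actualphase = actualphases[n]
            if actualphase != expectedphase:
                raise PushRaced(
                    'remote repository changed while pushing - please try '
                    'again (%s is %s expected %s)'
                    % (n[:6].hex(), PHASENAMES[actualphase],
                       PHASENAMES[expectedphase]))

node = b'\xaa' * 20
# The pusher saw the node as draft, but it has since been published.
try:
    check_phases([[], [node], []], {node: 0})
except PushRaced as exc:
    print(exc)
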
  2194 @parthandler('output')
  2201 @parthandler(b'output')
  2195 def handleoutput(op, inpart):
  2202 def handleoutput(op, inpart):
  2196     """forward output captured on the server to the client"""
  2203     """forward output captured on the server to the client"""
  2197     for line in inpart.read().splitlines():
  2204     for line in inpart.read().splitlines():
  2198         op.ui.status(_('remote: %s\n') % line)
  2205         op.ui.status(_(b'remote: %s\n') % line)
  2199 
  2206 
  2200 
  2207 
  2201 @parthandler('replycaps')
  2208 @parthandler(b'replycaps')
  2202 def handlereplycaps(op, inpart):
  2209 def handlereplycaps(op, inpart):
  2203     """Notify that a reply bundle should be created
  2210     """Notify that a reply bundle should be created
  2204 
  2211 
  2205     The payload contains the capabilities information for the reply"""
  2212     The payload contains the capabilities information for the reply"""
  2206     caps = decodecaps(inpart.read())
  2213     caps = decodecaps(inpart.read())
  2210 
  2217 
  2211 class AbortFromPart(error.Abort):
  2218 class AbortFromPart(error.Abort):
  2212     """Sub-class of Abort that denotes an error from a bundle2 part."""
  2219     """Sub-class of Abort that denotes an error from a bundle2 part."""
  2213 
  2220 
  2214 
  2221 
  2215 @parthandler('error:abort', ('message', 'hint'))
  2222 @parthandler(b'error:abort', (b'message', b'hint'))
  2216 def handleerrorabort(op, inpart):
  2223 def handleerrorabort(op, inpart):
  2217     """Used to transmit abort error over the wire"""
  2224     """Used to transmit abort error over the wire"""
  2218     raise AbortFromPart(
  2225     raise AbortFromPart(
  2219         inpart.params['message'], hint=inpart.params.get('hint')
  2226         inpart.params[b'message'], hint=inpart.params.get(b'hint')
  2220     )
  2227     )
  2221 
  2228 
  2222 
  2229 
  2223 @parthandler(
  2230 @parthandler(
  2224     'error:pushkey', ('namespace', 'key', 'new', 'old', 'ret', 'in-reply-to')
  2231     b'error:pushkey',
       
  2232     (b'namespace', b'key', b'new', b'old', b'ret', b'in-reply-to'),
  2225 )
  2233 )
  2226 def handleerrorpushkey(op, inpart):
  2234 def handleerrorpushkey(op, inpart):
  2227     """Used to transmit failure of a mandatory pushkey over the wire"""
  2235     """Used to transmit failure of a mandatory pushkey over the wire"""
  2228     kwargs = {}
  2236     kwargs = {}
  2229     for name in ('namespace', 'key', 'new', 'old', 'ret'):
  2237     for name in (b'namespace', b'key', b'new', b'old', b'ret'):
  2230         value = inpart.params.get(name)
  2238         value = inpart.params.get(name)
  2231         if value is not None:
  2239         if value is not None:
  2232             kwargs[name] = value
  2240             kwargs[name] = value
  2233     raise error.PushkeyFailed(
  2241     raise error.PushkeyFailed(
  2234         inpart.params['in-reply-to'], **pycompat.strkwargs(kwargs)
  2242         inpart.params[b'in-reply-to'], **pycompat.strkwargs(kwargs)
  2235     )
  2243     )
  2236 
  2244 
  2237 
  2245 
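Handlers like the one above collect part parameters into a dict and splat it into an exception constructor through pycompat.strkwargs, because on Python 3 keyword-argument names must be native str while bundle2 parameter names are bytes. A hedged sketch of that key conversion, assuming a latin-1 round-trip for the names (the helper and exception below are illustrative stand-ins, not Mercurial's implementations):

def str_kwargs(dic):
    # Turn bytes keys (bundle2 parameter names) into native str keys so the
    # dict can be used as **kwargs on Python 3.
    return {k.decode('latin-1'): v for k, v in dic.items()}

class PushkeyFailed(Exception):
    def __init__(self, partid, namespace=None, key=None, new=None, old=None,
                 ret=None):
        super().__init__('pushkey %r failed in reply to part %r'
                         % (key, partid))

params = {b'namespace': b'bookmarks', b'key': b'@', b'ret': b'0'}
kwargs = {k: v for k, v in params.items() if v is not None}
try:
    raise PushkeyFailed(b'1', **str_kwargs(kwargs))
except PushkeyFailed as exc:
    print(exc)
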
  2238 @parthandler('error:unsupportedcontent', ('parttype', 'params'))
  2246 @parthandler(b'error:unsupportedcontent', (b'parttype', b'params'))
  2239 def handleerrorunsupportedcontent(op, inpart):
  2247 def handleerrorunsupportedcontent(op, inpart):
  2240     """Used to transmit unknown content error over the wire"""
  2248     """Used to transmit unknown content error over the wire"""
  2241     kwargs = {}
  2249     kwargs = {}
  2242     parttype = inpart.params.get('parttype')
  2250     parttype = inpart.params.get(b'parttype')
  2243     if parttype is not None:
  2251     if parttype is not None:
  2244         kwargs['parttype'] = parttype
  2252         kwargs[b'parttype'] = parttype
  2245     params = inpart.params.get('params')
  2253     params = inpart.params.get(b'params')
  2246     if params is not None:
  2254     if params is not None:
  2247         kwargs['params'] = params.split('\0')
  2255         kwargs[b'params'] = params.split(b'\0')
  2248 
  2256 
  2249     raise error.BundleUnknownFeatureError(**pycompat.strkwargs(kwargs))
  2257     raise error.BundleUnknownFeatureError(**pycompat.strkwargs(kwargs))
  2250 
  2258 
  2251 
  2259 
  2252 @parthandler('error:pushraced', ('message',))
  2260 @parthandler(b'error:pushraced', (b'message',))
  2253 def handleerrorpushraced(op, inpart):
  2261 def handleerrorpushraced(op, inpart):
  2254     """Used to transmit push race error over the wire"""
  2262     """Used to transmit push race error over the wire"""
  2255     raise error.ResponseError(_('push failed:'), inpart.params['message'])
  2263     raise error.ResponseError(_(b'push failed:'), inpart.params[b'message'])
  2256 
  2264 
  2257 
  2265 
  2258 @parthandler('listkeys', ('namespace',))
  2266 @parthandler(b'listkeys', (b'namespace',))
  2259 def handlelistkeys(op, inpart):
  2267 def handlelistkeys(op, inpart):
  2260     """retrieve pushkey namespace content stored in a bundle2"""
  2268     """retrieve pushkey namespace content stored in a bundle2"""
  2261     namespace = inpart.params['namespace']
  2269     namespace = inpart.params[b'namespace']
  2262     r = pushkey.decodekeys(inpart.read())
  2270     r = pushkey.decodekeys(inpart.read())
  2263     op.records.add('listkeys', (namespace, r))
  2271     op.records.add(b'listkeys', (namespace, r))
  2264 
  2272 
  2265 
  2273 
  2266 @parthandler('pushkey', ('namespace', 'key', 'old', 'new'))
  2274 @parthandler(b'pushkey', (b'namespace', b'key', b'old', b'new'))
  2267 def handlepushkey(op, inpart):
  2275 def handlepushkey(op, inpart):
  2268     """process a pushkey request"""
  2276     """process a pushkey request"""
  2269     dec = pushkey.decode
  2277     dec = pushkey.decode
  2270     namespace = dec(inpart.params['namespace'])
  2278     namespace = dec(inpart.params[b'namespace'])
  2271     key = dec(inpart.params['key'])
  2279     key = dec(inpart.params[b'key'])
  2272     old = dec(inpart.params['old'])
  2280     old = dec(inpart.params[b'old'])
  2273     new = dec(inpart.params['new'])
  2281     new = dec(inpart.params[b'new'])
  2274     # Grab the transaction to ensure that we have the lock before performing the
  2282     # Grab the transaction to ensure that we have the lock before performing the
  2275     # pushkey.
  2283     # pushkey.
  2276     if op.ui.configbool('experimental', 'bundle2lazylocking'):
  2284     if op.ui.configbool(b'experimental', b'bundle2lazylocking'):
  2277         op.gettransaction()
  2285         op.gettransaction()
  2278     ret = op.repo.pushkey(namespace, key, old, new)
  2286     ret = op.repo.pushkey(namespace, key, old, new)
  2279     record = {'namespace': namespace, 'key': key, 'old': old, 'new': new}
  2287     record = {b'namespace': namespace, b'key': key, b'old': old, b'new': new}
  2280     op.records.add('pushkey', record)
  2288     op.records.add(b'pushkey', record)
  2281     if op.reply is not None:
  2289     if op.reply is not None:
  2282         rpart = op.reply.newpart('reply:pushkey')
  2290         rpart = op.reply.newpart(b'reply:pushkey')
  2283         rpart.addparam(
  2291         rpart.addparam(
  2284             'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
  2292             b'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
  2285         )
  2293         )
  2286         rpart.addparam('return', '%i' % ret, mandatory=False)
  2294         rpart.addparam(b'return', b'%i' % ret, mandatory=False)
  2287     if inpart.mandatory and not ret:
  2295     if inpart.mandatory and not ret:
  2288         kwargs = {}
  2296         kwargs = {}
  2289         for key in ('namespace', 'key', 'new', 'old', 'ret'):
  2297         for key in (b'namespace', b'key', b'new', b'old', b'ret'):
  2290             if key in inpart.params:
  2298             if key in inpart.params:
  2291                 kwargs[key] = inpart.params[key]
  2299                 kwargs[key] = inpart.params[key]
  2292         raise error.PushkeyFailed(
  2300         raise error.PushkeyFailed(
  2293             partid='%d' % inpart.id, **pycompat.strkwargs(kwargs)
  2301             partid=b'%d' % inpart.id, **pycompat.strkwargs(kwargs)
  2294         )
  2302         )
  2295 
  2303 
  2296 
  2304 
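handlepushkey applies the requested key change and, when the part was mandatory but the change was refused, escalates the refusal into an exception tied to the part id instead of merely recording it. A condensed, standalone sketch of that control flow; pushkey_apply and PushkeyFailed are placeholders with assumed semantics, not the repository call or error class used above:

class PushkeyFailed(Exception):
    pass

def pushkey_apply(store, namespace, key, old, new):
    # Toy pushkey: only update when the stored value still matches 'old'.
    ns = store.setdefault(namespace, {})
    if ns.get(key, b'') != old:
        return False
    ns[key] = new
    return True

def handle_pushkey(store, params, mandatory=True):
    ret = pushkey_apply(store, params[b'namespace'], params[b'key'],
                        params[b'old'], params[b'new'])
    if mandatory and not ret:
        # A refused mandatory pushkey aborts processing rather than being
        # silently recorded for the reply bundle.
        raise PushkeyFailed('pushkey %r refused' % params[b'key'])
    return {b'return': ret}

store = {b'bookmarks': {b'@': b'aaaa'}}
print(handle_pushkey(store, {b'namespace': b'bookmarks', b'key': b'@',
                             b'old': b'aaaa', b'new': b'bbbb'}))
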
  2297 @parthandler('bookmarks')
  2305 @parthandler(b'bookmarks')
  2298 def handlebookmark(op, inpart):
  2306 def handlebookmark(op, inpart):
  2299     """transmit bookmark information
  2307     """transmit bookmark information
  2300 
  2308 
  2301     The part contains binary encoded bookmark information.
  2309     The part contains binary encoded bookmark information.
  2302 
  2310 
  2311     When mode is 'records', the information is recorded into the 'bookmarks'
  2319     When mode is 'records', the information is recorded into the 'bookmarks'
  2312     records of the bundle operation. This behavior is suitable for pulling.
  2320     records of the bundle operation. This behavior is suitable for pulling.
  2313     """
  2321     """
  2314     changes = bookmarks.binarydecode(inpart)
  2322     changes = bookmarks.binarydecode(inpart)
  2315 
  2323 
  2316     pushkeycompat = op.repo.ui.configbool('server', 'bookmarks-pushkey-compat')
  2324     pushkeycompat = op.repo.ui.configbool(
  2317     bookmarksmode = op.modes.get('bookmarks', 'apply')
  2325         b'server', b'bookmarks-pushkey-compat'
  2318 
  2326     )
  2319     if bookmarksmode == 'apply':
  2327     bookmarksmode = op.modes.get(b'bookmarks', b'apply')
       
  2328 
       
  2329     if bookmarksmode == b'apply':
  2320         tr = op.gettransaction()
  2330         tr = op.gettransaction()
  2321         bookstore = op.repo._bookmarks
  2331         bookstore = op.repo._bookmarks
  2322         if pushkeycompat:
  2332         if pushkeycompat:
  2323             allhooks = []
  2333             allhooks = []
  2324             for book, node in changes:
  2334             for book, node in changes:
  2325                 hookargs = tr.hookargs.copy()
  2335                 hookargs = tr.hookargs.copy()
  2326                 hookargs['pushkeycompat'] = '1'
  2336                 hookargs[b'pushkeycompat'] = b'1'
  2327                 hookargs['namespace'] = 'bookmarks'
  2337                 hookargs[b'namespace'] = b'bookmarks'
  2328                 hookargs['key'] = book
  2338                 hookargs[b'key'] = book
  2329                 hookargs['old'] = nodemod.hex(bookstore.get(book, ''))
  2339                 hookargs[b'old'] = nodemod.hex(bookstore.get(book, b''))
  2330                 hookargs['new'] = nodemod.hex(node if node is not None else '')
  2340                 hookargs[b'new'] = nodemod.hex(
       
  2341                     node if node is not None else b''
       
  2342                 )
  2331                 allhooks.append(hookargs)
  2343                 allhooks.append(hookargs)
  2332 
  2344 
  2333             for hookargs in allhooks:
  2345             for hookargs in allhooks:
  2334                 op.repo.hook(
  2346                 op.repo.hook(
  2335                     'prepushkey', throw=True, **pycompat.strkwargs(hookargs)
  2347                     b'prepushkey', throw=True, **pycompat.strkwargs(hookargs)
  2336                 )
  2348                 )
  2337 
  2349 
  2338         bookstore.applychanges(op.repo, op.gettransaction(), changes)
  2350         bookstore.applychanges(op.repo, op.gettransaction(), changes)
  2339 
  2351 
  2340         if pushkeycompat:
  2352         if pushkeycompat:
  2341 
  2353 
  2342             def runhook():
  2354             def runhook():
  2343                 for hookargs in allhooks:
  2355                 for hookargs in allhooks:
  2344                     op.repo.hook('pushkey', **pycompat.strkwargs(hookargs))
  2356                     op.repo.hook(b'pushkey', **pycompat.strkwargs(hookargs))
  2345 
  2357 
  2346             op.repo._afterlock(runhook)
  2358             op.repo._afterlock(runhook)
  2347 
  2359 
  2348     elif bookmarksmode == 'records':
  2360     elif bookmarksmode == b'records':
  2349         for book, node in changes:
  2361         for book, node in changes:
  2350             record = {'bookmark': book, 'node': node}
  2362             record = {b'bookmark': book, b'node': node}
  2351             op.records.add('bookmarks', record)
  2363             op.records.add(b'bookmarks', record)
  2352     else:
  2364     else:
  2353         raise error.ProgrammingError('unknown bookmark mode: %s' % bookmarksmode)
  2365         raise error.ProgrammingError(
  2354 
  2366             b'unknown bookmark mode: %s' % bookmarksmode
  2355 
  2367         )
  2356 @parthandler('phase-heads')
  2368 
       
  2369 
       
  2370 @parthandler(b'phase-heads')
  2357 def handlephases(op, inpart):
  2371 def handlephases(op, inpart):
  2358     """apply phases from bundle part to repo"""
  2372     """apply phases from bundle part to repo"""
  2359     headsbyphase = phases.binarydecode(inpart)
  2373     headsbyphase = phases.binarydecode(inpart)
  2360     phases.updatephases(op.repo.unfiltered(), op.gettransaction, headsbyphase)
  2374     phases.updatephases(op.repo.unfiltered(), op.gettransaction, headsbyphase)
  2361 
  2375 
  2362 
  2376 
  2363 @parthandler('reply:pushkey', ('return', 'in-reply-to'))
  2377 @parthandler(b'reply:pushkey', (b'return', b'in-reply-to'))
  2364 def handlepushkeyreply(op, inpart):
  2378 def handlepushkeyreply(op, inpart):
  2365     """retrieve the result of a pushkey request"""
  2379     """retrieve the result of a pushkey request"""
  2366     ret = int(inpart.params['return'])
  2380     ret = int(inpart.params[b'return'])
  2367     partid = int(inpart.params['in-reply-to'])
  2381     partid = int(inpart.params[b'in-reply-to'])
  2368     op.records.add('pushkey', {'return': ret}, partid)
  2382     op.records.add(b'pushkey', {b'return': ret}, partid)
  2369 
  2383 
  2370 
  2384 
  2371 @parthandler('obsmarkers')
  2385 @parthandler(b'obsmarkers')
  2372 def handleobsmarker(op, inpart):
  2386 def handleobsmarker(op, inpart):
  2373     """add a stream of obsmarkers to the repo"""
  2387     """add a stream of obsmarkers to the repo"""
  2374     tr = op.gettransaction()
  2388     tr = op.gettransaction()
  2375     markerdata = inpart.read()
  2389     markerdata = inpart.read()
  2376     if op.ui.config('experimental', 'obsmarkers-exchange-debug'):
  2390     if op.ui.config(b'experimental', b'obsmarkers-exchange-debug'):
  2377         op.ui.write('obsmarker-exchange: %i bytes received\n' % len(markerdata))
  2391         op.ui.write(
       
  2392             b'obsmarker-exchange: %i bytes received\n' % len(markerdata)
       
  2393         )
  2378     # The mergemarkers call will crash if marker creation is not enabled.
  2394     # The mergemarkers call will crash if marker creation is not enabled.
  2379     # we want to avoid this if the part is advisory.
  2395     # we want to avoid this if the part is advisory.
  2380     if not inpart.mandatory and op.repo.obsstore.readonly:
  2396     if not inpart.mandatory and op.repo.obsstore.readonly:
  2381         op.repo.ui.debug('ignoring obsolescence markers, feature not enabled\n')
  2397         op.repo.ui.debug(
       
  2398             b'ignoring obsolescence markers, feature not enabled\n'
       
  2399         )
  2382         return
  2400         return
  2383     new = op.repo.obsstore.mergemarkers(tr, markerdata)
  2401     new = op.repo.obsstore.mergemarkers(tr, markerdata)
  2384     op.repo.invalidatevolatilesets()
  2402     op.repo.invalidatevolatilesets()
  2385     op.records.add('obsmarkers', {'new': new})
  2403     op.records.add(b'obsmarkers', {b'new': new})
  2386     if op.reply is not None:
  2404     if op.reply is not None:
  2387         rpart = op.reply.newpart('reply:obsmarkers')
  2405         rpart = op.reply.newpart(b'reply:obsmarkers')
  2388         rpart.addparam(
  2406         rpart.addparam(
  2389             'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
  2407             b'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
  2390         )
  2408         )
  2391         rpart.addparam('new', '%i' % new, mandatory=False)
  2409         rpart.addparam(b'new', b'%i' % new, mandatory=False)
  2392 
  2410 
  2393 
  2411 
  2394 @parthandler('reply:obsmarkers', ('new', 'in-reply-to'))
  2412 @parthandler(b'reply:obsmarkers', (b'new', b'in-reply-to'))
  2395 def handleobsmarkerreply(op, inpart):
  2413 def handleobsmarkerreply(op, inpart):
  2396     """retrieve the result of an obsmarkers request"""
  2414     """retrieve the result of an obsmarkers request"""
  2397     ret = int(inpart.params['new'])
  2415     ret = int(inpart.params[b'new'])
  2398     partid = int(inpart.params['in-reply-to'])
  2416     partid = int(inpart.params[b'in-reply-to'])
  2399     op.records.add('obsmarkers', {'new': ret}, partid)
  2417     op.records.add(b'obsmarkers', {b'new': ret}, partid)
  2400 
  2418 
  2401 
  2419 
  2402 @parthandler('hgtagsfnodes')
  2420 @parthandler(b'hgtagsfnodes')
  2403 def handlehgtagsfnodes(op, inpart):
  2421 def handlehgtagsfnodes(op, inpart):
  2404     """Applies .hgtags fnodes cache entries to the local repo.
  2422     """Applies .hgtags fnodes cache entries to the local repo.
  2405 
  2423 
  2406     Payload is pairs of 20-byte changeset nodes and filenodes.
  2424     Payload is pairs of 20-byte changeset nodes and filenodes.
  2407     """
  2425     """
  2408     # Grab the transaction so we ensure that we have the lock at this point.
  2426     # Grab the transaction so we ensure that we have the lock at this point.
  2409     if op.ui.configbool('experimental', 'bundle2lazylocking'):
  2427     if op.ui.configbool(b'experimental', b'bundle2lazylocking'):
  2410         op.gettransaction()
  2428         op.gettransaction()
  2411     cache = tags.hgtagsfnodescache(op.repo.unfiltered())
  2429     cache = tags.hgtagsfnodescache(op.repo.unfiltered())
  2412 
  2430 
  2413     count = 0
  2431     count = 0
  2414     while True:
  2432     while True:
  2415         node = inpart.read(20)
  2433         node = inpart.read(20)
  2416         fnode = inpart.read(20)
  2434         fnode = inpart.read(20)
  2417         if len(node) < 20 or len(fnode) < 20:
  2435         if len(node) < 20 or len(fnode) < 20:
  2418             op.ui.debug('ignoring incomplete received .hgtags fnodes data\n')
  2436             op.ui.debug(b'ignoring incomplete received .hgtags fnodes data\n')
  2419             break
  2437             break
  2420         cache.setfnode(node, fnode)
  2438         cache.setfnode(node, fnode)
  2421         count += 1
  2439         count += 1
  2422 
  2440 
  2423     cache.write()
  2441     cache.write()
  2424     op.ui.debug('applied %i hgtags fnodes cache entries\n' % count)
  2442     op.ui.debug(b'applied %i hgtags fnodes cache entries\n' % count)
  2425 
  2443 
  2426 
  2444 
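The hgtagsfnodes payload read above is a flat sequence of 20-byte changeset node and filenode pairs; a short read marks the end (or a truncated part) and simply stops processing. A self-contained sketch of that framing, again with io.BytesIO standing in for the part object:

import io

def read_fnode_pairs(part):
    # Yield (node, fnode) pairs; an incomplete trailing pair is dropped
    # rather than treated as fatal, mirroring the handler above.
    while True:
        node = part.read(20)
        fnode = part.read(20)
        if len(node) < 20 or len(fnode) < 20:
            return
        yield node, fnode

payload = io.BytesIO(b'\x01' * 20 + b'\x02' * 20   # one complete pair
                     + b'\x03' * 10)               # truncated tail, ignored
print(sum(1 for _ in read_fnode_pairs(payload)), 'pair(s) applied')
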
  2427 rbcstruct = struct.Struct('>III')
  2445 rbcstruct = struct.Struct(b'>III')
  2428 
  2446 
  2429 
  2447 
  2430 @parthandler('cache:rev-branch-cache')
  2448 @parthandler(b'cache:rev-branch-cache')
  2431 def handlerbc(op, inpart):
  2449 def handlerbc(op, inpart):
  2432     """receive a rev-branch-cache payload and update the local cache
  2450     """receive a rev-branch-cache payload and update the local cache
  2433 
  2451 
  2434     The payload is a series of data related to each branch
  2452     The payload is a series of data related to each branch
  2435 
  2453 
  2458             cache.setdata(branch, rev, node, True)
  2476             cache.setdata(branch, rev, node, True)
  2459         rawheader = inpart.read(rbcstruct.size)
  2477         rawheader = inpart.read(rbcstruct.size)
  2460     cache.write()
  2478     cache.write()
  2461 
  2479 
  2462 
  2480 
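The cache:rev-branch-cache payload is framed by the fixed '>III' header declared above (three big-endian 32-bit integers) followed by variable-length data, and the handler keeps reading headers until the part runs dry. Because this hunk does not show what the three fields mean, the sketch below only demonstrates the framing pattern, with assumed field names:

import io
import struct

rbcstruct = struct.Struct('>III')

def write_record(out, branch, nodes):
    # Assumed layout for illustration only: (branch name length, node
    # count, reserved) header, then the branch name, then 20-byte nodes.
    out.write(rbcstruct.pack(len(branch), len(nodes), 0))
    out.write(branch)
    for n in nodes:
        out.write(n)

def read_records(part):
    rawheader = part.read(rbcstruct.size)
    while rawheader:
        branchlen, nodecount, _reserved = rbcstruct.unpack(rawheader)
        branch = part.read(branchlen)
        nodes = [part.read(20) for _ in range(nodecount)]
        yield branch, nodes
        rawheader = part.read(rbcstruct.size)

buf = io.BytesIO()
write_record(buf, b'default', [b'\x01' * 20])
buf.seek(0)
print([(branch, len(nodes)) for branch, nodes in read_records(buf)])
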
  2463 @parthandler('pushvars')
  2481 @parthandler(b'pushvars')
  2464 def bundle2getvars(op, part):
  2482 def bundle2getvars(op, part):
  2465     '''unbundle a bundle2 containing shellvars on the server'''
  2483     '''unbundle a bundle2 containing shellvars on the server'''
  2466     # An option to disable unbundling on server-side for security reasons
  2484     # An option to disable unbundling on server-side for security reasons
  2467     if op.ui.configbool('push', 'pushvars.server'):
  2485     if op.ui.configbool(b'push', b'pushvars.server'):
  2468         hookargs = {}
  2486         hookargs = {}
  2469         for key, value in part.advisoryparams:
  2487         for key, value in part.advisoryparams:
  2470             key = key.upper()
  2488             key = key.upper()
  2471             # We want pushed variables to have USERVAR_ prepended so we know
  2489             # We want pushed variables to have USERVAR_ prepended so we know
  2472             # they came from the --pushvar flag.
  2490             # they came from the --pushvar flag.
  2473             key = "USERVAR_" + key
  2491             key = b"USERVAR_" + key
  2474             hookargs[key] = value
  2492             hookargs[key] = value
  2475         op.addhookargs(hookargs)
  2493         op.addhookargs(hookargs)
  2476 
  2494 
  2477 
  2495 
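The pushvars handler turns each advisory parameter into a hook argument, uppercasing the name and prefixing it with USERVAR_ so hooks can tell variables pushed via --pushvars apart from the regular hook environment. A small sketch of that transformation, modelling advisoryparams as a plain list of pairs:

def pushvars_to_hookargs(advisoryparams):
    hookargs = {}
    for key, value in advisoryparams:
        # The USERVAR_ prefix marks the variable's origin for hook scripts.
        hookargs[b'USERVAR_' + key.upper()] = value
    return hookargs

print(pushvars_to_hookargs([(b'debug', b'1'), (b'ticket', b'BUG-42')]))
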
  2478 @parthandler('stream2', ('requirements', 'filecount', 'bytecount'))
  2496 @parthandler(b'stream2', (b'requirements', b'filecount', b'bytecount'))
  2479 def handlestreamv2bundle(op, part):
  2497 def handlestreamv2bundle(op, part):
  2480 
  2498 
  2481     requirements = urlreq.unquote(part.params['requirements']).split(',')
  2499     requirements = urlreq.unquote(part.params[b'requirements']).split(b',')
  2482     filecount = int(part.params['filecount'])
  2500     filecount = int(part.params[b'filecount'])
  2483     bytecount = int(part.params['bytecount'])
  2501     bytecount = int(part.params[b'bytecount'])
  2484 
  2502 
  2485     repo = op.repo
  2503     repo = op.repo
  2486     if len(repo):
  2504     if len(repo):
  2487         msg = _('cannot apply stream clone to non-empty repository')
  2505         msg = _(b'cannot apply stream clone to non-empty repository')
  2488         raise error.Abort(msg)
  2506         raise error.Abort(msg)
  2489 
  2507 
  2490     repo.ui.debug('applying stream bundle\n')
  2508     repo.ui.debug(b'applying stream bundle\n')
  2491     streamclone.applybundlev2(repo, part, filecount, bytecount, requirements)
  2509     streamclone.applybundlev2(repo, part, filecount, bytecount, requirements)
  2492 
  2510 
  2493 
  2511 
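The stream2 part advertises its metadata as URL-quoted, comma-separated requirements plus integer file and byte counts, so parsing is plain string handling. A sketch using only the standard library, with urllib.parse standing in for Mercurial's urlreq wrapper (an assumption of this example):

from urllib.parse import unquote_to_bytes

def parse_stream2_params(params):
    requirements = unquote_to_bytes(params[b'requirements']).split(b',')
    filecount = int(params[b'filecount'])
    bytecount = int(params[b'bytecount'])
    return requirements, filecount, bytecount

params = {
    b'requirements': b'generaldelta%2Crevlogv1%2Csparserevlog',
    b'filecount': b'3',
    b'bytecount': b'12345',
}
print(parse_stream2_params(params))
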
  2494 def widen_bundle(
  2512 def widen_bundle(
  2495     bundler, repo, oldmatcher, newmatcher, common, known, cgversion, ellipses
  2513     bundler, repo, oldmatcher, newmatcher, common, known, cgversion, ellipses
  2507 
  2525 
  2508     returns bundle2 of the data required for extending
  2526     returns bundle2 of the data required for extending
  2509     """
  2527     """
  2510     commonnodes = set()
  2528     commonnodes = set()
  2511     cl = repo.changelog
  2529     cl = repo.changelog
  2512     for r in repo.revs("::%ln", common):
  2530     for r in repo.revs(b"::%ln", common):
  2513         commonnodes.add(cl.node(r))
  2531         commonnodes.add(cl.node(r))
  2514     if commonnodes:
  2532     if commonnodes:
  2515         # XXX: we should only send the filelogs (and treemanifest). user
  2533         # XXX: we should only send the filelogs (and treemanifest). user
  2516         # already has the changelog and manifest
  2534         # already has the changelog and manifest
  2517         packer = changegroup.getbundler(
  2535         packer = changegroup.getbundler(
  2523         )
  2541         )
  2524         cgdata = packer.generate(
  2542         cgdata = packer.generate(
  2525             {nodemod.nullid},
  2543             {nodemod.nullid},
  2526             list(commonnodes),
  2544             list(commonnodes),
  2527             False,
  2545             False,
  2528             'narrow_widen',
  2546             b'narrow_widen',
  2529             changelog=False,
  2547             changelog=False,
  2530         )
  2548         )
  2531 
  2549 
  2532         part = bundler.newpart('changegroup', data=cgdata)
  2550         part = bundler.newpart(b'changegroup', data=cgdata)
  2533         part.addparam('version', cgversion)
  2551         part.addparam(b'version', cgversion)
  2534         if 'treemanifest' in repo.requirements:
  2552         if b'treemanifest' in repo.requirements:
  2535             part.addparam('treemanifest', '1')
  2553             part.addparam(b'treemanifest', b'1')
  2536 
  2554 
  2537     return bundler
  2555     return bundler