mercurial/scmutil.py
changeset 43077 687b865b95ad
parent 43076 2372284d9457
child 43089 c59eb1560c44
   156             if ctx.phase() >= phases.secret and not ctx.extinct():
   156             if ctx.phase() >= phases.secret and not ctx.extinct():
   157                 secretlist.append(n)
   157                 secretlist.append(n)
   158 
   158 
   159     if secretlist:
   159     if secretlist:
   160         ui.status(
   160         ui.status(
   161             _("no changes found (ignored %d secret changesets)\n")
   161             _(b"no changes found (ignored %d secret changesets)\n")
   162             % len(secretlist)
   162             % len(secretlist)
   163         )
   163         )
   164     else:
   164     else:
   165         ui.status(_("no changes found\n"))
   165         ui.status(_(b"no changes found\n"))
   166 
   166 
   167 
   167 
   168 def callcatch(ui, func):
   168 def callcatch(ui, func):
   169     """call func() with global exception handling
   169     """call func() with global exception handling
   170 
   170 
   179             raise
   179             raise
   180     # Global exception handling, alphabetically
   180     # Global exception handling, alphabetically
   181     # Mercurial-specific first, followed by built-in and library exceptions
   181     # Mercurial-specific first, followed by built-in and library exceptions
   182     except error.LockHeld as inst:
   182     except error.LockHeld as inst:
   183         if inst.errno == errno.ETIMEDOUT:
   183         if inst.errno == errno.ETIMEDOUT:
   184             reason = _('timed out waiting for lock held by %r') % (
   184             reason = _(b'timed out waiting for lock held by %r') % (
   185                 pycompat.bytestr(inst.locker)
   185                 pycompat.bytestr(inst.locker)
   186             )
   186             )
   187         else:
   187         else:
   188             reason = _('lock held by %r') % inst.locker
   188             reason = _(b'lock held by %r') % inst.locker
   189         ui.error(
   189         ui.error(
   190             _("abort: %s: %s\n")
   190             _(b"abort: %s: %s\n")
   191             % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
   191             % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
   192         )
   192         )
   193         if not inst.locker:
   193         if not inst.locker:
   194             ui.error(_("(lock might be very busy)\n"))
   194             ui.error(_(b"(lock might be very busy)\n"))
   195     except error.LockUnavailable as inst:
   195     except error.LockUnavailable as inst:
   196         ui.error(
   196         ui.error(
   197             _("abort: could not lock %s: %s\n")
   197             _(b"abort: could not lock %s: %s\n")
   198             % (
   198             % (
   199                 inst.desc or stringutil.forcebytestr(inst.filename),
   199                 inst.desc or stringutil.forcebytestr(inst.filename),
   200                 encoding.strtolocal(inst.strerror),
   200                 encoding.strtolocal(inst.strerror),
   201             )
   201             )
   202         )
   202         )
   203     except error.OutOfBandError as inst:
   203     except error.OutOfBandError as inst:
   204         if inst.args:
   204         if inst.args:
   205             msg = _("abort: remote error:\n")
   205             msg = _(b"abort: remote error:\n")
   206         else:
   206         else:
   207             msg = _("abort: remote error\n")
   207             msg = _(b"abort: remote error\n")
   208         ui.error(msg)
   208         ui.error(msg)
   209         if inst.args:
   209         if inst.args:
   210             ui.error(''.join(inst.args))
   210             ui.error(b''.join(inst.args))
   211         if inst.hint:
   211         if inst.hint:
   212             ui.error('(%s)\n' % inst.hint)
   212             ui.error(b'(%s)\n' % inst.hint)
   213     except error.RepoError as inst:
   213     except error.RepoError as inst:
   214         ui.error(_("abort: %s!\n") % inst)
   214         ui.error(_(b"abort: %s!\n") % inst)
   215         if inst.hint:
   215         if inst.hint:
   216             ui.error(_("(%s)\n") % inst.hint)
   216             ui.error(_(b"(%s)\n") % inst.hint)
   217     except error.ResponseError as inst:
   217     except error.ResponseError as inst:
   218         ui.error(_("abort: %s") % inst.args[0])
   218         ui.error(_(b"abort: %s") % inst.args[0])
   219         msg = inst.args[1]
   219         msg = inst.args[1]
   220         if isinstance(msg, type(u'')):
   220         if isinstance(msg, type(u'')):
   221             msg = pycompat.sysbytes(msg)
   221             msg = pycompat.sysbytes(msg)
   222         if not isinstance(msg, bytes):
   222         if not isinstance(msg, bytes):
   223             ui.error(" %r\n" % (msg,))
   223             ui.error(b" %r\n" % (msg,))
   224         elif not msg:
   224         elif not msg:
   225             ui.error(_(" empty string\n"))
   225             ui.error(_(b" empty string\n"))
   226         else:
   226         else:
   227             ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
   227             ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
   228     except error.CensoredNodeError as inst:
   228     except error.CensoredNodeError as inst:
   229         ui.error(_("abort: file censored %s!\n") % inst)
   229         ui.error(_(b"abort: file censored %s!\n") % inst)
   230     except error.StorageError as inst:
   230     except error.StorageError as inst:
   231         ui.error(_("abort: %s!\n") % inst)
   231         ui.error(_(b"abort: %s!\n") % inst)
   232         if inst.hint:
   232         if inst.hint:
   233             ui.error(_("(%s)\n") % inst.hint)
   233             ui.error(_(b"(%s)\n") % inst.hint)
   234     except error.InterventionRequired as inst:
   234     except error.InterventionRequired as inst:
   235         ui.error("%s\n" % inst)
   235         ui.error(b"%s\n" % inst)
   236         if inst.hint:
   236         if inst.hint:
   237             ui.error(_("(%s)\n") % inst.hint)
   237             ui.error(_(b"(%s)\n") % inst.hint)
   238         return 1
   238         return 1
   239     except error.WdirUnsupported:
   239     except error.WdirUnsupported:
   240         ui.error(_("abort: working directory revision cannot be specified\n"))
   240         ui.error(_(b"abort: working directory revision cannot be specified\n"))
   241     except error.Abort as inst:
   241     except error.Abort as inst:
   242         ui.error(_("abort: %s\n") % inst)
   242         ui.error(_(b"abort: %s\n") % inst)
   243         if inst.hint:
   243         if inst.hint:
   244             ui.error(_("(%s)\n") % inst.hint)
   244             ui.error(_(b"(%s)\n") % inst.hint)
   245     except ImportError as inst:
   245     except ImportError as inst:
   246         ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
   246         ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
   247         m = stringutil.forcebytestr(inst).split()[-1]
   247         m = stringutil.forcebytestr(inst).split()[-1]
   248         if m in "mpatch bdiff".split():
   248         if m in b"mpatch bdiff".split():
   249             ui.error(_("(did you forget to compile extensions?)\n"))
   249             ui.error(_(b"(did you forget to compile extensions?)\n"))
   250         elif m in "zlib".split():
   250         elif m in b"zlib".split():
   251             ui.error(_("(is your Python install correct?)\n"))
   251             ui.error(_(b"(is your Python install correct?)\n"))
   252     except (IOError, OSError) as inst:
   252     except (IOError, OSError) as inst:
   253         if util.safehasattr(inst, "code"):  # HTTPError
   253         if util.safehasattr(inst, b"code"):  # HTTPError
   254             ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
   254             ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
   255         elif util.safehasattr(inst, "reason"):  # URLError or SSLError
   255         elif util.safehasattr(inst, b"reason"):  # URLError or SSLError
   256             try:  # usually it is in the form (errno, strerror)
   256             try:  # usually it is in the form (errno, strerror)
   257                 reason = inst.reason.args[1]
   257                 reason = inst.reason.args[1]
   258             except (AttributeError, IndexError):
   258             except (AttributeError, IndexError):
   259                 # it might be anything, for example a string
   259                 # it might be anything, for example a string
   260                 reason = inst.reason
   260                 reason = inst.reason
   261             if isinstance(reason, pycompat.unicode):
   261             if isinstance(reason, pycompat.unicode):
   262                 # SSLError of Python 2.7.9 contains a unicode string
   262                 # SSLError of Python 2.7.9 contains a unicode string
   263                 reason = encoding.unitolocal(reason)
   263                 reason = encoding.unitolocal(reason)
   264             ui.error(_("abort: error: %s\n") % reason)
   264             ui.error(_(b"abort: error: %s\n") % reason)
   265         elif (
   265         elif (
   266             util.safehasattr(inst, "args")
   266             util.safehasattr(inst, b"args")
   267             and inst.args
   267             and inst.args
   268             and inst.args[0] == errno.EPIPE
   268             and inst.args[0] == errno.EPIPE
   269         ):
   269         ):
   270             pass
   270             pass
   271         elif getattr(inst, "strerror", None):  # common IOError or OSError
   271         elif getattr(inst, "strerror", None):  # common IOError or OSError
   272             if getattr(inst, "filename", None) is not None:
   272             if getattr(inst, "filename", None) is not None:
   273                 ui.error(
   273                 ui.error(
   274                     _("abort: %s: '%s'\n")
   274                     _(b"abort: %s: '%s'\n")
   275                     % (
   275                     % (
   276                         encoding.strtolocal(inst.strerror),
   276                         encoding.strtolocal(inst.strerror),
   277                         stringutil.forcebytestr(inst.filename),
   277                         stringutil.forcebytestr(inst.filename),
   278                     )
   278                     )
   279                 )
   279                 )
   280             else:
   280             else:
   281                 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
   281                 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
   282         else:  # suspicious IOError
   282         else:  # suspicious IOError
   283             raise
   283             raise
   284     except MemoryError:
   284     except MemoryError:
   285         ui.error(_("abort: out of memory\n"))
   285         ui.error(_(b"abort: out of memory\n"))
   286     except SystemExit as inst:
   286     except SystemExit as inst:
   287         # Commands shouldn't sys.exit directly, but give a return code.
   287         # Commands shouldn't sys.exit directly, but give a return code.
   288         # Just in case, catch this and pass the exit code to the caller.
   288         # Just in case, catch this and pass the exit code to the caller.
   289         return inst.code
   289         return inst.code
   290 
   290 
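callcatch() above wraps a zero-argument callable so that the exceptions handled in the chain
above are reported on ui instead of escaping. A minimal usage sketch (illustrative only; the
_run name and the surrounding wiring are assumptions, not part of this changeset):

    def _run():
        # command body; may raise error.Abort, error.RepoError, IOError, ...
        return 0

    ret = callcatch(ui, _run)  # handled exceptions become ui.error() messages rather than tracebacks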
   292 
   292 
   293 
   293 
   294 def checknewlabel(repo, lbl, kind):
   294 def checknewlabel(repo, lbl, kind):
   295     # Do not use the "kind" parameter in ui output.
   295     # Do not use the "kind" parameter in ui output.
   296     # It makes strings difficult to translate.
   296     # It makes strings difficult to translate.
   297     if lbl in ['tip', '.', 'null']:
   297     if lbl in [b'tip', b'.', b'null']:
   298         raise error.Abort(_("the name '%s' is reserved") % lbl)
   298         raise error.Abort(_(b"the name '%s' is reserved") % lbl)
   299     for c in (':', '\0', '\n', '\r'):
   299     for c in (b':', b'\0', b'\n', b'\r'):
   300         if c in lbl:
   300         if c in lbl:
   301             raise error.Abort(
   301             raise error.Abort(
   302                 _("%r cannot be used in a name") % pycompat.bytestr(c)
   302                 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
   303             )
   303             )
   304     try:
   304     try:
   305         int(lbl)
   305         int(lbl)
   306         raise error.Abort(_("cannot use an integer as a name"))
   306         raise error.Abort(_(b"cannot use an integer as a name"))
   307     except ValueError:
   307     except ValueError:
   308         pass
   308         pass
   309     if lbl.strip() != lbl:
   309     if lbl.strip() != lbl:
   310         raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
   310         raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
   311 
   311 
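A short sketch of how the checks in checknewlabel() behave (illustrative; the code shown here
does not use the repo argument, so any local repository object works):

    checknewlabel(repo, b'feature-x', b'bookmark')  # accepted: not reserved, no forbidden characters
    checknewlabel(repo, b'tip', b'bookmark')        # error.Abort: the name 'tip' is reserved
    checknewlabel(repo, b'123', b'bookmark')        # error.Abort: cannot use an integer as a name
    checknewlabel(repo, b'a:b', b'bookmark')        # error.Abort: ':' cannot be used in a name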
   312 
   312 
   313 def checkfilename(f):
   313 def checkfilename(f):
   314     '''Check that the filename f is an acceptable filename for a tracked file'''
   314     '''Check that the filename f is an acceptable filename for a tracked file'''
   315     if '\r' in f or '\n' in f:
   315     if b'\r' in f or b'\n' in f:
   316         raise error.Abort(
   316         raise error.Abort(
   317             _("'\\n' and '\\r' disallowed in filenames: %r")
   317             _(b"'\\n' and '\\r' disallowed in filenames: %r")
   318             % pycompat.bytestr(f)
   318             % pycompat.bytestr(f)
   319         )
   319         )
   320 
   320 
   321 
   321 
   322 def checkportable(ui, f):
   322 def checkportable(ui, f):
   324     checkfilename(f)
   324     checkfilename(f)
   325     abort, warn = checkportabilityalert(ui)
   325     abort, warn = checkportabilityalert(ui)
   326     if abort or warn:
   326     if abort or warn:
   327         msg = util.checkwinfilename(f)
   327         msg = util.checkwinfilename(f)
   328         if msg:
   328         if msg:
   329             msg = "%s: %s" % (msg, procutil.shellquote(f))
   329             msg = b"%s: %s" % (msg, procutil.shellquote(f))
   330             if abort:
   330             if abort:
   331                 raise error.Abort(msg)
   331                 raise error.Abort(msg)
   332             ui.warn(_("warning: %s\n") % msg)
   332             ui.warn(_(b"warning: %s\n") % msg)
   333 
   333 
   334 
   334 
   335 def checkportabilityalert(ui):
   335 def checkportabilityalert(ui):
   336     '''check if the user's config requests nothing, a warning, or abort for
   336     '''check if the user's config requests nothing, a warning, or abort for
   337     non-portable filenames'''
   337     non-portable filenames'''
   338     val = ui.config('ui', 'portablefilenames')
   338     val = ui.config(b'ui', b'portablefilenames')
   339     lval = val.lower()
   339     lval = val.lower()
   340     bval = stringutil.parsebool(val)
   340     bval = stringutil.parsebool(val)
   341     abort = pycompat.iswindows or lval == 'abort'
   341     abort = pycompat.iswindows or lval == b'abort'
   342     warn = bval or lval == 'warn'
   342     warn = bval or lval == b'warn'
   343     if bval is None and not (warn or abort or lval == 'ignore'):
   343     if bval is None and not (warn or abort or lval == b'ignore'):
   344         raise error.ConfigError(
   344         raise error.ConfigError(
   345             _("ui.portablefilenames value is invalid ('%s')") % val
   345             _(b"ui.portablefilenames value is invalid ('%s')") % val
   346         )
   346         )
   347     return abort, warn
   347     return abort, warn
   348 
   348 
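The (abort, warn) pair returned by checkportabilityalert() follows ui.portablefilenames; a rough
summary of the mapping as implemented above (illustrative):

    # ui.portablefilenames = 'warn'   -> warn everywhere, abort only on Windows
    # ui.portablefilenames = 'abort'  -> abort everywhere
    # ui.portablefilenames = 'ignore' -> no warning, abort only on Windows
    # any other non-boolean value     -> error.ConfigError
    abort, warn = checkportabilityalert(ui)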
   349 
   349 
   350 class casecollisionauditor(object):
   350 class casecollisionauditor(object):
   351     def __init__(self, ui, abort, dirstate):
   351     def __init__(self, ui, abort, dirstate):
   352         self._ui = ui
   352         self._ui = ui
   353         self._abort = abort
   353         self._abort = abort
   354         allfiles = '\0'.join(dirstate)
   354         allfiles = b'\0'.join(dirstate)
   355         self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
   355         self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
   356         self._dirstate = dirstate
   356         self._dirstate = dirstate
   357         # The purpose of _newfiles is so that we don't complain about
   357         # The purpose of _newfiles is so that we don't complain about
   358         # case collisions if someone were to call this object with the
   358         # case collisions if someone were to call this object with the
   359         # same filename twice.
   359         # same filename twice.
   360         self._newfiles = set()
   360         self._newfiles = set()
   362     def __call__(self, f):
   362     def __call__(self, f):
   363         if f in self._newfiles:
   363         if f in self._newfiles:
   364             return
   364             return
   365         fl = encoding.lower(f)
   365         fl = encoding.lower(f)
   366         if fl in self._loweredfiles and f not in self._dirstate:
   366         if fl in self._loweredfiles and f not in self._dirstate:
   367             msg = _('possible case-folding collision for %s') % f
   367             msg = _(b'possible case-folding collision for %s') % f
   368             if self._abort:
   368             if self._abort:
   369                 raise error.Abort(msg)
   369                 raise error.Abort(msg)
   370             self._ui.warn(_("warning: %s\n") % msg)
   370             self._ui.warn(_(b"warning: %s\n") % msg)
   371         self._loweredfiles.add(fl)
   371         self._loweredfiles.add(fl)
   372         self._newfiles.add(f)
   372         self._newfiles.add(f)
   373 
   373 
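A small sketch of the auditor in use (illustrative; assumes neither spelling of the file is
already tracked in the dirstate):

    audit = casecollisionauditor(ui, abort=False, dirstate=repo.dirstate)
    audit(b'README.txt')  # remembered, no complaint
    audit(b'readme.txt')  # warns about a possible case-folding collision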
   374 
   374 
   375 def filteredhash(repo, maxrev):
   375 def filteredhash(repo, maxrev):
   390     key = None
   390     key = None
   391     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
   391     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
   392     if revs:
   392     if revs:
   393         s = hashlib.sha1()
   393         s = hashlib.sha1()
   394         for rev in revs:
   394         for rev in revs:
   395             s.update('%d;' % rev)
   395             s.update(b'%d;' % rev)
   396         key = s.digest()
   396         key = s.digest()
   397     return key
   397     return key
   398 
   398 
   399 
   399 
   400 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
   400 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
   421     if (seen_dirs is None) and followsym:
   421     if (seen_dirs is None) and followsym:
   422         seen_dirs = []
   422         seen_dirs = []
   423         adddir(seen_dirs, path)
   423         adddir(seen_dirs, path)
   424     for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
   424     for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
   425         dirs.sort()
   425         dirs.sort()
   426         if '.hg' in dirs:
   426         if b'.hg' in dirs:
   427             yield root  # found a repository
   427             yield root  # found a repository
   428             qroot = os.path.join(root, '.hg', 'patches')
   428             qroot = os.path.join(root, b'.hg', b'patches')
   429             if os.path.isdir(os.path.join(qroot, '.hg')):
   429             if os.path.isdir(os.path.join(qroot, b'.hg')):
   430                 yield qroot  # we have a patch queue repo here
   430                 yield qroot  # we have a patch queue repo here
   431             if recurse:
   431             if recurse:
   432                 # avoid recursing inside the .hg directory
   432                 # avoid recursing inside the .hg directory
   433                 dirs.remove('.hg')
   433                 dirs.remove(b'.hg')
   434             else:
   434             else:
   435                 dirs[:] = []  # don't descend further
   435                 dirs[:] = []  # don't descend further
   436         elif followsym:
   436         elif followsym:
   437             newdirs = []
   437             newdirs = []
   438             for d in dirs:
   438             for d in dirs:
   474     """Format given revision and node depending on the current verbosity"""
   474     """Format given revision and node depending on the current verbosity"""
   475     if ui.debugflag:
   475     if ui.debugflag:
   476         hexfunc = hex
   476         hexfunc = hex
   477     else:
   477     else:
   478         hexfunc = short
   478         hexfunc = short
   479     return '%d:%s' % (rev, hexfunc(node))
   479     return b'%d:%s' % (rev, hexfunc(node))
   480 
   480 
   481 
   481 
   482 def resolvehexnodeidprefix(repo, prefix):
   482 def resolvehexnodeidprefix(repo, prefix):
   483     if prefix.startswith('x') and repo.ui.configbool(
   483     if prefix.startswith(b'x') and repo.ui.configbool(
   484         'experimental', 'revisions.prefixhexnode'
   484         b'experimental', b'revisions.prefixhexnode'
   485     ):
   485     ):
   486         prefix = prefix[1:]
   486         prefix = prefix[1:]
   487     try:
   487     try:
   488         # Uses unfiltered repo because it's faster when the prefix is ambiguous.
   488         # Uses unfiltered repo because it's faster when the prefix is ambiguous.
   489         # This matches the shortesthexnodeidprefix() function below.
   489         # This matches the shortesthexnodeidprefix() function below.
   490         node = repo.unfiltered().changelog._partialmatch(prefix)
   490         node = repo.unfiltered().changelog._partialmatch(prefix)
   491     except error.AmbiguousPrefixLookupError:
   491     except error.AmbiguousPrefixLookupError:
   492         revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
   492         revset = repo.ui.config(
   493             b'experimental', b'revisions.disambiguatewithin'
   494         )
   493         if revset:
   495         if revset:
   494             # Clear config to avoid infinite recursion
   496             # Clear config to avoid infinite recursion
   495             configoverrides = {
   497             configoverrides = {
   496                 ('experimental', 'revisions.disambiguatewithin'): None
   498                 (b'experimental', b'revisions.disambiguatewithin'): None
   497             }
   499             }
   498             with repo.ui.configoverride(configoverrides):
   500             with repo.ui.configoverride(configoverrides):
   499                 revs = repo.anyrevs([revset], user=True)
   501                 revs = repo.anyrevs([revset], user=True)
   500                 matches = []
   502                 matches = []
   501                 for rev in revs:
   503                 for rev in revs:
   538 
   540 
   539     minlength = max(minlength, 1)
   541     minlength = max(minlength, 1)
   540 
   542 
   541     def disambiguate(prefix):
   543     def disambiguate(prefix):
   542         """Disambiguate against revnums."""
   544         """Disambiguate against revnums."""
   543         if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
   545         if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
   544             if mayberevnum(repo, prefix):
   546             if mayberevnum(repo, prefix):
   545                 return 'x' + prefix
   547                 return b'x' + prefix
   546             else:
   548             else:
   547                 return prefix
   549                 return prefix
   548 
   550 
   549         hexnode = hex(node)
   551         hexnode = hex(node)
   550         for length in range(len(prefix), len(hexnode) + 1):
   552         for length in range(len(prefix), len(hexnode) + 1):
   551             prefix = hexnode[:length]
   553             prefix = hexnode[:length]
   552             if not mayberevnum(repo, prefix):
   554             if not mayberevnum(repo, prefix):
   553                 return prefix
   555                 return prefix
   554 
   556 
   555     cl = repo.unfiltered().changelog
   557     cl = repo.unfiltered().changelog
   556     revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
   558     revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
   557     if revset:
   559     if revset:
   558         revs = None
   560         revs = None
   559         if cache is not None:
   561         if cache is not None:
   560             revs = cache.get('disambiguationrevset')
   562             revs = cache.get(b'disambiguationrevset')
   561         if revs is None:
   563         if revs is None:
   562             revs = repo.anyrevs([revset], user=True)
   564             revs = repo.anyrevs([revset], user=True)
   563             if cache is not None:
   565             if cache is not None:
   564                 cache['disambiguationrevset'] = revs
   566                 cache[b'disambiguationrevset'] = revs
   565         if cl.rev(node) in revs:
   567         if cl.rev(node) in revs:
   566             hexnode = hex(node)
   568             hexnode = hex(node)
   567             nodetree = None
   569             nodetree = None
   568             if cache is not None:
   570             if cache is not None:
   569                 nodetree = cache.get('disambiguationnodetree')
   571                 nodetree = cache.get(b'disambiguationnodetree')
   570             if not nodetree:
   572             if not nodetree:
   571                 try:
   573                 try:
   572                     nodetree = parsers.nodetree(cl.index, len(revs))
   574                     nodetree = parsers.nodetree(cl.index, len(revs))
   573                 except AttributeError:
   575                 except AttributeError:
   574                     # no native nodetree
   576                     # no native nodetree
   575                     pass
   577                     pass
   576                 else:
   578                 else:
   577                     for r in revs:
   579                     for r in revs:
   578                         nodetree.insert(r)
   580                         nodetree.insert(r)
   579                     if cache is not None:
   581                     if cache is not None:
   580                         cache['disambiguationnodetree'] = nodetree
   582                         cache[b'disambiguationnodetree'] = nodetree
   581             if nodetree is not None:
   583             if nodetree is not None:
   582                 length = max(nodetree.shortest(node), minlength)
   584                 length = max(nodetree.shortest(node), minlength)
   583                 prefix = hexnode[:length]
   585                 prefix = hexnode[:length]
   584                 return disambiguate(prefix)
   586                 return disambiguate(prefix)
   585             for length in range(minlength, len(hexnode) + 1):
   587             for length in range(minlength, len(hexnode) + 1):
   618     i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
   620     i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
   619     not "max(public())".
   621     not "max(public())".
   620     """
   622     """
   621     if not isinstance(symbol, bytes):
   623     if not isinstance(symbol, bytes):
   622         msg = (
   624         msg = (
   623             "symbol (%s of type %s) was not a string, did you mean "
   625             b"symbol (%s of type %s) was not a string, did you mean "
   624             "repo[symbol]?" % (symbol, type(symbol))
   626             b"repo[symbol]?" % (symbol, type(symbol))
   625         )
   627         )
   626         raise error.ProgrammingError(msg)
   628         raise error.ProgrammingError(msg)
   627     try:
   629     try:
   628         if symbol in ('.', 'tip', 'null'):
   630         if symbol in (b'.', b'tip', b'null'):
   629             return repo[symbol]
   631             return repo[symbol]
   630 
   632 
   631         try:
   633         try:
   632             r = int(symbol)
   634             r = int(symbol)
   633             if '%d' % r != symbol:
   635             if b'%d' % r != symbol:
   634                 raise ValueError
   636                 raise ValueError
   635             l = len(repo.changelog)
   637             l = len(repo.changelog)
   636             if r < 0:
   638             if r < 0:
   637                 r += l
   639                 r += l
   638             if r < 0 or r >= l and r != wdirrev:
   640             if r < 0 or r >= l and r != wdirrev:
   664         node = resolvehexnodeidprefix(repo, symbol)
   666         node = resolvehexnodeidprefix(repo, symbol)
   665         if node is not None:
   667         if node is not None:
   666             rev = repo.changelog.rev(node)
   668             rev = repo.changelog.rev(node)
   667             return repo[rev]
   669             return repo[rev]
   668 
   670 
   669         raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
   671         raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
   670 
   672 
   671     except error.WdirUnsupported:
   673     except error.WdirUnsupported:
   672         return repo[None]
   674         return repo[None]
   673     except (
   675     except (
   674         error.FilteredIndexError,
   676         error.FilteredIndexError,
   681 def _filterederror(repo, changeid):
   683 def _filterederror(repo, changeid):
   682     """build an exception to be raised about a filtered changeid
   684     """build an exception to be raised about a filtered changeid
   683 
   685 
   684     This is extracted into a function to help extensions (e.g. evolve)
   686     This is extracted into a function to help extensions (e.g. evolve)
   685     experiment with various message variants."""
   687     experiment with various message variants."""
   686     if repo.filtername.startswith('visible'):
   688     if repo.filtername.startswith(b'visible'):
   687 
   689 
   688         # Check if the changeset is obsolete
   690         # Check if the changeset is obsolete
   689         unfilteredrepo = repo.unfiltered()
   691         unfilteredrepo = repo.unfiltered()
   690         ctx = revsymbol(unfilteredrepo, changeid)
   692         ctx = revsymbol(unfilteredrepo, changeid)
   691 
   693 
   692         # If the changeset is obsolete, enrich the message with the reason
   694         # If the changeset is obsolete, enrich the message with the reason
   693         # that made this changeset not visible
   695         # that made this changeset not visible
   694         if ctx.obsolete():
   696         if ctx.obsolete():
   695             msg = obsutil._getfilteredreason(repo, changeid, ctx)
   697             msg = obsutil._getfilteredreason(repo, changeid, ctx)
   696         else:
   698         else:
   697             msg = _("hidden revision '%s'") % changeid
   699             msg = _(b"hidden revision '%s'") % changeid
   698 
   700 
   699         hint = _('use --hidden to access hidden revisions')
   701         hint = _(b'use --hidden to access hidden revisions')
   700 
   702 
   701         return error.FilteredRepoLookupError(msg, hint=hint)
   703         return error.FilteredRepoLookupError(msg, hint=hint)
   702     msg = _("filtered revision '%s' (not in '%s' subset)")
   704     msg = _(b"filtered revision '%s' (not in '%s' subset)")
   703     msg %= (changeid, repo.filtername)
   705     msg %= (changeid, repo.filtername)
   704     return error.FilteredRepoLookupError(msg)
   706     return error.FilteredRepoLookupError(msg)
   705 
   707 
   706 
   708 
   707 def revsingle(repo, revspec, default='.', localalias=None):
   709 def revsingle(repo, revspec, default=b'.', localalias=None):
   708     if not revspec and revspec != 0:
   710     if not revspec and revspec != 0:
   709         return repo[default]
   711         return repo[default]
   710 
   712 
   711     l = revrange(repo, [revspec], localalias=localalias)
   713     l = revrange(repo, [revspec], localalias=localalias)
   712     if not l:
   714     if not l:
   713         raise error.Abort(_('empty revision set'))
   715         raise error.Abort(_(b'empty revision set'))
   714     return repo[l.last()]
   716     return repo[l.last()]
   715 
   717 
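revsingle() resolves a single revision specification, falling back to a default when the spec is
empty; a brief sketch (illustrative):

    ctx = revsingle(repo, b'tip')             # context for the tip revision
    ctx = revsingle(repo, b'', default=b'.')  # empty spec falls back to the default revision
    revsingle(repo, b'draft() and public()')  # error.Abort: empty revision set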
   716 
   718 
   717 def _pairspec(revspec):
   719 def _pairspec(revspec):
   718     tree = revsetlang.parse(revspec)
   720     tree = revsetlang.parse(revspec)
   719     return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
   721     return tree and tree[0] in (
   722         b'range',
   723         b'rangepre',
   724         b'rangepost',
   725         b'rangeall',
   726     )
   720 
   727 
   721 
   728 
   722 def revpair(repo, revs):
   729 def revpair(repo, revs):
   723     if not revs:
   730     if not revs:
   724         return repo['.'], repo[None]
   731         return repo[b'.'], repo[None]
   725 
   732 
   726     l = revrange(repo, revs)
   733     l = revrange(repo, revs)
   727 
   734 
   728     if not l:
   735     if not l:
   729         raise error.Abort(_('empty revision range'))
   736         raise error.Abort(_(b'empty revision range'))
   730 
   737 
   731     first = l.first()
   738     first = l.first()
   732     second = l.last()
   739     second = l.last()
   733 
   740 
   734     if (
   741     if (
   735         first == second
   742         first == second
   736         and len(revs) >= 2
   743         and len(revs) >= 2
   737         and not all(revrange(repo, [r]) for r in revs)
   744         and not all(revrange(repo, [r]) for r in revs)
   738     ):
   745     ):
   739         raise error.Abort(_('empty revision on one side of range'))
   746         raise error.Abort(_(b'empty revision on one side of range'))
   740 
   747 
   741     # if top-level is range expression, the result must always be a pair
   748     # if top-level is range expression, the result must always be a pair
   742     if first == second and len(revs) == 1 and not _pairspec(revs[0]):
   749     if first == second and len(revs) == 1 and not _pairspec(revs[0]):
   743         return repo[first], repo[None]
   750         return repo[first], repo[None]
   744 
   751 
   767     integer revisions.
   774     integer revisions.
   768     """
   775     """
   769     allspecs = []
   776     allspecs = []
   770     for spec in specs:
   777     for spec in specs:
   771         if isinstance(spec, int):
   778         if isinstance(spec, int):
   772             spec = revsetlang.formatspec('%d', spec)
   779             spec = revsetlang.formatspec(b'%d', spec)
   773         allspecs.append(spec)
   780         allspecs.append(spec)
   774     return repo.anyrevs(allspecs, user=True, localalias=localalias)
   781     return repo.anyrevs(allspecs, user=True, localalias=localalias)
   775 
   782 
   776 
   783 
   777 def meaningfulparents(repo, ctx):
   784 def meaningfulparents(repo, ctx):
   806     of what ui.relative-paths is set to.
   813     of what ui.relative-paths is set to.
   807     """
   814     """
   808     if forcerelativevalue is not None:
   815     if forcerelativevalue is not None:
   809         relative = forcerelativevalue
   816         relative = forcerelativevalue
   810     else:
   817     else:
   811         config = repo.ui.config('ui', 'relative-paths')
   818         config = repo.ui.config(b'ui', b'relative-paths')
   812         if config == 'legacy':
   819         if config == b'legacy':
   813             relative = legacyrelativevalue
   820             relative = legacyrelativevalue
   814         else:
   821         else:
   815             relative = stringutil.parsebool(config)
   822             relative = stringutil.parsebool(config)
   816             if relative is None:
   823             if relative is None:
   817                 raise error.ConfigError(
   824                 raise error.ConfigError(
   818                     _("ui.relative-paths is not a boolean ('%s')") % config
   825                     _(b"ui.relative-paths is not a boolean ('%s')") % config
   819                 )
   826                 )
   820 
   827 
   821     if relative:
   828     if relative:
   822         cwd = repo.getcwd()
   829         cwd = repo.getcwd()
   823         pathto = repo.pathto
   830         pathto = repo.pathto
   824         return lambda f: pathto(f, cwd)
   831         return lambda f: pathto(f, cwd)
   825     elif repo.ui.configbool('ui', 'slash'):
   832     elif repo.ui.configbool(b'ui', b'slash'):
   826         return lambda f: f
   833         return lambda f: f
   827     else:
   834     else:
   828         return util.localpath
   835         return util.localpath
   829 
   836 
   830 
   837 
   837     '''Checks if any patterns, including --include and --exclude, were given.
   844     '''Checks if any patterns, including --include and --exclude, were given.
   838 
   845 
   839     Some commands (e.g. addremove) use this condition for deciding whether to
   846     Some commands (e.g. addremove) use this condition for deciding whether to
   840     print absolute or relative paths.
   847     print absolute or relative paths.
   841     '''
   848     '''
   842     return bool(pats or opts.get('include') or opts.get('exclude'))
   849     return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
   843 
   850 
   844 
   851 
   845 def expandpats(pats):
   852 def expandpats(pats):
   846     '''Expand bare globs when running on Windows.
   853     '''Expand bare globs when running on Windows.
   847     On POSIX we assume it has already been done by sh.'''
   854     On POSIX we assume it has already been done by sh.'''
   861         ret.append(kindpat)
   868         ret.append(kindpat)
   862     return ret
   869     return ret
   863 
   870 
   864 
   871 
   865 def matchandpats(
   872 def matchandpats(
   866     ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None
   873     ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
   867 ):
   874 ):
   868     '''Return a matcher and the patterns that were used.
   875     '''Return a matcher and the patterns that were used.
   869     The matcher will warn about bad matches, unless an alternate badfn callback
   876     The matcher will warn about bad matches, unless an alternate badfn callback
   870     is provided.'''
   877     is provided.'''
   871     if opts is None:
   878     if opts is None:
   872         opts = {}
   879         opts = {}
   873     if not globbed and default == 'relpath':
   880     if not globbed and default == b'relpath':
   874         pats = expandpats(pats or [])
   881         pats = expandpats(pats or [])
   875 
   882 
   876     uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
   883     uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
   877 
   884 
   878     def bad(f, msg):
   885     def bad(f, msg):
   879         ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))
   886         ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
   880 
   887 
   881     if badfn is None:
   888     if badfn is None:
   882         badfn = bad
   889         badfn = bad
   883 
   890 
   884     m = ctx.match(
   891     m = ctx.match(
   885         pats,
   892         pats,
   886         opts.get('include'),
   893         opts.get(b'include'),
   887         opts.get('exclude'),
   894         opts.get(b'exclude'),
   888         default,
   895         default,
   889         listsubrepos=opts.get('subrepos'),
   896         listsubrepos=opts.get(b'subrepos'),
   890         badfn=badfn,
   897         badfn=badfn,
   891     )
   898     )
   892 
   899 
   893     if m.always():
   900     if m.always():
   894         pats = []
   901         pats = []
   895     return m, pats
   902     return m, pats
   896 
   903 
   897 
   904 
   898 def match(
   905 def match(
   899     ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None
   906     ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
   900 ):
   907 ):
   901     '''Return a matcher that will warn about bad matches.'''
   908     '''Return a matcher that will warn about bad matches.'''
   902     return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
   909     return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
   903 
   910 
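match() builds a matcher from command-line style patterns and options; a rough sketch
(illustrative, assuming the current working directory is the repository root):

    m = match(repo[None], pats=[b'glob:**.py'], opts={})
    m(b'mercurial/scmutil.py')  # matcher objects are callable on repo-relative paths; True here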
   904 
   911 
   929 
   936 
   930 def getorigvfs(ui, repo):
   937 def getorigvfs(ui, repo):
   931     """return a vfs suitable for saving 'orig' files
   938     """return a vfs suitable for saving 'orig' files
   932 
   939 
   933     return None if no special directory is configured"""
   940     return None if no special directory is configured"""
   934     origbackuppath = ui.config('ui', 'origbackuppath')
   941     origbackuppath = ui.config(b'ui', b'origbackuppath')
   935     if not origbackuppath:
   942     if not origbackuppath:
   936         return None
   943         return None
   937     return vfs.vfs(repo.wvfs.join(origbackuppath))
   944     return vfs.vfs(repo.wvfs.join(origbackuppath))
   938 
   945 
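getorigvfs() only returns a vfs when ui.origbackuppath is set; a sketch of the two cases
(illustrative, with '.hg/origbackups' as an example configuration value):

    origvfs = getorigvfs(ui, repo)
    # [ui] origbackuppath = .hg/origbackups -> a vfs rooted at that path under the working directory
    # no origbackuppath configured          -> None; callers fall back to plain '<file>.orig' names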
   939 
   946 
   947 
   954 
   948     Returns an absolute path
   955     Returns an absolute path
   949     '''
   956     '''
   950     origvfs = getorigvfs(ui, repo)
   957     origvfs = getorigvfs(ui, repo)
   951     if origvfs is None:
   958     if origvfs is None:
   952         return repo.wjoin(filepath + ".orig")
   959         return repo.wjoin(filepath + b".orig")
   953 
   960 
   954     origbackupdir = origvfs.dirname(filepath)
   961     origbackupdir = origvfs.dirname(filepath)
   955     if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
   962     if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
   956         ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
   963         ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
   957 
   964 
   958         # Remove any files that conflict with the backup file's path
   965         # Remove any files that conflict with the backup file's path
   959         for f in reversed(list(util.finddirs(filepath))):
   966         for f in reversed(list(util.finddirs(filepath))):
   960             if origvfs.isfileorlink(f):
   967             if origvfs.isfileorlink(f):
   961                 ui.note(_('removing conflicting file: %s\n') % origvfs.join(f))
   968                 ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
   962                 origvfs.unlink(f)
   969                 origvfs.unlink(f)
   963                 break
   970                 break
   964 
   971 
   965         origvfs.makedirs(origbackupdir)
   972         origvfs.makedirs(origbackupdir)
   966 
   973 
   967     if origvfs.isdir(filepath) and not origvfs.islink(filepath):
   974     if origvfs.isdir(filepath) and not origvfs.islink(filepath):
   968         ui.note(
   975         ui.note(
   969             _('removing conflicting directory: %s\n') % origvfs.join(filepath)
   976             _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
   970         )
   977         )
   971         origvfs.rmtree(filepath, forcibly=True)
   978         origvfs.rmtree(filepath, forcibly=True)
   972 
   979 
   973     return origvfs.join(filepath)
   980     return origvfs.join(filepath)
   974 
   981 
  1012     assert fixphase or targetphase is None
  1019     assert fixphase or targetphase is None
  1013     if not replacements and not moves:
  1020     if not replacements and not moves:
  1014         return
  1021         return
  1015 
  1022 
  1016     # translate mapping's other forms
  1023     # translate mapping's other forms
  1017     if not util.safehasattr(replacements, 'items'):
  1024     if not util.safehasattr(replacements, b'items'):
  1018         replacements = {(n,): () for n in replacements}
  1025         replacements = {(n,): () for n in replacements}
  1019     else:
  1026     else:
  1020         # upgrading non-tuple "source" to tuple ones for BC
  1027         # upgrading non-tuple "source" to tuple ones for BC
  1021         repls = {}
  1028         repls = {}
  1022         for key, value in replacements.items():
  1029         for key, value in replacements.items():
  1035             for oldnode in oldnodes:
  1042             for oldnode in oldnodes:
  1036                 if oldnode in moves:
  1043                 if oldnode in moves:
  1037                     continue
  1044                     continue
  1038                 if len(newnodes) > 1:
  1045                 if len(newnodes) > 1:
  1039                     # usually a split, take the one with the biggest rev number
  1046                     # usually a split, take the one with the biggest rev number
  1040                     newnode = next(unfi.set('max(%ln)', newnodes)).node()
  1047                     newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
  1041                 elif len(newnodes) == 0:
  1048                 elif len(newnodes) == 0:
  1042                     # move bookmark backwards
  1049                     # move bookmark backwards
  1043                     allreplaced = []
  1050                     allreplaced = []
  1044                     for rep in replacements:
  1051                     for rep in replacements:
  1045                         allreplaced.extend(rep)
  1052                         allreplaced.extend(rep)
  1046                     roots = list(
  1053                     roots = list(
  1047                         unfi.set('max((::%n) - %ln)', oldnode, allreplaced)
  1054                         unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
  1048                     )
  1055                     )
  1049                     if roots:
  1056                     if roots:
  1050                         newnode = roots[0].node()
  1057                         newnode = roots[0].node()
  1051                     else:
  1058                     else:
  1052                         newnode = nullid
  1059                         newnode = nullid
  1084             if newphase > ctx.phase():
  1091             if newphase > ctx.phase():
  1085                 toretract.setdefault(newphase, []).append(newnode)
  1092                 toretract.setdefault(newphase, []).append(newnode)
  1086             elif newphase < ctx.phase():
  1093             elif newphase < ctx.phase():
  1087                 toadvance.setdefault(newphase, []).append(newnode)
  1094                 toadvance.setdefault(newphase, []).append(newnode)
  1088 
  1095 
  1089     with repo.transaction('cleanup') as tr:
  1096     with repo.transaction(b'cleanup') as tr:
  1090         # Move bookmarks
  1097         # Move bookmarks
  1091         bmarks = repo._bookmarks
  1098         bmarks = repo._bookmarks
  1092         bmarkchanges = []
  1099         bmarkchanges = []
  1093         for oldnode, newnode in moves.items():
  1100         for oldnode, newnode in moves.items():
  1094             oldbmarks = repo.nodebookmarks(oldnode)
  1101             oldbmarks = repo.nodebookmarks(oldnode)
  1095             if not oldbmarks:
  1102             if not oldbmarks:
  1096                 continue
  1103                 continue
  1097             from . import bookmarks  # avoid import cycle
  1104             from . import bookmarks  # avoid import cycle
  1098 
  1105 
  1099             repo.ui.debug(
  1106             repo.ui.debug(
  1100                 'moving bookmarks %r from %s to %s\n'
  1107                 b'moving bookmarks %r from %s to %s\n'
  1101                 % (
  1108                 % (
  1102                     pycompat.rapply(pycompat.maybebytestr, oldbmarks),
  1109                     pycompat.rapply(pycompat.maybebytestr, oldbmarks),
  1103                     hex(oldnode),
  1110                     hex(oldnode),
  1104                     hex(newnode),
  1111                     hex(newnode),
  1105                 )
  1112                 )
  1106             )
  1113             )
  1107             # Delete divergent bookmarks being parents of related newnodes
  1114             # Delete divergent bookmarks being parents of related newnodes
  1108             deleterevs = repo.revs(
  1115             deleterevs = repo.revs(
  1109                 'parents(roots(%ln & (::%n))) - parents(%n)',
  1116                 b'parents(roots(%ln & (::%n))) - parents(%n)',
  1110                 allnewnodes,
  1117                 allnewnodes,
  1111                 newnode,
  1118                 newnode,
  1112                 oldnode,
  1119                 oldnode,
  1113             )
  1120             )
  1114             deletenodes = _containsnode(repo, deleterevs)
  1121             deletenodes = _containsnode(repo, deleterevs)
  1123         for phase, nodes in toretract.items():
  1130         for phase, nodes in toretract.items():
  1124             phases.retractboundary(repo, tr, phase, nodes)
  1131             phases.retractboundary(repo, tr, phase, nodes)
  1125         for phase, nodes in toadvance.items():
  1132         for phase, nodes in toadvance.items():
  1126             phases.advanceboundary(repo, tr, phase, nodes)
  1133             phases.advanceboundary(repo, tr, phase, nodes)
  1127 
  1134 
  1128         mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
  1135         mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
  1129         # Obsolete or strip nodes
  1136         # Obsolete or strip nodes
  1130         if obsolete.isenabled(repo, obsolete.createmarkersopt):
  1137         if obsolete.isenabled(repo, obsolete.createmarkersopt):
  1131             # If a node is already obsoleted, and we want to obsolete it
  1138             # If a node is already obsoleted, and we want to obsolete it
  1132             # without a successor, skip that obsolete request since it's
  1139             # without a successor, skip that obsolete request since it's
  1133             # unnecessary. That's the "if s or not isobs(n)" check below.
  1140             # unnecessary. That's the "if s or not isobs(n)" check below.
  1169 
  1176 
  1170 def addremove(repo, matcher, prefix, uipathfn, opts=None):
  1177 def addremove(repo, matcher, prefix, uipathfn, opts=None):
  1171     if opts is None:
  1178     if opts is None:
  1172         opts = {}
  1179         opts = {}
  1173     m = matcher
  1180     m = matcher
  1174     dry_run = opts.get('dry_run')
  1181     dry_run = opts.get(b'dry_run')
  1175     try:
  1182     try:
  1176         similarity = float(opts.get('similarity') or 0)
  1183         similarity = float(opts.get(b'similarity') or 0)
  1177     except ValueError:
  1184     except ValueError:
  1178         raise error.Abort(_('similarity must be a number'))
  1185         raise error.Abort(_(b'similarity must be a number'))
  1179     if similarity < 0 or similarity > 100:
  1186     if similarity < 0 or similarity > 100:
  1180         raise error.Abort(_('similarity must be between 0 and 100'))
  1187         raise error.Abort(_(b'similarity must be between 0 and 100'))
  1181     similarity /= 100.0
  1188     similarity /= 100.0
  1182 
  1189 
  1183     ret = 0
  1190     ret = 0
  1184 
  1191 
  1185     wctx = repo[None]
  1192     wctx = repo[None]
  1186     for subpath in sorted(wctx.substate):
  1193     for subpath in sorted(wctx.substate):
  1187         submatch = matchmod.subdirmatcher(subpath, m)
  1194         submatch = matchmod.subdirmatcher(subpath, m)
  1188         if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
  1195         if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
  1189             sub = wctx.sub(subpath)
  1196             sub = wctx.sub(subpath)
  1190             subprefix = repo.wvfs.reljoin(prefix, subpath)
  1197             subprefix = repo.wvfs.reljoin(prefix, subpath)
  1191             subuipathfn = subdiruipathfn(subpath, uipathfn)
  1198             subuipathfn = subdiruipathfn(subpath, uipathfn)
  1192             try:
  1199             try:
  1193                 if sub.addremove(submatch, subprefix, subuipathfn, opts):
  1200                 if sub.addremove(submatch, subprefix, subuipathfn, opts):
  1194                     ret = 1
  1201                     ret = 1
  1195             except error.LookupError:
  1202             except error.LookupError:
  1196                 repo.ui.status(
  1203                 repo.ui.status(
  1197                     _("skipping missing subrepository: %s\n")
  1204                     _(b"skipping missing subrepository: %s\n")
  1198                     % uipathfn(subpath)
  1205                     % uipathfn(subpath)
  1199                 )
  1206                 )
  1200 
  1207 
  1201     rejected = []
  1208     rejected = []
  1202 
  1209 
  1214     toprint = unknownset.copy()
  1221     toprint = unknownset.copy()
  1215     toprint.update(deleted)
  1222     toprint.update(deleted)
  1216     for abs in sorted(toprint):
  1223     for abs in sorted(toprint):
  1217         if repo.ui.verbose or not m.exact(abs):
  1224         if repo.ui.verbose or not m.exact(abs):
  1218             if abs in unknownset:
  1225             if abs in unknownset:
  1219                 status = _('adding %s\n') % uipathfn(abs)
  1226                 status = _(b'adding %s\n') % uipathfn(abs)
  1220                 label = 'ui.addremove.added'
  1227                 label = b'ui.addremove.added'
  1221             else:
  1228             else:
  1222                 status = _('removing %s\n') % uipathfn(abs)
  1229                 status = _(b'removing %s\n') % uipathfn(abs)
  1223                 label = 'ui.addremove.removed'
  1230                 label = b'ui.addremove.removed'
  1224             repo.ui.status(status, label=label)
  1231             repo.ui.status(status, label=label)
  1225 
  1232 
  1226     renames = _findrenames(
  1233     renames = _findrenames(
  1227         repo, m, added + unknown, removed + deleted, similarity, uipathfn
  1234         repo, m, added + unknown, removed + deleted, similarity, uipathfn
  1228     )
  1235     )
  1248         unknownset = set(unknown + forgotten)
  1255         unknownset = set(unknown + forgotten)
  1249         toprint = unknownset.copy()
  1256         toprint = unknownset.copy()
  1250         toprint.update(deleted)
  1257         toprint.update(deleted)
  1251         for abs in sorted(toprint):
  1258         for abs in sorted(toprint):
  1252             if abs in unknownset:
  1259             if abs in unknownset:
  1253                 status = _('adding %s\n') % abs
  1260                 status = _(b'adding %s\n') % abs
  1254             else:
  1261             else:
  1255                 status = _('removing %s\n') % abs
  1262                 status = _(b'removing %s\n') % abs
  1256             repo.ui.status(status)
  1263             repo.ui.status(status)
  1257 
  1264 
  1258     # TODO: We should probably have the caller pass in uipathfn and apply it to
  1265     # TODO: We should probably have the caller pass in uipathfn and apply it to
  1259     # the messages above too. legacyrelativevalue=True is consistent with how
  1266     # the messages above too. legacyrelativevalue=True is consistent with how
  1260     # it used to work.
  1267     # it used to work.
  1290         ignored=False,
  1297         ignored=False,
  1291         full=False,
  1298         full=False,
  1292     )
  1299     )
  1293     for abs, st in walkresults.iteritems():
  1300     for abs, st in walkresults.iteritems():
  1294         dstate = dirstate[abs]
  1301         dstate = dirstate[abs]
  1295         if dstate == '?' and audit_path.check(abs):
  1302         if dstate == b'?' and audit_path.check(abs):
  1296             unknown.append(abs)
  1303             unknown.append(abs)
  1297         elif dstate != 'r' and not st:
  1304         elif dstate != b'r' and not st:
  1298             deleted.append(abs)
  1305             deleted.append(abs)
  1299         elif dstate == 'r' and st:
  1306         elif dstate == b'r' and st:
  1300             forgotten.append(abs)
  1307             forgotten.append(abs)
  1301         # for finding renames
  1308         # for finding renames
  1302         elif dstate == 'r' and not st:
  1309         elif dstate == b'r' and not st:
  1303             removed.append(abs)
  1310             removed.append(abs)
  1304         elif dstate == 'a':
  1311         elif dstate == b'a':
  1305             added.append(abs)
  1312             added.append(abs)
  1306 
  1313 
  1307     return added, unknown, deleted, removed, forgotten
  1314     return added, unknown, deleted, removed, forgotten
  1308 
  1315 
  1309 
  1316 
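# Reference sketch (not part of this change): the one-character dirstate
# states consulted by the walk above; the dict below is illustrative only.
DIRSTATE_STATES = {
    b'n': 'normal (tracked)',
    b'a': 'added',
    b'r': 'removed',
    b'm': 'merged from the other parent',
    b'?': 'untracked / unknown',
}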
  1319                 or not matcher.exact(old)
  1326                 or not matcher.exact(old)
  1320                 or not matcher.exact(new)
  1327                 or not matcher.exact(new)
  1321             ):
  1328             ):
  1322                 repo.ui.status(
  1329                 repo.ui.status(
  1323                     _(
  1330                     _(
  1324                         'recording removal of %s as rename to %s '
  1331                         b'recording removal of %s as rename to %s '
  1325                         '(%d%% similar)\n'
  1332                         b'(%d%% similar)\n'
  1326                     )
  1333                     )
  1327                     % (uipathfn(old), uipathfn(new), score * 100)
  1334                     % (uipathfn(old), uipathfn(new), score * 100)
  1328                 )
  1335                 )
  1329             renames[new] = old
  1336             renames[new] = old
  1330     return renames
  1337     return renames
  1417     """Update the dirstate to reflect the intent of copying src to dst. For
  1424     """Update the dirstate to reflect the intent of copying src to dst. For
  1418     different reasons it might not end with dst being marked as copied from src.
  1425     different reasons it might not end with dst being marked as copied from src.
  1419     """
  1426     """
  1420     origsrc = repo.dirstate.copied(src) or src
  1427     origsrc = repo.dirstate.copied(src) or src
  1421     if dst == origsrc:  # copying back a copy?
  1428     if dst == origsrc:  # copying back a copy?
  1422         if repo.dirstate[dst] not in 'mn' and not dryrun:
  1429         if repo.dirstate[dst] not in b'mn' and not dryrun:
  1423             repo.dirstate.normallookup(dst)
  1430             repo.dirstate.normallookup(dst)
  1424     else:
  1431     else:
  1425         if repo.dirstate[origsrc] == 'a' and origsrc == src:
  1432         if repo.dirstate[origsrc] == b'a' and origsrc == src:
  1426             if not ui.quiet:
  1433             if not ui.quiet:
  1427                 ui.warn(
  1434                 ui.warn(
  1428                     _(
  1435                     _(
  1429                         "%s has not been committed yet, so no copy "
  1436                         b"%s has not been committed yet, so no copy "
  1430                         "data will be stored for %s.\n"
  1437                         b"data will be stored for %s.\n"
  1431                     )
  1438                     )
  1432                     % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
  1439                     % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
  1433                 )
  1440                 )
  1434             if repo.dirstate[dst] in '?r' and not dryrun:
  1441             if repo.dirstate[dst] in b'?r' and not dryrun:
  1435                 wctx.add([dst])
  1442                 wctx.add([dst])
  1436         elif not dryrun:
  1443         elif not dryrun:
  1437             wctx.copy(origsrc, dst)
  1444             wctx.copy(origsrc, dst)
  1438 
  1445 
  1439 
  1446 
  1442 
  1449 
  1443     A matcher can be provided as an optimization. It is probably a bug to pass
  1450     A matcher can be provided as an optimization. It is probably a bug to pass
  1444     a matcher that doesn't match all the differences between the parent of the
  1451     a matcher that doesn't match all the differences between the parent of the
  1445     working copy and newctx.
  1452     working copy and newctx.
  1446     """
  1453     """
  1447     oldctx = repo['.']
  1454     oldctx = repo[b'.']
  1448     ds = repo.dirstate
  1455     ds = repo.dirstate
  1449     ds.setparents(newctx.node(), nullid)
  1456     ds.setparents(newctx.node(), nullid)
  1450     copies = dict(ds.copies())
  1457     copies = dict(ds.copies())
  1451     s = newctx.status(oldctx, match=match)
  1458     s = newctx.status(oldctx, match=match)
  1452     for f in s.modified:
  1459     for f in s.modified:
  1453         if ds[f] == 'r':
  1460         if ds[f] == b'r':
  1454             # modified + removed -> removed
  1461             # modified + removed -> removed
  1455             continue
  1462             continue
  1456         ds.normallookup(f)
  1463         ds.normallookup(f)
  1457 
  1464 
  1458     for f in s.added:
  1465     for f in s.added:
  1459         if ds[f] == 'r':
  1466         if ds[f] == b'r':
  1460             # added + removed -> unknown
  1467             # added + removed -> unknown
  1461             ds.drop(f)
  1468             ds.drop(f)
  1462         elif ds[f] != 'a':
  1469         elif ds[f] != b'a':
  1463             ds.add(f)
  1470             ds.add(f)
  1464 
  1471 
  1465     for f in s.removed:
  1472     for f in s.removed:
  1466         if ds[f] == 'a':
  1473         if ds[f] == b'a':
  1467             # removed + added -> normal
  1474             # removed + added -> normal
  1468             ds.normallookup(f)
  1475             ds.normallookup(f)
  1469         elif ds[f] != 'r':
  1476         elif ds[f] != b'r':
  1470             ds.remove(f)
  1477             ds.remove(f)
  1471 
  1478 
  1472     # Merge old parent and old working dir copies
  1479     # Merge old parent and old working dir copies
  1473     oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
  1480     oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
  1474     oldcopies.update(copies)
  1481     oldcopies.update(copies)
  1475     copies = dict(
  1482     copies = dict(
  1476         (dst, oldcopies.get(src, src)) for dst, src in oldcopies.iteritems()
  1483         (dst, oldcopies.get(src, src)) for dst, src in oldcopies.iteritems()
  1477     )
  1484     )
  1478     # Adjust the dirstate copies
  1485     # Adjust the dirstate copies
  1479     for dst, src in copies.iteritems():
  1486     for dst, src in copies.iteritems():
  1480         if src not in newctx or dst in newctx or ds[dst] != 'a':
  1487         if src not in newctx or dst in newctx or ds[dst] != b'a':
  1481             src = None
  1488             src = None
  1482         ds.copy(src, dst)
  1489         ds.copy(src, dst)
  1483 
  1490 
  1484 
  1491 
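# Summary sketch (restating the branches above, not new logic): how the
# dirstate is adjusted when the working-directory parent moves to newctx.
#
#   status vs. newctx   dirstate entry   action taken
#   modified            'r'              keep as removed
#   modified            other            normallookup
#   added               'r'              drop (file becomes unknown)
#   added               not 'a'          add
#   removed             'a'              normallookup (back to normal)
#   removed             not 'r'          remove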
  1485 def writerequires(opener, requirements):
  1492 def writerequires(opener, requirements):
  1486     with opener('requires', 'w', atomictemp=True) as fp:
  1493     with opener(b'requires', b'w', atomictemp=True) as fp:
  1487         for r in sorted(requirements):
  1494         for r in sorted(requirements):
  1488             fp.write("%s\n" % r)
  1495             fp.write(b"%s\n" % r)
  1489 
  1496 
  1490 
  1497 
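# Hedged usage sketch for writerequires(): the opener is a vfs-like callable
# rooted at the .hg directory. The path and the use of mercurial.vfs.vfs are
# assumptions for illustration, not taken from this change.
from mercurial import vfs as vfsmod

opener = vfsmod.vfs(b'/path/to/repo/.hg')           # hypothetical location
writerequires(opener, {b'generaldelta', b'revlogv1', b'store'})
# .hg/requires now lists each requirement on its own line, sorted:
#   generaldelta
#   revlogv1
#   store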
  1491 class filecachesubentry(object):
  1498 class filecachesubentry(object):
  1492     def __init__(self, path, stat):
  1499     def __init__(self, path, stat):
  1493         self.path = path
  1500         self.path = path
  1662     Note that both key and value are treated as UTF-8 and converted to
  1669     Note that both key and value are treated as UTF-8 and converted to
  1663     the local encoding. This allows uniformity between local and
  1670     the local encoding. This allows uniformity between local and
  1664     remote data sources.
  1671     remote data sources.
  1665     """
  1672     """
  1666 
  1673 
  1667     spec = repo.ui.config("extdata", source)
  1674     spec = repo.ui.config(b"extdata", source)
  1668     if not spec:
  1675     if not spec:
  1669         raise error.Abort(_("unknown extdata source '%s'") % source)
  1676         raise error.Abort(_(b"unknown extdata source '%s'") % source)
  1670 
  1677 
  1671     data = {}
  1678     data = {}
  1672     src = proc = None
  1679     src = proc = None
  1673     try:
  1680     try:
  1674         if spec.startswith("shell:"):
  1681         if spec.startswith(b"shell:"):
  1675             # external commands should be run relative to the repo root
  1682             # external commands should be run relative to the repo root
  1676             cmd = spec[6:]
  1683             cmd = spec[6:]
  1677             proc = subprocess.Popen(
  1684             proc = subprocess.Popen(
  1678                 procutil.tonativestr(cmd),
  1685                 procutil.tonativestr(cmd),
  1679                 shell=True,
  1686                 shell=True,
  1685             src = proc.stdout
  1692             src = proc.stdout
  1686         else:
  1693         else:
  1687             # treat as a URL or file
  1694             # treat as a URL or file
  1688             src = url.open(repo.ui, spec)
  1695             src = url.open(repo.ui, spec)
  1689         for l in src:
  1696         for l in src:
  1690             if " " in l:
  1697             if b" " in l:
  1691                 k, v = l.strip().split(" ", 1)
  1698                 k, v = l.strip().split(b" ", 1)
  1692             else:
  1699             else:
  1693                 k, v = l.strip(), ""
  1700                 k, v = l.strip(), b""
  1694 
  1701 
  1695             k = encoding.tolocal(k)
  1702             k = encoding.tolocal(k)
  1696             try:
  1703             try:
  1697                 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
  1704                 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
  1698             except (error.LookupError, error.RepoLookupError):
  1705             except (error.LookupError, error.RepoLookupError):
  1707                 pass
  1714                 pass
  1708         if src:
  1715         if src:
  1709             src.close()
  1716             src.close()
  1710     if proc and proc.returncode != 0:
  1717     if proc and proc.returncode != 0:
  1711         raise error.Abort(
  1718         raise error.Abort(
  1712             _("extdata command '%s' failed: %s")
  1719             _(b"extdata command '%s' failed: %s")
  1713             % (cmd, procutil.explainexit(proc.returncode))
  1720             % (cmd, procutil.explainexit(proc.returncode))
  1714         )
  1721         )
  1715 
  1722 
  1716     return data
  1723     return data
  1717 
  1724 
  1718 
  1725 
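# Hedged usage sketch for the extdata reader above, assuming an in-scope
# localrepo object `repo` and an hgrc entry such as
#
#   [extdata]
#   bugrefs = shell:cat .hg/bugrefs
#
# where every output line is "<revision symbol> <value>". The source name
# 'bugrefs' is hypothetical, and the function name extdatasource is assumed
# here because its def line falls outside this hunk.
data = extdatasource(repo, b'bugrefs')
# data maps revision numbers to locally encoded values, e.g. {0: b'issue42'}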
  1719 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
  1726 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
  1720     if lock is None:
  1727     if lock is None:
  1721         raise error.LockInheritanceContractViolation(
  1728         raise error.LockInheritanceContractViolation(
  1722             'lock can only be inherited while held'
  1729             b'lock can only be inherited while held'
  1723         )
  1730         )
  1724     if environ is None:
  1731     if environ is None:
  1725         environ = {}
  1732         environ = {}
  1726     with lock.inherit() as locker:
  1733     with lock.inherit() as locker:
  1727         environ[envvar] = locker
  1734         environ[envvar] = locker
  1733 
  1740 
  1734     This can only be called while the wlock is held. This takes all the
  1741     This can only be called while the wlock is held. This takes all the
  1735     arguments that ui.system does, and returns the exit code of the
  1742     arguments that ui.system does, and returns the exit code of the
  1736     subprocess."""
  1743     subprocess."""
  1737     return _locksub(
  1744     return _locksub(
  1738         repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args, **kwargs
  1745         repo, repo.currentwlock(), b'HG_WLOCK_LOCKER', cmd, *args, **kwargs
  1739     )
  1746     )
  1740 
  1747 
  1741 
  1748 
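# Hedged sketch: run a shell command while the working-directory lock is held,
# letting the child reuse it through HG_WLOCK_LOCKER. The helper name wlocksub
# is assumed (its def line falls outside this hunk), the command is
# hypothetical, and `repo` is an in-scope localrepo object.
with repo.wlock():
    rc = wlocksub(repo, b'hg debuglocks')
    if rc != 0:
        repo.ui.warn(b'subcommand failed\n')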
  1742 class progress(object):
  1749 class progress(object):
  1743     def __init__(self, ui, updatebar, topic, unit="", total=None):
  1750     def __init__(self, ui, updatebar, topic, unit=b"", total=None):
  1744         self.ui = ui
  1751         self.ui = ui
  1745         self.pos = 0
  1752         self.pos = 0
  1746         self.topic = topic
  1753         self.topic = topic
  1747         self.unit = unit
  1754         self.unit = unit
  1748         self.total = total
  1755         self.total = total
  1749         self.debug = ui.configbool('progress', 'debug')
  1756         self.debug = ui.configbool(b'progress', b'debug')
  1750         self._updatebar = updatebar
  1757         self._updatebar = updatebar
  1751 
  1758 
  1752     def __enter__(self):
  1759     def __enter__(self):
  1753         return self
  1760         return self
  1754 
  1761 
  1755     def __exit__(self, exc_type, exc_value, exc_tb):
  1762     def __exit__(self, exc_type, exc_value, exc_tb):
  1756         self.complete()
  1763         self.complete()
  1757 
  1764 
  1758     def update(self, pos, item="", total=None):
  1765     def update(self, pos, item=b"", total=None):
  1759         assert pos is not None
  1766         assert pos is not None
  1760         if total:
  1767         if total:
  1761             self.total = total
  1768             self.total = total
  1762         self.pos = pos
  1769         self.pos = pos
  1763         self._updatebar(self.topic, self.pos, item, self.unit, self.total)
  1770         self._updatebar(self.topic, self.pos, item, self.unit, self.total)
  1764         if self.debug:
  1771         if self.debug:
  1765             self._printdebug(item)
  1772             self._printdebug(item)
  1766 
  1773 
  1767     def increment(self, step=1, item="", total=None):
  1774     def increment(self, step=1, item=b"", total=None):
  1768         self.update(self.pos + step, item, total)
  1775         self.update(self.pos + step, item, total)
  1769 
  1776 
  1770     def complete(self):
  1777     def complete(self):
  1771         self.pos = None
  1778         self.pos = None
  1772         self.unit = ""
  1779         self.unit = b""
  1773         self.total = None
  1780         self.total = None
  1774         self._updatebar(self.topic, self.pos, "", self.unit, self.total)
  1781         self._updatebar(self.topic, self.pos, b"", self.unit, self.total)
  1775 
  1782 
  1776     def _printdebug(self, item):
  1783     def _printdebug(self, item):
  1777         if self.unit:
  1784         if self.unit:
  1778             unit = ' ' + self.unit
  1785             unit = b' ' + self.unit
  1779         if item:
  1786         if item:
  1780             item = ' ' + item
  1787             item = b' ' + item
  1781 
  1788 
  1782         if self.total:
  1789         if self.total:
  1783             pct = 100.0 * self.pos / self.total
  1790             pct = 100.0 * self.pos / self.total
  1784             self.ui.debug(
  1791             self.ui.debug(
  1785                 '%s:%s %d/%d%s (%4.2f%%)\n'
  1792                 b'%s:%s %d/%d%s (%4.2f%%)\n'
  1786                 % (self.topic, item, self.pos, self.total, unit, pct)
  1793                 % (self.topic, item, self.pos, self.total, unit, pct)
  1787             )
  1794             )
  1788         else:
  1795         else:
  1789             self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
  1796             self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
  1790 
  1797 
  1791 
  1798 
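# Hedged usage sketch for the progress helper above. Callers normally obtain
# an instance through ui.makeprogress() (assumed here; it supplies the real
# updatebar) and drive it as a context manager so complete() always runs.
def _scanfiles(ui, files):
    with ui.makeprogress(b'scanning', unit=b'files', total=len(files)) as prog:
        for f in files:
            prog.increment(item=f)      # advance by one and refresh the bar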
  1792 def gdinitconfig(ui):
  1799 def gdinitconfig(ui):
  1793     """helper function to know if a repo should be created as general delta
  1800     """helper function to know if a repo should be created as general delta
  1794     """
  1801     """
  1795     # experimental config: format.generaldelta
  1802     # experimental config: format.generaldelta
  1796     return ui.configbool('format', 'generaldelta') or ui.configbool(
  1803     return ui.configbool(b'format', b'generaldelta') or ui.configbool(
  1797         'format', 'usegeneraldelta'
  1804         b'format', b'usegeneraldelta'
  1798     )
  1805     )
  1799 
  1806 
  1800 
  1807 
  1801 def gddeltaconfig(ui):
  1808 def gddeltaconfig(ui):
  1802     """helper function to know if incoming delta should be optimised
  1809     """helper function to know if incoming delta should be optimised
  1803     """
  1810     """
  1804     # experimental config: format.generaldelta
  1811     # experimental config: format.generaldelta
  1805     return ui.configbool('format', 'generaldelta')
  1812     return ui.configbool(b'format', b'generaldelta')
  1806 
  1813 
  1807 
  1814 
  1808 class simplekeyvaluefile(object):
  1815 class simplekeyvaluefile(object):
  1809     """A simple file with key=value lines
  1816     """A simple file with key=value lines
  1810 
  1817 
   1811     Keys must be alphanumeric and start with a letter; values must not
   1818     Keys must be alphanumeric and start with a letter; values must not
   1812     contain '\n' characters"""
   1819     contain '\n' characters"""
  1813 
  1820 
  1814     firstlinekey = '__firstline'
  1821     firstlinekey = b'__firstline'
  1815 
  1822 
  1816     def __init__(self, vfs, path, keys=None):
  1823     def __init__(self, vfs, path, keys=None):
  1817         self.vfs = vfs
  1824         self.vfs = vfs
  1818         self.path = path
  1825         self.path = path
  1819 
  1826 
  1825         __firstline key."""
  1832         __firstline key."""
  1826         lines = self.vfs.readlines(self.path)
  1833         lines = self.vfs.readlines(self.path)
  1827         d = {}
  1834         d = {}
  1828         if firstlinenonkeyval:
  1835         if firstlinenonkeyval:
  1829             if not lines:
  1836             if not lines:
  1830                 e = _("empty simplekeyvalue file")
  1837                 e = _(b"empty simplekeyvalue file")
  1831                 raise error.CorruptedState(e)
  1838                 raise error.CorruptedState(e)
  1832             # we don't want to include '\n' in the __firstline
  1839             # we don't want to include '\n' in the __firstline
  1833             d[self.firstlinekey] = lines[0][:-1]
  1840             d[self.firstlinekey] = lines[0][:-1]
  1834             del lines[0]
  1841             del lines[0]
  1835 
  1842 
  1836         try:
  1843         try:
   1837             # the 'if line.strip()' part prevents us from failing on lines
   1844             # the 'if line.strip()' part prevents us from failing on lines
   1838             # that contain only '\n' and therefore are not skipped by a
   1845             # that contain only '\n' and therefore are not skipped by a
   1839             # plain 'if line' check
   1846             # plain 'if line' check
  1840             updatedict = dict(
  1847             updatedict = dict(
  1841                 line[:-1].split('=', 1) for line in lines if line.strip()
  1848                 line[:-1].split(b'=', 1) for line in lines if line.strip()
  1842             )
  1849             )
  1843             if self.firstlinekey in updatedict:
  1850             if self.firstlinekey in updatedict:
  1844                 e = _("%r can't be used as a key")
  1851                 e = _(b"%r can't be used as a key")
  1845                 raise error.CorruptedState(e % self.firstlinekey)
  1852                 raise error.CorruptedState(e % self.firstlinekey)
  1846             d.update(updatedict)
  1853             d.update(updatedict)
  1847         except ValueError as e:
  1854         except ValueError as e:
  1848             raise error.CorruptedState(str(e))
  1855             raise error.CorruptedState(str(e))
  1849         return d
  1856         return d
  1855 
  1862 
   1856         If 'firstline' is not None, it is written to the file before
   1863         If 'firstline' is not None, it is written to the file before
   1857         everything else as-is, not in key=value form"""
   1864         everything else as-is, not in key=value form"""
  1858         lines = []
  1865         lines = []
  1859         if firstline is not None:
  1866         if firstline is not None:
  1860             lines.append('%s\n' % firstline)
  1867             lines.append(b'%s\n' % firstline)
  1861 
  1868 
  1862         for k, v in data.items():
  1869         for k, v in data.items():
  1863             if k == self.firstlinekey:
  1870             if k == self.firstlinekey:
  1864                 e = "key name '%s' is reserved" % self.firstlinekey
  1871                 e = b"key name '%s' is reserved" % self.firstlinekey
  1865                 raise error.ProgrammingError(e)
  1872                 raise error.ProgrammingError(e)
  1866             if not k[0:1].isalpha():
  1873             if not k[0:1].isalpha():
  1867                 e = "keys must start with a letter in a key-value file"
  1874                 e = b"keys must start with a letter in a key-value file"
  1868                 raise error.ProgrammingError(e)
  1875                 raise error.ProgrammingError(e)
  1869             if not k.isalnum():
  1876             if not k.isalnum():
  1870                 e = "invalid key name in a simple key-value file"
  1877                 e = b"invalid key name in a simple key-value file"
  1871                 raise error.ProgrammingError(e)
  1878                 raise error.ProgrammingError(e)
  1872             if '\n' in v:
  1879             if b'\n' in v:
  1873                 e = "invalid value in a simple key-value file"
  1880                 e = b"invalid value in a simple key-value file"
  1874                 raise error.ProgrammingError(e)
  1881                 raise error.ProgrammingError(e)
  1875             lines.append("%s=%s\n" % (k, v))
  1882             lines.append(b"%s=%s\n" % (k, v))
  1876         with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
  1883         with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
  1877             fp.write(''.join(lines))
  1884             fp.write(b''.join(lines))
  1878 
  1885 
  1879 
  1886 
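# Hedged round-trip sketch for simplekeyvaluefile. The vfs base directory is
# an assumption for illustration; keys must be alphanumeric and values must
# not contain newlines, as enforced above.
from mercurial import vfs as vfsmod

kvfile = simplekeyvaluefile(vfsmod.vfs(b'/tmp/demo'), b'state')
kvfile.write({b'step': b'apply', b'version': b'1'}, firstline=b'v1')
# The file now holds the raw first line followed by one key=value per line:
#   v1
#   step=apply
#   version=1
data = kvfile.read(firstlinenonkeyval=True)
# data == {b'__firstline': b'v1', b'step': b'apply', b'version': b'1'}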
  1880 _reportobsoletedsource = [
  1887 _reportobsoletedsource = [
  1881     'debugobsolete',
  1888     b'debugobsolete',
  1882     'pull',
  1889     b'pull',
  1883     'push',
  1890     b'push',
  1884     'serve',
  1891     b'serve',
  1885     'unbundle',
  1892     b'unbundle',
  1886 ]
  1893 ]
  1887 
  1894 
  1888 _reportnewcssource = [
  1895 _reportnewcssource = [
  1889     'pull',
  1896     b'pull',
  1890     'unbundle',
  1897     b'unbundle',
  1891 ]
  1898 ]
  1892 
  1899 
  1893 
  1900 
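# Hedged illustration of how these source lists are used: the transaction name
# is matched by prefix (see txmatch below), so a transaction named
# b'pull\nhttps://...' triggers both the obsolescence and new-changeset
# summaries, while b'commit' triggers neither. The helper and the sample
# transaction names are illustrative.
def _matches(txnname, sources):
    return any(txnname.startswith(source) for source in sources)

_matches(b'pull\nhttps://example.com/repo', _reportnewcssource)   # True
_matches(b'commit', _reportnewcssource)                           # False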
  1894 def prefetchfiles(repo, revs, match):
  1901 def prefetchfiles(repo, revs, match):
  1895     """Invokes the registered file prefetch functions, allowing extensions to
  1902     """Invokes the registered file prefetch functions, allowing extensions to
  1910 
  1917 
  1911 # A marker that tells the evolve extension to suppress its own reporting
  1918 # A marker that tells the evolve extension to suppress its own reporting
  1912 _reportstroubledchangesets = True
  1919 _reportstroubledchangesets = True
  1913 
  1920 
  1914 
  1921 
  1915 def registersummarycallback(repo, otr, txnname=''):
  1922 def registersummarycallback(repo, otr, txnname=b''):
  1916     """register a callback to issue a summary after the transaction is closed
  1923     """register a callback to issue a summary after the transaction is closed
  1917     """
  1924     """
  1918 
  1925 
  1919     def txmatch(sources):
  1926     def txmatch(sources):
  1920         return any(txnname.startswith(source) for source in sources)
  1927         return any(txnname.startswith(source) for source in sources)
  1935             repo = reporef()
  1942             repo = reporef()
  1936             if filtername:
  1943             if filtername:
  1937                 repo = repo.filtered(filtername)
  1944                 repo = repo.filtered(filtername)
  1938             func(repo, tr)
  1945             func(repo, tr)
  1939 
  1946 
  1940         newcat = '%02i-txnreport' % len(categories)
  1947         newcat = b'%02i-txnreport' % len(categories)
  1941         otr.addpostclose(newcat, wrapped)
  1948         otr.addpostclose(newcat, wrapped)
  1942         categories.append(newcat)
  1949         categories.append(newcat)
  1943         return wrapped
  1950         return wrapped
  1944 
  1951 
  1945     @reportsummary
  1952     @reportsummary
  1946     def reportchangegroup(repo, tr):
  1953     def reportchangegroup(repo, tr):
  1947         cgchangesets = tr.changes.get('changegroup-count-changesets', 0)
  1954         cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
  1948         cgrevisions = tr.changes.get('changegroup-count-revisions', 0)
  1955         cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
  1949         cgfiles = tr.changes.get('changegroup-count-files', 0)
  1956         cgfiles = tr.changes.get(b'changegroup-count-files', 0)
  1950         cgheads = tr.changes.get('changegroup-count-heads', 0)
  1957         cgheads = tr.changes.get(b'changegroup-count-heads', 0)
  1951         if cgchangesets or cgrevisions or cgfiles:
  1958         if cgchangesets or cgrevisions or cgfiles:
  1952             htext = ""
  1959             htext = b""
  1953             if cgheads:
  1960             if cgheads:
  1954                 htext = _(" (%+d heads)") % cgheads
  1961                 htext = _(b" (%+d heads)") % cgheads
  1955             msg = _("added %d changesets with %d changes to %d files%s\n")
  1962             msg = _(b"added %d changesets with %d changes to %d files%s\n")
  1956             repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
  1963             repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
  1957 
  1964 
  1958     if txmatch(_reportobsoletedsource):
  1965     if txmatch(_reportobsoletedsource):
  1959 
  1966 
  1960         @reportsummary
  1967         @reportsummary
  1961         def reportobsoleted(repo, tr):
  1968         def reportobsoleted(repo, tr):
  1962             obsoleted = obsutil.getobsoleted(repo, tr)
  1969             obsoleted = obsutil.getobsoleted(repo, tr)
  1963             newmarkers = len(tr.changes.get('obsmarkers', ()))
  1970             newmarkers = len(tr.changes.get(b'obsmarkers', ()))
  1964             if newmarkers:
  1971             if newmarkers:
  1965                 repo.ui.status(_('%i new obsolescence markers\n') % newmarkers)
  1972                 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
  1966             if obsoleted:
  1973             if obsoleted:
  1967                 repo.ui.status(_('obsoleted %i changesets\n') % len(obsoleted))
  1974                 repo.ui.status(_(b'obsoleted %i changesets\n') % len(obsoleted))
  1968 
  1975 
  1969     if obsolete.isenabled(
  1976     if obsolete.isenabled(
  1970         repo, obsolete.createmarkersopt
  1977         repo, obsolete.createmarkersopt
  1971     ) and repo.ui.configbool('experimental', 'evolution.report-instabilities'):
  1978     ) and repo.ui.configbool(
       
  1979         b'experimental', b'evolution.report-instabilities'
       
  1980     ):
  1972         instabilitytypes = [
  1981         instabilitytypes = [
  1973             ('orphan', 'orphan'),
  1982             (b'orphan', b'orphan'),
  1974             ('phase-divergent', 'phasedivergent'),
  1983             (b'phase-divergent', b'phasedivergent'),
  1975             ('content-divergent', 'contentdivergent'),
  1984             (b'content-divergent', b'contentdivergent'),
  1976         ]
  1985         ]
  1977 
  1986 
  1978         def getinstabilitycounts(repo):
  1987         def getinstabilitycounts(repo):
  1979             filtered = repo.changelog.filteredrevs
  1988             filtered = repo.changelog.filteredrevs
  1980             counts = {}
  1989             counts = {}
  2001     if txmatch(_reportnewcssource):
  2010     if txmatch(_reportnewcssource):
  2002 
  2011 
  2003         @reportsummary
  2012         @reportsummary
  2004         def reportnewcs(repo, tr):
  2013         def reportnewcs(repo, tr):
  2005             """Report the range of new revisions pulled/unbundled."""
  2014             """Report the range of new revisions pulled/unbundled."""
  2006             origrepolen = tr.changes.get('origrepolen', len(repo))
  2015             origrepolen = tr.changes.get(b'origrepolen', len(repo))
  2007             unfi = repo.unfiltered()
  2016             unfi = repo.unfiltered()
  2008             if origrepolen >= len(unfi):
  2017             if origrepolen >= len(unfi):
  2009                 return
  2018                 return
  2010 
  2019 
  2011             # Compute the bounds of new visible revisions' range.
  2020             # Compute the bounds of new visible revisions' range.
  2014                 minrev, maxrev = repo[revs.min()], repo[revs.max()]
  2023                 minrev, maxrev = repo[revs.min()], repo[revs.max()]
  2015 
  2024 
  2016                 if minrev == maxrev:
  2025                 if minrev == maxrev:
  2017                     revrange = minrev
  2026                     revrange = minrev
  2018                 else:
  2027                 else:
  2019                     revrange = '%s:%s' % (minrev, maxrev)
  2028                     revrange = b'%s:%s' % (minrev, maxrev)
  2020                 draft = len(repo.revs('%ld and draft()', revs))
  2029                 draft = len(repo.revs(b'%ld and draft()', revs))
  2021                 secret = len(repo.revs('%ld and secret()', revs))
  2030                 secret = len(repo.revs(b'%ld and secret()', revs))
  2022                 if not (draft or secret):
  2031                 if not (draft or secret):
  2023                     msg = _('new changesets %s\n') % revrange
  2032                     msg = _(b'new changesets %s\n') % revrange
  2024                 elif draft and secret:
  2033                 elif draft and secret:
  2025                     msg = _('new changesets %s (%d drafts, %d secrets)\n')
  2034                     msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
  2026                     msg %= (revrange, draft, secret)
  2035                     msg %= (revrange, draft, secret)
  2027                 elif draft:
  2036                 elif draft:
  2028                     msg = _('new changesets %s (%d drafts)\n')
  2037                     msg = _(b'new changesets %s (%d drafts)\n')
  2029                     msg %= (revrange, draft)
  2038                     msg %= (revrange, draft)
  2030                 elif secret:
  2039                 elif secret:
  2031                     msg = _('new changesets %s (%d secrets)\n')
  2040                     msg = _(b'new changesets %s (%d secrets)\n')
  2032                     msg %= (revrange, secret)
  2041                     msg %= (revrange, secret)
  2033                 else:
  2042                 else:
  2034                     errormsg = 'entered unreachable condition'
  2043                     errormsg = b'entered unreachable condition'
  2035                     raise error.ProgrammingError(errormsg)
  2044                     raise error.ProgrammingError(errormsg)
  2036                 repo.ui.status(msg)
  2045                 repo.ui.status(msg)
  2037 
  2046 
  2038             # search new changesets directly pulled as obsolete
  2047             # search new changesets directly pulled as obsolete
  2039             duplicates = tr.changes.get('revduplicates', ())
  2048             duplicates = tr.changes.get(b'revduplicates', ())
  2040             obsadded = unfi.revs(
  2049             obsadded = unfi.revs(
  2041                 '(%d: + %ld) and obsolete()', origrepolen, duplicates
  2050                 b'(%d: + %ld) and obsolete()', origrepolen, duplicates
  2042             )
  2051             )
  2043             cl = repo.changelog
  2052             cl = repo.changelog
  2044             extinctadded = [r for r in obsadded if r not in cl]
  2053             extinctadded = [r for r in obsadded if r not in cl]
  2045             if extinctadded:
  2054             if extinctadded:
  2046                 # They are not just obsolete, but obsolete and invisible
  2055                 # They are not just obsolete, but obsolete and invisible
   2047                 # we call them "extinct" internally but the term has not been
   2056                 # we call them "extinct" internally but the term has not been
  2048                 # exposed to users.
  2057                 # exposed to users.
  2049                 msg = '(%d other changesets obsolete on arrival)\n'
  2058                 msg = b'(%d other changesets obsolete on arrival)\n'
  2050                 repo.ui.status(msg % len(extinctadded))
  2059                 repo.ui.status(msg % len(extinctadded))
  2051 
  2060 
  2052         @reportsummary
  2061         @reportsummary
  2053         def reportphasechanges(repo, tr):
  2062         def reportphasechanges(repo, tr):
   2054             """Report statistics of phase changes for changesets that existed
   2063             """Report statistics of phase changes for changesets that existed
   2055             before the pull/unbundle.
   2064             before the pull/unbundle.
  2056             """
  2065             """
  2057             origrepolen = tr.changes.get('origrepolen', len(repo))
  2066             origrepolen = tr.changes.get(b'origrepolen', len(repo))
  2058             phasetracking = tr.changes.get('phases', {})
  2067             phasetracking = tr.changes.get(b'phases', {})
  2059             if not phasetracking:
  2068             if not phasetracking:
  2060                 return
  2069                 return
  2061             published = [
  2070             published = [
  2062                 rev
  2071                 rev
  2063                 for rev, (old, new) in phasetracking.iteritems()
  2072                 for rev, (old, new) in phasetracking.iteritems()
  2064                 if new == phases.public and rev < origrepolen
  2073                 if new == phases.public and rev < origrepolen
  2065             ]
  2074             ]
  2066             if not published:
  2075             if not published:
  2067                 return
  2076                 return
  2068             repo.ui.status(
  2077             repo.ui.status(
  2069                 _('%d local changesets published\n') % len(published)
  2078                 _(b'%d local changesets published\n') % len(published)
  2070             )
  2079             )
  2071 
  2080 
  2072 
  2081 
  2073 def getinstabilitymessage(delta, instability):
  2082 def getinstabilitymessage(delta, instability):
   2074     """function to return the warning message to show about new instabilities
   2083     """function to return the warning message to show about new instabilities
   2075 
   2084 
   2076     exists as a separate function so that extensions can wrap it to show more
   2085     exists as a separate function so that extensions can wrap it to show more
   2077     information, like how to fix instabilities"""
   2086     information, like how to fix instabilities"""
  2078     if delta > 0:
  2087     if delta > 0:
  2079         return _('%i new %s changesets\n') % (delta, instability)
  2088         return _(b'%i new %s changesets\n') % (delta, instability)
  2080 
  2089 
  2081 
  2090 
  2082 def nodesummaries(repo, nodes, maxnumnodes=4):
  2091 def nodesummaries(repo, nodes, maxnumnodes=4):
  2083     if len(nodes) <= maxnumnodes or repo.ui.verbose:
  2092     if len(nodes) <= maxnumnodes or repo.ui.verbose:
  2084         return ' '.join(short(h) for h in nodes)
  2093         return b' '.join(short(h) for h in nodes)
  2085     first = ' '.join(short(h) for h in nodes[:maxnumnodes])
  2094     first = b' '.join(short(h) for h in nodes[:maxnumnodes])
  2086     return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
  2095     return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
  2087 
  2096 
  2088 
  2097 
  2089 def enforcesinglehead(repo, tr, desc, accountclosed=False):
  2098 def enforcesinglehead(repo, tr, desc, accountclosed=False):
  2090     """check that no named branch has multiple heads"""
  2099     """check that no named branch has multiple heads"""
  2091     if desc in ('strip', 'repair'):
  2100     if desc in (b'strip', b'repair'):
  2092         # skip the logic during strip
  2101         # skip the logic during strip
  2093         return
  2102         return
  2094     visible = repo.filtered('visible')
  2103     visible = repo.filtered(b'visible')
  2095     # possible improvement: we could restrict the check to affected branch
  2104     # possible improvement: we could restrict the check to affected branch
  2096     bm = visible.branchmap()
  2105     bm = visible.branchmap()
  2097     for name in bm:
  2106     for name in bm:
  2098         heads = bm.branchheads(name, closed=accountclosed)
  2107         heads = bm.branchheads(name, closed=accountclosed)
  2099         if len(heads) > 1:
  2108         if len(heads) > 1:
  2100             msg = _('rejecting multiple heads on branch "%s"')
  2109             msg = _(b'rejecting multiple heads on branch "%s"')
  2101             msg %= name
  2110             msg %= name
  2102             hint = _('%d heads: %s')
  2111             hint = _(b'%d heads: %s')
  2103             hint %= (len(heads), nodesummaries(repo, heads))
  2112             hint %= (len(heads), nodesummaries(repo, heads))
  2104             raise error.Abort(msg, hint=hint)
  2113             raise error.Abort(msg, hint=hint)
  2105 
  2114 
  2106 
  2115 
  2107 def wrapconvertsink(sink):
  2116 def wrapconvertsink(sink):
  2119                        2) 'nowarn': don't warn while unhiding changesets
  2128                        2) 'nowarn': don't warn while unhiding changesets
  2120 
  2129 
  2121     returns a repo object with the required changesets unhidden
  2130     returns a repo object with the required changesets unhidden
  2122     """
  2131     """
  2123     if not repo.filtername or not repo.ui.configbool(
  2132     if not repo.filtername or not repo.ui.configbool(
  2124         'experimental', 'directaccess'
  2133         b'experimental', b'directaccess'
  2125     ):
  2134     ):
  2126         return repo
  2135         return repo
  2127 
  2136 
  2128     if repo.filtername not in ('visible', 'visible-hidden'):
  2137     if repo.filtername not in (b'visible', b'visible-hidden'):
  2129         return repo
  2138         return repo
  2130 
  2139 
  2131     symbols = set()
  2140     symbols = set()
  2132     for spec in specs:
  2141     for spec in specs:
  2133         try:
  2142         try:
  2143     revs = _getrevsfromsymbols(repo, symbols)
  2152     revs = _getrevsfromsymbols(repo, symbols)
  2144 
  2153 
  2145     if not revs:
  2154     if not revs:
  2146         return repo
  2155         return repo
  2147 
  2156 
  2148     if hiddentype == 'warn':
  2157     if hiddentype == b'warn':
  2149         unfi = repo.unfiltered()
  2158         unfi = repo.unfiltered()
  2150         revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
  2159         revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
  2151         repo.ui.warn(
  2160         repo.ui.warn(
  2152             _(
  2161             _(
  2153                 "warning: accessing hidden changesets for write "
  2162                 b"warning: accessing hidden changesets for write "
  2154                 "operation: %s\n"
  2163                 b"operation: %s\n"
  2155             )
  2164             )
  2156             % revstr
  2165             % revstr
  2157         )
  2166         )
  2158 
  2167 
   2159     # we have to use a new filtername to separate the branch/tags caches
   2168     # we have to use a new filtername to separate the branch/tags caches
   2160     # until we can disable these caches when revisions are dynamically pinned.
   2169     # until we can disable these caches when revisions are dynamically pinned.
  2161     return repo.filtered('visible-hidden', revs)
  2170     return repo.filtered(b'visible-hidden', revs)
  2162 
  2171 
  2163 
  2172 
   2164     """parse the list of symbols and return a set of revision numbers of hidden
   2173     """parse the list of symbols and return a set of revision numbers of hidden
  2165     """parse the list of symbols and returns a set of revision numbers of hidden
  2174     """parse the list of symbols and returns a set of revision numbers of hidden
  2166     changesets present in symbols"""
  2175     changesets present in symbols"""
  2167     revs = set()
  2176     revs = set()
  2168     unfi = repo.unfiltered()
  2177     unfi = repo.unfiltered()
  2169     unficl = unfi.changelog
  2178     unficl = unfi.changelog
  2170     cl = repo.changelog
  2179     cl = repo.changelog
  2171     tiprev = len(unficl)
  2180     tiprev = len(unficl)
  2172     allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
  2181     allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
  2173     for s in symbols:
  2182     for s in symbols:
  2174         try:
  2183         try:
  2175             n = int(s)
  2184             n = int(s)
  2176             if n <= tiprev:
  2185             if n <= tiprev:
  2177                 if not allowrevnums:
  2186                 if not allowrevnums:
  2199 def bookmarkrevs(repo, mark):
  2208 def bookmarkrevs(repo, mark):
  2200     """
  2209     """
  2201     Select revisions reachable by a given bookmark
  2210     Select revisions reachable by a given bookmark
  2202     """
  2211     """
  2203     return repo.revs(
  2212     return repo.revs(
  2204         "ancestors(bookmark(%s)) - "
  2213         b"ancestors(bookmark(%s)) - "
  2205         "ancestors(head() and not bookmark(%s)) - "
  2214         b"ancestors(head() and not bookmark(%s)) - "
  2206         "ancestors(bookmark() and not bookmark(%s))",
  2215         b"ancestors(bookmark() and not bookmark(%s))",
  2207         mark,
  2216         mark,
  2208         mark,
  2217         mark,
  2209         mark,
  2218         mark,
  2210     )
  2219     )
  2211 
  2220
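# Hedged usage sketch for bookmarkrevs(): select the changesets "owned" by a
# bookmark, i.e. its ancestors minus those reachable from other heads or other
# bookmarks. The bookmark name is hypothetical and `repo` is an in-scope
# localrepo object.
for rev in bookmarkrevs(repo, b'feature-x'):
    repo.ui.write(b'%d\n' % rev)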