--- a/mercurial/scmutil.py Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/scmutil.py Sun Oct 06 09:48:39 2019 -0400
@@ -158,11 +158,11 @@
if secretlist:
ui.status(
- _("no changes found (ignored %d secret changesets)\n")
+ _(b"no changes found (ignored %d secret changesets)\n")
% len(secretlist)
)
else:
- ui.status(_("no changes found\n"))
+ ui.status(_(b"no changes found\n"))
def callcatch(ui, func):
@@ -181,20 +181,20 @@
# Mercurial-specific first, followed by built-in and library exceptions
except error.LockHeld as inst:
if inst.errno == errno.ETIMEDOUT:
- reason = _('timed out waiting for lock held by %r') % (
+ reason = _(b'timed out waiting for lock held by %r') % (
pycompat.bytestr(inst.locker)
)
else:
- reason = _('lock held by %r') % inst.locker
+ reason = _(b'lock held by %r') % inst.locker
ui.error(
- _("abort: %s: %s\n")
+ _(b"abort: %s: %s\n")
% (inst.desc or stringutil.forcebytestr(inst.filename), reason)
)
if not inst.locker:
- ui.error(_("(lock might be very busy)\n"))
+ ui.error(_(b"(lock might be very busy)\n"))
except error.LockUnavailable as inst:
ui.error(
- _("abort: could not lock %s: %s\n")
+ _(b"abort: could not lock %s: %s\n")
% (
inst.desc or stringutil.forcebytestr(inst.filename),
encoding.strtolocal(inst.strerror),
@@ -202,57 +202,57 @@
)
except error.OutOfBandError as inst:
if inst.args:
- msg = _("abort: remote error:\n")
+ msg = _(b"abort: remote error:\n")
else:
- msg = _("abort: remote error\n")
+ msg = _(b"abort: remote error\n")
ui.error(msg)
if inst.args:
- ui.error(''.join(inst.args))
+ ui.error(b''.join(inst.args))
if inst.hint:
- ui.error('(%s)\n' % inst.hint)
+ ui.error(b'(%s)\n' % inst.hint)
except error.RepoError as inst:
- ui.error(_("abort: %s!\n") % inst)
+ ui.error(_(b"abort: %s!\n") % inst)
if inst.hint:
- ui.error(_("(%s)\n") % inst.hint)
+ ui.error(_(b"(%s)\n") % inst.hint)
except error.ResponseError as inst:
- ui.error(_("abort: %s") % inst.args[0])
+ ui.error(_(b"abort: %s") % inst.args[0])
msg = inst.args[1]
if isinstance(msg, type(u'')):
msg = pycompat.sysbytes(msg)
if not isinstance(msg, bytes):
- ui.error(" %r\n" % (msg,))
+ ui.error(b" %r\n" % (msg,))
elif not msg:
- ui.error(_(" empty string\n"))
+ ui.error(_(b" empty string\n"))
else:
- ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
+ ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
except error.CensoredNodeError as inst:
- ui.error(_("abort: file censored %s!\n") % inst)
+ ui.error(_(b"abort: file censored %s!\n") % inst)
except error.StorageError as inst:
- ui.error(_("abort: %s!\n") % inst)
+ ui.error(_(b"abort: %s!\n") % inst)
if inst.hint:
- ui.error(_("(%s)\n") % inst.hint)
+ ui.error(_(b"(%s)\n") % inst.hint)
except error.InterventionRequired as inst:
- ui.error("%s\n" % inst)
+ ui.error(b"%s\n" % inst)
if inst.hint:
- ui.error(_("(%s)\n") % inst.hint)
+ ui.error(_(b"(%s)\n") % inst.hint)
return 1
except error.WdirUnsupported:
- ui.error(_("abort: working directory revision cannot be specified\n"))
+ ui.error(_(b"abort: working directory revision cannot be specified\n"))
except error.Abort as inst:
- ui.error(_("abort: %s\n") % inst)
+ ui.error(_(b"abort: %s\n") % inst)
if inst.hint:
- ui.error(_("(%s)\n") % inst.hint)
+ ui.error(_(b"(%s)\n") % inst.hint)
except ImportError as inst:
- ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
+ ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
m = stringutil.forcebytestr(inst).split()[-1]
- if m in "mpatch bdiff".split():
- ui.error(_("(did you forget to compile extensions?)\n"))
- elif m in "zlib".split():
- ui.error(_("(is your Python install correct?)\n"))
+ if m in b"mpatch bdiff".split():
+ ui.error(_(b"(did you forget to compile extensions?)\n"))
+ elif m in b"zlib".split():
+ ui.error(_(b"(is your Python install correct?)\n"))
except (IOError, OSError) as inst:
- if util.safehasattr(inst, "code"): # HTTPError
- ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
- elif util.safehasattr(inst, "reason"): # URLError or SSLError
+ if util.safehasattr(inst, b"code"): # HTTPError
+ ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
+ elif util.safehasattr(inst, b"reason"): # URLError or SSLError
try: # usually it is in the form (errno, strerror)
reason = inst.reason.args[1]
except (AttributeError, IndexError):
@@ -261,9 +261,9 @@
if isinstance(reason, pycompat.unicode):
# SSLError of Python 2.7.9 contains a unicode
reason = encoding.unitolocal(reason)
- ui.error(_("abort: error: %s\n") % reason)
+ ui.error(_(b"abort: error: %s\n") % reason)
elif (
- util.safehasattr(inst, "args")
+ util.safehasattr(inst, b"args")
and inst.args
and inst.args[0] == errno.EPIPE
):
@@ -271,18 +271,18 @@
elif getattr(inst, "strerror", None): # common IOError or OSError
if getattr(inst, "filename", None) is not None:
ui.error(
- _("abort: %s: '%s'\n")
+ _(b"abort: %s: '%s'\n")
% (
encoding.strtolocal(inst.strerror),
stringutil.forcebytestr(inst.filename),
)
)
else:
- ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
+ ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
else: # suspicious IOError
raise
except MemoryError:
- ui.error(_("abort: out of memory\n"))
+ ui.error(_(b"abort: out of memory\n"))
except SystemExit as inst:
# Commands shouldn't sys.exit directly, but give a return code.
# Just in case catch this and and pass exit code to caller.
@@ -294,27 +294,27 @@
def checknewlabel(repo, lbl, kind):
# Do not use the "kind" parameter in ui output.
# It makes strings difficult to translate.
- if lbl in ['tip', '.', 'null']:
- raise error.Abort(_("the name '%s' is reserved") % lbl)
- for c in (':', '\0', '\n', '\r'):
+ if lbl in [b'tip', b'.', b'null']:
+ raise error.Abort(_(b"the name '%s' is reserved") % lbl)
+ for c in (b':', b'\0', b'\n', b'\r'):
if c in lbl:
raise error.Abort(
- _("%r cannot be used in a name") % pycompat.bytestr(c)
+ _(b"%r cannot be used in a name") % pycompat.bytestr(c)
)
try:
int(lbl)
- raise error.Abort(_("cannot use an integer as a name"))
+ raise error.Abort(_(b"cannot use an integer as a name"))
except ValueError:
pass
if lbl.strip() != lbl:
- raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
+ raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
def checkfilename(f):
'''Check that the filename f is an acceptable filename for a tracked file'''
- if '\r' in f or '\n' in f:
+ if b'\r' in f or b'\n' in f:
raise error.Abort(
- _("'\\n' and '\\r' disallowed in filenames: %r")
+ _(b"'\\n' and '\\r' disallowed in filenames: %r")
% pycompat.bytestr(f)
)
@@ -326,23 +326,23 @@
if abort or warn:
msg = util.checkwinfilename(f)
if msg:
- msg = "%s: %s" % (msg, procutil.shellquote(f))
+ msg = b"%s: %s" % (msg, procutil.shellquote(f))
if abort:
raise error.Abort(msg)
- ui.warn(_("warning: %s\n") % msg)
+ ui.warn(_(b"warning: %s\n") % msg)
def checkportabilityalert(ui):
'''check if the user's config requests nothing, a warning, or abort for
non-portable filenames'''
- val = ui.config('ui', 'portablefilenames')
+ val = ui.config(b'ui', b'portablefilenames')
lval = val.lower()
bval = stringutil.parsebool(val)
- abort = pycompat.iswindows or lval == 'abort'
- warn = bval or lval == 'warn'
- if bval is None and not (warn or abort or lval == 'ignore'):
+ abort = pycompat.iswindows or lval == b'abort'
+ warn = bval or lval == b'warn'
+ if bval is None and not (warn or abort or lval == b'ignore'):
raise error.ConfigError(
- _("ui.portablefilenames value is invalid ('%s')") % val
+ _(b"ui.portablefilenames value is invalid ('%s')") % val
)
return abort, warn
@@ -351,8 +351,8 @@
def __init__(self, ui, abort, dirstate):
self._ui = ui
self._abort = abort
- allfiles = '\0'.join(dirstate)
- self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
+ allfiles = b'\0'.join(dirstate)
+ self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
self._dirstate = dirstate
# The purpose of _newfiles is so that we don't complain about
# case collisions if someone were to call this object with the
@@ -364,10 +364,10 @@
return
fl = encoding.lower(f)
if fl in self._loweredfiles and f not in self._dirstate:
- msg = _('possible case-folding collision for %s') % f
+ msg = _(b'possible case-folding collision for %s') % f
if self._abort:
raise error.Abort(msg)
- self._ui.warn(_("warning: %s\n") % msg)
+ self._ui.warn(_(b"warning: %s\n") % msg)
self._loweredfiles.add(fl)
self._newfiles.add(f)
@@ -392,7 +392,7 @@
if revs:
s = hashlib.sha1()
for rev in revs:
- s.update('%d;' % rev)
+ s.update(b'%d;' % rev)
key = s.digest()
return key
@@ -423,14 +423,14 @@
adddir(seen_dirs, path)
for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
dirs.sort()
- if '.hg' in dirs:
+ if b'.hg' in dirs:
yield root # found a repository
- qroot = os.path.join(root, '.hg', 'patches')
- if os.path.isdir(os.path.join(qroot, '.hg')):
+ qroot = os.path.join(root, b'.hg', b'patches')
+ if os.path.isdir(os.path.join(qroot, b'.hg')):
yield qroot # we have a patch queue repo here
if recurse:
# avoid recursing inside the .hg directory
- dirs.remove('.hg')
+ dirs.remove(b'.hg')
else:
dirs[:] = [] # don't descend further
elif followsym:
@@ -476,12 +476,12 @@
hexfunc = hex
else:
hexfunc = short
- return '%d:%s' % (rev, hexfunc(node))
+ return b'%d:%s' % (rev, hexfunc(node))
def resolvehexnodeidprefix(repo, prefix):
- if prefix.startswith('x') and repo.ui.configbool(
- 'experimental', 'revisions.prefixhexnode'
+ if prefix.startswith(b'x') and repo.ui.configbool(
+ b'experimental', b'revisions.prefixhexnode'
):
prefix = prefix[1:]
try:
@@ -489,11 +489,13 @@
# This matches the shortesthexnodeidprefix() function below.
node = repo.unfiltered().changelog._partialmatch(prefix)
except error.AmbiguousPrefixLookupError:
- revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
+ revset = repo.ui.config(
+ b'experimental', b'revisions.disambiguatewithin'
+ )
if revset:
# Clear config to avoid infinite recursion
configoverrides = {
- ('experimental', 'revisions.disambiguatewithin'): None
+ (b'experimental', b'revisions.disambiguatewithin'): None
}
with repo.ui.configoverride(configoverrides):
revs = repo.anyrevs([revset], user=True)
@@ -540,9 +542,9 @@
def disambiguate(prefix):
"""Disambiguate against revnums."""
- if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
+ if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
if mayberevnum(repo, prefix):
- return 'x' + prefix
+ return b'x' + prefix
else:
return prefix
@@ -553,20 +555,20 @@
return prefix
cl = repo.unfiltered().changelog
- revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
+ revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
if revset:
revs = None
if cache is not None:
- revs = cache.get('disambiguationrevset')
+ revs = cache.get(b'disambiguationrevset')
if revs is None:
revs = repo.anyrevs([revset], user=True)
if cache is not None:
- cache['disambiguationrevset'] = revs
+ cache[b'disambiguationrevset'] = revs
if cl.rev(node) in revs:
hexnode = hex(node)
nodetree = None
if cache is not None:
- nodetree = cache.get('disambiguationnodetree')
+ nodetree = cache.get(b'disambiguationnodetree')
if not nodetree:
try:
nodetree = parsers.nodetree(cl.index, len(revs))
@@ -577,7 +579,7 @@
for r in revs:
nodetree.insert(r)
if cache is not None:
- cache['disambiguationnodetree'] = nodetree
+ cache[b'disambiguationnodetree'] = nodetree
if nodetree is not None:
length = max(nodetree.shortest(node), minlength)
prefix = hexnode[:length]
@@ -620,17 +622,17 @@
"""
if not isinstance(symbol, bytes):
msg = (
- "symbol (%s of type %s) was not a string, did you mean "
- "repo[symbol]?" % (symbol, type(symbol))
+            b"symbol (%r of type %r) was not a string, did you mean "
+            b"repo[symbol]?" % (symbol, type(symbol))
)
raise error.ProgrammingError(msg)
try:
- if symbol in ('.', 'tip', 'null'):
+ if symbol in (b'.', b'tip', b'null'):
return repo[symbol]
try:
r = int(symbol)
- if '%d' % r != symbol:
+ if b'%d' % r != symbol:
raise ValueError
l = len(repo.changelog)
if r < 0:
@@ -666,7 +668,7 @@
rev = repo.changelog.rev(node)
return repo[rev]
- raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
+ raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
except error.WdirUnsupported:
return repo[None]
@@ -683,7 +685,7 @@
This is extracted in a function to help extensions (eg: evolve) to
experiment with various message variants."""
- if repo.filtername.startswith('visible'):
+ if repo.filtername.startswith(b'visible'):
# Check if the changeset is obsolete
unfilteredrepo = repo.unfiltered()
@@ -694,39 +696,44 @@
if ctx.obsolete():
msg = obsutil._getfilteredreason(repo, changeid, ctx)
else:
- msg = _("hidden revision '%s'") % changeid
+ msg = _(b"hidden revision '%s'") % changeid
- hint = _('use --hidden to access hidden revisions')
+ hint = _(b'use --hidden to access hidden revisions')
return error.FilteredRepoLookupError(msg, hint=hint)
- msg = _("filtered revision '%s' (not in '%s' subset)")
+ msg = _(b"filtered revision '%s' (not in '%s' subset)")
msg %= (changeid, repo.filtername)
return error.FilteredRepoLookupError(msg)
-def revsingle(repo, revspec, default='.', localalias=None):
+def revsingle(repo, revspec, default=b'.', localalias=None):
if not revspec and revspec != 0:
return repo[default]
l = revrange(repo, [revspec], localalias=localalias)
if not l:
- raise error.Abort(_('empty revision set'))
+ raise error.Abort(_(b'empty revision set'))
return repo[l.last()]
def _pairspec(revspec):
tree = revsetlang.parse(revspec)
- return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
+ return tree and tree[0] in (
+ b'range',
+ b'rangepre',
+ b'rangepost',
+ b'rangeall',
+ )
def revpair(repo, revs):
if not revs:
- return repo['.'], repo[None]
+ return repo[b'.'], repo[None]
l = revrange(repo, revs)
if not l:
- raise error.Abort(_('empty revision range'))
+ raise error.Abort(_(b'empty revision range'))
first = l.first()
second = l.last()
@@ -736,7 +743,7 @@
and len(revs) >= 2
and not all(revrange(repo, [r]) for r in revs)
):
- raise error.Abort(_('empty revision on one side of range'))
+ raise error.Abort(_(b'empty revision on one side of range'))
# if top-level is range expression, the result must always be a pair
if first == second and len(revs) == 1 and not _pairspec(revs[0]):
@@ -769,7 +776,7 @@
allspecs = []
for spec in specs:
if isinstance(spec, int):
- spec = revsetlang.formatspec('%d', spec)
+ spec = revsetlang.formatspec(b'%d', spec)
allspecs.append(spec)
return repo.anyrevs(allspecs, user=True, localalias=localalias)
@@ -808,21 +815,21 @@
if forcerelativevalue is not None:
relative = forcerelativevalue
else:
- config = repo.ui.config('ui', 'relative-paths')
- if config == 'legacy':
+ config = repo.ui.config(b'ui', b'relative-paths')
+ if config == b'legacy':
relative = legacyrelativevalue
else:
relative = stringutil.parsebool(config)
if relative is None:
raise error.ConfigError(
- _("ui.relative-paths is not a boolean ('%s')") % config
+ _(b"ui.relative-paths is not a boolean ('%s')") % config
)
if relative:
cwd = repo.getcwd()
pathto = repo.pathto
return lambda f: pathto(f, cwd)
- elif repo.ui.configbool('ui', 'slash'):
+ elif repo.ui.configbool(b'ui', b'slash'):
return lambda f: f
else:
return util.localpath
@@ -839,7 +846,7 @@
Some commands (e.g. addremove) use this condition for deciding whether to
print absolute or relative paths.
'''
- return bool(pats or opts.get('include') or opts.get('exclude'))
+ return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
def expandpats(pats):
@@ -863,30 +870,30 @@
def matchandpats(
- ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None
+ ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
'''Return a matcher and the patterns that were used.
The matcher will warn about bad matches, unless an alternate badfn callback
is provided.'''
if opts is None:
opts = {}
- if not globbed and default == 'relpath':
+ if not globbed and default == b'relpath':
pats = expandpats(pats or [])
uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
def bad(f, msg):
- ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))
+ ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
if badfn is None:
badfn = bad
m = ctx.match(
pats,
- opts.get('include'),
- opts.get('exclude'),
+ opts.get(b'include'),
+ opts.get(b'exclude'),
default,
- listsubrepos=opts.get('subrepos'),
+ listsubrepos=opts.get(b'subrepos'),
badfn=badfn,
)
@@ -896,7 +903,7 @@
def match(
- ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None
+ ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
'''Return a matcher that will warn about bad matches.'''
return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
@@ -931,7 +938,7 @@
"""return a vfs suitable to save 'orig' file
return None if no special directory is configured"""
- origbackuppath = ui.config('ui', 'origbackuppath')
+ origbackuppath = ui.config(b'ui', b'origbackuppath')
if not origbackuppath:
return None
return vfs.vfs(repo.wvfs.join(origbackuppath))
@@ -949,16 +956,16 @@
'''
origvfs = getorigvfs(ui, repo)
if origvfs is None:
- return repo.wjoin(filepath + ".orig")
+ return repo.wjoin(filepath + b".orig")
origbackupdir = origvfs.dirname(filepath)
if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
- ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
+ ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
# Remove any files that conflict with the backup file's path
for f in reversed(list(util.finddirs(filepath))):
if origvfs.isfileorlink(f):
- ui.note(_('removing conflicting file: %s\n') % origvfs.join(f))
+ ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
origvfs.unlink(f)
break
@@ -966,7 +973,7 @@
if origvfs.isdir(filepath) and not origvfs.islink(filepath):
ui.note(
- _('removing conflicting directory: %s\n') % origvfs.join(filepath)
+ _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
)
origvfs.rmtree(filepath, forcibly=True)
@@ -1014,7 +1021,7 @@
return
# translate mapping's other forms
- if not util.safehasattr(replacements, 'items'):
+ if not util.safehasattr(replacements, b'items'):
replacements = {(n,): () for n in replacements}
else:
# upgrading non tuple "source" to tuple ones for BC
@@ -1037,14 +1044,14 @@
continue
if len(newnodes) > 1:
# usually a split, take the one with biggest rev number
- newnode = next(unfi.set('max(%ln)', newnodes)).node()
+ newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
elif len(newnodes) == 0:
# move bookmark backwards
allreplaced = []
for rep in replacements:
allreplaced.extend(rep)
roots = list(
- unfi.set('max((::%n) - %ln)', oldnode, allreplaced)
+ unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
)
if roots:
newnode = roots[0].node()
@@ -1086,7 +1093,7 @@
elif newphase < ctx.phase():
toadvance.setdefault(newphase, []).append(newnode)
- with repo.transaction('cleanup') as tr:
+ with repo.transaction(b'cleanup') as tr:
# Move bookmarks
bmarks = repo._bookmarks
bmarkchanges = []
@@ -1097,7 +1104,7 @@
from . import bookmarks # avoid import cycle
repo.ui.debug(
- 'moving bookmarks %r from %s to %s\n'
+ b'moving bookmarks %r from %s to %s\n'
% (
pycompat.rapply(pycompat.maybebytestr, oldbmarks),
hex(oldnode),
@@ -1106,7 +1113,7 @@
)
# Delete divergent bookmarks being parents of related newnodes
deleterevs = repo.revs(
- 'parents(roots(%ln & (::%n))) - parents(%n)',
+ b'parents(roots(%ln & (::%n))) - parents(%n)',
allnewnodes,
newnode,
oldnode,
@@ -1125,7 +1132,7 @@
for phase, nodes in toadvance.items():
phases.advanceboundary(repo, tr, phase, nodes)
- mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
+ mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
# Obsolete or strip nodes
if obsolete.isenabled(repo, obsolete.createmarkersopt):
# If a node is already obsoleted, and we want to obsolete it
@@ -1171,13 +1178,13 @@
if opts is None:
opts = {}
m = matcher
- dry_run = opts.get('dry_run')
+ dry_run = opts.get(b'dry_run')
try:
- similarity = float(opts.get('similarity') or 0)
+ similarity = float(opts.get(b'similarity') or 0)
except ValueError:
- raise error.Abort(_('similarity must be a number'))
+ raise error.Abort(_(b'similarity must be a number'))
if similarity < 0 or similarity > 100:
- raise error.Abort(_('similarity must be between 0 and 100'))
+ raise error.Abort(_(b'similarity must be between 0 and 100'))
similarity /= 100.0
ret = 0
@@ -1185,7 +1192,7 @@
wctx = repo[None]
for subpath in sorted(wctx.substate):
submatch = matchmod.subdirmatcher(subpath, m)
- if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
+ if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
sub = wctx.sub(subpath)
subprefix = repo.wvfs.reljoin(prefix, subpath)
subuipathfn = subdiruipathfn(subpath, uipathfn)
@@ -1194,7 +1201,7 @@
ret = 1
except error.LookupError:
repo.ui.status(
- _("skipping missing subrepository: %s\n")
+ _(b"skipping missing subrepository: %s\n")
% uipathfn(subpath)
)
@@ -1216,11 +1223,11 @@
for abs in sorted(toprint):
if repo.ui.verbose or not m.exact(abs):
if abs in unknownset:
- status = _('adding %s\n') % uipathfn(abs)
- label = 'ui.addremove.added'
+ status = _(b'adding %s\n') % uipathfn(abs)
+ label = b'ui.addremove.added'
else:
- status = _('removing %s\n') % uipathfn(abs)
- label = 'ui.addremove.removed'
+ status = _(b'removing %s\n') % uipathfn(abs)
+ label = b'ui.addremove.removed'
repo.ui.status(status, label=label)
renames = _findrenames(
@@ -1250,9 +1257,9 @@
toprint.update(deleted)
for abs in sorted(toprint):
if abs in unknownset:
- status = _('adding %s\n') % abs
+ status = _(b'adding %s\n') % abs
else:
- status = _('removing %s\n') % abs
+ status = _(b'removing %s\n') % abs
repo.ui.status(status)
# TODO: We should probably have the caller pass in uipathfn and apply it to
@@ -1292,16 +1299,16 @@
)
for abs, st in walkresults.iteritems():
dstate = dirstate[abs]
- if dstate == '?' and audit_path.check(abs):
+ if dstate == b'?' and audit_path.check(abs):
unknown.append(abs)
- elif dstate != 'r' and not st:
+ elif dstate != b'r' and not st:
deleted.append(abs)
- elif dstate == 'r' and st:
+ elif dstate == b'r' and st:
forgotten.append(abs)
# for finding renames
- elif dstate == 'r' and not st:
+ elif dstate == b'r' and not st:
removed.append(abs)
- elif dstate == 'a':
+ elif dstate == b'a':
added.append(abs)
return added, unknown, deleted, removed, forgotten
@@ -1321,8 +1328,8 @@
):
repo.ui.status(
_(
- 'recording removal of %s as rename to %s '
- '(%d%% similar)\n'
+ b'recording removal of %s as rename to %s '
+ b'(%d%% similar)\n'
)
% (uipathfn(old), uipathfn(new), score * 100)
)
@@ -1419,19 +1426,19 @@
"""
origsrc = repo.dirstate.copied(src) or src
if dst == origsrc: # copying back a copy?
- if repo.dirstate[dst] not in 'mn' and not dryrun:
+ if repo.dirstate[dst] not in b'mn' and not dryrun:
repo.dirstate.normallookup(dst)
else:
- if repo.dirstate[origsrc] == 'a' and origsrc == src:
+ if repo.dirstate[origsrc] == b'a' and origsrc == src:
if not ui.quiet:
ui.warn(
_(
- "%s has not been committed yet, so no copy "
- "data will be stored for %s.\n"
+ b"%s has not been committed yet, so no copy "
+ b"data will be stored for %s.\n"
)
% (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
)
- if repo.dirstate[dst] in '?r' and not dryrun:
+ if repo.dirstate[dst] in b'?r' and not dryrun:
wctx.add([dst])
elif not dryrun:
wctx.copy(origsrc, dst)
@@ -1444,29 +1451,29 @@
a matcher that doesn't match all the differences between the parent of the
working copy and newctx.
"""
- oldctx = repo['.']
+ oldctx = repo[b'.']
ds = repo.dirstate
ds.setparents(newctx.node(), nullid)
copies = dict(ds.copies())
s = newctx.status(oldctx, match=match)
for f in s.modified:
- if ds[f] == 'r':
+ if ds[f] == b'r':
# modified + removed -> removed
continue
ds.normallookup(f)
for f in s.added:
- if ds[f] == 'r':
+ if ds[f] == b'r':
# added + removed -> unknown
ds.drop(f)
- elif ds[f] != 'a':
+ elif ds[f] != b'a':
ds.add(f)
for f in s.removed:
- if ds[f] == 'a':
+ if ds[f] == b'a':
# removed + added -> normal
ds.normallookup(f)
- elif ds[f] != 'r':
+ elif ds[f] != b'r':
ds.remove(f)
# Merge old parent and old working dir copies
@@ -1477,15 +1484,15 @@
)
# Adjust the dirstate copies
for dst, src in copies.iteritems():
- if src not in newctx or dst in newctx or ds[dst] != 'a':
+ if src not in newctx or dst in newctx or ds[dst] != b'a':
src = None
ds.copy(src, dst)
def writerequires(opener, requirements):
- with opener('requires', 'w', atomictemp=True) as fp:
+ with opener(b'requires', b'w', atomictemp=True) as fp:
for r in sorted(requirements):
- fp.write("%s\n" % r)
+ fp.write(b"%s\n" % r)
class filecachesubentry(object):
@@ -1664,14 +1671,14 @@
remote data sources.
"""
- spec = repo.ui.config("extdata", source)
+ spec = repo.ui.config(b"extdata", source)
if not spec:
- raise error.Abort(_("unknown extdata source '%s'") % source)
+ raise error.Abort(_(b"unknown extdata source '%s'") % source)
data = {}
src = proc = None
try:
- if spec.startswith("shell:"):
+ if spec.startswith(b"shell:"):
# external commands should be run relative to the repo root
cmd = spec[6:]
proc = subprocess.Popen(
@@ -1687,10 +1694,10 @@
# treat as a URL or file
src = url.open(repo.ui, spec)
for l in src:
- if " " in l:
- k, v = l.strip().split(" ", 1)
+ if b" " in l:
+ k, v = l.strip().split(b" ", 1)
else:
- k, v = l.strip(), ""
+ k, v = l.strip(), b""
k = encoding.tolocal(k)
try:
@@ -1709,7 +1716,7 @@
src.close()
if proc and proc.returncode != 0:
raise error.Abort(
- _("extdata command '%s' failed: %s")
+ _(b"extdata command '%s' failed: %s")
% (cmd, procutil.explainexit(proc.returncode))
)
@@ -1719,7 +1726,7 @@
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
if lock is None:
raise error.LockInheritanceContractViolation(
- 'lock can only be inherited while held'
+ b'lock can only be inherited while held'
)
if environ is None:
environ = {}
@@ -1735,18 +1742,18 @@
arguments that ui.system does, and returns the exit code of the
subprocess."""
return _locksub(
- repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args, **kwargs
+ repo, repo.currentwlock(), b'HG_WLOCK_LOCKER', cmd, *args, **kwargs
)
class progress(object):
- def __init__(self, ui, updatebar, topic, unit="", total=None):
+ def __init__(self, ui, updatebar, topic, unit=b"", total=None):
self.ui = ui
self.pos = 0
self.topic = topic
self.unit = unit
self.total = total
- self.debug = ui.configbool('progress', 'debug')
+ self.debug = ui.configbool(b'progress', b'debug')
self._updatebar = updatebar
def __enter__(self):
@@ -1755,7 +1762,7 @@
def __exit__(self, exc_type, exc_value, exc_tb):
self.complete()
- def update(self, pos, item="", total=None):
+ def update(self, pos, item=b"", total=None):
assert pos is not None
if total:
self.total = total
@@ -1764,37 +1771,37 @@
if self.debug:
self._printdebug(item)
- def increment(self, step=1, item="", total=None):
+ def increment(self, step=1, item=b"", total=None):
self.update(self.pos + step, item, total)
def complete(self):
self.pos = None
- self.unit = ""
+ self.unit = b""
self.total = None
- self._updatebar(self.topic, self.pos, "", self.unit, self.total)
+ self._updatebar(self.topic, self.pos, b"", self.unit, self.total)
def _printdebug(self, item):
if self.unit:
- unit = ' ' + self.unit
+ unit = b' ' + self.unit
if item:
- item = ' ' + item
+ item = b' ' + item
if self.total:
pct = 100.0 * self.pos / self.total
self.ui.debug(
- '%s:%s %d/%d%s (%4.2f%%)\n'
+ b'%s:%s %d/%d%s (%4.2f%%)\n'
% (self.topic, item, self.pos, self.total, unit, pct)
)
else:
- self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
+ self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
def gdinitconfig(ui):
"""helper function to know if a repo should be created as general delta
"""
# experimental config: format.generaldelta
- return ui.configbool('format', 'generaldelta') or ui.configbool(
- 'format', 'usegeneraldelta'
+ return ui.configbool(b'format', b'generaldelta') or ui.configbool(
+ b'format', b'usegeneraldelta'
)
@@ -1802,7 +1809,7 @@
"""helper function to know if incoming delta should be optimised
"""
# experimental config: format.generaldelta
- return ui.configbool('format', 'generaldelta')
+ return ui.configbool(b'format', b'generaldelta')
class simplekeyvaluefile(object):
@@ -1811,7 +1818,7 @@
Keys must be alphanumerics and start with a letter, values must not
contain '\n' characters"""
- firstlinekey = '__firstline'
+ firstlinekey = b'__firstline'
def __init__(self, vfs, path, keys=None):
self.vfs = vfs
@@ -1827,7 +1834,7 @@
d = {}
if firstlinenonkeyval:
if not lines:
- e = _("empty simplekeyvalue file")
+ e = _(b"empty simplekeyvalue file")
raise error.CorruptedState(e)
# we don't want to include '\n' in the __firstline
d[self.firstlinekey] = lines[0][:-1]
@@ -1838,10 +1845,10 @@
# lines which only contain '\n' therefore are not skipped
# by 'if line'
updatedict = dict(
- line[:-1].split('=', 1) for line in lines if line.strip()
+ line[:-1].split(b'=', 1) for line in lines if line.strip()
)
if self.firstlinekey in updatedict:
- e = _("%r can't be used as a key")
+ e = _(b"%r can't be used as a key")
raise error.CorruptedState(e % self.firstlinekey)
d.update(updatedict)
except ValueError as e:
@@ -1857,37 +1864,37 @@
everything else, as it is, not in a key=value form"""
lines = []
if firstline is not None:
- lines.append('%s\n' % firstline)
+ lines.append(b'%s\n' % firstline)
for k, v in data.items():
if k == self.firstlinekey:
- e = "key name '%s' is reserved" % self.firstlinekey
+ e = b"key name '%s' is reserved" % self.firstlinekey
raise error.ProgrammingError(e)
if not k[0:1].isalpha():
- e = "keys must start with a letter in a key-value file"
+ e = b"keys must start with a letter in a key-value file"
raise error.ProgrammingError(e)
if not k.isalnum():
- e = "invalid key name in a simple key-value file"
+ e = b"invalid key name in a simple key-value file"
raise error.ProgrammingError(e)
- if '\n' in v:
- e = "invalid value in a simple key-value file"
+ if b'\n' in v:
+ e = b"invalid value in a simple key-value file"
raise error.ProgrammingError(e)
- lines.append("%s=%s\n" % (k, v))
- with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
- fp.write(''.join(lines))
+ lines.append(b"%s=%s\n" % (k, v))
+ with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
+ fp.write(b''.join(lines))
_reportobsoletedsource = [
- 'debugobsolete',
- 'pull',
- 'push',
- 'serve',
- 'unbundle',
+ b'debugobsolete',
+ b'pull',
+ b'push',
+ b'serve',
+ b'unbundle',
]
_reportnewcssource = [
- 'pull',
- 'unbundle',
+ b'pull',
+ b'unbundle',
]
@@ -1912,7 +1919,7 @@
_reportstroubledchangesets = True
-def registersummarycallback(repo, otr, txnname=''):
+def registersummarycallback(repo, otr, txnname=b''):
"""register a callback to issue a summary after the transaction is closed
"""
@@ -1937,22 +1944,22 @@
repo = repo.filtered(filtername)
func(repo, tr)
- newcat = '%02i-txnreport' % len(categories)
+ newcat = b'%02i-txnreport' % len(categories)
otr.addpostclose(newcat, wrapped)
categories.append(newcat)
return wrapped
@reportsummary
def reportchangegroup(repo, tr):
- cgchangesets = tr.changes.get('changegroup-count-changesets', 0)
- cgrevisions = tr.changes.get('changegroup-count-revisions', 0)
- cgfiles = tr.changes.get('changegroup-count-files', 0)
- cgheads = tr.changes.get('changegroup-count-heads', 0)
+ cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
+ cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
+ cgfiles = tr.changes.get(b'changegroup-count-files', 0)
+ cgheads = tr.changes.get(b'changegroup-count-heads', 0)
if cgchangesets or cgrevisions or cgfiles:
- htext = ""
+ htext = b""
if cgheads:
- htext = _(" (%+d heads)") % cgheads
- msg = _("added %d changesets with %d changes to %d files%s\n")
+ htext = _(b" (%+d heads)") % cgheads
+ msg = _(b"added %d changesets with %d changes to %d files%s\n")
repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
if txmatch(_reportobsoletedsource):
@@ -1960,19 +1967,21 @@
@reportsummary
def reportobsoleted(repo, tr):
obsoleted = obsutil.getobsoleted(repo, tr)
- newmarkers = len(tr.changes.get('obsmarkers', ()))
+ newmarkers = len(tr.changes.get(b'obsmarkers', ()))
if newmarkers:
- repo.ui.status(_('%i new obsolescence markers\n') % newmarkers)
+ repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
if obsoleted:
- repo.ui.status(_('obsoleted %i changesets\n') % len(obsoleted))
+ repo.ui.status(_(b'obsoleted %i changesets\n') % len(obsoleted))
if obsolete.isenabled(
repo, obsolete.createmarkersopt
- ) and repo.ui.configbool('experimental', 'evolution.report-instabilities'):
+ ) and repo.ui.configbool(
+ b'experimental', b'evolution.report-instabilities'
+ ):
instabilitytypes = [
- ('orphan', 'orphan'),
- ('phase-divergent', 'phasedivergent'),
- ('content-divergent', 'contentdivergent'),
+ (b'orphan', b'orphan'),
+ (b'phase-divergent', b'phasedivergent'),
+ (b'content-divergent', b'contentdivergent'),
]
def getinstabilitycounts(repo):
@@ -2003,7 +2012,7 @@
@reportsummary
def reportnewcs(repo, tr):
"""Report the range of new revisions pulled/unbundled."""
- origrepolen = tr.changes.get('origrepolen', len(repo))
+ origrepolen = tr.changes.get(b'origrepolen', len(repo))
unfi = repo.unfiltered()
if origrepolen >= len(unfi):
return
@@ -2016,29 +2025,29 @@
if minrev == maxrev:
revrange = minrev
else:
- revrange = '%s:%s' % (minrev, maxrev)
- draft = len(repo.revs('%ld and draft()', revs))
- secret = len(repo.revs('%ld and secret()', revs))
+ revrange = b'%s:%s' % (minrev, maxrev)
+ draft = len(repo.revs(b'%ld and draft()', revs))
+ secret = len(repo.revs(b'%ld and secret()', revs))
if not (draft or secret):
- msg = _('new changesets %s\n') % revrange
+ msg = _(b'new changesets %s\n') % revrange
elif draft and secret:
- msg = _('new changesets %s (%d drafts, %d secrets)\n')
+ msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
msg %= (revrange, draft, secret)
elif draft:
- msg = _('new changesets %s (%d drafts)\n')
+ msg = _(b'new changesets %s (%d drafts)\n')
msg %= (revrange, draft)
elif secret:
- msg = _('new changesets %s (%d secrets)\n')
+ msg = _(b'new changesets %s (%d secrets)\n')
msg %= (revrange, secret)
else:
- errormsg = 'entered unreachable condition'
+ errormsg = b'entered unreachable condition'
raise error.ProgrammingError(errormsg)
repo.ui.status(msg)
# search new changesets directly pulled as obsolete
- duplicates = tr.changes.get('revduplicates', ())
+ duplicates = tr.changes.get(b'revduplicates', ())
obsadded = unfi.revs(
- '(%d: + %ld) and obsolete()', origrepolen, duplicates
+ b'(%d: + %ld) and obsolete()', origrepolen, duplicates
)
cl = repo.changelog
extinctadded = [r for r in obsadded if r not in cl]
@@ -2046,7 +2055,7 @@
# They are not just obsolete, but obsolete and invisible
# we call them "extinct" internally but the terms have not been
# exposed to users.
- msg = '(%d other changesets obsolete on arrival)\n'
+ msg = b'(%d other changesets obsolete on arrival)\n'
repo.ui.status(msg % len(extinctadded))
@reportsummary
@@ -2054,8 +2063,8 @@
"""Report statistics of phase changes for changesets pre-existing
pull/unbundle.
"""
- origrepolen = tr.changes.get('origrepolen', len(repo))
- phasetracking = tr.changes.get('phases', {})
+ origrepolen = tr.changes.get(b'origrepolen', len(repo))
+ phasetracking = tr.changes.get(b'phases', {})
if not phasetracking:
return
published = [
@@ -2066,7 +2075,7 @@
if not published:
return
repo.ui.status(
- _('%d local changesets published\n') % len(published)
+ _(b'%d local changesets published\n') % len(published)
)
@@ -2076,30 +2085,30 @@
exists as a separate function so that extension can wrap to show more
information like how to fix instabilities"""
if delta > 0:
- return _('%i new %s changesets\n') % (delta, instability)
+ return _(b'%i new %s changesets\n') % (delta, instability)
def nodesummaries(repo, nodes, maxnumnodes=4):
if len(nodes) <= maxnumnodes or repo.ui.verbose:
- return ' '.join(short(h) for h in nodes)
- first = ' '.join(short(h) for h in nodes[:maxnumnodes])
- return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
+ return b' '.join(short(h) for h in nodes)
+ first = b' '.join(short(h) for h in nodes[:maxnumnodes])
+ return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
def enforcesinglehead(repo, tr, desc, accountclosed=False):
"""check that no named branch has multiple heads"""
- if desc in ('strip', 'repair'):
+ if desc in (b'strip', b'repair'):
# skip the logic during strip
return
- visible = repo.filtered('visible')
+ visible = repo.filtered(b'visible')
# possible improvement: we could restrict the check to affected branch
bm = visible.branchmap()
for name in bm:
heads = bm.branchheads(name, closed=accountclosed)
if len(heads) > 1:
- msg = _('rejecting multiple heads on branch "%s"')
+ msg = _(b'rejecting multiple heads on branch "%s"')
msg %= name
- hint = _('%d heads: %s')
+ hint = _(b'%d heads: %s')
hint %= (len(heads), nodesummaries(repo, heads))
raise error.Abort(msg, hint=hint)
@@ -2121,11 +2130,11 @@
returns a repo object with the required changesets unhidden
"""
if not repo.filtername or not repo.ui.configbool(
- 'experimental', 'directaccess'
+ b'experimental', b'directaccess'
):
return repo
- if repo.filtername not in ('visible', 'visible-hidden'):
+ if repo.filtername not in (b'visible', b'visible-hidden'):
return repo
symbols = set()
@@ -2145,20 +2154,20 @@
if not revs:
return repo
- if hiddentype == 'warn':
+ if hiddentype == b'warn':
unfi = repo.unfiltered()
- revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
+ revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
repo.ui.warn(
_(
- "warning: accessing hidden changesets for write "
- "operation: %s\n"
+ b"warning: accessing hidden changesets for write "
+ b"operation: %s\n"
)
% revstr
)
# we have to use new filtername to separate branch/tags cache until we can
# disbale these cache when revisions are dynamically pinned.
- return repo.filtered('visible-hidden', revs)
+ return repo.filtered(b'visible-hidden', revs)
def _getrevsfromsymbols(repo, symbols):
@@ -2169,7 +2178,7 @@
unficl = unfi.changelog
cl = repo.changelog
tiprev = len(unficl)
- allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
+ allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
for s in symbols:
try:
n = int(s)
@@ -2201,9 +2210,9 @@
Select revisions reachable by a given bookmark
"""
return repo.revs(
- "ancestors(bookmark(%s)) - "
- "ancestors(head() and not bookmark(%s)) - "
- "ancestors(bookmark() and not bookmark(%s))",
+ b"ancestors(bookmark(%s)) - "
+ b"ancestors(head() and not bookmark(%s)) - "
+ b"ancestors(bookmark() and not bookmark(%s))",
mark,
mark,
mark,