179 raise |
179 raise |
180 # Global exception handling, alphabetically |
180 # Global exception handling, alphabetically |
181 # Mercurial-specific first, followed by built-in and library exceptions |
181 # Mercurial-specific first, followed by built-in and library exceptions |
182 except error.LockHeld as inst: |
182 except error.LockHeld as inst: |
183 if inst.errno == errno.ETIMEDOUT: |
183 if inst.errno == errno.ETIMEDOUT: |
184 reason = _('timed out waiting for lock held by %r') % ( |
184 reason = _(b'timed out waiting for lock held by %r') % ( |
185 pycompat.bytestr(inst.locker) |
185 pycompat.bytestr(inst.locker) |
186 ) |
186 ) |
187 else: |
187 else: |
188 reason = _('lock held by %r') % inst.locker |
188 reason = _(b'lock held by %r') % inst.locker |
189 ui.error( |
189 ui.error( |
190 _("abort: %s: %s\n") |
190 _(b"abort: %s: %s\n") |
191 % (inst.desc or stringutil.forcebytestr(inst.filename), reason) |
191 % (inst.desc or stringutil.forcebytestr(inst.filename), reason) |
192 ) |
192 ) |
193 if not inst.locker: |
193 if not inst.locker: |
194 ui.error(_("(lock might be very busy)\n")) |
194 ui.error(_(b"(lock might be very busy)\n")) |
195 except error.LockUnavailable as inst: |
195 except error.LockUnavailable as inst: |
196 ui.error( |
196 ui.error( |
197 _("abort: could not lock %s: %s\n") |
197 _(b"abort: could not lock %s: %s\n") |
198 % ( |
198 % ( |
199 inst.desc or stringutil.forcebytestr(inst.filename), |
199 inst.desc or stringutil.forcebytestr(inst.filename), |
200 encoding.strtolocal(inst.strerror), |
200 encoding.strtolocal(inst.strerror), |
201 ) |
201 ) |
202 ) |
202 ) |
203 except error.OutOfBandError as inst: |
203 except error.OutOfBandError as inst: |
204 if inst.args: |
204 if inst.args: |
205 msg = _("abort: remote error:\n") |
205 msg = _(b"abort: remote error:\n") |
206 else: |
206 else: |
207 msg = _("abort: remote error\n") |
207 msg = _(b"abort: remote error\n") |
208 ui.error(msg) |
208 ui.error(msg) |
209 if inst.args: |
209 if inst.args: |
210 ui.error(''.join(inst.args)) |
210 ui.error(b''.join(inst.args)) |
211 if inst.hint: |
211 if inst.hint: |
212 ui.error('(%s)\n' % inst.hint) |
212 ui.error(b'(%s)\n' % inst.hint) |
213 except error.RepoError as inst: |
213 except error.RepoError as inst: |
214 ui.error(_("abort: %s!\n") % inst) |
214 ui.error(_(b"abort: %s!\n") % inst) |
215 if inst.hint: |
215 if inst.hint: |
216 ui.error(_("(%s)\n") % inst.hint) |
216 ui.error(_(b"(%s)\n") % inst.hint) |
217 except error.ResponseError as inst: |
217 except error.ResponseError as inst: |
218 ui.error(_("abort: %s") % inst.args[0]) |
218 ui.error(_(b"abort: %s") % inst.args[0]) |
219 msg = inst.args[1] |
219 msg = inst.args[1] |
220 if isinstance(msg, type(u'')): |
220 if isinstance(msg, type(u'')): |
221 msg = pycompat.sysbytes(msg) |
221 msg = pycompat.sysbytes(msg) |
222 if not isinstance(msg, bytes): |
222 if not isinstance(msg, bytes): |
223 ui.error(" %r\n" % (msg,)) |
223 ui.error(b" %r\n" % (msg,)) |
224 elif not msg: |
224 elif not msg: |
225 ui.error(_(" empty string\n")) |
225 ui.error(_(b" empty string\n")) |
226 else: |
226 else: |
227 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg))) |
227 ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg))) |
228 except error.CensoredNodeError as inst: |
228 except error.CensoredNodeError as inst: |
229 ui.error(_("abort: file censored %s!\n") % inst) |
229 ui.error(_(b"abort: file censored %s!\n") % inst) |
230 except error.StorageError as inst: |
230 except error.StorageError as inst: |
231 ui.error(_("abort: %s!\n") % inst) |
231 ui.error(_(b"abort: %s!\n") % inst) |
232 if inst.hint: |
232 if inst.hint: |
233 ui.error(_("(%s)\n") % inst.hint) |
233 ui.error(_(b"(%s)\n") % inst.hint) |
234 except error.InterventionRequired as inst: |
234 except error.InterventionRequired as inst: |
235 ui.error("%s\n" % inst) |
235 ui.error(b"%s\n" % inst) |
236 if inst.hint: |
236 if inst.hint: |
237 ui.error(_("(%s)\n") % inst.hint) |
237 ui.error(_(b"(%s)\n") % inst.hint) |
238 return 1 |
238 return 1 |
239 except error.WdirUnsupported: |
239 except error.WdirUnsupported: |
240 ui.error(_("abort: working directory revision cannot be specified\n")) |
240 ui.error(_(b"abort: working directory revision cannot be specified\n")) |
241 except error.Abort as inst: |
241 except error.Abort as inst: |
242 ui.error(_("abort: %s\n") % inst) |
242 ui.error(_(b"abort: %s\n") % inst) |
243 if inst.hint: |
243 if inst.hint: |
244 ui.error(_("(%s)\n") % inst.hint) |
244 ui.error(_(b"(%s)\n") % inst.hint) |
245 except ImportError as inst: |
245 except ImportError as inst: |
246 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst)) |
246 ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst)) |
247 m = stringutil.forcebytestr(inst).split()[-1] |
247 m = stringutil.forcebytestr(inst).split()[-1] |
248 if m in "mpatch bdiff".split(): |
248 if m in b"mpatch bdiff".split(): |
249 ui.error(_("(did you forget to compile extensions?)\n")) |
249 ui.error(_(b"(did you forget to compile extensions?)\n")) |
250 elif m in "zlib".split(): |
250 elif m in b"zlib".split(): |
251 ui.error(_("(is your Python install correct?)\n")) |
251 ui.error(_(b"(is your Python install correct?)\n")) |
252 except (IOError, OSError) as inst: |
252 except (IOError, OSError) as inst: |
253 if util.safehasattr(inst, "code"): # HTTPError |
253 if util.safehasattr(inst, b"code"): # HTTPError |
254 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst)) |
254 ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst)) |
255 elif util.safehasattr(inst, "reason"): # URLError or SSLError |
255 elif util.safehasattr(inst, b"reason"): # URLError or SSLError |
256 try: # usually it is in the form (errno, strerror) |
256 try: # usually it is in the form (errno, strerror) |
257 reason = inst.reason.args[1] |
257 reason = inst.reason.args[1] |
258 except (AttributeError, IndexError): |
258 except (AttributeError, IndexError): |
259 # it might be anything, for example a string |
259 # it might be anything, for example a string |
260 reason = inst.reason |
260 reason = inst.reason |
261 if isinstance(reason, pycompat.unicode): |
261 if isinstance(reason, pycompat.unicode): |
262 # SSLError of Python 2.7.9 contains a unicode |
262 # SSLError of Python 2.7.9 contains a unicode |
263 reason = encoding.unitolocal(reason) |
263 reason = encoding.unitolocal(reason) |
264 ui.error(_("abort: error: %s\n") % reason) |
264 ui.error(_(b"abort: error: %s\n") % reason) |
265 elif ( |
265 elif ( |
266 util.safehasattr(inst, "args") |
266 util.safehasattr(inst, b"args") |
267 and inst.args |
267 and inst.args |
268 and inst.args[0] == errno.EPIPE |
268 and inst.args[0] == errno.EPIPE |
269 ): |
269 ): |
270 pass |
270 pass |
271 elif getattr(inst, "strerror", None): # common IOError or OSError |
271 elif getattr(inst, "strerror", None): # common IOError or OSError |
272 if getattr(inst, "filename", None) is not None: |
272 if getattr(inst, "filename", None) is not None: |
273 ui.error( |
273 ui.error( |
274 _("abort: %s: '%s'\n") |
274 _(b"abort: %s: '%s'\n") |
275 % ( |
275 % ( |
276 encoding.strtolocal(inst.strerror), |
276 encoding.strtolocal(inst.strerror), |
277 stringutil.forcebytestr(inst.filename), |
277 stringutil.forcebytestr(inst.filename), |
278 ) |
278 ) |
279 ) |
279 ) |
280 else: |
280 else: |
281 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror)) |
281 ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror)) |
282 else: # suspicious IOError |
282 else: # suspicious IOError |
283 raise |
283 raise |
284 except MemoryError: |
284 except MemoryError: |
285 ui.error(_("abort: out of memory\n")) |
285 ui.error(_(b"abort: out of memory\n")) |
286 except SystemExit as inst: |
286 except SystemExit as inst: |
287 # Commands shouldn't sys.exit directly, but give a return code. |
287 # Commands shouldn't sys.exit directly, but give a return code. |
288 # Just in case catch this and and pass exit code to caller. |
288 # Just in case catch this and and pass exit code to caller. |
289 return inst.code |
289 return inst.code |
290 |
290 |
292 |
292 |
293 |
293 |
def checknewlabel(repo, lbl, kind):
    """Validate that ``lbl`` is acceptable as a new label name.

    Raises error.Abort when the name is reserved, contains a forbidden
    character, parses as an integer, or carries surrounding whitespace.
    ``repo`` is currently unused but kept for interface stability.
    """
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in [b'tip', b'.', b'null']:
        raise error.Abort(_(b"the name '%s' is reserved") % lbl)
    for c in (b':', b'\0', b'\n', b'\r'):
        if c in lbl:
            raise error.Abort(
                _(b"%r cannot be used in a name") % pycompat.bytestr(c)
            )
    try:
        # Integers would be ambiguous with revision numbers.
        int(lbl)
        raise error.Abort(_(b"cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
311 |
311 |
312 |
312 |
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newlines would corrupt the dirstate and manifest line formats.
    if b'\r' in f or b'\n' in f:
        raise error.Abort(
            _(b"'\\n' and '\\r' disallowed in filenames: %r")
            % pycompat.bytestr(f)
        )
320 |
320 |
321 |
321 |
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        # Windows filename restrictions are the portability baseline.
        msg = util.checkwinfilename(f)
        if msg:
            msg = b"%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_(b"warning: %s\n") % msg)
333 |
333 |
334 |
334 |
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans. Raises error.ConfigError
    when ui.portablefilenames holds an unrecognized value.
    '''
    val = ui.config(b'ui', b'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    # On Windows non-portable names can't be created at all, so always abort.
    abort = pycompat.iswindows or lval == b'abort'
    warn = bval or lval == b'warn'
    if bval is None and not (warn or abort or lval == b'ignore'):
        raise error.ConfigError(
            _(b"ui.portablefilenames value is invalid ('%s')") % val
        )
    return abort, warn
348 |
348 |
349 |
349 |
350 class casecollisionauditor(object): |
350 class casecollisionauditor(object): |
351 def __init__(self, ui, abort, dirstate): |
351 def __init__(self, ui, abort, dirstate): |
352 self._ui = ui |
352 self._ui = ui |
353 self._abort = abort |
353 self._abort = abort |
354 allfiles = '\0'.join(dirstate) |
354 allfiles = b'\0'.join(dirstate) |
355 self._loweredfiles = set(encoding.lower(allfiles).split('\0')) |
355 self._loweredfiles = set(encoding.lower(allfiles).split(b'\0')) |
356 self._dirstate = dirstate |
356 self._dirstate = dirstate |
357 # The purpose of _newfiles is so that we don't complain about |
357 # The purpose of _newfiles is so that we don't complain about |
358 # case collisions if someone were to call this object with the |
358 # case collisions if someone were to call this object with the |
359 # same filename twice. |
359 # same filename twice. |
360 self._newfiles = set() |
360 self._newfiles = set() |
474 """Format given revision and node depending on the current verbosity""" |
474 """Format given revision and node depending on the current verbosity""" |
475 if ui.debugflag: |
475 if ui.debugflag: |
476 hexfunc = hex |
476 hexfunc = hex |
477 else: |
477 else: |
478 hexfunc = short |
478 hexfunc = short |
479 return '%d:%s' % (rev, hexfunc(node)) |
479 return b'%d:%s' % (rev, hexfunc(node)) |
480 |
480 |
481 |
481 |
482 def resolvehexnodeidprefix(repo, prefix): |
482 def resolvehexnodeidprefix(repo, prefix): |
483 if prefix.startswith('x') and repo.ui.configbool( |
483 if prefix.startswith(b'x') and repo.ui.configbool( |
484 'experimental', 'revisions.prefixhexnode' |
484 b'experimental', b'revisions.prefixhexnode' |
485 ): |
485 ): |
486 prefix = prefix[1:] |
486 prefix = prefix[1:] |
487 try: |
487 try: |
488 # Uses unfiltered repo because it's faster when prefix is ambiguous/ |
488 # Uses unfiltered repo because it's faster when prefix is ambiguous/ |
489 # This matches the shortesthexnodeidprefix() function below. |
489 # This matches the shortesthexnodeidprefix() function below. |
490 node = repo.unfiltered().changelog._partialmatch(prefix) |
490 node = repo.unfiltered().changelog._partialmatch(prefix) |
491 except error.AmbiguousPrefixLookupError: |
491 except error.AmbiguousPrefixLookupError: |
492 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin') |
492 revset = repo.ui.config( |
|
493 b'experimental', b'revisions.disambiguatewithin' |
|
494 ) |
493 if revset: |
495 if revset: |
494 # Clear config to avoid infinite recursion |
496 # Clear config to avoid infinite recursion |
495 configoverrides = { |
497 configoverrides = { |
496 ('experimental', 'revisions.disambiguatewithin'): None |
498 (b'experimental', b'revisions.disambiguatewithin'): None |
497 } |
499 } |
498 with repo.ui.configoverride(configoverrides): |
500 with repo.ui.configoverride(configoverrides): |
499 revs = repo.anyrevs([revset], user=True) |
501 revs = repo.anyrevs([revset], user=True) |
500 matches = [] |
502 matches = [] |
501 for rev in revs: |
503 for rev in revs: |
538 |
540 |
539 minlength = max(minlength, 1) |
541 minlength = max(minlength, 1) |
540 |
542 |
541 def disambiguate(prefix): |
543 def disambiguate(prefix): |
542 """Disambiguate against revnums.""" |
544 """Disambiguate against revnums.""" |
543 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'): |
545 if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'): |
544 if mayberevnum(repo, prefix): |
546 if mayberevnum(repo, prefix): |
545 return 'x' + prefix |
547 return b'x' + prefix |
546 else: |
548 else: |
547 return prefix |
549 return prefix |
548 |
550 |
549 hexnode = hex(node) |
551 hexnode = hex(node) |
550 for length in range(len(prefix), len(hexnode) + 1): |
552 for length in range(len(prefix), len(hexnode) + 1): |
551 prefix = hexnode[:length] |
553 prefix = hexnode[:length] |
552 if not mayberevnum(repo, prefix): |
554 if not mayberevnum(repo, prefix): |
553 return prefix |
555 return prefix |
554 |
556 |
555 cl = repo.unfiltered().changelog |
557 cl = repo.unfiltered().changelog |
556 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin') |
558 revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin') |
557 if revset: |
559 if revset: |
558 revs = None |
560 revs = None |
559 if cache is not None: |
561 if cache is not None: |
560 revs = cache.get('disambiguationrevset') |
562 revs = cache.get(b'disambiguationrevset') |
561 if revs is None: |
563 if revs is None: |
562 revs = repo.anyrevs([revset], user=True) |
564 revs = repo.anyrevs([revset], user=True) |
563 if cache is not None: |
565 if cache is not None: |
564 cache['disambiguationrevset'] = revs |
566 cache[b'disambiguationrevset'] = revs |
565 if cl.rev(node) in revs: |
567 if cl.rev(node) in revs: |
566 hexnode = hex(node) |
568 hexnode = hex(node) |
567 nodetree = None |
569 nodetree = None |
568 if cache is not None: |
570 if cache is not None: |
569 nodetree = cache.get('disambiguationnodetree') |
571 nodetree = cache.get(b'disambiguationnodetree') |
570 if not nodetree: |
572 if not nodetree: |
571 try: |
573 try: |
572 nodetree = parsers.nodetree(cl.index, len(revs)) |
574 nodetree = parsers.nodetree(cl.index, len(revs)) |
573 except AttributeError: |
575 except AttributeError: |
574 # no native nodetree |
576 # no native nodetree |
575 pass |
577 pass |
576 else: |
578 else: |
577 for r in revs: |
579 for r in revs: |
578 nodetree.insert(r) |
580 nodetree.insert(r) |
579 if cache is not None: |
581 if cache is not None: |
580 cache['disambiguationnodetree'] = nodetree |
582 cache[b'disambiguationnodetree'] = nodetree |
581 if nodetree is not None: |
583 if nodetree is not None: |
582 length = max(nodetree.shortest(node), minlength) |
584 length = max(nodetree.shortest(node), minlength) |
583 prefix = hexnode[:length] |
585 prefix = hexnode[:length] |
584 return disambiguate(prefix) |
586 return disambiguate(prefix) |
585 for length in range(minlength, len(hexnode) + 1): |
587 for length in range(minlength, len(hexnode) + 1): |
681 def _filterederror(repo, changeid): |
683 def _filterederror(repo, changeid): |
682 """build an exception to be raised about a filtered changeid |
684 """build an exception to be raised about a filtered changeid |
683 |
685 |
684 This is extracted in a function to help extensions (eg: evolve) to |
686 This is extracted in a function to help extensions (eg: evolve) to |
685 experiment with various message variants.""" |
687 experiment with various message variants.""" |
686 if repo.filtername.startswith('visible'): |
688 if repo.filtername.startswith(b'visible'): |
687 |
689 |
688 # Check if the changeset is obsolete |
690 # Check if the changeset is obsolete |
689 unfilteredrepo = repo.unfiltered() |
691 unfilteredrepo = repo.unfiltered() |
690 ctx = revsymbol(unfilteredrepo, changeid) |
692 ctx = revsymbol(unfilteredrepo, changeid) |
691 |
693 |
692 # If the changeset is obsolete, enrich the message with the reason |
694 # If the changeset is obsolete, enrich the message with the reason |
693 # that made this changeset not visible |
695 # that made this changeset not visible |
694 if ctx.obsolete(): |
696 if ctx.obsolete(): |
695 msg = obsutil._getfilteredreason(repo, changeid, ctx) |
697 msg = obsutil._getfilteredreason(repo, changeid, ctx) |
696 else: |
698 else: |
697 msg = _("hidden revision '%s'") % changeid |
699 msg = _(b"hidden revision '%s'") % changeid |
698 |
700 |
699 hint = _('use --hidden to access hidden revisions') |
701 hint = _(b'use --hidden to access hidden revisions') |
700 |
702 |
701 return error.FilteredRepoLookupError(msg, hint=hint) |
703 return error.FilteredRepoLookupError(msg, hint=hint) |
702 msg = _("filtered revision '%s' (not in '%s' subset)") |
704 msg = _(b"filtered revision '%s' (not in '%s' subset)") |
703 msg %= (changeid, repo.filtername) |
705 msg %= (changeid, repo.filtername) |
704 return error.FilteredRepoLookupError(msg) |
706 return error.FilteredRepoLookupError(msg) |
705 |
707 |
706 |
708 |
def revsingle(repo, revspec, default=b'.', localalias=None):
    """Resolve ``revspec`` to a single changectx (the last of the set).

    An empty spec (other than revision 0) falls back to ``default``.
    Raises error.Abort when the spec resolves to an empty set.
    """
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_(b'empty revision set'))
    return repo[l.last()]
715 |
717 |
716 |
718 |
def _pairspec(revspec):
    """Return True if ``revspec`` parses to a top-level range expression."""
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in (
        b'range',
        b'rangepre',
        b'rangepost',
        b'rangeall',
    )
720 |
727 |
721 |
728 |
722 def revpair(repo, revs): |
729 def revpair(repo, revs): |
723 if not revs: |
730 if not revs: |
724 return repo['.'], repo[None] |
731 return repo[b'.'], repo[None] |
725 |
732 |
726 l = revrange(repo, revs) |
733 l = revrange(repo, revs) |
727 |
734 |
728 if not l: |
735 if not l: |
729 raise error.Abort(_('empty revision range')) |
736 raise error.Abort(_(b'empty revision range')) |
730 |
737 |
731 first = l.first() |
738 first = l.first() |
732 second = l.last() |
739 second = l.last() |
733 |
740 |
734 if ( |
741 if ( |
735 first == second |
742 first == second |
736 and len(revs) >= 2 |
743 and len(revs) >= 2 |
737 and not all(revrange(repo, [r]) for r in revs) |
744 and not all(revrange(repo, [r]) for r in revs) |
738 ): |
745 ): |
739 raise error.Abort(_('empty revision on one side of range')) |
746 raise error.Abort(_(b'empty revision on one side of range')) |
740 |
747 |
741 # if top-level is range expression, the result must always be a pair |
748 # if top-level is range expression, the result must always be a pair |
742 if first == second and len(revs) == 1 and not _pairspec(revs[0]): |
749 if first == second and len(revs) == 1 and not _pairspec(revs[0]): |
743 return repo[first], repo[None] |
750 return repo[first], repo[None] |
744 |
751 |
861 ret.append(kindpat) |
868 ret.append(kindpat) |
862 return ret |
869 return ret |
863 |
870 |
864 |
871 |
def matchandpats(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if not globbed and default == b'relpath':
        # Expand shell-style globs on platforms whose shell does not.
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)

    def bad(f, msg):
        # Default warning callback: report the path relative to the cwd.
        ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(
        pats,
        opts.get(b'include'),
        opts.get(b'exclude'),
        default,
        listsubrepos=opts.get(b'subrepos'),
        badfn=badfn,
    )

    # A match-everything matcher means the patterns were effectively unused.
    if m.always():
        pats = []
    return m, pats
896 |
903 |
897 |
904 |
def match(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher that will warn about bad matches.'''
    # Thin wrapper over matchandpats() that discards the normalized patterns.
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
903 |
910 |
904 |
911 |
1084 if newphase > ctx.phase(): |
1091 if newphase > ctx.phase(): |
1085 toretract.setdefault(newphase, []).append(newnode) |
1092 toretract.setdefault(newphase, []).append(newnode) |
1086 elif newphase < ctx.phase(): |
1093 elif newphase < ctx.phase(): |
1087 toadvance.setdefault(newphase, []).append(newnode) |
1094 toadvance.setdefault(newphase, []).append(newnode) |
1088 |
1095 |
1089 with repo.transaction('cleanup') as tr: |
1096 with repo.transaction(b'cleanup') as tr: |
1090 # Move bookmarks |
1097 # Move bookmarks |
1091 bmarks = repo._bookmarks |
1098 bmarks = repo._bookmarks |
1092 bmarkchanges = [] |
1099 bmarkchanges = [] |
1093 for oldnode, newnode in moves.items(): |
1100 for oldnode, newnode in moves.items(): |
1094 oldbmarks = repo.nodebookmarks(oldnode) |
1101 oldbmarks = repo.nodebookmarks(oldnode) |
1095 if not oldbmarks: |
1102 if not oldbmarks: |
1096 continue |
1103 continue |
1097 from . import bookmarks # avoid import cycle |
1104 from . import bookmarks # avoid import cycle |
1098 |
1105 |
1099 repo.ui.debug( |
1106 repo.ui.debug( |
1100 'moving bookmarks %r from %s to %s\n' |
1107 b'moving bookmarks %r from %s to %s\n' |
1101 % ( |
1108 % ( |
1102 pycompat.rapply(pycompat.maybebytestr, oldbmarks), |
1109 pycompat.rapply(pycompat.maybebytestr, oldbmarks), |
1103 hex(oldnode), |
1110 hex(oldnode), |
1104 hex(newnode), |
1111 hex(newnode), |
1105 ) |
1112 ) |
1106 ) |
1113 ) |
1107 # Delete divergent bookmarks being parents of related newnodes |
1114 # Delete divergent bookmarks being parents of related newnodes |
1108 deleterevs = repo.revs( |
1115 deleterevs = repo.revs( |
1109 'parents(roots(%ln & (::%n))) - parents(%n)', |
1116 b'parents(roots(%ln & (::%n))) - parents(%n)', |
1110 allnewnodes, |
1117 allnewnodes, |
1111 newnode, |
1118 newnode, |
1112 oldnode, |
1119 oldnode, |
1113 ) |
1120 ) |
1114 deletenodes = _containsnode(repo, deleterevs) |
1121 deletenodes = _containsnode(repo, deleterevs) |
1169 |
1176 |
1170 def addremove(repo, matcher, prefix, uipathfn, opts=None): |
1177 def addremove(repo, matcher, prefix, uipathfn, opts=None): |
1171 if opts is None: |
1178 if opts is None: |
1172 opts = {} |
1179 opts = {} |
1173 m = matcher |
1180 m = matcher |
1174 dry_run = opts.get('dry_run') |
1181 dry_run = opts.get(b'dry_run') |
1175 try: |
1182 try: |
1176 similarity = float(opts.get('similarity') or 0) |
1183 similarity = float(opts.get(b'similarity') or 0) |
1177 except ValueError: |
1184 except ValueError: |
1178 raise error.Abort(_('similarity must be a number')) |
1185 raise error.Abort(_(b'similarity must be a number')) |
1179 if similarity < 0 or similarity > 100: |
1186 if similarity < 0 or similarity > 100: |
1180 raise error.Abort(_('similarity must be between 0 and 100')) |
1187 raise error.Abort(_(b'similarity must be between 0 and 100')) |
1181 similarity /= 100.0 |
1188 similarity /= 100.0 |
1182 |
1189 |
1183 ret = 0 |
1190 ret = 0 |
1184 |
1191 |
1185 wctx = repo[None] |
1192 wctx = repo[None] |
1186 for subpath in sorted(wctx.substate): |
1193 for subpath in sorted(wctx.substate): |
1187 submatch = matchmod.subdirmatcher(subpath, m) |
1194 submatch = matchmod.subdirmatcher(subpath, m) |
1188 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()): |
1195 if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()): |
1189 sub = wctx.sub(subpath) |
1196 sub = wctx.sub(subpath) |
1190 subprefix = repo.wvfs.reljoin(prefix, subpath) |
1197 subprefix = repo.wvfs.reljoin(prefix, subpath) |
1191 subuipathfn = subdiruipathfn(subpath, uipathfn) |
1198 subuipathfn = subdiruipathfn(subpath, uipathfn) |
1192 try: |
1199 try: |
1193 if sub.addremove(submatch, subprefix, subuipathfn, opts): |
1200 if sub.addremove(submatch, subprefix, subuipathfn, opts): |
1194 ret = 1 |
1201 ret = 1 |
1195 except error.LookupError: |
1202 except error.LookupError: |
1196 repo.ui.status( |
1203 repo.ui.status( |
1197 _("skipping missing subrepository: %s\n") |
1204 _(b"skipping missing subrepository: %s\n") |
1198 % uipathfn(subpath) |
1205 % uipathfn(subpath) |
1199 ) |
1206 ) |
1200 |
1207 |
1201 rejected = [] |
1208 rejected = [] |
1202 |
1209 |
1442 |
1449 |
1443 A matcher can be provided as an optimization. It is probably a bug to pass |
1450 A matcher can be provided as an optimization. It is probably a bug to pass |
1444 a matcher that doesn't match all the differences between the parent of the |
1451 a matcher that doesn't match all the differences between the parent of the |
1445 working copy and newctx. |
1452 working copy and newctx. |
1446 """ |
1453 """ |
1447 oldctx = repo['.'] |
1454 oldctx = repo[b'.'] |
1448 ds = repo.dirstate |
1455 ds = repo.dirstate |
1449 ds.setparents(newctx.node(), nullid) |
1456 ds.setparents(newctx.node(), nullid) |
1450 copies = dict(ds.copies()) |
1457 copies = dict(ds.copies()) |
1451 s = newctx.status(oldctx, match=match) |
1458 s = newctx.status(oldctx, match=match) |
1452 for f in s.modified: |
1459 for f in s.modified: |
1453 if ds[f] == 'r': |
1460 if ds[f] == b'r': |
1454 # modified + removed -> removed |
1461 # modified + removed -> removed |
1455 continue |
1462 continue |
1456 ds.normallookup(f) |
1463 ds.normallookup(f) |
1457 |
1464 |
1458 for f in s.added: |
1465 for f in s.added: |
1459 if ds[f] == 'r': |
1466 if ds[f] == b'r': |
1460 # added + removed -> unknown |
1467 # added + removed -> unknown |
1461 ds.drop(f) |
1468 ds.drop(f) |
1462 elif ds[f] != 'a': |
1469 elif ds[f] != b'a': |
1463 ds.add(f) |
1470 ds.add(f) |
1464 |
1471 |
1465 for f in s.removed: |
1472 for f in s.removed: |
1466 if ds[f] == 'a': |
1473 if ds[f] == b'a': |
1467 # removed + added -> normal |
1474 # removed + added -> normal |
1468 ds.normallookup(f) |
1475 ds.normallookup(f) |
1469 elif ds[f] != 'r': |
1476 elif ds[f] != b'r': |
1470 ds.remove(f) |
1477 ds.remove(f) |
1471 |
1478 |
1472 # Merge old parent and old working dir copies |
1479 # Merge old parent and old working dir copies |
1473 oldcopies = copiesmod.pathcopies(newctx, oldctx, match) |
1480 oldcopies = copiesmod.pathcopies(newctx, oldctx, match) |
1474 oldcopies.update(copies) |
1481 oldcopies.update(copies) |
1475 copies = dict( |
1482 copies = dict( |
1476 (dst, oldcopies.get(src, src)) for dst, src in oldcopies.iteritems() |
1483 (dst, oldcopies.get(src, src)) for dst, src in oldcopies.iteritems() |
1477 ) |
1484 ) |
1478 # Adjust the dirstate copies |
1485 # Adjust the dirstate copies |
1479 for dst, src in copies.iteritems(): |
1486 for dst, src in copies.iteritems(): |
1480 if src not in newctx or dst in newctx or ds[dst] != 'a': |
1487 if src not in newctx or dst in newctx or ds[dst] != b'a': |
1481 src = None |
1488 src = None |
1482 ds.copy(src, dst) |
1489 ds.copy(src, dst) |
1483 |
1490 |
1484 |
1491 |
def writerequires(opener, requirements):
    """Write the repo requirements, sorted one per line, via ``opener``.

    ``opener`` must be a vfs-style callable returning a context-managed
    file object; atomictemp makes the write all-or-nothing.
    """
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write(b"%s\n" % r)
1489 |
1496 |
1490 |
1497 |
1491 class filecachesubentry(object): |
1498 class filecachesubentry(object): |
1492 def __init__(self, path, stat): |
1499 def __init__(self, path, stat): |
1493 self.path = path |
1500 self.path = path |
1733 |
1740 |
1734 This can only be called while the wlock is held. This takes all the |
1741 This can only be called while the wlock is held. This takes all the |
1735 arguments that ui.system does, and returns the exit code of the |
1742 arguments that ui.system does, and returns the exit code of the |
1736 subprocess.""" |
1743 subprocess.""" |
1737 return _locksub( |
1744 return _locksub( |
1738 repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args, **kwargs |
1745 repo, repo.currentwlock(), b'HG_WLOCK_LOCKER', cmd, *args, **kwargs |
1739 ) |
1746 ) |
1740 |
1747 |
1741 |
1748 |
class progress(object):
    """Progress reporter usable as a context manager.

    Forwards position updates to the ``updatebar`` callable and, when the
    ``progress.debug`` config knob is set, also echoes each update through
    ``ui.debug``.  ``complete()`` (or leaving the ``with`` block) clears
    the bar by reporting a ``None`` position.
    """

    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool(b'progress', b'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        """Move the bar to ``pos``; a truthy ``total`` replaces the total."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        """Mark the operation finished and clear the progress bar."""
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        # initialize ``unit`` unconditionally: previously it was only
        # assigned inside ``if self.unit:``, raising NameError in the
        # debug output below whenever no unit was configured
        unit = b''
        if self.unit:
            unit = b' ' + self.unit
        if item:
            item = b' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1790 |
1797 |
1791 |
1798 |
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
        b'format', b'usegeneraldelta'
    )
1799 |
1806 |
1800 |
1807 |
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool(b'format', b'generaldelta')
1806 |
1813 |
1807 |
1814 |
1808 class simplekeyvaluefile(object): |
1815 class simplekeyvaluefile(object): |
1809 """A simple file with key=value lines |
1816 """A simple file with key=value lines |
1810 |
1817 |
1811 Keys must be alphanumerics and start with a letter, values must not |
1818 Keys must be alphanumerics and start with a letter, values must not |
1812 contain '\n' characters""" |
1819 contain '\n' characters""" |
1813 |
1820 |
1814 firstlinekey = '__firstline' |
1821 firstlinekey = b'__firstline' |
1815 |
1822 |
1816 def __init__(self, vfs, path, keys=None): |
1823 def __init__(self, vfs, path, keys=None): |
1817 self.vfs = vfs |
1824 self.vfs = vfs |
1818 self.path = path |
1825 self.path = path |
1819 |
1826 |
1855 |
1862 |
1856 If 'firstline' is not None, it is written to file before |
1863 If 'firstline' is not None, it is written to file before |
1857 everything else, as it is, not in a key=value form""" |
1864 everything else, as it is, not in a key=value form""" |
1858 lines = [] |
1865 lines = [] |
1859 if firstline is not None: |
1866 if firstline is not None: |
1860 lines.append('%s\n' % firstline) |
1867 lines.append(b'%s\n' % firstline) |
1861 |
1868 |
1862 for k, v in data.items(): |
1869 for k, v in data.items(): |
1863 if k == self.firstlinekey: |
1870 if k == self.firstlinekey: |
1864 e = "key name '%s' is reserved" % self.firstlinekey |
1871 e = b"key name '%s' is reserved" % self.firstlinekey |
1865 raise error.ProgrammingError(e) |
1872 raise error.ProgrammingError(e) |
1866 if not k[0:1].isalpha(): |
1873 if not k[0:1].isalpha(): |
1867 e = "keys must start with a letter in a key-value file" |
1874 e = b"keys must start with a letter in a key-value file" |
1868 raise error.ProgrammingError(e) |
1875 raise error.ProgrammingError(e) |
1869 if not k.isalnum(): |
1876 if not k.isalnum(): |
1870 e = "invalid key name in a simple key-value file" |
1877 e = b"invalid key name in a simple key-value file" |
1871 raise error.ProgrammingError(e) |
1878 raise error.ProgrammingError(e) |
1872 if '\n' in v: |
1879 if b'\n' in v: |
1873 e = "invalid value in a simple key-value file" |
1880 e = b"invalid value in a simple key-value file" |
1874 raise error.ProgrammingError(e) |
1881 raise error.ProgrammingError(e) |
1875 lines.append("%s=%s\n" % (k, v)) |
1882 lines.append(b"%s=%s\n" % (k, v)) |
1876 with self.vfs(self.path, mode='wb', atomictemp=True) as fp: |
1883 with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp: |
1877 fp.write(''.join(lines)) |
1884 fp.write(b''.join(lines)) |
1878 |
1885 |
1879 |
1886 |
1880 _reportobsoletedsource = [ |
1887 _reportobsoletedsource = [ |
1881 'debugobsolete', |
1888 b'debugobsolete', |
1882 'pull', |
1889 b'pull', |
1883 'push', |
1890 b'push', |
1884 'serve', |
1891 b'serve', |
1885 'unbundle', |
1892 b'unbundle', |
1886 ] |
1893 ] |
1887 |
1894 |
1888 _reportnewcssource = [ |
1895 _reportnewcssource = [ |
1889 'pull', |
1896 b'pull', |
1890 'unbundle', |
1897 b'unbundle', |
1891 ] |
1898 ] |
1892 |
1899 |
1893 |
1900 |
1894 def prefetchfiles(repo, revs, match): |
1901 def prefetchfiles(repo, revs, match): |
1895 """Invokes the registered file prefetch functions, allowing extensions to |
1902 """Invokes the registered file prefetch functions, allowing extensions to |
1935 repo = reporef() |
1942 repo = reporef() |
1936 if filtername: |
1943 if filtername: |
1937 repo = repo.filtered(filtername) |
1944 repo = repo.filtered(filtername) |
1938 func(repo, tr) |
1945 func(repo, tr) |
1939 |
1946 |
1940 newcat = '%02i-txnreport' % len(categories) |
1947 newcat = b'%02i-txnreport' % len(categories) |
1941 otr.addpostclose(newcat, wrapped) |
1948 otr.addpostclose(newcat, wrapped) |
1942 categories.append(newcat) |
1949 categories.append(newcat) |
1943 return wrapped |
1950 return wrapped |
1944 |
1951 |
1945 @reportsummary |
1952 @reportsummary |
1946 def reportchangegroup(repo, tr): |
1953 def reportchangegroup(repo, tr): |
1947 cgchangesets = tr.changes.get('changegroup-count-changesets', 0) |
1954 cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0) |
1948 cgrevisions = tr.changes.get('changegroup-count-revisions', 0) |
1955 cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0) |
1949 cgfiles = tr.changes.get('changegroup-count-files', 0) |
1956 cgfiles = tr.changes.get(b'changegroup-count-files', 0) |
1950 cgheads = tr.changes.get('changegroup-count-heads', 0) |
1957 cgheads = tr.changes.get(b'changegroup-count-heads', 0) |
1951 if cgchangesets or cgrevisions or cgfiles: |
1958 if cgchangesets or cgrevisions or cgfiles: |
1952 htext = "" |
1959 htext = b"" |
1953 if cgheads: |
1960 if cgheads: |
1954 htext = _(" (%+d heads)") % cgheads |
1961 htext = _(b" (%+d heads)") % cgheads |
1955 msg = _("added %d changesets with %d changes to %d files%s\n") |
1962 msg = _(b"added %d changesets with %d changes to %d files%s\n") |
1956 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext)) |
1963 repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext)) |
1957 |
1964 |
1958 if txmatch(_reportobsoletedsource): |
1965 if txmatch(_reportobsoletedsource): |
1959 |
1966 |
1960 @reportsummary |
1967 @reportsummary |
1961 def reportobsoleted(repo, tr): |
1968 def reportobsoleted(repo, tr): |
1962 obsoleted = obsutil.getobsoleted(repo, tr) |
1969 obsoleted = obsutil.getobsoleted(repo, tr) |
1963 newmarkers = len(tr.changes.get('obsmarkers', ())) |
1970 newmarkers = len(tr.changes.get(b'obsmarkers', ())) |
1964 if newmarkers: |
1971 if newmarkers: |
1965 repo.ui.status(_('%i new obsolescence markers\n') % newmarkers) |
1972 repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers) |
1966 if obsoleted: |
1973 if obsoleted: |
1967 repo.ui.status(_('obsoleted %i changesets\n') % len(obsoleted)) |
1974 repo.ui.status(_(b'obsoleted %i changesets\n') % len(obsoleted)) |
1968 |
1975 |
1969 if obsolete.isenabled( |
1976 if obsolete.isenabled( |
1970 repo, obsolete.createmarkersopt |
1977 repo, obsolete.createmarkersopt |
1971 ) and repo.ui.configbool('experimental', 'evolution.report-instabilities'): |
1978 ) and repo.ui.configbool( |
|
1979 b'experimental', b'evolution.report-instabilities' |
|
1980 ): |
1972 instabilitytypes = [ |
1981 instabilitytypes = [ |
1973 ('orphan', 'orphan'), |
1982 (b'orphan', b'orphan'), |
1974 ('phase-divergent', 'phasedivergent'), |
1983 (b'phase-divergent', b'phasedivergent'), |
1975 ('content-divergent', 'contentdivergent'), |
1984 (b'content-divergent', b'contentdivergent'), |
1976 ] |
1985 ] |
1977 |
1986 |
1978 def getinstabilitycounts(repo): |
1987 def getinstabilitycounts(repo): |
1979 filtered = repo.changelog.filteredrevs |
1988 filtered = repo.changelog.filteredrevs |
1980 counts = {} |
1989 counts = {} |
2014 minrev, maxrev = repo[revs.min()], repo[revs.max()] |
2023 minrev, maxrev = repo[revs.min()], repo[revs.max()] |
2015 |
2024 |
2016 if minrev == maxrev: |
2025 if minrev == maxrev: |
2017 revrange = minrev |
2026 revrange = minrev |
2018 else: |
2027 else: |
2019 revrange = '%s:%s' % (minrev, maxrev) |
2028 revrange = b'%s:%s' % (minrev, maxrev) |
2020 draft = len(repo.revs('%ld and draft()', revs)) |
2029 draft = len(repo.revs(b'%ld and draft()', revs)) |
2021 secret = len(repo.revs('%ld and secret()', revs)) |
2030 secret = len(repo.revs(b'%ld and secret()', revs)) |
2022 if not (draft or secret): |
2031 if not (draft or secret): |
2023 msg = _('new changesets %s\n') % revrange |
2032 msg = _(b'new changesets %s\n') % revrange |
2024 elif draft and secret: |
2033 elif draft and secret: |
2025 msg = _('new changesets %s (%d drafts, %d secrets)\n') |
2034 msg = _(b'new changesets %s (%d drafts, %d secrets)\n') |
2026 msg %= (revrange, draft, secret) |
2035 msg %= (revrange, draft, secret) |
2027 elif draft: |
2036 elif draft: |
2028 msg = _('new changesets %s (%d drafts)\n') |
2037 msg = _(b'new changesets %s (%d drafts)\n') |
2029 msg %= (revrange, draft) |
2038 msg %= (revrange, draft) |
2030 elif secret: |
2039 elif secret: |
2031 msg = _('new changesets %s (%d secrets)\n') |
2040 msg = _(b'new changesets %s (%d secrets)\n') |
2032 msg %= (revrange, secret) |
2041 msg %= (revrange, secret) |
2033 else: |
2042 else: |
2034 errormsg = 'entered unreachable condition' |
2043 errormsg = b'entered unreachable condition' |
2035 raise error.ProgrammingError(errormsg) |
2044 raise error.ProgrammingError(errormsg) |
2036 repo.ui.status(msg) |
2045 repo.ui.status(msg) |
2037 |
2046 |
2038 # search new changesets directly pulled as obsolete |
2047 # search new changesets directly pulled as obsolete |
2039 duplicates = tr.changes.get('revduplicates', ()) |
2048 duplicates = tr.changes.get(b'revduplicates', ()) |
2040 obsadded = unfi.revs( |
2049 obsadded = unfi.revs( |
2041 '(%d: + %ld) and obsolete()', origrepolen, duplicates |
2050 b'(%d: + %ld) and obsolete()', origrepolen, duplicates |
2042 ) |
2051 ) |
2043 cl = repo.changelog |
2052 cl = repo.changelog |
2044 extinctadded = [r for r in obsadded if r not in cl] |
2053 extinctadded = [r for r in obsadded if r not in cl] |
2045 if extinctadded: |
2054 if extinctadded: |
2046 # They are not just obsolete, but obsolete and invisible |
2055 # They are not just obsolete, but obsolete and invisible |
2047 # we call them "extinct" internally but the terms have not been |
2056 # we call them "extinct" internally but the terms have not been |
2048 # exposed to users. |
2057 # exposed to users. |
2049 msg = '(%d other changesets obsolete on arrival)\n' |
2058 msg = b'(%d other changesets obsolete on arrival)\n' |
2050 repo.ui.status(msg % len(extinctadded)) |
2059 repo.ui.status(msg % len(extinctadded)) |
2051 |
2060 |
2052 @reportsummary |
2061 @reportsummary |
2053 def reportphasechanges(repo, tr): |
2062 def reportphasechanges(repo, tr): |
2054 """Report statistics of phase changes for changesets pre-existing |
2063 """Report statistics of phase changes for changesets pre-existing |
2055 pull/unbundle. |
2064 pull/unbundle. |
2056 """ |
2065 """ |
2057 origrepolen = tr.changes.get('origrepolen', len(repo)) |
2066 origrepolen = tr.changes.get(b'origrepolen', len(repo)) |
2058 phasetracking = tr.changes.get('phases', {}) |
2067 phasetracking = tr.changes.get(b'phases', {}) |
2059 if not phasetracking: |
2068 if not phasetracking: |
2060 return |
2069 return |
2061 published = [ |
2070 published = [ |
2062 rev |
2071 rev |
2063 for rev, (old, new) in phasetracking.iteritems() |
2072 for rev, (old, new) in phasetracking.iteritems() |
2064 if new == phases.public and rev < origrepolen |
2073 if new == phases.public and rev < origrepolen |
2065 ] |
2074 ] |
2066 if not published: |
2075 if not published: |
2067 return |
2076 return |
2068 repo.ui.status( |
2077 repo.ui.status( |
2069 _('%d local changesets published\n') % len(published) |
2078 _(b'%d local changesets published\n') % len(published) |
2070 ) |
2079 ) |
2071 |
2080 |
2072 |
2081 |
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    # returns None when delta <= 0 (no new instabilities to warn about)
    if delta > 0:
        return _(b'%i new %s changesets\n') % (delta, instability)
2080 |
2089 |
2081 |
2090 |
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a space-separated list of short node hashes.

    All nodes are listed when there are at most ``maxnumnodes`` of them or
    the ui is verbose; otherwise only the first ``maxnumnodes`` are shown,
    followed by an "and N others" suffix.
    """
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return b' '.join(short(h) for h in nodes)
    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
2087 |
2096 |
2088 |
2097 |
def enforcesinglehead(repo, tr, desc, accountclosed=False):
    """check that no named branch has multiple heads"""
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(b'visible')
    # possible improvement: we could restrict the check to affected branch
    bm = visible.branchmap()
    for name in bm:
        heads = bm.branchheads(name, closed=accountclosed)
        if len(heads) > 1:
            msg = _(b'rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _(b'%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
2105 |
2114 |
2106 |
2115 |
2107 def wrapconvertsink(sink): |
2116 def wrapconvertsink(sink): |
2143 revs = _getrevsfromsymbols(repo, symbols) |
2152 revs = _getrevsfromsymbols(repo, symbols) |
2144 |
2153 |
2145 if not revs: |
2154 if not revs: |
2146 return repo |
2155 return repo |
2147 |
2156 |
2148 if hiddentype == 'warn': |
2157 if hiddentype == b'warn': |
2149 unfi = repo.unfiltered() |
2158 unfi = repo.unfiltered() |
2150 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs]) |
2159 revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs]) |
2151 repo.ui.warn( |
2160 repo.ui.warn( |
2152 _( |
2161 _( |
2153 "warning: accessing hidden changesets for write " |
2162 b"warning: accessing hidden changesets for write " |
2154 "operation: %s\n" |
2163 b"operation: %s\n" |
2155 ) |
2164 ) |
2156 % revstr |
2165 % revstr |
2157 ) |
2166 ) |
2158 |
2167 |
2159 # we have to use new filtername to separate branch/tags cache until we can |
2168 # we have to use new filtername to separate branch/tags cache until we can |
2160 # disbale these cache when revisions are dynamically pinned. |
2169 # disbale these cache when revisions are dynamically pinned. |
2161 return repo.filtered('visible-hidden', revs) |
2170 return repo.filtered(b'visible-hidden', revs) |
2162 |
2171 |
2163 |
2172 |
2164 def _getrevsfromsymbols(repo, symbols): |
2173 def _getrevsfromsymbols(repo, symbols): |
2165 """parse the list of symbols and returns a set of revision numbers of hidden |
2174 """parse the list of symbols and returns a set of revision numbers of hidden |
2166 changesets present in symbols""" |
2175 changesets present in symbols""" |
2167 revs = set() |
2176 revs = set() |
2168 unfi = repo.unfiltered() |
2177 unfi = repo.unfiltered() |
2169 unficl = unfi.changelog |
2178 unficl = unfi.changelog |
2170 cl = repo.changelog |
2179 cl = repo.changelog |
2171 tiprev = len(unficl) |
2180 tiprev = len(unficl) |
2172 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums') |
2181 allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums') |
2173 for s in symbols: |
2182 for s in symbols: |
2174 try: |
2183 try: |
2175 n = int(s) |
2184 n = int(s) |
2176 if n <= tiprev: |
2185 if n <= tiprev: |
2177 if not allowrevnums: |
2186 if not allowrevnums: |