# HG changeset patch
# User Matt Mackall
# Date 1450471211 21600
# Node ID 8f016345e6b0eb05a0f0ea933baf544dd81c8cac
# Parent 2e31a17ad1bfba63741e9eecaa48ae12daded4c4
# Parent e240e914d2261788c3ba700401659c11ed820fe3
merge with stable

diff -r e240e914d226 -r 8f016345e6b0 Makefile
--- a/Makefile	Thu Dec 17 17:16:02 2015 -0800
+++ b/Makefile	Fri Dec 18 14:40:11 2015 -0600
@@ -161,12 +161,11 @@
 	rm -rf dist/mercurial-*.mpkg
 
 deb:
-	mkdir -p packages/debian-unknown
-	contrib/builddeb --release unknown
+	contrib/builddeb
 
 docker-debian-jessie:
 	mkdir -p packages/debian-jessie
-	contrib/dockerdeb jessie
+	contrib/dockerdeb debian jessie
 
 fedora20:
 	mkdir -p packages/fedora20
diff -r e240e914d226 -r 8f016345e6b0 contrib/Makefile.python
--- a/contrib/Makefile.python	Thu Dec 17 17:16:02 2015 -0800
+++ b/contrib/Makefile.python	Fri Dec 18 14:40:11 2015 -0600
@@ -47,8 +47,8 @@
 	[ -f $(PYTHON_SRCFILE) ] || wget http://www.python.org/ftp/python/$(PYTHONVER)/$(PYTHON_SRCFILE) || curl -OL http://www.python.org/ftp/python/$(PYTHONVER)/$(PYTHON_SRCFILE) || [ -f $(PYTHON_SRCFILE) ]
 	rm -rf $(PYTHON_SRCDIR)
 	tar xf $(PYTHON_SRCFILE)
-	# Ubuntu disables SSLv2 the hard way, disable it on old Pythons too
-	-sed -i 's,self.*SSLv2_method(),0;//\0,g' $(PYTHON_SRCDIR)/Modules/_ssl.c
+	# Debian/Ubuntu disables SSLv2,3 the hard way, disable it on old Pythons too
+	-sed -i 's,self.*SSLv[23]_method(),0;//\0,g' $(PYTHON_SRCDIR)/Modules/_ssl.c
 	# Find multiarch system libraries on Ubuntu and disable fortify error when setting argv
 	LDFLAGS="-L/usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`"; \
 	BASECFLAGS=-U_FORTIFY_SOURCE; \
diff -r e240e914d226 -r 8f016345e6b0 contrib/bash_completion
--- a/contrib/bash_completion	Thu Dec 17 17:16:02 2015 -0800
+++ b/contrib/bash_completion	Fri Dec 18 14:40:11 2015 -0600
@@ -629,7 +629,7 @@
 
 _hg_cmd_shelve()
 {
-    if [[ "$prev" = @(-d|--delete|-l|--list) ]]; then
+    if [[ "$prev" = @(-d|--delete|-l|--list|-p|--patch|--stat) ]]; then
         _hg_shelves
     else
         _hg_status "mard"
diff -r e240e914d226 -r 8f016345e6b0 contrib/builddeb
--- a/contrib/builddeb	Thu Dec 17 17:16:02 2015 -0800
+++ b/contrib/builddeb	Fri Dec 18 14:40:11 2015 -0600
@@ -8,12 +8,18 @@
 
 BUILD=1
 CLEANUP=1
-DEBVERSION=jessie
+DISTID=`(lsb_release -is 2> /dev/null | tr '[:upper:]' '[:lower:]') || echo debian`
+CODENAME=`lsb_release -cs 2> /dev/null || echo unknown`
 while [ "$1" ]; do
     case "$1" in
-    --release )
+    --distid )
+        shift
+        DISTID="$1"
         shift
-        DEBVERSION="$1"
+        ;;
+    --codename )
+        shift
+        CODENAME="$1"
         shift
         ;;
     --cleanup )
@@ -24,11 +30,6 @@
         shift
         CLEANUP=
         ;;
-    --debbuilddir )
-        shift
-        DEBBUILDDIR="$1"
-        shift
-        ;;
     * )
        echo "Invalid parameter $1!" 1>&2
        exit 1
@@ -82,7 +83,8 @@
 fi
 if [ "$CLEANUP" ] ; then
     echo
-    OUTPUTDIR=${OUTPUTDIR:=packages/debian-$DEBVERSION}
+    OUTPUTDIR=${OUTPUTDIR:=packages/$DISTID-$CODENAME}
+    mkdir -p "$OUTPUTDIR"
     find ../mercurial*.deb ../mercurial_*.build ../mercurial_*.changes \
         -type f -newer $control -print0 | \
       xargs -Inarf -0 mv narf "$OUTPUTDIR"
diff -r e240e914d226 -r 8f016345e6b0 contrib/check-commit
--- a/contrib/check-commit	Thu Dec 17 17:16:02 2015 -0800
+++ b/contrib/check-commit	Fri Dec 18 14:40:11 2015 -0600
@@ -27,7 +27,7 @@
     (r"^# .*\n[A-Z][a-z]\S+", "don't capitalize summary lines"),
     (r"^# .*\n[^\n]*: *[A-Z][a-z]\S+", "don't capitalize summary lines"),
     (r"^# .*\n.*\.\s+$", "don't add trailing period on summary line"),
-    (r"^# .*\n.{78,}", "summary line too long (limit is 78)"),
+    (r"^# .*\n[^#].{77,}", "summary line too long (limit is 78)"),
     (r"^\+\n \n", "adds double empty line"),
     (r"^ \n\+\n", "adds double empty line"),
     (r"^\+[ \t]+def [a-z]+_[a-z]", "adds a function with foo_bar naming"),
diff -r e240e914d226 -r 8f016345e6b0 contrib/check-config.py
--- a/contrib/check-config.py	Thu Dec 17 17:16:02 2015 -0800
+++ b/contrib/check-config.py	Fri Dec 18 14:40:11 2015 -0600
@@ -13,14 +13,16 @@
 foundopts = {}
 documented = {}
 
-configre = (r"""ui\.config(|int|bool|list)\(['"](\S+)['"], ?"""
-            r"""['"](\S+)['"](,\s(?:default=)?(\S+?))?\)""")
+configre = (r"""ui\.config(|int|bool|list)\(['"](\S+)['"],\s*"""
+            r"""['"](\S+)['"](,\s+(?:default=)?(\S+?))?\)""")
+configpartialre = (r"""ui\.config""")
 
 def main(args):
     for f in args:
         sect = ''
         prevname = ''
         confsect = ''
+        carryover = ''
         for l in open(f):
 
             # check topic-like bits
@@ -40,29 +42,35 @@
             if m:
                 confsect = m.group(1)
                 continue
-            m = re.match(r'^\s+(?:#\s*)?([a-z._]+) = ', l)
+            m = re.match(r'^\s+(?:#\s*)?(\S+) = ', l)
             if m:
                 name = confsect + '.' + m.group(1)
                 documented[name] = 1
 
             # like the bugzilla extension
-            m = re.match(r'^\s*([a-z]+\.[a-z]+)$', l)
+            m = re.match(r'^\s*(\S+\.\S+)$', l)
+            if m:
+                documented[m.group(1)] = 1
+
+            # like convert
+            m = re.match(r'^\s*:(\S+\.\S+):\s+', l)
             if m:
                 documented[m.group(1)] = 1
 
             # quoted in help or docstrings
-            m = re.match(r'.*?``([-a-z_]+\.[-a-z_]+)``', l)
+            m = re.match(r'.*?``(\S+\.\S+)``', l)
             if m:
                 documented[m.group(1)] = 1
 
             # look for ignore markers
             m = re.search(r'# (?:internal|experimental|deprecated|developer)'
-                          ' config: (\S+.\S+)$', l)
+                          ' config: (\S+\.\S+)$', l)
             if m:
                 documented[m.group(1)] = 1
 
             # look for code-like bits
-            m = re.search(configre, l)
+            line = carryover + l
+            m = re.search(configre, line, re.MULTILINE)
             if m:
                 ctype = m.group(1)
                 if not ctype:
@@ -78,6 +86,13 @@
                     print "conflict on %s: %r != %r" % (name, (ctype, default),
                                                         foundopts[name])
                 foundopts[name] = (ctype, default)
+                carryover = ''
+            else:
+                m = re.search(configpartialre, line)
+                if m:
+                    carryover = line
+                else:
+                    carryover = ''
 
     for name in sorted(foundopts):
         if name not in documented:
diff -r e240e914d226 -r 8f016345e6b0 contrib/check-py3-compat.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/check-py3-compat.py	Fri Dec 18 14:40:11 2015 -0600
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# check-py3-compat - check Python 3 compatibility of Mercurial files
+#
+# Copyright 2015 Gregory Szorc
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+ +from __future__ import absolute_import, print_function + +import ast +import sys + +def check_compat(f): + """Check Python 3 compatibility for a file.""" + with open(f, 'rb') as fh: + content = fh.read() + + # Ignore empty files. + if not content.strip(): + return + + root = ast.parse(content) + futures = set() + haveprint = False + for node in ast.walk(root): + if isinstance(node, ast.ImportFrom): + if node.module == '__future__': + futures |= set(n.name for n in node.names) + elif isinstance(node, ast.Print): + haveprint = True + + if 'absolute_import' not in futures: + print('%s not using absolute_import' % f) + if haveprint and 'print_function' not in futures: + print('%s requires print_function' % f) + +if __name__ == '__main__': + for f in sys.argv[1:]: + check_compat(f) + + sys.exit(0) diff -r e240e914d226 -r 8f016345e6b0 contrib/dockerdeb --- a/contrib/dockerdeb Thu Dec 17 17:16:02 2015 -0800 +++ b/contrib/dockerdeb Fri Dec 18 14:40:11 2015 -0600 @@ -8,8 +8,9 @@ checkdocker -DEBPLATFORM="$1" -PLATFORM="debian-$1" +DISTID="$1" +CODENAME="$2" +PLATFORM="$1-$2" shift # extra params are passed to build process OUTPUTDIR=${OUTPUTDIR:=$ROOTDIR/packages/$PLATFORM} @@ -26,8 +27,8 @@ sh -c "cd /mnt/$dn && make clean && make local" fi $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \ - sh -c "cd /mnt/$dn && DEB_BUILD_OPTIONS='${DEB_BUILD_OPTIONS:=}' contrib/builddeb --build --release $DEBPLATFORM" -contrib/builddeb --cleanup --release $DEBPLATFORM + sh -c "cd /mnt/$dn && DEB_BUILD_OPTIONS='${DEB_BUILD_OPTIONS:=}' contrib/builddeb --build --distid $DISTID --codename $CODENAME" +contrib/builddeb --cleanup --distid $DISTID --codename $CODENAME if [ $(uname) = "Darwin" ] ; then $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \ sh -c "cd /mnt/$dn && make clean" diff -r e240e914d226 -r 8f016345e6b0 contrib/import-checker.py --- a/contrib/import-checker.py Thu Dec 17 17:16:02 2015 -0800 +++ b/contrib/import-checker.py Fri Dec 18 14:40:11 2015 -0600 @@ -1,4 +1,7 @@ +#!/usr/bin/env python + import ast +import collections import os import sys @@ -11,6 +14,8 @@ # Whitelist of modules that symbols can be directly imported from. allowsymbolimports = ( '__future__', + 'mercurial.hgweb.common', + 'mercurial.hgweb.request', 'mercurial.i18n', 'mercurial.node', ) @@ -35,6 +40,17 @@ return False +def walklocal(root): + """Recursively yield all descendant nodes but not in a different scope""" + todo = collections.deque(ast.iter_child_nodes(root)) + yield root, False + while todo: + node = todo.popleft() + newscope = isinstance(node, ast.FunctionDef) + if not newscope: + todo.extend(ast.iter_child_nodes(node)) + yield node, newscope + def dotted_name_of_path(path, trimpure=False): """Given a relative path to a source file, return its dotted module name. @@ -237,7 +253,7 @@ >>> sorted(imported_modules( ... 'import foo1; from bar import bar1', ... modulename, localmods)) - ['foo.bar.__init__', 'foo.bar.bar1', 'foo.foo1'] + ['foo.bar.bar1', 'foo.foo1'] >>> sorted(imported_modules( ... 'from bar.bar1 import name1, name2, name3', ... modulename, localmods)) @@ -284,21 +300,28 @@ continue absname, dottedpath, hassubmod = found - yield dottedpath if not hassubmod: + # "dottedpath" is not a package; must be imported + yield dottedpath # examination of "node.names" should be redundant # e.g.: from mercurial.node import nullid, nullrev continue + modnotfound = False prefix = absname + '.' 
for n in node.names: found = fromlocal(prefix + n.name) if not found: # this should be a function or a property of "node.module" + modnotfound = True continue yield found[1] + if modnotfound: + # "dottedpath" is a package, but imported because of non-module + # lookup + yield dottedpath -def verify_import_convention(module, source): +def verify_import_convention(module, source, localmods): """Verify imports match our established coding convention. We have 2 conventions: legacy and modern. The modern convention is in @@ -311,11 +334,11 @@ absolute = usingabsolute(root) if absolute: - return verify_modern_convention(module, root) + return verify_modern_convention(module, root, localmods) else: return verify_stdlib_on_own_line(root) -def verify_modern_convention(module, root): +def verify_modern_convention(module, root, localmods, root_col_offset=0): """Verify a file conforms to the modern import convention rules. The rules of the modern convention are: @@ -342,6 +365,7 @@ and readability problems. See `requirealias`. """ topmodule = module.split('.')[0] + fromlocal = fromlocalfunc(module, localmods) # Whether a local/non-stdlib import has been performed. seenlocal = False @@ -352,29 +376,36 @@ # Relative import levels encountered so far. seenlevels = set() - for node in ast.walk(root): - if isinstance(node, ast.Import): + for node, newscope in walklocal(root): + def msg(fmt, *args): + return (fmt % args, node.lineno) + if newscope: + # Check for local imports in function + for r in verify_modern_convention(module, node, localmods, + node.col_offset + 4): + yield r + elif isinstance(node, ast.Import): # Disallow "import foo, bar" and require separate imports # for each module. if len(node.names) > 1: - yield 'multiple imported names: %s' % ', '.join( - n.name for n in node.names) + yield msg('multiple imported names: %s', + ', '.join(n.name for n in node.names)) name = node.names[0].name asname = node.names[0].asname # Ignore sorting rules on imports inside blocks. - if node.col_offset == 0: + if node.col_offset == root_col_offset: if lastname and name < lastname: - yield 'imports not lexically sorted: %s < %s' % ( - name, lastname) + yield msg('imports not lexically sorted: %s < %s', + name, lastname) lastname = name # stdlib imports should be before local imports. stdlib = name in stdlib_modules - if stdlib and seenlocal and node.col_offset == 0: - yield 'stdlib import follows local import: %s' % name + if stdlib and seenlocal and node.col_offset == root_col_offset: + yield msg('stdlib import follows local import: %s', name) if not stdlib: seenlocal = True @@ -382,11 +413,11 @@ # Import of sibling modules should use relative imports. topname = name.split('.')[0] if topname == topmodule: - yield 'import should be relative: %s' % name + yield msg('import should be relative: %s', name) if name in requirealias and asname != requirealias[name]: - yield '%s module must be "as" aliased to %s' % ( - name, requirealias[name]) + yield msg('%s module must be "as" aliased to %s', + name, requirealias[name]) elif isinstance(node, ast.ImportFrom): # Resolve the full imported module name. @@ -400,39 +431,49 @@ topname = fullname.split('.')[0] if topname == topmodule: - yield 'import should be relative: %s' % fullname + yield msg('import should be relative: %s', fullname) # __future__ is special since it needs to come first and use # symbol import. 
if fullname != '__future__': if not fullname or fullname in stdlib_modules: - yield 'relative import of stdlib module' + yield msg('relative import of stdlib module') else: seenlocal = True # Direct symbol import is only allowed from certain modules and # must occur before non-symbol imports. - if node.module and node.col_offset == 0: - if fullname not in allowsymbolimports: - yield 'direct symbol import from %s' % fullname + if node.module and node.col_offset == root_col_offset: + found = fromlocal(node.module, node.level) + if found and found[2]: # node.module is a package + prefix = found[0] + '.' + symbols = [n.name for n in node.names + if not fromlocal(prefix + n.name)] + else: + symbols = [n.name for n in node.names] - if seennonsymbolrelative: - yield ('symbol import follows non-symbol import: %s' % - fullname) + if symbols and fullname not in allowsymbolimports: + yield msg('direct symbol import %s from %s', + ', '.join(symbols), fullname) + + if symbols and seennonsymbolrelative: + yield msg('symbol import follows non-symbol import: %s', + fullname) if not node.module: assert node.level seennonsymbolrelative = True # Only allow 1 group per level. - if node.level in seenlevels and node.col_offset == 0: - yield 'multiple "from %s import" statements' % ( - '.' * node.level) + if (node.level in seenlevels + and node.col_offset == root_col_offset): + yield msg('multiple "from %s import" statements', + '.' * node.level) # Higher-level groups come before lower-level groups. if any(node.level > l for l in seenlevels): - yield 'higher-level import should come first: %s' % ( - fullname) + yield msg('higher-level import should come first: %s', + fullname) seenlevels.add(node.level) @@ -442,14 +483,14 @@ for n in node.names: if lastentryname and n.name < lastentryname: - yield 'imports from %s not lexically sorted: %s < %s' % ( - fullname, n.name, lastentryname) + yield msg('imports from %s not lexically sorted: %s < %s', + fullname, n.name, lastentryname) lastentryname = n.name if n.name in requirealias and n.asname != requirealias[n.name]: - yield '%s from %s must be "as" aliased to %s' % ( - n.name, fullname, requirealias[n.name]) + yield msg('%s from %s must be "as" aliased to %s', + n.name, fullname, requirealias[n.name]) def verify_stdlib_on_own_line(root): """Given some python source, verify that stdlib imports are done @@ -460,7 +501,7 @@ http://bugs.python.org/issue19510. 
>>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo'))) - ['mixed imports\\n stdlib: sys\\n relative: foo'] + [('mixed imports\\n stdlib: sys\\n relative: foo', 1)] >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os'))) [] >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar'))) @@ -474,7 +515,7 @@ if from_stdlib[True] and from_stdlib[False]: yield ('mixed imports\n stdlib: %s\n relative: %s' % (', '.join(sorted(from_stdlib[True])), - ', '.join(sorted(from_stdlib[False])))) + ', '.join(sorted(from_stdlib[False]))), node.lineno) class CircularImport(Exception): pass @@ -546,9 +587,9 @@ src = f.read() used_imports[modname] = sorted( imported_modules(src, modname, localmods, ignore_nested=True)) - for error in verify_import_convention(modname, src): + for error, lineno in verify_import_convention(modname, src, localmods): any_errors = True - print source_path, error + print '%s:%d: %s' % (source_path, lineno, error) f.close() cycles = find_cycles(used_imports) if cycles: diff -r e240e914d226 -r 8f016345e6b0 contrib/perf.py --- a/contrib/perf.py Thu Dec 17 17:16:02 2015 -0800 +++ b/contrib/perf.py Fri Dec 18 14:40:11 2015 -0600 @@ -2,8 +2,9 @@ '''helper extension to measure performance''' from mercurial import cmdutil, scmutil, util, commands, obsolete -from mercurial import repoview, branchmap, merge, copies +from mercurial import repoview, branchmap, merge, copies, error import time, os, sys +import random import functools formatteropts = commands.formatteropts @@ -11,11 +12,16 @@ cmdtable = {} command = cmdutil.command(cmdtable) +def getlen(ui): + if ui.configbool("perf", "stub"): + return lambda x: 1 + return len + def gettimer(ui, opts=None): """return a timer function and formatter: (timer, formatter) - This functions exist to gather the creation of formatter in a single - place instead of duplicating it in all performance command.""" + This function exists to gather the creation of formatter in a single + place instead of duplicating it in all performance commands.""" # enforce an idle period before execution to counteract power management # experimental config: perf.presleep @@ -28,8 +34,15 @@ ui.fout = ui.ferr # get a formatter fm = ui.formatter('perf', opts) + # stub function, runs code only once instead of in a loop + # experimental config: perf.stub + if ui.configbool("perf", "stub"): + return functools.partial(stub_timer, fm), fm return functools.partial(_timer, fm), fm +def stub_timer(fm, func, title=None): + func() + def _timer(fm, func, title=None): results = [] begin = time.time() @@ -91,7 +104,7 @@ #m = match.always(repo.root, repo.getcwd()) #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, # False)))) - timer, fm = gettimer(ui, **opts) + timer, fm = gettimer(ui, opts) timer(lambda: sum(map(len, repo.status(unknown=opts['unknown'])))) fm.end() @@ -193,7 +206,7 @@ fm.end() @command('perfdirstatefoldmap', formatteropts) -def perffilefoldmap(ui, repo, **opts): +def perfdirstatefoldmap(ui, repo, **opts): timer, fm = gettimer(ui, opts) dirstate = repo.dirstate 'a' in dirstate @@ -239,8 +252,8 @@ def d(): # acceptremote is True because we don't want prompts in the middle of # our benchmark - merge.calculateupdates(repo, wctx, rctx, ancestor, False, False, False, - acceptremote=True) + merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False, + acceptremote=True, followcopies=True) timer(d) fm.end() @@ -293,14 +306,24 @@ timer, fm = gettimer(ui, opts) cmd = sys.argv[0] def d(): - os.system("HGRCPATH= %s version -q > 
/dev/null" % cmd) + if os.name != 'nt': + os.system("HGRCPATH= %s version -q > /dev/null" % cmd) + else: + os.environ['HGRCPATH'] = '' + os.system("%s version -q > NUL" % cmd) timer(d) fm.end() @command('perfparents', formatteropts) def perfparents(ui, repo, **opts): timer, fm = gettimer(ui, opts) - nl = [repo.changelog.node(i) for i in xrange(1000)] + # control the number of commits perfparents iterates over + # experimental config: perf.parentscount + count = ui.configint("perf", "parentscount", 1000) + if len(repo.changelog) < count: + raise error.Abort("repo needs %d commits for this test" % count) + repo = repo.unfiltered() + nl = [repo.changelog.node(i) for i in xrange(count)] def d(): for n in nl: repo.changelog.parents(n) @@ -308,7 +331,7 @@ fm.end() @command('perfctxfiles', formatteropts) -def perfparents(ui, repo, x, **opts): +def perfctxfiles(ui, repo, x, **opts): x = int(x) timer, fm = gettimer(ui, opts) def d(): @@ -317,7 +340,7 @@ fm.end() @command('perfrawfiles', formatteropts) -def perfparents(ui, repo, x, **opts): +def perfrawfiles(ui, repo, x, **opts): x = int(x) timer, fm = gettimer(ui, opts) cl = repo.changelog @@ -329,10 +352,6 @@ @command('perflookup', formatteropts) def perflookup(ui, repo, rev, **opts): timer, fm = gettimer(ui, opts) - -@command('perflookup', formatteropts) -def perflookup(ui, repo, rev, **opts): - timer, fm = gettimer(ui, opts) timer(lambda: len(repo.lookup(rev))) fm.end() @@ -358,10 +377,12 @@ @command('perflog', [('', 'rename', False, 'ask log to follow renames')] + formatteropts) -def perflog(ui, repo, **opts): +def perflog(ui, repo, rev=None, **opts): + if rev is None: + rev=[] timer, fm = gettimer(ui, opts) ui.pushbuffer() - timer(lambda: commands.log(ui, repo, rev=[], date='', user='', + timer(lambda: commands.log(ui, repo, rev=rev, date='', user='', copies=opts.get('rename'))) ui.popbuffer() fm.end() @@ -381,10 +402,12 @@ fm.end() @command('perftemplating', formatteropts) -def perftemplating(ui, repo, **opts): +def perftemplating(ui, repo, rev=None, **opts): + if rev is None: + rev=[] timer, fm = gettimer(ui, opts) ui.pushbuffer() - timer(lambda: commands.log(ui, repo, rev=[], date='', user='', + timer(lambda: commands.log(ui, repo, rev=rev, date='', user='', template='{date|shortdate} [{rev}:{node|short}]' ' {author|person}: {desc|firstline}\n')) ui.popbuffer() @@ -410,10 +433,13 @@ timer, fm = gettimer(ui, opts) s = repo.store s.fncache._load() + lock = repo.lock() + tr = repo.transaction('perffncachewrite') def d(): s.fncache._dirty = True - s.fncache.write() + s.fncache.write(tr) timer(d) + lock.release() fm.end() @command('perffncacheencode', formatteropts) @@ -454,18 +480,20 @@ timer, fm = gettimer(ui, opts) from mercurial import revlog dist = opts['dist'] + _len = getlen(ui) def d(): r = revlog.revlog(lambda fn: open(fn, 'rb'), file_) - for x in xrange(0, len(r), dist): + for x in xrange(0, _len(r), dist): r.revision(r.node(x)) timer(d) fm.end() @command('perfrevset', - [('C', 'clear', False, 'clear volatile cache between each call.')] + [('C', 'clear', False, 'clear volatile cache between each call.'), + ('', 'contexts', False, 'obtain changectx for each revision')] + formatteropts, "REVSET") -def perfrevset(ui, repo, expr, clear=False, **opts): +def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts): """benchmark the execution time of a revset Use the --clean option if need to evaluate the impact of build volatile @@ -475,7 +503,10 @@ def d(): if clear: repo.invalidatevolatilesets() - for r in repo.revs(expr): pass + if 
contexts: + for ctx in repo.set(expr): pass + else: + for r in repo.revs(expr): pass timer(d) fm.end() @@ -576,3 +607,79 @@ timer, fm = gettimer(ui) timer(lambda: len(obsolete.obsstore(repo.svfs))) fm.end() + +@command('perflrucachedict', formatteropts + + [('', 'size', 4, 'size of cache'), + ('', 'gets', 10000, 'number of key lookups'), + ('', 'sets', 10000, 'number of key sets'), + ('', 'mixed', 10000, 'number of mixed mode operations'), + ('', 'mixedgetfreq', 50, 'frequency of get vs set ops in mixed mode')], + norepo=True) +def perflrucache(ui, size=4, gets=10000, sets=10000, mixed=10000, + mixedgetfreq=50, **opts): + def doinit(): + for i in xrange(10000): + util.lrucachedict(size) + + values = [] + for i in xrange(size): + values.append(random.randint(0, sys.maxint)) + + # Get mode fills the cache and tests raw lookup performance with no + # eviction. + getseq = [] + for i in xrange(gets): + getseq.append(random.choice(values)) + + def dogets(): + d = util.lrucachedict(size) + for v in values: + d[v] = v + for key in getseq: + value = d[key] + value # silence pyflakes warning + + # Set mode tests insertion speed with cache eviction. + setseq = [] + for i in xrange(sets): + setseq.append(random.randint(0, sys.maxint)) + + def dosets(): + d = util.lrucachedict(size) + for v in setseq: + d[v] = v + + # Mixed mode randomly performs gets and sets with eviction. + mixedops = [] + for i in xrange(mixed): + r = random.randint(0, 100) + if r < mixedgetfreq: + op = 0 + else: + op = 1 + + mixedops.append((op, random.randint(0, size * 2))) + + def domixed(): + d = util.lrucachedict(size) + + for op, v in mixedops: + if op == 0: + try: + d[v] + except KeyError: + pass + else: + d[v] = v + + benches = [ + (doinit, 'init'), + (dogets, 'gets'), + (dosets, 'sets'), + (domixed, 'mixed') + ] + + for fn, title in benches: + timer, fm = gettimer(ui, opts) + timer(fn, title=title) + fm.end() diff -r e240e914d226 -r 8f016345e6b0 contrib/revsetbenchmarks.py --- a/contrib/revsetbenchmarks.py Thu Dec 17 17:16:02 2015 -0800 +++ b/contrib/revsetbenchmarks.py Fri Dec 18 14:40:11 2015 -0600 @@ -53,10 +53,13 @@ fullcmd += cmd return check_output(fullcmd, stderr=STDOUT) -def perf(revset, target=None): +def perf(revset, target=None, contexts=False): """run benchmark for this very revset""" try: - output = hg(['perfrevset', revset], repo=target) + args = ['perfrevset', revset] + if contexts: + args.append('--contexts') + output = hg(args, repo=target) return parseoutput(output) except CalledProcessError as exc: print >> sys.stderr, 'abort: cannot run revset benchmark: %s' % exc.cmd @@ -238,6 +241,9 @@ default=','.join(DEFAULTVARIANTS), help="comma separated list of variant to test " "(eg: plain,min,sorted) (plain = no modification)") +parser.add_option('', '--contexts', + action='store_true', + help='obtain changectx from results instead of integer revs') (options, args) = parser.parse_args() @@ -283,7 +289,7 @@ varres = {} for var in variants: varrset = applyvariants(rset, var) - data = perf(varrset, target=options.repo) + data = perf(varrset, target=options.repo, contexts=options.contexts) varres[var] = data res.append(varres) printresult(variants, idx, varres, len(revsets), diff -r e240e914d226 -r 8f016345e6b0 contrib/wix/templates.wxs --- a/contrib/wix/templates.wxs Thu Dec 17 17:16:02 2015 -0800 +++ b/contrib/wix/templates.wxs Fri Dec 18 14:40:11 2015 -0600 @@ -34,6 +34,7 @@ + diff -r e240e914d226 -r 8f016345e6b0 doc/check-seclevel.py --- a/doc/check-seclevel.py Thu Dec 17 17:16:02 2015 -0800 +++ 
b/doc/check-seclevel.py Fri Dec 18 14:40:11 2015 -0600 @@ -6,9 +6,8 @@ import optparse # import from the live mercurial repo +os.environ['HGMODULEPOLICY'] = 'py' sys.path.insert(0, "..") -# fall back to pure modules if required C extensions are not available -sys.path.append(os.path.join('..', 'mercurial', 'pure')) from mercurial import demandimport; demandimport.enable() from mercurial.commands import table from mercurial.help import helptable diff -r e240e914d226 -r 8f016345e6b0 doc/gendoc.py --- a/doc/gendoc.py Thu Dec 17 17:16:02 2015 -0800 +++ b/doc/gendoc.py Fri Dec 18 14:40:11 2015 -0600 @@ -4,10 +4,12 @@ """ import os, sys, textwrap + +# This script is executed during installs and may not have C extensions +# available. Relax C module requirements. +os.environ['HGMODULEPOLICY'] = 'allow' # import from the live mercurial repo sys.path.insert(0, "..") -# fall back to pure modules if required C extensions are not available -sys.path.append(os.path.join('..', 'mercurial', 'pure')) from mercurial import demandimport; demandimport.enable() from mercurial import minirst from mercurial.commands import table, globalopts diff -r e240e914d226 -r 8f016345e6b0 hgext/censor.py --- a/hgext/censor.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/censor.py Fri Dec 18 14:40:11 2015 -0600 @@ -28,6 +28,7 @@ from mercurial.node import short from mercurial import cmdutil, error, filelog, revlog, scmutil, util from mercurial.i18n import _ +from mercurial import lock as lockmod cmdtable = {} command = cmdutil.command(cmdtable) @@ -42,6 +43,15 @@ ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))], _('-r REV [-t TEXT] [FILE]')) def censor(ui, repo, path, rev='', tombstone='', **opts): + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + return _docensor(ui, repo, path, rev, tombstone, **opts) + finally: + lockmod.release(lock, wlock) + +def _docensor(ui, repo, path, rev='', tombstone='', **opts): if not path: raise error.Abort(_('must specify file path to censor')) if not rev: diff -r e240e914d226 -r 8f016345e6b0 hgext/clonebundles.py --- a/hgext/clonebundles.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/clonebundles.py Fri Dec 18 14:40:11 2015 -0600 @@ -47,7 +47,7 @@ * Generating bundle files of repository content (typically periodically, such as once per day). * A file server that clients have network access to and that Python knows - how to talk to through its normal URL handling facility (typically a + how to talk to through its normal URL handling facility (typically an HTTP server). * A process for keeping the bundles manifest in sync with available bundle files. @@ -183,7 +183,7 @@ experimental.clonebundleprefers List of "key=value" properties the client prefers in bundles. Downloaded bundle manifests will be sorted by the preferences in this list. e.g. - the value "BUNDLESPEC=gzip-v1, BUNDLESPEC=bzip2=v1" will prefer a gzipped + the value "BUNDLESPEC=gzip-v1, BUNDLESPEC=bzip2-v1" will prefer a gzipped version 1 bundle type then bzip2 version 1 bundle type. 
If not defined, the order in the manifest will be used and the first diff -r e240e914d226 -r 8f016345e6b0 hgext/color.py --- a/hgext/color.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/color.py Fri Dec 18 14:40:11 2015 -0600 @@ -419,16 +419,6 @@ _styles[status] = ' '.join(good) class colorui(uimod.ui): - def popbuffer(self, labeled=False): - if self._colormode is None: - return super(colorui, self).popbuffer(labeled) - - self._bufferstates.pop() - if labeled: - return ''.join(self.label(a, label) for a, label - in self._buffers.pop()) - return ''.join(a for a, label in self._buffers.pop()) - _colormode = 'ansi' def write(self, *args, **opts): if self._colormode is None: @@ -436,13 +426,16 @@ label = opts.get('label', '') if self._buffers: - self._buffers[-1].extend([(str(a), label) for a in args]) + if self._bufferapplylabels: + self._buffers[-1].extend(self.label(a, label) for a in args) + else: + self._buffers[-1].extend(args) elif self._colormode == 'win32': for a in args: win32print(a, super(colorui, self).write, **opts) else: return super(colorui, self).write( - *[self.label(str(a), label) for a in args], **opts) + *[self.label(a, label) for a in args], **opts) def write_err(self, *args, **opts): if self._colormode is None: @@ -456,7 +449,7 @@ win32print(a, super(colorui, self).write_err, **opts) else: return super(colorui, self).write_err( - *[self.label(str(a), label) for a in args], **opts) + *[self.label(a, label) for a in args], **opts) def showlabel(self, msg, label): if label and msg: diff -r e240e914d226 -r 8f016345e6b0 hgext/convert/hg.py --- a/hgext/convert/hg.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/convert/hg.py Fri Dec 18 14:40:11 2015 -0600 @@ -23,6 +23,7 @@ from mercurial.node import bin, hex, nullid from mercurial import hg, util, context, bookmarks, error, scmutil, exchange from mercurial import phases +from mercurial import lock as lockmod from mercurial import merge as mergemod from common import NoRepo, commit, converter_source, converter_sink, mapfile @@ -191,7 +192,6 @@ self.repo, p1ctx, p2ctx, anc, True, # branchmerge True, # force - False, # partial False, # acceptremote False, # followcopies ) @@ -410,12 +410,19 @@ def putbookmarks(self, updatedbookmark): if not len(updatedbookmark): return - - self.ui.status(_("updating bookmarks\n")) - destmarks = self.repo._bookmarks - for bookmark in updatedbookmark: - destmarks[bookmark] = bin(updatedbookmark[bookmark]) - destmarks.write() + wlock = lock = tr = None + try: + wlock = self.repo.wlock() + lock = self.repo.lock() + tr = self.repo.transaction('bookmark') + self.ui.status(_("updating bookmarks\n")) + destmarks = self.repo._bookmarks + for bookmark in updatedbookmark: + destmarks[bookmark] = bin(updatedbookmark[bookmark]) + destmarks.recordchange(tr) + tr.close() + finally: + lockmod.release(lock, wlock, tr) def hascommitfrommap(self, rev): # the exact semantics of clonebranches is unclear so we can't say no diff -r e240e914d226 -r 8f016345e6b0 hgext/convert/subversion.py --- a/hgext/convert/subversion.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/convert/subversion.py Fri Dec 18 14:40:11 2015 -0600 @@ -1041,7 +1041,7 @@ relpaths.append(p.strip('/')) args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths, strict_node_history] - # undocumented feature: debugsvnlog can be disabled + # developer config: convert.svn.debugsvnlog if not self.ui.configbool('convert', 'svn.debugsvnlog', True): return directlogstream(*args) arg = encodeargs(args) diff -r e240e914d226 -r 8f016345e6b0 hgext/gpg.py 
--- a/hgext/gpg.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/gpg.py Fri Dec 18 14:40:11 2015 -0600 @@ -9,6 +9,7 @@ from mercurial import util, commands, match, cmdutil, error from mercurial import node as hgnode from mercurial.i18n import _ +from mercurial import lock as lockmod cmdtable = {} command = cmdutil.command(cmdtable) @@ -168,7 +169,7 @@ ui.write("%-30s %s\n" % (keystr(ui, k), r)) @command("sigcheck", [], _('hg sigcheck REV')) -def check(ui, repo, rev): +def sigcheck(ui, repo, rev): """verify all the signatures there may be for a particular revision""" mygpg = newgpg(ui) rev = repo.lookup(rev) @@ -222,7 +223,14 @@ See :hg:`help dates` for a list of formats valid for -d/--date. """ + wlock = None + try: + wlock = repo.wlock() + return _dosign(ui, repo, *revs, **opts) + finally: + lockmod.release(wlock) +def _dosign(ui, repo, *revs, **opts): mygpg = newgpg(ui, **opts) sigver = "0" sigmessage = "" diff -r e240e914d226 -r 8f016345e6b0 hgext/graphlog.py --- a/hgext/graphlog.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/graphlog.py Fri Dec 18 14:40:11 2015 -0600 @@ -49,7 +49,7 @@ ] + commands.logopts + commands.walkopts, _('[OPTION]... [FILE]'), inferrepo=True) -def graphlog(ui, repo, *pats, **opts): +def glog(ui, repo, *pats, **opts): """show revision history alongside an ASCII revision graph Print a revision history alongside a revision graph drawn with diff -r e240e914d226 -r 8f016345e6b0 hgext/histedit.py --- a/hgext/histedit.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/histedit.py Fri Dec 18 14:40:11 2015 -0600 @@ -143,12 +143,30 @@ repository that Mercurial does not detect to be related to the source repo, you can add a ``--force`` option. +Config +------ + Histedit rule lines are truncated to 80 characters by default. You can customize this behavior by setting a different length in your configuration file:: [histedit] linelen = 120 # truncate rule lines at 120 characters + +``hg histedit`` attempts to automatically choose an appropriate base +revision to use. To change which base revision is used, define a +revset in your configuration file:: + + [histedit] + defaultrev = only(.) & draft() + +By default each edited revision needs to be present in histedit commands. +To remove revision you need to use ``drop`` operation. You can configure +the drop to be implicit for missing commits by adding: + + [histedit] + dropmissing = True + """ try: @@ -166,6 +184,7 @@ from mercurial import error from mercurial import copies from mercurial import context +from mercurial import destutil from mercurial import exchange from mercurial import extensions from mercurial import hg @@ -181,6 +200,18 @@ cmdtable = {} command = cmdutil.command(cmdtable) +class _constraints(object): + # aborts if there are multiple rules for one node + noduplicates = 'noduplicates' + # abort if the node does belong to edited stack + forceother = 'forceother' + # abort if the node doesn't belong to edited stack + noother = 'noother' + + @classmethod + def known(cls): + return set([v for k, v in cls.__dict__.items() if k[0] != '_']) + # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. 
Non-mainline extensions should # be specifying the version(s) of Mercurial they are tested with, or @@ -203,10 +234,10 @@ """) class histeditstate(object): - def __init__(self, repo, parentctxnode=None, rules=None, keep=None, + def __init__(self, repo, parentctxnode=None, actions=None, keep=None, topmost=None, replacements=None, lock=None, wlock=None): self.repo = repo - self.rules = rules + self.actions = actions self.keep = keep self.topmost = topmost self.parentctxnode = parentctxnode @@ -236,7 +267,9 @@ parentctxnode, rules, keep, topmost, replacements, backupfile = data self.parentctxnode = parentctxnode - self.rules = rules + rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules]) + actions = parserules(rules, self) + self.actions = actions self.keep = keep self.topmost = topmost self.replacements = replacements @@ -248,10 +281,9 @@ fp.write('%s\n' % node.hex(self.parentctxnode)) fp.write('%s\n' % node.hex(self.topmost)) fp.write('%s\n' % self.keep) - fp.write('%d\n' % len(self.rules)) - for rule in self.rules: - fp.write('%s\n' % rule[0]) # action - fp.write('%s\n' % rule[1]) # remainder + fp.write('%d\n' % len(self.actions)) + for action in self.actions: + fp.write('%s\n' % action.tostate()) fp.write('%d\n' % len(self.replacements)) for replacement in self.replacements: fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r) @@ -316,6 +348,7 @@ def inprogress(self): return self.repo.vfs.exists('histedit-state') + class histeditaction(object): def __init__(self, state, node): self.state = state @@ -326,13 +359,54 @@ def fromrule(cls, state, rule): """Parses the given rule, returning an instance of the histeditaction. """ - repo = state.repo rulehash = rule.strip().split(' ', 1)[0] + return cls(state, node.bin(rulehash)) + + def verify(self): + """ Verifies semantic correctness of the rule""" + repo = self.repo + ha = node.hex(self.node) try: - node = repo[rulehash].node() + self.node = repo[ha].node() except error.RepoError: - raise error.Abort(_('unknown changeset %s listed') % rulehash[:12]) - return cls(state, node) + raise error.Abort(_('unknown changeset %s listed') + % ha[:12]) + + def torule(self): + """build a histedit rule line for an action + + by default lines are in the form: + + """ + ctx = self.repo[self.node] + summary = '' + if ctx.description(): + summary = ctx.description().splitlines()[0] + line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary) + # trim to 75 columns by default so it's not stupidly wide in my editor + # (the 5 more are left for verb) + maxlen = self.repo.ui.configint('histedit', 'linelen', default=80) + maxlen = max(maxlen, 22) # avoid truncating hash + return util.ellipsis(line, maxlen) + + def tostate(self): + """Print an action in format used by histedit state files + (the first line is a verb, the remainder is the second) + """ + return "%s\n%s" % (self.verb, node.hex(self.node)) + + def constraints(self): + """Return a set of constrains that this action should be verified for + """ + return set([_constraints.noduplicates, _constraints.noother]) + + def nodetoverify(self): + """Returns a node associated with the action that will be used for + verification purposes. + + If the action doesn't correspond to node it should return None + """ + return self.node def run(self): """Runs the action. 
The default behavior is simply apply the action's @@ -346,7 +420,7 @@ parentctx, but does not commit them.""" repo = self.repo rulectx = repo[self.node] - hg.update(repo, self.state.parentctxnode) + hg.update(repo, self.state.parentctxnode, quietempty=True) stats = applychanges(repo.ui, repo, rulectx, {}) if stats and stats[3] > 0: raise error.InterventionRequired(_('Fix up the change and run ' @@ -502,6 +576,29 @@ editor=editor) return repo.commitctx(new) +def _isdirtywc(repo): + return repo[None].dirty(missing=True) + +def abortdirty(): + raise error.Abort(_('working copy has pending changes'), + hint=_('amend, commit, or revert them and run histedit ' + '--continue, or abort with histedit --abort')) + + +actiontable = {} +actionlist = [] + +def addhisteditaction(verbs): + def wrap(cls): + cls.verb = verbs[0] + for verb in verbs: + actiontable[verb] = cls + actionlist.append(cls) + return cls + return wrap + + +@addhisteditaction(['pick', 'p']) class pick(histeditaction): def run(self): rulectx = self.repo[self.node] @@ -511,11 +608,12 @@ return super(pick, self).run() +@addhisteditaction(['edit', 'e']) class edit(histeditaction): def run(self): repo = self.repo rulectx = repo[self.node] - hg.update(repo, self.state.parentctxnode) + hg.update(repo, self.state.parentctxnode, quietempty=True) applychanges(repo.ui, repo, rulectx, {}) raise error.InterventionRequired( _('Make changes as needed, you may commit or record as needed ' @@ -525,6 +623,7 @@ def commiteditor(self): return cmdutil.getcommiteditor(edit=True, editform='histedit.edit') +@addhisteditaction(['fold', 'f']) class fold(histeditaction): def continuedirty(self): repo = self.repo @@ -618,6 +717,24 @@ replacements.append((ich, (n,))) return repo[n], replacements +class base(histeditaction): + def constraints(self): + return set([_constraints.forceother]) + + def run(self): + if self.repo['.'].node() != self.node: + mergemod.update(self.repo, self.node, False, True) + # branchmerge, force) + return self.continueclean() + + def continuedirty(self): + abortdirty() + + def continueclean(self): + basectx = self.repo['.'] + return basectx, [] + +@addhisteditaction(['_multifold']) class _multifold(fold): """fold subclass used for when multiple folds happen in a row @@ -630,6 +747,7 @@ def skipprompt(self): return True +@addhisteditaction(["roll", "r"]) class rollup(fold): def mergedescs(self): return False @@ -637,11 +755,13 @@ def skipprompt(self): return True +@addhisteditaction(["drop", "d"]) class drop(histeditaction): def run(self): parentctx = self.repo[self.state.parentctxnode] return parentctx, [(self.node, tuple())] +@addhisteditaction(["mess", "m"]) class message(histeditaction): def commiteditor(self): return cmdutil.getcommiteditor(edit=True, editform='histedit.mess') @@ -672,20 +792,6 @@ raise error.Abort(msg, hint=hint) return repo.lookup(roots[0]) -actiontable = {'p': pick, - 'pick': pick, - 'e': edit, - 'edit': edit, - 'f': fold, - 'fold': fold, - '_multifold': _multifold, - 'r': rollup, - 'roll': rollup, - 'd': drop, - 'drop': drop, - 'm': message, - 'mess': message, - } @command('histedit', [('', 'commands', '', @@ -699,13 +805,19 @@ ('f', 'force', False, _('force outgoing even for unrelated repositories')), ('r', 'rev', [], _('first revision to be edited'), _('REV'))], - _("ANCESTOR | --outgoing [URL]")) + _("[ANCESTOR] | --outgoing [URL]")) def histedit(ui, repo, *freeargs, **opts): """interactively edit changeset history - This command edits changesets between ANCESTOR and the parent of + This command edits 
changesets between an ANCESTOR and the parent of the working directory. + The value from the "histedit.defaultrev" config option is used as a + revset to select the base revision when ANCESTOR is not specified. + The first revision returned by the revset is used. By default, this + selects the editable history that is unique to the ancestry of the + working directory. + With --outgoing, this edits changesets not found in the destination repository. If URL of the destination is omitted, the 'default-push' (or 'default') path will be used. @@ -719,6 +831,56 @@ such ambiguous situation. See :hg:`help revsets` for detail about selecting revisions. + .. container:: verbose + + Examples: + + - A number of changes have been made. + Revision 3 is no longer needed. + + Start history editing from revision 3:: + + hg histedit -r 3 + + An editor opens, containing the list of revisions, + with specific actions specified:: + + pick 5339bf82f0ca 3 Zworgle the foobar + pick 8ef592ce7cc4 4 Bedazzle the zerlog + pick 0a9639fcda9d 5 Morgify the cromulancy + + Additional information about the possible actions + to take appears below the list of revisions. + + To remove revision 3 from the history, + its action (at the beginning of the relevant line) + is changed to 'drop':: + + drop 5339bf82f0ca 3 Zworgle the foobar + pick 8ef592ce7cc4 4 Bedazzle the zerlog + pick 0a9639fcda9d 5 Morgify the cromulancy + + - A number of changes have been made. + Revision 2 and 4 need to be swapped. + + Start history editing from revision 2:: + + hg histedit -r 2 + + An editor opens, containing the list of revisions, + with specific actions specified:: + + pick 252a1af424ad 2 Blorb a morgwazzle + pick 5339bf82f0ca 3 Zworgle the foobar + pick 8ef592ce7cc4 4 Bedazzle the zerlog + + To swap revision 2 and 4, its lines are swapped + in the editor:: + + pick 8ef592ce7cc4 4 Bedazzle the zerlog + pick 5339bf82f0ca 3 Zworgle the foobar + pick 252a1af424ad 2 Blorb a morgwazzle + Returns 0 on success, 1 if user intervention is required (not only for intentional "edit" command, but also for resolving unexpected conflicts). 
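As a compact illustration of the histedit configuration options documented in this patch (``histedit.linelen``, ``histedit.defaultrev`` and ``histedit.dropmissing`` all appear in the help text above; the concrete values below are only examples, not defaults), a user's configuration file might contain::

  [histedit]
  # truncate rule lines at 100 characters instead of the default 80
  linelen = 100
  # revset used to pick the base revision when no ANCESTOR is given
  defaultrev = only(.) & draft()
  # treat changesets missing from the rule list as dropped
  dropmissing = True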
@@ -728,11 +890,16 @@ state.wlock = repo.wlock() state.lock = repo.lock() _histedit(ui, repo, state, *freeargs, **opts) + except error.Abort: + if repo.vfs.exists('histedit-last-edit.txt'): + ui.warn(_('warning: histedit rules saved ' + 'to: .hg/histedit-last-edit.txt\n')) + raise finally: release(state.lock, state.wlock) def _histedit(ui, repo, state, *freeargs, **opts): - # TODO only abort if we try and histedit mq patches, not just + # TODO only abort if we try to histedit mq patches, not just # blanket if mq patches are applied somewhere mq = getattr(repo, 'mq', None) if mq and mq.applied: @@ -775,10 +942,10 @@ else: revs.extend(freeargs) if len(revs) == 0: - # experimental config: histedit.defaultrev - histeditdefault = ui.config('histedit', 'defaultrev') - if histeditdefault: - revs.append(histeditdefault) + defaultrev = destutil.desthistedit(ui, repo) + if defaultrev is not None: + revs.append(defaultrev) + if len(revs) != 1: raise error.Abort( _('histedit requires exactly one ancestor revision')) @@ -797,7 +964,7 @@ if not rules: comment = editcomment % (node.short(state.parentctxnode), node.short(state.topmost)) - rules = ruleeditor(repo, ui, state.rules, comment) + rules = ruleeditor(repo, ui, state.actions, comment) else: if rules == '-': f = sys.stdin @@ -805,10 +972,11 @@ f = open(rules) rules = f.read() f.close() - rules = [l for l in (r.strip() for r in rules.splitlines()) - if l and not l.startswith('#')] - rules = verifyrules(rules, repo, [repo[c] for [_a, c] in state.rules]) - state.rules = rules + actions = parserules(rules, state) + ctxs = [repo[act.nodetoverify()] \ + for act in state.actions if act.nodetoverify()] + verifyactions(actions, state, ctxs) + state.actions = actions state.write() return elif goal == 'abort': @@ -840,7 +1008,7 @@ # check whether we should update away if repo.unfiltered().revs('parents() and (%n or %ln::)', state.parentctxnode, leafs | tmpnodes): - hg.clean(repo, state.topmost) + hg.clean(repo, state.topmost, show_stats=True, quietempty=True) cleanupnode(ui, repo, 'created', tmpnodes) cleanupnode(ui, repo, 'temp', leafs) except Exception: @@ -856,6 +1024,8 @@ cmdutil.checkunfinished(repo) cmdutil.bailifchanged(repo) + if repo.vfs.exists('histedit-last-edit.txt'): + repo.vfs.unlink('histedit-last-edit.txt') topmost, empty = repo.dirstate.parents() if outg: if freeargs: @@ -878,7 +1048,8 @@ ctxs = [repo[r] for r in revs] if not rules: comment = editcomment % (node.short(root), node.short(topmost)) - rules = ruleeditor(repo, ui, [['pick', c] for c in ctxs], comment) + actions = [pick(state, r) for r in revs] + rules = ruleeditor(repo, ui, actions, comment) else: if rules == '-': f = sys.stdin @@ -886,14 +1057,13 @@ f = open(rules) rules = f.read() f.close() - rules = [l for l in (r.strip() for r in rules.splitlines()) - if l and not l.startswith('#')] - rules = verifyrules(rules, repo, ctxs) + actions = parserules(rules, state) + verifyactions(actions, state, ctxs) parentctxnode = repo[root].parents()[0].node() state.parentctxnode = parentctxnode - state.rules = rules + state.actions = actions state.topmost = topmost state.replacements = replacements @@ -906,23 +1076,23 @@ # preprocess rules so that we can hide inner folds from the user # and only show one editor - rules = state.rules[:] - for idx, ((action, ha), (nextact, unused)) in enumerate( - zip(rules, rules[1:] + [(None, None)])): - if action == 'fold' and nextact == 'fold': - state.rules[idx] = '_multifold', ha + actions = state.actions[:] + for idx, (action, nextact) in enumerate( + 
zip(actions, actions[1:] + [None])): + if action.verb == 'fold' and nextact and nextact.verb == 'fold': + state.actions[idx].__class__ = _multifold - while state.rules: + while state.actions: state.write() - action, ha = state.rules.pop(0) - ui.debug('histedit: processing %s %s\n' % (action, ha[:12])) - actobj = actiontable[action].fromrule(state, ha) + actobj = state.actions.pop(0) + ui.debug('histedit: processing %s %s\n' % (actobj.verb,\ + actobj.torule())) parentctx, replacement_ = actobj.run() state.parentctxnode = parentctx.node() state.replacements.extend(replacement_) state.write() - hg.update(repo, state.parentctxnode) + hg.update(repo, state.parentctxnode, quietempty=True) mapping, tmpnodes, created, ntm = processreplacement(state) if mapping: @@ -966,17 +1136,13 @@ def bootstrapcontinue(ui, state, opts): repo = state.repo - if state.rules: - action, currentnode = state.rules.pop(0) - - actobj = actiontable[action].fromrule(state, currentnode) + if state.actions: + actobj = state.actions.pop(0) - s = repo.status() - if s.modified or s.added or s.removed or s.deleted: + if _isdirtywc(repo): actobj.continuedirty() - s = repo.status() - if s.modified or s.added or s.removed or s.deleted: - raise error.Abort(_("working copy still dirty")) + if _isdirtywc(repo): + abortdirty() parentctx, replacements = actobj.continueclean() @@ -1002,32 +1168,15 @@ hint=_('see "hg help phases" for details')) return [c.node() for c in ctxs] -def makedesc(repo, action, rev): - """build a initial action line for a ctx - - line are in the form: - - - """ - ctx = repo[rev] - summary = '' - if ctx.description(): - summary = ctx.description().splitlines()[0] - line = '%s %s %d %s' % (action, ctx, ctx.rev(), summary) - # trim to 80 columns so it's not stupidly wide in my editor - maxlen = repo.ui.configint('histedit', 'linelen', default=80) - maxlen = max(maxlen, 22) # avoid truncating hash - return util.ellipsis(line, maxlen) - -def ruleeditor(repo, ui, rules, editcomment=""): +def ruleeditor(repo, ui, actions, editcomment=""): """open an editor to edit rules rules are in the format [ [act, ctx], ...] like in state.rules """ - rules = '\n'.join([makedesc(repo, act, rev) for [act, rev] in rules]) + rules = '\n'.join([act.torule() for act in actions]) rules += '\n\n' rules += editcomment - rules = ui.edit(rules, ui.username()) + rules = ui.edit(rules, ui.username(), {'prefix': 'histedit'}) # Save edit rules in .hg/histedit-last-edit.txt in case # the user needs to ask for help after something @@ -1038,40 +1187,66 @@ return rules -def verifyrules(rules, repo, ctxs): - """Verify that there exists exactly one edit rule per given changeset. +def parserules(rules, state): + """Read the histedit rules string and return list of action objects """ + rules = [l for l in (r.strip() for r in rules.splitlines()) + if l and not l.startswith('#')] + actions = [] + for r in rules: + if ' ' not in r: + raise error.Abort(_('malformed line "%s"') % r) + verb, rest = r.split(' ', 1) + + if verb not in actiontable: + raise error.Abort(_('unknown action "%s"') % verb) + + action = actiontable[verb].fromrule(state, rest) + actions.append(action) + return actions + +def verifyactions(actions, state, ctxs): + """Verify that there exists exactly one action per given changeset and + other constraints. Will abort if there are to many or too few rules, a malformed rule, or a rule on a changeset outside of the user-given range. 
""" - parsed = [] expected = set(c.hex() for c in ctxs) seen = set() - for r in rules: - if ' ' not in r: - raise error.Abort(_('malformed line "%s"') % r) - action, rest = r.split(' ', 1) - ha = rest.strip().split(' ', 1)[0] - try: - ha = repo[ha].hex() - except error.RepoError: - raise error.Abort(_('unknown changeset %s listed') % ha[:12]) - if ha not in expected: - raise error.Abort( - _('may not use changesets other than the ones listed')) - if ha in seen: - raise error.Abort(_('duplicated command for changeset %s') % - ha[:12]) - seen.add(ha) - if action not in actiontable or action.startswith('_'): - raise error.Abort(_('unknown action "%s"') % action) - parsed.append([action, ha]) + for action in actions: + action.verify() + constraints = action.constraints() + for constraint in constraints: + if constraint not in _constraints.known(): + raise error.Abort(_('unknown constraint "%s"') % constraint) + + nodetoverify = action.nodetoverify() + if nodetoverify is not None: + ha = node.hex(nodetoverify) + if _constraints.noother in constraints and ha not in expected: + raise error.Abort( + _('may not use "%s" with changesets ' + 'other than the ones listed') % action.verb) + if _constraints.forceother in constraints and ha in expected: + raise error.Abort( + _('may not use "%s" with changesets ' + 'within the edited list') % action.verb) + if _constraints.noduplicates in constraints and ha in seen: + raise error.Abort(_('duplicated command for changeset %s') % + ha[:12]) + seen.add(ha) missing = sorted(expected - seen) # sort to stabilize output - if missing: + + if state.repo.ui.configbool('histedit', 'dropmissing'): + drops = [drop(state, node.bin(n)) for n in missing] + # put the in the beginning so they execute immediately and + # don't show in the edit-plan in the future + actions[:0] = drops + elif missing: raise error.Abort(_('missing rules for changeset %s') % missing[0][:12], - hint=_('do you want to use the drop action?')) - return parsed + hint=_('use "drop %s" to discard, see also: ' + '"hg help -e histedit.config"') % missing[0][:12]) def newnodestoabort(state): """process the list of replacements to return @@ -1179,13 +1354,20 @@ # nothing to move moves.append((bk, new[-1])) if moves: - marks = repo._bookmarks - for mark, new in moves: - old = marks[mark] - ui.note(_('histedit: moving bookmarks %s from %s to %s\n') - % (mark, node.short(old), node.short(new))) - marks[mark] = new - marks.write() + lock = tr = None + try: + lock = repo.lock() + tr = repo.transaction('histedit') + marks = repo._bookmarks + for mark, new in moves: + old = marks[mark] + ui.note(_('histedit: moving bookmarks %s from %s to %s\n') + % (mark, node.short(old), node.short(new))) + marks[mark] = new + marks.recordchange(tr) + tr.close() + finally: + release(tr, lock) def cleanupnode(ui, repo, name, nodes): """strip a group of nodes from the repository @@ -1219,8 +1401,8 @@ if os.path.exists(os.path.join(repo.path, 'histedit-state')): state = histeditstate(repo) state.read() - histedit_nodes = set([repo[rulehash].node() for (action, rulehash) - in state.rules if rulehash in repo]) + histedit_nodes = set([action.nodetoverify() for action + in state.actions if action.nodetoverify()]) strip_nodes = set([repo[n].node() for n in nodelist]) common_nodes = histedit_nodes & strip_nodes if common_nodes: @@ -1235,14 +1417,16 @@ return state = histeditstate(repo) state.read() - if state.rules: + if state.actions: # i18n: column positioning for "hg summary" ui.write(_('hist: %s (histedit --continue)\n') % 
(ui.label(_('%d remaining'), 'histedit.remaining') % - len(state.rules))) + len(state.actions))) def extsetup(ui): cmdutil.summaryhooks.add('histedit', summaryhook) cmdutil.unfinishedstates.append( ['histedit-state', False, True, _('histedit in progress'), _("use 'hg histedit --continue' or 'hg histedit --abort'")]) + if ui.configbool("experimental", "histeditng"): + globals()['base'] = addhisteditaction(['base', 'b'])(base) diff -r e240e914d226 -r 8f016345e6b0 hgext/largefiles/lfcommands.py --- a/hgext/largefiles/lfcommands.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/largefiles/lfcommands.py Fri Dec 18 14:40:11 2015 -0600 @@ -444,12 +444,14 @@ updated, removed = 0, 0 for lfile in lfiles: abslfile = repo.wjoin(lfile) + abslfileorig = cmdutil.origpath(ui, repo, abslfile) absstandin = repo.wjoin(lfutil.standin(lfile)) + absstandinorig = cmdutil.origpath(ui, repo, absstandin) if os.path.exists(absstandin): - if (os.path.exists(absstandin + '.orig') and + if (os.path.exists(absstandinorig) and os.path.exists(abslfile)): - shutil.copyfile(abslfile, abslfile + '.orig') - util.unlinkpath(absstandin + '.orig') + shutil.copyfile(abslfile, abslfileorig) + util.unlinkpath(absstandinorig) expecthash = lfutil.readstandin(repo, lfile) if expecthash != '': if lfile not in repo[None]: # not switched to normal file diff -r e240e914d226 -r 8f016345e6b0 hgext/largefiles/overrides.py --- a/hgext/largefiles/overrides.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/largefiles/overrides.py Fri Dec 18 14:40:11 2015 -0600 @@ -458,11 +458,11 @@ # writing the files into the working copy and lfcommands.updatelfiles # will update the largefiles. def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force, - partial, acceptremote, followcopies): + acceptremote, followcopies, matcher=None): overwrite = force and not branchmerge actions, diverge, renamedelete = origfn( - repo, p1, p2, pas, branchmerge, force, partial, acceptremote, - followcopies) + repo, p1, p2, pas, branchmerge, force, acceptremote, + followcopies, matcher=matcher) if overwrite: return actions, diverge, renamedelete @@ -481,6 +481,9 @@ (lm, largs, lmsg) = actions.get(lfile, (None, None, None)) (sm, sargs, smsg) = actions.get(standin, (None, None, None)) if sm in ('g', 'dc') and lm != 'r': + if sm == 'dc': + f1, f2, fa, move, anc = sargs + sargs = (p2[f2].flags(),) # Case 1: normal file in the working copy, largefile in # the second parent usermsg = _('remote turned local normal file %s into a largefile\n' @@ -496,6 +499,9 @@ else: actions[standin] = ('r', None, 'replaced by non-standin') elif lm in ('g', 'dc') and sm != 'r': + if lm == 'dc': + f1, f2, fa, move, anc = largs + largs = (p2[f2].flags(),) # Case 2: largefile in the working copy, normal file in # the second parent usermsg = _('remote turned local largefile %s into a normal file\n' @@ -538,7 +544,7 @@ # largefiles. This will handle identical edits without prompting the user. 
def overridefilemerge(origfn, premerge, repo, mynode, orig, fcd, fco, fca, labels=None): - if not lfutil.isstandin(orig): + if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent(): return origfn(premerge, repo, mynode, orig, fcd, fco, fca, labels=labels) @@ -555,7 +561,7 @@ (lfutil.splitstandin(orig), ahash, dhash, ohash), 0) == 1)): repo.wwrite(fcd.path(), fco.data(), fco.flags()) - return True, 0 + return True, 0, False def copiespathcopies(orig, ctx1, ctx2, match=None): copies = orig(ctx1, ctx2, match=match) @@ -1358,8 +1364,11 @@ err = 0 return err -def mergeupdate(orig, repo, node, branchmerge, force, partial, +def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs): + matcher = kwargs.get('matcher', None) + # note if this is a partial update + partial = matcher and not matcher.always() wlock = repo.wlock() try: # branch | | | @@ -1399,7 +1408,7 @@ oldstandins = lfutil.getstandinsstate(repo) - result = orig(repo, node, branchmerge, force, partial, *args, **kwargs) + result = orig(repo, node, branchmerge, force, *args, **kwargs) newstandins = lfutil.getstandinsstate(repo) filelist = lfutil.getlfilestoupdate(oldstandins, newstandins) diff -r e240e914d226 -r 8f016345e6b0 hgext/mq.py --- a/hgext/mq.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/mq.py Fri Dec 18 14:40:11 2015 -0600 @@ -68,6 +68,7 @@ from mercurial import commands, cmdutil, hg, scmutil, util, revset from mercurial import extensions, error, phases from mercurial import patch as patchmod +from mercurial import lock as lockmod from mercurial import localrepo from mercurial import subrepo import os, re, errno, shutil @@ -697,11 +698,13 @@ absf = repo.wjoin(f) if os.path.lexists(absf): self.ui.note(_('saving current version of %s as %s\n') % - (f, f + '.orig')) + (f, cmdutil.origpath(self.ui, repo, f))) + + absorig = cmdutil.origpath(self.ui, repo, absf) if copy: - util.copyfile(absf, absf + '.orig') + util.copyfile(absf, absorig) else: - util.rename(absf, absf + '.orig') + util.rename(absf, absorig) def printdiff(self, repo, diffopts, node1, node2=None, files=None, fp=None, changes=None, opts={}): @@ -1788,27 +1791,34 @@ # Ensure we create a new changeset in the same phase than # the old one. 
- n = newcommit(repo, oldphase, message, user, ph.date, + lock = tr = None + try: + lock = repo.lock() + tr = repo.transaction('mq') + n = newcommit(repo, oldphase, message, user, ph.date, match=match, force=True, editor=editor) - # only write patch after a successful commit - c = [list(x) for x in refreshchanges] - if inclsubs: - self.putsubstate2changes(substatestate, c) - chunks = patchmod.diff(repo, patchparent, - changes=c, opts=diffopts) - comments = str(ph) - if comments: - patchf.write(comments) - for chunk in chunks: - patchf.write(chunk) - patchf.close() - - marks = repo._bookmarks - for bm in bmlist: - marks[bm] = n - marks.write() - - self.applied.append(statusentry(n, patchfn)) + # only write patch after a successful commit + c = [list(x) for x in refreshchanges] + if inclsubs: + self.putsubstate2changes(substatestate, c) + chunks = patchmod.diff(repo, patchparent, + changes=c, opts=diffopts) + comments = str(ph) + if comments: + patchf.write(comments) + for chunk in chunks: + patchf.write(chunk) + patchf.close() + + marks = repo._bookmarks + for bm in bmlist: + marks[bm] = n + marks.recordchange(tr) + tr.close() + + self.applied.append(statusentry(n, patchfn)) + finally: + lockmod.release(lock, tr) except: # re-raises ctx = repo[cparents[0]] repo.dirstate.rebuild(ctx.node(), ctx.manifest()) diff -r e240e914d226 -r 8f016345e6b0 hgext/pager.py --- a/hgext/pager.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/pager.py Fri Dec 18 14:40:11 2015 -0600 @@ -49,9 +49,13 @@ To ignore global commands like :hg:`version` or :hg:`help`, you have to specify them in your user configuration file. -The --pager=... option can also be used to control when the pager is -used. Use a boolean value like yes, no, on, off, or use auto for -normal behavior. +To control whether the pager is used at all for an individual command, +you can use --pager=:: + + - use as needed: `auto`. + - require the pager: `yes` or `on`. + - suppress the pager: `no` or `off` (any unrecognized value + will also work). 
''' diff -r e240e914d226 -r 8f016345e6b0 hgext/patchbomb.py --- a/hgext/patchbomb.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/patchbomb.py Fri Dec 18 14:40:11 2015 -0600 @@ -58,7 +58,7 @@ ''' import os, errno, socket, tempfile, cStringIO -import email +import email as emailmod from mercurial import cmdutil, commands, hg, mail, patch, util, error from mercurial import scmutil @@ -155,7 +155,7 @@ body += '\n'.join(patchlines) if addattachment: - msg = email.MIMEMultipart.MIMEMultipart() + msg = emailmod.MIMEMultipart.MIMEMultipart() if body: msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test'))) p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', @@ -272,15 +272,15 @@ or prompt(ui, 'Subject:', 'A bundle for your repository')) body = _getdescription(repo, '', sender, **opts) - msg = email.MIMEMultipart.MIMEMultipart() + msg = emailmod.MIMEMultipart.MIMEMultipart() if body: msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test'))) - datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle') + datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle') datapart.set_payload(bundle) bundlename = '%s.hg' % opts.get('bundlename', 'bundle') datapart.add_header('Content-Disposition', 'attachment', filename=bundlename) - email.Encoders.encode_base64(datapart) + emailmod.Encoders.encode_base64(datapart) msg.attach(datapart) msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test')) return [(msg, subj, None)] @@ -403,7 +403,7 @@ ('', 'intro', None, _('send an introduction email for a single patch')), ] + emailopts + commands.remoteopts, _('hg email [OPTION]... [DEST]...')) -def patchbomb(ui, repo, *revs, **opts): +def email(ui, repo, *revs, **opts): '''send changesets by email By default, diffs are sent in the format generated by @@ -641,7 +641,7 @@ if not parent.endswith('>'): parent += '>' - sender_addr = email.Utils.parseaddr(sender)[1] + sender_addr = emailmod.Utils.parseaddr(sender)[1] sender = mail.addressencode(ui, sender, _charsets, opts.get('test')) sendmail = None firstpatch = None @@ -660,7 +660,7 @@ parent = m['Message-Id'] m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version() - m['Date'] = email.Utils.formatdate(start_time[0], localtime=True) + m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True) start_time = (start_time[0] + 1, start_time[1]) m['From'] = sender @@ -678,7 +678,7 @@ fp = util.popen(os.environ['PAGER'], 'w') else: fp = ui - generator = email.Generator.Generator(fp, mangle_from_=False) + generator = emailmod.Generator.Generator(fp, mangle_from_=False) try: generator.flatten(m, 0) fp.write('\n') @@ -702,7 +702,7 @@ # Exim does not remove the Bcc field del m['Bcc'] fp = cStringIO.StringIO() - generator = email.Generator.Generator(fp, mangle_from_=False) + generator = emailmod.Generator.Generator(fp, mangle_from_=False) generator.flatten(m, 0) sendmail(sender_addr, to + bcc + cc, fp.getvalue()) diff -r e240e914d226 -r 8f016345e6b0 hgext/progress.py --- a/hgext/progress.py Thu Dec 17 17:16:02 2015 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# progress.py show progress bars for some actions -# -# Copyright (C) 2010 Augie Fackler -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. - -"""show progress bars for some actions (DEPRECATED) - -This extension has been merged into core, you can remove it from your config. -See hg help config.progress for configuration options. 
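One detail worth calling out in the patchbomb hunk above: the command function is renamed from patchbomb() to email(), so the module imports the stdlib package under an alias (emailmod) to keep it reachable. A reduced, Python 2-style illustration (the real command builds and sends the whole series)::

  import email as emailmod   # plain 'email' now names the command below

  def email(ui, repo, *revs, **opts):
      '''send changesets by email'''
      msg = emailmod.MIMEMultipart.MIMEMultipart()
      msg['Date'] = emailmod.Utils.formatdate(localtime=True)
      return msg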
-""" -# Note for extension authors: ONLY specify testedwith = 'internal' for -# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should -# be specifying the version(s) of Mercurial they are tested with, or -# leave the attribute unspecified. -testedwith = 'internal' diff -r e240e914d226 -r 8f016345e6b0 hgext/rebase.py --- a/hgext/rebase.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/rebase.py Fri Dec 18 14:40:11 2015 -0600 @@ -30,8 +30,11 @@ revtodo = -1 nullmerge = -2 revignored = -3 -# To do with obsolescence +# successor in rebase destination revprecursor = -4 +# plain prune (no successor) +revpruned = -5 +revskipped = (revignored, revprecursor, revpruned) cmdtable = {} command = cmdutil.command(cmdtable) @@ -44,14 +47,6 @@ def _nothingtorebase(): return 1 -def _savegraft(ctx, extra): - s = ctx.extra().get('source', None) - if s is not None: - extra['source'] = s - -def _savebranch(ctx, extra): - extra['branch'] = ctx.branch() - def _makeextrafn(copiers): """make an extrafn out of the given copy-functions. @@ -220,8 +215,9 @@ abortf = opts.get('abort') collapsef = opts.get('collapse', False) collapsemsg = cmdutil.logmessage(ui, opts) + date = opts.get('date', None) e = opts.get('extrafn') # internal, used by e.g. hgsubversion - extrafns = [_savegraft] + extrafns = [] if e: extrafns = [e] keepf = opts.get('keep', False) @@ -390,18 +386,13 @@ if dest.closesbranch() and not keepbranchesf: ui.status(_('reopening closed branch head %s\n') % dest) - if keepbranchesf: - # insert _savebranch at the start of extrafns so if - # there's a user-provided extrafn it can clobber branch if - # desired - extrafns.insert(0, _savebranch) - if collapsef: - branches = set() - for rev in state: - branches.add(repo[rev].branch()) - if len(branches) > 1: - raise error.Abort(_('cannot collapse multiple named ' - 'branches')) + if keepbranchesf and collapsef: + branches = set() + for rev in state: + branches.add(repo[rev].branch()) + if len(branches) > 1: + raise error.Abort(_('cannot collapse multiple named ' + 'branches')) # Rebase if not targetancestors: @@ -434,7 +425,7 @@ targetancestors) storestatus(repo, originalwd, target, state, collapsef, keepf, keepbranchesf, external, activebookmark) - if len(repo.parents()) == 2: + if len(repo[None].parents()) == 2: repo.ui.debug('resuming interrupted rebase\n') else: try: @@ -454,7 +445,8 @@ editor = cmdutil.getcommiteditor(editform=editform, **opts) newnode = concludenode(repo, rev, p1, p2, extrafn=extrafn, editor=editor, - keepbranches=keepbranchesf) + keepbranches=keepbranchesf, + date=date) else: # Skip commit if we are collapsing repo.dirstate.beginparentchange() @@ -482,6 +474,9 @@ targetctx.description().split('\n', 1)[0]) msg = _('note: not rebasing %s, already in destination as %s\n') ui.status(msg % (desc, desctarget)) + elif state[rev] == revpruned: + msg = _('note: not rebasing %s, it has no successor\n') + ui.status(msg % desc) else: ui.status(_('already rebased %s as %s\n') % (desc, repo[state[rev]])) @@ -505,7 +500,8 @@ editor = cmdutil.getcommiteditor(edit=editopt, editform=editform) newnode = concludenode(repo, rev, p1, external, commitmsg=commitmsg, extrafn=extrafn, editor=editor, - keepbranches=keepbranchesf) + keepbranches=keepbranchesf, + date=date) if newnode is None: newrev = target else: @@ -543,13 +539,19 @@ collapsedas = newnode clearrebased(ui, repo, state, skipped, collapsedas) - if currentbookmarks: - updatebookmarks(repo, targetnode, nstate, currentbookmarks) - if activebookmark not in repo._bookmarks: - # active bookmark 
was divergent one and has been deleted - activebookmark = None + tr = None + try: + tr = repo.transaction('bookmark') + if currentbookmarks: + updatebookmarks(repo, targetnode, nstate, currentbookmarks, tr) + if activebookmark not in repo._bookmarks: + # active bookmark was divergent one and has been deleted + activebookmark = None + tr.close() + finally: + release(tr) + clearstatus(repo) - clearstatus(repo) ui.note(_("rebase completed\n")) util.unlinkpath(repo.sjoin('undo'), ignoremissing=True) if skipped: @@ -586,7 +588,7 @@ ', '.join(str(p) for p in sorted(parents)))) def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None, - keepbranches=False): + keepbranches=False, date=None): '''Commit the wd changes with parents p1 and p2. Reuse commit info from rev but also store useful information in extra. Return node of committed revision.''' @@ -597,7 +599,10 @@ if commitmsg is None: commitmsg = ctx.description() keepbranch = keepbranches and repo[p1].branch() != ctx.branch() - extra = {'rebase_source': ctx.hex()} + extra = ctx.extra().copy() + if not keepbranches: + del extra['branch'] + extra['rebase_source'] = ctx.hex() if extrafn: extrafn(ctx, extra) @@ -608,8 +613,10 @@ if keepbranch: repo.ui.setconfig('ui', 'allowemptycommit', True) # Commit might fail if unresolved files exist + if date is None: + date = ctx.date() newnode = repo.commit(text=commitmsg, user=ctx.user(), - date=ctx.date(), extra=extra, editor=editor) + date=date, extra=extra, editor=editor) finally: repo.ui.restoreconfig(backup) @@ -625,7 +632,7 @@ # Update to target and merge it with local if repo['.'].rev() != p1: repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1])) - merge.update(repo, p1, False, True, False) + merge.update(repo, p1, False, True) else: repo.ui.debug(" already in target\n") repo.dirstate.write(repo.currenttransaction()) @@ -634,7 +641,7 @@ repo.ui.debug(" detach base %d:%s\n" % (base, repo[base])) # When collapsing in-place, the parent is the common ancestor, we # have to allow merging with it. 
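The bookmark handling above (and in the histedit, mq, and bookmarks hunks elsewhere in this patch) moves from marks.write() to recording changes inside a store transaction. The recurring pattern, shown as a minimal sketch with a hypothetical movebookmark() helper::

  from mercurial import lock as lockmod

  def movebookmark(repo, name, newnode):
      # take the store lock, open a transaction, mutate the in-memory
      # bookmark store, and let the transaction persist or roll it back
      lock = tr = None
      try:
          lock = repo.lock()
          tr = repo.transaction('bookmark')
          marks = repo._bookmarks
          marks[name] = newnode
          marks.recordchange(tr)
          tr.close()
      finally:
          lockmod.release(tr, lock)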
- stats = merge.update(repo, rev, True, True, False, base, collapse, + stats = merge.update(repo, rev, True, True, base, collapse, labels=['dest', 'source']) if collapse: copies.duplicatecopies(repo, rev, target) @@ -668,7 +675,7 @@ elif p1n in state: if state[p1n] == nullmerge: p1 = target - elif state[p1n] in (revignored, revprecursor): + elif state[p1n] in revskipped: p1 = nearestrebased(repo, p1n, state) if p1 is None: p1 = target @@ -684,7 +691,7 @@ if p2n in state: if p1 == target: # p1n in targetancestors or external p1 = state[p2n] - elif state[p2n] in (revignored, revprecursor): + elif state[p2n] in revskipped: p2 = nearestrebased(repo, p2n, state) if p2 is None: # no ancestors rebased yet, detach @@ -802,7 +809,7 @@ mq.seriesdirty = True mq.savedirty() -def updatebookmarks(repo, targetnode, nstate, originalbookmarks): +def updatebookmarks(repo, targetnode, nstate, originalbookmarks, tr): 'Move bookmarks to their correct changesets, and delete divergent ones' marks = repo._bookmarks for k, v in originalbookmarks.iteritems(): @@ -810,8 +817,7 @@ # update the bookmarks for revs that have moved marks[k] = nstate[v] bookmarks.deletedivergent(repo, [targetnode], k) - - marks.write() + marks.recordchange(tr) def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches, external, activebookmark): @@ -874,7 +880,7 @@ else: oldrev, newrev = l.split(':') if newrev in (str(nullmerge), str(revignored), - str(revprecursor)): + str(revprecursor), str(revpruned)): state[repo[oldrev].rev()] = int(newrev) elif newrev == nullid: state[repo[oldrev].rev()] = revtodo @@ -908,7 +914,7 @@ def needupdate(repo, state): '''check whether we should `update --clean` away from a merge, or if somehow the working dir got forcibly updated, e.g. by older hg''' - parents = [p.rev() for p in repo.parents()] + parents = [p.rev() for p in repo[None].parents()] # Are we in a merge state at all? if len(parents) < 2: @@ -952,7 +958,7 @@ if cleanup: # Update away from the rebase if necessary if needupdate(repo, state): - merge.update(repo, originalwd, False, True, False) + merge.update(repo, originalwd, False, True) # Strip from the first rebased revision rebased = filter(lambda x: x >= 0 and x != target, state.values()) @@ -1058,7 +1064,10 @@ for ignored in set(rebasedomain) - set(rebaseset): state[ignored] = revignored for r in obsoletenotrebased: - state[r] = revprecursor + if obsoletenotrebased[r] is None: + state[r] = revpruned + else: + state[r] = revprecursor return repo['.'].rev(), dest.rev(), state def clearrebased(ui, repo, state, skipped, collapsedas=None): @@ -1172,7 +1181,9 @@ def _computeobsoletenotrebased(repo, rebasesetrevs, dest): """return a mapping obsolete => successor for all obsolete nodes to be - rebased that have a successors in the destination""" + rebased that have a successors in the destination + + obsolete => None entries in the mapping indicate nodes with no succesor""" obsoletenotrebased = {} # Build a mapping successor => obsolete nodes for the obsolete @@ -1198,6 +1209,11 @@ for s in allsuccessors: if s in ancs: obsoletenotrebased[allsuccessors[s]] = s + elif (s == allsuccessors[s] and + allsuccessors.values().count(s) == 1): + # plain prune + obsoletenotrebased[s] = None + return obsoletenotrebased def summaryhook(ui, repo): diff -r e240e914d226 -r 8f016345e6b0 hgext/share.py --- a/hgext/share.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/share.py Fri Dec 18 14:40:11 2015 -0600 @@ -73,7 +73,8 @@ the broken clone to reset it to a changeset that still exists. 
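For the rebase changes above, the mapping returned by _computeobsoletenotrebased() can be pictured with hypothetical revision numbers: entries with a successor trigger the revprecursor path, while None entries mark plain prunes handled by the new revpruned state::

  obsoletenotrebased = {
      12: 40,    # 12 was rewritten as 40, already present in the destination
      15: None,  # 15 was pruned outright: "not rebasing ..., it has no successor"
  }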
""" - return hg.share(ui, source, dest, not noupdate, bookmarks) + return hg.share(ui, source, dest=dest, update=not noupdate, + bookmarks=bookmarks) @command('unshare', [], '') def unshare(ui, repo): @@ -121,7 +122,7 @@ return orig(ui, source, *args, **opts) def extsetup(ui): - extensions.wrapfunction(bookmarks.bmstore, 'getbkfile', getbkfile) + extensions.wrapfunction(bookmarks, '_getbkfile', getbkfile) extensions.wrapfunction(bookmarks.bmstore, 'recordchange', recordchange) extensions.wrapfunction(bookmarks.bmstore, '_writerepo', writerepo) extensions.wrapcommand(commands.table, 'clone', clone) @@ -149,12 +150,12 @@ srcurl, branches = parseurl(source) return repository(repo.ui, srcurl) -def getbkfile(orig, self, repo): +def getbkfile(orig, repo): if _hassharedbookmarks(repo): srcrepo = _getsrcrepo(repo) if srcrepo is not None: repo = srcrepo - return orig(self, repo) + return orig(repo) def recordchange(orig, self, tr): # Continue with write to local bookmarks file as usual diff -r e240e914d226 -r 8f016345e6b0 hgext/shelve.py --- a/hgext/shelve.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/shelve.py Fri Dec 18 14:40:11 2015 -0600 @@ -224,7 +224,14 @@ def createcmd(ui, repo, pats, opts): """subcommand that creates a new shelve""" + wlock = repo.wlock() + try: + cmdutil.checkunfinished(repo) + return _docreatecmd(ui, repo, pats, opts) + finally: + lockmod.release(wlock) +def _docreatecmd(ui, repo, pats, opts): def mutableancestors(ctx): """return all mutable ancestors for ctx (included) @@ -276,7 +283,7 @@ repo.mq.checkapplied = saved if parent.node() != nullid: - desc = "changes to '%s'" % parent.description().split('\n', 1)[0] + desc = "changes to: %s" % parent.description().split('\n', 1)[0] else: desc = '(changes in empty repository)' @@ -285,9 +292,8 @@ name = opts['name'] - wlock = lock = tr = None + lock = tr = None try: - wlock = repo.wlock() lock = repo.lock() # use an uncommitted transaction to generate the bundle to avoid @@ -346,7 +352,7 @@ _aborttransaction(repo) finally: - lockmod.release(tr, lock, wlock) + lockmod.release(tr, lock) def cleanupcmd(ui, repo): """subcommand that deletes all shelves""" @@ -467,7 +473,6 @@ def unshelveabort(ui, repo, state, opts): """subcommand that abort an in-progress unshelve""" - wlock = repo.wlock() lock = None try: checkparents(repo, state) @@ -491,7 +496,7 @@ finally: shelvedstate.clear(repo) ui.warn(_("unshelve of '%s' aborted\n") % state.name) - lockmod.release(lock, wlock) + lockmod.release(lock) def mergefiles(ui, repo, wctx, shelvectx): """updates to wctx and merges the changes from shelvectx into the @@ -507,7 +512,7 @@ # revert will overwrite unknown files, so move them out of the way for file in repo.status(unknown=True).unknown: if file in files: - util.rename(file, file + ".orig") + util.rename(file, cmdutil.origpath(ui, repo, file)) ui.pushbuffer(True) cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(), *pathtofiles(repo, files), @@ -527,11 +532,10 @@ """subcommand to continue an in-progress unshelve""" # We're finishing off a merge. First parent is our original # parent, second is the temporary "fake" commit we're unshelving. 
- wlock = repo.wlock() lock = None try: checkparents(repo, state) - ms = merge.mergestate(repo) + ms = merge.mergestate.read(repo) if [f for f in ms if ms[f] == 'u']: raise error.Abort( _("unresolved conflicts, can't continue"), @@ -565,15 +569,16 @@ unshelvecleanup(ui, repo, state.name, opts) ui.status(_("unshelve of '%s' complete\n") % state.name) finally: - lockmod.release(lock, wlock) + lockmod.release(lock) @command('unshelve', [('a', 'abort', None, _('abort an incomplete unshelve operation')), ('c', 'continue', None, _('continue an incomplete unshelve operation')), - ('', 'keep', None, + ('k', 'keep', None, _('keep shelve after unshelving')), + ('t', 'tool', '', _('specify merge tool')), ('', 'date', '', _('set date for temporary commits (DEPRECATED)'), _('DATE'))], _('hg unshelve [SHELVED]')) @@ -609,6 +614,13 @@ than ``maxbackups`` backups are kept, if same timestamp prevents from deciding exact order of them, for safety. """ + wlock = repo.wlock() + try: + return _dounshelve(ui, repo, *shelved, **opts) + finally: + lockmod.release(wlock) + +def _dounshelve(ui, repo, *shelved, **opts): abortf = opts['abort'] continuef = opts['continue'] if not abortf and not continuef: @@ -620,6 +632,8 @@ if shelved: raise error.Abort(_('cannot combine abort/continue with ' 'naming a shelved change')) + if abortf and opts.get('tool', False): + ui.warn(_('tool option will be ignored\n')) try: state = shelvedstate.load(repo) @@ -647,9 +661,10 @@ raise error.Abort(_("shelved change '%s' not found") % basename) oldquiet = ui.quiet - wlock = lock = tr = None + lock = tr = None + forcemerge = ui.backupconfig('ui', 'forcemerge') try: - wlock = repo.wlock() + ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'unshelve') lock = repo.lock() tr = repo.transaction('unshelve', report=lambda x: None) @@ -675,7 +690,7 @@ backup = repo.ui.backupconfig('phases', 'new-commit') try: - repo.ui. setconfig('phases', 'new-commit', phases.secret) + repo.ui.setconfig('phases', 'new-commit', phases.secret) return repo.commit(message, 'shelve@localhost', opts.get('date'), match) finally: @@ -706,6 +721,7 @@ 'rev' : [shelvectx.rev()], 'dest' : str(tmpwctx.rev()), 'keep' : True, + 'tool' : opts.get('tool', ''), }) except error.InterventionRequired: tr.close() @@ -743,7 +759,8 @@ ui.quiet = oldquiet if tr: tr.release() - lockmod.release(lock, wlock) + lockmod.release(lock) + ui.restoreconfig(forcemerge) @command('shelve', [('A', 'addremove', None, @@ -796,8 +813,6 @@ To delete specific shelved changes, use ``--delete``. To delete all shelved changes, use ``--cleanup``. 
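The shelve hunks above (and the unshelve, transplant, backout, and commit hunks in this patch) all restructure the command the same way: the entry point takes the working-directory lock first, then delegates to a _do*() helper. Schematically, with hypothetical names::

  def mycommand(ui, repo, *pats, **opts):
      wlock = repo.wlock()
      try:
          # every check and state read now runs under wlock
          return _domycommand(ui, repo, *pats, **opts)
      finally:
          wlock.release()

  def _domycommand(ui, repo, *pats, **opts):
      pass  # the original command body, minus its own lock handling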
''' - cmdutil.checkunfinished(repo) - allowables = [ ('addremove', set(['create'])), # 'create' is pseudo action ('cleanup', set(['cleanup'])), diff -r e240e914d226 -r 8f016345e6b0 hgext/strip.py --- a/hgext/strip.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/strip.py Fri Dec 18 14:40:11 2015 -0600 @@ -7,7 +7,7 @@ from mercurial.node import nullid from mercurial.lock import release from mercurial import cmdutil, hg, scmutil, util, error -from mercurial import repair, bookmarks, merge +from mercurial import repair, bookmarks as bookmarksmod , merge cmdtable = {} command = cmdutil.command(cmdtable) @@ -44,7 +44,7 @@ raise error.Abort(_("local changed subrepos found" + excsuffix)) return s -def strip(ui, repo, revs, update=True, backup=True, force=None, bookmark=None): +def strip(ui, repo, revs, update=True, backup=True, force=None, bookmarks=None): wlock = lock = None try: wlock = repo.wlock() @@ -62,13 +62,21 @@ repair.strip(ui, repo, revs, backup) - marks = repo._bookmarks - if bookmark: - if bookmark == repo._activebookmark: - bookmarks.deactivate(repo) - del marks[bookmark] - marks.write() - ui.write(_("bookmark '%s' deleted\n") % bookmark) + repomarks = repo._bookmarks + if bookmarks: + tr = None + try: + tr = repo.transaction('strip') + if repo._activebookmark in bookmarks: + bookmarksmod.deactivate(repo) + for bookmark in bookmarks: + del repomarks[bookmark] + repomarks.recordchange(tr) + tr.close() + for bookmark in sorted(bookmarks): + ui.write(_("bookmark '%s' deleted\n") % bookmark) + finally: + release(tr) finally: release(lock, wlock) @@ -85,7 +93,7 @@ ('n', '', None, _('ignored (DEPRECATED)')), ('k', 'keep', None, _("do not modify working directory during " "strip")), - ('B', 'bookmark', '', _("remove revs only reachable from given" + ('B', 'bookmark', [], _("remove revs only reachable from given" " bookmark"))], _('hg strip [-k] [-f] [-n] [-B bookmark] [-r] REV...')) def stripcmd(ui, repo, *revs, **opts): @@ -127,27 +135,36 @@ wlock = repo.wlock() try: - if opts.get('bookmark'): - mark = opts.get('bookmark') - marks = repo._bookmarks - if mark not in marks: - raise error.Abort(_("bookmark '%s' not found") % mark) + bookmarks = set(opts.get('bookmark')) + if bookmarks: + repomarks = repo._bookmarks + if not bookmarks.issubset(repomarks): + raise error.Abort(_("bookmark '%s' not found") % + ','.join(sorted(bookmarks - set(repomarks.keys())))) # If the requested bookmark is not the only one pointing to a # a revision we have to only delete the bookmark and not strip # anything. revsets cannot detect that case. 
- uniquebm = True - for m, n in marks.iteritems(): - if m != mark and n == repo[mark].node(): - uniquebm = False - break - if uniquebm: - rsrevs = repair.stripbmrevset(repo, mark) - revs.update(set(rsrevs)) + nodetobookmarks = {} + for mark, node in repomarks.iteritems(): + nodetobookmarks.setdefault(node, []).append(mark) + for marks in nodetobookmarks.values(): + if bookmarks.issuperset(marks): + rsrevs = repair.stripbmrevset(repo, marks[0]) + revs.update(set(rsrevs)) if not revs: - del marks[mark] - marks.write() - ui.write(_("bookmark '%s' deleted\n") % mark) + lock = tr = None + try: + lock = repo.lock() + tr = repo.transaction('bookmark') + for bookmark in bookmarks: + del repomarks[bookmark] + repomarks.recordchange(tr) + tr.close() + for bookmark in sorted(bookmarks): + ui.write(_("bookmark '%s' deleted\n") % bookmark) + finally: + release(lock, tr) if not revs: raise error.Abort(_('empty revision set')) @@ -208,14 +225,13 @@ repo.dirstate.write(repo.currenttransaction()) # clear resolve state - ms = merge.mergestate(repo) - ms.reset(repo['.'].node()) + merge.mergestate.clean(repo, repo['.'].node()) update = False strip(ui, repo, revs, backup=backup, update=update, - force=opts.get('force'), bookmark=opts.get('bookmark')) + force=opts.get('force'), bookmarks=bookmarks) finally: wlock.release() diff -r e240e914d226 -r 8f016345e6b0 hgext/transplant.py --- a/hgext/transplant.py Thu Dec 17 17:16:02 2015 -0800 +++ b/hgext/transplant.py Fri Dec 18 14:40:11 2015 -0600 @@ -20,6 +20,7 @@ from mercurial import bundlerepo, hg, merge, match from mercurial import patch, revlog, scmutil, util, error, cmdutil from mercurial import revset, templatekw, exchange +from mercurial import lock as lockmod class TransplantError(error.Abort): pass @@ -127,9 +128,8 @@ diffopts = patch.difffeatureopts(self.ui, opts) diffopts.git = True - lock = wlock = tr = None + lock = tr = None try: - wlock = repo.wlock() lock = repo.lock() tr = repo.transaction('transplant') for rev in revs: @@ -152,7 +152,7 @@ if pulls: if source != repo: exchange.pull(repo, source.peer(), heads=pulls) - merge.update(repo, pulls[-1], False, False, None) + merge.update(repo, pulls[-1], False, False) p1, p2 = repo.dirstate.parents() pulls = [] @@ -216,7 +216,7 @@ tr.close() if pulls: exchange.pull(repo, source.peer(), heads=pulls) - merge.update(repo, pulls[-1], False, False, None) + merge.update(repo, pulls[-1], False, False) finally: self.saveseries(revmap, merges) self.transplants.write() @@ -224,7 +224,6 @@ tr.release() if lock: lock.release() - wlock.release() def filter(self, filter, node, changelog, patchfile): '''arbitrarily rewrite changeset before applying it''' @@ -345,7 +344,6 @@ merge = True extra = {'transplant_source': node} - wlock = repo.wlock() try: p1, p2 = repo.dirstate.parents() if p1 != parent: @@ -367,7 +365,9 @@ return n, node finally: - wlock.release() + # TODO: get rid of this meaningless try/finally enclosing. + # this is kept only to reduce changes in a patch. + pass def readseries(self): nodes = [] @@ -572,6 +572,14 @@ and then resume where you left off by calling :hg:`transplant --continue/-c`. 
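The strip hunk above generalizes -B/--bookmark to accept several bookmarks and only strips a revision when every bookmark on it was requested. A standalone sketch of that grouping test, over plain dict and set inputs instead of the bmstore (strippablenodes is a hypothetical name)::

  def strippablenodes(repomarks, requested):
      # group bookmarks by the node they point to; a node is strippable
      # only if all bookmarks pointing at it are being removed
      nodetobookmarks = {}
      for mark, node in repomarks.items():
          nodetobookmarks.setdefault(node, []).append(mark)
      return [node for node, marks in nodetobookmarks.items()
              if set(requested).issuperset(marks)]

  # strippablenodes({'a': 'n1', 'b': 'n1', 'c': 'n2'}, {'a', 'c'}) == ['n2']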
''' + wlock = None + try: + wlock = repo.wlock() + return _dotransplant(ui, repo, *revs, **opts) + finally: + lockmod.release(wlock) + +def _dotransplant(ui, repo, *revs, **opts): def incwalk(repo, csets, match=util.always): for node in csets: if match(node): @@ -599,7 +607,7 @@ return if not (opts.get('source') or revs or opts.get('merge') or opts.get('branch')): - raise error.Abort(_('no source URL, branch revision or revision ' + raise error.Abort(_('no source URL, branch revision, or revision ' 'list provided')) if opts.get('all'): if not opts.get('branch'): diff -r e240e914d226 -r 8f016345e6b0 mercurial/__init__.py --- a/mercurial/__init__.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/__init__.py Fri Dec 18 14:40:11 2015 -0600 @@ -0,0 +1,144 @@ +# __init__.py - Startup and module loading logic for Mercurial. +# +# Copyright 2015 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from __future__ import absolute_import + +import imp +import os +import sys +import zipimport + +__all__ = [] + +# Rules for how modules can be loaded. Values are: +# +# c - require C extensions +# allow - allow pure Python implementation when C loading fails +# py - only load pure Python modules +modulepolicy = '@MODULELOADPOLICY@' + +# By default, require the C extensions for performance reasons. +if modulepolicy == '@' 'MODULELOADPOLICY' '@': + modulepolicy = 'c' + +# PyPy doesn't load C extensions. +# +# The canonical way to do this is to test platform.python_implementation(). +# But we don't import platform and don't bloat for it here. +if '__pypy__' in sys.builtin_module_names: + modulepolicy = 'py' + +# Environment variable can always force settings. +modulepolicy = os.environ.get('HGMODULEPOLICY', modulepolicy) + +# Modules that have both Python and C implementations. See also the +# set of .py files under mercurial/pure/. +_dualmodules = set([ + 'mercurial.base85', + 'mercurial.bdiff', + 'mercurial.diffhelpers', + 'mercurial.mpatch', + 'mercurial.osutil', + 'mercurial.parsers', +]) + +class hgimporter(object): + """Object that conforms to import hook interface defined in PEP-302.""" + def find_module(self, name, path=None): + # We only care about modules that have both C and pure implementations. + if name in _dualmodules: + return self + return None + + def load_module(self, name): + mod = sys.modules.get(name, None) + if mod: + return mod + + mercurial = sys.modules['mercurial'] + + # The zip importer behaves sufficiently differently from the default + # importer to warrant its own code path. + loader = getattr(mercurial, '__loader__', None) + if isinstance(loader, zipimport.zipimporter): + def ziploader(*paths): + """Obtain a zipimporter for a directory under the main zip.""" + path = os.path.join(loader.archive, *paths) + zl = sys.path_importer_cache.get(path) + if not zl: + zl = zipimport.zipimporter(path) + return zl + + try: + if modulepolicy == 'py': + raise ImportError() + + zl = ziploader('mercurial') + mod = zl.load_module(name) + # Unlike imp, ziploader doesn't expose module metadata that + # indicates the type of module. So just assume what we found + # is OK (even though it could be a pure Python module). 
+ except ImportError: + if modulepolicy == 'c': + raise + zl = ziploader('mercurial', 'pure') + mod = zl.load_module(name) + + sys.modules[name] = mod + return mod + + # Unlike the default importer which searches special locations and + # sys.path, we only look in the directory where "mercurial" was + # imported from. + + # imp.find_module doesn't support submodules (modules with "."). + # Instead you have to pass the parent package's __path__ attribute + # as the path argument. + stem = name.split('.')[-1] + + try: + if modulepolicy == 'py': + raise ImportError() + + modinfo = imp.find_module(stem, mercurial.__path__) + + # The Mercurial installer used to copy files from + # mercurial/pure/*.py to mercurial/*.py. Therefore, it's possible + # for some installations to have .py files under mercurial/*. + # Loading Python modules when we expected C versions could result + # in a) poor performance b) loading a version from a previous + # Mercurial version, potentially leading to incompatibility. Either + # scenario is bad. So we verify that modules loaded from + # mercurial/* are C extensions. If the current policy allows the + # loading of .py modules, the module will be re-imported from + # mercurial/pure/* below. + if modinfo[2][2] != imp.C_EXTENSION: + raise ImportError('.py version of %s found where C ' + 'version should exist' % name) + + except ImportError: + if modulepolicy == 'c': + raise + + # Could not load the C extension and pure Python is allowed. So + # try to load them. + from . import pure + modinfo = imp.find_module(stem, pure.__path__) + if not modinfo: + raise ImportError('could not find mercurial module %s' % + name) + + mod = imp.load_module(name, *modinfo) + sys.modules[name] = mod + return mod + +# We automagically register our custom importer as a side-effect of loading. +# This is necessary to ensure that any entry points are able to import +# mercurial.* modules without having to perform this registration themselves. +if not any(isinstance(x, hgimporter) for x in sys.meta_path): + # meta_path is used before any implicit finders and before sys.path. + sys.meta_path.insert(0, hgimporter()) diff -r e240e914d226 -r 8f016345e6b0 mercurial/base85.c --- a/mercurial/base85.c Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/base85.c Fri Dec 18 14:40:11 2015 -0600 @@ -18,8 +18,7 @@ "abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~"; static char b85dec[256]; -static void -b85prep(void) +static void b85prep(void) { unsigned i; @@ -28,8 +27,7 @@ b85dec[(int)(b85chars[i])] = i + 1; } -static PyObject * -b85encode(PyObject *self, PyObject *args) +static PyObject *b85encode(PyObject *self, PyObject *args) { const unsigned char *text; PyObject *out; @@ -76,8 +74,7 @@ return out; } -static PyObject * -b85decode(PyObject *self, PyObject *args) +static PyObject *b85decode(PyObject *self, PyObject *args) { PyObject *out; const char *text; diff -r e240e914d226 -r 8f016345e6b0 mercurial/bookmarks.py --- a/mercurial/bookmarks.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/bookmarks.py Fri Dec 18 14:40:11 2015 -0600 @@ -22,6 +22,25 @@ util, ) +def _getbkfile(repo): + """Hook so that extensions that mess with the store can hook bm storage. + + For core, this just handles wether we should see pending + bookmarks or the committed ones. Other extensions (like share) + may need to tweak this behavior further. 
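Since bookmark file selection now lives in the module-level bookmarks._getbkfile() introduced here, the share extension (see its hunk above) wraps that function instead of a bmstore method. An extension redirecting bookmark storage would hook it roughly as follows; getsharedsource() is a made-up stand-in for whatever lookup the extension needs::

  from mercurial import bookmarks, extensions

  def getsharedsource(repo):
      # hypothetical: return the repository that really owns the bookmarks,
      # or None to use the local one
      return None

  def _sharedgetbkfile(orig, repo):
      srcrepo = getsharedsource(repo)
      return orig(srcrepo if srcrepo is not None else repo)

  def extsetup(ui):
      extensions.wrapfunction(bookmarks, '_getbkfile', _sharedgetbkfile)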
+ """ + bkfile = None + if 'HG_PENDING' in os.environ: + try: + bkfile = repo.vfs('bookmarks.pending') + except IOError as inst: + if inst.errno != errno.ENOENT: + raise + if bkfile is None: + bkfile = repo.vfs('bookmarks') + return bkfile + + class bmstore(dict): """Storage for bookmarks. @@ -41,7 +60,7 @@ dict.__init__(self) self._repo = repo try: - bkfile = self.getbkfile(repo) + bkfile = _getbkfile(repo) for line in bkfile: line = line.strip() if not line: @@ -59,18 +78,15 @@ except IOError as inst: if inst.errno != errno.ENOENT: raise + self._clean = True - def getbkfile(self, repo): - bkfile = None - if 'HG_PENDING' in os.environ: - try: - bkfile = repo.vfs('bookmarks.pending') - except IOError as inst: - if inst.errno != errno.ENOENT: - raise - if bkfile is None: - bkfile = repo.vfs('bookmarks') - return bkfile + def __setitem__(self, *args, **kwargs): + self._clean = False + return dict.__setitem__(self, *args, **kwargs) + + def __delitem__(self, key): + self._clean = False + return dict.__delitem__(self, key) def recordchange(self, tr): """record that bookmarks have been changed in a transaction @@ -89,6 +105,10 @@ We also store a backup of the previous state in undo.bookmarks that can be copied back on rollback. ''' + msg = 'bm.write() is deprecated, use bm.recordchange(transaction)' + self._repo.ui.deprecwarn(msg, '3.7') + if self._clean: + return repo = self._repo if (repo.ui.configbool('devel', 'all-warnings') or repo.ui.configbool('devel', 'check-locks')): @@ -114,9 +134,14 @@ wlock = repo.wlock() try: - file = repo.vfs('bookmarks', 'w', atomictemp=True) - self._write(file) - file.close() + file_ = repo.vfs('bookmarks', 'w', atomictemp=True) + try: + self._write(file_) + except: # re-raises + file_.discard() + raise + finally: + file_.close() finally: wlock.release() @@ -124,6 +149,7 @@ def _write(self, fp): for name, node in self.iteritems(): fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name))) + self._clean = True def readactive(repo): """ @@ -249,7 +275,14 @@ update = True if update: - marks.write() + lock = tr = None + try: + lock = repo.lock() + tr = repo.transaction('bookmark') + marks.recordchange(tr) + tr.close() + finally: + lockmod.release(tr, lock) return update def listbookmarks(repo): diff -r e240e914d226 -r 8f016345e6b0 mercurial/changegroup.py --- a/mercurial/changegroup.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/changegroup.py Fri Dec 18 14:40:11 2015 -0600 @@ -312,6 +312,7 @@ - number of heads stays the same: 1 """ repo = repo.unfiltered() + wasempty = (len(repo.changelog) == 0) def csmap(x): repo.ui.debug("add changeset %s\n" % short(x)) return len(cl) @@ -386,7 +387,7 @@ self.callback = None pr = prog(_('files'), efiles) newrevs, newfiles = _addchangegroupfiles( - repo, self, revmap, trp, pr, needfiles) + repo, self, revmap, trp, pr, needfiles, wasempty) revisions += newrevs files += newfiles @@ -612,7 +613,8 @@ clrevorder = {} mfs = {} # needed manifests fnodes = {} # needed file nodes - changedfiles = set() + # maps manifest node id -> set(changed files) + mfchangedfiles = {} # Callback for the changelog, used to collect changed files and manifest # nodes. @@ -620,9 +622,12 @@ def lookupcl(x): c = cl.read(x) clrevorder[x] = len(clrevorder) - changedfiles.update(c[3]) + n = c[0] # record the first changeset introducing this manifest version - mfs.setdefault(c[0], x) + mfs.setdefault(n, x) + # Record a complete list of potentially-changed files in + # this manifest. 
+ mfchangedfiles.setdefault(n, set()).update(c[3]) return x self._verbosenote(_('uncompressed size of bundle content:\n')) @@ -651,19 +656,36 @@ # Callback for the manifest, used to collect linkrevs for filelog # revisions. # Returns the linkrev node (collected in lookupcl). - def lookupmflinknode(x): - clnode = mfs[x] - if not fastpathlinkrev: + if fastpathlinkrev: + lookupmflinknode = mfs.__getitem__ + else: + def lookupmflinknode(x): + """Callback for looking up the linknode for manifests. + + Returns the linkrev node for the specified manifest. + + SIDE EFFECT: + + fclnodes gets populated with the list of relevant + file nodes. + + Note that this means you can't trust fclnodes until + after manifests have been sent to the client. + """ + clnode = mfs[x] mdata = ml.readfast(x) - for f, n in mdata.iteritems(): - if f in changedfiles: - # record the first changeset introducing this filelog - # version - fclnodes = fnodes.setdefault(f, {}) - fclnode = fclnodes.setdefault(n, clnode) - if clrevorder[clnode] < clrevorder[fclnode]: - fclnodes[n] = clnode - return clnode + for f in mfchangedfiles[x]: + try: + n = mdata[f] + except KeyError: + continue + # record the first changeset introducing this filelog + # version + fclnodes = fnodes.setdefault(f, {}) + fclnode = fclnodes.setdefault(n, clnode) + if clrevorder[clnode] < clrevorder[fclnode]: + fclnodes[n] = clnode + return clnode mfnodes = self.prune(ml, mfs, commonrevs) for x in self._packmanifests(mfnodes, lookupmflinknode): @@ -672,17 +694,20 @@ mfs.clear() clrevs = set(cl.rev(x) for x in clnodes) - def linknodes(filerevlog, fname): - if fastpathlinkrev: + if not fastpathlinkrev: + def linknodes(unused, fname): + return fnodes.get(fname, {}) + else: + cln = cl.node + def linknodes(filerevlog, fname): llr = filerevlog.linkrev - def genfilenodes(): - for r in filerevlog: - linkrev = llr(r) - if linkrev in clrevs: - yield filerevlog.node(r), cl.node(linkrev) - return dict(genfilenodes()) - return fnodes.get(fname, {}) + fln = filerevlog.node + revs = ((r, llr(r)) for r in filerevlog) + return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs) + changedfiles = set() + for x in mfchangedfiles.itervalues(): + changedfiles.update(x) for chunk in self.generatefiles(changedfiles, linknodes, commonrevs, source): yield chunk @@ -903,7 +928,7 @@ # to avoid a race we use changegroupsubset() (issue1320) return changegroupsubset(repo, basenodes, repo.heads(), source) -def _addchangegroupfiles(repo, source, revmap, trp, pr, needfiles): +def _addchangegroupfiles(repo, source, revmap, trp, pr, needfiles, wasempty): revisions = 0 files = 0 while True: diff -r e240e914d226 -r 8f016345e6b0 mercurial/cmdutil.py --- a/mercurial/cmdutil.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/cmdutil.py Fri Dec 18 14:40:11 2015 -0600 @@ -70,11 +70,11 @@ testfile = ui.config('experimental', 'crecordtest', None) oldwrite = setupwrapcolorwrite(ui) try: - newchunks = filterchunks(ui, originalhunks, usecurses, testfile, - operation) + newchunks, newopts = filterchunks(ui, originalhunks, usecurses, + testfile, operation) finally: ui.write = oldwrite - return newchunks + return newchunks, newopts def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts): @@ -116,14 +116,16 @@ diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True) diffopts.nodates = True diffopts.git = True + diffopts.showfunc = True originaldiff = patch.diff(repo, changes=status, opts=diffopts) originalchunks = patch.parsepatch(originaldiff) # 1. 
filter patch, so we have intending-to apply subset of it try: - chunks = filterfn(ui, originalchunks) + chunks, newopts = filterfn(ui, originalchunks) except patch.PatchError as err: raise error.Abort(_('error parsing patch: %s') % err) + opts.update(newopts) # We need to keep a backup of files that have been newly added and # modified during the recording process because there is a previous @@ -181,9 +183,9 @@ # 3a. apply filtered patch to clean repo (clean) if backups: # Equivalent to hg.revert - choices = lambda key: key in backups + m = scmutil.matchfiles(repo, backups.keys()) mergemod.update(repo, repo.dirstate.p1(), - False, True, choices) + False, True, matcher=m) # 3b. (apply) if dopatch: @@ -436,6 +438,19 @@ raise error.Abort(_("invalid format spec '%%%s' in output filename") % inst.args[0]) +class _unclosablefile(object): + def __init__(self, fp): + self._fp = fp + + def close(self): + pass + + def __iter__(self): + return iter(self._fp) + + def __getattr__(self, attr): + return getattr(self._fp, attr) + def makefileobj(repo, pat, node=None, desc=None, total=None, seqno=None, revwidth=None, mode='wb', modemap=None, pathname=None): @@ -447,22 +462,7 @@ fp = repo.ui.fout else: fp = repo.ui.fin - if util.safehasattr(fp, 'fileno'): - return os.fdopen(os.dup(fp.fileno()), mode) - else: - # if this fp can't be duped properly, return - # a dummy object that can be closed - class wrappedfileobj(object): - noop = lambda x: None - def __init__(self, f): - self.f = f - def __getattr__(self, attr): - if attr == 'close': - return self.noop - else: - return getattr(self.f, attr) - - return wrappedfileobj(fp) + return _unclosablefile(fp) if util.safehasattr(pat, 'write') and writable: return pat if util.safehasattr(pat, 'read') and 'r' in mode: @@ -1052,9 +1052,8 @@ fp = makefileobj(repo, template, node, desc=desc, total=total, seqno=seqno, revwidth=revwidth, mode='wb', modemap=filemode) - if fp != template: - shouldclose = True - if fp and fp != sys.stdout and util.safehasattr(fp, 'name'): + shouldclose = True + if fp and not getattr(fp, 'name', '').startswith('<'): repo.ui.note("%s\n" % fp.name) if not fp: @@ -1182,9 +1181,9 @@ def show(self, ctx, copies=None, matchfn=None, **props): if self.buffered: - self.ui.pushbuffer() + self.ui.pushbuffer(labeled=True) self._show(ctx, copies, matchfn, props) - self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True) + self.hunk[ctx.rev()] = self.ui.popbuffer() else: self._show(ctx, copies, matchfn, props) @@ -1297,16 +1296,17 @@ label='log.summary') self.ui.write("\n") - self.showpatch(changenode, matchfn) - - def showpatch(self, node, matchfn): + self.showpatch(ctx, matchfn) + + def showpatch(self, ctx, matchfn): if not matchfn: matchfn = self.matchfn if matchfn: stat = self.diffopts.get('stat') diff = self.diffopts.get('patch') diffopts = patch.diffallopts(self.ui, self.diffopts) - prev = self.repo.changelog.parents(node)[0] + node = ctx.node() + prev = ctx.p1() if stat: diffordiffstat(self.ui, self.repo, diffopts, prev, node, match=matchfn, stat=True) @@ -1488,7 +1488,7 @@ # write changeset metadata, then patch if requested key = self._parts['changeset'] self.ui.write(templater.stringify(self.t(key, **props))) - self.showpatch(ctx.node(), matchfn) + self.showpatch(ctx, matchfn) if self._parts['footer']: if not self.footer: @@ -2153,17 +2153,31 @@ return revs, expr, filematcher -def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None, +def _graphnodeformatter(ui, displayer): + spec = ui.config('ui', 'graphnodetemplate') + if not spec: 
+ return templatekw.showgraphnode # fast path for "{graphnode}" + + templ = formatter.gettemplater(ui, 'graphnode', spec) + cache = {} + if isinstance(displayer, changeset_templater): + cache = displayer.cache # reuse cache of slow templates + props = templatekw.keywords.copy() + props['templ'] = templ + props['cache'] = cache + def formatnode(repo, ctx): + props['ctx'] = ctx + props['repo'] = repo + props['revcache'] = {} + return templater.stringify(templ('graphnode', **props)) + return formatnode + +def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None, filematcher=None): + formatnode = _graphnodeformatter(ui, displayer) seen, state = [], graphmod.asciistate() for rev, type, ctx, parents in dag: - char = 'o' - if ctx.node() in showparents: - char = '@' - elif ctx.obsolete(): - char = 'x' - elif ctx.closesbranch(): - char = '_' + char = formatnode(repo, ctx) copies = None if getrenamed and ctx.rev(): copies = [] @@ -2196,9 +2210,8 @@ endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev) displayer = show_changeset(ui, repo, opts, buffered=True) - showparents = [ctx.node() for ctx in repo[None].parents()] - displaygraph(ui, revdag, displayer, showparents, - graphmod.asciiedges, getrenamed, filematcher) + displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed, + filematcher) def checkunsupportedgraphflags(pats, opts): for op in ["newest_first"]: @@ -2614,6 +2627,11 @@ message = old.description() pureextra = extra.copy() + if 'amend_source' in pureextra: + del pureextra['amend_source'] + pureoldextra = old.extra() + if 'amend_source' in pureoldextra: + del pureoldextra['amend_source'] extra['amend_source'] = old.hex() new = context.memctx(repo, @@ -2631,7 +2649,7 @@ and newdesc == old.description() and user == old.user() and date == old.date() - and pureextra == old.extra()): + and pureextra == pureoldextra): # nothing changed. continuing here would create a new node # anyway because of the amend_source noise. 
# @@ -3078,7 +3096,7 @@ xlist.append(abs) if dobackup and (backup <= dobackup or wctx[abs].cmp(ctx[abs])): - bakname = "%s.orig" % rel + bakname = origpath(ui, repo, rel) ui.note(_('saving current version of %s as %s\n') % (rel, bakname)) if not opts.get('dry_run'): @@ -3110,6 +3128,26 @@ finally: wlock.release() +def origpath(ui, repo, filepath): + '''customize where .orig files are created + + Fetch user defined path from config file: [ui] origbackuppath = + Fall back to default (filepath) if not specified + ''' + origbackuppath = ui.config('ui', 'origbackuppath', None) + if origbackuppath is None: + return filepath + ".orig" + + filepathfromroot = os.path.relpath(filepath, start=repo.root) + fullorigpath = repo.wjoin(origbackuppath, filepathfromroot) + + origbackupdir = repo.vfs.dirname(fullorigpath) + if not repo.vfs.exists(origbackupdir): + ui.note(_('creating directory: %s\n') % origbackupdir) + util.makedirs(origbackupdir) + + return fullorigpath + ".orig" + def _revertprefetch(repo, ctx, *files): """Let extension changing the storage layer prefetch content""" pass @@ -3171,7 +3209,7 @@ try: - chunks = recordfilter(repo.ui, originalchunks) + chunks, opts = recordfilter(repo.ui, originalchunks) if reversehunks: chunks = patch.reversehunks(chunks) diff -r e240e914d226 -r 8f016345e6b0 mercurial/commands.py --- a/mercurial/commands.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/commands.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,7 +5,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from node import hex, bin, nullid, nullrev, short +from node import hex, bin, nullhex, nullid, nullrev, short from lock import release from i18n import _ import os, re, difflib, time, tempfile, errno, shlex @@ -15,7 +15,6 @@ import archival, changegroup, cmdutil, hbisect import sshserver, hgweb import extensions -from hgweb import server as hgweb_server import merge as mergemod import minirst, revset, fileset import dagparser, context, simplemerge, graphmod, copies @@ -24,6 +23,7 @@ import phases, obsolete, exchange, bundle2, repair, lock as lockmod import ui as uimod import streamclone +import commandserver table = {} @@ -172,6 +172,12 @@ _('recurse into subrepositories')) ] +debugrevlogopts = [ + ('c', 'changelog', False, _('open changelog')), + ('m', 'manifest', False, _('open manifest')), + ('', 'dir', False, _('open directory manifest')), +] + # Commands start here, listed alphabetically @command('^add', @@ -187,21 +193,36 @@ The files will be added to the repository at the next commit. To undo an add before that, see :hg:`forget`. - If no names are given, add all files to the repository. + If no names are given, add all files to the repository (except + files matching ``.hgignore``). .. container:: verbose - An example showing how new (unknown) files are added - automatically by :hg:`add`:: - - $ ls - foo.c - $ hg status - ? foo.c - $ hg add - adding foo.c - $ hg status - A foo.c + Examples: + + - New (unknown) files are added + automatically by :hg:`add`:: + + $ ls + foo.c + $ hg status + ? foo.c + $ hg add + adding foo.c + $ hg status + A foo.c + + - Specific files to be added can be specified:: + + $ ls + bar.c foo.c + $ hg status + ? bar.c + ? foo.c + $ hg add bar.c + $ hg status + A bar.c + ? foo.c Returns 0 if all files are successfully added. """ @@ -220,9 +241,9 @@ Add all new files and remove all missing files from the repository. 
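The new cmdutil.origpath() used throughout this patch wherever '.orig' backups are written honours a ui.origbackuppath setting. A standalone approximation that takes the configured value and the repository root as plain arguments (the real helper also creates missing directories through the repo vfs)::

  import os

  def origpath(origbackuppath, reporoot, filepath):
      if origbackuppath is None:
          # no [ui] origbackuppath: keep the side-by-side '<file>.orig'
          return filepath + '.orig'
      # otherwise mirror the file's path under the configured backup dir
      rel = os.path.relpath(filepath, start=reporoot)
      return os.path.join(reporoot, origbackuppath, rel) + '.orig'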
- New files are ignored if they match any of the patterns in - ``.hgignore``. As with add, these changes take effect at the next - commit. + Unless names are given, new files are ignored if they match any of + the patterns in ``.hgignore``. As with add, these changes take + effect at the next commit. Use the -s/--similarity option to detect renamed files. This option takes a percentage between 0 (disabled) and 100 (files must @@ -234,6 +255,46 @@ not specified, -s/--similarity defaults to 100 and only renames of identical files are detected. + .. container:: verbose + + Examples: + + - A number of files (bar.c and foo.c) are new, + while foobar.c has been removed (without using :hg:`remove`) + from the repository:: + + $ ls + bar.c foo.c + $ hg status + ! foobar.c + ? bar.c + ? foo.c + $ hg addremove + adding bar.c + adding foo.c + removing foobar.c + $ hg status + A bar.c + A foo.c + R foobar.c + + - A file foobar.c was moved to foo.c without using :hg:`rename`. + Afterwards, it was edited slightly:: + + $ ls + foo.c + $ hg status + ! foobar.c + ? foo.c + $ hg addremove --similarity 90 + removing foobar.c + adding foo.c + recording removal of foobar.c as rename to foo.c (94% similar) + $ hg status -C + A foo.c + foobar.c + R foobar.c + Returns 0 if all files are successfully added. """ try: @@ -264,7 +325,7 @@ """show changeset information by line for each file List changes in files, showing the revision id responsible for - each line + each line. This command is useful for discovering when a change was made and by whom. @@ -403,7 +464,7 @@ directory; use -r/--rev to specify a different revision. The archive type is automatically detected based on file - extension (or override using -t/--type). + extension (to override, use -t/--type). .. container:: verbose @@ -486,6 +547,23 @@ .. container:: verbose + Examples: + + - Reverse the effect of the parent of the working directory. + This backout will be committed immediately:: + + hg backout -r . + + - Reverse the effect of previous bad revision 23:: + + hg backout -r 23 + hg commit -m "Backout revision 23" + + - Reverse the effect of previous bad revision 23 and + commit the backout immediately:: + + hg backout -r 23 --commit + By default, the pending changeset will have one parent, maintaining a linear history. With --merge, the pending changeset will instead have two parents: the old parent of the @@ -504,6 +582,15 @@ Returns 0 on success, 1 if nothing to backout or there are unresolved files. ''' + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + return _dobackout(ui, repo, node, rev, commit, **opts) + finally: + release(lock, wlock) + +def _dobackout(ui, repo, node=None, rev=None, commit=False, **opts): if rev and node: raise error.Abort(_("please specify just one revision")) @@ -542,7 +629,6 @@ parent = p1 # the backout should appear on the same branch - wlock = repo.wlock() try: branch = repo.dirstate.branch() bheads = repo.branchheads(branch) @@ -552,8 +638,7 @@ try: ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'backout') - stats = mergemod.update(repo, parent, True, True, False, - node, False) + stats = mergemod.update(repo, parent, True, True, node, False) repo.setparents(op1, op2) dsguard.close() hg._showstats(repo, stats) @@ -605,7 +690,9 @@ finally: ui.setconfig('ui', 'forcemerge', '', '') finally: - wlock.release() + # TODO: get rid of this meaningless try/finally enclosing. + # this is kept only to reduce changes in a patch. 
+ pass return 0 @command('bisect', @@ -1100,7 +1187,7 @@ Use the command :hg:`update` to switch to an existing branch. Use :hg:`commit --close-branch` to mark this branch head as closed. - When all heads of the branch are closed, the branch will be + When all heads of a branch are closed, the branch will be considered closed. Returns 0 on success. @@ -1120,7 +1207,7 @@ ui.status(_('reset working directory to branch %s\n') % label) elif label: if not opts.get('force') and label in repo.branchmap(): - if label not in [p.branch() for p in repo.parents()]: + if label not in [p.branch() for p in repo[None].parents()]: raise error.Abort(_('a branch of the same name already' ' exists'), # i18n: "it" refers to an existing branch @@ -1214,19 +1301,20 @@ def bundle(ui, repo, fname, dest=None, **opts): """create a changegroup file - Generate a compressed changegroup file collecting changesets not - known to be in another repository. - - If you omit the destination repository, then hg assumes the - destination will have all the nodes you specify with --base - parameters. To create a bundle containing all changesets, use - -a/--all (or --base null). + Generate a changegroup file collecting changesets to be added + to a repository. + + To create a bundle containing all changesets, use -a/--all + (or --base null). Otherwise, hg assumes the destination will have + all the nodes you specify with --base parameters. Otherwise, hg + will assume the repository has all the nodes in destination, or + default-push/default if no destination is specified. You can change bundle format with the -t/--type option. You can specify a compression, a bundle version or both using a dash (comp-version). The available compression methods are: none, bzip2, and gzip (by default, bundles are compressed using bzip2). The - available format are: v1, v2 (default to most suitable). + available formats are: v1, v2 (default to most suitable). The bundle file can then be transferred using conventional means and applied to another repository with the unbundle or pull @@ -1257,6 +1345,11 @@ hint=_('use "hg debugcreatestreamclonebundle"')) if opts.get('all'): + if dest: + raise error.Abort(_("--all is incompatible with specifying " + "a destination")) + if opts.get('base'): + ui.warn(_("ignoring --base because --all was specified\n")) base = ['null'] else: base = scmutil.revrange(repo, opts.get('base')) @@ -1337,7 +1430,8 @@ @command('^clone', [('U', 'noupdate', None, _('the clone will include an empty working ' 'directory (only a repository)')), - ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')), + ('u', 'updaterev', '', _('revision, tag, or branch to check out'), + _('REV')), ('r', 'rev', [], _('include the specified changeset'), _('REV')), ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')), ('', 'pull', None, _('use pull protocol to copy metadata')), @@ -1505,7 +1599,32 @@ See :hg:`help dates` for a list of formats valid for -d/--date. Returns 0 on success, 1 if nothing changed. + + .. 
container:: verbose + + Examples: + + - commit all files ending in .py:: + + hg commit --include "set:**.py" + + - commit all non-binary files:: + + hg commit --exclude "set:binary()" + + - amend the current commit and set the date to now:: + + hg commit --amend --date now """ + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + return _docommit(ui, repo, *pats, **opts) + finally: + release(lock, wlock) + +def _docommit(ui, repo, *pats, **opts): if opts.get('interactive'): opts.pop('interactive') cmdutil.dorecord(ui, repo, commit, None, False, @@ -1530,8 +1649,8 @@ if not bheads: raise error.Abort(_('can only close branch heads')) elif opts.get('amend'): - if repo.parents()[0].p1().branch() != branch and \ - repo.parents()[0].p2().branch() != branch: + if repo[None].parents()[0].p1().branch() != branch and \ + repo[None].parents()[0].p2().branch() != branch: raise error.Abort(_('can only close branch heads')) if opts.get('amend'): @@ -1547,6 +1666,9 @@ if not allowunstable and old.children(): raise error.Abort(_('cannot amend changeset with children')) + newextra = extra.copy() + newextra['branch'] = branch + extra = newextra # commitfunc is used only for temporary amend commit by cmdutil.amend def commitfunc(ui, repo, message, match, opts): return repo.commit(message, @@ -2111,11 +2233,7 @@ ui.write(line) ui.write("\n") -@command('debugdata', - [('c', 'changelog', False, _('open changelog')), - ('m', 'manifest', False, _('open manifest')), - ('', 'dir', False, _('open directory manifest'))], - _('-c|-m|FILE REV')) +@command('debugdata', debugrevlogopts, _('-c|-m|FILE REV')) def debugdata(ui, repo, file_, rev=None, **opts): """dump the contents of a data file revision""" if opts.get('changelog') or opts.get('manifest'): @@ -2321,11 +2439,8 @@ else: raise error.Abort(_("no ignore patterns found")) -@command('debugindex', - [('c', 'changelog', False, _('open changelog')), - ('m', 'manifest', False, _('open manifest')), - ('', 'dir', False, _('open directory manifest')), - ('f', 'format', 0, _('revlog format'), _('FORMAT'))], +@command('debugindex', debugrevlogopts + + [('f', 'format', 0, _('revlog format'), _('FORMAT'))], _('[-f FORMAT] -c|-m|FILE'), optionalrepo=True) def debugindex(ui, repo, file_=None, **opts): @@ -2380,16 +2495,11 @@ i, r.flags(i), r.start(i), r.length(i), r.rawsize(i), base, r.linkrev(i), pr[0], pr[1], shortfn(node))) -@command('debugindexdot', [], _('FILE'), optionalrepo=True) -def debugindexdot(ui, repo, file_): +@command('debugindexdot', debugrevlogopts, + _('-c|-m|FILE'), optionalrepo=True) +def debugindexdot(ui, repo, file_=None, **opts): """dump an index DAG as a graphviz dot file""" - r = None - if repo: - filelog = repo.file(file_) - if len(filelog): - r = filelog - if not r: - r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_) + r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts) ui.write(("digraph G {\n")) for i in r: node = r.node(i) @@ -2399,6 +2509,117 @@ ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) ui.write("}\n") +@command('debugdeltachain', + debugrevlogopts + formatteropts, + _('-c|-m|FILE'), + optionalrepo=True) +def debugdeltachain(ui, repo, file_=None, **opts): + """dump information about delta chains in a revlog + + Output can be templatized. 
Available template keywords are: + + rev revision number + chainid delta chain identifier (numbered by unique base) + chainlen delta chain length to this revision + prevrev previous revision in delta chain + deltatype role of delta / how it was computed + compsize compressed size of revision + uncompsize uncompressed size of revision + chainsize total size of compressed revisions in chain + chainratio total chain size divided by uncompressed revision size + (new delta chains typically start at ratio 2.00) + lindist linear distance from base revision in delta chain to end + of this revision + extradist total size of revisions not part of this delta chain from + base of delta chain to end of this revision; a measurement + of how much extra data we need to read/seek across to read + the delta chain for this revision + extraratio extradist divided by chainsize; another representation of + how much unrelated data is needed to load this delta chain + """ + r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts) + index = r.index + generaldelta = r.version & revlog.REVLOGGENERALDELTA + + def revinfo(rev): + iterrev = rev + e = index[iterrev] + chain = [] + compsize = e[1] + uncompsize = e[2] + chainsize = 0 + + if generaldelta: + if e[3] == e[5]: + deltatype = 'p1' + elif e[3] == e[6]: + deltatype = 'p2' + elif e[3] == rev - 1: + deltatype = 'prev' + elif e[3] == rev: + deltatype = 'base' + else: + deltatype = 'other' + else: + if e[3] == rev: + deltatype = 'base' + else: + deltatype = 'prev' + + while iterrev != e[3]: + chain.append(iterrev) + chainsize += e[1] + if generaldelta: + iterrev = e[3] + else: + iterrev -= 1 + e = index[iterrev] + else: + chainsize += e[1] + chain.append(iterrev) + + chain.reverse() + return compsize, uncompsize, deltatype, chain, chainsize + + fm = ui.formatter('debugdeltachain', opts) + + fm.plain(' rev chain# chainlen prev delta ' + 'size rawsize chainsize ratio lindist extradist ' + 'extraratio\n') + + chainbases = {} + for rev in r: + comp, uncomp, deltatype, chain, chainsize = revinfo(rev) + chainbase = chain[0] + chainid = chainbases.setdefault(chainbase, len(chainbases) + 1) + basestart = r.start(chainbase) + revstart = r.start(rev) + lineardist = revstart + comp - basestart + extradist = lineardist - chainsize + try: + prevrev = chain[-2] + except IndexError: + prevrev = -1 + + chainratio = float(chainsize) / float(uncomp) + extraratio = float(extradist) / float(chainsize) + + fm.startitem() + fm.write('rev chainid chainlen prevrev deltatype compsize ' + 'uncompsize chainsize chainratio lindist extradist ' + 'extraratio', + '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n', + rev, chainid, len(chain), prevrev, deltatype, comp, + uncomp, chainsize, chainratio, lineardist, extradist, + extraratio, + rev=rev, chainid=chainid, chainlen=len(chain), + prevrev=prevrev, deltatype=deltatype, compsize=comp, + uncompsize=uncomp, chainsize=chainsize, + chainratio=chainratio, lindist=lineardist, + extradist=extradist, extraratio=extraratio) + + fm.end() + @command('debuginstall', [], '', norepo=True) def debuginstall(ui): '''test Mercurial installation @@ -2522,6 +2743,12 @@ Use --verbose to print out information about whether v1 or v2 merge state was chosen.""" + def _hashornull(h): + if h == nullhex: + return 'null' + else: + return h + def printrecords(version): ui.write(('* version %s records\n') % version) if version == 1: @@ -2539,7 +2766,7 @@ driver, mdstate = record.split('\0', 1) ui.write(('merge driver: %s (state "%s")\n') % (driver, mdstate)) - 
elif rtype in 'FD': + elif rtype in 'FDC': r = record.split('\0') f, state, hash, lfile, afile, anode, ofile = r[0:7] if version == 1: @@ -2547,15 +2774,20 @@ flags = r[7] else: onode, flags = r[7:9] - ui.write(('file: %s (state "%s", hash %s)\n') - % (f, state, hash)) + ui.write(('file: %s (record type "%s", state "%s", hash %s)\n') + % (f, rtype, state, _hashornull(hash))) ui.write((' local path: %s (flags "%s")\n') % (lfile, flags)) - ui.write((' ancestor path: %s (node %s)\n') % (afile, anode)) - ui.write((' other path: %s (node %s)\n') % (ofile, onode)) + ui.write((' ancestor path: %s (node %s)\n') + % (afile, _hashornull(anode))) + ui.write((' other path: %s (node %s)\n') + % (ofile, _hashornull(onode))) else: ui.write(('unrecognized entry: %s\t%s\n') % (rtype, record.replace('\0', '\t'))) + # Avoid mergestate.read() since it may raise an exception for unsupported + # merge state records. We shouldn't be doing this, but this is OK since this + # command is pretty low-level. ms = mergemod.mergestate(repo) # sort so that reasonable information is on top @@ -2881,21 +3113,17 @@ wlock = repo.wlock() try: dirstate = repo.dirstate - + changedfiles = None # See command doc for what minimal does. if opts.get('minimal'): + manifestfiles = set(ctx.manifest().keys()) dirstatefiles = set(dirstate) - ctxfiles = set(ctx.manifest().keys()) - for file in (dirstatefiles | ctxfiles): - indirstate = file in dirstatefiles - inctx = file in ctxfiles - - if indirstate and not inctx and dirstate[file] != 'a': - dirstate.drop(file) - elif inctx and not indirstate: - dirstate.normallookup(file) - else: - dirstate.rebuild(ctx.node(), ctx.manifest()) + manifestonly = manifestfiles - dirstatefiles + dsonly = dirstatefiles - manifestfiles + dsnotadded = set(f for f in dsonly if dirstate[f] != 'a') + changedfiles = manifestonly | dsnotadded + + dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles) finally: wlock.release() @@ -2921,11 +3149,8 @@ else: ui.write(_("%s not renamed\n") % rel) -@command('debugrevlog', - [('c', 'changelog', False, _('open changelog')), - ('m', 'manifest', False, _('open manifest')), - ('', 'dir', False, _('open directory manifest')), - ('d', 'dump', False, _('dump index data'))], +@command('debugrevlog', debugrevlogopts + + [('d', 'dump', False, _('dump index data'))], _('-c|-m|FILE'), optionalrepo=True) def debugrevlog(ui, repo, file_=None, **opts): @@ -3162,8 +3387,12 @@ [('', 'nodates', None, _('do not display the saved mtime')), ('', 'datesort', None, _('sort by saved mtime'))], _('[OPTION]...')) -def debugstate(ui, repo, nodates=None, datesort=None): +def debugstate(ui, repo, **opts): """show the contents of the current dirstate""" + + nodates = opts.get('nodates') + datesort = opts.get('datesort') + timestr = "" if datesort: keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename @@ -3628,7 +3857,14 @@ Returns 0 on successful completion. 
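The ``debugdeltachain`` keywords documented a few hunks above (``chainsize``, ``lindist``, ``extradist`` and the two ratios) are all derived from per-revision start offsets and compressed sizes. A worked sketch with made-up numbers, following the same arithmetic as the new command::

    # hypothetical chain of three revisions: (start offset, compressed size)
    chain = [(0, 1000), (1400, 300), (2100, 250)]   # base ... tip
    uncomp = 2600                                   # uncompressed size of the tip

    chainsize = sum(size for start, size in chain)  # 1550 bytes of stored deltas
    basestart = chain[0][0]
    revstart, comp = chain[-1]
    lineardist = revstart + comp - basestart        # 2350 bytes spanned on disk
    extradist = lineardist - chainsize              # 800 unrelated bytes read over
    chainratio = float(chainsize) / uncomp          # ~0.60
    extraratio = float(extradist) / chainsize       # ~0.52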
''' - + wlock = None + try: + wlock = repo.wlock() + return _dograft(ui, repo, *revs, **opts) + finally: + release(wlock) + +def _dograft(ui, repo, *revs, **opts): revs = list(revs) revs.extend(opts['rev']) @@ -3734,7 +3970,6 @@ if not revs: return -1 - wlock = repo.wlock() try: for pos, ctx in enumerate(repo.set("%ld", revs)): desc = '%d:%s "%s"' % (ctx.rev(), ctx, @@ -3746,10 +3981,10 @@ if opts.get('dry_run'): continue - source = ctx.extra().get('source') - extra = {} + extra = ctx.extra().copy() + del extra['branch'] + source = extra.get('source') if source: - extra['source'] = source extra['intermediate-source'] = ctx.hex() else: extra['source'] = ctx.hex() @@ -3779,9 +4014,17 @@ # write out state for --continue nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]] repo.vfs.write('graftstate', ''.join(nodelines)) + extra = '' + if opts.get('user'): + extra += ' --user %s' % opts['user'] + if opts.get('date'): + extra += ' --date %s' % opts['date'] + if opts.get('log'): + extra += ' --log' + hint=_('use hg resolve and hg graft --continue%s') % extra raise error.Abort( _("unresolved conflicts, can't continue"), - hint=_('use hg resolve and hg graft --continue')) + hint=hint) else: cont = False @@ -3793,7 +4036,9 @@ _('note: graft of %d:%s created no changes to commit\n') % (ctx.rev(), ctx)) finally: - wlock.release() + # TODO: get rid of this meaningless try/finally enclosing. + # this is kept only to reduce changes in a patch. + pass # remove state when we complete successfully if not opts.get('dry_run'): @@ -4129,11 +4374,16 @@ keep.append(sys.platform.lower()) section = None + subtopic = None if name and '.' in name: name, section = name.split('.', 1) section = section.lower() - - text = help.help_(ui, name, **opts) + if '.' in section: + subtopic, section = section.split('.', 1) + else: + subtopic = section + + text = help.help_(ui, name, subtopic=subtopic, **opts) formatted, pruned = minirst.format(text, textwidth, keep=keep, section=section) @@ -4195,6 +4445,9 @@ hg id -r tip http://selenic.com/hg/ + See :hg:`log` for generating more information about specific revisions, + including full hash identifiers. + Returns 0 if successful. """ @@ -4328,14 +4581,23 @@ Import a list of patches and commit them individually (unless --no-commit is specified). - Because import first applies changes to the working directory, - import will abort if there are outstanding changes. + To read a patch from standard input, use "-" as the patch name. If + a URL is specified, the patch will be downloaded from there. + + Import first applies changes to the working directory (unless + --bypass is specified), import will abort if there are outstanding + changes. + + Use --bypass to apply and commit patches directly to the + repository, without affecting the working directory. Without + --exact, patches will be applied on top of the working directory + parent revision. You can import a patch straight from a mail message. Even patches as attachments work (to use the body part, it must have type text/plain or text/x-patch). From and Subject headers of email message are used as default committer and commit message. All - text/plain body parts before first diff are added to commit + text/plain body parts before first diff are added to the commit message. If the imported patch was generated by :hg:`export`, user and @@ -4349,14 +4611,6 @@ the patch. This may happen due to character set problems or other deficiencies in the text patch format. 
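Putting the two points above together, a changeset can be exported from one repository and applied to another without touching the working copy, for example (the revision number is arbitrary)::

    hg export -r 42 | hg import --bypass -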
- Use --bypass to apply and commit patches directly to the - repository, not touching the working directory. Without --exact, - patches will be applied on top of the working directory parent - revision. - - With -s/--similarity, hg will attempt to discover renames and - copies in the patch in the same way as :hg:`addremove`. - Use --partial to ensure a changeset will be created from the patch even if some hunks fail to apply. Hunks that fail to apply will be written to a .rej file. Conflicts can then be resolved @@ -4367,14 +4621,15 @@ cleanly, :hg:`import --partial` will create an empty changeset, importing only the patch metadata. + With -s/--similarity, hg will attempt to discover renames and + copies in the patch in the same way as :hg:`addremove`. + It is possible to use external patch programs to perform the patch by setting the ``ui.patch`` configuration option. For the default internal tool, the fuzz can also be configured via ``patch.fuzz``. See :hg:`help config` for more information about configuration files and how to use these options. - To read a patch from standard input, use "-" as the patch name. If - a URL is specified, the patch will be downloaded from it. See :hg:`help dates` for a list of formats valid for -d/--date. .. container:: verbose @@ -4419,6 +4674,7 @@ if date: opts['date'] = util.parsedate(date) + exact = opts.get('exact') update = not opts.get('bypass') if not update and opts.get('no_commit'): raise error.Abort(_('cannot use --no-commit with --bypass')) @@ -4430,15 +4686,11 @@ raise error.Abort(_('similarity must be between 0 and 100')) if sim and not update: raise error.Abort(_('cannot use --similarity with --bypass')) - if opts.get('exact') and opts.get('edit'): - raise error.Abort(_('cannot use --exact with --edit')) - if opts.get('exact') and opts.get('prefix'): - raise error.Abort(_('cannot use --exact with --prefix')) - - if update: - cmdutil.checkunfinished(repo) - if (opts.get('exact') or not opts.get('force')) and update: - cmdutil.bailifchanged(repo) + if exact: + if opts.get('edit'): + raise error.Abort(_('cannot use --exact with --edit')) + if opts.get('prefix'): + raise error.Abort(_('cannot use --exact with --prefix')) base = opts["base"] wlock = dsguard = lock = tr = None @@ -4449,12 +4701,18 @@ try: try: wlock = repo.wlock() + + if update: + cmdutil.checkunfinished(repo) + if (exact or not opts.get('force')): + cmdutil.bailifchanged(repo) + if not opts.get('no_commit'): lock = repo.lock() tr = repo.transaction('import') else: dsguard = cmdutil.dirstateguard(repo, 'import') - parents = repo.parents() + parents = repo[None].parents() for patchurl in patches: if patchurl == '-': ui.status(_('applying patch from stdin\n')) @@ -4473,8 +4731,8 @@ if msg: haspatch = True ui.note(msg + '\n') - if update or opts.get('exact'): - parents = repo.parents() + if update or exact: + parents = repo[None].parents() else: parents = [repo[node]] if rej: @@ -4573,8 +4831,7 @@ cmdutil.checkunsupportedgraphflags([], opts) def display(other, chlist, displayer): revdag = cmdutil.graphrevs(other, chlist, opts) - showparents = [ctx.node() for ctx in repo[None].parents()] - cmdutil.displaygraph(ui, revdag, displayer, showparents, + cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges) hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True) @@ -4755,6 +5012,10 @@ hg log -k bug --template "{rev}\\n" + - the full hash identifier of the working directory parent:: + + hg log -r . 
--template "{node}\\n" + - list available log templates:: hg log -T list @@ -5007,9 +5268,7 @@ revdag = cmdutil.graphrevs(repo, o, opts) displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True) - showparents = [ctx.node() for ctx in repo[None].parents()] - cmdutil.displaygraph(ui, revdag, displayer, showparents, - graphmod.asciiedges) + cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges) cmdutil.outgoinghooks(ui, repo, other, opts, o) return 0 @@ -5043,6 +5302,13 @@ last changed (before the working directory revision or the argument to --rev if given) is printed. + This command is equivalent to:: + + hg log -r "parents()" or + hg log -r "parents(REV)" or + hg log -r "max(file(FILE))" or + hg log -r "max(::REV and file(FILE))" + See :hg:`summary` and :hg:`help revsets` for related information. Returns 0 on success. @@ -5122,6 +5388,8 @@ else: ui.write("%s = %s\n" % (name, util.hidepassword(path.rawloc))) + for subopt, value in sorted(path.suboptions.items()): + ui.write('%s:%s = %s\n' % (name, subopt, value)) @command('phase', [('p', 'public', False, _('set changeset phase to public')), @@ -5227,10 +5495,9 @@ checkout, movemarkfrom, brev = updata ret = hg.update(repo, checkout) except error.UpdateAbort as inst: - ui.warn(_("not updating: %s\n") % str(inst)) - if inst.hint: - ui.warn(_("(%s)\n") % inst.hint) - return 0 + msg = _("not updating: %s") % str(inst) + hint = inst.hint + raise error.UpdateAbort(msg, hint=hint) if not ret and not checkout: if bookmarks.update(repo, [movemarkfrom], repo['.'].node()): ui.status(_("updating bookmark %s\n") % repo._activebookmark) @@ -5398,7 +5665,8 @@ if not path: raise error.Abort(_('default repository not configured!'), hint=_('see the "path" section in "hg help config"')) - dest, branches = path.pushloc, (path.branch, opts.get('branch') or []) + dest = path.pushloc or path.loc + branches = (path.branch, opts.get('branch') or []) ui.status(_('pushing to %s\n') % util.hidepassword(dest)) revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) other = hg.peer(repo, opts, dest) @@ -5590,7 +5858,7 @@ if show: fm = ui.formatter('resolve', opts) - ms = mergemod.mergestate(repo) + ms = mergemod.mergestate.read(repo) m = scmutil.match(repo[None], pats, opts) for f in ms: if not m(f): @@ -5605,7 +5873,7 @@ wlock = repo.wlock() try: - ms = mergemod.mergestate(repo) + ms = mergemod.mergestate.read(repo) if not (ms.active() or repo.dirstate.p2() != nullid): raise error.Abort( @@ -5655,7 +5923,11 @@ else: # backup pre-resolve (merge uses .orig for its own purposes) a = repo.wjoin(f) - util.copyfile(a, a + ".resolve") + try: + util.copyfile(a, a + ".resolve") + except (IOError, OSError) as inst: + if inst.errno != errno.ENOENT: + raise try: # preresolve file @@ -5673,7 +5945,12 @@ # replace filemerge's .orig file with our resolve file, but only # for merges that are complete if complete: - util.rename(a + ".resolve", a + ".orig") + try: + util.rename(a + ".resolve", + cmdutil.origpath(ui, repo, a)) + except OSError as inst: + if inst.errno != errno.ENOENT: + raise for f in tocomplete: try: @@ -5689,9 +5966,14 @@ # replace filemerge's .orig file with our resolve file a = repo.wjoin(f) - util.rename(a + ".resolve", a + ".orig") + try: + util.rename(a + ".resolve", cmdutil.origpath(ui, repo, a)) + except OSError as inst: + if inst.errno != errno.ENOENT: + raise ms.commit() + ms.recordactions() if not didwork and pats: ui.warn(_("arguments do not match paths that need resolving\n")) @@ -5919,81 +6201,10 @@ s.serve_forever() 
if opts["cmdserver"]: - import commandserver service = commandserver.createservice(ui, repo, opts) - return cmdutil.service(opts, initfn=service.init, runfn=service.run) - - # this way we can check if something was given in the command-line - if opts.get('port'): - opts['port'] = util.getport(opts.get('port')) - - if repo: - baseui = repo.baseui else: - baseui = ui - optlist = ("name templates style address port prefix ipv6" - " accesslog errorlog certificate encoding") - for o in optlist.split(): - val = opts.get(o, '') - if val in (None, ''): # should check against default options instead - continue - baseui.setconfig("web", o, val, 'serve') - if repo and repo.ui != baseui: - repo.ui.setconfig("web", o, val, 'serve') - - o = opts.get('web_conf') or opts.get('webdir_conf') - if not o: - if not repo: - raise error.RepoError(_("there is no Mercurial repository" - " here (.hg not found)")) - o = repo - - app = hgweb.hgweb(o, baseui=baseui) - service = httpservice(ui, app, opts) - cmdutil.service(opts, initfn=service.init, runfn=service.run) - -class httpservice(object): - def __init__(self, ui, app, opts): - self.ui = ui - self.app = app - self.opts = opts - - def init(self): - util.setsignalhandler() - self.httpd = hgweb_server.create_server(self.ui, self.app) - - if self.opts['port'] and not self.ui.verbose: - return - - if self.httpd.prefix: - prefix = self.httpd.prefix.strip('/') + '/' - else: - prefix = '' - - port = ':%d' % self.httpd.port - if port == ':80': - port = '' - - bindaddr = self.httpd.addr - if bindaddr == '0.0.0.0': - bindaddr = '*' - elif ':' in bindaddr: # IPv6 - bindaddr = '[%s]' % bindaddr - - fqaddr = self.httpd.fqaddr - if ':' in fqaddr: - fqaddr = '[%s]' % fqaddr - if self.opts['port']: - write = self.ui.status - else: - write = self.ui.write - write(_('listening at http://%s%s/%s (bound to %s:%d)\n') % - (fqaddr, port, prefix, bindaddr, self.httpd.port)) - self.ui.flush() # avoid buffering of status message - - def run(self): - self.httpd.serve_forever() - + service = hgweb.createservice(ui, repo, opts) + return cmdutil.service(opts, initfn=service.init, runfn=service.run) @command('^status|st', [('A', 'all', None, _('show status of all files')), @@ -6205,8 +6416,15 @@ if d in status.added: status.added.remove(d) - ms = mergemod.mergestate(repo) - unresolved = [f for f in ms if ms[f] == 'u'] + try: + ms = mergemod.mergestate.read(repo) + except error.UnsupportedMergeRecords as e: + s = ' '.join(e.recordtypes) + ui.warn( + _('warning: merge state has unsupported record types: %s\n') % s) + unresolved = 0 + else: + unresolved = [f for f in ms if ms[f] == 'u'] subs = [s for s in ctx.substate if ctx.sub(s).dirty()] @@ -6227,6 +6445,8 @@ t = ', '.join(t) cleanworkdir = False + if repo.vfs.exists('graftstate'): + t += _(' (graft in progress)') if repo.vfs.exists('updatestate'): t += _(' (interrupted update)') elif len(parents) > 1: @@ -6284,6 +6504,18 @@ if draft or secret: ui.status(_('phases: %s\n') % ', '.join(t)) + if obsolete.isenabled(repo, obsolete.createmarkersopt): + for trouble in ("unstable", "divergent", "bumped"): + numtrouble = len(repo.revs(trouble + "()")) + # We write all the possibilities to ease translation + troublemsg = { + "unstable": _("unstable: %d changeset"), + "divergent": _("divergent: %d changeset"), + "bumped": _("bumped: %d changeset"), + } + if numtrouble > 0: + ui.status(troublemsg[trouble] % numtrouble + "\n") + cmdutil.summaryhooks(ui, repo) if opts.get('remote'): @@ -6504,6 +6736,7 @@ This lists both regular and local tags. 
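Both branches of the rewritten serve code now hand ``cmdutil.service`` an object exposing ``init`` and ``run``; the HTTP-specific ``httpservice`` class removed here presumably moves behind ``hgweb.createservice``. A toy, self-contained sketch of the duck-typed contract the refactor relies on (class and method bodies are illustrative only)::

    class toyservice(object):
        """Anything exposing init() and run() satisfies the serve contract."""
        def init(self):
            print('setting up')   # bind sockets, install signal handlers, ...
        def run(self):
            print('serving')      # block until interrupted, e.g. serve_forever()

    service = toyservice()
    # the command itself then only needs:
    #   cmdutil.service(opts, initfn=service.init, runfn=service.run)
    service.init()
    service.run()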
When the -v/--verbose switch is used, a third column "local" is printed for local tags. + When the -q/--quiet switch is used, only the tag name is printed. Returns 0 on success. """ @@ -6582,10 +6815,10 @@ tr.close() except error.BundleUnknownFeatureError as exc: raise error.Abort(_('%s: unknown bundle feature, %s') - % (fname, exc), - hint=_("see https://mercurial-scm.org/" - "wiki/BundleFeature for more " - "information")) + % (fname, exc), + hint=_("see https://mercurial-scm.org/" + "wiki/BundleFeature for more " + "information")) finally: if tr: tr.release() diff -r e240e914d226 -r 8f016345e6b0 mercurial/commandserver.py --- a/mercurial/commandserver.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/commandserver.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,10 +5,21 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from i18n import _ +from __future__ import absolute_import + +import SocketServer +import errno +import os import struct -import sys, os, errno, traceback, SocketServer -import dispatch, encoding, util, error +import sys +import traceback + +from .i18n import _ +from . import ( + encoding, + error, + util, +) logfile = None @@ -32,6 +43,10 @@ self.out = out self.channel = channel + @property + def name(self): + return '<%c-channel>' % self.channel + def write(self, data): if not data: return @@ -64,6 +79,10 @@ self.out = out self.channel = channel + @property + def name(self): + return '<%c-channel>' % self.channel + def read(self, size=-1): if size < 0: # if we need to consume all the clients input, ask for 4k chunks @@ -174,6 +193,7 @@ def runcommand(self): """ reads a list of \0 terminated arguments, executes and writes the return code to the result channel """ + from . import dispatch # avoid cycle length = struct.unpack('>I', self._read(4))[0] if not length: diff -r e240e914d226 -r 8f016345e6b0 mercurial/context.py --- a/mercurial/context.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/context.py Fri Dec 18 14:40:11 2015 -0600 @@ -221,9 +221,10 @@ return self._parents[0] def p2(self): - if len(self._parents) == 2: - return self._parents[1] - return changectx(self._repo, -1) + parents = self._parents + if len(parents) == 2: + return parents[1] + return changectx(self._repo, nullrev) def _fileinfo(self, path): if '_manifest' in self.__dict__: @@ -270,7 +271,7 @@ r = self._repo return matchmod.match(r.root, r.getcwd(), pats, include, exclude, default, - auditor=r.auditor, ctx=self, + auditor=r.nofsauditor, ctx=self, listsubrepos=listsubrepos, badfn=badfn) def diff(self, ctx2=None, match=None, **opts): @@ -335,17 +336,19 @@ if listsubrepos: for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): - rev2 = ctx2.subrev(subpath) try: - submatch = matchmod.narrowmatcher(subpath, match) - s = sub.status(rev2, match=submatch, ignored=listignored, - clean=listclean, unknown=listunknown, - listsubrepos=True) - for rfiles, sfiles in zip(r, s): - rfiles.extend("%s/%s" % (subpath, f) for f in sfiles) - except error.LookupError: - self._repo.ui.status(_("skipping missing " - "subrepository: %s\n") % subpath) + rev2 = ctx2.subrev(subpath) + except KeyError: + # A subrepo that existed in node1 was deleted between + # node1 and node2 (inclusive). Thus, ctx2's substate + # won't contain that subpath. The best we can do ignore it. 
+ rev2 = None + submatch = matchmod.narrowmatcher(subpath, match) + s = sub.status(rev2, match=submatch, ignored=listignored, + clean=listclean, unknown=listunknown, + listsubrepos=True) + for rfiles, sfiles in zip(r, s): + rfiles.extend("%s/%s" % (subpath, f) for f in sfiles) for l in r: l.sort() @@ -511,10 +514,11 @@ @propertycache def _parents(self): - p = self._repo.changelog.parentrevs(self._rev) - if p[1] == nullrev: - p = p[:-1] - return [changectx(self._repo, x) for x in p] + repo = self._repo + p1, p2 = repo.changelog.parentrevs(self._rev) + if p2 == nullrev: + return [changectx(repo, p1)] + return [changectx(repo, p1), changectx(repo, p2)] def changeset(self): return self._changeset @@ -747,11 +751,22 @@ def islink(self): return 'l' in self.flags() + def isabsent(self): + """whether this filectx represents a file not in self._changectx + + This is mainly for merge code to detect change/delete conflicts. This is + expected to be True for all subclasses of basectx.""" + return False + + _customcmp = False def cmp(self, fctx): """compare with other file context returns True if different than fctx. """ + if fctx._customcmp: + return fctx.cmp(self) + if (fctx._filerev is None and (self._repo._encodefilterpats # if file data starts with '\1\n', empty metadata block is @@ -1140,17 +1155,17 @@ # filesystem doesn't support them copiesget = self._repo.dirstate.copies().get - - if len(self._parents) < 2: + parents = self.parents() + if len(parents) < 2: # when we have one parent, it's easy: copy from parent - man = self._parents[0].manifest() + man = parents[0].manifest() def func(f): f = copiesget(f, f) return man.flags(f) else: # merges are tricky: we try to reconstruct the unstored # result from the merge (issue1802) - p1, p2 = self._parents + p1, p2 = parents pa = p1.ancestor(p2) m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest() @@ -1180,10 +1195,11 @@ an extra 'a'. This is used by manifests merge to see that files are different and by update logic to avoid deleting newly added files. 
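The new ``_customcmp`` hook lets a specialised file context (the absent-file placeholder introduced in the filemerge changes later in this patch) take over comparison: ``cmp`` defers to the other object whenever that flag is set. A toy illustration of the dispatch, using invented classes::

    class normalfile(object):
        _customcmp = False
        def __init__(self, data):
            self._data = data
        def cmp(self, other):
            if other._customcmp:
                return other.cmp(self)       # let the special side decide
            return self._data != other._data

    class absentfile(object):
        _customcmp = True
        def isabsent(self):
            return True
        def cmp(self, other):
            # an absent file only matches another absent file
            return not getattr(other, 'isabsent', lambda: False)()

    print(normalfile('a').cmp(absentfile()))   # True: they differ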
""" + parents = self.parents() - man1 = self._parents[0].manifest() + man1 = parents[0].manifest() man = man1.copy() - if len(self._parents) > 1: + if len(parents) > 1: man2 = self.p2().manifest() def getman(f): if f in man1: @@ -1694,7 +1710,7 @@ def date(self): t, tz = self._changectx.date() try: - return (util.statmtimesec(self._repo.wvfs.lstat(self._path)), tz) + return (self._repo.wvfs.lstat(self._path).st_mtime, tz) except OSError as err: if err.errno != errno.ENOENT: raise diff -r e240e914d226 -r 8f016345e6b0 mercurial/crecord.py --- a/mercurial/crecord.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/crecord.py Fri Dec 18 14:40:11 2015 -0600 @@ -24,6 +24,7 @@ encoding, error, patch as patchmod, + util, ) # This is required for ncurses to display non-ASCII characters in default user @@ -450,11 +451,11 @@ # if there are no changed files if len(headers) == 0: - return [] + return [], {} uiheaders = [uiheader(h) for h in headers] # let user choose headers/hunks/lines, and mark their applied flags # accordingly - chunkselector(ui, uiheaders) + ret = chunkselector(ui, uiheaders) appliedhunklist = [] for hdr in uiheaders: if (hdr.applied and @@ -472,7 +473,7 @@ else: fixoffset += hnk.removed - hnk.added - return appliedhunklist + return (appliedhunklist, ret) def gethw(): """ @@ -501,6 +502,7 @@ raise error.Abort(chunkselector.initerr) # ncurses does not restore signal handler for SIGTSTP signal.signal(signal.SIGTSTP, f) + return chunkselector.opts def testdecorator(testfn, f): def u(*args, **kwargs): @@ -521,6 +523,7 @@ while True: if chunkselector.handlekeypressed(testcommands.pop(0), test=True): break + return chunkselector.opts class curseschunkselector(object): def __init__(self, headerlist, ui): @@ -528,6 +531,7 @@ self.headerlist = patch(headerlist) self.ui = ui + self.opts = {} self.errorstr = None # list of all chunks @@ -1007,7 +1011,7 @@ pairname="legend") printstring(self.statuswin, " (f)old/unfold; (c)onfirm applied; (q)uit; (?) help " - "| [X]=hunk applied **=folded", + "| [X]=hunk applied **=folded, toggle [a]mend mode", pairname="legend") except curses.error: pass @@ -1363,7 +1367,7 @@ F : fold / unfold parent item and all of its ancestors m : edit / resume editing the commit message e : edit the currently selected hunk - a : toggle amend mode (hg rev >= 2.2) + a : toggle amend mode (hg rev >= 2.2), only with commit -i c : confirm selected changes r : review/edit and confirm selected changes q : quit without confirming (no changes will be made) @@ -1430,6 +1434,35 @@ else: return False + def toggleamend(self, opts, test): + """Toggle the amend flag. + + When the amend flag is set, a commit will modify the most recently + committed changeset, instead of creating a new changeset. Otherwise, a + new changeset will be created (the normal commit behavior). 
+ + """ + try: + ver = float(util.version()[:3]) + except ValueError: + ver = 1 + if ver < 2.19: + msg = ("The amend option is unavailable with hg versions < 2.2\n\n" + "Press any key to continue.") + elif opts.get('amend') is None: + opts['amend'] = True + msg = ("Amend option is turned on -- commiting the currently " + "selected changes will not create a new changeset, but " + "instead update the most recently committed changeset.\n\n" + "Press any key to continue.") + elif opts.get('amend') is True: + opts['amend'] = None + msg = ("Amend option is turned off -- commiting the currently " + "selected changes will create a new changeset.\n\n" + "Press any key to continue.") + if not test: + self.confirmationwindow(msg) + def recenterdisplayedarea(self): """ once we scrolled with pg up pg down we can be pointing outside of the @@ -1567,6 +1600,8 @@ self.leftarrowshiftevent() elif keypressed in ["q"]: raise error.Abort(_('user quit')) + elif keypressed in ['a']: + self.toggleamend(self.opts, test) elif keypressed in ["c"]: if self.confirmcommit(): return True diff -r e240e914d226 -r 8f016345e6b0 mercurial/demandimport.py --- a/mercurial/demandimport.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/demandimport.py Fri Dec 18 14:40:11 2015 -0600 @@ -245,6 +245,10 @@ def enable(): "enable global demand-loading of modules" + # PyPy doesn't work with demand import. + if '__pypy__' in sys.builtin_module_names: + return + if os.environ.get('HGDEMANDIMPORT') != 'disable': builtins.__import__ = _demandimport diff -r e240e914d226 -r 8f016345e6b0 mercurial/destutil.py --- a/mercurial/destutil.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/destutil.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,6 +5,8 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import + from .i18n import _ from . import ( bookmarks, @@ -198,3 +200,18 @@ else: node = _destmergebranch(repo) return repo[node].rev() + +histeditdefaultrevset = 'reverse(only(.) and not public() and not ::merge())' + +def desthistedit(ui, repo): + """Default base revision to edit for `hg histedit`.""" + default = ui.config('histedit', 'defaultrev', histeditdefaultrevset) + if default: + revs = repo.revs(default) + if revs: + # The revset supplied by the user may not be in ascending order nor + # take the first revision. So do this manually. 
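The ``histedit.defaultrev`` option read above can be set in a configuration file to change which revisions ``hg histedit`` edits by default; for example, limiting it to draft-phase ancestors of the working directory (an illustrative value, not a recommendation)::

    [histedit]
    defaultrev = only(.) and draft()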
+ revs.sort() + return revs.first() + + return None diff -r e240e914d226 -r 8f016345e6b0 mercurial/dirstate.py --- a/mercurial/dirstate.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/dirstate.py Fri Dec 18 14:40:11 2015 -0600 @@ -31,7 +31,7 @@ '''Get "now" timestamp on filesystem''' tmpfd, tmpname = vfs.mkstemp() try: - return util.statmtimesec(os.fstat(tmpfd)) + return os.fstat(tmpfd).st_mtime finally: os.close(tmpfd) vfs.unlink(tmpname) @@ -471,7 +471,7 @@ def normal(self, f): '''Mark a file normal and clean.''' s = os.lstat(self._join(f)) - mtime = util.statmtimesec(s) + mtime = s.st_mtime self._addpath(f, 'n', s.st_mode, s.st_size & _rangemask, mtime & _rangemask) if f in self._copymap: @@ -639,17 +639,22 @@ def rebuild(self, parent, allfiles, changedfiles=None): if changedfiles is None: + # Rebuild entire dirstate changedfiles = allfiles - oldmap = self._map - self.clear() - for f in allfiles: - if f not in changedfiles: - self._map[f] = oldmap[f] + lastnormaltime = self._lastnormaltime + self.clear() + self._lastnormaltime = lastnormaltime + + for f in changedfiles: + mode = 0o666 + if f in allfiles and 'x' in allfiles.flags(f): + mode = 0o777 + + if f in allfiles: + self._map[f] = dirstatetuple('n', mode, -1, 0) else: - if 'x' in allfiles.flags(f): - self._map[f] = dirstatetuple('n', 0o777, -1, 0) - else: - self._map[f] = dirstatetuple('n', 0o666, -1, 0) + self._map.pop(f, None) + self._pl = (parent, nullid) self._dirty = True @@ -657,13 +662,6 @@ if not self._dirty: return - # enough 'delaywrite' prevents 'pack_dirstate' from dropping - # timestamp of each entries in dirstate, because of 'now > mtime' - delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0) - if delaywrite > 0: - import time # to avoid useless import - time.sleep(delaywrite) - filename = self._filename if tr is False: # not explicitly specified if (self._ui.configbool('devel', 'all-warnings') @@ -704,7 +702,24 @@ def _writedirstate(self, st): # use the modification time of the newly created temporary file as the # filesystem's notion of 'now' - now = util.statmtimesec(util.fstat(st)) & _rangemask + now = util.fstat(st).st_mtime & _rangemask + + # enough 'delaywrite' prevents 'pack_dirstate' from dropping + # timestamp of each entries in dirstate, because of 'now > mtime' + delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0) + if delaywrite > 0: + # do we have any files to delay for? + for f, e in self._map.iteritems(): + if e[0] == 'n' and e[3] == now: + import time # to avoid useless import + # rather than sleep n seconds, sleep until the next + # multiple of n seconds + clock = time.time() + start = int(clock) - (int(clock) % delaywrite) + end = start + delaywrite + time.sleep(end - clock) + break + st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now)) st.close() self._lastnormaltime = 0 @@ -1008,14 +1023,8 @@ # We may not have walked the full directory tree above, # so stat and check everything we missed. 
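The relocated ``delaywrite`` handling earlier in this dirstate hunk now sleeps to the next multiple of the configured delay instead of a fixed interval, and only when some entry's mtime equals the freshly computed ``now``. A small worked example of the rounding, with an invented clock value::

    delaywrite = 2                                   # debug.dirstate.delaywrite, seconds
    clock = 123.4                                    # pretend time.time() returned this
    start = int(clock) - (int(clock) % delaywrite)   # 122: start of the current window
    end = start + delaywrite                         # 124: start of the next window
    print(end - clock)   # 0.6: sleep just long enough for 'now' to move past the mtimes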
nf = iter(visit).next - pos = 0 - while pos < len(visit): - # visit in mid-sized batches so that we don't - # block signals indefinitely - xr = xrange(pos, min(len(visit), pos + 1000)) - for st in util.statfiles([join(visit[n]) for n in xr]): - results[nf()] = st - pos += 1000 + for st in util.statfiles([join(i) for i in visit]): + results[nf()] = st return results def status(self, match, subrepos, ignored, clean, unknown): @@ -1084,16 +1093,15 @@ if not st and state in "nma": dadd(fn) elif state == 'n': - mtime = util.statmtimesec(st) if (size >= 0 and ((size != st.st_size and size != st.st_size & _rangemask) or ((mode ^ st.st_mode) & 0o100 and checkexec)) or size == -2 # other parent or fn in copymap): madd(fn) - elif time != mtime and time != mtime & _rangemask: + elif time != st.st_mtime and time != st.st_mtime & _rangemask: ladd(fn) - elif mtime == lastnormaltime: + elif st.st_mtime == lastnormaltime: # fn may have just been marked as normal and it may have # changed in the same second without changing its size. # This can happen if we quickly do multiple commits. diff -r e240e914d226 -r 8f016345e6b0 mercurial/discovery.py --- a/mercurial/discovery.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/discovery.py Fri Dec 18 14:40:11 2015 -0600 @@ -238,12 +238,42 @@ unsynced = set() return {None: (oldheads, newheads, unsynced)} -def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False, - newbookmarks=[]): +def _nowarnheads(pushop): + # Compute newly pushed bookmarks. We don't warn about bookmarked heads. + + # internal config: bookmarks.pushing + newbookmarks = pushop.ui.configlist('bookmarks', 'pushing') + + repo = pushop.repo.unfiltered() + remote = pushop.remote + localbookmarks = repo._bookmarks + remotebookmarks = remote.listkeys('bookmarks') + bookmarkedheads = set() + for bm in localbookmarks: + rnode = remotebookmarks.get(bm) + if rnode and rnode in repo: + lctx, rctx = repo[bm], repo[rnode] + if bookmarks.validdest(repo, rctx, lctx): + bookmarkedheads.add(lctx.node()) + else: + if bm in newbookmarks and bm not in remotebookmarks: + bookmarkedheads.add(repo[bm].node()) + + return bookmarkedheads + +def checkheads(pushop): """Check that a push won't add any outgoing head raise Abort error and display ui message as needed. """ + + repo = pushop.repo.unfiltered() + remote = pushop.remote + outgoing = pushop.outgoing + remoteheads = pushop.remoteheads + newbranch = pushop.newbranch + inc = bool(pushop.incoming) + # Check for each named branch if we're creating new remote heads. # To be a remote head after push, node must be either: # - unknown locally @@ -268,19 +298,8 @@ hint=_("use 'hg push --new-branch' to create" " new remote branches")) - # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads. - localbookmarks = repo._bookmarks - remotebookmarks = remote.listkeys('bookmarks') - bookmarkedheads = set() - for bm in localbookmarks: - rnode = remotebookmarks.get(bm) - if rnode and rnode in repo: - lctx, rctx = repo[bm], repo[rnode] - if bookmarks.validdest(repo, rctx, lctx): - bookmarkedheads.add(lctx.node()) - else: - if bm in newbookmarks and bm not in remotebookmarks: - bookmarkedheads.add(repo[bm].node()) + # 2. Find heads that we need not warn about + nowarnheads = _nowarnheads(pushop) # 3. Check for new heads. 
# If there are more heads after the push than before, a suitable @@ -366,7 +385,7 @@ " pushing new heads") elif len(newhs) > len(oldhs): # remove bookmarked or existing remote heads from the new heads list - dhs = sorted(newhs - bookmarkedheads - oldhs) + dhs = sorted(newhs - nowarnheads - oldhs) if dhs: if errormsg is None: if branch not in ('default', None): diff -r e240e914d226 -r 8f016345e6b0 mercurial/dispatch.py --- a/mercurial/dispatch.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/dispatch.py Fri Dec 18 14:40:11 2015 -0600 @@ -320,7 +320,6 @@ except socket.error as inst: ui.warn(_("abort: %s\n") % inst.args[-1]) except: # re-raises - myver = util.version() # For compatibility checking, we discard the portion of the hg # version after the + on the assumption that if a "normal # user" is running a build with a + in it the packager @@ -328,8 +327,7 @@ # 'make local' copy of hg (where the version number can be out # of date) will be clueful enough to notice the implausible # version number and try updating. - compare = myver.split('+')[0] - ct = tuplever(compare) + ct = util.versiontuple(n=2) worst = None, ct, '' if ui.config('ui', 'supportcontact', None) is None: for name, mod in extensions.extensions(): @@ -344,7 +342,7 @@ if testedwith == 'internal': continue - tested = [tuplever(t) for t in testedwith.split()] + tested = [util.versiontuple(t, 2) for t in testedwith.split()] if ct in tested: continue @@ -369,7 +367,8 @@ warning = (_("** unknown exception encountered, " "please report by visiting\n** ") + bugtracker + '\n') warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) + - (_("** Mercurial Distributed SCM (version %s)\n") % myver) + + (_("** Mercurial Distributed SCM (version %s)\n") % + util.version()) + (_("** Extensions loaded: %s\n") % ", ".join([x[0] for x in extensions.extensions()]))) ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc()) @@ -378,15 +377,6 @@ return -1 -def tuplever(v): - try: - # Assertion: tuplever is only used for extension compatibility - # checking. Otherwise, the discarding of extra version fields is - # incorrect. - return tuple([int(i) for i in v.split('.')[0:2]]) - except ValueError: - return tuple() - def aliasargs(fn, givenargs): args = getattr(fn, 'args', []) if args: @@ -864,7 +854,7 @@ if options['version']: return commands.version_(ui) if options['help']: - return commands.help_(ui, cmd, command=True) + return commands.help_(ui, cmd, command=cmd is not None) elif not cmd: return commands.help_(ui, 'shortlist') diff -r e240e914d226 -r 8f016345e6b0 mercurial/encoding.py --- a/mercurial/encoding.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/encoding.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,8 +5,15 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import error -import unicodedata, locale, os +from __future__ import absolute_import + +import locale +import os +import unicodedata + +from . import ( + error, +) # These unicode characters are ignored by HFS+ (Apple Technote 1150, # "Unicode Subtleties"), so we need to ignore them in some places for @@ -194,7 +201,7 @@ 'ellipsis' is always placed at trimmed side. >>> ellipsis = '+++' - >>> from mercurial import encoding + >>> from . 
import encoding >>> encoding.encoding = 'utf-8' >>> t= '1234567890' >>> print trim(t, 12, ellipsis=ellipsis) @@ -290,7 +297,7 @@ def asciilower(s): # delay importing avoids cyclic dependency around "parsers" in # pure Python build (util => i18n => encoding => parsers => util) - import parsers + from . import parsers impl = getattr(parsers, 'asciilower', _asciilower) global asciilower asciilower = impl @@ -306,7 +313,7 @@ def asciiupper(s): # delay importing avoids cyclic dependency around "parsers" in # pure Python build (util => i18n => encoding => parsers => util) - import parsers + from . import parsers impl = getattr(parsers, 'asciiupper', _asciiupper) global asciiupper asciiupper = impl @@ -400,7 +407,7 @@ if not _jsonmap: for x in xrange(32): - _jsonmap[chr(x)] = "\u%04x" %x + _jsonmap[chr(x)] = "\\u%04x" % x for x in xrange(32, 256): c = chr(x) _jsonmap[c] = c @@ -414,6 +421,25 @@ return ''.join(_jsonmap[c] for c in toutf8b(s)) +_utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4] + +def getutf8char(s, pos): + '''get the next full utf-8 character in the given string, starting at pos + + Raises a UnicodeError if the given location does not start a valid + utf-8 character. + ''' + + # find how many bytes to attempt decoding from first nibble + l = _utf8len[ord(s[pos]) >> 4] + if not l: # ascii + return s[pos] + + c = s[pos:pos + l] + # validate with attempted decode + c.decode("utf-8") + return c + def toutf8b(s): '''convert a local, possibly-binary string into UTF-8b @@ -444,24 +470,32 @@ internal surrogate encoding as a UTF-8 string.) ''' - if isinstance(s, localstr): - return s._utf8 + if "\xed" not in s: + if isinstance(s, localstr): + return s._utf8 + try: + s.decode('utf-8') + return s + except UnicodeDecodeError: + pass - try: - s.decode('utf-8') - return s - except UnicodeDecodeError: - # surrogate-encode any characters that don't round-trip - s2 = s.decode('utf-8', 'ignore').encode('utf-8') - r = "" - pos = 0 - for c in s: - if s2[pos:pos + 1] == c: - r += c + r = "" + pos = 0 + l = len(s) + while pos < l: + try: + c = getutf8char(s, pos) + if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf": + # have to re-escape existing U+DCxx characters + c = unichr(0xdc00 + ord(s[pos])).encode('utf-8') pos += 1 else: - r += unichr(0xdc00 + ord(c)).encode('utf-8') - return r + pos += len(c) + except UnicodeDecodeError: + c = unichr(0xdc00 + ord(s[pos])).encode('utf-8') + pos += 1 + r += c + return r def fromutf8b(s): '''Given a UTF-8b string, return a local, possibly-binary string. @@ -470,11 +504,17 @@ is a round-trip process for strings like filenames, but metadata that's was passed through tolocal will remain in UTF-8. 
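The UTF-8b scheme above maps every byte that fails to decode as UTF-8 onto the low surrogate range U+DC00..U+DCFF, so arbitrary byte strings survive a decode/encode round trip. A compact Python 2 style sketch of just the per-byte escape and its inverse (helper names are made up)::

    def escapebyte(b):
        # an undecodable byte 0xNN becomes U+DCNN, stored as UTF-8
        return unichr(0xdc00 + ord(b)).encode('utf-8')

    def unescapechar(c):
        # the inverse: a code point in U+DC00..U+DCFF stands for one raw byte
        if 0xdc00 <= ord(c) <= 0xdcff:
            return chr(ord(c) & 0xff)
        return c.encode('utf-8')

    print(repr(escapebyte('\x99')))   # '\xed\xb2\x99', as in the doctest below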
+ >>> roundtrip = lambda x: fromutf8b(toutf8b(x)) == x >>> m = "\\xc3\\xa9\\x99abcd" - >>> n = toutf8b(m) - >>> n + >>> toutf8b(m) '\\xc3\\xa9\\xed\\xb2\\x99abcd' - >>> fromutf8b(n) == m + >>> roundtrip(m) + True + >>> roundtrip("\\xc2\\xc2\\x80") + True + >>> roundtrip("\\xef\\xbf\\xbd") + True + >>> roundtrip("\\xef\\xef\\xbf\\xbd") True ''' @@ -485,7 +525,7 @@ u = s.decode("utf-8") r = "" for c in u: - if ord(c) & 0xff00 == 0xdc00: + if ord(c) & 0xffff00 == 0xdc00: r += chr(ord(c) & 0xff) else: r += c.encode("utf-8") diff -r e240e914d226 -r 8f016345e6b0 mercurial/error.py --- a/mercurial/error.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/error.py Fri Dec 18 14:40:11 2015 -0600 @@ -72,6 +72,12 @@ class UpdateAbort(Abort): """Raised when an update is aborted for destination issue""" +class ResponseExpected(Abort): + """Raised when an EOF is received for a prompt""" + def __init__(self): + from .i18n import _ + Abort.__init__(self, _('response expected')) + class OutOfBandError(Exception): """Exception raised when a remote repo reports failure""" @@ -106,6 +112,16 @@ class RequirementError(RepoError): """Exception raised if .hg/requires has an unknown entry.""" +class UnsupportedMergeRecords(Abort): + def __init__(self, recordtypes): + from .i18n import _ + self.recordtypes = sorted(recordtypes) + s = ' '.join(self.recordtypes) + Abort.__init__( + self, _('unsupported merge state records: %s') % s, + hint=_('see https://mercurial-scm.org/wiki/MergeStateRecords for ' + 'more information')) + class LockError(IOError): def __init__(self, errno, strerror, filename, desc): IOError.__init__(self, errno, strerror, filename) diff -r e240e914d226 -r 8f016345e6b0 mercurial/exchange.py --- a/mercurial/exchange.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/exchange.py Fri Dec 18 14:40:11 2015 -0600 @@ -571,13 +571,7 @@ elif ctx.troubled(): raise error.Abort(mst[ctx.troubles()[0]] % ctx) - # internal config: bookmarks.pushing - newbm = pushop.ui.configlist('bookmarks', 'pushing') - discovery.checkheads(unfi, pushop.remote, outgoing, - pushop.remoteheads, - pushop.newbranch, - bool(pushop.incoming), - newbm) + discovery.checkheads(pushop) return True # List of names of steps to perform for an outgoing bundle2, order matters. @@ -1427,6 +1421,11 @@ return func return dec +def bundle2requested(bundlecaps): + if bundlecaps is not None: + return any(cap.startswith('HG2') for cap in bundlecaps) + return False + def getbundle(repo, source, heads=None, common=None, bundlecaps=None, **kwargs): """return a full bundle (with potentially multiple kind of parts) @@ -1442,10 +1441,8 @@ The implementation is at a very early stage and will get massive rework when the API of bundle is refined. 
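Just below, extensions.py starts honouring a ``minimumhgversion`` attribute by comparing two-component version tuples against the running Mercurial. A rough standalone sketch of that comparison (``versiontuple`` here is a simplified stand-in for ``util.versiontuple``)::

    def versiontuple(v, n=2):
        # '3.6.2+20151218' -> (3, 6); non-numeric trailing parts are dropped
        nums = []
        for part in v.split('+')[0].split('.')[:n]:
            try:
                nums.append(int(part))
            except ValueError:
                break
        return tuple(nums)

    minimumhgversion = '3.6'   # what a third-party extension might declare
    running = '3.5.2'          # pretend util.version() returned this
    if versiontuple(minimumhgversion) > versiontuple(running):
        print('(extension requires %s or newer; disabling)' % minimumhgversion)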
""" + usebundle2 = bundle2requested(bundlecaps) # bundle10 case - usebundle2 = False - if bundlecaps is not None: - usebundle2 = any((cap.startswith('HG2') for cap in bundlecaps)) if not usebundle2: if bundlecaps and not kwargs.get('cg', True): raise ValueError(_('request for bundle10 must include changegroup')) diff -r e240e914d226 -r 8f016345e6b0 mercurial/extensions.py --- a/mercurial/extensions.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/extensions.py Fri Dec 18 14:40:11 2015 -0600 @@ -24,7 +24,8 @@ _extensions = {} _aftercallbacks = {} _order = [] -_ignore = ['hbisect', 'bookmarks', 'parentrevspec', 'interhg', 'inotify'] +_builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg', + 'inotify']) def extensions(ui=None): if ui: @@ -75,7 +76,7 @@ shortname = name[6:] else: shortname = name - if shortname in _ignore: + if shortname in _builtin: return None if shortname in _extensions: return _extensions[shortname] @@ -100,6 +101,17 @@ if ui.debugflag: ui.traceback() mod = importh(name) + + # Before we do anything with the extension, check against minimum stated + # compatibility. This gives extension authors a mechanism to have their + # extensions short circuit when loaded with a known incompatible version + # of Mercurial. + minver = getattr(mod, 'minimumhgversion', None) + if minver and util.versiontuple(minver, 2) > util.versiontuple(n=2): + ui.warn(_('(third party extension %s requires version %s or newer ' + 'of Mercurial; disabling)\n') % (shortname, minver)) + return + _extensions[shortname] = mod _order.append(shortname) for fn in _aftercallbacks.get(shortname, []): diff -r e240e914d226 -r 8f016345e6b0 mercurial/filemerge.py --- a/mercurial/filemerge.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/filemerge.py Fri Dec 18 14:40:11 2015 -0600 @@ -13,9 +13,10 @@ import tempfile from .i18n import _ -from .node import short +from .node import nullid, short from . import ( + cmdutil, error, match, simplemerge, @@ -43,6 +44,50 @@ mergeonly = 'mergeonly' # just the full merge, no premerge fullmerge = 'fullmerge' # both premerge and merge +class absentfilectx(object): + """Represents a file that's ostensibly in a context but is actually not + present in it. + + This is here because it's very specific to the filemerge code for now -- + other code is likely going to break with the values this returns.""" + def __init__(self, ctx, f): + self._ctx = ctx + self._f = f + + def path(self): + return self._f + + def size(self): + return None + + def data(self): + return None + + def filenode(self): + return nullid + + _customcmp = True + def cmp(self, fctx): + """compare with other file context + + returns True if different from fctx. 
+ """ + return not (fctx.isabsent() and + fctx.ctx() == self.ctx() and + fctx.path() == self.path()) + + def flags(self): + return '' + + def changectx(self): + return self._ctx + + def isbinary(self): + return False + + def isabsent(self): + return True + def internaltool(name, mergetype, onfailure=None, precheck=None): '''return a decorator for populating internal merge tool table''' def decorator(func): @@ -75,8 +120,11 @@ exe = _toolstr(ui, tool, "executable", tool) return util.findexe(util.expandpath(exe)) -def _picktool(repo, ui, path, binary, symlink): - def check(tool, pat, symlink, binary): +def _picktool(repo, ui, path, binary, symlink, changedelete): + def supportscd(tool): + return tool in internals and internals[tool].mergetype == nomerge + + def check(tool, pat, symlink, binary, changedelete): tmsg = tool if pat: tmsg += " specified for " + pat @@ -89,6 +137,10 @@ ui.warn(_("tool %s can't handle symlinks\n") % tmsg) elif binary and not _toolbool(ui, tool, "binary"): ui.warn(_("tool %s can't handle binary\n") % tmsg) + elif changedelete and not supportscd(tool): + # the nomerge tools are the only tools that support change/delete + # conflicts + pass elif not util.gui() and _toolbool(ui, tool, "gui"): ui.warn(_("tool %s requires a GUI\n") % tmsg) else: @@ -100,21 +152,27 @@ force = ui.config('ui', 'forcemerge') if force: toolpath = _findtool(ui, force) - if toolpath: - return (force, util.shellquote(toolpath)) + if changedelete and not supportscd(toolpath): + return ":prompt", None else: - # mimic HGMERGE if given tool not found - return (force, force) + if toolpath: + return (force, util.shellquote(toolpath)) + else: + # mimic HGMERGE if given tool not found + return (force, force) # HGMERGE takes next precedence hgmerge = os.environ.get("HGMERGE") if hgmerge: - return (hgmerge, hgmerge) + if changedelete and not supportscd(hgmerge): + return ":prompt", None + else: + return (hgmerge, hgmerge) # then patterns for pat, tool in ui.configitems("merge-patterns"): mf = match.match(repo.root, '', [pat]) - if mf(path) and check(tool, pat, symlink, False): + if mf(path) and check(tool, pat, symlink, False, changedelete): toolpath = _findtool(ui, tool) return (tool, util.shellquote(toolpath)) @@ -131,17 +189,19 @@ tools = sorted([(-p, t) for t, p in tools.items() if t not in disabled]) uimerge = ui.config("ui", "merge") if uimerge: - if uimerge not in names: + # external tools defined in uimerge won't be able to handle + # change/delete conflicts + if uimerge not in names and not changedelete: return (uimerge, uimerge) tools.insert(0, (None, uimerge)) # highest priority tools.append((None, "hgmerge")) # the old default, if found for p, t in tools: - if check(t, None, symlink, binary): + if check(t, None, symlink, binary, changedelete): toolpath = _findtool(ui, t) return (t, util.shellquote(toolpath)) # internal merge or prompt as last resort - if symlink or binary: + if symlink or binary or changedelete: return ":prompt", None return ":merge", None @@ -175,23 +235,53 @@ ui = repo.ui fd = fcd.path() - if ui.promptchoice(_(" no tool found to merge %s\n" - "keep (l)ocal or take (o)ther?" - "$$ &Local $$ &Other") % fd, 0): - return _iother(repo, mynode, orig, fcd, fco, fca, toolconf) - else: - return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf) + try: + if fco.isabsent(): + index = ui.promptchoice( + _("local changed %s which remote deleted\n" + "use (c)hanged version, (d)elete, or leave (u)nresolved?" 
+ "$$ &Changed $$ &Delete $$ &Unresolved") % fd, 0) + choice = ['local', 'other', 'unresolved'][index] + elif fcd.isabsent(): + index = ui.promptchoice( + _("remote changed %s which local deleted\n" + "use (c)hanged version, leave (d)eleted, or " + "leave (u)nresolved?" + "$$ &Changed $$ &Deleted $$ &Unresolved") % fd, 0) + choice = ['other', 'local', 'unresolved'][index] + else: + index = ui.promptchoice( + _("no tool found to merge %s\n" + "keep (l)ocal, take (o)ther, or leave (u)nresolved?" + "$$ &Local $$ &Other $$ &Unresolved") % fd, 2) + choice = ['local', 'other', 'unresolved'][index] + + if choice == 'other': + return _iother(repo, mynode, orig, fcd, fco, fca, toolconf) + elif choice == 'local': + return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf) + elif choice == 'unresolved': + return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf) + except error.ResponseExpected: + ui.write("\n") + return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf) @internaltool('local', nomerge) def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf): """Uses the local version of files as the merged version.""" - return 0 + return 0, fcd.isabsent() @internaltool('other', nomerge) def _iother(repo, mynode, orig, fcd, fco, fca, toolconf): """Uses the other version of files as the merged version.""" - repo.wwrite(fcd.path(), fco.data(), fco.flags()) - return 0 + if fco.isabsent(): + # local changed, remote deleted -- 'deleted' picked + repo.wvfs.unlinkpath(fcd.path()) + deleted = True + else: + repo.wwrite(fcd.path(), fco.data(), fco.flags()) + deleted = False + return 0, deleted @internaltool('fail', nomerge) def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf): @@ -199,11 +289,14 @@ Rather than attempting to merge files that were modified on both branches, it marks them as unresolved. The resolve command must be used to resolve these conflicts.""" - return 1 + # for change/delete conflicts write out the changed version, then fail + if fcd.isabsent(): + repo.wwrite(fcd.path(), fco.data(), fco.flags()) + return 1, False -def _premerge(repo, toolconf, files, labels=None): +def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None): tool, toolpath, binary, symlink = toolconf - if symlink: + if symlink or fcd.isabsent() or fco.isabsent(): return 1 a, b, c, back = files @@ -236,12 +329,16 @@ util.copyfile(back, a) # restore from backup and try again return 1 # continue merging -def _symlinkcheck(repo, mynode, orig, fcd, fco, fca, toolconf): +def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf): tool, toolpath, binary, symlink = toolconf if symlink: repo.ui.warn(_('warning: internal %s cannot merge symlinks ' 'for %s\n') % (tool, fcd.path())) return False + if fcd.isabsent() or fco.isabsent(): + repo.ui.warn(_('warning: internal %s cannot merge change/delete ' + 'conflict for %s\n') % (tool, fcd.path())) + return False return True def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode): @@ -255,12 +352,12 @@ ui = repo.ui r = simplemerge.simplemerge(ui, a, b, c, label=labels, mode=mode) - return True, r + return True, r, False @internaltool('union', fullmerge, _("warning: conflicts while merging %s! " "(edit, then use 'hg resolve --mark')\n"), - precheck=_symlinkcheck) + precheck=_mergecheck) def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal non-interactive simple merge algorithm for merging @@ -272,7 +369,7 @@ @internaltool('merge', fullmerge, _("warning: conflicts while merging %s! 
" "(edit, then use 'hg resolve --mark')\n"), - precheck=_symlinkcheck) + precheck=_mergecheck) def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal non-interactive simple merge algorithm for merging @@ -285,7 +382,7 @@ @internaltool('merge3', fullmerge, _("warning: conflicts while merging %s! " "(edit, then use 'hg resolve --mark')\n"), - precheck=_symlinkcheck) + precheck=_mergecheck) def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal non-interactive simple merge algorithm for merging @@ -305,30 +402,26 @@ """ assert localorother is not None tool, toolpath, binary, symlink = toolconf - if symlink: - repo.ui.warn(_('warning: :merge-%s cannot merge symlinks ' - 'for %s\n') % (localorother, fcd.path())) - return False, 1 a, b, c, back = files r = simplemerge.simplemerge(repo.ui, a, b, c, label=labels, localorother=localorother) return True, r -@internaltool('merge-local', mergeonly) +@internaltool('merge-local', mergeonly, precheck=_mergecheck) def _imergelocal(*args, **kwargs): """ Like :merge, but resolve all conflicts non-interactively in favor of the local changes.""" success, status = _imergeauto(localorother='local', *args, **kwargs) - return success, status + return success, status, False -@internaltool('merge-other', mergeonly) +@internaltool('merge-other', mergeonly, precheck=_mergecheck) def _imergeother(*args, **kwargs): """ Like :merge, but resolve all conflicts non-interactively in favor of the other changes.""" success, status = _imergeauto(localorother='other', *args, **kwargs) - return success, status + return success, status, False @internaltool('tagmerge', mergeonly, _("automatic tag merging of %s failed! " @@ -338,7 +431,8 @@ """ Uses the internal tag merge algorithm (experimental). """ - return tagmerge.merge(repo, fcd, fco, fca) + success, status = tagmerge.merge(repo, fcd, fco, fca) + return success, status, False @internaltool('dump', fullmerge) def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): @@ -356,10 +450,14 @@ util.copyfile(a, a + ".local") repo.wwrite(fd + ".other", fco.data(), fco.flags()) repo.wwrite(fd + ".base", fca.data(), fca.flags()) - return False, 1 + return False, 1, False def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): tool, toolpath, binary, symlink = toolconf + if fcd.isabsent() or fco.isabsent(): + repo.ui.warn(_('warning: %s cannot merge change/delete conflict ' + 'for %s\n') % (tool, fcd.path())) + return False, 1, None a, b, c, back = files out = "" env = {'HG_FILE': fcd.path(), @@ -383,7 +481,7 @@ repo.ui.debug('launching merge tool: %s\n' % cmd) r = ui.system(cmd, cwd=repo.root, environ=env) repo.ui.debug('merge tool returned: %s\n' % r) - return True, r + return True, r, False def _formatconflictmarker(repo, ctx, template, label, pad): """Applies the given template to the ctx, prefixed by the label. @@ -448,8 +546,8 @@ fca = ancestor file context fcd = local file context for current/destination file - Returns whether the merge is complete, and the return value of the merge. - """ + Returns whether the merge is complete, the return value of the merge, and + a boolean indicating whether the file was deleted from disk.""" def temp(prefix, ctx): pre = "%s~%s." % (os.path.basename(ctx.path()), prefix) @@ -461,18 +559,19 @@ return name if not fco.cmp(fcd): # files identical? 
- return True, None + return True, None, False ui = repo.ui fd = fcd.path() binary = fcd.isbinary() or fco.isbinary() or fca.isbinary() symlink = 'l' in fcd.flags() + fco.flags() - tool, toolpath = _picktool(repo, ui, fd, binary, symlink) + changedelete = fcd.isabsent() or fco.isabsent() + tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete) if tool in internals and tool.startswith('internal:'): # normalize to new-style names (':merge' etc) tool = tool[len('internal'):] - ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" % - (tool, fd, binary, symlink)) + ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n" + % (tool, fd, binary, symlink, changedelete)) if tool in internals: func = internals[tool] @@ -488,7 +587,8 @@ toolconf = tool, toolpath, binary, symlink if mergetype == nomerge: - return True, func(repo, mynode, orig, fcd, fco, fca, toolconf) + r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf) + return True, r, deleted if premerge: if orig != fco.path(): @@ -502,14 +602,17 @@ toolconf): if onfailure: ui.warn(onfailure % fd) - return True, 1 + return True, 1, False a = repo.wjoin(fd) b = temp("base", fca) c = temp("other", fco) - back = a + ".orig" - if premerge: - util.copyfile(a, back) + if not fcd.isabsent(): + back = cmdutil.origpath(ui, repo, a) + if premerge: + util.copyfile(a, back) + else: + back = None files = (a, b, c, back) r = 1 @@ -521,12 +624,13 @@ labels = _formatlabels(repo, fcd, fco, fca, labels) if premerge and mergetype == fullmerge: - r = _premerge(repo, toolconf, files, labels=labels) + r = _premerge(repo, fcd, fco, fca, toolconf, files, labels=labels) # complete if premerge successful (r is 0) - return not r, r + return not r, r, False - needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf, files, - labels=labels) + needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca, + toolconf, files, labels=labels) + if needcheck: r = _check(r, ui, tool, fcd, files) @@ -534,9 +638,9 @@ if onfailure: ui.warn(onfailure % fd) - return True, r + return True, r, deleted finally: - if not r: + if not r and back is not None: util.unlink(back) util.unlink(b) util.unlink(c) @@ -561,13 +665,13 @@ if not r and not checked and (_toolbool(ui, tool, "checkchanged") or 'changed' in _toollist(ui, tool, "check")): - if filecmp.cmp(a, back): + if back is not None and filecmp.cmp(a, back): if ui.promptchoice(_(" output file %s appears unchanged\n" "was merge successful (yn)?" "$$ &Yes $$ &No") % fd, 1): r = 1 - if _toolbool(ui, tool, "fixeol"): + if back is not None and _toolbool(ui, tool, "fixeol"): _matcheol(a, back) return r diff -r e240e914d226 -r 8f016345e6b0 mercurial/fileset.py --- a/mercurial/fileset.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/fileset.py Fri Dec 18 14:40:11 2015 -0600 @@ -159,13 +159,22 @@ def deleted(mctx, x): """``deleted()`` - File that is deleted according to :hg:`status`. + Alias for ``missing()``. """ # i18n: "deleted" is a keyword getargs(x, 0, 0, _("deleted takes no arguments")) s = mctx.status().deleted return [f for f in mctx.subset if f in s] +def missing(mctx, x): + """``missing()`` + File that is missing according to :hg:`status`. + """ + # i18n: "missing" is a keyword + getargs(x, 0, 0, _("missing takes no arguments")) + s = mctx.status().deleted + return [f for f in mctx.subset if f in s] + def unknown(mctx, x): """``unknown()`` File that is unknown according to :hg:`status`. 
These files will only be @@ -249,7 +258,7 @@ getargs(x, 0, 0, _("resolved takes no arguments")) if mctx.ctx.rev() is not None: return [] - ms = merge.mergestate(mctx.ctx.repo()) + ms = merge.mergestate.read(mctx.ctx.repo()) return [f for f in mctx.subset if f in ms and ms[f] == 'r'] def unresolved(mctx, x): @@ -260,7 +269,7 @@ getargs(x, 0, 0, _("unresolved takes no arguments")) if mctx.ctx.rev() is not None: return [] - ms = merge.mergestate(mctx.ctx.repo()) + ms = merge.mergestate.read(mctx.ctx.repo()) return [f for f in mctx.subset if f in ms and ms[f] == 'u'] def hgignore(mctx, x): @@ -441,6 +450,7 @@ 'grep': grep, 'ignored': ignored, 'hgignore': hgignore, + 'missing': missing, 'modified': modified, 'portable': portable, 'removed': removed, @@ -511,7 +521,7 @@ # do we need status info? if (_intree(['modified', 'added', 'removed', 'deleted', - 'unknown', 'ignored', 'clean'], tree) or + 'missing', 'unknown', 'ignored', 'clean'], tree) or # Using matchctx.existing() on a workingctx requires us to check # for deleted files. (ctx.rev() is None and _intree(_existingcallers, tree))): diff -r e240e914d226 -r 8f016345e6b0 mercurial/help.py --- a/mercurial/help.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/help.py Fri Dec 18 14:40:11 2015 -0600 @@ -27,11 +27,12 @@ '''return a text listing of the given extensions''' rst = [] if exts: - rst.append('\n%s\n\n' % header) for name, desc in sorted(exts.iteritems()): if not showdeprecated and any(w in desc for w in _exclkeywords): continue rst.append('%s:%s: %s\n' % (' ' * indent, name, desc)) + if rst: + rst.insert(0, '\n%s\n\n' % header) return rst def extshelp(ui): @@ -83,6 +84,13 @@ if notomitted: rst.append('\n\n.. container:: notomitted\n\n %s\n\n' % notomitted) +def filtercmd(ui, cmd, kw, doc): + if not ui.debugflag and cmd.startswith("debug") and kw != "debug": + return True + if not ui.verbose and doc and any(w in doc for w in _exclkeywords): + return True + return False + def topicmatch(ui, kw): """Return help topics matching kw. 
@@ -115,32 +123,38 @@ doclines = docs.splitlines() if doclines: summary = doclines[0] - cmdname = cmd.split('|')[0].lstrip('^') + cmdname = cmd.partition('|')[0].lstrip('^') + if filtercmd(ui, cmdname, kw, docs): + continue results['commands'].append((cmdname, summary)) for name, docs in itertools.chain( extensions.enabled(False).iteritems(), extensions.disabled().iteritems()): # extensions.load ignores the UI argument mod = extensions.load(None, name, '') - name = name.split('.')[-1] + name = name.rpartition('.')[-1] if lowercontains(name) or lowercontains(docs): # extension docs are already translated results['extensions'].append((name, docs.splitlines()[0])) for cmd, entry in getattr(mod, 'cmdtable', {}).iteritems(): if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])): - cmdname = cmd.split('|')[0].lstrip('^') + cmdname = cmd.partition('|')[0].lstrip('^') if entry[0].__doc__: cmddoc = gettext(entry[0].__doc__).splitlines()[0] else: cmddoc = _('(no help text available)') + if filtercmd(ui, cmdname, kw, cmddoc): + continue results['extensioncommands'].append((cmdname, cmddoc)) return results -def loaddoc(topic): +def loaddoc(topic, subdir=None): """Return a delayed loader for help/topic.txt.""" def loader(ui): docdir = os.path.join(util.datapath, 'help') + if subdir: + docdir = os.path.join(docdir, subdir) path = os.path.join(docdir, topic + ".txt") doc = gettext(util.readfile(path)) for rewriter in helphooks.get(topic, []): @@ -149,6 +163,21 @@ return loader +internalstable = sorted([ + (['bundles'], _('container for exchange of repository data'), + loaddoc('bundles', subdir='internals')), + (['changegroups'], _('representation of revlog data'), + loaddoc('changegroups', subdir='internals')), +]) + +def internalshelp(ui): + """Generate the index for the "internals" topic.""" + lines = [] + for names, header, doc in internalstable: + lines.append(' :%s: %s\n' % (names[0], header)) + + return ''.join(lines) + helptable = sorted([ (["config", "hgrc"], _("Configuration Files"), loaddoc('config')), (["dates"], _("Date Formats"), loaddoc('dates')), @@ -175,8 +204,15 @@ (["phases"], _("Working with Phases"), loaddoc('phases')), (['scripting'], _('Using Mercurial from scripts and automation'), loaddoc('scripting')), + (['internals'], _("Technical implementation topics"), + internalshelp), ]) +# Maps topics with sub-topics to a list of their sub-topics. +subtopics = { + 'internals': internalstable, +} + # Map topics to lists of callable taking the current topic help and # returning the updated version helphooks = {} @@ -226,7 +262,7 @@ addtopicsymbols('hgweb', '.. webcommandsmarker', webcommands.commands, dedent=True) -def help_(ui, name, unknowncmd=False, full=True, **opts): +def help_(ui, name, unknowncmd=False, full=True, subtopic=None, **opts): ''' Generate the help for 'name' as unformatted restructured text. If 'name' is None, describe the commands available. 
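The ``subtopics`` table introduced above is an ordinary module-level mapping, so it can be extended at runtime. A minimal sketch, assuming a hypothetical extension that wants to hang its own page off the ``internals`` topic (the sub-topic name and its help file are made up for illustration)::

    from mercurial import help
    from mercurial.i18n import _

    # Append a made-up entry to the "internals" index.  It has the same
    # shape as the internalstable entries: (names, header, lazy loader).
    # loaddoc() only reads help/internals/widgets.txt when the topic is
    # actually rendered, so a missing file fails late, not at registration.
    help.subtopics['internals'].append(
        (['widgets'], _('imaginary widget storage format'),
         help.loaddoc('widgets', subdir='internals')))

Because ``subtopics['internals']`` is the same list object as ``internalstable``, the entry also shows up in the index that ``internalshelp()`` generates.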
@@ -234,7 +270,7 @@ import commands # avoid cycle - def helpcmd(name): + def helpcmd(name, subtopic=None): try: aliases, entry = cmdutil.findcmd(name, commands.table, strict=unknowncmd) @@ -318,7 +354,7 @@ return rst - def helplist(select=None): + def helplist(select=None, **opts): # list of commands if name == "shortlist": header = _('basic commands:\n\n') @@ -330,7 +366,7 @@ h = {} cmds = {} for c, e in commands.table.iteritems(): - f = c.split("|", 1)[0] + f = c.partition("|")[0] if select and not select(f): continue if (not select and name != 'shortlist' and @@ -339,10 +375,8 @@ if name == "shortlist" and not f.startswith("^"): continue f = f.lstrip("^") - if not ui.debugflag and f.startswith("debug") and name != "debug": - continue doc = e[0].__doc__ - if not ui.verbose and doc and any(w in doc for w in _exclkeywords): + if filtercmd(ui, f, name, doc): continue doc = gettext(doc) if not doc: @@ -366,7 +400,9 @@ else: rst.append(' :%s: %s\n' % (f, h[f])) - if not name: + ex = opts.get + anyopts = (ex('keyword') or not (ex('command') or ex('extension'))) + if not name and anyopts: exts = listexts(_('enabled extensions:'), extensions.enabled()) if exts: rst.append('\n') @@ -403,12 +439,20 @@ % (name and " " + name or "")) return rst - def helptopic(name): - for names, header, doc in helptable: - if name in names: - break - else: - raise error.UnknownCommand(name) + def helptopic(name, subtopic=None): + # Look for sub-topic entry first. + header, doc = None, None + if subtopic and name in subtopics: + for names, header, doc in subtopics[name]: + if subtopic in names: + break + + if not header: + for names, header, doc in helptable: + if name in names: + break + else: + raise error.UnknownCommand(name) rst = [minirst.section(header)] @@ -431,7 +475,7 @@ pass return rst - def helpext(name): + def helpext(name, subtopic=None): try: mod = extensions.find(name) doc = gettext(mod.__doc__) or _('no help text available') @@ -445,7 +489,7 @@ head, tail = doc, "" else: head, tail = doc.split('\n', 1) - rst = [_('%s extension - %s\n\n') % (name.split('.')[-1], head)] + rst = [_('%s extension - %s\n\n') % (name.rpartition('.')[-1], head)] if tail: rst.extend(tail.splitlines(True)) rst.append('\n') @@ -460,20 +504,21 @@ ct = mod.cmdtable except AttributeError: ct = {} - modcmds = set([c.split('|', 1)[0] for c in ct]) + modcmds = set([c.partition('|')[0] for c in ct]) rst.extend(helplist(modcmds.__contains__)) else: rst.append(_('(use "hg help extensions" for information on enabling' ' extensions)\n')) return rst - def helpextcmd(name): + def helpextcmd(name, subtopic=None): cmd, ext, mod = extensions.disabledcmd(ui, name, ui.configbool('ui', 'strict')) doc = gettext(mod.__doc__).splitlines()[0] rst = listexts(_("'%s' is provided by the following " - "extension:") % cmd, {ext: doc}, indent=4) + "extension:") % cmd, {ext: doc}, indent=4, + showdeprecated=True) rst.append('\n') rst.append(_('(use "hg help extensions" for information on enabling ' 'extensions)\n')) @@ -482,8 +527,8 @@ rst = [] kw = opts.get('keyword') - if kw: - matches = topicmatch(ui, name) + if kw or name is None and any(opts[o] for o in opts): + matches = topicmatch(ui, name or '') helpareas = [] if opts.get('extension'): helpareas += [('extensions', _('Extensions'))] @@ -515,7 +560,7 @@ queries = (helptopic, helpcmd, helpext, helpextcmd) for f in queries: try: - rst = f(name) + rst = f(name, subtopic) break except error.UnknownCommand: pass @@ -530,6 +575,6 @@ # program name if not ui.quiet: rst = [_("Mercurial Distributed SCM\n"), 
'\n'] - rst.extend(helplist()) + rst.extend(helplist(None, **opts)) return ''.join(rst) diff -r e240e914d226 -r 8f016345e6b0 mercurial/help/config.txt --- a/mercurial/help/config.txt Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/help/config.txt Fri Dec 18 14:40:11 2015 -0600 @@ -666,6 +666,14 @@ ``format`` ---------- +``usegeneraldelta`` + Enable or disable the "generaldelta" repository format which improves + repository compression by allowing "revlog" to store delta against arbitrary + revision instead of the previous stored one. This provides significant + improvement for repositories with branches. Enabled by default. Disabling + this option ensures that the on-disk format of newly created repository will + be compatible with Mercurial before version 1.9. + ``usestore`` Enable or disable the "store" repository format which improves compatibility with systems that fold case or otherwise mangle @@ -1119,29 +1127,43 @@ ``paths`` --------- -Assigns symbolic names to repositories. The left side is the -symbolic name, and the right gives the directory or URL that is the -location of the repository. Default paths can be declared by setting -the following entries. +Assigns symbolic names and behavior to repositories. + +Options are symbolic names defining the URL or directory that is the +location of the repository. Example:: + + [paths] + my_server = https://example.com/my_repo + local_path = /home/me/repo + +These symbolic names can be used from the command line. To pull +from ``my_server``: :hg:`pull my_server`. To push to ``local_path``: +:hg:`push local_path`. + +Options containing colons (``:``) denote sub-options that can influence +behavior for that specific path. Example:: + + [paths] + my_server = https://example.com/my_path + my_server:pushurl = ssh://example.com/my_path + +The following sub-options can be defined: + +``pushurl`` + The URL to use for push operations. If not defined, the location + defined by the path's main entry is used. + +The following special named paths exist: ``default`` - Directory or URL to use when pulling if no source is specified. - (default: repository from which the current repository was cloned) + The URL or directory to use when no source or remote is specified. + + :hg:`clone` will automatically define this path to the location the + repository was cloned from. ``default-push`` - Optional. Directory or URL to use when pushing if no destination - is specified. - -Custom paths can be defined by assigning the path to a name that later can be -used from the command line. Example:: - - [paths] - my_path = http://example.com/path - -To push to the path defined in ``my_path`` run the command:: - - hg push my_path - + (deprecated) The URL or directory for the default :hg:`push` location. + ``default:pushurl`` should be used instead. ``phases`` ---------- @@ -1311,6 +1333,23 @@ Instruct HTTP clients not to send request headers longer than this many bytes. (default: 1024) +``bundle1`` + Whether to allow clients to push and pull using the legacy bundle1 + exchange format. (default: True) + +``bundle1.push`` + Whether to allow clients to push using the legacy bundle1 exchange + format. (default: True) + +``bundle1.pull`` + Whether to allow clients to pull using the legacy bundle1 exchange + format. (default: True) + + Large repositories using the *generaldelta* storage format should + consider setting this option because converting *generaldelta* + repositories to the exchange format required by the bundle1 data + format can consume a lot of CPU. 
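Taken together, the path sub-options and the ``format``/``server`` settings documented above could be combined in an ``hgrc`` along these lines; the host name, paths, and choice of values are placeholders, not recommendations::

    [paths]
    # pull over HTTPS, push over SSH via the pushurl sub-option
    default = https://hg.example.com/project
    default:pushurl = ssh://hg@hg.example.com/project

    [format]
    # generaldelta is on by default; set to False only if newly created
    # repositories must stay readable by Mercurial releases before 1.9
    usegeneraldelta = True

    [server]
    # refuse legacy bundle1 pulls while still allowing bundle1 pushes
    bundle1.pull = False
    bundle1.push = True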
+
 ``smtp``
 --------

@@ -1446,6 +1485,10 @@
     Encoding to try if it's not possible to decode the changelog using
     UTF-8. (default: ISO-8859-1)

+``graphnodetemplate``
+    The template used to print changeset nodes in an ASCII revision graph.
+    (default: ``{graphnode}``)
+
 ``ignore``
     A file to read per-user ignore patterns from. This file should be
     in the same format as a repository-wide .hgignore file. Filenames
@@ -1488,6 +1531,10 @@
     markers is different from the encoding of the merged files, serious
     problems may occur.

+``origbackuppath``
+    The path to a directory used to store generated .orig files. If the path is
+    not a directory, one will be created.
+
 ``patch``
     An optional external tool that ``hg import`` and some extensions will
     use for applying patches. By default Mercurial uses an
diff -r e240e914d226 -r 8f016345e6b0 mercurial/help/internals/bundles.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/help/internals/bundles.txt Fri Dec 18 14:40:11 2015 -0600
@@ -0,0 +1,97 @@
+Bundles
+=======
+
+A bundle is a container for repository data.
+
+Bundles are used as standalone files as well as the interchange format
+over the wire protocol used when two Mercurial peers communicate with
+each other.
+
+Headers
+-------
+
+Bundles produced since Mercurial 0.7 (September 2005) have a 4 byte
+header identifying the major bundle type. The header always begins with
+``HG`` and the following 2 bytes indicate the bundle type/version. Some
+bundle types have additional data after this 4 byte header.
+
+The following sections describe each bundle header/type.
+
+HG10
+----
+
+``HG10`` headers indicate a *changegroup bundle*. This is the original
+bundle format, so it is sometimes referred to as *bundle1*. It has been
+present since version 0.7 (released September 2005).
+
+This header is followed by 2 bytes indicating the compression algorithm
+used for data that follows. All subsequent data following this
+compression identifier is compressed according to the algorithm/method
+specified.
+
+Supported algorithms include the following.
+
+``BZ``
+    *bzip2* compression.
+
+    Bzip2 compressors emit a leading ``BZ`` header. Mercurial uses this
+    leading ``BZ`` as part of the bundle header. Therefore consumers
+    of bzip2 bundles need to *seed* the bzip2 decompressor with ``BZ`` or
+    seek the input stream back to the beginning of the algorithm component
+    of the bundle header so that decompressor input is valid. This behavior
+    is unique among supported compression algorithms.
+
+    Supported since version 0.7 (released September 2005).
+
+``GZ``
+    *zlib* compression.
+
+    Supported since version 0.9.2 (released December 2006).
+
+``UN``
+    *Uncompressed* or no compression. Unmodified changegroup data follows.
+
+    Supported since version 0.9.2 (released December 2006).
+
+3rd party extensions may implement their own compression. However, no
+authority reserves values for their compression algorithm identifiers.
+
+HG2X
+----
+
+``HG2X`` headers (where ``X`` is any value) denote a *bundle2* bundle.
+Bundle2 bundles are a container format for various kinds of repository
+data and capabilities, beyond changegroup data (which was the only data
+supported by ``HG10`` bundles).
+
+``HG20`` is currently the only defined bundle2 version.
+
+The ``HG20`` format is not yet documented here. See the inline comments
+in ``mercurial/exchange.py`` for now.
+
+Initial ``HG20`` support was added in Mercurial 3.0 (released May
+2014). However, bundle2 bundles were hidden behind an experimental flag
+until version 3.5 (released August 2015), when they were enabled in the
+wire protocol. Various commands (including ``hg bundle``) did not
+support generating bundle2 files until Mercurial 3.6 (released November
+2015).
+
+HGS1
+----
+
+*Experimental*
+
+A ``HGS1`` header indicates a *streaming clone bundle*. This is a bundle
+that contains raw revlog data from a repository store. (Typically revlog
+data is exchanged in the form of changegroups.)
+
+The purpose of *streaming clone bundles* is to *clone* repository data
+very efficiently.
+
+The ``HGS1`` header is always followed by 2 bytes indicating a
+compression algorithm of the data that follows. Only ``UN``
+(uncompressed data) is currently allowed.
+
+``HGS1UN`` support was added as an experimental feature in version 3.6
+(released November 2015) as part of the initial offering of the *clone
+bundles* feature.
diff -r e240e914d226 -r 8f016345e6b0 mercurial/help/internals/changegroups.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/help/internals/changegroups.txt Fri Dec 18 14:40:11 2015 -0600
@@ -0,0 +1,142 @@
+Changegroups
+============
+
+Changegroups are representations of repository revlog data, specifically
+the changelog, manifest, and filelogs.
+
+There are 2 versions of changegroups: ``1`` and ``2``. From a
+high-level, they are almost exactly the same, with the only difference
+being a header on entries in the changeset segment.
+
+A changegroup consists of 3 logical segments::
+
+  +---------------------------------+
+  |           |          |          |
+  | changeset | manifest | filelogs |
+  |           |          |          |
+  +---------------------------------+
+
+The principal building block of each segment is a *chunk*. A *chunk*
+is a framed piece of data::
+
+  +---------------------------------------+
+  |           |                           |
+  |  length   |           data            |
+  | (32 bits) |          bytes            |
+  |           |                           |
+  +---------------------------------------+
+
+Each chunk starts with a 32-bit big-endian signed integer indicating
+the length of the raw data that follows.
+
+There is a special case chunk that has 0 length (``0x00000000``). We
+call this an *empty chunk*.
+
+Delta Groups
+------------
+
+A *delta group* expresses the content of a revlog as a series of deltas,
+or patches against previous revisions.
+
+Delta groups consist of 0 or more *chunks* followed by the *empty chunk*
+to signal the end of the delta group::
+
+  +-----------------------------------------------------------------------+
+  |               |             |               |             |           |
+  | chunk0 length | chunk0 data | chunk1 length | chunk1 data |    0x0    |
+  |   (32 bits)   |  (various)  |   (32 bits)   |  (various)  | (32 bits) |
+  |               |             |               |             |           |
+  +-----------------------------------------------------------+-----------+
+
+Each *chunk*'s data consists of the following::
+
+  +-----------------------------------------+
+  |              |              |           |
+  | delta header | mdiff header |   delta   |
+  |  (various)   |  (12 bytes)  | (various) |
+  |              |              |           |
+  +-----------------------------------------+
+
+The *length* field is the byte length of the remaining 3 logical pieces
+of data. The *delta* is a diff from an existing entry in the changelog.
+
+The *delta header* is different between versions ``1`` and ``2`` of the
+changegroup format.
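The chunk framing described so far is already enough to walk a delta group; the version-specific delta header layouts follow next. A rough reader sketch, written against the description above rather than Mercurial's own implementation (which lives in ``mercurial/changegroup.py``); the stream argument is any file-like object positioned at the start of a delta group::

    import struct

    def _readexactly(stream, n):
        # Read exactly n bytes; a real reader would handle errors better.
        data = stream.read(n)
        if len(data) != n:
            raise ValueError('incomplete chunk')
        return data

    def iterchunks(stream):
        # Yield the data of each chunk in a delta group.  Every chunk starts
        # with a 32-bit big-endian signed length; a zero length (the *empty
        # chunk*) terminates the group.
        while True:
            length = struct.unpack('>l', _readexactly(stream, 4))[0]
            if length == 0:
                return
            yield _readexactly(stream, length)

    def splitchunk(data, deltaheadersize):
        # A chunk's data is a delta header, a 12-byte mdiff header (offset,
        # old length, new length), then the delta itself.  deltaheadersize
        # depends on the changegroup version; see the layouts below.
        header = data[:deltaheadersize]
        offset, oldlen, newlen = struct.unpack(
            '>lll', data[deltaheadersize:deltaheadersize + 12])
        return header, (offset, oldlen, newlen), data[deltaheadersize + 12:]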
+ +Version 1:: + + +------------------------------------------------------+ + | | | | | + | node | p1 node | p2 node | link node | + | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | + | | | | | + +------------------------------------------------------+ + +Version 2:: + + +------------------------------------------------------------------+ + | | | | | | + | node | p1 node | p2 node | base node | link node | + | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | + | | | | | | + +------------------------------------------------------------------+ + +The *mdiff header* consists of 3 32-bit big-endian signed integers +describing offsets at which to apply the following delta content:: + + +-------------------------------------+ + | | | | + | offset | old length | new length | + | (32 bits) | (32 bits) | (32 bits) | + | | | | + +-------------------------------------+ + +In version 1, the delta is always applied against the previous node from +the changegroup or the first parent if this is the first entry in the +changegroup. + +In version 2, the delta base node is encoded in the entry in the +changegroup. This allows the delta to be expressed against any parent, +which can result in smaller deltas and more efficient encoding of data. + +Changeset Segment +----------------- + +The *changeset segment* consists of a single *delta group* holding +changelog data. It is followed by an *empty chunk* to denote the +boundary to the *manifests segment*. + +Manifest Segment +---------------- + +The *manifest segment* consists of a single *delta group* holding +manifest data. It is followed by an *empty chunk* to denote the boundary +to the *filelogs segment*. + +Filelogs Segment +---------------- + +The *filelogs* segment consists of multiple sub-segments, each +corresponding to an individual file whose data is being described:: + + +--------------------------------------+ + | | | | | + | filelog0 | filelog1 | filelog2 | ... | + | | | | | + +--------------------------------------+ + +The final filelog sub-segment is followed by an *empty chunk* to denote +the end of the segment and the overall changegroup. + +Each filelog sub-segment consists of the following:: + + +------------------------------------------+ + | | | | + | filename size | filename | delta group | + | (32 bits) | (various) | (various) | + | | | | + +------------------------------------------+ + +That is, a *chunk* consisting of the filename (not terminated or padded) +followed by N chunks constituting the *delta group* for this file. + diff -r e240e914d226 -r 8f016345e6b0 mercurial/hg.py --- a/mercurial/hg.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hg.py Fri Dec 18 14:40:11 2015 -0600 @@ -235,13 +235,7 @@ destvfs.write('sharedpath', sharedpath) r = repository(ui, destwvfs.base) - - default = srcrepo.ui.config('paths', 'default') - if default: - fp = r.vfs("hgrc", "w", text=True) - fp.write("[paths]\n") - fp.write("default = %s\n" % default) - fp.close() + postshare(srcrepo, r, bookmarks=bookmarks) if update: r.ui.status(_("updating working directory\n")) @@ -257,8 +251,24 @@ continue _update(r, uprev) +def postshare(sourcerepo, destrepo, bookmarks=True): + """Called after a new shared repo is created. + + The new repo only has a requirements file and pointer to the source. + This function configures additional shared data. + + Extensions can wrap this function and write additional entries to + destrepo/.hg/shared to indicate additional pieces of data to be shared. 
+ """ + default = sourcerepo.ui.config('paths', 'default') + if default: + fp = destrepo.vfs("hgrc", "w", text=True) + fp.write("[paths]\n") + fp.write("default = %s\n" % default) + fp.close() + if bookmarks: - fp = r.vfs('shared', 'w') + fp = destrepo.vfs('shared', 'w') fp.write('bookmarks\n') fp.close() @@ -546,13 +556,22 @@ "support clone by revision")) revs = [srcpeer.lookup(r) for r in rev] checkout = revs[0] - if destpeer.local(): + local = destpeer.local() + if local: if not stream: if pull: stream = False else: stream = None - destpeer.local().clone(srcpeer, heads=revs, stream=stream) + # internal config: ui.quietbookmarkmove + quiet = local.ui.backupconfig('ui', 'quietbookmarkmove') + try: + local.ui.setconfig( + 'ui', 'quietbookmarkmove', True, 'clone') + exchange.pull(local, srcpeer, revs, + streamclonerequested=stream) + finally: + local.ui.restoreconfig(quiet) elif srcrepo: exchange.push(srcrepo, destpeer, revs=revs, bookmarks=srcrepo._bookmarks.keys()) @@ -618,7 +637,9 @@ srcpeer.close() return srcpeer, destpeer -def _showstats(repo, stats): +def _showstats(repo, stats, quietempty=False): + if quietempty and not any(stats): + return repo.ui.status(_("%d files updated, %d files merged, " "%d files removed, %d files unresolved\n") % stats) @@ -628,13 +649,13 @@ When overwrite is set, changes are clobbered, merged else returns stats (see pydoc mercurial.merge.applyupdates)""" - return mergemod.update(repo, node, False, overwrite, None, + return mergemod.update(repo, node, False, overwrite, labels=['working copy', 'destination']) -def update(repo, node): +def update(repo, node, quietempty=False): """update the working directory to node, merging linear changes""" stats = updaterepo(repo, node, False) - _showstats(repo, stats) + _showstats(repo, stats, quietempty) if stats[3]: repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n")) return stats[3] > 0 @@ -642,18 +663,18 @@ # naming conflict in clone() _update = update -def clean(repo, node, show_stats=True): +def clean(repo, node, show_stats=True, quietempty=False): """forcibly switch the working directory to node, clobbering changes""" stats = updaterepo(repo, node, True) util.unlinkpath(repo.join('graftstate'), ignoremissing=True) if show_stats: - _showstats(repo, stats) + _showstats(repo, stats, quietempty) return stats[3] > 0 def merge(repo, node, force=None, remind=True): """Branch merge with node, resolving changes. Return true if any unresolved conflicts.""" - stats = mergemod.update(repo, node, True, force, False) + stats = mergemod.update(repo, node, True, force) _showstats(repo, stats) if stats[3]: repo.ui.status(_("use 'hg resolve' to retry unresolved file merges " diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/__init__.py --- a/mercurial/hgweb/__init__.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/__init__.py Fri Dec 18 14:40:11 2015 -0600 @@ -6,8 +6,22 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import + import os -import hgweb_mod, hgwebdir_mod + +from ..i18n import _ + +from .. import ( + error, + util, +) + +from . 
import ( + hgweb_mod, + hgwebdir_mod, + server, +) def hgweb(config, name=None, baseui=None): '''create an hgweb wsgi object @@ -29,3 +43,83 @@ def hgwebdir(config, baseui=None): return hgwebdir_mod.hgwebdir(config, baseui=baseui) +class httpservice(object): + def __init__(self, ui, app, opts): + self.ui = ui + self.app = app + self.opts = opts + + def init(self): + util.setsignalhandler() + self.httpd = server.create_server(self.ui, self.app) + + if self.opts['port'] and not self.ui.verbose: + return + + if self.httpd.prefix: + prefix = self.httpd.prefix.strip('/') + '/' + else: + prefix = '' + + port = ':%d' % self.httpd.port + if port == ':80': + port = '' + + bindaddr = self.httpd.addr + if bindaddr == '0.0.0.0': + bindaddr = '*' + elif ':' in bindaddr: # IPv6 + bindaddr = '[%s]' % bindaddr + + fqaddr = self.httpd.fqaddr + if ':' in fqaddr: + fqaddr = '[%s]' % fqaddr + if self.opts['port']: + write = self.ui.status + else: + write = self.ui.write + write(_('listening at http://%s%s/%s (bound to %s:%d)\n') % + (fqaddr, port, prefix, bindaddr, self.httpd.port)) + self.ui.flush() # avoid buffering of status message + + def run(self): + self.httpd.serve_forever() + +def createservice(ui, repo, opts): + # this way we can check if something was given in the command-line + if opts.get('port'): + opts['port'] = util.getport(opts.get('port')) + + alluis = set([ui]) + if repo: + baseui = repo.baseui + alluis.update([repo.baseui, repo.ui]) + else: + baseui = ui + webconf = opts.get('web_conf') or opts.get('webdir_conf') + if webconf: + # load server settings (e.g. web.port) to "copied" ui, which allows + # hgwebdir to reload webconf cleanly + servui = ui.copy() + servui.readconfig(webconf, sections=['web']) + alluis.add(servui) + else: + servui = ui + + optlist = ("name templates style address port prefix ipv6" + " accesslog errorlog certificate encoding") + for o in optlist.split(): + val = opts.get(o, '') + if val in (None, ''): # should check against default options instead + continue + for u in alluis: + u.setconfig("web", o, val, 'serve') + + if webconf: + app = hgwebdir_mod.hgwebdir(webconf, baseui=baseui) + else: + if not repo: + raise error.RepoError(_("there is no Mercurial repository" + " here (.hg not found)")) + app = hgweb_mod.hgweb(repo, baseui=baseui) + return httpservice(servui, app, opts) diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/common.py --- a/mercurial/hgweb/common.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/common.py Fri Dec 18 14:40:11 2015 -0600 @@ -6,7 +6,12 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import errno, mimetypes, os +from __future__ import absolute_import + +import BaseHTTPServer +import errno +import mimetypes +import os HTTP_OK = 200 HTTP_NOT_MODIFIED = 304 @@ -102,8 +107,7 @@ raise AttributeError def _statusmessage(code): - from BaseHTTPServer import BaseHTTPRequestHandler - responses = BaseHTTPRequestHandler.responses + responses = BaseHTTPServer.BaseHTTPRequestHandler.responses return responses.get(code, ('Error', 'Unknown error'))[0] def statusmessage(code, message=None): diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/hgweb_mod.py --- a/mercurial/hgweb/hgweb_mod.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/hgweb_mod.py Fri Dec 18 14:40:11 2015 -0600 @@ -6,15 +6,41 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
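Because the ``hgweb`` object created above is a plain WSGI callable, it can also be mounted in any WSGI server instead of the built-in ``httpservice``. A minimal sketch, assuming the ``config`` argument is a local repository path (path, name, and port are placeholders)::

    from wsgiref.simple_server import make_server

    from mercurial.hgweb import hgweb

    # hgweb() returns a WSGI application serving a single repository.
    application = hgweb('/path/to/repo', name='demo')
    make_server('127.0.0.1', 8000, application).serve_forever()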
+from __future__ import absolute_import + import contextlib import os -from mercurial import ui, hg, hook, error, encoding, templater, util, repoview -from mercurial.templatefilters import websub -from common import ErrorResponse, permhooks, caching -from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST -from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR -from request import wsgirequest -import webcommands, protocol, webutil + +from .common import ( + ErrorResponse, + HTTP_BAD_REQUEST, + HTTP_NOT_FOUND, + HTTP_NOT_MODIFIED, + HTTP_OK, + HTTP_SERVER_ERROR, + caching, + permhooks, +) +from .request import wsgirequest + +from .. import ( + encoding, + error, + hg, + hook, + repoview, + templatefilters, + templater, + ui as uimod, + util, +) + +from . import ( + protocol, + webcommands, + webutil, + wsgicgi, +) perms = { 'changegroup': 'pull', @@ -158,7 +184,7 @@ or req.url.strip('/') or self.repo.root) def websubfilter(text): - return websub(text, self.websubtable) + return templatefilters.websub(text, self.websubtable) # create the templater @@ -195,7 +221,7 @@ if baseui: u = baseui.copy() else: - u = ui.ui() + u = uimod.ui() r = hg.repository(u, repo) else: # we trust caller to give us a private copy @@ -260,7 +286,6 @@ if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): raise RuntimeError("This function is only intended to be " "called while running as a CGI script.") - import mercurial.hgweb.wsgicgi as wsgicgi wsgicgi.launch(self) def __call__(self, env, respond): @@ -304,8 +329,8 @@ parts = parts[len(repo_parts):] query = '/'.join(parts) else: - query = req.env['QUERY_STRING'].split('&', 1)[0] - query = query.split(';', 1)[0] + query = req.env['QUERY_STRING'].partition('&')[0] + query = query.partition(';')[0] # process this if it's a protocol request # protocol bits don't need to create any URLs diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/hgwebdir_mod.py --- a/mercurial/hgweb/hgwebdir_mod.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/hgwebdir_mod.py Fri Dec 18 14:40:11 2015 -0600 @@ -6,15 +6,42 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import os, re, time -from mercurial.i18n import _ -from mercurial import ui, hg, scmutil, util, templater -from mercurial import error, encoding -from common import ErrorResponse, get_mtime, staticfile, paritygen, ismember, \ - get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR -from hgweb_mod import hgweb, makebreadcrumb -from request import wsgirequest -import webutil +from __future__ import absolute_import + +import os +import re +import time + +from ..i18n import _ + +from .common import ( + ErrorResponse, + HTTP_NOT_FOUND, + HTTP_OK, + HTTP_SERVER_ERROR, + get_contact, + get_mtime, + ismember, + paritygen, + staticfile, +) +from .request import wsgirequest + +from .. import ( + encoding, + error, + hg, + scmutil, + templater, + ui as uimod, + util, +) + +from . 
import ( + hgweb_mod, + webutil, + wsgicgi, +) def cleannames(items): return [(util.pconvert(name).strip('/'), path) for name, path in items] @@ -108,7 +135,7 @@ if self.baseui: u = self.baseui.copy() else: - u = ui.ui() + u = uimod.ui() u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') u.setconfig('ui', 'nontty', 'true', 'hgwebdir') # displaying bundling progress bar while serving feels wrong and may @@ -161,7 +188,6 @@ if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): raise RuntimeError("This function is only intended to be " "called while running as a CGI script.") - import mercurial.hgweb.wsgicgi as wsgicgi wsgicgi.launch(self) def __call__(self, env, respond): @@ -231,7 +257,7 @@ try: # ensure caller gets private copy of ui repo = hg.repository(self.ui.copy(), real) - return hgweb(repo).run_wsgi(req) + return hgweb_mod.hgweb(repo).run_wsgi(req) except IOError as inst: msg = inst.strerror raise ErrorResponse(HTTP_SERVER_ERROR, msg) @@ -426,7 +452,7 @@ self.updatereqenv(req.env) return tmpl("index", entries=entries, subdir=subdir, - pathdef=makebreadcrumb('/' + subdir, self.prefix), + pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), sortcolumn=sortcolumn, descending=descending, **dict(sort)) diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/protocol.py --- a/mercurial/hgweb/protocol.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/protocol.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,9 +5,21 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import cgi, cStringIO, zlib, urllib -from mercurial import util, wireproto -from common import HTTP_OK +from __future__ import absolute_import + +import cStringIO +import cgi +import urllib +import zlib + +from .common import ( + HTTP_OK, +) + +from .. import ( + util, + wireproto, +) HGTYPE = 'application/mercurial-0.1' HGERRTYPE = 'application/hg-error' diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/request.py --- a/mercurial/hgweb/request.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/request.py Fri Dec 18 14:40:11 2015 -0600 @@ -6,9 +6,21 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import socket, cgi, errno -from mercurial import util -from common import ErrorResponse, statusmessage, HTTP_NOT_MODIFIED +from __future__ import absolute_import + +import cgi +import errno +import socket + +from .common import ( + ErrorResponse, + HTTP_NOT_MODIFIED, + statusmessage, +) + +from .. import ( + util, +) shortcuts = { 'cl': [('cmd', ['changelog']), ('rev', None)], @@ -80,7 +92,7 @@ if self._start_response is not None: self.headers.append(('Content-Type', type)) if filename: - filename = (filename.split('/')[-1] + filename = (filename.rpartition('/')[-1] .replace('\\', '\\\\').replace('"', '\\"')) self.headers.append(('Content-Disposition', 'inline; filename="%s"' % filename)) diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/server.py --- a/mercurial/hgweb/server.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/server.py Fri Dec 18 14:40:11 2015 -0600 @@ -6,10 +6,27 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
-import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback -from mercurial import util, error -from mercurial.hgweb import common -from mercurial.i18n import _ +from __future__ import absolute_import + +import BaseHTTPServer +import SocketServer +import errno +import os +import socket +import sys +import traceback +import urllib + +from ..i18n import _ + +from .. import ( + error, + util, +) + +from . import ( + common, +) def _splitURI(uri): """Return path and query that has been split from uri @@ -197,47 +214,6 @@ self.wfile.write('0\r\n\r\n') self.wfile.flush() -class _httprequesthandleropenssl(_httprequesthandler): - """HTTPS handler based on pyOpenSSL""" - - url_scheme = 'https' - - @staticmethod - def preparehttpserver(httpserver, ssl_cert): - try: - import OpenSSL - OpenSSL.SSL.Context - except ImportError: - raise error.Abort(_("SSL support is unavailable")) - ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD) - ctx.use_privatekey_file(ssl_cert) - ctx.use_certificate_file(ssl_cert) - sock = socket.socket(httpserver.address_family, httpserver.socket_type) - httpserver.socket = OpenSSL.SSL.Connection(ctx, sock) - httpserver.server_bind() - httpserver.server_activate() - - def setup(self): - self.connection = self.request - self.rfile = socket._fileobject(self.request, "rb", self.rbufsize) - self.wfile = socket._fileobject(self.request, "wb", self.wbufsize) - - def do_write(self): - import OpenSSL - try: - _httprequesthandler.do_write(self) - except OpenSSL.SSL.SysCallError as inst: - if inst.args[0] != errno.EPIPE: - raise - - def handle_one_request(self): - import OpenSSL - try: - _httprequesthandler.handle_one_request(self) - except (OpenSSL.SSL.SysCallError, OpenSSL.SSL.ZeroReturnError): - self.close_connection = True - pass - class _httprequesthandlerssl(_httprequesthandler): """HTTPS handler based on Python's ssl module""" @@ -260,8 +236,8 @@ self.wfile = socket._fileobject(self.request, "wb", self.wbufsize) try: - from threading import activeCount - activeCount() # silence pyflakes + import threading + threading.activeCount() # silence pyflakes and bypass demandimport _mixin = SocketServer.ThreadingMixIn except ImportError: if util.safehasattr(os, "fork"): @@ -311,10 +287,7 @@ def create_server(ui, app): if ui.config('web', 'certificate'): - if sys.version_info >= (2, 6): - handler = _httprequesthandlerssl - else: - handler = _httprequesthandleropenssl + handler = _httprequesthandlerssl else: handler = _httprequesthandler diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/webcommands.py --- a/mercurial/hgweb/webcommands.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/webcommands.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,18 +5,43 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
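For reference, the ``ssl``-based handler kept above is only selected when ``web.certificate`` is set; a configuration sketch (the path is a placeholder) might look like::

    [web]
    # PEM file containing both the certificate and the private key
    certificate = /etc/mercurial/hgserve.pem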
-import os, mimetypes, re, cgi, copy -import webutil -from mercurial import error, encoding, archival, templater, templatefilters -from mercurial.node import short, hex -from mercurial import util -from common import paritygen, staticfile, get_contact, ErrorResponse -from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND -from mercurial import graphmod, patch -from mercurial import scmutil -from mercurial.i18n import _ -from mercurial.error import ParseError, RepoLookupError, Abort -from mercurial import revset +from __future__ import absolute_import + +import cgi +import copy +import mimetypes +import os +import re + +from ..i18n import _ +from ..node import hex, short + +from .common import ( + ErrorResponse, + HTTP_FORBIDDEN, + HTTP_NOT_FOUND, + HTTP_OK, + get_contact, + paritygen, + staticfile, +) + +from .. import ( + archival, + encoding, + error, + graphmod, + patch, + revset, + scmutil, + templatefilters, + templater, + util, +) + +from . import ( + webutil, +) __all__ = [] commands = {} @@ -120,20 +145,10 @@ file=f, path=webutil.up(f), text=lines(), - rev=fctx.rev(), symrev=webutil.symrevorshortnode(req, fctx), - node=fctx.hex(), - author=fctx.user(), - date=fctx.date(), - desc=fctx.description(), - extra=fctx.extra(), - branch=webutil.nodebranchnodefault(fctx), - parent=webutil.parents(fctx), - child=webutil.children(fctx), rename=webutil.renamelink(fctx), - tags=webutil.nodetagsdict(web.repo, fctx.node()), - bookmarks=webutil.nodebookmarksdict(web.repo, fctx.node()), - permissions=fctx.manifest().flags(f)) + permissions=fctx.manifest().flags(f), + **webutil.commonentry(web.repo, fctx)) @webcommand('file') def file(web, req, tmpl): @@ -225,7 +240,7 @@ revdef = 'reverse(%s)' % query try: tree = revset.parse(revdef) - except ParseError: + except error.ParseError: # can't parse to a revset tree return MODE_KEYWORD, query @@ -249,7 +264,8 @@ # RepoLookupError: no such revision, e.g. in 'revision:' # Abort: bookmark/tag not exists # LookupError: ambiguous identifier, e.g. 
in '(bc)' on a large repo - except (ParseError, RepoLookupError, Abort, LookupError): + except (error.ParseError, error.RepoLookupError, error.Abort, + LookupError): return MODE_KEYWORD, query def changelist(**map): @@ -263,20 +279,9 @@ yield tmpl('searchentry', parity=parity.next(), - author=ctx.user(), - parent=webutil.parents(ctx), - child=webutil.children(ctx), changelogtag=showtags, - desc=ctx.description(), - extra=ctx.extra(), - date=ctx.date(), files=files, - rev=ctx.rev(), - node=hex(n), - tags=webutil.nodetagsdict(web.repo, n), - bookmarks=webutil.nodebookmarksdict(web.repo, n), - inbranch=webutil.nodeinbranch(web.repo, ctx), - branches=webutil.nodebranchdict(web.repo, ctx)) + **webutil.commonentry(web.repo, ctx)) if count >= revcount: break @@ -546,20 +551,14 @@ "basename": d} return tmpl("manifest", - rev=ctx.rev(), symrev=symrev, - node=hex(node), path=abspath, up=webutil.up(abspath), upparity=parity.next(), fentries=filelist, dentries=dirlist, archives=web.archivelist(hex(node)), - tags=webutil.nodetagsdict(web.repo, node), - bookmarks=webutil.nodebookmarksdict(web.repo, node), - branch=webutil.nodebranchnodefault(ctx), - inbranch=webutil.nodeinbranch(web.repo, ctx), - branches=webutil.nodebranchdict(web.repo, ctx)) + **webutil.commonentry(web.repo, ctx)) @webcommand('tags') def tags(web, req, tmpl): @@ -693,22 +692,11 @@ revs = web.repo.changelog.revs(start, end - 1) for i in revs: ctx = web.repo[i] - n = ctx.node() - hn = hex(n) l.append(tmpl( - 'shortlogentry', + 'shortlogentry', parity=parity.next(), - author=ctx.user(), - desc=ctx.description(), - extra=ctx.extra(), - date=ctx.date(), - rev=i, - node=hn, - tags=webutil.nodetagsdict(web.repo, n), - bookmarks=webutil.nodebookmarksdict(web.repo, n), - inbranch=webutil.nodeinbranch(web.repo, ctx), - branches=webutil.nodebranchdict(web.repo, ctx))) + **webutil.commonentry(web.repo, ctx))) l.reverse() yield l @@ -753,12 +741,8 @@ raise if fctx is not None: - n = fctx.node() path = fctx.path() ctx = fctx.changectx() - else: - n = ctx.node() - # path already defined in except clause parity = paritygen(web.stripecount) style = web.config('web', 'style', 'paper') @@ -766,7 +750,7 @@ style = req.form['style'][0] diffs = webutil.diffs(web.repo, tmpl, ctx, None, [path], parity, style) - if fctx: + if fctx is not None: rename = webutil.renamelink(fctx) ctx = fctx else: @@ -774,20 +758,10 @@ ctx = ctx return tmpl("filediff", file=path, - node=hex(n), - rev=ctx.rev(), symrev=webutil.symrevorshortnode(req, ctx), - date=ctx.date(), - desc=ctx.description(), - extra=ctx.extra(), - author=ctx.user(), rename=rename, - branch=webutil.nodebranchnodefault(ctx), - parent=webutil.parents(ctx), - child=webutil.children(ctx), - tags=webutil.nodetagsdict(web.repo, n), - bookmarks=webutil.nodebookmarksdict(web.repo, n), - diff=diffs) + diff=diffs, + **webutil.commonentry(web.repo, ctx)) diff = webcommand('diff')(filediff) @@ -812,7 +786,6 @@ if 'file' not in req.form: raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') path = webutil.cleanpath(web.repo, req.form['file'][0]) - rename = path in ctx and webutil.renamelink(ctx[path]) or [] parsecontext = lambda v: v == 'full' and -1 or int(v) if 'context' in req.form: @@ -828,6 +801,7 @@ return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))] return f.data().splitlines() + fctx = None parent = ctx.p1() leftrev = parent.rev() leftnode = parent.node() @@ -843,30 +817,26 @@ leftlines = filelines(pfctx) else: rightlines = () - fctx = ctx.parents()[0][path] - leftlines = filelines(fctx) + pfctx = 
ctx.parents()[0][path] + leftlines = filelines(pfctx) comparison = webutil.compare(tmpl, context, leftlines, rightlines) + if fctx is not None: + rename = webutil.renamelink(fctx) + ctx = fctx + else: + rename = [] + ctx = ctx return tmpl('filecomparison', file=path, - node=hex(ctx.node()), - rev=ctx.rev(), symrev=webutil.symrevorshortnode(req, ctx), - date=ctx.date(), - desc=ctx.description(), - extra=ctx.extra(), - author=ctx.user(), rename=rename, - branch=webutil.nodebranchnodefault(ctx), - parent=webutil.parents(fctx), - child=webutil.children(fctx), - tags=webutil.nodetagsdict(web.repo, ctx.node()), - bookmarks=webutil.nodebookmarksdict(web.repo, ctx.node()), leftrev=leftrev, leftnode=hex(leftnode), rightrev=rightrev, rightnode=hex(rightnode), - comparison=comparison) + comparison=comparison, + **webutil.commonentry(web.repo, ctx)) @webcommand('annotate') def annotate(web, req, tmpl): @@ -918,20 +888,10 @@ file=f, annotate=annotate, path=webutil.up(f), - rev=fctx.rev(), symrev=webutil.symrevorshortnode(req, fctx), - node=fctx.hex(), - author=fctx.user(), - date=fctx.date(), - desc=fctx.description(), - extra=fctx.extra(), rename=webutil.renamelink(fctx), - branch=webutil.nodebranchnodefault(fctx), - parent=webutil.parents(fctx), - child=webutil.children(fctx), - tags=webutil.nodetagsdict(web.repo, fctx.node()), - bookmarks=webutil.nodebookmarksdict(web.repo, fctx.node()), - permissions=fctx.manifest().flags(f)) + permissions=fctx.manifest().flags(f), + **webutil.commonentry(web.repo, fctx)) @webcommand('filelog') def filelog(web, req, tmpl): @@ -993,23 +953,12 @@ for i in revs: iterfctx = fctx.filectx(i) - l.append({"parity": parity.next(), - "filerev": i, - "file": f, - "node": iterfctx.hex(), - "author": iterfctx.user(), - "date": iterfctx.date(), - "rename": webutil.renamelink(iterfctx), - "parent": webutil.parents(iterfctx), - "child": webutil.children(iterfctx), - "desc": iterfctx.description(), - "extra": iterfctx.extra(), - "tags": webutil.nodetagsdict(repo, iterfctx.node()), - "bookmarks": webutil.nodebookmarksdict( - repo, iterfctx.node()), - "branch": webutil.nodebranchnodefault(iterfctx), - "inbranch": webutil.nodeinbranch(repo, iterfctx), - "branches": webutil.nodebranchdict(repo, iterfctx)}) + l.append(dict( + parity=parity.next(), + filerev=i, + file=f, + rename=webutil.renamelink(iterfctx), + **webutil.commonentry(repo, iterfctx))) for e in reversed(l): yield e @@ -1018,11 +967,16 @@ revnav = webutil.filerevnav(web.repo, fctx.path()) nav = revnav.gen(end - 1, revcount, count) - return tmpl("filelog", file=f, node=fctx.hex(), nav=nav, + return tmpl("filelog", + file=f, + nav=nav, symrev=webutil.symrevorshortnode(req, fctx), entries=entries, latestentry=latestentry, - revcount=revcount, morevars=morevars, lessvars=lessvars) + revcount=revcount, + morevars=morevars, + lessvars=lessvars, + **webutil.commonentry(web.repo, fctx)) @webcommand('archive') def archive(web, req, tmpl): @@ -1248,7 +1202,7 @@ def _getdoc(e): doc = e[0].__doc__ if doc: - doc = _(doc).split('\n')[0] + doc = _(doc).partition('\n')[0] else: doc = _('(no help text available)') return doc @@ -1268,8 +1222,7 @@ The ``help`` template will be rendered when requesting help for a topic. ``helptopics`` will be rendered for the index of help topics. """ - from mercurial import commands # avoid cycle - from mercurial import help as helpmod # avoid cycle + from .. 
import commands, help as helpmod # avoid cycle topicname = req.form.get('node', [None])[0] if not topicname: @@ -1278,7 +1231,7 @@ yield {'topic': entries[0], 'summary': summary} early, other = [], [] - primary = lambda s: s.split('|')[0] + primary = lambda s: s.partition('|')[0] for c, e in commands.table.iteritems(): doc = _getdoc(e) if 'DEPRECATED' in doc or c.startswith('debug'): diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/webutil.py --- a/mercurial/hgweb/webutil.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/webutil.py Fri Dec 18 14:40:11 2015 -0600 @@ -6,15 +6,32 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import os, copy +from __future__ import absolute_import + +import copy +import difflib +import os import re -from mercurial import match, patch, error, ui, util, pathutil, context -from mercurial.i18n import _ -from mercurial.node import hex, nullid, short -from mercurial.templatefilters import revescape -from common import ErrorResponse, paritygen -from common import HTTP_NOT_FOUND -import difflib + +from ..i18n import _ +from ..node import hex, nullid, short + +from .common import ( + ErrorResponse, + HTTP_NOT_FOUND, + paritygen, +) + +from .. import ( + context, + error, + match, + patch, + pathutil, + templatefilters, + ui as uimod, + util, +) def up(p): if p[0] != "/": @@ -124,20 +141,28 @@ def hex(self, rev): return hex(self._changelog.node(self._revlog.linkrev(rev))) +class _siblings(object): + def __init__(self, siblings=[], hiderev=None): + self.siblings = [s for s in siblings if s.node() != nullid] + if len(self.siblings) == 1 and self.siblings[0].rev() == hiderev: + self.siblings = [] -def _siblings(siblings=[], hiderev=None): - siblings = [s for s in siblings if s.node() != nullid] - if len(siblings) == 1 and siblings[0].rev() == hiderev: - return - for s in siblings: - d = {'node': s.hex(), 'rev': s.rev()} - d['user'] = s.user() - d['date'] = s.date() - d['description'] = s.description() - d['branch'] = s.branch() - if util.safehasattr(s, 'path'): - d['file'] = s.path() - yield d + def __iter__(self): + for s in self.siblings: + d = { + 'node': s.hex(), + 'rev': s.rev(), + 'user': s.user(), + 'date': s.date(), + 'description': s.description(), + 'branch': s.branch(), + } + if util.safehasattr(s, 'path'): + d['file'] = s.path() + yield d + + def __len__(self): + return len(self.siblings) def parents(ctx, hide=None): if isinstance(ctx, context.basefilectx): @@ -283,6 +308,25 @@ return fctx +def commonentry(repo, ctx): + node = ctx.node() + return { + 'rev': ctx.rev(), + 'node': hex(node), + 'author': ctx.user(), + 'desc': ctx.description(), + 'date': ctx.date(), + 'extra': ctx.extra(), + 'phase': ctx.phasestr(), + 'branch': nodebranchnodefault(ctx), + 'inbranch': nodeinbranch(repo, ctx), + 'branches': nodebranchdict(repo, ctx), + 'tags': nodetagsdict(repo, node), + 'bookmarks': nodebookmarksdict(repo, node), + 'parent': lambda **x: parents(ctx), + 'child': lambda **x: children(ctx), + } + def changelistentry(web, ctx, tmpl): '''Obtain a dictionary to be used for entries in a changelist. 
@@ -295,26 +339,18 @@ showtags = showtag(repo, tmpl, 'changelogtag', n) files = listfilediffs(tmpl, ctx.files(), n, web.maxfiles) - return { - "author": ctx.user(), - "parent": parents(ctx, rev - 1), - "child": children(ctx, rev + 1), - "changelogtag": showtags, - "desc": ctx.description(), - "extra": ctx.extra(), - "date": ctx.date(), - "files": files, - "rev": rev, - "node": hex(n), - "tags": nodetagsdict(repo, n), - "bookmarks": nodebookmarksdict(repo, n), - "inbranch": nodeinbranch(repo, ctx), - "branches": nodebranchdict(repo, ctx) - } + entry = commonentry(repo, ctx) + entry.update( + parent=lambda **x: parents(ctx, rev - 1), + child=lambda **x: children(ctx, rev + 1), + changelogtag=showtags, + files=files, + ) + return entry def symrevorshortnode(req, ctx): if 'node' in req.form: - return revescape(req.form['node'][0]) + return templatefilters.revescape(req.form['node'][0]) else: return short(ctx.node()) @@ -351,29 +387,16 @@ return dict( diff=diff, - rev=ctx.rev(), - node=ctx.hex(), symrev=symrevorshortnode(req, ctx), - parent=tuple(parents(ctx)), - child=children(ctx), basenode=basectx.hex(), changesettag=showtags, changesetbookmark=showbookmarks, changesetbranch=showbranch, - author=ctx.user(), - desc=ctx.description(), - extra=ctx.extra(), - date=ctx.date(), - phase=ctx.phasestr(), files=files, diffsummary=lambda **x: diffsummary(diffstatsgen), diffstat=diffstats, archives=web.archivelist(ctx.hex()), - tags=nodetagsdict(web.repo, ctx.node()), - bookmarks=nodebookmarksdict(web.repo, ctx.node()), - branch=showbranch, - inbranch=nodeinbranch(web.repo, ctx), - branches=nodebranchdict(web.repo, ctx)) + **commonentry(web.repo, ctx)) def listfilediffs(tmpl, files, node, max): for f in files[:max]: @@ -537,7 +560,7 @@ yield {'name': key, 'value': str(value), 'separator': separator} separator = '&' -class wsgiui(ui.ui): +class wsgiui(uimod.ui): # default termwidth breaks under mod_wsgi def termwidth(self): return 80 diff -r e240e914d226 -r 8f016345e6b0 mercurial/hgweb/wsgicgi.py --- a/mercurial/hgweb/wsgicgi.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/hgweb/wsgicgi.py Fri Dec 18 14:40:11 2015 -0600 @@ -8,9 +8,18 @@ # This was originally copied from the public domain code at # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side -import os, sys -from mercurial import util -from mercurial.hgweb import common +from __future__ import absolute_import + +import os +import sys + +from .. import ( + util, +) + +from . 
import ( + common, +) def launch(application): util.setbinary(sys.stdin) diff -r e240e914d226 -r 8f016345e6b0 mercurial/localrepo.py --- a/mercurial/localrepo.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/localrepo.py Fri Dec 18 14:40:11 2015 -0600 @@ -214,6 +214,8 @@ self.path = self.wvfs.join(".hg") self.origroot = path self.auditor = pathutil.pathauditor(self.root, self._checknested) + self.nofsauditor = pathutil.pathauditor(self.root, self._checknested, + realfs=False) self.vfs = scmutil.vfs(self.path) self.opener = self.vfs self.baseui = baseui @@ -258,8 +260,7 @@ '\0\0\0\2' # represents revlogv2 ' dummy changelog to prevent using the old repo layout' ) - # experimental config: format.generaldelta - if self.ui.configbool('format', 'generaldelta', False): + if scmutil.gdinitconfig(self.ui): self.requirements.add("generaldelta") if self.ui.configbool('experimental', 'treemanifest', False): self.requirements.add("treemanifest") @@ -359,6 +360,7 @@ aggressivemergedeltas = self.ui.configbool('format', 'aggressivemergedeltas', False) self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas + self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui) def _writerequirements(self): scmutil.writerequires(self.vfs, self.requirements) @@ -517,15 +519,23 @@ return iter(self.changelog) def revs(self, expr, *args): - '''Return a list of revisions matching the given revset''' + '''Find revisions matching a revset. + + The revset is specified as a string ``expr`` that may contain + %-formatting to escape certain types. See ``revset.formatspec``. + + Return a revset.abstractsmartset, which is a list-like interface + that contains integer revisions. + ''' expr = revset.formatspec(expr, *args) m = revset.match(None, expr) return m(self) def set(self, expr, *args): - ''' - Yield a context for each matching revision, after doing arg - replacement via revset.formatspec + '''Find revisions matching a revset and emit changectx instances. + + This is a convenience wrapper around ``revs()`` that iterates the + result and is a generator of changectx instances. 
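The reworded localrepo.revs()/set() docstrings above describe the calling convention; a hedged usage sketch, assuming the 3.7-era internal API and a Mercurial repository in the current working directory (this is illustration only, not a supported public interface)::

    from __future__ import absolute_import

    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '.')

    # revs(): %-style escaping per revset.formatspec, returns a smartset of ints
    for rev in repo.revs('branch(%s)', 'default'):
        print(rev)

    # set(): same arguments, but yields changectx instances
    for ctx in repo.set('limit(all(), %d)', 3):
        print(ctx.hex())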
''' for r in self.revs(expr, *args): yield self[r] @@ -751,6 +761,7 @@ return self._tagscache.nodetagscache.get(node, []) def nodebookmarks(self, node): + """return the list of bookmarks pointing to the specified node""" marks = [] for bookmark, n in self._bookmarks.iteritems(): if n == node: @@ -797,12 +808,13 @@ return repo[key].branch() def known(self, nodes): - nm = self.changelog.nodemap - pc = self._phasecache + cl = self.changelog + nm = cl.nodemap + filtered = cl.filteredrevs result = [] for n in nodes: r = nm.get(n) - resp = not (r is None or pc.phase(self, r) >= phases.secret) + resp = not (r is None or r in filtered) result.append(resp) return result @@ -840,13 +852,15 @@ f = f[1:] return filelog.filelog(self.svfs, f) + def parents(self, changeid=None): + '''get list of changectxs for parents of changeid''' + msg = 'repo.parents() is deprecated, use repo[%r].parents()' % changeid + self.ui.deprecwarn(msg, '3.7') + return self[changeid].parents() + def changectx(self, changeid): return self[changeid] - def parents(self, changeid=None): - '''get list of changectxs for parents of changeid''' - return self[changeid].parents() - def setparents(self, p1, p2=nullid): self.dirstate.beginparentchange() copies = self.dirstate.setparents(p1, p2) @@ -1161,15 +1175,14 @@ % self.dirstate.branch()) self.dirstate.invalidate() - parents = tuple([p.rev() for p in self.parents()]) + parents = tuple([p.rev() for p in self[None].parents()]) if len(parents) > 1: ui.status(_('working directory now based on ' 'revisions %d and %d\n') % parents) else: ui.status(_('working directory now based on ' 'revision %d\n') % parents) - ms = mergemod.mergestate(self) - ms.reset(self['.'].node()) + mergemod.mergestate.clean(self, self['.'].node()) # TODO: if we know which new heads may result from this rollback, pass # them to destroy(), which will prevent the branchhead cache from being @@ -1456,8 +1469,11 @@ match.explicitdir = vdirs.append match.bad = fail - wlock = self.wlock() + wlock = lock = tr = None try: + wlock = self.wlock() + lock = self.lock() # for recent changelog (see issue4368) + wctx = self[None] merge = len(wctx.parents()) > 1 @@ -1556,7 +1572,7 @@ if merge and cctx.deleted(): raise error.Abort(_("cannot commit merge with missing files")) - ms = mergemod.mergestate(self) + ms = mergemod.mergestate.read(self) if list(ms.unresolved()): raise error.Abort(_('unresolved merge conflicts ' @@ -1589,19 +1605,21 @@ try: self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2) + tr = self.transaction('commit') ret = self.commitctx(cctx, True) except: # re-raises if edited: self.ui.write( _('note: commit message saved in %s\n') % msgfn) raise - # update bookmarks, dirstate and mergestate bookmarks.update(self, [p1, p2], ret) cctx.markcommitted(ret) ms.reset() + tr.close() + finally: - wlock.release() + lockmod.release(tr, lock, wlock) def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2): # hack for command that use a temporary commit (eg: histedit) @@ -1838,22 +1856,6 @@ """ return util.hooks() - def clone(self, remote, heads=[], stream=None): - '''clone remote repository. 
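localrepo.commit() above now takes the working-directory lock, then the store lock, then opens the transaction, and hands all three to lockmod.release() so they are released innermost-first even on error. The same acquire-in-order, release-in-reverse shape in isolation (dummylock and release below are stand-ins, not Mercurial's lock module)::

    from __future__ import absolute_import

    class dummylock(object):
        def __init__(self, name):
            self.name = name
        def acquire(self):
            print('acquire %s' % self.name)
            return self
        def release(self):
            print('release %s' % self.name)

    def release(*objs):
        # like lockmod.release(): release in the order given, skipping None
        for o in objs:
            if o is not None:
                o.release()

    wlock = lock = tr = None
    try:
        wlock = dummylock('wlock').acquire()
        lock = dummylock('lock').acquire()          # store lock
        tr = dummylock('transaction').acquire()
        # ... commit work happens here ...
    finally:
        # transaction first, then store lock, then wlock
        release(tr, lock, wlock)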
- - keyword arguments: - heads: list of revs to clone (forces use of pull) - stream: use streaming clone if possible''' - # internal config: ui.quietbookmarkmove - quiet = self.ui.backupconfig('ui', 'quietbookmarkmove') - try: - self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone') - pullop = exchange.pull(self, remote, heads, - streamclonerequested=stream) - return pullop.cgresult - finally: - self.ui.restoreconfig(quiet) - def pushkey(self, namespace, key, old, new): try: tr = self.currenttransaction() diff -r e240e914d226 -r 8f016345e6b0 mercurial/lsprof.py --- a/mercurial/lsprof.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/lsprof.py Fri Dec 18 14:40:11 2015 -0600 @@ -1,5 +1,12 @@ +from __future__ import absolute_import + +import _lsprof import sys -from _lsprof import Profiler, profiler_entry + +Profiler = _lsprof.Profiler + +# PyPy doesn't expose profiler_entry from the module. +profiler_entry = getattr(_lsprof, 'profiler_entry', None) __all__ = ['profile', 'Stats'] @@ -22,8 +29,13 @@ def sort(self, crit="inlinetime"): """XXX docstring""" - if crit not in profiler_entry.__dict__: + # profiler_entries isn't defined when running under PyPy. + if profiler_entry: + if crit not in profiler_entry.__dict__: + raise ValueError("Can't sort by %s" % crit) + elif self.data and not getattr(self.data[0], crit, None): raise ValueError("Can't sort by %s" % crit) + self.data.sort(key=lambda x: getattr(x, crit), reverse=True) for e in self.data: if e.calls: diff -r e240e914d226 -r 8f016345e6b0 mercurial/manifest.c --- a/mercurial/manifest.c Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/manifest.c Fri Dec 18 14:40:11 2015 -0600 @@ -242,7 +242,7 @@ hash = nodeof(l); consumed = pl + 41; flags = PyString_FromStringAndSize(l->start + consumed, - l->len - consumed - 1); + l->len - consumed - 1); if (!path || !hash || !flags) { goto done; } diff -r e240e914d226 -r 8f016345e6b0 mercurial/manifest.py --- a/mercurial/manifest.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/manifest.py Fri Dec 18 14:40:11 2015 -0600 @@ -334,36 +334,44 @@ # zero copy representation of base as a buffer addbuf = util.buffer(base) - # start with a readonly loop that finds the offset of - # each line and creates the deltas - for f, todelete in changes: - # bs will either be the index of the item or the insert point - start, end = _msearch(addbuf, f, start) - if not todelete: - h, fl = self._lm[f] - l = "%s\0%s%s\n" % (f, revlog.hex(h), fl) - else: - if start == end: - # item we want to delete was not found, error out - raise AssertionError( - _("failed to remove %s from manifest") % f) - l = "" - if dstart is not None and dstart <= start and dend >= start: - if dend < end: + changes = list(changes) + if len(changes) < 1000: + # start with a readonly loop that finds the offset of + # each line and creates the deltas + for f, todelete in changes: + # bs will either be the index of the item or the insert point + start, end = _msearch(addbuf, f, start) + if not todelete: + h, fl = self._lm[f] + l = "%s\0%s%s\n" % (f, revlog.hex(h), fl) + else: + if start == end: + # item we want to delete was not found, error out + raise AssertionError( + _("failed to remove %s from manifest") % f) + l = "" + if dstart is not None and dstart <= start and dend >= start: + if dend < end: + dend = end + if l: + dline.append(l) + else: + if dstart is not None: + delta.append([dstart, dend, "".join(dline)]) + dstart = start dend = end - if l: - dline.append(l) - else: - if dstart is not None: - delta.append([dstart, dend, "".join(dline)]) - 
dstart = start - dend = end - dline = [l] + dline = [l] - if dstart is not None: - delta.append([dstart, dend, "".join(dline)]) - # apply the delta to the base, and get a delta for addrevision - deltatext, arraytext = _addlistdelta(base, delta) + if dstart is not None: + delta.append([dstart, dend, "".join(dline)]) + # apply the delta to the base, and get a delta for addrevision + deltatext, arraytext = _addlistdelta(base, delta) + else: + # For large changes, it's much cheaper to just build the text and + # diff it. + arraytext = array.array('c', self.text()) + deltatext = mdiff.textdiff(base, arraytext) + return arraytext, deltatext def _msearch(m, s, lo=0, hi=None): @@ -609,7 +617,7 @@ def setflag(self, f, flags): """Set the flags (symlink, executable) for path f.""" - assert 'd' not in flags + assert 't' not in flags self._load() dir, subpath = _splittopdir(f) if dir: @@ -732,9 +740,12 @@ def _matches(self, match): '''recursively generate a new manifest filtered by the match argument. ''' + + visit = match.visitdir(self._dir[:-1] or '.') + if visit == 'all': + return self.copy() ret = treemanifest(self._dir) - - if not match.visitdir(self._dir[:-1] or '.'): + if not visit: return ret self._load() @@ -807,7 +818,7 @@ def parse(self, text, readsubtree): for f, n, fl in _parse(text): - if fl == 'd': + if fl == 't': f = f + '/' self._dirs[f] = readsubtree(self._subpath(f), n) elif '/' in f: @@ -838,7 +849,7 @@ """ self._load() flags = self.flags - dirs = [(d[:-1], self._dirs[d]._node, 'd') for d in self._dirs] + dirs = [(d[:-1], self._dirs[d]._node, 't') for d in self._dirs] files = [(f, self._files[f], flags(f)) for f in self._files] return _text(sorted(dirs + files), usemanifestv2) diff -r e240e914d226 -r 8f016345e6b0 mercurial/match.py --- a/mercurial/match.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/match.py Fri Dec 18 14:40:11 2015 -0600 @@ -227,9 +227,15 @@ has potential matches in it or one of its subdirectories. This is based on the match's primary, included, and excluded patterns. + Returns the string 'all' if the given directory and all subdirectories + should be visited. Otherwise returns True or False indicating whether + the given directory should be visited. + This function's behavior is undefined if it has returned False for one of the dir's parent directories. ''' + if self.prefix() and dir in self._fileroots: + return 'all' if dir in self._excluderoots: return False if (self._includeroots and @@ -654,9 +660,11 @@ if "#" in line: global _commentre if not _commentre: - _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*') + _commentre = util.re.compile(r'((?:^|[^\\])(?:\\\\)*)#.*') # remove comments prefixed by an even number of escapes - line = _commentre.sub(r'\1', line) + m = _commentre.search(line) + if m: + line = line[:m.end(1)] # fixup properly escaped comments that survived the above line = line.replace("\\#", "#") line = line.rstrip() diff -r e240e914d226 -r 8f016345e6b0 mercurial/merge.py --- a/mercurial/merge.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/merge.py Fri Dec 18 14:40:11 2015 -0600 @@ -16,6 +16,7 @@ from .node import ( bin, hex, + nullhex, nullid, nullrev, ) @@ -42,50 +43,69 @@ class mergestate(object): '''track 3-way merge state of individual files - it is stored on disk when needed. Two file are used, one with an old - format, one with a new format. Both contains similar data, but the new - format can store new kinds of field. 
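The readpatternfile change above stops using re.sub and instead slices the line at the first '#' that is preceded by an even number of backslashes, so escaped '#' characters inside patterns survive. The same rule as a standalone function (stripcomment is an illustrative name)::

    from __future__ import absolute_import

    import re

    # a '#' preceded by an even number of backslashes starts a comment
    _commentre = re.compile(r'((?:^|[^\\])(?:\\\\)*)#.*')

    def stripcomment(line):
        m = _commentre.search(line)
        if m:
            line = line[:m.end(1)]
        # properly escaped comment characters survive and are unescaped
        line = line.replace('\\#', '#')
        return line.rstrip()

    print(stripcomment('glob:*.o          # build output'))
    # -> 'glob:*.o'
    print(stripcomment('re:foo\\#bar.*    # escaped hash is kept'))
    # -> 're:foo#bar.*'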
- - Current new format is a list of arbitrary record of the form: - - [type][length][content] + The merge state is stored on disk when needed. Two files are used: one with + an old format (version 1), and one with a new format (version 2). Version 2 + stores a superset of the data in version 1, including new kinds of records + in the future. For more about the new format, see the documentation for + `_readrecordsv2`. - Type is a single character, length is a 4 bytes integer, content is an - arbitrary suites of bytes of length `length`. + Each record can contain arbitrary content, and has an associated type. This + `type` should be a letter. If `type` is uppercase, the record is mandatory: + versions of Mercurial that don't support it should abort. If `type` is + lowercase, the record can be safely ignored. - Type should be a letter. Capital letter are mandatory record, Mercurial - should abort if they are unknown. lower case record can be safely ignored. - - Currently known record: + Currently known records: L: the node of the "local" part of the merge (hexified version) O: the node of the "other" part of the merge (hexified version) F: a file to be merged entry + C: a change/delete or delete/change conflict D: a file that the external merge driver will merge internally (experimental) m: the external merge driver defined for this merge plus its run state (experimental) + X: unsupported mandatory record type (used in tests) + x: unsupported advisory record type (used in tests) Merge driver run states (experimental): u: driver-resolved files unmarked -- needs to be run next time we're about to resolve or commit m: driver-resolved files marked -- only needs to be run before commit s: success/skipped -- does not need to be run any more + ''' statepathv1 = 'merge/state' statepathv2 = 'merge/state2' + @staticmethod + def clean(repo, node=None, other=None): + """Initialize a brand new merge state, removing any existing state on + disk.""" + ms = mergestate(repo) + ms.reset(node, other) + return ms + + @staticmethod + def read(repo): + """Initialize the merge state, reading it from disk.""" + ms = mergestate(repo) + ms._read() + return ms + def __init__(self, repo): + """Initialize the merge state. + + Do not use this directly! 
Instead call read() or clean().""" self._repo = repo self._dirty = False - self._read() def reset(self, node=None, other=None): self._state = {} self._local = None self._other = None - if 'otherctx' in vars(self): - del self.otherctx + for var in ('localctx', 'otherctx'): + if var in vars(self): + delattr(self, var) if node: self._local = node self._other = other @@ -95,6 +115,7 @@ else: self._mdstate = 'u' shutil.rmtree(self._repo.join('merge'), True) + self._results = {} self._dirty = False def _read(self): @@ -106,10 +127,12 @@ self._state = {} self._local = None self._other = None - if 'otherctx' in vars(self): - del self.otherctx + for var in ('localctx', 'otherctx'): + if var in vars(self): + delattr(self, var) self._readmergedriver = None self._mdstate = 's' + unsupported = set() records = self._readrecords() for rtype, record in records: if rtype == 'L': @@ -125,14 +148,17 @@ self._readmergedriver = bits[0] self._mdstate = mdstate - elif rtype in 'FD': + elif rtype in 'FDC': bits = record.split('\0') self._state[bits[0]] = bits[1:] elif not rtype.islower(): - raise error.Abort(_('unsupported merge state record: %s') - % rtype) + unsupported.add(rtype) + self._results = {} self._dirty = False + if unsupported: + raise error.UnsupportedMergeRecords(unsupported) + def _readrecords(self): """Read merge state from disk and return a list of record (TYPE, data) @@ -206,8 +232,21 @@ def _readrecordsv2(self): """read on disk merge state for version 2 file - returns list of record [(TYPE, data), ...] - """ + This format is a list of arbitrary records of the form: + + [type][length][content] + + `type` is a single character, `length` is a 4 byte integer, and + `content` is an arbitrary byte sequence of length `length`. + + Mercurial versions prior to 3.7 have a bug where if there are + unsupported mandatory merge records, attempting to clear out the merge + state with hg update --clean or similar aborts. The 't' record type + works around that by writing out what those versions treat as an + advisory record, but later versions interpret as special: the first + character is the 'real' record type and everything onwards is the data. 
+ + Returns list of records [(TYPE, data), ...].""" records = [] try: f = self._repo.vfs(self.statepathv2) @@ -221,6 +260,8 @@ off += 4 record = data[off:(off + length)] off += length + if rtype == 't': + rtype, record = record[0], record[1:] records.append((rtype, record)) f.close() except IOError as err: @@ -248,7 +289,15 @@ return configmergedriver @util.propertycache + def localctx(self): + if self._local is None: + raise RuntimeError("localctx accessed but self._local isn't set") + return self._repo[self._local] + + @util.propertycache def otherctx(self): + if self._other is None: + raise RuntimeError("localctx accessed but self._local isn't set") return self._repo[self._other] def active(self): @@ -266,20 +315,28 @@ def commit(self): """Write current state on disk (if necessary)""" if self._dirty: - records = [] - records.append(('L', hex(self._local))) - records.append(('O', hex(self._other))) - if self.mergedriver: - records.append(('m', '\0'.join([ - self.mergedriver, self._mdstate]))) - for d, v in self._state.iteritems(): - if v[0] == 'd': - records.append(('D', '\0'.join([d] + v))) - else: - records.append(('F', '\0'.join([d] + v))) + records = self._makerecords() self._writerecords(records) self._dirty = False + def _makerecords(self): + records = [] + records.append(('L', hex(self._local))) + records.append(('O', hex(self._other))) + if self.mergedriver: + records.append(('m', '\0'.join([ + self.mergedriver, self._mdstate]))) + for d, v in self._state.iteritems(): + if v[0] == 'd': + records.append(('D', '\0'.join([d] + v))) + # v[1] == local ('cd'), v[6] == other ('dc') -- not supported by + # older versions of Mercurial + elif v[1] == nullhex or v[6] == nullhex: + records.append(('C', '\0'.join([d] + v))) + else: + records.append(('F', '\0'.join([d] + v))) + return records + def _writerecords(self, records): """Write current state on disk (both v1 and v2)""" self._writerecordsv1(records) @@ -298,10 +355,16 @@ f.close() def _writerecordsv2(self, records): - """Write current state on disk in a version 2 file""" + """Write current state on disk in a version 2 file + + See the docstring for _readrecordsv2 for why we use 't'.""" + # these are the records that all version 2 clients can read + whitelist = 'LOF' f = self._repo.vfs(self.statepathv2, 'w') for key, data in records: assert len(key) == 1 + if key not in whitelist: + key, data = 't', '%s%s' % (key, data) format = '>sI%is' % len(data) f.write(_pack(format, key, len(data), data)) f.close() @@ -315,8 +378,11 @@ note: also write the local version to the `.hg/merge` directory. 
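Since both the version-2 record layout and the 't' compatibility wrapper are spelled out above, here is a standalone round trip of that encoding, operating on an in-memory Python 2 byte string instead of .hg/merge/state2 (packrecords and unpackrecords are illustrative names)::

    from __future__ import absolute_import

    import struct

    _pack = struct.pack
    _unpack = struct.unpack

    # record types every version-2 reader understands; anything else is
    # smuggled inside an advisory 't' record whose first byte is the real type
    _whitelist = 'LOF'

    def packrecords(records):
        out = []
        for rtype, data in records:
            if rtype not in _whitelist:
                rtype, data = 't', rtype + data
            out.append(_pack('>sI%is' % len(data), rtype, len(data), data))
        return ''.join(out)

    def unpackrecords(blob):
        records, off, end = [], 0, len(blob)
        while off < end:
            rtype = blob[off]
            off += 1
            length = _unpack('>I', blob[off:off + 4])[0]
            off += 4
            record = blob[off:off + length]
            off += length
            if rtype == 't':
                rtype, record = record[0], record[1:]
            records.append((rtype, record))
        return records

    recs = [('L', '0' * 40), ('O', 'f' * 40), ('C', 'file.txt\0u')]
    assert unpackrecords(packrecords(recs)) == recs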
""" - hash = util.sha1(fcl.path()).hexdigest() - self._repo.vfs.write('merge/' + hash, fcl.data()) + if fcl.isabsent(): + hash = nullhex + else: + hash = util.sha1(fcl.path()).hexdigest() + self._repo.vfs.write('merge/' + hash, fcl.data()) self._state[fd] = ['u', hash, fcl.path(), fca.path(), hex(fca.filenode()), fco.path(), hex(fco.filenode()), @@ -363,8 +429,9 @@ stateentry = self._state[dfile] state, hash, lfile, afile, anode, ofile, onode, flags = stateentry octx = self._repo[self._other] - fcd = wctx[dfile] - fco = octx[ofile] + fcd = self._filectxorabsent(hash, wctx, dfile) + fco = self._filectxorabsent(onode, octx, ofile) + # TODO: move this to filectxorabsent fca = self._repo.filectx(afile, fileid=anode) # "premerge" x flags flo = fco.flags() @@ -378,29 +445,117 @@ flags = flo if preresolve: # restore local - f = self._repo.vfs('merge/' + hash) - self._repo.wwrite(dfile, f.read(), flags) - f.close() - complete, r = filemerge.premerge(self._repo, self._local, lfile, - fcd, fco, fca, labels=labels) + if hash != nullhex: + f = self._repo.vfs('merge/' + hash) + self._repo.wwrite(dfile, f.read(), flags) + f.close() + else: + self._repo.wvfs.unlinkpath(dfile, ignoremissing=True) + complete, r, deleted = filemerge.premerge(self._repo, self._local, + lfile, fcd, fco, fca, + labels=labels) else: - complete, r = filemerge.filemerge(self._repo, self._local, lfile, - fcd, fco, fca, labels=labels) + complete, r, deleted = filemerge.filemerge(self._repo, self._local, + lfile, fcd, fco, fca, + labels=labels) if r is None: # no real conflict del self._state[dfile] self._dirty = True elif not r: self.mark(dfile, 'r') + + if complete: + action = None + if deleted: + if fcd.isabsent(): + # dc: local picked. Need to drop if present, which may + # happen on re-resolves. 
+ action = 'f' + else: + # cd: remote picked (or otherwise deleted) + action = 'r' + else: + if fcd.isabsent(): # dc: remote picked + action = 'g' + elif fco.isabsent(): # cd: local picked + if dfile in self.localctx: + action = 'am' + else: + action = 'a' + # else: regular merges (no action necessary) + self._results[dfile] = r, action + return complete, r + def _filectxorabsent(self, hexnode, ctx, f): + if hexnode == nullhex: + return filemerge.absentfilectx(ctx, f) + else: + return ctx[f] + def preresolve(self, dfile, wctx, labels=None): + """run premerge process for dfile + + Returns whether the merge is complete, and the exit code.""" return self._resolve(True, dfile, wctx, labels=labels) def resolve(self, dfile, wctx, labels=None): - """rerun merge process for file path `dfile`""" + """run merge process (assuming premerge was run) for dfile + + Returns the exit code of the merge.""" return self._resolve(False, dfile, wctx, labels=labels)[1] + def counts(self): + """return counts for updated, merged and removed files in this + session""" + updated, merged, removed = 0, 0, 0 + for r, action in self._results.itervalues(): + if r is None: + updated += 1 + elif r == 0: + if action == 'r': + removed += 1 + else: + merged += 1 + return updated, merged, removed + + def unresolvedcount(self): + """get unresolved count for this merge (persistent)""" + return len([True for f, entry in self._state.iteritems() + if entry[0] == 'u']) + + def actions(self): + """return lists of actions to perform on the dirstate""" + actions = {'r': [], 'f': [], 'a': [], 'am': [], 'g': []} + for f, (r, action) in self._results.iteritems(): + if action is not None: + actions[action].append((f, None, "merge result")) + return actions + + def recordactions(self): + """record remove/add/get actions in the dirstate""" + branchmerge = self._repo.dirstate.p2() != nullid + recordupdates(self._repo, self.actions(), branchmerge) + + def queueremove(self, f): + """queues a file to be removed from the dirstate + + Meant for use by custom merge drivers.""" + self._results[f] = 0, 'r' + + def queueadd(self, f): + """queues a file to be added to the dirstate + + Meant for use by custom merge drivers.""" + self._results[f] = 0, 'a' + + def queueget(self, f): + """queues a file to be marked modified in the dirstate + + Meant for use by custom merge drivers.""" + self._results[f] = 0, 'g' + def _checkunknownfile(repo, wctx, mctx, f, f2=None): if f2 is None: f2 = f @@ -479,7 +634,7 @@ if actions: # k, dr, e and rd are no-op - for m in 'a', 'f', 'g', 'cd', 'dc': + for m in 'a', 'am', 'f', 'g', 'cd', 'dc': for f, args, msg in actions[m]: pmmf.add(f) for f, args, msg in actions['r']: @@ -528,15 +683,17 @@ This is currently not implemented -- it's an extension point.""" return True -def manifestmerge(repo, wctx, p2, pa, branchmerge, force, partial, +def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher, acceptremote, followcopies): """ Merge p1 and p2 with ancestor pa and generate merge action list branchmerge and force are as passed in to update - partial = function to filter file lists + matcher = matcher to filter file lists acceptremote = accept the incoming changes without prompting """ + if matcher is not None and matcher.always(): + matcher = None copy, movewithdir, diverge, renamedelete = {}, {}, {}, {} @@ -550,7 +707,7 @@ repo.ui.note(_("resolving manifests\n")) repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n" - % (bool(branchmerge), bool(force), bool(partial))) + % (bool(branchmerge), bool(force), 
bool(matcher))) repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2)) m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest() @@ -565,12 +722,13 @@ break # Compare manifests + if matcher is not None: + m1 = m1.matches(matcher) + m2 = m2.matches(matcher) diff = m1.diff(m2) actions = {} for f, ((n1, fl1), (n2, fl2)) in diff.iteritems(): - if partial and not partial(f): - continue if n1 and n2: # file exists on both local and remote side if f not in ma: fa = copy.get(f, None) @@ -618,7 +776,8 @@ if acceptremote: actions[f] = ('r', None, "remote delete") else: - actions[f] = ('cd', None, "prompt changed/deleted") + actions[f] = ('cd', (f, None, f, False, pa.node()), + "prompt changed/deleted") elif n1[20:] == 'a': # This extra 'a' is added by working copy manifest to mark # the file as locally added. We should forget it instead of @@ -668,7 +827,8 @@ if acceptremote: actions[f] = ('c', (fl2,), "remote recreating") else: - actions[f] = ('dc', (fl2,), "prompt deleted/changed") + actions[f] = ('dc', (None, f, f, False, pa.node()), + "prompt deleted/changed") return actions, diverge, renamedelete @@ -684,13 +844,12 @@ # remote did change but ended up with same content del actions[f] # don't get = keep local deleted -def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, partial, - acceptremote, followcopies): +def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, + acceptremote, followcopies, matcher=None): "Calculate the actions needed to merge mctx into wctx using ancestors" - if len(ancestors) == 1: # default actions, diverge, renamedelete = manifestmerge( - repo, wctx, mctx, ancestors[0], branchmerge, force, partial, + repo, wctx, mctx, ancestors[0], branchmerge, force, matcher, acceptremote, followcopies) _checkunknownfiles(repo, wctx, mctx, force, actions) @@ -705,7 +864,7 @@ for ancestor in ancestors: repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor) actions, diverge1, renamedelete1 = manifestmerge( - repo, wctx, mctx, ancestor, branchmerge, force, partial, + repo, wctx, mctx, ancestor, branchmerge, force, matcher, acceptremote, followcopies) _checkunknownfiles(repo, wctx, mctx, force, actions) @@ -834,25 +993,34 @@ describes how many files were affected by the update. 
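The new mergestate.counts() and unresolvedcount() helpers above replace the inline counters that applyupdates() used to maintain. A minimal standalone version of that reduction (the sample results and state dictionaries are invented for illustration)::

    def counts(results):
        # results maps file -> (returncode, action), like mergestate._results
        updated = merged = removed = 0
        for r, action in results.values():
            if r is None:
                updated += 1
            elif r == 0:
                if action == 'r':
                    removed += 1
                else:
                    merged += 1
        return updated, merged, removed

    def unresolvedcount(state):
        # state maps file -> [state, ...]; 'u' means still unresolved
        return len([f for f, entry in state.items() if entry[0] == 'u'])

    results = {'a.txt': (None, None), 'b.txt': (0, 'r'), 'c.txt': (0, None)}
    state = {'b.txt': ['r'], 'c.txt': ['u']}
    print(counts(results))          # (1, 1, 1)
    print(unresolvedcount(state))   # 1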
""" - updated, merged, removed, unresolved = 0, 0, 0, 0 - ms = mergestate(repo) - ms.reset(wctx.p1().node(), mctx.node()) + updated, merged, removed = 0, 0, 0 + ms = mergestate.clean(repo, wctx.p1().node(), mctx.node()) moves = [] for m, l in actions.items(): l.sort() - # prescan for merges - for f, args, msg in actions['m']: + # 'cd' and 'dc' actions are treated like other merge conflicts + mergeactions = sorted(actions['cd']) + mergeactions.extend(sorted(actions['dc'])) + mergeactions.extend(actions['m']) + for f, args, msg in mergeactions: f1, f2, fa, move, anc = args if f == '.hgsubstate': # merged internally continue - repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f)) - fcl = wctx[f1] - fco = mctx[f2] + if f1 is None: + fcl = filemerge.absentfilectx(wctx, fa) + else: + repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f)) + fcl = wctx[f1] + if f2 is None: + fco = filemerge.absentfilectx(mctx, fa) + else: + fco = mctx[f2] actx = repo[anc] if fa in actx: fca = actx[fa] else: + # TODO: move to absentfilectx fca = repo.filectx(f1, fileid=nullrev) ms.add(fcl, fco, fca, f) if f1 != f and move: @@ -905,6 +1073,12 @@ z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) + # re-add/mark as modified (manifest only, just log it) + for f, args, msg in actions['am']: + repo.ui.debug(" %s: %s -> am\n" % (f, msg)) + z += 1 + progress(_updating, z, item=f, total=numupdates, unit=_files) + # keep (noop, just log it) for f, args, msg in actions['k']: repo.ui.debug(" %s: %s -> k\n" % (f, msg)) @@ -942,7 +1116,6 @@ util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags) updated += 1 - mergeactions = actions['m'] # the ordering is important here -- ms.mergedriver will raise if the merge # driver has changed, and we want to be able to bypass it when overwrite is # True @@ -965,7 +1138,7 @@ # premerge tocomplete = [] - for f, args, msg in actions['m']: + for f, args, msg in mergeactions: repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) @@ -975,15 +1148,7 @@ continue audit(f) complete, r = ms.preresolve(f, wctx, labels=labels) - if complete: - if r is not None and r > 0: - unresolved += 1 - else: - if r is None: - updated += 1 - else: - merged += 1 - else: + if not complete: numupdates += 1 tocomplete.append((f, args, msg)) @@ -992,25 +1157,29 @@ repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) - r = ms.resolve(f, wctx, labels=labels) - if r is not None and r > 0: - unresolved += 1 - else: - if r is None: - updated += 1 - else: - merged += 1 + ms.resolve(f, wctx, labels=labels) ms.commit() + unresolved = ms.unresolvedcount() + if usemergedriver and not unresolved and ms.mdstate() != 's': if not driverconclude(repo, ms, wctx, labels=labels): # XXX setting unresolved to at least 1 is a hack to make sure we # error out - return updated, merged, removed, max(unresolved, 1) + unresolved = max(unresolved, 1) ms.commit() + msupdated, msmerged, msremoved = ms.counts() + updated += msupdated + merged += msmerged + removed += msremoved + + extraactions = ms.actions() + for k, acts in extraactions.iteritems(): + actions[k].extend(acts) + progress(_updating, None, total=numupdates, unit=_files) return updated, merged, removed, unresolved @@ -1018,38 +1187,44 @@ def recordupdates(repo, actions, branchmerge): "record merge actions to the dirstate" # remove (must come first) - for f, args, msg in actions['r']: + for f, args, msg in 
actions.get('r', []): if branchmerge: repo.dirstate.remove(f) else: repo.dirstate.drop(f) # forget (must come first) - for f, args, msg in actions['f']: + for f, args, msg in actions.get('f', []): repo.dirstate.drop(f) # re-add - for f, args, msg in actions['a']: - if not branchmerge: + for f, args, msg in actions.get('a', []): + repo.dirstate.add(f) + + # re-add/mark as modified + for f, args, msg in actions.get('am', []): + if branchmerge: + repo.dirstate.normallookup(f) + else: repo.dirstate.add(f) # exec change - for f, args, msg in actions['e']: + for f, args, msg in actions.get('e', []): repo.dirstate.normallookup(f) # keep - for f, args, msg in actions['k']: + for f, args, msg in actions.get('k', []): pass # get - for f, args, msg in actions['g']: + for f, args, msg in actions.get('g', []): if branchmerge: repo.dirstate.otherparent(f) else: repo.dirstate.normal(f) # merge - for f, args, msg in actions['m']: + for f, args, msg in actions.get('m', []): f1, f2, fa, move, anc = args if branchmerge: # We've done a branch merge, mark this file as merged @@ -1074,7 +1249,7 @@ repo.dirstate.drop(f1) # directory rename, move local - for f, args, msg in actions['dm']: + for f, args, msg in actions.get('dm', []): f0, flag = args if branchmerge: repo.dirstate.add(f) @@ -1085,7 +1260,7 @@ repo.dirstate.drop(f0) # directory rename, get - for f, args, msg in actions['dg']: + for f, args, msg in actions.get('dg', []): f0, flag = args if branchmerge: repo.dirstate.add(f) @@ -1093,15 +1268,15 @@ else: repo.dirstate.normal(f) -def update(repo, node, branchmerge, force, partial, ancestor=None, - mergeancestor=False, labels=None): +def update(repo, node, branchmerge, force, ancestor=None, + mergeancestor=False, labels=None, matcher=None): """ Perform a merge between the working directory and the given node node = the node to update to, or None if unspecified branchmerge = whether to merge between branches force = whether to force branch merging or file overwriting - partial = a function to filter file lists (dirstate not updated) + matcher = a matcher to filter file lists (dirstate not updated) mergeancestor = whether it is merging with an ancestor. If true, we should accept the incoming changes for any prompts that occur. If false, merging with an ancestor (fast-forward) is only allowed @@ -1140,6 +1315,13 @@ onode = node wlock = repo.wlock() + # If we're doing a partial update, we need to skip updating + # the dirstate, so make a note of any partial-ness to the + # update here. 
+ if matcher is None or matcher.always(): + partial = False + else: + partial = True try: wc = repo[None] pl = wc.parents() @@ -1168,8 +1350,12 @@ fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2) ### check phase - if not overwrite and len(pl) > 1: - raise error.Abort(_("outstanding uncommitted merge")) + if not overwrite: + if len(pl) > 1: + raise error.Abort(_("outstanding uncommitted merge")) + ms = mergestate.read(repo) + if list(ms.unresolved()): + raise error.Abort(_("outstanding merge conflicts")) if branchmerge: if pas == [p2]: raise error.Abort(_("merging with a working directory ancestor" @@ -1231,10 +1417,10 @@ ### calculate phase actionbyfile, diverge, renamedelete = calculateupdates( - repo, wc, p2, pas, branchmerge, force, partial, mergeancestor, - followcopies) + repo, wc, p2, pas, branchmerge, force, mergeancestor, + followcopies, matcher=matcher) # Convert to dictionary-of-lists format - actions = dict((m, []) for m in 'a f g cd dc r dm dg m e k'.split()) + actions = dict((m, []) for m in 'a am f g cd dc r dm dg m e k'.split()) for f, (m, args, msg) in actionbyfile.iteritems(): if m not in actions: actions[m] = [] @@ -1248,35 +1434,32 @@ else: _checkcollision(repo, wc.manifest(), actions) - # Prompt and create actions. TODO: Move this towards resolve phase. + # Prompt and create actions. Most of this is in the resolve phase + # already, but we can't handle .hgsubstate in filemerge or + # subrepo.submerge yet so we have to keep prompting for it. for f, args, msg in sorted(actions['cd']): + if f != '.hgsubstate': + continue if repo.ui.promptchoice( _("local changed %s which remote deleted\n" "use (c)hanged version or (d)elete?" "$$ &Changed $$ &Delete") % f, 0): actions['r'].append((f, None, "prompt delete")) + elif f in p1: + actions['am'].append((f, None, "prompt keep")) else: actions['a'].append((f, None, "prompt keep")) - del actions['cd'][:] for f, args, msg in sorted(actions['dc']): - flags, = args + if f != '.hgsubstate': + continue + f1, f2, fa, move, anc = args + flags = p2[f2].flags() if repo.ui.promptchoice( _("remote changed %s which local deleted\n" "use (c)hanged version or leave (d)eleted?" "$$ &Changed $$ &Deleted") % f, 0) == 0: actions['g'].append((f, (flags,), "prompt recreating")) - del actions['dc'][:] - - ### apply phase - if not branchmerge: # just jump to the new rev - fp1, fp2, xp1, xp2 = fp2, nullid, xp2, '' - if not partial: - repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2) - # note that we're in the middle of an update - repo.vfs.write('updatestate', p2.hex()) - - stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels) # divergent renames for f, fl in sorted(diverge.iteritems()): @@ -1292,6 +1475,16 @@ for nf in fl: repo.ui.warn(" %s\n" % nf) + ### apply phase + if not branchmerge: # just jump to the new rev + fp1, fp2, xp1, xp2 = fp2, nullid, xp2, '' + if not partial: + repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2) + # note that we're in the middle of an update + repo.vfs.write('updatestate', p2.hex()) + + stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels) + if not partial: repo.dirstate.beginparentchange() repo.setparents(fp1, fp2) @@ -1309,18 +1502,19 @@ repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3]) return stats -def graft(repo, ctx, pctx, labels): +def graft(repo, ctx, pctx, labels, keepparent=False): """Do a graft-like merge. This is a merge where the merge ancestor is chosen such that one or more changesets are grafted onto the current changeset. 
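update() above converts the per-file action mapping returned by calculateupdates() into the dictionary-of-lists form that the apply phase iterates. The same grouping as a standalone snippet (bytype and the sample actionbyfile are invented for illustration)::

    def bytype(actionbyfile, known='a am f g cd dc r dm dg m e k'):
        # group file -> (action, args, msg) into action -> [(file, args, msg)]
        actions = dict((m, []) for m in known.split())
        for f, (m, args, msg) in sorted(actionbyfile.items()):
            actions.setdefault(m, []).append((f, args, msg))
        return actions

    actionbyfile = {
        'a.txt': ('g', ('',), 'remote is newer'),
        'b.txt': ('r', None, 'other deleted'),
    }
    actions = bytype(actionbyfile)
    print(actions['g'])   # [('a.txt', ('',), 'remote is newer')]
    print(actions['k'])   # [] -- every known action code is present, even if unused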
In addition to the merge, this fixes up the dirstate to include only - a single parent and tries to duplicate any renames/copies - appropriately. + a single parent (if keepparent is False) and tries to duplicate any + renames/copies appropriately. ctx - changeset to rebase pctx - merge base, usually ctx.p1() labels - merge labels eg ['local', 'graft'] + keepparent - keep second parent if any """ # If we're grafting a descendant onto an ancestor, be sure to pass @@ -1331,12 +1525,17 @@ # which local deleted". mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node()) - stats = update(repo, ctx.node(), True, True, False, pctx.node(), + stats = update(repo, ctx.node(), True, True, pctx.node(), mergeancestor=mergeancestor, labels=labels) - # drop the second merge parent + pother = nullid + parents = ctx.parents() + if keepparent and len(parents) == 2 and pctx in parents: + parents.remove(pctx) + pother = parents[0].node() + repo.dirstate.beginparentchange() - repo.setparents(repo['.'].node(), nullid) + repo.setparents(repo['.'].node(), pother) repo.dirstate.write(repo.currenttransaction()) # fix up dirstate for copies and renames copies.duplicatecopies(repo, ctx.rev(), pctx.rev()) diff -r e240e914d226 -r 8f016345e6b0 mercurial/node.py --- a/mercurial/node.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/node.py Fri Dec 18 14:40:11 2015 -0600 @@ -9,17 +9,18 @@ import binascii +# This ugly style has a noticeable effect in manifest parsing +hex = binascii.hexlify +bin = binascii.unhexlify + nullrev = -1 nullid = "\0" * 20 +nullhex = hex(nullid) # pseudo identifiers for working directory # (they are experimental, so don't add too many dependencies on them) wdirrev = 0x7fffffff wdirid = "\xff" * 20 -# This ugly style has a noticeable effect in manifest parsing -hex = binascii.hexlify -bin = binascii.unhexlify - def short(node): return hex(node[:6]) diff -r e240e914d226 -r 8f016345e6b0 mercurial/obsolete.py --- a/mercurial/obsolete.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/obsolete.py Fri Dec 18 14:40:11 2015 -0600 @@ -67,10 +67,20 @@ comment associated with each format for details. """ -import errno, struct -import util, base85, node, parsers, error -import phases -from i18n import _ +from __future__ import absolute_import + +import errno +import struct + +from .i18n import _ +from . import ( + base85, + error, + node, + parsers, + phases, + util, +) _pack = struct.pack _unpack = struct.unpack diff -r e240e914d226 -r 8f016345e6b0 mercurial/osutil.c --- a/mercurial/osutil.c Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/osutil.c Fri Dec 18 14:40:11 2015 -0600 @@ -613,9 +613,14 @@ int ret, kind; char *path; + /* With a large file count or on a slow filesystem, + don't block signals for long (issue4878). 
*/ + if ((i % 1000) == 999 && PyErr_CheckSignals() == -1) + goto bail; + pypath = PySequence_GetItem(names, i); if (!pypath) - return NULL; + goto bail; path = PyString_AsString(pypath); if (path == NULL) { Py_DECREF(pypath); diff -r e240e914d226 -r 8f016345e6b0 mercurial/parsers.c --- a/mercurial/parsers.c Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/parsers.c Fri Dec 18 14:40:11 2015 -0600 @@ -1286,19 +1286,21 @@ long phase; if (!PyArg_ParseTuple(args, "O", &roots)) - goto release_none; + goto done; if (roots == NULL || !PyList_Check(roots)) - goto release_none; + goto done; phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */ - if (phases == NULL) - goto release_none; + if (phases == NULL) { + PyErr_NoMemory(); + goto done; + } /* Put the phase information of all the roots in phases */ numphase = PyList_GET_SIZE(roots)+1; minrevallphases = len + 1; phasessetlist = PyList_New(numphase); if (phasessetlist == NULL) - goto release_none; + goto done; PyList_SET_ITEM(phasessetlist, 0, Py_None); Py_INCREF(Py_None); @@ -1307,13 +1309,13 @@ phaseroots = PyList_GET_ITEM(roots, i); phaseset = PySet_New(NULL); if (phaseset == NULL) - goto release_phasesetlist; + goto release; PyList_SET_ITEM(phasessetlist, i+1, phaseset); if (!PyList_Check(phaseroots)) - goto release_phasesetlist; + goto release; minrevphase = add_roots_get_min(self, phaseroots, i+1, phases); if (minrevphase == -2) /* Error from add_roots_get_min */ - goto release_phasesetlist; + goto release; minrevallphases = MIN(minrevallphases, minrevphase); } /* Propagate the phase information from the roots to the revs */ @@ -1322,43 +1324,40 @@ for (i = minrevallphases; i < len; i++) { if (index_get_parents(self, i, parents, (int)len - 1) < 0) - goto release_phasesetlist; + goto release; set_phase_from_parents(phases, parents[0], parents[1], i); } } /* Transform phase list to a python list */ phaseslist = PyList_New(len); if (phaseslist == NULL) - goto release_phasesetlist; + goto release; for (i = 0; i < len; i++) { + PyObject *phaseval; + phase = phases[i]; /* We only store the sets of phase for non public phase, the public phase * is computed as a difference */ if (phase != 0) { phaseset = PyList_GET_ITEM(phasessetlist, phase); rev = PyInt_FromLong(i); + if (rev == NULL) + goto release; PySet_Add(phaseset, rev); Py_XDECREF(rev); } - PyList_SET_ITEM(phaseslist, i, PyInt_FromLong(phase)); + phaseval = PyInt_FromLong(phase); + if (phaseval == NULL) + goto release; + PyList_SET_ITEM(phaseslist, i, phaseval); } - ret = PyList_New(2); - if (ret == NULL) - goto release_phaseslist; - - PyList_SET_ITEM(ret, 0, phaseslist); - PyList_SET_ITEM(ret, 1, phasessetlist); - /* We don't release phaseslist and phasessetlist as we return them to - * python */ - goto release_phases; - -release_phaseslist: + ret = PyTuple_Pack(2, phaseslist, phasessetlist); + +release: Py_XDECREF(phaseslist); -release_phasesetlist: Py_XDECREF(phasessetlist); -release_phases: +done: free(phases); -release_none: return ret; } @@ -1404,8 +1403,10 @@ } nothead = calloc(len, 1); - if (nothead == NULL) + if (nothead == NULL) { + PyErr_NoMemory(); goto bail; + } for (i = 0; i < len; i++) { int isfiltered; @@ -1995,19 +1996,19 @@ for (i = 0; i < 2; i++) { int p = parents[i]; - long nsp, sp; + long sp; int dp; if (p == -1) continue; dp = depth[p]; - nsp = sp = seen[p]; + sp = seen[p]; if (dp <= dv) { depth[p] = dv + 1; if (sp != sv) { interesting[sv] += 1; - nsp = seen[p] = sv; + seen[p] = sv; if (sp) { interesting[sp] -= 1; if (interesting[sp] == 0) @@ 
-2016,7 +2017,7 @@ } } else if (dv == dp - 1) { - nsp = sp | sv; + long nsp = sp | sv; if (nsp == sp) continue; seen[p] = nsp; diff -r e240e914d226 -r 8f016345e6b0 mercurial/patch.py --- a/mercurial/patch.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/patch.py Fri Dec 18 14:40:11 2015 -0600 @@ -1106,8 +1106,8 @@ applied[newhunk.filename()].append(newhunk) else: fixoffset += chunk.removed - chunk.added - return sum([h for h in applied.itervalues() - if h[0].special() or len(h) > 1], []) + return (sum([h for h in applied.itervalues() + if h[0].special() or len(h) > 1], []), {}) class hunk(object): def __init__(self, desc, num, lr, context): self.number = num @@ -1491,7 +1491,6 @@ self.toline += len(self.before) + h.added self.before = [] self.hunk = [] - self.proc = '' self.context = context def addhunk(self, hunk): @@ -2146,7 +2145,7 @@ 'ignoreblanklines') if formatchanging: buildopts['text'] = opts and opts.get('text') - buildopts['nobinary'] = get('nobinary') + buildopts['nobinary'] = get('nobinary', forceplain=False) buildopts['noprefix'] = get('noprefix', forceplain=False) return mdiff.diffopts(**buildopts) diff -r e240e914d226 -r 8f016345e6b0 mercurial/pathencode.c --- a/mercurial/pathencode.c Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/pathencode.c Fri Dec 18 14:40:11 2015 -0600 @@ -517,8 +517,7 @@ newlen = _lowerencode(NULL, 0, path, len); ret = PyString_FromStringAndSize(NULL, newlen); if (ret) - newlen = _lowerencode(PyString_AS_STRING(ret), newlen, - path, len); + _lowerencode(PyString_AS_STRING(ret), newlen, path, len); return ret; } diff -r e240e914d226 -r 8f016345e6b0 mercurial/pathutil.py --- a/mercurial/pathutil.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/pathutil.py Fri Dec 18 14:40:11 2015 -0600 @@ -23,15 +23,22 @@ - under top-level .hg - starts at the root of a windows drive - contains ".." + + More check are also done about the file system states: - traverses a symlink (e.g. a/symlink_here/b) - inside a nested repository (a callback can be used to approve some nested repositories, e.g., subrepositories) + + The file system checks are only done when 'realfs' is set to True (the + default). They should be disable then we are auditing path for operation on + stored history. ''' - def __init__(self, root, callback=None): + def __init__(self, root, callback=None, realfs=True): self.audited = set() self.auditeddir = set() self.root = root + self._realfs = realfs self.callback = callback if os.path.lexists(root) and not util.checkcase(root): self.normcase = util.normcase @@ -81,25 +88,8 @@ normprefix = os.sep.join(normparts) if normprefix in self.auditeddir: break - curpath = os.path.join(self.root, prefix) - try: - st = os.lstat(curpath) - except OSError as err: - # EINVAL can be raised as invalid path syntax under win32. - # They must be ignored for patterns can be checked too. 
- if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): - raise - else: - if stat.S_ISLNK(st.st_mode): - raise error.Abort( - _('path %r traverses symbolic link %r') - % (path, prefix)) - elif (stat.S_ISDIR(st.st_mode) and - os.path.isdir(os.path.join(curpath, '.hg'))): - if not self.callback or not self.callback(curpath): - raise error.Abort(_("path '%s' is inside nested " - "repo %r") - % (path, prefix)) + if self._realfs: + self._checkfs(prefix, path) prefixes.append(normprefix) parts.pop() normparts.pop() @@ -109,6 +99,26 @@ # want to add "foo/bar/baz" before checking if there's a "foo/.hg" self.auditeddir.update(prefixes) + def _checkfs(self, prefix, path): + """raise exception if a file system backed check fails""" + curpath = os.path.join(self.root, prefix) + try: + st = os.lstat(curpath) + except OSError as err: + # EINVAL can be raised as invalid path syntax under win32. + # They must be ignored for patterns can be checked too. + if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): + raise + else: + if stat.S_ISLNK(st.st_mode): + msg = _('path %r traverses symbolic link %r') % (path, prefix) + raise error.Abort(msg) + elif (stat.S_ISDIR(st.st_mode) and + os.path.isdir(os.path.join(curpath, '.hg'))): + if not self.callback or not self.callback(curpath): + msg = _("path '%s' is inside nested repo %r") + raise error.Abort(msg % (path, prefix)) + def check(self, path): try: self(path) diff -r e240e914d226 -r 8f016345e6b0 mercurial/phases.py --- a/mercurial/phases.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/phases.py Fri Dec 18 14:40:11 2015 -0600 @@ -308,9 +308,19 @@ raise error.Abort(_('cannot change null revision phase')) currentroots = currentroots.copy() currentroots.update(newroots) - ctxs = repo.set('roots(%ln::)', currentroots) - currentroots.intersection_update(ctx.node() for ctx in ctxs) - self._updateroots(targetphase, currentroots, tr) + + # Only compute new roots for revs above the roots that are being + # retracted. + minnewroot = min(repo[n].rev() for n in newroots) + aboveroots = [n for n in currentroots + if repo[n].rev() >= minnewroot] + updatedroots = repo.set('roots(%ln::)', aboveroots) + + finalroots = set(n for n in currentroots if repo[n].rev() < + minnewroot) + finalroots.update(ctx.node() for ctx in updatedroots) + + self._updateroots(targetphase, finalroots, tr) repo.invalidatevolatilesets() def filterunknown(self, repo): diff -r e240e914d226 -r 8f016345e6b0 mercurial/posix.py --- a/mercurial/posix.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/posix.py Fri Dec 18 14:40:11 2015 -0600 @@ -15,7 +15,6 @@ import pwd import re import select -import socket import stat import sys import tempfile @@ -29,7 +28,16 @@ posixfile = open normpath = os.path.normpath samestat = os.path.samestat -oslink = os.link +try: + oslink = os.link +except AttributeError: + # Some platforms build Python without os.link on systems that are + # vaguely unix-like but don't have hardlink support. For those + # poor souls, just say we tried and that it failed so we fall back + # to copies. 
+ def oslink(src, dst): + raise OSError(errno.EINVAL, + 'hardlinks not supported: %s to %s' % (src, dst)) unlink = os.unlink rename = os.rename removedirs = os.removedirs @@ -261,40 +269,17 @@ except UnicodeDecodeError: # OS X percent-encodes any bytes that aren't valid utf-8 s = '' - g = '' - l = 0 - for c in path: - o = ord(c) - if l and o < 128 or o >= 192: - # we want a continuation byte, but didn't get one - s += ''.join(["%%%02X" % ord(x) for x in g]) - g = '' - l = 0 - if l == 0 and o < 128: - # ascii - s += c - elif l == 0 and 194 <= o < 245: - # valid leading bytes - if o < 224: - l = 1 - elif o < 240: - l = 2 - else: - l = 3 - g = c - elif l > 0 and 128 <= o < 192: - # valid continuations - g += c - l -= 1 - if not l: - s += g - g = '' - else: - # invalid - s += "%%%02X" % o + pos = 0 + l = len(path) + while pos < l: + try: + c = encoding.getutf8char(path, pos) + pos += len(c) + except ValueError: + c = '%%%02X' % ord(path[pos]) + pos += 1 + s += c - # any remaining partial characters - s += ''.join(["%%%02X" % ord(x) for x in g]) u = s.decode('utf-8') # Decompose then lowercase (HFS+ technote specifies lower) @@ -569,46 +554,6 @@ def executablepath(): return None # available on Windows only -class unixdomainserver(socket.socket): - def __init__(self, join, subsystem): - '''Create a unix domain socket with the given prefix.''' - super(unixdomainserver, self).__init__(socket.AF_UNIX) - sockname = subsystem + '.sock' - self.realpath = self.path = join(sockname) - if os.path.islink(self.path): - if os.path.exists(self.path): - self.realpath = os.readlink(self.path) - else: - os.unlink(self.path) - try: - self.bind(self.realpath) - except socket.error as err: - if err.args[0] == 'AF_UNIX path too long': - tmpdir = tempfile.mkdtemp(prefix='hg-%s-' % subsystem) - self.realpath = os.path.join(tmpdir, sockname) - try: - self.bind(self.realpath) - os.symlink(self.realpath, self.path) - except (OSError, socket.error): - self.cleanup() - raise - else: - raise - self.listen(5) - - def cleanup(self): - def okayifmissing(f, path): - try: - f(path) - except OSError as err: - if err.errno != errno.ENOENT: - raise - - okayifmissing(os.unlink, self.path) - if self.realpath != self.path: - okayifmissing(os.unlink, self.realpath) - okayifmissing(os.rmdir, os.path.dirname(self.realpath)) - def statislink(st): '''check whether a stat result is a symlink''' return st and stat.S_ISLNK(st.st_mode) diff -r e240e914d226 -r 8f016345e6b0 mercurial/pure/base85.py --- a/mercurial/pure/base85.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/pure/base85.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,6 +5,8 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import + import struct _b85chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" \ diff -r e240e914d226 -r 8f016345e6b0 mercurial/pure/bdiff.py --- a/mercurial/pure/bdiff.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/pure/bdiff.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,7 +5,11 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
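The posix.py hunk above guards against Python builds that lack os.link by installing a stub that raises, so callers detect the failure and fall back to copying. The same feature-detection idiom in isolation (linkorcopy and its shutil fallback are illustrative, not what Mercurial itself does)::

    from __future__ import absolute_import

    import errno
    import os
    import shutil

    try:
        oslink = os.link
    except AttributeError:
        # no hardlink support: raise so callers fall back to copying
        def oslink(src, dst):
            raise OSError(errno.EINVAL,
                          'hardlinks not supported: %s to %s' % (src, dst))

    def linkorcopy(src, dst):
        try:
            oslink(src, dst)
        except OSError:
            shutil.copyfile(src, dst)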
-import struct, difflib, re +from __future__ import absolute_import + +import difflib +import re +import struct def splitnewlines(text): '''like str.splitlines, but only split on newlines.''' diff -r e240e914d226 -r 8f016345e6b0 mercurial/pure/diffhelpers.py --- a/mercurial/pure/diffhelpers.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/pure/diffhelpers.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,6 +5,8 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import + def addlines(fp, hunk, lena, lenb, a, b): while True: todoa = lena - len(a) diff -r e240e914d226 -r 8f016345e6b0 mercurial/pure/mpatch.py --- a/mercurial/pure/mpatch.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/pure/mpatch.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,11 +5,12 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import + +import cStringIO import struct -try: - from cStringIO import StringIO -except ImportError: - from StringIO import StringIO + +StringIO = cStringIO.StringIO # This attempts to apply a series of patches in time proportional to # the total size of the patches, rather than patches * len(text). This diff -r e240e914d226 -r 8f016345e6b0 mercurial/pure/osutil.py --- a/mercurial/pure/osutil.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/pure/osutil.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,6 +5,8 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import + import os import stat as statmod @@ -58,7 +60,8 @@ if os.name != 'nt': posixfile = open else: - import ctypes, msvcrt + import ctypes + import msvcrt _kernel32 = ctypes.windll.kernel32 diff -r e240e914d226 -r 8f016345e6b0 mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/pure/parsers.py Fri Dec 18 14:40:11 2015 -0600 @@ -5,8 +5,13 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
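Much of the mechanical churn in this patch is the absolute_import conversion of the pure/ modules: a __future__ import, standard-library imports one per line, then explicit relative imports from the containing package. The rough before/after shape of such a module header (only meaningful inside the mercurial package, so treat it as a template rather than a runnable script)::

    # old style: implicit relative imports, several modules per line
    #   import struct, zlib, cStringIO
    #   from mercurial.node import nullid

    # new style
    from __future__ import absolute_import

    import struct

    from .node import nullid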
-from mercurial.node import nullid -import struct, zlib, cStringIO +from __future__ import absolute_import + +import cStringIO +import struct +import zlib + +from .node import nullid _pack = struct.pack _unpack = struct.unpack diff -r e240e914d226 -r 8f016345e6b0 mercurial/repair.py --- a/mercurial/repair.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/repair.py Fri Dec 18 14:40:11 2015 -0600 @@ -203,6 +203,18 @@ repo.ui.popbuffer() f.close() + for m in updatebm: + bm[m] = repo[newbmtarget].node() + lock = tr = None + try: + lock = repo.lock() + tr = repo.transaction('repair') + bm.recordchange(tr) + tr.close() + finally: + tr.release() + lock.release() + # remove undo files for undovfs, undofile in repo.undofiles(): try: @@ -212,9 +224,6 @@ ui.warn(_('error removing %s: %s\n') % (undovfs.join(undofile), str(e))) - for m in updatebm: - bm[m] = repo[newbmtarget].node() - bm.write() except: # re-raises if backupfile: ui.warn(_("strip failed, full bundle stored in '%s'\n") diff -r e240e914d226 -r 8f016345e6b0 mercurial/repoview.py --- a/mercurial/repoview.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/repoview.py Fri Dec 18 14:40:11 2015 -0600 @@ -300,22 +300,16 @@ # some cache may be implemented later unfi = self._unfilteredrepo unfichangelog = unfi.changelog + # bypass call to changelog.method + unfiindex = unfichangelog.index + unfilen = len(unfiindex) - 1 + unfinode = unfiindex[unfilen - 1][7] + revs = filterrevs(unfi, self.filtername) cl = self._clcache - newkey = (len(unfichangelog), unfichangelog.tip(), hash(revs), - unfichangelog._delayed) - if cl is not None: - # we need to check curkey too for some obscure reason. - # MQ test show a corruption of the underlying repo (in _clcache) - # without change in the cachekey. - oldfilter = cl.filteredrevs - try: - cl.filteredrevs = () # disable filtering for tip - curkey = (len(cl), cl.tip(), hash(oldfilter), cl._delayed) - finally: - cl.filteredrevs = oldfilter - if newkey != self._clcachekey or newkey != curkey: - cl = None + newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed) + if cl is not None and newkey != self._clcachekey: + cl = None # could have been made None by the previous if if cl is None: cl = copy.copy(unfichangelog) diff -r e240e914d226 -r 8f016345e6b0 mercurial/revlog.py --- a/mercurial/revlog.py Thu Dec 17 17:16:02 2015 -0800 +++ b/mercurial/revlog.py Fri Dec 18 14:40:11 2015 -0600 @@ -11,13 +11,30 @@ and O(changes) merge between branches. """ -# import stuff from node for others to import from revlog +from __future__ import absolute_import + import collections +import errno import os -from node import bin, hex, nullid, nullrev -from i18n import _ -import ancestor, mdiff, parsers, error, util, templatefilters -import struct, zlib, errno +import struct +import zlib + +# import stuff from node for others to import from revlog +from .node import ( + bin, + hex, + nullid, + nullrev, +) +from .i18n import _ +from . import ( + ancestor, + error, + mdiff, + parsers, + templatefilters, + util, +) _pack = struct.pack _unpack = struct.unpack @@ -206,14 +223,21 @@ self.indexfile = indexfile self.datafile = indexfile[:-2] + ".d" self.opener = opener + # 3-tuple of (node, rev, text) for a raw revision. self._cache = None + # 2-tuple of (rev, baserev) defining the base revision the delta chain + # begins at for a revision. self._basecache = None + # 2-tuple of (offset, data) of raw data from the revlog at an offset. self._chunkcache = (0, '') + # How much data to read and cache into the raw revlog data cache. 
         self._chunkcachesize = 65536
         self._maxchainlen = None
         self._aggressivemergedeltas = False
         self.index = []
+        # Mapping of partial identifiers to full nodes.
         self._pcache = {}
+        # Mapping of revision integer to full node.
         self._nodecache = {nullid: nullrev}
         self._nodepos = None
 
@@ -231,6 +255,7 @@
             self._maxchainlen = opts['maxchainlen']
         if 'aggressivemergedeltas' in opts:
             self._aggressivemergedeltas = opts['aggressivemergedeltas']
+        self._lazydeltabase = bool(opts.get('lazydeltabase', False))
 
         if self._chunkcachesize <= 0:
             raise RevlogError(_('revlog chunk cache size %r is not greater '
@@ -926,6 +951,10 @@
         return hash(text, p1, p2) != node
 
     def _addchunk(self, offset, data):
+        """Add a segment to the revlog cache.
+
+        Accepts an absolute offset and the data that is at that location.
+        """
         o, d = self._chunkcache
         # try to add to existing cache
         if o + len(d) == offset and len(d) + len(data) < _chunksize:
@@ -934,13 +963,15 @@
         self._chunkcache = offset, data
 
     def _loadchunk(self, offset, length, df=None):
-        """Load a chunk/segment from the revlog.
+        """Load a segment of raw data from the revlog.
 
-        Accepts absolute offset, length to read, and an optional existing
+        Accepts an absolute offset, length to read, and an optional existing
         file handle to read from.
 
         If an existing file handle is passed, it will be seeked and the
         original seek position will NOT be restored.
+
+        Returns a str or buffer of raw byte data.
         """
         if df is not None:
             closehandle = False
@@ -968,6 +999,16 @@
         return d
 
     def _getchunk(self, offset, length, df=None):
+        """Obtain a segment of raw data from the revlog.
+
+        Accepts an absolute offset, length of bytes to obtain, and an
+        optional file handle to the already-opened revlog. If the file
+        handle is used, its original seek position will not be preserved.
+
+        Requests for data may be returned from a cache.
+
+        Returns a str or a buffer instance of raw byte data.
+        """
         o, d = self._chunkcache
         l = len(d)
 
@@ -982,6 +1023,18 @@
         return self._loadchunk(offset, length, df=df)
 
     def _chunkraw(self, startrev, endrev, df=None):
+        """Obtain a segment of raw data corresponding to a range of revisions.
+
+        Accepts the start and end revisions and an optional already-open
+        file handle to be used for reading. If the file handle is read, its
+        seek position will not be preserved.
+
+        Requests for data may be satisfied by a cache.
+
+        Returns a str or a buffer instance of raw byte data. Callers will
+        need to call ``self.start(rev)`` and ``self.length()`` to determine
+        where each revision's data begins and ends.
+        """
         start = self.start(startrev)
         end = self.end(endrev)
         if self._inline:
@@ -991,12 +1044,29 @@
         return self._getchunk(start, length, df=df)
 
     def _chunk(self, rev, df=None):
+        """Obtain a single decompressed chunk for a revision.
+
+        Accepts an integer revision and an optional already-open file handle
+        to be used for reading. If used, the seek position of the file will not
+        be preserved.
+
+        Returns a str holding uncompressed data for the requested revision.
+        """
         return decompress(self._chunkraw(rev, rev, df=df))
 
     def _chunks(self, revs, df=None):
-        '''faster version of [self._chunk(rev) for rev in revs]
+        """Obtain decompressed chunks for the specified revisions.
 
-        Assumes that revs is in ascending order.'''
+        Accepts an iterable of numeric revisions that are assumed to be in
+        ascending order. Also accepts an optional already-open file handle
+        to be used for reading. If used, the seek position of the file will
+        not be preserved.
+
+        This function is similar to calling ``self._chunk()`` multiple times,
+        but is faster.
+
+        Returns a list with decompressed data for each requested revision.
+        """
         if not revs:
             return []
         start = self.start
@@ -1032,6 +1102,7 @@
         return l
 
     def _chunkclear(self):
+        """Clear the raw chunk cache."""
         self._chunkcache = (0, '')
 
     def deltaparent(self, rev):
@@ -1353,9 +1424,8 @@
         curr = len(self)
         prev = curr - 1
         base = chainbase = curr
-        chainlen = None
         offset = self.end(prev)
-        d = None
+        delta = None
         if self._basecache is None:
             self._basecache = (prev, self.chainbase(prev))
         basecache = self._basecache
@@ -1371,40 +1441,39 @@
 
         # should we try to build a delta?
         if prev != nullrev:
-            if self._generaldelta:
-                if p2r != nullrev and self._aggressivemergedeltas:
-                    d = builddelta(p1r)
-                    d2 = builddelta(p2r)
-                    p1good = self._isgooddelta(d, textlen)
-                    p2good = self._isgooddelta(d2, textlen)
-                    if p1good and p2good:
-                        # If both are good deltas, choose the smallest
-                        if d2[1] < d[1]:
-                            d = d2
-                    elif p2good:
-                        # If only p2 is good, use it
-                        d = d2
-                    elif p1good:
-                        pass
-                    else:
-                        # Neither is good, try against prev to hopefully save us
-                        # a fulltext.
-                        d = builddelta(prev)
-                else:
+            tested = set()
+            if cachedelta and self._generaldelta and self._lazydeltabase:
+                # Assume what we received from the server is a good choice
+                # build delta will reuse the cache
+                candidatedelta = builddelta(cachedelta[0])
+                tested.add(cachedelta[0])
+                if self._isgooddelta(candidatedelta, textlen):
+                    delta = candidatedelta
+            if delta is None and self._generaldelta:
+                # exclude already lazy tested base if any
+                parents = [p for p in (p1r, p2r)
+                           if p != nullrev and p not in tested]
+                if parents and not self._aggressivemergedeltas:
                     # Pick whichever parent is closer to us (to minimize the
-                    # chance of having to build a fulltext). Since
-                    # nullrev == -1, any non-merge commit will always pick p1r.
-                    drev = p2r if p2r > p1r else p1r
-                    d = builddelta(drev)
-                # If the chosen delta will result in us making a full text,
-                # give it one last try against prev.
-                if drev != prev and not self._isgooddelta(d, textlen):
-                    d = builddelta(prev)
-            else:
-                d = builddelta(prev)
-            dist, l, data, base, chainbase, chainlen, compresseddeltalen = d
-
-        if not self._isgooddelta(d, textlen):
+                    # chance of having to build a fulltext).
+                    parents = [max(parents)]
+                tested.update(parents)
+                pdeltas = []
+                for p in parents:
+                    pd = builddelta(p)
+                    if self._isgooddelta(pd, textlen):
+                        pdeltas.append(pd)
+                if pdeltas:
+                    delta = min(pdeltas, key=lambda x: x[1])
+            if delta is None and prev not in tested:
+                # other approach failed, try against prev to hopefully save us a
+                # fulltext.
+                candidatedelta = builddelta(prev)
+                if self._isgooddelta(candidatedelta, textlen):
+                    delta = candidatedelta
+        if delta is not None:
+            dist, l, data, base, chainbase, chainlen, compresseddeltalen = delta
+        else:
             text = buildtext()
             data = self.compress(text)
             l = len(data[1]) + len(data[0])
@@ -1438,7 +1507,7 @@
         # We work around this issue by inserting a seek() before writing.
         # Note: This is likely not necessary on Python 3.
         ifh.seek(0, os.SEEK_END)
-        if dfh: 
+        if dfh:
             dfh.seek(0, os.SEEK_END)
 
         curr = len(self) - 1
diff -r e240e914d226 -r 8f016345e6b0 mercurial/revset.py
--- a/mercurial/revset.py Thu Dec 17 17:16:02 2015 -0800
+++ b/mercurial/revset.py Fri Dec 18 14:40:11 2015 -0600
@@ -1164,8 +1164,16 @@
     m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
                        exclude=exc, ctx=repo[rev], default=default)
 
+    # This directly reads the changelog data as creating changectx for all
+    # revisions is quite expensive.
+    getchangeset = repo.changelog.read
+    wdirrev = node.wdirrev
     def matches(x):
-        for f in repo[x].files():
+        if x == wdirrev:
+            files = repo[x].files()
+        else:
+            files = getchangeset(x)[3]
+        for f in files:
             if m(f):
                 return True
         return False
@@ -1684,7 +1692,7 @@
     from . import hg # avoid start-up nasties
 
     # i18n: "remote" is a keyword
-    l = getargs(x, 0, 2, _("remote takes one, two or no arguments"))
+    l = getargs(x, 0, 2, _("remote takes zero, one, or two arguments"))
 
     q = '.'
     if len(l) > 0:
diff -r e240e914d226 -r 8f016345e6b0 mercurial/scmutil.py
--- a/mercurial/scmutil.py Thu Dec 17 17:16:02 2015 -0800
+++ b/mercurial/scmutil.py Fri Dec 18 14:40:11 2015 -0600
@@ -1170,3 +1170,16 @@
     subprocess."""
     return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                     **kwargs)
+
+def gdinitconfig(ui):
+    """helper function to know if a repo should be created as general delta
+    """
+    # experimental config: format.generaldelta
+    return (ui.configbool('format', 'generaldelta', False)
+            or ui.configbool('format', 'usegeneraldelta', True))
+
+def gddeltaconfig(ui):
+    """helper function to know if incoming delta should be optimised
+    """
+    # experimental config: format.generaldelta
+    return ui.configbool('format', 'generaldelta', False)
diff -r e240e914d226 -r 8f016345e6b0 mercurial/similar.py
--- a/mercurial/similar.py Thu Dec 17 17:16:02 2015 -0800
+++ b/mercurial/similar.py Fri Dec 18 14:40:11 2015 -0600
@@ -5,10 +5,14 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from i18n import _
-import util
-import mdiff
-import bdiff
+from __future__ import absolute_import
+
+from .i18n import _
+from . import (
+    bdiff,
+    mdiff,
+    util,
+)
 
 def _findexactmatches(repo, added, removed):
     '''find renamed files that have no changes
diff -r e240e914d226 -r 8f016345e6b0 mercurial/subrepo.py
--- a/mercurial/subrepo.py Thu Dec 17 17:16:02 2015 -0800
+++ b/mercurial/subrepo.py Fri Dec 18 14:40:11 2015 -0600
@@ -1910,7 +1910,7 @@
         status = self.status(None)
         names = status.modified
         for name in names:
-            bakname = "%s.orig" % name
+            bakname = cmdutil.origpath(self.ui, self._subparent, name)
             self.ui.note(_('saving current version of %s as %s\n') %
                          (name, bakname))
             self.wvfs.rename(name, bakname)
diff -r e240e914d226 -r 8f016345e6b0 mercurial/templatefilters.py
--- a/mercurial/templatefilters.py Thu Dec 17 17:16:02 2015 -0800
+++ b/mercurial/templatefilters.py Fri Dec 18 14:40:11 2015 -0600
@@ -223,7 +223,7 @@
         raise TypeError('cannot encode type %s' % obj.__class__.__name__)
 
 def _uescape(c):
-    if ord(c) < 0x80:
+    if 0x20 <= ord(c) < 0x80:
         return c
     else:
         return '\\u%04x' % ord(c)
diff -r e240e914d226 -r 8f016345e6b0 mercurial/templatekw.py
--- a/mercurial/templatekw.py Thu Dec 17 17:16:02 2015 -0800
+++ b/mercurial/templatekw.py Fri Dec 18 14:40:11 2015 -0600
@@ -7,7 +7,7 @@
 
 from __future__ import absolute_import
 
-from .node import hex
+from .node import hex, nullid
 from . import (
     error,
     hbisect,
@@ -340,6 +340,21 @@
     """
     return showlist('file', args['ctx'].files(), **args)
 
+def showgraphnode(repo, ctx, **args):
+    """:graphnode: String. The character representing the changeset node in
+    an ASCII revision graph"""
+    wpnodes = repo.dirstate.parents()
+    if wpnodes[1] == nullid:
+        wpnodes = wpnodes[:1]
+    if ctx.node() in wpnodes:
+        return '@'
+    elif ctx.obsolete():
+        return 'x'
+    elif ctx.closesbranch():
+        return '_'
+    else:
+        return 'o'
+
 def showlatesttag(**args):
     """:latesttag: List of strings. The global tags on the most recent globally
     tagged ancestor of this changeset.
@@ -518,6 +533,7 @@
     'file_dels': showfiledels,
     'file_mods': showfilemods,
     'files': showfiles,
+    'graphnode': showgraphnode,
     'latesttag': showlatesttag,
     'latesttagdistance': showlatesttagdistance,
     'manifest': showmanifest,
diff -r e240e914d226 -r 8f016345e6b0 mercurial/templater.py
--- a/mercurial/templater.py Thu Dec 17 17:16:02 2015 -0800
+++ b/mercurial/templater.py Fri Dec 18 14:40:11 2015 -0600
@@ -336,7 +336,7 @@
     specifying files to include or exclude."""
     if len(args) > 2:
         # i18n: "diff" is a keyword
-        raise error.ParseError(_("diff expects one, two or no arguments"))
+        raise error.ParseError(_("diff expects zero, one, or two arguments"))
 
     def getpatterns(i):
         if i < len(args):
diff -r e240e914d226 -r 8f016345e6b0 mercurial/templates/monoblue/map
--- a/mercurial/templates/monoblue/map Thu Dec 17 17:16:02 2015 -0800
+++ b/mercurial/templates/monoblue/map Fri Dec 18 14:40:11 2015 -0600
@@ -23,7 +23,7 @@
 naventry = '{label|escape} '
 navshortentry = '{label|escape} '
 navgraphentry = '{label|escape} '
-filenaventry = '{label|escape}'
+filenaventry = '{label|escape} '
 filedifflink = '{file|escape} '
 filenodelink = '
diff -r e240e914d226 -r 8f016345e6b0 mercurial/templates/paper/filelog.tmpl
--- a/mercurial/templates/paper/filelog.tmpl Thu Dec 17 17:16:02 2015 -0800
+++ b/mercurial/templates/paper/filelog.tmpl Fri Dec 18 14:40:11 2015 -0600
@@ -44,7 +44,10 @@
-<h3>log {file|escape}</h3>
+<h3>
+ log {file|escape} @ {rev}:{node|short}
+ {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
+</h3>
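
The most intricate hunk in this merge is the new delta-base selection in revlog.py's _addrevision(). As a reading aid only (not part of the patch), here is a minimal sketch of that candidate order, assuming a generaldelta revlog; choosedeltabase, builddelta and isgooddelta are hypothetical stand-ins for revlog's internal closures, and the real code also weighs chain length and cumulative compressed size when judging a delta:

# Sketch of the candidate order introduced by the revlog.py hunk above.
# builddelta(base) returns the delta as bytes; isgooddelta(delta) says
# whether that delta is acceptable.
def choosedeltabase(builddelta, isgooddelta, p1, p2, prev,
                    cachedbase=None, lazydeltabase=False,
                    aggressivemergedeltas=False):
    """Return (baserev, delta) or None when a full text should be stored."""
    nullrev = -1
    tested = set()

    # 1. Trust the delta base suggested by the incoming bundle, if enabled.
    if cachedbase is not None and lazydeltabase:
        tested.add(cachedbase)
        delta = builddelta(cachedbase)
        if isgooddelta(delta):
            return cachedbase, delta

    # 2. Try the parents: only the closest one, unless merge deltas are
    #    searched aggressively, in which case both are tried.
    parents = [p for p in (p1, p2) if p != nullrev and p not in tested]
    if parents and not aggressivemergedeltas:
        parents = [max(parents)]
    tested.update(parents)
    candidates = [(p, builddelta(p)) for p in parents]
    good = [(p, d) for p, d in candidates if isgooddelta(d)]
    if good:
        # prefer the smallest acceptable delta
        return min(good, key=lambda pd: len(pd[1]))

    # 3. Last resort: delta against the previous revision, unless it was
    #    already tested above.
    if prev not in tested:
        delta = builddelta(prev)
        if isgooddelta(delta):
            return prev, delta

    return None

For example, choosedeltabase(lambda r: b'd%d' % r, lambda d: len(d) < 3, p1=5, p2=-1, prev=6) returns (5, b'd5'): the parent delta wins before the previous revision is even tried, which mirrors the preference order the rewritten hunk establishes.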