# HG changeset patch
# User Matt Mackall
# Date 1452544032 21600
# Node ID 4571c0b383378f5eec5189e3a39c672a169ce566
# Parent  374fad80ce6975447d7abf55c5127e776f1ad4b6
# Parent  6c7d26cef0cda4752c02b7478f1270a84393510b
merge with stable

diff -r 6c7d26cef0cd -r 4571c0b38337 .hgignore
--- a/.hgignore	Fri Jan 08 16:27:25 2016 +0100
+++ b/.hgignore	Mon Jan 11 14:27:12 2016 -0600
@@ -20,6 +20,7 @@
 \#*\#
 .\#*
 tests/.coverage*
+tests/.testtimes*
 tests/annotated
 tests/*.err
 tests/htmlcov
diff -r 6c7d26cef0cd -r 4571c0b38337 Makefile
--- a/Makefile	Fri Jan 08 16:27:25 2016 +0100
+++ b/Makefile	Mon Jan 11 14:27:12 2016 -0600
@@ -161,12 +161,11 @@
 	rm -rf dist/mercurial-*.mpkg
 
 deb:
-	mkdir -p packages/debian-unknown
-	contrib/builddeb --release unknown
+	contrib/builddeb
 
 docker-debian-jessie:
 	mkdir -p packages/debian-jessie
-	contrib/dockerdeb jessie
+	contrib/dockerdeb debian jessie
 
 fedora20:
 	mkdir -p packages/fedora20
diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/Makefile.python
--- a/contrib/Makefile.python	Fri Jan 08 16:27:25 2016 +0100
+++ b/contrib/Makefile.python	Mon Jan 11 14:27:12 2016 -0600
@@ -47,8 +47,8 @@
 	[ -f $(PYTHON_SRCFILE) ] || wget http://www.python.org/ftp/python/$(PYTHONVER)/$(PYTHON_SRCFILE) || curl -OL http://www.python.org/ftp/python/$(PYTHONVER)/$(PYTHON_SRCFILE) || [ -f $(PYTHON_SRCFILE) ]
 	rm -rf $(PYTHON_SRCDIR)
 	tar xf $(PYTHON_SRCFILE)
-	# Ubuntu disables SSLv2 the hard way, disable it on old Pythons too
-	-sed -i 's,self.*SSLv2_method(),0;//\0,g' $(PYTHON_SRCDIR)/Modules/_ssl.c
+	# Debian/Ubuntu disables SSLv2,3 the hard way, disable it on old Pythons too
+	-sed -i 's,self.*SSLv[23]_method(),0;//\0,g' $(PYTHON_SRCDIR)/Modules/_ssl.c
 	# Find multiarch system libraries on Ubuntu and disable fortify error when setting argv
 	LDFLAGS="-L/usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`"; \
 	BASECFLAGS=-U_FORTIFY_SOURCE; \
diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/bash_completion
--- a/contrib/bash_completion	Fri Jan 08 16:27:25 2016 +0100
+++ b/contrib/bash_completion	Mon Jan 11 14:27:12 2016 -0600
@@ -629,7 +629,7 @@
 
 _hg_cmd_shelve()
 {
-    if [[ "$prev" = @(-d|--delete|-l|--list) ]]; then
+    if [[ "$prev" = @(-d|--delete|-l|--list|-p|--patch|--stat) ]]; then
         _hg_shelves
     else
         _hg_status "mard"
diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/builddeb
--- a/contrib/builddeb	Fri Jan 08 16:27:25 2016 +0100
+++ b/contrib/builddeb	Mon Jan 11 14:27:12 2016 -0600
@@ -8,12 +8,18 @@
 
 BUILD=1
 CLEANUP=1
-DEBVERSION=jessie
+DISTID=`(lsb_release -is 2> /dev/null | tr '[:upper:]' '[:lower:]') || echo debian`
+CODENAME=`lsb_release -cs 2> /dev/null || echo unknown`
 while [ "$1" ]; do
     case "$1" in
-    --release )
+    --distid )
+        shift
+        DISTID="$1"
         shift
-        DEBVERSION="$1"
+        ;;
+    --codename )
+        shift
+        CODENAME="$1"
         shift
         ;;
     --cleanup )
@@ -24,11 +30,6 @@
         shift
         CLEANUP=
         ;;
-    --debbuilddir )
-        shift
-        DEBBUILDDIR="$1"
-        shift
-        ;;
    * )
        echo "Invalid parameter $1!"
1>&2 exit 1 @@ -82,7 +83,8 @@ fi if [ "$CLEANUP" ] ; then echo - OUTPUTDIR=${OUTPUTDIR:=packages/debian-$DEBVERSION} + OUTPUTDIR=${OUTPUTDIR:=packages/$DISTID-$CODENAME} + mkdir -p "$OUTPUTDIR" find ../mercurial*.deb ../mercurial_*.build ../mercurial_*.changes \ -type f -newer $control -print0 | \ xargs -Inarf -0 mv narf "$OUTPUTDIR" diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/check-code.py --- a/contrib/check-code.py Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/check-code.py Mon Jan 11 14:27:12 2016 -0600 @@ -122,10 +122,12 @@ (r'^( *)\t', "don't use tabs to indent"), (r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)', "put a backslash-escaped newline after sed 'i' command"), - (r'^diff *-\w*u.*$\n(^ \$ |^$)', "prefix diff -u with cmp"), + (r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"), + (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"), (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"), (r'\butil\.Abort\b', "directly use error.Abort"), (r'\|&', "don't use |&, use 2>&1"), + (r'\w = +\w', "only one space after = allowed"), ], # warnings [ @@ -146,7 +148,7 @@ uprefix = r"^ \$ " utestpats = [ [ - (r'^(\S.*|| [$>] .*)[ \t]\n', "trailing whitespace on non-output"), + (r'^(\S.*|| [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"), (uprefix + r'.*\|\s*sed[^|>\n]*\n', "use regex test output patterns instead of sed"), (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"), @@ -219,6 +221,7 @@ (r'(\w|\)),\w', "missing whitespace after ,"), (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"), (r'^\s+(\w|\.)+=\w[^,()\n]*$', "missing whitespace in assignment"), + (r'\w\s=\s\s+\w', "gratuitous whitespace after ="), (r'.{81}', "line too long"), (r' x+[xo][\'"]\n\s+[\'"]x', 'string join across lines with no space'), (r'[^\n]\Z', "no trailing newline"), @@ -336,6 +339,7 @@ (r'\w+ (\+\+|--)', "use foo++, not foo ++"), (r'\w,\w', "missing whitespace after ,"), (r'^[^#]\w[+/*]\w', "missing whitespace in expression"), + (r'\w\s=\s\s+\w', "gratuitous whitespace after ="), (r'^#\s+\w', "use #foo, not # foo"), (r'[^\n]\Z', "no trailing newline"), (r'^\s*#import\b', "use only #include in standard C code"), @@ -477,7 +481,13 @@ name, match, f) continue if "no-" "check-code" in pre: - print "Skipping %s it has no-" "check-code" % f + # If you're looking at this line, it's because a file has: + # no- check- code + # but the reason to output skipping is to make life for + # tests easier. 
So, instead of writing it with a normal + # spelling, we write it with the expected spelling from + # tests/test-check-code.t + print "Skipping %s it has no-che?k-code (glob)" % f return "Skip" # skip checking this file for p, r in filters: post = re.sub(p, r, post) diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/check-commit --- a/contrib/check-commit Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/check-commit Mon Jan 11 14:27:12 2016 -0600 @@ -26,8 +26,10 @@ "summary line doesn't start with 'topic: '"), (r"^# .*\n[A-Z][a-z]\S+", "don't capitalize summary lines"), (r"^# .*\n[^\n]*: *[A-Z][a-z]\S+", "don't capitalize summary lines"), + (r"^# [^\n]*\n\S*[^A-Za-z0-9-]\S*: ", + "summary keyword should be most user-relevant one-word command or topic"), (r"^# .*\n.*\.\s+$", "don't add trailing period on summary line"), - (r"^# .*\n.{78,}", "summary line too long (limit is 78)"), + (r"^# .*\n[^#].{77,}", "summary line too long (limit is 78)"), (r"^\+\n \n", "adds double empty line"), (r"^ \n\+\n", "adds double empty line"), (r"^\+[ \t]+def [a-z]+_[a-z]", "adds a function with foo_bar naming"), diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/check-config.py --- a/contrib/check-config.py Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/check-config.py Mon Jan 11 14:27:12 2016 -0600 @@ -13,14 +13,16 @@ foundopts = {} documented = {} -configre = (r"""ui\.config(|int|bool|list)\(['"](\S+)['"], ?""" - r"""['"](\S+)['"](,\s(?:default=)?(\S+?))?\)""") +configre = (r"""ui\.config(|int|bool|list)\(['"](\S+)['"],\s*""" + r"""['"](\S+)['"](,\s+(?:default=)?(\S+?))?\)""") +configpartialre = (r"""ui\.config""") def main(args): for f in args: sect = '' prevname = '' confsect = '' + carryover = '' for l in open(f): # check topic-like bits @@ -40,29 +42,35 @@ if m: confsect = m.group(1) continue - m = re.match(r'^\s+(?:#\s*)?([a-z._]+) = ', l) + m = re.match(r'^\s+(?:#\s*)?(\S+) = ', l) if m: name = confsect + '.' + m.group(1) documented[name] = 1 # like the bugzilla extension - m = re.match(r'^\s*([a-z]+\.[a-z]+)$', l) + m = re.match(r'^\s*(\S+\.\S+)$', l) + if m: + documented[m.group(1)] = 1 + + # like convert + m = re.match(r'^\s*:(\S+\.\S+):\s+', l) if m: documented[m.group(1)] = 1 # quoted in help or docstrings - m = re.match(r'.*?``([-a-z_]+\.[-a-z_]+)``', l) + m = re.match(r'.*?``(\S+\.\S+)``', l) if m: documented[m.group(1)] = 1 # look for ignore markers m = re.search(r'# (?:internal|experimental|deprecated|developer)' - ' config: (\S+.\S+)$', l) + ' config: (\S+\.\S+)$', l) if m: documented[m.group(1)] = 1 # look for code-like bits - m = re.search(configre, l) + line = carryover + l + m = re.search(configre, line, re.MULTILINE) if m: ctype = m.group(1) if not ctype: @@ -78,6 +86,13 @@ print "conflict on %s: %r != %r" % (name, (ctype, default), foundopts[name]) foundopts[name] = (ctype, default) + carryover = '' + else: + m = re.search(configpartialre, line) + if m: + carryover = line + else: + carryover = '' for name in sorted(foundopts): if name not in documented: diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/check-py3-compat.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/check-py3-compat.py Mon Jan 11 14:27:12 2016 -0600 @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# +# check-py3-compat - check Python 3 compatibility of Mercurial files +# +# Copyright 2015 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
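The carryover logic added to contrib/check-config.py above lets the checker match ``ui.config*()`` calls that are wrapped across two source lines: a line that only matches the partial pattern is held back and prepended to the next line before the full pattern is retried. A minimal, self-contained sketch of that technique; ``findconfigs()`` and the sample input are illustrative and not part of the patch::

  import re

  # Patterns mirroring configre/configpartialre in check-config.py: the full
  # pattern needs both the section and the option name; the partial pattern
  # only detects that a ui.config*() call has started.
  configre = re.compile(
      r"""ui\.config(|int|bool|list)\(['"](\S+)['"],\s*['"](\S+)['"]""")
  configpartialre = re.compile(r"""ui\.config""")

  def findconfigs(lines):
      """Yield (section, name) pairs, even when the call wraps onto the
      next physical line."""
      carryover = ''
      for l in lines:
          line = carryover + l
          m = configre.search(line)
          if m:
              yield m.group(2), m.group(3)
              carryover = ''
          elif configpartialre.search(line):
              # the call probably continues on the next line: keep the text
              carryover = line
          else:
              carryover = ''

  source = [
      "x = ui.configbool('perf', 'stub')\n",
      "y = ui.config('ui',\n",
      "              'username')\n",
  ]
  print(list(findconfigs(source)))
  # -> [('perf', 'stub'), ('ui', 'username')]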
+ +from __future__ import absolute_import, print_function + +import ast +import sys + +def check_compat(f): + """Check Python 3 compatibility for a file.""" + with open(f, 'rb') as fh: + content = fh.read() + + # Ignore empty files. + if not content.strip(): + return + + root = ast.parse(content) + futures = set() + haveprint = False + for node in ast.walk(root): + if isinstance(node, ast.ImportFrom): + if node.module == '__future__': + futures |= set(n.name for n in node.names) + elif isinstance(node, ast.Print): + haveprint = True + + if 'absolute_import' not in futures: + print('%s not using absolute_import' % f) + if haveprint and 'print_function' not in futures: + print('%s requires print_function' % f) + +if __name__ == '__main__': + for f in sys.argv[1:]: + check_compat(f) + + sys.exit(0) diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/dirstatenonnormalcheck.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/dirstatenonnormalcheck.py Mon Jan 11 14:27:12 2016 -0600 @@ -0,0 +1,57 @@ +# dirstatenonnormalcheck.py - extension to check the consistency of the +# dirstate's non-normal map +# +# For most operations on dirstate, this extensions checks that the nonnormalset +# contains the right entries. +# It compares the nonnormal file to a nonnormalset built from the map of all +# the files in the dirstate to check that they contain the same files. + +from __future__ import absolute_import + +from mercurial import ( + dirstate, + extensions, +) + +def nonnormalentries(dmap): + """Compute nonnormal entries from dirstate's dmap""" + res = set() + for f, e in dmap.iteritems(): + if e[0] != 'n' or e[3] == -1: + res.add(f) + return res + +def checkconsistency(ui, orig, dmap, _nonnormalset, label): + """Compute nonnormalset from dmap, check that it matches _nonnormalset""" + nonnormalcomputedmap = nonnormalentries(dmap) + if _nonnormalset != nonnormalcomputedmap: + ui.develwarn("%s call to %s\n" % (label, orig)) + ui.develwarn("inconsistency in nonnormalset\n") + ui.develwarn("[nonnormalset] %s\n" % _nonnormalset) + ui.develwarn("[map] %s\n" % nonnormalcomputedmap) + +def _checkdirstate(orig, self, arg): + """Check nonnormal set consistency before and after the call to orig""" + checkconsistency(self._ui, orig, self._map, self._nonnormalset, "before") + r = orig(self, arg) + checkconsistency(self._ui, orig, self._map, self._nonnormalset, "after") + return r + +def extsetup(ui): + """Wrap functions modifying dirstate to check nonnormalset consistency""" + dirstatecl = dirstate.dirstate + devel = ui.configbool('devel', 'all-warnings') + paranoid = ui.configbool('experimental', 'nonnormalparanoidcheck') + if devel: + extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate) + if paranoid: + # We don't do all these checks when paranoid is disable as it would + # make the extension run very slowly on large repos + extensions.wrapfunction(dirstatecl, 'normallookup', _checkdirstate) + extensions.wrapfunction(dirstatecl, 'otherparent', _checkdirstate) + extensions.wrapfunction(dirstatecl, 'normal', _checkdirstate) + extensions.wrapfunction(dirstatecl, 'write', _checkdirstate) + extensions.wrapfunction(dirstatecl, 'add', _checkdirstate) + extensions.wrapfunction(dirstatecl, 'remove', _checkdirstate) + extensions.wrapfunction(dirstatecl, 'merge', _checkdirstate) + extensions.wrapfunction(dirstatecl, 'drop', _checkdirstate) diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/dockerdeb --- a/contrib/dockerdeb Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/dockerdeb Mon Jan 11 14:27:12 2016 -0600 
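contrib/check-py3-compat.py above parses each file into an AST and reports modules that do not import ``absolute_import`` from ``__future__``, or that use ``print`` statements without ``print_function``. The sketch below applies the same ast-based test to an in-memory string so both warnings are easy to reproduce; ``check_source()`` is an illustrative helper, not part of the patch::

  import ast

  def check_source(name, content):
      """Run the same two checks as check-py3-compat.py on a source string."""
      root = ast.parse(content)
      futures = set()
      haveprint = False
      for node in ast.walk(root):
          if isinstance(node, ast.ImportFrom):
              if node.module == '__future__':
                  futures |= set(n.name for n in node.names)
          # ast.Print only exists when this runs under Python 2
          elif isinstance(node, getattr(ast, 'Print', ())):
              haveprint = True
      if 'absolute_import' not in futures:
          print('%s not using absolute_import' % name)
      if haveprint and 'print_function' not in futures:
          print('%s requires print_function' % name)

  check_source('good.py', 'from __future__ import absolute_import\nimport os\n')
  check_source('bad.py', 'import os\n')
  # only the second call prints: "bad.py not using absolute_import"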
@@ -8,8 +8,9 @@ checkdocker -DEBPLATFORM="$1" -PLATFORM="debian-$1" +DISTID="$1" +CODENAME="$2" +PLATFORM="$1-$2" shift # extra params are passed to build process OUTPUTDIR=${OUTPUTDIR:=$ROOTDIR/packages/$PLATFORM} @@ -26,8 +27,8 @@ sh -c "cd /mnt/$dn && make clean && make local" fi $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \ - sh -c "cd /mnt/$dn && DEB_BUILD_OPTIONS='${DEB_BUILD_OPTIONS:=}' contrib/builddeb --build --release $DEBPLATFORM" -contrib/builddeb --cleanup --release $DEBPLATFORM + sh -c "cd /mnt/$dn && DEB_BUILD_OPTIONS='${DEB_BUILD_OPTIONS:=}' contrib/builddeb --build --distid $DISTID --codename $CODENAME" +contrib/builddeb --cleanup --distid $DISTID --codename $CODENAME if [ $(uname) = "Darwin" ] ; then $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \ sh -c "cd /mnt/$dn && make clean" diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/fixpax.py --- a/contrib/fixpax.py Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/fixpax.py Mon Jan 11 14:27:12 2016 -0600 @@ -1,3 +1,4 @@ +#!/usr/bin/env python # fixpax - fix ownership in bdist_mpkg output # # Copyright 2015 Matt Mackall diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/import-checker.py --- a/contrib/import-checker.py Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/import-checker.py Mon Jan 11 14:27:12 2016 -0600 @@ -1,4 +1,7 @@ +#!/usr/bin/env python + import ast +import collections import os import sys @@ -11,6 +14,8 @@ # Whitelist of modules that symbols can be directly imported from. allowsymbolimports = ( '__future__', + 'mercurial.hgweb.common', + 'mercurial.hgweb.request', 'mercurial.i18n', 'mercurial.node', ) @@ -35,6 +40,17 @@ return False +def walklocal(root): + """Recursively yield all descendant nodes but not in a different scope""" + todo = collections.deque(ast.iter_child_nodes(root)) + yield root, False + while todo: + node = todo.popleft() + newscope = isinstance(node, ast.FunctionDef) + if not newscope: + todo.extend(ast.iter_child_nodes(node)) + yield node, newscope + def dotted_name_of_path(path, trimpure=False): """Given a relative path to a source file, return its dotted module name. @@ -45,7 +61,7 @@ >>> dotted_name_of_path('zlibmodule.so') 'zlib' """ - parts = path.split('/') + parts = path.replace(os.sep, '/').split('/') parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so if parts[-1].endswith('module'): parts[-1] = parts[-1][:-6] @@ -163,9 +179,6 @@ # consider them stdlib. for m in ['msvcrt', '_winreg']: yield m - # These get missed too - for m in 'ctypes', 'email', 'multiprocessing': - yield m yield 'builtins' # python3 only for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only yield m @@ -198,11 +211,12 @@ or top == libpath and d in ('hgext', 'mercurial')): del dirs[i] for name in files: - if name == '__init__.py': - continue if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')): continue - full_path = os.path.join(top, name) + if name.startswith('__init__.py'): + full_path = top + else: + full_path = os.path.join(top, name) rel_path = full_path[len(libpath) + 1:] mod = dotted_name_of_path(rel_path) yield mod @@ -237,7 +251,7 @@ >>> sorted(imported_modules( ... 'import foo1; from bar import bar1', ... modulename, localmods)) - ['foo.bar.__init__', 'foo.bar.bar1', 'foo.foo1'] + ['foo.bar.bar1', 'foo.foo1'] >>> sorted(imported_modules( ... 'from bar.bar1 import name1, name2, name3', ... 
modulename, localmods)) @@ -284,21 +298,28 @@ continue absname, dottedpath, hassubmod = found - yield dottedpath if not hassubmod: + # "dottedpath" is not a package; must be imported + yield dottedpath # examination of "node.names" should be redundant # e.g.: from mercurial.node import nullid, nullrev continue + modnotfound = False prefix = absname + '.' for n in node.names: found = fromlocal(prefix + n.name) if not found: # this should be a function or a property of "node.module" + modnotfound = True continue yield found[1] + if modnotfound: + # "dottedpath" is a package, but imported because of non-module + # lookup + yield dottedpath -def verify_import_convention(module, source): +def verify_import_convention(module, source, localmods): """Verify imports match our established coding convention. We have 2 conventions: legacy and modern. The modern convention is in @@ -311,11 +332,11 @@ absolute = usingabsolute(root) if absolute: - return verify_modern_convention(module, root) + return verify_modern_convention(module, root, localmods) else: return verify_stdlib_on_own_line(root) -def verify_modern_convention(module, root): +def verify_modern_convention(module, root, localmods, root_col_offset=0): """Verify a file conforms to the modern import convention rules. The rules of the modern convention are: @@ -342,6 +363,7 @@ and readability problems. See `requirealias`. """ topmodule = module.split('.')[0] + fromlocal = fromlocalfunc(module, localmods) # Whether a local/non-stdlib import has been performed. seenlocal = False @@ -352,29 +374,36 @@ # Relative import levels encountered so far. seenlevels = set() - for node in ast.walk(root): - if isinstance(node, ast.Import): + for node, newscope in walklocal(root): + def msg(fmt, *args): + return (fmt % args, node.lineno) + if newscope: + # Check for local imports in function + for r in verify_modern_convention(module, node, localmods, + node.col_offset + 4): + yield r + elif isinstance(node, ast.Import): # Disallow "import foo, bar" and require separate imports # for each module. if len(node.names) > 1: - yield 'multiple imported names: %s' % ', '.join( - n.name for n in node.names) + yield msg('multiple imported names: %s', + ', '.join(n.name for n in node.names)) name = node.names[0].name asname = node.names[0].asname # Ignore sorting rules on imports inside blocks. - if node.col_offset == 0: + if node.col_offset == root_col_offset: if lastname and name < lastname: - yield 'imports not lexically sorted: %s < %s' % ( - name, lastname) + yield msg('imports not lexically sorted: %s < %s', + name, lastname) lastname = name # stdlib imports should be before local imports. stdlib = name in stdlib_modules - if stdlib and seenlocal and node.col_offset == 0: - yield 'stdlib import follows local import: %s' % name + if stdlib and seenlocal and node.col_offset == root_col_offset: + yield msg('stdlib import follows local import: %s', name) if not stdlib: seenlocal = True @@ -382,11 +411,11 @@ # Import of sibling modules should use relative imports. topname = name.split('.')[0] if topname == topmodule: - yield 'import should be relative: %s' % name + yield msg('import should be relative: %s', name) if name in requirealias and asname != requirealias[name]: - yield '%s module must be "as" aliased to %s' % ( - name, requirealias[name]) + yield msg('%s module must be "as" aliased to %s', + name, requirealias[name]) elif isinstance(node, ast.ImportFrom): # Resolve the full imported module name. 
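``walklocal()``, added to contrib/import-checker.py above, walks an AST breadth-first but refuses to descend into nested function bodies; callers receive a ``newscope`` flag and can re-run the convention checks on each function with an adjusted ``col_offset``, which is how local imports inside functions are now verified. A stand-alone illustration of the traversal (the sample ``source`` string and the prints are only for demonstration)::

  import ast
  import collections

  def walklocal(root):
      """Yield (node, newscope) pairs without entering nested functions."""
      todo = collections.deque(ast.iter_child_nodes(root))
      yield root, False
      while todo:
          node = todo.popleft()
          newscope = isinstance(node, ast.FunctionDef)
          if not newscope:
              todo.extend(ast.iter_child_nodes(node))
          yield node, newscope

  source = "import os\n\ndef helper():\n    import sys\n"
  root = ast.parse(source)
  for node, newscope in walklocal(root):
      if isinstance(node, ast.Import):
          print('module-level import: %s' % node.names[0].name)
      elif newscope:
          print('nested scope, check separately: %s' % node.name)
  # prints "module-level import: os" and "nested scope, check separately: helper";
  # the "import sys" inside helper() is only seen when its body is walked on its own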
@@ -400,39 +429,49 @@ topname = fullname.split('.')[0] if topname == topmodule: - yield 'import should be relative: %s' % fullname + yield msg('import should be relative: %s', fullname) # __future__ is special since it needs to come first and use # symbol import. if fullname != '__future__': if not fullname or fullname in stdlib_modules: - yield 'relative import of stdlib module' + yield msg('relative import of stdlib module') else: seenlocal = True # Direct symbol import is only allowed from certain modules and # must occur before non-symbol imports. - if node.module and node.col_offset == 0: - if fullname not in allowsymbolimports: - yield 'direct symbol import from %s' % fullname + if node.module and node.col_offset == root_col_offset: + found = fromlocal(node.module, node.level) + if found and found[2]: # node.module is a package + prefix = found[0] + '.' + symbols = [n.name for n in node.names + if not fromlocal(prefix + n.name)] + else: + symbols = [n.name for n in node.names] - if seennonsymbolrelative: - yield ('symbol import follows non-symbol import: %s' % - fullname) + if symbols and fullname not in allowsymbolimports: + yield msg('direct symbol import %s from %s', + ', '.join(symbols), fullname) + + if symbols and seennonsymbolrelative: + yield msg('symbol import follows non-symbol import: %s', + fullname) if not node.module: assert node.level seennonsymbolrelative = True # Only allow 1 group per level. - if node.level in seenlevels and node.col_offset == 0: - yield 'multiple "from %s import" statements' % ( - '.' * node.level) + if (node.level in seenlevels + and node.col_offset == root_col_offset): + yield msg('multiple "from %s import" statements', + '.' * node.level) # Higher-level groups come before lower-level groups. if any(node.level > l for l in seenlevels): - yield 'higher-level import should come first: %s' % ( - fullname) + yield msg('higher-level import should come first: %s', + fullname) seenlevels.add(node.level) @@ -442,14 +481,14 @@ for n in node.names: if lastentryname and n.name < lastentryname: - yield 'imports from %s not lexically sorted: %s < %s' % ( - fullname, n.name, lastentryname) + yield msg('imports from %s not lexically sorted: %s < %s', + fullname, n.name, lastentryname) lastentryname = n.name if n.name in requirealias and n.asname != requirealias[n.name]: - yield '%s from %s must be "as" aliased to %s' % ( - n.name, fullname, requirealias[n.name]) + yield msg('%s from %s must be "as" aliased to %s', + n.name, fullname, requirealias[n.name]) def verify_stdlib_on_own_line(root): """Given some python source, verify that stdlib imports are done @@ -460,7 +499,7 @@ http://bugs.python.org/issue19510. 
>>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo'))) - ['mixed imports\\n stdlib: sys\\n relative: foo'] + [('mixed imports\\n stdlib: sys\\n relative: foo', 1)] >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os'))) [] >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar'))) @@ -474,7 +513,7 @@ if from_stdlib[True] and from_stdlib[False]: yield ('mixed imports\n stdlib: %s\n relative: %s' % (', '.join(sorted(from_stdlib[True])), - ', '.join(sorted(from_stdlib[False])))) + ', '.join(sorted(from_stdlib[False]))), node.lineno) class CircularImport(Exception): pass @@ -546,9 +585,9 @@ src = f.read() used_imports[modname] = sorted( imported_modules(src, modname, localmods, ignore_nested=True)) - for error in verify_import_convention(modname, src): + for error, lineno in verify_import_convention(modname, src, localmods): any_errors = True - print source_path, error + print '%s:%d: %s' % (source_path, lineno, error) f.close() cycles = find_cycles(used_imports) if cycles: diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/mercurial.spec diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/perf.py --- a/contrib/perf.py Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/perf.py Mon Jan 11 14:27:12 2016 -0600 @@ -2,20 +2,28 @@ '''helper extension to measure performance''' from mercurial import cmdutil, scmutil, util, commands, obsolete -from mercurial import repoview, branchmap, merge, copies +from mercurial import repoview, branchmap, merge, copies, error, revlog +from mercurial import mdiff import time, os, sys +import random import functools formatteropts = commands.formatteropts +revlogopts = commands.debugrevlogopts cmdtable = {} command = cmdutil.command(cmdtable) +def getlen(ui): + if ui.configbool("perf", "stub"): + return lambda x: 1 + return len + def gettimer(ui, opts=None): """return a timer function and formatter: (timer, formatter) - This functions exist to gather the creation of formatter in a single - place instead of duplicating it in all performance command.""" + This function exists to gather the creation of formatter in a single + place instead of duplicating it in all performance commands.""" # enforce an idle period before execution to counteract power management # experimental config: perf.presleep @@ -28,8 +36,15 @@ ui.fout = ui.ferr # get a formatter fm = ui.formatter('perf', opts) + # stub function, runs code only once instead of in a loop + # experimental config: perf.stub + if ui.configbool("perf", "stub"): + return functools.partial(stub_timer, fm), fm return functools.partial(_timer, fm), fm +def stub_timer(fm, func, title=None): + func() + def _timer(fm, func, title=None): results = [] begin = time.time() @@ -91,7 +106,7 @@ #m = match.always(repo.root, repo.getcwd()) #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, # False)))) - timer, fm = gettimer(ui, **opts) + timer, fm = gettimer(ui, opts) timer(lambda: sum(map(len, repo.status(unknown=opts['unknown'])))) fm.end() @@ -193,7 +208,7 @@ fm.end() @command('perfdirstatefoldmap', formatteropts) -def perffilefoldmap(ui, repo, **opts): +def perfdirstatefoldmap(ui, repo, **opts): timer, fm = gettimer(ui, opts) dirstate = repo.dirstate 'a' in dirstate @@ -239,8 +254,8 @@ def d(): # acceptremote is True because we don't want prompts in the middle of # our benchmark - merge.calculateupdates(repo, wctx, rctx, ancestor, False, False, False, - acceptremote=True) + merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False, + acceptremote=True, followcopies=True) timer(d) fm.end() @@ -260,8 
+275,7 @@ ctx = scmutil.revsingle(repo, rev, rev) t = ctx.manifestnode() def d(): - repo.manifest._mancache.clear() - repo.manifest._cache = None + repo.manifest.clearcaches() repo.manifest.read(t) timer(d) fm.end() @@ -293,14 +307,24 @@ timer, fm = gettimer(ui, opts) cmd = sys.argv[0] def d(): - os.system("HGRCPATH= %s version -q > /dev/null" % cmd) + if os.name != 'nt': + os.system("HGRCPATH= %s version -q > /dev/null" % cmd) + else: + os.environ['HGRCPATH'] = '' + os.system("%s version -q > NUL" % cmd) timer(d) fm.end() @command('perfparents', formatteropts) def perfparents(ui, repo, **opts): timer, fm = gettimer(ui, opts) - nl = [repo.changelog.node(i) for i in xrange(1000)] + # control the number of commits perfparents iterates over + # experimental config: perf.parentscount + count = ui.configint("perf", "parentscount", 1000) + if len(repo.changelog) < count: + raise error.Abort("repo needs %d commits for this test" % count) + repo = repo.unfiltered() + nl = [repo.changelog.node(i) for i in xrange(count)] def d(): for n in nl: repo.changelog.parents(n) @@ -308,7 +332,7 @@ fm.end() @command('perfctxfiles', formatteropts) -def perfparents(ui, repo, x, **opts): +def perfctxfiles(ui, repo, x, **opts): x = int(x) timer, fm = gettimer(ui, opts) def d(): @@ -317,7 +341,7 @@ fm.end() @command('perfrawfiles', formatteropts) -def perfparents(ui, repo, x, **opts): +def perfrawfiles(ui, repo, x, **opts): x = int(x) timer, fm = gettimer(ui, opts) cl = repo.changelog @@ -329,10 +353,6 @@ @command('perflookup', formatteropts) def perflookup(ui, repo, rev, **opts): timer, fm = gettimer(ui, opts) - -@command('perflookup', formatteropts) -def perflookup(ui, repo, rev, **opts): - timer, fm = gettimer(ui, opts) timer(lambda: len(repo.lookup(rev))) fm.end() @@ -358,10 +378,12 @@ @command('perflog', [('', 'rename', False, 'ask log to follow renames')] + formatteropts) -def perflog(ui, repo, **opts): +def perflog(ui, repo, rev=None, **opts): + if rev is None: + rev=[] timer, fm = gettimer(ui, opts) ui.pushbuffer() - timer(lambda: commands.log(ui, repo, rev=[], date='', user='', + timer(lambda: commands.log(ui, repo, rev=rev, date='', user='', copies=opts.get('rename'))) ui.popbuffer() fm.end() @@ -381,10 +403,12 @@ fm.end() @command('perftemplating', formatteropts) -def perftemplating(ui, repo, **opts): +def perftemplating(ui, repo, rev=None, **opts): + if rev is None: + rev=[] timer, fm = gettimer(ui, opts) ui.pushbuffer() - timer(lambda: commands.log(ui, repo, rev=[], date='', user='', + timer(lambda: commands.log(ui, repo, rev=rev, date='', user='', template='{date|shortdate} [{rev}:{node|short}]' ' {author|person}: {desc|firstline}\n')) ui.popbuffer() @@ -410,10 +434,14 @@ timer, fm = gettimer(ui, opts) s = repo.store s.fncache._load() + lock = repo.lock() + tr = repo.transaction('perffncachewrite') def d(): s.fncache._dirty = True - s.fncache.write() + s.fncache.write(tr) timer(d) + lock.release() + tr.close() fm.end() @command('perffncacheencode', formatteropts) @@ -447,25 +475,124 @@ timer(d, title) fm.end() -@command('perfrevlog', - [('d', 'dist', 100, 'distance between the revisions')] + formatteropts, - "[INDEXFILE]") -def perfrevlog(ui, repo, file_, **opts): +@command('perfrevlog', revlogopts + formatteropts + + [('d', 'dist', 100, 'distance between the revisions'), + ('s', 'startrev', 0, 'revision to start reading at')], + '-c|-m|FILE') +def perfrevlog(ui, repo, file_=None, startrev=0, **opts): + """Benchmark reading a series of revisions from a revlog. 
+ + By default, we read every ``-d/--dist`` revision from 0 to tip of + the specified revlog. + + The start revision can be defined via ``-s/--startrev``. + """ timer, fm = gettimer(ui, opts) - from mercurial import revlog dist = opts['dist'] + _len = getlen(ui) def d(): - r = revlog.revlog(lambda fn: open(fn, 'rb'), file_) - for x in xrange(0, len(r), dist): + r = cmdutil.openrevlog(repo, 'perfrevlog', file_, opts) + for x in xrange(startrev, _len(r), dist): r.revision(r.node(x)) timer(d) fm.end() +@command('perfrevlogrevision', revlogopts + formatteropts + + [('', 'cache', False, 'use caches instead of clearing')], + '-c|-m|FILE REV') +def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts): + """Benchmark obtaining a revlog revision. + + Obtaining a revlog revision consists of roughly the following steps: + + 1. Compute the delta chain + 2. Obtain the raw chunks for that delta chain + 3. Decompress each raw chunk + 4. Apply binary patches to obtain fulltext + 5. Verify hash of fulltext + + This command measures the time spent in each of these phases. + """ + if opts.get('changelog') or opts.get('manifest'): + file_, rev = None, file_ + elif rev is None: + raise error.CommandError('perfrevlogrevision', 'invalid arguments') + + r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts) + node = r.lookup(rev) + rev = r.rev(node) + + def dodeltachain(rev): + if not cache: + r.clearcaches() + r._deltachain(rev) + + def doread(chain): + if not cache: + r.clearcaches() + r._chunkraw(chain[0], chain[-1]) + + def dodecompress(data, chain): + if not cache: + r.clearcaches() + + start = r.start + length = r.length + inline = r._inline + iosize = r._io.size + buffer = util.buffer + offset = start(chain[0]) + + for rev in chain: + chunkstart = start(rev) + if inline: + chunkstart += (rev + 1) * iosize + chunklength = length(rev) + b = buffer(data, chunkstart - offset, chunklength) + revlog.decompress(b) + + def dopatch(text, bins): + if not cache: + r.clearcaches() + mdiff.patches(text, bins) + + def dohash(text): + if not cache: + r.clearcaches() + r._checkhash(text, node, rev) + + def dorevision(): + if not cache: + r.clearcaches() + r.revision(node) + + chain = r._deltachain(rev)[0] + data = r._chunkraw(chain[0], chain[-1])[1] + bins = r._chunks(chain) + text = str(bins[0]) + bins = bins[1:] + text = mdiff.patches(text, bins) + + benches = [ + (lambda: dorevision(), 'full'), + (lambda: dodeltachain(rev), 'deltachain'), + (lambda: doread(chain), 'read'), + (lambda: dodecompress(data, chain), 'decompress'), + (lambda: dopatch(text, bins), 'patch'), + (lambda: dohash(text), 'hash'), + ] + + for fn, title in benches: + timer, fm = gettimer(ui, opts) + timer(fn, title=title) + fm.end() + @command('perfrevset', - [('C', 'clear', False, 'clear volatile cache between each call.')] + [('C', 'clear', False, 'clear volatile cache between each call.'), + ('', 'contexts', False, 'obtain changectx for each revision')] + formatteropts, "REVSET") -def perfrevset(ui, repo, expr, clear=False, **opts): +def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts): """benchmark the execution time of a revset Use the --clean option if need to evaluate the impact of build volatile @@ -475,7 +602,10 @@ def d(): if clear: repo.invalidatevolatilesets() - for r in repo.revs(expr): pass + if contexts: + for ctx in repo.set(expr): pass + else: + for r in repo.revs(expr): pass timer(d) fm.end() @@ -576,3 +706,79 @@ timer, fm = gettimer(ui) timer(lambda: len(obsolete.obsstore(repo.svfs))) fm.end() + 
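``perfrevlogrevision`` above splits full-text reconstruction into separate phases (delta chain, raw read, decompression, patching, hashing) and times each one through a list of ``(function, title)`` pairs. A stand-alone sketch of that bench-list pattern; ``timeone()`` and the toy phases are illustrative stand-ins for ``_timer()`` and the revlog internals::

  import time

  def timeone(func, title):
      """Report the best of three wall-clock runs (a much simplified _timer)."""
      best = None
      for _ in range(3):
          start = time.time()
          func()
          elapsed = time.time() - start
          if best is None or elapsed < best:
              best = elapsed
      print('%-12s best of 3: %.6f s' % (title, best))

  data = list(range(100000))  # toy payload standing in for revlog chunks

  benches = [
      (lambda: sorted(data), 'sort'),
      (lambda: sum(data), 'sum'),
      (lambda: max(data), 'max'),
  ]

  for fn, title in benches:
      timeone(fn, title)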
+@command('perflrucachedict', formatteropts + + [('', 'size', 4, 'size of cache'), + ('', 'gets', 10000, 'number of key lookups'), + ('', 'sets', 10000, 'number of key sets'), + ('', 'mixed', 10000, 'number of mixed mode operations'), + ('', 'mixedgetfreq', 50, 'frequency of get vs set ops in mixed mode')], + norepo=True) +def perflrucache(ui, size=4, gets=10000, sets=10000, mixed=10000, + mixedgetfreq=50, **opts): + def doinit(): + for i in xrange(10000): + util.lrucachedict(size) + + values = [] + for i in xrange(size): + values.append(random.randint(0, sys.maxint)) + + # Get mode fills the cache and tests raw lookup performance with no + # eviction. + getseq = [] + for i in xrange(gets): + getseq.append(random.choice(values)) + + def dogets(): + d = util.lrucachedict(size) + for v in values: + d[v] = v + for key in getseq: + value = d[key] + value # silence pyflakes warning + + # Set mode tests insertion speed with cache eviction. + setseq = [] + for i in xrange(sets): + setseq.append(random.randint(0, sys.maxint)) + + def dosets(): + d = util.lrucachedict(size) + for v in setseq: + d[v] = v + + # Mixed mode randomly performs gets and sets with eviction. + mixedops = [] + for i in xrange(mixed): + r = random.randint(0, 100) + if r < mixedgetfreq: + op = 0 + else: + op = 1 + + mixedops.append((op, random.randint(0, size * 2))) + + def domixed(): + d = util.lrucachedict(size) + + for op, v in mixedops: + if op == 0: + try: + d[v] + except KeyError: + pass + else: + d[v] = v + + benches = [ + (doinit, 'init'), + (dogets, 'gets'), + (dosets, 'sets'), + (domixed, 'mixed') + ] + + for fn, title in benches: + timer, fm = gettimer(ui, opts) + timer(fn, title=title) + fm.end() diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/revsetbenchmarks.py --- a/contrib/revsetbenchmarks.py Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/revsetbenchmarks.py Mon Jan 11 14:27:12 2016 -0600 @@ -53,10 +53,13 @@ fullcmd += cmd return check_output(fullcmd, stderr=STDOUT) -def perf(revset, target=None): +def perf(revset, target=None, contexts=False): """run benchmark for this very revset""" try: - output = hg(['perfrevset', revset], repo=target) + args = ['perfrevset', revset] + if contexts: + args.append('--contexts') + output = hg(args, repo=target) return parseoutput(output) except CalledProcessError as exc: print >> sys.stderr, 'abort: cannot run revset benchmark: %s' % exc.cmd @@ -238,6 +241,9 @@ default=','.join(DEFAULTVARIANTS), help="comma separated list of variant to test " "(eg: plain,min,sorted) (plain = no modification)") +parser.add_option('', '--contexts', + action='store_true', + help='obtain changectx from results instead of integer revs') (options, args) = parser.parse_args() @@ -283,7 +289,7 @@ varres = {} for var in variants: varrset = applyvariants(rset, var) - data = perf(varrset, target=options.repo) + data = perf(varrset, target=options.repo, contexts=options.contexts) varres[var] = data res.append(varres) printresult(variants, idx, varres, len(revsets), diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/win32/mercurial.iss --- a/contrib/win32/mercurial.iss Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/win32/mercurial.iss Mon Jan 11 14:27:12 2016 -0600 @@ -23,6 +23,7 @@ [Setup] AppCopyright=Copyright 2005-2015 Matt Mackall and others AppName=Mercurial +AppVersion={#VERSION} #if ARCH == "x64" AppVerName=Mercurial {#VERSION} (64-bit) OutputBaseFilename=Mercurial-{#VERSION}-x64 @@ -83,6 +84,7 @@ Source: doc\*.html; DestDir: {app}\Docs Source: doc\style.css; DestDir: {app}\Docs Source: mercurial\help\*.txt; 
DestDir: {app}\help +Source: mercurial\help\internals\*.txt; DestDir: {app}\help\internals Source: mercurial\default.d\*.rc; DestDir: {app}\default.d Source: mercurial\locale\*.*; DestDir: {app}\locale; Flags: recursesubdirs createallsubdirs skipifsourcedoesntexist Source: mercurial\templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/wix/guids.wxi --- a/contrib/wix/guids.wxi Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/wix/guids.wxi Mon Jan 11 14:27:12 2016 -0600 @@ -19,7 +19,8 @@ - + + diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/wix/help.wxs --- a/contrib/wix/help.wxs Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/wix/help.wxs Mon Jan 11 14:27:12 2016 -0600 @@ -5,9 +5,16 @@ + + + + + + + - + @@ -28,6 +35,15 @@ + + + + + + + + + diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/wix/mercurial.wxs --- a/contrib/wix/mercurial.wxs Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/wix/mercurial.wxs Mon Jan 11 14:27:12 2016 -0600 @@ -124,7 +124,7 @@ - + diff -r 6c7d26cef0cd -r 4571c0b38337 contrib/wix/templates.wxs --- a/contrib/wix/templates.wxs Fri Jan 08 16:27:25 2016 +0100 +++ b/contrib/wix/templates.wxs Mon Jan 11 14:27:12 2016 -0600 @@ -34,6 +34,7 @@ + diff -r 6c7d26cef0cd -r 4571c0b38337 doc/check-seclevel.py --- a/doc/check-seclevel.py Fri Jan 08 16:27:25 2016 +0100 +++ b/doc/check-seclevel.py Mon Jan 11 14:27:12 2016 -0600 @@ -6,9 +6,8 @@ import optparse # import from the live mercurial repo +os.environ['HGMODULEPOLICY'] = 'py' sys.path.insert(0, "..") -# fall back to pure modules if required C extensions are not available -sys.path.append(os.path.join('..', 'mercurial', 'pure')) from mercurial import demandimport; demandimport.enable() from mercurial.commands import table from mercurial.help import helptable @@ -88,7 +87,7 @@ for name in sorted(extensions.enabled().keys() + extensions.disabled().keys()): - mod = extensions.load(None, name, None) + mod = extensions.load(ui, name, None) if not mod.__doc__: ui.note(('skip checking %s extension: no help document\n') % name) continue @@ -128,6 +127,9 @@ optparser.add_option("-v", "--verbose", help="enable additional output", action="store_true") + optparser.add_option("-d", "--debug", + help="debug mode", + action="store_true") optparser.add_option("-f", "--file", help="filename to read in (or '-' for stdin)", action="store", default="") @@ -153,6 +155,7 @@ ui = uimod.ui() ui.setconfig('ui', 'verbose', options.verbose, '--verbose') + ui.setconfig('ui', 'debug', options.debug, '--debug') if options.file: if checkfile(ui, options.file, options.initlevel): diff -r 6c7d26cef0cd -r 4571c0b38337 doc/gendoc.py --- a/doc/gendoc.py Fri Jan 08 16:27:25 2016 +0100 +++ b/doc/gendoc.py Mon Jan 11 14:27:12 2016 -0600 @@ -1,13 +1,16 @@ +#!/usr/bin/env python """usage: %s DOC ... where DOC is the name of a document """ import os, sys, textwrap + +# This script is executed during installs and may not have C extensions +# available. Relax C module requirements. 
+os.environ['HGMODULEPOLICY'] = 'allow' # import from the live mercurial repo sys.path.insert(0, "..") -# fall back to pure modules if required C extensions are not available -sys.path.append(os.path.join('..', 'mercurial', 'pure')) from mercurial import demandimport; demandimport.enable() from mercurial import minirst from mercurial.commands import table, globalopts @@ -107,7 +110,7 @@ " :depth: 1\n\n") for extensionname in sorted(allextensionnames()): - mod = extensions.load(None, extensionname, None) + mod = extensions.load(ui, extensionname, None) ui.write(minirst.subsection(extensionname)) ui.write("%s\n\n" % gettext(mod.__doc__)) cmdtable = getattr(mod, 'cmdtable', None) diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/censor.py --- a/hgext/censor.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/censor.py Mon Jan 11 14:27:12 2016 -0600 @@ -28,6 +28,7 @@ from mercurial.node import short from mercurial import cmdutil, error, filelog, revlog, scmutil, util from mercurial.i18n import _ +from mercurial import lock as lockmod cmdtable = {} command = cmdutil.command(cmdtable) @@ -42,6 +43,15 @@ ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))], _('-r REV [-t TEXT] [FILE]')) def censor(ui, repo, path, rev='', tombstone='', **opts): + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + return _docensor(ui, repo, path, rev, tombstone, **opts) + finally: + lockmod.release(lock, wlock) + +def _docensor(ui, repo, path, rev='', tombstone='', **opts): if not path: raise error.Abort(_('must specify file path to censor')) if not rev: diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/clonebundles.py --- a/hgext/clonebundles.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/clonebundles.py Mon Jan 11 14:27:12 2016 -0600 @@ -47,7 +47,7 @@ * Generating bundle files of repository content (typically periodically, such as once per day). * A file server that clients have network access to and that Python knows - how to talk to through its normal URL handling facility (typically a + how to talk to through its normal URL handling facility (typically an HTTP server). * A process for keeping the bundles manifest in sync with available bundle files. @@ -183,7 +183,7 @@ experimental.clonebundleprefers List of "key=value" properties the client prefers in bundles. Downloaded bundle manifests will be sorted by the preferences in this list. e.g. - the value "BUNDLESPEC=gzip-v1, BUNDLESPEC=bzip2=v1" will prefer a gzipped + the value "BUNDLESPEC=gzip-v1, BUNDLESPEC=bzip2-v1" will prefer a gzipped version 1 bundle type then bzip2 version 1 bundle type. 
If not defined, the order in the manifest will be used and the first diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/color.py --- a/hgext/color.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/color.py Mon Jan 11 14:27:12 2016 -0600 @@ -419,16 +419,6 @@ _styles[status] = ' '.join(good) class colorui(uimod.ui): - def popbuffer(self, labeled=False): - if self._colormode is None: - return super(colorui, self).popbuffer(labeled) - - self._bufferstates.pop() - if labeled: - return ''.join(self.label(a, label) for a, label - in self._buffers.pop()) - return ''.join(a for a, label in self._buffers.pop()) - _colormode = 'ansi' def write(self, *args, **opts): if self._colormode is None: @@ -436,13 +426,16 @@ label = opts.get('label', '') if self._buffers: - self._buffers[-1].extend([(str(a), label) for a in args]) + if self._bufferapplylabels: + self._buffers[-1].extend(self.label(a, label) for a in args) + else: + self._buffers[-1].extend(args) elif self._colormode == 'win32': for a in args: win32print(a, super(colorui, self).write, **opts) else: return super(colorui, self).write( - *[self.label(str(a), label) for a in args], **opts) + *[self.label(a, label) for a in args], **opts) def write_err(self, *args, **opts): if self._colormode is None: @@ -456,7 +449,7 @@ win32print(a, super(colorui, self).write_err, **opts) else: return super(colorui, self).write_err( - *[self.label(str(a), label) for a in args], **opts) + *[self.label(a, label) for a in args], **opts) def showlabel(self, msg, label): if label and msg: diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/convert/hg.py --- a/hgext/convert/hg.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/convert/hg.py Mon Jan 11 14:27:12 2016 -0600 @@ -23,6 +23,7 @@ from mercurial.node import bin, hex, nullid from mercurial import hg, util, context, bookmarks, error, scmutil, exchange from mercurial import phases +from mercurial import lock as lockmod from mercurial import merge as mergemod from common import NoRepo, commit, converter_source, converter_sink, mapfile @@ -191,7 +192,6 @@ self.repo, p1ctx, p2ctx, anc, True, # branchmerge True, # force - False, # partial False, # acceptremote False, # followcopies ) @@ -410,12 +410,19 @@ def putbookmarks(self, updatedbookmark): if not len(updatedbookmark): return - - self.ui.status(_("updating bookmarks\n")) - destmarks = self.repo._bookmarks - for bookmark in updatedbookmark: - destmarks[bookmark] = bin(updatedbookmark[bookmark]) - destmarks.write() + wlock = lock = tr = None + try: + wlock = self.repo.wlock() + lock = self.repo.lock() + tr = self.repo.transaction('bookmark') + self.ui.status(_("updating bookmarks\n")) + destmarks = self.repo._bookmarks + for bookmark in updatedbookmark: + destmarks[bookmark] = bin(updatedbookmark[bookmark]) + destmarks.recordchange(tr) + tr.close() + finally: + lockmod.release(lock, wlock, tr) def hascommitfrommap(self, rev): # the exact semantics of clonebranches is unclear so we can't say no diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/convert/subversion.py --- a/hgext/convert/subversion.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/convert/subversion.py Mon Jan 11 14:27:12 2016 -0600 @@ -1041,7 +1041,7 @@ relpaths.append(p.strip('/')) args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths, strict_node_history] - # undocumented feature: debugsvnlog can be disabled + # developer config: convert.svn.debugsvnlog if not self.ui.configbool('convert', 'svn.debugsvnlog', True): return directlogstream(*args) arg = encodeargs(args) diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/eol.py 
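Several hunks in this patch apply the same shape of change: hgext/censor.py and convert's ``putbookmarks`` above (and gpg's ``sign`` further below) now take the working-directory lock, then the store lock, run the original body from a private ``_do...`` helper, and release everything in a ``finally`` block. A schematic of that pattern with dummy stand-ins for the repository and lock objects; the real code uses ``repo.wlock()``/``repo.lock()`` and ``mercurial.lock.release``::

  class dummylock(object):
      """Stand-in lock; repo.wlock()/repo.lock() return objects with a
      release() method, which is all this pattern relies on."""
      def __init__(self, name):
          self.name = name
      def release(self):
          print('released %s' % self.name)

  class dummyrepo(object):
      def wlock(self):
          return dummylock('wlock')
      def lock(self):
          return dummylock('lock')

  def release(*locks):
      # like mercurial.lock.release(): skip locks that were never taken
      for l in locks:
          if l is not None:
              l.release()

  def command(repo):
      wlock = lock = None
      try:
          wlock = repo.wlock()      # working-directory lock first
          lock = repo.lock()        # then the store lock
          return _docommand(repo)
      finally:
          release(lock, wlock)      # release in the opposite order

  def _docommand(repo):
      print('doing the real work under both locks')

  command(dummyrepo())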
--- a/hgext/eol.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/eol.py Mon Jan 11 14:27:12 2016 -0600 @@ -201,7 +201,7 @@ data = ctx[f].data() if (target == "to-lf" and "\r\n" in data or target == "to-crlf" and singlelf.search(data)): - failed.append((str(ctx), target, f)) + failed.append((f, target, str(ctx))) break return failed @@ -244,7 +244,7 @@ if failed: eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'} msgs = [] - for node, target, f in failed: + for f, target, node in sorted(failed): msgs.append(_(" %s in %s should not have %s line endings") % (f, node, eols[target])) raise error.Abort(_("end-of-line check failed:\n") + "\n".join(msgs)) diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/extdiff.py --- a/hgext/extdiff.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/extdiff.py Mon Jan 11 14:27:12 2016 -0600 @@ -270,15 +270,17 @@ ui.note(_('cleaning up temp directory\n')) shutil.rmtree(tmproot) -@command('extdiff', - [('p', 'program', '', - _('comparison program to run'), _('CMD')), +extdiffopts = [ ('o', 'option', [], _('pass option to comparison program'), _('OPT')), ('r', 'rev', [], _('revision'), _('REV')), ('c', 'change', '', _('change made by revision'), _('REV')), ('', 'patch', None, _('compare patches for two revisions')) - ] + commands.walkopts + commands.subrepoopts, + ] + commands.walkopts + commands.subrepoopts + +@command('extdiff', + [('p', 'program', '', _('comparison program to run'), _('CMD')), + ] + extdiffopts, _('hg extdiff [OPT]... [FILE]...'), inferrepo=True) def extdiff(ui, repo, *pats, **opts): @@ -367,6 +369,5 @@ # right encoding) prevents that. mydiff.__doc__ = doc.decode(encoding.encoding) return mydiff - cmdtable[cmd] = (save(cmdline), - cmdtable['extdiff'][1][1:], - _('hg %s [OPTION]... [FILE]...') % cmd) + command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd, + inferrepo=True)(save(cmdline)) diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/gpg.py --- a/hgext/gpg.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/gpg.py Mon Jan 11 14:27:12 2016 -0600 @@ -9,6 +9,7 @@ from mercurial import util, commands, match, cmdutil, error from mercurial import node as hgnode from mercurial.i18n import _ +from mercurial import lock as lockmod cmdtable = {} command = cmdutil.command(cmdtable) @@ -168,7 +169,7 @@ ui.write("%-30s %s\n" % (keystr(ui, k), r)) @command("sigcheck", [], _('hg sigcheck REV')) -def check(ui, repo, rev): +def sigcheck(ui, repo, rev): """verify all the signatures there may be for a particular revision""" mygpg = newgpg(ui) rev = repo.lookup(rev) @@ -222,7 +223,14 @@ See :hg:`help dates` for a list of formats valid for -d/--date. """ + wlock = None + try: + wlock = repo.wlock() + return _dosign(ui, repo, *revs, **opts) + finally: + lockmod.release(wlock) +def _dosign(ui, repo, *revs, **opts): mygpg = newgpg(ui, **opts) sigver = "0" sigmessage = "" diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/graphlog.py --- a/hgext/graphlog.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/graphlog.py Mon Jan 11 14:27:12 2016 -0600 @@ -49,7 +49,7 @@ ] + commands.logopts + commands.walkopts, _('[OPTION]... 
[FILE]'), inferrepo=True) -def graphlog(ui, repo, *pats, **opts): +def glog(ui, repo, *pats, **opts): """show revision history alongside an ASCII revision graph Print a revision history alongside a revision graph drawn with diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/highlight/highlight.py --- a/hgext/highlight/highlight.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/highlight/highlight.py Mon Jan 11 14:27:12 2016 -0600 @@ -25,7 +25,7 @@ # append a to the syntax highlighting css old_header = tmpl.load('header') if SYNTAX_CSS not in old_header: - new_header = old_header + SYNTAX_CSS + new_header = old_header + SYNTAX_CSS tmpl.cache['header'] = new_header text = fctx.data() diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/histedit.py --- a/hgext/histedit.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/histedit.py Mon Jan 11 14:27:12 2016 -0600 @@ -143,19 +143,33 @@ repository that Mercurial does not detect to be related to the source repo, you can add a ``--force`` option. +Config +------ + Histedit rule lines are truncated to 80 characters by default. You can customize this behavior by setting a different length in your configuration file:: [histedit] linelen = 120 # truncate rule lines at 120 characters + +``hg histedit`` attempts to automatically choose an appropriate base +revision to use. To change which base revision is used, define a +revset in your configuration file:: + + [histedit] + defaultrev = only(.) & draft() + +By default each edited revision needs to be present in histedit commands. +To remove revision you need to use ``drop`` operation. You can configure +the drop to be implicit for missing commits by adding: + + [histedit] + dropmissing = True + """ -try: - import cPickle as pickle - pickle.dump # import now -except ImportError: - import pickle +import pickle import errno import os import sys @@ -166,6 +180,7 @@ from mercurial import error from mercurial import copies from mercurial import context +from mercurial import destutil from mercurial import exchange from mercurial import extensions from mercurial import hg @@ -181,32 +196,71 @@ cmdtable = {} command = cmdutil.command(cmdtable) +class _constraints(object): + # aborts if there are multiple rules for one node + noduplicates = 'noduplicates' + # abort if the node does belong to edited stack + forceother = 'forceother' + # abort if the node doesn't belong to edited stack + noother = 'noother' + + @classmethod + def known(cls): + return set([v for k, v in cls.__dict__.items() if k[0] != '_']) + # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should # be specifying the version(s) of Mercurial they are tested with, or # leave the attribute unspecified. 
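The new Config section above documents ``histedit.linelen``, which controls how long generated rule lines may grow before they are truncated. The sketch below shows that truncation in isolation; ``ellipsis()`` is a simplified stand-in for ``mercurial.util.ellipsis``, and the floor of 22 columns matches ``histeditaction.torule()`` later in this patch, which keeps the changeset hash itself from being cut off::

  def ellipsis(text, maxlength):
      """Simplified stand-in for mercurial.util.ellipsis()."""
      if len(text) <= maxlength:
          return text
      return text[:maxlength - 3] + '...'

  def torule(verb, shorthash, rev, summary, linelen=80):
      """Build and truncate a histedit rule line the way torule() does."""
      line = '%s %s %d %s' % (verb, shorthash, rev, summary)
      maxlen = max(linelen, 22)   # never truncate the hash itself
      return ellipsis(line, maxlen)

  print(torule('pick', '5339bf82f0ca', 3, 'Zworgle the foobar ' * 5, linelen=40))
  # -> a 40-column rule line ending in '...'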
testedwith = 'internal' -# i18n: command names and abbreviations must remain untranslated -editcomment = _("""# Edit history between %s and %s -# -# Commits are listed from least to most recent -# -# Commands: -# p, pick = use commit -# e, edit = use commit, but stop for amending -# f, fold = use commit, but combine it with the one above -# r, roll = like fold, but discard this commit's description -# d, drop = remove commit from history -# m, mess = edit commit message without changing commit content -# +actiontable = {} +primaryactions = set() +secondaryactions = set() +tertiaryactions = set() +internalactions = set() + +def geteditcomment(first, last): + """ construct the editor comment + The comment includes:: + - an intro + - sorted primary commands + - sorted short commands + - sorted long commands + + Commands are only included once. + """ + intro = _("""Edit history between %s and %s + +Commits are listed from least to most recent + +Commands: """) + actions = [] + def addverb(v): + a = actiontable[v] + lines = a.message.split("\n") + if len(a.verbs): + v = ', '.join(sorted(a.verbs, key=lambda v: len(v))) + actions.append(" %s = %s" % (v, lines[0])) + actions.extend([' %s' for l in lines[1:]]) + + for v in ( + sorted(primaryactions) + + sorted(secondaryactions) + + sorted(tertiaryactions) + ): + addverb(v) + actions.append('') + + return ''.join(['# %s\n' % l if l else '#\n' + for l in ((intro % (first, last)).split('\n')) + actions]) class histeditstate(object): - def __init__(self, repo, parentctxnode=None, rules=None, keep=None, + def __init__(self, repo, parentctxnode=None, actions=None, keep=None, topmost=None, replacements=None, lock=None, wlock=None): self.repo = repo - self.rules = rules + self.actions = actions self.keep = keep self.topmost = topmost self.parentctxnode = parentctxnode @@ -221,22 +275,24 @@ def read(self): """Load histedit state from disk and set fields appropriately.""" try: - fp = self.repo.vfs('histedit-state', 'r') + state = self.repo.vfs.read('histedit-state') except IOError as err: if err.errno != errno.ENOENT: raise raise error.Abort(_('no histedit in progress')) - try: - data = pickle.load(fp) + if state.startswith('v1\n'): + data = self._load() + parentctxnode, rules, keep, topmost, replacements, backupfile = data + else: + data = pickle.loads(state) parentctxnode, rules, keep, topmost, replacements = data backupfile = None - except pickle.UnpicklingError: - data = self._load() - parentctxnode, rules, keep, topmost, replacements, backupfile = data self.parentctxnode = parentctxnode - self.rules = rules + rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules]) + actions = parserules(rules, self) + self.actions = actions self.keep = keep self.topmost = topmost self.replacements = replacements @@ -248,10 +304,9 @@ fp.write('%s\n' % node.hex(self.parentctxnode)) fp.write('%s\n' % node.hex(self.topmost)) fp.write('%s\n' % self.keep) - fp.write('%d\n' % len(self.rules)) - for rule in self.rules: - fp.write('%s\n' % rule[0]) # action - fp.write('%s\n' % rule[1]) # remainder + fp.write('%d\n' % len(self.actions)) + for action in self.actions: + fp.write('%s\n' % action.tostate()) fp.write('%d\n' % len(self.replacements)) for replacement in self.replacements: fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r) @@ -316,6 +371,7 @@ def inprogress(self): return self.repo.vfs.exists('histedit-state') + class histeditaction(object): def __init__(self, state, node): self.state = state @@ -326,13 +382,58 @@ def fromrule(cls, state, 
rule): """Parses the given rule, returning an instance of the histeditaction. """ - repo = state.repo rulehash = rule.strip().split(' ', 1)[0] try: - node = repo[rulehash].node() + rev = node.bin(rulehash) + except TypeError: + raise error.ParseError("invalid changeset %s" % rulehash) + return cls(state, rev) + + def verify(self, prev): + """ Verifies semantic correctness of the rule""" + repo = self.repo + ha = node.hex(self.node) + try: + self.node = repo[ha].node() except error.RepoError: - raise error.Abort(_('unknown changeset %s listed') % rulehash[:12]) - return cls(state, node) + raise error.ParseError(_('unknown changeset %s listed') + % ha[:12]) + + def torule(self): + """build a histedit rule line for an action + + by default lines are in the form: + + """ + ctx = self.repo[self.node] + summary = '' + if ctx.description(): + summary = ctx.description().splitlines()[0] + line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary) + # trim to 75 columns by default so it's not stupidly wide in my editor + # (the 5 more are left for verb) + maxlen = self.repo.ui.configint('histedit', 'linelen', default=80) + maxlen = max(maxlen, 22) # avoid truncating hash + return util.ellipsis(line, maxlen) + + def tostate(self): + """Print an action in format used by histedit state files + (the first line is a verb, the remainder is the second) + """ + return "%s\n%s" % (self.verb, node.hex(self.node)) + + def constraints(self): + """Return a set of constrains that this action should be verified for + """ + return set([_constraints.noduplicates, _constraints.noother]) + + def nodetoverify(self): + """Returns a node associated with the action that will be used for + verification purposes. + + If the action doesn't correspond to node it should return None + """ + return self.node def run(self): """Runs the action. 
The default behavior is simply apply the action's @@ -346,11 +447,13 @@ parentctx, but does not commit them.""" repo = self.repo rulectx = repo[self.node] - hg.update(repo, self.state.parentctxnode) + hg.update(repo, self.state.parentctxnode, quietempty=True) stats = applychanges(repo.ui, repo, rulectx, {}) if stats and stats[3] > 0: - raise error.InterventionRequired(_('Fix up the change and run ' - 'hg histedit --continue')) + raise error.InterventionRequired( + _('Fix up the change (%s %s)') % + (self.verb, node.short(self.node)), + hint=_('hg histedit --continue to resume')) def continuedirty(self): """Continues the action when changes have been applied to the working @@ -411,7 +514,7 @@ wcpar = repo.dirstate.parents()[0] if ctx.p1().node() == wcpar: # edits are "in place" we do not need to make any merge, - # just applies changes on parent for edition + # just applies changes on parent for editing cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True) stats = None else: @@ -439,7 +542,7 @@ return None for c in ctxs: if not c.mutable(): - raise error.Abort( + raise error.ParseError( _("cannot fold into public change %s") % node.short(c.node())) base = first.parents()[0] @@ -502,6 +605,38 @@ editor=editor) return repo.commitctx(new) +def _isdirtywc(repo): + return repo[None].dirty(missing=True) + +def abortdirty(): + raise error.Abort(_('working copy has pending changes'), + hint=_('amend, commit, or revert them and run histedit ' + '--continue, or abort with histedit --abort')) + +def action(verbs, message, priority=False, internal=False): + def wrap(cls): + assert not priority or not internal + verb = verbs[0] + if priority: + primaryactions.add(verb) + elif internal: + internalactions.add(verb) + elif len(verbs) > 1: + secondaryactions.add(verb) + else: + tertiaryactions.add(verb) + + cls.verb = verb + cls.verbs = verbs + cls.message = message + for verb in verbs: + actiontable[verb] = cls + return cls + return wrap + +@action(['pick', 'p'], + _('use commit'), + priority=True) class pick(histeditaction): def run(self): rulectx = self.repo[self.node] @@ -511,21 +646,41 @@ return super(pick, self).run() +@action(['edit', 'e'], + _('use commit, but stop for amending'), + priority=True) class edit(histeditaction): def run(self): repo = self.repo rulectx = repo[self.node] - hg.update(repo, self.state.parentctxnode) + hg.update(repo, self.state.parentctxnode, quietempty=True) applychanges(repo.ui, repo, rulectx, {}) raise error.InterventionRequired( - _('Make changes as needed, you may commit or record as needed ' - 'now.\nWhen you are finished, run hg histedit --continue to ' - 'resume.')) + _('Editing (%s), you may commit or record as needed now.') + % node.short(self.node), + hint=_('hg histedit --continue to resume')) def commiteditor(self): return cmdutil.getcommiteditor(edit=True, editform='histedit.edit') +@action(['fold', 'f'], + _('use commit, but combine it with the one above')) class fold(histeditaction): + def verify(self, prev): + """ Verifies semantic correctness of the fold rule""" + super(fold, self).verify(prev) + repo = self.repo + if not prev: + c = repo[self.node].parents()[0] + elif not prev.verb in ('pick', 'base'): + return + else: + c = repo[prev.node] + if not c.mutable(): + raise error.ParseError( + _("cannot fold into public change %s") % node.short(c.node())) + + def continuedirty(self): repo = self.repo rulectx = repo[self.node] @@ -618,7 +773,25 @@ replacements.append((ich, (n,))) return repo[n], replacements -class _multifold(fold): +class 
base(histeditaction): + def constraints(self): + return set([_constraints.forceother]) + + def run(self): + if self.repo['.'].node() != self.node: + mergemod.update(self.repo, self.node, False, True) + # branchmerge, force) + return self.continueclean() + + def continuedirty(self): + abortdirty() + + def continueclean(self): + basectx = self.repo['.'] + return basectx, [] + +@action(['_multifold'], + _( """fold subclass used for when multiple folds happen in a row We only want to fire the editor for the folded message once when @@ -626,10 +799,14 @@ similar to rollup, but we should preserve both messages so that when the last fold operation runs we can show the user all the commit messages in their editor. - """ + """), + internal=True) +class _multifold(fold): def skipprompt(self): return True +@action(["roll", "r"], + _("like fold, but discard this commit's description")) class rollup(fold): def mergedescs(self): return False @@ -637,11 +814,16 @@ def skipprompt(self): return True +@action(["drop", "d"], + _('remove commit from history')) class drop(histeditaction): def run(self): parentctx = self.repo[self.state.parentctxnode] return parentctx, [(self.node, tuple())] +@action(["mess", "m"], + _('edit commit message without changing commit content'), + priority=True) class message(histeditaction): def commiteditor(self): return cmdutil.getcommiteditor(edit=True, editform='histedit.mess') @@ -672,20 +854,6 @@ raise error.Abort(msg, hint=hint) return repo.lookup(roots[0]) -actiontable = {'p': pick, - 'pick': pick, - 'e': edit, - 'edit': edit, - 'f': fold, - 'fold': fold, - '_multifold': _multifold, - 'r': rollup, - 'roll': rollup, - 'd': drop, - 'drop': drop, - 'm': message, - 'mess': message, - } @command('histedit', [('', 'commands', '', @@ -699,25 +867,83 @@ ('f', 'force', False, _('force outgoing even for unrelated repositories')), ('r', 'rev', [], _('first revision to be edited'), _('REV'))], - _("ANCESTOR | --outgoing [URL]")) + _("[ANCESTOR] | --outgoing [URL]")) def histedit(ui, repo, *freeargs, **opts): """interactively edit changeset history - This command edits changesets between ANCESTOR and the parent of + This command edits changesets between an ANCESTOR and the parent of the working directory. + The value from the "histedit.defaultrev" config option is used as a + revset to select the base revision when ANCESTOR is not specified. + The first revision returned by the revset is used. By default, this + selects the editable history that is unique to the ancestry of the + working directory. + With --outgoing, this edits changesets not found in the destination repository. If URL of the destination is omitted, the 'default-push' (or 'default') path will be used. - For safety, this command is also aborted if there are ambiguous - outgoing revisions which may confuse users: for example, if there - are multiple branches containing outgoing revisions. + .. container:: verbose + + If you use --outgoing, this command will abort if there are ambiguous + outgoing revisions. For example, if there are multiple branches + containing outgoing revisions. + + Use "min(outgoing() and ::.)" or similar revset specification + instead of --outgoing to specify edit target revision exactly in + such ambiguous situation. See :hg:`help revsets` for detail about + selecting revisions. + + .. container:: verbose + + Examples: + + - A number of changes have been made. + Revision 3 is no longer needed. 
+ + Start history editing from revision 3:: + + hg histedit -r 3 + + An editor opens, containing the list of revisions, + with specific actions specified:: + + pick 5339bf82f0ca 3 Zworgle the foobar + pick 8ef592ce7cc4 4 Bedazzle the zerlog + pick 0a9639fcda9d 5 Morgify the cromulancy - Use "min(outgoing() and ::.)" or similar revset specification - instead of --outgoing to specify edit target revision exactly in - such ambiguous situation. See :hg:`help revsets` for detail about - selecting revisions. + Additional information about the possible actions + to take appears below the list of revisions. + + To remove revision 3 from the history, + its action (at the beginning of the relevant line) + is changed to 'drop':: + + drop 5339bf82f0ca 3 Zworgle the foobar + pick 8ef592ce7cc4 4 Bedazzle the zerlog + pick 0a9639fcda9d 5 Morgify the cromulancy + + - A number of changes have been made. + Revision 2 and 4 need to be swapped. + + Start history editing from revision 2:: + + hg histedit -r 2 + + An editor opens, containing the list of revisions, + with specific actions specified:: + + pick 252a1af424ad 2 Blorb a morgwazzle + pick 5339bf82f0ca 3 Zworgle the foobar + pick 8ef592ce7cc4 4 Bedazzle the zerlog + + To swap revision 2 and 4, its lines are swapped + in the editor:: + + pick 8ef592ce7cc4 4 Bedazzle the zerlog + pick 5339bf82f0ca 3 Zworgle the foobar + pick 252a1af424ad 2 Blorb a morgwazzle Returns 0 on success, 1 if user intervention is required (not only for intentional "edit" command, but also for resolving unexpected @@ -732,7 +958,7 @@ release(state.lock, state.wlock) def _histedit(ui, repo, state, *freeargs, **opts): - # TODO only abort if we try and histedit mq patches, not just + # TODO only abort if we try to histedit mq patches, not just # blanket if mq patches are applied somewhere mq = getattr(repo, 'mq', None) if mq and mq.applied: @@ -775,10 +1001,10 @@ else: revs.extend(freeargs) if len(revs) == 0: - # experimental config: histedit.defaultrev - histeditdefault = ui.config('histedit', 'defaultrev') - if histeditdefault: - revs.append(histeditdefault) + defaultrev = destutil.desthistedit(ui, repo) + if defaultrev is not None: + revs.append(defaultrev) + if len(revs) != 1: raise error.Abort( _('histedit requires exactly one ancestor revision')) @@ -795,9 +1021,9 @@ elif goal == 'edit-plan': state.read() if not rules: - comment = editcomment % (node.short(state.parentctxnode), + comment = geteditcomment(node.short(state.parentctxnode), node.short(state.topmost)) - rules = ruleeditor(repo, ui, state.rules, comment) + rules = ruleeditor(repo, ui, state.actions, comment) else: if rules == '-': f = sys.stdin @@ -805,10 +1031,11 @@ f = open(rules) rules = f.read() f.close() - rules = [l for l in (r.strip() for r in rules.splitlines()) - if l and not l.startswith('#')] - rules = verifyrules(rules, repo, [repo[c] for [_a, c] in state.rules]) - state.rules = rules + actions = parserules(rules, state) + ctxs = [repo[act.nodetoverify()] \ + for act in state.actions if act.nodetoverify()] + warnverifyactions(ui, repo, actions, state, ctxs) + state.actions = actions state.write() return elif goal == 'abort': @@ -840,7 +1067,7 @@ # check whether we should update away if repo.unfiltered().revs('parents() and (%n or %ln::)', state.parentctxnode, leafs | tmpnodes): - hg.clean(repo, state.topmost) + hg.clean(repo, state.topmost, show_stats=True, quietempty=True) cleanupnode(ui, repo, 'created', tmpnodes) cleanupnode(ui, repo, 'temp', leafs) except Exception: @@ -877,8 +1104,9 @@ ctxs = 
[repo[r] for r in revs] if not rules: - comment = editcomment % (node.short(root), node.short(topmost)) - rules = ruleeditor(repo, ui, [['pick', c] for c in ctxs], comment) + comment = geteditcomment(node.short(root), node.short(topmost)) + actions = [pick(state, r) for r in revs] + rules = ruleeditor(repo, ui, actions, comment) else: if rules == '-': f = sys.stdin @@ -886,14 +1114,13 @@ f = open(rules) rules = f.read() f.close() - rules = [l for l in (r.strip() for r in rules.splitlines()) - if l and not l.startswith('#')] - rules = verifyrules(rules, repo, ctxs) + actions = parserules(rules, state) + warnverifyactions(ui, repo, actions, state, ctxs) parentctxnode = repo[root].parents()[0].node() state.parentctxnode = parentctxnode - state.rules = rules + state.actions = actions state.topmost = topmost state.replacements = replacements @@ -906,23 +1133,29 @@ # preprocess rules so that we can hide inner folds from the user # and only show one editor - rules = state.rules[:] - for idx, ((action, ha), (nextact, unused)) in enumerate( - zip(rules, rules[1:] + [(None, None)])): - if action == 'fold' and nextact == 'fold': - state.rules[idx] = '_multifold', ha + actions = state.actions[:] + for idx, (action, nextact) in enumerate( + zip(actions, actions[1:] + [None])): + if action.verb == 'fold' and nextact and nextact.verb == 'fold': + state.actions[idx].__class__ = _multifold - while state.rules: + total = len(state.actions) + pos = 0 + while state.actions: state.write() - action, ha = state.rules.pop(0) - ui.debug('histedit: processing %s %s\n' % (action, ha[:12])) - actobj = actiontable[action].fromrule(state, ha) + actobj = state.actions.pop(0) + pos += 1 + ui.progress(_("editing"), pos, actobj.torule(), + _('changes'), total) + ui.debug('histedit: processing %s %s\n' % (actobj.verb,\ + actobj.torule())) parentctx, replacement_ = actobj.run() state.parentctxnode = parentctx.node() state.replacements.extend(replacement_) state.write() + ui.progress(_("editing"), None) - hg.update(repo, state.parentctxnode) + hg.update(repo, state.parentctxnode, quietempty=True) mapping, tmpnodes, created, ntm = processreplacement(state) if mapping: @@ -963,20 +1196,18 @@ state.clear() if os.path.exists(repo.sjoin('undo')): os.unlink(repo.sjoin('undo')) + if repo.vfs.exists('histedit-last-edit.txt'): + repo.vfs.unlink('histedit-last-edit.txt') def bootstrapcontinue(ui, state, opts): repo = state.repo - if state.rules: - action, currentnode = state.rules.pop(0) - - actobj = actiontable[action].fromrule(state, currentnode) + if state.actions: + actobj = state.actions.pop(0) - s = repo.status() - if s.modified or s.added or s.removed or s.deleted: + if _isdirtywc(repo): actobj.continuedirty() - s = repo.status() - if s.modified or s.added or s.removed or s.deleted: - raise error.Abort(_("working copy still dirty")) + if _isdirtywc(repo): + abortdirty() parentctx, replacements = actobj.continueclean() @@ -1002,32 +1233,15 @@ hint=_('see "hg help phases" for details')) return [c.node() for c in ctxs] -def makedesc(repo, action, rev): - """build a initial action line for a ctx - - line are in the form: - - - """ - ctx = repo[rev] - summary = '' - if ctx.description(): - summary = ctx.description().splitlines()[0] - line = '%s %s %d %s' % (action, ctx, ctx.rev(), summary) - # trim to 80 columns so it's not stupidly wide in my editor - maxlen = repo.ui.configint('histedit', 'linelen', default=80) - maxlen = max(maxlen, 22) # avoid truncating hash - return util.ellipsis(line, maxlen) - -def ruleeditor(repo, ui, 
rules, editcomment=""): +def ruleeditor(repo, ui, actions, editcomment=""): """open an editor to edit rules rules are in the format [ [act, ctx], ...] like in state.rules """ - rules = '\n'.join([makedesc(repo, act, rev) for [act, rev] in rules]) + rules = '\n'.join([act.torule() for act in actions]) rules += '\n\n' rules += editcomment - rules = ui.edit(rules, ui.username()) + rules = ui.edit(rules, ui.username(), {'prefix': 'histedit'}) # Save edit rules in .hg/histedit-last-edit.txt in case # the user needs to ask for help after something @@ -1038,40 +1252,79 @@ return rules -def verifyrules(rules, repo, ctxs): - """Verify that there exists exactly one edit rule per given changeset. +def parserules(rules, state): + """Read the histedit rules string and return list of action objects """ + rules = [l for l in (r.strip() for r in rules.splitlines()) + if l and not l.startswith('#')] + actions = [] + for r in rules: + if ' ' not in r: + raise error.ParseError(_('malformed line "%s"') % r) + verb, rest = r.split(' ', 1) + + if verb not in actiontable: + raise error.ParseError(_('unknown action "%s"') % verb) + + action = actiontable[verb].fromrule(state, rest) + actions.append(action) + return actions + +def warnverifyactions(ui, repo, actions, state, ctxs): + try: + verifyactions(actions, state, ctxs) + except error.ParseError: + if repo.vfs.exists('histedit-last-edit.txt'): + ui.warn(_('warning: histedit rules saved ' + 'to: .hg/histedit-last-edit.txt\n')) + raise + +def verifyactions(actions, state, ctxs): + """Verify that there exists exactly one action per given changeset and + other constraints. Will abort if there are to many or too few rules, a malformed rule, or a rule on a changeset outside of the user-given range. """ - parsed = [] expected = set(c.hex() for c in ctxs) seen = set() - for r in rules: - if ' ' not in r: - raise error.Abort(_('malformed line "%s"') % r) - action, rest = r.split(' ', 1) - ha = rest.strip().split(' ', 1)[0] - try: - ha = repo[ha].hex() - except error.RepoError: - raise error.Abort(_('unknown changeset %s listed') % ha[:12]) - if ha not in expected: - raise error.Abort( - _('may not use changesets other than the ones listed')) - if ha in seen: - raise error.Abort(_('duplicated command for changeset %s') % - ha[:12]) - seen.add(ha) - if action not in actiontable or action.startswith('_'): - raise error.Abort(_('unknown action "%s"') % action) - parsed.append([action, ha]) + prev = None + for action in actions: + action.verify(prev) + prev = action + constraints = action.constraints() + for constraint in constraints: + if constraint not in _constraints.known(): + raise error.ParseError(_('unknown constraint "%s"') % + constraint) + + nodetoverify = action.nodetoverify() + if nodetoverify is not None: + ha = node.hex(nodetoverify) + if _constraints.noother in constraints and ha not in expected: + raise error.ParseError( + _('may not use "%s" with changesets ' + 'other than the ones listed') % action.verb) + if _constraints.forceother in constraints and ha in expected: + raise error.ParseError( + _('may not use "%s" with changesets ' + 'within the edited list') % action.verb) + if _constraints.noduplicates in constraints and ha in seen: + raise error.ParseError(_( + 'duplicated command for changeset %s') % + ha[:12]) + seen.add(ha) missing = sorted(expected - seen) # sort to stabilize output - if missing: - raise error.Abort(_('missing rules for changeset %s') % + + if state.repo.ui.configbool('histedit', 'dropmissing'): + drops = [drop(state, node.bin(n)) 
for n in missing] + # put the in the beginning so they execute immediately and + # don't show in the edit-plan in the future + actions[:0] = drops + elif missing: + raise error.ParseError(_('missing rules for changeset %s') % missing[0][:12], - hint=_('do you want to use the drop action?')) - return parsed + hint=_('use "drop %s" to discard, see also: ' + '"hg help -e histedit.config"') % missing[0][:12]) def newnodestoabort(state): """process the list of replacements to return @@ -1079,7 +1332,7 @@ 1) the list of final node 2) the list of temporary node - This meant to be used on abort as less data are required in this case. + This is meant to be used on abort as less data are required in this case. """ replacements = state.replacements allsuccs = set() @@ -1179,13 +1432,20 @@ # nothing to move moves.append((bk, new[-1])) if moves: - marks = repo._bookmarks - for mark, new in moves: - old = marks[mark] - ui.note(_('histedit: moving bookmarks %s from %s to %s\n') - % (mark, node.short(old), node.short(new))) - marks[mark] = new - marks.write() + lock = tr = None + try: + lock = repo.lock() + tr = repo.transaction('histedit') + marks = repo._bookmarks + for mark, new in moves: + old = marks[mark] + ui.note(_('histedit: moving bookmarks %s from %s to %s\n') + % (mark, node.short(old), node.short(new))) + marks[mark] = new + marks.recordchange(tr) + tr.close() + finally: + release(tr, lock) def cleanupnode(ui, repo, name, nodes): """strip a group of nodes from the repository @@ -1219,8 +1479,8 @@ if os.path.exists(os.path.join(repo.path, 'histedit-state')): state = histeditstate(repo) state.read() - histedit_nodes = set([repo[rulehash].node() for (action, rulehash) - in state.rules if rulehash in repo]) + histedit_nodes = set([action.nodetoverify() for action + in state.actions if action.nodetoverify()]) strip_nodes = set([repo[n].node() for n in nodelist]) common_nodes = histedit_nodes & strip_nodes if common_nodes: @@ -1235,14 +1495,20 @@ return state = histeditstate(repo) state.read() - if state.rules: + if state.actions: # i18n: column positioning for "hg summary" ui.write(_('hist: %s (histedit --continue)\n') % (ui.label(_('%d remaining'), 'histedit.remaining') % - len(state.rules))) + len(state.actions))) def extsetup(ui): cmdutil.summaryhooks.add('histedit', summaryhook) cmdutil.unfinishedstates.append( ['histedit-state', False, True, _('histedit in progress'), _("use 'hg histedit --continue' or 'hg histedit --abort'")]) + cmdutil.afterresolvedstates.append( + ['histedit-state', _('hg histedit --continue')]) + if ui.configbool("experimental", "histeditng"): + globals()['base'] = action(['base', 'b'], + _('checkout changeset and apply further changesets from there') + )(base) diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/largefiles/lfcommands.py --- a/hgext/largefiles/lfcommands.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/largefiles/lfcommands.py Mon Jan 11 14:27:12 2016 -0600 @@ -444,12 +444,14 @@ updated, removed = 0, 0 for lfile in lfiles: abslfile = repo.wjoin(lfile) + abslfileorig = scmutil.origpath(ui, repo, abslfile) absstandin = repo.wjoin(lfutil.standin(lfile)) + absstandinorig = scmutil.origpath(ui, repo, absstandin) if os.path.exists(absstandin): - if (os.path.exists(absstandin + '.orig') and + if (os.path.exists(absstandinorig) and os.path.exists(abslfile)): - shutil.copyfile(abslfile, abslfile + '.orig') - util.unlinkpath(absstandin + '.orig') + shutil.copyfile(abslfile, abslfileorig) + util.unlinkpath(absstandinorig) expecthash = lfutil.readstandin(repo, lfile) if 
expecthash != '': if lfile not in repo[None]: # not switched to normal file diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/largefiles/overrides.py --- a/hgext/largefiles/overrides.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/largefiles/overrides.py Mon Jan 11 14:27:12 2016 -0600 @@ -458,11 +458,11 @@ # writing the files into the working copy and lfcommands.updatelfiles # will update the largefiles. def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force, - partial, acceptremote, followcopies): + acceptremote, followcopies, matcher=None): overwrite = force and not branchmerge actions, diverge, renamedelete = origfn( - repo, p1, p2, pas, branchmerge, force, partial, acceptremote, - followcopies) + repo, p1, p2, pas, branchmerge, force, acceptremote, + followcopies, matcher=matcher) if overwrite: return actions, diverge, renamedelete @@ -481,6 +481,9 @@ (lm, largs, lmsg) = actions.get(lfile, (None, None, None)) (sm, sargs, smsg) = actions.get(standin, (None, None, None)) if sm in ('g', 'dc') and lm != 'r': + if sm == 'dc': + f1, f2, fa, move, anc = sargs + sargs = (p2[f2].flags(), False) # Case 1: normal file in the working copy, largefile in # the second parent usermsg = _('remote turned local normal file %s into a largefile\n' @@ -496,6 +499,9 @@ else: actions[standin] = ('r', None, 'replaced by non-standin') elif lm in ('g', 'dc') and sm != 'r': + if lm == 'dc': + f1, f2, fa, move, anc = largs + largs = (p2[f2].flags(), False) # Case 2: largefile in the working copy, normal file in # the second parent usermsg = _('remote turned local largefile %s into a normal file\n' @@ -538,7 +544,7 @@ # largefiles. This will handle identical edits without prompting the user. def overridefilemerge(origfn, premerge, repo, mynode, orig, fcd, fco, fca, labels=None): - if not lfutil.isstandin(orig): + if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent(): return origfn(premerge, repo, mynode, orig, fcd, fco, fca, labels=labels) @@ -555,7 +561,7 @@ (lfutil.splitstandin(orig), ahash, dhash, ohash), 0) == 1)): repo.wwrite(fcd.path(), fco.data(), fco.flags()) - return True, 0 + return True, 0, False def copiespathcopies(orig, ctx1, ctx2, match=None): copies = orig(ctx1, ctx2, match=match) @@ -806,9 +812,11 @@ ui.status(_("%d largefiles cached\n") % numcached) return result +revsetpredicate = revset.extpredicate() + +@revsetpredicate('pulled()') def pulledrevsetsymbol(repo, subset, x): - """``pulled()`` - Changesets that just has been pulled. + """Changesets that just has been pulled. Only available with largefiles from pull --lfrev expressions. 
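The largefiles hunk above stops assigning ``pulled`` into ``revset.symbols`` directly and instead collects it with a ``@revsetpredicate('pulled()')`` decorator that is wired up later through ``revsetpredicate.setup()`` (mq, rebase and transplant get the same treatment further down). A minimal sketch of that collect-then-register pattern — illustrative class and names, not Mercurial's actual ``extpredicate`` implementation — could look like this::

    class predicateregistry(object):
        """Collect named predicate functions, to be installed in one place."""

        def __init__(self):
            self._table = {}

        def __call__(self, spec):
            # 'pulled()' -> 'pulled'; the signature text is only documentation
            name = spec.split('(', 1)[0]

            def register(func):
                self._table[name] = func
                return func
            return register

        def setup(self, symbols):
            # mutate the global symbol table only once, at extension setup time
            symbols.update(self._table)

    symbols = {}                      # stand-in for revset.symbols
    revsetpredicate = predicateregistry()

    @revsetpredicate('pulled()')
    def pulledrevsetsymbol(repo, subset, x):
        """Changesets that have just been pulled."""
        return subset

    revsetpredicate.setup(symbols)
    assert symbols['pulled'] is pulledrevsetsymbol

Deferring the registration to ``setup()`` keeps the global symbol table untouched until the extension is actually enabled.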
@@ -1358,8 +1366,11 @@ err = 0 return err -def mergeupdate(orig, repo, node, branchmerge, force, partial, +def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs): + matcher = kwargs.get('matcher', None) + # note if this is a partial update + partial = matcher and not matcher.always() wlock = repo.wlock() try: # branch | | | @@ -1399,7 +1410,7 @@ oldstandins = lfutil.getstandinsstate(repo) - result = orig(repo, node, branchmerge, force, partial, *args, **kwargs) + result = orig(repo, node, branchmerge, force, *args, **kwargs) newstandins = lfutil.getstandinsstate(repo) filelist = lfutil.getlfilestoupdate(oldstandins, newstandins) diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/largefiles/uisetup.py --- a/hgext/largefiles/uisetup.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/largefiles/uisetup.py Mon Jan 11 14:27:12 2016 -0600 @@ -9,7 +9,7 @@ '''setup for largefiles extension: uisetup''' from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \ - httppeer, merge, scmutil, sshpeer, wireproto, revset, subrepo, copies + httppeer, merge, scmutil, sshpeer, wireproto, subrepo, copies from mercurial.i18n import _ from mercurial.hgweb import hgweb_mod, webcommands @@ -83,7 +83,6 @@ ('', 'lfrev', [], _('download largefiles for these revisions'), _('REV'))] entry[1].extend(pullopt) - revset.symbols['pulled'] = overrides.pulledrevsetsymbol entry = extensions.wrapcommand(commands.table, 'clone', overrides.overrideclone) @@ -170,3 +169,5 @@ if name == 'transplant': extensions.wrapcommand(getattr(module, 'cmdtable'), 'transplant', overrides.overridetransplant) + + overrides.revsetpredicate.setup() diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/mq.py --- a/hgext/mq.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/mq.py Mon Jan 11 14:27:12 2016 -0600 @@ -68,6 +68,7 @@ from mercurial import commands, cmdutil, hg, scmutil, util, revset from mercurial import extensions, error, phases from mercurial import patch as patchmod +from mercurial import lock as lockmod from mercurial import localrepo from mercurial import subrepo import os, re, errno, shutil @@ -699,11 +700,13 @@ absf = repo.wjoin(f) if os.path.lexists(absf): self.ui.note(_('saving current version of %s as %s\n') % - (f, f + '.orig')) + (f, scmutil.origpath(self.ui, repo, f))) + + absorig = scmutil.origpath(self.ui, repo, absf) if copy: - util.copyfile(absf, absf + '.orig') + util.copyfile(absf, absorig) else: - util.rename(absf, absf + '.orig') + util.rename(absf, absorig) def printdiff(self, repo, diffopts, node1, node2=None, files=None, fp=None, changes=None, opts={}): @@ -1790,27 +1793,34 @@ # Ensure we create a new changeset in the same phase than # the old one. 
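The largefiles and mq hunks above replace hard-coded ``f + '.orig'`` backups with ``scmutil.origpath(ui, repo, f)``, so the backup location is computed in one place. A simplified sketch of such a helper, assuming a ``ui.origbackuppath``-style option that redirects backups into a separate directory (a rough approximation, not the real ``scmutil.origpath``)::

    import os

    def origbackuppath(config, repo_root, filepath):
        """Return where the backup of filepath should live.

        Without a configured backup directory this is the classic
        'filepath.orig'; with one, backups are mirrored under that directory.
        """
        backupdir = config.get('origbackuppath')
        if not backupdir:
            return filepath + '.orig'
        relpath = os.path.relpath(filepath, repo_root)
        fullpath = os.path.join(repo_root, backupdir, relpath)
        parent = os.path.dirname(fullpath)
        if not os.path.isdir(parent):
            os.makedirs(parent)
        return fullpath

    # default behaviour keeps the backup next to the original file
    assert origbackuppath({}, '/repo', '/repo/hgext/mq.py') == '/repo/hgext/mq.py.orig'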
- n = newcommit(repo, oldphase, message, user, ph.date, + lock = tr = None + try: + lock = repo.lock() + tr = repo.transaction('mq') + n = newcommit(repo, oldphase, message, user, ph.date, match=match, force=True, editor=editor) - # only write patch after a successful commit - c = [list(x) for x in refreshchanges] - if inclsubs: - self.putsubstate2changes(substatestate, c) - chunks = patchmod.diff(repo, patchparent, - changes=c, opts=diffopts) - comments = str(ph) - if comments: - patchf.write(comments) - for chunk in chunks: - patchf.write(chunk) - patchf.close() - - marks = repo._bookmarks - for bm in bmlist: - marks[bm] = n - marks.write() - - self.applied.append(statusentry(n, patchfn)) + # only write patch after a successful commit + c = [list(x) for x in refreshchanges] + if inclsubs: + self.putsubstate2changes(substatestate, c) + chunks = patchmod.diff(repo, patchparent, + changes=c, opts=diffopts) + comments = str(ph) + if comments: + patchf.write(comments) + for chunk in chunks: + patchf.write(chunk) + patchf.close() + + marks = repo._bookmarks + for bm in bmlist: + marks[bm] = n + marks.recordchange(tr) + tr.close() + + self.applied.append(statusentry(n, patchfn)) + finally: + lockmod.release(lock, tr) except: # re-raises ctx = repo[cparents[0]] repo.dirstate.rebuild(ctx.node(), ctx.manifest()) @@ -3548,9 +3558,11 @@ # i18n: column positioning for "hg summary" ui.note(_("mq: (empty queue)\n")) +revsetpredicate = revset.extpredicate() + +@revsetpredicate('mq()') def revsetmq(repo, subset, x): - """``mq()`` - Changesets managed by MQ. + """Changesets managed by MQ. """ revset.getargs(x, 0, 0, _("mq takes no arguments")) applied = set([repo[r.node].rev() for r in repo.mq.applied]) @@ -3586,7 +3598,7 @@ if extmodule.__file__ != __file__: dotable(getattr(extmodule, 'cmdtable', {})) - revset.symbols['mq'] = revsetmq + revsetpredicate.setup() colortable = {'qguard.negative': 'red', 'qguard.positive': 'yellow', diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/pager.py --- a/hgext/pager.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/pager.py Mon Jan 11 14:27:12 2016 -0600 @@ -49,9 +49,13 @@ To ignore global commands like :hg:`version` or :hg:`help`, you have to specify them in your user configuration file. -The --pager=... option can also be used to control when the pager is -used. Use a boolean value like yes, no, on, off, or use auto for -normal behavior. +To control whether the pager is used at all for an individual command, +you can use --pager=:: + + - use as needed: `auto`. + - require the pager: `yes` or `on`. + - suppress the pager: `no` or `off` (any unrecognized value + will also work). ''' diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/patchbomb.py --- a/hgext/patchbomb.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/patchbomb.py Mon Jan 11 14:27:12 2016 -0600 @@ -44,6 +44,13 @@ directly from the commandline. See the [email] and [smtp] sections in hgrc(5) for details. +By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if +you do not supply one via configuration or the command line. You can +override this to never prompt by configuring an empty value:: + + [email] + cc = + You can control the default inclusion of an introduction message with the ``patchbomb.intro`` configuration option. 
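The help text added above depends on telling "``cc`` was configured to an empty value" apart from "``cc`` was never configured at all"; only the latter should trigger a prompt, and the ``getaddrs`` hunk further down uses ``ui.hasconfig`` for exactly that. A toy sketch of the distinction, with a stand-in config section and hypothetical helper names::

    class section(dict):
        """Toy stand-in for one ui config section: presence matters, not value."""

        def hasitem(self, key):
            return key in self

    def getaddr(cfg, key, prompt):
        """Return an address, prompting only when the key was never set at all."""
        addr = cfg.get(key) or ''
        if not addr and not cfg.hasitem(key):
            addr = prompt()           # unconfigured: fall back to asking
        return addr

    assert getaddr(section(cc=''), 'cc', lambda: 'asked') == ''     # empty value: no prompt
    assert getaddr(section(), 'cc', lambda: 'asked') == 'asked'     # missing key: prompt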
The configuration is always overwritten by command line flags like --intro and --desc:: @@ -58,7 +65,7 @@ ''' import os, errno, socket, tempfile, cStringIO -import email +import email as emailmod from mercurial import cmdutil, commands, hg, mail, patch, util, error from mercurial import scmutil @@ -155,7 +162,7 @@ body += '\n'.join(patchlines) if addattachment: - msg = email.MIMEMultipart.MIMEMultipart() + msg = emailmod.MIMEMultipart.MIMEMultipart() if body: msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test'))) p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', @@ -272,15 +279,15 @@ or prompt(ui, 'Subject:', 'A bundle for your repository')) body = _getdescription(repo, '', sender, **opts) - msg = email.MIMEMultipart.MIMEMultipart() + msg = emailmod.MIMEMultipart.MIMEMultipart() if body: msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test'))) - datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle') + datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle') datapart.set_payload(bundle) bundlename = '%s.hg' % opts.get('bundlename', 'bundle') datapart.add_header('Content-Disposition', 'attachment', filename=bundlename) - email.Encoders.encode_base64(datapart) + emailmod.Encoders.encode_base64(datapart) msg.attach(datapart) msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test')) return [(msg, subj, None)] @@ -403,7 +410,7 @@ ('', 'intro', None, _('send an introduction email for a single patch')), ] + emailopts + commands.remoteopts, _('hg email [OPTION]... [DEST]...')) -def patchbomb(ui, repo, *revs, **opts): +def email(ui, repo, *revs, **opts): '''send changesets by email By default, diffs are sent in the format generated by @@ -596,10 +603,12 @@ # not on the command line: fallback to config and then maybe ask addr = (ui.config('email', configkey) or - ui.config('patchbomb', configkey) or - '') - if not addr and ask: - addr = prompt(ui, header, default=default) + ui.config('patchbomb', configkey)) + if not addr: + specified = (ui.hasconfig('email', configkey) or + ui.hasconfig('patchbomb', configkey)) + if not specified and ask: + addr = prompt(ui, header, default=default) if addr: showaddrs.append('%s: %s' % (header, addr)) return mail.addrlistencode(ui, [addr], _charsets, opts.get('test')) @@ -641,7 +650,7 @@ if not parent.endswith('>'): parent += '>' - sender_addr = email.Utils.parseaddr(sender)[1] + sender_addr = emailmod.Utils.parseaddr(sender)[1] sender = mail.addressencode(ui, sender, _charsets, opts.get('test')) sendmail = None firstpatch = None @@ -660,7 +669,7 @@ parent = m['Message-Id'] m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version() - m['Date'] = email.Utils.formatdate(start_time[0], localtime=True) + m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True) start_time = (start_time[0] + 1, start_time[1]) m['From'] = sender @@ -678,7 +687,7 @@ fp = util.popen(os.environ['PAGER'], 'w') else: fp = ui - generator = email.Generator.Generator(fp, mangle_from_=False) + generator = emailmod.Generator.Generator(fp, mangle_from_=False) try: generator.flatten(m, 0) fp.write('\n') @@ -702,7 +711,7 @@ # Exim does not remove the Bcc field del m['Bcc'] fp = cStringIO.StringIO() - generator = email.Generator.Generator(fp, mangle_from_=False) + generator = emailmod.Generator.Generator(fp, mangle_from_=False) generator.flatten(m, 0) sendmail(sender_addr, to + bcc + cc, fp.getvalue()) diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/progress.py --- a/hgext/progress.py Fri Jan 08 16:27:25 2016 +0100 +++ 
/dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# progress.py show progress bars for some actions -# -# Copyright (C) 2010 Augie Fackler -# -# This software may be used and distributed according to the terms of the -# GNU General Public License version 2 or any later version. - -"""show progress bars for some actions (DEPRECATED) - -This extension has been merged into core, you can remove it from your config. -See hg help config.progress for configuration options. -""" -# Note for extension authors: ONLY specify testedwith = 'internal' for -# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should -# be specifying the version(s) of Mercurial they are tested with, or -# leave the attribute unspecified. -testedwith = 'internal' diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/rebase.py --- a/hgext/rebase.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/rebase.py Mon Jan 11 14:27:12 2016 -0600 @@ -30,8 +30,11 @@ revtodo = -1 nullmerge = -2 revignored = -3 -# To do with obsolescence +# successor in rebase destination revprecursor = -4 +# plain prune (no successor) +revpruned = -5 +revskipped = (revignored, revprecursor, revpruned) cmdtable = {} command = cmdutil.command(cmdtable) @@ -44,14 +47,6 @@ def _nothingtorebase(): return 1 -def _savegraft(ctx, extra): - s = ctx.extra().get('source', None) - if s is not None: - extra['source'] = s - -def _savebranch(ctx, extra): - extra['branch'] = ctx.branch() - def _makeextrafn(copiers): """make an extrafn out of the given copy-functions. @@ -69,6 +64,9 @@ branch = repo[None].branch() return repo[branch].rev() +revsetpredicate = revset.extpredicate() + +@revsetpredicate('_destrebase') def _revsetdestrebase(repo, subset, x): # ``_rebasedefaultdest()`` @@ -115,50 +113,35 @@ useful for linearizing *local* changes relative to a master development tree. - You should not rebase changesets that have already been shared - with others. Doing so will force everybody else to perform the - same rebase or they will end up with duplicated changesets after - pulling in your rebased changesets. - - In its default configuration, Mercurial will prevent you from - rebasing published changes. See :hg:`help phases` for details. + Published commits cannot be rebased (see :hg:`help phases`). + To copy commits, see :hg:`help graft`. If you don't specify a destination changeset (``-d/--dest``), rebase uses the current branch tip as the destination. (The destination changeset is not modified by rebasing, but new changesets are added as its descendants.) - You can specify which changesets to rebase in two ways: as a - "source" changeset or as a "base" changeset. Both are shorthand - for a topologically related set of changesets (the "source - branch"). If you specify source (``-s/--source``), rebase will - rebase that changeset and all of its descendants onto dest. If you - specify base (``-b/--base``), rebase will select ancestors of base - back to but not including the common ancestor with dest. Thus, - ``-b`` is less precise but more convenient than ``-s``: you can - specify any changeset in the source branch, and rebase will select - the whole branch. If you specify neither ``-s`` nor ``-b``, rebase - uses the parent of the working directory as the base. + There are three ways to select changesets:: + + 1. Explicitly select them using ``--rev``. + + 2. Use ``--source`` to select a root changeset and include all of its + descendants. + + 3. 
Use ``--base`` to select a changeset; rebase will find ancestors + and their descendants which are not also ancestors of the destination. - For advanced usage, a third way is available through the ``--rev`` - option. It allows you to specify an arbitrary set of changesets to - rebase. Descendants of revs you specify with this option are not - automatically included in the rebase. + Rebase will destroy original changesets unless you use ``--keep``. + It will also move your bookmarks (even if you do). + + Some changesets may be dropped if they do not contribute changes + (e.g. merges from the destination branch). - By default, rebase recreates the changesets in the source branch - as descendants of dest and then destroys the originals. Use - ``--keep`` to preserve the original source changesets. Some - changesets in the source branch (e.g. merges from the destination - branch) may be dropped if they no longer contribute any change. + Unlike ``merge``, rebase will do nothing if you are at the branch tip of + a named branch with two heads. You will need to explicitly specify source + and/or destination. - One result of the rules for selecting the destination changeset - and source branch is that, unlike ``merge``, rebase will do - nothing if you are at the branch tip of a named branch - with two heads. You need to explicitly specify source and/or - destination (or ``update`` to the other head, if it's the head of - the intended source branch). - - If a rebase is interrupted to manually resolve a merge, it can be + If a rebase is interrupted to manually resolve a conflict, it can be continued with --continue/-c or aborted with --abort/-a. .. container:: verbose @@ -220,8 +203,9 @@ abortf = opts.get('abort') collapsef = opts.get('collapse', False) collapsemsg = cmdutil.logmessage(ui, opts) + date = opts.get('date', None) e = opts.get('extrafn') # internal, used by e.g. 
hgsubversion - extrafns = [_savegraft] + extrafns = [] if e: extrafns = [e] keepf = opts.get('keep', False) @@ -367,6 +351,13 @@ # - split changesets are not rebased if at least one of the # changeset resulting from the split is an ancestor of dest rebaseset = rebasesetrevs - set(obsoletenotrebased) + if rebasesetrevs and not rebaseset: + msg = _('all requested changesets have equivalents ' + 'or were marked as obsolete') + hint = _('to force the rebase, set the config ' + 'experimental.rebaseskipobsolete to False') + raise error.Abort(msg, hint=hint) + result = buildstate(repo, dest, rebaseset, collapsef, obsoletenotrebased) @@ -390,18 +381,13 @@ if dest.closesbranch() and not keepbranchesf: ui.status(_('reopening closed branch head %s\n') % dest) - if keepbranchesf: - # insert _savebranch at the start of extrafns so if - # there's a user-provided extrafn it can clobber branch if - # desired - extrafns.insert(0, _savebranch) - if collapsef: - branches = set() - for rev in state: - branches.add(repo[rev].branch()) - if len(branches) > 1: - raise error.Abort(_('cannot collapse multiple named ' - 'branches')) + if keepbranchesf and collapsef: + branches = set() + for rev in state: + branches.add(repo[rev].branch()) + if len(branches) > 1: + raise error.Abort(_('cannot collapse multiple named ' + 'branches')) # Rebase if not targetancestors: @@ -434,7 +420,7 @@ targetancestors) storestatus(repo, originalwd, target, state, collapsef, keepf, keepbranchesf, external, activebookmark) - if len(repo.parents()) == 2: + if len(repo[None].parents()) == 2: repo.ui.debug('resuming interrupted rebase\n') else: try: @@ -454,7 +440,8 @@ editor = cmdutil.getcommiteditor(editform=editform, **opts) newnode = concludenode(repo, rev, p1, p2, extrafn=extrafn, editor=editor, - keepbranches=keepbranchesf) + keepbranches=keepbranchesf, + date=date) else: # Skip commit if we are collapsing repo.dirstate.beginparentchange() @@ -482,6 +469,9 @@ targetctx.description().split('\n', 1)[0]) msg = _('note: not rebasing %s, already in destination as %s\n') ui.status(msg % (desc, desctarget)) + elif state[rev] == revpruned: + msg = _('note: not rebasing %s, it has no successor\n') + ui.status(msg % desc) else: ui.status(_('already rebased %s as %s\n') % (desc, repo[state[rev]])) @@ -505,7 +495,8 @@ editor = cmdutil.getcommiteditor(edit=editopt, editform=editform) newnode = concludenode(repo, rev, p1, external, commitmsg=commitmsg, extrafn=extrafn, editor=editor, - keepbranches=keepbranchesf) + keepbranches=keepbranchesf, + date=date) if newnode is None: newrev = target else: @@ -543,13 +534,19 @@ collapsedas = newnode clearrebased(ui, repo, state, skipped, collapsedas) - if currentbookmarks: - updatebookmarks(repo, targetnode, nstate, currentbookmarks) - if activebookmark not in repo._bookmarks: - # active bookmark was divergent one and has been deleted - activebookmark = None + tr = None + try: + tr = repo.transaction('bookmark') + if currentbookmarks: + updatebookmarks(repo, targetnode, nstate, currentbookmarks, tr) + if activebookmark not in repo._bookmarks: + # active bookmark was divergent one and has been deleted + activebookmark = None + tr.close() + finally: + release(tr) + clearstatus(repo) - clearstatus(repo) ui.note(_("rebase completed\n")) util.unlinkpath(repo.sjoin('undo'), ignoremissing=True) if skipped: @@ -586,7 +583,7 @@ ', '.join(str(p) for p in sorted(parents)))) def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None, - keepbranches=False): + keepbranches=False, date=None): '''Commit 
the wd changes with parents p1 and p2. Reuse commit info from rev but also store useful information in extra. Return node of committed revision.''' @@ -597,7 +594,10 @@ if commitmsg is None: commitmsg = ctx.description() keepbranch = keepbranches and repo[p1].branch() != ctx.branch() - extra = {'rebase_source': ctx.hex()} + extra = ctx.extra().copy() + if not keepbranches: + del extra['branch'] + extra['rebase_source'] = ctx.hex() if extrafn: extrafn(ctx, extra) @@ -608,8 +608,10 @@ if keepbranch: repo.ui.setconfig('ui', 'allowemptycommit', True) # Commit might fail if unresolved files exist + if date is None: + date = ctx.date() newnode = repo.commit(text=commitmsg, user=ctx.user(), - date=ctx.date(), extra=extra, editor=editor) + date=date, extra=extra, editor=editor) finally: repo.ui.restoreconfig(backup) @@ -625,7 +627,7 @@ # Update to target and merge it with local if repo['.'].rev() != p1: repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1])) - merge.update(repo, p1, False, True, False) + merge.update(repo, p1, False, True) else: repo.ui.debug(" already in target\n") repo.dirstate.write(repo.currenttransaction()) @@ -634,7 +636,7 @@ repo.ui.debug(" detach base %d:%s\n" % (base, repo[base])) # When collapsing in-place, the parent is the common ancestor, we # have to allow merging with it. - stats = merge.update(repo, rev, True, True, False, base, collapse, + stats = merge.update(repo, rev, True, True, base, collapse, labels=['dest', 'source']) if collapse: copies.duplicatecopies(repo, rev, target) @@ -668,7 +670,7 @@ elif p1n in state: if state[p1n] == nullmerge: p1 = target - elif state[p1n] in (revignored, revprecursor): + elif state[p1n] in revskipped: p1 = nearestrebased(repo, p1n, state) if p1 is None: p1 = target @@ -684,7 +686,7 @@ if p2n in state: if p1 == target: # p1n in targetancestors or external p1 = state[p2n] - elif state[p2n] in (revignored, revprecursor): + elif state[p2n] in revskipped: p2 = nearestrebased(repo, p2n, state) if p2 is None: # no ancestors rebased yet, detach @@ -802,7 +804,7 @@ mq.seriesdirty = True mq.savedirty() -def updatebookmarks(repo, targetnode, nstate, originalbookmarks): +def updatebookmarks(repo, targetnode, nstate, originalbookmarks, tr): 'Move bookmarks to their correct changesets, and delete divergent ones' marks = repo._bookmarks for k, v in originalbookmarks.iteritems(): @@ -810,8 +812,7 @@ # update the bookmarks for revs that have moved marks[k] = nstate[v] bookmarks.deletedivergent(repo, [targetnode], k) - - marks.write() + marks.recordchange(tr) def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches, external, activebookmark): @@ -874,7 +875,7 @@ else: oldrev, newrev = l.split(':') if newrev in (str(nullmerge), str(revignored), - str(revprecursor)): + str(revprecursor), str(revpruned)): state[repo[oldrev].rev()] = int(newrev) elif newrev == nullid: state[repo[oldrev].rev()] = revtodo @@ -908,7 +909,7 @@ def needupdate(repo, state): '''check whether we should `update --clean` away from a merge, or if somehow the working dir got forcibly updated, e.g. by older hg''' - parents = [p.rev() for p in repo.parents()] + parents = [p.rev() for p in repo[None].parents()] # Are we in a merge state at all? 
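In the ``concludenode`` hunk above, rebase now copies the source changeset's ``extra`` wholesale, drops ``branch`` unless ``--keepbranches`` was given, and records ``rebase_source``, rather than rebuilding ``extra`` through the removed ``_savegraft``/``_savebranch`` helpers. A condensed sketch of that bookkeeping with plain dicts (hypothetical function name, not the patch's code)::

    def rebasedextra(srcextra, srchex, keepbranches):
        """Assemble the extra dict recorded on the rebased changeset."""
        extra = dict(srcextra)            # carry over the source's extra verbatim
        if not keepbranches:
            extra.pop('branch', None)     # let the new commit take the current branch
        extra['rebase_source'] = srchex   # remember which changeset it came from
        return extra

    extra = rebasedextra({'branch': 'stable'}, 'abc123def456', keepbranches=False)
    assert 'branch' not in extra and extra['rebase_source'] == 'abc123def456'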
if len(parents) < 2: @@ -952,7 +953,7 @@ if cleanup: # Update away from the rebase if necessary if needupdate(repo, state): - merge.update(repo, originalwd, False, True, False) + merge.update(repo, originalwd, False, True) # Strip from the first rebased revision rebased = filter(lambda x: x >= 0 and x != target, state.values()) @@ -1058,7 +1059,10 @@ for ignored in set(rebasedomain) - set(rebaseset): state[ignored] = revignored for r in obsoletenotrebased: - state[r] = revprecursor + if obsoletenotrebased[r] is None: + state[r] = revpruned + else: + state[r] = revprecursor return repo['.'].rev(), dest.rev(), state def clearrebased(ui, repo, state, skipped, collapsedas=None): @@ -1172,7 +1176,9 @@ def _computeobsoletenotrebased(repo, rebasesetrevs, dest): """return a mapping obsolete => successor for all obsolete nodes to be - rebased that have a successors in the destination""" + rebased that have a successors in the destination + + obsolete => None entries in the mapping indicate nodes with no succesor""" obsoletenotrebased = {} # Build a mapping successor => obsolete nodes for the obsolete @@ -1198,6 +1204,11 @@ for s in allsuccessors: if s in ancs: obsoletenotrebased[allsuccessors[s]] = s + elif (s == allsuccessors[s] and + allsuccessors.values().count(s) == 1): + # plain prune + obsoletenotrebased[s] = None + return obsoletenotrebased def summaryhook(ui, repo): @@ -1228,6 +1239,8 @@ cmdutil.unfinishedstates.append( ['rebasestate', False, False, _('rebase in progress'), _("use 'hg rebase --continue' or 'hg rebase --abort'")]) + cmdutil.afterresolvedstates.append( + ['rebasestate', _('hg rebase --continue')]) # ensure rebased rev are not hidden extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible) - revset.symbols['_destrebase'] = _revsetdestrebase + revsetpredicate.setup() diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/share.py --- a/hgext/share.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/share.py Mon Jan 11 14:27:12 2016 -0600 @@ -73,7 +73,8 @@ the broken clone to reset it to a changeset that still exists. 
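The ``_computeobsoletenotrebased`` hunk above adds a second case to the successor mapping: a node that maps only to itself, and appears nowhere else as a successor, was pruned outright and is recorded as ``None``. Restated with plain dictionaries (a paraphrase of the hunk's logic, not the exact implementation)::

    def obsoletenotrebased(allsuccessors, destancestors):
        """Map obsolete revisions to their successor in the destination,
        or to None when they were pruned without any successor."""
        mapping = {}
        for succ, obsolete in allsuccessors.items():
            if succ in destancestors:
                # the rewritten form already exists in the destination
                mapping[obsolete] = succ
            elif (succ == obsolete and
                  list(allsuccessors.values()).count(succ) == 1):
                # a node standing only for itself was pruned outright
                mapping[succ] = None
        return mapping

    allsuccessors = {'S1': 'O1',   # O1 was rewritten as S1
                     'P2': 'P2'}   # P2 has no real successor: plain prune
    assert obsoletenotrebased(allsuccessors, set(['S1'])) == {'O1': 'S1', 'P2': None}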
""" - return hg.share(ui, source, dest, not noupdate, bookmarks) + return hg.share(ui, source, dest=dest, update=not noupdate, + bookmarks=bookmarks) @command('unshare', [], '') def unshare(ui, repo): @@ -121,7 +122,7 @@ return orig(ui, source, *args, **opts) def extsetup(ui): - extensions.wrapfunction(bookmarks.bmstore, 'getbkfile', getbkfile) + extensions.wrapfunction(bookmarks, '_getbkfile', getbkfile) extensions.wrapfunction(bookmarks.bmstore, 'recordchange', recordchange) extensions.wrapfunction(bookmarks.bmstore, '_writerepo', writerepo) extensions.wrapcommand(commands.table, 'clone', clone) @@ -149,12 +150,12 @@ srcurl, branches = parseurl(source) return repository(repo.ui, srcurl) -def getbkfile(orig, self, repo): +def getbkfile(orig, repo): if _hassharedbookmarks(repo): srcrepo = _getsrcrepo(repo) if srcrepo is not None: repo = srcrepo - return orig(self, repo) + return orig(repo) def recordchange(orig, self, tr): # Continue with write to local bookmarks file as usual diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/shelve.py --- a/hgext/shelve.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/shelve.py Mon Jan 11 14:27:12 2016 -0600 @@ -224,7 +224,14 @@ def createcmd(ui, repo, pats, opts): """subcommand that creates a new shelve""" + wlock = repo.wlock() + try: + cmdutil.checkunfinished(repo) + return _docreatecmd(ui, repo, pats, opts) + finally: + lockmod.release(wlock) +def _docreatecmd(ui, repo, pats, opts): def mutableancestors(ctx): """return all mutable ancestors for ctx (included) @@ -276,7 +283,7 @@ repo.mq.checkapplied = saved if parent.node() != nullid: - desc = "changes to '%s'" % parent.description().split('\n', 1)[0] + desc = "changes to: %s" % parent.description().split('\n', 1)[0] else: desc = '(changes in empty repository)' @@ -285,9 +292,8 @@ name = opts['name'] - wlock = lock = tr = None + lock = tr = None try: - wlock = repo.wlock() lock = repo.lock() # use an uncommitted transaction to generate the bundle to avoid @@ -346,7 +352,7 @@ _aborttransaction(repo) finally: - lockmod.release(tr, lock, wlock) + lockmod.release(tr, lock) def cleanupcmd(ui, repo): """subcommand that deletes all shelves""" @@ -467,7 +473,6 @@ def unshelveabort(ui, repo, state, opts): """subcommand that abort an in-progress unshelve""" - wlock = repo.wlock() lock = None try: checkparents(repo, state) @@ -491,7 +496,7 @@ finally: shelvedstate.clear(repo) ui.warn(_("unshelve of '%s' aborted\n") % state.name) - lockmod.release(lock, wlock) + lockmod.release(lock) def mergefiles(ui, repo, wctx, shelvectx): """updates to wctx and merges the changes from shelvectx into the @@ -507,7 +512,7 @@ # revert will overwrite unknown files, so move them out of the way for file in repo.status(unknown=True).unknown: if file in files: - util.rename(file, file + ".orig") + util.rename(file, scmutil.origpath(ui, repo, file)) ui.pushbuffer(True) cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(), *pathtofiles(repo, files), @@ -527,11 +532,10 @@ """subcommand to continue an in-progress unshelve""" # We're finishing off a merge. First parent is our original # parent, second is the temporary "fake" commit we're unshelving. 
- wlock = repo.wlock() lock = None try: checkparents(repo, state) - ms = merge.mergestate(repo) + ms = merge.mergestate.read(repo) if [f for f in ms if ms[f] == 'u']: raise error.Abort( _("unresolved conflicts, can't continue"), @@ -565,15 +569,16 @@ unshelvecleanup(ui, repo, state.name, opts) ui.status(_("unshelve of '%s' complete\n") % state.name) finally: - lockmod.release(lock, wlock) + lockmod.release(lock) @command('unshelve', [('a', 'abort', None, _('abort an incomplete unshelve operation')), ('c', 'continue', None, _('continue an incomplete unshelve operation')), - ('', 'keep', None, + ('k', 'keep', None, _('keep shelve after unshelving')), + ('t', 'tool', '', _('specify merge tool')), ('', 'date', '', _('set date for temporary commits (DEPRECATED)'), _('DATE'))], _('hg unshelve [SHELVED]')) @@ -609,6 +614,13 @@ than ``maxbackups`` backups are kept, if same timestamp prevents from deciding exact order of them, for safety. """ + wlock = repo.wlock() + try: + return _dounshelve(ui, repo, *shelved, **opts) + finally: + lockmod.release(wlock) + +def _dounshelve(ui, repo, *shelved, **opts): abortf = opts['abort'] continuef = opts['continue'] if not abortf and not continuef: @@ -620,6 +632,8 @@ if shelved: raise error.Abort(_('cannot combine abort/continue with ' 'naming a shelved change')) + if abortf and opts.get('tool', False): + ui.warn(_('tool option will be ignored\n')) try: state = shelvedstate.load(repo) @@ -647,9 +661,10 @@ raise error.Abort(_("shelved change '%s' not found") % basename) oldquiet = ui.quiet - wlock = lock = tr = None + lock = tr = None + forcemerge = ui.backupconfig('ui', 'forcemerge') try: - wlock = repo.wlock() + ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'unshelve') lock = repo.lock() tr = repo.transaction('unshelve', report=lambda x: None) @@ -675,7 +690,7 @@ backup = repo.ui.backupconfig('phases', 'new-commit') try: - repo.ui. setconfig('phases', 'new-commit', phases.secret) + repo.ui.setconfig('phases', 'new-commit', phases.secret) return repo.commit(message, 'shelve@localhost', opts.get('date'), match) finally: @@ -706,6 +721,7 @@ 'rev' : [shelvectx.rev()], 'dest' : str(tmpwctx.rev()), 'keep' : True, + 'tool' : opts.get('tool', ''), }) except error.InterventionRequired: tr.close() @@ -743,7 +759,8 @@ ui.quiet = oldquiet if tr: tr.release() - lockmod.release(lock, wlock) + lockmod.release(lock) + ui.restoreconfig(forcemerge) @command('shelve', [('A', 'addremove', None, @@ -796,8 +813,6 @@ To delete specific shelved changes, use ``--delete``. To delete all shelved changes, use ``--cleanup``. 
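The shelve and unshelve hunks above (and transplant, later in the patch) move locking into a thin wrapper: the public command takes ``repo.wlock()``, performs checks such as ``cmdutil.checkunfinished``, and delegates to a private ``_do...`` function, so the whole operation runs under one working-directory lock. A schematic, decorator-shaped version of that wrapper, with ``threading.Lock`` standing in for the repository lock::

    import threading

    def withwlock(lock):
        """Decorator form of the wrapper: take the lock, then run the real body."""
        def wrap(bodyfunc):
            def command(*args, **kwargs):
                lock.acquire()
                try:
                    # pre-flight checks that must run under the lock go here,
                    # e.g. refusing to start while another operation is unfinished
                    return bodyfunc(*args, **kwargs)
                finally:
                    lock.release()
            return command
        return wrap

    wlock = threading.Lock()

    @withwlock(wlock)
    def _docreatecmd(name):
        return "shelved changes as '%s'" % name

    assert _docreatecmd('default').endswith("'default'")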
''' - cmdutil.checkunfinished(repo) - allowables = [ ('addremove', set(['create'])), # 'create' is pseudo action ('cleanup', set(['cleanup'])), @@ -837,3 +852,5 @@ [shelvedstate._filename, False, False, _('unshelve already in progress'), _("use 'hg unshelve --continue' or 'hg unshelve --abort'")]) + cmdutil.afterresolvedstates.append( + [shelvedstate._filename, _('hg unshelve --continue')]) diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/strip.py --- a/hgext/strip.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/strip.py Mon Jan 11 14:27:12 2016 -0600 @@ -7,7 +7,7 @@ from mercurial.node import nullid from mercurial.lock import release from mercurial import cmdutil, hg, scmutil, util, error -from mercurial import repair, bookmarks, merge +from mercurial import repair, bookmarks as bookmarksmod , merge cmdtable = {} command = cmdutil.command(cmdtable) @@ -44,7 +44,7 @@ raise error.Abort(_("local changed subrepos found" + excsuffix)) return s -def strip(ui, repo, revs, update=True, backup=True, force=None, bookmark=None): +def strip(ui, repo, revs, update=True, backup=True, force=None, bookmarks=None): wlock = lock = None try: wlock = repo.wlock() @@ -62,13 +62,21 @@ repair.strip(ui, repo, revs, backup) - marks = repo._bookmarks - if bookmark: - if bookmark == repo._activebookmark: - bookmarks.deactivate(repo) - del marks[bookmark] - marks.write() - ui.write(_("bookmark '%s' deleted\n") % bookmark) + repomarks = repo._bookmarks + if bookmarks: + tr = None + try: + tr = repo.transaction('strip') + if repo._activebookmark in bookmarks: + bookmarksmod.deactivate(repo) + for bookmark in bookmarks: + del repomarks[bookmark] + repomarks.recordchange(tr) + tr.close() + for bookmark in sorted(bookmarks): + ui.write(_("bookmark '%s' deleted\n") % bookmark) + finally: + release(tr) finally: release(lock, wlock) @@ -85,7 +93,7 @@ ('n', '', None, _('ignored (DEPRECATED)')), ('k', 'keep', None, _("do not modify working directory during " "strip")), - ('B', 'bookmark', '', _("remove revs only reachable from given" + ('B', 'bookmark', [], _("remove revs only reachable from given" " bookmark"))], _('hg strip [-k] [-f] [-n] [-B bookmark] [-r] REV...')) def stripcmd(ui, repo, *revs, **opts): @@ -127,27 +135,36 @@ wlock = repo.wlock() try: - if opts.get('bookmark'): - mark = opts.get('bookmark') - marks = repo._bookmarks - if mark not in marks: - raise error.Abort(_("bookmark '%s' not found") % mark) + bookmarks = set(opts.get('bookmark')) + if bookmarks: + repomarks = repo._bookmarks + if not bookmarks.issubset(repomarks): + raise error.Abort(_("bookmark '%s' not found") % + ','.join(sorted(bookmarks - set(repomarks.keys())))) # If the requested bookmark is not the only one pointing to a # a revision we have to only delete the bookmark and not strip # anything. revsets cannot detect that case. 
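The unshelve hunks above thread the new ``--tool`` option down to the temporary rebase by setting ``ui.forcemerge`` for the duration of the operation and restoring the previous value afterwards via ``ui.backupconfig``/``ui.restoreconfig``. The same save-set-restore shape, reduced to a dictionary-backed stand-in ui::

    class simpleui(object):
        """Minimal stand-in exposing backup/set/restore of one config table."""

        def __init__(self):
            self._config = {}

        def backupconfig(self, sect, name):
            key = (sect, name)
            return key, self._config.get(key)

        def setconfig(self, sect, name, value, source=''):
            self._config[(sect, name)] = value

        def restoreconfig(self, backup):
            key, value = backup
            if value is None:
                self._config.pop(key, None)
            else:
                self._config[key] = value

    ui = simpleui()
    saved = ui.backupconfig('ui', 'forcemerge')
    try:
        ui.setconfig('ui', 'forcemerge', ':merge', 'unshelve')
        # ... run the temporary rebase with the forced merge tool here ...
    finally:
        ui.restoreconfig(saved)
    assert ('ui', 'forcemerge') not in ui._config

Restoring in a ``finally`` block keeps a failed or interrupted unshelve from leaving the forced tool configured.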
- uniquebm = True - for m, n in marks.iteritems(): - if m != mark and n == repo[mark].node(): - uniquebm = False - break - if uniquebm: - rsrevs = repair.stripbmrevset(repo, mark) - revs.update(set(rsrevs)) + nodetobookmarks = {} + for mark, node in repomarks.iteritems(): + nodetobookmarks.setdefault(node, []).append(mark) + for marks in nodetobookmarks.values(): + if bookmarks.issuperset(marks): + rsrevs = repair.stripbmrevset(repo, marks[0]) + revs.update(set(rsrevs)) if not revs: - del marks[mark] - marks.write() - ui.write(_("bookmark '%s' deleted\n") % mark) + lock = tr = None + try: + lock = repo.lock() + tr = repo.transaction('bookmark') + for bookmark in bookmarks: + del repomarks[bookmark] + repomarks.recordchange(tr) + tr.close() + for bookmark in sorted(bookmarks): + ui.write(_("bookmark '%s' deleted\n") % bookmark) + finally: + release(lock, tr) if not revs: raise error.Abort(_('empty revision set')) @@ -208,14 +225,13 @@ repo.dirstate.write(repo.currenttransaction()) # clear resolve state - ms = merge.mergestate(repo) - ms.reset(repo['.'].node()) + merge.mergestate.clean(repo, repo['.'].node()) update = False strip(ui, repo, revs, backup=backup, update=update, - force=opts.get('force'), bookmark=opts.get('bookmark')) + force=opts.get('force'), bookmarks=bookmarks) finally: wlock.release() diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/transplant.py --- a/hgext/transplant.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/transplant.py Mon Jan 11 14:27:12 2016 -0600 @@ -20,6 +20,7 @@ from mercurial import bundlerepo, hg, merge, match from mercurial import patch, revlog, scmutil, util, error, cmdutil from mercurial import revset, templatekw, exchange +from mercurial import lock as lockmod class TransplantError(error.Abort): pass @@ -127,9 +128,8 @@ diffopts = patch.difffeatureopts(self.ui, opts) diffopts.git = True - lock = wlock = tr = None + lock = tr = None try: - wlock = repo.wlock() lock = repo.lock() tr = repo.transaction('transplant') for rev in revs: @@ -152,7 +152,7 @@ if pulls: if source != repo: exchange.pull(repo, source.peer(), heads=pulls) - merge.update(repo, pulls[-1], False, False, None) + merge.update(repo, pulls[-1], False, False) p1, p2 = repo.dirstate.parents() pulls = [] @@ -216,7 +216,7 @@ tr.close() if pulls: exchange.pull(repo, source.peer(), heads=pulls) - merge.update(repo, pulls[-1], False, False, None) + merge.update(repo, pulls[-1], False, False) finally: self.saveseries(revmap, merges) self.transplants.write() @@ -224,7 +224,6 @@ tr.release() if lock: lock.release() - wlock.release() def filter(self, filter, node, changelog, patchfile): '''arbitrarily rewrite changeset before applying it''' @@ -283,7 +282,7 @@ p2 = node self.log(user, date, message, p1, p2, merge=merge) self.ui.write(str(inst) + '\n') - raise TransplantError(_('fix up the merge and run ' + raise TransplantError(_('fix up the working directory and run ' 'hg transplant --continue')) else: files = None @@ -304,6 +303,9 @@ return n + def canresume(self): + return os.path.exists(os.path.join(self.path, 'journal')) + def resume(self, repo, source, opts): '''recover last transaction and apply remaining changesets''' if os.path.exists(os.path.join(self.path, 'journal')): @@ -345,7 +347,6 @@ merge = True extra = {'transplant_source': node} - wlock = repo.wlock() try: p1, p2 = repo.dirstate.parents() if p1 != parent: @@ -367,7 +368,9 @@ return n, node finally: - wlock.release() + # TODO: get rid of this meaningless try/finally enclosing. + # this is kept only to reduce changes in a patch. 
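The strip hunks above no longer call ``marks.write()`` directly: bookmarks are deleted inside an explicit repository transaction and the change is recorded with ``recordchange(tr)``, so it is rolled back if the transaction never closes (histedit, mq and rebase are converted the same way elsewhere in this patch). A stripped-down sketch of that commit-or-rollback discipline, using a toy transaction class rather than Mercurial's::

    class transaction(object):
        """Tiny undo log: queued callbacks run only if close() was never reached."""

        def __init__(self):
            self._undo = []
            self._closed = False

        def addundo(self, func):
            self._undo.append(func)

        def close(self):
            self._closed = True

        def release(self):
            if not self._closed:
                for func in reversed(self._undo):
                    func()

    repomarks = {'gone': 'abc123', 'kept': 'def456'}
    tr = transaction()
    try:
        removed = dict((b, repomarks.pop(b)) for b in ['gone'])
        tr.addundo(lambda: repomarks.update(removed))   # the recordchange analogue
        tr.close()                                      # only reached on success
    finally:
        tr.release()                                    # no-op once closed
    assert 'gone' not in repomarks and 'kept' in repomarks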
+ pass def readseries(self): nodes = [] @@ -572,6 +575,14 @@ and then resume where you left off by calling :hg:`transplant --continue/-c`. ''' + wlock = None + try: + wlock = repo.wlock() + return _dotransplant(ui, repo, *revs, **opts) + finally: + lockmod.release(wlock) + +def _dotransplant(ui, repo, *revs, **opts): def incwalk(repo, csets, match=util.always): for node in csets: if match(node): @@ -599,7 +610,7 @@ return if not (opts.get('source') or revs or opts.get('merge') or opts.get('branch')): - raise error.Abort(_('no source URL, branch revision or revision ' + raise error.Abort(_('no source URL, branch revision, or revision ' 'list provided')) if opts.get('all'): if not opts.get('branch'): @@ -619,11 +630,14 @@ tp = transplanter(ui, repo, opts) - cmdutil.checkunfinished(repo) p1, p2 = repo.dirstate.parents() if len(repo) > 0 and p1 == revlog.nullid: raise error.Abort(_('no revision checked out')) - if not opts.get('continue'): + if opts.get('continue'): + if not tp.canresume(): + raise error.Abort(_('no transplant to continue')) + else: + cmdutil.checkunfinished(repo) if p2 != revlog.nullid: raise error.Abort(_('outstanding uncommitted merges')) m, a, r, d = repo.status()[:4] @@ -685,9 +699,11 @@ if cleanupfn: cleanupfn() +revsetpredicate = revset.extpredicate() + +@revsetpredicate('transplanted([set])') def revsettransplanted(repo, subset, x): - """``transplanted([set])`` - Transplanted changesets in set, or all transplanted changesets. + """Transplanted changesets in set, or all transplanted changesets. """ if x: s = revset.getset(repo, subset, x) @@ -703,10 +719,10 @@ return n and revlog.hex(n) or '' def extsetup(ui): - revset.symbols['transplanted'] = revsettransplanted + revsetpredicate.setup() templatekw.keywords['transplanted'] = kwtransplanted cmdutil.unfinishedstates.append( - ['series', True, False, _('transplant in progress'), + ['transplant/journal', True, False, _('transplant in progress'), _("use 'hg transplant --continue' or 'hg update' to abort")]) # tell hggettext to extract docstrings from these functions: diff -r 6c7d26cef0cd -r 4571c0b38337 hgext/zeroconf/Zeroconf.py --- a/hgext/zeroconf/Zeroconf.py Fri Jan 08 16:27:25 2016 +0100 +++ b/hgext/zeroconf/Zeroconf.py Mon Jan 11 14:27:12 2016 -0600 @@ -150,7 +150,7 @@ _TYPE_TXT = 16 _TYPE_AAAA = 28 _TYPE_SRV = 33 -_TYPE_ANY = 255 +_TYPE_ANY = 255 # Mapping constants to names @@ -522,7 +522,7 @@ def readString(self, len): """Reads a string of a given length from the packet""" format = '!' + str(len) + 's' - length = struct.calcsize(format) + length = struct.calcsize(format) info = struct.unpack(format, self.data[self.offset:self.offset+length]) self.offset += length return info[0] diff -r 6c7d26cef0cd -r 4571c0b38337 i18n/check-translation.py diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/__init__.py --- a/mercurial/__init__.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/__init__.py Mon Jan 11 14:27:12 2016 -0600 @@ -0,0 +1,144 @@ +# __init__.py - Startup and module loading logic for Mercurial. +# +# Copyright 2015 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from __future__ import absolute_import + +import imp +import os +import sys +import zipimport + +__all__ = [] + +# Rules for how modules can be loaded. 
Values are: +# +# c - require C extensions +# allow - allow pure Python implementation when C loading fails +# py - only load pure Python modules +modulepolicy = '@MODULELOADPOLICY@' + +# By default, require the C extensions for performance reasons. +if modulepolicy == '@' 'MODULELOADPOLICY' '@': + modulepolicy = 'c' + +# PyPy doesn't load C extensions. +# +# The canonical way to do this is to test platform.python_implementation(). +# But we don't import platform and don't bloat for it here. +if '__pypy__' in sys.builtin_module_names: + modulepolicy = 'py' + +# Environment variable can always force settings. +modulepolicy = os.environ.get('HGMODULEPOLICY', modulepolicy) + +# Modules that have both Python and C implementations. See also the +# set of .py files under mercurial/pure/. +_dualmodules = set([ + 'mercurial.base85', + 'mercurial.bdiff', + 'mercurial.diffhelpers', + 'mercurial.mpatch', + 'mercurial.osutil', + 'mercurial.parsers', +]) + +class hgimporter(object): + """Object that conforms to import hook interface defined in PEP-302.""" + def find_module(self, name, path=None): + # We only care about modules that have both C and pure implementations. + if name in _dualmodules: + return self + return None + + def load_module(self, name): + mod = sys.modules.get(name, None) + if mod: + return mod + + mercurial = sys.modules['mercurial'] + + # The zip importer behaves sufficiently differently from the default + # importer to warrant its own code path. + loader = getattr(mercurial, '__loader__', None) + if isinstance(loader, zipimport.zipimporter): + def ziploader(*paths): + """Obtain a zipimporter for a directory under the main zip.""" + path = os.path.join(loader.archive, *paths) + zl = sys.path_importer_cache.get(path) + if not zl: + zl = zipimport.zipimporter(path) + return zl + + try: + if modulepolicy == 'py': + raise ImportError() + + zl = ziploader('mercurial') + mod = zl.load_module(name) + # Unlike imp, ziploader doesn't expose module metadata that + # indicates the type of module. So just assume what we found + # is OK (even though it could be a pure Python module). + except ImportError: + if modulepolicy == 'c': + raise + zl = ziploader('mercurial', 'pure') + mod = zl.load_module(name) + + sys.modules[name] = mod + return mod + + # Unlike the default importer which searches special locations and + # sys.path, we only look in the directory where "mercurial" was + # imported from. + + # imp.find_module doesn't support submodules (modules with "."). + # Instead you have to pass the parent package's __path__ attribute + # as the path argument. + stem = name.split('.')[-1] + + try: + if modulepolicy == 'py': + raise ImportError() + + modinfo = imp.find_module(stem, mercurial.__path__) + + # The Mercurial installer used to copy files from + # mercurial/pure/*.py to mercurial/*.py. Therefore, it's possible + # for some installations to have .py files under mercurial/*. + # Loading Python modules when we expected C versions could result + # in a) poor performance b) loading a version from a previous + # Mercurial version, potentially leading to incompatibility. Either + # scenario is bad. So we verify that modules loaded from + # mercurial/* are C extensions. If the current policy allows the + # loading of .py modules, the module will be re-imported from + # mercurial/pure/* below. 
+ if modinfo[2][2] != imp.C_EXTENSION: + raise ImportError('.py version of %s found where C ' + 'version should exist' % name) + + except ImportError: + if modulepolicy == 'c': + raise + + # Could not load the C extension and pure Python is allowed. So + # try to load them. + from . import pure + modinfo = imp.find_module(stem, pure.__path__) + if not modinfo: + raise ImportError('could not find mercurial module %s' % + name) + + mod = imp.load_module(name, *modinfo) + sys.modules[name] = mod + return mod + +# We automagically register our custom importer as a side-effect of loading. +# This is necessary to ensure that any entry points are able to import +# mercurial.* modules without having to perform this registration themselves. +if not any(isinstance(x, hgimporter) for x in sys.meta_path): + # meta_path is used before any implicit finders and before sys.path. + sys.meta_path.insert(0, hgimporter()) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/base85.c --- a/mercurial/base85.c Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/base85.c Mon Jan 11 14:27:12 2016 -0600 @@ -18,8 +18,7 @@ "abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~"; static char b85dec[256]; -static void -b85prep(void) +static void b85prep(void) { unsigned i; @@ -28,8 +27,7 @@ b85dec[(int)(b85chars[i])] = i + 1; } -static PyObject * -b85encode(PyObject *self, PyObject *args) +static PyObject *b85encode(PyObject *self, PyObject *args) { const unsigned char *text; PyObject *out; @@ -76,8 +74,7 @@ return out; } -static PyObject * -b85decode(PyObject *self, PyObject *args) +static PyObject *b85decode(PyObject *self, PyObject *args) { PyObject *out; const char *text; diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/bookmarks.py --- a/mercurial/bookmarks.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/bookmarks.py Mon Jan 11 14:27:12 2016 -0600 @@ -22,26 +22,44 @@ util, ) +def _getbkfile(repo): + """Hook so that extensions that mess with the store can hook bm storage. + + For core, this just handles wether we should see pending + bookmarks or the committed ones. Other extensions (like share) + may need to tweak this behavior further. + """ + bkfile = None + if 'HG_PENDING' in os.environ: + try: + bkfile = repo.vfs('bookmarks.pending') + except IOError as inst: + if inst.errno != errno.ENOENT: + raise + if bkfile is None: + bkfile = repo.vfs('bookmarks') + return bkfile + + class bmstore(dict): """Storage for bookmarks. - This object should do all bookmark reads and writes, so that it's - fairly simple to replace the storage underlying bookmarks without - having to clone the logic surrounding bookmarks. + This object should do all bookmark-related reads and writes, so + that it's fairly simple to replace the storage underlying + bookmarks without having to clone the logic surrounding + bookmarks. This type also should manage the active bookmark, if + any. This particular bmstore implementation stores bookmarks as {hash}\s{name}\n (the same format as localtags) in .hg/bookmarks. The mapping is stored as {name: nodeid}. - - This class does NOT handle the "active" bookmark state at this - time. 
""" def __init__(self, repo): dict.__init__(self) self._repo = repo try: - bkfile = self.getbkfile(repo) + bkfile = _getbkfile(repo) for line in bkfile: line = line.strip() if not line: @@ -59,18 +77,29 @@ except IOError as inst: if inst.errno != errno.ENOENT: raise + self._clean = True + self._active = _readactive(repo, self) + self._aclean = True - def getbkfile(self, repo): - bkfile = None - if 'HG_PENDING' in os.environ: - try: - bkfile = repo.vfs('bookmarks.pending') - except IOError as inst: - if inst.errno != errno.ENOENT: - raise - if bkfile is None: - bkfile = repo.vfs('bookmarks') - return bkfile + @property + def active(self): + return self._active + + @active.setter + def active(self, mark): + if mark is not None and mark not in self: + raise AssertionError('bookmark %s does not exist!' % mark) + + self._active = mark + self._aclean = False + + def __setitem__(self, *args, **kwargs): + self._clean = False + return dict.__setitem__(self, *args, **kwargs) + + def __delitem__(self, key): + self._clean = False + return dict.__delitem__(self, key) def recordchange(self, tr): """record that bookmarks have been changed in a transaction @@ -89,6 +118,13 @@ We also store a backup of the previous state in undo.bookmarks that can be copied back on rollback. ''' + msg = 'bm.write() is deprecated, use bm.recordchange(transaction)' + self._repo.ui.deprecwarn(msg, '3.7') + # TODO: writing the active bookmark should probably also use a + # transaction. + self._writeactive() + if self._clean: + return repo = self._repo if (repo.ui.configbool('devel', 'all-warnings') or repo.ui.configbool('devel', 'check-locks')): @@ -108,24 +144,53 @@ def _writerepo(self, repo): """Factored out for extensibility""" - if repo._activebookmark not in self: - deactivate(repo) + rbm = repo._bookmarks + if rbm.active not in self: + rbm.active = None + rbm._writeactive() wlock = repo.wlock() try: - file = repo.vfs('bookmarks', 'w', atomictemp=True) - self._write(file) - file.close() + file_ = repo.vfs('bookmarks', 'w', atomictemp=True) + try: + self._write(file_) + except: # re-raises + file_.discard() + raise + finally: + file_.close() finally: wlock.release() + def _writeactive(self): + if self._aclean: + return + wlock = self._repo.wlock() + try: + if self._active is not None: + f = self._repo.vfs('bookmarks.current', 'w', atomictemp=True) + try: + f.write(encoding.fromlocal(self._active)) + finally: + f.close() + else: + try: + self._repo.vfs.unlink('bookmarks.current') + except OSError as inst: + if inst.errno != errno.ENOENT: + raise + finally: + wlock.release() + self._aclean = True + def _write(self, fp): for name, node in self.iteritems(): fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name))) + self._clean = True -def readactive(repo): +def _readactive(repo, marks): """ Get the active bookmark. We can have an active bookmark that updates itself as we commit. This function returns the name of that bookmark. @@ -139,10 +204,19 @@ raise return None try: - # No readline() in osutil.posixfile, reading everything is cheap + # No readline() in osutil.posixfile, reading everything is + # cheap. + # Note that it's possible for readlines() here to raise + # IOError, since we might be reading the active mark over + # static-http which only tries to load the file when we try + # to read from it. 
mark = encoding.tolocal((file.readlines() or [''])[0]) - if mark == '' or mark not in repo._bookmarks: + if mark == '' or mark not in marks: mark = None + except IOError as inst: + if inst.errno != errno.ENOENT: + raise + return None finally: file.close() return mark @@ -153,35 +227,15 @@ follow new commits that are made. The name is recorded in .hg/bookmarks.current """ - if mark not in repo._bookmarks: - raise AssertionError('bookmark %s does not exist!' % mark) - - active = repo._activebookmark - if active == mark: - return - - wlock = repo.wlock() - try: - file = repo.vfs('bookmarks.current', 'w', atomictemp=True) - file.write(encoding.fromlocal(mark)) - file.close() - finally: - wlock.release() - repo._activebookmark = mark + repo._bookmarks.active = mark + repo._bookmarks._writeactive() def deactivate(repo): """ Unset the active bookmark in this repository. """ - wlock = repo.wlock() - try: - repo.vfs.unlink('bookmarks.current') - repo._activebookmark = None - except OSError as inst: - if inst.errno != errno.ENOENT: - raise - finally: - wlock.release() + repo._bookmarks.active = None + repo._bookmarks._writeactive() def isactivewdirparent(repo): """ @@ -231,7 +285,7 @@ deletefrom = parents marks = repo._bookmarks update = False - active = repo._activebookmark + active = marks.active if not active: return False @@ -249,7 +303,14 @@ update = True if update: - marks.write() + lock = tr = None + try: + lock = repo.lock() + tr = repo.transaction('bookmark') + marks.recordchange(tr) + tr.close() + finally: + lockmod.release(tr, lock) return update def listbookmarks(repo): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/byterange.py --- a/mercurial/byterange.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/byterange.py Mon Jan 11 14:27:12 2016 -0600 @@ -17,11 +17,25 @@ # $Id: byterange.py,v 1.9 2005/02/14 21:55:07 mstenner Exp $ +from __future__ import absolute_import + +import email +import ftplib +import mimetypes import os +import re +import socket import stat import urllib import urllib2 -import email.Utils + +addclosehook = urllib.addclosehook +addinfourl = urllib.addinfourl +splitattr = urllib.splitattr +splitpasswd = urllib.splitpasswd +splitport = urllib.splitport +splituser = urllib.splituser +unquote = urllib.unquote class RangeError(IOError): """Error raised when an unsatisfiable range is requested.""" @@ -196,8 +210,6 @@ server would. 
""" def open_local_file(self, req): - import mimetypes - import email host = req.get_host() file = req.get_selector() localfile = urllib.url2pathname(file) @@ -234,13 +246,6 @@ # follows: # -- range support modifications start/end here -from urllib import splitport, splituser, splitpasswd, splitattr, \ - unquote, addclosehook, addinfourl -import ftplib -import socket -import mimetypes -import email - class FTPRangeHandler(urllib2.FTPHandler): def ftp_open(self, req): host = req.get_host() @@ -406,7 +411,6 @@ if range_header is None: return None if _rangere is None: - import re _rangere = re.compile(r'^bytes=(\d{1,})-(\d*)') match = _rangere.match(range_header) if match: diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/changegroup.py --- a/mercurial/changegroup.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/changegroup.py Mon Jan 11 14:27:12 2016 -0600 @@ -32,6 +32,7 @@ _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s" _CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s" +_CHANGEGROUPV3_DELTA_HEADER = ">20s20s20s20s20sH" def readexactly(stream, n): '''read n bytes from stream.read and abort if less was available''' @@ -246,7 +247,8 @@ deltabase = p1 else: deltabase = prevnode - return node, p1, p2, deltabase, cs + flags = 0 + return node, p1, p2, deltabase, cs, flags def deltachunk(self, prevnode): l = self._chunklength() @@ -255,9 +257,9 @@ headerdata = readexactly(self._stream, self.deltaheadersize) header = struct.unpack(self.deltaheader, headerdata) delta = readexactly(self._stream, l - self.deltaheadersize) - node, p1, p2, deltabase, cs = self._deltaheader(header, prevnode) + node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode) return {'node': node, 'p1': p1, 'p2': p2, 'cs': cs, - 'deltabase': deltabase, 'delta': delta} + 'deltabase': deltabase, 'delta': delta, 'flags': flags} def getchunks(self): """returns all the chunks contains in the bundle @@ -312,6 +314,7 @@ - number of heads stays the same: 1 """ repo = repo.unfiltered() + wasempty = (len(repo.changelog) == 0) def csmap(x): repo.ui.debug("add changeset %s\n" % short(x)) return len(cl) @@ -386,7 +389,7 @@ self.callback = None pr = prog(_('files'), efiles) newrevs, newfiles = _addchangegroupfiles( - repo, self, revmap, trp, pr, needfiles) + repo, self, revmap, trp, pr, needfiles, wasempty) revisions += newrevs files += newfiles @@ -494,7 +497,23 @@ def _deltaheader(self, headertuple, prevnode): node, p1, p2, deltabase, cs = headertuple - return node, p1, p2, deltabase, cs + flags = 0 + return node, p1, p2, deltabase, cs, flags + +class cg3unpacker(cg2unpacker): + """Unpacker for cg3 streams. + + cg3 streams add support for exchanging treemanifests and revlog + flags, so the only changes from cg2 are the delta header and + version number. + """ + deltaheader = _CHANGEGROUPV3_DELTA_HEADER + deltaheadersize = struct.calcsize(deltaheader) + version = '03' + + def _deltaheader(self, headertuple, prevnode): + node, p1, p2, deltabase, cs, flags = headertuple + return node, p1, p2, deltabase, cs, flags class headerlessfixup(object): def __init__(self, fh, h): @@ -508,6 +527,27 @@ return d return readexactly(self._fh, n) +def _moddirs(files): + """Given a set of modified files, find the list of modified directories. + + This returns a list of (path to changed dir, changed dir) tuples, + as that's what the one client needs anyway. 
+ + >>> _moddirs(['a/b/c.py', 'a/b/c.txt', 'a/d/e/f/g.txt', 'i.txt', ]) + [('/', 'a/'), ('a/', 'b/'), ('a/', 'd/'), ('a/d/', 'e/'), ('a/d/e/', 'f/')] + + """ + alldirs = set() + for f in files: + path = f.split('/')[:-1] + for i in xrange(len(path) - 1, -1, -1): + dn = '/'.join(path[:i]) + current = dn + '/', path[i] + '/' + if current in alldirs: + break + alldirs.add(current) + return sorted(alldirs) + class cg1packer(object): deltaheader = _CHANGEGROUPV1_DELTA_HEADER version = '01' @@ -593,7 +633,7 @@ rr, rl = revlog.rev, revlog.linkrev return [n for n in missing if rl(rr(n)) not in commonrevs] - def _packmanifests(self, mfnodes, lookuplinknode): + def _packmanifests(self, mfnodes, tmfnodes, lookuplinknode): """Pack flat manifests into a changegroup stream.""" ml = self._repo.manifest size = 0 @@ -602,6 +642,11 @@ size += len(chunk) yield chunk self._verbosenote(_('%8.i (manifests)\n') % size) + # It looks odd to assert this here, but tmfnodes doesn't get + # filled in until after we've called lookuplinknode for + # sending root manifests, so the only way to tell the streams + # got crossed is to check after we've done all the work. + assert not tmfnodes def generate(self, commonrevs, clnodes, fastpathlinkrev, source): '''yield a sequence of changegroup chunks (strings)''' @@ -611,8 +656,10 @@ clrevorder = {} mfs = {} # needed manifests + tmfnodes = {} fnodes = {} # needed file nodes - changedfiles = set() + # maps manifest node id -> set(changed files) + mfchangedfiles = {} # Callback for the changelog, used to collect changed files and manifest # nodes. @@ -620,9 +667,12 @@ def lookupcl(x): c = cl.read(x) clrevorder[x] = len(clrevorder) - changedfiles.update(c[3]) + n = c[0] # record the first changeset introducing this manifest version - mfs.setdefault(c[0], x) + mfs.setdefault(n, x) + # Record a complete list of potentially-changed files in + # this manifest. + mfchangedfiles.setdefault(n, set()).update(c[3]) return x self._verbosenote(_('uncompressed size of bundle content:\n')) @@ -648,41 +698,91 @@ # simply take the slowpath, which already has the 'clrevorder' logic. # This was also fixed in cc0ff93d0c0c. fastpathlinkrev = fastpathlinkrev and not self._reorder + # Treemanifests don't work correctly with fastpathlinkrev + # either, because we don't discover which directory nodes to + # send along with files. This could probably be fixed. + fastpathlinkrev = fastpathlinkrev and ( + 'treemanifest' not in repo.requirements) # Callback for the manifest, used to collect linkrevs for filelog # revisions. # Returns the linkrev node (collected in lookupcl). - def lookupmflinknode(x): - clnode = mfs[x] - if not fastpathlinkrev: - mdata = ml.readfast(x) - for f, n in mdata.iteritems(): - if f in changedfiles: - # record the first changeset introducing this filelog - # version - fclnodes = fnodes.setdefault(f, {}) - fclnode = fclnodes.setdefault(n, clnode) + if fastpathlinkrev: + lookupmflinknode = mfs.__getitem__ + else: + def lookupmflinknode(x): + """Callback for looking up the linknode for manifests. + + Returns the linkrev node for the specified manifest. + + SIDE EFFECT: + + 1) fclnodes gets populated with the list of relevant + file nodes if we're not using fastpathlinkrev + 2) When treemanifests are in use, collects treemanifest nodes + to send + + Note that this means manifests must be completely sent to + the client before you can trust the list of files and + treemanifests to send. 
+ """ + clnode = mfs[x] + # We no longer actually care about reading deltas of + # the manifest here, because we already know the list + # of changed files, so for treemanifests (which + # lazily-load anyway to *generate* a readdelta) we can + # just load them with read() and then we'll actually + # be able to correctly load node IDs from the + # submanifest entries. + if 'treemanifest' in repo.requirements: + mdata = ml.read(x) + else: + mdata = ml.readfast(x) + for f in mfchangedfiles[x]: + try: + n = mdata[f] + except KeyError: + continue + # record the first changeset introducing this filelog + # version + fclnodes = fnodes.setdefault(f, {}) + fclnode = fclnodes.setdefault(n, clnode) + if clrevorder[clnode] < clrevorder[fclnode]: + fclnodes[n] = clnode + # gather list of changed treemanifest nodes + if 'treemanifest' in repo.requirements: + submfs = {'/': mdata} + for dn, bn in _moddirs(mfchangedfiles[x]): + submf = submfs[dn] + submf = submf._dirs[bn] + submfs[submf.dir()] = submf + tmfclnodes = tmfnodes.setdefault(submf.dir(), {}) + tmfclnodes.setdefault(submf._node, clnode) if clrevorder[clnode] < clrevorder[fclnode]: - fclnodes[n] = clnode - return clnode + tmfclnodes[n] = clnode + return clnode mfnodes = self.prune(ml, mfs, commonrevs) - for x in self._packmanifests(mfnodes, lookupmflinknode): + for x in self._packmanifests( + mfnodes, tmfnodes, lookupmflinknode): yield x mfs.clear() clrevs = set(cl.rev(x) for x in clnodes) - def linknodes(filerevlog, fname): - if fastpathlinkrev: + if not fastpathlinkrev: + def linknodes(unused, fname): + return fnodes.get(fname, {}) + else: + cln = cl.node + def linknodes(filerevlog, fname): llr = filerevlog.linkrev - def genfilenodes(): - for r in filerevlog: - linkrev = llr(r) - if linkrev in clrevs: - yield filerevlog.node(r), cl.node(linkrev) - return dict(genfilenodes()) - return fnodes.get(fname, {}) + fln = filerevlog.node + revs = ((r, llr(r)) for r in filerevlog) + return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs) + changedfiles = set() + for x in mfchangedfiles.itervalues(): + changedfiles.update(x) for chunk in self.generatefiles(changedfiles, linknodes, commonrevs, source): yield chunk @@ -751,14 +851,16 @@ delta = revlog.revdiff(base, rev) p1n, p2n = revlog.parents(node) basenode = revlog.node(base) - meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode) + flags = revlog.flags(rev) + meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode, flags) meta += prefix l = len(meta) + len(delta) yield chunkheader(l) yield meta yield delta - def builddeltaheader(self, node, p1n, p2n, basenode, linknode): + def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags): # do nothing with basenode, it is implicitly the previous one in HG10 + # do nothing with flags, it is implicitly 0 for cg1 and cg2 return struct.pack(self.deltaheader, node, p1n, p2n, linknode) class cg2packer(cg1packer): @@ -781,12 +883,40 @@ return prev return dp - def builddeltaheader(self, node, p1n, p2n, basenode, linknode): + def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags): + # Do nothing with flags, it is implicitly 0 in cg1 and cg2 return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode) +class cg3packer(cg2packer): + version = '03' + deltaheader = _CHANGEGROUPV3_DELTA_HEADER + + def _packmanifests(self, mfnodes, tmfnodes, lookuplinknode): + # Note that debug prints are super confusing in this code, as + # tmfnodes gets populated by the calls to lookuplinknode in + # the superclass's 
manifest packer. In the future we should + # probably see if we can refactor this somehow to be less + # confusing. + for x in super(cg3packer, self)._packmanifests( + mfnodes, {}, lookuplinknode): + yield x + dirlog = self._repo.manifest.dirlog + for name, nodes in tmfnodes.iteritems(): + # For now, directory headers are simply file headers with + # a trailing '/' on the path (already in the name). + yield self.fileheader(name) + for chunk in self.group(nodes, dirlog(name), nodes.get): + yield chunk + + def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags): + return struct.pack( + self.deltaheader, node, p1n, p2n, basenode, linknode, flags) + packermap = {'01': (cg1packer, cg1unpacker), # cg2 adds support for exchanging generaldelta '02': (cg2packer, cg2unpacker), + # cg3 adds support for exchanging treemanifests + '03': (cg3packer, cg3unpacker), } def _changegroupinfo(repo, nodes, source): @@ -903,7 +1033,7 @@ # to avoid a race we use changegroupsubset() (issue1320) return changegroupsubset(repo, basenodes, repo.heads(), source) -def _addchangegroupfiles(repo, source, revmap, trp, pr, needfiles): +def _addchangegroupfiles(repo, source, revmap, trp, pr, needfiles, wasempty): revisions = 0 files = 0 while True: @@ -913,15 +1043,32 @@ f = chunkdata["filename"] repo.ui.debug("adding %s revisions\n" % f) pr() - fl = repo.file(f) + directory = (f[-1] == '/') + if directory: + # a directory using treemanifests + # TODO fixup repo requirements safely + if 'treemanifest' not in repo.requirements: + if not wasempty: + raise error.Abort(_( + "bundle contains tree manifests, but local repo is " + "non-empty and does not use tree manifests")) + repo.requirements.add('treemanifest') + repo._applyopenerreqs() + repo._writerequirements() + repo.manifest._treeondisk = True + repo.manifest._treeinmem = True + fl = repo.manifest.dirlog(f) + else: + fl = repo.file(f) o = len(fl) try: if not fl.addgroup(source, revmap, trp): raise error.Abort(_("received file revlog group is empty")) except error.CensoredBaseError as e: raise error.Abort(_("received delta base is censored: %s") % e) - revisions += len(fl) - o - files += 1 + if not directory: + revisions += len(fl) - o + files += 1 if f in needfiles: needs = needfiles[f] for new in xrange(o, len(fl)): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/changelog.py --- a/mercurial/changelog.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/changelog.py Mon Jan 11 14:27:12 2016 -0600 @@ -360,6 +360,17 @@ files = l[3:] return (manifest, user, (time, timezone), files, desc, extra) + def readfiles(self, node): + """ + short version of read that only returns the files modified by the cset + """ + text = self.revision(node) + if not text: + return [] + last = text.index("\n\n") + l = text[:last].split('\n') + return l[3:] + def add(self, manifest, files, desc, transaction, p1, p2, user, date=None, extra=None): # Convert to UTF-8 encoded bytestrings as the very first diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/cmdutil.py --- a/mercurial/cmdutil.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/cmdutil.py Mon Jan 11 14:27:12 2016 -0600 @@ -70,11 +70,11 @@ testfile = ui.config('experimental', 'crecordtest', None) oldwrite = setupwrapcolorwrite(ui) try: - newchunks = filterchunks(ui, originalhunks, usecurses, testfile, - operation) + newchunks, newopts = filterchunks(ui, originalhunks, usecurses, + testfile, operation) finally: ui.write = oldwrite - return newchunks + return newchunks, newopts def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, 
filterfn, *pats, **opts): @@ -116,14 +116,16 @@ diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True) diffopts.nodates = True diffopts.git = True - originaldiff = patch.diff(repo, changes=status, opts=diffopts) + diffopts.showfunc = True + originaldiff = patch.diff(repo, changes=status, opts=diffopts) originalchunks = patch.parsepatch(originaldiff) # 1. filter patch, so we have intending-to apply subset of it try: - chunks = filterfn(ui, originalchunks) + chunks, newopts = filterfn(ui, originalchunks) except patch.PatchError as err: raise error.Abort(_('error parsing patch: %s') % err) + opts.update(newopts) # We need to keep a backup of files that have been newly added and # modified during the recording process because there is a previous @@ -181,9 +183,9 @@ # 3a. apply filtered patch to clean repo (clean) if backups: # Equivalent to hg.revert - choices = lambda key: key in backups + m = scmutil.matchfiles(repo, backups.keys()) mergemod.update(repo, repo.dirstate.p1(), - False, True, choices) + False, True, matcher=m) # 3b. (apply) if dopatch: @@ -436,6 +438,19 @@ raise error.Abort(_("invalid format spec '%%%s' in output filename") % inst.args[0]) +class _unclosablefile(object): + def __init__(self, fp): + self._fp = fp + + def close(self): + pass + + def __iter__(self): + return iter(self._fp) + + def __getattr__(self, attr): + return getattr(self._fp, attr) + def makefileobj(repo, pat, node=None, desc=None, total=None, seqno=None, revwidth=None, mode='wb', modemap=None, pathname=None): @@ -447,22 +462,7 @@ fp = repo.ui.fout else: fp = repo.ui.fin - if util.safehasattr(fp, 'fileno'): - return os.fdopen(os.dup(fp.fileno()), mode) - else: - # if this fp can't be duped properly, return - # a dummy object that can be closed - class wrappedfileobj(object): - noop = lambda x: None - def __init__(self, f): - self.f = f - def __getattr__(self, attr): - if attr == 'close': - return self.noop - else: - return getattr(self.f, attr) - - return wrappedfileobj(fp) + return _unclosablefile(fp) if util.safehasattr(pat, 'write') and writable: return pat if util.safehasattr(pat, 'read') and 'r' in mode: @@ -870,20 +870,21 @@ extractdata = patch.extract(ui, hunk) tmpname = extractdata.get('filename') message = extractdata.get('message') - user = extractdata.get('user') - date = extractdata.get('date') + user = opts.get('user') or extractdata.get('user') + date = opts.get('date') or extractdata.get('date') branch = extractdata.get('branch') nodeid = extractdata.get('nodeid') p1 = extractdata.get('p1') p2 = extractdata.get('p2') + nocommit = opts.get('no_commit') + importbranch = opts.get('import_branch') update = not opts.get('bypass') strip = opts["strip"] prefix = opts["prefix"] sim = float(opts.get('similarity') or 0) if not tmpname: return (None, None, False) - msg = _('applied to working directory') rejects = False @@ -932,7 +933,7 @@ if p2 != parents[1]: repo.setparents(p1.node(), p2.node()) - if opts.get('exact') or opts.get('import_branch'): + if opts.get('exact') or importbranch: repo.dirstate.setbranch(branch or 'default') partial = opts.get('partial', False) @@ -947,7 +948,7 @@ rejects = True files = list(files) - if opts.get('no_commit'): + if nocommit: if message: msgs.append(message) else: @@ -970,15 +971,15 @@ try: if partial: repo.ui.setconfig('ui', 'allowemptycommit', True) - n = repo.commit(message, opts.get('user') or user, - opts.get('date') or date, match=m, + n = repo.commit(message, user, + date, match=m, editor=editor, extra=extra) for idfunc in extrapostimport: 
extrapostimportmap[idfunc](repo[n]) finally: repo.ui.restoreconfig(allowemptyback) else: - if opts.get('exact') or opts.get('import_branch'): + if opts.get('exact') or importbranch: branch = branch or 'default' else: branch = p1.branch() @@ -996,19 +997,20 @@ editor = getcommiteditor(editform='import.bypass') memctx = context.makememctx(repo, (p1.node(), p2.node()), message, - opts.get('user') or user, - opts.get('date') or date, + user, + date, branch, files, store, editor=editor) n = memctx.commit() finally: store.close() - if opts.get('exact') and opts.get('no_commit'): + if opts.get('exact') and nocommit: # --exact with --no-commit is still useful in that it does merge # and branch bits ui.warn(_("warning: can't check exact import with --no-commit\n")) elif opts.get('exact') and hex(n) != nodeid: raise error.Abort(_('patch is damaged or loses information')) + msg = _('applied to working directory') if n: # i18n: refers to a short changeset id msg = _('created %s') % short(n) @@ -1052,9 +1054,8 @@ fp = makefileobj(repo, template, node, desc=desc, total=total, seqno=seqno, revwidth=revwidth, mode='wb', modemap=filemode) - if fp != template: - shouldclose = True - if fp and fp != sys.stdout and util.safehasattr(fp, 'name'): + shouldclose = True + if fp and not getattr(fp, 'name', '').startswith('<'): repo.ui.note("%s\n" % fp.name) if not fp: @@ -1182,9 +1183,9 @@ def show(self, ctx, copies=None, matchfn=None, **props): if self.buffered: - self.ui.pushbuffer() + self.ui.pushbuffer(labeled=True) self._show(ctx, copies, matchfn, props) - self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True) + self.hunk[ctx.rev()] = self.ui.popbuffer() else: self._show(ctx, copies, matchfn, props) @@ -1297,16 +1298,17 @@ label='log.summary') self.ui.write("\n") - self.showpatch(changenode, matchfn) - - def showpatch(self, node, matchfn): + self.showpatch(ctx, matchfn) + + def showpatch(self, ctx, matchfn): if not matchfn: matchfn = self.matchfn if matchfn: stat = self.diffopts.get('stat') diff = self.diffopts.get('patch') diffopts = patch.diffallopts(self.ui, self.diffopts) - prev = self.repo.changelog.parents(node)[0] + node = ctx.node() + prev = ctx.p1().node() if stat: diffordiffstat(self.ui, self.repo, diffopts, prev, node, match=matchfn, stat=True) @@ -1488,7 +1490,7 @@ # write changeset metadata, then patch if requested key = self._parts['changeset'] self.ui.write(templater.stringify(self.t(key, **props))) - self.showpatch(ctx.node(), matchfn) + self.showpatch(ctx, matchfn) if self._parts['footer']: if not self.footer: @@ -2153,17 +2155,31 @@ return revs, expr, filematcher -def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None, +def _graphnodeformatter(ui, displayer): + spec = ui.config('ui', 'graphnodetemplate') + if not spec: + return templatekw.showgraphnode # fast path for "{graphnode}" + + templ = formatter.gettemplater(ui, 'graphnode', spec) + cache = {} + if isinstance(displayer, changeset_templater): + cache = displayer.cache # reuse cache of slow templates + props = templatekw.keywords.copy() + props['templ'] = templ + props['cache'] = cache + def formatnode(repo, ctx): + props['ctx'] = ctx + props['repo'] = repo + props['revcache'] = {} + return templater.stringify(templ('graphnode', **props)) + return formatnode + +def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None, filematcher=None): + formatnode = _graphnodeformatter(ui, displayer) seen, state = [], graphmod.asciistate() for rev, type, ctx, parents in dag: - char = 'o' - if ctx.node() in showparents: - 
char = '@' - elif ctx.obsolete(): - char = 'x' - elif ctx.closesbranch(): - char = '_' + char = formatnode(repo, ctx) copies = None if getrenamed and ctx.rev(): copies = [] @@ -2196,9 +2212,8 @@ endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev) displayer = show_changeset(ui, repo, opts, buffered=True) - showparents = [ctx.node() for ctx in repo[None].parents()] - displaygraph(ui, revdag, displayer, showparents, - graphmod.asciiedges, getrenamed, filematcher) + displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed, + filematcher) def checkunsupportedgraphflags(pats, opts): for op in ["newest_first"]: @@ -2515,13 +2530,14 @@ # First, do a regular commit to record all changes in the working # directory (if there are any) ui.callhooks = False - activebookmark = repo._activebookmark + activebookmark = repo._bookmarks.active try: - repo._activebookmark = None + repo._bookmarks.active = None opts['message'] = 'temporary amend commit for %s' % old node = commit(ui, repo, commitfunc, pats, opts) finally: - repo._activebookmark = activebookmark + repo._bookmarks.active = activebookmark + repo._bookmarks.recordchange(tr) ui.callhooks = True ctx = repo[node] @@ -2614,6 +2630,11 @@ message = old.description() pureextra = extra.copy() + if 'amend_source' in pureextra: + del pureextra['amend_source'] + pureoldextra = old.extra() + if 'amend_source' in pureoldextra: + del pureoldextra['amend_source'] extra['amend_source'] = old.hex() new = context.memctx(repo, @@ -2626,12 +2647,12 @@ extra=extra, editor=editor) - newdesc = changelog.stripdesc(new.description()) + newdesc = changelog.stripdesc(new.description()) if ((not node) and newdesc == old.description() and user == old.user() and date == old.date() - and pureextra == old.extra()): + and pureextra == pureoldextra): # nothing changed. continuing here would create a new node # anyway because of the amend_source noise. # @@ -3078,7 +3099,7 @@ xlist.append(abs) if dobackup and (backup <= dobackup or wctx[abs].cmp(ctx[abs])): - bakname = "%s.orig" % rel + bakname = scmutil.origpath(ui, repo, rel) ui.note(_('saving current version of %s as %s\n') % (rel, bakname)) if not opts.get('dry_run'): @@ -3160,9 +3181,9 @@ diffopts = patch.difffeatureopts(repo.ui, whitespace=True) diffopts.nodates = True diffopts.git = True - reversehunks = repo.ui.configbool('experimental', - 'revertalternateinteractivemode', - True) + reversehunks = repo.ui.configbool('experimental', + 'revertalternateinteractivemode', + True) if reversehunks: diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts) else: @@ -3171,7 +3192,7 @@ try: - chunks = recordfilter(repo.ui, originalchunks) + chunks, opts = recordfilter(repo.ui, originalchunks) if reversehunks: chunks = patch.reversehunks(chunks) @@ -3323,6 +3344,19 @@ if clearable and repo.vfs.exists(f): util.unlink(repo.join(f)) +afterresolvedstates = [ + ('graftstate', + _('hg graft --continue')), + ] + +def checkafterresolved(repo): + contmsg = _("continue: %s\n") + for f, msg in afterresolvedstates: + if repo.vfs.exists(f): + repo.ui.warn(contmsg % msg) + return + repo.ui.note(contmsg % _("hg commit")) + class dirstateguard(object): '''Restore dirstate at unexpected failure. 
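The cmdutil.py hunks above replace the graph symbols that displaygraph used to hard-code with a formatter driven by the new ui.graphnodetemplate setting, keeping the old characters as the fallback. A minimal, self-contained sketch of that fallback rule, assuming the same four symbols; fakectx and defaultgraphnode are illustrative stand-ins for this note, not Mercurial APIs::

  class fakectx(object):
      # stand-in for a Mercurial changectx
      def __init__(self, node, obsolete=False, closesbranch=False):
          self._node = node
          self._obsolete = obsolete
          self._closes = closesbranch
      def node(self):
          return self._node
      def obsolete(self):
          return self._obsolete
      def closesbranch(self):
          return self._closes

  def defaultgraphnode(ctx, wdirparents):
      # '@' for a working-directory parent, 'x' for an obsolete changeset,
      # '_' for a branch-closing changeset, 'o' for everything else
      if ctx.node() in wdirparents:
          return '@'
      elif ctx.obsolete():
          return 'x'
      elif ctx.closesbranch():
          return '_'
      return 'o'

  parents = set(['ff01'])
  print(defaultgraphnode(fakectx('ff01'), parents))                     # '@'
  print(defaultgraphnode(fakectx('ee02', obsolete=True), parents))      # 'x'
  print(defaultgraphnode(fakectx('dd03', closesbranch=True), parents))  # '_'
  print(defaultgraphnode(fakectx('cc04'), parents))                     # 'o'

The transplant.py hunks earlier in this patch, and the commands.py hunks that follow, repeat one refactor: acquire the working-directory lock and then the store lock at the command entry point, delegate to a private _do*() helper, and release both in a finally block. A minimal sketch of that shape under dummy objects; dummylock, dummyrepo and _dobackout are illustrative, not Mercurial APIs::

  class dummylock(object):
      def __init__(self, name):
          self.name = name
      def release(self):
          print('released %s' % self.name)

  class dummyrepo(object):
      def wlock(self):
          return dummylock('wlock')
      def lock(self):
          return dummylock('lock')

  def release(*locks):
      # same idea as mercurial.lock.release(): skip None, release in order
      for l in locks:
          if l is not None:
              l.release()

  def backout(ui, repo, node=None, **opts):
      wlock = lock = None
      try:
          wlock = repo.wlock()    # working-directory lock first
          lock = repo.lock()      # then the store lock
          return _dobackout(ui, repo, node, **opts)
      finally:
          release(lock, wlock)    # store lock released before wlock

  def _dobackout(ui, repo, node, **opts):
      # runs with both locks held; contains no locking logic of its own
      return 0

  backout(None, dummyrepo())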
diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/commands.py --- a/mercurial/commands.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/commands.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,7 +5,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from node import hex, bin, nullid, nullrev, short +from node import hex, bin, nullhex, nullid, nullrev, short from lock import release from i18n import _ import os, re, difflib, time, tempfile, errno, shlex @@ -15,7 +15,6 @@ import archival, changegroup, cmdutil, hbisect import sshserver, hgweb import extensions -from hgweb import server as hgweb_server import merge as mergemod import minirst, revset, fileset import dagparser, context, simplemerge, graphmod, copies @@ -24,6 +23,7 @@ import phases, obsolete, exchange, bundle2, repair, lock as lockmod import ui as uimod import streamclone +import commandserver table = {} @@ -172,6 +172,12 @@ _('recurse into subrepositories')) ] +debugrevlogopts = [ + ('c', 'changelog', False, _('open changelog')), + ('m', 'manifest', False, _('open manifest')), + ('', 'dir', False, _('open directory manifest')), +] + # Commands start here, listed alphabetically @command('^add', @@ -187,21 +193,36 @@ The files will be added to the repository at the next commit. To undo an add before that, see :hg:`forget`. - If no names are given, add all files to the repository. + If no names are given, add all files to the repository (except + files matching ``.hgignore``). .. container:: verbose - An example showing how new (unknown) files are added - automatically by :hg:`add`:: - - $ ls - foo.c - $ hg status - ? foo.c - $ hg add - adding foo.c - $ hg status - A foo.c + Examples: + + - New (unknown) files are added + automatically by :hg:`add`:: + + $ ls + foo.c + $ hg status + ? foo.c + $ hg add + adding foo.c + $ hg status + A foo.c + + - Specific files to be added can be specified:: + + $ ls + bar.c foo.c + $ hg status + ? bar.c + ? foo.c + $ hg add bar.c + $ hg status + A bar.c + ? foo.c Returns 0 if all files are successfully added. """ @@ -220,9 +241,9 @@ Add all new files and remove all missing files from the repository. - New files are ignored if they match any of the patterns in - ``.hgignore``. As with add, these changes take effect at the next - commit. + Unless names are given, new files are ignored if they match any of + the patterns in ``.hgignore``. As with add, these changes take + effect at the next commit. Use the -s/--similarity option to detect renamed files. This option takes a percentage between 0 (disabled) and 100 (files must @@ -234,6 +255,46 @@ not specified, -s/--similarity defaults to 100 and only renames of identical files are detected. + .. container:: verbose + + Examples: + + - A number of files (bar.c and foo.c) are new, + while foobar.c has been removed (without using :hg:`remove`) + from the repository:: + + $ ls + bar.c foo.c + $ hg status + ! foobar.c + ? bar.c + ? foo.c + $ hg addremove + adding bar.c + adding foo.c + removing foobar.c + $ hg status + A bar.c + A foo.c + R foobar.c + + - A file foobar.c was moved to foo.c without using :hg:`rename`. + Afterwards, it was edited slightly:: + + $ ls + foo.c + $ hg status + ! foobar.c + ? foo.c + $ hg addremove --similarity 90 + removing foobar.c + adding foo.c + recording removal of foobar.c as rename to foo.c (94% similar) + $ hg status -C + A foo.c + foobar.c + R foobar.c + Returns 0 if all files are successfully added. 
""" try: @@ -264,11 +325,14 @@ """show changeset information by line for each file List changes in files, showing the revision id responsible for - each line + each line. This command is useful for discovering when a change was made and by whom. + If you include --file, --user, or --date, the revision number is + suppressed unless you also include --number. + Without the -a/--text option, annotate will avoid processing files it detects as binary. With -a, annotate will annotate the file anyway, although the results will probably be neither useful @@ -403,7 +467,7 @@ directory; use -r/--rev to specify a different revision. The archive type is automatically detected based on file - extension (or override using -t/--type). + extension (to override, use -t/--type). .. container:: verbose @@ -481,11 +545,28 @@ .. note:: - backout cannot be used to fix either an unwanted or - incorrect merge. + :hg:`backout` cannot be used to fix either an unwanted or + incorrect merge. .. container:: verbose + Examples: + + - Reverse the effect of the parent of the working directory. + This backout will be committed immediately:: + + hg backout -r . + + - Reverse the effect of previous bad revision 23:: + + hg backout -r 23 + hg commit -m "Backout revision 23" + + - Reverse the effect of previous bad revision 23 and + commit the backout immediately:: + + hg backout -r 23 --commit + By default, the pending changeset will have one parent, maintaining a linear history. With --merge, the pending changeset will instead have two parents: the old parent of the @@ -504,6 +585,15 @@ Returns 0 on success, 1 if nothing to backout or there are unresolved files. ''' + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + return _dobackout(ui, repo, node, rev, commit, **opts) + finally: + release(lock, wlock) + +def _dobackout(ui, repo, node=None, rev=None, commit=False, **opts): if rev and node: raise error.Abort(_("please specify just one revision")) @@ -542,7 +632,6 @@ parent = p1 # the backout should appear on the same branch - wlock = repo.wlock() try: branch = repo.dirstate.branch() bheads = repo.branchheads(branch) @@ -552,8 +641,7 @@ try: ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'backout') - stats = mergemod.update(repo, parent, True, True, False, - node, False) + stats = mergemod.update(repo, parent, True, True, node, False) repo.setparents(op1, op2) dsguard.close() hg._showstats(repo, stats) @@ -605,7 +693,9 @@ finally: ui.setconfig('ui', 'forcemerge', '', '') finally: - wlock.release() + # TODO: get rid of this meaningless try/finally enclosing. + # this is kept only to reduce changes in a patch. + pass return 0 @command('bisect', @@ -1100,7 +1190,7 @@ Use the command :hg:`update` to switch to an existing branch. Use :hg:`commit --close-branch` to mark this branch head as closed. - When all heads of the branch are closed, the branch will be + When all heads of a branch are closed, the branch will be considered closed. Returns 0 on success. 
@@ -1120,7 +1210,7 @@ ui.status(_('reset working directory to branch %s\n') % label) elif label: if not opts.get('force') and label in repo.branchmap(): - if label not in [p.branch() for p in repo.parents()]: + if label not in [p.branch() for p in repo[None].parents()]: raise error.Abort(_('a branch of the same name already' ' exists'), # i18n: "it" refers to an existing branch @@ -1214,19 +1304,20 @@ def bundle(ui, repo, fname, dest=None, **opts): """create a changegroup file - Generate a compressed changegroup file collecting changesets not - known to be in another repository. - - If you omit the destination repository, then hg assumes the - destination will have all the nodes you specify with --base - parameters. To create a bundle containing all changesets, use - -a/--all (or --base null). + Generate a changegroup file collecting changesets to be added + to a repository. + + To create a bundle containing all changesets, use -a/--all + (or --base null). Otherwise, hg assumes the destination will have + all the nodes you specify with --base parameters. Otherwise, hg + will assume the repository has all the nodes in destination, or + default-push/default if no destination is specified. You can change bundle format with the -t/--type option. You can specify a compression, a bundle version or both using a dash (comp-version). The available compression methods are: none, bzip2, and gzip (by default, bundles are compressed using bzip2). The - available format are: v1, v2 (default to most suitable). + available formats are: v1, v2 (default to most suitable). The bundle file can then be transferred using conventional means and applied to another repository with the unbundle or pull @@ -1257,6 +1348,11 @@ hint=_('use "hg debugcreatestreamclonebundle"')) if opts.get('all'): + if dest: + raise error.Abort(_("--all is incompatible with specifying " + "a destination")) + if opts.get('base'): + ui.warn(_("ignoring --base because --all was specified\n")) base = ['null'] else: base = scmutil.revrange(repo, opts.get('base')) @@ -1337,7 +1433,8 @@ @command('^clone', [('U', 'noupdate', None, _('the clone will include an empty working ' 'directory (only a repository)')), - ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')), + ('u', 'updaterev', '', _('revision, tag, or branch to check out'), + _('REV')), ('r', 'rev', [], _('include the specified changeset'), _('REV')), ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')), ('', 'pull', None, _('use pull protocol to copy metadata')), @@ -1360,20 +1457,23 @@ destinations. For ``ssh://`` destinations, no working directory or ``.hg/hgrc`` will be created on the remote side. - To pull only a subset of changesets, specify one or more revisions - identifiers with -r/--rev or branches with -b/--branch. The - resulting clone will contain only the specified changesets and - their ancestors. These options (or 'clone src#rev dest') imply - --pull, even for local source repositories. Note that specifying a - tag will include the tagged changeset but not the changeset - containing the tag. - If the source repository has a bookmark called '@' set, that revision will be checked out in the new repository by default. To check out a particular version, use -u/--update, or -U/--noupdate to create a clone with no working directory. + To pull only a subset of changesets, specify one or more revisions + identifiers with -r/--rev or branches with -b/--branch. 
The + resulting clone will contain only the specified changesets and + their ancestors. These options (or 'clone src#rev dest') imply + --pull, even for local source repositories. + + .. note:: + + Specifying a tag will include the tagged changeset but not the + changeset containing the tag. + .. container:: verbose For efficiency, hardlinks are used for cloning whenever the @@ -1505,7 +1605,32 @@ See :hg:`help dates` for a list of formats valid for -d/--date. Returns 0 on success, 1 if nothing changed. + + .. container:: verbose + + Examples: + + - commit all files ending in .py:: + + hg commit --include "set:**.py" + + - commit all non-binary files:: + + hg commit --exclude "set:binary()" + + - amend the current commit and set the date to now:: + + hg commit --amend --date now """ + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + return _docommit(ui, repo, *pats, **opts) + finally: + release(lock, wlock) + +def _docommit(ui, repo, *pats, **opts): if opts.get('interactive'): opts.pop('interactive') cmdutil.dorecord(ui, repo, commit, None, False, @@ -1530,8 +1655,8 @@ if not bheads: raise error.Abort(_('can only close branch heads')) elif opts.get('amend'): - if repo.parents()[0].p1().branch() != branch and \ - repo.parents()[0].p2().branch() != branch: + if repo[None].parents()[0].p1().branch() != branch and \ + repo[None].parents()[0].p2().branch() != branch: raise error.Abort(_('can only close branch heads')) if opts.get('amend'): @@ -1547,6 +1672,9 @@ if not allowunstable and old.children(): raise error.Abort(_('cannot amend changeset with children')) + newextra = extra.copy() + newextra['branch'] = branch + extra = newextra # commitfunc is used only for temporary amend commit by cmdutil.amend def commitfunc(ui, repo, message, match, opts): return repo.commit(message, @@ -2111,11 +2239,7 @@ ui.write(line) ui.write("\n") -@command('debugdata', - [('c', 'changelog', False, _('open changelog')), - ('m', 'manifest', False, _('open manifest')), - ('', 'dir', False, _('open directory manifest'))], - _('-c|-m|FILE REV')) +@command('debugdata', debugrevlogopts, _('-c|-m|FILE REV')) def debugdata(ui, repo, file_, rev=None, **opts): """dump the contents of a data file revision""" if opts.get('changelog') or opts.get('manifest'): @@ -2311,21 +2435,51 @@ raise error.Abort(_('unknown bundle type specified with --type')) changegroup.writebundle(ui, bundle, bundlepath, bundletype) -@command('debugignore', [], '') -def debugignore(ui, repo, *values, **opts): - """display the combined ignore pattern""" +@command('debugignore', [], '[FILE]') +def debugignore(ui, repo, *files, **opts): + """display the combined ignore pattern and information about ignored files + + With no argument display the combined ignore pattern. + + Given space separated file names, shows if the given file is ignored and + if so, show the ignore rule (file and line number) that matched it. 
+ """ ignore = repo.dirstate._ignore - includepat = getattr(ignore, 'includepat', None) - if includepat is not None: - ui.write("%s\n" % includepat) + if not files: + # Show all the patterns + includepat = getattr(ignore, 'includepat', None) + if includepat is not None: + ui.write("%s\n" % includepat) + else: + raise error.Abort(_("no ignore patterns found")) else: - raise error.Abort(_("no ignore patterns found")) - -@command('debugindex', - [('c', 'changelog', False, _('open changelog')), - ('m', 'manifest', False, _('open manifest')), - ('', 'dir', False, _('open directory manifest')), - ('f', 'format', 0, _('revlog format'), _('FORMAT'))], + for f in files: + ignored = None + ignoredata = None + if f != '.': + if ignore(f): + ignored = f + ignoredata = repo.dirstate._ignorefileandline(f) + else: + for p in util.finddirs(f): + if ignore(p): + ignored = p + ignoredata = repo.dirstate._ignorefileandline(p) + break + if ignored: + if ignored == f: + ui.write("%s is ignored\n" % f) + else: + ui.write("%s is ignored because of containing folder %s\n" + % (f, ignored)) + ignorefile, lineno, line = ignoredata + ui.write("(ignore rule in %s, line %d: '%s')\n" + % (ignorefile, lineno, line)) + else: + ui.write("%s is not ignored\n" % f) + +@command('debugindex', debugrevlogopts + + [('f', 'format', 0, _('revlog format'), _('FORMAT'))], _('[-f FORMAT] -c|-m|FILE'), optionalrepo=True) def debugindex(ui, repo, file_=None, **opts): @@ -2380,16 +2534,11 @@ i, r.flags(i), r.start(i), r.length(i), r.rawsize(i), base, r.linkrev(i), pr[0], pr[1], shortfn(node))) -@command('debugindexdot', [], _('FILE'), optionalrepo=True) -def debugindexdot(ui, repo, file_): +@command('debugindexdot', debugrevlogopts, + _('-c|-m|FILE'), optionalrepo=True) +def debugindexdot(ui, repo, file_=None, **opts): """dump an index DAG as a graphviz dot file""" - r = None - if repo: - filelog = repo.file(file_) - if len(filelog): - r = filelog - if not r: - r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_) + r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts) ui.write(("digraph G {\n")) for i in r: node = r.node(i) @@ -2399,6 +2548,107 @@ ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) ui.write("}\n") +@command('debugdeltachain', + debugrevlogopts + formatteropts, + _('-c|-m|FILE'), + optionalrepo=True) +def debugdeltachain(ui, repo, file_=None, **opts): + """dump information about delta chains in a revlog + + Output can be templatized. 
Available template keywords are: + + rev revision number + chainid delta chain identifier (numbered by unique base) + chainlen delta chain length to this revision + prevrev previous revision in delta chain + deltatype role of delta / how it was computed + compsize compressed size of revision + uncompsize uncompressed size of revision + chainsize total size of compressed revisions in chain + chainratio total chain size divided by uncompressed revision size + (new delta chains typically start at ratio 2.00) + lindist linear distance from base revision in delta chain to end + of this revision + extradist total size of revisions not part of this delta chain from + base of delta chain to end of this revision; a measurement + of how much extra data we need to read/seek across to read + the delta chain for this revision + extraratio extradist divided by chainsize; another representation of + how much unrelated data is needed to load this delta chain + """ + r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts) + index = r.index + generaldelta = r.version & revlog.REVLOGGENERALDELTA + + def revinfo(rev): + e = index[rev] + compsize = e[1] + uncompsize = e[2] + chainsize = 0 + + if generaldelta: + if e[3] == e[5]: + deltatype = 'p1' + elif e[3] == e[6]: + deltatype = 'p2' + elif e[3] == rev - 1: + deltatype = 'prev' + elif e[3] == rev: + deltatype = 'base' + else: + deltatype = 'other' + else: + if e[3] == rev: + deltatype = 'base' + else: + deltatype = 'prev' + + chain = r._deltachain(rev)[0] + for iterrev in chain: + e = index[iterrev] + chainsize += e[1] + + return compsize, uncompsize, deltatype, chain, chainsize + + fm = ui.formatter('debugdeltachain', opts) + + fm.plain(' rev chain# chainlen prev delta ' + 'size rawsize chainsize ratio lindist extradist ' + 'extraratio\n') + + chainbases = {} + for rev in r: + comp, uncomp, deltatype, chain, chainsize = revinfo(rev) + chainbase = chain[0] + chainid = chainbases.setdefault(chainbase, len(chainbases) + 1) + basestart = r.start(chainbase) + revstart = r.start(rev) + lineardist = revstart + comp - basestart + extradist = lineardist - chainsize + try: + prevrev = chain[-2] + except IndexError: + prevrev = -1 + + chainratio = float(chainsize) / float(uncomp) + extraratio = float(extradist) / float(chainsize) + + fm.startitem() + fm.write('rev chainid chainlen prevrev deltatype compsize ' + 'uncompsize chainsize chainratio lindist extradist ' + 'extraratio', + '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n', + rev, chainid, len(chain), prevrev, deltatype, comp, + uncomp, chainsize, chainratio, lineardist, extradist, + extraratio, + rev=rev, chainid=chainid, chainlen=len(chain), + prevrev=prevrev, deltatype=deltatype, compsize=comp, + uncompsize=uncomp, chainsize=chainsize, + chainratio=chainratio, lindist=lineardist, + extradist=extradist, extraratio=extraratio) + + fm.end() + @command('debuginstall', [], '', norepo=True) def debuginstall(ui): '''test Mercurial installation @@ -2522,6 +2772,12 @@ Use --verbose to print out information about whether v1 or v2 merge state was chosen.""" + def _hashornull(h): + if h == nullhex: + return 'null' + else: + return h + def printrecords(version): ui.write(('* version %s records\n') % version) if version == 1: @@ -2539,7 +2795,7 @@ driver, mdstate = record.split('\0', 1) ui.write(('merge driver: %s (state "%s")\n') % (driver, mdstate)) - elif rtype in 'FD': + elif rtype in 'FDC': r = record.split('\0') f, state, hash, lfile, afile, anode, ofile = r[0:7] if version == 1: @@ -2547,15 +2803,20 
@@ flags = r[7] else: onode, flags = r[7:9] - ui.write(('file: %s (state "%s", hash %s)\n') - % (f, state, hash)) + ui.write(('file: %s (record type "%s", state "%s", hash %s)\n') + % (f, rtype, state, _hashornull(hash))) ui.write((' local path: %s (flags "%s")\n') % (lfile, flags)) - ui.write((' ancestor path: %s (node %s)\n') % (afile, anode)) - ui.write((' other path: %s (node %s)\n') % (ofile, onode)) + ui.write((' ancestor path: %s (node %s)\n') + % (afile, _hashornull(anode))) + ui.write((' other path: %s (node %s)\n') + % (ofile, _hashornull(onode))) else: ui.write(('unrecognized entry: %s\t%s\n') % (rtype, record.replace('\0', '\t'))) + # Avoid mergestate.read() since it may raise an exception for unsupported + # merge state records. We shouldn't be doing this, but this is OK since this + # command is pretty low-level. ms = mergemod.mergestate(repo) # sort so that reasonable information is on top @@ -2881,21 +3142,17 @@ wlock = repo.wlock() try: dirstate = repo.dirstate - + changedfiles = None # See command doc for what minimal does. if opts.get('minimal'): + manifestfiles = set(ctx.manifest().keys()) dirstatefiles = set(dirstate) - ctxfiles = set(ctx.manifest().keys()) - for file in (dirstatefiles | ctxfiles): - indirstate = file in dirstatefiles - inctx = file in ctxfiles - - if indirstate and not inctx and dirstate[file] != 'a': - dirstate.drop(file) - elif inctx and not indirstate: - dirstate.normallookup(file) - else: - dirstate.rebuild(ctx.node(), ctx.manifest()) + manifestonly = manifestfiles - dirstatefiles + dsonly = dirstatefiles - manifestfiles + dsnotadded = set(f for f in dsonly if dirstate[f] != 'a') + changedfiles = manifestonly | dsnotadded + + dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles) finally: wlock.release() @@ -2921,11 +3178,8 @@ else: ui.write(_("%s not renamed\n") % rel) -@command('debugrevlog', - [('c', 'changelog', False, _('open changelog')), - ('m', 'manifest', False, _('open manifest')), - ('', 'dir', False, _('open directory manifest')), - ('d', 'dump', False, _('dump index data'))], +@command('debugrevlog', debugrevlogopts + + [('d', 'dump', False, _('dump index data'))], _('-c|-m|FILE'), optionalrepo=True) def debugrevlog(ui, repo, file_=None, **opts): @@ -3162,8 +3416,12 @@ [('', 'nodates', None, _('do not display the saved mtime')), ('', 'datesort', None, _('sort by saved mtime'))], _('[OPTION]...')) -def debugstate(ui, repo, nodates=None, datesort=None): +def debugstate(ui, repo, **opts): """show the contents of the current dirstate""" + + nodates = opts.get('nodates') + datesort = opts.get('datesort') + timestr = "" if datesort: keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename @@ -3304,7 +3562,7 @@ .. note:: - diff may generate unexpected results for merges, as it will + :hg:`diff` may generate unexpected results for merges, as it will default to comparing against the working directory's first parent changeset if no revisions are specified. @@ -3312,7 +3570,7 @@ between those revisions. If only one revision is specified then that revision is compared to the working directory, and, when no revisions are specified, the working directory files are compared - to its parent. + to its first parent. Alternatively you can specify -c/--change with a revision to see the changes in that changeset relative to its first parent. @@ -3395,7 +3653,7 @@ .. 
note:: - export may generate unexpected diff output for merge + :hg:`export` may generate unexpected diff output for merge changesets, as it will compare the merge changeset against its first parent only. @@ -3599,8 +3857,8 @@ .. note:: - The -c/--continue option does not reapply earlier options, except - for --force. + The -c/--continue option does not reapply earlier options, except + for --force. .. container:: verbose @@ -3623,12 +3881,23 @@ hg log --debug -r . + - show revisions sorted by date:: + + hg log -r 'sort(all(), date)' + See :hg:`help revisions` and :hg:`help revsets` for more about specifying revisions. Returns 0 on successful completion. ''' - + wlock = None + try: + wlock = repo.wlock() + return _dograft(ui, repo, *revs, **opts) + finally: + release(wlock) + +def _dograft(ui, repo, *revs, **opts): revs = list(revs) revs.extend(opts['rev']) @@ -3734,7 +4003,6 @@ if not revs: return -1 - wlock = repo.wlock() try: for pos, ctx in enumerate(repo.set("%ld", revs)): desc = '%d:%s "%s"' % (ctx.rev(), ctx, @@ -3746,10 +4014,10 @@ if opts.get('dry_run'): continue - source = ctx.extra().get('source') - extra = {} + extra = ctx.extra().copy() + del extra['branch'] + source = extra.get('source') if source: - extra['source'] = source extra['intermediate-source'] = ctx.hex() else: extra['source'] = ctx.hex() @@ -3779,9 +4047,17 @@ # write out state for --continue nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]] repo.vfs.write('graftstate', ''.join(nodelines)) + extra = '' + if opts.get('user'): + extra += ' --user %s' % opts['user'] + if opts.get('date'): + extra += ' --date %s' % opts['date'] + if opts.get('log'): + extra += ' --log' + hint=_('use hg resolve and hg graft --continue%s') % extra raise error.Abort( _("unresolved conflicts, can't continue"), - hint=_('use hg resolve and hg graft --continue')) + hint=hint) else: cont = False @@ -3793,7 +4069,9 @@ _('note: graft of %d:%s created no changes to commit\n') % (ctx.rev(), ctx)) finally: - wlock.release() + # TODO: get rid of this meaningless try/finally enclosing. + # this is kept only to reduce changes in a patch. + pass # remove state when we complete successfully if not opts.get('dry_run'): @@ -4129,11 +4407,16 @@ keep.append(sys.platform.lower()) section = None + subtopic = None if name and '.' in name: name, section = name.split('.', 1) section = section.lower() - - text = help.help_(ui, name, **opts) + if '.' in section: + subtopic, section = section.split('.', 1) + else: + subtopic = section + + text = help.help_(ui, name, subtopic=subtopic, **opts) formatted, pruned = minirst.format(text, textwidth, keep=keep, section=section) @@ -4195,6 +4478,9 @@ hg id -r tip http://selenic.com/hg/ + See :hg:`log` for generating more information about specific revisions, + including full hash identifiers. + Returns 0 if successful. """ @@ -4328,14 +4614,23 @@ Import a list of patches and commit them individually (unless --no-commit is specified). - Because import first applies changes to the working directory, - import will abort if there are outstanding changes. + To read a patch from standard input, use "-" as the patch name. If + a URL is specified, the patch will be downloaded from there. + + Import first applies changes to the working directory (unless + --bypass is specified), import will abort if there are outstanding + changes. + + Use --bypass to apply and commit patches directly to the + repository, without affecting the working directory. 
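As an aside on the :hg:`debugdeltachain` keywords documented a few hunks back: every derived column (chainsize, lindist, extradist and the two ratios) comes from the same handful of per-revision sizes, computed exactly as in the loop added to commands.py. A small self-contained sketch, using invented offsets and sizes rather than data from a real revlog::

  # Invented numbers for a three-revision delta chain; none of these
  # offsets or sizes come from a real revlog.
  chain = [0, 1, 2]                   # revisions in the delta chain
  start = {0: 0, 1: 110, 2: 230}      # byte offset of each revision's data
  comp = {0: 100, 1: 40, 2: 35}       # compressed size of each revision
  uncomp = 450                        # uncompressed size of revision 2

  rev = chain[-1]
  chainbase = chain[0]
  chainsize = sum(comp[r] for r in chain)
  lineardist = start[rev] + comp[rev] - start[chainbase]
  extradist = lineardist - chainsize
  chainratio = float(chainsize) / float(uncomp)
  extraratio = float(extradist) / float(chainsize)

  print("chainsize=%d lindist=%d extradist=%d chainratio=%.5f extraratio=%.5f"
        % (chainsize, lineardist, extradist, chainratio, extraratio))
  # chainsize=175 lindist=265 extradist=90 chainratio=0.38889 extraratio=0.51429

An extraratio near zero means the chain is stored almost contiguously; larger values mean proportionally more unrelated data must be read or seeked across to reconstruct the revision.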
Without + --exact, patches will be applied on top of the working directory + parent revision. You can import a patch straight from a mail message. Even patches as attachments work (to use the body part, it must have type text/plain or text/x-patch). From and Subject headers of email message are used as default committer and commit message. All - text/plain body parts before first diff are added to commit + text/plain body parts before first diff are added to the commit message. If the imported patch was generated by :hg:`export`, user and @@ -4349,23 +4644,21 @@ the patch. This may happen due to character set problems or other deficiencies in the text patch format. - Use --bypass to apply and commit patches directly to the - repository, not touching the working directory. Without --exact, - patches will be applied on top of the working directory parent - revision. - - With -s/--similarity, hg will attempt to discover renames and - copies in the patch in the same way as :hg:`addremove`. - Use --partial to ensure a changeset will be created from the patch even if some hunks fail to apply. Hunks that fail to apply will be written to a .rej file. Conflicts can then be resolved by hand before :hg:`commit --amend` is run to update the created changeset. This flag exists to let people import patches that partially apply without losing the associated metadata (author, - date, description, ...). Note that when none of the hunk applies - cleanly, :hg:`import --partial` will create an empty changeset, - importing only the patch metadata. + date, description, ...). + + .. note:: + + When no hunks apply cleanly, :hg:`import --partial` will create + an empty changeset, importing only the patch metadata. + + With -s/--similarity, hg will attempt to discover renames and + copies in the patch in the same way as :hg:`addremove`. It is possible to use external patch programs to perform the patch by setting the ``ui.patch`` configuration option. For the default @@ -4373,8 +4666,6 @@ See :hg:`help config` for more information about configuration files and how to use these options. - To read a patch from standard input, use "-" as the patch name. If - a URL is specified, the patch will be downloaded from it. See :hg:`help dates` for a list of formats valid for -d/--date. .. 
container:: verbose @@ -4419,6 +4710,7 @@ if date: opts['date'] = util.parsedate(date) + exact = opts.get('exact') update = not opts.get('bypass') if not update and opts.get('no_commit'): raise error.Abort(_('cannot use --no-commit with --bypass')) @@ -4430,15 +4722,11 @@ raise error.Abort(_('similarity must be between 0 and 100')) if sim and not update: raise error.Abort(_('cannot use --similarity with --bypass')) - if opts.get('exact') and opts.get('edit'): - raise error.Abort(_('cannot use --exact with --edit')) - if opts.get('exact') and opts.get('prefix'): - raise error.Abort(_('cannot use --exact with --prefix')) - - if update: - cmdutil.checkunfinished(repo) - if (opts.get('exact') or not opts.get('force')) and update: - cmdutil.bailifchanged(repo) + if exact: + if opts.get('edit'): + raise error.Abort(_('cannot use --exact with --edit')) + if opts.get('prefix'): + raise error.Abort(_('cannot use --exact with --prefix')) base = opts["base"] wlock = dsguard = lock = tr = None @@ -4449,12 +4737,18 @@ try: try: wlock = repo.wlock() + + if update: + cmdutil.checkunfinished(repo) + if (exact or not opts.get('force')): + cmdutil.bailifchanged(repo) + if not opts.get('no_commit'): lock = repo.lock() tr = repo.transaction('import') else: dsguard = cmdutil.dirstateguard(repo, 'import') - parents = repo.parents() + parents = repo[None].parents() for patchurl in patches: if patchurl == '-': ui.status(_('applying patch from stdin\n')) @@ -4473,8 +4767,8 @@ if msg: haspatch = True ui.note(msg + '\n') - if update or opts.get('exact'): - parents = repo.parents() + if update or exact: + parents = repo[None].parents() else: parents = [repo[node]] if rej: @@ -4573,8 +4867,7 @@ cmdutil.checkunsupportedgraphflags([], opts) def display(other, chlist, displayer): revdag = cmdutil.graphrevs(other, chlist, opts) - showparents = [ctx.node() for ctx in repo[None].parents()] - cmdutil.displaygraph(ui, revdag, displayer, showparents, + cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges) hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True) @@ -4716,14 +5009,14 @@ .. note:: - log -p/--patch may generate unexpected diff output for merge + :hg:`log --patch` may generate unexpected diff output for merge changesets, as it will only compare the merge changeset against its first parent. Also, only files different from BOTH parents will appear in files:. .. note:: - for performance reasons, log FILE may omit duplicate changes + For performance reasons, :hg:`log FILE` may omit duplicate changes made on branches and will not show removals or mode changes. To see all such changes, use the --removed switch. @@ -4755,6 +5048,10 @@ hg log -k bug --template "{rev}\\n" + - the full hash identifier of the working directory parent:: + + hg log -r . --template "{node}\\n" + - list available log templates:: hg log -T list @@ -4774,7 +5071,7 @@ See :hg:`help dates` for a list of formats valid for -d/--date. See :hg:`help revisions` and :hg:`help revsets` for more about - specifying revisions. + specifying and ordering revisions. See :hg:`help templates` for more about pre-packaged styles and specifying custom templates. @@ -4913,7 +5210,7 @@ head, the other head is merged with by default. Otherwise, an explicit revision with which to merge with must be provided. - :hg:`resolve` must be used to resolve unresolved files. + See :hg:`help resolve` for information on handling file conflicts. 
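A few hunks back, :hg:`help` learned to split a dotted TOPIC.SUBTOPIC.SECTION argument before calling help.help_(). The standalone function below re-implements only that parsing; the name splittopic and the sample arguments are illustrative, not part of Mercurial::

  def splittopic(name):
      # mirrors the parsing added to commands.help_(); with a single
      # dotted part, the same string serves as subtopic and section
      section = subtopic = None
      if name and '.' in name:
          name, section = name.split('.', 1)
          section = section.lower()
          if '.' in section:
              subtopic, section = section.split('.', 1)
          else:
              subtopic = section
      return name, subtopic, section

  print(splittopic('config'))                # ('config', None, None)
  print(splittopic('config.paths'))          # ('config', 'paths', 'paths')
  print(splittopic('config.paths.default'))  # ('config', 'paths', 'default')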
To undo an uncommitted merge, use :hg:`update --clean .` which will check out a clean copy of the original merge parent, losing @@ -5007,9 +5304,7 @@ revdag = cmdutil.graphrevs(repo, o, opts) displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True) - showparents = [ctx.node() for ctx in repo[None].parents()] - cmdutil.displaygraph(ui, revdag, displayer, showparents, - graphmod.asciiedges) + cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges) cmdutil.outgoinghooks(ui, repo, other, opts, o) return 0 @@ -5043,6 +5338,13 @@ last changed (before the working directory revision or the argument to --rev if given) is printed. + This command is equivalent to:: + + hg log -r "p1()+p2()" or + hg log -r "p1(REV)+p2(REV)" or + hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or + hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))" + See :hg:`summary` and :hg:`help revsets` for related information. Returns 0 on success. @@ -5098,10 +5400,13 @@ When ``default-push`` is set, it will be used for push and ``default`` will be used for pull; otherwise ``default`` is used as the fallback for both. When cloning a repository, the clone - source is written as ``default`` in ``.hg/hgrc``. Note that - ``default`` and ``default-push`` apply to all inbound (e.g. - :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and - :hg:`bundle`) operations. + source is written as ``default`` in ``.hg/hgrc``. + + .. note:: + + ``default`` and ``default-push`` apply to all inbound (e.g. + :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` + and :hg:`bundle`) operations. See :hg:`help urls` for more information. @@ -5122,6 +5427,8 @@ else: ui.write("%s = %s\n" % (name, util.hidepassword(path.rawloc))) + for subopt, value in sorted(path.suboptions.items()): + ui.write('%s:%s = %s\n' % (name, subopt, value)) @command('phase', [('p', 'public', False, _('set changeset phase to public')), @@ -5223,14 +5530,13 @@ brev = checkout movemarkfrom = None if not checkout: - updata = destutil.destupdate(repo) + updata = destutil.destupdate(repo) checkout, movemarkfrom, brev = updata ret = hg.update(repo, checkout) except error.UpdateAbort as inst: - ui.warn(_("not updating: %s\n") % str(inst)) - if inst.hint: - ui.warn(_("(%s)\n") % inst.hint) - return 0 + msg = _("not updating: %s") % str(inst) + hint = inst.hint + raise error.UpdateAbort(msg, hint=hint) if not ret and not checkout: if bookmarks.update(repo, [movemarkfrom], repo['.'].node()): ui.status(_("updating bookmark %s\n") % repo._activebookmark) @@ -5366,9 +5672,9 @@ .. note:: - Extra care should be taken with the -f/--force option, - which will push all new heads on all branches, an action which will - almost always cause confusion for collaborators. + Extra care should be taken with the -f/--force option, + which will push all new heads on all branches, an action which will + almost always cause confusion for collaborators. If -r/--rev is used, the specified revision and all its ancestors will be pushed to the remote repository. 
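The `paths` hunk above prints each path sub-option on its own ``name:subopt = value`` line after the main ``name = url`` line. The sample data below is invented (the repository URLs and the pushurl sub-option name are only an example), but the formatting mirrors the added loop::

  # invented stand-in for ui.paths: location plus a dict of sub-options
  paths = {
      'default': ('https://example.com/repo',
                  {'pushurl': 'ssh://example.com/repo'}),
      'staging': ('https://example.com/staging', {}),
  }

  for name in sorted(paths):
      rawloc, suboptions = paths[name]
      print("%s = %s" % (name, rawloc))
      for subopt, value in sorted(suboptions.items()):
          print("%s:%s = %s" % (name, subopt, value))
  # default = https://example.com/repo
  # default:pushurl = ssh://example.com/repo
  # staging = https://example.com/staging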
@@ -5394,13 +5700,12 @@ # this lets simultaneous -r, -b options continue working opts.setdefault('rev', []).append("null") - path = ui.paths.getpath(dest, default='default-push') - if not path: - path = ui.paths.getpath(dest, default='default') + path = ui.paths.getpath(dest, default=('default-push', 'default')) if not path: raise error.Abort(_('default repository not configured!'), hint=_('see the "path" section in "hg help config"')) - dest, branches = path.loc, (path.branch, opts.get('branch') or []) + dest = path.pushloc or path.loc + branches = (path.branch, opts.get('branch') or []) ui.status(_('pushing to %s\n') % util.hidepassword(dest)) revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) other = hg.peer(repo, opts, dest) @@ -5491,8 +5796,10 @@ -Af R R R R ========= == == == == - Note that remove never deletes files in Added [A] state from the - working directory, not even if option --force is specified. + .. note:: + + :hg:`remove` never deletes files in Added [A] state from the + working directory, not even if ``--force`` is specified. Returns 0 on success, 1 if any warnings encountered. """ @@ -5572,9 +5879,11 @@ - :hg:`resolve -l`: list files which had or still have conflicts. In the printed list, ``U`` = unresolved and ``R`` = resolved. - Note that Mercurial will not let you commit files with unresolved - merge conflicts. You must use :hg:`resolve -m ...` before you can - commit after a conflicting merge. + .. note:: + + Mercurial will not let you commit files with unresolved merge + conflicts. You must use :hg:`resolve -m ...` before you can + commit after a conflicting merge. Returns 0 on success, 1 if any files fail a resolve attempt. """ @@ -5592,7 +5901,7 @@ if show: fm = ui.formatter('resolve', opts) - ms = mergemod.mergestate(repo) + ms = mergemod.mergestate.read(repo) m = scmutil.match(repo[None], pats, opts) for f in ms: if not m(f): @@ -5607,7 +5916,7 @@ wlock = repo.wlock() try: - ms = mergemod.mergestate(repo) + ms = mergemod.mergestate.read(repo) if not (ms.active() or repo.dirstate.p2() != nullid): raise error.Abort( @@ -5657,7 +5966,11 @@ else: # backup pre-resolve (merge uses .orig for its own purposes) a = repo.wjoin(f) - util.copyfile(a, a + ".resolve") + try: + util.copyfile(a, a + ".resolve") + except (IOError, OSError) as inst: + if inst.errno != errno.ENOENT: + raise try: # preresolve file @@ -5675,7 +5988,12 @@ # replace filemerge's .orig file with our resolve file, but only # for merges that are complete if complete: - util.rename(a + ".resolve", a + ".orig") + try: + util.rename(a + ".resolve", + scmutil.origpath(ui, repo, a)) + except OSError as inst: + if inst.errno != errno.ENOENT: + raise for f in tocomplete: try: @@ -5691,9 +6009,14 @@ # replace filemerge's .orig file with our resolve file a = repo.wjoin(f) - util.rename(a + ".resolve", a + ".orig") + try: + util.rename(a + ".resolve", scmutil.origpath(ui, repo, a)) + except OSError as inst: + if inst.errno != errno.ENOENT: + raise ms.commit() + ms.recordactions() if not didwork and pats: ui.warn(_("arguments do not match paths that need resolving\n")) @@ -5717,6 +6040,7 @@ driverresolvedf = list(ms.driverresolved()) if not unresolvedf and not driverresolvedf: ui.status(_('(no more unresolved files)\n')) + cmdutil.checkafterresolved(repo) elif not unresolvedf: ui.status(_('(no more unresolved files -- ' 'run "hg resolve --all" to conclude)\n')) @@ -5921,81 +6245,10 @@ s.serve_forever() if opts["cmdserver"]: - import commandserver service = commandserver.createservice(ui, repo, 
opts) - return cmdutil.service(opts, initfn=service.init, runfn=service.run) - - # this way we can check if something was given in the command-line - if opts.get('port'): - opts['port'] = util.getport(opts.get('port')) - - if repo: - baseui = repo.baseui else: - baseui = ui - optlist = ("name templates style address port prefix ipv6" - " accesslog errorlog certificate encoding") - for o in optlist.split(): - val = opts.get(o, '') - if val in (None, ''): # should check against default options instead - continue - baseui.setconfig("web", o, val, 'serve') - if repo and repo.ui != baseui: - repo.ui.setconfig("web", o, val, 'serve') - - o = opts.get('web_conf') or opts.get('webdir_conf') - if not o: - if not repo: - raise error.RepoError(_("there is no Mercurial repository" - " here (.hg not found)")) - o = repo - - app = hgweb.hgweb(o, baseui=baseui) - service = httpservice(ui, app, opts) - cmdutil.service(opts, initfn=service.init, runfn=service.run) - -class httpservice(object): - def __init__(self, ui, app, opts): - self.ui = ui - self.app = app - self.opts = opts - - def init(self): - util.setsignalhandler() - self.httpd = hgweb_server.create_server(self.ui, self.app) - - if self.opts['port'] and not self.ui.verbose: - return - - if self.httpd.prefix: - prefix = self.httpd.prefix.strip('/') + '/' - else: - prefix = '' - - port = ':%d' % self.httpd.port - if port == ':80': - port = '' - - bindaddr = self.httpd.addr - if bindaddr == '0.0.0.0': - bindaddr = '*' - elif ':' in bindaddr: # IPv6 - bindaddr = '[%s]' % bindaddr - - fqaddr = self.httpd.fqaddr - if ':' in fqaddr: - fqaddr = '[%s]' % fqaddr - if self.opts['port']: - write = self.ui.status - else: - write = self.ui.write - write(_('listening at http://%s%s/%s (bound to %s:%d)\n') % - (fqaddr, port, prefix, bindaddr, self.httpd.port)) - self.ui.flush() # avoid buffering of status message - - def run(self): - self.httpd.serve_forever() - + service = hgweb.createservice(ui, repo, opts) + return cmdutil.service(opts, initfn=service.init, runfn=service.run) @command('^status|st', [('A', 'all', None, _('show status of all files')), @@ -6029,7 +6282,7 @@ .. note:: - status may appear to disagree with diff if permissions have + :hg:`status` may appear to disagree with diff if permissions have changed or a merge has occurred. The standard diff format does not report permission changes and diff only reports changes relative to one merge parent. 
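With the rewrite above, both the command server and the hgweb branches of `serve` end the same way: build a service object and pass its bound methods to cmdutil.service(opts, initfn=service.init, runfn=service.run). The class below is a toy stand-in, not hgweb's or commandserver's real service, meant only to make the shared two-method shape explicit::

  class toyservice(object):
      """Anything exposing init() and run() fits the interface that
      the rewritten serve command hands to cmdutil.service()."""

      def __init__(self, opts):
          self.opts = opts

      def init(self):
          # one-time setup: bind sockets, install signal handlers, ...
          print('listening on port %s' % self.opts.get('port', 8000))

      def run(self):
          # blocking main loop
          print('serving until interrupted')

  service = toyservice({'port': 8000})
  service.init()
  service.run()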
@@ -6207,8 +6460,15 @@ if d in status.added: status.added.remove(d) - ms = mergemod.mergestate(repo) - unresolved = [f for f in ms if ms[f] == 'u'] + try: + ms = mergemod.mergestate.read(repo) + except error.UnsupportedMergeRecords as e: + s = ' '.join(e.recordtypes) + ui.warn( + _('warning: merge state has unsupported record types: %s\n') % s) + unresolved = 0 + else: + unresolved = [f for f in ms if ms[f] == 'u'] subs = [s for s in ctx.substate if ctx.sub(s).dirty()] @@ -6229,6 +6489,8 @@ t = ', '.join(t) cleanworkdir = False + if repo.vfs.exists('graftstate'): + t += _(' (graft in progress)') if repo.vfs.exists('updatestate'): t += _(' (interrupted update)') elif len(parents) > 1: @@ -6286,6 +6548,18 @@ if draft or secret: ui.status(_('phases: %s\n') % ', '.join(t)) + if obsolete.isenabled(repo, obsolete.createmarkersopt): + for trouble in ("unstable", "divergent", "bumped"): + numtrouble = len(repo.revs(trouble + "()")) + # We write all the possibilities to ease translation + troublemsg = { + "unstable": _("unstable: %d changeset"), + "divergent": _("divergent: %d changeset"), + "bumped": _("bumped: %d changeset"), + } + if numtrouble > 0: + ui.status(troublemsg[trouble] % numtrouble + "\n") + cmdutil.summaryhooks(ui, repo) if opts.get('remote'): @@ -6506,6 +6780,7 @@ This lists both regular and local tags. When the -v/--verbose switch is used, a third column "local" is printed for local tags. + When the -q/--quiet switch is used, only the tag name is printed. Returns 0 on success. """ @@ -6584,10 +6859,10 @@ tr.close() except error.BundleUnknownFeatureError as exc: raise error.Abort(_('%s: unknown bundle feature, %s') - % (fname, exc), - hint=_("see https://mercurial-scm.org/" - "wiki/BundleFeature for more " - "information")) + % (fname, exc), + hint=_("see https://mercurial-scm.org/" + "wiki/BundleFeature for more " + "information")) finally: if tr: tr.release() @@ -6692,7 +6967,7 @@ if check: cmdutil.bailifchanged(repo, merge=False) if rev is None: - updata = destutil.destupdate(repo, clean=clean, check=check) + updata = destutil.destupdate(repo, clean=clean, check=check) rev, movemarkfrom, brev = updata repo.ui.setconfig('ui', 'forcemerge', tool, 'update') diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/commandserver.py --- a/mercurial/commandserver.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/commandserver.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,10 +5,21 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from i18n import _ +from __future__ import absolute_import + +import SocketServer +import errno +import os import struct -import sys, os, errno, traceback, SocketServer -import dispatch, encoding, util, error +import sys +import traceback + +from .i18n import _ +from . import ( + encoding, + error, + util, +) logfile = None @@ -32,6 +43,10 @@ self.out = out self.channel = channel + @property + def name(self): + return '<%c-channel>' % self.channel + def write(self, data): if not data: return @@ -64,6 +79,10 @@ self.out = out self.channel = channel + @property + def name(self): + return '<%c-channel>' % self.channel + def read(self, size=-1): if size < 0: # if we need to consume all the clients input, ask for 4k chunks @@ -174,6 +193,7 @@ def runcommand(self): """ reads a list of \0 terminated arguments, executes and writes the return code to the result channel """ + from . 
import dispatch # avoid cycle length = struct.unpack('>I', self._read(4))[0] if not length: @@ -194,9 +214,17 @@ self.repo.ui = self.repo.dirstate._ui = repoui self.repo.invalidateall() + # reset last-print time of progress bar per command + # (progbar is singleton, we don't have to do for all uis) + if copiedui._progbar: + copiedui._progbar.resetstate() + for ui in uis: - # any kind of interaction must use server channels - ui.setconfig('ui', 'nontty', 'true', 'commandserver') + # any kind of interaction must use server channels, but chg may + # replace channels by fully functional tty files. so nontty is + # enforced only if cin is a channel. + if not util.safehasattr(self.cin, 'fileno'): + ui.setconfig('ui', 'nontty', 'true', 'commandserver') req = dispatch.request(args[:], copiedui, self.repo, self.cin, self.cout, self.cerr) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/config.py --- a/mercurial/config.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/config.py Mon Jan 11 14:27:12 2016 -0600 @@ -30,6 +30,8 @@ return config(self) def __contains__(self, section): return section in self._data + def hasitem(self, section, item): + return item in self._data.get(section, {}) def __getitem__(self, section): return self._data.get(section, {}) def __iter__(self): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/context.py --- a/mercurial/context.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/context.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,17 +5,37 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import + +import errno +import os import re +import stat -from node import nullid, nullrev, wdirid, short, hex, bin -from i18n import _ -import mdiff, error, util, scmutil, subrepo, patch, encoding, phases -import match as matchmod -import os, errno, stat -import obsolete as obsmod -import repoview -import fileset -import revlog +from .i18n import _ +from .node import ( + bin, + hex, + nullid, + nullrev, + short, + wdirid, +) +from . import ( + encoding, + error, + fileset, + match as matchmod, + mdiff, + obsolete as obsmod, + patch, + phases, + repoview, + revlog, + scmutil, + subrepo, + util, +) propertycache = util.propertycache @@ -124,6 +144,8 @@ # The file was not a new file in mf2, so an entry # from diff is really a difference. modified.append(fn) + elif flag1 != flag2: + modified.append(fn) elif self[fn].cmp(other[fn]): # node2 was newnode, but the working file doesn't # match the one in mf1. 
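The one-line config.py hunk above adds hasitem() alongside the existing __contains__. In the stripped-down stand-in below, only those two method bodies are taken from the hunk; the set() helper and the sample data are illustrative::

  class config(object):
      def __init__(self):
          self._data = {}                       # section -> {item: value}

      def set(self, section, item, value):      # illustrative helper
          self._data.setdefault(section, {})[item] = value

      def __contains__(self, section):          # existing check: section only
          return section in self._data

      def hasitem(self, section, item):         # added by this patch
          return item in self._data.get(section, {})

  cfg = config()
  cfg.set('ui', 'username', 'alice')
  assert 'ui' in cfg and cfg.hasitem('ui', 'username')
  assert not cfg.hasitem('ui', 'editor')        # section exists, item does not
  assert not cfg.hasitem('paths', 'default')    # missing section is not an error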
@@ -221,9 +243,10 @@ return self._parents[0] def p2(self): - if len(self._parents) == 2: - return self._parents[1] - return changectx(self._repo, -1) + parents = self._parents + if len(parents) == 2: + return parents[1] + return changectx(self._repo, nullrev) def _fileinfo(self, path): if '_manifest' in self.__dict__: @@ -270,7 +293,7 @@ r = self._repo return matchmod.match(r.root, r.getcwd(), pats, include, exclude, default, - auditor=r.auditor, ctx=self, + auditor=r.nofsauditor, ctx=self, listsubrepos=listsubrepos, badfn=badfn) def diff(self, ctx2=None, match=None, **opts): @@ -335,17 +358,19 @@ if listsubrepos: for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): - rev2 = ctx2.subrev(subpath) try: - submatch = matchmod.narrowmatcher(subpath, match) - s = sub.status(rev2, match=submatch, ignored=listignored, - clean=listclean, unknown=listunknown, - listsubrepos=True) - for rfiles, sfiles in zip(r, s): - rfiles.extend("%s/%s" % (subpath, f) for f in sfiles) - except error.LookupError: - self._repo.ui.status(_("skipping missing " - "subrepository: %s\n") % subpath) + rev2 = ctx2.subrev(subpath) + except KeyError: + # A subrepo that existed in node1 was deleted between + # node1 and node2 (inclusive). Thus, ctx2's substate + # won't contain that subpath. The best we can do ignore it. + rev2 = None + submatch = matchmod.narrowmatcher(subpath, match) + s = sub.status(rev2, match=submatch, ignored=listignored, + clean=listclean, unknown=listunknown, + listsubrepos=True) + for rfiles, sfiles in zip(r, s): + rfiles.extend("%s/%s" % (subpath, f) for f in sfiles) for l in r: l.sort() @@ -366,8 +391,8 @@ extra = {} if branch: extra['branch'] = encoding.fromlocal(branch) - ctx = memctx(repo, parents, text, files, getfilectx, user, - date, extra, editor) + ctx = memctx(repo, parents, text, files, getfilectx, user, + date, extra, editor) return ctx class changectx(basectx): @@ -511,10 +536,11 @@ @propertycache def _parents(self): - p = self._repo.changelog.parentrevs(self._rev) - if p[1] == nullrev: - p = p[:-1] - return [changectx(self._repo, x) for x in p] + repo = self._repo + p1, p2 = repo.changelog.parentrevs(self._rev) + if p2 == nullrev: + return [changectx(repo, p1)] + return [changectx(repo, p1), changectx(repo, p2)] def changeset(self): return self._changeset @@ -747,11 +773,22 @@ def islink(self): return 'l' in self.flags() + def isabsent(self): + """whether this filectx represents a file not in self._changectx + + This is mainly for merge code to detect change/delete conflicts. This is + expected to be True for all subclasses of basectx.""" + return False + + _customcmp = False def cmp(self, fctx): """compare with other file context returns True if different than fctx. """ + if fctx._customcmp: + return fctx.cmp(self) + if (fctx._filerev is None and (self._repo._encodefilterpats # if file data starts with '\1\n', empty metadata block is @@ -1140,17 +1177,17 @@ # filesystem doesn't support them copiesget = self._repo.dirstate.copies().get - - if len(self._parents) < 2: + parents = self.parents() + if len(parents) < 2: # when we have one parent, it's easy: copy from parent - man = self._parents[0].manifest() + man = parents[0].manifest() def func(f): f = copiesget(f, f) return man.flags(f) else: # merges are tricky: we try to reconstruct the unstored # result from the merge (issue1802) - p1, p2 = self._parents + p1, p2 = parents pa = p1.ancestor(p2) m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest() @@ -1180,10 +1217,11 @@ an extra 'a'. 
This is used by manifests merge to see that files are different and by update logic to avoid deleting newly added files. """ + parents = self.parents() - man1 = self._parents[0].manifest() + man1 = parents[0].manifest() man = man1.copy() - if len(self._parents) > 1: + if len(parents) > 1: man2 = self.p2().manifest() def getman(f): if f in man1: @@ -1694,7 +1732,7 @@ def date(self): t, tz = self._changectx.date() try: - return (util.statmtimesec(self._repo.wvfs.lstat(self._path)), tz) + return (self._repo.wvfs.lstat(self._path).st_mtime, tz) except OSError as err: if err.errno != errno.ENOENT: raise diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/crecord.py --- a/mercurial/crecord.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/crecord.py Mon Jan 11 14:27:12 2016 -0600 @@ -24,6 +24,7 @@ encoding, error, patch as patchmod, + util, ) # This is required for ncurses to display non-ASCII characters in default user @@ -455,11 +456,11 @@ # if there are no changed files if len(headers) == 0: - return [] + return [], {} uiheaders = [uiheader(h) for h in headers] # let user choose headers/hunks/lines, and mark their applied flags # accordingly - chunkselector(ui, uiheaders) + ret = chunkselector(ui, uiheaders) appliedhunklist = [] for hdr in uiheaders: if (hdr.applied and @@ -477,7 +478,7 @@ else: fixoffset += hnk.removed - hnk.added - return appliedhunklist + return (appliedhunklist, ret) def gethw(): """ @@ -506,6 +507,7 @@ raise error.Abort(chunkselector.initerr) # ncurses does not restore signal handler for SIGTSTP signal.signal(signal.SIGTSTP, f) + return chunkselector.opts def testdecorator(testfn, f): def u(*args, **kwargs): @@ -526,6 +528,7 @@ while True: if chunkselector.handlekeypressed(testcommands.pop(0), test=True): break + return chunkselector.opts class curseschunkselector(object): def __init__(self, headerlist, ui): @@ -533,6 +536,7 @@ self.headerlist = patch(headerlist) self.ui = ui + self.opts = {} self.errorstr = None # list of all chunks @@ -1012,7 +1016,7 @@ pairname="legend") printstring(self.statuswin, " (f)old/unfold; (c)onfirm applied; (q)uit; (?) help " - "| [X]=hunk applied **=folded", + "| [X]=hunk applied **=folded, toggle [a]mend mode", pairname="legend") except curses.error: pass @@ -1368,7 +1372,7 @@ F : fold / unfold parent item and all of its ancestors m : edit / resume editing the commit message e : edit the currently selected hunk - a : toggle amend mode (hg rev >= 2.2) + a : toggle amend mode (hg rev >= 2.2), only with commit -i c : confirm selected changes r : review/edit and confirm selected changes q : quit without confirming (no changes will be made) @@ -1435,6 +1439,35 @@ else: return False + def toggleamend(self, opts, test): + """Toggle the amend flag. + + When the amend flag is set, a commit will modify the most recently + committed changeset, instead of creating a new changeset. Otherwise, a + new changeset will be created (the normal commit behavior). 
+ + """ + try: + ver = float(util.version()[:3]) + except ValueError: + ver = 1 + if ver < 2.19: + msg = ("The amend option is unavailable with hg versions < 2.2\n\n" + "Press any key to continue.") + elif opts.get('amend') is None: + opts['amend'] = True + msg = ("Amend option is turned on -- commiting the currently " + "selected changes will not create a new changeset, but " + "instead update the most recently committed changeset.\n\n" + "Press any key to continue.") + elif opts.get('amend') is True: + opts['amend'] = None + msg = ("Amend option is turned off -- commiting the currently " + "selected changes will create a new changeset.\n\n" + "Press any key to continue.") + if not test: + self.confirmationwindow(msg) + def recenterdisplayedarea(self): """ once we scrolled with pg up pg down we can be pointing outside of the @@ -1572,6 +1605,8 @@ self.leftarrowshiftevent() elif keypressed in ["q"]: raise error.Abort(_('user quit')) + elif keypressed in ['a']: + self.toggleamend(self.opts, test) elif keypressed in ["c"]: if self.confirmcommit(): return True diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/demandimport.py --- a/mercurial/demandimport.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/demandimport.py Mon Jan 11 14:27:12 2016 -0600 @@ -133,6 +133,8 @@ self._load() setattr(self._module, attr, val) +_pypy = '__pypy__' in sys.builtin_module_names + def _demandimport(name, globals=None, locals=None, fromlist=None, level=level): if not locals or name in ignore or fromlist == ('*',): # these cases we can't really delay @@ -182,16 +184,28 @@ symbol._addref(globalname) if level >= 0: - # Mercurial's enforced import style does not use - # "from a import b,c,d" or "from .a import b,c,d" syntax. In - # addition, this appears to be giving errors with some modules - # for unknown reasons. Since we shouldn't be using this syntax - # much, work around the problems. + # The "from a import b,c,d" or "from .a import b,c,d" + # syntax gives errors with some modules for unknown + # reasons. Work around the problem. if name: return _hgextimport(_origimport, name, globals, locals, fromlist, level) - mod = _hgextimport(_origimport, name, globals, locals, level=level) + if _pypy: + # PyPy's __import__ throws an exception if invoked + # with an empty name and no fromlist. Recreate the + # desired behaviour by hand. + mn = globalname + mod = sys.modules[mn] + if getattr(mod, '__path__', nothing) is nothing: + mn = mn.rsplit('.', 1)[0] + mod = sys.modules[mn] + if level > 1: + mn = mn.rsplit('.', level - 1)[0] + mod = sys.modules[mn] + else: + mod = _hgextimport(_origimport, name, globals, locals, + level=level) for x in fromlist: processfromitem(mod, x) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/destutil.py --- a/mercurial/destutil.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/destutil.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,6 +5,8 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import + from .i18n import _ from . import ( bookmarks, @@ -198,3 +200,21 @@ else: node = _destmergebranch(repo) return repo[node].rev() + +histeditdefaultrevset = 'reverse(only(.) and not public() and not ::merge())' + +def desthistedit(ui, repo): + """Default base revision to edit for `hg histedit`.""" + # Avoid cycle: scmutil -> revset -> destutil + from . 
import scmutil + + default = ui.config('histedit', 'defaultrev', histeditdefaultrevset) + if default: + revs = scmutil.revrange(repo, [default]) + if revs: + # The revset supplied by the user may not be in ascending order nor + # take the first revision. So do this manually. + revs.sort() + return revs.first() + + return None diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/dirstate.py --- a/mercurial/dirstate.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/dirstate.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,11 +5,25 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from node import nullid -from i18n import _ -import scmutil, util, osutil, parsers, encoding, pathutil, error -import os, stat, errno -import match as matchmod +from __future__ import absolute_import + +import collections +import errno +import os +import stat + +from .i18n import _ +from .node import nullid +from . import ( + encoding, + error, + match as matchmod, + osutil, + parsers, + pathutil, + scmutil, + util, +) propertycache = util.propertycache filecache = scmutil.filecache @@ -31,11 +45,19 @@ '''Get "now" timestamp on filesystem''' tmpfd, tmpname = vfs.mkstemp() try: - return util.statmtimesec(os.fstat(tmpfd)) + return os.fstat(tmpfd).st_mtime finally: os.close(tmpfd) vfs.unlink(tmpname) +def nonnormalentries(dmap): + '''Compute the nonnormal dirstate entries from the dmap''' + try: + return parsers.nonnormalentries(dmap) + except AttributeError: + return set(fname for fname, e in dmap.iteritems() + if e[0] != 'n' or e[3] == -1) + def _trypending(root, vfs, filename): '''Open file to be read according to HG_PENDING environment variable @@ -119,6 +141,10 @@ return self._copymap @propertycache + def _nonnormalset(self): + return nonnormalentries(self._map) + + @propertycache def _filefoldmap(self): try: makefilefoldmap = parsers.make_file_foldmap @@ -178,15 +204,7 @@ @rootcache('.hgignore') def _ignore(self): - files = [] - if os.path.exists(self._join('.hgignore')): - files.append(self._join('.hgignore')) - for name, path in self._ui.configitems("ui"): - if name == 'ignore' or name.startswith('ignore.'): - # we need to use os.path.join here rather than self._join - # because path is arbitrary and user-specified - files.append(os.path.join(self._rootdir, util.expandpath(path))) - + files = self._ignorefiles() if not files: return util.never @@ -418,7 +436,7 @@ def invalidate(self): for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch", - "_pl", "_dirs", "_ignore"): + "_pl", "_dirs", "_ignore", "_nonnormalset"): if a in self.__dict__: delattr(self, a) self._lastnormaltime = 0 @@ -467,15 +485,19 @@ self._dirs.addpath(f) self._dirty = True self._map[f] = dirstatetuple(state, mode, size, mtime) + if state != 'n' or mtime == -1: + self._nonnormalset.add(f) def normal(self, f): '''Mark a file normal and clean.''' s = os.lstat(self._join(f)) - mtime = util.statmtimesec(s) + mtime = s.st_mtime self._addpath(f, 'n', s.st_mode, s.st_size & _rangemask, mtime & _rangemask) if f in self._copymap: del self._copymap[f] + if f in self._nonnormalset: + self._nonnormalset.remove(f) if mtime > self._lastnormaltime: # Remember the most recent modification timeslot for status(), # to make sure we won't miss future size-preserving file content @@ -503,6 +525,8 @@ self._addpath(f, 'n', 0, -1, -1) if f in self._copymap: del self._copymap[f] + if f in self._nonnormalset: + self._nonnormalset.remove(f) def otherparent(self, f): '''Mark as 
coming from the other parent, always dirty.''' @@ -538,6 +562,7 @@ elif entry[0] == 'n' and entry[2] == -2: # other parent size = -2 self._map[f] = dirstatetuple('r', 0, size, 0) + self._nonnormalset.add(f) if size == 0 and f in self._copymap: del self._copymap[f] @@ -553,6 +578,8 @@ self._dirty = True self._droppath(f) del self._map[f] + if f in self._nonnormalset: + self._nonnormalset.remove(f) def _discoverpath(self, path, normed, ignoremissing, exists, storemap): if exists is None: @@ -630,6 +657,7 @@ def clear(self): self._map = {} + self._nonnormalset = set() if "_dirs" in self.__dict__: delattr(self, "_dirs") self._copymap = {} @@ -639,17 +667,24 @@ def rebuild(self, parent, allfiles, changedfiles=None): if changedfiles is None: + # Rebuild entire dirstate changedfiles = allfiles - oldmap = self._map - self.clear() - for f in allfiles: - if f not in changedfiles: - self._map[f] = oldmap[f] + lastnormaltime = self._lastnormaltime + self.clear() + self._lastnormaltime = lastnormaltime + + for f in changedfiles: + mode = 0o666 + if f in allfiles and 'x' in allfiles.flags(f): + mode = 0o777 + + if f in allfiles: + self._map[f] = dirstatetuple('n', mode, -1, 0) else: - if 'x' in allfiles.flags(f): - self._map[f] = dirstatetuple('n', 0o777, -1, 0) - else: - self._map[f] = dirstatetuple('n', 0o666, -1, 0) + self._map.pop(f, None) + if f in self._nonnormalset: + self._nonnormalset.remove(f) + self._pl = (parent, nullid) self._dirty = True @@ -657,13 +692,6 @@ if not self._dirty: return - # enough 'delaywrite' prevents 'pack_dirstate' from dropping - # timestamp of each entries in dirstate, because of 'now > mtime' - delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0) - if delaywrite > 0: - import time # to avoid useless import - time.sleep(delaywrite) - filename = self._filename if tr is False: # not explicitly specified if (self._ui.configbool('devel', 'all-warnings') @@ -689,6 +717,7 @@ for f, e in dmap.iteritems(): if e[0] == 'n' and e[3] == now: dmap[f] = dirstatetuple(e[0], e[1], e[2], -1) + self._nonnormalset.add(f) # emulate that all 'dirstate.normal' results are written out self._lastnormaltime = 0 @@ -704,8 +733,26 @@ def _writedirstate(self, st): # use the modification time of the newly created temporary file as the # filesystem's notion of 'now' - now = util.statmtimesec(util.fstat(st)) & _rangemask + now = util.fstat(st).st_mtime & _rangemask + + # enough 'delaywrite' prevents 'pack_dirstate' from dropping + # timestamp of each entries in dirstate, because of 'now > mtime' + delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0) + if delaywrite > 0: + # do we have any files to delay for? 
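nonnormalentries() above prefers the C helper in parsers and falls back to a pure-Python scan. The set comprehension below is that fallback from the hunk, applied to an invented dirstate map (entries are (state, mode, size, mtime) tuples); this is Python 2 code like the module it comes from::

  def nonnormalentries(dmap):
      # pure-Python fallback: anything not in state 'n', or with an
      # unset mtime (-1), needs to be re-examined by status
      return set(fname for fname, e in dmap.iteritems()
                 if e[0] != 'n' or e[3] == -1)

  dmap = {
      'clean.txt':   ('n', 0o644, 12, 1452544032),
      'nomtime.txt': ('n', 0o644,  7, -1),   # mtime dropped at write time
      'added.txt':   ('a', 0o644, -1, -1),
      'removed.txt': ('r', 0,      0,  0),
  }
  print(sorted(nonnormalentries(dmap)))
  # ['added.txt', 'nomtime.txt', 'removed.txt']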
+ for f, e in self._map.iteritems(): + if e[0] == 'n' and e[3] == now: + import time # to avoid useless import + # rather than sleep n seconds, sleep until the next + # multiple of n seconds + clock = time.time() + start = int(clock) - (int(clock) % delaywrite) + end = start + delaywrite + time.sleep(end - clock) + break + st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now)) + self._nonnormalset = nonnormalentries(self._map) st.close() self._lastnormaltime = 0 self._dirty = self._dirtypl = False @@ -720,6 +767,37 @@ return True return False + def _ignorefiles(self): + files = [] + if os.path.exists(self._join('.hgignore')): + files.append(self._join('.hgignore')) + for name, path in self._ui.configitems("ui"): + if name == 'ignore' or name.startswith('ignore.'): + # we need to use os.path.join here rather than self._join + # because path is arbitrary and user-specified + files.append(os.path.join(self._rootdir, util.expandpath(path))) + return files + + def _ignorefileandline(self, f): + files = collections.deque(self._ignorefiles()) + visited = set() + while files: + i = files.popleft() + patterns = matchmod.readpatternfile(i, self._ui.warn, + sourceinfo=True) + for pattern, lineno, line in patterns: + kind, p = matchmod._patsplit(pattern, 'glob') + if kind == "subinclude": + if p not in visited: + files.append(p) + continue + m = matchmod.match(self._root, '', [], [pattern], + warn=self._ui.warn) + if m(f): + return (i, lineno, line) + visited.add(i) + return (None, -1, "") + def _walkexplicit(self, match, subrepos): '''Get stat data about the files explicitly specified by match. @@ -1008,14 +1086,8 @@ # We may not have walked the full directory tree above, # so stat and check everything we missed. nf = iter(visit).next - pos = 0 - while pos < len(visit): - # visit in mid-sized batches so that we don't - # block signals indefinitely - xr = xrange(pos, min(len(visit), pos + 1000)) - for st in util.statfiles([join(visit[n]) for n in xr]): - results[nf()] = st - pos += 1000 + for st in util.statfiles([join(i) for i in visit]): + results[nf()] = st return results def status(self, match, subrepos, ignored, clean, unknown): @@ -1084,16 +1156,15 @@ if not st and state in "nma": dadd(fn) elif state == 'n': - mtime = util.statmtimesec(st) if (size >= 0 and ((size != st.st_size and size != st.st_size & _rangemask) or ((mode ^ st.st_mode) & 0o100 and checkexec)) or size == -2 # other parent or fn in copymap): madd(fn) - elif time != mtime and time != mtime & _rangemask: + elif time != st.st_mtime and time != st.st_mtime & _rangemask: ladd(fn) - elif mtime == lastnormaltime: + elif st.st_mtime == lastnormaltime: # fn may have just been marked as normal and it may have # changed in the same second without changing its size. # This can happen if we quickly do multiple commits. diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/discovery.py --- a/mercurial/discovery.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/discovery.py Mon Jan 11 14:27:12 2016 -0600 @@ -238,12 +238,42 @@ unsynced = set() return {None: (oldheads, newheads, unsynced)} -def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False, - newbookmarks=[]): +def _nowarnheads(pushop): + # Compute newly pushed bookmarks. We don't warn about bookmarked heads. 
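The debug.dirstate.delaywrite handling above no longer sleeps a flat n seconds; when an entry shares its mtime with 'now' it sleeps just long enough to reach the next multiple of n. The wrapper function below is mine, but the arithmetic is the hunk's::

  import time

  def delaytoboundary(delaywrite, clock=None):
      # sleep until the next multiple of 'delaywrite' seconds so that
      # 'now' ends up strictly greater than mtimes recorded this second
      if clock is None:
          clock = time.time()
      start = int(clock) - (int(clock) % delaywrite)
      end = start + delaywrite
      time.sleep(end - clock)

  delaytoboundary(1)    # sleeps at most one second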
+ + # internal config: bookmarks.pushing + newbookmarks = pushop.ui.configlist('bookmarks', 'pushing') + + repo = pushop.repo.unfiltered() + remote = pushop.remote + localbookmarks = repo._bookmarks + remotebookmarks = remote.listkeys('bookmarks') + bookmarkedheads = set() + for bm in localbookmarks: + rnode = remotebookmarks.get(bm) + if rnode and rnode in repo: + lctx, rctx = repo[bm], repo[rnode] + if bookmarks.validdest(repo, rctx, lctx): + bookmarkedheads.add(lctx.node()) + else: + if bm in newbookmarks and bm not in remotebookmarks: + bookmarkedheads.add(repo[bm].node()) + + return bookmarkedheads + +def checkheads(pushop): """Check that a push won't add any outgoing head raise Abort error and display ui message as needed. """ + + repo = pushop.repo.unfiltered() + remote = pushop.remote + outgoing = pushop.outgoing + remoteheads = pushop.remoteheads + newbranch = pushop.newbranch + inc = bool(pushop.incoming) + # Check for each named branch if we're creating new remote heads. # To be a remote head after push, node must be either: # - unknown locally @@ -268,19 +298,8 @@ hint=_("use 'hg push --new-branch' to create" " new remote branches")) - # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads. - localbookmarks = repo._bookmarks - remotebookmarks = remote.listkeys('bookmarks') - bookmarkedheads = set() - for bm in localbookmarks: - rnode = remotebookmarks.get(bm) - if rnode and rnode in repo: - lctx, rctx = repo[bm], repo[rnode] - if bookmarks.validdest(repo, rctx, lctx): - bookmarkedheads.add(lctx.node()) - else: - if bm in newbookmarks and bm not in remotebookmarks: - bookmarkedheads.add(repo[bm].node()) + # 2. Find heads that we need not warn about + nowarnheads = _nowarnheads(pushop) # 3. Check for new heads. # If there are more heads after the push than before, a suitable @@ -366,7 +385,7 @@ " pushing new heads") elif len(newhs) > len(oldhs): # remove bookmarked or existing remote heads from the new heads list - dhs = sorted(newhs - bookmarkedheads - oldhs) + dhs = sorted(newhs - nowarnheads - oldhs) if dhs: if errormsg is None: if branch not in ('default', None): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/dispatch.py --- a/mercurial/dispatch.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/dispatch.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,7 +5,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from __future__ import absolute_import +from __future__ import absolute_import, print_function import atexit import difflib @@ -59,6 +59,13 @@ # probably be investigated and tweaked. 
return [s for s in symbols if sim(s) > 0.6] +def _reportsimilar(write, similar): + if len(similar) == 1: + write(_("(did you mean %s?)\n") % similar[0]) + elif similar: + ss = ", ".join(sorted(similar)) + write(_("(did you mean one of %s?)\n") % ss) + def _formatparse(write, inst): similar = [] if isinstance(inst, error.UnknownIdentifier): @@ -71,12 +78,7 @@ write(_("unexpected leading whitespace\n")) else: write(_("hg: parse error: %s\n") % inst.args[0]) - if similar: - if len(similar) == 1: - write(_("(did you mean %r?)\n") % similar[0]) - else: - ss = ", ".join(sorted(similar)) - write(_("(did you mean one of %s?)\n") % ss) + _reportsimilar(write, similar) def dispatch(req): "run the command specified in req.args" @@ -107,6 +109,8 @@ return -1 except error.ParseError as inst: _formatparse(ferr.write, inst) + if inst.hint: + ferr.write(_("(%s)\n") % inst.hint) return -1 msg = ' '.join(' ' in a and repr(a) or a for a in req.args) @@ -202,6 +206,8 @@ (inst.args[0], " ".join(inst.args[1]))) except error.ParseError as inst: _formatparse(ui.warn, inst) + if inst.hint: + ui.warn(_("(%s)\n") % inst.hint) return -1 except error.LockHeld as inst: if inst.errno == errno.ETIMEDOUT: @@ -258,13 +264,14 @@ if len(inst.args) == 2: sim = _getsimilar(inst.args[1], inst.args[0]) if sim: - ui.warn(_('(did you mean one of %s?)\n') % - ', '.join(sorted(sim))) + _reportsimilar(ui.warn, sim) suggested = True if not suggested: commands.help_(ui, 'shortlist') except error.InterventionRequired as inst: ui.warn("%s\n" % inst) + if inst.hint: + ui.warn(_("(%s)\n") % inst.hint) return 1 except error.Abort as inst: ui.warn(_("abort: %s\n") % inst) @@ -320,7 +327,6 @@ except socket.error as inst: ui.warn(_("abort: %s\n") % inst.args[-1]) except: # re-raises - myver = util.version() # For compatibility checking, we discard the portion of the hg # version after the + on the assumption that if a "normal # user" is running a build with a + in it the packager @@ -328,8 +334,7 @@ # 'make local' copy of hg (where the version number can be out # of date) will be clueful enough to notice the implausible # version number and try updating. - compare = myver.split('+')[0] - ct = tuplever(compare) + ct = util.versiontuple(n=2) worst = None, ct, '' if ui.config('ui', 'supportcontact', None) is None: for name, mod in extensions.extensions(): @@ -344,7 +349,7 @@ if testedwith == 'internal': continue - tested = [tuplever(t) for t in testedwith.split()] + tested = [util.versiontuple(t, 2) for t in testedwith.split()] if ct in tested: continue @@ -369,7 +374,8 @@ warning = (_("** unknown exception encountered, " "please report by visiting\n** ") + bugtracker + '\n') warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) + - (_("** Mercurial Distributed SCM (version %s)\n") % myver) + + (_("** Mercurial Distributed SCM (version %s)\n") % + util.version()) + (_("** Extensions loaded: %s\n") % ", ".join([x[0] for x in extensions.extensions()]))) ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc()) @@ -378,15 +384,6 @@ return -1 -def tuplever(v): - try: - # Assertion: tuplever is only used for extension compatibility - # checking. Otherwise, the discarding of extra version fields is - # incorrect. 
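_reportsimilar() above centralizes the "(did you mean ...?)" hint that the parse-error and unknown-command paths now share. Stripped of the _() translation wrapper, it runs standalone::

  import sys

  def _reportsimilar(write, similar):
      # one candidate gets a direct suggestion, several get a sorted list
      if len(similar) == 1:
          write("(did you mean %s?)\n" % similar[0])
      elif similar:
          ss = ", ".join(sorted(similar))
          write("(did you mean one of %s?)\n" % ss)

  _reportsimilar(sys.stdout.write, ['annotate'])
  # (did you mean annotate?)
  _reportsimilar(sys.stdout.write, ['addremove', 'add'])
  # (did you mean one of add, addremove?)
  _reportsimilar(sys.stdout.write, [])          # prints nothing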
- return tuple([int(i) for i in v.split('.')[0:2]]) - except ValueError: - return tuple() - def aliasargs(fn, givenargs): args = getattr(fn, 'args', []) if args: @@ -437,6 +434,7 @@ self.help = '' self.norepo = True self.optionalrepo = False + self.inferrepo = False self.badalias = None self.unknowncmd = False @@ -502,6 +500,8 @@ self.norepo = False if cmd in commands.optionalrepo.split(' '): self.optionalrepo = True + if cmd in commands.inferrepo.split(' '): + self.inferrepo = True if self.help.startswith("hg " + cmd): # drop prefix in old-style help lines so hg shows the alias self.help = self.help[4 + len(cmd):] @@ -560,6 +560,8 @@ commands.norepo += ' %s' % alias if aliasdef.optionalrepo: commands.optionalrepo += ' %s' % alias + if aliasdef.inferrepo: + commands.inferrepo += ' %s' % alias def _parse(ui, args): options = {} @@ -726,9 +728,11 @@ strict = True norepo = commands.norepo optionalrepo = commands.optionalrepo + inferrepo = commands.inferrepo def restorecommands(): commands.norepo = norepo commands.optionalrepo = optionalrepo + commands.inferrepo = inferrepo cmdtable = commands.table.copy() addaliases(lui, cmdtable) else: @@ -864,7 +868,7 @@ if options['version']: return commands.version_(ui) if options['help']: - return commands.help_(ui, cmd, command=True) + return commands.help_(ui, cmd, command=cmd is not None) elif not cmd: return commands.help_(ui, 'shortlist') @@ -976,9 +980,9 @@ finally: thread.stop() thread.join() - print 'Collected %d stack frames (%d unique) in %2.2f seconds.' % ( + print('Collected %d stack frames (%d unique) in %2.2f seconds.' % ( time.clock() - start_time, thread.num_frames(), - thread.num_frames(unique=True)) + thread.num_frames(unique=True))) def statprofile(ui, func, fp): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/encoding.py --- a/mercurial/encoding.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/encoding.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,8 +5,15 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import error -import unicodedata, locale, os +from __future__ import absolute_import + +import locale +import os +import unicodedata + +from . import ( + error, +) # These unicode characters are ignored by HFS+ (Apple Technote 1150, # "Unicode Subtleties"), so we need to ignore them in some places for @@ -194,7 +201,7 @@ 'ellipsis' is always placed at trimmed side. >>> ellipsis = '+++' - >>> from mercurial import encoding + >>> from . import encoding >>> encoding.encoding = 'utf-8' >>> t= '1234567890' >>> print trim(t, 12, ellipsis=ellipsis) @@ -290,7 +297,7 @@ def asciilower(s): # delay importing avoids cyclic dependency around "parsers" in # pure Python build (util => i18n => encoding => parsers => util) - import parsers + from . import parsers impl = getattr(parsers, 'asciilower', _asciilower) global asciilower asciilower = impl @@ -306,7 +313,7 @@ def asciiupper(s): # delay importing avoids cyclic dependency around "parsers" in # pure Python build (util => i18n => encoding => parsers => util) - import parsers + from . 
import parsers impl = getattr(parsers, 'asciiupper', _asciiupper) global asciiupper asciiupper = impl @@ -400,7 +407,7 @@ if not _jsonmap: for x in xrange(32): - _jsonmap[chr(x)] = "\u%04x" %x + _jsonmap[chr(x)] = "\\u%04x" % x for x in xrange(32, 256): c = chr(x) _jsonmap[c] = c @@ -414,6 +421,25 @@ return ''.join(_jsonmap[c] for c in toutf8b(s)) +_utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4] + +def getutf8char(s, pos): + '''get the next full utf-8 character in the given string, starting at pos + + Raises a UnicodeError if the given location does not start a valid + utf-8 character. + ''' + + # find how many bytes to attempt decoding from first nibble + l = _utf8len[ord(s[pos]) >> 4] + if not l: # ascii + return s[pos] + + c = s[pos:pos + l] + # validate with attempted decode + c.decode("utf-8") + return c + def toutf8b(s): '''convert a local, possibly-binary string into UTF-8b @@ -444,24 +470,32 @@ internal surrogate encoding as a UTF-8 string.) ''' - if isinstance(s, localstr): - return s._utf8 + if "\xed" not in s: + if isinstance(s, localstr): + return s._utf8 + try: + s.decode('utf-8') + return s + except UnicodeDecodeError: + pass - try: - s.decode('utf-8') - return s - except UnicodeDecodeError: - # surrogate-encode any characters that don't round-trip - s2 = s.decode('utf-8', 'ignore').encode('utf-8') - r = "" - pos = 0 - for c in s: - if s2[pos:pos + 1] == c: - r += c + r = "" + pos = 0 + l = len(s) + while pos < l: + try: + c = getutf8char(s, pos) + if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf": + # have to re-escape existing U+DCxx characters + c = unichr(0xdc00 + ord(s[pos])).encode('utf-8') pos += 1 else: - r += unichr(0xdc00 + ord(c)).encode('utf-8') - return r + pos += len(c) + except UnicodeDecodeError: + c = unichr(0xdc00 + ord(s[pos])).encode('utf-8') + pos += 1 + r += c + return r def fromutf8b(s): '''Given a UTF-8b string, return a local, possibly-binary string. @@ -470,11 +504,19 @@ is a round-trip process for strings like filenames, but metadata that's was passed through tolocal will remain in UTF-8. + >>> roundtrip = lambda x: fromutf8b(toutf8b(x)) == x >>> m = "\\xc3\\xa9\\x99abcd" - >>> n = toutf8b(m) - >>> n + >>> toutf8b(m) '\\xc3\\xa9\\xed\\xb2\\x99abcd' - >>> fromutf8b(n) == m + >>> roundtrip(m) + True + >>> roundtrip("\\xc2\\xc2\\x80") + True + >>> roundtrip("\\xef\\xbf\\xbd") + True + >>> roundtrip("\\xef\\xef\\xbf\\xbd") + True + >>> roundtrip("\\xf1\\x80\\x80\\x80\\x80") True ''' @@ -482,11 +524,19 @@ if "\xed" not in s: return s - u = s.decode("utf-8") + # We could do this with the unicode type but some Python builds + # use UTF-16 internally (issue5031) which causes non-BMP code + # points to be escaped. Instead, we use our handy getutf8char + # helper again to walk the string without "decoding" it. 
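The comment above explains why fromutf8b() now walks the byte string with getutf8char() rather than decoding it: builds that store unicode as UTF-16 disagree about non-BMP code points (issue5031). The helper, copied from the hunk earlier in this file, is easy to exercise on its own; this is Python 2 byte-string code like the rest of encoding.py, and the sample string is mine::

  _utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]

  def getutf8char(s, pos):
      # sequence length comes from the first nibble; a failed decode of
      # the slice signals invalid UTF-8 starting at 'pos'
      l = _utf8len[ord(s[pos]) >> 4]
      if not l:  # ascii
          return s[pos]
      c = s[pos:pos + l]
      c.decode("utf-8")
      return c

  s = 'a\xc3\xa9b'              # 'a', then U+00E9 as two bytes, then 'b'
  assert getutf8char(s, 0) == 'a'
  assert getutf8char(s, 1) == '\xc3\xa9'
  assert getutf8char(s, 3) == 'b'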
+ r = "" - for c in u: - if ord(c) & 0xff00 == 0xdc00: - r += chr(ord(c) & 0xff) - else: - r += c.encode("utf-8") + pos = 0 + l = len(s) + while pos < l: + c = getutf8char(s, pos) + pos += len(c) + # unescape U+DCxx characters + if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf": + c = chr(ord(c.decode("utf-8")) & 0xff) + r += c return r diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/error.py --- a/mercurial/error.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/error.py Mon Jan 11 14:27:12 2016 -0600 @@ -50,7 +50,7 @@ class CommandError(Exception): """Exception raised on errors in parsing the command line.""" -class InterventionRequired(Exception): +class InterventionRequired(HintException): """Exception raised when a command requires human intervention.""" class Abort(HintException): @@ -72,14 +72,16 @@ class UpdateAbort(Abort): """Raised when an update is aborted for destination issue""" -class OutOfBandError(Exception): +class ResponseExpected(Abort): + """Raised when an EOF is received for a prompt""" + def __init__(self): + from .i18n import _ + Abort.__init__(self, _('response expected')) + +class OutOfBandError(HintException): """Exception raised when a remote repo reports failure""" - def __init__(self, *args, **kw): - Exception.__init__(self, *args) - self.hint = kw.get('hint') - -class ParseError(Exception): +class ParseError(HintException): """Raised when parsing config files and {rev,file}sets (msg[, pos])""" class UnknownIdentifier(ParseError): @@ -106,6 +108,16 @@ class RequirementError(RepoError): """Exception raised if .hg/requires has an unknown entry.""" +class UnsupportedMergeRecords(Abort): + def __init__(self, recordtypes): + from .i18n import _ + self.recordtypes = sorted(recordtypes) + s = ' '.join(self.recordtypes) + Abort.__init__( + self, _('unsupported merge state records: %s') % s, + hint=_('see https://mercurial-scm.org/wiki/MergeStateRecords for ' + 'more information')) + class LockError(IOError): def __init__(self, errno, strerror, filename, desc): IOError.__init__(self, errno, strerror, filename) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/exchange.py --- a/mercurial/exchange.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/exchange.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,16 +5,35 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from i18n import _ -from node import hex, nullid -import errno, urllib, urllib2 -import util, scmutil, changegroup, base85, error -import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey -import lock as lockmod -import streamclone -import sslutil -import tags -import url as urlmod +from __future__ import absolute_import + +import errno +import urllib +import urllib2 + +from .i18n import _ +from .node import ( + hex, + nullid, +) +from . import ( + base85, + bookmarks as bookmod, + bundle2, + changegroup, + discovery, + error, + lock as lockmod, + obsolete, + phases, + pushkey, + scmutil, + sslutil, + streamclone, + tags, + url as urlmod, + util, +) # Maps bundle compression human names to internal representation. 
_bundlespeccompressions = {'none': None, @@ -571,13 +590,7 @@ elif ctx.troubled(): raise error.Abort(mst[ctx.troubles()[0]] % ctx) - # internal config: bookmarks.pushing - newbm = pushop.ui.configlist('bookmarks', 'pushing') - discovery.checkheads(unfi, pushop.remote, outgoing, - pushop.remoteheads, - pushop.newbranch, - bool(pushop.incoming), - newbm) + discovery.checkheads(pushop) return True # List of names of steps to perform for an outgoing bundle2, order matters. @@ -1386,10 +1399,14 @@ remoteobs = pullop.remote.listkeys('obsolete') if 'dump0' in remoteobs: tr = pullop.gettransaction() + markers = [] for key in sorted(remoteobs, reverse=True): if key.startswith('dump'): data = base85.b85decode(remoteobs[key]) - pullop.repo.obsstore.mergemarkers(tr, data) + version, newmarks = obsolete._readmarkers(data) + markers += newmarks + if markers: + pullop.repo.obsstore.add(tr, markers) pullop.repo.invalidatevolatilesets() return tr @@ -1427,6 +1444,11 @@ return func return dec +def bundle2requested(bundlecaps): + if bundlecaps is not None: + return any(cap.startswith('HG2') for cap in bundlecaps) + return False + def getbundle(repo, source, heads=None, common=None, bundlecaps=None, **kwargs): """return a full bundle (with potentially multiple kind of parts) @@ -1442,10 +1464,8 @@ The implementation is at a very early stage and will get massive rework when the API of bundle is refined. """ + usebundle2 = bundle2requested(bundlecaps) # bundle10 case - usebundle2 = False - if bundlecaps is not None: - usebundle2 = any((cap.startswith('HG2') for cap in bundlecaps)) if not usebundle2: if bundlecaps and not kwargs.get('cg', True): raise ValueError(_('request for bundle10 must include changegroup')) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/extensions.py --- a/mercurial/extensions.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/extensions.py Mon Jan 11 14:27:12 2016 -0600 @@ -24,7 +24,8 @@ _extensions = {} _aftercallbacks = {} _order = [] -_ignore = ['hbisect', 'bookmarks', 'parentrevspec', 'interhg', 'inotify'] +_builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg', + 'inotify']) def extensions(ui=None): if ui: @@ -44,7 +45,7 @@ '''return module with given extension name''' mod = None try: - mod = _extensions[name] + mod = _extensions[name] except KeyError: for k, v in _extensions.iteritems(): if k.endswith('.' + name) or k.endswith('/' + name): @@ -75,7 +76,7 @@ shortname = name[6:] else: shortname = name - if shortname in _ignore: + if shortname in _builtin: return None if shortname in _extensions: return _extensions[shortname] @@ -100,6 +101,17 @@ if ui.debugflag: ui.traceback() mod = importh(name) + + # Before we do anything with the extension, check against minimum stated + # compatibility. This gives extension authors a mechanism to have their + # extensions short circuit when loaded with a known incompatible version + # of Mercurial. 
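For illustration (not part of the patch), a hypothetical third-party extension opts into the compatibility gate described in the comment above simply by declaring the attribute the loader checks; the module name and version string below are assumptions::

    # myext.py -- hypothetical extension module
    # If the running Mercurial is older than this (compared on the first two
    # version components via util.versiontuple, per the following hunk), the
    # extension is skipped with a warning instead of breaking at import time.
    minimumhgversion = '3.7'

    def reposetup(ui, repo):
        # placeholder hook; a real extension would do its setup here
        ui.debug('myext loaded\n')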
+ minver = getattr(mod, 'minimumhgversion', None) + if minver and util.versiontuple(minver, 2) > util.versiontuple(n=2): + ui.warn(_('(third party extension %s requires version %s or newer ' + 'of Mercurial; disabling)\n') % (shortname, minver)) + return + _extensions[shortname] = mod _order.append(shortname) for fn in _aftercallbacks.get(shortname, []): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/filemerge.py --- a/mercurial/filemerge.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/filemerge.py Mon Jan 11 14:27:12 2016 -0600 @@ -13,11 +13,12 @@ import tempfile from .i18n import _ -from .node import short +from .node import nullid, short from . import ( error, match, + scmutil, simplemerge, tagmerge, templatekw, @@ -43,6 +44,50 @@ mergeonly = 'mergeonly' # just the full merge, no premerge fullmerge = 'fullmerge' # both premerge and merge +class absentfilectx(object): + """Represents a file that's ostensibly in a context but is actually not + present in it. + + This is here because it's very specific to the filemerge code for now -- + other code is likely going to break with the values this returns.""" + def __init__(self, ctx, f): + self._ctx = ctx + self._f = f + + def path(self): + return self._f + + def size(self): + return None + + def data(self): + return None + + def filenode(self): + return nullid + + _customcmp = True + def cmp(self, fctx): + """compare with other file context + + returns True if different from fctx. + """ + return not (fctx.isabsent() and + fctx.ctx() == self.ctx() and + fctx.path() == self.path()) + + def flags(self): + return '' + + def changectx(self): + return self._ctx + + def isbinary(self): + return False + + def isabsent(self): + return True + def internaltool(name, mergetype, onfailure=None, precheck=None): '''return a decorator for populating internal merge tool table''' def decorator(func): @@ -75,8 +120,11 @@ exe = _toolstr(ui, tool, "executable", tool) return util.findexe(util.expandpath(exe)) -def _picktool(repo, ui, path, binary, symlink): - def check(tool, pat, symlink, binary): +def _picktool(repo, ui, path, binary, symlink, changedelete): + def supportscd(tool): + return tool in internals and internals[tool].mergetype == nomerge + + def check(tool, pat, symlink, binary, changedelete): tmsg = tool if pat: tmsg += " specified for " + pat @@ -89,6 +137,10 @@ ui.warn(_("tool %s can't handle symlinks\n") % tmsg) elif binary and not _toolbool(ui, tool, "binary"): ui.warn(_("tool %s can't handle binary\n") % tmsg) + elif changedelete and not supportscd(tool): + # the nomerge tools are the only tools that support change/delete + # conflicts + pass elif not util.gui() and _toolbool(ui, tool, "gui"): ui.warn(_("tool %s requires a GUI\n") % tmsg) else: @@ -100,21 +152,27 @@ force = ui.config('ui', 'forcemerge') if force: toolpath = _findtool(ui, force) - if toolpath: - return (force, util.shellquote(toolpath)) + if changedelete and not supportscd(toolpath): + return ":prompt", None else: - # mimic HGMERGE if given tool not found - return (force, force) + if toolpath: + return (force, util.shellquote(toolpath)) + else: + # mimic HGMERGE if given tool not found + return (force, force) # HGMERGE takes next precedence hgmerge = os.environ.get("HGMERGE") if hgmerge: - return (hgmerge, hgmerge) + if changedelete and not supportscd(hgmerge): + return ":prompt", None + else: + return (hgmerge, hgmerge) # then patterns for pat, tool in ui.configitems("merge-patterns"): mf = match.match(repo.root, '', [pat]) - if mf(path) and check(tool, pat, symlink, False): + 
if mf(path) and check(tool, pat, symlink, False, changedelete): toolpath = _findtool(ui, tool) return (tool, util.shellquote(toolpath)) @@ -131,17 +189,19 @@ tools = sorted([(-p, t) for t, p in tools.items() if t not in disabled]) uimerge = ui.config("ui", "merge") if uimerge: - if uimerge not in names: + # external tools defined in uimerge won't be able to handle + # change/delete conflicts + if uimerge not in names and not changedelete: return (uimerge, uimerge) tools.insert(0, (None, uimerge)) # highest priority tools.append((None, "hgmerge")) # the old default, if found for p, t in tools: - if check(t, None, symlink, binary): + if check(t, None, symlink, binary, changedelete): toolpath = _findtool(ui, t) return (t, util.shellquote(toolpath)) # internal merge or prompt as last resort - if symlink or binary: + if symlink or binary or changedelete: return ":prompt", None return ":merge", None @@ -175,23 +235,53 @@ ui = repo.ui fd = fcd.path() - if ui.promptchoice(_(" no tool found to merge %s\n" - "keep (l)ocal or take (o)ther?" - "$$ &Local $$ &Other") % fd, 0): - return _iother(repo, mynode, orig, fcd, fco, fca, toolconf) - else: - return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf) + try: + if fco.isabsent(): + index = ui.promptchoice( + _("local changed %s which remote deleted\n" + "use (c)hanged version, (d)elete, or leave (u)nresolved?" + "$$ &Changed $$ &Delete $$ &Unresolved") % fd, 2) + choice = ['local', 'other', 'unresolved'][index] + elif fcd.isabsent(): + index = ui.promptchoice( + _("remote changed %s which local deleted\n" + "use (c)hanged version, leave (d)eleted, or " + "leave (u)nresolved?" + "$$ &Changed $$ &Deleted $$ &Unresolved") % fd, 2) + choice = ['other', 'local', 'unresolved'][index] + else: + index = ui.promptchoice( + _("no tool found to merge %s\n" + "keep (l)ocal, take (o)ther, or leave (u)nresolved?" + "$$ &Local $$ &Other $$ &Unresolved") % fd, 2) + choice = ['local', 'other', 'unresolved'][index] + + if choice == 'other': + return _iother(repo, mynode, orig, fcd, fco, fca, toolconf) + elif choice == 'local': + return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf) + elif choice == 'unresolved': + return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf) + except error.ResponseExpected: + ui.write("\n") + return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf) @internaltool('local', nomerge) def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf): """Uses the local version of files as the merged version.""" - return 0 + return 0, fcd.isabsent() @internaltool('other', nomerge) def _iother(repo, mynode, orig, fcd, fco, fca, toolconf): """Uses the other version of files as the merged version.""" - repo.wwrite(fcd.path(), fco.data(), fco.flags()) - return 0 + if fco.isabsent(): + # local changed, remote deleted -- 'deleted' picked + repo.wvfs.unlinkpath(fcd.path()) + deleted = True + else: + repo.wwrite(fcd.path(), fco.data(), fco.flags()) + deleted = False + return 0, deleted @internaltool('fail', nomerge) def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf): @@ -199,11 +289,14 @@ Rather than attempting to merge files that were modified on both branches, it marks them as unresolved. 
The resolve command must be used to resolve these conflicts.""" - return 1 + # for change/delete conflicts write out the changed version, then fail + if fcd.isabsent(): + repo.wwrite(fcd.path(), fco.data(), fco.flags()) + return 1, False -def _premerge(repo, toolconf, files, labels=None): +def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None): tool, toolpath, binary, symlink = toolconf - if symlink: + if symlink or fcd.isabsent() or fco.isabsent(): return 1 a, b, c, back = files @@ -236,12 +329,16 @@ util.copyfile(back, a) # restore from backup and try again return 1 # continue merging -def _symlinkcheck(repo, mynode, orig, fcd, fco, fca, toolconf): +def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf): tool, toolpath, binary, symlink = toolconf if symlink: repo.ui.warn(_('warning: internal %s cannot merge symlinks ' 'for %s\n') % (tool, fcd.path())) return False + if fcd.isabsent() or fco.isabsent(): + repo.ui.warn(_('warning: internal %s cannot merge change/delete ' + 'conflict for %s\n') % (tool, fcd.path())) + return False return True def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode): @@ -255,12 +352,12 @@ ui = repo.ui r = simplemerge.simplemerge(ui, a, b, c, label=labels, mode=mode) - return True, r + return True, r, False @internaltool('union', fullmerge, _("warning: conflicts while merging %s! " "(edit, then use 'hg resolve --mark')\n"), - precheck=_symlinkcheck) + precheck=_mergecheck) def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal non-interactive simple merge algorithm for merging @@ -272,7 +369,7 @@ @internaltool('merge', fullmerge, _("warning: conflicts while merging %s! " "(edit, then use 'hg resolve --mark')\n"), - precheck=_symlinkcheck) + precheck=_mergecheck) def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal non-interactive simple merge algorithm for merging @@ -285,7 +382,7 @@ @internaltool('merge3', fullmerge, _("warning: conflicts while merging %s! " "(edit, then use 'hg resolve --mark')\n"), - precheck=_symlinkcheck) + precheck=_mergecheck) def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal non-interactive simple merge algorithm for merging @@ -305,30 +402,26 @@ """ assert localorother is not None tool, toolpath, binary, symlink = toolconf - if symlink: - repo.ui.warn(_('warning: :merge-%s cannot merge symlinks ' - 'for %s\n') % (localorother, fcd.path())) - return False, 1 a, b, c, back = files r = simplemerge.simplemerge(repo.ui, a, b, c, label=labels, localorother=localorother) return True, r -@internaltool('merge-local', mergeonly) +@internaltool('merge-local', mergeonly, precheck=_mergecheck) def _imergelocal(*args, **kwargs): """ Like :merge, but resolve all conflicts non-interactively in favor of the local changes.""" success, status = _imergeauto(localorother='local', *args, **kwargs) - return success, status + return success, status, False -@internaltool('merge-other', mergeonly) +@internaltool('merge-other', mergeonly, precheck=_mergecheck) def _imergeother(*args, **kwargs): """ Like :merge, but resolve all conflicts non-interactively in favor of the other changes.""" success, status = _imergeauto(localorother='other', *args, **kwargs) - return success, status + return success, status, False @internaltool('tagmerge', mergeonly, _("automatic tag merging of %s failed! " @@ -338,7 +431,8 @@ """ Uses the internal tag merge algorithm (experimental). 
""" - return tagmerge.merge(repo, fcd, fco, fca) + success, status = tagmerge.merge(repo, fcd, fco, fca) + return success, status, False @internaltool('dump', fullmerge) def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): @@ -356,10 +450,14 @@ util.copyfile(a, a + ".local") repo.wwrite(fd + ".other", fco.data(), fco.flags()) repo.wwrite(fd + ".base", fca.data(), fca.flags()) - return False, 1 + return False, 1, False def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): tool, toolpath, binary, symlink = toolconf + if fcd.isabsent() or fco.isabsent(): + repo.ui.warn(_('warning: %s cannot merge change/delete conflict ' + 'for %s\n') % (tool, fcd.path())) + return False, 1, None a, b, c, back = files out = "" env = {'HG_FILE': fcd.path(), @@ -383,7 +481,7 @@ repo.ui.debug('launching merge tool: %s\n' % cmd) r = ui.system(cmd, cwd=repo.root, environ=env) repo.ui.debug('merge tool returned: %s\n' % r) - return True, r + return True, r, False def _formatconflictmarker(repo, ctx, template, label, pad): """Applies the given template to the ctx, prefixed by the label. @@ -448,8 +546,8 @@ fca = ancestor file context fcd = local file context for current/destination file - Returns whether the merge is complete, and the return value of the merge. - """ + Returns whether the merge is complete, the return value of the merge, and + a boolean indicating whether the file was deleted from disk.""" def temp(prefix, ctx): pre = "%s~%s." % (os.path.basename(ctx.path()), prefix) @@ -461,18 +559,19 @@ return name if not fco.cmp(fcd): # files identical? - return True, None + return True, None, False ui = repo.ui fd = fcd.path() binary = fcd.isbinary() or fco.isbinary() or fca.isbinary() symlink = 'l' in fcd.flags() + fco.flags() - tool, toolpath = _picktool(repo, ui, fd, binary, symlink) + changedelete = fcd.isabsent() or fco.isabsent() + tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete) if tool in internals and tool.startswith('internal:'): # normalize to new-style names (':merge' etc) tool = tool[len('internal'):] - ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" % - (tool, fd, binary, symlink)) + ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n" + % (tool, fd, binary, symlink, changedelete)) if tool in internals: func = internals[tool] @@ -488,7 +587,8 @@ toolconf = tool, toolpath, binary, symlink if mergetype == nomerge: - return True, func(repo, mynode, orig, fcd, fco, fca, toolconf) + r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf) + return True, r, deleted if premerge: if orig != fco.path(): @@ -502,14 +602,17 @@ toolconf): if onfailure: ui.warn(onfailure % fd) - return True, 1 + return True, 1, False a = repo.wjoin(fd) b = temp("base", fca) c = temp("other", fco) - back = a + ".orig" - if premerge: - util.copyfile(a, back) + if not fcd.isabsent(): + back = scmutil.origpath(ui, repo, a) + if premerge: + util.copyfile(a, back) + else: + back = None files = (a, b, c, back) r = 1 @@ -521,12 +624,13 @@ labels = _formatlabels(repo, fcd, fco, fca, labels) if premerge and mergetype == fullmerge: - r = _premerge(repo, toolconf, files, labels=labels) + r = _premerge(repo, fcd, fco, fca, toolconf, files, labels=labels) # complete if premerge successful (r is 0) - return not r, r + return not r, r, False - needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf, files, - labels=labels) + needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca, + toolconf, files, labels=labels) + if 
needcheck: r = _check(r, ui, tool, fcd, files) @@ -534,9 +638,9 @@ if onfailure: ui.warn(onfailure % fd) - return True, r + return True, r, deleted finally: - if not r: + if not r and back is not None: util.unlink(back) util.unlink(b) util.unlink(c) @@ -561,13 +665,13 @@ if not r and not checked and (_toolbool(ui, tool, "checkchanged") or 'changed' in _toollist(ui, tool, "check")): - if filecmp.cmp(a, back): + if back is not None and filecmp.cmp(a, back): if ui.promptchoice(_(" output file %s appears unchanged\n" "was merge successful (yn)?" "$$ &Yes $$ &No") % fd, 1): r = 1 - if _toolbool(ui, tool, "fixeol"): + if back is not None and _toolbool(ui, tool, "fixeol"): _matcheol(a, back) return r diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/fileset.py --- a/mercurial/fileset.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/fileset.py Mon Jan 11 14:27:12 2016 -0600 @@ -128,47 +128,99 @@ return [f for f in xl if f not in yl] def listset(mctx, a, b): - raise error.ParseError(_("can't use a list in this context")) + raise error.ParseError(_("can't use a list in this context"), + hint=_('see hg help "filesets.x or y"')) + +# symbols are callable like: +# fun(mctx, x) +# with: +# mctx - current matchctx instance +# x - argument in tree form +symbols = {} + +# filesets using matchctx.status() +_statuscallers = set() + +# filesets using matchctx.existing() +_existingcallers = set() + +def predicate(decl, callstatus=False, callexisting=False): + """Return a decorator for fileset predicate function + + 'decl' argument is the declaration (including argument list like + 'adds(pattern)') or the name (for internal use only) of predicate. + Optional 'callstatus' argument indicates whether predicate implies + 'matchctx.status()' at runtime or not (False, by default). + + Optional 'callexisting' argument indicates whether predicate + implies 'matchctx.existing()' at runtime or not (False, by + default). + """ + def decorator(func): + i = decl.find('(') + if i > 0: + name = decl[:i] + else: + name = decl + symbols[name] = func + if callstatus: + _statuscallers.add(name) + if callexisting: + _existingcallers.add(name) + if func.__doc__: + func.__doc__ = "``%s``\n %s" % (decl, func.__doc__.strip()) + return func + return decorator + +@predicate('modified()', callstatus=True) def modified(mctx, x): - """``modified()`` - File that is modified according to :hg:`status`. + """File that is modified according to :hg:`status`. """ # i18n: "modified" is a keyword getargs(x, 0, 0, _("modified takes no arguments")) s = mctx.status().modified return [f for f in mctx.subset if f in s] +@predicate('added()', callstatus=True) def added(mctx, x): - """``added()`` - File that is added according to :hg:`status`. + """File that is added according to :hg:`status`. """ # i18n: "added" is a keyword getargs(x, 0, 0, _("added takes no arguments")) s = mctx.status().added return [f for f in mctx.subset if f in s] +@predicate('removed()', callstatus=True) def removed(mctx, x): - """``removed()`` - File that is removed according to :hg:`status`. + """File that is removed according to :hg:`status`. """ # i18n: "removed" is a keyword getargs(x, 0, 0, _("removed takes no arguments")) s = mctx.status().removed return [f for f in mctx.subset if f in s] +@predicate('deleted()', callstatus=True) def deleted(mctx, x): - """``deleted()`` - File that is deleted according to :hg:`status`. + """Alias for ``missing()``. 
""" # i18n: "deleted" is a keyword getargs(x, 0, 0, _("deleted takes no arguments")) s = mctx.status().deleted return [f for f in mctx.subset if f in s] +@predicate('missing()', callstatus=True) +def missing(mctx, x): + """File that is missing according to :hg:`status`. + """ + # i18n: "missing" is a keyword + getargs(x, 0, 0, _("missing takes no arguments")) + s = mctx.status().deleted + return [f for f in mctx.subset if f in s] + +@predicate('unknown()', callstatus=True) def unknown(mctx, x): - """``unknown()`` - File that is unknown according to :hg:`status`. These files will only be + """File that is unknown according to :hg:`status`. These files will only be considered if this predicate is used. """ # i18n: "unknown" is a keyword @@ -176,9 +228,9 @@ s = mctx.status().unknown return [f for f in mctx.subset if f in s] +@predicate('ignored()', callstatus=True) def ignored(mctx, x): - """``ignored()`` - File that is ignored according to :hg:`status`. These files will only be + """File that is ignored according to :hg:`status`. These files will only be considered if this predicate is used. """ # i18n: "ignored" is a keyword @@ -186,9 +238,9 @@ s = mctx.status().ignored return [f for f in mctx.subset if f in s] +@predicate('clean()', callstatus=True) def clean(mctx, x): - """``clean()`` - File that is clean according to :hg:`status`. + """File that is clean according to :hg:`status`. """ # i18n: "clean" is a keyword getargs(x, 0, 0, _("clean takes no arguments")) @@ -197,7 +249,13 @@ def func(mctx, a, b): if a[0] == 'symbol' and a[1] in symbols: - return symbols[a[1]](mctx, b) + funcname = a[1] + enabled = mctx._existingenabled + mctx._existingenabled = funcname in _existingcallers + try: + return symbols[funcname](mctx, b) + finally: + mctx._existingenabled = enabled keep = lambda fn: getattr(fn, '__doc__', None) is not None @@ -217,64 +275,64 @@ raise error.ParseError(err) return l +@predicate('binary()', callexisting=True) def binary(mctx, x): - """``binary()`` - File that appears to be binary (contains NUL bytes). + """File that appears to be binary (contains NUL bytes). """ # i18n: "binary" is a keyword getargs(x, 0, 0, _("binary takes no arguments")) return [f for f in mctx.existing() if util.binary(mctx.ctx[f].data())] +@predicate('exec()', callexisting=True) def exec_(mctx, x): - """``exec()`` - File that is marked as executable. + """File that is marked as executable. """ # i18n: "exec" is a keyword getargs(x, 0, 0, _("exec takes no arguments")) return [f for f in mctx.existing() if mctx.ctx.flags(f) == 'x'] +@predicate('symlink()', callexisting=True) def symlink(mctx, x): - """``symlink()`` - File that is marked as a symlink. + """File that is marked as a symlink. """ # i18n: "symlink" is a keyword getargs(x, 0, 0, _("symlink takes no arguments")) return [f for f in mctx.existing() if mctx.ctx.flags(f) == 'l'] +@predicate('resolved()') def resolved(mctx, x): - """``resolved()`` - File that is marked resolved according to :hg:`resolve -l`. + """File that is marked resolved according to :hg:`resolve -l`. """ # i18n: "resolved" is a keyword getargs(x, 0, 0, _("resolved takes no arguments")) if mctx.ctx.rev() is not None: return [] - ms = merge.mergestate(mctx.ctx.repo()) + ms = merge.mergestate.read(mctx.ctx.repo()) return [f for f in mctx.subset if f in ms and ms[f] == 'r'] +@predicate('unresolved()') def unresolved(mctx, x): - """``unresolved()`` - File that is marked unresolved according to :hg:`resolve -l`. + """File that is marked unresolved according to :hg:`resolve -l`. 
""" # i18n: "unresolved" is a keyword getargs(x, 0, 0, _("unresolved takes no arguments")) if mctx.ctx.rev() is not None: return [] - ms = merge.mergestate(mctx.ctx.repo()) + ms = merge.mergestate.read(mctx.ctx.repo()) return [f for f in mctx.subset if f in ms and ms[f] == 'u'] +@predicate('hgignore()') def hgignore(mctx, x): - """``hgignore()`` - File that matches the active .hgignore pattern. + """File that matches the active .hgignore pattern. """ # i18n: "hgignore" is a keyword getargs(x, 0, 0, _("hgignore takes no arguments")) ignore = mctx.ctx.repo().dirstate._ignore return [f for f in mctx.subset if ignore(f)] +@predicate('portable()') def portable(mctx, x): - """``portable()`` - File that has a portable name. (This doesn't include filenames with case + """File that has a portable name. (This doesn't include filenames with case collisions.) """ # i18n: "portable" is a keyword @@ -282,9 +340,9 @@ checkwinfilename = util.checkwinfilename return [f for f in mctx.subset if checkwinfilename(f) is None] +@predicate('grep(regex)', callexisting=True) def grep(mctx, x): - """``grep(regex)`` - File contains the given regular expression. + """File contains the given regular expression. """ try: # i18n: "grep" is a keyword @@ -309,9 +367,9 @@ except ValueError: raise error.ParseError(_("couldn't parse size: %s") % s) +@predicate('size(expression)', callexisting=True) def size(mctx, x): - """``size(expression)`` - File size matches the given expression. Examples: + """File size matches the given expression. Examples: - 1k (files from 1024 to 2047 bytes) - < 20k (files less than 20480 bytes) @@ -347,9 +405,9 @@ return [f for f in mctx.existing() if m(mctx.ctx[f].size())] +@predicate('encoding(name)', callexisting=True) def encoding(mctx, x): - """``encoding(name)`` - File can be successfully decoded with the given character + """File can be successfully decoded with the given character encoding. May not be useful for encodings other than ASCII and UTF-8. """ @@ -370,9 +428,9 @@ return s +@predicate('eol(style)', callexisting=True) def eol(mctx, x): - """``eol(style)`` - File contains newlines of the given style (dos, unix, mac). Binary + """File contains newlines of the given style (dos, unix, mac). Binary files are excluded, files with mixed line endings match multiple styles. """ @@ -393,9 +451,9 @@ s.append(f) return s +@predicate('copied()') def copied(mctx, x): - """``copied()`` - File that is recorded as being copied. + """File that is recorded as being copied. """ # i18n: "copied" is a keyword getargs(x, 0, 0, _("copied takes no arguments")) @@ -406,9 +464,9 @@ s.append(f) return s +@predicate('subrepo([pattern])') def subrepo(mctx, x): - """``subrepo([pattern])`` - Subrepositories whose paths match the given pattern. + """Subrepositories whose paths match the given pattern. 
""" # i18n: "subrepo" is a keyword getargs(x, 0, 1, _("subrepo takes at most one argument")) @@ -429,29 +487,6 @@ else: return [sub for sub in sstate] -symbols = { - 'added': added, - 'binary': binary, - 'clean': clean, - 'copied': copied, - 'deleted': deleted, - 'encoding': encoding, - 'eol': eol, - 'exec': exec_, - 'grep': grep, - 'ignored': ignored, - 'hgignore': hgignore, - 'modified': modified, - 'portable': portable, - 'removed': removed, - 'resolved': resolved, - 'size': size, - 'symlink': symlink, - 'unknown': unknown, - 'unresolved': unresolved, - 'subrepo': subrepo, -} - methods = { 'string': stringset, 'symbol': stringset, @@ -469,6 +504,7 @@ self.ctx = ctx self.subset = subset self._status = status + self._existingenabled = False def status(self): return self._status def matcher(self, patterns): @@ -476,6 +512,7 @@ def filter(self, files): return [f for f in files if f in self.subset] def existing(self): + assert self._existingenabled, 'unexpected existing() invocation' if self._status is not None: removed = set(self._status[3]) unknown = set(self._status[4] + self._status[5]) @@ -497,21 +534,11 @@ return True return False -# filesets using matchctx.existing() -_existingcallers = [ - 'binary', - 'exec', - 'grep', - 'size', - 'symlink', -] - def getfileset(ctx, expr): tree = parse(expr) # do we need status info? - if (_intree(['modified', 'added', 'removed', 'deleted', - 'unknown', 'ignored', 'clean'], tree) or + if (_intree(_statuscallers, tree) or # Using matchctx.existing() on a workingctx requires us to check # for deleted files. (ctx.rev() is None and _intree(_existingcallers, tree))): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hbisect.py --- a/mercurial/hbisect.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hbisect.py Mon Jan 11 14:27:12 2016 -0600 @@ -11,7 +11,6 @@ from __future__ import absolute_import import collections -import os from .i18n import _ from .node import ( @@ -143,13 +142,12 @@ def load_state(repo): state = {'current': [], 'good': [], 'bad': [], 'skip': []} - if os.path.exists(repo.join("bisect.state")): - for l in repo.vfs("bisect.state"): - kind, node = l[:-1].split() - node = repo.lookup(node) - if kind not in state: - raise error.Abort(_("unknown bisect kind %s") % kind) - state[kind].append(node) + for l in repo.vfs.tryreadlines("bisect.state"): + kind, node = l[:-1].split() + node = repo.lookup(node) + if kind not in state: + raise error.Abort(_("unknown bisect kind %s") % kind) + state[kind].append(node) return state diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/help.py --- a/mercurial/help.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/help.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,14 +5,33 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from i18n import gettext, _ -import itertools, os, textwrap -import error -import extensions, revset, fileset, templatekw, templatefilters, filemerge -import templater -import encoding, util, minirst -import cmdutil -import hgweb.webcommands as webcommands +from __future__ import absolute_import + +import itertools +import os +import textwrap + +from .i18n import ( + _, + gettext, +) +from . 
import ( + cmdutil, + encoding, + error, + extensions, + filemerge, + fileset, + minirst, + revset, + templatefilters, + templatekw, + templater, + util, +) +from .hgweb import ( + webcommands, +) _exclkeywords = [ "(DEPRECATED)", @@ -27,11 +46,12 @@ '''return a text listing of the given extensions''' rst = [] if exts: - rst.append('\n%s\n\n' % header) for name, desc in sorted(exts.iteritems()): if not showdeprecated and any(w in desc for w in _exclkeywords): continue rst.append('%s:%s: %s\n' % (' ' * indent, name, desc)) + if rst: + rst.insert(0, '\n%s\n\n' % header) return rst def extshelp(ui): @@ -83,6 +103,13 @@ if notomitted: rst.append('\n\n.. container:: notomitted\n\n %s\n\n' % notomitted) +def filtercmd(ui, cmd, kw, doc): + if not ui.debugflag and cmd.startswith("debug") and kw != "debug": + return True + if not ui.verbose and doc and any(w in doc for w in _exclkeywords): + return True + return False + def topicmatch(ui, kw): """Return help topics matching kw. @@ -103,7 +130,7 @@ or lowercontains(header) or (callable(doc) and lowercontains(doc(ui)))): results['topics'].append((names[0], header)) - import commands # avoid cycle + from . import commands # avoid cycle for cmd, entry in commands.table.iteritems(): if len(entry) == 3: summary = entry[2] @@ -115,32 +142,37 @@ doclines = docs.splitlines() if doclines: summary = doclines[0] - cmdname = cmd.split('|')[0].lstrip('^') + cmdname = cmd.partition('|')[0].lstrip('^') + if filtercmd(ui, cmdname, kw, docs): + continue results['commands'].append((cmdname, summary)) for name, docs in itertools.chain( extensions.enabled(False).iteritems(), extensions.disabled().iteritems()): - # extensions.load ignores the UI argument - mod = extensions.load(None, name, '') - name = name.split('.')[-1] + mod = extensions.load(ui, name, '') + name = name.rpartition('.')[-1] if lowercontains(name) or lowercontains(docs): # extension docs are already translated results['extensions'].append((name, docs.splitlines()[0])) for cmd, entry in getattr(mod, 'cmdtable', {}).iteritems(): if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])): - cmdname = cmd.split('|')[0].lstrip('^') + cmdname = cmd.partition('|')[0].lstrip('^') if entry[0].__doc__: cmddoc = gettext(entry[0].__doc__).splitlines()[0] else: cmddoc = _('(no help text available)') + if filtercmd(ui, cmdname, kw, cmddoc): + continue results['extensioncommands'].append((cmdname, cmddoc)) return results -def loaddoc(topic): +def loaddoc(topic, subdir=None): """Return a delayed loader for help/topic.txt.""" def loader(ui): docdir = os.path.join(util.datapath, 'help') + if subdir: + docdir = os.path.join(docdir, subdir) path = os.path.join(docdir, topic + ".txt") doc = gettext(util.readfile(path)) for rewriter in helphooks.get(topic, []): @@ -149,6 +181,23 @@ return loader +internalstable = sorted([ + (['bundles'], _('container for exchange of repository data'), + loaddoc('bundles', subdir='internals')), + (['changegroups'], _('representation of revlog data'), + loaddoc('changegroups', subdir='internals')), + (['revlogs'], _('revision storage mechanism'), + loaddoc('revlogs', subdir='internals')), +]) + +def internalshelp(ui): + """Generate the index for the "internals" topic.""" + lines = [] + for names, header, doc in internalstable: + lines.append(' :%s: %s\n' % (names[0], header)) + + return ''.join(lines) + helptable = sorted([ (["config", "hgrc"], _("Configuration Files"), loaddoc('config')), (["dates"], _("Date Formats"), loaddoc('dates')), @@ -175,8 +224,15 @@ (["phases"], _("Working with 
Phases"), loaddoc('phases')), (['scripting'], _('Using Mercurial from scripts and automation'), loaddoc('scripting')), + (['internals'], _("Technical implementation topics"), + internalshelp), ]) +# Maps topics with sub-topics to a list of their sub-topics. +subtopics = { + 'internals': internalstable, +} + # Map topics to lists of callable taking the current topic help and # returning the updated version helphooks = {} @@ -226,15 +282,15 @@ addtopicsymbols('hgweb', '.. webcommandsmarker', webcommands.commands, dedent=True) -def help_(ui, name, unknowncmd=False, full=True, **opts): +def help_(ui, name, unknowncmd=False, full=True, subtopic=None, **opts): ''' Generate the help for 'name' as unformatted restructured text. If 'name' is None, describe the commands available. ''' - import commands # avoid cycle + from . import commands # avoid cycle - def helpcmd(name): + def helpcmd(name, subtopic=None): try: aliases, entry = cmdutil.findcmd(name, commands.table, strict=unknowncmd) @@ -318,7 +374,7 @@ return rst - def helplist(select=None): + def helplist(select=None, **opts): # list of commands if name == "shortlist": header = _('basic commands:\n\n') @@ -330,7 +386,7 @@ h = {} cmds = {} for c, e in commands.table.iteritems(): - f = c.split("|", 1)[0] + f = c.partition("|")[0] if select and not select(f): continue if (not select and name != 'shortlist' and @@ -339,10 +395,8 @@ if name == "shortlist" and not f.startswith("^"): continue f = f.lstrip("^") - if not ui.debugflag and f.startswith("debug") and name != "debug": - continue doc = e[0].__doc__ - if not ui.verbose and doc and any(w in doc for w in _exclkeywords): + if filtercmd(ui, f, name, doc): continue doc = gettext(doc) if not doc: @@ -366,7 +420,9 @@ else: rst.append(' :%s: %s\n' % (f, h[f])) - if not name: + ex = opts.get + anyopts = (ex('keyword') or not (ex('command') or ex('extension'))) + if not name and anyopts: exts = listexts(_('enabled extensions:'), extensions.enabled()) if exts: rst.append('\n') @@ -403,12 +459,20 @@ % (name and " " + name or "")) return rst - def helptopic(name): - for names, header, doc in helptable: - if name in names: - break - else: - raise error.UnknownCommand(name) + def helptopic(name, subtopic=None): + # Look for sub-topic entry first. 
+ header, doc = None, None + if subtopic and name in subtopics: + for names, header, doc in subtopics[name]: + if subtopic in names: + break + + if not header: + for names, header, doc in helptable: + if name in names: + break + else: + raise error.UnknownCommand(name) rst = [minirst.section(header)] @@ -431,7 +495,7 @@ pass return rst - def helpext(name): + def helpext(name, subtopic=None): try: mod = extensions.find(name) doc = gettext(mod.__doc__) or _('no help text available') @@ -445,7 +509,7 @@ head, tail = doc, "" else: head, tail = doc.split('\n', 1) - rst = [_('%s extension - %s\n\n') % (name.split('.')[-1], head)] + rst = [_('%s extension - %s\n\n') % (name.rpartition('.')[-1], head)] if tail: rst.extend(tail.splitlines(True)) rst.append('\n') @@ -460,20 +524,21 @@ ct = mod.cmdtable except AttributeError: ct = {} - modcmds = set([c.split('|', 1)[0] for c in ct]) + modcmds = set([c.partition('|')[0] for c in ct]) rst.extend(helplist(modcmds.__contains__)) else: rst.append(_('(use "hg help extensions" for information on enabling' ' extensions)\n')) return rst - def helpextcmd(name): + def helpextcmd(name, subtopic=None): cmd, ext, mod = extensions.disabledcmd(ui, name, ui.configbool('ui', 'strict')) doc = gettext(mod.__doc__).splitlines()[0] rst = listexts(_("'%s' is provided by the following " - "extension:") % cmd, {ext: doc}, indent=4) + "extension:") % cmd, {ext: doc}, indent=4, + showdeprecated=True) rst.append('\n') rst.append(_('(use "hg help extensions" for information on enabling ' 'extensions)\n')) @@ -482,8 +547,8 @@ rst = [] kw = opts.get('keyword') - if kw: - matches = topicmatch(ui, name) + if kw or name is None and any(opts[o] for o in opts): + matches = topicmatch(ui, name or '') helpareas = [] if opts.get('extension'): helpareas += [('extensions', _('Extensions'))] @@ -515,7 +580,7 @@ queries = (helptopic, helpcmd, helpext, helpextcmd) for f in queries: try: - rst = f(name) + rst = f(name, subtopic) break except error.UnknownCommand: pass @@ -530,6 +595,6 @@ # program name if not ui.quiet: rst = [_("Mercurial Distributed SCM\n"), '\n'] - rst.extend(helplist()) + rst.extend(helplist(None, **opts)) return ''.join(rst) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/help/config.txt --- a/mercurial/help/config.txt Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/help/config.txt Mon Jan 11 14:27:12 2016 -0600 @@ -11,8 +11,8 @@ See :hg:`help config.syntax` and :hg:`help config.files` for information about how and where to override things. -Format -====== +Structure +========= The configuration files use a simple ini-file format. A configuration file consists of sections, led by a ``[section]`` header and followed @@ -666,29 +666,45 @@ ``format`` ---------- -``usestore`` - Enable or disable the "store" repository format which improves - compatibility with systems that fold case or otherwise mangle - filenames. Enabled by default. Disabling this option will allow - you to store longer filenames in some situations at the expense of - compatibility and ensures that the on-disk format of newly created - repositories will be compatible with Mercurial before version 0.9.4. +``usegeneraldelta`` + Enable or disable the "generaldelta" repository format which improves + repository compression by allowing "revlog" to store delta against arbitrary + revision instead of the previous stored one. This provides significant + improvement for repositories with branches. + + Repositories with this on-disk format require Mercurial version 1.9. + + Enabled by default. 
+ +``dotencode`` + Enable or disable the "dotencode" repository format which enhances + the "fncache" repository format (which has to be enabled to use + dotencode) to avoid issues with filenames starting with ._ on + Mac OS X and spaces on Windows. + + Repositories with this on-disk format require Mercurial version 1.7. + + Enabled by default. ``usefncache`` Enable or disable the "fncache" repository format which enhances the "store" repository format (which has to be enabled to use fncache) to allow longer filenames and avoids using Windows - reserved names, e.g. "nul". Enabled by default. Disabling this - option ensures that the on-disk format of newly created - repositories will be compatible with Mercurial before version 1.1. + reserved names, e.g. "nul". + + Repositories with this on-disk format require Mercurial version 1.1. + + Enabled by default. -``dotencode`` - Enable or disable the "dotencode" repository format which enhances - the "fncache" repository format (which has to be enabled to use - dotencode) to avoid issues with filenames starting with ._ on - Mac OS X and spaces on Windows. Enabled by default. Disabling this - option ensures that the on-disk format of newly created - repositories will be compatible with Mercurial before version 1.7. +``usestore`` + Enable or disable the "store" repository format which improves + compatibility with systems that fold case or otherwise mangle + filenames. Disabling this option will allow you to store longer filenames + in some situations at the expense of compatibility. + + Repositories with this on-disk format require Mercurial version 0.9.4. + + Enabled by default. ``graph`` --------- @@ -726,7 +742,7 @@ hooks can be run for the same action by appending a suffix to the action. Overriding a site-wide hook can be done by changing its value or setting it to an empty string. Hooks can be prioritized -by adding a prefix of ``priority`` to the hook name on a new line +by adding a prefix of ``priority.`` to the hook name on a new line and setting the priority. The default priority is 0. Example ``.hg/hgrc``:: @@ -779,7 +795,7 @@ command line are passed as ``$HG_ARGS``. Parsed command line arguments are passed as ``$HG_PATS`` and ``$HG_OPTS``. These contain string representations of the data internally passed to . ``$HG_OPTS`` - is a dictionary of options (with unspecified options set to their + is a dictionary of options (with unspecified options set to their defaults). ``$HG_PATS`` is a list of arguments. If the hook returns failure, the command doesn't execute and Mercurial returns the failure code. @@ -971,6 +987,19 @@ Optional. Always use the proxy, even for localhost and any entries in ``http_proxy.no``. (default: False) +``merge`` +--------- + +This section specifies behavior during merges and updates. + +``checkunknown`` + Controls behavior when an unknown file on disk has the same name as a tracked + file in the changeset being merged or updated to, and has different + contents. Options are ``abort``, ``warn`` and ``ignore``. With ``abort``, + abort on such files. With ``warn``, warn on such files and back them up as + .orig. With ``ignore``, don't print a warning and back them up as + .orig. (default: ``abort``) + ``merge-patterns`` ------------------ @@ -1119,29 +1148,43 @@ ``paths`` --------- -Assigns symbolic names to repositories. The left side is the -symbolic name, and the right gives the directory or URL that is the -location of the repository. Default paths can be declared by setting -the following entries. 
+Assigns symbolic names and behavior to repositories. + +Options are symbolic names defining the URL or directory that is the +location of the repository. Example:: + + [paths] + my_server = https://example.com/my_repo + local_path = /home/me/repo + +These symbolic names can be used from the command line. To pull +from ``my_server``: :hg:`pull my_server`. To push to ``local_path``: +:hg:`push local_path`. + +Options containing colons (``:``) denote sub-options that can influence +behavior for that specific path. Example:: + + [paths] + my_server = https://example.com/my_path + my_server:pushurl = ssh://example.com/my_path + +The following sub-options can be defined: + +``pushurl`` + The URL to use for push operations. If not defined, the location + defined by the path's main entry is used. + +The following special named paths exist: ``default`` - Directory or URL to use when pulling if no source is specified. - (default: repository from which the current repository was cloned) + The URL or directory to use when no source or remote is specified. + + :hg:`clone` will automatically define this path to the location the + repository was cloned from. ``default-push`` - Optional. Directory or URL to use when pushing if no destination - is specified. - -Custom paths can be defined by assigning the path to a name that later can be -used from the command line. Example:: - - [paths] - my_path = http://example.com/path - -To push to the path defined in ``my_path`` run the command:: - - hg push my_path - + (deprecated) The URL or directory for the default :hg:`push` location. + ``default:pushurl`` should be used instead. ``phases`` ---------- @@ -1256,12 +1299,12 @@ Format of the progress bar. Valid entries for the format field are ``topic``, ``bar``, ``number``, - ``unit``, ``estimate``, speed, and item. item defaults to the last 20 - characters of the item, but this can be changed by adding either ``-`` - which would take the last num characters, or ``+`` for the first num - characters. + ``unit``, ``estimate``, ``speed``, and ``item``. ``item`` defaults to the + last 20 characters of the item, but this can be changed by adding either + ``-`` which would take the last num characters, or ``+`` for the + first num characters. - (default: Topic bar number estimate) + (default: topic bar number estimate) ``width`` If set, the maximum width of the progress information (that is, min(width, @@ -1311,6 +1354,35 @@ Instruct HTTP clients not to send request headers longer than this many bytes. (default: 1024) +``bundle1`` + Whether to allow clients to push and pull using the legacy bundle1 + exchange format. (default: True) + +``bundle1gd`` + Like ``bundle1`` but only used if the repository is using the + *generaldelta* storage format. (default: True) + +``bundle1.push`` + Whether to allow clients to push using the legacy bundle1 exchange + format. (default: True) + +``bundle1gd.push`` + Like ``bundle1.push`` but only used if the repository is using the + *generaldelta* storage format. (default: True) + +``bundle1.pull`` + Whether to allow clients to pull using the legacy bundle1 exchange + format. (default: True) + +``bundle1gd.pull`` + Like ``bundle1.pull`` but only used if the repository is using the + *generaldelta* storage format. (default: True) + + Large repositories using the *generaldelta* storage format should + consider setting this option because converting *generaldelta* + repositories to the exchange format required by the bundle1 data + format can consume a lot of CPU. 
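As a concrete (hypothetical) hgrc fragment for the server options documented above, an operator who wants to keep bundle1 available in general but refuse it for generaldelta repositories, where re-encoding is expensive, could use something like::

    [server]
    # consulted only when the repository uses the generaldelta format
    bundle1gd = False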
+ ``smtp`` -------- @@ -1446,6 +1518,10 @@ Encoding to try if it's not possible to decode the changelog using UTF-8. (default: ISO-8859-1) +``graphnodetemplate`` + The template used to print changeset nodes in an ASCII revision graph. + (default: ``{graphnode}``) + ``ignore`` A file to read per-user ignore patterns from. This file should be in the same format as a repository-wide .hgignore file. Filenames @@ -1488,6 +1564,10 @@ markers is different from the encoding of the merged files, serious problems may occur. +``origbackuppath`` + The path to a directory used to store generated .orig files. If the path is + not a directory, one will be created. + ``patch`` An optional external tool that ``hg import`` and some extensions will use for applying patches. By default Mercurial uses an @@ -1565,7 +1645,7 @@ username are expanded. (default: ``$EMAIL`` or ``username@hostname``. If the username in - hgrc is empty, e.g. if the system admin set ``username =`` in the + hgrc is empty, e.g. if the system admin set ``username =`` in the system hgrc, it has to be specified manually or in a different hgrc file) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/help/internals/bundles.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/help/internals/bundles.txt Mon Jan 11 14:27:12 2016 -0600 @@ -0,0 +1,97 @@ +Bundles +======= + +A bundle is a container for repository data. + +Bundles are used as standalone files as well as the interchange format +over the wire protocol used when two Mercurial peers communicate with +each other. + +Headers +------- + +Bundles produced since Mercurial 0.7 (September 2005) have a 4 byte +header identifying the major bundle type. The header always begins with +``HG`` and the follow 2 bytes indicate the bundle type/version. Some +bundle types have additional data after this 4 byte header. + +The following sections describe each bundle header/type. + +HG10 +---- + +``HG10`` headers indicate a *changegroup bundle*. This is the original +bundle format, so it is sometimes referred to as *bundle1*. It has been +present since version 0.7 (released September 2005). + +This header is followed by 2 bytes indicating the compression algorithm +used for data that follows. All subsequent data following this +compression identifier is compressed according to the algorithm/method +specified. + +Supported algorithms include the following. + +``BZ`` + *bzip2* compression. + + Bzip2 compressors emit a leading ``BZ`` header. Mercurial uses this + leading ``BZ`` as part of the bundle header. Therefore consumers + of bzip2 bundles need to *seed* the bzip2 decompressor with ``BZ`` or + seek the input stream back to the beginning of the algorithm component + of the bundle header so that decompressor input is valid. This behavior + is unique among supported compression algorithms. + + Supported since version 0.7 (released December 2006). + +``GZ`` + *zlib* compression. + + Supported since version 0.9.2 (released December 2006). + +``UN`` + *Uncompressed* or no compression. Unmodified changegroup data follows. + + Supported since version 0.9.2 (released December 2006). + +3rd party extensions may implement their own compression. However, no +authority reserves values for their compression algorithm identifiers. + +HG2X +---- + +``HG2X`` headers (where ``X`` is any value) denote a *bundle2* bundle. +Bundle2 bundles are a container format for various kinds of repository +data and capabilities, beyond changegroup data (which was the only data +supported by ``HG10`` bundles. 
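To make the bundle header layout above concrete, here is a minimal sketch of a reader that inspects only the 4-byte type header and, for ``HG10``, the 2-byte compression identifier (illustrative, not part of the patch; the function name is hypothetical and ``fh`` is any binary file object, with error handling omitted)::

    def sniffbundle(fh):
        magic = fh.read(4)
        if magic == 'HG10':
            compression = fh.read(2)   # 'BZ', 'GZ' or 'UN', as listed above
            return 'changegroup bundle (bundle1), compression %s' % compression
        if magic.startswith('HG2'):
            return 'bundle2 container (%s)' % magic
        return 'not a recognized Mercurial bundle'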
+ +``HG20`` is currently the only defined bundle2 version. + +The ``HG20`` format is not yet documented here. See the inline comments +in ``mercurial/exchange.py`` for now. + +Initial ``HG20`` support was added in Mercurial 3.0 (released May +2014). However, bundle2 bundles were hidden behind an experimental flag +until version 3.5 (released August 2015), when they were enabled in the +wire protocol. Various commands (including ``hg bundle``) did not +support generating bundle2 files until Mercurial 3.6 (released November +2015). + +HGS1 +---- + +*Experimental* + +A ``HGS1`` header indicates a *streaming clone bundle*. This is a bundle +that contains raw revlog data from a repository store. (Typically revlog +data is exchanged in the form of changegroups.) + +The purpose of *streaming clone bundles* are to *clone* repository data +very efficiently. + +The ``HGS1`` header is always followed by 2 bytes indicating a +compression algorithm of the data that follows. Only ``UN`` +(uncompressed data) is currently allowed. + +``HGS1UN`` support was added as an experimental feature in version 3.6 +(released November 2015) as part of the initial offering of the *clone +bundles* feature. diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/help/internals/changegroups.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/help/internals/changegroups.txt Mon Jan 11 14:27:12 2016 -0600 @@ -0,0 +1,157 @@ +Changegroups +============ + +Changegroups are representations of repository revlog data, specifically +the changelog, manifest, and filelogs. + +There are 3 versions of changegroups: ``1``, ``2``, and ``3``. From a +high-level, versions ``1`` and ``2`` are almost exactly the same, with +the only difference being a header on entries in the changeset +segment. Version ``3`` adds support for exchanging treemanifests and +includes revlog flags in the delta header. + +Changegroups consists of 3 logical segments:: + + +---------------------------------+ + | | | | + | changeset | manifest | filelogs | + | | | | + +---------------------------------+ + +The principle building block of each segment is a *chunk*. A *chunk* +is a framed piece of data:: + + +---------------------------------------+ + | | | + | length | data | + | (32 bits) | bytes | + | | | + +---------------------------------------+ + +Each chunk starts with a 32-bit big-endian signed integer indicating +the length of the raw data that follows. + +There is a special case chunk that has 0 length (``0x00000000``). We +call this an *empty chunk*. + +Delta Groups +------------ + +A *delta group* expresses the content of a revlog as a series of deltas, +or patches against previous revisions. + +Delta groups consist of 0 or more *chunks* followed by the *empty chunk* +to signal the end of the delta group:: + + +------------------------------------------------------------------------+ + | | | | | | + | chunk0 length | chunk0 data | chunk1 length | chunk1 data | 0x0 | + | (32 bits) | (various) | (32 bits) | (various) | (32 bits) | + | | | | | | + +------------------------------------------------------------+-----------+ + +Each *chunk*'s data consists of the following:: + + +-----------------------------------------+ + | | | | + | delta header | mdiff header | delta | + | (various) | (12 bytes) | (various) | + | | | | + +-----------------------------------------+ + +The *length* field is the byte length of the remaining 3 logical pieces +of data. The *delta* is a diff from an existing entry in the changelog. 
+ +The *delta header* is different between versions ``1``, ``2``, and +``3`` of the changegroup format. + +Version 1:: + + +------------------------------------------------------+ + | | | | | + | node | p1 node | p2 node | link node | + | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | + | | | | | + +------------------------------------------------------+ + +Version 2:: + + +------------------------------------------------------------------+ + | | | | | | + | node | p1 node | p2 node | base node | link node | + | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | + | | | | | | + +------------------------------------------------------------------+ + +Version 3:: + + +------------------------------------------------------------------------------+ + | | | | | | | + | node | p1 node | p2 node | base node | link node | flags | + | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (2 bytes) | + | | | | | | | + +------------------------------------------------------------------------------+ + +The *mdiff header* consists of 3 32-bit big-endian signed integers +describing offsets at which to apply the following delta content:: + + +-------------------------------------+ + | | | | + | offset | old length | new length | + | (32 bits) | (32 bits) | (32 bits) | + | | | | + +-------------------------------------+ + +In version 1, the delta is always applied against the previous node from +the changegroup or the first parent if this is the first entry in the +changegroup. + +In version 2, the delta base node is encoded in the entry in the +changegroup. This allows the delta to be expressed against any parent, +which can result in smaller deltas and more efficient encoding of data. + +Changeset Segment +----------------- + +The *changeset segment* consists of a single *delta group* holding +changelog data. It is followed by an *empty chunk* to denote the +boundary to the *manifests segment*. + +Manifest Segment +---------------- + +The *manifest segment* consists of a single *delta group* holding +manifest data. It is followed by an *empty chunk* to denote the boundary +to the *filelogs segment*. + +Filelogs Segment +---------------- + +The *filelogs* segment consists of multiple sub-segments, each +corresponding to an individual file whose data is being described:: + + +--------------------------------------+ + | | | | | + | filelog0 | filelog1 | filelog2 | ... | + | | | | | + +--------------------------------------+ + +In version ``3`` of the changegroup format, filelogs may include +directory logs when treemanifests are in use. directory logs are +identified by having a trailing '/' on their filename (see below). + +The final filelog sub-segment is followed by an *empty chunk* to denote +the end of the segment and the overall changegroup. + +Each filelog sub-segment consists of the following:: + + +------------------------------------------+ + | | | | + | filename size | filename | delta group | + | (32 bits) | (various) | (various) | + | | | | + +------------------------------------------+ + +That is, a *chunk* consisting of the filename (not terminated or padded) +followed by N chunks constituting the *delta group* for this file. 
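As a small illustration of the version 1 *delta header* layout described in the changegroup text above (a sketch, not part of the patch; ``data`` is assumed to be the payload of a single chunk)::

    def parsedeltaheader_v1(data):
        # four 20-byte fields: node, p1, p2, link node
        node, p1, p2, linknode = (data[0:20], data[20:40],
                                  data[40:60], data[60:80])
        delta = data[80:]   # mdiff header plus delta content, as shown above
        return node, p1, p2, linknode, delta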
+ diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/help/internals/revlogs.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/help/internals/revlogs.txt Mon Jan 11 14:27:12 2016 -0600 @@ -0,0 +1,193 @@ +Revisions Logs +============== + +Revision logs - or *revlogs* - are an append only data structure for +storing discrete entries, or *revisions*. They are the primary storage +mechanism of repository data. + +Revlogs effectively model a directed acyclic graph (DAG). Each node +has edges to 1 or 2 *parent* nodes. Each node contains metadata and +the raw value for that node. + +Revlogs consist of entries which have metadata and revision data. +Metadata includes the hash of the revision's content, sizes, and +links to its *parent* entries. The collective metadata is referred +to as the *index* and the revision data is the *data*. + +Revision data is stored as a series of compressed deltas against previous +revisions. + +Revlogs are written in an append-only fashion. We never need to rewrite +a file to insert nor do we need to remove data. Rolling back in-progress +writes can be performed by truncating files. Read locks can be avoided +using simple techniques. This means that references to other data in +the same revlog *always* refer to a previous entry. + +Revlogs can be modeled as 0-indexed arrays. The first revision is +revision #0 and the second is revision #1. The revision -1 is typically +used to mean *does not exist* or *not defined*. + +File Format +----------- + +A revlog begins with a 32-bit big endian integer holding version info +and feature flags. + +This integer is logically divided into 2 16-bit shorts. The least +significant half of the integer is the format/version short. The other +short holds feature flags that dictate behavior of the revlog. + +Only 1 bit of the format/version short is currently used. Remaining +bits are reserved for future use. + +The following values for the format/version short are defined: + +0 + The original revlog version. +1 + RevlogNG (*next generation*). It replaced version 0 when it was + implemented in 2006. + +The feature flags short consists of bit flags. Where 0 is the least +significant bit, the following bit offsets define flags: + +0 + Store revision data inline. +1 + Generaldelta encoding. + +2-15 + Reserved for future use. + +The following header values are common: + +00 00 00 01 + RevlogNG +00 01 00 01 + RevlogNG + inline +00 02 00 01 + RevlogNG + generaldelta +00 03 00 01 + RevlogNG + inline + generaldelta + +Following the 32-bit header is *index* data. Inlined revision data is possibly +located between index entries. More on this layout is described below. + +RevlogNG Format +--------------- + +RevlogNG (version 1) begins with an index describing the revisions in +the revlog. If the ``inline`` flag is set, revision data is stored inline, +or between index entries (as opposed to in a separate container). + +Each index entry is 64 bytes. The byte layout of each entry is as +follows, with byte 0 being the first byte (all data stored as big endian): + +0-5 (6 bytes) + Absolute offset of revision data from beginning of revlog. +6-7 (2 bytes) + Bit flags impacting revision behavior. +8-11 (4 bytes) + Compressed length of revision data / chunk as stored in revlog. +12-15 (4 bytes) + Uncompressed length of revision data / chunk. +16-19 (4 bytes) + Base or previous revision this revision's delta was produced against. + -1 means this revision holds full text (as opposed to a delta). 
+ For generaldelta repos, this is the previous revision in the delta + chain. For non-generaldelta repos, this is the base or first + revision in the delta chain. +20-23 (4 bytes) + A revision this revision is *linked* to. This allows a revision in + one revlog to be forever associated with a revision in another + revlog. For example, a file's revlog may point to the changelog + revision that introduced it. +24-27 (4 bytes) + Revision of 1st parent. -1 indicates no parent. +28-31 (4 bytes) + Revision of 2nd parent. -1 indicates no 2nd parent. +32-63 (32 bytes) + Hash of revision's full text. Currently, SHA-1 is used and only + the first 20 bytes of this field are used. The rest of the bytes + are ignored and should be stored as \0. + +If inline revision data is being stored, the compressed revision data +(of length from bytes offset 8-11 from the index entry) immediately +follows the index entry. There is no header on the revision data. There +is no padding between it and the index entries before and after. + +If revision data is not inline, then raw revision data is stored in a +separate byte container. The offsets from bytes 0-5 and the compressed +length from bytes 8-11 define how to access this data. + +Delta Chains +------------ + +Revision data is encoded as a chain of *chunks*. Each chain begins with +the compressed original full text for that revision. Each subsequent +*chunk* is a *delta* against the previous revision. We therefore call +these chains of chunks/deltas *delta chains*. + +The full text for a revision is reconstructed by loading the original +full text for the base revision of a *delta chain* and then applying +*deltas* until the target revision is reconstructed. + +*Delta chains* are limited in length so lookup time is bound. They are +limited to ~2x the length of the revision's data. The linear distance +between the base chunk and the final chunk is also limited so the +amount of read I/O to load all chunks in the delta chain is bound. + +Deltas and delta chains are either computed against the previous +revision in the revlog or another revision (almost certainly one of +the parents of the revision). Historically, deltas were computed against +the previous revision. The *generaldelta* revlog feature flag (enabled +by default in Mercurial 3.7) activates the mode where deltas are +computed against an arbitrary revision (almost certainly a parent revision). + +File Storage +------------ + +Revlogs logically consist of an index (metadata of entries) and +revision data. This data may be stored together in a single file or in +separate files. The mechanism used is indicated by the ``inline`` feature +flag on the revlog. + +Mercurial's behavior is to use inline storage until a revlog reaches a +certain size, at which point it will be converted to non-inline. The +reason there is a size limit on inline storage is to establish an upper +bound on how much data must be read to load the index. It would be a waste +to read tens or hundreds of extra megabytes of data just to access the +index data. + +The actual layout of revlog files on disk is governed by the repository's +*store format*. Typically, a ``.i`` file represents the index revlog +(possibly containing inline data) and a ``.d`` file holds the revision data. + +Revision Entries +---------------- + +Revision entries consist of an optional 1 byte header followed by an +encoding of the revision data. The headers are as follows: + +\0 (0x00) + Revision data is the entirety of the entry, including this header. 
+u (0x75) + Raw revision data follows. +x (0x78) + zlib (RFC 1950) data. + + The 0x78 value is actually the first byte of the zlib header (CMF byte). + +Hash Computation +---------------- + +The hash of the revision is stored in the index and is used both as a primary +key and for data integrity verification. + +Currently, SHA-1 is the only supported hashing algorithm. To obtain the SHA-1 +hash of a revision: + +1. Hash the parent nodes +2. Hash the fulltext of the revision + +The 20 byte node ids of the parents are fed into the hasher in ascending order. \ No newline at end of file diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/help/phases.txt --- a/mercurial/help/phases.txt Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/help/phases.txt Mon Jan 11 14:27:12 2016 -0600 @@ -28,6 +28,12 @@ Phases can also be manually manipulated with the :hg:`phase` command if needed. See :hg:`help -v phase` for examples. +To make yours commits secret by default, put this in your +configuration file:: + + [phases] + new-commit = secret + Phases and servers ================== diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hg.py --- a/mercurial/hg.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hg.py Mon Jan 11 14:27:12 2016 -0600 @@ -52,7 +52,7 @@ if not hashbranch and not branches: x = revs or None if util.safehasattr(revs, 'first'): - y = revs.first() + y = revs.first() elif revs: y = revs[0] else: @@ -235,13 +235,7 @@ destvfs.write('sharedpath', sharedpath) r = repository(ui, destwvfs.base) - - default = srcrepo.ui.config('paths', 'default') - if default: - fp = r.vfs("hgrc", "w", text=True) - fp.write("[paths]\n") - fp.write("default = %s\n" % default) - fp.close() + postshare(srcrepo, r, bookmarks=bookmarks) if update: r.ui.status(_("updating working directory\n")) @@ -257,8 +251,24 @@ continue _update(r, uprev) +def postshare(sourcerepo, destrepo, bookmarks=True): + """Called after a new shared repo is created. + + The new repo only has a requirements file and pointer to the source. + This function configures additional shared data. + + Extensions can wrap this function and write additional entries to + destrepo/.hg/shared to indicate additional pieces of data to be shared. 
+ """ + default = sourcerepo.ui.config('paths', 'default') + if default: + fp = destrepo.vfs("hgrc", "w", text=True) + fp.write("[paths]\n") + fp.write("default = %s\n" % default) + fp.close() + if bookmarks: - fp = r.vfs('shared', 'w') + fp = destrepo.vfs('shared', 'w') fp.write('bookmarks\n') fp.close() @@ -546,13 +556,22 @@ "support clone by revision")) revs = [srcpeer.lookup(r) for r in rev] checkout = revs[0] - if destpeer.local(): + local = destpeer.local() + if local: if not stream: if pull: stream = False else: stream = None - destpeer.local().clone(srcpeer, heads=revs, stream=stream) + # internal config: ui.quietbookmarkmove + quiet = local.ui.backupconfig('ui', 'quietbookmarkmove') + try: + local.ui.setconfig( + 'ui', 'quietbookmarkmove', True, 'clone') + exchange.pull(local, srcpeer, revs, + streamclonerequested=stream) + finally: + local.ui.restoreconfig(quiet) elif srcrepo: exchange.push(srcrepo, destpeer, revs=revs, bookmarks=srcrepo._bookmarks.keys()) @@ -618,7 +637,9 @@ srcpeer.close() return srcpeer, destpeer -def _showstats(repo, stats): +def _showstats(repo, stats, quietempty=False): + if quietempty and not any(stats): + return repo.ui.status(_("%d files updated, %d files merged, " "%d files removed, %d files unresolved\n") % stats) @@ -628,13 +649,13 @@ When overwrite is set, changes are clobbered, merged else returns stats (see pydoc mercurial.merge.applyupdates)""" - return mergemod.update(repo, node, False, overwrite, None, + return mergemod.update(repo, node, False, overwrite, labels=['working copy', 'destination']) -def update(repo, node): +def update(repo, node, quietempty=False): """update the working directory to node, merging linear changes""" stats = updaterepo(repo, node, False) - _showstats(repo, stats) + _showstats(repo, stats, quietempty) if stats[3]: repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n")) return stats[3] > 0 @@ -642,18 +663,18 @@ # naming conflict in clone() _update = update -def clean(repo, node, show_stats=True): +def clean(repo, node, show_stats=True, quietempty=False): """forcibly switch the working directory to node, clobbering changes""" stats = updaterepo(repo, node, True) util.unlinkpath(repo.join('graftstate'), ignoremissing=True) if show_stats: - _showstats(repo, stats) + _showstats(repo, stats, quietempty) return stats[3] > 0 def merge(repo, node, force=None, remind=True): """Branch merge with node, resolving changes. Return true if any unresolved conflicts.""" - stats = mergemod.update(repo, node, True, force, False) + stats = mergemod.update(repo, node, True, force) _showstats(repo, stats) if stats[3]: repo.ui.status(_("use 'hg resolve' to retry unresolved file merges " diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/__init__.py --- a/mercurial/hgweb/__init__.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/__init__.py Mon Jan 11 14:27:12 2016 -0600 @@ -6,8 +6,22 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import + import os -import hgweb_mod, hgwebdir_mod + +from ..i18n import _ + +from .. import ( + error, + util, +) + +from . 
import ( + hgweb_mod, + hgwebdir_mod, + server, +) def hgweb(config, name=None, baseui=None): '''create an hgweb wsgi object @@ -29,3 +43,83 @@ def hgwebdir(config, baseui=None): return hgwebdir_mod.hgwebdir(config, baseui=baseui) +class httpservice(object): + def __init__(self, ui, app, opts): + self.ui = ui + self.app = app + self.opts = opts + + def init(self): + util.setsignalhandler() + self.httpd = server.create_server(self.ui, self.app) + + if self.opts['port'] and not self.ui.verbose: + return + + if self.httpd.prefix: + prefix = self.httpd.prefix.strip('/') + '/' + else: + prefix = '' + + port = ':%d' % self.httpd.port + if port == ':80': + port = '' + + bindaddr = self.httpd.addr + if bindaddr == '0.0.0.0': + bindaddr = '*' + elif ':' in bindaddr: # IPv6 + bindaddr = '[%s]' % bindaddr + + fqaddr = self.httpd.fqaddr + if ':' in fqaddr: + fqaddr = '[%s]' % fqaddr + if self.opts['port']: + write = self.ui.status + else: + write = self.ui.write + write(_('listening at http://%s%s/%s (bound to %s:%d)\n') % + (fqaddr, port, prefix, bindaddr, self.httpd.port)) + self.ui.flush() # avoid buffering of status message + + def run(self): + self.httpd.serve_forever() + +def createservice(ui, repo, opts): + # this way we can check if something was given in the command-line + if opts.get('port'): + opts['port'] = util.getport(opts.get('port')) + + alluis = set([ui]) + if repo: + baseui = repo.baseui + alluis.update([repo.baseui, repo.ui]) + else: + baseui = ui + webconf = opts.get('web_conf') or opts.get('webdir_conf') + if webconf: + # load server settings (e.g. web.port) to "copied" ui, which allows + # hgwebdir to reload webconf cleanly + servui = ui.copy() + servui.readconfig(webconf, sections=['web']) + alluis.add(servui) + else: + servui = ui + + optlist = ("name templates style address port prefix ipv6" + " accesslog errorlog certificate encoding") + for o in optlist.split(): + val = opts.get(o, '') + if val in (None, ''): # should check against default options instead + continue + for u in alluis: + u.setconfig("web", o, val, 'serve') + + if webconf: + app = hgwebdir_mod.hgwebdir(webconf, baseui=baseui) + else: + if not repo: + raise error.RepoError(_("there is no Mercurial repository" + " here (.hg not found)")) + app = hgweb_mod.hgweb(repo, baseui=baseui) + return httpservice(servui, app, opts) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/common.py --- a/mercurial/hgweb/common.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/common.py Mon Jan 11 14:27:12 2016 -0600 @@ -6,7 +6,12 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import errno, mimetypes, os +from __future__ import absolute_import + +import BaseHTTPServer +import errno +import mimetypes +import os HTTP_OK = 200 HTTP_NOT_MODIFIED = 304 @@ -102,8 +107,7 @@ raise AttributeError def _statusmessage(code): - from BaseHTTPServer import BaseHTTPRequestHandler - responses = BaseHTTPRequestHandler.responses + responses = BaseHTTPServer.BaseHTTPRequestHandler.responses return responses.get(code, ('Error', 'Unknown error'))[0] def statusmessage(code, message=None): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/hgweb_mod.py --- a/mercurial/hgweb/hgweb_mod.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/hgweb_mod.py Mon Jan 11 14:27:12 2016 -0600 @@ -6,15 +6,41 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
+from __future__ import absolute_import + import contextlib import os -from mercurial import ui, hg, hook, error, encoding, templater, util, repoview -from mercurial.templatefilters import websub -from common import ErrorResponse, permhooks, caching -from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST -from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR -from request import wsgirequest -import webcommands, protocol, webutil + +from .common import ( + ErrorResponse, + HTTP_BAD_REQUEST, + HTTP_NOT_FOUND, + HTTP_NOT_MODIFIED, + HTTP_OK, + HTTP_SERVER_ERROR, + caching, + permhooks, +) +from .request import wsgirequest + +from .. import ( + encoding, + error, + hg, + hook, + repoview, + templatefilters, + templater, + ui as uimod, + util, +) + +from . import ( + protocol, + webcommands, + webutil, + wsgicgi, +) perms = { 'changegroup': 'pull', @@ -158,7 +184,7 @@ or req.url.strip('/') or self.repo.root) def websubfilter(text): - return websub(text, self.websubtable) + return templatefilters.websub(text, self.websubtable) # create the templater @@ -195,7 +221,7 @@ if baseui: u = baseui.copy() else: - u = ui.ui() + u = uimod.ui() r = hg.repository(u, repo) else: # we trust caller to give us a private copy @@ -260,7 +286,6 @@ if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): raise RuntimeError("This function is only intended to be " "called while running as a CGI script.") - import mercurial.hgweb.wsgicgi as wsgicgi wsgicgi.launch(self) def __call__(self, env, respond): @@ -304,8 +329,8 @@ parts = parts[len(repo_parts):] query = '/'.join(parts) else: - query = req.env['QUERY_STRING'].split('&', 1)[0] - query = query.split(';', 1)[0] + query = req.env['QUERY_STRING'].partition('&')[0] + query = query.partition(';')[0] # process this if it's a protocol request # protocol bits don't need to create any URLs diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/hgwebdir_mod.py --- a/mercurial/hgweb/hgwebdir_mod.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/hgwebdir_mod.py Mon Jan 11 14:27:12 2016 -0600 @@ -6,15 +6,42 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import os, re, time -from mercurial.i18n import _ -from mercurial import ui, hg, scmutil, util, templater -from mercurial import error, encoding -from common import ErrorResponse, get_mtime, staticfile, paritygen, ismember, \ - get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR -from hgweb_mod import hgweb, makebreadcrumb -from request import wsgirequest -import webutil +from __future__ import absolute_import + +import os +import re +import time + +from ..i18n import _ + +from .common import ( + ErrorResponse, + HTTP_NOT_FOUND, + HTTP_OK, + HTTP_SERVER_ERROR, + get_contact, + get_mtime, + ismember, + paritygen, + staticfile, +) +from .request import wsgirequest + +from .. import ( + encoding, + error, + hg, + scmutil, + templater, + ui as uimod, + util, +) + +from . 
import ( + hgweb_mod, + webutil, + wsgicgi, +) def cleannames(items): return [(util.pconvert(name).strip('/'), path) for name, path in items] @@ -108,7 +135,7 @@ if self.baseui: u = self.baseui.copy() else: - u = ui.ui() + u = uimod.ui() u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') u.setconfig('ui', 'nontty', 'true', 'hgwebdir') # displaying bundling progress bar while serving feels wrong and may @@ -161,7 +188,6 @@ if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): raise RuntimeError("This function is only intended to be " "called while running as a CGI script.") - import mercurial.hgweb.wsgicgi as wsgicgi wsgicgi.launch(self) def __call__(self, env, respond): @@ -231,7 +257,7 @@ try: # ensure caller gets private copy of ui repo = hg.repository(self.ui.copy(), real) - return hgweb(repo).run_wsgi(req) + return hgweb_mod.hgweb(repo).run_wsgi(req) except IOError as inst: msg = inst.strerror raise ErrorResponse(HTTP_SERVER_ERROR, msg) @@ -426,7 +452,7 @@ self.updatereqenv(req.env) return tmpl("index", entries=entries, subdir=subdir, - pathdef=makebreadcrumb('/' + subdir, self.prefix), + pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), sortcolumn=sortcolumn, descending=descending, **dict(sort)) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/protocol.py --- a/mercurial/hgweb/protocol.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/protocol.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,9 +5,21 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import cgi, cStringIO, zlib, urllib -from mercurial import util, wireproto -from common import HTTP_OK +from __future__ import absolute_import + +import cStringIO +import cgi +import urllib +import zlib + +from .common import ( + HTTP_OK, +) + +from .. import ( + util, + wireproto, +) HGTYPE = 'application/mercurial-0.1' HGERRTYPE = 'application/hg-error' diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/request.py --- a/mercurial/hgweb/request.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/request.py Mon Jan 11 14:27:12 2016 -0600 @@ -6,9 +6,21 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import socket, cgi, errno -from mercurial import util -from common import ErrorResponse, statusmessage, HTTP_NOT_MODIFIED +from __future__ import absolute_import + +import cgi +import errno +import socket + +from .common import ( + ErrorResponse, + HTTP_NOT_MODIFIED, + statusmessage, +) + +from .. import ( + util, +) shortcuts = { 'cl': [('cmd', ['changelog']), ('rev', None)], @@ -80,7 +92,7 @@ if self._start_response is not None: self.headers.append(('Content-Type', type)) if filename: - filename = (filename.split('/')[-1] + filename = (filename.rpartition('/')[-1] .replace('\\', '\\\\').replace('"', '\\"')) self.headers.append(('Content-Disposition', 'inline; filename="%s"' % filename)) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/server.py --- a/mercurial/hgweb/server.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/server.py Mon Jan 11 14:27:12 2016 -0600 @@ -6,10 +6,27 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
-import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback -from mercurial import util, error -from mercurial.hgweb import common -from mercurial.i18n import _ +from __future__ import absolute_import + +import BaseHTTPServer +import SocketServer +import errno +import os +import socket +import sys +import traceback +import urllib + +from ..i18n import _ + +from .. import ( + error, + util, +) + +from . import ( + common, +) def _splitURI(uri): """Return path and query that has been split from uri @@ -197,47 +214,6 @@ self.wfile.write('0\r\n\r\n') self.wfile.flush() -class _httprequesthandleropenssl(_httprequesthandler): - """HTTPS handler based on pyOpenSSL""" - - url_scheme = 'https' - - @staticmethod - def preparehttpserver(httpserver, ssl_cert): - try: - import OpenSSL - OpenSSL.SSL.Context - except ImportError: - raise error.Abort(_("SSL support is unavailable")) - ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD) - ctx.use_privatekey_file(ssl_cert) - ctx.use_certificate_file(ssl_cert) - sock = socket.socket(httpserver.address_family, httpserver.socket_type) - httpserver.socket = OpenSSL.SSL.Connection(ctx, sock) - httpserver.server_bind() - httpserver.server_activate() - - def setup(self): - self.connection = self.request - self.rfile = socket._fileobject(self.request, "rb", self.rbufsize) - self.wfile = socket._fileobject(self.request, "wb", self.wbufsize) - - def do_write(self): - import OpenSSL - try: - _httprequesthandler.do_write(self) - except OpenSSL.SSL.SysCallError as inst: - if inst.args[0] != errno.EPIPE: - raise - - def handle_one_request(self): - import OpenSSL - try: - _httprequesthandler.handle_one_request(self) - except (OpenSSL.SSL.SysCallError, OpenSSL.SSL.ZeroReturnError): - self.close_connection = True - pass - class _httprequesthandlerssl(_httprequesthandler): """HTTPS handler based on Python's ssl module""" @@ -260,8 +236,8 @@ self.wfile = socket._fileobject(self.request, "wb", self.wbufsize) try: - from threading import activeCount - activeCount() # silence pyflakes + import threading + threading.activeCount() # silence pyflakes and bypass demandimport _mixin = SocketServer.ThreadingMixIn except ImportError: if util.safehasattr(os, "fork"): @@ -311,10 +287,7 @@ def create_server(ui, app): if ui.config('web', 'certificate'): - if sys.version_info >= (2, 6): - handler = _httprequesthandlerssl - else: - handler = _httprequesthandleropenssl + handler = _httprequesthandlerssl else: handler = _httprequesthandler diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/webcommands.py --- a/mercurial/hgweb/webcommands.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/webcommands.py Mon Jan 11 14:27:12 2016 -0600 @@ -5,18 +5,43 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
-import os, mimetypes, re, cgi, copy -import webutil -from mercurial import error, encoding, archival, templater, templatefilters -from mercurial.node import short, hex -from mercurial import util -from common import paritygen, staticfile, get_contact, ErrorResponse -from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND -from mercurial import graphmod, patch -from mercurial import scmutil -from mercurial.i18n import _ -from mercurial.error import ParseError, RepoLookupError, Abort -from mercurial import revset +from __future__ import absolute_import + +import cgi +import copy +import mimetypes +import os +import re + +from ..i18n import _ +from ..node import hex, short + +from .common import ( + ErrorResponse, + HTTP_FORBIDDEN, + HTTP_NOT_FOUND, + HTTP_OK, + get_contact, + paritygen, + staticfile, +) + +from .. import ( + archival, + encoding, + error, + graphmod, + patch, + revset, + scmutil, + templatefilters, + templater, + util, +) + +from . import ( + webutil, +) __all__ = [] commands = {} @@ -120,20 +145,10 @@ file=f, path=webutil.up(f), text=lines(), - rev=fctx.rev(), symrev=webutil.symrevorshortnode(req, fctx), - node=fctx.hex(), - author=fctx.user(), - date=fctx.date(), - desc=fctx.description(), - extra=fctx.extra(), - branch=webutil.nodebranchnodefault(fctx), - parent=webutil.parents(fctx), - child=webutil.children(fctx), rename=webutil.renamelink(fctx), - tags=webutil.nodetagsdict(web.repo, fctx.node()), - bookmarks=webutil.nodebookmarksdict(web.repo, fctx.node()), - permissions=fctx.manifest().flags(f)) + permissions=fctx.manifest().flags(f), + **webutil.commonentry(web.repo, fctx)) @webcommand('file') def file(web, req, tmpl): @@ -225,7 +240,7 @@ revdef = 'reverse(%s)' % query try: tree = revset.parse(revdef) - except ParseError: + except error.ParseError: # can't parse to a revset tree return MODE_KEYWORD, query @@ -249,7 +264,8 @@ # RepoLookupError: no such revision, e.g. in 'revision:' # Abort: bookmark/tag not exists # LookupError: ambiguous identifier, e.g. 
in '(bc)' on a large repo - except (ParseError, RepoLookupError, Abort, LookupError): + except (error.ParseError, error.RepoLookupError, error.Abort, + LookupError): return MODE_KEYWORD, query def changelist(**map): @@ -263,20 +279,9 @@ yield tmpl('searchentry', parity=parity.next(), - author=ctx.user(), - parent=webutil.parents(ctx), - child=webutil.children(ctx), changelogtag=showtags, - desc=ctx.description(), - extra=ctx.extra(), - date=ctx.date(), files=files, - rev=ctx.rev(), - node=hex(n), - tags=webutil.nodetagsdict(web.repo, n), - bookmarks=webutil.nodebookmarksdict(web.repo, n), - inbranch=webutil.nodeinbranch(web.repo, ctx), - branches=webutil.nodebranchdict(web.repo, ctx)) + **webutil.commonentry(web.repo, ctx)) if count >= revcount: break @@ -546,20 +551,14 @@ "basename": d} return tmpl("manifest", - rev=ctx.rev(), symrev=symrev, - node=hex(node), path=abspath, up=webutil.up(abspath), upparity=parity.next(), fentries=filelist, dentries=dirlist, archives=web.archivelist(hex(node)), - tags=webutil.nodetagsdict(web.repo, node), - bookmarks=webutil.nodebookmarksdict(web.repo, node), - branch=webutil.nodebranchnodefault(ctx), - inbranch=webutil.nodeinbranch(web.repo, ctx), - branches=webutil.nodebranchdict(web.repo, ctx)) + **webutil.commonentry(web.repo, ctx)) @webcommand('tags') def tags(web, req, tmpl): @@ -693,22 +692,11 @@ revs = web.repo.changelog.revs(start, end - 1) for i in revs: ctx = web.repo[i] - n = ctx.node() - hn = hex(n) l.append(tmpl( - 'shortlogentry', + 'shortlogentry', parity=parity.next(), - author=ctx.user(), - desc=ctx.description(), - extra=ctx.extra(), - date=ctx.date(), - rev=i, - node=hn, - tags=webutil.nodetagsdict(web.repo, n), - bookmarks=webutil.nodebookmarksdict(web.repo, n), - inbranch=webutil.nodeinbranch(web.repo, ctx), - branches=webutil.nodebranchdict(web.repo, ctx))) + **webutil.commonentry(web.repo, ctx))) l.reverse() yield l @@ -753,12 +741,8 @@ raise if fctx is not None: - n = fctx.node() path = fctx.path() ctx = fctx.changectx() - else: - n = ctx.node() - # path already defined in except clause parity = paritygen(web.stripecount) style = web.config('web', 'style', 'paper') @@ -766,7 +750,7 @@ style = req.form['style'][0] diffs = webutil.diffs(web.repo, tmpl, ctx, None, [path], parity, style) - if fctx: + if fctx is not None: rename = webutil.renamelink(fctx) ctx = fctx else: @@ -774,20 +758,10 @@ ctx = ctx return tmpl("filediff", file=path, - node=hex(n), - rev=ctx.rev(), symrev=webutil.symrevorshortnode(req, ctx), - date=ctx.date(), - desc=ctx.description(), - extra=ctx.extra(), - author=ctx.user(), rename=rename, - branch=webutil.nodebranchnodefault(ctx), - parent=webutil.parents(ctx), - child=webutil.children(ctx), - tags=webutil.nodetagsdict(web.repo, n), - bookmarks=webutil.nodebookmarksdict(web.repo, n), - diff=diffs) + diff=diffs, + **webutil.commonentry(web.repo, ctx)) diff = webcommand('diff')(filediff) @@ -812,7 +786,6 @@ if 'file' not in req.form: raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') path = webutil.cleanpath(web.repo, req.form['file'][0]) - rename = path in ctx and webutil.renamelink(ctx[path]) or [] parsecontext = lambda v: v == 'full' and -1 or int(v) if 'context' in req.form: @@ -828,6 +801,7 @@ return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))] return f.data().splitlines() + fctx = None parent = ctx.p1() leftrev = parent.rev() leftnode = parent.node() @@ -843,30 +817,26 @@ leftlines = filelines(pfctx) else: rightlines = () - fctx = ctx.parents()[0][path] - leftlines = filelines(fctx) + pfctx = 
ctx.parents()[0][path] + leftlines = filelines(pfctx) comparison = webutil.compare(tmpl, context, leftlines, rightlines) + if fctx is not None: + rename = webutil.renamelink(fctx) + ctx = fctx + else: + rename = [] + ctx = ctx return tmpl('filecomparison', file=path, - node=hex(ctx.node()), - rev=ctx.rev(), symrev=webutil.symrevorshortnode(req, ctx), - date=ctx.date(), - desc=ctx.description(), - extra=ctx.extra(), - author=ctx.user(), rename=rename, - branch=webutil.nodebranchnodefault(ctx), - parent=webutil.parents(fctx), - child=webutil.children(fctx), - tags=webutil.nodetagsdict(web.repo, ctx.node()), - bookmarks=webutil.nodebookmarksdict(web.repo, ctx.node()), leftrev=leftrev, leftnode=hex(leftnode), rightrev=rightrev, rightnode=hex(rightnode), - comparison=comparison) + comparison=comparison, + **webutil.commonentry(web.repo, ctx)) @webcommand('annotate') def annotate(web, req, tmpl): @@ -918,20 +888,10 @@ file=f, annotate=annotate, path=webutil.up(f), - rev=fctx.rev(), symrev=webutil.symrevorshortnode(req, fctx), - node=fctx.hex(), - author=fctx.user(), - date=fctx.date(), - desc=fctx.description(), - extra=fctx.extra(), rename=webutil.renamelink(fctx), - branch=webutil.nodebranchnodefault(fctx), - parent=webutil.parents(fctx), - child=webutil.children(fctx), - tags=webutil.nodetagsdict(web.repo, fctx.node()), - bookmarks=webutil.nodebookmarksdict(web.repo, fctx.node()), - permissions=fctx.manifest().flags(f)) + permissions=fctx.manifest().flags(f), + **webutil.commonentry(web.repo, fctx)) @webcommand('filelog') def filelog(web, req, tmpl): @@ -993,23 +953,12 @@ for i in revs: iterfctx = fctx.filectx(i) - l.append({"parity": parity.next(), - "filerev": i, - "file": f, - "node": iterfctx.hex(), - "author": iterfctx.user(), - "date": iterfctx.date(), - "rename": webutil.renamelink(iterfctx), - "parent": webutil.parents(iterfctx), - "child": webutil.children(iterfctx), - "desc": iterfctx.description(), - "extra": iterfctx.extra(), - "tags": webutil.nodetagsdict(repo, iterfctx.node()), - "bookmarks": webutil.nodebookmarksdict( - repo, iterfctx.node()), - "branch": webutil.nodebranchnodefault(iterfctx), - "inbranch": webutil.nodeinbranch(repo, iterfctx), - "branches": webutil.nodebranchdict(repo, iterfctx)}) + l.append(dict( + parity=parity.next(), + filerev=i, + file=f, + rename=webutil.renamelink(iterfctx), + **webutil.commonentry(repo, iterfctx))) for e in reversed(l): yield e @@ -1018,11 +967,16 @@ revnav = webutil.filerevnav(web.repo, fctx.path()) nav = revnav.gen(end - 1, revcount, count) - return tmpl("filelog", file=f, node=fctx.hex(), nav=nav, + return tmpl("filelog", + file=f, + nav=nav, symrev=webutil.symrevorshortnode(req, fctx), entries=entries, latestentry=latestentry, - revcount=revcount, morevars=morevars, lessvars=lessvars) + revcount=revcount, + morevars=morevars, + lessvars=lessvars, + **webutil.commonentry(web.repo, fctx)) @webcommand('archive') def archive(web, req, tmpl): @@ -1248,7 +1202,7 @@ def _getdoc(e): doc = e[0].__doc__ if doc: - doc = _(doc).split('\n')[0] + doc = _(doc).partition('\n')[0] else: doc = _('(no help text available)') return doc @@ -1268,8 +1222,7 @@ The ``help`` template will be rendered when requesting help for a topic. ``helptopics`` will be rendered for the index of help topics. """ - from mercurial import commands # avoid cycle - from mercurial import help as helpmod # avoid cycle + from .. 
import commands, help as helpmod # avoid cycle topicname = req.form.get('node', [None])[0] if not topicname: @@ -1278,7 +1231,7 @@ yield {'topic': entries[0], 'summary': summary} early, other = [], [] - primary = lambda s: s.split('|')[0] + primary = lambda s: s.partition('|')[0] for c, e in commands.table.iteritems(): doc = _getdoc(e) if 'DEPRECATED' in doc or c.startswith('debug'): @@ -1303,10 +1256,35 @@ return tmpl('helptopics', topics=topics, earlycommands=earlycommands, othercommands=othercommands, title='Index') + # Render an index of sub-topics. + if topicname in helpmod.subtopics: + topics = [] + for entries, summary, _doc in helpmod.subtopics[topicname]: + topics.append({ + 'topic': '%s.%s' % (topicname, entries[0]), + 'basename': entries[0], + 'summary': summary, + }) + + return tmpl('helptopics', topics=topics, title=topicname, + subindex=True) + u = webutil.wsgiui() u.verbose = True + + # Render a page from a sub-topic. + if '.' in topicname: + # TODO implement support for rendering sections, like + # `hg help` works. + topic, subtopic = topicname.split('.', 1) + if topic not in helpmod.subtopics: + raise ErrorResponse(HTTP_NOT_FOUND) + else: + topic = topicname + subtopic = None + try: - doc = helpmod.help_(u, topicname) + doc = helpmod.help_(u, topic, subtopic=subtopic) except error.UnknownCommand: raise ErrorResponse(HTTP_NOT_FOUND) return tmpl('help', topic=topicname, doc=doc) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/webutil.py --- a/mercurial/hgweb/webutil.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/webutil.py Mon Jan 11 14:27:12 2016 -0600 @@ -6,15 +6,32 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -import os, copy +from __future__ import absolute_import + +import copy +import difflib +import os import re -from mercurial import match, patch, error, ui, util, pathutil, context -from mercurial.i18n import _ -from mercurial.node import hex, nullid, short -from mercurial.templatefilters import revescape -from common import ErrorResponse, paritygen -from common import HTTP_NOT_FOUND -import difflib + +from ..i18n import _ +from ..node import hex, nullid, short + +from .common import ( + ErrorResponse, + HTTP_NOT_FOUND, + paritygen, +) + +from .. 
import ( + context, + error, + match, + patch, + pathutil, + templatefilters, + ui as uimod, + util, +) def up(p): if p[0] != "/": @@ -124,20 +141,28 @@ def hex(self, rev): return hex(self._changelog.node(self._revlog.linkrev(rev))) +class _siblings(object): + def __init__(self, siblings=[], hiderev=None): + self.siblings = [s for s in siblings if s.node() != nullid] + if len(self.siblings) == 1 and self.siblings[0].rev() == hiderev: + self.siblings = [] -def _siblings(siblings=[], hiderev=None): - siblings = [s for s in siblings if s.node() != nullid] - if len(siblings) == 1 and siblings[0].rev() == hiderev: - return - for s in siblings: - d = {'node': s.hex(), 'rev': s.rev()} - d['user'] = s.user() - d['date'] = s.date() - d['description'] = s.description() - d['branch'] = s.branch() - if util.safehasattr(s, 'path'): - d['file'] = s.path() - yield d + def __iter__(self): + for s in self.siblings: + d = { + 'node': s.hex(), + 'rev': s.rev(), + 'user': s.user(), + 'date': s.date(), + 'description': s.description(), + 'branch': s.branch(), + } + if util.safehasattr(s, 'path'): + d['file'] = s.path() + yield d + + def __len__(self): + return len(self.siblings) def parents(ctx, hide=None): if isinstance(ctx, context.basefilectx): @@ -283,6 +308,25 @@ return fctx +def commonentry(repo, ctx): + node = ctx.node() + return { + 'rev': ctx.rev(), + 'node': hex(node), + 'author': ctx.user(), + 'desc': ctx.description(), + 'date': ctx.date(), + 'extra': ctx.extra(), + 'phase': ctx.phasestr(), + 'branch': nodebranchnodefault(ctx), + 'inbranch': nodeinbranch(repo, ctx), + 'branches': nodebranchdict(repo, ctx), + 'tags': nodetagsdict(repo, node), + 'bookmarks': nodebookmarksdict(repo, node), + 'parent': lambda **x: parents(ctx), + 'child': lambda **x: children(ctx), + } + def changelistentry(web, ctx, tmpl): '''Obtain a dictionary to be used for entries in a changelist. 
@@ -295,26 +339,18 @@ showtags = showtag(repo, tmpl, 'changelogtag', n) files = listfilediffs(tmpl, ctx.files(), n, web.maxfiles) - return { - "author": ctx.user(), - "parent": parents(ctx, rev - 1), - "child": children(ctx, rev + 1), - "changelogtag": showtags, - "desc": ctx.description(), - "extra": ctx.extra(), - "date": ctx.date(), - "files": files, - "rev": rev, - "node": hex(n), - "tags": nodetagsdict(repo, n), - "bookmarks": nodebookmarksdict(repo, n), - "inbranch": nodeinbranch(repo, ctx), - "branches": nodebranchdict(repo, ctx) - } + entry = commonentry(repo, ctx) + entry.update( + parent=lambda **x: parents(ctx, rev - 1), + child=lambda **x: children(ctx, rev + 1), + changelogtag=showtags, + files=files, + ) + return entry def symrevorshortnode(req, ctx): if 'node' in req.form: - return revescape(req.form['node'][0]) + return templatefilters.revescape(req.form['node'][0]) else: return short(ctx.node()) @@ -351,29 +387,16 @@ return dict( diff=diff, - rev=ctx.rev(), - node=ctx.hex(), symrev=symrevorshortnode(req, ctx), - parent=tuple(parents(ctx)), - child=children(ctx), basenode=basectx.hex(), changesettag=showtags, changesetbookmark=showbookmarks, changesetbranch=showbranch, - author=ctx.user(), - desc=ctx.description(), - extra=ctx.extra(), - date=ctx.date(), - phase=ctx.phasestr(), files=files, diffsummary=lambda **x: diffsummary(diffstatsgen), diffstat=diffstats, archives=web.archivelist(ctx.hex()), - tags=nodetagsdict(web.repo, ctx.node()), - bookmarks=nodebookmarksdict(web.repo, ctx.node()), - branch=showbranch, - inbranch=nodeinbranch(web.repo, ctx), - branches=nodebranchdict(web.repo, ctx)) + **commonentry(web.repo, ctx)) def listfilediffs(tmpl, files, node, max): for f in files[:max]: @@ -537,7 +560,7 @@ yield {'name': key, 'value': str(value), 'separator': separator} separator = '&' -class wsgiui(ui.ui): +class wsgiui(uimod.ui): # default termwidth breaks under mod_wsgi def termwidth(self): return 80 diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/hgweb/wsgicgi.py --- a/mercurial/hgweb/wsgicgi.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/hgweb/wsgicgi.py Mon Jan 11 14:27:12 2016 -0600 @@ -8,9 +8,18 @@ # This was originally copied from the public domain code at # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side -import os, sys -from mercurial import util -from mercurial.hgweb import common +from __future__ import absolute_import + +import os +import sys + +from .. import ( + util, +) + +from . import ( + common, +) def launch(application): util.setbinary(sys.stdin) diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/httpclient/__init__.py --- a/mercurial/httpclient/__init__.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/httpclient/__init__.py Mon Jan 11 14:27:12 2016 -0600 @@ -36,6 +36,7 @@ * notices when the server responds early to a request * implements ssl inline instead of in a different class """ +from __future__ import absolute_import # Many functions in this file have too many arguments. # pylint: disable=R0913 @@ -48,8 +49,10 @@ import select import socket -import _readers -import socketutil +from . import ( + _readers, + socketutil, + ) logger = logging.getLogger(__name__) @@ -124,6 +127,12 @@ # pylint: disable=W0212 self._reader._close() + def getheader(self, header, default=None): + return self.headers.getheader(header, default=default) + + def getheaders(self): + return self.headers.items() + def readline(self): """Read a single line from the response body. 
@@ -279,6 +288,14 @@ # pylint: disable=W0212 self._load_response = self._reader._load +def _foldheaders(headers): + """Given some headers, rework them so we can safely overwrite values. + + >>> _foldheaders({'Accept-Encoding': 'wat'}) + {'accept-encoding': ('Accept-Encoding', 'wat')} + """ + return dict((k.lower(), (k, v)) for k, v in headers.iteritems()) + class HTTPConnection(object): """Connection to a single http server. @@ -292,7 +309,8 @@ def __init__(self, host, port=None, use_ssl=None, ssl_validator=None, timeout=TIMEOUT_DEFAULT, continue_timeout=TIMEOUT_ASSUME_CONTINUE, - proxy_hostport=None, ssl_wrap_socket=None, **ssl_opts): + proxy_hostport=None, proxy_headers=None, + ssl_wrap_socket=None, **ssl_opts): """Create a new HTTPConnection. Args: @@ -307,6 +325,13 @@ "100 Continue" response. Default is TIMEOUT_ASSUME_CONTINUE. proxy_hostport: Optional. Tuple of (host, port) to use as an http proxy for the connection. Default is to not use a proxy. + proxy_headers: Optional dict of header keys and values to send to + a proxy when using CONNECT. For compatibility with + httplib, the Proxy-Authorization header may be + specified in headers for request(), which will clobber + any such header specified here if specified. Providing + this option and not proxy_hostport will raise an + ValueError. ssl_wrap_socket: Optional function to use for wrapping sockets. If unspecified, the one from the ssl module will be used if available, or something that's compatible with @@ -330,10 +355,7 @@ elif use_ssl is None: use_ssl = (port == 443) elif port is None: - if use_ssl: - port = 443 - else: - port = 80 + port = (use_ssl and 443 or 80) self.port = port if use_ssl and not socketutil.have_ssl: raise Exception('ssl requested but unavailable on this Python') @@ -346,13 +368,20 @@ self._current_response_taken = False if proxy_hostport is None: self._proxy_host = self._proxy_port = None + if proxy_headers: + raise ValueError( + 'proxy_headers may not be specified unless ' + 'proxy_hostport is also specified.') + else: + self._proxy_headers = {} else: self._proxy_host, self._proxy_port = proxy_hostport + self._proxy_headers = _foldheaders(proxy_headers or {}) self.timeout = timeout self.continue_timeout = continue_timeout - def _connect(self): + def _connect(self, proxy_headers): """Connect to the host and port specified in __init__.""" if self.sock: return @@ -362,10 +391,9 @@ sock = socketutil.create_connection((self._proxy_host, self._proxy_port)) if self.ssl: - # TODO proxy header support data = self._buildheaders('CONNECT', '%s:%d' % (self.host, self.port), - {}, HTTP_VER_1_0) + proxy_headers, HTTP_VER_1_0) sock.send(data) sock.setblocking(0) r = self.response_class(sock, self.timeout, 'CONNECT') @@ -468,10 +496,10 @@ return True return False - def _reconnect(self, where): + def _reconnect(self, where, pheaders): logger.info('reconnecting during %s', where) self.close() - self._connect() + self._connect(pheaders) def request(self, method, path, body=None, headers={}, expect_continue=False): @@ -492,11 +520,20 @@ logger.info('sending %s request for %s to %s on port %s', method, path, self.host, self.port) - hdrs = dict((k.lower(), (k, v)) for k, v in headers.iteritems()) + hdrs = _foldheaders(headers) if hdrs.get('expect', ('', ''))[1].lower() == '100-continue': expect_continue = True elif expect_continue: hdrs['expect'] = ('Expect', '100-Continue') + # httplib compatibility: if the user specified a + # proxy-authorization header, that's actually intended for a + # proxy CONNECT action, not the real 
request, but only if + # we're going to use a proxy. + pheaders = dict(self._proxy_headers) + if self._proxy_host and self.ssl: + pa = hdrs.pop('proxy-authorization', None) + if pa is not None: + pheaders['proxy-authorization'] = pa chunked = False if body and HDR_CONTENT_LENGTH not in hdrs: @@ -513,7 +550,7 @@ # conditions where we'll want to retry, so make a note of the # state of self.sock fresh_socket = self.sock is None - self._connect() + self._connect(pheaders) outgoing_headers = self._buildheaders( method, path, hdrs, self.http_version) response = None @@ -588,7 +625,7 @@ logger.info( 'Connection appeared closed in read on first' ' request loop iteration, will retry.') - self._reconnect('read') + self._reconnect('read', pheaders) continue else: # We didn't just send the first data hunk, @@ -645,7 +682,7 @@ elif (e[0] not in (errno.ECONNRESET, errno.EPIPE) and not first): raise - self._reconnect('write') + self._reconnect('write', pheaders) amt = self.sock.send(out) logger.debug('sent %d', amt) first = False @@ -664,8 +701,8 @@ # data at all, and in all probability the socket was # closed before the server even saw our request. Try # the request again on a fresh socket. - logging.debug('response._select() failed during request().' - ' Assuming request needs to be retried.') + logger.debug('response._select() failed during request().' + ' Assuming request needs to be retried.') self.sock = None # Call this method explicitly to re-try the # request. We don't use self.request() because diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/httpclient/_readers.py --- a/mercurial/httpclient/_readers.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/httpclient/_readers.py Mon Jan 11 14:27:12 2016 -0600 @@ -31,6 +31,7 @@ This module is package-private. It is not expected that these will have any clients outside of httpplus. """ +from __future__ import absolute_import import httplib import logging @@ -98,11 +99,12 @@ return result def readto(self, delimstr, blocks = None): - """return available data chunks up to the first one in which delimstr - occurs. No data will be returned after delimstr -- the chunk in which - it occurs will be split and the remainder pushed back onto the available - data queue. If blocks is supplied chunks will be added to blocks, otherwise - a new list will be allocated. + """return available data chunks up to the first one in which + delimstr occurs. No data will be returned after delimstr -- + the chunk in which it occurs will be split and the remainder + pushed back onto the available data queue. If blocks is + supplied chunks will be added to blocks, otherwise a new list + will be allocated. """ if blocks is None: blocks = [] diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/httpclient/socketutil.py --- a/mercurial/httpclient/socketutil.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/httpclient/socketutil.py Mon Jan 11 14:27:12 2016 -0600 @@ -32,6 +32,8 @@ socket.create_connection method, but fall back to the old methods if those are unavailable. """ +from __future__ import absolute_import + import logging import socket diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/httpconnection.py --- a/mercurial/httpconnection.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/httpconnection.py Mon Jan 11 14:27:12 2016 -0600 @@ -7,16 +7,21 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
+ +from __future__ import absolute_import + import logging +import os import socket import urllib import urllib2 -import os -from mercurial import httpclient -from mercurial import sslutil -from mercurial import util -from mercurial.i18n import _ +from .i18n import _ +from . import ( + httpclient, + sslutil, + util, +) # moved here from url.py to avoid a cycle class httpsendfile(object): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/httppeer.py --- a/mercurial/httppeer.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/httppeer.py Mon Jan 11 14:27:12 2016 -0600 @@ -254,7 +254,7 @@ os.unlink(filename) def _callcompressable(self, cmd, **args): - stream = self._callstream(cmd, **args) + stream = self._callstream(cmd, **args) return util.chunkbuffer(zgenerator(stream)) def _abort(self, exception): diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/keepalive.py --- a/mercurial/keepalive.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/keepalive.py Mon Jan 11 14:27:12 2016 -0600 @@ -107,15 +107,17 @@ # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $ +from __future__ import absolute_import, print_function + import errno import httplib import socket +import sys import thread import urllib2 DEBUG = None -import sys if sys.version_info < (2, 4): HANDLE_ERRORS = 1 else: HANDLE_ERRORS = 0 @@ -537,13 +539,13 @@ # NOTE: we DO propagate the error, though, because we cannot simply # ignore the error... the caller will know if they can retry. if self.debuglevel > 0: - print "send:", repr(str) + print("send:", repr(str)) try: blocksize = 8192 read = getattr(str, 'read', None) if read is not None: if self.debuglevel > 0: - print "sending a read()able" + print("sending a read()able") data = read(blocksize) while data: self.sock.sendall(data) @@ -595,7 +597,7 @@ urllib2.install_opener(opener) pos = {0: 'off', 1: 'on'} for i in (0, 1): - print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i) + print(" fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)) HANDLE_ERRORS = i try: fo = urllib2.urlopen(url) @@ -606,17 +608,18 @@ except AttributeError: status, reason = None, None except IOError as e: - print " EXCEPTION: %s" % e + print(" EXCEPTION: %s" % e) raise else: - print " status = %s, reason = %s" % (status, reason) + print(" status = %s, reason = %s" % (status, reason)) HANDLE_ERRORS = orig hosts = keepalive_handler.open_connections() - print "open connections:", hosts + print("open connections:", hosts) keepalive_handler.close_all() def continuity(url): - from util import md5 + from . 
import util + md5 = util.md5 format = '%25s: %s' # first fetch the file with the normal http handler @@ -626,7 +629,7 @@ foo = fo.read() fo.close() m = md5(foo) - print format % ('normal urllib', m.hexdigest()) + print(format % ('normal urllib', m.hexdigest())) # now install the keepalive handler and try again opener = urllib2.build_opener(HTTPHandler()) @@ -636,7 +639,7 @@ foo = fo.read() fo.close() m = md5(foo) - print format % ('keepalive read', m.hexdigest()) + print(format % ('keepalive read', m.hexdigest())) fo = urllib2.urlopen(url) foo = '' @@ -647,25 +650,25 @@ else: break fo.close() m = md5(foo) - print format % ('keepalive readline', m.hexdigest()) + print(format % ('keepalive readline', m.hexdigest())) def comp(N, url): - print ' making %i connections to:\n %s' % (N, url) + print(' making %i connections to:\n %s' % (N, url)) sys.stdout.write(' first using the normal urllib handlers') # first use normal opener opener = urllib2.build_opener() urllib2.install_opener(opener) t1 = fetch(N, url) - print ' TIME: %.3f s' % t1 + print(' TIME: %.3f s' % t1) sys.stdout.write(' now using the keepalive handler ') # now install the keepalive handler and try again opener = urllib2.build_opener(HTTPHandler()) urllib2.install_opener(opener) t2 = fetch(N, url) - print ' TIME: %.3f s' % t2 - print ' improvement factor: %.2f' % (t1 / t2) + print(' TIME: %.3f s' % t2) + print(' improvement factor: %.2f' % (t1 / t2)) def fetch(N, url, delay=0): import time @@ -684,7 +687,7 @@ for i in lens[1:]: j = j + 1 if not i == lens[0]: - print "WARNING: inconsistent length on read %i: %i" % (j, i) + print("WARNING: inconsistent length on read %i: %i" % (j, i)) return diff @@ -693,16 +696,16 @@ dbbackup = DEBUG class FakeLogger(object): def debug(self, msg, *args): - print msg % args + print(msg % args) info = warning = error = debug DEBUG = FakeLogger() - print " fetching the file to establish a connection" + print(" fetching the file to establish a connection") fo = urllib2.urlopen(url) data1 = fo.read() fo.close() i = 20 - print " waiting %i seconds for the server to close the connection" % i + print(" waiting %i seconds for the server to close the connection" % i) while i > 0: sys.stdout.write('\r %2i' % i) sys.stdout.flush() @@ -710,42 +713,41 @@ i -= 1 sys.stderr.write('\r') - print " fetching the file a second time" + print(" fetching the file a second time") fo = urllib2.urlopen(url) data2 = fo.read() fo.close() if data1 == data2: - print ' data are identical' + print(' data are identical') else: - print ' ERROR: DATA DIFFER' + print(' ERROR: DATA DIFFER') DEBUG = dbbackup def test(url, N=10): - print "checking error handler (do this on a non-200)" + print("checking error handler (do this on a non-200)") try: error_handler(url) except IOError: - print "exiting - exception will prevent further tests" + print("exiting - exception will prevent further tests") sys.exit() - print - print "performing continuity test (making sure stuff isn't corrupted)" + print('') + print("performing continuity test (making sure stuff isn't corrupted)") continuity(url) - print - print "performing speed comparison" + print('') + print("performing speed comparison") comp(N, url) - print - print "performing dropped-connection check" + print('') + print("performing dropped-connection check") test_timeout(url) if __name__ == '__main__': import time - import sys try: N = int(sys.argv[1]) url = sys.argv[2] except (IndexError, ValueError): - print "%s " % sys.argv[0] + print("%s " % sys.argv[0]) else: test(url, N) diff -r 
6c7d26cef0cd -r 4571c0b38337 mercurial/localrepo.py --- a/mercurial/localrepo.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/localrepo.py Mon Jan 11 14:27:12 2016 -0600 @@ -4,21 +4,60 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from node import hex, nullid, wdirrev, short -from i18n import _ + +from __future__ import absolute_import + +import errno +import inspect +import os +import random +import time import urllib -import peer, changegroup, subrepo, pushkey, obsolete, repoview -import changelog, dirstate, filelog, manifest, context, bookmarks, phases -import lock as lockmod -import transaction, store, encoding, exchange, bundle2 -import scmutil, util, extensions, hook, error, revset, cmdutil -import match as matchmod -import merge as mergemod -import tags as tagsmod -from lock import release -import weakref, errno, os, time, inspect, random -import branchmap, pathutil -import namespaces +import weakref + +from .i18n import _ +from .node import ( + hex, + nullid, + short, + wdirrev, +) +from . import ( + bookmarks, + branchmap, + bundle2, + changegroup, + changelog, + cmdutil, + context, + dirstate, + encoding, + error, + exchange, + extensions, + filelog, + hook, + lock as lockmod, + manifest, + match as matchmod, + merge as mergemod, + namespaces, + obsolete, + pathutil, + peer, + phases, + pushkey, + repoview, + revset, + scmutil, + store, + subrepo, + tags as tagsmod, + transaction, + util, +) + +release = lockmod.release propertycache = util.propertycache filecache = scmutil.filecache @@ -214,6 +253,8 @@ self.path = self.wvfs.join(".hg") self.origroot = path self.auditor = pathutil.pathauditor(self.root, self._checknested) + self.nofsauditor = pathutil.pathauditor(self.root, self._checknested, + realfs=False) self.vfs = scmutil.vfs(self.path) self.opener = self.vfs self.baseui = baseui @@ -258,8 +299,7 @@ '\0\0\0\2' # represents revlogv2 ' dummy changelog to prevent using the old repo layout' ) - # experimental config: format.generaldelta - if self.ui.configbool('format', 'generaldelta', False): + if scmutil.gdinitconfig(self.ui): self.requirements.add("generaldelta") if self.ui.configbool('experimental', 'treemanifest', False): self.requirements.add("treemanifest") @@ -359,6 +399,7 @@ aggressivemergedeltas = self.ui.configbool('format', 'aggressivemergedeltas', False) self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas + self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui) def _writerequirements(self): scmutil.writerequires(self.vfs, self.requirements) @@ -418,13 +459,13 @@ pass return proxycls(self, name) - @repofilecache('bookmarks') + @repofilecache('bookmarks', 'bookmarks.current') def _bookmarks(self): return bookmarks.bmstore(self) - @repofilecache('bookmarks.current') + @property def _activebookmark(self): - return bookmarks.readactive(self) + return self._bookmarks.active def bookmarkheads(self, bookmark): name = bookmark.split('@', 1)[0] @@ -517,15 +558,23 @@ return iter(self.changelog) def revs(self, expr, *args): - '''Return a list of revisions matching the given revset''' + '''Find revisions matching a revset. + + The revset is specified as a string ``expr`` that may contain + %-formatting to escape certain types. See ``revset.formatspec``. + + Return a revset.abstractsmartset, which is a list-like interface + that contains integer revisions. 
+ ''' expr = revset.formatspec(expr, *args) m = revset.match(None, expr) return m(self) def set(self, expr, *args): - ''' - Yield a context for each matching revision, after doing arg - replacement via revset.formatspec + '''Find revisions matching a revset and emit changectx instances. + + This is a convenience wrapper around ``revs()`` that iterates the + result and is a generator of changectx instances. ''' for r in self.revs(expr, *args): yield self[r] @@ -751,6 +800,7 @@ return self._tagscache.nodetagscache.get(node, []) def nodebookmarks(self, node): + """return the list of bookmarks pointing to the specified node""" marks = [] for bookmark, n in self._bookmarks.iteritems(): if n == node: @@ -797,12 +847,13 @@ return repo[key].branch() def known(self, nodes): - nm = self.changelog.nodemap - pc = self._phasecache + cl = self.changelog + nm = cl.nodemap + filtered = cl.filteredrevs result = [] for n in nodes: r = nm.get(n) - resp = not (r is None or pc.phase(self, r) >= phases.secret) + resp = not (r is None or r in filtered) result.append(resp) return result @@ -840,13 +891,15 @@ f = f[1:] return filelog.filelog(self.svfs, f) + def parents(self, changeid=None): + '''get list of changectxs for parents of changeid''' + msg = 'repo.parents() is deprecated, use repo[%r].parents()' % changeid + self.ui.deprecwarn(msg, '3.7') + return self[changeid].parents() + def changectx(self, changeid): return self[changeid] - def parents(self, changeid=None): - '''get list of changectxs for parents of changeid''' - return self[changeid].parents() - def setparents(self, p1, p2=nullid): self.dirstate.beginparentchange() copies = self.dirstate.setparents(p1, p2) @@ -1161,15 +1214,14 @@ % self.dirstate.branch()) self.dirstate.invalidate() - parents = tuple([p.rev() for p in self.parents()]) + parents = tuple([p.rev() for p in self[None].parents()]) if len(parents) > 1: ui.status(_('working directory now based on ' 'revisions %d and %d\n') % parents) else: ui.status(_('working directory now based on ' 'revision %d\n') % parents) - ms = mergemod.mergestate(self) - ms.reset(self['.'].node()) + mergemod.mergestate.clean(self, self['.'].node()) # TODO: if we know which new heads may result from this rollback, pass # them to destroy(), which will prevent the branchhead cache from being @@ -1456,8 +1508,11 @@ match.explicitdir = vdirs.append match.bad = fail - wlock = self.wlock() + wlock = lock = tr = None try: + wlock = self.wlock() + lock = self.lock() # for recent changelog (see issue4368) + wctx = self[None] merge = len(wctx.parents()) > 1 @@ -1556,7 +1611,7 @@ if merge and cctx.deleted(): raise error.Abort(_("cannot commit merge with missing files")) - ms = mergemod.mergestate(self) + ms = mergemod.mergestate.read(self) if list(ms.unresolved()): raise error.Abort(_('unresolved merge conflicts ' @@ -1589,19 +1644,21 @@ try: self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2) + tr = self.transaction('commit') ret = self.commitctx(cctx, True) except: # re-raises if edited: self.ui.write( _('note: commit message saved in %s\n') % msgfn) raise - # update bookmarks, dirstate and mergestate bookmarks.update(self, [p1, p2], ret) cctx.markcommitted(ret) ms.reset() + tr.close() + finally: - wlock.release() + lockmod.release(tr, lock, wlock) def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2): # hack for command that use a temporary commit (eg: histedit) @@ -1838,22 +1895,6 @@ """ return util.hooks() - def clone(self, remote, heads=[], stream=None): - '''clone remote repository. 
- - keyword arguments: - heads: list of revs to clone (forces use of pull) - stream: use streaming clone if possible''' - # internal config: ui.quietbookmarkmove - quiet = self.ui.backupconfig('ui', 'quietbookmarkmove') - try: - self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone') - pullop = exchange.pull(self, remote, heads, - streamclonerequested=stream) - return pullop.cgresult - finally: - self.ui.restoreconfig(quiet) - def pushkey(self, namespace, key, old, new): try: tr = self.currenttransaction() diff -r 6c7d26cef0cd -r 4571c0b38337 mercurial/lsprof.py --- a/mercurial/lsprof.py Fri Jan 08 16:27:25 2016 +0100 +++ b/mercurial/lsprof.py Mon Jan 11 14:27:12 2016 -0600 @@ -1,5 +1,12 @@ +from __future__ import absolute_import, print_function + +import _lsprof import sys -from _lsprof import Profiler, profiler_entry + +Profiler = _lsprof.Profiler + +# PyPy doesn't expose profiler_entry from the module. +profiler_entry = getattr(_lsprof, 'profiler_entry', None) __all__ = ['profile', 'Stats'] @@ -22,8 +29,13 @@ def sort(self, crit="inlinetime"): """XXX docstring""" - if crit not in profiler_entry.__dict__: + # profiler_entries isn't defined when running under PyPy. + if profiler_entry: + if crit not in profiler_entry.__dict__: + raise ValueError("Can't sort by %s" % crit) + elif self.data and not getattr(self.data[0], crit, None): raise ValueError("Can't sort by %s" % crit) + self.data.sort(key=lambda x: getattr(x, crit), reverse=True) for e in self.data: if e.calls: @@ -101,7 +113,7 @@ import os sys.argv = sys.argv[1:] if not sys.argv: - print >> sys.stderr, "usage: lsprof.py + + Help: internals + + + +

Topics

+ bundles         container for exchange of repository data
+ changegroups    representation of revlog data
+ revlogs         revision storage mechanism
+
+
+Sub-topic topics rendered properly
+
+  $ get-with-headers.py 127.0.0.1:$HGPORT "help/internals.changegroups"
+  200 Script output follows
+
+  Help: internals.changegroups
+ + +
+ +

Help: internals.changegroups

+ + +
+

representation of revlog data

+

Changegroups

+

+ Changegroups are representations of repository revlog data, specifically + the changelog, manifest, and filelogs. +

+

+ There are 3 versions of changegroups: "1", "2", and "3". At a + high level, versions "1" and "2" are almost exactly the same, with + the only difference being a header on entries in the changeset + segment. Version "3" adds support for exchanging treemanifests and + includes revlog flags in the delta header. +

+

+ Changegroups consist of 3 logical segments: +

+
+  +---------------------------------+
+  |           |          |          |
+  | changeset | manifest | filelogs |
+  |           |          |          |
+  +---------------------------------+
+  
+

+ The principal building block of each segment is a *chunk*. A *chunk* + is a framed piece of data: +

+
+  +---------------------------------------+
+  |           |                           |
+  |  length   |           data            |
+  | (32 bits) |       <length> bytes      |
+  |           |                           |
+  +---------------------------------------+
+  
+

+ Each chunk starts with a 32-bit big-endian signed integer indicating + the length of the raw data that follows. +

+

+ There is a special case chunk that has 0 length ("0x00000000"). We + call this an *empty chunk*. +

+
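(Illustration only, not part of this patch: a minimal Python sketch of the
chunk framing described above. The "readchunk" name is invented here, the
"stream" argument is anything with a read() method such as io.BytesIO, and
the sketch assumes, per the paragraph above, that the length field counts
only the data bytes that follow it.)

    import struct

    def readchunk(stream):
        # A chunk is a 32-bit big-endian signed length followed by that
        # many bytes of data; a zero length is the special *empty chunk*.
        header = stream.read(4)
        if len(header) < 4:
            raise EOFError('truncated chunk header')
        length = struct.unpack('>i', header)[0]
        if length == 0:
            return b''
        data = stream.read(length)
        if len(data) < length:
            raise EOFError('truncated chunk data')
        return data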

Delta Groups

+

+ A *delta group* expresses the content of a revlog as a series of deltas, + or patches against previous revisions. +

+

+ Delta groups consist of 0 or more *chunks* followed by the *empty chunk* + to signal the end of the delta group: +

+
+  +------------------------------------------------------------------------+
+  |                |             |               |             |           |
+  | chunk0 length  | chunk0 data | chunk1 length | chunk1 data |    0x0    |
+  |   (32 bits)    |  (various)  |   (32 bits)   |  (various)  | (32 bits) |
+  |                |             |               |             |           |
+  +------------------------------------------------------------+-----------+
+  
+
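(Continuing the illustrative sketch above: a delta group can be drained by
calling the hypothetical readchunk() until the terminating empty chunk is
seen.)

    def iterdeltagroup(stream):
        # Yield each delta chunk; stop at the 0-length *empty chunk*.
        while True:
            chunk = readchunk(stream)
            if not chunk:
                break
            yield chunk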

+ Each *chunk*'s data consists of the following: +

+
+  +-----------------------------------------+
+  |              |              |           |
+  | delta header | mdiff header |   delta   |
+  |  (various)   |  (12 bytes)  | (various) |
+  |              |              |           |
+  +-----------------------------------------+
+  
+

+ The chunk's *length* field is the byte length of these 3 logical pieces + of data. The *delta* is a diff from an existing entry in the corresponding revlog. +

+

+ The *delta header* is different between versions "1", "2", and + "3" of the changegroup format. +

+

+ Version 1: +

+
+  +------------------------------------------------------+
+  |            |             |             |             |
+  |    node    |   p1 node   |   p2 node   |  link node  |
+  | (20 bytes) |  (20 bytes) |  (20 bytes) |  (20 bytes) |
+  |            |             |             |             |
+  +------------------------------------------------------+
+  
+

+ Version 2: +

+
+  +------------------------------------------------------------------+
+  |            |             |             |            |            |
+  |    node    |   p1 node   |   p2 node   | base node  | link node  |
+  | (20 bytes) |  (20 bytes) |  (20 bytes) | (20 bytes) | (20 bytes) |
+  |            |             |             |            |            |
+  +------------------------------------------------------------------+
+  
+

+ Version 3: +

+
+  +------------------------------------------------------------------------------+
+  |            |             |             |            |            |           |
+  |    node    |   p1 node   |   p2 node   | base node  | link node  | flags     |
+  | (20 bytes) |  (20 bytes) |  (20 bytes) | (20 bytes) | (20 bytes) | (2 bytes) |
+  |            |             |             |            |            |           |
+  +------------------------------------------------------------------------------+
+  
+
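(To make the three layouts concrete, an illustrative sketch of splitting the
delta header off the front of a chunk's data. The function name and struct
formats are assumptions of this example; field sizes follow the diagrams
above.)

    import struct

    def parsedeltaheader(chunkdata, version):
        # Nodes are raw 20-byte hashes; version "3" appends a 2-byte flags
        # field. The remainder of the chunk is returned untouched.
        if version == '1':
            node, p1, p2, link = struct.unpack('>20s20s20s20s', chunkdata[:80])
            # Version 1 has no explicit base node; as described further
            # below, the delta base is the previous node in the group (or
            # the first parent for the first entry).
            base, flags, rest = None, 0, chunkdata[80:]
        elif version == '2':
            node, p1, p2, base, link = struct.unpack('>20s20s20s20s20s',
                                                     chunkdata[:100])
            flags, rest = 0, chunkdata[100:]
        else:  # version '3'
            node, p1, p2, base, link, flags = struct.unpack(
                '>20s20s20s20s20sH', chunkdata[:102])
            rest = chunkdata[102:]
        return node, p1, p2, base, link, flags, rest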

+ The *mdiff header* consists of 3 32-bit big-endian signed integers + describing offsets at which to apply the following delta content: +

+
+  +-------------------------------------+
+  |           |            |            |
+  |  offset   | old length | new length |
+  | (32 bits) |  (32 bits) |  (32 bits) |
+  |           |            |            |
+  +-------------------------------------+
+  
+
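(An illustrative sketch of unpacking the mdiff header laid out above,
following the single-header layout in the diagram; "parsemdiffheader" is an
invented name, and the remainder of the buffer is returned as the delta
content.)

    import struct

    def parsemdiffheader(payload):
        # offset, old length, new length: three 32-bit big-endian signed
        # integers, followed by the delta content itself.
        offset, oldlen, newlen = struct.unpack('>iii', payload[:12])
        return offset, oldlen, newlen, payload[12:]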

+ In version 1, the delta is always applied against the previous node from + the changegroup or the first parent if this is the first entry in the + changegroup. +

+

+ In version 2, the delta base node is encoded in the entry in the + changegroup. This allows the delta to be expressed against any parent, + which can result in smaller deltas and more efficient encoding of data. +

+

Changeset Segment

+

+ The *changeset segment* consists of a single *delta group* holding + changelog data. It is followed by an *empty chunk* to denote the + boundary to the *manifest segment*. +

+

Manifest Segment

+

+ The *manifest segment* consists of a single *delta group* holding + manifest data. It is followed by an *empty chunk* to denote the boundary + to the *filelogs segment*. +

+

Filelogs Segment

+

+ The *filelogs* segment consists of multiple sub-segments, each + corresponding to an individual file whose data is being described: +

+
+  +--------------------------------------+
+  |          |          |          |     |
+  | filelog0 | filelog1 | filelog2 | ... |
+  |          |          |          |     |
+  +--------------------------------------+
+  
+

+ In version "3" of the changegroup format, filelogs may include + directory logs when treemanifests are in use. directory logs are + identified by having a trailing '/' on their filename (see below). +

+

+ The final filelog sub-segment is followed by an *empty chunk* to denote + the end of the segment and the overall changegroup. +

+

+ Each filelog sub-segment consists of the following: +

+
+  +------------------------------------------+
+  |               |            |             |
+  | filename size |  filename  | delta group |
+  |   (32 bits)   |  (various) |  (various)  |
+  |               |            |             |
+  +------------------------------------------+
+  
+

+ That is, a *chunk* consisting of the filename (not terminated or padded) + followed by N chunks constituting the *delta group* for this file. +

+ +
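(Tying the pieces together: a rough end-to-end sketch built on the
hypothetical readchunk() and iterdeltagroup() helpers from the earlier
examples. It assumes that the empty chunk terminating each delta group also
serves as the segment boundary described above; all names are invented for
illustration.)

    def readchangegroup(stream):
        # Changeset segment: one delta group of changelog entries.
        changesets = list(iterdeltagroup(stream))
        # Manifest segment: one delta group of manifest entries.
        manifests = list(iterdeltagroup(stream))
        # Filelogs segment: (filename chunk, delta group) pairs until an
        # empty chunk ends the segment and the overall changegroup.
        filelogs = {}
        while True:
            filename = readchunk(stream)
            if not filename:
                break
            filelogs[filename] = list(iterdeltagroup(stream))
        return changesets, manifests, filelogs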
+
+
+ + + + + + + + $ killdaemons.py #endif diff -r 6c7d26cef0cd -r 4571c0b38337 tests/test-hgignore.t --- a/tests/test-hgignore.t Fri Jan 08 16:27:25 2016 +0100 +++ b/tests/test-hgignore.t Mon Jan 11 14:27:12 2016 -0600 @@ -55,6 +55,29 @@ ? a.c ? syntax +Ensure that comments work: + + $ touch 'foo#bar' 'quux#' +#if no-windows + $ touch 'baz\#wat' +#endif + $ cat <<'EOF' >> .hgignore + > # full-line comment + > # whitespace-only comment line + > syntax# pattern, no whitespace, then comment + > a.c # pattern, then whitespace, then comment + > baz\\# # escaped comment character + > foo\#b # escaped comment character + > quux\## escaped comment character at end of name + > EOF + $ hg status + A dir/b.o + ? .hgignore + $ rm 'foo#bar' 'quux#' +#if no-windows + $ rm 'baz\#wat' +#endif + Check it does not ignore the current directory '.': $ echo "^\." > .hgignore @@ -143,6 +166,10 @@ $ hg debugignore (?:(?:|.*/)[^/]*(?:/|$)) + $ hg debugignore b.o + b.o is ignored + (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: '*') + $ cd .. Check patterns that match only the directory @@ -168,6 +195,11 @@ ? a.c ? a.o ? syntax + $ hg debugignore a.c + a.c is not ignored + $ hg debugignore dir/c.o + dir/c.o is ignored + (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 2: 'dir/**/c.o') Check using 'include:' in ignore file @@ -251,3 +283,6 @@ $ hg status | grep file2 [1] + $ hg debugignore dir1/file2 + dir1/file2 is ignored + (ignore rule in dir2/.hgignore, line 1: 'file*2') diff -r 6c7d26cef0cd -r 4571c0b38337 tests/test-hgweb-commands.t --- a/tests/test-hgweb-commands.t Fri Jan 08 16:27:25 2016 +0100 +++ b/tests/test-hgweb-commands.t Mon Jan 11 14:27:12 2016 -0600 @@ -6,6 +6,11 @@ - unbundle, tested in test-push-http - changegroupsubset, tested in test-pull + $ cat << EOF >> $HGRCPATH + > [format] + > usegeneraldelta=yes + > EOF + Set up the repo $ hg init test @@ -1892,7 +1897,7 @@ $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=capabilities'; echo 200 Script output follows - lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1*%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 (glob) + lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch bundle2=HG20%0Achangegroup%3D01%2C02%2C03%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 heads @@ -2098,10 +2103,34 @@ capabilities - $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=capabilities'; echo +(plain version to check the format) + + $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=capabilities' | dd ibs=75 count=1 2> /dev/null; echo 200 Script output follows - lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch stream-preferred stream bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1*%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 (glob) + lookup changegroupsubset branchmap pushkey known + +(spread version to check the content) + + $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=capabilities' | tr ' ' '\n'; echo + 200 + Script + output + follows + + lookup + changegroupsubset + branchmap + pushkey + known + getbundle + unbundlehash + batch + stream-preferred + streamreqs=generaldelta,revlogv1 + 
bundle2=HG20%0Achangegroup%3D01%2C02%2C03%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps + unbundle=HG10GZ,HG10BZ,HG10UN + httpheader=1024 heads diff -r 6c7d26cef0cd -r 4571c0b38337 tests/test-hgweb-diffs.t --- a/tests/test-hgweb-diffs.t Fri Jan 08 16:27:25 2016 +0100 +++ b/tests/test-hgweb-diffs.t Mon Jan 11 14:27:12 2016 -0600 @@ -927,7 +927,7 @@ parents - 0cd96de13884 + d73db4d812ff children diff -r 6c7d26cef0cd -r 4571c0b38337 tests/test-hgweb-filelog.t --- a/tests/test-hgweb-filelog.t Fri Jan 08 16:27:25 2016 +0100 +++ b/tests/test-hgweb-filelog.t Mon Jan 11 14:27:12 2016 -0600 @@ -179,7 +179,7 @@
  • help
  • @@ -187,7 +187,10 @@
    -

    log a

    +

    + log a @ 4:3f41bc784e7e + a-branch +