Mercurial > hg
changeset 28977:740156eedf2c stable 3.8-rc
merge default into stable for 3.8 code freeze
author | Matt Mackall <mpm@selenic.com> |
---|---|
date | Sat, 16 Apr 2016 18:06:48 -0500 |
parents | 2d39f987f0ba (current diff) 9fb2e8c8f320 (diff) |
children | 1f277ae29168 |
files | contrib/hgfixes/__init__.py contrib/hgfixes/fix_bytes.py contrib/hgfixes/fix_bytesmod.py contrib/hgfixes/fix_leftover_imports.py tests/test-1102.t tests/test-1993.t tests/test-586.t tests/test-module-imports.t |
diffstat | 443 files changed, 22790 insertions(+), 7500 deletions(-) [+] |
line wrap: on
line diff
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.editorconfig Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,13 @@ +# See http://EditorConfig.org for the specification + +root = true + +[*.py] +indent_size = 4 +indent_style = space +trim_trailing_whitespace = true + +[*.{c,h}] +indent_size = 8 +indent_style = tab +trim_trailing_whitespace = true
--- a/.hgignore Tue Mar 29 11:54:46 2016 -0500 +++ b/.hgignore Sat Apr 16 18:06:48 2016 -0500 @@ -21,10 +21,13 @@ .\#* tests/.coverage* tests/.testtimes* +tests/.hypothesis +tests/hypothesis-generated tests/annotated tests/*.err tests/htmlcov build +contrib/chg/chg contrib/hgsh/hgsh contrib/vagrant/.vagrant dist @@ -37,6 +40,7 @@ MANIFEST MANIFEST.in patches +mercurial/__modulepolicy__.py mercurial/__version__.py mercurial/hgpythonlib.h mercurial.egg-info
--- a/Makefile Tue Mar 29 11:54:46 2016 -0500 +++ b/Makefile Sat Apr 16 18:06:48 2016 -0500 @@ -59,13 +59,15 @@ clean: -$(PYTHON) setup.py clean --all # ignore errors from this command - find contrib doc hgext i18n mercurial tests \ + find contrib doc hgext hgext3rd i18n mercurial tests \ \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';' rm -f $(addprefix mercurial/,$(notdir $(wildcard mercurial/pure/[a-z]*.py))) rm -f MANIFEST MANIFEST.in hgext/__index__.py tests/*.err + rm -f mercurial/__modulepolicy__.py if test -d .hg; then rm -f mercurial/__version__.py; fi - rm -rf build mercurial/locale + rm -rf build packages mercurial/locale $(MAKE) -C doc clean + $(MAKE) -C contrib/chg distclean install: install-bin install-doc @@ -167,6 +169,10 @@ mkdir -p packages/debian-jessie contrib/dockerdeb debian jessie +docker-ubuntu-trusty: + mkdir -p packages/ubuntu-trusty + contrib/dockerdeb ubuntu trusty + fedora20: mkdir -p packages/fedora20 contrib/buildrpm
--- a/contrib/casesmash.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/casesmash.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,5 +1,9 @@ -import os, __builtin__ -from mercurial import util +from __future__ import absolute_import +import __builtin__ +import os +from mercurial import ( + util, +) def lowerwrap(scope, funcname): f = getattr(scope, funcname)
--- a/contrib/check-code.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/check-code.py Sat Apr 16 18:06:48 2016 -0500 @@ -14,14 +14,18 @@ * doublecheck that it is a false match * improve the rule pattern * add an ignore pattern to the rule (3rd arg) which matches your good line - (you can append a short comment and match this, like: #re-raises, # no-py24) + (you can append a short comment and match this, like: #re-raises) * change the pattern to a warning and list the exception in test-check-code-hg * ONLY use no--check-code for skipping entire files from external sources """ -import re, glob, os, sys +from __future__ import absolute_import, print_function +import glob import keyword import optparse +import os +import re +import sys try: import re2 except ImportError: @@ -90,7 +94,7 @@ (r'pushd|popd', "don't use 'pushd' or 'popd', use 'cd'"), (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"), (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"), - (r'(?<!hg )grep.*-a', "don't use 'grep -a', use in-line python"), + (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"), (r'sed.*-i', "don't use 'sed -i', use a temporary file"), (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"), (r'echo -n', "don't use 'echo -n', use printf"), @@ -128,6 +132,7 @@ (r'\butil\.Abort\b', "directly use error.Abort"), (r'\|&', "don't use |&, use 2>&1"), (r'\w = +\w', "only one space after = allowed"), + (r'\bsed\b.*[^\\]\\n', "don't use 'sed ... 
\\n', use a \\ and a newline"), ], # warnings [ @@ -176,6 +181,19 @@ 'write "file:/*/$TESTTMP" + (glob) to match on windows too'), (r'^ (cat|find): .*: No such file or directory', 'use test -f to test for file existence'), + (r'^ diff -[^ -]*p', + "don't use (external) diff with -p for portability"), + (r'^ [-+][-+][-+] .* [-+]0000 \(glob\)', + "glob timezone field in diff output for portability"), + (r'^ @@ -[0-9]+ [+][0-9]+,[0-9]+ @@', + "use '@@ -N* +N,n @@ (glob)' style chunk header for portability"), + (r'^ @@ -[0-9]+,[0-9]+ [+][0-9]+ @@', + "use '@@ -N,n +N* @@ (glob)' style chunk header for portability"), + (r'^ @@ -[0-9]+ [+][0-9]+ @@', + "use '@@ -N* +N* @@ (glob)' style chunk header for portability"), + (uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff' + r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$', + "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)"), ], # warnings [ @@ -205,9 +223,6 @@ "tuple parameter unpacking not available in Python 3+"), (r'lambda\s*\(.*,.*\)', "tuple parameter unpacking not available in Python 3+"), - (r'import (.+,[^.]+\.[^.]+|[^.]+\.[^.]+,)', - '2to3 can\'t always rewrite "import qux, foo.bar", ' - 'use "import foo.bar" on its own line instead.'), (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"), (r'\breduce\s*\(.*', "reduce is not available in Python 3+"), (r'dict\(.*=', 'dict() is different in Py2 and 3 and is slower than {}', @@ -232,9 +247,11 @@ "don't use camelcase in identifiers"), (r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+', "linebreak after :"), - (r'class\s[^( \n]+:', "old-style class, use class foo(object)"), + (r'class\s[^( \n]+:', "old-style class, use class foo(object)", + r'#.*old-style'), (r'class\s[^( \n]+\(\):', - "class foo() creates old style object, use class foo(object)"), + "class foo() creates old style object, use class foo(object)", + r'#.*old-style'), (r'\b(%s)\(' % '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')), "Python keyword 
is not a function"), @@ -298,6 +315,9 @@ # XXX only catch mutable arguments on the first line of the definition (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"), (r'\butil\.Abort\b', "directly use error.Abort"), + (r'^import Queue', "don't use Queue, use util.queue + util.empty"), + (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"), + (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"), ], # warnings [ @@ -343,6 +363,8 @@ (r'^#\s+\w', "use #foo, not # foo"), (r'[^\n]\Z', "no trailing newline"), (r'^\s*#import\b', "use only #include in standard C code"), + (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"), + (r'strcat\(', "don't use strcat"), ], # warnings [] @@ -431,12 +453,12 @@ msgid = fname, lineno, line if msgid != self._lastseen: if blame: - print "%s:%d (%s):" % (fname, lineno, blame) + print("%s:%d (%s):" % (fname, lineno, blame)) else: - print "%s:%d:" % (fname, lineno) - print " > %s" % line + print("%s:%d:" % (fname, lineno)) + print(" > %s" % line) self._lastseen = msgid - print " " + msg + print(" " + msg) _defaultlogger = norepeatlogger() @@ -466,19 +488,19 @@ try: fp = open(f) except IOError as e: - print "Skipping %s, %s" % (f, str(e).split(':', 1)[0]) + print("Skipping %s, %s" % (f, str(e).split(':', 1)[0])) return result pre = post = fp.read() fp.close() for name, match, magic, filters, pats in checks: if debug: - print name, f + print(name, f) fc = 0 - if not (re.match(match, f) or (magic and re.search(magic, f))): + if not (re.match(match, f) or (magic and re.search(magic, pre))): if debug: - print "Skipping %s for %s it doesn't match %s" % ( - name, match, f) + print("Skipping %s for %s it doesn't match %s" % ( + name, match, f)) continue if "no-" "check-code" in pre: # If you're looking at this line, it's because a file has: @@ -487,7 +509,7 @@ # tests easier. 
So, instead of writing it with a normal # spelling, we write it with the expected spelling from # tests/test-check-code.t - print "Skipping %s it has no-che?k-code (glob)" % f + print("Skipping %s it has no-che?k-code (glob)" % f) return "Skip" # skip checking this file for p, r in filters: post = re.sub(p, r, post) @@ -499,7 +521,7 @@ # print post # uncomment to show filtered version if debug: - print "Checking %s for %s" % (name, f) + print("Checking %s for %s" % (name, f)) prelines = None errors = [] @@ -530,8 +552,8 @@ if ignore and re.search(ignore, l, re.MULTILINE): if debug: - print "Skipping %s for %s:%s (ignore pattern)" % ( - name, f, n) + print("Skipping %s for %s:%s (ignore pattern)" % ( + name, f, n)) continue bd = "" if blame: @@ -551,7 +573,7 @@ logfunc(*e) fc += 1 if maxerr and fc >= maxerr: - print " (too many errors, giving up)" + print(" (too many errors, giving up)") break return result
--- a/contrib/check-commit Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/check-commit Sat Apr 16 18:06:48 2016 -0500 @@ -23,7 +23,8 @@ errors = [ (beforepatch + r".*[(]bc[)]", "(BC) needs to be uppercase"), - (beforepatch + r".*[(]issue \d\d\d", "no space allowed between issue and number"), + (beforepatch + r".*[(]issue \d\d\d", + "no space allowed between issue and number"), (beforepatch + r".*[(]bug(\d|\s)", "use (issueDDDD) instead of bug"), (commitheader + r"# User [^@\n]+\n", "username is not an email address"), (commitheader + r"(?!merge with )[^#]\S+[^:] ", @@ -34,7 +35,7 @@ "summary keyword should be most user-relevant one-word command or topic"), (afterheader + r".*\.\s*\n", "don't add trailing period on summary line"), (afterheader + r".{79,}", "summary line too long (limit is 78)"), - (r"\n\+\n \n", "adds double empty line"), + (r"\n\+\n( |\+)\n", "adds double empty line"), (r"\n \n\+\n", "adds double empty line"), (r"\n\+[ \t]+def [a-z]+_[a-z]", "adds a function with foo_bar naming"), ] @@ -45,13 +46,12 @@ return first return second -def checkcommit(commit, node = None): +def checkcommit(commit, node=None): exitcode = 0 printed = node is None hits = [] for exp, msg in errors: - m = re.search(exp, commit) - if m: + for m in re.finditer(exp, commit): end = m.end() trailing = re.search(r'(\\n)+$', exp) if trailing:
--- a/contrib/check-config.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/check-config.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,6 +7,7 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import, print_function import re import sys @@ -82,9 +83,9 @@ if re.match('[a-z.]+$', default): default = '<variable>' if name in foundopts and (ctype, default) != foundopts[name]: - print l - print "conflict on %s: %r != %r" % (name, (ctype, default), - foundopts[name]) + print(l) + print("conflict on %s: %r != %r" % (name, (ctype, default), + foundopts[name])) foundopts[name] = (ctype, default) carryover = '' else: @@ -102,7 +103,10 @@ ctype, default = foundopts[name] if default: default = ' [%s]' % default - print "undocumented: %s (%s)%s" % (name, ctype, default) + print("undocumented: %s (%s)%s" % (name, ctype, default)) if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) + if len(sys.argv) > 1: + sys.exit(main(sys.argv[1:])) + else: + sys.exit(main([l.rstrip() for l in sys.stdin]))
--- a/contrib/check-py3-compat.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/check-py3-compat.py Sat Apr 16 18:06:48 2016 -0500 @@ -10,18 +10,21 @@ from __future__ import absolute_import, print_function import ast +import imp +import os import sys +import traceback -def check_compat(f): - """Check Python 3 compatibility for a file.""" +def check_compat_py2(f): + """Check Python 3 compatibility for a file with Python 2""" with open(f, 'rb') as fh: content = fh.read() + root = ast.parse(content) # Ignore empty files. - if not content.strip(): + if not root.body: return - root = ast.parse(content) futures = set() haveprint = False for node in ast.walk(root): @@ -36,8 +39,45 @@ if haveprint and 'print_function' not in futures: print('%s requires print_function' % f) +def check_compat_py3(f): + """Check Python 3 compatibility of a file with Python 3.""" + with open(f, 'rb') as fh: + content = fh.read() + + try: + ast.parse(content) + except SyntaxError as e: + print('%s: invalid syntax: %s' % (f, e)) + return + + # Try to import the module. + # For now we only support mercurial.* and hgext.* modules because figuring + # out module paths for things not in a package can be confusing. 
+ if f.startswith(('hgext/', 'mercurial/')) and not f.endswith('__init__.py'): + assert f.endswith('.py') + name = f.replace('/', '.')[:-3] + with open(f, 'r') as fh: + try: + imp.load_module(name, fh, '', ('py', 'r', imp.PY_SOURCE)) + except Exception as e: + exc_type, exc_value, tb = sys.exc_info() + frame = traceback.extract_tb(tb)[-1] + + if frame.filename: + filename = os.path.basename(frame.filename) + print('%s: error importing: <%s> %s (error at %s:%d)' % ( + f, type(e).__name__, e, filename, frame.lineno)) + else: + print('%s: error importing module: <%s> %s (line %d)' % ( + f, type(e).__name__, e, frame.lineno)) + if __name__ == '__main__': + if sys.version_info[0] == 2: + fn = check_compat_py2 + else: + fn = check_compat_py3 + for f in sys.argv[1:]: - check_compat(f) + fn(f) sys.exit(0)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/chg/Makefile Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,52 @@ +HG = $(CURDIR)/../../hg + +TARGET = chg +SRCS = chg.c hgclient.c util.c +OBJS = $(SRCS:.c=.o) + +CFLAGS ?= -O2 -Wall -Wextra -pedantic -g +CPPFLAGS ?= -D_FORTIFY_SOURCE=2 +override CFLAGS += -std=gnu99 +ifdef HGPATH +override CPPFLAGS += -DHGPATH=\"$(HGPATH)\" +endif + +DESTDIR = +PREFIX = /usr/local +MANDIR = $(PREFIX)/share/man/man1 + +CHGSOCKDIR = /tmp/chg$(shell id -u) +CHGSOCKNAME = $(CHGSOCKDIR)/server + +.PHONY: all +all: $(TARGET) + +$(TARGET): $(OBJS) + $(CC) $(LDFLAGS) -o $@ $(OBJS) + +chg.o: hgclient.h util.h +hgclient.o: hgclient.h util.h +util.o: util.h + +.PHONY: install +install: $(TARGET) + install -d $(DESTDIR)$(PREFIX)/bin + install -m 755 $(TARGET) $(DESTDIR)$(PREFIX)/bin + install -d $(DESTDIR)$(MANDIR) + install -m 644 chg.1 $(DESTDIR)$(MANDIR) + +.PHONY: serve +serve: + [ -d $(CHGSOCKDIR) ] || ( umask 077; mkdir $(CHGSOCKDIR) ) + $(HG) serve --cwd / --cmdserver chgunix \ + --address $(CHGSOCKNAME) \ + --config extensions.chgserver= \ + --config cmdserver.log=/dev/stderr + +.PHONY: clean +clean: + $(RM) $(OBJS) + +.PHONY: distclean +distclean: + $(RM) $(OBJS) $(TARGET)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/chg/README Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,30 @@ +cHg +=== + +A fast client for Mercurial command server running on Unix. + +Install: + + $ make + $ make install + +Usage: + + $ chg help # show help of Mercurial + $ alias hg=chg # replace hg command + $ chg --kill-chg-daemon # terminate background server + +Environment variables: + +Although cHg tries to update environment variables, some of them cannot be +changed after spawning the server. The following variables are specially +handled: + + * configuration files are reloaded automatically by default. + * CHGHG or HG specifies the path to the hg executable spawned as the + background command server. + +The following variables are available for testing: + + * CHGDEBUG enables debug messages. + * CHGSOCKNAME specifies the socket path of the background cmdserver.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/chg/chg.1 Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,41 @@ +.\" Hey, EMACS: -*- nroff -*- +.\" First parameter, NAME, should be all caps +.\" Second parameter, SECTION, should be 1-8, maybe w/ subsection +.\" other parameters are allowed: see man(7), man(1) +.TH CHG 1 "March 3, 2013" +.\" Please adjust this date whenever revising the manpage. +.\" +.\" Some roff macros, for reference: +.\" .nh disable hyphenation +.\" .hy enable hyphenation +.\" .ad l left justify +.\" .ad b justify to both left and right margins +.\" .nf disable filling +.\" .fi enable filling +.\" .br insert line break +.\" .sp <n> insert n+1 empty lines +.\" for manpage-specific macros, see man(7) +.SH NAME +chg \- a fast client for Mercurial command server +.SH SYNOPSIS +.B chg +.IR command " [" options "] [" arguments "]..." +.br +.SH DESCRIPTION +The +.B chg +command is the wrapper for +.B hg +command. +It uses the Mercurial command server to reduce start-up overhead. +.SH OPTIONS +This program accepts the same command line syntax as the +.B hg +command. Additionally it accepts the following options. +.TP +.B \-\-kill\-chg\-daemon +Terminate the background command servers. +.SH SEE ALSO +.BR hg (1), +.SH AUTHOR +Written by Yuya Nishihara <yuya@tcha.org>.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/chg/chg.c Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,589 @@ +/* + * A fast client for Mercurial command server + * + * Copyright (c) 2011 Yuya Nishihara <yuya@tcha.org> + * + * This software may be used and distributed according to the terms of the + * GNU General Public License version 2 or any later version. + */ + +#include <assert.h> +#include <errno.h> +#include <fcntl.h> +#include <signal.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <sys/file.h> +#include <sys/stat.h> +#include <sys/types.h> +#include <sys/un.h> +#include <sys/wait.h> +#include <time.h> +#include <unistd.h> + +#include "hgclient.h" +#include "util.h" + +#ifndef UNIX_PATH_MAX +#define UNIX_PATH_MAX (sizeof(((struct sockaddr_un *)NULL)->sun_path)) +#endif + +struct cmdserveropts { + char sockname[UNIX_PATH_MAX]; + char redirectsockname[UNIX_PATH_MAX]; + char lockfile[UNIX_PATH_MAX]; + size_t argsize; + const char **args; + int lockfd; + int sockdirfd; +}; + +static void initcmdserveropts(struct cmdserveropts *opts) { + memset(opts, 0, sizeof(struct cmdserveropts)); + opts->lockfd = -1; + opts->sockdirfd = AT_FDCWD; +} + +static void freecmdserveropts(struct cmdserveropts *opts) { + free(opts->args); + opts->args = NULL; + opts->argsize = 0; + assert(opts->lockfd == -1 && "should be closed by unlockcmdserver()"); + if (opts->sockdirfd != AT_FDCWD) { + close(opts->sockdirfd); + opts->sockdirfd = AT_FDCWD; + } +} + +/* + * Test if an argument is a sensitive flag that should be passed to the server. + * Return 0 if not, otherwise the number of arguments starting from the current + * one that should be passed to the server. 
+ */ +static size_t testsensitiveflag(const char *arg) +{ + static const struct { + const char *name; + size_t narg; + } flags[] = { + {"--config", 1}, + {"--cwd", 1}, + {"--repo", 1}, + {"--repository", 1}, + {"--traceback", 0}, + {"-R", 1}, + }; + size_t i; + for (i = 0; i < sizeof(flags) / sizeof(flags[0]); ++i) { + size_t len = strlen(flags[i].name); + size_t narg = flags[i].narg; + if (memcmp(arg, flags[i].name, len) == 0) { + if (arg[len] == '\0') { + /* --flag (value) */ + return narg + 1; + } else if (arg[len] == '=' && narg > 0) { + /* --flag=value */ + return 1; + } else if (flags[i].name[1] != '-') { + /* short flag */ + return 1; + } + } + } + return 0; +} + +/* + * Parse argv[] and put sensitive flags to opts->args + */ +static void setcmdserverargs(struct cmdserveropts *opts, + int argc, const char *argv[]) +{ + size_t i, step; + opts->argsize = 0; + for (i = 0, step = 1; i < (size_t)argc; i += step, step = 1) { + if (!argv[i]) + continue; /* pass clang-analyse */ + if (strcmp(argv[i], "--") == 0) + break; + size_t n = testsensitiveflag(argv[i]); + if (n == 0 || i + n > (size_t)argc) + continue; + opts->args = reallocx(opts->args, + (n + opts->argsize) * sizeof(char *)); + memcpy(opts->args + opts->argsize, argv + i, + sizeof(char *) * n); + opts->argsize += n; + step = n; + } +} + +static void preparesockdir(const char *sockdir) +{ + int r; + r = mkdir(sockdir, 0700); + if (r < 0 && errno != EEXIST) + abortmsgerrno("cannot create sockdir %s", sockdir); + + struct stat st; + r = lstat(sockdir, &st); + if (r < 0) + abortmsgerrno("cannot stat %s", sockdir); + if (!S_ISDIR(st.st_mode)) + abortmsg("cannot create sockdir %s (file exists)", sockdir); + if (st.st_uid != geteuid() || st.st_mode & 0077) + abortmsg("insecure sockdir %s", sockdir); +} + +static void setcmdserveropts(struct cmdserveropts *opts) +{ + int r; + char sockdir[UNIX_PATH_MAX]; + const char *envsockname = getenv("CHGSOCKNAME"); + if (!envsockname) { + /* by default, put socket file in 
secure directory + * (permission of socket file may be ignored on some Unices) */ + const char *tmpdir = getenv("TMPDIR"); + if (!tmpdir) + tmpdir = "/tmp"; + r = snprintf(sockdir, sizeof(sockdir), "%s/chg%d", + tmpdir, geteuid()); + if (r < 0 || (size_t)r >= sizeof(sockdir)) + abortmsg("too long TMPDIR (r = %d)", r); + preparesockdir(sockdir); + } + + const char *basename = (envsockname) ? envsockname : sockdir; + const char *sockfmt = (envsockname) ? "%s" : "%s/server"; + const char *lockfmt = (envsockname) ? "%s.lock" : "%s/lock"; + r = snprintf(opts->sockname, sizeof(opts->sockname), sockfmt, basename); + if (r < 0 || (size_t)r >= sizeof(opts->sockname)) + abortmsg("too long TMPDIR or CHGSOCKNAME (r = %d)", r); + r = snprintf(opts->lockfile, sizeof(opts->lockfile), lockfmt, basename); + if (r < 0 || (size_t)r >= sizeof(opts->lockfile)) + abortmsg("too long TMPDIR or CHGSOCKNAME (r = %d)", r); +} + +/* + * Acquire a file lock that indicates a client is trying to start and connect + * to a server, before executing a command. The lock is released upon exit or + * explicit unlock. Will block if the lock is held by another process. + */ +static void lockcmdserver(struct cmdserveropts *opts) +{ + if (opts->lockfd == -1) { + opts->lockfd = open(opts->lockfile, + O_RDWR | O_CREAT | O_NOFOLLOW, 0600); + if (opts->lockfd == -1) + abortmsgerrno("cannot create lock file %s", + opts->lockfile); + fsetcloexec(opts->lockfd); + } + int r = flock(opts->lockfd, LOCK_EX); + if (r == -1) + abortmsgerrno("cannot acquire lock"); +} + +/* + * Release the file lock held by calling lockcmdserver. Will do nothing if + * lockcmdserver is not called. 
+ */ +static void unlockcmdserver(struct cmdserveropts *opts) +{ + if (opts->lockfd == -1) + return; + flock(opts->lockfd, LOCK_UN); + close(opts->lockfd); + opts->lockfd = -1; +} + +static const char *gethgcmd(void) +{ + static const char *hgcmd = NULL; + if (!hgcmd) { + hgcmd = getenv("CHGHG"); + if (!hgcmd || hgcmd[0] == '\0') + hgcmd = getenv("HG"); + if (!hgcmd || hgcmd[0] == '\0') +#ifdef HGPATH + hgcmd = (HGPATH); +#else + hgcmd = "hg"; +#endif + } + return hgcmd; +} + +static void execcmdserver(const struct cmdserveropts *opts) +{ + const char *hgcmd = gethgcmd(); + + const char *baseargv[] = { + hgcmd, + "serve", + "--cmdserver", "chgunix", + "--address", opts->sockname, + "--daemon-postexec", "chdir:/", + "--config", "extensions.chgserver=", + }; + size_t baseargvsize = sizeof(baseargv) / sizeof(baseargv[0]); + size_t argsize = baseargvsize + opts->argsize + 1; + + const char **argv = mallocx(sizeof(char *) * argsize); + memcpy(argv, baseargv, sizeof(baseargv)); + memcpy(argv + baseargvsize, opts->args, sizeof(char *) * opts->argsize); + argv[argsize - 1] = NULL; + + if (putenv("CHGINTERNALMARK=") != 0) + abortmsgerrno("failed to putenv"); + if (execvp(hgcmd, (char **)argv) < 0) + abortmsgerrno("failed to exec cmdserver"); + free(argv); +} + +/* Retry until we can connect to the server. Give up after some time. 
*/ +static hgclient_t *retryconnectcmdserver(struct cmdserveropts *opts, pid_t pid) +{ + static const struct timespec sleepreq = {0, 10 * 1000000}; + int pst = 0; + + debugmsg("try connect to %s repeatedly", opts->sockname); + for (unsigned int i = 0; i < 10 * 100; i++) { + hgclient_t *hgc = hgc_open(opts->sockname); + if (hgc) + return hgc; + + if (pid > 0) { + /* collect zombie if child process fails to start */ + int r = waitpid(pid, &pst, WNOHANG); + if (r != 0) + goto cleanup; + } + + nanosleep(&sleepreq, NULL); + } + + abortmsg("timed out waiting for cmdserver %s", opts->sockname); + return NULL; + +cleanup: + if (WIFEXITED(pst)) { + if (WEXITSTATUS(pst) == 0) + abortmsg("could not connect to cmdserver " + "(exited with status 0)"); + debugmsg("cmdserver exited with status %d", WEXITSTATUS(pst)); + exit(WEXITSTATUS(pst)); + } else if (WIFSIGNALED(pst)) { + abortmsg("cmdserver killed by signal %d", WTERMSIG(pst)); + } else { + abortmsg("error while waiting for cmdserver"); + } + return NULL; +} + +/* Connect to a cmdserver. Will start a new server on demand. */ +static hgclient_t *connectcmdserver(struct cmdserveropts *opts) +{ + const char *sockname = opts->redirectsockname[0] ? + opts->redirectsockname : opts->sockname; + debugmsg("try connect to %s", sockname); + hgclient_t *hgc = hgc_open(sockname); + if (hgc) + return hgc; + + lockcmdserver(opts); + hgc = hgc_open(sockname); + if (hgc) { + unlockcmdserver(opts); + debugmsg("cmdserver is started by another process"); + return hgc; + } + + /* prevent us from being connected to an outdated server: we were + * told by a server to redirect to opts->redirectsockname and that + * address does not work. we do not want to connect to the server + * again because it will probably tell us the same thing. 
*/ + if (sockname == opts->redirectsockname) + unlink(opts->sockname); + + debugmsg("start cmdserver at %s", opts->sockname); + + pid_t pid = fork(); + if (pid < 0) + abortmsg("failed to fork cmdserver process"); + if (pid == 0) { + execcmdserver(opts); + } else { + hgc = retryconnectcmdserver(opts, pid); + } + + unlockcmdserver(opts); + return hgc; +} + +static void killcmdserver(const struct cmdserveropts *opts) +{ + /* resolve config hash */ + char *resolvedpath = realpath(opts->sockname, NULL); + if (resolvedpath) { + unlink(resolvedpath); + free(resolvedpath); + } +} + +static pid_t peerpid = 0; + +static void forwardsignal(int sig) +{ + assert(peerpid > 0); + if (kill(peerpid, sig) < 0) + abortmsgerrno("cannot kill %d", peerpid); + debugmsg("forward signal %d", sig); +} + +static void handlestopsignal(int sig) +{ + sigset_t unblockset, oldset; + struct sigaction sa, oldsa; + if (sigemptyset(&unblockset) < 0) + goto error; + if (sigaddset(&unblockset, sig) < 0) + goto error; + memset(&sa, 0, sizeof(sa)); + sa.sa_handler = SIG_DFL; + sa.sa_flags = SA_RESTART; + if (sigemptyset(&sa.sa_mask) < 0) + goto error; + + forwardsignal(sig); + if (raise(sig) < 0) /* resend to self */ + goto error; + if (sigaction(sig, &sa, &oldsa) < 0) + goto error; + if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) + goto error; + /* resent signal will be handled before sigprocmask() returns */ + if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) + goto error; + if (sigaction(sig, &oldsa, NULL) < 0) + goto error; + return; + +error: + abortmsgerrno("failed to handle stop signal"); +} + +static void setupsignalhandler(pid_t pid) +{ + if (pid <= 0) + return; + peerpid = pid; + + struct sigaction sa; + memset(&sa, 0, sizeof(sa)); + sa.sa_handler = forwardsignal; + sa.sa_flags = SA_RESTART; + if (sigemptyset(&sa.sa_mask) < 0) + goto error; + + if (sigaction(SIGHUP, &sa, NULL) < 0) + goto error; + if (sigaction(SIGINT, &sa, NULL) < 0) + goto error; + + /* terminate frontend by double 
SIGTERM in case of server freeze */ + sa.sa_flags |= SA_RESETHAND; + if (sigaction(SIGTERM, &sa, NULL) < 0) + goto error; + + /* propagate job control requests to worker */ + sa.sa_handler = forwardsignal; + sa.sa_flags = SA_RESTART; + if (sigaction(SIGCONT, &sa, NULL) < 0) + goto error; + sa.sa_handler = handlestopsignal; + sa.sa_flags = SA_RESTART; + if (sigaction(SIGTSTP, &sa, NULL) < 0) + goto error; + + return; + +error: + abortmsgerrno("failed to set up signal handlers"); +} + +/* This implementation is based on hgext/pager.py (pre 369741ef7253) */ +static void setuppager(hgclient_t *hgc, const char *const args[], + size_t argsize) +{ + const char *pagercmd = hgc_getpager(hgc, args, argsize); + if (!pagercmd) + return; + + int pipefds[2]; + if (pipe(pipefds) < 0) + return; + pid_t pid = fork(); + if (pid < 0) + goto error; + if (pid == 0) { + close(pipefds[0]); + if (dup2(pipefds[1], fileno(stdout)) < 0) + goto error; + if (isatty(fileno(stderr))) { + if (dup2(pipefds[1], fileno(stderr)) < 0) + goto error; + } + close(pipefds[1]); + hgc_attachio(hgc); /* reattach to pager */ + return; + } else { + dup2(pipefds[0], fileno(stdin)); + close(pipefds[0]); + close(pipefds[1]); + + int r = execlp("/bin/sh", "/bin/sh", "-c", pagercmd, NULL); + if (r < 0) { + abortmsgerrno("cannot start pager '%s'", pagercmd); + } + return; + } + +error: + close(pipefds[0]); + close(pipefds[1]); + abortmsgerrno("failed to prepare pager"); +} + +/* Run instructions sent from the server like unlink and set redirect path + * Return 1 if reconnect is needed, otherwise 0 */ +static int runinstructions(struct cmdserveropts *opts, const char **insts) +{ + int needreconnect = 0; + if (!insts) + return needreconnect; + + assert(insts); + opts->redirectsockname[0] = '\0'; + const char **pinst; + for (pinst = insts; *pinst; pinst++) { + debugmsg("instruction: %s", *pinst); + if (strncmp(*pinst, "unlink ", 7) == 0) { + unlink(*pinst + 7); + } else if (strncmp(*pinst, "redirect ", 9) == 0) { + int 
r = snprintf(opts->redirectsockname, + sizeof(opts->redirectsockname), + "%s", *pinst + 9); + if (r < 0 || r >= (int)sizeof(opts->redirectsockname)) + abortmsg("redirect path is too long (%d)", r); + needreconnect = 1; + } else if (strncmp(*pinst, "exit ", 5) == 0) { + int n = 0; + if (sscanf(*pinst + 5, "%d", &n) != 1) + abortmsg("cannot read the exit code"); + exit(n); + } else if (strcmp(*pinst, "reconnect") == 0) { + needreconnect = 1; + } else { + abortmsg("unknown instruction: %s", *pinst); + } + } + return needreconnect; +} + +/* + * Test whether the command is unsupported or not. This is not designed to + * cover all cases. But it's fast, does not depend on the server and does + * not return false positives. + */ +static int isunsupported(int argc, const char *argv[]) +{ + enum { + SERVE = 1, + DAEMON = 2, + SERVEDAEMON = SERVE | DAEMON, + TIME = 4, + }; + unsigned int state = 0; + int i; + for (i = 0; i < argc; ++i) { + if (strcmp(argv[i], "--") == 0) + break; + if (i == 0 && strcmp("serve", argv[i]) == 0) + state |= SERVE; + else if (strcmp("-d", argv[i]) == 0 || + strcmp("--daemon", argv[i]) == 0) + state |= DAEMON; + else if (strcmp("--time", argv[i]) == 0) + state |= TIME; + } + return (state & TIME) == TIME || + (state & SERVEDAEMON) == SERVEDAEMON; +} + +static void execoriginalhg(const char *argv[]) +{ + debugmsg("execute original hg"); + if (execvp(gethgcmd(), (char **)argv) < 0) + abortmsgerrno("failed to exec original hg"); +} + +int main(int argc, const char *argv[], const char *envp[]) +{ + if (getenv("CHGDEBUG")) + enabledebugmsg(); + + if (!getenv("HGPLAIN") && isatty(fileno(stderr))) + enablecolor(); + + if (getenv("CHGINTERNALMARK")) + abortmsg("chg started by chg detected.\n" + "Please make sure ${HG:-hg} is not a symlink or " + "wrapper to chg. 
Alternatively, set $CHGHG to the " + "path of real hg."); + + if (isunsupported(argc - 1, argv + 1)) + execoriginalhg(argv); + + struct cmdserveropts opts; + initcmdserveropts(&opts); + setcmdserveropts(&opts); + setcmdserverargs(&opts, argc, argv); + + if (argc == 2) { + if (strcmp(argv[1], "--kill-chg-daemon") == 0) { + killcmdserver(&opts); + return 0; + } + } + + hgclient_t *hgc; + size_t retry = 0; + while (1) { + hgc = connectcmdserver(&opts); + if (!hgc) + abortmsg("cannot open hg client"); + hgc_setenv(hgc, envp); + const char **insts = hgc_validate(hgc, argv + 1, argc - 1); + int needreconnect = runinstructions(&opts, insts); + free(insts); + if (!needreconnect) + break; + hgc_close(hgc); + if (++retry > 10) + abortmsg("too many redirections.\n" + "Please make sure %s is not a wrapper which " + "changes sensitive environment variables " + "before executing hg. If you have to use a " + "wrapper, wrap chg instead of hg.", + gethgcmd()); + } + + setupsignalhandler(hgc_peerpid(hgc)); + setuppager(hgc, argv + 1, argc - 1); + int exitcode = hgc_runcommand(hgc, argv + 1, argc - 1); + hgc_close(hgc); + freecmdserveropts(&opts); + return exitcode; +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/chg/hgclient.c Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,576 @@ +/* + * A command server client that uses Unix domain socket + * + * Copyright (c) 2011 Yuya Nishihara <yuya@tcha.org> + * + * This software may be used and distributed according to the terms of the + * GNU General Public License version 2 or any later version. + */ + +#include <arpa/inet.h> /* for ntohl(), htonl() */ +#include <assert.h> +#include <ctype.h> +#include <errno.h> +#include <fcntl.h> +#include <signal.h> +#include <stdint.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <sys/socket.h> +#include <sys/stat.h> +#include <sys/un.h> +#include <unistd.h> + +#include "hgclient.h" +#include "util.h" + +enum { + CAP_GETENCODING = 0x0001, + CAP_RUNCOMMAND = 0x0002, + /* cHg extension: */ + CAP_ATTACHIO = 0x0100, + CAP_CHDIR = 0x0200, + CAP_GETPAGER = 0x0400, + CAP_SETENV = 0x0800, + CAP_SETUMASK = 0x1000, + CAP_VALIDATE = 0x2000, +}; + +typedef struct { + const char *name; + unsigned int flag; +} cappair_t; + +static const cappair_t captable[] = { + {"getencoding", CAP_GETENCODING}, + {"runcommand", CAP_RUNCOMMAND}, + {"attachio", CAP_ATTACHIO}, + {"chdir", CAP_CHDIR}, + {"getpager", CAP_GETPAGER}, + {"setenv", CAP_SETENV}, + {"setumask", CAP_SETUMASK}, + {"validate", CAP_VALIDATE}, + {NULL, 0}, /* terminator */ +}; + +typedef struct { + char ch; + char *data; + size_t maxdatasize; + size_t datasize; +} context_t; + +struct hgclient_tag_ { + int sockfd; + pid_t pid; + context_t ctx; + unsigned int capflags; +}; + +static const size_t defaultdatasize = 4096; + +static void initcontext(context_t *ctx) +{ + ctx->ch = '\0'; + ctx->data = malloc(defaultdatasize); + ctx->maxdatasize = (ctx->data) ? 
defaultdatasize : 0; + ctx->datasize = 0; + debugmsg("initialize context buffer with size %zu", ctx->maxdatasize); +} + +static void enlargecontext(context_t *ctx, size_t newsize) +{ + if (newsize <= ctx->maxdatasize) + return; + + newsize = defaultdatasize + * ((newsize + defaultdatasize - 1) / defaultdatasize); + ctx->data = reallocx(ctx->data, newsize); + ctx->maxdatasize = newsize; + debugmsg("enlarge context buffer to %zu", ctx->maxdatasize); +} + +static void freecontext(context_t *ctx) +{ + debugmsg("free context buffer"); + free(ctx->data); + ctx->data = NULL; + ctx->maxdatasize = 0; + ctx->datasize = 0; +} + +/* Read channeled response from cmdserver */ +static void readchannel(hgclient_t *hgc) +{ + assert(hgc); + + ssize_t rsize = recv(hgc->sockfd, &hgc->ctx.ch, sizeof(hgc->ctx.ch), 0); + if (rsize != sizeof(hgc->ctx.ch)) { + /* server would have exception and traceback would be printed */ + debugmsg("failed to read channel"); + exit(255); + } + + uint32_t datasize_n; + rsize = recv(hgc->sockfd, &datasize_n, sizeof(datasize_n), 0); + if (rsize != sizeof(datasize_n)) + abortmsg("failed to read data size"); + + /* datasize denotes the maximum size to write if input request */ + hgc->ctx.datasize = ntohl(datasize_n); + enlargecontext(&hgc->ctx, hgc->ctx.datasize); + + if (isupper(hgc->ctx.ch) && hgc->ctx.ch != 'S') + return; /* assumes input request */ + + size_t cursize = 0; + while (cursize < hgc->ctx.datasize) { + rsize = recv(hgc->sockfd, hgc->ctx.data + cursize, + hgc->ctx.datasize - cursize, 0); + if (rsize < 0) + abortmsg("failed to read data block"); + cursize += rsize; + } +} + +static void sendall(int sockfd, const void *data, size_t datasize) +{ + const char *p = data; + const char *const endp = p + datasize; + while (p < endp) { + ssize_t r = send(sockfd, p, endp - p, 0); + if (r < 0) + abortmsgerrno("cannot communicate"); + p += r; + } +} + +/* Write length-data block to cmdserver */ +static void writeblock(const hgclient_t *hgc) +{ + 
assert(hgc); + + const uint32_t datasize_n = htonl(hgc->ctx.datasize); + sendall(hgc->sockfd, &datasize_n, sizeof(datasize_n)); + + sendall(hgc->sockfd, hgc->ctx.data, hgc->ctx.datasize); +} + +static void writeblockrequest(const hgclient_t *hgc, const char *chcmd) +{ + debugmsg("request %s, block size %zu", chcmd, hgc->ctx.datasize); + + char buf[strlen(chcmd) + 1]; + memcpy(buf, chcmd, sizeof(buf) - 1); + buf[sizeof(buf) - 1] = '\n'; + sendall(hgc->sockfd, buf, sizeof(buf)); + + writeblock(hgc); +} + +/* Build '\0'-separated list of args. argsize < 0 denotes that args are + * terminated by NULL. */ +static void packcmdargs(context_t *ctx, const char *const args[], + ssize_t argsize) +{ + ctx->datasize = 0; + const char *const *const end = (argsize >= 0) ? args + argsize : NULL; + for (const char *const *it = args; it != end && *it; ++it) { + const size_t n = strlen(*it) + 1; /* include '\0' */ + enlargecontext(ctx, ctx->datasize + n); + memcpy(ctx->data + ctx->datasize, *it, n); + ctx->datasize += n; + } + + if (ctx->datasize > 0) + --ctx->datasize; /* strip last '\0' */ +} + +/* Extract '\0'-separated list of args to new buffer, terminated by NULL */ +static const char **unpackcmdargsnul(const context_t *ctx) +{ + const char **args = NULL; + size_t nargs = 0, maxnargs = 0; + const char *s = ctx->data; + const char *e = ctx->data + ctx->datasize; + for (;;) { + if (nargs + 1 >= maxnargs) { /* including last NULL */ + maxnargs += 256; + args = reallocx(args, maxnargs * sizeof(args[0])); + } + args[nargs] = s; + nargs++; + s = memchr(s, '\0', e - s); + if (!s) + break; + s++; + } + args[nargs] = NULL; + return args; +} + +static void handlereadrequest(hgclient_t *hgc) +{ + context_t *ctx = &hgc->ctx; + size_t r = fread(ctx->data, sizeof(ctx->data[0]), ctx->datasize, stdin); + ctx->datasize = r; + writeblock(hgc); +} + +/* Read single-line */ +static void handlereadlinerequest(hgclient_t *hgc) +{ + context_t *ctx = &hgc->ctx; + if (!fgets(ctx->data, ctx->datasize, 
stdin)) + ctx->data[0] = '\0'; + ctx->datasize = strlen(ctx->data); + writeblock(hgc); +} + +/* Execute the requested command and write exit code */ +static void handlesystemrequest(hgclient_t *hgc) +{ + context_t *ctx = &hgc->ctx; + enlargecontext(ctx, ctx->datasize + 1); + ctx->data[ctx->datasize] = '\0'; /* terminate last string */ + + const char **args = unpackcmdargsnul(ctx); + if (!args[0] || !args[1]) + abortmsg("missing command or cwd in system request"); + debugmsg("run '%s' at '%s'", args[0], args[1]); + int32_t r = runshellcmd(args[0], args + 2, args[1]); + free(args); + + uint32_t r_n = htonl(r); + memcpy(ctx->data, &r_n, sizeof(r_n)); + ctx->datasize = sizeof(r_n); + writeblock(hgc); +} + +/* Read response of command execution until receiving 'r'-esult */ +static void handleresponse(hgclient_t *hgc) +{ + for (;;) { + readchannel(hgc); + context_t *ctx = &hgc->ctx; + debugmsg("response read from channel %c, size %zu", + ctx->ch, ctx->datasize); + switch (ctx->ch) { + case 'o': + fwrite(ctx->data, sizeof(ctx->data[0]), ctx->datasize, + stdout); + break; + case 'e': + fwrite(ctx->data, sizeof(ctx->data[0]), ctx->datasize, + stderr); + break; + case 'd': + /* assumes last char is '\n' */ + ctx->data[ctx->datasize - 1] = '\0'; + debugmsg("server: %s", ctx->data); + break; + case 'r': + return; + case 'I': + handlereadrequest(hgc); + break; + case 'L': + handlereadlinerequest(hgc); + break; + case 'S': + handlesystemrequest(hgc); + break; + default: + if (isupper(ctx->ch)) + abortmsg("cannot handle response (ch = %c)", + ctx->ch); + } + } +} + +static unsigned int parsecapabilities(const char *s, const char *e) +{ + unsigned int flags = 0; + while (s < e) { + const char *t = strchr(s, ' '); + if (!t || t > e) + t = e; + const cappair_t *cap; + for (cap = captable; cap->flag; ++cap) { + size_t n = t - s; + if (strncmp(s, cap->name, n) == 0 && + strlen(cap->name) == n) { + flags |= cap->flag; + break; + } + } + s = t + 1; + } + return flags; +} + +static void 
readhello(hgclient_t *hgc) +{ + readchannel(hgc); + context_t *ctx = &hgc->ctx; + if (ctx->ch != 'o') { + char ch = ctx->ch; + if (ch == 'e') { + /* write early error and will exit */ + fwrite(ctx->data, sizeof(ctx->data[0]), ctx->datasize, + stderr); + handleresponse(hgc); + } + abortmsg("unexpected channel of hello message (ch = %c)", ch); + } + enlargecontext(ctx, ctx->datasize + 1); + ctx->data[ctx->datasize] = '\0'; + debugmsg("hello received: %s (size = %zu)", ctx->data, ctx->datasize); + + const char *s = ctx->data; + const char *const dataend = ctx->data + ctx->datasize; + while (s < dataend) { + const char *t = strchr(s, ':'); + if (!t || t[1] != ' ') + break; + const char *u = strchr(t + 2, '\n'); + if (!u) + u = dataend; + if (strncmp(s, "capabilities:", t - s + 1) == 0) { + hgc->capflags = parsecapabilities(t + 2, u); + } else if (strncmp(s, "pid:", t - s + 1) == 0) { + hgc->pid = strtol(t + 2, NULL, 10); + } + s = u + 1; + } + debugmsg("capflags=0x%04x, pid=%d", hgc->capflags, hgc->pid); +} + +static void attachio(hgclient_t *hgc) +{ + debugmsg("request attachio"); + static const char chcmd[] = "attachio\n"; + sendall(hgc->sockfd, chcmd, sizeof(chcmd) - 1); + readchannel(hgc); + context_t *ctx = &hgc->ctx; + if (ctx->ch != 'I') + abortmsg("unexpected response for attachio (ch = %c)", ctx->ch); + + static const int fds[3] = {STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO}; + struct msghdr msgh; + memset(&msgh, 0, sizeof(msgh)); + struct iovec iov = {ctx->data, ctx->datasize}; /* dummy payload */ + msgh.msg_iov = &iov; + msgh.msg_iovlen = 1; + char fdbuf[CMSG_SPACE(sizeof(fds))]; + msgh.msg_control = fdbuf; + msgh.msg_controllen = sizeof(fdbuf); + struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msgh); + cmsg->cmsg_level = SOL_SOCKET; + cmsg->cmsg_type = SCM_RIGHTS; + cmsg->cmsg_len = CMSG_LEN(sizeof(fds)); + memcpy(CMSG_DATA(cmsg), fds, sizeof(fds)); + msgh.msg_controllen = cmsg->cmsg_len; + ssize_t r = sendmsg(hgc->sockfd, &msgh, 0); + if (r < 0) + 
abortmsgerrno("sendmsg failed"); + + handleresponse(hgc); + int32_t n; + if (ctx->datasize != sizeof(n)) + abortmsg("unexpected size of attachio result"); + memcpy(&n, ctx->data, sizeof(n)); + n = ntohl(n); + if (n != sizeof(fds) / sizeof(fds[0])) + abortmsg("failed to send fds (n = %d)", n); +} + +static void chdirtocwd(hgclient_t *hgc) +{ + if (!getcwd(hgc->ctx.data, hgc->ctx.maxdatasize)) + abortmsgerrno("failed to getcwd"); + hgc->ctx.datasize = strlen(hgc->ctx.data); + writeblockrequest(hgc, "chdir"); +} + +static void forwardumask(hgclient_t *hgc) +{ + mode_t mask = umask(0); + umask(mask); + + static const char command[] = "setumask\n"; + sendall(hgc->sockfd, command, sizeof(command) - 1); + uint32_t data = htonl(mask); + sendall(hgc->sockfd, &data, sizeof(data)); +} + +/*! + * Open connection to per-user cmdserver + * + * If no background server running, returns NULL. + */ +hgclient_t *hgc_open(const char *sockname) +{ + int fd = socket(AF_UNIX, SOCK_STREAM, 0); + if (fd < 0) + abortmsgerrno("cannot create socket"); + + /* don't keep fd on fork(), so that it can be closed when the parent + * process gets terminated. 
*/ + fsetcloexec(fd); + + struct sockaddr_un addr; + addr.sun_family = AF_UNIX; + strncpy(addr.sun_path, sockname, sizeof(addr.sun_path)); + addr.sun_path[sizeof(addr.sun_path) - 1] = '\0'; + + int r = connect(fd, (struct sockaddr *)&addr, sizeof(addr)); + if (r < 0) { + close(fd); + if (errno == ENOENT || errno == ECONNREFUSED) + return NULL; + abortmsgerrno("cannot connect to %s", addr.sun_path); + } + debugmsg("connected to %s", addr.sun_path); + + hgclient_t *hgc = mallocx(sizeof(hgclient_t)); + memset(hgc, 0, sizeof(*hgc)); + hgc->sockfd = fd; + initcontext(&hgc->ctx); + + readhello(hgc); + if (!(hgc->capflags & CAP_RUNCOMMAND)) + abortmsg("insufficient capability: runcommand"); + if (hgc->capflags & CAP_ATTACHIO) + attachio(hgc); + if (hgc->capflags & CAP_CHDIR) + chdirtocwd(hgc); + if (hgc->capflags & CAP_SETUMASK) + forwardumask(hgc); + + return hgc; +} + +/*! + * Close connection and free allocated memory + */ +void hgc_close(hgclient_t *hgc) +{ + assert(hgc); + freecontext(&hgc->ctx); + close(hgc->sockfd); + free(hgc); +} + +pid_t hgc_peerpid(const hgclient_t *hgc) +{ + assert(hgc); + return hgc->pid; +} + +/*! + * Send command line arguments to let the server load the repo config and check + * whether it can process our request directly or not. + * Make sure hgc_setenv is called before calling this. + * + * @return - NULL, the server believes it can handle our request, or does not + * support "validate" command. + * - a list of strings, the server probably cannot handle our request + * and it sent instructions telling us what to do next. See + * chgserver.py for possible instruction formats. + * the list should be freed by the caller. + * the last string is guaranteed to be NULL. 
+ */ +const char **hgc_validate(hgclient_t *hgc, const char *const args[], + size_t argsize) +{ + assert(hgc); + if (!(hgc->capflags & CAP_VALIDATE)) + return NULL; + + packcmdargs(&hgc->ctx, args, argsize); + writeblockrequest(hgc, "validate"); + handleresponse(hgc); + + /* the server returns '\0' if it can handle our request */ + if (hgc->ctx.datasize <= 1) + return NULL; + + /* make sure the buffer is '\0' terminated */ + enlargecontext(&hgc->ctx, hgc->ctx.datasize + 1); + hgc->ctx.data[hgc->ctx.datasize] = '\0'; + return unpackcmdargsnul(&hgc->ctx); +} + +/*! + * Execute the specified Mercurial command + * + * @return result code + */ +int hgc_runcommand(hgclient_t *hgc, const char *const args[], size_t argsize) +{ + assert(hgc); + + packcmdargs(&hgc->ctx, args, argsize); + writeblockrequest(hgc, "runcommand"); + handleresponse(hgc); + + int32_t exitcode_n; + if (hgc->ctx.datasize != sizeof(exitcode_n)) { + abortmsg("unexpected size of exitcode"); + } + memcpy(&exitcode_n, hgc->ctx.data, sizeof(exitcode_n)); + return ntohl(exitcode_n); +} + +/*! + * (Re-)send client's stdio channels so that the server can access to tty + */ +void hgc_attachio(hgclient_t *hgc) +{ + assert(hgc); + if (!(hgc->capflags & CAP_ATTACHIO)) + return; + attachio(hgc); +} + +/*! + * Get pager command for the given Mercurial command args + * + * If no pager enabled, returns NULL. The return value becomes invalid + * once you run another request to hgc. + */ +const char *hgc_getpager(hgclient_t *hgc, const char *const args[], + size_t argsize) +{ + assert(hgc); + + if (!(hgc->capflags & CAP_GETPAGER)) + return NULL; + + packcmdargs(&hgc->ctx, args, argsize); + writeblockrequest(hgc, "getpager"); + handleresponse(hgc); + + if (hgc->ctx.datasize < 1 || hgc->ctx.data[0] == '\0') + return NULL; + enlargecontext(&hgc->ctx, hgc->ctx.datasize + 1); + hgc->ctx.data[hgc->ctx.datasize] = '\0'; + return hgc->ctx.data; +} + +/*! 
+ * Update server's environment variables + * + * @param envp list of environment variables in "NAME=VALUE" format, + * terminated by NULL. + */ +void hgc_setenv(hgclient_t *hgc, const char *const envp[]) +{ + assert(hgc && envp); + if (!(hgc->capflags & CAP_SETENV)) + return; + packcmdargs(&hgc->ctx, envp, /*argsize*/ -1); + writeblockrequest(hgc, "setenv"); +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/chg/hgclient.h Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,31 @@ +/* + * A command server client that uses Unix domain socket + * + * Copyright (c) 2011 Yuya Nishihara <yuya@tcha.org> + * + * This software may be used and distributed according to the terms of the + * GNU General Public License version 2 or any later version. + */ + +#ifndef HGCLIENT_H_ +#define HGCLIENT_H_ + +#include <sys/types.h> + +struct hgclient_tag_; +typedef struct hgclient_tag_ hgclient_t; + +hgclient_t *hgc_open(const char *sockname); +void hgc_close(hgclient_t *hgc); + +pid_t hgc_peerpid(const hgclient_t *hgc); + +const char **hgc_validate(hgclient_t *hgc, const char *const args[], + size_t argsize); +int hgc_runcommand(hgclient_t *hgc, const char *const args[], size_t argsize); +void hgc_attachio(hgclient_t *hgc); +const char *hgc_getpager(hgclient_t *hgc, const char *const args[], + size_t argsize); +void hgc_setenv(hgclient_t *hgc, const char *const envp[]); + +#endif /* HGCLIENT_H_ */
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/chg/util.c Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,190 @@ +/* + * Utility functions + * + * Copyright (c) 2011 Yuya Nishihara <yuya@tcha.org> + * + * This software may be used and distributed according to the terms of the + * GNU General Public License version 2 or any later version. + */ + +#include <errno.h> +#include <fcntl.h> +#include <signal.h> +#include <stdarg.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <sys/types.h> +#include <sys/wait.h> +#include <unistd.h> + +#include "util.h" + +static int colorenabled = 0; + +static inline void fsetcolor(FILE *fp, const char *code) +{ + if (!colorenabled) + return; + fprintf(fp, "\033[%sm", code); +} + +static void vabortmsgerrno(int no, const char *fmt, va_list args) +{ + fsetcolor(stderr, "1;31"); + fputs("chg: abort: ", stderr); + vfprintf(stderr, fmt, args); + if (no != 0) + fprintf(stderr, " (errno = %d, %s)", no, strerror(no)); + fsetcolor(stderr, ""); + fputc('\n', stderr); + exit(255); +} + +void abortmsg(const char *fmt, ...) +{ + va_list args; + va_start(args, fmt); + vabortmsgerrno(0, fmt, args); + va_end(args); +} + +void abortmsgerrno(const char *fmt, ...) +{ + int no = errno; + va_list args; + va_start(args, fmt); + vabortmsgerrno(no, fmt, args); + va_end(args); +} + +static int debugmsgenabled = 0; + +void enablecolor(void) +{ + colorenabled = 1; +} + +void enabledebugmsg(void) +{ + debugmsgenabled = 1; +} + +void debugmsg(const char *fmt, ...) 
+{ + if (!debugmsgenabled) + return; + + va_list args; + va_start(args, fmt); + fsetcolor(stderr, "1;30"); + fputs("chg: debug: ", stderr); + vfprintf(stderr, fmt, args); + fsetcolor(stderr, ""); + fputc('\n', stderr); + va_end(args); +} + +void fchdirx(int dirfd) +{ + int r = fchdir(dirfd); + if (r == -1) + abortmsgerrno("failed to fchdir"); +} + +void fsetcloexec(int fd) +{ + int flags = fcntl(fd, F_GETFD); + if (flags < 0) + abortmsgerrno("cannot get flags of fd %d", fd); + if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0) + abortmsgerrno("cannot set flags of fd %d", fd); +} + +void *mallocx(size_t size) +{ + void *result = malloc(size); + if (!result) + abortmsg("failed to malloc"); + return result; +} + +void *reallocx(void *ptr, size_t size) +{ + void *result = realloc(ptr, size); + if (!result) + abortmsg("failed to realloc"); + return result; +} + +/* + * Execute a shell command in mostly the same manner as system(), with the + * given environment variables, after chdir to the given cwd. Returns a status + * code compatible with the Python subprocess module. 
+ */ +int runshellcmd(const char *cmd, const char *envp[], const char *cwd) +{ + enum { F_SIGINT = 1, F_SIGQUIT = 2, F_SIGMASK = 4, F_WAITPID = 8 }; + unsigned int doneflags = 0; + int status = 0; + struct sigaction newsa, oldsaint, oldsaquit; + sigset_t oldmask; + + /* block or mask signals just as system() does */ + memset(&newsa, 0, sizeof(newsa)); + newsa.sa_handler = SIG_IGN; + newsa.sa_flags = 0; + if (sigemptyset(&newsa.sa_mask) < 0) + goto done; + if (sigaction(SIGINT, &newsa, &oldsaint) < 0) + goto done; + doneflags |= F_SIGINT; + if (sigaction(SIGQUIT, &newsa, &oldsaquit) < 0) + goto done; + doneflags |= F_SIGQUIT; + + if (sigaddset(&newsa.sa_mask, SIGCHLD) < 0) + goto done; + if (sigprocmask(SIG_BLOCK, &newsa.sa_mask, &oldmask) < 0) + goto done; + doneflags |= F_SIGMASK; + + pid_t pid = fork(); + if (pid < 0) + goto done; + if (pid == 0) { + sigaction(SIGINT, &oldsaint, NULL); + sigaction(SIGQUIT, &oldsaquit, NULL); + sigprocmask(SIG_SETMASK, &oldmask, NULL); + if (cwd && chdir(cwd) < 0) + _exit(127); + const char *argv[] = {"sh", "-c", cmd, NULL}; + if (envp) { + execve("/bin/sh", (char **)argv, (char **)envp); + } else { + execv("/bin/sh", (char **)argv); + } + _exit(127); + } else { + if (waitpid(pid, &status, 0) < 0) + goto done; + doneflags |= F_WAITPID; + } + +done: + if (doneflags & F_SIGINT) + sigaction(SIGINT, &oldsaint, NULL); + if (doneflags & F_SIGQUIT) + sigaction(SIGQUIT, &oldsaquit, NULL); + if (doneflags & F_SIGMASK) + sigprocmask(SIG_SETMASK, &oldmask, NULL); + + /* no way to report other errors, use 127 (= shell termination) */ + if (!(doneflags & F_WAITPID)) + return 127; + if (WIFEXITED(status)) + return WEXITSTATUS(status); + if (WIFSIGNALED(status)) + return -WTERMSIG(status); + return 127; +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/chg/util.h Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,33 @@ +/* + * Utility functions + * + * Copyright (c) 2011 Yuya Nishihara <yuya@tcha.org> + * + * This software may be used and distributed according to the terms of the + * GNU General Public License version 2 or any later version. + */ + +#ifndef UTIL_H_ +#define UTIL_H_ + +#ifdef __GNUC__ +#define PRINTF_FORMAT_ __attribute__((format(printf, 1, 2))) +#else +#define PRINTF_FORMAT_ +#endif + +void abortmsg(const char *fmt, ...) PRINTF_FORMAT_; +void abortmsgerrno(const char *fmt, ...) PRINTF_FORMAT_; + +void enablecolor(void); +void enabledebugmsg(void); +void debugmsg(const char *fmt, ...) PRINTF_FORMAT_; + +void fchdirx(int dirfd); +void fsetcloexec(int fd); +void *mallocx(size_t size); +void *reallocx(void *ptr, size_t size); + +int runshellcmd(const char *cmd, const char *envp[], const char *cwd); + +#endif /* UTIL_H_ */
--- a/contrib/debugcmdserver.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/debugcmdserver.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,10 +7,12 @@ # $ ./hg serve --cmds pipe | ./contrib/debugcmdserver.py - # o, 52 -> 'capabilities: getencoding runcommand\nencoding: UTF-8' -import sys, struct +from __future__ import absolute_import, print_function +import struct +import sys if len(sys.argv) != 2: - print 'usage: debugcmdserver.py FILE' + print('usage: debugcmdserver.py FILE') sys.exit(1) outputfmt = '>cI'
--- a/contrib/debugshell.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/debugshell.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,9 +1,10 @@ # debugshell extension """a python shell with repo, changelog & manifest objects""" -import sys +from __future__ import absolute_import +import code import mercurial -import code +import sys from mercurial import ( cmdutil, demandimport,
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/docker/ubuntu-trusty Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,11 @@ +FROM ubuntu:trusty +RUN apt-get update && apt-get install -y \ + build-essential \ + debhelper \ + dh-python \ + devscripts \ + python \ + python-all-dev \ + python-docutils \ + zip \ + unzip
--- a/contrib/fixpax.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/fixpax.py Sat Apr 16 18:06:48 2016 -0500 @@ -11,7 +11,10 @@ *.mpkg/Contents/Packages/*.pkg/Contents/Archive.pax.gz """ -import sys, os, gzip +from __future__ import absolute_import, print_function +import gzip +import os +import sys def fixpax(iname, oname): i = gzip.GzipFile(iname) @@ -55,7 +58,7 @@ if __name__ == '__main__': for iname in sys.argv[1:]: - print 'fixing file ownership in %s' % iname + print('fixing file ownership in %s' % iname) oname = sys.argv[1] + '.tmp' fixpax(iname, oname) os.rename(oname, iname)
--- a/contrib/hg-ssh Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/hg-ssh Sat Apr 16 18:06:48 2016 -0500 @@ -52,7 +52,7 @@ orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?') try: cmdargv = shlex.split(orig_cmd) - except ValueError, e: + except ValueError as e: sys.stderr.write('Illegal command "%s": %s\n' % (orig_cmd, e)) sys.exit(255) @@ -77,7 +77,7 @@ sys.exit(255) def rejectpush(ui, **kwargs): - ui.warn("Permission denied\n") + ui.warn(("Permission denied\n")) # mercurial hooks use unix process conventions for hook return values # so a truthy return means failure return True
--- a/contrib/hgclient.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/hgclient.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,6 +1,20 @@ # A minimal client for Mercurial's command server -import os, sys, signal, struct, socket, subprocess, time, cStringIO +from __future__ import absolute_import, print_function +import os +import signal +import socket +import struct +import subprocess +import sys +import time + +try: + import cStringIO as io + stringio = io.StringIO +except ImportError: + import io + stringio = io.StringIO def connectpipe(path=None): cmdline = ['hg', 'serve', '--cmdserver', 'pipe'] @@ -69,13 +83,13 @@ def runcommand(server, args, output=sys.stdout, error=sys.stderr, input=None, outfilter=lambda x: x): - print '*** runcommand', ' '.join(args) + print('*** runcommand', ' '.join(args)) sys.stdout.flush() server.stdin.write('runcommand\n') writeblock(server, '\0'.join(args)) if not input: - input = cStringIO.StringIO() + input = stringio() while True: ch, data = readchannel(server) @@ -92,10 +106,10 @@ elif ch == 'r': ret, = struct.unpack('>i', data) if ret != 0: - print ' [%d]' % ret + print(' [%d]' % ret) return ret else: - print "unexpected channel %c: %r" % (ch, data) + print("unexpected channel %c: %r" % (ch, data)) if ch.isupper(): return
--- a/contrib/hgfixes/fix_bytes.py Tue Mar 29 11:54:46 2016 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,98 +0,0 @@ -"""Fixer that changes plain strings to bytes strings.""" - -import re - -from lib2to3 import fixer_base -from lib2to3.pgen2 import token -from lib2to3.fixer_util import Name -from lib2to3.pygram import python_symbols as syms - -_re = re.compile(r'[rR]?[\'\"]') - -# XXX: Implementing a blacklist in 2to3 turned out to be more troublesome than -# blacklisting some modules inside the fixers. So, this is what I came with. - -blacklist = ('mercurial/demandimport.py', - 'mercurial/py3kcompat.py', # valid python 3 already - 'mercurial/i18n.py', - ) - -def isdocstring(node): - def isclassorfunction(ancestor): - symbols = (syms.funcdef, syms.classdef) - # if the current node is a child of a function definition, a class - # definition or a file, then it is a docstring - if ancestor.type == syms.simple_stmt: - try: - while True: - if ancestor.type in symbols: - return True - ancestor = ancestor.parent - except AttributeError: - return False - return False - - def ismodule(ancestor): - # Our child is a docstring if we are a simple statement, and our - # ancestor is file_input. In other words, our child is a lone string in - # the source file. 
- try: - if (ancestor.type == syms.simple_stmt and - ancestor.parent.type == syms.file_input): - return True - except AttributeError: - return False - - def isdocassignment(ancestor): - # Assigning to __doc__, definitely a string - try: - while True: - if (ancestor.type == syms.expr_stmt and - Name('__doc__') in ancestor.children): - return True - ancestor = ancestor.parent - except AttributeError: - return False - - if ismodule(node.parent) or \ - isdocassignment(node.parent) or \ - isclassorfunction(node.parent): - return True - return False - -def shouldtransform(node): - specialnames = ['__main__'] - - if node.value in specialnames: - return False - - ggparent = node.parent.parent.parent - sggparent = str(ggparent) - - if 'getattr' in sggparent or \ - 'hasattr' in sggparent or \ - 'setattr' in sggparent or \ - 'encode' in sggparent or \ - 'decode' in sggparent: - return False - - return True - -class FixBytes(fixer_base.BaseFix): - - PATTERN = 'STRING' - - def transform(self, node, results): - # The filename may be prefixed with a build directory. - if self.filename.endswith(blacklist): - return - if node.type == token.STRING: - if _re.match(node.value): - if isdocstring(node): - return - if not shouldtransform(node): - return - new = node.clone() - new.value = 'b' + new.value - return new -
--- a/contrib/hgfixes/fix_bytesmod.py Tue Mar 29 11:54:46 2016 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,63 +0,0 @@ -"""Fixer that changes bytes % whatever to a function that actually formats -it.""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import is_tuple, Call, Comma, Name, touch_import - -# XXX: Implementing a blacklist in 2to3 turned out to be more troublesome than -# blacklisting some modules inside the fixers. So, this is what I came with. - -blacklist = ['mercurial/demandimport.py', - 'mercurial/py3kcompat.py', - 'mercurial/i18n.py', - ] - -def isnumberremainder(formatstr, data): - try: - if data.value.isdigit(): - return True - except AttributeError: - return False - -class FixBytesmod(fixer_base.BaseFix): - # XXX: There's one case (I suppose) I can't handle: when a remainder - # operation like foo % bar is performed, I can't really know what the - # contents of foo and bar are. I believe the best approach is to "correct" - # the to-be-converted code and let bytesformatter handle that case in - # runtime. - PATTERN = ''' - term< formatstr=STRING '%' data=STRING > | - term< formatstr=STRING '%' data=atom > | - term< formatstr=NAME '%' data=any > | - term< formatstr=any '%' data=any > - ''' - - def transform(self, node, results): - for bfn in blacklist: - if self.filename.endswith(bfn): - return - if not self.filename.endswith('mercurial/py3kcompat.py'): - touch_import('mercurial', 'py3kcompat', node=node) - - formatstr = results['formatstr'].clone() - data = results['data'].clone() - formatstr.prefix = '' # remove spaces from start - - if isnumberremainder(formatstr, data): - return - - # We have two possibilities: - # 1- An identifier or name is passed, it is going to be a leaf, thus, we - # just need to copy its value as an argument to the formatter; - # 2- A tuple is explicitly passed. In this case, we're gonna explode it - # to pass to the formatter - # TODO: Check for normal strings. 
They don't need to be translated - - if is_tuple(data): - args = [formatstr, Comma().clone()] + \ - [c.clone() for c in data.children[:]] - else: - args = [formatstr, Comma().clone(), data] - - call = Call(Name('bytesformatter', prefix=' '), args) - return call
--- a/contrib/hgfixes/fix_leftover_imports.py Tue Mar 29 11:54:46 2016 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,107 +0,0 @@ -"Fixer that translates some APIs ignored by the default 2to3 fixers." - -# FIXME: This fixer has some ugly hacks. Its main design is based on that of -# fix_imports, from lib2to3. Unfortunately, the fix_imports framework only -# changes module names "without dots", meaning it won't work for some changes -# in the email module/package. Thus this fixer was born. I believe that with a -# bit more thinking, a more generic fixer can be implemented, but I'll leave -# that as future work. - -from lib2to3.fixer_util import Name -from lib2to3.fixes import fix_imports - -# This maps the old names to the new names. Note that a drawback of the current -# design is that the dictionary keys MUST have EXACTLY one dot (.) in them, -# otherwise things will break. (If you don't need a module hierarchy, you're -# better of just inherit from fix_imports and overriding the MAPPING dict.) - -MAPPING = {'email.Utils': 'email.utils', - 'email.Errors': 'email.errors', - 'email.Header': 'email.header', - 'email.Parser': 'email.parser', - 'email.Encoders': 'email.encoders', - 'email.MIMEText': 'email.mime.text', - 'email.MIMEBase': 'email.mime.base', - 'email.Generator': 'email.generator', - 'email.MIMEMultipart': 'email.mime.multipart', -} - -def alternates(members): - return "(" + "|".join(map(repr, members)) + ")" - -def build_pattern(mapping=MAPPING): - packages = {} - for key in mapping: - # What we are doing here is the following: with dotted names, we'll - # have something like package_name <trailer '.' module>. Then, we are - # making a dictionary to copy this structure. For example, if - # mapping={'A.B': 'a.b', 'A.C': 'a.c'}, it will generate the dictionary - # {'A': ['b', 'c']} to, then, generate something like "A <trailer '.' - # ('b' | 'c')". 
- name = key.split('.') - prefix = name[0] - if prefix in packages: - packages[prefix].append(name[1:][0]) - else: - packages[prefix] = name[1:] - - mod_list = ' | '.join(["'%s' '.' ('%s')" % - (key, "' | '".join(packages[key])) for key in packages]) - mod_list = '(' + mod_list + ' )' - - yield """name_import=import_name< 'import' module_name=dotted_name< %s > > - """ % mod_list - - yield """name_import=import_name< 'import' - multiple_imports=dotted_as_names< any* - module_name=dotted_name< %s > - any* > - >""" % mod_list - - packs = ' | '.join(["'%s' trailer<'.' ('%s')>" % (key, - "' | '".join(packages[key])) for key in packages]) - - yield "power< package=(%s) trailer<'.' any > any* >" % packs - -class FixLeftoverImports(fix_imports.FixImports): - # We want to run this fixer after fix_import has run (this shouldn't matter - # for hg, though, as setup3k prefers to run the default fixers first) - mapping = MAPPING - - def build_pattern(self): - return "|".join(build_pattern(self.mapping)) - - def transform(self, node, results): - # Mostly copied from fix_imports.py - import_mod = results.get("module_name") - if import_mod: - try: - mod_name = import_mod.value - except AttributeError: - # XXX: A hack to remove whitespace prefixes and suffixes - mod_name = str(import_mod).strip() - new_name = self.mapping[mod_name] - import_mod.replace(Name(new_name, prefix=import_mod.prefix)) - if "name_import" in results: - # If it's not a "from x import x, y" or "import x as y" import, - # marked its usage to be replaced. - self.replace[mod_name] = new_name - if "multiple_imports" in results: - # This is a nasty hack to fix multiple imports on a line (e.g., - # "import StringIO, urlparse"). The problem is that I can't - # figure out an easy way to make a pattern recognize the keys of - # MAPPING randomly sprinkled in an import statement. - results = self.match(node) - if results: - self.transform(node, results) - else: - # Replace usage of the module. 
- # Now this is, mostly, a hack - bare_name = results["package"][0] - bare_name_text = ''.join(map(str, results['package'])).strip() - new_name = self.replace.get(bare_name_text) - prefix = results['package'][0].prefix - if new_name: - bare_name.replace(Name(new_name, prefix=prefix)) - results["package"][1].replace(Name('')) -
--- a/contrib/import-checker.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/import-checker.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,8 +1,11 @@ #!/usr/bin/env python +from __future__ import absolute_import, print_function + import ast import collections import os +import re import sys # Import a minimal set of stdlib modules needed for list_stdlib_modules() @@ -182,6 +185,8 @@ yield 'builtins' # python3 only for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only yield m + for m in 'cPickle', 'datetime': # in Python (not C) on PyPy + yield m stdlib_prefixes = set([sys.prefix, sys.exec_prefix]) # We need to supplement the list of prefixes for the search to work # when run from within a virtualenv. @@ -200,10 +205,8 @@ stdlib_prefixes.add(dirname) for libpath in sys.path: # We want to walk everything in sys.path that starts with - # something in stdlib_prefixes. check-code suppressed because - # the ast module used by this script implies the availability - # of any(). - if not any(libpath.startswith(p) for p in stdlib_prefixes): # no-py24 + # something in stdlib_prefixes. + if not any(libpath.startswith(p) for p in stdlib_prefixes): continue for top, dirs, files in os.walk(libpath): for i, d in reversed(list(enumerate(dirs))): @@ -223,7 +226,7 @@ stdlib_modules = set(list_stdlib_modules()) -def imported_modules(source, modulename, localmods, ignore_nested=False): +def imported_modules(source, modulename, f, localmods, ignore_nested=False): """Given the source of a file as a string, yield the names imported by that file. @@ -237,6 +240,7 @@ Returns: A list of absolute module names imported by the given source. + >>> f = 'foo/xxx.py' >>> modulename = 'foo.xxx' >>> localmods = {'foo.__init__': True, ... 'foo.foo1': True, 'foo.foo2': True, @@ -245,43 +249,43 @@ >>> # standard library (= not locally defined ones) >>> sorted(imported_modules( ... 'from stdlib1 import foo, bar; import stdlib2', - ... modulename, localmods)) + ... 
modulename, f, localmods)) [] >>> # relative importing >>> sorted(imported_modules( ... 'import foo1; from bar import bar1', - ... modulename, localmods)) + ... modulename, f, localmods)) ['foo.bar.bar1', 'foo.foo1'] >>> sorted(imported_modules( ... 'from bar.bar1 import name1, name2, name3', - ... modulename, localmods)) + ... modulename, f, localmods)) ['foo.bar.bar1'] >>> # absolute importing >>> sorted(imported_modules( ... 'from baz import baz1, name1', - ... modulename, localmods)) + ... modulename, f, localmods)) ['baz.__init__', 'baz.baz1'] >>> # mixed importing, even though it shouldn't be recommended >>> sorted(imported_modules( ... 'import stdlib, foo1, baz', - ... modulename, localmods)) + ... modulename, f, localmods)) ['baz.__init__', 'foo.foo1'] >>> # ignore_nested >>> sorted(imported_modules( ... '''import foo ... def wat(): ... import bar - ... ''', modulename, localmods)) + ... ''', modulename, f, localmods)) ['foo.__init__', 'foo.bar.__init__'] >>> sorted(imported_modules( ... '''import foo ... def wat(): ... import bar - ... ''', modulename, localmods, ignore_nested=True)) + ... ''', modulename, f, localmods, ignore_nested=True)) ['foo.__init__'] """ fromlocal = fromlocalfunc(modulename, localmods) - for node in ast.walk(ast.parse(source)): + for node in ast.walk(ast.parse(source, f)): if ignore_nested and getattr(node, 'col_offset', 0) > 0: continue if isinstance(node, ast.Import): @@ -366,7 +370,7 @@ fromlocal = fromlocalfunc(module, localmods) # Whether a local/non-stdlib import has been performed. - seenlocal = False + seenlocal = None # Whether a relative, non-symbol import has been seen. seennonsymbolrelative = False # The last name to be imported (for sorting). @@ -403,10 +407,11 @@ # stdlib imports should be before local imports. 
stdlib = name in stdlib_modules if stdlib and seenlocal and node.col_offset == root_col_offset: - yield msg('stdlib import follows local import: %s', name) + yield msg('stdlib import "%s" follows local import: %s', + name, seenlocal) if not stdlib: - seenlocal = True + seenlocal = name # Import of sibling modules should use relative imports. topname = name.split('.')[0] @@ -437,7 +442,7 @@ if not fullname or fullname in stdlib_modules: yield msg('relative import of stdlib module') else: - seenlocal = True + seenlocal = fullname # Direct symbol import is only allowed from certain modules and # must occur before non-symbol imports. @@ -494,10 +499,6 @@ """Given some python source, verify that stdlib imports are done in separate statements from relative local module imports. - Observing this limitation is important as it works around an - annoying lib2to3 bug in relative import rewrites: - http://bugs.python.org/issue19510. - >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo'))) [('mixed imports\\n stdlib: sys\\n relative: foo', 1)] >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os'))) @@ -547,16 +548,17 @@ All module names recorded in `imports` should be absolute one. + >>> from __future__ import print_function >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'], ... 'top.bar': ['top.baz', 'sys'], ... 'top.baz': ['top.foo'], ... 'top.qux': ['top.foo']} - >>> print '\\n'.join(sorted(find_cycles(imports))) + >>> print('\\n'.join(sorted(find_cycles(imports)))) top.bar -> top.baz -> top.foo -> top.bar top.foo -> top.qux -> top.foo """ cycles = set() - for mod in sorted(imports.iterkeys()): + for mod in sorted(imports.keys()): try: checkmod(mod, imports) except CircularImport as e: @@ -567,9 +569,101 @@ def _cycle_sortkey(c): return len(c), c +def embedded(f, modname, src): + """Extract embedded python code + + >>> def test(fn, lines): + ... for s, m, f, l in embedded(fn, "example", lines): + ... print("%s %s %s" % (m, f, l)) + ... 
print(repr(s)) + >>> lines = [ + ... 'comment', + ... ' >>> from __future__ import print_function', + ... " >>> ' multiline", + ... " ... string'", + ... ' ', + ... 'comment', + ... ' $ cat > foo.py <<EOF', + ... ' > from __future__ import print_function', + ... ' > EOF', + ... ] + >>> test("example.t", lines) + example[2] doctest.py 2 + "from __future__ import print_function\\n' multiline\\nstring'\\n" + example[7] foo.py 7 + 'from __future__ import print_function\\n' + """ + inlinepython = 0 + shpython = 0 + script = [] + prefix = 6 + t = '' + n = 0 + for l in src: + n += 1 + if not l.endswith(b'\n'): + l += b'\n' + if l.startswith(b' >>> '): # python inlines + if shpython: + print("%s:%d: Parse Error" % (f, n)) + if not inlinepython: + # We've just entered a Python block. + inlinepython = n + t = 'doctest.py' + script.append(l[prefix:]) + continue + if l.startswith(b' ... '): # python inlines + script.append(l[prefix:]) + continue + cat = re.search(r"\$ \s*cat\s*>\s*(\S+\.py)\s*<<\s*EOF", l) + if cat: + if inlinepython: + yield ''.join(script), ("%s[%d]" % + (modname, inlinepython)), t, inlinepython + script = [] + inlinepython = 0 + shpython = n + t = cat.group(1) + continue + if shpython and l.startswith(b' > '): # sh continuation + if l == b' > EOF\n': + yield ''.join(script), ("%s[%d]" % + (modname, shpython)), t, shpython + script = [] + shpython = 0 + else: + script.append(l[4:]) + continue + if inlinepython and l == b' \n': + yield ''.join(script), ("%s[%d]" % + (modname, inlinepython)), t, inlinepython + script = [] + inlinepython = 0 + continue + +def sources(f, modname): + """Yields possibly multiple sources from a filepath + + input: filepath, modulename + yields: script(string), modulename, filepath, linenumber + + For embedded scripts, the modulename and filepath will be different + from the function arguments. linenumber is an offset relative to + the input file. 
+ """ + py = False + if f.endswith('.py'): + with open(f) as src: + yield src.read(), modname, f, 0 + py = True + if py or f.endswith('.t'): + with open(f) as src: + for script, modname, t, line in embedded(f, modname, src): + yield script, modname, t, line + def main(argv): if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2): - print 'Usage: %s {-|file [file] [file] ...}' + print('Usage: %s {-|file [file] [file] ...}') return 1 if argv[1] == '-': argv = argv[:1] @@ -580,15 +674,19 @@ for source_path in argv[1:]: modname = dotted_name_of_path(source_path, trimpure=True) localmods[modname] = source_path - for modname, source_path in sorted(localmods.iteritems()): - f = open(source_path) - src = f.read() - used_imports[modname] = sorted( - imported_modules(src, modname, localmods, ignore_nested=True)) - for error, lineno in verify_import_convention(modname, src, localmods): - any_errors = True - print '%s:%d: %s' % (source_path, lineno, error) - f.close() + for localmodname, source_path in sorted(localmods.items()): + for src, modname, name, line in sources(source_path, localmodname): + try: + used_imports[modname] = sorted( + imported_modules(src, modname, name, localmods, + ignore_nested=True)) + for error, lineno in verify_import_convention(modname, src, + localmods): + any_errors = True + print('%s:%d: %s' % (source_path, lineno + line, error)) + except SyntaxError as e: + print('%s:%d: SyntaxError: %s' % + (source_path, e.lineno + line, e)) cycles = find_cycles(used_imports) if cycles: firstmods = set() @@ -599,7 +697,7 @@ # of cycles that are effectively duplicates. if first in firstmods: continue - print 'Import cycle:', c + print('Import cycle:', c) firstmods.add(first) any_errors = True return any_errors != 0
--- a/contrib/memory.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/memory.py Sat Apr 16 18:06:48 2016 -0500 @@ -11,6 +11,7 @@ prints it to ``stderr`` on exit. ''' +from __future__ import absolute_import import atexit def memusage(ui):
--- a/contrib/mercurial.spec Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/mercurial.spec Sat Apr 16 18:06:48 2016 -0500 @@ -158,4 +158,5 @@ %endif %{_libdir}/python%{pythonver}/site-packages/%{name} %{_libdir}/python%{pythonver}/site-packages/hgext +%{_libdir}/python%{pythonver}/site-packages/hgext3rd %endif
--- a/contrib/perf.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/perf.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,12 +1,26 @@ # perf.py - performance test routines '''helper extension to measure performance''' -from mercurial import cmdutil, scmutil, util, commands, obsolete -from mercurial import repoview, branchmap, merge, copies, error, revlog -from mercurial import mdiff -import time, os, sys +from __future__ import absolute_import +import functools +import os import random -import functools +import sys +import time +from mercurial import ( + branchmap, + cmdutil, + commands, + copies, + error, + mdiff, + merge, + obsolete, + repoview, + revlog, + scmutil, + util, +) formatteropts = commands.formatteropts revlogopts = commands.debugrevlogopts
--- a/contrib/python-hook-examples.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/python-hook-examples.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,7 +1,11 @@ ''' Examples of useful python hooks for Mercurial. ''' -from mercurial import patch, util +from __future__ import absolute_import +from mercurial import ( + patch, + util, +) def diffstat(ui, repo, **kwargs): '''Example usage:
--- a/contrib/revsetbenchmarks.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/revsetbenchmarks.py Sat Apr 16 18:06:48 2016 -0500 @@ -8,13 +8,22 @@ # # call with --help for details -import sys +from __future__ import absolute_import, print_function +import math import os import re -import math -from subprocess import check_call, Popen, CalledProcessError, STDOUT, PIPE +import sys +from subprocess import ( + CalledProcessError, + check_call, + PIPE, + Popen, + STDOUT, +) # cannot use argparse, python 2.7 only -from optparse import OptionParser +from optparse import ( + OptionParser, +) DEFAULTVARIANTS = ['plain', 'min', 'max', 'first', 'last', 'reverse', 'reverse+first', 'reverse+last', @@ -36,7 +45,7 @@ check_output(['make', 'local'], stderr=None) # suppress output except for error/warning except CalledProcessError as exc: - print >> sys.stderr, 'update to revision %s failed, aborting' % rev + print('update to revision %s failed, aborting'%rev, file=sys.stderr) sys.exit(exc.returncode) @@ -62,11 +71,11 @@ output = hg(args, repo=target) return parseoutput(output) except CalledProcessError as exc: - print >> sys.stderr, 'abort: cannot run revset benchmark: %s' % exc.cmd - if exc.output is None: - print >> sys.stderr, '(no output)' + print('abort: cannot run revset benchmark: %s'%exc.cmd, file=sys.stderr) + if getattr(exc, 'output', None) is None: # no output before 2.7 + print('(no output)', file=sys.stderr) else: - print >> sys.stderr, exc.output + print(exc.output, file=sys.stderr) return None outputre = re.compile(r'! 
wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) ' @@ -80,8 +89,8 @@ """ match = outputre.search(output) if not match: - print >> sys.stderr, 'abort: invalid output:' - print >> sys.stderr, output + print('abort: invalid output:', file=sys.stderr) + print(output, file=sys.stderr) sys.exit(1) return {'comb': float(match.group(2)), 'count': int(match.group(5)), @@ -183,7 +192,7 @@ out.append(formattiming(data[var]['user'])) out.append(formattiming(data[var]['sys'])) out.append('%6d' % data[var]['count']) - print mask % (idx, ' '.join(out)) + print(mask % (idx, ' '.join(out))) def printheader(variants, maxidx, verbose=False, relative=False): header = [' ' * (idxwidth(maxidx) + 1)] @@ -200,14 +209,14 @@ header.append('%-8s' % 'user') header.append('%-8s' % 'sys') header.append('%6s' % 'count') - print ' '.join(header) + print(' '.join(header)) def getrevs(spec): """get the list of rev matched by a revset""" try: out = check_output(['hg', 'log', '--template={rev}\n', '--rev', spec]) except CalledProcessError as exc: - print >> sys.stderr, "abort, can't get revision from %s" % spec + print("abort, can't get revision from %s"%spec, file=sys.stderr) sys.exit(exc.returncode) return [r for r in out.split() if r] @@ -261,14 +270,14 @@ revsets = [l.strip() for l in revsetsfile if not l.startswith('#')] revsets = [l for l in revsets if l] -print "Revsets to benchmark" -print "----------------------------" +print("Revsets to benchmark") +print("----------------------------") for idx, rset in enumerate(revsets): - print "%i) %s" % (idx, rset) + print("%i) %s" % (idx, rset)) -print "----------------------------" -print +print("----------------------------") +print() revs = [] for a in args: @@ -278,9 +287,9 @@ results = [] for r in revs: - print "----------------------------" + print("----------------------------") printrevision(r) - print "----------------------------" + print("----------------------------") update(r) res = [] results.append(res) @@ -295,31 +304,31 @@ 
printresult(variants, idx, varres, len(revsets), verbose=options.verbose) sys.stdout.flush() - print "----------------------------" + print("----------------------------") -print """ +print(""" Result by revset ================ -""" +""") -print 'Revision:' +print('Revision:') for idx, rev in enumerate(revs): sys.stdout.write('%i) ' % idx) sys.stdout.flush() printrevision(rev) -print -print +print() +print() for ridx, rset in enumerate(revsets): - print "revset #%i: %s" % (ridx, rset) + print("revset #%i: %s" % (ridx, rset)) printheader(variants, len(results), verbose=options.verbose, relative=True) ref = None for idx, data in enumerate(results): printresult(variants, idx, data[ridx], len(results), verbose=options.verbose, reference=ref) ref = data[ridx] - print + print()
--- a/contrib/showstack.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/showstack.py Sat Apr 16 18:06:48 2016 -0500 @@ -2,7 +2,10 @@ # # binds to both SIGQUIT (Ctrl-\) and SIGINFO (Ctrl-T on BSDs) -import sys, signal, traceback +from __future__ import absolute_import +import signal +import sys +import traceback def sigshow(*args): sys.stderr.write("\n")
--- a/contrib/simplemerge Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/simplemerge Sat Apr 16 18:06:48 2016 -0500 @@ -47,7 +47,7 @@ opts = {} try: args = fancyopts.fancyopts(sys.argv[1:], options, opts) - except fancyopts.getopt.GetoptError, e: + except fancyopts.getopt.GetoptError as e: raise ParseError(e) if opts['help']: showhelp() @@ -55,11 +55,11 @@ if len(args) != 3: raise ParseError(_('wrong number of arguments')) sys.exit(simplemerge.simplemerge(ui.ui(), *args, **opts)) -except ParseError, e: +except ParseError as e: sys.stdout.write("%s: %s\n" % (sys.argv[0], e)) showhelp() sys.exit(1) -except error.Abort, e: +except error.Abort as e: sys.stderr.write("abort: %s\n" % e) sys.exit(255) except KeyboardInterrupt:
--- a/contrib/synthrepo.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/synthrepo.py Sat Apr 16 18:06:48 2016 -0500 @@ -36,10 +36,30 @@ - Symlinks and binary files are ignored ''' -import bisect, collections, itertools, json, os, random, time, sys -from mercurial import cmdutil, context, patch, scmutil, util, hg, error +from __future__ import absolute_import +import bisect +import collections +import itertools +import json +import os +import random +import sys +import time +from mercurial import ( + cmdutil, + context, + error, + hg, + patch, + scmutil, + util, +) from mercurial.i18n import _ -from mercurial.node import nullrev, nullid, short +from mercurial.node import ( + nullid, + nullrev, + short, +) # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
--- a/contrib/win32/hgwebdir_wsgi.py Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/win32/hgwebdir_wsgi.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,66 +1,104 @@ # An example WSGI script for IIS/isapi-wsgi to export multiple hgweb repos -# Copyright 2010 Sune Foldager <cryo@cyanite.org> +# Copyright 2010-2016 Sune Foldager <cyano@me.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. # # Requirements: -# - Python 2.6 -# - PyWin32 build 214 or newer -# - Mercurial installed from source (python setup.py install) -# - IIS 7 -# -# Earlier versions will in general work as well, but the PyWin32 version is -# necessary for win32traceutil to work correctly. +# - Python 2.7, preferably 64 bit +# - PyWin32 for Python 2.7 (32 or 64 bit) +# - Mercurial installed from source (python setup.py install) or download the +# python module installer from https://www.mercurial-scm.org/wiki/Download +# - IIS 7 or newer # # # Installation and use: # -# - Download the isapi-wsgi source and run python setup.py install: -# http://code.google.com/p/isapi-wsgi/ +# - Download or clone the isapi-wsgi source and run python setup.py install. +# https://github.com/hexdump42/isapi-wsgi +# +# - Create a directory to hold the shim dll, config files etc. This can reside +# inside the standard IIS directory, C:\inetpub, or anywhere else. Copy this +# script there. # # - Run this script (i.e. python hgwebdir_wsgi.py) to get a shim dll. The # shim is identical for all scripts, so you can just copy and rename one -# from an earlier run, if you wish. +# from an earlier run, if you wish. The shim needs to reside in the same +# directory as this script. +# +# - Start IIS manager and create a new app pool: +# .NET CLR Version: No Managed Code +# Advanced Settings: Enable 32 Bit Applications, if using 32 bit Python. +# You can adjust the identity and maximum worker processes if you wish. 
This +# setup works fine with multiple worker processes. # -# - Setup an IIS application where your hgwebdir is to be served from. -# On 64-bit systems, make sure it's assigned a 32-bit app pool. +# - Create an IIS application where your hgwebdir is to be served from. +# Assign it the app pool you just created and point its physical path to the +# directory you created. +# +# - In the application, remove all handler mappings and setup a wildcard script +# handler mapping of type IsapiModule with the shim dll as its executable. +# This file MUST reside in the same directory as the shim. The easiest way +# to do all this is to close IIS manager, place a web.config file in your +# directory and start IIS manager again. The file should contain: # -# - In the application, setup a wildcard script handler mapping of type -# IsapiModule with the shim dll as its executable. This file MUST reside -# in the same directory as the shim. Remove all other handlers, if you wish. +# <?xml version="1.0" encoding="UTF-8"?> +# <configuration> +# <system.webServer> +# <handlers accessPolicy="Read, Script"> +# <clear /> +# <add name="hgwebdir" path="*" verb="*" modules="IsapiModule" +# scriptProcessor="C:\your\directory\_hgwebdir_wsgi.dll" +# resourceType="Unspecified" requireAccess="None" +# preCondition="bitness64" /> +# </handlers> +# </system.webServer> +# </configuration> +# +# Where "bitness64" should be replaced with "bitness32" for 32 bit Python. +# +# - Edit ISAPI And CGI Restrictions on the web server (global setting). Add a +# restriction pointing to your shim dll and allow it to run. # -# - Make sure the ISAPI and CGI restrictions (configured globally on the -# web server) includes the shim dll, to allow it to run. +# - Create a configuration file in your directory and adjust the configuration +# variables below to match your needs. 
Example configuration: +# +# [web] +# style = gitweb +# push_ssl = false +# allow_push = * +# encoding = utf8 # -# - Adjust the configuration variables below to match your needs. +# [server] +# validate = true +# +# [paths] +# repo1 = c:\your\directory\repo1 +# repo2 = c:\your\directory\repo2 +# +# - Restart the web server and see if things are running. # # Configuration file location -hgweb_config = r'c:\src\iis\hg\hgweb.config' +hgweb_config = r'c:\your\directory\wsgi.config' # Global settings for IIS path translation path_strip = 0 # Strip this many path elements off (when using url rewrite) path_prefix = 1 # This many path elements are prefixes (depends on the # virtual path of the IIS application). +from __future__ import absolute_import import sys # Adjust python path if this is not a system-wide install -#sys.path.insert(0, r'c:\path\to\python\lib') +#sys.path.insert(0, r'C:\your\custom\hg\build\lib.win32-2.7') # Enable tracing. Run 'python -m win32traceutil' to debug if getattr(sys, 'isapidllhandle', None) is not None: import win32traceutil win32traceutil.SetupForPrint # silence unused import warning -# To serve pages in local charset instead of UTF-8, remove the two lines below -import os -os.environ['HGENCODING'] = 'UTF-8' - - import isapi_wsgi -from mercurial import demandimport; demandimport.enable() from mercurial.hgweb.hgwebdir_mod import hgwebdir # Example tweak: Replace isapi_wsgi's handler to provide better error message
--- a/contrib/wix/help.wxs Tue Mar 29 11:54:46 2016 -0500 +++ b/contrib/wix/help.wxs Sat Apr 16 18:06:48 2016 -0500 @@ -40,6 +40,7 @@ <Component Id="help.internals" Guid="$(var.help.internals.guid)" Win64='$(var.IsX64)'> <File Id="internals.bundles.txt" Name="bundles.txt" KeyPath="yes" /> <File Id="internals.changegroups.txt" Name="changegroups.txt" /> + <File Id="internals.requirements.txt" Name="requirements.txt" /> <File Id="internals.revlogs.txt" Name="revlogs.txt" /> </Component> </Directory>
--- a/doc/check-seclevel.py Tue Mar 29 11:54:46 2016 -0500 +++ b/doc/check-seclevel.py Sat Apr 16 18:06:48 2016 -0500 @@ -2,18 +2,26 @@ # # checkseclevel - checking section title levels in each online help document -import sys, os +from __future__ import absolute_import + import optparse +import os +import sys # import from the live mercurial repo os.environ['HGMODULEPOLICY'] = 'py' sys.path.insert(0, "..") from mercurial import demandimport; demandimport.enable() -from mercurial.commands import table -from mercurial.help import helptable -from mercurial import extensions -from mercurial import minirst -from mercurial import ui as uimod +from mercurial import ( + commands, + extensions, + help, + minirst, + ui as uimod, +) + +table = commands.table +helptable = help.helptable level2mark = ['"', '=', '-', '.', '#'] reservedmarks = ['"']
--- a/doc/docchecker Tue Mar 29 11:54:46 2016 -0500 +++ b/doc/docchecker Sat Apr 16 18:06:48 2016 -0500 @@ -10,45 +10,54 @@ import re leadingline = re.compile(r'(^\s*)(\S.*)$') -hg_backtick = re.compile(r""":hg:`[^`]*'[^`]*`""") -hg_cramped = re.compile(r'\w:hg:`') + +checks = [ + (r""":hg:`[^`]*'[^`]*`""", + """warning: please avoid nesting ' in :hg:`...`"""), + (r'\w:hg:`', + 'warning: please have a space before :hg:'), + (r"""(?:[^a-z][^'.])hg ([^,;"`]*'(?!hg)){2}""", + '''warning: please use " instead of ' for hg ... "..."'''), +] def check(line): - if hg_backtick.search(line): - print(line) - print("""warning: please avoid nesting ' in :hg:`...`""") - if hg_cramped.search(line): - print(line) - print('warning: please have a space before :hg:') + messages = [] + for match, msg in checks: + if re.search(match, line): + messages.append(msg) + if messages: + print(line) + for msg in messages: + print(msg) def work(file): - (llead, lline) = ('', '') + (llead, lline) = ('', '') - for line in file: - # this section unwraps lines - match = leadingline.match(line) - if not match: - check(lline) - (llead, lline) = ('', '') - continue + for line in file: + # this section unwraps lines + match = leadingline.match(line) + if not match: + check(lline) + (llead, lline) = ('', '') + continue - lead, line = match.group(1), match.group(2) - if (lead == llead): - if (lline != ''): - lline += ' ' + line - else: - lline = line - else: - check(lline) - (llead, lline) = (lead, line) - check(lline) + lead, line = match.group(1), match.group(2) + if (lead == llead): + if (lline != ''): + lline += ' ' + line + else: + lline = line + else: + check(lline) + (llead, lline) = (lead, line) + check(lline) def main(): - for f in sys.argv[1:]: - try: - with open(f) as file: - work(file) - except: - print("failed to process %s" % f) + for f in sys.argv[1:]: + try: + with open(f) as file: + work(file) + except BaseException as e: + print("failed to process %s: %s" % (f, e)) main()
--- a/doc/gendoc.py Tue Mar 29 11:54:46 2016 -0500 +++ b/doc/gendoc.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,7 +4,11 @@ where DOC is the name of a document """ -import os, sys, textwrap +from __future__ import absolute_import + +import os +import sys +import textwrap # This script is executed during installs and may not have C extensions # available. Relax C module requirements. @@ -12,12 +16,22 @@ # import from the live mercurial repo sys.path.insert(0, "..") from mercurial import demandimport; demandimport.enable() -from mercurial import minirst -from mercurial.commands import table, globalopts -from mercurial.i18n import gettext, _ -from mercurial.help import helptable, loaddoc -from mercurial import extensions -from mercurial import ui as uimod +from mercurial import ( + commands, + extensions, + help, + minirst, + ui as uimod, +) +from mercurial.i18n import ( + gettext, + _, +) + +table = commands.table +globalopts = commands.globalopts +helptable = help.helptable +loaddoc = help.loaddoc def get_desc(docstr): if not docstr:
--- a/doc/hgmanpage.py Tue Mar 29 11:54:46 2016 -0500 +++ b/doc/hgmanpage.py Sat Apr 16 18:06:48 2016 -0500 @@ -41,12 +41,18 @@ by the command whatis or apropos. """ +from __future__ import absolute_import __docformat__ = 'reStructuredText' +import inspect import re -from docutils import nodes, writers, languages +from docutils import ( + languages, + nodes, + writers, +) try: import roman except ImportError:
--- a/doc/runrst Tue Mar 29 11:54:46 2016 -0500 +++ b/doc/runrst Sat Apr 16 18:06:48 2016 -0500 @@ -30,10 +30,22 @@ linktext = nodes.literal(rawtext, text) parts = text.split() cmd, args = parts[1], parts[2:] + refuri = "hg.1.html#%s" % cmd if cmd == 'help' and args: - cmd = args[0] # link to 'dates' for 'hg help dates' + if args[0] == 'config': + # :hg:`help config` + refuri = "hgrc.5.html" + elif args[0].startswith('config.'): + # :hg:`help config.SECTION...` + refuri = "hgrc.5.html#%s" % args[0].split('.', 2)[1] + elif len(args) >= 2 and args[0] == '-c': + # :hg:`help -c COMMAND ...` is equivalent to :hg:`COMMAND` + # (mainly for :hg:`help -c config`) + refuri = "hg.1.html#%s" % args[1] + else: + refuri = "hg.1.html#%s" % args[0] node = nodes.reference(rawtext, '', linktext, - refuri="hg.1.html#%s" % cmd) + refuri=refuri) return [node], [] roles.register_local_role("hg", role_hg)
--- a/hgext/__init__.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/__init__.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,1 +1,3 @@ -# placeholder +from __future__ import absolute_import +import pkgutil +__path__ = pkgutil.extend_path(__path__, __name__)
--- a/hgext/acl.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/acl.py Sat Apr 16 18:06:48 2016 -0500 @@ -191,9 +191,18 @@ ''' +from __future__ import absolute_import + +import getpass + from mercurial.i18n import _ -from mercurial import util, match, error -import getpass, urllib +from mercurial import ( + error, + match, + util, +) + +urlreq = util.urlreq # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should @@ -279,7 +288,7 @@ if source == 'serve' and 'url' in kwargs: url = kwargs['url'].split(':') if url[0] == 'remote' and url[1].startswith('http'): - user = urllib.unquote(url[3]) + user = urlreq.unquote(url[3]) if user is None: user = getpass.getuser()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/automv.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,100 @@ +# automv.py +# +# Copyright 2013-2016 Facebook, Inc. +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +"""Check for unrecorded moves at commit time (EXPERIMENTAL) + +This extension checks at commit/amend time if any of the committed files +comes from an unrecorded mv. + +The threshold at which a file is considered a move can be set with the +``automv.similarity`` config option. This option takes a percentage between 0 +(disabled) and 100 (files must be identical), the default is 95. + +""" + +# Using 95 as a default similarity is based on an analysis of the mercurial +# repositories of the cpython, mozilla-central & mercurial repositories, as +# well as 2 very large facebook repositories. At 95 50% of all potential +# missed moves would be caught, as well as correspond with 87% of all +# explicitly marked moves. Together, 80% of moved files are 95% similar or +# more. +# +# See http://markmail.org/thread/5pxnljesvufvom57 for context. 
+ +from __future__ import absolute_import + +from mercurial import ( + commands, + copies, + error, + extensions, + scmutil, + similar +) +from mercurial.i18n import _ + +def extsetup(ui): + entry = extensions.wrapcommand( + commands.table, 'commit', mvcheck) + entry[1].append( + ('', 'no-automv', None, + _('disable automatic file move detection'))) + +def mvcheck(orig, ui, repo, *pats, **opts): + """Hook to check for moves at commit time""" + renames = None + disabled = opts.pop('no_automv', False) + if not disabled: + threshold = ui.configint('automv', 'similarity', 95) + if not 0 <= threshold <= 100: + raise error.Abort(_('automv.similarity must be between 0 and 100')) + if threshold > 0: + match = scmutil.match(repo[None], pats, opts) + added, removed = _interestingfiles(repo, match) + renames = _findrenames(repo, match, added, removed, + threshold / 100.0) + + with repo.wlock(): + if renames is not None: + scmutil._markchanges(repo, (), (), renames) + return orig(ui, repo, *pats, **opts) + +def _interestingfiles(repo, matcher): + """Find what files were added or removed in this commit. + + Returns a tuple of two lists: (added, removed). Only files not *already* + marked as moved are included in the added list. + + """ + stat = repo.status(match=matcher) + added = stat[1] + removed = stat[2] + + copy = copies._forwardcopies(repo['.'], repo[None], matcher) + # remove the copy files for which we already have copy info + added = [f for f in added if f not in copy] + + return added, removed + +def _findrenames(repo, matcher, added, removed, similarity): + """Find what files in added are really moved files. + + Any file named in removed that is at least similarity% similar to a file + in added is seen as a rename. 
+ + """ + renames = {} + if similarity > 0: + for src, dst, score in similar.findrenames( + repo, added, removed, similarity): + if repo.ui.verbose: + repo.ui.status( + _('detected move of %s as %s (%d%% similar)\n') % ( + matcher.rel(src), matcher.rel(dst), score * 100)) + renames[dst] = src + if renames: + repo.ui.status(_('detected move of %d files\n') % len(renames)) + return renames
--- a/hgext/blackbox.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/blackbox.py Sat Apr 16 18:06:48 2016 -0500 @@ -10,10 +10,16 @@ Logs event information to .hg/blackbox.log to help debug and diagnose problems. The events that get logged can be configured via the blackbox.track config key. + Examples:: [blackbox] track = * + # dirty is *EXPENSIVE* (slow); + # each log entry indicates `+` if the repository is dirty, like :hg:`id`. + dirty = True + # record the source of log messages + logsource = True [blackbox] track = command, commandfinish, commandexception, exthook, pythonhook @@ -29,9 +35,19 @@ """ -from mercurial import util, cmdutil +from __future__ import absolute_import + +import errno +import re + from mercurial.i18n import _ -import errno, os, re +from mercurial.node import hex + +from mercurial import ( + cmdutil, + ui as uimod, + util, +) cmdtable = {} command = cmdutil.command(cmdtable) @@ -40,10 +56,47 @@ # be specifying the version(s) of Mercurial they are tested with, or # leave the attribute unspecified. 
testedwith = 'internal' -lastblackbox = None +lastui = None + +filehandles = {} + +def _openlog(vfs): + path = vfs.join('blackbox.log') + if path in filehandles: + return filehandles[path] + filehandles[path] = fp = vfs('blackbox.log', 'a') + return fp + +def _closelog(vfs): + path = vfs.join('blackbox.log') + fp = filehandles[path] + del filehandles[path] + fp.close() def wrapui(ui): class blackboxui(ui.__class__): + def __init__(self, src=None): + super(blackboxui, self).__init__(src) + if src is None: + self._partialinit() + else: + self._bbfp = getattr(src, '_bbfp', None) + self._bbinlog = False + self._bbrepo = getattr(src, '_bbrepo', None) + self._bbvfs = getattr(src, '_bbvfs', None) + + def _partialinit(self): + if util.safehasattr(self, '_bbvfs'): + return + self._bbfp = None + self._bbinlog = False + self._bbrepo = None + self._bbvfs = None + + def copy(self): + self._partialinit() + return self.__class__(self) + @util.propertycache def track(self): return self.configlist('blackbox', 'track', ['*']) @@ -51,76 +104,109 @@ def _openlogfile(self): def rotate(oldpath, newpath): try: - os.unlink(newpath) + self._bbvfs.unlink(newpath) except OSError as err: if err.errno != errno.ENOENT: self.debug("warning: cannot remove '%s': %s\n" % (newpath, err.strerror)) try: if newpath: - os.rename(oldpath, newpath) + self._bbvfs.rename(oldpath, newpath) except OSError as err: if err.errno != errno.ENOENT: self.debug("warning: cannot rename '%s' to '%s': %s\n" % (newpath, oldpath, err.strerror)) - fp = self._bbopener('blackbox.log', 'a') + fp = _openlog(self._bbvfs) maxsize = self.configbytes('blackbox', 'maxsize', 1048576) if maxsize > 0: - st = os.fstat(fp.fileno()) + st = self._bbvfs.fstat(fp) if st.st_size >= maxsize: path = fp.name - fp.close() + _closelog(self._bbvfs) maxfiles = self.configint('blackbox', 'maxfiles', 7) for i in xrange(maxfiles - 1, 1, -1): rotate(oldpath='%s.%d' % (path, i - 1), newpath='%s.%d' % (path, i)) rotate(oldpath=path, newpath=maxfiles > 0 
and path + '.1') - fp = self._bbopener('blackbox.log', 'a') + fp = _openlog(self._bbvfs) return fp + def _bbwrite(self, fmt, *args): + self._bbfp.write(fmt % args) + self._bbfp.flush() + def log(self, event, *msg, **opts): - global lastblackbox + global lastui super(blackboxui, self).log(event, *msg, **opts) + self._partialinit() if not '*' in self.track and not event in self.track: return - if util.safehasattr(self, '_blackbox'): - blackbox = self._blackbox - elif util.safehasattr(self, '_bbopener'): + if self._bbfp: + ui = self + elif self._bbvfs: try: - self._blackbox = self._openlogfile() + self._bbfp = self._openlogfile() except (IOError, OSError) as err: self.debug('warning: cannot write to blackbox.log: %s\n' % err.strerror) - del self._bbopener - self._blackbox = None - blackbox = self._blackbox + del self._bbvfs + self._bbfp = None + ui = self else: # certain ui instances exist outside the context of # a repo, so just default to the last blackbox that # was seen. - blackbox = lastblackbox + ui = lastui - if blackbox: + if not ui or not ui._bbfp: + return + if not lastui or ui._bbrepo: + lastui = ui + if ui._bbinlog: + # recursion guard + return + try: + ui._bbinlog = True date = util.datestr(None, '%Y/%m/%d %H:%M:%S') user = util.getuser() - pid = str(os.getpid()) + pid = str(util.getpid()) formattedmsg = msg[0] % msg[1:] + rev = '(unknown)' + changed = '' + if ui._bbrepo: + ctx = ui._bbrepo[None] + parents = ctx.parents() + rev = ('+'.join([hex(p.node()) for p in parents])) + if (ui.configbool('blackbox', 'dirty', False) and ( + any(ui._bbrepo.status()) or + any(ctx.sub(s).dirty() for s in ctx.substate) + )): + changed = '+' + if ui.configbool('blackbox', 'logsource', False): + src = ' [%s]' % event + else: + src = '' try: - blackbox.write('%s %s (%s)> %s' % - (date, user, pid, formattedmsg)) + ui._bbwrite('%s %s @%s%s (%s)%s> %s', + date, user, rev, changed, pid, src, formattedmsg) except IOError as err: self.debug('warning: cannot write to blackbox.log: 
%s\n' % err.strerror) - lastblackbox = blackbox + finally: + ui._bbinlog = False def setrepo(self, repo): - self._bbopener = repo.vfs + self._bbfp = None + self._bbinlog = False + self._bbrepo = repo + self._bbvfs = repo.vfs ui.__class__ = blackboxui + uimod.ui = blackboxui def uisetup(ui): wrapui(ui) @@ -143,12 +229,12 @@ '''view the recent repository events ''' - if not os.path.exists(repo.join('blackbox.log')): + if not repo.vfs.exists('blackbox.log'): return limit = opts.get('limit') - blackbox = repo.vfs('blackbox.log', 'r') - lines = blackbox.read().split('\n') + fp = repo.vfs('blackbox.log', 'r') + lines = fp.read().split('\n') count = 0 output = []
--- a/hgext/bugzilla.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/bugzilla.py Sat Apr 16 18:06:48 2016 -0500 @@ -277,10 +277,21 @@ Changeset commit comment. Bug 1234. ''' +from __future__ import absolute_import + +import re +import time +import urlparse +import xmlrpclib + from mercurial.i18n import _ from mercurial.node import short -from mercurial import cmdutil, mail, util, error -import re, time, urlparse, xmlrpclib +from mercurial import ( + cmdutil, + error, + mail, + util, +) # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should @@ -875,8 +886,10 @@ count -= 1 return root - mapfile = self.ui.config('bugzilla', 'style') + mapfile = None tmpl = self.ui.config('bugzilla', 'template') + if not tmpl: + mapfile = self.ui.config('bugzilla', 'style') if not mapfile and not tmpl: tmpl = _('changeset {node|short} in repo {root} refers ' 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
--- a/hgext/censor.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/censor.py Sat Apr 16 18:06:48 2016 -0500 @@ -25,10 +25,20 @@ revisions if they are allowed by the "censor.policy=ignore" config option. """ +from __future__ import absolute_import + +from mercurial.i18n import _ from mercurial.node import short -from mercurial import cmdutil, error, filelog, revlog, scmutil, util -from mercurial.i18n import _ -from mercurial import lock as lockmod + +from mercurial import ( + cmdutil, + error, + filelog, + lock as lockmod, + revlog, + scmutil, + util, +) cmdtable = {} command = cmdutil.command(cmdtable)
--- a/hgext/chgserver.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/chgserver.py Sat Apr 16 18:06:48 2016 -0500 @@ -22,18 +22,35 @@ 'setenv' command replace os.environ completely -'SIGHUP' signal - reload configuration files +'setumask' command + set umask + +'validate' command + reload the config and check if the server is up to date + +Config +------ + +:: + + [chgserver] + idletimeout = 3600 # seconds, after which an idle server will exit + skiphash = False # whether to skip config or env change checks """ from __future__ import absolute_import import SocketServer import errno +import gc +import inspect import os +import random import re -import signal import struct +import sys +import threading +import time import traceback from mercurial.i18n import _ @@ -44,6 +61,7 @@ commandserver, dispatch, error, + extensions, osutil, util, ) @@ -56,6 +74,108 @@ _log = commandserver.log +def _hashlist(items): + """return sha1 hexdigest for a list""" + return util.sha1(str(items)).hexdigest() + +# sensitive config sections affecting confighash +_configsections = [ + 'extdiff', # uisetup will register new commands + 'extensions', +] + +# sensitive environment variables affecting confighash +_envre = re.compile(r'''\A(?: + CHGHG + |HG.* + |LANG(?:UAGE)? + |LC_.* + |LD_.* + |PATH + |PYTHON.* + |TERM(?:INFO)? + |TZ + )\Z''', re.X) + +def _confighash(ui): + """return a quick hash for detecting config/env changes + + confighash is the hash of sensitive config items and environment variables. + + for chgserver, it is designed that once confighash changes, the server is + not qualified to serve its client and should redirect the client to a new + server. different from mtimehash, confighash change will not mark the + server outdated and exit since the user can have different configs at the + same time. 
+ """ + sectionitems = [] + for section in _configsections: + sectionitems.append(ui.configitems(section)) + sectionhash = _hashlist(sectionitems) + envitems = [(k, v) for k, v in os.environ.iteritems() if _envre.match(k)] + envhash = _hashlist(sorted(envitems)) + return sectionhash[:6] + envhash[:6] + +def _getmtimepaths(ui): + """get a list of paths that should be checked to detect change + + The list will include: + - extensions (will not cover all files for complex extensions) + - mercurial/__version__.py + - python binary + """ + modules = [m for n, m in extensions.extensions(ui)] + try: + from mercurial import __version__ + modules.append(__version__) + except ImportError: + pass + files = [sys.executable] + for m in modules: + try: + files.append(inspect.getabsfile(m)) + except TypeError: + pass + return sorted(set(files)) + +def _mtimehash(paths): + """return a quick hash for detecting file changes + + mtimehash calls stat on given paths and calculate a hash based on size and + mtime of each file. mtimehash does not read file content because reading is + expensive. therefore it's not 100% reliable for detecting content changes. + it's possible to return different hashes for same file contents. + it's also possible to return a same hash for different file contents for + some carefully crafted situation. + + for chgserver, it is designed that once mtimehash changes, the server is + considered outdated immediately and should no longer provide service. + """ + def trystat(path): + try: + st = os.stat(path) + return (st.st_mtime, st.st_size) + except OSError: + # could be ENOENT, EPERM etc. 
not fatal in any case + pass + return _hashlist(map(trystat, paths))[:12] + +class hashstate(object): + """a structure storing confighash, mtimehash, paths used for mtimehash""" + def __init__(self, confighash, mtimehash, mtimepaths): + self.confighash = confighash + self.mtimehash = mtimehash + self.mtimepaths = mtimepaths + + @staticmethod + def fromui(ui, mtimepaths=None): + if mtimepaths is None: + mtimepaths = _getmtimepaths(ui) + confighash = _confighash(ui) + mtimehash = _mtimehash(mtimepaths) + _log('confighash = %s mtimehash = %s\n' % (confighash, mtimehash)) + return hashstate(confighash, mtimehash, mtimepaths) + # copied from hgext/pager.py:uisetup() def _setuppagercmd(ui, options, cmd): if not ui.formatted(): @@ -116,6 +236,15 @@ def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None): + # fallback to the original system method if the output needs to be + # captured (to self._buffers), or the output stream is not stdout + # (e.g. stderr, cStringIO), because the chg client is not aware of + # these situations and will behave differently (write to stdout). 
+ if (any(s[1] for s in self._bufferstates) + or not util.safehasattr(self.fout, 'fileno') + or self.fout.fileno() != sys.stdout.fileno()): + return super(chgui, self).system(cmd, environ, cwd, onerr, + errprefix) # copied from mercurial/util.py:system() self.flush() def py2shell(val): @@ -139,23 +268,39 @@ return chgui(srcui) -def _renewui(srcui): +def _loadnewui(srcui, args): newui = srcui.__class__() for a in ['fin', 'fout', 'ferr', 'environ']: setattr(newui, a, getattr(srcui, a)) if util.safehasattr(srcui, '_csystem'): newui._csystem = srcui._csystem + + # internal config: extensions.chgserver + newui.setconfig('extensions', 'chgserver', + srcui.config('extensions', 'chgserver'), '--config') + + # command line args + args = args[:] + dispatch._parseconfig(newui, dispatch._earlygetopt(['--config'], args)) + # stolen from tortoisehg.util.copydynamicconfig() for section, name, value in srcui.walkconfig(): source = srcui.configsource(section, name) - if ':' in source: - # path:line + if ':' in source or source == '--config': + # path:line or command line continue if source == 'none': # ui.configsource returns 'none' by default source = '' newui.setconfig(section, name, value, source) - return newui + + # load wd and repo config, copied from dispatch.py + cwds = dispatch._earlygetopt(['--cwd'], args) + cwd = cwds and os.path.realpath(cwds[-1]) or None + rpath = dispatch._earlygetopt(["-R", "--repository", "--repo"], args) + path, newlui = dispatch._getlocal(newui, rpath, wd=cwd) + + return (newui, newlui) class channeledsystem(object): """Propagate ui.system() request in the following format: @@ -178,7 +323,7 @@ self.channel = channel def __call__(self, cmd, environ, cwd): - args = [util.quotecommand(cmd), cwd or '.'] + args = [util.quotecommand(cmd), os.path.abspath(cwd or '.')] args.extend('%s=%s' % (k, v) for k, v in environ.iteritems()) data = '\0'.join(args) self.out.write(struct.pack('>cI', self.channel, len(data))) @@ -200,11 +345,16 @@ ] class 
chgcmdserver(commandserver.server): - def __init__(self, ui, repo, fin, fout, sock): + def __init__(self, ui, repo, fin, fout, sock, hashstate, baseaddress): super(chgcmdserver, self).__init__( _newchgui(ui, channeledsystem(fin, fout, 'S')), repo, fin, fout) self.clientsock = sock self._oldios = [] # original (self.ch, ui.fp, fd) before "attachio" + self.hashstate = hashstate + self.baseaddress = baseaddress + if hashstate is not None: + self.capabilities = self.capabilities.copy() + self.capabilities['validate'] = chgcmdserver.validate def cleanup(self): # dispatch._runcatch() does not flush outputs if exception is not @@ -277,30 +427,74 @@ setattr(ui, fn, fp) del self._oldios[:] + def validate(self): + """Reload the config and check if the server is up to date + + Read a list of '\0' separated arguments. + Write a non-empty list of '\0' separated instruction strings or '\0' + if the list is empty. + An instruction string could be either: + - "unlink $path", the client should unlink the path to stop the + outdated server. + - "redirect $path", the client should attempt to connect to $path + first. If it does not work, start a new server. It implies + "reconnect". + - "exit $n", the client should exit directly with code n. + This may happen if we cannot parse the config. + - "reconnect", the client should close the connection and + reconnect. + If neither "reconnect" nor "redirect" is included in the instruction + list, the client can continue with this server after completing all + the instructions. 
+ """ + args = self._readlist() + try: + self.ui, lui = _loadnewui(self.ui, args) + except error.ParseError as inst: + dispatch._formatparse(self.ui.warn, inst) + self.ui.flush() + self.cresult.write('exit 255') + return + newhash = hashstate.fromui(lui, self.hashstate.mtimepaths) + insts = [] + if newhash.mtimehash != self.hashstate.mtimehash: + addr = _hashaddress(self.baseaddress, self.hashstate.confighash) + insts.append('unlink %s' % addr) + # mtimehash is empty if one or more extensions fail to load. + # to be compatible with hg, still serve the client this time. + if self.hashstate.mtimehash: + insts.append('reconnect') + if newhash.confighash != self.hashstate.confighash: + addr = _hashaddress(self.baseaddress, newhash.confighash) + insts.append('redirect %s' % addr) + _log('validate: %s\n' % insts) + self.cresult.write('\0'.join(insts) or '\0') + def chdir(self): """Change current directory Note that the behavior of --cwd option is bit different from this. It does not affect --config parameter. """ - length = struct.unpack('>I', self._read(4))[0] - if not length: + path = self._readstr() + if not path: return - path = self._read(length) _log('chdir to %r\n' % path) os.chdir(path) + def setumask(self): + """Change umask""" + mask = struct.unpack('>I', self._read(4))[0] + _log('setumask %r\n' % mask) + os.umask(mask) + def getpager(self): """Read cmdargs and write pager command to r-channel if enabled If pager isn't enabled, this writes '\0' because channeledoutput does not allow to write empty data. """ - length = struct.unpack('>I', self._read(4))[0] - if not length: - args = [] - else: - args = self._read(length).split('\0') + args = self._readlist() try: cmd, _func, args, options, _cmdoptions = dispatch._parse(self.ui, args) @@ -323,41 +517,39 @@ Note that not all variables can make an effect on the running process. 
""" - length = struct.unpack('>I', self._read(4))[0] - if not length: - return - s = self._read(length) + l = self._readlist() try: - newenv = dict(l.split('=', 1) for l in s.split('\0')) + newenv = dict(s.split('=', 1) for s in l) except ValueError: raise ValueError('unexpected value in setenv request') - - diffkeys = set(k for k in set(os.environ.keys() + newenv.keys()) - if os.environ.get(k) != newenv.get(k)) - _log('change env: %r\n' % sorted(diffkeys)) - + _log('setenv: %r\n' % sorted(newenv.keys())) os.environ.clear() os.environ.update(newenv) - - if set(['HGPLAIN', 'HGPLAINEXCEPT']) & diffkeys: - # reload config so that ui.plain() takes effect - self.ui = _renewui(self.ui) - _clearenvaliases(commands.table) capabilities = commandserver.server.capabilities.copy() capabilities.update({'attachio': attachio, 'chdir': chdir, 'getpager': getpager, - 'setenv': setenv}) + 'setenv': setenv, + 'setumask': setumask}) # copied from mercurial/commandserver.py class _requesthandler(SocketServer.StreamRequestHandler): def handle(self): + # use a different process group from the master process, making this + # process pass kernel "is_current_pgrp_orphaned" check so signals like + # SIGTSTP, SIGTTIN, SIGTTOU are not ignored. + os.setpgid(0, 0) + # change random state otherwise forked request handlers would have a + # same state inherited from parent. + random.seed() ui = self.server.ui repo = self.server.repo - sv = chgcmdserver(ui, repo, self.rfile, self.wfile, self.connection) + sv = None try: + sv = chgcmdserver(ui, repo, self.rfile, self.wfile, self.connection, + self.server.hashstate, self.server.baseaddress) try: sv.serve() # handle exceptions that may be raised by command server. most of @@ -374,23 +566,147 @@ except: # re-raises # also write traceback to error channel. otherwise client cannot # see it because it is written to server's stderr by default. 
- traceback.print_exc(file=sv.cerr) + if sv: + cerr = sv.cerr + else: + cerr = commandserver.channeledoutput(self.wfile, 'e') + traceback.print_exc(file=cerr) raise + finally: + # trigger __del__ since ForkingMixIn uses os._exit + gc.collect() + +def _tempaddress(address): + return '%s.%d.tmp' % (address, os.getpid()) + +def _hashaddress(address, hashstr): + return '%s-%s' % (address, hashstr) + +class AutoExitMixIn: # use old-style to comply with SocketServer design + lastactive = time.time() + idletimeout = 3600 # default 1 hour + + def startautoexitthread(self): + # note: the auto-exit check here is cheap enough to not use a thread, + # be done in serve_forever. however SocketServer is hook-unfriendly, + # you simply cannot hook serve_forever without copying a lot of code. + # besides, serve_forever's docstring suggests using thread. + thread = threading.Thread(target=self._autoexitloop) + thread.daemon = True + thread.start() + + def _autoexitloop(self, interval=1): + while True: + time.sleep(interval) + if not self.issocketowner(): + _log('%s is not owned, exiting.\n' % self.server_address) + break + if time.time() - self.lastactive > self.idletimeout: + _log('being idle too long. 
exiting.\n') + break + self.shutdown() + + def process_request(self, request, address): + self.lastactive = time.time() + return SocketServer.ForkingMixIn.process_request( + self, request, address) + + def server_bind(self): + # use a unique temp address so we can stat the file and do ownership + # check later + tempaddress = _tempaddress(self.server_address) + # use relative path instead of full path at bind() if possible, since + # AF_UNIX path has very small length limit (107 chars) on common + # platforms (see sys/un.h) + dirname, basename = os.path.split(tempaddress) + bakwdfd = None + if dirname: + bakwdfd = os.open('.', os.O_DIRECTORY) + os.chdir(dirname) + self.socket.bind(basename) + self._socketstat = os.stat(basename) + # rename will replace the old socket file if exists atomically. the + # old server will detect ownership change and exit. + util.rename(basename, self.server_address) + if bakwdfd: + os.fchdir(bakwdfd) + os.close(bakwdfd) + + def issocketowner(self): + try: + stat = os.stat(self.server_address) + return (stat.st_ino == self._socketstat.st_ino and + stat.st_mtime == self._socketstat.st_mtime) + except OSError: + return False + + def unlinksocketfile(self): + if not self.issocketowner(): + return + # it is possible to have a race condition here that we may + # remove another server's socket file. but that's okay + # since that server will detect and exit automatically and + # the client will start a new server on demand. + try: + os.unlink(self.server_address) + except OSError as exc: + if exc.errno != errno.ENOENT: + raise class chgunixservice(commandserver.unixservice): def init(self): - # drop options set for "hg serve --cmdserver" command - self.ui.setconfig('progress', 'assume-tty', None) - signal.signal(signal.SIGHUP, self._reloadconfig) - class cls(SocketServer.ForkingMixIn, SocketServer.UnixStreamServer): + if self.repo: + # one chgserver can serve multiple repos. 
drop repo infomation + self.ui.setconfig('bundle', 'mainreporoot', '', 'repo') + self.repo = None + self._inithashstate() + self._checkextensions() + class cls(AutoExitMixIn, SocketServer.ForkingMixIn, + SocketServer.UnixStreamServer): ui = self.ui repo = self.repo + hashstate = self.hashstate + baseaddress = self.baseaddress self.server = cls(self.address, _requesthandler) - # avoid writing "listening at" message to stdout before attachio - # request, which calls setvbuf() + self.server.idletimeout = self.ui.configint( + 'chgserver', 'idletimeout', self.server.idletimeout) + self.server.startautoexitthread() + self._createsymlink() + + def _inithashstate(self): + self.baseaddress = self.address + if self.ui.configbool('chgserver', 'skiphash', False): + self.hashstate = None + return + self.hashstate = hashstate.fromui(self.ui) + self.address = _hashaddress(self.address, self.hashstate.confighash) - def _reloadconfig(self, signum, frame): - self.ui = self.server.ui = _renewui(self.ui) + def _checkextensions(self): + if not self.hashstate: + return + if extensions.notloaded(): + # one or more extensions failed to load. mtimehash becomes + # meaningless because we do not know the paths of those extensions. + # set mtimehash to an illegal hash value to invalidate the server. + self.hashstate.mtimehash = '' + + def _createsymlink(self): + if self.baseaddress == self.address: + return + tempaddress = _tempaddress(self.baseaddress) + os.symlink(os.path.basename(self.address), tempaddress) + util.rename(tempaddress, self.baseaddress) + + def run(self): + try: + self.server.serve_forever() + finally: + self.server.unlinksocketfile() def uisetup(ui): commandserver._servicemap['chgunix'] = chgunixservice + + # CHGINTERNALMARK is temporarily set by chg client to detect if chg will + # start another chg. drop it to avoid possible side effects. + if 'CHGINTERNALMARK' in os.environ: + del os.environ['CHGINTERNALMARK']
--- a/hgext/children.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/children.py Sat Apr 16 18:06:48 2016 -0500 @@ -14,9 +14,15 @@ "children(REV)"` instead. ''' -from mercurial import cmdutil -from mercurial.commands import templateopts +from __future__ import absolute_import + from mercurial.i18n import _ +from mercurial import ( + cmdutil, + commands, +) + +templateopts = commands.templateopts cmdtable = {} command = cmdutil.command(cmdtable) @@ -43,8 +49,8 @@ Please use :hg:`log` instead:: - hg children => hg log -r 'children()' - hg children -r REV => hg log -r 'children(REV)' + hg children => hg log -r "children()" + hg children -r REV => hg log -r "children(REV)" See :hg:`help log` and :hg:`help revsets.children`.
--- a/hgext/churn.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/churn.py Sat Apr 16 18:06:48 2016 -0500 @@ -8,11 +8,21 @@ '''command to display statistics about repository history''' +from __future__ import absolute_import + +import datetime +import os +import time + from mercurial.i18n import _ -from mercurial import patch, cmdutil, scmutil, util, commands, error -from mercurial import encoding -import os -import time, datetime +from mercurial import ( + cmdutil, + commands, + encoding, + patch, + scmutil, + util, +) cmdtable = {} command = cmdutil.command(cmdtable) @@ -23,12 +33,7 @@ testedwith = 'internal' def maketemplater(ui, repo, tmpl): - try: - t = cmdutil.changeset_templater(ui, repo, False, None, tmpl, - None, False) - except SyntaxError as inst: - raise error.Abort(inst.args[0]) - return t + return cmdutil.changeset_templater(ui, repo, False, None, tmpl, None, False) def changedlines(ui, repo, ctx1, ctx2, fns): added, removed = 0, 0 @@ -83,7 +88,8 @@ rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)] state['count'] += 1 - ui.progress(_('analyzing'), state['count'], total=len(repo)) + ui.progress(_('analyzing'), state['count'], total=len(repo), + unit=_('revisions')) for ctx in cmdutil.walkchangerevs(repo, m, opts, prep): continue
--- a/hgext/clonebundles.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/clonebundles.py Sat Apr 16 18:06:48 2016 -0500 @@ -162,6 +162,8 @@ Mercurial server when the bundle hosting service fails. """ +from __future__ import absolute_import + from mercurial import ( extensions, wireproto,
--- a/hgext/color.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/color.py Sat Apr 16 18:06:48 2016 -0500 @@ -153,11 +153,18 @@ If ``pagermode`` is not defined, the ``mode`` will be used. ''' -import os +from __future__ import absolute_import -from mercurial import cmdutil, commands, dispatch, extensions, subrepo, util -from mercurial import ui as uimod -from mercurial import templater, error +import os +from mercurial import ( + cmdutil, + commands, + dispatch, + extensions, + subrepo, + ui as uimod, + util, +) from mercurial.i18n import _ cmdtable = {} @@ -425,7 +432,7 @@ return super(colorui, self).write(*args, **opts) label = opts.get('label', '') - if self._buffers: + if self._buffers and not opts.get('prompt', False): if self._bufferapplylabels: self._buffers[-1].extend(self.label(a, label) for a in args) else: @@ -480,29 +487,6 @@ for s in msg.split('\n')]) return msg -def templatelabel(context, mapping, args): - if len(args) != 2: - # i18n: "label" is a keyword - raise error.ParseError(_("label expects two arguments")) - - # add known effects to the mapping so symbols like 'red', 'bold', - # etc. don't need to be quoted - mapping.update(dict([(k, k) for k in _effects])) - - thing = args[1][0](context, mapping, args[1][1]) - - # apparently, repo could be a string that is the favicon? 
- repo = mapping.get('repo', '') - if isinstance(repo, str): - return thing - - label = args[0][0](context, mapping, args[0][1]) - - thing = templater.stringify(thing) - label = templater.stringify(label) - - return repo.ui.label(thing, label) - def uisetup(ui): if ui.plain(): return @@ -524,8 +508,6 @@ return orig(gitsub, commands, env, stream, cwd) extensions.wrapfunction(dispatch, '_runcommand', colorcmd) extensions.wrapfunction(subrepo.gitsubrepo, '_gitnodir', colorgit) - templatelabel.__doc__ = templater.funcs['label'].__doc__ - templater.funcs['label'] = templatelabel def extsetup(ui): commands.globalopts.append( @@ -549,7 +531,8 @@ if os.name != 'nt': w32effects = None else: - import re, ctypes + import ctypes + import re _kernel32 = ctypes.windll.kernel32
--- a/hgext/convert/__init__.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/__init__.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,12 +7,20 @@ '''import revisions from foreign VCS repositories into Mercurial''' -import convcmd -import cvsps -import subversion -from mercurial import cmdutil, templatekw +from __future__ import absolute_import + +from mercurial import ( + cmdutil, + registrar, +) from mercurial.i18n import _ +from . import ( + convcmd, + cvsps, + subversion, +) + cmdtable = {} command = cmdutil.command(cmdtable) # Note for extension authors: ONLY specify testedwith = 'internal' for @@ -429,22 +437,22 @@ return subversion.revsplit(rev)[0] return rev +templatekeyword = registrar.templatekeyword() + +@templatekeyword('svnrev') def kwsvnrev(repo, ctx, **args): - """:svnrev: String. Converted subversion revision number.""" + """String. Converted subversion revision number.""" return kwconverted(ctx, 'svnrev') +@templatekeyword('svnpath') def kwsvnpath(repo, ctx, **args): - """:svnpath: String. Converted subversion revision project path.""" + """String. Converted subversion revision project path.""" return kwconverted(ctx, 'svnpath') +@templatekeyword('svnuuid') def kwsvnuuid(repo, ctx, **args): - """:svnuuid: String. Converted subversion revision repository identifier.""" + """String. Converted subversion revision repository identifier.""" return kwconverted(ctx, 'svnuuid') -def extsetup(ui): - templatekw.keywords['svnrev'] = kwsvnrev - templatekw.keywords['svnpath'] = kwsvnpath - templatekw.keywords['svnuuid'] = kwsvnuuid - # tell hggettext to extract docstrings from these functions: i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
--- a/hgext/convert/bzr.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/bzr.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,9 +7,16 @@ # This module is for handling 'bzr', that was formerly known as Bazaar-NG; # it cannot access 'bar' repositories, but they were never used very much +from __future__ import absolute_import import os -from mercurial import demandimport, error +from mercurial import ( + demandimport, + error +) +from mercurial.i18n import _ +from . import common + # these do not work with demandimport, blacklist demandimport.ignore.extend([ 'bzrlib.transactions', @@ -17,49 +24,50 @@ 'ElementPath', ]) -from mercurial.i18n import _ -from mercurial import error -from common import NoRepo, commit, converter_source - try: # bazaar imports - from bzrlib import bzrdir, revision, errors - from bzrlib.revisionspec import RevisionSpec + import bzrlib.bzrdir + import bzrlib.errors + import bzrlib.revision + import bzrlib.revisionspec.RevisionSpec + bzrdir = bzrlib.bzrdir + errors = bzrlib.errors + revision = bzrlib.revision + revisionspec = bzrlib.revisionspec except ImportError: pass supportedkinds = ('file', 'symlink') -class bzr_source(converter_source): +class bzr_source(common.converter_source): """Reads Bazaar repositories by using the Bazaar Python libraries""" def __init__(self, ui, path, revs=None): super(bzr_source, self).__init__(ui, path, revs=revs) if not os.path.exists(os.path.join(path, '.bzr')): - raise NoRepo(_('%s does not look like a Bazaar repository') - % path) + raise common.NoRepo(_('%s does not look like a Bazaar repository') + % path) try: # access bzrlib stuff bzrdir except NameError: - raise NoRepo(_('Bazaar modules could not be loaded')) + raise common.NoRepo(_('Bazaar modules could not be loaded')) path = os.path.abspath(path) self._checkrepotype(path) try: self.sourcerepo = bzrdir.BzrDir.open(path).open_repository() except errors.NoRepositoryPresent: - raise NoRepo(_('%s does not look like a Bazaar repository') - % path) + raise 
common.NoRepo(_('%s does not look like a Bazaar repository') + % path) self._parentids = {} def _checkrepotype(self, path): # Lightweight checkouts detection is informational but probably # fragile at API level. It should not terminate the conversion. try: - from bzrlib import bzrdir dir = bzrdir.BzrDir.open_containing(path)[0] try: tree = dir.open_workingtree(recommend_upgrade=False) @@ -102,7 +110,7 @@ revid = None for branch in self._bzrbranches(): try: - r = RevisionSpec.from_string(self.revs[0]) + r = revisionspec.RevisionSpec.from_string(self.revs[0]) info = r.in_history(branch) except errors.BzrError: pass @@ -160,7 +168,7 @@ branch = self.recode(rev.properties.get('branch-nick', u'default')) if branch == 'trunk': branch = 'default' - return commit(parents=parents, + return common.commit(parents=parents, date='%d %d' % (rev.timestamp, -rev.timezone), author=self.recode(rev.committer), desc=self.recode(rev.message),
--- a/hgext/convert/common.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/common.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,10 +4,21 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import -import base64, errno, subprocess, os, datetime, re +import base64 import cPickle as pickle -from mercurial import phases, util, error +import datetime +import errno +import os +import re +import subprocess + +from mercurial import ( + error, + phases, + util, +) from mercurial.i18n import _ propertycache = util.propertycache @@ -44,11 +55,13 @@ class commit(object): def __init__(self, author, date, desc, parents, branch=None, rev=None, - extra={}, sortkey=None, saverev=True, phase=phases.draft): + extra={}, sortkey=None, saverev=True, phase=phases.draft, + optparents=None): self.author = author or 'unknown' self.date = date or '0 0' self.desc = desc - self.parents = parents + self.parents = parents # will be converted and used as parents + self.optparents = optparents or [] # will be used if already converted self.branch = branch self.rev = rev self.extra = extra
--- a/hgext/convert/convcmd.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/convcmd.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,22 +4,50 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import -from common import NoRepo, MissingTool, SKIPREV, mapfile -from cvs import convert_cvs -from darcs import darcs_source -from git import convert_git -from hg import mercurial_source, mercurial_sink -from subversion import svn_source, svn_sink -from monotone import monotone_source -from gnuarch import gnuarch_source -from bzr import bzr_source -from p4 import p4_source -import filemap +import os +import shlex +import shutil + +from mercurial import ( + encoding, + error, + hg, + util, +) +from mercurial.i18n import _ -import os, shutil, shlex -from mercurial import hg, util, encoding, error -from mercurial.i18n import _ +from . import ( + bzr, + common, + cvs, + darcs, + filemap, + git, + gnuarch, + hg as hgconvert, + monotone, + p4, + subversion, +) + +mapfile = common.mapfile +MissingTool = common.MissingTool +NoRepo = common.NoRepo +SKIPREV = common.SKIPREV + +bzr_source = bzr.bzr_source +convert_cvs = cvs.convert_cvs +convert_git = git.convert_git +darcs_source = darcs.darcs_source +gnuarch_source = gnuarch.gnuarch_source +mercurial_sink = hgconvert.mercurial_sink +mercurial_source = hgconvert.mercurial_source +monotone_source = monotone.monotone_source +p4_source = p4.p4_source +svn_sink = subversion.svn_sink +svn_source = subversion.svn_source orig_encoding = 'ascii' @@ -117,7 +145,7 @@ def getfile(self, file, rev): self.retrieved += 1 self.ui.progress(_('getting files'), self.retrieved, - item=file, total=self.filecount) + item=file, total=self.filecount, unit=_('files')) return self.source.getfile(file, rev) def targetfilebelongstosource(self, targetfilename): @@ -444,6 +472,9 @@ parents = [self.map.get(p, p) for p in parents] except KeyError: 
parents = [b[0] for b in pbranches] + parents.extend(self.map[x] + for x in commit.optparents + if x in self.map) if len(pbranches) != 2: cleanp2 = set() if len(parents) < 3:
--- a/hgext/convert/cvs.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/cvs.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,15 +4,31 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import -import os, re, socket, errno -from cStringIO import StringIO -from mercurial import encoding, util, error +import errno +import os +import re +import socket + +from mercurial import ( + encoding, + error, + util, +) from mercurial.i18n import _ -from common import NoRepo, commit, converter_source, checktool -from common import makedatetimestamp -import cvsps +from . import ( + common, + cvsps, +) + +stringio = util.stringio +checktool = common.checktool +commit = common.commit +converter_source = common.converter_source +makedatetimestamp = common.makedatetimestamp +NoRepo = common.NoRepo class convert_cvs(converter_source): def __init__(self, ui, path, revs=None): @@ -211,7 +227,7 @@ # file-objects returned by socket.makefile() do not handle # large read() requests very well. chunksize = 65536 - output = StringIO() + output = stringio() while count > 0: data = fp.read(min(count, chunksize)) if not data:
--- a/hgext/convert/cvsps.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/cvsps.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,13 +4,17 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import +import cPickle as pickle import os import re -import cPickle as pickle + +from mercurial import ( + hook, + util, +) from mercurial.i18n import _ -from mercurial import hook -from mercurial import util class logentry(object): '''Class logentry has the following attributes:
--- a/hgext/convert/darcs.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/darcs.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,39 +4,52 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import -from common import NoRepo, checktool, commandline, commit, converter_source +import errno +import os +import re +import shutil +import tempfile from mercurial.i18n import _ -from mercurial import util, error -import os, shutil, tempfile, re, errno +from mercurial import ( + error, + util, +) +from . import common +NoRepo = common.NoRepo # The naming drift of ElementTree is fun! try: - from xml.etree.cElementTree import ElementTree, XMLParser + import xml.etree.cElementTree.ElementTree as ElementTree + import xml.etree.cElementTree.XMLParser as XMLParser except ImportError: try: - from xml.etree.ElementTree import ElementTree, XMLParser + import xml.etree.ElementTree.ElementTree as ElementTree + import xml.etree.ElementTree.XMLParser as XMLParser except ImportError: try: - from elementtree.cElementTree import ElementTree, XMLParser + import elementtree.cElementTree.ElementTree as ElementTree + import elementtree.cElementTree.XMLParser as XMLParser except ImportError: try: - from elementtree.ElementTree import ElementTree, XMLParser + import elementtree.ElementTree.ElementTree as ElementTree + import elementtree.ElementTree.XMLParser as XMLParser except ImportError: pass -class darcs_source(converter_source, commandline): +class darcs_source(common.converter_source, common.commandline): def __init__(self, ui, path, revs=None): - converter_source.__init__(self, ui, path, revs=revs) - commandline.__init__(self, ui, 'darcs') + common.converter_source.__init__(self, ui, path, revs=revs) + common.commandline.__init__(self, ui, 'darcs') # check for _darcs, ElementTree so that we can easily skip # test-convert-darcs if ElementTree is not around if not 
os.path.exists(os.path.join(path, '_darcs')): raise NoRepo(_("%s does not look like a darcs repository") % path) - checktool('darcs') + common.checktool('darcs') version = self.run0('--version').splitlines()[0].strip() if version < '2.1': raise error.Abort(_('darcs version 2.1 or newer needed (found %r)') @@ -139,10 +152,10 @@ desc = elt.findtext('name') + '\n' + elt.findtext('comment', '') # etree can return unicode objects for name, comment, and author, # so recode() is used to ensure str objects are emitted. - return commit(author=self.recode(elt.get('author')), - date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'), - desc=self.recode(desc).strip(), - parents=self.parents[rev]) + return common.commit(author=self.recode(elt.get('author')), + date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'), + desc=self.recode(desc).strip(), + parents=self.parents[rev]) def pull(self, rev): output, status = self.run('pull', self.path, all=True,
--- a/hgext/convert/filemap.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/filemap.py Sat Apr 16 18:06:48 2016 -0500 @@ -3,12 +3,16 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import import posixpath import shlex +from mercurial import ( + error, +) from mercurial.i18n import _ -from mercurial import error -from common import SKIPREV, converter_source +from . import common +SKIPREV = common.SKIPREV def rpairs(path): '''Yield tuples with path split at '/', starting with the full path. @@ -164,7 +168,7 @@ # touch files we're interested in, but also merges that merge two # or more interesting revisions. -class filemap_source(converter_source): +class filemap_source(common.converter_source): def __init__(self, ui, baseconverter, filemap): super(filemap_source, self).__init__(ui) self.base = baseconverter
--- a/hgext/convert/git.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/git.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,14 +4,19 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import import os -import subprocess -from mercurial import util, config, error -from mercurial.node import hex, nullid +from mercurial import ( + config, + error, + node as nodemod, +) from mercurial.i18n import _ -from common import NoRepo, commit, converter_source, checktool, commandline +from . import ( + common, +) class submodule(object): def __init__(self, path, node, url): @@ -25,7 +30,7 @@ def hgsubstate(self): return "%s %s" % (self.node, self.path) -class convert_git(converter_source, commandline): +class convert_git(common.converter_source, common.commandline): # Windows does not support GIT_DIR= construct while other systems # cannot remove environment variable. Just assume none have # both issues. @@ -48,19 +53,15 @@ def gitpipe(self, *args, **kwargs): return self._gitcmd(self._run3, *args, **kwargs) - def gitread(self, s): - fh = self.gitopen(s) - data = fh.read() - return data, fh.close() - def __init__(self, ui, path, revs=None): super(convert_git, self).__init__(ui, path, revs=revs) - commandline.__init__(self, ui, 'git') + common.commandline.__init__(self, ui, 'git') if os.path.isdir(path + "/.git"): path += "/.git" if not os.path.exists(path + "/objects"): - raise NoRepo(_("%s does not look like a Git repository") % path) + raise common.NoRepo(_("%s does not look like a Git repository") % + path) # The default value (50) is based on the default for 'git diff'. 
similarity = ui.configint('convert', 'git.similarity', default=50) @@ -75,7 +76,7 @@ else: self.simopt = [] - checktool('git', 'git') + common.checktool('git', 'git') self.path = path self.submodules = [] @@ -102,7 +103,7 @@ return heads def catfile(self, rev, type): - if rev == hex(nullid): + if rev == nodemod.nullhex: raise IOError self.catfilepipe[0].write(rev+'\n') self.catfilepipe[0].flush() @@ -119,7 +120,7 @@ return data def getfile(self, name, rev): - if rev == hex(nullid): + if rev == nodemod.nullhex: return None, None if name == '.hgsub': data = '\n'.join([m.hgsub() for m in self.submoditer()]) @@ -133,7 +134,7 @@ return data, mode def submoditer(self): - null = hex(nullid) + null = nodemod.nullhex for m in sorted(self.submodules, key=lambda p: p.path): if m.node != null: yield m @@ -210,7 +211,7 @@ subexists[0] = True if entry[4] == 'D' or renamesource: subdeleted[0] = True - changes.append(('.hgsub', hex(nullid))) + changes.append(('.hgsub', nodemod.nullhex)) else: changes.append(('.hgsub', '')) elif entry[1] == '160000' or entry[0] == ':160000': @@ -218,7 +219,7 @@ subexists[0] = True else: if renamesource: - h = hex(nullid) + h = nodemod.nullhex self.modecache[(f, h)] = (p and "x") or (s and "l") or "" changes.append((f, h)) @@ -255,7 +256,7 @@ if subexists[0]: if subdeleted[0]: - changes.append(('.hgsubstate', hex(nullid))) + changes.append(('.hgsubstate', nodemod.nullhex)) else: self.retrievegitmodules(version) changes.append(('.hgsubstate', '')) @@ -292,8 +293,9 @@ tz = -int(tzs) * (int(tzh) * 3600 + int(tzm)) date = tm + " " + str(tz) - c = commit(parents=parents, date=date, author=author, desc=message, - rev=version) + c = common.commit(parents=parents, date=date, author=author, + desc=message, + rev=version) return c def numcommits(self): @@ -350,6 +352,8 @@ output, status = self.gitrunlines('diff-tree', '--name-only', '--root', '-r', version, '%s^%s' % (version, i + 1), '--') + if status: + raise error.Abort(_('cannot read changes in %s') % 
version) changes = [f.rstrip('\n') for f in output] return changes
--- a/hgext/convert/gnuarch.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/gnuarch.py Sat Apr 16 18:06:48 2016 -0500 @@ -5,14 +5,22 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import -from common import NoRepo, commandline, commit, converter_source +import email +import os +import shutil +import stat +import tempfile +from mercurial import ( + encoding, + error, + util, +) from mercurial.i18n import _ -from mercurial import encoding, util, error -import os, shutil, tempfile, stat -from email.Parser import Parser +from . import common -class gnuarch_source(converter_source, commandline): +class gnuarch_source(common.converter_source, common.commandline): class gnuarch_rev(object): def __init__(self, rev): @@ -31,7 +39,7 @@ super(gnuarch_source, self).__init__(ui, path, revs=revs) if not os.path.exists(os.path.join(path, '{arch}')): - raise NoRepo(_("%s does not look like a GNU Arch repository") + raise common.NoRepo(_("%s does not look like a GNU Arch repository") % path) # Could use checktool, but we want to check for baz or tla. @@ -44,7 +52,7 @@ else: raise error.Abort(_('cannot find a GNU Arch tool')) - commandline.__init__(self, ui, self.execmd) + common.commandline.__init__(self, ui, self.execmd) self.path = os.path.realpath(path) self.tmppath = None @@ -54,7 +62,7 @@ self.changes = {} self.parents = {} self.tags = {} - self.catlogparser = Parser() + self.catlogparser = email.Parser.Parser() self.encoding = encoding.encoding self.archives = [] @@ -175,8 +183,9 @@ def getcommit(self, rev): changes = self.changes[rev] - return commit(author=changes.author, date=changes.date, - desc=changes.summary, parents=self.parents[rev], rev=rev) + return common.commit(author=changes.author, date=changes.date, + desc=changes.summary, parents=self.parents[rev], + rev=rev) def gettags(self): return self.tags
--- a/hgext/convert/hg.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/hg.py Sat Apr 16 18:06:48 2016 -0500 @@ -16,24 +16,37 @@ # identifier to be stored in the converted revision. This will cause # the converted revision to have a different identity than the # source. +from __future__ import absolute_import +import os +import re +import time -import os, time, cStringIO +from mercurial import ( + bookmarks, + context, + error, + exchange, + hg, + lock as lockmod, + merge as mergemod, + node as nodemod, + phases, + scmutil, + util, +) +stringio = util.stringio + from mercurial.i18n import _ -from mercurial.node import bin, hex, nullid -from mercurial import hg, util, context, bookmarks, error, scmutil, exchange -from mercurial import phases -from mercurial import lock as lockmod -from mercurial import merge as mergemod +from . import common +mapfile = common.mapfile +NoRepo = common.NoRepo -from common import NoRepo, commit, converter_source, converter_sink, mapfile - -import re sha1re = re.compile(r'\b[0-9a-f]{12,40}\b') -class mercurial_sink(converter_sink): +class mercurial_sink(common.converter_sink): def __init__(self, ui, path): - converter_sink.__init__(self, ui, path) + common.converter_sink.__init__(self, ui, path) self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True) self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False) self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default') @@ -125,14 +138,14 @@ self.before() def _rewritetags(self, source, revmap, data): - fp = cStringIO.StringIO() + fp = stringio() for line in data.splitlines(): s = line.split(' ', 1) if len(s) != 2: continue revid = revmap.get(source.lookuprev(s[0])) if not revid: - if s[0] == hex(nullid): + if s[0] == nodemod.nullhex: revid = s[0] else: continue @@ -140,7 +153,7 @@ return fp.getvalue() def _rewritesubstate(self, source, data): - fp = cStringIO.StringIO() + fp = stringio() for line in data.splitlines(): s = line.split(' ', 1) if 
len(s) != 2: @@ -148,7 +161,7 @@ revid = s[0] subpath = s[1] - if revid != hex(nullid): + if revid != nodemod.nullhex: revmap = self.subrevmaps.get(subpath) if revmap is None: revmap = mapfile(self.ui, @@ -250,13 +263,13 @@ parents = pl nparents = len(parents) if self.filemapmode and nparents == 1: - m1node = self.repo.changelog.read(bin(parents[0]))[0] + m1node = self.repo.changelog.read(nodemod.bin(parents[0]))[0] parent = parents[0] if len(parents) < 2: - parents.append(nullid) + parents.append(nodemod.nullid) if len(parents) < 2: - parents.append(nullid) + parents.append(nodemod.nullid) p2 = parents.pop(0) text = commit.desc @@ -283,12 +296,12 @@ # Only transplant stores its reference in binary if label == 'transplant_source': - node = hex(node) + node = nodemod.hex(node) newrev = revmap.get(node) if newrev is not None: if label == 'transplant_source': - newrev = bin(newrev) + newrev = nodemod.bin(newrev) extra[label] = newrev @@ -302,7 +315,7 @@ p2 = parents.pop(0) p1ctx = self.repo[p1] p2ctx = None - if p2 != nullid: + if p2 != nodemod.nullid: p2ctx = self.repo[p2] fileset = set(files) if full: @@ -324,7 +337,7 @@ phases.phasenames[commit.phase], 'convert') with self.repo.transaction("convert") as tr: - node = hex(self.repo.commitctx(ctx)) + node = nodemod.hex(self.repo.commitctx(ctx)) # If the node value has changed, but the phase is lower than # draft, set it back to draft since it hasn't been exposed @@ -340,7 +353,7 @@ if self.filemapmode and nparents == 1: man = self.repo.manifest - mnode = self.repo.changelog.read(bin(p2))[0] + mnode = self.repo.changelog.read(nodemod.bin(p2))[0] closed = 'close' in commit.extra if not closed and not man.cmp(m1node, man.revision(mnode)): self.ui.status(_("filtering out empty revision\n")) @@ -354,7 +367,7 @@ tagparent = parentctx.node() except error.RepoError: parentctx = None - tagparent = nullid + tagparent = nodemod.nullid oldlines = set() for branch, heads in self.repo.branchmap().iteritems(): @@ -397,7 +410,7 @@ 
[".hgtags"], getfilectx, "convert-repo", date, extra) node = self.repo.commitctx(ctx) - return hex(node), hex(tagparent) + return nodemod.hex(node), nodemod.hex(tagparent) def setfilemapmode(self, active): self.filemapmode = active @@ -413,7 +426,7 @@ self.ui.status(_("updating bookmarks\n")) destmarks = self.repo._bookmarks for bookmark in updatedbookmark: - destmarks[bookmark] = bin(updatedbookmark[bookmark]) + destmarks[bookmark] = nodemod.bin(updatedbookmark[bookmark]) destmarks.recordchange(tr) tr.close() finally: @@ -430,9 +443,9 @@ 'are not implemented)') % rev) return rev in self.repo -class mercurial_source(converter_source): +class mercurial_source(common.converter_source): def __init__(self, ui, path, revs=None): - converter_source.__init__(self, ui, path, revs) + common.converter_source.__init__(self, ui, path, revs) self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False) self.ignored = set() self.saverev = ui.configbool('convert', 'hg.saverev', False) @@ -493,7 +506,7 @@ return [p for p in ctx.parents() if p and self.keep(p.node())] def getheads(self): - return [hex(h) for h in self._heads if self.keep(h)] + return [nodemod.hex(h) for h in self._heads if self.keep(h)] def getfile(self, name, rev): try: @@ -569,21 +582,29 @@ def getcommit(self, rev): ctx = self._changectx(rev) - parents = [p.hex() for p in self._parents(ctx)] + _parents = self._parents(ctx) + parents = [p.hex() for p in _parents] + optparents = [p.hex() for p in ctx.parents() if p and p not in _parents] crev = rev - return commit(author=ctx.user(), - date=util.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2'), - desc=ctx.description(), rev=crev, parents=parents, - branch=ctx.branch(), extra=ctx.extra(), - sortkey=ctx.rev(), saverev=self.saverev, - phase=ctx.phase()) + return common.commit(author=ctx.user(), + date=util.datestr(ctx.date(), + '%Y-%m-%d %H:%M:%S %1%2'), + desc=ctx.description(), + rev=crev, + parents=parents, + optparents=optparents, + branch=ctx.branch(), + 
extra=ctx.extra(), + sortkey=ctx.rev(), + saverev=self.saverev, + phase=ctx.phase()) def gettags(self): # This will get written to .hgtags, filter non global tags out. tags = [t for t in self.repo.tagslist() if self.repo.tagtype(t[0]) == 'global'] - return dict([(name, hex(node)) for name, node in tags + return dict([(name, nodemod.hex(node)) for name, node in tags if self.keep(node)]) def getchangedfiles(self, rev, i): @@ -622,7 +643,7 @@ def lookuprev(self, rev): try: - return hex(self.repo.lookup(rev)) + return nodemod.hex(self.repo.lookup(rev)) except (error.RepoError, error.LookupError): return None
--- a/hgext/convert/monotone.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/monotone.py Sat Apr 16 18:06:48 2016 -0500 @@ -5,28 +5,34 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import -import os, re -from mercurial import util, error -from common import NoRepo, commit, converter_source, checktool -from common import commandline +import os +import re + +from mercurial import ( + error, + util, +) from mercurial.i18n import _ -class monotone_source(converter_source, commandline): +from . import common + +class monotone_source(common.converter_source, common.commandline): def __init__(self, ui, path=None, revs=None): - converter_source.__init__(self, ui, path, revs) + common.converter_source.__init__(self, ui, path, revs) if revs and len(revs) > 1: raise error.Abort(_('monotone source does not support specifying ' 'multiple revs')) - commandline.__init__(self, ui, 'mtn') + common.commandline.__init__(self, ui, 'mtn') self.ui = ui self.path = path self.automatestdio = False self.revs = revs - norepo = NoRepo(_("%s does not look like a monotone repository") - % path) + norepo = common.NoRepo(_("%s does not look like a monotone repository") + % path) if not os.path.exists(os.path.join(path, '_MTN')): # Could be a monotone repository (SQLite db file) try: @@ -69,7 +75,7 @@ self.files = None self.dirs = None - checktool('mtn', abort=False) + common.checktool('mtn', abort=False) def mtnrun(self, *args, **kwargs): if self.automatestdio: @@ -302,7 +308,7 @@ certs = self.mtngetcerts(rev) if certs.get('suspend') == certs["branch"]: extra['close'] = 1 - return commit( + return common.commit( author=certs["author"], date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")), desc=certs["changelog"],
--- a/hgext/convert/p4.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/p4.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,13 +4,18 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. +from __future__ import absolute_import -from mercurial import util, error +import marshal +import re + +from mercurial import ( + error, + util, +) from mercurial.i18n import _ -from common import commit, converter_source, checktool, NoRepo -import marshal -import re +from . import common def loaditer(f): "Yield the dictionary objects generated by p4" @@ -37,17 +42,18 @@ filename = filename.replace(k, v) return filename -class p4_source(converter_source): +class p4_source(common.converter_source): def __init__(self, ui, path, revs=None): # avoid import cycle - import convcmd + from . import convcmd super(p4_source, self).__init__(ui, path, revs=revs) if "/" in path and not path.startswith('//'): - raise NoRepo(_('%s does not look like a P4 repository') % path) + raise common.NoRepo(_('%s does not look like a P4 repository') % + path) - checktool('p4', abort=False) + common.checktool('p4', abort=False) self.p4changes = {} self.heads = {} @@ -142,10 +148,10 @@ parents = [] date = (int(d["time"]), 0) # timezone not set - c = commit(author=self.recode(d["user"]), - date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'), - parents=parents, desc=desc, branch=None, - extra={"p4": change}) + c = common.commit(author=self.recode(d["user"]), + date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'), + parents=parents, desc=desc, branch=None, + extra={"p4": change}) files = [] copies = {}
--- a/hgext/convert/subversion.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/subversion.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,38 +1,58 @@ # Subversion 1.4/1.5 Python API backend # # Copyright(C) 2007 Daniel Holth et al +from __future__ import absolute_import -import os, re, sys, tempfile, urllib, urllib2 +import cPickle as pickle +import os +import re +import sys +import tempfile import xml.dom.minidom -import cPickle as pickle -from mercurial import strutil, scmutil, util, encoding, error +from mercurial import ( + encoding, + error, + scmutil, + strutil, + util, +) from mercurial.i18n import _ +from . import common + +stringio = util.stringio propertycache = util.propertycache +urlerr = util.urlerr +urlreq = util.urlreq + +commandline = common.commandline +commit = common.commit +converter_sink = common.converter_sink +converter_source = common.converter_source +decodeargs = common.decodeargs +encodeargs = common.encodeargs +makedatetimestamp = common.makedatetimestamp +mapfile = common.mapfile +MissingTool = common.MissingTool +NoRepo = common.NoRepo # Subversion stuff. Works best with very recent Python SVN bindings # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing # these bindings. -from cStringIO import StringIO - -from common import NoRepo, MissingTool, commit, encodeargs, decodeargs -from common import commandline, converter_source, converter_sink, mapfile -from common import makedatetimestamp - try: - from svn.core import SubversionException, Pool import svn import svn.client import svn.core import svn.ra import svn.delta - import transport + from . import transport import warnings warnings.filterwarnings('ignore', module='svn.core', category=DeprecationWarning) + svn.core.SubversionException # trigger import to catch error except ImportError: svn = None @@ -74,12 +94,12 @@ # so we can extend it safely with new components. The "safe" # characters were taken from the "svn_uri__char_validity" table in # libsvn_subr/path.c. 
- return urllib.quote(s, "!$&'()*+,-./:=@_~") + return urlreq.quote(s, "!$&'()*+,-./:=@_~") def geturl(path): try: return svn.client.url_from_path(svn.core.svn_path_canonicalize(path)) - except SubversionException: + except svn.core.SubversionException: # svn.client.url_from_path() fails with local repositories pass if os.path.isdir(path): @@ -213,10 +233,10 @@ # for the svn-specific "not found" XML. def httpcheck(ui, path, proto): try: - opener = urllib2.build_opener() + opener = urlreq.buildopener() rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path)) data = rsp.read() - except urllib2.HTTPError as inst: + except urlerr.httperror as inst: if inst.code != 404: # Except for 404 we cannot know for sure this is not an svn repo ui.warn(_('svn: cannot probe remote repository, assume it could ' @@ -225,7 +245,7 @@ return True data = inst.fp.read() except Exception: - # Could be urllib2.URLError if the URL is invalid or anything else. + # Could be urlerr.urlerror if the URL is invalid or anything else. return False return '<m:human-readable errcode="160013">' in data @@ -240,7 +260,7 @@ if (os.name == 'nt' and path[:1] == '/' and path[1:2].isalpha() and path[2:6].lower() == '%3a/'): path = path[:2] + ':/' + path[6:] - path = urllib.url2pathname(path) + path = urlreq.url2pathname(path) except ValueError: proto = 'file' path = os.path.abspath(url) @@ -310,13 +330,13 @@ self.baseurl = svn.ra.get_repos_root(self.ra) # Module is either empty or a repository path starting with # a slash and not ending with a slash. 
- self.module = urllib.unquote(self.url[len(self.baseurl):]) + self.module = urlreq.unquote(self.url[len(self.baseurl):]) self.prevmodule = None self.rootmodule = self.module self.commits = {} self.paths = {} self.uuid = svn.ra.get_uuid(self.ra) - except SubversionException: + except svn.core.SubversionException: ui.traceback() svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR, @@ -377,7 +397,7 @@ svn.client.ls(self.url.rstrip('/') + '/' + quote(path), optrev, False, self.ctx) return True - except SubversionException: + except svn.core.SubversionException: return False def getheads(self): @@ -676,7 +696,7 @@ prevmodule = self.reparent('') dirent = svn.ra.stat(self.ra, path.strip('/'), stop) self.reparent(prevmodule) - except SubversionException: + except svn.core.SubversionException: dirent = None if not dirent: raise SvnPathNotFound(_('%s not found up to revision %d') @@ -728,7 +748,7 @@ for i, (path, ent) in enumerate(paths): self.ui.progress(_('scanning paths'), i, item=path, - total=len(paths)) + total=len(paths), unit=_('paths')) entrypath = self.getrelpath(path) kind = self._checkpath(entrypath, revnum) @@ -948,7 +968,7 @@ firstcset.parents.append(latest) except SvnPathNotFound: pass - except SubversionException as xxx_todo_changeme: + except svn.core.SubversionException as xxx_todo_changeme: (inst, num) = xxx_todo_changeme.args if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION: raise error.Abort(_('svn: branch has no revision %s') @@ -965,7 +985,7 @@ if self.module != new_module: self.module = new_module self.reparent(self.module) - io = StringIO() + io = stringio() info = svn.ra.get_file(self.ra, file, revnum, io) data = io.getvalue() # ra.get_file() seems to keep a reference on the input buffer @@ -975,7 +995,7 @@ info = info[-1] mode = ("svn:executable" in info) and 'x' or '' mode = ("svn:special" in info) and 'l' or mode - except SubversionException as e: + except svn.core.SubversionException as e: notfound = 
(svn.core.SVN_ERR_FS_NOT_FOUND, svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND) if e.apr_err in notfound: # File not found @@ -990,7 +1010,7 @@ def _iterfiles(self, path, revnum): """Enumerate all files in path at revnum, recursively.""" path = path.strip('/') - pool = Pool() + pool = svn.core.Pool() rpath = '/'.join([self.baseurl, quote(path)]).strip('/') entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool) if path:
--- a/hgext/convert/transport.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/convert/transport.py Sat Apr 16 18:06:48 2016 -0500 @@ -16,12 +16,18 @@ # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. +from __future__ import absolute_import -from mercurial import util -from svn.core import SubversionException, Pool -import svn.ra import svn.client import svn.core +import svn.ra + +Pool = svn.core.Pool +SubversionException = svn.core.SubversionException + +from mercurial import ( + util, +) # Some older versions of the Python bindings need to be # explicitly initialized. But what we want to do probably
--- a/hgext/eol.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/eol.py Sat Apr 16 18:06:48 2016 -0500 @@ -91,9 +91,18 @@ used. """ +from __future__ import absolute_import + +import os +import re from mercurial.i18n import _ -from mercurial import util, config, extensions, match, error -import re, os +from mercurial import ( + config, + error, + extensions, + match, + util, +) # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
--- a/hgext/extdiff.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/extdiff.py Sat Apr 16 18:06:48 2016 -0500 @@ -60,11 +60,28 @@ pretty fast (at least faster than having to compare the entire tree). ''' +from __future__ import absolute_import + +import os +import re +import shlex +import shutil +import tempfile from mercurial.i18n import _ -from mercurial.node import short, nullid -from mercurial import cmdutil, scmutil, util, commands, encoding, filemerge -from mercurial import archival, error -import os, shlex, shutil, tempfile, re +from mercurial.node import ( + nullid, + short, +) +from mercurial import ( + archival, + cmdutil, + commands, + encoding, + error, + filemerge, + scmutil, + util, +) cmdtable = {} command = cmdutil.command(cmdtable)
--- a/hgext/factotum.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/factotum.py Sat Apr 16 18:06:48 2016 -0500 @@ -45,10 +45,19 @@ ''' +from __future__ import absolute_import + +import os from mercurial.i18n import _ -from mercurial.url import passwordmgr -from mercurial import httpconnection, error -import os, urllib2 +from mercurial import ( + error, + httpconnection, + url, + util, +) + +urlreq = util.urlreq +passwordmgr = url.passwordmgr ERRMAX = 128 @@ -93,7 +102,7 @@ @monkeypatch_method(passwordmgr) def find_user_password(self, realm, authuri): - user, passwd = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password( + user, passwd = urlreq.httppasswordmgrwithdefaultrealm.find_user_password( self, realm, authuri) if user and passwd: self._writedebug(user, passwd)
--- a/hgext/fetch.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/fetch.py Sat Apr 16 18:06:48 2016 -0500 @@ -61,7 +61,7 @@ branchnode = None if parent != branchnode: raise error.Abort(_('working directory not at branch tip'), - hint=_('use "hg update" to check out branch tip')) + hint=_("use 'hg update' to check out branch tip")) wlock = lock = None try:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/fsmonitor/__init__.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,694 @@ +# __init__.py - fsmonitor initialization and overrides +# +# Copyright 2013-2016 Facebook, Inc. +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''Faster status operations with the Watchman file monitor (EXPERIMENTAL) + +Integrates the file-watching program Watchman with Mercurial to produce faster +status results. + +On a particular Linux system, for a real-world repository with over 400,000 +files hosted on ext4, vanilla `hg status` takes 1.3 seconds. On the same +system, with fsmonitor it takes about 0.3 seconds. + +fsmonitor requires no configuration -- it will tell Watchman about your +repository as necessary. You'll need to install Watchman from +https://facebook.github.io/watchman/ and make sure it is in your PATH. + +The following configuration options exist: + +:: + + [fsmonitor] + mode = {off, on, paranoid} + +When `mode = off`, fsmonitor will disable itself (similar to not loading the +extension at all). When `mode = on`, fsmonitor will be enabled (the default). +When `mode = paranoid`, fsmonitor will query both Watchman and the filesystem, +and ensure that the results are consistent. + +:: + + [fsmonitor] + timeout = (float) + +A value, in seconds, that determines how long fsmonitor will wait for Watchman +to return results. Defaults to `2.0`. + +:: + + [fsmonitor] + blacklistusers = (list of userids) + +A list of usernames for which fsmonitor will disable itself altogether. + +:: + + [fsmonitor] + walk_on_invalidate = (boolean) + +Whether or not to walk the whole repo ourselves when our cached state has been +invalidated, for example when Watchman has been restarted or .hgignore rules +have been changed. Walking the repo in that case can result in competing for +I/O with Watchman. 
For large repos it is recommended to set this value to +false. You may wish to set this to true if you have a very fast filesystem +that can outpace the IPC overhead of getting the result data for the full repo +from Watchman. Defaults to false. + +fsmonitor is incompatible with the largefiles and eol extensions, and +will disable itself if any of those are active. + +''' + +# Platforms Supported +# =================== +# +# **Linux:** *Stable*. Watchman and fsmonitor are both known to work reliably, +# even under severe loads. +# +# **Mac OS X:** *Stable*. The Mercurial test suite passes with fsmonitor +# turned on, on case-insensitive HFS+. There has been a reasonable amount of +# user testing under normal loads. +# +# **Solaris, BSD:** *Alpha*. watchman and fsmonitor are believed to work, but +# very little testing has been done. +# +# **Windows:** *Alpha*. Not in a release version of watchman or fsmonitor yet. +# +# Known Issues +# ============ +# +# * fsmonitor will disable itself if any of the following extensions are +# enabled: largefiles, inotify, eol; or if the repository has subrepos. +# * fsmonitor will produce incorrect results if nested repos that are not +# subrepos exist. *Workaround*: add nested repo paths to your `.hgignore`. +# +# The issues related to nested repos and subrepos are probably not fundamental +# ones. Patches to fix them are welcome. + +from __future__ import absolute_import + +import os +import stat +import sys + +from mercurial import ( + context, + extensions, + localrepo, + merge, + pathutil, + scmutil, + util, +) +from mercurial import match as matchmod +from mercurial.i18n import _ + +from . import ( + state, + watchmanclient, +) + +# Note for extension authors: ONLY specify testedwith = 'internal' for +# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should +# be specifying the version(s) of Mercurial they are tested with, or +# leave the attribute unspecified. 
+testedwith = 'internal' + +# This extension is incompatible with the following blacklisted extensions +# and will disable itself when encountering one of these: +_blacklist = ['largefiles', 'eol'] + +def _handleunavailable(ui, state, ex): + """Exception handler for Watchman interaction exceptions""" + if isinstance(ex, watchmanclient.Unavailable): + if ex.warn: + ui.warn(str(ex) + '\n') + if ex.invalidate: + state.invalidate() + ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg) + else: + ui.log('fsmonitor', 'Watchman exception: %s\n', ex) + +def _hashignore(ignore): + """Calculate hash for ignore patterns and filenames + + If this information changes between Mercurial invocations, we can't + rely on Watchman information anymore and have to re-scan the working + copy. + + """ + sha1 = util.sha1() + if util.safehasattr(ignore, 'includepat'): + sha1.update(ignore.includepat) + sha1.update('\0\0') + if util.safehasattr(ignore, 'excludepat'): + sha1.update(ignore.excludepat) + sha1.update('\0\0') + if util.safehasattr(ignore, 'patternspat'): + sha1.update(ignore.patternspat) + sha1.update('\0\0') + if util.safehasattr(ignore, '_files'): + for f in ignore._files: + sha1.update(f) + sha1.update('\0') + return sha1.hexdigest() + +def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True): + '''Replacement for dirstate.walk, hooking into Watchman. 
+ + Whenever full is False, ignored is False, and the Watchman client is + available, use Watchman combined with saved state to possibly return only a + subset of files.''' + def bail(): + return orig(match, subrepos, unknown, ignored, full=True) + + if full or ignored or not self._watchmanclient.available(): + return bail() + state = self._fsmonitorstate + clock, ignorehash, notefiles = state.get() + if not clock: + if state.walk_on_invalidate: + return bail() + # Initial NULL clock value, see + # https://facebook.github.io/watchman/docs/clockspec.html + clock = 'c:0:0' + notefiles = [] + + def fwarn(f, msg): + self._ui.warn('%s: %s\n' % (self.pathto(f), msg)) + return False + + def badtype(mode): + kind = _('unknown') + if stat.S_ISCHR(mode): + kind = _('character device') + elif stat.S_ISBLK(mode): + kind = _('block device') + elif stat.S_ISFIFO(mode): + kind = _('fifo') + elif stat.S_ISSOCK(mode): + kind = _('socket') + elif stat.S_ISDIR(mode): + kind = _('directory') + return _('unsupported file type (type is %s)') % kind + + ignore = self._ignore + dirignore = self._dirignore + if unknown: + if _hashignore(ignore) != ignorehash and clock != 'c:0:0': + # ignore list changed -- can't rely on Watchman state any more + if state.walk_on_invalidate: + return bail() + notefiles = [] + clock = 'c:0:0' + else: + # always ignore + ignore = util.always + dirignore = util.always + + matchfn = match.matchfn + matchalways = match.always() + dmap = self._map + nonnormalset = getattr(self, '_nonnormalset', None) + + copymap = self._copymap + getkind = stat.S_IFMT + dirkind = stat.S_IFDIR + regkind = stat.S_IFREG + lnkkind = stat.S_IFLNK + join = self._join + normcase = util.normcase + fresh_instance = False + + exact = skipstep3 = False + if matchfn == match.exact: # match.exact + exact = True + dirignore = util.always # skip step 2 + elif match.files() and not match.anypats(): # match.match, no patterns + skipstep3 = True + + if not exact and self._checkcase: + # note that 
even though we could receive directory entries, we're only + # interested in checking if a file with the same name exists. So only + # normalize files if possible. + normalize = self._normalizefile + skipstep3 = False + else: + normalize = None + + # step 1: find all explicit files + results, work, dirsnotfound = self._walkexplicit(match, subrepos) + + skipstep3 = skipstep3 and not (work or dirsnotfound) + work = [d for d in work if not dirignore(d[0])] + + if not work and (exact or skipstep3): + for s in subrepos: + del results[s] + del results['.hg'] + return results + + # step 2: query Watchman + try: + # Use the user-configured timeout for the query. + # Add a little slack over the top of the user query to allow for + # overheads while transferring the data + self._watchmanclient.settimeout(state.timeout + 0.1) + result = self._watchmanclient.command('query', { + 'fields': ['mode', 'mtime', 'size', 'exists', 'name'], + 'since': clock, + 'expression': [ + 'not', [ + 'anyof', ['dirname', '.hg'], + ['name', '.hg', 'wholename'] + ] + ], + 'sync_timeout': int(state.timeout * 1000), + 'empty_on_fresh_instance': state.walk_on_invalidate, + }) + except Exception as ex: + _handleunavailable(self._ui, state, ex) + self._watchmanclient.clearconnection() + return bail() + else: + # We need to propagate the last observed clock up so that we + # can use it for our next query + state.setlastclock(result['clock']) + if result['is_fresh_instance']: + if state.walk_on_invalidate: + state.invalidate() + return bail() + fresh_instance = True + # Ignore any prior noteable files from the state info + notefiles = [] + + # for file paths which require normalization and we encounter a case + # collision, we store our own foldmap + if normalize: + foldmap = dict((normcase(k), k) for k in results) + + switch_slashes = os.sep == '\\' + # The order of the results is, strictly speaking, undefined. 
+ # For case changes on a case insensitive filesystem we may receive + # two entries, one with exists=True and another with exists=False. + # The exists=True entries in the same response should be interpreted + # as being happens-after the exists=False entries due to the way that + # Watchman tracks files. We use this property to reconcile deletes + # for name case changes. + for entry in result['files']: + fname = entry['name'] + if switch_slashes: + fname = fname.replace('\\', '/') + if normalize: + normed = normcase(fname) + fname = normalize(fname, True, True) + foldmap[normed] = fname + fmode = entry['mode'] + fexists = entry['exists'] + kind = getkind(fmode) + + if not fexists: + # if marked as deleted and we don't already have a change + # record, mark it as deleted. If we already have an entry + # for fname then it was either part of walkexplicit or was + # an earlier result that was a case change + if fname not in results and fname in dmap and ( + matchalways or matchfn(fname)): + results[fname] = None + elif kind == dirkind: + if fname in dmap and (matchalways or matchfn(fname)): + results[fname] = None + elif kind == regkind or kind == lnkkind: + if fname in dmap: + if matchalways or matchfn(fname): + results[fname] = entry + elif (matchalways or matchfn(fname)) and not ignore(fname): + results[fname] = entry + elif fname in dmap and (matchalways or matchfn(fname)): + results[fname] = None + + # step 3: query notable files we don't already know about + # XXX try not to iterate over the entire dmap + if normalize: + # any notable files that have changed case will already be handled + # above, so just check membership in the foldmap + notefiles = set((normalize(f, True, True) for f in notefiles + if normcase(f) not in foldmap)) + visit = set((f for f in notefiles if (f not in results and matchfn(f) + and (f in dmap or not ignore(f))))) + + if nonnormalset is not None and not fresh_instance: + if matchalways: + visit.update(f for f in nonnormalset if f not 
in results) + visit.update(f for f in copymap if f not in results) + else: + visit.update(f for f in nonnormalset + if f not in results and matchfn(f)) + visit.update(f for f in copymap + if f not in results and matchfn(f)) + else: + if matchalways: + visit.update(f for f, st in dmap.iteritems() + if (f not in results and + (st[2] < 0 or st[0] != 'n' or fresh_instance))) + visit.update(f for f in copymap if f not in results) + else: + visit.update(f for f, st in dmap.iteritems() + if (f not in results and + (st[2] < 0 or st[0] != 'n' or fresh_instance) + and matchfn(f))) + visit.update(f for f in copymap + if f not in results and matchfn(f)) + + audit = pathutil.pathauditor(self._root).check + auditpass = [f for f in visit if audit(f)] + auditpass.sort() + auditfail = visit.difference(auditpass) + for f in auditfail: + results[f] = None + + nf = iter(auditpass).next + for st in util.statfiles([join(f) for f in auditpass]): + f = nf() + if st or f in dmap: + results[f] = st + + for s in subrepos: + del results[s] + del results['.hg'] + return results + +def overridestatus( + orig, self, node1='.', node2=None, match=None, ignored=False, + clean=False, unknown=False, listsubrepos=False): + listignored = ignored + listclean = clean + listunknown = unknown + + def _cmpsets(l1, l2): + try: + if 'FSMONITOR_LOG_FILE' in os.environ: + fn = os.environ['FSMONITOR_LOG_FILE'] + f = open(fn, 'wb') + else: + fn = 'fsmonitorfail.log' + f = self.opener(fn, 'wb') + except (IOError, OSError): + self.ui.warn(_('warning: unable to write to %s\n') % fn) + return + + try: + for i, (s1, s2) in enumerate(zip(l1, l2)): + if set(s1) != set(s2): + f.write('sets at position %d are unequal\n' % i) + f.write('watchman returned: %s\n' % s1) + f.write('stat returned: %s\n' % s2) + finally: + f.close() + + if isinstance(node1, context.changectx): + ctx1 = node1 + else: + ctx1 = self[node1] + if isinstance(node2, context.changectx): + ctx2 = node2 + else: + ctx2 = self[node2] + + working = 
ctx2.rev() is None + parentworking = working and ctx1 == self['.'] + match = match or matchmod.always(self.root, self.getcwd()) + + # Maybe we can use this opportunity to update Watchman's state. + # Mercurial uses workingcommitctx and/or memctx to represent the part of + # the workingctx that is to be committed. So don't update the state in + # that case. + # HG_PENDING is set in the environment when the dirstate is being updated + # in the middle of a transaction; we must not update our state in that + # case, or we risk forgetting about changes in the working copy. + updatestate = (parentworking and match.always() and + not isinstance(ctx2, (context.workingcommitctx, + context.memctx)) and + 'HG_PENDING' not in os.environ) + + try: + if self._fsmonitorstate.walk_on_invalidate: + # Use a short timeout to query the current clock. If that + # takes too long then we assume that the service will be slow + # to answer our query. + # walk_on_invalidate indicates that we prefer to walk the + # tree ourselves because we can ignore portions that Watchman + # cannot and we tend to be faster in the warmer buffer cache + # cases. + self._watchmanclient.settimeout(0.1) + else: + # Give Watchman more time to potentially complete its walk + # and return the initial clock. In this mode we assume that + # the filesystem will be slower than parsing a potentially + # very large Watchman result set. + self._watchmanclient.settimeout( + self._fsmonitorstate.timeout + 0.1) + startclock = self._watchmanclient.getcurrentclock() + except Exception as ex: + self._watchmanclient.clearconnection() + _handleunavailable(self.ui, self._fsmonitorstate, ex) + # boo, Watchman failed. bail + return orig(node1, node2, match, listignored, listclean, + listunknown, listsubrepos) + + if updatestate: + # We need info about unknown files. This may make things slower the + # first time, but whatever. 
+ stateunknown = True + else: + stateunknown = listunknown + + r = orig(node1, node2, match, listignored, listclean, stateunknown, + listsubrepos) + modified, added, removed, deleted, unknown, ignored, clean = r + + if updatestate: + notefiles = modified + added + removed + deleted + unknown + self._fsmonitorstate.set( + self._fsmonitorstate.getlastclock() or startclock, + _hashignore(self.dirstate._ignore), + notefiles) + + if not listunknown: + unknown = [] + + # don't do paranoid checks if we're not going to query Watchman anyway + full = listclean or match.traversedir is not None + if self._fsmonitorstate.mode == 'paranoid' and not full: + # run status again and fall back to the old walk this time + self.dirstate._fsmonitordisable = True + + # shut the UI up + quiet = self.ui.quiet + self.ui.quiet = True + fout, ferr = self.ui.fout, self.ui.ferr + self.ui.fout = self.ui.ferr = open(os.devnull, 'wb') + + try: + rv2 = orig( + node1, node2, match, listignored, listclean, listunknown, + listsubrepos) + finally: + self.dirstate._fsmonitordisable = False + self.ui.quiet = quiet + self.ui.fout, self.ui.ferr = fout, ferr + + # clean isn't tested since it's set to True above + _cmpsets([modified, added, removed, deleted, unknown, ignored, clean], + rv2) + modified, added, removed, deleted, unknown, ignored, clean = rv2 + + return scmutil.status( + modified, added, removed, deleted, unknown, ignored, clean) + +def makedirstate(cls): + class fsmonitordirstate(cls): + def _fsmonitorinit(self, fsmonitorstate, watchmanclient): + # _fsmonitordisable is used in paranoid mode + self._fsmonitordisable = False + self._fsmonitorstate = fsmonitorstate + self._watchmanclient = watchmanclient + + def walk(self, *args, **kwargs): + orig = super(fsmonitordirstate, self).walk + if self._fsmonitordisable: + return orig(*args, **kwargs) + return overridewalk(orig, self, *args, **kwargs) + + def rebuild(self, *args, **kwargs): + self._fsmonitorstate.invalidate() + return 
super(fsmonitordirstate, self).rebuild(*args, **kwargs) + + def invalidate(self, *args, **kwargs): + self._fsmonitorstate.invalidate() + return super(fsmonitordirstate, self).invalidate(*args, **kwargs) + + return fsmonitordirstate + +def wrapdirstate(orig, self): + ds = orig(self) + # only override the dirstate when Watchman is available for the repo + if util.safehasattr(self, '_fsmonitorstate'): + ds.__class__ = makedirstate(ds.__class__) + ds._fsmonitorinit(self._fsmonitorstate, self._watchmanclient) + return ds + +def extsetup(ui): + wrapfilecache(localrepo.localrepository, 'dirstate', wrapdirstate) + if sys.platform == 'darwin': + # An assist for avoiding the dangling-symlink fsevents bug + extensions.wrapfunction(os, 'symlink', wrapsymlink) + + extensions.wrapfunction(merge, 'update', wrapupdate) + +def wrapsymlink(orig, source, link_name): + ''' if we create a dangling symlink, also touch the parent dir + to encourage fsevents notifications to work more correctly ''' + try: + return orig(source, link_name) + finally: + try: + os.utime(os.path.dirname(link_name), None) + except OSError: + pass + +class state_update(object): + ''' This context mananger is responsible for dispatching the state-enter + and state-leave signals to the watchman service ''' + + def __init__(self, repo, node, distance, partial): + self.repo = repo + self.node = node + self.distance = distance + self.partial = partial + + def __enter__(self): + self._state('state-enter') + return self + + def __exit__(self, type_, value, tb): + status = 'ok' if type_ is None else 'failed' + self._state('state-leave', status=status) + + def _state(self, cmd, status='ok'): + if not util.safehasattr(self.repo, '_watchmanclient'): + return + try: + commithash = self.repo[self.node].hex() + self.repo._watchmanclient.command(cmd, { + 'name': 'hg.update', + 'metadata': { + # the target revision + 'rev': commithash, + # approximate number of commits between current and target + 'distance': self.distance, + # 
success/failure (only really meaningful for state-leave) + 'status': status, + # whether the working copy parent is changing + 'partial': self.partial, + }}) + except Exception as e: + # Swallow any errors; fire and forget + self.repo.ui.log( + 'watchman', 'Exception %s while running %s\n', e, cmd) + +# Bracket working copy updates with calls to the watchman state-enter +# and state-leave commands. This allows clients to perform more intelligent +# settling during bulk file change scenarios +# https://facebook.github.io/watchman/docs/cmd/subscribe.html#advanced-settling +def wrapupdate(orig, repo, node, branchmerge, force, ancestor=None, + mergeancestor=False, labels=None, matcher=None, **kwargs): + + distance = 0 + partial = True + if matcher is None or matcher.always(): + partial = False + wc = repo[None] + parents = wc.parents() + if len(parents) == 2: + anc = repo.changelog.ancestor(parents[0].node(), parents[1].node()) + ancrev = repo[anc].rev() + distance = abs(repo[node].rev() - ancrev) + elif len(parents) == 1: + distance = abs(repo[node].rev() - parents[0].rev()) + + with state_update(repo, node, distance, partial): + return orig( + repo, node, branchmerge, force, ancestor, mergeancestor, + labels, matcher, *kwargs) + +def reposetup(ui, repo): + # We don't work with largefiles or inotify + exts = extensions.enabled() + for ext in _blacklist: + if ext in exts: + ui.warn(_('The fsmonitor extension is incompatible with the %s ' + 'extension and has been disabled.\n') % ext) + return + + if util.safehasattr(repo, 'dirstate'): + # We don't work with subrepos either. Note that we can get passed in + # e.g. a statichttprepo, which throws on trying to access the substate. + # XXX This sucks. + try: + # if repo[None].substate can cause a dirstate parse, which is too + # slow. 
Instead, look for a file called hgsubstate, + if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'): + return + except AttributeError: + return + + fsmonitorstate = state.state(repo) + if fsmonitorstate.mode == 'off': + return + + try: + client = watchmanclient.client(repo) + except Exception as ex: + _handleunavailable(ui, fsmonitorstate, ex) + return + + repo._fsmonitorstate = fsmonitorstate + repo._watchmanclient = client + + # at this point since fsmonitorstate wasn't present, repo.dirstate is + # not a fsmonitordirstate + repo.dirstate.__class__ = makedirstate(repo.dirstate.__class__) + # nuke the dirstate so that _fsmonitorinit and subsequent configuration + # changes take effect on it + del repo._filecache['dirstate'] + delattr(repo.unfiltered(), 'dirstate') + + class fsmonitorrepo(repo.__class__): + def status(self, *args, **kwargs): + orig = super(fsmonitorrepo, self).status + return overridestatus(orig, self, *args, **kwargs) + + repo.__class__ = fsmonitorrepo + +def wrapfilecache(cls, propname, wrapper): + """Wraps a filecache property. These can't be wrapped using the normal + wrapfunction. This should eventually go into upstream Mercurial. + """ + assert callable(wrapper) + for currcls in cls.__mro__: + if propname in currcls.__dict__: + origfn = currcls.__dict__[propname].func + assert callable(origfn) + def wrap(*args, **kwargs): + return wrapper(origfn, *args, **kwargs) + currcls.__dict__[propname].func = wrap + break + + if currcls is object: + raise AttributeError( + _("type '%s' has no property '%s'") % (cls, propname))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/fsmonitor/pywatchman/__init__.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,779 @@ +# Copyright 2014-present Facebook, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name Facebook nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os +import errno +import math +import socket +import subprocess +import time + +# Sometimes it's really hard to get Python extensions to compile, +# so fall back to a pure Python implementation. 
+try: + import bser +except ImportError: + import pybser as bser + +import capabilities + +if os.name == 'nt': + import ctypes + import ctypes.wintypes + + wintypes = ctypes.wintypes + GENERIC_READ = 0x80000000 + GENERIC_WRITE = 0x40000000 + FILE_FLAG_OVERLAPPED = 0x40000000 + OPEN_EXISTING = 3 + INVALID_HANDLE_VALUE = -1 + FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000 + FORMAT_MESSAGE_ALLOCATE_BUFFER = 0x00000100 + FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200 + WAIT_TIMEOUT = 0x00000102 + WAIT_OBJECT_0 = 0x00000000 + ERROR_IO_PENDING = 997 + + class OVERLAPPED(ctypes.Structure): + _fields_ = [ + ("Internal", wintypes.ULONG), ("InternalHigh", wintypes.ULONG), + ("Offset", wintypes.DWORD), ("OffsetHigh", wintypes.DWORD), + ("hEvent", wintypes.HANDLE) + ] + + def __init__(self): + self.Offset = 0 + self.OffsetHigh = 0 + self.hEvent = 0 + + LPDWORD = ctypes.POINTER(wintypes.DWORD) + + CreateFile = ctypes.windll.kernel32.CreateFileA + CreateFile.argtypes = [wintypes.LPSTR, wintypes.DWORD, wintypes.DWORD, + wintypes.LPVOID, wintypes.DWORD, wintypes.DWORD, + wintypes.HANDLE] + CreateFile.restype = wintypes.HANDLE + + CloseHandle = ctypes.windll.kernel32.CloseHandle + CloseHandle.argtypes = [wintypes.HANDLE] + CloseHandle.restype = wintypes.BOOL + + ReadFile = ctypes.windll.kernel32.ReadFile + ReadFile.argtypes = [wintypes.HANDLE, wintypes.LPVOID, wintypes.DWORD, + LPDWORD, ctypes.POINTER(OVERLAPPED)] + ReadFile.restype = wintypes.BOOL + + WriteFile = ctypes.windll.kernel32.WriteFile + WriteFile.argtypes = [wintypes.HANDLE, wintypes.LPVOID, wintypes.DWORD, + LPDWORD, ctypes.POINTER(OVERLAPPED)] + WriteFile.restype = wintypes.BOOL + + GetLastError = ctypes.windll.kernel32.GetLastError + GetLastError.argtypes = [] + GetLastError.restype = wintypes.DWORD + + FormatMessage = ctypes.windll.kernel32.FormatMessageA + FormatMessage.argtypes = [wintypes.DWORD, wintypes.LPVOID, wintypes.DWORD, + wintypes.DWORD, ctypes.POINTER(wintypes.LPSTR), + wintypes.DWORD, wintypes.LPVOID] + 
FormatMessage.restype = wintypes.DWORD + + LocalFree = ctypes.windll.kernel32.LocalFree + + GetOverlappedResultEx = ctypes.windll.kernel32.GetOverlappedResultEx + GetOverlappedResultEx.argtypes = [wintypes.HANDLE, + ctypes.POINTER(OVERLAPPED), LPDWORD, + wintypes.DWORD, wintypes.BOOL] + GetOverlappedResultEx.restype = wintypes.BOOL + + CancelIoEx = ctypes.windll.kernel32.CancelIoEx + CancelIoEx.argtypes = [wintypes.HANDLE, ctypes.POINTER(OVERLAPPED)] + CancelIoEx.restype = wintypes.BOOL + +# 2 bytes marker, 1 byte int size, 8 bytes int64 value +sniff_len = 13 + +# This is a helper for debugging the client. +_debugging = False +if _debugging: + + def log(fmt, *args): + print('[%s] %s' % + (time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime()), + fmt % args[:])) +else: + + def log(fmt, *args): + pass + + +class WatchmanError(Exception): + pass + + +class SocketTimeout(WatchmanError): + """A specialized exception raised for socket timeouts during communication to/from watchman. + This makes it easier to implement non-blocking loops as callers can easily distinguish + between a routine timeout and an actual error condition. + + Note that catching WatchmanError will also catch this as it is a super-class, so backwards + compatibility in exception handling is preserved. + """ + + +class CommandError(WatchmanError): + """error returned by watchman + + self.msg is the message returned by watchman. 
+ """ + + def __init__(self, msg, cmd=None): + self.msg = msg + self.cmd = cmd + super(CommandError, self).__init__('watchman command error: %s' % msg) + + def setCommand(self, cmd): + self.cmd = cmd + + def __str__(self): + if self.cmd: + return '%s, while executing %s' % (self.msg, self.cmd) + return self.msg + + +class Transport(object): + """ communication transport to the watchman server """ + buf = None + + def close(self): + """ tear it down """ + raise NotImplementedError() + + def readBytes(self, size): + """ read size bytes """ + raise NotImplementedError() + + def write(self, buf): + """ write some data """ + raise NotImplementedError() + + def setTimeout(self, value): + pass + + def readLine(self): + """ read a line + Maintains its own buffer, callers of the transport should not mix + calls to readBytes and readLine. + """ + if self.buf is None: + self.buf = [] + + # Buffer may already have a line if we've received unilateral + # response(s) from the server + if len(self.buf) == 1 and "\n" in self.buf[0]: + (line, b) = self.buf[0].split("\n", 1) + self.buf = [b] + return line + + while True: + b = self.readBytes(4096) + if "\n" in b: + result = ''.join(self.buf) + (line, b) = b.split("\n", 1) + self.buf = [b] + return result + line + self.buf.append(b) + + +class Codec(object): + """ communication encoding for the watchman server """ + transport = None + + def __init__(self, transport): + self.transport = transport + + def receive(self): + raise NotImplementedError() + + def send(self, *args): + raise NotImplementedError() + + def setTimeout(self, value): + self.transport.setTimeout(value) + + +class UnixSocketTransport(Transport): + """ local unix domain socket transport """ + sock = None + + def __init__(self, sockpath, timeout): + self.sockpath = sockpath + self.timeout = timeout + + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + sock.settimeout(self.timeout) + sock.connect(self.sockpath) + self.sock = sock + except socket.error as 
e: + raise WatchmanError('unable to connect to %s: %s' % + (self.sockpath, e)) + + def close(self): + self.sock.close() + self.sock = None + + def setTimeout(self, value): + self.timeout = value + self.sock.settimeout(self.timeout) + + def readBytes(self, size): + try: + buf = [self.sock.recv(size)] + if not buf[0]: + raise WatchmanError('empty watchman response') + return buf[0] + except socket.timeout: + raise SocketTimeout('timed out waiting for response') + + def write(self, data): + try: + self.sock.sendall(data) + except socket.timeout: + raise SocketTimeout('timed out sending query command') + + +class WindowsNamedPipeTransport(Transport): + """ connect to a named pipe """ + + def __init__(self, sockpath, timeout): + self.sockpath = sockpath + self.timeout = int(math.ceil(timeout * 1000)) + self._iobuf = None + + self.pipe = CreateFile(sockpath, GENERIC_READ | GENERIC_WRITE, 0, None, + OPEN_EXISTING, FILE_FLAG_OVERLAPPED, None) + + if self.pipe == INVALID_HANDLE_VALUE: + self.pipe = None + self._raise_win_err('failed to open pipe %s' % sockpath, + GetLastError()) + + def _win32_strerror(self, err): + """ expand a win32 error code into a human readable message """ + + # FormatMessage will allocate memory and assign it here + buf = ctypes.c_char_p() + FormatMessage( + FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER + | FORMAT_MESSAGE_IGNORE_INSERTS, None, err, 0, buf, 0, None) + try: + return buf.value + finally: + LocalFree(buf) + + def _raise_win_err(self, msg, err): + raise IOError('%s win32 error code: %d %s' % + (msg, err, self._win32_strerror(err))) + + def close(self): + if self.pipe: + CloseHandle(self.pipe) + self.pipe = None + + def readBytes(self, size): + """ A read can block for an unbounded amount of time, even if the + kernel reports that the pipe handle is signalled, so we need to + always perform our reads asynchronously + """ + + # try to satisfy the read from any buffered data + if self._iobuf: + if size >= len(self._iobuf): + 
res = self._iobuf + self.buf = None + return res + res = self._iobuf[:size] + self._iobuf = self._iobuf[size:] + return res + + # We need to initiate a read + buf = ctypes.create_string_buffer(size) + olap = OVERLAPPED() + + log('made read buff of size %d', size) + + # ReadFile docs warn against sending in the nread parameter for async + # operations, so we always collect it via GetOverlappedResultEx + immediate = ReadFile(self.pipe, buf, size, None, olap) + + if not immediate: + err = GetLastError() + if err != ERROR_IO_PENDING: + self._raise_win_err('failed to read %d bytes' % size, + GetLastError()) + + nread = wintypes.DWORD() + if not GetOverlappedResultEx(self.pipe, olap, nread, + 0 if immediate else self.timeout, True): + err = GetLastError() + CancelIoEx(self.pipe, olap) + + if err == WAIT_TIMEOUT: + log('GetOverlappedResultEx timedout') + raise SocketTimeout('timed out after waiting %dms for read' % + self.timeout) + + log('GetOverlappedResultEx reports error %d', err) + self._raise_win_err('error while waiting for read', err) + + nread = nread.value + if nread == 0: + # Docs say that named pipes return 0 byte when the other end did + # a zero byte write. 
Since we don't ever do that, the only + # other way this shows up is if the client has gotten in a weird + # state, so let's bail out + CancelIoEx(self.pipe, olap) + raise IOError('Async read yielded 0 bytes; unpossible!') + + # Holds precisely the bytes that we read from the prior request + buf = buf[:nread] + + returned_size = min(nread, size) + if returned_size == nread: + return buf + + # keep any left-overs around for a later read to consume + self._iobuf = buf[returned_size:] + return buf[:returned_size] + + def write(self, data): + olap = OVERLAPPED() + immediate = WriteFile(self.pipe, ctypes.c_char_p(data), len(data), + None, olap) + + if not immediate: + err = GetLastError() + if err != ERROR_IO_PENDING: + self._raise_win_err('failed to write %d bytes' % len(data), + GetLastError()) + + # Obtain results, waiting if needed + nwrote = wintypes.DWORD() + if GetOverlappedResultEx(self.pipe, olap, nwrote, 0 if immediate else + self.timeout, True): + return nwrote.value + + err = GetLastError() + + # It's potentially unsafe to allow the write to continue after + # we unwind, so let's make a best effort to avoid that happening + CancelIoEx(self.pipe, olap) + + if err == WAIT_TIMEOUT: + raise SocketTimeout('timed out after waiting %dms for write' % + self.timeout) + self._raise_win_err('error while waiting for write of %d bytes' % + len(data), err) + + +class CLIProcessTransport(Transport): + """ open a pipe to the cli to talk to the service + This intended to be used only in the test harness! + + The CLI is an oddball because we only support JSON input + and cannot send multiple commands through the same instance, + so we spawn a new process for each command. + + We disable server spawning for this implementation, again, because + it is intended to be used only in our test harness. You really + should not need to use the CLI transport for anything real. 
+ + While the CLI can output in BSER, our Transport interface doesn't + support telling this instance that it should do so. That effectively + limits this implementation to JSON input and output only at this time. + + It is the responsibility of the caller to set the send and + receive codecs appropriately. + """ + proc = None + closed = True + + def __init__(self, sockpath, timeout): + self.sockpath = sockpath + self.timeout = timeout + + def close(self): + if self.proc: + self.proc.kill() + self.proc = None + + def _connect(self): + if self.proc: + return self.proc + args = [ + 'watchman', + '--sockname={}'.format(self.sockpath), + '--logfile=/BOGUS', + '--statefile=/BOGUS', + '--no-spawn', + '--no-local', + '--no-pretty', + '-j', + ] + self.proc = subprocess.Popen(args, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE) + return self.proc + + def readBytes(self, size): + self._connect() + res = self.proc.stdout.read(size) + if res == '': + raise WatchmanError('EOF on CLI process transport') + return res + + def write(self, data): + if self.closed: + self.closed = False + self.proc = None + self._connect() + res = self.proc.stdin.write(data) + self.proc.stdin.close() + self.closed = True + return res + + +class BserCodec(Codec): + """ use the BSER encoding. 
This is the default, preferred codec """ + + def _loads(self, response): + return bser.loads(response) + + def receive(self): + buf = [self.transport.readBytes(sniff_len)] + if not buf[0]: + raise WatchmanError('empty watchman response') + + elen = bser.pdu_len(buf[0]) + + rlen = len(buf[0]) + while elen > rlen: + buf.append(self.transport.readBytes(elen - rlen)) + rlen += len(buf[-1]) + + response = ''.join(buf) + try: + res = self._loads(response) + return res + except ValueError as e: + raise WatchmanError('watchman response decode error: %s' % e) + + def send(self, *args): + cmd = bser.dumps(*args) + self.transport.write(cmd) + + +class ImmutableBserCodec(BserCodec): + """ use the BSER encoding, decoding values using the newer + immutable object support """ + + def _loads(self, response): + return bser.loads(response, False) + + +class JsonCodec(Codec): + """ Use json codec. This is here primarily for testing purposes """ + json = None + + def __init__(self, transport): + super(JsonCodec, self).__init__(transport) + # optional dep on json, only if JsonCodec is used + import json + self.json = json + + def receive(self): + line = self.transport.readLine() + try: + return self.json.loads(line) + except Exception as e: + print(e, line) + raise + + def send(self, *args): + cmd = self.json.dumps(*args) + self.transport.write(cmd + "\n") + + +class client(object): + """ Handles the communication with the watchman service """ + sockpath = None + transport = None + sendCodec = None + recvCodec = None + sendConn = None + recvConn = None + subs = {} # Keyed by subscription name + sub_by_root = {} # Keyed by root, then by subscription name + logs = [] # When log level is raised + unilateral = ['log', 'subscription'] + tport = None + useImmutableBser = None + + def __init__(self, + sockpath=None, + timeout=1.0, + transport=None, + sendEncoding=None, + recvEncoding=None, + useImmutableBser=False): + self.sockpath = sockpath + self.timeout = timeout + self.useImmutableBser = 
useImmutableBser + + transport = transport or os.getenv('WATCHMAN_TRANSPORT') or 'local' + if transport == 'local' and os.name == 'nt': + self.transport = WindowsNamedPipeTransport + elif transport == 'local': + self.transport = UnixSocketTransport + elif transport == 'cli': + self.transport = CLIProcessTransport + if sendEncoding is None: + sendEncoding = 'json' + if recvEncoding is None: + recvEncoding = sendEncoding + else: + raise WatchmanError('invalid transport %s' % transport) + + sendEncoding = sendEncoding or os.getenv('WATCHMAN_ENCODING') or 'bser' + recvEncoding = recvEncoding or os.getenv('WATCHMAN_ENCODING') or 'bser' + + self.recvCodec = self._parseEncoding(recvEncoding) + self.sendCodec = self._parseEncoding(sendEncoding) + + def _parseEncoding(self, enc): + if enc == 'bser': + if self.useImmutableBser: + return ImmutableBserCodec + return BserCodec + elif enc == 'json': + return JsonCodec + else: + raise WatchmanError('invalid encoding %s' % enc) + + def _hasprop(self, result, name): + if self.useImmutableBser: + return hasattr(result, name) + return name in result + + def _resolvesockname(self): + # if invoked via a trigger, watchman will set this env var; we + # should use it unless explicitly set otherwise + path = os.getenv('WATCHMAN_SOCK') + if path: + return path + + cmd = ['watchman', '--output-encoding=bser', 'get-sockname'] + try: + p = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + close_fds=os.name != 'nt') + except OSError as e: + raise WatchmanError('"watchman" executable not in PATH (%s)', e) + + stdout, stderr = p.communicate() + exitcode = p.poll() + + if exitcode: + raise WatchmanError("watchman exited with code %d" % exitcode) + + result = bser.loads(stdout) + if 'error' in result: + raise WatchmanError('get-sockname error: %s' % result['error']) + + return result['sockname'] + + def _connect(self): + """ establish transport connection """ + + if self.recvConn: + return + + if self.sockpath is None: + 
self.sockpath = self._resolvesockname() + + self.tport = self.transport(self.sockpath, self.timeout) + self.sendConn = self.sendCodec(self.tport) + self.recvConn = self.recvCodec(self.tport) + + def __del__(self): + self.close() + + def close(self): + if self.tport: + self.tport.close() + self.tport = None + self.recvConn = None + self.sendConn = None + + def receive(self): + """ receive the next PDU from the watchman service + + If the client has activated subscriptions or logs then + this PDU may be a unilateral PDU sent by the service to + inform the client of a log event or subscription change. + + It may also simply be the response portion of a request + initiated by query. + + There are clients in production that subscribe and call + this in a loop to retrieve all subscription responses, + so care should be taken when making changes here. + """ + + self._connect() + result = self.recvConn.receive() + if self._hasprop(result, 'error'): + raise CommandError(result['error']) + + if self._hasprop(result, 'log'): + self.logs.append(result['log']) + + if self._hasprop(result, 'subscription'): + sub = result['subscription'] + if not (sub in self.subs): + self.subs[sub] = [] + self.subs[sub].append(result) + + # also accumulate in {root,sub} keyed store + root = os.path.normcase(result['root']) + if not root in self.sub_by_root: + self.sub_by_root[root] = {} + if not sub in self.sub_by_root[root]: + self.sub_by_root[root][sub] = [] + self.sub_by_root[root][sub].append(result) + + return result + + def isUnilateralResponse(self, res): + for k in self.unilateral: + if k in res: + return True + return False + + def getLog(self, remove=True): + """ Retrieve buffered log data + + If remove is true the data will be removed from the buffer. 
+ Otherwise it will be left in the buffer + """ + res = self.logs + if remove: + self.logs = [] + return res + + def getSubscription(self, name, remove=True, root=None): + """ Retrieve the data associated with a named subscription + + If remove is True (the default), the subscription data is removed + from the buffer. Otherwise the data is returned but left in + the buffer. + + Returns None if there is no data associated with `name` + + If root is not None, then only return the subscription + data that matches both root and name. When used in this way, + remove processing impacts both the unscoped and scoped stores + for the subscription data. + """ + + if root is not None: + if not root in self.sub_by_root: + return None + if not name in self.sub_by_root[root]: + return None + sub = self.sub_by_root[root][name] + if remove: + del self.sub_by_root[root][name] + # don't let this grow unbounded + if name in self.subs: + del self.subs[name] + return sub + + if not (name in self.subs): + return None + sub = self.subs[name] + if remove: + del self.subs[name] + return sub + + def query(self, *args): + """ Send a query to the watchman service and return the response + + This call will block until the response is returned. + If any unilateral responses are sent by the service in between + the request-response they will be buffered up in the client object + and NOT returned via this method. 
+ """ + + log('calling client.query') + self._connect() + try: + self.sendConn.send(args) + + res = self.receive() + while self.isUnilateralResponse(res): + res = self.receive() + + return res + except CommandError as ex: + ex.setCommand(args) + raise ex + + def capabilityCheck(self, optional=None, required=None): + """ Perform a server capability check """ + res = self.query('version', { + 'optional': optional or [], + 'required': required or [] + }) + + if not self._hasprop(res, 'capabilities'): + # Server doesn't support capabilities, so we need to + # synthesize the results based on the version + capabilities.synthesize(res, optional) + if 'error' in res: + raise CommandError(res['error']) + + return res + + def setTimeout(self, value): + self.recvConn.setTimeout(value) + self.sendConn.setTimeout(value) + +# no-check-code -- this is a 3rd party library
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/fsmonitor/pywatchman/bser.c Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,950 @@ +/* +Copyright (c) 2013-2015, Facebook, Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +#include <Python.h> +#ifdef _MSC_VER +#define inline __inline +#include "msc_stdint.h" +#endif + +/* Return the smallest size int that can store the value */ +#define INT_SIZE(x) (((x) == ((int8_t)x)) ? 1 : \ + ((x) == ((int16_t)x)) ? 2 : \ + ((x) == ((int32_t)x)) ? 
4 : 8) + +#define BSER_ARRAY 0x00 +#define BSER_OBJECT 0x01 +#define BSER_STRING 0x02 +#define BSER_INT8 0x03 +#define BSER_INT16 0x04 +#define BSER_INT32 0x05 +#define BSER_INT64 0x06 +#define BSER_REAL 0x07 +#define BSER_TRUE 0x08 +#define BSER_FALSE 0x09 +#define BSER_NULL 0x0a +#define BSER_TEMPLATE 0x0b +#define BSER_SKIP 0x0c + +// An immutable object representation of BSER_OBJECT. +// Rather than build a hash table, key -> value are obtained +// by walking the list of keys to determine the offset into +// the values array. The assumption is that the number of +// array elements will be typically small (~6 for the top +// level query result and typically 3-5 for the file entries) +// so that the time overhead for this is small compared to +// using a proper hash table. Even with this simplistic +// approach, this is still faster for the mercurial use case +// as it helps to eliminate creating N other objects to +// represent the stat information in the hgwatchman extension +typedef struct { + PyObject_HEAD + PyObject *keys; // tuple of field names + PyObject *values; // tuple of values +} bserObject; + +static Py_ssize_t bserobj_tuple_length(PyObject *o) { + bserObject *obj = (bserObject*)o; + + return PySequence_Length(obj->keys); +} + +static PyObject *bserobj_tuple_item(PyObject *o, Py_ssize_t i) { + bserObject *obj = (bserObject*)o; + + return PySequence_GetItem(obj->values, i); +} + +static PySequenceMethods bserobj_sq = { + bserobj_tuple_length, /* sq_length */ + 0, /* sq_concat */ + 0, /* sq_repeat */ + bserobj_tuple_item, /* sq_item */ + 0, /* sq_ass_item */ + 0, /* sq_contains */ + 0, /* sq_inplace_concat */ + 0 /* sq_inplace_repeat */ +}; + +static void bserobj_dealloc(PyObject *o) { + bserObject *obj = (bserObject*)o; + + Py_CLEAR(obj->keys); + Py_CLEAR(obj->values); + PyObject_Del(o); +} + +static PyObject *bserobj_getattrro(PyObject *o, PyObject *name) { + bserObject *obj = (bserObject*)o; + Py_ssize_t i, n; + const char *namestr; + + if 
(PyIndex_Check(name)) { + i = PyNumber_AsSsize_t(name, PyExc_IndexError); + if (i == -1 && PyErr_Occurred()) { + return NULL; + } + return PySequence_GetItem(obj->values, i); + } + + // hack^Wfeature to allow mercurial to use "st_size" to reference "size" + namestr = PyString_AsString(name); + if (!strncmp(namestr, "st_", 3)) { + namestr += 3; + } + + n = PyTuple_GET_SIZE(obj->keys); + for (i = 0; i < n; i++) { + const char *item_name = NULL; + PyObject *key = PyTuple_GET_ITEM(obj->keys, i); + + item_name = PyString_AsString(key); + if (!strcmp(item_name, namestr)) { + return PySequence_GetItem(obj->values, i); + } + } + PyErr_Format(PyExc_AttributeError, + "bserobject has no attribute '%.400s'", namestr); + return NULL; +} + +static PyMappingMethods bserobj_map = { + bserobj_tuple_length, /* mp_length */ + bserobj_getattrro, /* mp_subscript */ + 0 /* mp_ass_subscript */ +}; + +PyTypeObject bserObjectType = { + PyVarObject_HEAD_INIT(NULL, 0) + "bserobj_tuple", /* tp_name */ + sizeof(bserObject), /* tp_basicsize */ + 0, /* tp_itemsize */ + bserobj_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + &bserobj_sq, /* tp_as_sequence */ + &bserobj_map, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + bserobj_getattrro, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT, /* tp_flags */ + "bserobj tuple", /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + 0, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + 0, /* tp_new */ +}; + + +static PyObject *bser_loads_recursive(const char **ptr, const char *end, + int mutable); + +static const char 
bser_true = BSER_TRUE; +static const char bser_false = BSER_FALSE; +static const char bser_null = BSER_NULL; +static const char bser_string_hdr = BSER_STRING; +static const char bser_array_hdr = BSER_ARRAY; +static const char bser_object_hdr = BSER_OBJECT; + +static inline uint32_t next_power_2(uint32_t n) +{ + n |= (n >> 16); + n |= (n >> 8); + n |= (n >> 4); + n |= (n >> 2); + n |= (n >> 1); + return n + 1; +} + +// A buffer we use for building up the serialized result +struct bser_buffer { + char *buf; + int wpos, allocd; +}; +typedef struct bser_buffer bser_t; + +static int bser_append(bser_t *bser, const char *data, uint32_t len) +{ + int newlen = next_power_2(bser->wpos + len); + if (newlen > bser->allocd) { + char *nbuf = realloc(bser->buf, newlen); + if (!nbuf) { + return 0; + } + + bser->buf = nbuf; + bser->allocd = newlen; + } + + memcpy(bser->buf + bser->wpos, data, len); + bser->wpos += len; + return 1; +} + +static int bser_init(bser_t *bser) +{ + bser->allocd = 8192; + bser->wpos = 0; + bser->buf = malloc(bser->allocd); + + if (!bser->buf) { + return 0; + } + + // Leave room for the serialization header, which includes + // our overall length. 
To make things simpler, we'll use an + // int32 for the header +#define EMPTY_HEADER "\x00\x01\x05\x00\x00\x00\x00" + bser_append(bser, EMPTY_HEADER, sizeof(EMPTY_HEADER)-1); + + return 1; +} + +static void bser_dtor(bser_t *bser) +{ + free(bser->buf); + bser->buf = NULL; +} + +static int bser_long(bser_t *bser, int64_t val) +{ + int8_t i8; + int16_t i16; + int32_t i32; + int64_t i64; + char sz; + int size = INT_SIZE(val); + char *iptr; + + switch (size) { + case 1: + sz = BSER_INT8; + i8 = (int8_t)val; + iptr = (char*)&i8; + break; + case 2: + sz = BSER_INT16; + i16 = (int16_t)val; + iptr = (char*)&i16; + break; + case 4: + sz = BSER_INT32; + i32 = (int32_t)val; + iptr = (char*)&i32; + break; + case 8: + sz = BSER_INT64; + i64 = (int64_t)val; + iptr = (char*)&i64; + break; + default: + PyErr_SetString(PyExc_RuntimeError, + "Cannot represent this long value!?"); + return 0; + } + + if (!bser_append(bser, &sz, sizeof(sz))) { + return 0; + } + + return bser_append(bser, iptr, size); +} + +static int bser_string(bser_t *bser, PyObject *sval) +{ + char *buf = NULL; + Py_ssize_t len; + int res; + PyObject *utf = NULL; + + if (PyUnicode_Check(sval)) { + utf = PyUnicode_AsEncodedString(sval, "utf-8", "ignore"); + sval = utf; + } + + res = PyString_AsStringAndSize(sval, &buf, &len); + if (res == -1) { + res = 0; + goto out; + } + + if (!bser_append(bser, &bser_string_hdr, sizeof(bser_string_hdr))) { + res = 0; + goto out; + } + + if (!bser_long(bser, len)) { + res = 0; + goto out; + } + + if (len > UINT32_MAX) { + PyErr_Format(PyExc_ValueError, "string too big"); + res = 0; + goto out; + } + + res = bser_append(bser, buf, (uint32_t)len); + +out: + if (utf) { + Py_DECREF(utf); + } + + return res; +} + +static int bser_recursive(bser_t *bser, PyObject *val) +{ + if (PyBool_Check(val)) { + if (val == Py_True) { + return bser_append(bser, &bser_true, sizeof(bser_true)); + } + return bser_append(bser, &bser_false, sizeof(bser_false)); + } + + if (val == Py_None) { + return 
bser_append(bser, &bser_null, sizeof(bser_null)); + } + + if (PyInt_Check(val)) { + return bser_long(bser, PyInt_AS_LONG(val)); + } + + if (PyLong_Check(val)) { + return bser_long(bser, PyLong_AsLongLong(val)); + } + + if (PyString_Check(val) || PyUnicode_Check(val)) { + return bser_string(bser, val); + } + + + if (PyFloat_Check(val)) { + double dval = PyFloat_AS_DOUBLE(val); + char sz = BSER_REAL; + + if (!bser_append(bser, &sz, sizeof(sz))) { + return 0; + } + + return bser_append(bser, (char*)&dval, sizeof(dval)); + } + + if (PyList_Check(val)) { + Py_ssize_t i, len = PyList_GET_SIZE(val); + + if (!bser_append(bser, &bser_array_hdr, sizeof(bser_array_hdr))) { + return 0; + } + + if (!bser_long(bser, len)) { + return 0; + } + + for (i = 0; i < len; i++) { + PyObject *ele = PyList_GET_ITEM(val, i); + + if (!bser_recursive(bser, ele)) { + return 0; + } + } + + return 1; + } + + if (PyTuple_Check(val)) { + Py_ssize_t i, len = PyTuple_GET_SIZE(val); + + if (!bser_append(bser, &bser_array_hdr, sizeof(bser_array_hdr))) { + return 0; + } + + if (!bser_long(bser, len)) { + return 0; + } + + for (i = 0; i < len; i++) { + PyObject *ele = PyTuple_GET_ITEM(val, i); + + if (!bser_recursive(bser, ele)) { + return 0; + } + } + + return 1; + } + + if (PyMapping_Check(val)) { + Py_ssize_t len = PyMapping_Length(val); + Py_ssize_t pos = 0; + PyObject *key, *ele; + + if (!bser_append(bser, &bser_object_hdr, sizeof(bser_object_hdr))) { + return 0; + } + + if (!bser_long(bser, len)) { + return 0; + } + + while (PyDict_Next(val, &pos, &key, &ele)) { + if (!bser_string(bser, key)) { + return 0; + } + if (!bser_recursive(bser, ele)) { + return 0; + } + } + + return 1; + } + + PyErr_SetString(PyExc_ValueError, "Unsupported value type"); + return 0; +} + +static PyObject *bser_dumps(PyObject *self, PyObject *args) +{ + PyObject *val = NULL, *res; + bser_t bser; + uint32_t len; + + if (!PyArg_ParseTuple(args, "O", &val)) { + return NULL; + } + + if (!bser_init(&bser)) { + return 
PyErr_NoMemory(); + } + + if (!bser_recursive(&bser, val)) { + bser_dtor(&bser); + if (errno == ENOMEM) { + return PyErr_NoMemory(); + } + // otherwise, we've already set the error to something reasonable + return NULL; + } + + // Now fill in the overall length + len = bser.wpos - (sizeof(EMPTY_HEADER) - 1); + memcpy(bser.buf + 3, &len, sizeof(len)); + + res = PyString_FromStringAndSize(bser.buf, bser.wpos); + bser_dtor(&bser); + + return res; +} + +int bunser_int(const char **ptr, const char *end, int64_t *val) +{ + int needed; + const char *buf = *ptr; + int8_t i8; + int16_t i16; + int32_t i32; + int64_t i64; + + switch (buf[0]) { + case BSER_INT8: + needed = 2; + break; + case BSER_INT16: + needed = 3; + break; + case BSER_INT32: + needed = 5; + break; + case BSER_INT64: + needed = 9; + break; + default: + PyErr_Format(PyExc_ValueError, + "invalid bser int encoding 0x%02x", buf[0]); + return 0; + } + if (end - buf < needed) { + PyErr_SetString(PyExc_ValueError, "input buffer to small for int encoding"); + return 0; + } + *ptr = buf + needed; + switch (buf[0]) { + case BSER_INT8: + memcpy(&i8, buf + 1, sizeof(i8)); + *val = i8; + return 1; + case BSER_INT16: + memcpy(&i16, buf + 1, sizeof(i16)); + *val = i16; + return 1; + case BSER_INT32: + memcpy(&i32, buf + 1, sizeof(i32)); + *val = i32; + return 1; + case BSER_INT64: + memcpy(&i64, buf + 1, sizeof(i64)); + *val = i64; + return 1; + default: + return 0; + } +} + +static int bunser_string(const char **ptr, const char *end, + const char **start, int64_t *len) +{ + const char *buf = *ptr; + + // skip string marker + buf++; + if (!bunser_int(&buf, end, len)) { + return 0; + } + + if (buf + *len > end) { + PyErr_Format(PyExc_ValueError, "invalid string length in bser data"); + return 0; + } + + *ptr = buf + *len; + *start = buf; + return 1; +} + +static PyObject *bunser_array(const char **ptr, const char *end, int mutable) +{ + const char *buf = *ptr; + int64_t nitems, i; + PyObject *res; + + // skip array header + 
buf++; + if (!bunser_int(&buf, end, &nitems)) { + return 0; + } + *ptr = buf; + + if (nitems > LONG_MAX) { + PyErr_Format(PyExc_ValueError, "too many items for python array"); + return NULL; + } + + if (mutable) { + res = PyList_New((Py_ssize_t)nitems); + } else { + res = PyTuple_New((Py_ssize_t)nitems); + } + + for (i = 0; i < nitems; i++) { + PyObject *ele = bser_loads_recursive(ptr, end, mutable); + + if (!ele) { + Py_DECREF(res); + return NULL; + } + + if (mutable) { + PyList_SET_ITEM(res, i, ele); + } else { + PyTuple_SET_ITEM(res, i, ele); + } + // DECREF(ele) not required as SET_ITEM steals the ref + } + + return res; +} + +static PyObject *bunser_object(const char **ptr, const char *end, + int mutable) +{ + const char *buf = *ptr; + int64_t nitems, i; + PyObject *res; + bserObject *obj; + + // skip array header + buf++; + if (!bunser_int(&buf, end, &nitems)) { + return 0; + } + *ptr = buf; + + if (mutable) { + res = PyDict_New(); + } else { + obj = PyObject_New(bserObject, &bserObjectType); + obj->keys = PyTuple_New((Py_ssize_t)nitems); + obj->values = PyTuple_New((Py_ssize_t)nitems); + res = (PyObject*)obj; + } + + for (i = 0; i < nitems; i++) { + const char *keystr; + int64_t keylen; + PyObject *key; + PyObject *ele; + + if (!bunser_string(ptr, end, &keystr, &keylen)) { + Py_DECREF(res); + return NULL; + } + + if (keylen > LONG_MAX) { + PyErr_Format(PyExc_ValueError, "string too big for python"); + Py_DECREF(res); + return NULL; + } + + key = PyString_FromStringAndSize(keystr, (Py_ssize_t)keylen); + if (!key) { + Py_DECREF(res); + return NULL; + } + + ele = bser_loads_recursive(ptr, end, mutable); + + if (!ele) { + Py_DECREF(key); + Py_DECREF(res); + return NULL; + } + + if (mutable) { + PyDict_SetItem(res, key, ele); + Py_DECREF(key); + Py_DECREF(ele); + } else { + /* PyTuple_SET_ITEM steals ele, key */ + PyTuple_SET_ITEM(obj->values, i, ele); + PyTuple_SET_ITEM(obj->keys, i, key); + } + } + + return res; +} + +static PyObject *bunser_template(const char 
**ptr, const char *end, + int mutable) +{ + const char *buf = *ptr; + int64_t nitems, i; + PyObject *arrval; + PyObject *keys; + Py_ssize_t numkeys, keyidx; + + if (buf[1] != BSER_ARRAY) { + PyErr_Format(PyExc_ValueError, "Expect ARRAY to follow TEMPLATE"); + return NULL; + } + + // skip header + buf++; + *ptr = buf; + + // Load template keys + keys = bunser_array(ptr, end, mutable); + if (!keys) { + return NULL; + } + + numkeys = PySequence_Length(keys); + + // Load number of array elements + if (!bunser_int(ptr, end, &nitems)) { + Py_DECREF(keys); + return 0; + } + + if (nitems > LONG_MAX) { + PyErr_Format(PyExc_ValueError, "Too many items for python"); + Py_DECREF(keys); + return NULL; + } + + arrval = PyList_New((Py_ssize_t)nitems); + if (!arrval) { + Py_DECREF(keys); + return NULL; + } + + for (i = 0; i < nitems; i++) { + PyObject *dict = NULL; + bserObject *obj = NULL; + + if (mutable) { + dict = PyDict_New(); + } else { + obj = PyObject_New(bserObject, &bserObjectType); + if (obj) { + obj->keys = keys; + Py_INCREF(obj->keys); + obj->values = PyTuple_New(numkeys); + } + dict = (PyObject*)obj; + } + if (!dict) { +fail: + Py_DECREF(keys); + Py_DECREF(arrval); + return NULL; + } + + for (keyidx = 0; keyidx < numkeys; keyidx++) { + PyObject *key; + PyObject *ele; + + if (**ptr == BSER_SKIP) { + *ptr = *ptr + 1; + ele = Py_None; + Py_INCREF(ele); + } else { + ele = bser_loads_recursive(ptr, end, mutable); + } + + if (!ele) { + goto fail; + } + + if (mutable) { + key = PyList_GET_ITEM(keys, keyidx); + PyDict_SetItem(dict, key, ele); + Py_DECREF(ele); + } else { + PyTuple_SET_ITEM(obj->values, keyidx, ele); + // DECREF(ele) not required as SET_ITEM steals the ref + } + } + + PyList_SET_ITEM(arrval, i, dict); + // DECREF(obj) not required as SET_ITEM steals the ref + } + + Py_DECREF(keys); + + return arrval; +} + +static PyObject *bser_loads_recursive(const char **ptr, const char *end, + int mutable) +{ + const char *buf = *ptr; + + switch (buf[0]) { + case 
BSER_INT8: + case BSER_INT16: + case BSER_INT32: + case BSER_INT64: + { + int64_t ival; + if (!bunser_int(ptr, end, &ival)) { + return NULL; + } + if (ival < LONG_MIN || ival > LONG_MAX) { + return PyLong_FromLongLong(ival); + } + return PyInt_FromSsize_t(Py_SAFE_DOWNCAST(ival, int64_t, Py_ssize_t)); + } + + case BSER_REAL: + { + double dval; + memcpy(&dval, buf + 1, sizeof(dval)); + *ptr = buf + 1 + sizeof(double); + return PyFloat_FromDouble(dval); + } + + case BSER_TRUE: + *ptr = buf + 1; + Py_INCREF(Py_True); + return Py_True; + + case BSER_FALSE: + *ptr = buf + 1; + Py_INCREF(Py_False); + return Py_False; + + case BSER_NULL: + *ptr = buf + 1; + Py_INCREF(Py_None); + return Py_None; + + case BSER_STRING: + { + const char *start; + int64_t len; + + if (!bunser_string(ptr, end, &start, &len)) { + return NULL; + } + + if (len > LONG_MAX) { + PyErr_Format(PyExc_ValueError, "string too long for python"); + return NULL; + } + + return PyString_FromStringAndSize(start, (long)len); + } + + case BSER_ARRAY: + return bunser_array(ptr, end, mutable); + + case BSER_OBJECT: + return bunser_object(ptr, end, mutable); + + case BSER_TEMPLATE: + return bunser_template(ptr, end, mutable); + + default: + PyErr_Format(PyExc_ValueError, "unhandled bser opcode 0x%02x", buf[0]); + } + + return NULL; +} + +// Expected use case is to read a packet from the socket and +// then call bser.pdu_len on the packet. It returns the total +// length of the entire response that the peer is sending, +// including the bytes already received. 
This allows the client +// to compute the data size it needs to read before it can +// decode the data +static PyObject *bser_pdu_len(PyObject *self, PyObject *args) +{ + const char *start = NULL; + const char *data = NULL; + int datalen = 0; + const char *end; + int64_t expected_len, total_len; + + if (!PyArg_ParseTuple(args, "s#", &start, &datalen)) { + return NULL; + } + data = start; + end = data + datalen; + + // Validate the header and length + if (memcmp(data, EMPTY_HEADER, 2) != 0) { + PyErr_SetString(PyExc_ValueError, "invalid bser header"); + return NULL; + } + + data += 2; + + // Expect an integer telling us how big the rest of the data + // should be + if (!bunser_int(&data, end, &expected_len)) { + return NULL; + } + + total_len = expected_len + (data - start); + if (total_len > LONG_MAX) { + return PyLong_FromLongLong(total_len); + } + return PyInt_FromLong((long)total_len); +} + +static PyObject *bser_loads(PyObject *self, PyObject *args) +{ + const char *data = NULL; + int datalen = 0; + const char *end; + int64_t expected_len; + int mutable = 1; + PyObject *mutable_obj = NULL; + + if (!PyArg_ParseTuple(args, "s#|O:loads", &data, &datalen, &mutable_obj)) { + return NULL; + } + if (mutable_obj) { + mutable = PyObject_IsTrue(mutable_obj) > 0 ? 
1 : 0; + } + + end = data + datalen; + + // Validate the header and length + if (memcmp(data, EMPTY_HEADER, 2) != 0) { + PyErr_SetString(PyExc_ValueError, "invalid bser header"); + return NULL; + } + + data += 2; + + // Expect an integer telling us how big the rest of the data + // should be + if (!bunser_int(&data, end, &expected_len)) { + return NULL; + } + + // Verify + if (expected_len + data != end) { + PyErr_SetString(PyExc_ValueError, "bser data len != header len"); + return NULL; + } + + return bser_loads_recursive(&data, end, mutable); +} + +static PyMethodDef bser_methods[] = { + {"loads", bser_loads, METH_VARARGS, "Deserialize string."}, + {"pdu_len", bser_pdu_len, METH_VARARGS, "Extract PDU length."}, + {"dumps", bser_dumps, METH_VARARGS, "Serialize string."}, + {NULL, NULL, 0, NULL} +}; + +PyMODINIT_FUNC initbser(void) +{ + (void)Py_InitModule("bser", bser_methods); + PyType_Ready(&bserObjectType); +} + +/* vim:ts=2:sw=2:et: + */ + +// no-check-code -- this is a 3rd party library
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/fsmonitor/pywatchman/capabilities.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,69 @@ +# Copyright 2015 Facebook, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name Facebook nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import re + +def parse_version(vstr): + res = 0 + for n in vstr.split('.'): + res = res * 1000 + res = res + int(n) + return res + +cap_versions = { + "cmd-watch-del-all": "3.1.1", + "cmd-watch-project": "3.1", + "relative_root": "3.3", + "term-dirname": "3.1", + "term-idirname": "3.1", + "wildmatch": "3.7", +} + +def check(version, name): + if name in cap_versions: + return version >= parse_version(cap_versions[name]) + return False + +def synthesize(vers, opts): + """ Synthesize a capability enabled version response + This is a very limited emulation for relatively recent feature sets + """ + parsed_version = parse_version(vers['version']) + vers['capabilities'] = {} + for name in opts['optional']: + vers['capabilities'][name] = check(parsed_version, name) + failed = False + for name in opts['required']: + have = check(parsed_version, name) + vers['capabilities'][name] = have + if not have: + vers['error'] = 'client required capability `' + name + \ + '` is not supported by this server' + return vers + +# no-check-code -- this is a 3rd party library
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/fsmonitor/pywatchman/msc_stdint.h Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,260 @@ +// no-check-code +// ISO C9x compliant stdint.h for Microsoft Visual Studio +// Based on ISO/IEC 9899:TC2 Committee draft (May 6, 2005) WG14/N1124 +// +// Copyright (c) 2006-2013 Alexander Chemeris +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// +// 3. Neither the name of the product nor the names of its contributors may +// be used to endorse or promote products derived from this software +// without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +/////////////////////////////////////////////////////////////////////////////// + +#ifndef _MSC_VER // [ +#error "Use this header only with Microsoft Visual C++ compilers!" 
+#endif // _MSC_VER ] + +#ifndef _MSC_STDINT_H_ // [ +#define _MSC_STDINT_H_ + +#if _MSC_VER > 1000 +#pragma once +#endif + +#if _MSC_VER >= 1600 // [ +#include <stdint.h> +#else // ] _MSC_VER >= 1600 [ + +#include <limits.h> + +// For Visual Studio 6 in C++ mode and for many Visual Studio versions when +// compiling for ARM we should wrap <wchar.h> include with 'extern "C++" {}' +// or compiler give many errors like this: +// error C2733: second C linkage of overloaded function 'wmemchr' not allowed +#ifdef __cplusplus +extern "C" { +#endif +# include <wchar.h> +#ifdef __cplusplus +} +#endif + +// Define _W64 macros to mark types changing their size, like intptr_t. +#ifndef _W64 +# if !defined(__midl) && (defined(_X86_) || defined(_M_IX86)) && _MSC_VER >= 1300 +# define _W64 __w64 +# else +# define _W64 +# endif +#endif + + +// 7.18.1 Integer types + +// 7.18.1.1 Exact-width integer types + +// Visual Studio 6 and Embedded Visual C++ 4 doesn't +// realize that, e.g. char has the same size as __int8 +// so we give up on __intX for them. 
+#if (_MSC_VER < 1300) + typedef signed char int8_t; + typedef signed short int16_t; + typedef signed int int32_t; + typedef unsigned char uint8_t; + typedef unsigned short uint16_t; + typedef unsigned int uint32_t; +#else + typedef signed __int8 int8_t; + typedef signed __int16 int16_t; + typedef signed __int32 int32_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; +#endif +typedef signed __int64 int64_t; +typedef unsigned __int64 uint64_t; + + +// 7.18.1.2 Minimum-width integer types +typedef int8_t int_least8_t; +typedef int16_t int_least16_t; +typedef int32_t int_least32_t; +typedef int64_t int_least64_t; +typedef uint8_t uint_least8_t; +typedef uint16_t uint_least16_t; +typedef uint32_t uint_least32_t; +typedef uint64_t uint_least64_t; + +// 7.18.1.3 Fastest minimum-width integer types +typedef int8_t int_fast8_t; +typedef int16_t int_fast16_t; +typedef int32_t int_fast32_t; +typedef int64_t int_fast64_t; +typedef uint8_t uint_fast8_t; +typedef uint16_t uint_fast16_t; +typedef uint32_t uint_fast32_t; +typedef uint64_t uint_fast64_t; + +// 7.18.1.4 Integer types capable of holding object pointers +#ifdef _WIN64 // [ + typedef signed __int64 intptr_t; + typedef unsigned __int64 uintptr_t; +#else // _WIN64 ][ + typedef _W64 signed int intptr_t; + typedef _W64 unsigned int uintptr_t; +#endif // _WIN64 ] + +// 7.18.1.5 Greatest-width integer types +typedef int64_t intmax_t; +typedef uint64_t uintmax_t; + + +// 7.18.2 Limits of specified-width integer types + +#if !defined(__cplusplus) || defined(__STDC_LIMIT_MACROS) // [ See footnote 220 at page 257 and footnote 221 at page 259 + +// 7.18.2.1 Limits of exact-width integer types +#define INT8_MIN ((int8_t)_I8_MIN) +#define INT8_MAX _I8_MAX +#define INT16_MIN ((int16_t)_I16_MIN) +#define INT16_MAX _I16_MAX +#define INT32_MIN ((int32_t)_I32_MIN) +#define INT32_MAX _I32_MAX +#define INT64_MIN ((int64_t)_I64_MIN) +#define INT64_MAX _I64_MAX +#define 
UINT8_MAX _UI8_MAX +#define UINT16_MAX _UI16_MAX +#define UINT32_MAX _UI32_MAX +#define UINT64_MAX _UI64_MAX + +// 7.18.2.2 Limits of minimum-width integer types +#define INT_LEAST8_MIN INT8_MIN +#define INT_LEAST8_MAX INT8_MAX +#define INT_LEAST16_MIN INT16_MIN +#define INT_LEAST16_MAX INT16_MAX +#define INT_LEAST32_MIN INT32_MIN +#define INT_LEAST32_MAX INT32_MAX +#define INT_LEAST64_MIN INT64_MIN +#define INT_LEAST64_MAX INT64_MAX +#define UINT_LEAST8_MAX UINT8_MAX +#define UINT_LEAST16_MAX UINT16_MAX +#define UINT_LEAST32_MAX UINT32_MAX +#define UINT_LEAST64_MAX UINT64_MAX + +// 7.18.2.3 Limits of fastest minimum-width integer types +#define INT_FAST8_MIN INT8_MIN +#define INT_FAST8_MAX INT8_MAX +#define INT_FAST16_MIN INT16_MIN +#define INT_FAST16_MAX INT16_MAX +#define INT_FAST32_MIN INT32_MIN +#define INT_FAST32_MAX INT32_MAX +#define INT_FAST64_MIN INT64_MIN +#define INT_FAST64_MAX INT64_MAX +#define UINT_FAST8_MAX UINT8_MAX +#define UINT_FAST16_MAX UINT16_MAX +#define UINT_FAST32_MAX UINT32_MAX +#define UINT_FAST64_MAX UINT64_MAX + +// 7.18.2.4 Limits of integer types capable of holding object pointers +#ifdef _WIN64 // [ +# define INTPTR_MIN INT64_MIN +# define INTPTR_MAX INT64_MAX +# define UINTPTR_MAX UINT64_MAX +#else // _WIN64 ][ +# define INTPTR_MIN INT32_MIN +# define INTPTR_MAX INT32_MAX +# define UINTPTR_MAX UINT32_MAX +#endif // _WIN64 ] + +// 7.18.2.5 Limits of greatest-width integer types +#define INTMAX_MIN INT64_MIN +#define INTMAX_MAX INT64_MAX +#define UINTMAX_MAX UINT64_MAX + +// 7.18.3 Limits of other integer types + +#ifdef _WIN64 // [ +# define PTRDIFF_MIN _I64_MIN +# define PTRDIFF_MAX _I64_MAX +#else // _WIN64 ][ +# define PTRDIFF_MIN _I32_MIN +# define PTRDIFF_MAX _I32_MAX +#endif // _WIN64 ] + +#define SIG_ATOMIC_MIN INT_MIN +#define SIG_ATOMIC_MAX INT_MAX + +#ifndef SIZE_MAX // [ +# ifdef _WIN64 // [ +# define SIZE_MAX _UI64_MAX +# else // _WIN64 ][ +# define SIZE_MAX _UI32_MAX +# endif // _WIN64 ] +#endif // SIZE_MAX ] + +// 
WCHAR_MIN and WCHAR_MAX are also defined in <wchar.h> +#ifndef WCHAR_MIN // [ +# define WCHAR_MIN 0 +#endif // WCHAR_MIN ] +#ifndef WCHAR_MAX // [ +# define WCHAR_MAX _UI16_MAX +#endif // WCHAR_MAX ] + +#define WINT_MIN 0 +#define WINT_MAX _UI16_MAX + +#endif // __STDC_LIMIT_MACROS ] + + +// 7.18.4 Limits of other integer types + +#if !defined(__cplusplus) || defined(__STDC_CONSTANT_MACROS) // [ See footnote 224 at page 260 + +// 7.18.4.1 Macros for minimum-width integer constants + +#define INT8_C(val) val##i8 +#define INT16_C(val) val##i16 +#define INT32_C(val) val##i32 +#define INT64_C(val) val##i64 + +#define UINT8_C(val) val##ui8 +#define UINT16_C(val) val##ui16 +#define UINT32_C(val) val##ui32 +#define UINT64_C(val) val##ui64 + +// 7.18.4.2 Macros for greatest-width integer constants +// These #ifndef's are needed to prevent collisions with <boost/cstdint.hpp>. +// Check out Issue 9 for the details. +#ifndef INTMAX_C // [ +# define INTMAX_C INT64_C +#endif // INTMAX_C ] +#ifndef UINTMAX_C // [ +# define UINTMAX_C UINT64_C +#endif // UINTMAX_C ] + +#endif // __STDC_CONSTANT_MACROS ] + +#endif // _MSC_VER >= 1600 ] + +#endif // _MSC_STDINT_H_ ]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/fsmonitor/pywatchman/pybser.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,359 @@ +# Copyright 2015 Facebook, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# * Neither the name Facebook nor the names of its contributors may be used to +# endorse or promote products derived from this software without specific +# prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import collections +import ctypes +import struct +import sys + +BSER_ARRAY = '\x00' +BSER_OBJECT = '\x01' +BSER_STRING = '\x02' +BSER_INT8 = '\x03' +BSER_INT16 = '\x04' +BSER_INT32 = '\x05' +BSER_INT64 = '\x06' +BSER_REAL = '\x07' +BSER_TRUE = '\x08' +BSER_FALSE = '\x09' +BSER_NULL = '\x0a' +BSER_TEMPLATE = '\x0b' +BSER_SKIP = '\x0c' + +# Leave room for the serialization header, which includes +# our overall length. To make things simpler, we'll use an +# int32 for the header +EMPTY_HEADER = "\x00\x01\x05\x00\x00\x00\x00" + +# Python 3 conditional for supporting Python 2's int/long types +if sys.version_info > (3,): + long = int + +def _int_size(x): + """Return the smallest size int that can store the value""" + if -0x80 <= x <= 0x7F: + return 1 + elif -0x8000 <= x <= 0x7FFF: + return 2 + elif -0x80000000 <= x <= 0x7FFFFFFF: + return 4 + elif long(-0x8000000000000000) <= x <= long(0x7FFFFFFFFFFFFFFF): + return 8 + else: + raise RuntimeError('Cannot represent value: ' + str(x)) + + +class _bser_buffer(object): + + def __init__(self): + self.buf = ctypes.create_string_buffer(8192) + struct.pack_into(str(len(EMPTY_HEADER)) + 's', self.buf, 0, EMPTY_HEADER) + self.wpos = len(EMPTY_HEADER) + + def ensure_size(self, size): + while ctypes.sizeof(self.buf) - self.wpos < size: + ctypes.resize(self.buf, ctypes.sizeof(self.buf) * 2) + + def append_long(self, val): + size = _int_size(val) + to_write = size + 1 + self.ensure_size(to_write) + if size == 1: + struct.pack_into('=cb', self.buf, self.wpos, BSER_INT8, val) + elif size == 2: + struct.pack_into('=ch', self.buf, self.wpos, BSER_INT16, val) + elif size == 4: + struct.pack_into('=ci', self.buf, self.wpos, BSER_INT32, val) + elif size == 8: + struct.pack_into('=cq', self.buf, self.wpos, BSER_INT64, val) + else: + raise RuntimeError('Cannot represent this long value') + self.wpos += to_write + + + def append_string(self, s): + if isinstance(s, unicode): + s = s.encode('utf-8') + s_len = len(s) + size = _int_size(s_len) + 
to_write = 2 + size + s_len + self.ensure_size(to_write) + if size == 1: + struct.pack_into('=ccb' + str(s_len) + 's', self.buf, self.wpos, BSER_STRING, BSER_INT8, s_len, s) + elif size == 2: + struct.pack_into('=cch' + str(s_len) + 's', self.buf, self.wpos, BSER_STRING, BSER_INT16, s_len, s) + elif size == 4: + struct.pack_into('=cci' + str(s_len) + 's', self.buf, self.wpos, BSER_STRING, BSER_INT32, s_len, s) + elif size == 8: + struct.pack_into('=ccq' + str(s_len) + 's', self.buf, self.wpos, BSER_STRING, BSER_INT64, s_len, s) + else: + raise RuntimeError('Cannot represent this string value') + self.wpos += to_write + + + def append_recursive(self, val): + if isinstance(val, bool): + needed = 1 + self.ensure_size(needed) + if val: + to_encode = BSER_TRUE + else: + to_encode = BSER_FALSE + struct.pack_into('=c', self.buf, self.wpos, to_encode) + self.wpos += needed + elif val is None: + needed = 1 + self.ensure_size(needed) + struct.pack_into('=c', self.buf, self.wpos, BSER_NULL) + self.wpos += needed + elif isinstance(val, (int, long)): + self.append_long(val) + elif isinstance(val, (str, unicode)): + self.append_string(val) + elif isinstance(val, float): + needed = 9 + self.ensure_size(needed) + struct.pack_into('=cd', self.buf, self.wpos, BSER_REAL, val) + self.wpos += needed + elif isinstance(val, collections.Mapping) and isinstance(val, collections.Sized): + val_len = len(val) + size = _int_size(val_len) + needed = 2 + size + self.ensure_size(needed) + if size == 1: + struct.pack_into('=ccb', self.buf, self.wpos, BSER_OBJECT, BSER_INT8, val_len) + elif size == 2: + struct.pack_into('=cch', self.buf, self.wpos, BSER_OBJECT, BSER_INT16, val_len) + elif size == 4: + struct.pack_into('=cci', self.buf, self.wpos, BSER_OBJECT, BSER_INT32, val_len) + elif size == 8: + struct.pack_into('=ccq', self.buf, self.wpos, BSER_OBJECT, BSER_INT64, val_len) + else: + raise RuntimeError('Cannot represent this mapping value') + self.wpos += needed + for k, v in val.iteritems(): + 
self.append_string(k) + self.append_recursive(v) + elif isinstance(val, collections.Iterable) and isinstance(val, collections.Sized): + val_len = len(val) + size = _int_size(val_len) + needed = 2 + size + self.ensure_size(needed) + if size == 1: + struct.pack_into('=ccb', self.buf, self.wpos, BSER_ARRAY, BSER_INT8, val_len) + elif size == 2: + struct.pack_into('=cch', self.buf, self.wpos, BSER_ARRAY, BSER_INT16, val_len) + elif size == 4: + struct.pack_into('=cci', self.buf, self.wpos, BSER_ARRAY, BSER_INT32, val_len) + elif size == 8: + struct.pack_into('=ccq', self.buf, self.wpos, BSER_ARRAY, BSER_INT64, val_len) + else: + raise RuntimeError('Cannot represent this sequence value') + self.wpos += needed + for v in val: + self.append_recursive(v) + else: + raise RuntimeError('Cannot represent unknown value type') + + +def dumps(obj): + bser_buf = _bser_buffer() + bser_buf.append_recursive(obj) + # Now fill in the overall length + obj_len = bser_buf.wpos - len(EMPTY_HEADER) + struct.pack_into('=i', bser_buf.buf, 3, obj_len) + return bser_buf.buf.raw[:bser_buf.wpos] + + +def _bunser_int(buf, pos): + try: + int_type = buf[pos] + except IndexError: + raise ValueError('Invalid bser int encoding, pos out of range') + if int_type == BSER_INT8: + needed = 2 + fmt = '=b' + elif int_type == BSER_INT16: + needed = 3 + fmt = '=h' + elif int_type == BSER_INT32: + needed = 5 + fmt = '=i' + elif int_type == BSER_INT64: + needed = 9 + fmt = '=q' + else: + raise ValueError('Invalid bser int encoding 0x%02x' % int(int_type)) + int_val = struct.unpack_from(fmt, buf, pos + 1)[0] + return (int_val, pos + needed) + + +def _bunser_string(buf, pos): + str_len, pos = _bunser_int(buf, pos + 1) + str_val = struct.unpack_from(str(str_len) + 's', buf, pos)[0] + return (str_val, pos + str_len) + + +def _bunser_array(buf, pos, mutable=True): + arr_len, pos = _bunser_int(buf, pos + 1) + arr = [] + for i in range(arr_len): + arr_item, pos = _bser_loads_recursive(buf, pos, mutable) + 
arr.append(arr_item) + + if not mutable: + arr = tuple(arr) + + return arr, pos + + +# This is a quack-alike with the bserObjectType in bser.c +# It provides by getattr accessors and getitem for both index +# and name. +class _BunserDict(object): + __slots__ = ('_keys', '_values') + + def __init__(self, keys, values): + self._keys = keys + self._values = values + + def __getattr__(self, name): + return self.__getitem__(name) + + def __getitem__(self, key): + if isinstance(key, (int, long)): + return self._values[key] + elif key.startswith('st_'): + # hack^Wfeature to allow mercurial to use "st_size" to + # reference "size" + key = key[3:] + try: + return self._values[self._keys.index(key)] + except ValueError as ex: + raise KeyError('_BunserDict has no key %s' % key) + + def __len__(self): + return len(self._keys) + +def _bunser_object(buf, pos, mutable=True): + obj_len, pos = _bunser_int(buf, pos + 1) + if mutable: + obj = {} + else: + keys = [] + vals = [] + + for i in range(obj_len): + key, pos = _bunser_string(buf, pos) + val, pos = _bser_loads_recursive(buf, pos, mutable) + if mutable: + obj[key] = val + else: + keys.append(key) + vals.append(val) + + if not mutable: + obj = _BunserDict(keys, vals) + + return obj, pos + + +def _bunser_template(buf, pos, mutable=True): + if buf[pos + 1] != BSER_ARRAY: + raise RuntimeError('Expect ARRAY to follow TEMPLATE') + keys, pos = _bunser_array(buf, pos + 1) + nitems, pos = _bunser_int(buf, pos) + arr = [] + for i in range(nitems): + if mutable: + obj = {} + else: + vals = [] + + for keyidx in range(len(keys)): + if buf[pos] == BSER_SKIP: + pos += 1 + ele = None + else: + ele, pos = _bser_loads_recursive(buf, pos, mutable) + + if mutable: + key = keys[keyidx] + obj[key] = ele + else: + vals.append(ele) + + if not mutable: + obj = _BunserDict(keys, vals) + + arr.append(obj) + return arr, pos + + +def _bser_loads_recursive(buf, pos, mutable=True): + val_type = buf[pos] + if (val_type == BSER_INT8 or val_type == BSER_INT16 
or + val_type == BSER_INT32 or val_type == BSER_INT64): + return _bunser_int(buf, pos) + elif val_type == BSER_REAL: + val = struct.unpack_from('=d', buf, pos + 1)[0] + return (val, pos + 9) + elif val_type == BSER_TRUE: + return (True, pos + 1) + elif val_type == BSER_FALSE: + return (False, pos + 1) + elif val_type == BSER_NULL: + return (None, pos + 1) + elif val_type == BSER_STRING: + return _bunser_string(buf, pos) + elif val_type == BSER_ARRAY: + return _bunser_array(buf, pos, mutable) + elif val_type == BSER_OBJECT: + return _bunser_object(buf, pos, mutable) + elif val_type == BSER_TEMPLATE: + return _bunser_template(buf, pos, mutable) + else: + raise RuntimeError('unhandled bser opcode 0x%02x' % (val_type,)) + + +def pdu_len(buf): + if buf[0:2] != EMPTY_HEADER[0:2]: + raise RuntimeError('Invalid BSER header') + expected_len, pos = _bunser_int(buf, 2) + return expected_len + pos + + +def loads(buf, mutable=True): + if buf[0:2] != EMPTY_HEADER[0:2]: + raise RuntimeError('Invalid BSER header') + expected_len, pos = _bunser_int(buf, 2) + if len(buf) != expected_len + pos: + raise RuntimeError('bser data len != header len') + return _bser_loads_recursive(buf, pos, mutable)[0] + +# no-check-code -- this is a 3rd party library
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/fsmonitor/state.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,115 @@ +# state.py - fsmonitor persistent state +# +# Copyright 2013-2016 Facebook, Inc. +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from __future__ import absolute_import + +import errno +import os +import socket +import struct + +from mercurial import pathutil +from mercurial.i18n import _ + +_version = 4 +_versionformat = ">I" + +class state(object): + def __init__(self, repo): + self._opener = repo.opener + self._ui = repo.ui + self._rootdir = pathutil.normasprefix(repo.root) + self._lastclock = None + + self.mode = self._ui.config('fsmonitor', 'mode', default='on') + self.walk_on_invalidate = self._ui.configbool( + 'fsmonitor', 'walk_on_invalidate', False) + self.timeout = float(self._ui.config( + 'fsmonitor', 'timeout', default='2')) + + def get(self): + try: + file = self._opener('fsmonitor.state', 'rb') + except IOError as inst: + if inst.errno != errno.ENOENT: + raise + return None, None, None + + versionbytes = file.read(4) + if len(versionbytes) < 4: + self._ui.log( + 'fsmonitor', 'fsmonitor: state file only has %d bytes, ' + 'nuking state\n' % len(versionbytes)) + self.invalidate() + return None, None, None + try: + diskversion = struct.unpack(_versionformat, versionbytes)[0] + if diskversion != _version: + # different version, nuke state and start over + self._ui.log( + 'fsmonitor', 'fsmonitor: version switch from %d to ' + '%d, nuking state\n' % (diskversion, _version)) + self.invalidate() + return None, None, None + + state = file.read().split('\0') + # state = hostname\0clock\0ignorehash\0 + list of files, each + # followed by a \0 + diskhostname = state[0] + hostname = socket.gethostname() + if diskhostname != hostname: + # file got moved to a different host + self._ui.log('fsmonitor', 'fsmonitor: stored hostname "%s" ' + 'different 
from current "%s", nuking state\n' % + (diskhostname, hostname)) + self.invalidate() + return None, None, None + + clock = state[1] + ignorehash = state[2] + # discard the value after the last \0 + notefiles = state[3:-1] + + finally: + file.close() + + return clock, ignorehash, notefiles + + def set(self, clock, ignorehash, notefiles): + if clock is None: + self.invalidate() + return + + try: + file = self._opener('fsmonitor.state', 'wb') + except (IOError, OSError): + self._ui.warn(_("warning: unable to write out fsmonitor state\n")) + return + + try: + file.write(struct.pack(_versionformat, _version)) + file.write(socket.gethostname() + '\0') + file.write(clock + '\0') + file.write(ignorehash + '\0') + if notefiles: + file.write('\0'.join(notefiles)) + file.write('\0') + finally: + file.close() + + def invalidate(self): + try: + os.unlink(os.path.join(self._rootdir, '.hg', 'fsmonitor.state')) + except OSError as inst: + if inst.errno != errno.ENOENT: + raise + + def setlastclock(self, clock): + self._lastclock = clock + + def getlastclock(self): + return self._lastclock
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/fsmonitor/watchmanclient.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,109 @@ +# watchmanclient.py - Watchman client for the fsmonitor extension +# +# Copyright 2013-2016 Facebook, Inc. +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from __future__ import absolute_import + +import getpass + +from mercurial import util + +from . import pywatchman + +class Unavailable(Exception): + def __init__(self, msg, warn=True, invalidate=False): + self.msg = msg + self.warn = warn + if self.msg == 'timed out waiting for response': + self.warn = False + self.invalidate = invalidate + + def __str__(self): + if self.warn: + return 'warning: Watchman unavailable: %s' % self.msg + else: + return 'Watchman unavailable: %s' % self.msg + +class WatchmanNoRoot(Unavailable): + def __init__(self, root, msg): + self.root = root + super(WatchmanNoRoot, self).__init__(msg) + +class client(object): + def __init__(self, repo, timeout=1.0): + err = None + if not self._user: + err = "couldn't get user" + warn = True + if self._user in repo.ui.configlist('fsmonitor', 'blacklistusers'): + err = 'user %s in blacklist' % self._user + warn = False + + if err: + raise Unavailable(err, warn) + + self._timeout = timeout + self._watchmanclient = None + self._root = repo.root + self._ui = repo.ui + self._firsttime = True + + def settimeout(self, timeout): + self._timeout = timeout + if self._watchmanclient is not None: + self._watchmanclient.setTimeout(timeout) + + def getcurrentclock(self): + result = self.command('clock') + if not util.safehasattr(result, 'clock'): + raise Unavailable('clock result is missing clock value', + invalidate=True) + return result.clock + + def clearconnection(self): + self._watchmanclient = None + + def available(self): + return self._watchmanclient is not None or self._firsttime + + @util.propertycache + def _user(self): + 
try: + return getpass.getuser() + except KeyError: + # couldn't figure out our user + return None + + def _command(self, *args): + watchmanargs = (args[0], self._root) + args[1:] + try: + if self._watchmanclient is None: + self._firsttime = False + self._watchmanclient = pywatchman.client( + timeout=self._timeout, + useImmutableBser=True) + return self._watchmanclient.query(*watchmanargs) + except pywatchman.CommandError as ex: + if ex.msg.startswith('unable to resolve root'): + raise WatchmanNoRoot(self._root, ex.msg) + raise Unavailable(ex.msg) + except pywatchman.WatchmanError as ex: + raise Unavailable(str(ex)) + + def command(self, *args): + try: + try: + return self._command(*args) + except WatchmanNoRoot: + # this 'watch' command can also raise a WatchmanNoRoot if + # watchman refuses to accept this root + self._command('watch') + return self._command(*args) + except Unavailable: + # this is in an outer scope to catch Unavailable form any of the + # above _command calls + self._watchmanclient = None + raise
--- a/hgext/histedit.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/histedit.py Sat Apr 16 18:06:48 2016 -0500 @@ -220,13 +220,14 @@ tertiaryactions = set() internalactions = set() -def geteditcomment(first, last): +def geteditcomment(ui, first, last): """ construct the editor comment The comment includes:: - an intro - sorted primary commands - sorted short commands - sorted long commands + - additional hints Commands are only included once. """ @@ -234,6 +235,8 @@ Commits are listed from least to most recent +You can reorder changesets by reordering the lines + Commands: """) actions = [] @@ -253,8 +256,14 @@ addverb(v) actions.append('') - return ''.join(['# %s\n' % l if l else '#\n' - for l in ((intro % (first, last)).split('\n')) + actions]) + hints = [] + if ui.configbool('histedit', 'dropmissing'): + hints.append("Deleting a changeset from the list " + "will DISCARD it from the edited history!") + + lines = (intro % (first, last)).split('\n') + actions + hints + + return ''.join(['# %s\n' % l if l else '#\n' for l in lines]) class histeditstate(object): def __init__(self, repo, parentctxnode=None, actions=None, keep=None, @@ -279,7 +288,7 @@ except IOError as err: if err.errno != errno.ENOENT: raise - raise error.Abort(_('no histedit in progress')) + cmdutil.wrongtooltocontinue(self.repo, _('histedit')) if state.startswith('v1\n'): data = self._load() @@ -447,13 +456,18 @@ parentctx, but does not commit them.""" repo = self.repo rulectx = repo[self.node] + repo.ui.pushbuffer(error=True, labeled=True) hg.update(repo, self.state.parentctxnode, quietempty=True) stats = applychanges(repo.ui, repo, rulectx, {}) if stats and stats[3] > 0: + buf = repo.ui.popbuffer() + repo.ui.write(*buf) raise error.InterventionRequired( _('Fix up the change (%s %s)') % (self.verb, node.short(self.node)), hint=_('hg histedit --continue to resume')) + else: + repo.ui.popbuffer() def continuedirty(self): """Continues the action when changes have been applied to the working @@ -477,7 
+491,7 @@ rulectx.""" ctx = self.repo['.'] if ctx.node() == self.state.parentctxnode: - self.repo.ui.warn(_('%s: empty changeset\n') % + self.repo.ui.warn(_('%s: skipping changeset (no changes)\n') % node.short(self.node)) return ctx, [(self.node, tuple())] if ctx.node() == self.node: @@ -733,7 +747,9 @@ def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges): parent = ctx.parents()[0].node() + repo.ui.pushbuffer() hg.update(repo, parent) + repo.ui.popbuffer() ### prepare new commit data commitopts = {} commitopts['user'] = ctx.user() @@ -764,7 +780,9 @@ repo.ui.restoreconfig(phasebackup) if n is None: return ctx, [] + repo.ui.pushbuffer() hg.update(repo, n) + repo.ui.popbuffer() replacements = [(oldctx.node(), (newnode,)), (ctx.node(), (n,)), (newnode, (n,)), @@ -892,7 +910,7 @@ - Specify ANCESTOR directly - Use --outgoing -- it will be the first linear changeset not - included in destination. (See :hg:`help config.default-push`) + included in destination. (See :hg:`help config.paths.default-push`) - Otherwise, the value from the "histedit.defaultrev" config option is used as a revset to select the base revision when ANCESTOR is not @@ -973,7 +991,28 @@ finally: release(state.lock, state.wlock) -def _histedit(ui, repo, state, *freeargs, **opts): +goalcontinue = 'continue' +goalabort = 'abort' +goaleditplan = 'edit-plan' +goalnew = 'new' + +def _getgoal(opts): + if opts.get('continue'): + return goalcontinue + if opts.get('abort'): + return goalabort + if opts.get('edit_plan'): + return goaleditplan + return goalnew + +def _readfile(path): + if path == '-': + return sys.stdin.read() + else: + with open(path, 'rb') as f: + return f.read() + +def _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs): # TODO only abort if we try to histedit mq patches, not just # blanket if mq patches are applied somewhere mq = getattr(repo, 'mq', None) @@ -982,28 +1021,21 @@ # basic argument incompatibility processing outg = opts.get('outgoing') - cont = 
opts.get('continue') editplan = opts.get('edit_plan') abort = opts.get('abort') force = opts.get('force') - rules = opts.get('commands', '') - revs = opts.get('rev', []) - goal = 'new' # This invocation goal, in new, continue, abort if force and not outg: raise error.Abort(_('--force only allowed with --outgoing')) - if cont: + if goal == 'continue': if any((outg, abort, revs, freeargs, rules, editplan)): raise error.Abort(_('no arguments allowed with --continue')) - goal = 'continue' - elif abort: + elif goal == 'abort': if any((outg, revs, freeargs, rules, editplan)): raise error.Abort(_('no arguments allowed with --abort')) - goal = 'abort' - elif editplan: + elif goal == 'edit-plan': if any((outg, revs, freeargs)): raise error.Abort(_('only --commands argument allowed with ' '--edit-plan')) - goal = 'edit-plan' else: if os.path.exists(os.path.join(repo.path, 'histedit-state')): raise error.Abort(_('history edit already in progress, try ' @@ -1025,124 +1057,36 @@ raise error.Abort( _('histedit requires exactly one ancestor revision')) +def _histedit(ui, repo, state, *freeargs, **opts): + goal = _getgoal(opts) + revs = opts.get('rev', []) + rules = opts.get('commands', '') + state.keep = opts.get('keep', False) - replacements = [] - state.keep = opts.get('keep', False) - supportsmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt) + _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs) # rebuild state - if goal == 'continue': + if goal == goalcontinue: state.read() state = bootstrapcontinue(ui, state, opts) - elif goal == 'edit-plan': - state.read() - if not rules: - comment = geteditcomment(node.short(state.parentctxnode), - node.short(state.topmost)) - rules = ruleeditor(repo, ui, state.actions, comment) - else: - if rules == '-': - f = sys.stdin - else: - f = open(rules) - rules = f.read() - f.close() - actions = parserules(rules, state) - ctxs = [repo[act.nodetoverify()] \ - for act in state.actions if act.nodetoverify()] - 
warnverifyactions(ui, repo, actions, state, ctxs) - state.actions = actions - state.write() + elif goal == goaleditplan: + _edithisteditplan(ui, repo, state, rules) return - elif goal == 'abort': - try: - state.read() - tmpnodes, leafs = newnodestoabort(state) - ui.debug('restore wc to old parent %s\n' - % node.short(state.topmost)) - - # Recover our old commits if necessary - if not state.topmost in repo and state.backupfile: - backupfile = repo.join(state.backupfile) - f = hg.openpath(ui, backupfile) - gen = exchange.readbundle(ui, f, backupfile) - with repo.transaction('histedit.abort') as tr: - if not isinstance(gen, bundle2.unbundle20): - gen.apply(repo, 'histedit', 'bundle:' + backupfile) - if isinstance(gen, bundle2.unbundle20): - bundle2.applybundle(repo, gen, tr, - source='histedit', - url='bundle:' + backupfile) - - os.remove(backupfile) - - # check whether we should update away - if repo.unfiltered().revs('parents() and (%n or %ln::)', - state.parentctxnode, leafs | tmpnodes): - hg.clean(repo, state.topmost, show_stats=True, quietempty=True) - cleanupnode(ui, repo, 'created', tmpnodes) - cleanupnode(ui, repo, 'temp', leafs) - except Exception: - if state.inprogress(): - ui.warn(_('warning: encountered an exception during histedit ' - '--abort; the repository may not have been completely ' - 'cleaned up\n')) - raise - finally: - state.clear() + elif goal == goalabort: + _aborthistedit(ui, repo, state) return else: - cmdutil.checkunfinished(repo) - cmdutil.bailifchanged(repo) + # goal == goalnew + _newhistedit(ui, repo, state, revs, freeargs, opts) - topmost, empty = repo.dirstate.parents() - if outg: - if freeargs: - remote = freeargs[0] - else: - remote = None - root = findoutgoing(ui, repo, remote, force, opts) - else: - rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs))) - if len(rr) != 1: - raise error.Abort(_('The specified revisions must have ' - 'exactly one common root')) - root = rr[0].node() - - revs = between(repo, root, topmost, 
state.keep) - if not revs: - raise error.Abort(_('%s is not an ancestor of working directory') % - node.short(root)) + _continuehistedit(ui, repo, state) + _finishhistedit(ui, repo, state) - ctxs = [repo[r] for r in revs] - if not rules: - comment = geteditcomment(node.short(root), node.short(topmost)) - actions = [pick(state, r) for r in revs] - rules = ruleeditor(repo, ui, actions, comment) - else: - if rules == '-': - f = sys.stdin - else: - f = open(rules) - rules = f.read() - f.close() - actions = parserules(rules, state) - warnverifyactions(ui, repo, actions, state, ctxs) - - parentctxnode = repo[root].parents()[0].node() - - state.parentctxnode = parentctxnode - state.actions = actions - state.topmost = topmost - state.replacements = replacements - - # Create a backup so we can always abort completely. - backupfile = None - if not obsolete.isenabled(repo, obsolete.createmarkersopt): - backupfile = repair._bundle(repo, [parentctxnode], [topmost], root, - 'histedit') - state.backupfile = backupfile - +def _continuehistedit(ui, repo, state): + """This function runs after either: + - bootstrapcontinue (if the goal is 'continue') + - _newhistedit (if the goal is 'new') + """ # preprocess rules so that we can hide inner folds from the user # and only show one editor actions = state.actions[:] @@ -1167,7 +1111,11 @@ state.write() ui.progress(_("editing"), None) +def _finishhistedit(ui, repo, state): + """This action runs when histedit is finishing its session""" + repo.ui.pushbuffer() hg.update(repo, state.parentctxnode, quietempty=True) + repo.ui.popbuffer() mapping, tmpnodes, created, ntm = processreplacement(state) if mapping: @@ -1182,6 +1130,7 @@ for n in succs[1:]: ui.debug(m % node.short(n)) + supportsmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt) if supportsmarkers: # Only create markers if the temp nodes weren't already removed. 
obsolete.createmarkers(repo, ((repo[t],()) for t in sorted(tmpnodes) @@ -1211,6 +1160,110 @@ if repo.vfs.exists('histedit-last-edit.txt'): repo.vfs.unlink('histedit-last-edit.txt') +def _aborthistedit(ui, repo, state): + try: + state.read() + __, leafs, tmpnodes, __ = processreplacement(state) + ui.debug('restore wc to old parent %s\n' + % node.short(state.topmost)) + + # Recover our old commits if necessary + if not state.topmost in repo and state.backupfile: + backupfile = repo.join(state.backupfile) + f = hg.openpath(ui, backupfile) + gen = exchange.readbundle(ui, f, backupfile) + with repo.transaction('histedit.abort') as tr: + if not isinstance(gen, bundle2.unbundle20): + gen.apply(repo, 'histedit', 'bundle:' + backupfile) + if isinstance(gen, bundle2.unbundle20): + bundle2.applybundle(repo, gen, tr, + source='histedit', + url='bundle:' + backupfile) + + os.remove(backupfile) + + # check whether we should update away + if repo.unfiltered().revs('parents() and (%n or %ln::)', + state.parentctxnode, leafs | tmpnodes): + hg.clean(repo, state.topmost, show_stats=True, quietempty=True) + cleanupnode(ui, repo, 'created', tmpnodes) + cleanupnode(ui, repo, 'temp', leafs) + except Exception: + if state.inprogress(): + ui.warn(_('warning: encountered an exception during histedit ' + '--abort; the repository may not have been completely ' + 'cleaned up\n')) + raise + finally: + state.clear() + +def _edithisteditplan(ui, repo, state, rules): + state.read() + if not rules: + comment = geteditcomment(ui, + node.short(state.parentctxnode), + node.short(state.topmost)) + rules = ruleeditor(repo, ui, state.actions, comment) + else: + rules = _readfile(rules) + actions = parserules(rules, state) + ctxs = [repo[act.nodetoverify()] \ + for act in state.actions if act.nodetoverify()] + warnverifyactions(ui, repo, actions, state, ctxs) + state.actions = actions + state.write() + +def _newhistedit(ui, repo, state, revs, freeargs, opts): + outg = opts.get('outgoing') + rules = 
opts.get('commands', '') + force = opts.get('force') + + cmdutil.checkunfinished(repo) + cmdutil.bailifchanged(repo) + + topmost, empty = repo.dirstate.parents() + if outg: + if freeargs: + remote = freeargs[0] + else: + remote = None + root = findoutgoing(ui, repo, remote, force, opts) + else: + rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs))) + if len(rr) != 1: + raise error.Abort(_('The specified revisions must have ' + 'exactly one common root')) + root = rr[0].node() + + revs = between(repo, root, topmost, state.keep) + if not revs: + raise error.Abort(_('%s is not an ancestor of working directory') % + node.short(root)) + + ctxs = [repo[r] for r in revs] + if not rules: + comment = geteditcomment(ui, node.short(root), node.short(topmost)) + actions = [pick(state, r) for r in revs] + rules = ruleeditor(repo, ui, actions, comment) + else: + rules = _readfile(rules) + actions = parserules(rules, state) + warnverifyactions(ui, repo, actions, state, ctxs) + + parentctxnode = repo[root].parents()[0].node() + + state.parentctxnode = parentctxnode + state.actions = actions + state.topmost = topmost + state.replacements = [] + + # Create a backup so we can always abort completely. 
+ backupfile = None + if not obsolete.isenabled(repo, obsolete.createmarkersopt): + backupfile = repair._bundle(repo, [parentctxnode], [topmost], root, + 'histedit') + state.backupfile = backupfile + def bootstrapcontinue(ui, state, opts): repo = state.repo if state.actions: @@ -1236,7 +1289,8 @@ if ctxs and not keep: if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and repo.revs('(%ld::) - (%ld)', ctxs, ctxs)): - raise error.Abort(_('cannot edit history that would orphan nodes')) + raise error.Abort(_('can only histedit a changeset together ' + 'with all its descendants')) if repo.revs('(%ld) and merge()', ctxs): raise error.Abort(_('cannot edit history that contains merges')) root = ctxs[0] # list is already sorted by repo.set @@ -1330,6 +1384,10 @@ missing = sorted(expected - seen) # sort to stabilize output if state.repo.ui.configbool('histedit', 'dropmissing'): + if len(actions) == 0: + raise error.ParseError(_('no rules provided'), + hint=_('use strip extension to remove commits')) + drops = [drop(state, node.bin(n)) for n in missing] # put the in the beginning so they execute immediately and # don't show in the edit-plan in the future @@ -1340,24 +1398,40 @@ hint=_('use "drop %s" to discard, see also: ' '"hg help -e histedit.config"') % missing[0][:12]) -def newnodestoabort(state): - """process the list of replacements to return +def adjustreplacementsfrommarkers(repo, oldreplacements): + """Adjust replacements from obsolescense markers - 1) the list of final node - 2) the list of temporary node + Replacements structure is originally generated based on + histedit's state and does not account for changes that are + not recorded there. This function fixes that by adding + data read from obsolescense markers""" + if not obsolete.isenabled(repo, obsolete.createmarkersopt): + return oldreplacements - This is meant to be used on abort as less data are required in this case. 
- """ - replacements = state.replacements - allsuccs = set() - replaced = set() - for rep in replacements: - allsuccs.update(rep[1]) - replaced.add(rep[0]) - newnodes = allsuccs - replaced - tmpnodes = allsuccs & replaced - return newnodes, tmpnodes + unfi = repo.unfiltered() + nm = unfi.changelog.nodemap + obsstore = repo.obsstore + newreplacements = list(oldreplacements) + oldsuccs = [r[1] for r in oldreplacements] + # successors that have already been added to succstocheck once + seensuccs = set().union(*oldsuccs) # create a set from an iterable of tuples + succstocheck = list(seensuccs) + while succstocheck: + n = succstocheck.pop() + missing = nm.get(n) is None + markers = obsstore.successors.get(n, ()) + if missing and not markers: + # dead end, mark it as such + newreplacements.append((n, ())) + for marker in markers: + nsuccs = marker[1] + newreplacements.append((n, nsuccs)) + for nsucc in nsuccs: + if nsucc not in seensuccs: + seensuccs.add(nsucc) + succstocheck.append(nsucc) + return newreplacements def processreplacement(state): """process the list of replacements to return @@ -1365,7 +1439,7 @@ 1) the final mapping between original and created nodes 2) the list of temporary node created by histedit 3) the list of new commit created by histedit""" - replacements = state.replacements + replacements = adjustreplacementsfrommarkers(state.repo, state.replacements) allsuccs = set() replaced = set() fullmapping = {}
--- a/hgext/keyword.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/keyword.py Sat Apr 16 18:06:48 2016 -0500 @@ -82,12 +82,33 @@ {desc}" expands to the first line of the changeset description. ''' -from mercurial import commands, context, cmdutil, dispatch, filelog, extensions -from mercurial import localrepo, match, patch, templatefilters, util, error -from mercurial import scmutil, pathutil + +from __future__ import absolute_import + +import os +import re +import tempfile + from mercurial.hgweb import webcommands from mercurial.i18n import _ -import os, re, tempfile + +from mercurial import ( + cmdutil, + commands, + context, + dispatch, + error, + extensions, + filelog, + localrepo, + match, + patch, + pathutil, + registrar, + scmutil, + templatefilters, + util, +) cmdtable = {} command = cmdutil.command(cmdtable) @@ -117,28 +138,29 @@ 'kwfiles.ignoredunknown': 'none' } +templatefilter = registrar.templatefilter() + # date like in cvs' $Date +@templatefilter('utcdate') def utcdate(text): - ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13". + '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13". ''' return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S') # date like in svn's $Date +@templatefilter('svnisodate') def svnisodate(text): - ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13 + '''Date. Returns a date in this format: "2009-08-18 13:00:13 +0200 (Tue, 18 Aug 2009)". ''' return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)') # date like in svn's $Id +@templatefilter('svnutcdate') def svnutcdate(text): - ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18 + '''Date. Returns a UTC-date in this format: "2009-08-18 11:00:13Z". 
''' return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ') -templatefilters.filters.update({'utcdate': utcdate, - 'svnisodate': svnisodate, - 'svnutcdate': svnutcdate}) - # make keyword tools accessible kwtools = {'templater': None, 'hgcmd': ''} @@ -410,10 +432,8 @@ ui.readconfig(opts.get('rcfile')) if args: # simulate hgrc parsing - rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args] - fp = repo.vfs('hgrc', 'w') - fp.writelines(rcmaps) - fp.close() + rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args) + repo.vfs.write('hgrc', rcmaps) ui.readconfig(repo.join('hgrc')) kwmaps = dict(ui.configitems('keywordmaps')) elif opts.get('default'):
--- a/hgext/largefiles/__init__.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/__init__.py Sat Apr 16 18:06:48 2016 -0500 @@ -33,7 +33,7 @@ $ dd if=/dev/urandom of=randomdata count=2000 $ hg add --large randomdata - $ hg commit -m 'add randomdata as a largefile' + $ hg commit -m "add randomdata as a largefile" When you push a changeset that adds/modifies largefiles to a remote repository, its largefile revisions will be uploaded along with it. @@ -111,6 +111,7 @@ import proto import reposetup import uisetup as uisetupmod +import overrides # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should @@ -130,3 +131,4 @@ uisetupmod.uisetup(ui) cmdtable = lfcommands.cmdtable +revsetpredicate = overrides.revsetpredicate
--- a/hgext/largefiles/basestore.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/basestore.py Sat Apr 16 18:06:48 2016 -0500 @@ -63,7 +63,7 @@ at = 0 available = self.exists(set(hash for (_filename, hash) in files)) for filename, hash in files: - ui.progress(_('getting largefiles'), at, unit='lfile', + ui.progress(_('getting largefiles'), at, unit=_('files'), total=len(files)) at += 1 ui.note(_('getting %s:%s\n') % (filename, hash))
--- a/hgext/largefiles/lfcommands.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/lfcommands.py Sat Apr 16 18:06:48 2016 -0500 @@ -99,19 +99,19 @@ lfiletohash = {} for ctx in ctxs: ui.progress(_('converting revisions'), ctx.rev(), - unit=_('revision'), total=rsrc['tip'].rev()) + unit=_('revisions'), total=rsrc['tip'].rev()) _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles, matcher, size, lfiletohash) ui.progress(_('converting revisions'), None) - if os.path.exists(rdst.wjoin(lfutil.shortname)): - shutil.rmtree(rdst.wjoin(lfutil.shortname)) + if rdst.wvfs.exists(lfutil.shortname): + rdst.wvfs.rmtree(lfutil.shortname) for f in lfiletohash.keys(): - if os.path.isfile(rdst.wjoin(f)): - os.unlink(rdst.wjoin(f)) + if rdst.wvfs.isfile(f): + rdst.wvfs.unlink(f) try: - os.removedirs(os.path.dirname(rdst.wjoin(f))) + rdst.wvfs.removedirs(rdst.wvfs.dirname(f)) except OSError: pass @@ -346,7 +346,7 @@ ui.debug("%d largefiles need to be uploaded\n" % len(files)) for hash in files: - ui.progress(_('uploading largefiles'), at, unit='largefile', + ui.progress(_('uploading largefiles'), at, unit=_('files'), total=len(files)) source = lfutil.findfile(rsrc, hash) if not source: @@ -436,20 +436,26 @@ update = {} updated, removed = 0, 0 + wvfs = repo.wvfs for lfile in lfiles: - abslfile = repo.wjoin(lfile) - abslfileorig = scmutil.origpath(ui, repo, abslfile) - absstandin = repo.wjoin(lfutil.standin(lfile)) - absstandinorig = scmutil.origpath(ui, repo, absstandin) - if os.path.exists(absstandin): - if (os.path.exists(absstandinorig) and - os.path.exists(abslfile)): - shutil.copyfile(abslfile, abslfileorig) - util.unlinkpath(absstandinorig) + rellfile = lfile + rellfileorig = os.path.relpath( + scmutil.origpath(ui, repo, wvfs.join(rellfile)), + start=repo.root) + relstandin = lfutil.standin(lfile) + relstandinorig = os.path.relpath( + scmutil.origpath(ui, repo, wvfs.join(relstandin)), + start=repo.root) + if wvfs.exists(relstandin): + if 
(wvfs.exists(relstandinorig) and + wvfs.exists(rellfile)): + shutil.copyfile(wvfs.join(rellfile), + wvfs.join(rellfileorig)) + wvfs.unlinkpath(relstandinorig) expecthash = lfutil.readstandin(repo, lfile) if expecthash != '': if lfile not in repo[None]: # not switched to normal file - util.unlinkpath(abslfile, ignoremissing=True) + wvfs.unlinkpath(rellfile, ignoremissing=True) # use normallookup() to allocate an entry in largefiles # dirstate to prevent lfilesrepo.status() from reporting # missing files as removed. @@ -460,9 +466,9 @@ # lfile is added to the repository again. This happens when a # largefile is converted back to a normal file: the standin # disappears, but a new (normal) file appears as the lfile. - if (os.path.exists(abslfile) and + if (wvfs.exists(rellfile) and repo.dirstate.normalize(lfile) not in repo[None]): - util.unlinkpath(abslfile) + wvfs.unlinkpath(rellfile) removed += 1 # largefile processing might be slow and be interrupted - be prepared @@ -487,12 +493,12 @@ # copy the state of largefile standin from the repository's # dirstate to its state in the lfdirstate. - abslfile = repo.wjoin(lfile) - absstandin = repo.wjoin(lfutil.standin(lfile)) - if os.path.exists(absstandin): - mode = os.stat(absstandin).st_mode - if mode != os.stat(abslfile).st_mode: - os.chmod(abslfile, mode) + rellfile = lfile + relstandin = lfutil.standin(lfile) + if wvfs.exists(relstandin): + mode = wvfs.stat(relstandin).st_mode + if mode != wvfs.stat(rellfile).st_mode: + wvfs.chmod(rellfile, mode) update1 = 1 updated += update1
--- a/hgext/largefiles/lfutil.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/lfutil.py Sat Apr 16 18:06:48 2016 -0500 @@ -39,6 +39,7 @@ return lfsize def link(src, dest): + """Try to create hardlink - if that fails, efficiently make a copy.""" util.makedirs(os.path.dirname(dest)) try: util.oslink(src, dest) @@ -51,36 +52,44 @@ os.chmod(dest, os.stat(src).st_mode) def usercachepath(ui, hash): + '''Return the correct location in the "global" largefiles cache for a file + with the given hash. + This cache is used for sharing of largefiles across repositories - both + to preserve download bandwidth and storage space.''' + return os.path.join(_usercachedir(ui), hash) + +def _usercachedir(ui): + '''Return the location of the "global" largefiles cache.''' path = ui.configpath(longname, 'usercache', None) if path: - path = os.path.join(path, hash) + return path + if os.name == 'nt': + appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA')) + if appdata: + return os.path.join(appdata, longname) + elif platform.system() == 'Darwin': + home = os.getenv('HOME') + if home: + return os.path.join(home, 'Library', 'Caches', longname) + elif os.name == 'posix': + path = os.getenv('XDG_CACHE_HOME') + if path: + return os.path.join(path, longname) + home = os.getenv('HOME') + if home: + return os.path.join(home, '.cache', longname) else: - if os.name == 'nt': - appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA')) - if appdata: - path = os.path.join(appdata, longname, hash) - elif platform.system() == 'Darwin': - home = os.getenv('HOME') - if home: - path = os.path.join(home, 'Library', 'Caches', - longname, hash) - elif os.name == 'posix': - path = os.getenv('XDG_CACHE_HOME') - if path: - path = os.path.join(path, longname, hash) - else: - home = os.getenv('HOME') - if home: - path = os.path.join(home, '.cache', longname, hash) - else: - raise error.Abort(_('unknown operating system: %s\n') % os.name) - return path + raise error.Abort(_('unknown operating system: 
%s\n') % os.name) + raise error.Abort(_('unknown %s usercache location\n') % longname) def inusercache(ui, hash): path = usercachepath(ui, hash) - return path and os.path.exists(path) + return os.path.exists(path) def findfile(repo, hash): + '''Return store path of the largefile with the specified hash. + As a side effect, the file might be linked from user cache. + Return None if the file can't be found locally.''' path, exists = findstorepath(repo, hash) if exists: repo.ui.note(_('found %s in store\n') % hash) @@ -120,20 +129,21 @@ Return a dirstate object that tracks largefiles: i.e. its root is the repo root, but it is saved in .hg/largefiles/dirstate. ''' - lfstoredir = repo.join(longname) - opener = scmutil.opener(lfstoredir) + vfs = repo.vfs + lfstoredir = longname + opener = scmutil.opener(vfs.join(lfstoredir)) lfdirstate = largefilesdirstate(opener, ui, repo.root, repo.dirstate._validate) # If the largefiles dirstate does not exist, populate and create # it. This ensures that we create it on the first meaningful # largefiles operation in a new clone. 
- if create and not os.path.exists(os.path.join(lfstoredir, 'dirstate')): + if create and not vfs.exists(vfs.join(lfstoredir, 'dirstate')): matcher = getstandinmatcher(repo) standins = repo.dirstate.walk(matcher, [], False, False) if len(standins) > 0: - util.makedirs(lfstoredir) + vfs.makedirs(lfstoredir) for standin in standins: lfile = splitstandin(standin) @@ -170,9 +180,13 @@ if rev is not None or repo.dirstate[f] != '?'] def instore(repo, hash, forcelocal=False): + '''Return true if a largefile with the given hash exists in the user + cache.''' return os.path.exists(storepath(repo, hash, forcelocal)) def storepath(repo, hash, forcelocal=False): + '''Return the correct location in the repository largefiles cache for a + file with the given hash.''' if not forcelocal and repo.shared(): return repo.vfs.reljoin(repo.sharedpath, longname, hash) return repo.join(longname, hash) @@ -200,30 +214,30 @@ file was not found in either cache (which should not happened: this is meant to be called only after ensuring that the needed largefile exists in the cache).''' + wvfs = repo.wvfs path = findfile(repo, hash) if path is None: return False - util.makedirs(os.path.dirname(repo.wjoin(filename))) + wvfs.makedirs(wvfs.dirname(wvfs.join(filename))) # The write may fail before the file is fully written, but we # don't use atomic writes in the working copy. 
- dest = repo.wjoin(filename) with open(path, 'rb') as srcfd: - with open(dest, 'wb') as destfd: + with wvfs(filename, 'wb') as destfd: gothash = copyandhash(srcfd, destfd) if gothash != hash: repo.ui.warn(_('%s: data corruption in %s with hash %s\n') % (filename, path, gothash)) - util.unlink(dest) + wvfs.unlink(filename) return False return True def copytostore(repo, rev, file, uploaded=False): + wvfs = repo.wvfs hash = readstandin(repo, file, rev) if instore(repo, hash): return - absfile = repo.wjoin(file) - if os.path.exists(absfile): - copytostoreabsolute(repo, absfile, hash) + if wvfs.exists(file): + copytostoreabsolute(repo, wvfs.join(file), hash) else: repo.ui.warn(_("%s: largefile %s not available from local store\n") % (file, hash)) @@ -251,27 +265,29 @@ linktousercache(repo, hash) def linktousercache(repo, hash): + '''Link / copy the largefile with the specified hash from the store + to the cache.''' path = usercachepath(repo.ui, hash) - if path: - link(storepath(repo, hash), path) + link(storepath(repo, hash), path) def getstandinmatcher(repo, rmatcher=None): '''Return a match object that applies rmatcher to the standin directory''' - standindir = repo.wjoin(shortname) + wvfs = repo.wvfs + standindir = shortname # no warnings about missing files or directories badfn = lambda f, msg: None if rmatcher and not rmatcher.always(): - pats = [os.path.join(standindir, pat) for pat in rmatcher.files()] + pats = [wvfs.join(standindir, pat) for pat in rmatcher.files()] if not pats: - pats = [standindir] + pats = [wvfs.join(standindir)] match = scmutil.match(repo[None], pats, badfn=badfn) # if pats is empty, it would incorrectly always match, so clear _always match._always = False else: # no patterns: relative to repo root - match = scmutil.match(repo[None], [standindir], badfn=badfn) + match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn) return match def composestandinmatcher(repo, rmatcher): @@ -315,7 +331,7 @@ def updatestandin(repo, standin): 
file = repo.wjoin(splitstandin(standin)) - if os.path.exists(file): + if repo.wvfs.exists(splitstandin(standin)): hash = hashfile(file) executable = getexecutable(file) writestandin(repo, standin, hash, executable) @@ -388,6 +404,7 @@ return util.pconvert(os.path.normpath(path)) def islfilesrepo(repo): + '''Return true if the repo is a largefile repo.''' if ('largefiles' in repo.requirements and any(shortnameslash in f[0] for f in repo.store.datafiles())): return True @@ -419,7 +436,7 @@ state, mtime = '?', -1 if state == 'n': if (normallookup or mtime < 0 or - not os.path.exists(repo.wjoin(lfile))): + not repo.wvfs.exists(lfile)): # state 'n' doesn't ensure 'clean' in this case lfdirstate.normallookup(lfile) else: @@ -468,8 +485,8 @@ def getlfilestoupload(repo, missing, addfunc): for i, n in enumerate(missing): repo.ui.progress(_('finding outgoing largefiles'), i, - unit=_('revision'), total=len(missing)) - parents = [p for p in repo.changelog.parents(n) if p != node.nullid] + unit=_('revisions'), total=len(missing)) + parents = [p for p in repo[n].parents() if p != node.nullid] oldlfstatus = repo.lfstatus repo.lfstatus = False @@ -525,12 +542,11 @@ # removed/renamed) for lfile in lfiles: if lfile in modifiedfiles: - if os.path.exists( - repo.wjoin(standin(lfile))): + if repo.wvfs.exists(standin(lfile)): # this handles the case where a rebase is being # performed and the working copy is not updated # yet. - if os.path.exists(repo.wjoin(lfile)): + if repo.wvfs.exists(lfile): updatestandin(repo, standin(lfile))
--- a/hgext/largefiles/overrides.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/overrides.py Sat Apr 16 18:06:48 2016 -0500 @@ -12,7 +12,7 @@ import copy from mercurial import hg, util, cmdutil, scmutil, match as match_, \ - archival, pathutil, revset, error + archival, pathutil, registrar, revset, error from mercurial.i18n import _ import lfutil @@ -452,11 +452,10 @@ # writing the files into the working copy and lfcommands.updatelfiles # will update the largefiles. def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force, - acceptremote, followcopies, matcher=None): + acceptremote, *args, **kwargs): overwrite = force and not branchmerge actions, diverge, renamedelete = origfn( - repo, p1, p2, pas, branchmerge, force, acceptremote, - followcopies, matcher=matcher) + repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs) if overwrite: return actions, diverge, renamedelete @@ -604,7 +603,7 @@ def makestandin(relpath): path = pathutil.canonpath(repo.root, repo.getcwd(), relpath) - return os.path.join(repo.wjoin(lfutil.standin(path))) + return repo.wvfs.join(lfutil.standin(path)) fullpats = scmutil.expandpats(pats) dest = fullpats[-1] @@ -674,7 +673,7 @@ dest.startswith(repo.wjoin(lfutil.shortname))): srclfile = src.replace(repo.wjoin(lfutil.standin('')), '') destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '') - destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.' + destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or '.' 
if not os.path.isdir(destlfiledir): os.makedirs(destlfiledir) if rename: @@ -724,8 +723,8 @@ for lfile in s.modified: lfutil.updatestandin(repo, lfutil.standin(lfile)) for lfile in s.deleted: - if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))): - os.unlink(repo.wjoin(lfutil.standin(lfile))) + if (repo.wvfs.exists(lfutil.standin(lfile))): + repo.wvfs.unlink(lfutil.standin(lfile)) oldstandins = lfutil.getstandinsstate(repo) @@ -802,7 +801,22 @@ ui.status(_("%d largefiles cached\n") % numcached) return result -revsetpredicate = revset.extpredicate() +def overridepush(orig, ui, repo, *args, **kwargs): + """Override push command and store --lfrev parameters in opargs""" + lfrevs = kwargs.pop('lfrev', None) + if lfrevs: + opargs = kwargs.setdefault('opargs', {}) + opargs['lfrevs'] = scmutil.revrange(repo, lfrevs) + return orig(ui, repo, *args, **kwargs) + +def exchangepushoperation(orig, *args, **kwargs): + """Override pushoperation constructor and store lfrevs parameter""" + lfrevs = kwargs.pop('lfrevs', None) + pushop = orig(*args, **kwargs) + pushop.lfrevs = lfrevs + return pushop + +revsetpredicate = registrar.revsetpredicate() @revsetpredicate('pulled()') def pulledrevsetsymbol(repo, subset, x): @@ -963,7 +977,7 @@ if subrepos: for subpath in sorted(ctx.substate): sub = ctx.workingsub(subpath) - submatch = match_.narrowmatcher(subpath, matchfn) + submatch = match_.subdirmatcher(subpath, matchfn) sub._repo.lfstatus = True sub.archive(archiver, prefix, submatch) @@ -1011,7 +1025,7 @@ for subpath in sorted(ctx.substate): sub = ctx.workingsub(subpath) - submatch = match_.narrowmatcher(subpath, match) + submatch = match_.subdirmatcher(subpath, match) sub._repo.lfstatus = True sub.archive(archiver, prefix + repo._path + '/', submatch) @@ -1367,7 +1381,7 @@ pctx = repo['.'] for lfile in unsure + s.modified: lfileabs = repo.wvfs.join(lfile) - if not os.path.exists(lfileabs): + if not repo.wvfs.exists(lfileabs): continue lfhash = lfutil.hashrepofile(repo, lfile) standin 
= lfutil.standin(lfile)
--- a/hgext/largefiles/proto.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/proto.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,12 +4,14 @@ # GNU General Public License version 2 or any later version. import os -import urllib2 import re from mercurial import error, httppeer, util, wireproto from mercurial.i18n import _ +urlerr = util.urlerr +urlreq = util.urlreq + import lfutil LARGEFILES_REQUIRED_MSG = ('\nThis repository uses the largefiles extension.' @@ -22,8 +24,8 @@ httpoldcallstream = None def putlfile(repo, proto, sha): - '''Put a largefile into a repository's local store and into the - user cache.''' + '''Server command for putting a largefile into a repository's local store + and into the user cache.''' proto.redirect() path = lfutil.storepath(repo, sha) @@ -47,8 +49,8 @@ return wireproto.pushres(0) def getlfile(repo, proto, sha): - '''Retrieve a largefile from the repository-local cache or system - cache.''' + '''Server command for retrieving a largefile from the repository-local + cache or user cache.''' filename = lfutil.findfile(repo, sha) if not filename: raise error.Abort(_('requested largefile %s not present in cache') @@ -68,8 +70,8 @@ return wireproto.streamres(generator()) def statlfile(repo, proto, sha): - '''Return '2\n' if the largefile is missing, '0\n' if it seems to be in - good condition. + '''Server command for checking if a largefile is present - returns '2\n' if + the largefile is missing, '0\n' if it seems to be in good condition. 
The value 1 is reserved for mismatched checksum, but that is too expensive to be verified on every stat and must be caught be running 'hg verify' @@ -140,7 +142,7 @@ yield result, f try: yield int(f.value) - except (ValueError, urllib2.HTTPError): + except (ValueError, urlerr.httperror): # If the server returns anything but an integer followed by a # newline, newline, it's not speaking our language; if we get # an HTTP error, we can't be sure the largefile is present; @@ -151,9 +153,12 @@ # advertise the largefiles=serve capability def capabilities(repo, proto): + '''Wrap server command to announce largefile server capability''' return capabilitiesorig(repo, proto) + ' largefiles=serve' def heads(repo, proto): + '''Wrap server command - largefile capable clients will know to call + lheads instead''' if lfutil.islfilesrepo(repo): return wireproto.ooberror(LARGEFILES_REQUIRED_MSG) return wireproto.heads(repo, proto)
--- a/hgext/largefiles/remotestore.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/remotestore.py Sat Apr 16 18:06:48 2016 -0500 @@ -6,11 +6,12 @@ '''remote largefile store; the base class for wirestore''' -import urllib2 - from mercurial import util, wireproto, error from mercurial.i18n import _ +urlerr = util.urlerr +urlreq = util.urlreq + import lfutil import basestore @@ -49,11 +50,11 @@ def _getfile(self, tmpfile, filename, hash): try: chunks = self._get(hash) - except urllib2.HTTPError as e: + except urlerr.httperror as e: # 401s get converted to error.Aborts; everything else is fine being # turned into a StoreError raise basestore.StoreError(filename, hash, self.url, str(e)) - except urllib2.URLError as e: + except urlerr.urlerror as e: # This usually indicates a connection problem, so don't # keep trying with the other files... they will probably # all fail too. @@ -96,3 +97,18 @@ def batch(self): '''Support for remote batching.''' return wireproto.remotebatch(self) + + def _put(self, hash, fd): + '''Put file with the given hash in the remote store.''' + raise NotImplementedError('abstract method') + + def _get(self, hash): + '''Get file with the given hash from the remote store.''' + raise NotImplementedError('abstract method') + + def _stat(self, hashes): + '''Get information about availability of files specified by + hashes in the remote store. Return dictionary mapping hashes + to return code where 0 means that file is available, other + values if not.''' + raise NotImplementedError('abstract method')
--- a/hgext/largefiles/reposetup.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/reposetup.py Sat Apr 16 18:06:48 2016 -0500 @@ -8,7 +8,6 @@ '''setup for largefiles repositories: reposetup''' import copy -import os from mercurial import error, match as match_, error from mercurial.i18n import _ @@ -305,7 +304,7 @@ _('file "%s" is a largefile standin') % f, hint=('commit the largefile itself instead')) # Scan directories - if os.path.isdir(self.wjoin(f)): + if self.wvfs.isdir(f): dirs.append(f) else: regulars.append(f) @@ -353,12 +352,17 @@ # is used to write status out. repo._lfstatuswriters = [ui.status] - def prepushoutgoinghook(local, remote, outgoing): - if outgoing.missing: + def prepushoutgoinghook(pushop): + """Push largefiles for pushop before pushing revisions.""" + lfrevs = pushop.lfrevs + if lfrevs is None: + lfrevs = pushop.outgoing.missing + if lfrevs: toupload = set() addfunc = lambda fn, lfhash: toupload.add(lfhash) - lfutil.getlfilestoupload(local, outgoing.missing, addfunc) - lfcommands.uploadlfiles(ui, local, remote, toupload) + lfutil.getlfilestoupload(pushop.repo, lfrevs, + addfunc) + lfcommands.uploadlfiles(ui, pushop.repo, pushop.remote, toupload) repo.prepushoutgoinghooks.add("largefiles", prepushoutgoinghook) def checkrequireslfiles(ui, repo, **kwargs):
--- a/hgext/largefiles/uisetup.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/uisetup.py Sat Apr 16 18:06:48 2016 -0500 @@ -9,7 +9,7 @@ '''setup for largefiles extension: uisetup''' from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \ - httppeer, merge, scmutil, sshpeer, wireproto, subrepo, copies + httppeer, merge, scmutil, sshpeer, wireproto, subrepo, copies, exchange from mercurial.i18n import _ from mercurial.hgweb import hgweb_mod, webcommands @@ -84,6 +84,14 @@ _('download largefiles for these revisions'), _('REV'))] entry[1].extend(pullopt) + entry = extensions.wrapcommand(commands.table, 'push', + overrides.overridepush) + pushopt = [('', 'lfrev', [], + _('upload largefiles for these revisions'), _('REV'))] + entry[1].extend(pushopt) + entry = extensions.wrapfunction(exchange, 'pushoperation', + overrides.exchangepushoperation) + entry = extensions.wrapcommand(commands.table, 'clone', overrides.overrideclone) cloneopt = [('', 'all-largefiles', None, @@ -171,5 +179,3 @@ if name == 'transplant': extensions.wrapcommand(getattr(module, 'cmdtable'), 'transplant', overrides.overridetransplant) - - overrides.revsetpredicate.setup()
--- a/hgext/largefiles/wirestore.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/largefiles/wirestore.py Sat Apr 16 18:06:48 2016 -0500 @@ -29,12 +29,8 @@ '''For each hash, return 0 if it is available, other values if not. It is usually 2 if the largefile is missing, but might be 1 the server has a corrupted copy.''' - batch = self.remote.batch() - futures = {} + batch = self.remote.iterbatch() for hash in hashes: - futures[hash] = batch.statlfile(hash) + batch.statlfile(hash) batch.submit() - retval = {} - for hash in hashes: - retval[hash] = futures[hash].value - return retval + return dict(zip(hashes, batch.results()))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext/logtoprocess.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,129 @@ +# logtoprocess.py - send ui.log() data to a subprocess +# +# Copyright 2016 Facebook, Inc. +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +"""Send ui.log() data to a subprocess (EXPERIMENTAL) + +This extension lets you specify a shell command per ui.log() event, +sending all remaining arguments to as environment variables to that command. + +Each positional argument to the method results in a `MSG[N]` key in the +environment, starting at 1 (so `MSG1`, `MSG2`, etc.). Each keyword argument +is set as a `OPT_UPPERCASE_KEY` variable (so the key is uppercased, and +prefixed with `OPT_`). The original event name is passed in the `EVENT` +environment variable, and the process ID of mercurial is given in `HGPID`. + +So given a call `ui.log('foo', 'bar', 'baz', spam='eggs'), a script configured +for the `foo` event can expect an environment with `MSG1=bar`, `MSG2=baz`, and +`OPT_SPAM=eggs`. + +Scripts are configured in the `[logtoprocess]` section, each key an event name. +For example:: + + [logtoprocess] + commandexception = echo "$MSG2$MSG3" > /var/log/mercurial_exceptions.log + +would log the warning message and traceback of any failed command dispatch. + +Scripts are run asychronously as detached daemon processes; mercurial will +not ensure that they exit cleanly. + +""" + +from __future__ import absolute_import + +import itertools +import os +import platform +import subprocess +import sys + +# Note for extension authors: ONLY specify testedwith = 'internal' for +# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should +# be specifying the version(s) of Mercurial they are tested with, or +# leave the attribute unspecified. 
+testedwith = 'internal' + +def uisetup(ui): + if platform.system() == 'Windows': + # no fork on Windows, but we can create a detached process + # https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863.aspx + # No stdlib constant exists for this value + DETACHED_PROCESS = 0x00000008 + _creationflags = DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP + + def runshellcommand(script, env): + # we can't use close_fds *and* redirect stdin. I'm not sure that we + # need to because the detached process has no console connection. + subprocess.Popen( + script, shell=True, env=env, close_fds=True, + creationflags=_creationflags) + else: + def runshellcommand(script, env): + # double-fork to completely detach from the parent process + # based on http://code.activestate.com/recipes/278731 + pid = os.fork() + if pid: + # parent + return + # subprocess.Popen() forks again, all we need to add is + # flag the new process as a new session. + if sys.version_info < (3, 2): + newsession = {'preexec_fn': os.setsid} + else: + newsession = {'start_new_session': True} + try: + # connect stdin to devnull to make sure the subprocess can't + # muck up that stream for mercurial. + subprocess.Popen( + script, shell=True, stdin=open(os.devnull, 'r'), env=env, + close_fds=True, **newsession) + finally: + # mission accomplished, this child needs to exit and not + # continue the hg process here. + os._exit(0) + + class logtoprocessui(ui.__class__): + def log(self, event, *msg, **opts): + """Map log events to external commands + + Arguments are passed on as environment variables. + + """ + script = ui.config('logtoprocess', event) + if script: + if msg: + # try to format the log message given the remaining + # arguments + try: + # Python string formatting with % either uses a + # dictionary *or* tuple, but not both. If we have + # keyword options, assume we need a mapping. 
+ formatted = msg[0] % (opts or msg[1:]) + except (TypeError, KeyError): + # Failed to apply the arguments, ignore + formatted = msg[0] + messages = (formatted,) + msg[1:] + else: + messages = msg + # positional arguments are listed as MSG[N] keys in the + # environment + msgpairs = ( + ('MSG{0:d}'.format(i), str(m)) + for i, m in enumerate(messages, 1)) + # keyword arguments get prefixed with OPT_ and uppercased + optpairs = ( + ('OPT_{0}'.format(key.upper()), str(value)) + for key, value in opts.iteritems()) + env = dict(itertools.chain(os.environ.items(), + msgpairs, optpairs), + EVENT=event, HGPID=str(os.getpid())) + # Connect stdin to /dev/null to prevent child processes messing + # with mercurial's stdin. + runshellcommand(script, env) + return super(logtoprocessui, self).log(event, *msg, **opts) + + # Replace the class for this instance and all clones created from it: + ui.__class__ = logtoprocessui
--- a/hgext/mq.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/mq.py Sat Apr 16 18:06:48 2016 -0500 @@ -66,10 +66,12 @@ from mercurial.node import bin, hex, short, nullid, nullrev from mercurial.lock import release from mercurial import commands, cmdutil, hg, scmutil, util, revset +from mercurial import dispatch from mercurial import extensions, error, phases from mercurial import patch as patchmod from mercurial import lock as lockmod from mercurial import localrepo +from mercurial import registrar from mercurial import subrepo import os, re, errno, shutil @@ -3537,7 +3539,7 @@ # i18n: column positioning for "hg summary" ui.note(_("mq: (empty queue)\n")) -revsetpredicate = revset.extpredicate() +revsetpredicate = registrar.revsetpredicate() @revsetpredicate('mq()') def revsetmq(repo, subset, x): @@ -3561,12 +3563,11 @@ entry = extensions.wrapcommand(commands.table, 'init', mqinit) entry[1].extend(mqopt) - nowrap = set(commands.norepo.split(" ")) - def dotable(cmdtable): - for cmd in cmdtable.keys(): + for cmd, entry in cmdtable.iteritems(): cmd = cmdutil.parsealiases(cmd)[0] - if cmd in nowrap: + func = entry[0] + if dispatch._cmdattr(ui, cmd, func, 'norepo'): continue entry = extensions.wrapcommand(cmdtable, cmd, mqcommand) entry[1].extend(mqopt) @@ -3577,8 +3578,6 @@ if extmodule.__file__ != __file__: dotable(getattr(extmodule, 'cmdtable', {})) - revsetpredicate.setup() - colortable = {'qguard.negative': 'red', 'qguard.positive': 'yellow', 'qguard.unguarded': 'green',
--- a/hgext/notify.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/notify.py Sat Apr 16 18:06:48 2016 -0500 @@ -132,11 +132,21 @@ references. See also ``notify.strip``. ''' +from __future__ import absolute_import -import email, socket, time +import email +import fnmatch +import socket +import time + +from mercurial import ( + cmdutil, + error, + mail, + patch, + util, +) from mercurial.i18n import _ -from mercurial import patch, cmdutil, util, mail, error -import fnmatch # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should @@ -186,9 +196,11 @@ self.subs = self.subscribers() self.merge = self.ui.configbool('notify', 'merge', True) - mapfile = self.ui.config('notify', 'style') + mapfile = None template = (self.ui.config('notify', hooktype) or self.ui.config('notify', 'template')) + if not template: + mapfile = self.ui.config('notify', 'style') if not mapfile and not template: template = deftemplates.get(hooktype) or single_template self.t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
--- a/hgext/pager.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/pager.py Sat Apr 16 18:06:48 2016 -0500 @@ -58,9 +58,21 @@ will also work). ''' +from __future__ import absolute_import -import atexit, sys, os, signal, subprocess -from mercurial import commands, dispatch, util, extensions, cmdutil +import atexit +import os +import signal +import subprocess +import sys + +from mercurial import ( + cmdutil, + commands, + dispatch, + extensions, + util, + ) from mercurial.i18n import _ # Note for extension authors: ONLY specify testedwith = 'internal' for @@ -105,6 +117,11 @@ if '--debugger' in sys.argv or not ui.formatted(): return + # chg has its own pager implementation + argv = sys.argv[:] + if 'chgunix' in dispatch._earlygetopt(['--cmdserver'], argv): + return + def pagecmd(orig, ui, options, cmd, cmdfunc): p = ui.config("pager", "pager", os.environ.get("PAGER")) usepager = False
--- a/hgext/patchbomb.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/patchbomb.py Sat Apr 16 18:06:48 2016 -0500 @@ -63,14 +63,27 @@ You can set patchbomb to always ask for confirmation by setting ``patchbomb.confirm`` to true. ''' +from __future__ import absolute_import -import os, errno, socket, tempfile, cStringIO import email as emailmod +import errno +import os +import socket +import tempfile -from mercurial import cmdutil, commands, hg, mail, patch, util, error -from mercurial import scmutil +from mercurial import ( + cmdutil, + commands, + error, + hg, + mail, + node as nodemod, + patch, + scmutil, + util, +) +stringio = util.stringio from mercurial.i18n import _ -from mercurial.node import bin cmdtable = {} command = cmdutil.command(cmdtable) @@ -167,7 +180,7 @@ msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test'))) p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test')) - binnode = bin(node) + binnode = nodemod.bin(node) # if node is mq patch, it will have the patch file's name as a tag if not patchname: patchtags = [t for t in repo.nodetags(binnode) @@ -215,7 +228,7 @@ if r == prev and (repo[None].files() or repo[None].deleted()): ui.warn(_('warning: working directory has ' 'uncommitted changes\n')) - output = cStringIO.StringIO() + output = stringio() cmdutil.export(repo, [r], fp=output, opts=patch.difffeatureopts(ui, opts, git=True)) yield output.getvalue().split('\n') @@ -557,7 +570,7 @@ else: msg = _('public url %s is missing %s') msg %= (publicurl, missing[0]) - revhint = ''.join('-r %s' % h + revhint = ' '.join('-r %s' % h for h in repo.set('heads(%ld)', missing)) hint = _('use "hg push %s %s"') % (publicurl, revhint) raise error.Abort(msg, hint=hint) @@ -703,11 +716,12 @@ finally: ui.setconfig('smtp', 'verifycert', verifycert, 'patchbomb') ui.status(_('sending '), subj, ' ...\n') - ui.progress(_('sending'), i, item=subj, total=len(msgs)) + ui.progress(_('sending'), i, item=subj, total=len(msgs), + unit=_('emails')) if 
not mbox: # Exim does not remove the Bcc field del m['Bcc'] - fp = cStringIO.StringIO() + fp = stringio() generator = emailmod.Generator.Generator(fp, mangle_from_=False) generator.flatten(m, 0) sendmail(sender_addr, to + bcc + cc, fp.getvalue())
--- a/hgext/purge.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/purge.py Sat Apr 16 18:06:48 2016 -0500 @@ -23,10 +23,18 @@ # along with this program; if not, see <http://www.gnu.org/licenses/>. '''command to delete untracked files from the working directory''' +from __future__ import absolute_import -from mercurial import util, commands, cmdutil, scmutil, error +import os + +from mercurial import ( + cmdutil, + commands, + error, + scmutil, + util, +) from mercurial.i18n import _ -import os cmdtable = {} command = cmdutil.command(cmdtable)
--- a/hgext/rebase.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/rebase.py Sat Apr 16 18:06:48 2016 -0500 @@ -16,7 +16,7 @@ from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks from mercurial import extensions, patch, scmutil, phases, obsolete, error -from mercurial import copies, repoview, revset +from mercurial import copies, destutil, repoview, registrar, revset from mercurial.commands import templateopts from mercurial.node import nullrev, nullid, hex, short from mercurial.lock import release @@ -69,13 +69,14 @@ c(ctx, extra) return extrafn -def _destrebase(repo): - # Destination defaults to the latest revision in the - # current branch - branch = repo[None].branch() - return repo[branch].rev() +def _destrebase(repo, sourceset): + """small wrapper around destmerge to pass the right extra args -revsetpredicate = revset.extpredicate() + Please wrap destutil.destmerge instead.""" + return destutil.destmerge(repo, action='rebase', sourceset=sourceset, + onheadcheck=False) + +revsetpredicate = registrar.revsetpredicate() @revsetpredicate('_destrebase') def _revsetdestrebase(repo, subset, x): @@ -83,12 +84,12 @@ # default destination for rebase. # # XXX: Currently private because I expect the signature to change. - # # XXX: - taking rev as arguments, # # XXX: - bailing out in case of ambiguity vs returning all data. - # # XXX: - probably merging with the merge destination. # i18n: "_rebasedefaultdest" is a keyword - revset.getargs(x, 0, 0, _("_rebasedefaultdest takes no arguments")) - return subset & revset.baseset([_destrebase(repo)]) + sourceset = None + if x is not None: + sourceset = revset.getset(repo, revset.fullreposet(repo), x) + return subset & revset.baseset([_destrebase(repo, sourceset)]) @command('rebase', [('s', 'source', '', @@ -127,10 +128,13 @@ Published commits cannot be rebased (see :hg:`help phases`). To copy commits, see :hg:`help graft`. 
- If you don't specify a destination changeset (``-d/--dest``), - rebase uses the current branch tip as the destination. (The - destination changeset is not modified by rebasing, but new - changesets are added as its descendants.) + If you don't specify a destination changeset (``-d/--dest``), rebase + will use the same logic as :hg:`merge` to pick a destination. if + the current branch contains exactly one other head, the other head + is merged with by default. Otherwise, an explicit revision with + which to merge with must be provided. (destination changeset is not + modified by rebasing, but new changesets are added as its + descendants.) Here are the ways to select changesets: @@ -155,6 +159,11 @@ a named branch with two heads. You will need to explicitly specify source and/or destination. + If you need to use a tool to automate merge/conflict decisions, you + can specify one with ``--tool``, see :hg:`help merge-tools`. + As a caveat: the tool will not be used to mediate when a file was + deleted, there is no hook presently available for this. + If a rebase is interrupted to manually resolve a conflict, it can be continued with --continue/-c or aborted with --abort/-a. 
@@ -258,9 +267,11 @@ try: (originalwd, target, state, skipped, collapsef, keepf, keepbranchesf, external, activebookmark) = restorestatus(repo) + collapsemsg = restorecollapsemsg(repo) except error.RepoLookupError: if abortf: clearstatus(repo) + clearcollapsemsg(repo) repo.ui.warn(_('rebase aborted (no revision is removed,' ' only broken state is cleared)\n')) return 0 @@ -271,79 +282,23 @@ if abortf: return abort(repo, originalwd, target, state, activebookmark=activebookmark) + + obsoletenotrebased = {} + if ui.configbool('experimental', 'rebaseskipobsolete', + default=True): + rebaseobsrevs = set([r for r, status in state.items() + if status == revprecursor]) + rebasesetrevs = set(state.keys()) + obsoletenotrebased = _computeobsoletenotrebased(repo, + rebaseobsrevs, + target) + rebaseobsskipped = set(obsoletenotrebased) + _checkobsrebase(repo, ui, rebaseobsrevs, rebasesetrevs, + rebaseobsskipped) else: - if srcf and basef: - raise error.Abort(_('cannot specify both a ' - 'source and a base')) - if revf and basef: - raise error.Abort(_('cannot specify both a ' - 'revision and a base')) - if revf and srcf: - raise error.Abort(_('cannot specify both a ' - 'revision and a source')) - - cmdutil.checkunfinished(repo) - cmdutil.bailifchanged(repo) - - if destf: - dest = scmutil.revsingle(repo, destf) - else: - dest = repo[_destrebase(repo)] - destf = str(dest) - - if revf: - rebaseset = scmutil.revrange(repo, revf) - if not rebaseset: - ui.status(_('empty "rev" revision set - ' - 'nothing to rebase\n')) - return _nothingtorebase() - elif srcf: - src = scmutil.revrange(repo, [srcf]) - if not src: - ui.status(_('empty "source" revision set - ' - 'nothing to rebase\n')) - return _nothingtorebase() - rebaseset = repo.revs('(%ld)::', src) - assert rebaseset - else: - base = scmutil.revrange(repo, [basef or '.']) - if not base: - ui.status(_('empty "base" revision set - ' - "can't compute rebase set\n")) - return _nothingtorebase() - commonanc = repo.revs('ancestor(%ld, %d)', 
base, dest).first() - if commonanc is not None: - rebaseset = repo.revs('(%d::(%ld) - %d)::', - commonanc, base, commonanc) - else: - rebaseset = [] - - if not rebaseset: - # transform to list because smartsets are not comparable to - # lists. This should be improved to honor laziness of - # smartset. - if list(base) == [dest.rev()]: - if basef: - ui.status(_('nothing to rebase - %s is both "base"' - ' and destination\n') % dest) - else: - ui.status(_('nothing to rebase - working directory ' - 'parent is also destination\n')) - elif not repo.revs('%ld - ::%d', base, dest): - if basef: - ui.status(_('nothing to rebase - "base" %s is ' - 'already an ancestor of destination ' - '%s\n') % - ('+'.join(str(repo[r]) for r in base), - dest)) - else: - ui.status(_('nothing to rebase - working ' - 'directory parent is already an ' - 'ancestor of destination %s\n') % dest) - else: # can it happen? - ui.status(_('nothing to rebase from %s to %s\n') % - ('+'.join(str(repo[r]) for r in base), dest)) - return _nothingtorebase() + dest, rebaseset = _definesets(ui, repo, destf, srcf, basef, revf) + if dest is None: + return _nothingtorebase() allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt) if (not (keepf or allowunstable) @@ -355,35 +310,17 @@ hint=_('use --keep to keep original changesets')) obsoletenotrebased = {} - if ui.configbool('experimental', 'rebaseskipobsolete'): + if ui.configbool('experimental', 'rebaseskipobsolete', + default=True): rebasesetrevs = set(rebaseset) rebaseobsrevs = _filterobsoleterevs(repo, rebasesetrevs) obsoletenotrebased = _computeobsoletenotrebased(repo, rebaseobsrevs, dest) rebaseobsskipped = set(obsoletenotrebased) - - # Obsolete node with successors not in dest leads to divergence - divergenceok = ui.configbool('rebase', - 'allowdivergence') - divergencebasecandidates = rebaseobsrevs - rebaseobsskipped - - if divergencebasecandidates and not divergenceok: - msg = _("this rebase will cause divergence") - h = _("to force the 
rebase please set " - "rebase.allowdivergence=True") - raise error.Abort(msg, hint=h) - - # - plain prune (no successor) changesets are rebased - # - split changesets are not rebased if at least one of the - # changeset resulting from the split is an ancestor of dest - rebaseset = rebasesetrevs - rebaseobsskipped - if rebasesetrevs and not rebaseset: - msg = _('all requested changesets have equivalents ' - 'or were marked as obsolete') - hint = _('to force the rebase, set the config ' - 'experimental.rebaseskipobsolete to False') - raise error.Abort(msg, hint=hint) + _checkobsrebase(repo, ui, rebaseobsrevs, + rebasesetrevs, + rebaseobsskipped) result = buildstate(repo, dest, rebaseset, collapsef, obsoletenotrebased) @@ -452,6 +389,7 @@ targetancestors) storestatus(repo, originalwd, target, state, collapsef, keepf, keepbranchesf, external, activebookmark) + storecollapsemsg(repo, collapsemsg) if len(repo[None].parents()) == 2: repo.ui.debug('resuming interrupted rebase\n') else: @@ -573,6 +511,7 @@ # active bookmark was divergent one and has been deleted activebookmark = None clearstatus(repo) + clearcollapsemsg(repo) ui.note(_("rebase completed\n")) util.unlinkpath(repo.sjoin('undo'), ignoremissing=True) @@ -586,6 +525,84 @@ finally: release(lock, wlock) +def _definesets(ui, repo, destf=None, srcf=None, basef=None, revf=[]): + """use revisions argument to define destination and rebase set + """ + if srcf and basef: + raise error.Abort(_('cannot specify both a source and a base')) + if revf and basef: + raise error.Abort(_('cannot specify both a revision and a base')) + if revf and srcf: + raise error.Abort(_('cannot specify both a revision and a source')) + + cmdutil.checkunfinished(repo) + cmdutil.bailifchanged(repo) + + if destf: + dest = scmutil.revsingle(repo, destf) + + if revf: + rebaseset = scmutil.revrange(repo, revf) + if not rebaseset: + ui.status(_('empty "rev" revision set - nothing to rebase\n')) + return None, None + elif srcf: + src = 
scmutil.revrange(repo, [srcf]) + if not src: + ui.status(_('empty "source" revision set - nothing to rebase\n')) + return None, None + rebaseset = repo.revs('(%ld)::', src) + assert rebaseset + else: + base = scmutil.revrange(repo, [basef or '.']) + if not base: + ui.status(_('empty "base" revision set - ' + "can't compute rebase set\n")) + return None, None + if not destf: + dest = repo[_destrebase(repo, base)] + destf = str(dest) + + commonanc = repo.revs('ancestor(%ld, %d)', base, dest).first() + if commonanc is not None: + rebaseset = repo.revs('(%d::(%ld) - %d)::', + commonanc, base, commonanc) + else: + rebaseset = [] + + if not rebaseset: + # transform to list because smartsets are not comparable to + # lists. This should be improved to honor laziness of + # smartset. + if list(base) == [dest.rev()]: + if basef: + ui.status(_('nothing to rebase - %s is both "base"' + ' and destination\n') % dest) + else: + ui.status(_('nothing to rebase - working directory ' + 'parent is also destination\n')) + elif not repo.revs('%ld - ::%d', base, dest): + if basef: + ui.status(_('nothing to rebase - "base" %s is ' + 'already an ancestor of destination ' + '%s\n') % + ('+'.join(str(repo[r]) for r in base), + dest)) + else: + ui.status(_('nothing to rebase - working ' + 'directory parent is already an ' + 'ancestor of destination %s\n') % dest) + else: # can it happen? + ui.status(_('nothing to rebase from %s to %s\n') % + ('+'.join(str(repo[r]) for r in base), dest)) + return None, None + + if not destf: + dest = repo[_destrebase(repo, rebaseset)] + destf = str(dest) + + return dest, rebaseset + def externalparent(repo, state, targetancestors): """Return the revision that should be used as the second parent when the revisions in state is collapsed on top of targetancestors. 
@@ -683,6 +700,43 @@ else: return None +def _checkobsrebase(repo, ui, + rebaseobsrevs, + rebasesetrevs, + rebaseobsskipped): + """ + Abort if rebase will create divergence or rebase is noop because of markers + + `rebaseobsrevs`: set of obsolete revision in source + `rebasesetrevs`: set of revisions to be rebased from source + `rebaseobsskipped`: set of revisions from source skipped because they have + successors in destination + """ + # Obsolete node with successors not in dest leads to divergence + divergenceok = ui.configbool('experimental', + 'allowdivergence') + divergencebasecandidates = rebaseobsrevs - rebaseobsskipped + + if divergencebasecandidates and not divergenceok: + divhashes = (str(repo[r]) + for r in divergencebasecandidates) + msg = _("this rebase will cause " + "divergences from: %s") + h = _("to force the rebase please set " + "experimental.allowdivergence=True") + raise error.Abort(msg % (",".join(divhashes),), hint=h) + + # - plain prune (no successor) changesets are rebased + # - split changesets are not rebased if at least one of the + # changeset resulting from the split is an ancestor of dest + rebaseset = rebasesetrevs - rebaseobsskipped + if rebasesetrevs and not rebaseset: + msg = _('all requested changesets have equivalents ' + 'or were marked as obsolete') + hint = _('to force the rebase, set the config ' + 'experimental.rebaseskipobsolete to False') + raise error.Abort(msg, hint=hint) + def defineparents(repo, rev, target, state, targetancestors): 'Return the new parent relationship of the revision that will be rebased' parents = repo[rev].parents() @@ -838,6 +892,29 @@ bookmarks.deletedivergent(repo, [targetnode], k) marks.recordchange(tr) +def storecollapsemsg(repo, collapsemsg): + 'Store the collapse message to allow recovery' + collapsemsg = collapsemsg or '' + f = repo.vfs("last-message.txt", "w") + f.write("%s\n" % collapsemsg) + f.close() + +def clearcollapsemsg(repo): + 'Remove collapse message file' + 
util.unlinkpath(repo.join("last-message.txt"), ignoremissing=True) + +def restorecollapsemsg(repo): + 'Restore previously stored collapse message' + try: + f = repo.vfs("last-message.txt") + collapsemsg = f.readline().strip() + f.close() + except IOError as err: + if err.errno != errno.ENOENT: + raise + raise error.Abort(_('no rebase in progress')) + return collapsemsg + def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches, external, activebookmark): 'Store the current status to allow recovery' @@ -910,7 +987,7 @@ except IOError as err: if err.errno != errno.ENOENT: raise - raise error.Abort(_('no rebase in progress')) + cmdutil.wrongtooltocontinue(repo, _('rebase')) if keepbranches is None: raise error.Abort(_('.hg/rebasestate is incomplete')) @@ -997,6 +1074,7 @@ finally: clearstatus(repo) + clearcollapsemsg(repo) repo.ui.warn(_('rebase aborted\n')) return 0 @@ -1140,7 +1218,6 @@ ui.debug('--update and --rebase are not compatible, ignoring ' 'the update flag\n') - movemarkfrom = repo['.'].node() revsprepull = len(repo) origpostincoming = commands.postincoming def _dummy(*args, **kwargs): @@ -1160,15 +1237,18 @@ # --source. 
if 'source' in opts: del opts['source'] - rebase(ui, repo, **opts) - branch = repo[None].branch() - dest = repo[branch].rev() - if dest != repo['.'].rev(): - # there was nothing to rebase we force an update - hg.update(repo, dest) - if bookmarks.update(repo, [movemarkfrom], repo['.'].node()): - ui.status(_("updating bookmark %s\n") - % repo._activebookmark) + try: + rebase(ui, repo, **opts) + except error.NoMergeDestAbort: + # we can maybe update instead + rev, _a, _b = destutil.destupdate(repo) + if rev == repo['.'].rev(): + ui.status(_('nothing to rebase\n')) + else: + ui.status(_('nothing to rebase - updating instead\n')) + # not passing argument to get the bare update behavior + # with warning and trumpets + commands.update(ui, repo) finally: release(lock, wlock) else: @@ -1274,4 +1354,3 @@ ['rebasestate', _('hg rebase --continue')]) # ensure rebased rev are not hidden extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible) - revsetpredicate.setup()
--- a/hgext/record.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/record.py Sat Apr 16 18:06:48 2016 -0500 @@ -5,11 +5,20 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -'''commands to interactively select changes for commit/qrefresh''' +'''commands to interactively select changes for commit/qrefresh (DEPRECATED) + +The feature provided by this extension has been moved into core Mercurial as +:hg:`commit --interactive`.''' + +from __future__ import absolute_import +from mercurial import ( + cmdutil, + commands, + error, + extensions, +) from mercurial.i18n import _ -from mercurial import cmdutil, commands, extensions -from mercurial import error cmdtable = {} command = cmdutil.command(cmdtable)
--- a/hgext/relink.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/relink.py Sat Apr 16 18:06:48 2016 -0500 @@ -6,10 +6,18 @@ # GNU General Public License version 2 or any later version. """recreates hardlinks between repository clones""" +from __future__ import absolute_import -from mercurial import cmdutil, hg, util, error +import os +import stat + +from mercurial import ( + cmdutil, + error, + hg, + util, +) from mercurial.i18n import _ -import os, stat cmdtable = {} command = cmdutil.command(cmdtable)
--- a/hgext/schemes.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/schemes.py Sat Apr 16 18:06:48 2016 -0500 @@ -39,11 +39,22 @@ You can override a predefined scheme by defining a new scheme with the same name. """ +from __future__ import absolute_import -import os, re -from mercurial import extensions, hg, templater, util, error +import os +import re +from mercurial import ( + cmdutil, + error, + extensions, + hg, + templater, + util, +) from mercurial.i18n import _ +cmdtable = {} +command = cmdutil.command(cmdtable) # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should # be specifying the version(s) of Mercurial they are tested with, or @@ -65,6 +76,10 @@ return '<ShortRepository: %s>' % self.scheme def instance(self, ui, url, create): + url = self.resolve(url) + return hg._peerlookup(url).instance(ui, url, create) + + def resolve(self, url): # Should this use the util.url class, or is manual parsing better? try: url = url.split('://', 1)[1] @@ -77,8 +92,7 @@ else: tail = '' context = dict((str(i + 1), v) for i, v in enumerate(parts)) - url = ''.join(self.templater.process(self.url, context)) + tail - return hg._peerlookup(url).instance(ui, url, create) + return ''.join(self.templater.process(self.url, context)) + tail def hasdriveletter(orig, path): if path: @@ -106,3 +120,12 @@ hg.schemes[scheme] = ShortRepository(url, scheme, t) extensions.wrapfunction(util, 'hasdriveletter', hasdriveletter) + +@command('debugexpandscheme', norepo=True) +def expandscheme(ui, url, **opts): + """given a repo path, provide the scheme-expanded path + """ + repo = hg._peerlookup(url) + if isinstance(repo, ShortRepository): + url = repo.resolve(url) + ui.write(url + '\n')
--- a/hgext/shelve.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/shelve.py Sat Apr 16 18:06:48 2016 -0500 @@ -20,17 +20,36 @@ shelved change has a distinct name. For details, see the help for "hg shelve". """ +from __future__ import absolute_import import collections +import errno import itertools +from mercurial import ( + bundle2, + bundlerepo, + changegroup, + cmdutil, + commands, + error, + exchange, + hg, + lock as lockmod, + mdiff, + merge, + node as nodemod, + patch, + phases, + repair, + scmutil, + templatefilters, + util, +) from mercurial.i18n import _ -from mercurial.node import nullid, nullrev, bin, hex -from mercurial import changegroup, cmdutil, scmutil, phases, commands -from mercurial import error, hg, mdiff, merge, patch, repair, util -from mercurial import templatefilters, exchange, bundlerepo, bundle2 -from mercurial import lock as lockmod -from hgext import rebase -import errno + +from . import ( + rebase, +) cmdtable = {} command = cmdutil.command(cmdtable) @@ -41,6 +60,7 @@ testedwith = 'internal' backupdir = 'shelve-backup' +shelvedir = 'shelved' class shelvedfile(object): """Helper for the file storing a single shelve @@ -50,7 +70,7 @@ def __init__(self, repo, name, filetype=None): self.repo = repo self.name = name - self.vfs = scmutil.vfs(repo.join('shelved')) + self.vfs = scmutil.vfs(repo.join(shelvedir)) self.backupvfs = scmutil.vfs(repo.join(backupdir)) self.ui = self.repo.ui if filetype: @@ -122,7 +142,7 @@ cg = changegroup.changegroupsubset(self.repo, bases, [node], 'shelve', version=cgversion) - changegroup.writebundle(self.ui, cg, self.fname, btype, self.vfs, + bundle2.writebundle(self.ui, cg, self.fname, btype, self.vfs, compression=compression) class shelvedstate(object): @@ -146,29 +166,35 @@ name = fp.readline().strip() wctx = fp.readline().strip() pendingctx = fp.readline().strip() - parents = [bin(h) for h in fp.readline().split()] - stripnodes = [bin(h) for h in fp.readline().split()] + parents = [nodemod.bin(h) for h in 
fp.readline().split()] + stripnodes = [nodemod.bin(h) for h in fp.readline().split()] + branchtorestore = fp.readline().strip() finally: fp.close() obj = cls() obj.name = name - obj.wctx = repo[bin(wctx)] - obj.pendingctx = repo[bin(pendingctx)] + obj.wctx = repo[nodemod.bin(wctx)] + obj.pendingctx = repo[nodemod.bin(pendingctx)] obj.parents = parents obj.stripnodes = stripnodes + obj.branchtorestore = branchtorestore return obj @classmethod - def save(cls, repo, name, originalwctx, pendingctx, stripnodes): + def save(cls, repo, name, originalwctx, pendingctx, stripnodes, + branchtorestore): fp = repo.vfs(cls._filename, 'wb') fp.write('%i\n' % cls._version) fp.write('%s\n' % name) - fp.write('%s\n' % hex(originalwctx.node())) - fp.write('%s\n' % hex(pendingctx.node())) - fp.write('%s\n' % ' '.join([hex(p) for p in repo.dirstate.parents()])) - fp.write('%s\n' % ' '.join([hex(n) for n in stripnodes])) + fp.write('%s\n' % nodemod.hex(originalwctx.node())) + fp.write('%s\n' % nodemod.hex(pendingctx.node())) + fp.write('%s\n' % + ' '.join([nodemod.hex(p) for p in repo.dirstate.parents()])) + fp.write('%s\n' % + ' '.join([nodemod.hex(n) for n in stripnodes])) + fp.write('%s\n' % branchtorestore) fp.close() @classmethod @@ -233,7 +259,7 @@ """return all mutable ancestors for ctx (included) Much faster than the revset ancestors(ctx) & draft()""" - seen = set([nullrev]) + seen = set([nodemod.nullrev]) visit = collections.deque() visit.append(ctx) while visit: @@ -251,6 +277,7 @@ if len(parents) > 1: raise error.Abort(_('cannot shelve while merging')) parent = parents[0] + origbranch = wctx.branch() # we never need the user, so we use a generic user for all shelve operations user = 'shelve@localhost' @@ -264,15 +291,15 @@ for i in xrange(1, 100): yield '%s-%02d' % (label, i) - if parent.node() != nullid: + if parent.node() != nodemod.nullid: desc = "changes to: %s" % parent.description().split('\n', 1)[0] else: desc = '(changes in empty repository)' - if not opts['message']: 
+ if not opts.get('message'): opts['message'] = desc - name = opts['name'] + name = opts.get('name') lock = tr = None try: @@ -312,6 +339,11 @@ extra['shelve_unknown'] = '\0'.join(s.unknown) repo[None].add(s.unknown) + if _iswctxonnewbranch(repo) and not _isbareshelve(pats, opts): + # In non-bare shelve we don't store newly created branch + # at bundled commit + repo.dirstate.setbranch(repo['.'].branch()) + def commitfunc(ui, repo, message, match, opts): hasmq = util.safehasattr(repo, 'mq') if hasmq: @@ -357,16 +389,27 @@ desc = util.ellipsis(desc, ui.termwidth()) ui.status(_('shelved as %s\n') % name) hg.update(repo, parent.node()) + if origbranch != repo['.'].branch() and not _isbareshelve(pats, opts): + repo.dirstate.setbranch(origbranch) _aborttransaction(repo) finally: lockmod.release(tr, lock) +def _isbareshelve(pats, opts): + return (not pats + and not opts.get('interactive', False) + and not opts.get('include', False) + and not opts.get('exclude', False)) + +def _iswctxonnewbranch(repo): + return repo[None].branch() != repo['.'].branch() + def cleanupcmd(ui, repo): """subcommand that deletes all shelves""" with repo.wlock(): - for (name, _type) in repo.vfs.readdir('shelved'): + for (name, _type) in repo.vfs.readdir(shelvedir): suffix = name.rsplit('.', 1)[-1] if suffix in ('hg', 'patch'): shelvedfile(repo, name).movetobackup() @@ -390,7 +433,7 @@ def listshelves(repo): """return all shelves in repo as list of (time, filename)""" try: - names = repo.vfs.readdir('shelved') + names = repo.vfs.readdir(shelvedir) except OSError as err: if err.errno != errno.ENOENT: raise @@ -517,9 +560,15 @@ finally: ui.quiet = oldquiet +def restorebranch(ui, repo, branchtorestore): + if branchtorestore and branchtorestore != repo.dirstate.branch(): + repo.dirstate.setbranch(branchtorestore) + ui.status(_('marked working directory as branch %s\n') + % branchtorestore) + def unshelvecleanup(ui, repo, name, opts): """remove related files after an unshelve""" - if not opts['keep']: 
+ if not opts.get('keep'): for filetype in 'hg patch'.split(): shelvedfile(repo, name, filetype).movetobackup() cleanupoldbackups(repo) @@ -556,6 +605,7 @@ state.stripnodes.append(shelvectx.node()) mergefiles(ui, repo, state.wctx, shelvectx) + restorebranch(ui, repo, state.branchtorestore) repair.strip(ui, repo, state.stripnodes, backup=False, topic='shelve') shelvedstate.clear(repo) @@ -594,6 +644,10 @@ that causes a conflict. This reverts the unshelved changes, and leaves the bundle in place.) + If bare shelved change(when no files are specified, without interactive, + include and exclude option) was done on newly created branch it would + restore branch information to the working directory. + After a successful unshelve, the shelved changes are stored in a backup directory. Only the N most recent backups are kept. N defaults to 10 but can be overridden using the ``shelve.maxbackups`` @@ -609,8 +663,8 @@ return _dounshelve(ui, repo, *shelved, **opts) def _dounshelve(ui, repo, *shelved, **opts): - abortf = opts['abort'] - continuef = opts['continue'] + abortf = opts.get('abort') + continuef = opts.get('continue') if not abortf and not continuef: cmdutil.checkunfinished(repo) @@ -628,7 +682,7 @@ except IOError as err: if err.errno != errno.ENOENT: raise - raise error.Abort(_('no unshelve operation underway')) + cmdutil.wrongtooltocontinue(repo, _('unshelve')) if abortf: return unshelveabort(ui, repo, state, opts) @@ -702,6 +756,10 @@ shelvectx = repo['tip'] + branchtorestore = '' + if shelvectx.branch() != shelvectx.p1().branch(): + branchtorestore = shelvectx.branch() + # If the shelve is not immediately on top of the commit # we'll be merging with, rebase it to be on top. 
if tmpwctx.node() != shelvectx.parents()[0].node(): @@ -718,7 +776,8 @@ stripnodes = [repo.changelog.node(rev) for rev in xrange(oldtiprev, len(repo))] - shelvedstate.save(repo, basename, pctx, tmpwctx, stripnodes) + shelvedstate.save(repo, basename, pctx, tmpwctx, stripnodes, + branchtorestore) util.rename(repo.join('rebasestate'), repo.join('unshelverebasestate')) @@ -734,6 +793,7 @@ shelvectx = tmpwctx mergefiles(ui, repo, pctx, shelvectx) + restorebranch(ui, repo, branchtorestore) # Forget any files that were unknown before the shelve, unknown before # unshelve started, but are now added. @@ -803,6 +863,12 @@ files. If specific files or directories are named, only changes to those files are shelved. + In bare shelve(when no files are specified, without interactive, + include and exclude option), shelving remembers information if the + working directory was on newly created branch, in other words working + directory was on different branch than its first parent. In this + situation unshelving restores branch information to the working directory. + Each shelved change has a name that makes it easier to find later. The name of a shelved change defaults to being based on the active bookmark, or if there is no active bookmark, the current named @@ -829,7 +895,7 @@ ('stat', set(['stat', 'list'])), ] def checkopt(opt): - if opts[opt]: + if opts.get(opt): for i, allowable in allowables: if opts[i] and opt not in allowable: raise error.Abort(_("options '--%s' and '--%s' may not be "
--- a/hgext/strip.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/strip.py Sat Apr 16 18:06:48 2016 -0500 @@ -3,11 +3,23 @@ This extension allows you to strip changesets and all their descendants from the repository. See the command help for details. """ +from __future__ import absolute_import + +from mercurial import ( + bookmarks as bookmarksmod, + cmdutil, + error, + hg, + lock as lockmod, + merge, + node as nodemod, + repair, + scmutil, + util, +) from mercurial.i18n import _ -from mercurial.node import nullid -from mercurial.lock import release -from mercurial import cmdutil, hg, scmutil, util, error -from mercurial import repair, bookmarks as bookmarksmod , merge +nullid = nodemod.nullid +release = lockmod.release cmdtable = {} command = cmdutil.command(cmdtable)
--- a/hgext/transplant.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/transplant.py Sat Apr 16 18:06:48 2016 -0500 @@ -13,13 +13,27 @@ Transplanted patches are recorded in .hg/transplant/transplants, as a map from a changeset hash to its hash in the source repository. ''' +from __future__ import absolute_import +import os +import tempfile from mercurial.i18n import _ -import os, tempfile -from mercurial.node import short -from mercurial import bundlerepo, hg, merge, match -from mercurial import patch, revlog, scmutil, util, error, cmdutil -from mercurial import revset, templatekw, exchange +from mercurial import ( + bundlerepo, + cmdutil, + error, + exchange, + hg, + match, + merge, + node as nodemod, + patch, + registrar, + revlog, + revset, + scmutil, + util, +) class TransplantError(error.Abort): pass @@ -64,7 +78,7 @@ fp = self.opener(self.transplantfile, 'w') for list in self.transplants.itervalues(): for t in list: - l, r = map(revlog.hex, (t.lnode, t.rnode)) + l, r = map(nodemod.hex, (t.lnode, t.rnode)) fp.write(l + ':' + r + '\n') fp.close() self.dirty = False @@ -133,7 +147,7 @@ tr = repo.transaction('transplant') for rev in revs: node = revmap[rev] - revstr = '%s:%s' % (rev, short(node)) + revstr = '%s:%s' % (rev, nodemod.short(node)) if self.applied(repo, node, p1): self.ui.warn(_('skipping already applied revision %s\n') % @@ -168,13 +182,14 @@ if parents[1] != revlog.nullid: if not opts.get('parent'): self.ui.note(_('skipping merge changeset %s:%s\n') - % (rev, short(node))) + % (rev, nodemod.short(node))) skipmerge = True else: parent = source.lookup(opts['parent']) if parent not in parents: raise error.Abort(_('%s is not a parent of %s') % - (short(parent), short(node))) + (nodemod.short(parent), + nodemod.short(node))) else: parent = parents[0] @@ -204,11 +219,11 @@ raise if n and domerge: self.ui.status(_('%s merged at %s\n') % (revstr, - short(n))) + nodemod.short(n))) elif n: self.ui.status(_('%s transplanted to %s\n') - % (short(node), - 
short(n))) + % (nodemod.short(node), + nodemod.short(n))) finally: if patchfile: os.unlink(patchfile) @@ -241,7 +256,7 @@ self.ui.system('%s %s %s' % (filter, util.shellquote(headerfile), util.shellquote(patchfile)), environ={'HGUSER': changelog[1], - 'HGREVISION': revlog.hex(node), + 'HGREVISION': nodemod.hex(node), }, onerr=error.Abort, errprefix=_('filter failed')) user, date, msg = self.parselog(file(headerfile))[1:4] @@ -261,9 +276,9 @@ if log: # we don't translate messages inserted into commits - message += '\n(transplanted from %s)' % revlog.hex(node) + message += '\n(transplanted from %s)' % nodemod.hex(node) - self.ui.status(_('applying %s\n') % short(node)) + self.ui.status(_('applying %s\n') % nodemod.short(node)) self.ui.note('%s %s\n%s\n' % (user, date, message)) if not patchfile and not merge: @@ -295,7 +310,8 @@ n = repo.commit(message, user, date, extra=extra, match=m, editor=self.getcommiteditor()) if not n: - self.ui.warn(_('skipping emptied changeset %s\n') % short(node)) + self.ui.warn(_('skipping emptied changeset %s\n') % + nodemod.short(node)) return None if not merge: self.transplants.set(n, node) @@ -310,11 +326,12 @@ if os.path.exists(os.path.join(self.path, 'journal')): n, node = self.recover(repo, source, opts) if n: - self.ui.status(_('%s transplanted as %s\n') % (short(node), - short(n))) + self.ui.status(_('%s transplanted as %s\n') % + (nodemod.short(node), + nodemod.short(n))) else: self.ui.status(_('%s skipped due to empty diff\n') - % (short(node),)) + % (nodemod.short(node),)) seriespath = os.path.join(self.path, 'series') if not os.path.exists(seriespath): self.transplants.write() @@ -341,7 +358,8 @@ parent = source.lookup(opts['parent']) if parent not in parents: raise error.Abort(_('%s is not a parent of %s') % - (short(parent), short(node))) + (nodemod.short(parent), + nodemod.short(node))) else: merge = True @@ -350,7 +368,7 @@ p1, p2 = repo.dirstate.parents() if p1 != parent: raise error.Abort(_('working directory not at 
transplant ' - 'parent %s') % revlog.hex(parent)) + 'parent %s') % nodemod.hex(parent)) if merge: repo.setparents(p1, parents[1]) modified, added, removed, deleted = repo.status()[:4] @@ -391,11 +409,11 @@ os.mkdir(self.path) series = self.opener('series', 'w') for rev in sorted(revmap): - series.write(revlog.hex(revmap[rev]) + '\n') + series.write(nodemod.hex(revmap[rev]) + '\n') if merges: series.write('# Merges\n') for m in merges: - series.write(revlog.hex(m) + '\n') + series.write(nodemod.hex(m) + '\n') series.close() def parselog(self, fp): @@ -431,10 +449,10 @@ fp = self.opener('journal', 'w') fp.write('# User %s\n' % user) fp.write('# Date %s\n' % date) - fp.write('# Node ID %s\n' % revlog.hex(p2)) - fp.write('# Parent ' + revlog.hex(p1) + '\n') + fp.write('# Node ID %s\n' % nodemod.hex(p2)) + fp.write('# Parent ' + nodemod.hex(p1) + '\n') if merge: - fp.write('# Parent ' + revlog.hex(p2) + '\n') + fp.write('# Parent ' + nodemod.hex(p2) + '\n') fp.write(message.rstrip() + '\n') fp.close() @@ -694,7 +712,7 @@ if cleanupfn: cleanupfn() -revsetpredicate = revset.extpredicate() +revsetpredicate = registrar.revsetpredicate() @revsetpredicate('transplanted([set])') def revsettransplanted(repo, subset, x): @@ -707,15 +725,16 @@ return revset.baseset([r for r in s if repo[r].extra().get('transplant_source')]) +templatekeyword = registrar.templatekeyword() + +@templatekeyword('transplanted') def kwtransplanted(repo, ctx, **args): - """:transplanted: String. The node identifier of the transplanted + """String. The node identifier of the transplanted changeset if any.""" n = ctx.extra().get('transplant_source') - return n and revlog.hex(n) or '' + return n and nodemod.hex(n) or '' def extsetup(ui): - revsetpredicate.setup() - templatekw.keywords['transplanted'] = kwtransplanted cmdutil.unfinishedstates.append( ['transplant/journal', True, False, _('transplant in progress'), _("use 'hg transplant --continue' or 'hg update' to abort")])
--- a/hgext/win32mbcs.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/win32mbcs.py Sat Apr 16 18:06:48 2016 -0500 @@ -44,10 +44,17 @@ It is useful for the users who want to commit with UTF-8 log message. ''' +from __future__ import absolute_import -import os, sys +import os +import sys + +from mercurial import ( + encoding, + error, +) from mercurial.i18n import _ -from mercurial import error, encoding + # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should # be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/zeroconf/Zeroconf.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/zeroconf/Zeroconf.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,3 +1,5 @@ +from __future__ import absolute_import, print_function + """ Multicast DNS Service Discovery for Python, v0.12 Copyright (C) 2003, Paul Scott-Murphy @@ -23,29 +25,29 @@ """ """0.12 update - allow selection of binding interface - typo fix - Thanks A. M. Kuchlingi - removed all use of word 'Rendezvous' - this is an API change""" + typo fix - Thanks A. M. Kuchlingi + removed all use of word 'Rendezvous' - this is an API change""" """0.11 update - correction to comments for addListener method support for new record types seen from OS X - - IPv6 address - - hostinfo - ignore unknown DNS record types - fixes to name decoding - works alongside other processes using port 5353 (e.g. on Mac OS X) - tested against Mac OS X 10.3.2's mDNSResponder - corrections to removal of list entries for service browser""" + - IPv6 address + - hostinfo + ignore unknown DNS record types + fixes to name decoding + works alongside other processes using port 5353 (e.g. 
Mac OS X) + tested against Mac OS X 10.3.2's mDNSResponder + corrections to removal of list entries for service browser""" """0.10 update - Jonathon Paisley contributed these corrections: always multicast replies, even when query is unicast - correct a pointer encoding problem - can now write records in any order - traceback shown on failure - better TXT record parsing - server is now separate from name - can cancel a service browser + correct a pointer encoding problem + can now write records in any order + traceback shown on failure + better TXT record parsing + server is now separate from name + can cancel a service browser - modified some unit tests to accommodate these changes""" + modified some unit tests to accommodate these changes""" """0.09 update - remove all records on service unregistration fix DOS security problem with readName""" @@ -54,36 +56,37 @@ """0.07 update - faster shutdown on engine pointer encoding of outgoing names - ServiceBrowser now works - new unit tests""" + ServiceBrowser now works + new unit tests""" """0.06 update - small improvements with unit tests added defined exception types - new style objects - fixed hostname/interface problem - fixed socket timeout problem - fixed addServiceListener() typo bug - using select() for socket reads - tested on Debian unstable with Python 2.2.2""" + new style objects + fixed hostname/interface problem + fixed socket timeout problem + fixed addServiceListener() typo bug + using select() for socket reads + tested on Debian unstable with Python 2.2.2""" """0.05 update - ensure case insensitivity on domain names support for unicast DNS queries""" """0.04 update - added some unit tests added __ne__ adjuncts where required - ensure names end in '.local.' - timeout on receiving socket for clean shutdown""" + ensure names end in '.local.' 
+ timeout on receiving socket for clean shutdown""" __author__ = "Paul Scott-Murphy" __email__ = "paul at scott dash murphy dot com" __version__ = "0.12" +import itertools +import select +import socket import string -import time import struct -import socket import threading -import select +import time import traceback __all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"] @@ -103,9 +106,9 @@ # Some DNS constants _MDNS_ADDR = '224.0.0.251' -_MDNS_PORT = 5353; -_DNS_PORT = 53; -_DNS_TTL = 60 * 60; # one hour default TTL +_MDNS_PORT = 5353 +_DNS_PORT = 53 +_DNS_TTL = 60 * 60 # one hour default TTL _MAX_MSG_TYPICAL = 1460 # unused _MAX_MSG_ABSOLUTE = 8972 @@ -155,1426 +158,1523 @@ # Mapping constants to names _CLASSES = { _CLASS_IN : "in", - _CLASS_CS : "cs", - _CLASS_CH : "ch", - _CLASS_HS : "hs", - _CLASS_NONE : "none", - _CLASS_ANY : "any" } + _CLASS_CS : "cs", + _CLASS_CH : "ch", + _CLASS_HS : "hs", + _CLASS_NONE : "none", + _CLASS_ANY : "any" } _TYPES = { _TYPE_A : "a", - _TYPE_NS : "ns", - _TYPE_MD : "md", - _TYPE_MF : "mf", - _TYPE_CNAME : "cname", - _TYPE_SOA : "soa", - _TYPE_MB : "mb", - _TYPE_MG : "mg", - _TYPE_MR : "mr", - _TYPE_NULL : "null", - _TYPE_WKS : "wks", - _TYPE_PTR : "ptr", - _TYPE_HINFO : "hinfo", - _TYPE_MINFO : "minfo", - _TYPE_MX : "mx", - _TYPE_TXT : "txt", - _TYPE_AAAA : "quada", - _TYPE_SRV : "srv", - _TYPE_ANY : "any" } + _TYPE_NS : "ns", + _TYPE_MD : "md", + _TYPE_MF : "mf", + _TYPE_CNAME : "cname", + _TYPE_SOA : "soa", + _TYPE_MB : "mb", + _TYPE_MG : "mg", + _TYPE_MR : "mr", + _TYPE_NULL : "null", + _TYPE_WKS : "wks", + _TYPE_PTR : "ptr", + _TYPE_HINFO : "hinfo", + _TYPE_MINFO : "minfo", + _TYPE_MX : "mx", + _TYPE_TXT : "txt", + _TYPE_AAAA : "quada", + _TYPE_SRV : "srv", + _TYPE_ANY : "any" } # utility functions def currentTimeMillis(): - """Current system time in milliseconds""" - return time.time() * 1000 + """Current system time in milliseconds""" + return time.time() * 1000 # Exceptions class NonLocalNameException(Exception): - pass 
+ pass class NonUniqueNameException(Exception): - pass + pass class NamePartTooLongException(Exception): - pass + pass class AbstractMethodException(Exception): - pass + pass class BadTypeInNameException(Exception): - pass + pass class BadDomainName(Exception): - def __init__(self, pos): - Exception.__init__(self, "at position %s" % pos) + def __init__(self, pos): + Exception.__init__(self, "at position %s" % pos) class BadDomainNameCircular(BadDomainName): - pass + pass # implementation classes class DNSEntry(object): - """A DNS entry""" + """A DNS entry""" - def __init__(self, name, type, clazz): - self.key = string.lower(name) - self.name = name - self.type = type - self.clazz = clazz & _CLASS_MASK - self.unique = (clazz & _CLASS_UNIQUE) != 0 + def __init__(self, name, type, clazz): + self.key = string.lower(name) + self.name = name + self.type = type + self.clazz = clazz & _CLASS_MASK + self.unique = (clazz & _CLASS_UNIQUE) != 0 - def __eq__(self, other): - """Equality test on name, type, and class""" - if isinstance(other, DNSEntry): - return self.name == other.name and self.type == other.type and self.clazz == other.clazz - return 0 + def __eq__(self, other): + """Equality test on name, type, and class""" + if isinstance(other, DNSEntry): + return (self.name == other.name and self.type == other.type and + self.clazz == other.clazz) + return 0 - def __ne__(self, other): - """Non-equality test""" - return not self.__eq__(other) + def __ne__(self, other): + """Non-equality test""" + return not self.__eq__(other) - def getClazz(self, clazz): - """Class accessor""" - try: - return _CLASSES[clazz] - except KeyError: - return "?(%s)" % (clazz) + def getClazz(self, clazz): + """Class accessor""" + try: + return _CLASSES[clazz] + except KeyError: + return "?(%s)" % (clazz) - def getType(self, type): - """Type accessor""" - try: - return _TYPES[type] - except KeyError: - return "?(%s)" % (type) + def getType(self, type): + """Type accessor""" + try: + return 
_TYPES[type] + except KeyError: + return "?(%s)" % (type) - def toString(self, hdr, other): - """String representation with additional information""" - result = "%s[%s,%s" % (hdr, self.getType(self.type), self.getClazz(self.clazz)) - if self.unique: - result += "-unique," - else: - result += "," - result += self.name - if other is not None: - result += ",%s]" % (other) - else: - result += "]" - return result + def toString(self, hdr, other): + """String representation with additional information""" + result = ("%s[%s,%s" % + (hdr, self.getType(self.type), self.getClazz(self.clazz))) + if self.unique: + result += "-unique," + else: + result += "," + result += self.name + if other is not None: + result += ",%s]" % (other) + else: + result += "]" + return result class DNSQuestion(DNSEntry): - """A DNS question entry""" + """A DNS question entry""" - def __init__(self, name, type, clazz): - if not name.endswith(".local."): - raise NonLocalNameException(name) - DNSEntry.__init__(self, name, type, clazz) + def __init__(self, name, type, clazz): + if not name.endswith(".local."): + raise NonLocalNameException(name) + DNSEntry.__init__(self, name, type, clazz) - def answeredBy(self, rec): - """Returns true if the question is answered by the record""" - return self.clazz == rec.clazz and (self.type == rec.type or self.type == _TYPE_ANY) and self.name == rec.name + def answeredBy(self, rec): + """Returns true if the question is answered by the record""" + return (self.clazz == rec.clazz and + (self.type == rec.type or self.type == _TYPE_ANY) and + self.name == rec.name) - def __repr__(self): - """String representation""" - return DNSEntry.toString(self, "question", None) + def __repr__(self): + """String representation""" + return DNSEntry.toString(self, "question", None) class DNSRecord(DNSEntry): - """A DNS record - like a DNS entry, but has a TTL""" + """A DNS record - like a DNS entry, but has a TTL""" - def __init__(self, name, type, clazz, ttl): - 
DNSEntry.__init__(self, name, type, clazz) - self.ttl = ttl - self.created = currentTimeMillis() + def __init__(self, name, type, clazz, ttl): + DNSEntry.__init__(self, name, type, clazz) + self.ttl = ttl + self.created = currentTimeMillis() - def __eq__(self, other): - """Tests equality as per DNSRecord""" - if isinstance(other, DNSRecord): - return DNSEntry.__eq__(self, other) - return 0 + def __eq__(self, other): + """Tests equality as per DNSRecord""" + if isinstance(other, DNSRecord): + return DNSEntry.__eq__(self, other) + return 0 - def suppressedBy(self, msg): - """Returns true if any answer in a message can suffice for the - information held in this record.""" - for record in msg.answers: - if self.suppressedByAnswer(record): - return 1 - return 0 + def suppressedBy(self, msg): + """Returns true if any answer in a message can suffice for the + information held in this record.""" + for record in msg.answers: + if self.suppressedByAnswer(record): + return 1 + return 0 - def suppressedByAnswer(self, other): - """Returns true if another record has same name, type and class, - and if its TTL is at least half of this record's.""" - if self == other and other.ttl > (self.ttl / 2): - return 1 - return 0 + def suppressedByAnswer(self, other): + """Returns true if another record has same name, type and class, + and if its TTL is at least half of this record's.""" + if self == other and other.ttl > (self.ttl / 2): + return 1 + return 0 - def getExpirationTime(self, percent): - """Returns the time at which this record will have expired - by a certain percentage.""" - return self.created + (percent * self.ttl * 10) + def getExpirationTime(self, percent): + """Returns the time at which this record will have expired + by a certain percentage.""" + return self.created + (percent * self.ttl * 10) - def getRemainingTTL(self, now): - """Returns the remaining TTL in seconds.""" - return max(0, (self.getExpirationTime(100) - now) / 1000) + def getRemainingTTL(self, now): + 
"""Returns the remaining TTL in seconds.""" + return max(0, (self.getExpirationTime(100) - now) / 1000) - def isExpired(self, now): - """Returns true if this record has expired.""" - return self.getExpirationTime(100) <= now + def isExpired(self, now): + """Returns true if this record has expired.""" + return self.getExpirationTime(100) <= now - def isStale(self, now): - """Returns true if this record is at least half way expired.""" - return self.getExpirationTime(50) <= now + def isStale(self, now): + """Returns true if this record is at least half way expired.""" + return self.getExpirationTime(50) <= now + + def resetTTL(self, other): + """Sets this record's TTL and created time to that of + another record.""" + self.created = other.created + self.ttl = other.ttl - def resetTTL(self, other): - """Sets this record's TTL and created time to that of - another record.""" - self.created = other.created - self.ttl = other.ttl + def write(self, out): + """Abstract method""" + raise AbstractMethodException - def write(self, out): - """Abstract method""" - raise AbstractMethodException - - def toString(self, other): - """String representation with additional information""" - arg = "%s/%s,%s" % (self.ttl, self.getRemainingTTL(currentTimeMillis()), other) - return DNSEntry.toString(self, "record", arg) + def toString(self, other): + """String representation with additional information""" + arg = ("%s/%s,%s" % + (self.ttl, self.getRemainingTTL(currentTimeMillis()), other)) + return DNSEntry.toString(self, "record", arg) class DNSAddress(DNSRecord): - """A DNS address record""" + """A DNS address record""" - def __init__(self, name, type, clazz, ttl, address): - DNSRecord.__init__(self, name, type, clazz, ttl) - self.address = address + def __init__(self, name, type, clazz, ttl, address): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.address = address - def write(self, out): - """Used in constructing an outgoing packet""" - out.writeString(self.address, 
len(self.address)) + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeString(self.address, len(self.address)) - def __eq__(self, other): - """Tests equality on address""" - if isinstance(other, DNSAddress): - return self.address == other.address - return 0 + def __eq__(self, other): + """Tests equality on address""" + if isinstance(other, DNSAddress): + return self.address == other.address + return 0 - def __repr__(self): - """String representation""" - try: - return socket.inet_ntoa(self.address) - except Exception: - return self.address + def __repr__(self): + """String representation""" + try: + return socket.inet_ntoa(self.address) + except Exception: + return self.address class DNSHinfo(DNSRecord): - """A DNS host information record""" + """A DNS host information record""" - def __init__(self, name, type, clazz, ttl, cpu, os): - DNSRecord.__init__(self, name, type, clazz, ttl) - self.cpu = cpu - self.os = os + def __init__(self, name, type, clazz, ttl, cpu, os): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.cpu = cpu + self.os = os - def write(self, out): - """Used in constructing an outgoing packet""" - out.writeString(self.cpu, len(self.cpu)) - out.writeString(self.os, len(self.os)) + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeString(self.cpu, len(self.cpu)) + out.writeString(self.os, len(self.os)) - def __eq__(self, other): - """Tests equality on cpu and os""" - if isinstance(other, DNSHinfo): - return self.cpu == other.cpu and self.os == other.os - return 0 + def __eq__(self, other): + """Tests equality on cpu and os""" + if isinstance(other, DNSHinfo): + return self.cpu == other.cpu and self.os == other.os + return 0 - def __repr__(self): - """String representation""" - return self.cpu + " " + self.os + def __repr__(self): + """String representation""" + return self.cpu + " " + self.os class DNSPointer(DNSRecord): - """A DNS pointer record""" + """A DNS pointer record""" - 
def __init__(self, name, type, clazz, ttl, alias): - DNSRecord.__init__(self, name, type, clazz, ttl) - self.alias = alias + def __init__(self, name, type, clazz, ttl, alias): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.alias = alias - def write(self, out): - """Used in constructing an outgoing packet""" - out.writeName(self.alias) + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeName(self.alias) - def __eq__(self, other): - """Tests equality on alias""" - if isinstance(other, DNSPointer): - return self.alias == other.alias - return 0 + def __eq__(self, other): + """Tests equality on alias""" + if isinstance(other, DNSPointer): + return self.alias == other.alias + return 0 - def __repr__(self): - """String representation""" - return self.toString(self.alias) + def __repr__(self): + """String representation""" + return self.toString(self.alias) class DNSText(DNSRecord): - """A DNS text record""" + """A DNS text record""" - def __init__(self, name, type, clazz, ttl, text): - DNSRecord.__init__(self, name, type, clazz, ttl) - self.text = text + def __init__(self, name, type, clazz, ttl, text): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.text = text - def write(self, out): - """Used in constructing an outgoing packet""" - out.writeString(self.text, len(self.text)) + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeString(self.text, len(self.text)) - def __eq__(self, other): - """Tests equality on text""" - if isinstance(other, DNSText): - return self.text == other.text - return 0 + def __eq__(self, other): + """Tests equality on text""" + if isinstance(other, DNSText): + return self.text == other.text + return 0 - def __repr__(self): - """String representation""" - if len(self.text) > 10: - return self.toString(self.text[:7] + "...") - else: - return self.toString(self.text) + def __repr__(self): + """String representation""" + if len(self.text) > 10: + return 
self.toString(self.text[:7] + "...") + else: + return self.toString(self.text) class DNSService(DNSRecord): - """A DNS service record""" + """A DNS service record""" - def __init__(self, name, type, clazz, ttl, priority, weight, port, server): - DNSRecord.__init__(self, name, type, clazz, ttl) - self.priority = priority - self.weight = weight - self.port = port - self.server = server + def __init__(self, name, type, clazz, ttl, priority, weight, port, server): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.priority = priority + self.weight = weight + self.port = port + self.server = server - def write(self, out): - """Used in constructing an outgoing packet""" - out.writeShort(self.priority) - out.writeShort(self.weight) - out.writeShort(self.port) - out.writeName(self.server) + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeShort(self.priority) + out.writeShort(self.weight) + out.writeShort(self.port) + out.writeName(self.server) - def __eq__(self, other): - """Tests equality on priority, weight, port and server""" - if isinstance(other, DNSService): - return self.priority == other.priority and self.weight == other.weight and self.port == other.port and self.server == other.server - return 0 + def __eq__(self, other): + """Tests equality on priority, weight, port and server""" + if isinstance(other, DNSService): + return (self.priority == other.priority and + self.weight == other.weight and + self.port == other.port and + self.server == other.server) + return 0 - def __repr__(self): - """String representation""" - return self.toString("%s:%s" % (self.server, self.port)) + def __repr__(self): + """String representation""" + return self.toString("%s:%s" % (self.server, self.port)) class DNSIncoming(object): - """Object representation of an incoming DNS packet""" + """Object representation of an incoming DNS packet""" - def __init__(self, data): - """Constructor from string holding bytes of packet""" - self.offset = 0 - 
self.data = data - self.questions = [] - self.answers = [] - self.numQuestions = 0 - self.numAnswers = 0 - self.numAuthorities = 0 - self.numAdditionals = 0 + def __init__(self, data): + """Constructor from string holding bytes of packet""" + self.offset = 0 + self.data = data + self.questions = [] + self.answers = [] + self.numquestions = 0 + self.numanswers = 0 + self.numauthorities = 0 + self.numadditionals = 0 - self.readHeader() - self.readQuestions() - self.readOthers() + self.readHeader() + self.readQuestions() + self.readOthers() - def readHeader(self): - """Reads header portion of packet""" - format = '!HHHHHH' - length = struct.calcsize(format) - info = struct.unpack(format, self.data[self.offset:self.offset+length]) - self.offset += length + def readHeader(self): + """Reads header portion of packet""" + format = '!HHHHHH' + length = struct.calcsize(format) + info = struct.unpack(format, + self.data[self.offset:self.offset + length]) + self.offset += length - self.id = info[0] - self.flags = info[1] - self.numQuestions = info[2] - self.numAnswers = info[3] - self.numAuthorities = info[4] - self.numAdditionals = info[5] + self.id = info[0] + self.flags = info[1] + self.numquestions = info[2] + self.numanswers = info[3] + self.numauthorities = info[4] + self.numadditionals = info[5] - def readQuestions(self): - """Reads questions section of packet""" - format = '!HH' - length = struct.calcsize(format) - for i in range(0, self.numQuestions): - name = self.readName() - info = struct.unpack(format, self.data[self.offset:self.offset+length]) - self.offset += length + def readQuestions(self): + """Reads questions section of packet""" + format = '!HH' + length = struct.calcsize(format) + for i in range(0, self.numquestions): + name = self.readName() + info = struct.unpack(format, + self.data[self.offset:self.offset + length]) + self.offset += length - try: - question = DNSQuestion(name, info[0], info[1]) - self.questions.append(question) - except 
NonLocalNameException: - pass + try: + question = DNSQuestion(name, info[0], info[1]) + self.questions.append(question) + except NonLocalNameException: + pass - def readInt(self): - """Reads an integer from the packet""" - format = '!I' - length = struct.calcsize(format) - info = struct.unpack(format, self.data[self.offset:self.offset+length]) - self.offset += length - return info[0] + def readInt(self): + """Reads an integer from the packet""" + format = '!I' + length = struct.calcsize(format) + info = struct.unpack(format, + self.data[self.offset:self.offset + length]) + self.offset += length + return info[0] - def readCharacterString(self): - """Reads a character string from the packet""" - length = ord(self.data[self.offset]) - self.offset += 1 - return self.readString(length) + def readCharacterString(self): + """Reads a character string from the packet""" + length = ord(self.data[self.offset]) + self.offset += 1 + return self.readString(length) - def readString(self, len): - """Reads a string of a given length from the packet""" - format = '!' + str(len) + 's' - length = struct.calcsize(format) - info = struct.unpack(format, self.data[self.offset:self.offset+length]) - self.offset += length - return info[0] + def readString(self, len): + """Reads a string of a given length from the packet""" + format = '!' 
+ str(len) + 's' + length = struct.calcsize(format) + info = struct.unpack(format, + self.data[self.offset:self.offset + length]) + self.offset += length + return info[0] - def readUnsignedShort(self): - """Reads an unsigned short from the packet""" - format = '!H' - length = struct.calcsize(format) - info = struct.unpack(format, self.data[self.offset:self.offset+length]) - self.offset += length - return info[0] + def readUnsignedShort(self): + """Reads an unsigned short from the packet""" + format = '!H' + length = struct.calcsize(format) + info = struct.unpack(format, + self.data[self.offset:self.offset + length]) + self.offset += length + return info[0] - def readOthers(self): - """Reads the answers, authorities and additionals section of the packet""" - format = '!HHiH' - length = struct.calcsize(format) - n = self.numAnswers + self.numAuthorities + self.numAdditionals - for i in range(0, n): - domain = self.readName() - info = struct.unpack(format, self.data[self.offset:self.offset+length]) - self.offset += length + def readOthers(self): + """Reads answers, authorities and additionals section of the packet""" + format = '!HHiH' + length = struct.calcsize(format) + n = self.numanswers + self.numauthorities + self.numadditionals + for i in range(0, n): + domain = self.readName() + info = struct.unpack(format, + self.data[self.offset:self.offset + length]) + self.offset += length - rec = None - if info[0] == _TYPE_A: - rec = DNSAddress(domain, info[0], info[1], info[2], self.readString(4)) - elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR: - rec = DNSPointer(domain, info[0], info[1], info[2], self.readName()) - elif info[0] == _TYPE_TXT: - rec = DNSText(domain, info[0], info[1], info[2], self.readString(info[3])) - elif info[0] == _TYPE_SRV: - rec = DNSService(domain, info[0], info[1], info[2], self.readUnsignedShort(), self.readUnsignedShort(), self.readUnsignedShort(), self.readName()) - elif info[0] == _TYPE_HINFO: - rec = DNSHinfo(domain, info[0], 
info[1], info[2], self.readCharacterString(), self.readCharacterString()) - elif info[0] == _TYPE_AAAA: - rec = DNSAddress(domain, info[0], info[1], info[2], self.readString(16)) - else: - # Try to ignore types we don't know about - # this may mean the rest of the name is - # unable to be parsed, and may show errors - # so this is left for debugging. New types - # encountered need to be parsed properly. - # - #print "UNKNOWN TYPE = " + str(info[0]) - #raise BadTypeInNameException - self.offset += info[3] - - if rec is not None: - self.answers.append(rec) - - def isQuery(self): - """Returns true if this is a query""" - return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_QUERY + rec = None + if info[0] == _TYPE_A: + rec = DNSAddress(domain, info[0], info[1], info[2], + self.readString(4)) + elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR: + rec = DNSPointer(domain, info[0], info[1], info[2], + self.readName()) + elif info[0] == _TYPE_TXT: + rec = DNSText(domain, info[0], info[1], info[2], + self.readString(info[3])) + elif info[0] == _TYPE_SRV: + rec = DNSService(domain, info[0], info[1], info[2], + self.readUnsignedShort(), + self.readUnsignedShort(), + self.readUnsignedShort(), + self.readName()) + elif info[0] == _TYPE_HINFO: + rec = DNSHinfo(domain, info[0], info[1], info[2], + self.readCharacterString(), + self.readCharacterString()) + elif info[0] == _TYPE_AAAA: + rec = DNSAddress(domain, info[0], info[1], info[2], + self.readString(16)) + else: + # Try to ignore types we don't know about + # this may mean the rest of the name is + # unable to be parsed, and may show errors + # so this is left for debugging. New types + # encountered need to be parsed properly. 
+ # + #print "UNKNOWN TYPE = " + str(info[0]) + #raise BadTypeInNameException + self.offset += info[3] - def isResponse(self): - """Returns true if this is a response""" - return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_RESPONSE + if rec is not None: + self.answers.append(rec) + + def isQuery(self): + """Returns true if this is a query""" + return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_QUERY + + def isResponse(self): + """Returns true if this is a response""" + return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_RESPONSE - def readUTF(self, offset, len): - """Reads a UTF-8 string of a given length from the packet""" - return self.data[offset:offset+len].decode('utf-8') + def readUTF(self, offset, len): + """Reads a UTF-8 string of a given length from the packet""" + return self.data[offset:offset + len].decode('utf-8') - def readName(self): - """Reads a domain name from the packet""" - result = '' - off = self.offset - next = -1 - first = off + def readName(self): + """Reads a domain name from the packet""" + result = '' + off = self.offset + next = -1 + first = off - while True: - len = ord(self.data[off]) - off += 1 - if len == 0: - break - t = len & 0xC0 - if t == 0x00: - result = ''.join((result, self.readUTF(off, len) + '.')) - off += len - elif t == 0xC0: - if next < 0: - next = off + 1 - off = ((len & 0x3F) << 8) | ord(self.data[off]) - if off >= first: - raise BadDomainNameCircular(off) - first = off - else: - raise BadDomainName(off) + while True: + len = ord(self.data[off]) + off += 1 + if len == 0: + break + t = len & 0xC0 + if t == 0x00: + result = ''.join((result, self.readUTF(off, len) + '.')) + off += len + elif t == 0xC0: + if next < 0: + next = off + 1 + off = ((len & 0x3F) << 8) | ord(self.data[off]) + if off >= first: + raise BadDomainNameCircular(off) + first = off + else: + raise BadDomainName(off) - if next >= 0: - self.offset = next - else: - self.offset = off + if next >= 0: + self.offset = next + else: + self.offset = off - return 
result + return result class DNSOutgoing(object): - """Object representation of an outgoing packet""" + """Object representation of an outgoing packet""" - def __init__(self, flags, multicast = 1): - self.finished = 0 - self.id = 0 - self.multicast = multicast - self.flags = flags - self.names = {} - self.data = [] - self.size = 12 + def __init__(self, flags, multicast=1): + self.finished = 0 + self.id = 0 + self.multicast = multicast + self.flags = flags + self.names = {} + self.data = [] + self.size = 12 - self.questions = [] - self.answers = [] - self.authorities = [] - self.additionals = [] + self.questions = [] + self.answers = [] + self.authorities = [] + self.additionals = [] - def addQuestion(self, record): - """Adds a question""" - self.questions.append(record) + def addQuestion(self, record): + """Adds a question""" + self.questions.append(record) - def addAnswer(self, inp, record): - """Adds an answer""" - if not record.suppressedBy(inp): - self.addAnswerAtTime(record, 0) + def addAnswer(self, inp, record): + """Adds an answer""" + if not record.suppressedBy(inp): + self.addAnswerAtTime(record, 0) - def addAnswerAtTime(self, record, now): - """Adds an answer if if does not expire by a certain time""" - if record is not None: - if now == 0 or not record.isExpired(now): - self.answers.append((record, now)) + def addAnswerAtTime(self, record, now): + """Adds an answer if if does not expire by a certain time""" + if record is not None: + if now == 0 or not record.isExpired(now): + self.answers.append((record, now)) - def addAuthoritativeAnswer(self, record): - """Adds an authoritative answer""" - self.authorities.append(record) + def addAuthoritativeAnswer(self, record): + """Adds an authoritative answer""" + self.authorities.append(record) - def addAdditionalAnswer(self, record): - """Adds an additional answer""" - self.additionals.append(record) + def addAdditionalAnswer(self, record): + """Adds an additional answer""" + self.additionals.append(record) - 
def writeByte(self, value): - """Writes a single byte to the packet""" - format = '!c' - self.data.append(struct.pack(format, chr(value))) - self.size += 1 + def writeByte(self, value): + """Writes a single byte to the packet""" + format = '!c' + self.data.append(struct.pack(format, chr(value))) + self.size += 1 - def insertShort(self, index, value): - """Inserts an unsigned short in a certain position in the packet""" - format = '!H' - self.data.insert(index, struct.pack(format, value)) - self.size += 2 + def insertShort(self, index, value): + """Inserts an unsigned short in a certain position in the packet""" + format = '!H' + self.data.insert(index, struct.pack(format, value)) + self.size += 2 - def writeShort(self, value): - """Writes an unsigned short to the packet""" - format = '!H' - self.data.append(struct.pack(format, value)) - self.size += 2 + def writeShort(self, value): + """Writes an unsigned short to the packet""" + format = '!H' + self.data.append(struct.pack(format, value)) + self.size += 2 - def writeInt(self, value): - """Writes an unsigned integer to the packet""" - format = '!I' - self.data.append(struct.pack(format, int(value))) - self.size += 4 + def writeInt(self, value): + """Writes an unsigned integer to the packet""" + format = '!I' + self.data.append(struct.pack(format, int(value))) + self.size += 4 - def writeString(self, value, length): - """Writes a string to the packet""" - format = '!' + str(length) + 's' - self.data.append(struct.pack(format, value)) - self.size += length + def writeString(self, value, length): + """Writes a string to the packet""" + format = '!' 
+ str(length) + 's' + self.data.append(struct.pack(format, value)) + self.size += length - def writeUTF(self, s): - """Writes a UTF-8 string of a given length to the packet""" - utfstr = s.encode('utf-8') - length = len(utfstr) - if length > 64: - raise NamePartTooLongException - self.writeByte(length) - self.writeString(utfstr, length) + def writeUTF(self, s): + """Writes a UTF-8 string of a given length to the packet""" + utfstr = s.encode('utf-8') + length = len(utfstr) + if length > 64: + raise NamePartTooLongException + self.writeByte(length) + self.writeString(utfstr, length) - def writeName(self, name): - """Writes a domain name to the packet""" + def writeName(self, name): + """Writes a domain name to the packet""" - try: - # Find existing instance of this name in packet - # - index = self.names[name] - except KeyError: - # No record of this name already, so write it - # out as normal, recording the location of the name - # for future pointers to it. - # - self.names[name] = self.size - parts = name.split('.') - if parts[-1] == '': - parts = parts[:-1] - for part in parts: - self.writeUTF(part) - self.writeByte(0) - return + try: + # Find existing instance of this name in packet + # + index = self.names[name] + except KeyError: + # No record of this name already, so write it + # out as normal, recording the location of the name + # for future pointers to it. 
+ # + self.names[name] = self.size + parts = name.split('.') + if parts[-1] == '': + parts = parts[:-1] + for part in parts: + self.writeUTF(part) + self.writeByte(0) + return - # An index was found, so write a pointer to it - # - self.writeByte((index >> 8) | 0xC0) - self.writeByte(index) + # An index was found, so write a pointer to it + # + self.writeByte((index >> 8) | 0xC0) + self.writeByte(index) - def writeQuestion(self, question): - """Writes a question to the packet""" - self.writeName(question.name) - self.writeShort(question.type) - self.writeShort(question.clazz) + def writeQuestion(self, question): + """Writes a question to the packet""" + self.writeName(question.name) + self.writeShort(question.type) + self.writeShort(question.clazz) - def writeRecord(self, record, now): - """Writes a record (answer, authoritative answer, additional) to - the packet""" - self.writeName(record.name) - self.writeShort(record.type) - if record.unique and self.multicast: - self.writeShort(record.clazz | _CLASS_UNIQUE) - else: - self.writeShort(record.clazz) - if now == 0: - self.writeInt(record.ttl) - else: - self.writeInt(record.getRemainingTTL(now)) - index = len(self.data) - # Adjust size for the short we will write before this record - # - self.size += 2 - record.write(self) - self.size -= 2 + def writeRecord(self, record, now): + """Writes a record (answer, authoritative answer, additional) to + the packet""" + self.writeName(record.name) + self.writeShort(record.type) + if record.unique and self.multicast: + self.writeShort(record.clazz | _CLASS_UNIQUE) + else: + self.writeShort(record.clazz) + if now == 0: + self.writeInt(record.ttl) + else: + self.writeInt(record.getRemainingTTL(now)) + index = len(self.data) + # Adjust size for the short we will write before this record + # + self.size += 2 + record.write(self) + self.size -= 2 - length = len(''.join(self.data[index:])) - self.insertShort(index, length) # Here is the short we adjusted for + length = 
len(''.join(self.data[index:])) + self.insertShort(index, length) # Here is the short we adjusted for - def packet(self): - """Returns a string containing the packet's bytes + def packet(self): + """Returns a string containing the packet's bytes - No further parts should be added to the packet once this - is done.""" - if not self.finished: - self.finished = 1 - for question in self.questions: - self.writeQuestion(question) - for answer, time in self.answers: - self.writeRecord(answer, time) - for authority in self.authorities: - self.writeRecord(authority, 0) - for additional in self.additionals: - self.writeRecord(additional, 0) + No further parts should be added to the packet once this + is done.""" + if not self.finished: + self.finished = 1 + for question in self.questions: + self.writeQuestion(question) + for answer, time_ in self.answers: + self.writeRecord(answer, time_) + for authority in self.authorities: + self.writeRecord(authority, 0) + for additional in self.additionals: + self.writeRecord(additional, 0) - self.insertShort(0, len(self.additionals)) - self.insertShort(0, len(self.authorities)) - self.insertShort(0, len(self.answers)) - self.insertShort(0, len(self.questions)) - self.insertShort(0, self.flags) - if self.multicast: - self.insertShort(0, 0) - else: - self.insertShort(0, self.id) - return ''.join(self.data) + self.insertShort(0, len(self.additionals)) + self.insertShort(0, len(self.authorities)) + self.insertShort(0, len(self.answers)) + self.insertShort(0, len(self.questions)) + self.insertShort(0, self.flags) + if self.multicast: + self.insertShort(0, 0) + else: + self.insertShort(0, self.id) + return ''.join(self.data) class DNSCache(object): - """A cache of DNS entries""" + """A cache of DNS entries""" - def __init__(self): - self.cache = {} + def __init__(self): + self.cache = {} - def add(self, entry): - """Adds an entry""" - try: - list = self.cache[entry.key] - except KeyError: - list = self.cache[entry.key] = [] - 
list.append(entry) + def add(self, entry): + """Adds an entry""" + try: + list = self.cache[entry.key] + except KeyError: + list = self.cache[entry.key] = [] + list.append(entry) - def remove(self, entry): - """Removes an entry""" - try: - list = self.cache[entry.key] - list.remove(entry) - except KeyError: - pass + def remove(self, entry): + """Removes an entry""" + try: + list = self.cache[entry.key] + list.remove(entry) + except KeyError: + pass - def get(self, entry): - """Gets an entry by key. Will return None if there is no - matching entry.""" - try: - list = self.cache[entry.key] - return list[list.index(entry)] - except (KeyError, ValueError): - return None - - def getByDetails(self, name, type, clazz): - """Gets an entry by details. Will return None if there is - no matching entry.""" - entry = DNSEntry(name, type, clazz) - return self.get(entry) + def get(self, entry): + """Gets an entry by key. Will return None if there is no + matching entry.""" + try: + list = self.cache[entry.key] + return list[list.index(entry)] + except (KeyError, ValueError): + return None - def entriesWithName(self, name): - """Returns a list of entries whose key matches the name.""" - try: - return self.cache[name] - except KeyError: - return [] + def getByDetails(self, name, type, clazz): + """Gets an entry by details. 
Will return None if there is + no matching entry.""" + entry = DNSEntry(name, type, clazz) + return self.get(entry) - def entries(self): - """Returns a list of all entries""" - def add(x, y): return x+y - try: - return reduce(add, self.cache.values()) - except Exception: - return [] + def entriesWithName(self, name): + """Returns a list of entries whose key matches the name.""" + try: + return self.cache[name] + except KeyError: + return [] + + def entries(self): + """Returns a list of all entries""" + try: + return list(itertools.chain.from_iterable(self.cache.values())) + except Exception: + return [] class Engine(threading.Thread): - """An engine wraps read access to sockets, allowing objects that - need to receive data from sockets to be called back when the - sockets are ready. + """An engine wraps read access to sockets, allowing objects that + need to receive data from sockets to be called back when the + sockets are ready. - A reader needs a handle_read() method, which is called when the socket - it is interested in is ready for reading. + A reader needs a handle_read() method, which is called when the socket + it is interested in is ready for reading. - Writers are not implemented here, because we only send short - packets. - """ + Writers are not implemented here, because we only send short + packets. 
+ """ - def __init__(self, zeroconf): - threading.Thread.__init__(self) - self.zeroconf = zeroconf - self.readers = {} # maps socket to reader - self.timeout = 5 - self.condition = threading.Condition() - self.start() + def __init__(self, zeroconf): + threading.Thread.__init__(self) + self.zeroconf = zeroconf + self.readers = {} # maps socket to reader + self.timeout = 5 + self.condition = threading.Condition() + self.start() - def run(self): - while not globals()['_GLOBAL_DONE']: - rs = self.getReaders() - if len(rs) == 0: - # No sockets to manage, but we wait for the timeout - # or addition of a socket - # - self.condition.acquire() - self.condition.wait(self.timeout) - self.condition.release() - else: - try: - rr, wr, er = select.select(rs, [], [], self.timeout) - for socket in rr: - try: - self.readers[socket].handle_read() - except Exception: - if not globals()['_GLOBAL_DONE']: - traceback.print_exc() - except Exception: - pass + def run(self): + while not globals()['_GLOBAL_DONE']: + rs = self.getReaders() + if len(rs) == 0: + # No sockets to manage, but we wait for the timeout + # or addition of a socket + # + self.condition.acquire() + self.condition.wait(self.timeout) + self.condition.release() + else: + try: + rr, wr, er = select.select(rs, [], [], self.timeout) + for sock in rr: + try: + self.readers[sock].handle_read() + except Exception: + if not globals()['_GLOBAL_DONE']: + traceback.print_exc() + except Exception: + pass - def getReaders(self): - self.condition.acquire() - result = self.readers.keys() - self.condition.release() - return result + def getReaders(self): + self.condition.acquire() + result = self.readers.keys() + self.condition.release() + return result - def addReader(self, reader, socket): - self.condition.acquire() - self.readers[socket] = reader - self.condition.notify() - self.condition.release() + def addReader(self, reader, socket): + self.condition.acquire() + self.readers[socket] = reader + self.condition.notify() + 
self.condition.release() - def delReader(self, socket): - self.condition.acquire() - del(self.readers[socket]) - self.condition.notify() - self.condition.release() + def delReader(self, socket): + self.condition.acquire() + del self.readers[socket] + self.condition.notify() + self.condition.release() - def notify(self): - self.condition.acquire() - self.condition.notify() - self.condition.release() + def notify(self): + self.condition.acquire() + self.condition.notify() + self.condition.release() class Listener(object): - """A Listener is used by this module to listen on the multicast - group to which DNS messages are sent, allowing the implementation - to cache information as it arrives. + """A Listener is used by this module to listen on the multicast + group to which DNS messages are sent, allowing the implementation + to cache information as it arrives. - It requires registration with an Engine object in order to have - the read() method called when a socket is available for reading.""" + It requires registration with an Engine object in order to have + the read() method called when a socket is available for reading.""" - def __init__(self, zeroconf): - self.zeroconf = zeroconf - self.zeroconf.engine.addReader(self, self.zeroconf.socket) + def __init__(self, zeroconf): + self.zeroconf = zeroconf + self.zeroconf.engine.addReader(self, self.zeroconf.socket) - def handle_read(self): - data, (addr, port) = self.zeroconf.socket.recvfrom(_MAX_MSG_ABSOLUTE) - self.data = data - msg = DNSIncoming(data) - if msg.isQuery(): - # Always multicast responses - # - if port == _MDNS_PORT: - self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT) - # If it's not a multicast query, reply via unicast - # and multicast - # - elif port == _DNS_PORT: - self.zeroconf.handleQuery(msg, addr, port) - self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT) - else: - self.zeroconf.handleResponse(msg) + def handle_read(self): + data, (addr, port) = 
self.zeroconf.socket.recvfrom(_MAX_MSG_ABSOLUTE) + self.data = data + msg = DNSIncoming(data) + if msg.isQuery(): + # Always multicast responses + # + if port == _MDNS_PORT: + self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT) + # If it's not a multicast query, reply via unicast + # and multicast + # + elif port == _DNS_PORT: + self.zeroconf.handleQuery(msg, addr, port) + self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT) + else: + self.zeroconf.handleResponse(msg) class Reaper(threading.Thread): - """A Reaper is used by this module to remove cache entries that - have expired.""" + """A Reaper is used by this module to remove cache entries that + have expired.""" - def __init__(self, zeroconf): - threading.Thread.__init__(self) - self.zeroconf = zeroconf - self.start() + def __init__(self, zeroconf): + threading.Thread.__init__(self) + self.zeroconf = zeroconf + self.start() - def run(self): - while True: - self.zeroconf.wait(10 * 1000) - if globals()['_GLOBAL_DONE']: - return - now = currentTimeMillis() - for record in self.zeroconf.cache.entries(): - if record.isExpired(now): - self.zeroconf.updateRecord(now, record) - self.zeroconf.cache.remove(record) + def run(self): + while True: + self.zeroconf.wait(10 * 1000) + if globals()['_GLOBAL_DONE']: + return + now = currentTimeMillis() + for record in self.zeroconf.cache.entries(): + if record.isExpired(now): + self.zeroconf.updateRecord(now, record) + self.zeroconf.cache.remove(record) class ServiceBrowser(threading.Thread): - """Used to browse for a service of a specific type. + """Used to browse for a service of a specific type. 
- The listener object will have its addService() and - removeService() methods called when this browser - discovers changes in the services availability.""" + The listener object will have its addService() and + removeService() methods called when this browser + discovers changes in the services availability.""" - def __init__(self, zeroconf, type, listener): - """Creates a browser for a specific type""" - threading.Thread.__init__(self) - self.zeroconf = zeroconf - self.type = type - self.listener = listener - self.services = {} - self.nextTime = currentTimeMillis() - self.delay = _BROWSER_TIME - self.list = [] + def __init__(self, zeroconf, type, listener): + """Creates a browser for a specific type""" + threading.Thread.__init__(self) + self.zeroconf = zeroconf + self.type = type + self.listener = listener + self.services = {} + self.nexttime = currentTimeMillis() + self.delay = _BROWSER_TIME + self.list = [] - self.done = 0 + self.done = 0 - self.zeroconf.addListener(self, DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)) - self.start() + self.zeroconf.addListener(self, DNSQuestion(self.type, _TYPE_PTR, + _CLASS_IN)) + self.start() - def updateRecord(self, zeroconf, now, record): - """Callback invoked by Zeroconf when new information arrives. + def updateRecord(self, zeroconf, now, record): + """Callback invoked by Zeroconf when new information arrives. 
- Updates information required by browser in the Zeroconf cache.""" - if record.type == _TYPE_PTR and record.name == self.type: - expired = record.isExpired(now) - try: - oldrecord = self.services[record.alias.lower()] - if not expired: - oldrecord.resetTTL(record) - else: - del(self.services[record.alias.lower()]) - callback = lambda x: self.listener.removeService(x, self.type, record.alias) - self.list.append(callback) - return - except Exception: - if not expired: - self.services[record.alias.lower()] = record - callback = lambda x: self.listener.addService(x, self.type, record.alias) - self.list.append(callback) + Updates information required by browser in the Zeroconf cache.""" + if record.type == _TYPE_PTR and record.name == self.type: + expired = record.isExpired(now) + try: + oldrecord = self.services[record.alias.lower()] + if not expired: + oldrecord.resetTTL(record) + else: + del self.services[record.alias.lower()] + callback = (lambda x: + self.listener.removeService(x, self.type, record.alias)) + self.list.append(callback) + return + except Exception: + if not expired: + self.services[record.alias.lower()] = record + callback = (lambda x: + self.listener.addService(x, self.type, record.alias)) + self.list.append(callback) - expires = record.getExpirationTime(75) - if expires < self.nextTime: - self.nextTime = expires - - def cancel(self): - self.done = 1 - self.zeroconf.notifyAll() + expires = record.getExpirationTime(75) + if expires < self.nexttime: + self.nexttime = expires - def run(self): - while True: - event = None - now = currentTimeMillis() - if len(self.list) == 0 and self.nextTime > now: - self.zeroconf.wait(self.nextTime - now) - if globals()['_GLOBAL_DONE'] or self.done: - return - now = currentTimeMillis() + def cancel(self): + self.done = 1 + self.zeroconf.notifyAll() + + def run(self): + while True: + event = None + now = currentTimeMillis() + if len(self.list) == 0 and self.nexttime > now: + self.zeroconf.wait(self.nexttime - now) + if 
globals()['_GLOBAL_DONE'] or self.done: + return + now = currentTimeMillis() - if self.nextTime <= now: - out = DNSOutgoing(_FLAGS_QR_QUERY) - out.addQuestion(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)) - for record in self.services.values(): - if not record.isExpired(now): - out.addAnswerAtTime(record, now) - self.zeroconf.send(out) - self.nextTime = now + self.delay - self.delay = min(20 * 1000, self.delay * 2) + if self.nexttime <= now: + out = DNSOutgoing(_FLAGS_QR_QUERY) + out.addQuestion(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)) + for record in self.services.values(): + if not record.isExpired(now): + out.addAnswerAtTime(record, now) + self.zeroconf.send(out) + self.nexttime = now + self.delay + self.delay = min(20 * 1000, self.delay * 2) - if len(self.list) > 0: - event = self.list.pop(0) + if len(self.list) > 0: + event = self.list.pop(0) - if event is not None: - event(self.zeroconf) + if event is not None: + event(self.zeroconf) class ServiceInfo(object): - """Service information""" - - def __init__(self, type, name, address=None, port=None, weight=0, priority=0, properties=None, server=None): - """Create a service description. + """Service information""" - type: fully qualified service type name - name: fully qualified service name - address: IP address as unsigned short, network byte order - port: port that the service runs on - weight: weight of the service - priority: priority of the service - properties: dictionary of properties (or a string holding the bytes for the text field) - server: fully qualified name for service host (defaults to name)""" + def __init__(self, type, name, address=None, port=None, weight=0, + priority=0, properties=None, server=None): + """Create a service description. 
- if not name.endswith(type): - raise BadTypeInNameException - self.type = type - self.name = name - self.address = address - self.port = port - self.weight = weight - self.priority = priority - if server: - self.server = server - else: - self.server = name - self.setProperties(properties) + type: fully qualified service type name + name: fully qualified service name + address: IP address as unsigned short, network byte order + port: port that the service runs on + weight: weight of the service + priority: priority of the service + properties: dictionary of properties (or a string holding the bytes for + the text field) + server: fully qualified name for service host (defaults to name)""" + + if not name.endswith(type): + raise BadTypeInNameException + self.type = type + self.name = name + self.address = address + self.port = port + self.weight = weight + self.priority = priority + if server: + self.server = server + else: + self.server = name + self.setProperties(properties) - def setProperties(self, properties): - """Sets properties and text of this info from a dictionary""" - if isinstance(properties, dict): - self.properties = properties - list = [] - result = '' - for key in properties: - value = properties[key] - if value is None: - suffix = '' - elif isinstance(value, str): - suffix = value - elif isinstance(value, int): - if value: - suffix = 'true' - else: - suffix = 'false' - else: - suffix = '' - list.append('='.join((key, suffix))) - for item in list: - result = ''.join((result, struct.pack('!c', chr(len(item))), item)) - self.text = result - else: - self.text = properties + def setProperties(self, properties): + """Sets properties and text of this info from a dictionary""" + if isinstance(properties, dict): + self.properties = properties + list = [] + result = '' + for key in properties: + value = properties[key] + if value is None: + suffix = '' + elif isinstance(value, str): + suffix = value + elif isinstance(value, int): + if value: + suffix = 
'true' + else: + suffix = 'false' + else: + suffix = '' + list.append('='.join((key, suffix))) + for item in list: + result = ''.join((result, struct.pack('!c', chr(len(item))), + item)) + self.text = result + else: + self.text = properties + + def setText(self, text): + """Sets properties and text given a text field""" + self.text = text + try: + result = {} + end = len(text) + index = 0 + strs = [] + while index < end: + length = ord(text[index]) + index += 1 + strs.append(text[index:index + length]) + index += length - def setText(self, text): - """Sets properties and text given a text field""" - self.text = text - try: - result = {} - end = len(text) - index = 0 - strs = [] - while index < end: - length = ord(text[index]) - index += 1 - strs.append(text[index:index+length]) - index += length + for s in strs: + eindex = s.find('=') + if eindex == -1: + # No equals sign at all + key = s + value = 0 + else: + key = s[:eindex] + value = s[eindex + 1:] + if value == 'true': + value = 1 + elif value == 'false' or not value: + value = 0 + + # Only update non-existent properties + if key and result.get(key) is None: + result[key] = value - for s in strs: - eindex = s.find('=') - if eindex == -1: - # No equals sign at all - key = s - value = 0 - else: - key = s[:eindex] - value = s[eindex+1:] - if value == 'true': - value = 1 - elif value == 'false' or not value: - value = 0 + self.properties = result + except Exception: + traceback.print_exc() + self.properties = None + + def getType(self): + """Type accessor""" + return self.type - # Only update non-existent properties - if key and result.get(key) == None: - result[key] = value - - self.properties = result - except Exception: - traceback.print_exc() - self.properties = None + def getName(self): + """Name accessor""" + if self.type is not None and self.name.endswith("." 
+ self.type): + return self.name[:len(self.name) - len(self.type) - 1] + return self.name - def getType(self): - """Type accessor""" - return self.type + def getAddress(self): + """Address accessor""" + return self.address - def getName(self): - """Name accessor""" - if self.type is not None and self.name.endswith("." + self.type): - return self.name[:len(self.name) - len(self.type) - 1] - return self.name + def getPort(self): + """Port accessor""" + return self.port - def getAddress(self): - """Address accessor""" - return self.address + def getPriority(self): + """Priority accessor""" + return self.priority - def getPort(self): - """Port accessor""" - return self.port + def getWeight(self): + """Weight accessor""" + return self.weight - def getPriority(self): - """Priority accessor""" - return self.priority + def getProperties(self): + """Properties accessor""" + return self.properties - def getWeight(self): - """Weight accessor""" - return self.weight + def getText(self): + """Text accessor""" + return self.text - def getProperties(self): - """Properties accessor""" - return self.properties + def getServer(self): + """Server accessor""" + return self.server - def getText(self): - """Text accessor""" - return self.text - - def getServer(self): - """Server accessor""" - return self.server - - def updateRecord(self, zeroconf, now, record): - """Updates service information from a DNS record""" - if record is not None and not record.isExpired(now): - if record.type == _TYPE_A: - #if record.name == self.name: - if record.name == self.server: - self.address = record.address - elif record.type == _TYPE_SRV: - if record.name == self.name: - self.server = record.server - self.port = record.port - self.weight = record.weight - self.priority = record.priority - #self.address = None - self.updateRecord(zeroconf, now, zeroconf.cache.getByDetails(self.server, _TYPE_A, _CLASS_IN)) - elif record.type == _TYPE_TXT: - if record.name == self.name: - self.setText(record.text) + def 
updateRecord(self, zeroconf, now, record): + """Updates service information from a DNS record""" + if record is not None and not record.isExpired(now): + if record.type == _TYPE_A: + #if record.name == self.name: + if record.name == self.server: + self.address = record.address + elif record.type == _TYPE_SRV: + if record.name == self.name: + self.server = record.server + self.port = record.port + self.weight = record.weight + self.priority = record.priority + #self.address = None + self.updateRecord(zeroconf, now, + zeroconf.cache.getByDetails(self.server, + _TYPE_A, _CLASS_IN)) + elif record.type == _TYPE_TXT: + if record.name == self.name: + self.setText(record.text) - def request(self, zeroconf, timeout): - """Returns true if the service could be discovered on the - network, and updates this object with details discovered. - """ - now = currentTimeMillis() - delay = _LISTENER_TIME - next = now + delay - last = now + timeout - result = 0 - try: - zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN)) - while self.server is None or self.address is None or self.text is None: - if last <= now: - return 0 - if next <= now: - out = DNSOutgoing(_FLAGS_QR_QUERY) - out.addQuestion(DNSQuestion(self.name, _TYPE_SRV, _CLASS_IN)) - out.addAnswerAtTime(zeroconf.cache.getByDetails(self.name, _TYPE_SRV, _CLASS_IN), now) - out.addQuestion(DNSQuestion(self.name, _TYPE_TXT, _CLASS_IN)) - out.addAnswerAtTime(zeroconf.cache.getByDetails(self.name, _TYPE_TXT, _CLASS_IN), now) - if self.server is not None: - out.addQuestion(DNSQuestion(self.server, _TYPE_A, _CLASS_IN)) - out.addAnswerAtTime(zeroconf.cache.getByDetails(self.server, _TYPE_A, _CLASS_IN), now) - zeroconf.send(out) - next = now + delay - delay = delay * 2 + def request(self, zeroconf, timeout): + """Returns true if the service could be discovered on the + network, and updates this object with details discovered. 
+ """ + now = currentTimeMillis() + delay = _LISTENER_TIME + next = now + delay + last = now + timeout + result = 0 + try: + zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY, + _CLASS_IN)) + while (self.server is None or self.address is None or + self.text is None): + if last <= now: + return 0 + if next <= now: + out = DNSOutgoing(_FLAGS_QR_QUERY) + out.addQuestion(DNSQuestion(self.name, _TYPE_SRV, + _CLASS_IN)) + out.addAnswerAtTime( + zeroconf.cache.getByDetails(self.name, + _TYPE_SRV, + _CLASS_IN), + now) + out.addQuestion(DNSQuestion(self.name, _TYPE_TXT, + _CLASS_IN)) + out.addAnswerAtTime( + zeroconf.cache.getByDetails(self.name, _TYPE_TXT, + _CLASS_IN), + now) + if self.server is not None: + out.addQuestion( + DNSQuestion(self.server, _TYPE_A, _CLASS_IN)) + out.addAnswerAtTime( + zeroconf.cache.getByDetails(self.server, _TYPE_A, + _CLASS_IN), + now) + zeroconf.send(out) + next = now + delay + delay = delay * 2 - zeroconf.wait(min(next, last) - now) - now = currentTimeMillis() - result = 1 - finally: - zeroconf.removeListener(self) + zeroconf.wait(min(next, last) - now) + now = currentTimeMillis() + result = 1 + finally: + zeroconf.removeListener(self) - return result + return result - def __eq__(self, other): - """Tests equality of service name""" - if isinstance(other, ServiceInfo): - return other.name == self.name - return 0 + def __eq__(self, other): + """Tests equality of service name""" + if isinstance(other, ServiceInfo): + return other.name == self.name + return 0 - def __ne__(self, other): - """Non-equality test""" - return not self.__eq__(other) + def __ne__(self, other): + """Non-equality test""" + return not self.__eq__(other) - def __repr__(self): - """String representation""" - result = "service[%s,%s:%s," % (self.name, socket.inet_ntoa(self.getAddress()), self.port) - if self.text is None: - result += "None" - else: - if len(self.text) < 20: - result += self.text - else: - result += self.text[:17] + "..." 
- result += "]" - return result + def __repr__(self): + """String representation""" + result = ("service[%s,%s:%s," % + (self.name, socket.inet_ntoa(self.getAddress()), self.port)) + if self.text is None: + result += "None" + else: + if len(self.text) < 20: + result += self.text + else: + result += self.text[:17] + "..." + result += "]" + return result class Zeroconf(object): - """Implementation of Zeroconf Multicast DNS Service Discovery + """Implementation of Zeroconf Multicast DNS Service Discovery + + Supports registration, unregistration, queries and browsing. + """ + def __init__(self, bindaddress=None): + """Creates an instance of the Zeroconf class, establishing + multicast communications, listening and reaping threads.""" + globals()['_GLOBAL_DONE'] = 0 + if bindaddress is None: + self.intf = socket.gethostbyname(socket.gethostname()) + else: + self.intf = bindaddress + self.group = ('', _MDNS_PORT) + self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + try: + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + except Exception: + # SO_REUSEADDR should be equivalent to SO_REUSEPORT for + # multicast UDP sockets (p 731, "TCP/IP Illustrated, + # Volume 2"), but some BSD-derived systems require + # SO_REUSEPORT to be specified explicitly. Also, not all + # versions of Python have SO_REUSEPORT available. So + # if you're on a BSD-based system, and haven't upgraded + # to Python 2.3 yet, you may find this library doesn't + # work as expected. 
+ # + pass + self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, "\xff") + self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, "\x01") + try: + self.socket.bind(self.group) + except Exception: + # Some versions of linux raise an exception even though + # SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it + pass + self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, + socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) - Supports registration, unregistration, queries and browsing. - """ - def __init__(self, bindaddress=None): - """Creates an instance of the Zeroconf class, establishing - multicast communications, listening and reaping threads.""" - globals()['_GLOBAL_DONE'] = 0 - if bindaddress is None: - self.intf = socket.gethostbyname(socket.gethostname()) - else: - self.intf = bindaddress - self.group = ('', _MDNS_PORT) - self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - try: - self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - except Exception: - # SO_REUSEADDR should be equivalent to SO_REUSEPORT for - # multicast UDP sockets (p 731, "TCP/IP Illustrated, - # Volume 2"), but some BSD-derived systems require - # SO_REUSEPORT to be specified explicitly. Also, not all - # versions of Python have SO_REUSEPORT available. So - # if you're on a BSD-based system, and haven't upgraded - # to Python 2.3 yet, you may find this library doesn't - # work as expected. 
- # - pass - self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, 255) - self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, 1) - try: - self.socket.bind(self.group) - except Exception: - # Some versions of linux raise an exception even though - # SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it - pass - self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) + self.listeners = [] + self.browsers = [] + self.services = {} + self.servicetypes = {} + + self.cache = DNSCache() + + self.condition = threading.Condition() + + self.engine = Engine(self) + self.listener = Listener(self) + self.reaper = Reaper(self) + + def isLoopback(self): + return self.intf.startswith("127.0.0.1") + + def isLinklocal(self): + return self.intf.startswith("169.254.") - self.listeners = [] - self.browsers = [] - self.services = {} - self.servicetypes = {} - - self.cache = DNSCache() - - self.condition = threading.Condition() - - self.engine = Engine(self) - self.listener = Listener(self) - self.reaper = Reaper(self) - - def isLoopback(self): - return self.intf.startswith("127.0.0.1") + def wait(self, timeout): + """Calling thread waits for a given number of milliseconds or + until notified.""" + self.condition.acquire() + self.condition.wait(timeout / 1000) + self.condition.release() - def isLinklocal(self): - return self.intf.startswith("169.254.") + def notifyAll(self): + """Notifies all waiting threads""" + self.condition.acquire() + self.condition.notifyAll() + self.condition.release() - def wait(self, timeout): - """Calling thread waits for a given number of milliseconds or - until notified.""" - self.condition.acquire() - self.condition.wait(timeout/1000) - self.condition.release() - - def notifyAll(self): - """Notifies all waiting threads""" - self.condition.acquire() - self.condition.notifyAll() - self.condition.release() + def getServiceInfo(self, type, name, timeout=3000): + 
"""Returns network's service information for a particular + name and type, or None if no service matches by the timeout, + which defaults to 3 seconds.""" + info = ServiceInfo(type, name) + if info.request(self, timeout): + return info + return None - def getServiceInfo(self, type, name, timeout=3000): - """Returns network's service information for a particular - name and type, or None if no service matches by the timeout, - which defaults to 3 seconds.""" - info = ServiceInfo(type, name) - if info.request(self, timeout): - return info - return None + def addServiceListener(self, type, listener): + """Adds a listener for a particular service type. This object + will then have its updateRecord method called when information + arrives for that type.""" + self.removeServiceListener(listener) + self.browsers.append(ServiceBrowser(self, type, listener)) - def addServiceListener(self, type, listener): - """Adds a listener for a particular service type. This object - will then have its updateRecord method called when information - arrives for that type.""" - self.removeServiceListener(listener) - self.browsers.append(ServiceBrowser(self, type, listener)) - - def removeServiceListener(self, listener): - """Removes a listener from the set that is currently listening.""" - for browser in self.browsers: - if browser.listener == listener: - browser.cancel() - del(browser) + def removeServiceListener(self, listener): + """Removes a listener from the set that is currently listening.""" + for browser in self.browsers: + if browser.listener == listener: + browser.cancel() + del browser - def registerService(self, info, ttl=_DNS_TTL): - """Registers service information to the network with a default TTL - of 60 seconds. Zeroconf will then respond to requests for - information for that service. 
The name of the service may be - changed if needed to make it unique on the network.""" - self.checkService(info) - self.services[info.name.lower()] = info - if self.servicetypes.has_key(info.type): - self.servicetypes[info.type]+=1 - else: - self.servicetypes[info.type]=1 - now = currentTimeMillis() - nextTime = now - i = 0 - while i < 3: - if now < nextTime: - self.wait(nextTime - now) - now = currentTimeMillis() - continue - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) - out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, ttl, info.name), 0) - out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, ttl, info.priority, info.weight, info.port, info.server), 0) - out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text), 0) - if info.address: - out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, ttl, info.address), 0) - self.send(out) - i += 1 - nextTime += _REGISTER_TIME + def registerService(self, info, ttl=_DNS_TTL): + """Registers service information to the network with a default TTL + of 60 seconds. Zeroconf will then respond to requests for + information for that service. 
The name of the service may be + changed if needed to make it unique on the network.""" + self.checkService(info) + self.services[info.name.lower()] = info + if info.type in self.servicetypes: + self.servicetypes[info.type] += 1 + else: + self.servicetypes[info.type] = 1 + now = currentTimeMillis() + nexttime = now + i = 0 + while i < 3: + if now < nexttime: + self.wait(nexttime - now) + now = currentTimeMillis() + continue + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, + _CLASS_IN, ttl, info.name), 0) + out.addAnswerAtTime( + DNSService( + info.name, _TYPE_SRV, + _CLASS_IN, ttl, info.priority, info.weight, info.port, + info.server), + 0) + out.addAnswerAtTime( + DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text), + 0) + if info.address: + out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, + _CLASS_IN, ttl, info.address), 0) + self.send(out) + i += 1 + nexttime += _REGISTER_TIME - def unregisterService(self, info): - """Unregister a service.""" - try: - del(self.services[info.name.lower()]) - if self.servicetypes[info.type]>1: - self.servicetypes[info.type]-=1 - else: - del self.servicetypes[info.type] - except KeyError: - pass - now = currentTimeMillis() - nextTime = now - i = 0 - while i < 3: - if now < nextTime: - self.wait(nextTime - now) - now = currentTimeMillis() - continue - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) - out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0) - out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0, info.priority, info.weight, info.port, info.name), 0) - out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0) - if info.address: - out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, info.address), 0) - self.send(out) - i += 1 - nextTime += _UNREGISTER_TIME + def unregisterService(self, info): + """Unregister a service.""" + try: + del self.services[info.name.lower()] + if 
self.servicetypes[info.type] > 1: + self.servicetypes[info.type] -= 1 + else: + del self.servicetypes[info.type] + except KeyError: + pass + now = currentTimeMillis() + nexttime = now + i = 0 + while i < 3: + if now < nexttime: + self.wait(nexttime - now) + now = currentTimeMillis() + continue + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.addAnswerAtTime( + DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0) + out.addAnswerAtTime( + DNSService(info.name, _TYPE_SRV, + _CLASS_IN, 0, info.priority, info.weight, info.port, + info.name), + 0) + out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, + _CLASS_IN, 0, info.text), 0) + if info.address: + out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, + _CLASS_IN, 0, info.address), 0) + self.send(out) + i += 1 + nexttime += _UNREGISTER_TIME - def unregisterAllServices(self): - """Unregister all registered services.""" - if len(self.services) > 0: - now = currentTimeMillis() - nextTime = now - i = 0 - while i < 3: - if now < nextTime: - self.wait(nextTime - now) - now = currentTimeMillis() - continue - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) - for info in self.services.values(): - out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0) - out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0, info.priority, info.weight, info.port, info.server), 0) - out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0) - if info.address: - out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, info.address), 0) - self.send(out) - i += 1 - nextTime += _UNREGISTER_TIME + def unregisterAllServices(self): + """Unregister all registered services.""" + if len(self.services) > 0: + now = currentTimeMillis() + nexttime = now + i = 0 + while i < 3: + if now < nexttime: + self.wait(nexttime - now) + now = currentTimeMillis() + continue + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + for info in self.services.values(): + 
out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, + _CLASS_IN, 0, info.name), 0) + out.addAnswerAtTime( + DNSService(info.name, _TYPE_SRV, + _CLASS_IN, 0, info.priority, info.weight, + info.port, info.server), + 0) + out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, + _CLASS_IN, 0, info.text), 0) + if info.address: + out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, + _CLASS_IN, 0, info.address), 0) + self.send(out) + i += 1 + nexttime += _UNREGISTER_TIME - def checkService(self, info): - """Checks the network for a unique service name, modifying the - ServiceInfo passed in if it is not unique.""" - now = currentTimeMillis() - nextTime = now - i = 0 - while i < 3: - for record in self.cache.entriesWithName(info.type): - if record.type == _TYPE_PTR and not record.isExpired(now) and record.alias == info.name: - if (info.name.find('.') < 0): - info.name = info.name + ".[" + info.address + ":" + info.port + "]." + info.type - self.checkService(info) - return - raise NonUniqueNameException - if now < nextTime: - self.wait(nextTime - now) - now = currentTimeMillis() - continue - out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA) - self.debug = out - out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN)) - out.addAuthoritativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name)) - self.send(out) - i += 1 - nextTime += _CHECK_TIME + def checkService(self, info): + """Checks the network for a unique service name, modifying the + ServiceInfo passed in if it is not unique.""" + now = currentTimeMillis() + nexttime = now + i = 0 + while i < 3: + for record in self.cache.entriesWithName(info.type): + if (record.type == _TYPE_PTR and not record.isExpired(now) and + record.alias == info.name): + if (info.name.find('.') < 0): + info.name = ("%w.[%s:%d].%s" % + (info.name, info.address, info.port, info.type)) + self.checkService(info) + return + raise NonUniqueNameException + if now < nexttime: + self.wait(nexttime - now) + now = currentTimeMillis() + 
continue + out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA) + self.debug = out + out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN)) + out.addAuthoritativeAnswer(DNSPointer(info.type, _TYPE_PTR, + _CLASS_IN, _DNS_TTL, info.name)) + self.send(out) + i += 1 + nexttime += _CHECK_TIME - def addListener(self, listener, question): - """Adds a listener for a given question. The listener will have - its updateRecord method called when information is available to - answer the question.""" - now = currentTimeMillis() - self.listeners.append(listener) - if question is not None: - for record in self.cache.entriesWithName(question.name): - if question.answeredBy(record) and not record.isExpired(now): - listener.updateRecord(self, now, record) - self.notifyAll() + def addListener(self, listener, question): + """Adds a listener for a given question. The listener will have + its updateRecord method called when information is available to + answer the question.""" + now = currentTimeMillis() + self.listeners.append(listener) + if question is not None: + for record in self.cache.entriesWithName(question.name): + if question.answeredBy(record) and not record.isExpired(now): + listener.updateRecord(self, now, record) + self.notifyAll() - def removeListener(self, listener): - """Removes a listener.""" - try: - self.listeners.remove(listener) - self.notifyAll() - except Exception: - pass + def removeListener(self, listener): + """Removes a listener.""" + try: + self.listeners.remove(listener) + self.notifyAll() + except Exception: + pass - def updateRecord(self, now, rec): - """Used to notify listeners of new information that has updated - a record.""" - for listener in self.listeners: - listener.updateRecord(self, now, rec) - self.notifyAll() + def updateRecord(self, now, rec): + """Used to notify listeners of new information that has updated + a record.""" + for listener in self.listeners: + listener.updateRecord(self, now, rec) + self.notifyAll() - def handleResponse(self, 
msg): - """Deal with incoming response packets. All answers - are held in the cache, and listeners are notified.""" - now = currentTimeMillis() - for record in msg.answers: - expired = record.isExpired(now) - if record in self.cache.entries(): - if expired: - self.cache.remove(record) - else: - entry = self.cache.get(record) - if entry is not None: - entry.resetTTL(record) - record = entry - else: - self.cache.add(record) + def handleResponse(self, msg): + """Deal with incoming response packets. All answers + are held in the cache, and listeners are notified.""" + now = currentTimeMillis() + for record in msg.answers: + expired = record.isExpired(now) + if record in self.cache.entries(): + if expired: + self.cache.remove(record) + else: + entry = self.cache.get(record) + if entry is not None: + entry.resetTTL(record) + record = entry + else: + self.cache.add(record) + + self.updateRecord(now, record) + + def handleQuery(self, msg, addr, port): + """Deal with incoming query packets. Provides a response if + possible.""" + out = None - self.updateRecord(now, record) - - def handleQuery(self, msg, addr, port): - """Deal with incoming query packets. 
Provides a response if - possible.""" - out = None + # Support unicast client responses + # + if port != _MDNS_PORT: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, 0) + for question in msg.questions: + out.addQuestion(question) - # Support unicast client responses - # - if port != _MDNS_PORT: - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, 0) - for question in msg.questions: - out.addQuestion(question) + for question in msg.questions: + if question.type == _TYPE_PTR: + if question.name == "_services._dns-sd._udp.local.": + for stype in self.servicetypes.keys(): + if out is None: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.addAnswer(msg, + DNSPointer( + "_services._dns-sd._udp.local.", + _TYPE_PTR, _CLASS_IN, + _DNS_TTL, stype)) + for service in self.services.values(): + if question.name == service.type: + if out is None: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.addAnswer(msg, DNSPointer(service.type, _TYPE_PTR, + _CLASS_IN, _DNS_TTL, service.name)) + else: + try: + if out is None: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) - for question in msg.questions: - if question.type == _TYPE_PTR: - if question.name == "_services._dns-sd._udp.local.": - for stype in self.servicetypes.keys(): - if out is None: - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) - out.addAnswer(msg, DNSPointer("_services._dns-sd._udp.local.", _TYPE_PTR, _CLASS_IN, _DNS_TTL, stype)) - for service in self.services.values(): - if question.name == service.type: - if out is None: - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) - out.addAnswer(msg, DNSPointer(service.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, service.name)) - else: - try: - if out is None: - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + # Answer A record queries for any service addresses we know + if question.type == _TYPE_A or question.type == _TYPE_ANY: + for service in self.services.values(): + if service.server == question.name.lower(): + out.addAnswer(msg, + 
DNSAddress(question.name, _TYPE_A, + _CLASS_IN | _CLASS_UNIQUE, + _DNS_TTL, service.address)) - # Answer A record queries for any service addresses we know - if question.type == _TYPE_A or question.type == _TYPE_ANY: - for service in self.services.values(): - if service.server == question.name.lower(): - out.addAnswer(msg, DNSAddress(question.name, _TYPE_A, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.address)) - - service = self.services.get(question.name.lower(), None) - if not service: continue + service = self.services.get(question.name.lower(), None) + if not service: continue - if question.type == _TYPE_SRV or question.type == _TYPE_ANY: - out.addAnswer(msg, DNSService(question.name, _TYPE_SRV, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.priority, service.weight, service.port, service.server)) - if question.type == _TYPE_TXT or question.type == _TYPE_ANY: - out.addAnswer(msg, DNSText(question.name, _TYPE_TXT, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.text)) - if question.type == _TYPE_SRV: - out.addAdditionalAnswer(DNSAddress(service.server, _TYPE_A, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.address)) - except Exception: - traceback.print_exc() - - if out is not None and out.answers: - out.id = msg.id - self.send(out, addr, port) + if (question.type == _TYPE_SRV or + question.type == _TYPE_ANY): + out.addAnswer(msg, + DNSService(question.name, _TYPE_SRV, + _CLASS_IN | _CLASS_UNIQUE, + _DNS_TTL, service.priority, + service.weight, service.port, + service.server)) + if (question.type == _TYPE_TXT or + question.type == _TYPE_ANY): + out.addAnswer(msg, DNSText(question.name, _TYPE_TXT, + _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.text)) + if question.type == _TYPE_SRV: + out.addAdditionalAnswer( + DNSAddress(service.server, _TYPE_A, + _CLASS_IN | _CLASS_UNIQUE, + _DNS_TTL, service.address)) + except Exception: + traceback.print_exc() - def send(self, out, addr = _MDNS_ADDR, port = _MDNS_PORT): - """Sends an outgoing packet.""" - # This is a quick test to 
see if we can parse the packets we generate - #temp = DNSIncoming(out.packet()) - try: - self.socket.sendto(out.packet(), 0, (addr, port)) - except Exception: - # Ignore this, it may be a temporary loss of network connection - pass + if out is not None and out.answers: + out.id = msg.id + self.send(out, addr, port) - def close(self): - """Ends the background threads, and prevent this instance from - servicing further queries.""" - if globals()['_GLOBAL_DONE'] == 0: - globals()['_GLOBAL_DONE'] = 1 - self.notifyAll() - self.engine.notify() - self.unregisterAllServices() - self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) - self.socket.close() + def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT): + """Sends an outgoing packet.""" + # This is a quick test to see if we can parse the packets we generate + #temp = DNSIncoming(out.packet()) + try: + self.socket.sendto(out.packet(), 0, (addr, port)) + except Exception: + # Ignore this, it may be a temporary loss of network connection + pass + + def close(self): + """Ends the background threads, and prevent this instance from + servicing further queries.""" + if globals()['_GLOBAL_DONE'] == 0: + globals()['_GLOBAL_DONE'] = 1 + self.notifyAll() + self.engine.notify() + self.unregisterAllServices() + self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP, + socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) + self.socket.close() # Test a few module features, including service registration, service # query (for Zoe), and service unregistration. if __name__ == '__main__': - print "Multicast DNS Service Discovery for Python, version", __version__ - r = Zeroconf() - print "1. Testing registration of a service..." - desc = {'version':'0.10','a':'test value', 'b':'another value'} - info = ServiceInfo("_http._tcp.local.", "My Service Name._http._tcp.local.", socket.inet_aton("127.0.0.1"), 1234, 0, 0, desc) - print " Registering service..." 
- r.registerService(info) - print " Registration done." - print "2. Testing query of service information..." - print " Getting ZOE service:", str(r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local.")) - print " Query done." - print "3. Testing query of own service..." - print " Getting self:", str(r.getServiceInfo("_http._tcp.local.", "My Service Name._http._tcp.local.")) - print " Query done." - print "4. Testing unregister of service information..." - r.unregisterService(info) - print " Unregister done." - r.close() - -# no-check-code + print("Multicast DNS Service Discovery for Python, version", __version__) + r = Zeroconf() + print("1. Testing registration of a service...") + desc = {'version':'0.10','a':'test value', 'b':'another value'} + info = ServiceInfo("_http._tcp.local.", + "My Service Name._http._tcp.local.", + socket.inet_aton("127.0.0.1"), 1234, 0, 0, desc) + print(" Registering service...") + r.registerService(info) + print(" Registration done.") + print("2. Testing query of service information...") + print(" Getting ZOE service:", + str(r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local."))) + print(" Query done.") + print("3. Testing query of own service...") + print(" Getting self:", + str(r.getServiceInfo("_http._tcp.local.", + "My Service Name._http._tcp.local."))) + print(" Query done.") + print("4. Testing unregister of service information...") + r.unregisterService(info) + print(" Unregister done.") + r.close()
--- a/hgext/zeroconf/__init__.py Tue Mar 29 11:54:46 2016 -0500 +++ b/hgext/zeroconf/__init__.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,7 +4,6 @@ # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. - '''discover and advertise repositories on the local network Zeroconf-enabled repositories will be announced in a network without @@ -23,13 +22,23 @@ $ hg paths zc-test = http://example.com:8000/test ''' +from __future__ import absolute_import -import socket, time, os +import os +import socket +import time -import Zeroconf -from mercurial import ui, hg, encoding, dispatch -from mercurial import extensions -from mercurial.hgweb import server as servermod +from . import Zeroconf +from mercurial import ( + dispatch, + encoding, + extensions, + hg, + ui as uimod, +) +from mercurial.hgweb import ( + server as servermod +) # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should @@ -169,6 +178,16 @@ repos += getzcpaths() return repos +def configsuboptions(orig, self, section, name, *args, **kwargs): + opt, sub = orig(self, section, name, *args, **kwargs) + if section == "paths" and name.startswith("zc-"): + # We have to find the URL in the zeroconf paths. We can't cons up any + # suboptions, so we use any that we found in the original config. 
+ for zcname, zcurl in getzcpaths(): + if zcname == name: + return zcurl, sub + return opt, sub + def defaultdest(orig, source): for name, path in getzcpaths(): if path == source: @@ -187,7 +206,8 @@ extensions.wrapfunction(dispatch, '_runcommand', cleanupafterdispatch) -extensions.wrapfunction(ui.ui, 'config', config) -extensions.wrapfunction(ui.ui, 'configitems', configitems) +extensions.wrapfunction(uimod.ui, 'config', config) +extensions.wrapfunction(uimod.ui, 'configitems', configitems) +extensions.wrapfunction(uimod.ui, 'configsuboptions', configsuboptions) extensions.wrapfunction(hg, 'defaultdest', defaultdest) extensions.wrapfunction(servermod, 'create_server', zc_create_server)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hgext3rd/__init__.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +# name space package to host third party extensions +from __future__ import absolute_import +import pkgutil +__path__ = pkgutil.extend_path(__path__, __name__)
--- a/i18n/posplit Tue Mar 29 11:54:46 2016 -0500 +++ b/i18n/posplit Sat Apr 16 18:06:48 2016 -0500 @@ -57,11 +57,13 @@ if mdirective: if not msgid[mdirective.end():].rstrip(): # only directive, nothing to translate here + delta += 2 continue directive = mdirective.group(1) if directive in ('container', 'include'): if msgid.rstrip('\n').count('\n') == 0: # only rst syntax, nothing to translate + delta += 2 continue else: # lines following directly, unexpected
--- a/mercurial/__init__.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/__init__.py Sat Apr 16 18:06:48 2016 -0500 @@ -19,11 +19,14 @@ # c - require C extensions # allow - allow pure Python implementation when C loading fails # py - only load pure Python modules -modulepolicy = '@MODULELOADPOLICY@' - +# # By default, require the C extensions for performance reasons. -if modulepolicy == '@' 'MODULELOADPOLICY' '@': - modulepolicy = 'c' +modulepolicy = 'c' +try: + from . import __modulepolicy__ + modulepolicy = __modulepolicy__.modulepolicy +except ImportError: + pass # PyPy doesn't load C extensions. # @@ -32,6 +35,11 @@ if '__pypy__' in sys.builtin_module_names: modulepolicy = 'py' +# Our C extensions aren't yet compatible with Python 3. So use pure Python +# on Python 3 for now. +if sys.version_info[0] >= 3: + modulepolicy = 'py' + # Environment variable can always force settings. modulepolicy = os.environ.get('HGMODULEPOLICY', modulepolicy)
--- a/mercurial/archival.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/archival.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,7 +7,6 @@ from __future__ import absolute_import -import cStringIO import gzip import os import struct @@ -26,6 +25,7 @@ scmutil, util, ) +stringio = util.stringio # from unzip source code: _UNX_IFREG = 0x8000 @@ -172,7 +172,7 @@ i.size = 0 else: i.mode = mode - data = cStringIO.StringIO(data) + data = stringio(data) self.z.addfile(i, data) def done(self): @@ -331,7 +331,7 @@ if subrepos: for subpath in sorted(ctx.substate): sub = ctx.workingsub(subpath) - submatch = matchmod.narrowmatcher(subpath, matchfn) + submatch = matchmod.subdirmatcher(subpath, matchfn) total += sub.archive(archiver, prefix, submatch) if total == 0:
--- a/mercurial/bookmarks.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/bookmarks.py Sat Apr 16 18:06:48 2016 -0500 @@ -182,6 +182,11 @@ fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name))) self._clean = True + def expandname(self, bname): + if bname == '.': + return self.active + return bname + def _readactive(repo, marks): """ Get the active bookmark. We can have an active bookmark that updates
--- a/mercurial/branchmap.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/branchmap.py Sat Apr 16 18:06:48 2016 -0500 @@ -55,6 +55,7 @@ if not partial.validfor(repo): # invalidate the cache raise ValueError('tip differs') + cl = repo.changelog for l in lines: if not l: continue @@ -62,9 +63,9 @@ if state not in 'oc': raise ValueError('invalid branch state') label = encoding.tolocal(label.strip()) - if not node in repo: - raise ValueError('node %s does not exist' % node) node = bin(node) + if not cl.hasnode(node): + raise ValueError('node %s does not exist' % hex(node)) partial.setdefault(label, []).append(node) if state == 'c': partial._closednodes.add(node) @@ -382,6 +383,15 @@ self._rbcnamescount = len(self._names) # number of good names on disk self._namesreverse = dict((b, r) for r, b in enumerate(self._names)) + def _clear(self): + self._rbcsnameslen = 0 + del self._names[:] + self._rbcnamescount = 0 + self._namesreverse.clear() + self._rbcrevslen = len(self._repo.changelog) + self._rbcrevs = array('c') + self._rbcrevs.fromstring('\0' * (self._rbcrevslen * _rbcrecsize)) + def branchinfo(self, rev): """Return branch name and close flag for rev, using and updating persistent cache.""" @@ -407,7 +417,11 @@ if cachenode == '\0\0\0\0': pass elif cachenode == reponode: - return self._names[branchidx], close + if branchidx < self._rbcnamescount: + return self._names[branchidx], close + # referenced branch doesn't exist - rebuild is expensive but needed + self._repo.ui.debug("rebuilding corrupted revision branch cache\n") + self._clear() else: # rev/node map has changed, invalidate the cache from here up truncate = rbcrevidx + _rbcrecsize @@ -460,6 +474,8 @@ self._rbcnamescount = 0 self._rbcrevslen = 0 if self._rbcnamescount == 0: + # before rewriting names, make sure references are removed + repo.vfs.unlinkpath(_rbcrevs, ignoremissing=True) f = repo.vfs.open(_rbcnames, 'wb') f.write('\0'.join(encoding.fromlocal(b) for b in self._names[self._rbcnamescount:])) @@ 
-479,6 +495,9 @@ if f.tell() != start: repo.ui.debug("truncating %s to %s\n" % (_rbcrevs, start)) f.seek(start) + if f.tell() != start: + start = 0 + f.seek(start) f.truncate() end = revs * _rbcrecsize f.write(self._rbcrevs[start:end])
--- a/mercurial/bundle2.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/bundle2.py Sat Apr 16 18:06:48 2016 -0500 @@ -152,7 +152,6 @@ import string import struct import sys -import urllib from .i18n import _ from . import ( @@ -165,6 +164,9 @@ util, ) +urlerr = util.urlerr +urlreq = util.urlreq + _pack = struct.pack _unpack = struct.unpack @@ -457,8 +459,8 @@ else: key, vals = line.split('=', 1) vals = vals.split(',') - key = urllib.unquote(key) - vals = [urllib.unquote(v) for v in vals] + key = urlreq.unquote(key) + vals = [urlreq.unquote(v) for v in vals] caps[key] = vals return caps @@ -467,13 +469,26 @@ chunks = [] for ca in sorted(caps): vals = caps[ca] - ca = urllib.quote(ca) - vals = [urllib.quote(v) for v in vals] + ca = urlreq.quote(ca) + vals = [urlreq.quote(v) for v in vals] if vals: ca = "%s=%s" % (ca, ','.join(vals)) chunks.append(ca) return '\n'.join(chunks) +bundletypes = { + "": ("", None), # only when using unbundle on ssh and old http servers + # since the unification ssh accepts a header but there + # is no capability signaling it. 
+ "HG20": (), # special-cased below + "HG10UN": ("HG10UN", None), + "HG10BZ": ("HG10", 'BZ'), + "HG10GZ": ("HG10GZ", 'GZ'), +} + +# hgweb uses this list to communicate its preferred type +bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN'] + class bundle20(object): """represent an outgoing bundle2 container @@ -557,9 +572,9 @@ """return a encoded version of all stream parameters""" blocks = [] for par, value in self._params: - par = urllib.quote(par) + par = urlreq.quote(par) if value is not None: - value = urllib.quote(value) + value = urlreq.quote(value) par = '%s=%s' % (par, value) blocks.append(par) return ' '.join(blocks) @@ -678,7 +693,7 @@ params = {} for p in paramsblock.split(' '): p = p.split('=', 1) - p = [urllib.unquote(i) for i in p] + p = [urlreq.unquote(i) for i in p] if len(p) < 2: p.append(None) self._processparam(*p) @@ -1256,7 +1271,7 @@ raw = remote.capable('bundle2') if not raw and raw != '': return {} - capsblob = urllib.unquote(remote.capable('bundle2')) + capsblob = urlreq.unquote(remote.capable('bundle2')) return decodecaps(capsblob) def obsmarkersversion(caps): @@ -1265,6 +1280,44 @@ obscaps = caps.get('obsmarkers', ()) return [int(c[1:]) for c in obscaps if c.startswith('V')] +def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None): + """Write a bundle file and return its filename. + + Existing files will not be overwritten. + If no filename is specified, a temporary file is created. + bz2 compression can be turned off. + The bundle file will be deleted in case of errors. 
+ """ + + if bundletype == "HG20": + bundle = bundle20(ui) + bundle.setcompression(compression) + part = bundle.newpart('changegroup', data=cg.getchunks()) + part.addparam('version', cg.version) + chunkiter = bundle.getchunks() + else: + # compression argument is only for the bundle2 case + assert compression is None + if cg.version != '01': + raise error.Abort(_('old bundle types only supports v1 ' + 'changegroups')) + header, comp = bundletypes[bundletype] + if comp not in util.compressors: + raise error.Abort(_('unknown stream compression type: %s') + % comp) + z = util.compressors[comp]() + subchunkiter = cg.getchunks() + def chunkiter(): + yield header + for chunk in subchunkiter: + yield z.compress(chunk) + yield z.flush() + chunkiter = chunkiter() + + # parse the changegroup data, otherwise we will block + # in case of sshrepo because we don't know the end of the stream + return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs) + @parthandler('changegroup', ('version', 'nbchanges', 'treemanifest')) def handlechangegroup(op, inpart): """apply a changegroup part on the repo
--- a/mercurial/bundlerepo.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/bundlerepo.py Sat Apr 16 18:06:48 2016 -0500 @@ -32,6 +32,7 @@ localrepo, manifest, mdiff, + node as nodemod, pathutil, phases, revlog, @@ -385,6 +386,16 @@ def getcwd(self): return os.getcwd() # always outside the repo + # Check if parents exist in localrepo before setting + def setparents(self, p1, p2=nullid): + p1rev = self.changelog.rev(p1) + p2rev = self.changelog.rev(p2) + msg = _("setting parent to node %s that only exists in the bundle\n") + if self.changelog.repotiprev < p1rev: + self.ui.warn(msg % nodemod.hex(p1)) + if self.changelog.repotiprev < p2rev: + self.ui.warn(msg % nodemod.hex(p2)) + return super(bundlerepository, self).setparents(p1, p2) def instance(ui, path, create): if create: @@ -494,7 +505,7 @@ bundletype = "HG10BZ" else: bundletype = "HG10UN" - fname = bundle = changegroup.writebundle(ui, cg, bundlename, + fname = bundle = bundle2.writebundle(ui, cg, bundlename, bundletype) # keep written bundle? if bundlename:
--- a/mercurial/byterange.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/byterange.py Sat Apr 16 18:06:48 2016 -0500 @@ -26,22 +26,27 @@ import re import socket import stat -import urllib -import urllib2 + +from . import ( + util, +) + +urlerr = util.urlerr +urlreq = util.urlreq -addclosehook = urllib.addclosehook -addinfourl = urllib.addinfourl -splitattr = urllib.splitattr -splitpasswd = urllib.splitpasswd -splitport = urllib.splitport -splituser = urllib.splituser -unquote = urllib.unquote +addclosehook = urlreq.addclosehook +addinfourl = urlreq.addinfourl +splitattr = urlreq.splitattr +splitpasswd = urlreq.splitpasswd +splitport = urlreq.splitport +splituser = urlreq.splituser +unquote = urlreq.unquote class RangeError(IOError): """Error raised when an unsatisfiable range is requested.""" pass -class HTTPRangeHandler(urllib2.BaseHandler): +class HTTPRangeHandler(urlreq.basehandler): """Handler that enables HTTP Range headers. This was extremely simple. The Range header is a HTTP feature to @@ -54,20 +59,20 @@ import byterange range_handler = range.HTTPRangeHandler() - opener = urllib2.build_opener(range_handler) + opener = urlreq.buildopener(range_handler) # install it - urllib2.install_opener(opener) + urlreq.installopener(opener) # create Request and set Range header - req = urllib2.Request('http://www.python.org/') + req = urlreq.request('http://www.python.org/') req.header['Range'] = 'bytes=30-50' - f = urllib2.urlopen(req) + f = urlreq.urlopen(req) """ def http_error_206(self, req, fp, code, msg, hdrs): # 206 Partial Content Response - r = urllib.addinfourl(fp, hdrs, req.get_full_url()) + r = urlreq.addinfourl(fp, hdrs, req.get_full_url()) r.code = code r.msg = msg return r @@ -204,7 +209,7 @@ raise RangeError('Requested Range Not Satisfiable') pos += bufsize -class FileRangeHandler(urllib2.FileHandler): +class FileRangeHandler(urlreq.filehandler): """FileHandler subclass that adds Range support. 
This class handles Range headers exactly like an HTTP server would. @@ -212,15 +217,15 @@ def open_local_file(self, req): host = req.get_host() file = req.get_selector() - localfile = urllib.url2pathname(file) + localfile = urlreq.url2pathname(file) stats = os.stat(localfile) size = stats[stat.ST_SIZE] modified = email.Utils.formatdate(stats[stat.ST_MTIME]) mtype = mimetypes.guess_type(file)[0] if host: - host, port = urllib.splitport(host) + host, port = urlreq.splitport(host) if port or socket.gethostbyname(host) not in self.get_names(): - raise urllib2.URLError('file not on local host') + raise urlerr.urlerror('file not on local host') fo = open(localfile,'rb') brange = req.headers.get('Range', None) brange = range_header_to_tuple(brange) @@ -236,7 +241,7 @@ headers = email.message_from_string( 'Content-Type: %s\nContent-Length: %d\nLast-Modified: %s\n' % (mtype or 'text/plain', size, modified)) - return urllib.addinfourl(fo, headers, 'file:'+file) + return urlreq.addinfourl(fo, headers, 'file:'+file) # FTP Range Support @@ -246,7 +251,7 @@ # follows: # -- range support modifications start/end here -class FTPRangeHandler(urllib2.FTPHandler): +class FTPRangeHandler(urlreq.ftphandler): def ftp_open(self, req): host = req.get_host() if not host: @@ -270,7 +275,7 @@ try: host = socket.gethostbyname(host) except socket.error as msg: - raise urllib2.URLError(msg) + raise urlerr.urlerror(msg) path, attrs = splitattr(req.get_selector()) dirs = path.split('/') dirs = map(unquote, dirs) @@ -334,7 +339,7 @@ fw = ftpwrapper(user, passwd, host, port, dirs) return fw -class ftpwrapper(urllib.ftpwrapper): +class ftpwrapper(urlreq.ftpwrapper): # range support note: # this ftpwrapper code is copied directly from # urllib. The only enhancement is to add the rest
--- a/mercurial/changegroup.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/changegroup.py Sat Apr 16 18:06:48 2016 -0500 @@ -80,19 +80,6 @@ result = -1 + changedheads return result -bundletypes = { - "": ("", None), # only when using unbundle on ssh and old http servers - # since the unification ssh accepts a header but there - # is no capability signaling it. - "HG20": (), # special-cased below - "HG10UN": ("HG10UN", None), - "HG10BZ": ("HG10", 'BZ'), - "HG10GZ": ("HG10GZ", 'GZ'), -} - -# hgweb uses this list to communicate its preferred type -bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN'] - def writechunks(ui, chunks, filename, vfs=None): """Write chunks to a file and return its filename. @@ -125,49 +112,6 @@ else: os.unlink(cleanup) -def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None): - """Write a bundle file and return its filename. - - Existing files will not be overwritten. - If no filename is specified, a temporary file is created. - bz2 compression can be turned off. - The bundle file will be deleted in case of errors. - """ - - if bundletype == "HG20": - from . 
import bundle2 - bundle = bundle2.bundle20(ui) - bundle.setcompression(compression) - part = bundle.newpart('changegroup', data=cg.getchunks()) - part.addparam('version', cg.version) - chunkiter = bundle.getchunks() - else: - # compression argument is only for the bundle2 case - assert compression is None - if cg.version != '01': - raise error.Abort(_('old bundle types only supports v1 ' - 'changegroups')) - header, comp = bundletypes[bundletype] - if comp not in util.compressors: - raise error.Abort(_('unknown stream compression type: %s') - % comp) - z = util.compressors[comp]() - subchunkiter = cg.getchunks() - def chunkiter(): - yield header - for chunk in subchunkiter: - yield z.compress(chunk) - yield z.flush() - chunkiter = chunkiter() - - # parse the changegroup data, otherwise we will block - # in case of sshrepo because we don't know the end of the stream - - # an empty chunkgroup is the end of the changegroup - # a changegroup has at least 2 chunkgroups (changelog and manifest). - # after that, an empty chunkgroup is the end of the changegroup - return writechunks(ui, chunkiter, filename, vfs=vfs) - class cg1unpacker(object): """Unpacker for cg1 changegroup streams. 
@@ -306,6 +250,7 @@ self.manifestheader() repo.manifest.addgroup(self, revmap, trp) repo.ui.progress(_('manifests'), None) + self.callback = None def apply(self, repo, srctype, url, emptyok=False, targetphase=phases.draft, expectedtotal=None): @@ -363,7 +308,7 @@ efiles = set() def onchangelog(cl, node): - efiles.update(cl.read(node)[3]) + efiles.update(cl.readfiles(node)) self.changelogheader() srccontent = cl.addgroup(self, csmap, trp, @@ -375,6 +320,7 @@ clend = len(cl) changesets = clend - clstart repo.ui.progress(_('changesets'), None) + self.callback = None # pull off the manifest group repo.ui.status(_("adding manifests\n")) @@ -393,10 +339,8 @@ # process the files repo.ui.status(_("adding file changes\n")) - self.callback = None - pr = prog(_('files'), efiles) newrevs, newfiles = _addchangegroupfiles( - repo, self, revmap, trp, pr, needfiles) + repo, self, revmap, trp, efiles, needfiles) revisions += newrevs files += newfiles @@ -553,27 +497,6 @@ return d return readexactly(self._fh, n) -def _moddirs(files): - """Given a set of modified files, find the list of modified directories. - - This returns a list of (path to changed dir, changed dir) tuples, - as that's what the one client needs anyway. 
- - >>> _moddirs(['a/b/c.py', 'a/b/c.txt', 'a/d/e/f/g.txt', 'i.txt', ]) - [('/', 'a/'), ('a/', 'b/'), ('a/', 'd/'), ('a/d/', 'e/'), ('a/d/e/', 'f/')] - - """ - alldirs = set() - for f in files: - path = f.split('/')[:-1] - for i in xrange(len(path) - 1, -1, -1): - dn = '/'.join(path[:i]) - current = dn + '/', path[i] + '/' - if current in alldirs: - break - alldirs.add(current) - return sorted(alldirs) - class cg1packer(object): deltaheader = _CHANGEGROUPV1_DELTA_HEADER version = '01' @@ -659,33 +582,25 @@ rr, rl = revlog.rev, revlog.linkrev return [n for n in missing if rl(rr(n)) not in commonrevs] - def _packmanifests(self, mfnodes, tmfnodes, lookuplinknode): + def _packmanifests(self, dir, mfnodes, lookuplinknode): """Pack flat manifests into a changegroup stream.""" - ml = self._repo.manifest - size = 0 - for chunk in self.group( - mfnodes, ml, lookuplinknode, units=_('manifests')): - size += len(chunk) + assert not dir + for chunk in self.group(mfnodes, self._repo.manifest, + lookuplinknode, units=_('manifests')): yield chunk - self._verbosenote(_('%8.i (manifests)\n') % size) - # It looks odd to assert this here, but tmfnodes doesn't get - # filled in until after we've called lookuplinknode for - # sending root manifests, so the only way to tell the streams - # got crossed is to check after we've done all the work. - assert not tmfnodes + + def _manifestsdone(self): + return '' def generate(self, commonrevs, clnodes, fastpathlinkrev, source): '''yield a sequence of changegroup chunks (strings)''' repo = self._repo cl = repo.changelog - ml = repo.manifest clrevorder = {} mfs = {} # needed manifests - tmfnodes = {} fnodes = {} # needed file nodes - # maps manifest node id -> set(changed files) - mfchangedfiles = {} + changedfiles = set() # Callback for the changelog, used to collect changed files and manifest # nodes. @@ -698,7 +613,7 @@ mfs.setdefault(n, x) # Record a complete list of potentially-changed files in # this manifest. 
- mfchangedfiles.setdefault(n, set()).update(c[3]) + changedfiles.update(c[3]) return x self._verbosenote(_('uncompressed size of bundle content:\n')) @@ -729,12 +644,47 @@ # send along with files. This could probably be fixed. fastpathlinkrev = fastpathlinkrev and ( 'treemanifest' not in repo.requirements) + + for chunk in self.generatemanifests(commonrevs, clrevorder, + fastpathlinkrev, mfs, fnodes): + yield chunk + mfs.clear() + clrevs = set(cl.rev(x) for x in clnodes) + + if not fastpathlinkrev: + def linknodes(unused, fname): + return fnodes.get(fname, {}) + else: + cln = cl.node + def linknodes(filerevlog, fname): + llr = filerevlog.linkrev + fln = filerevlog.node + revs = ((r, llr(r)) for r in filerevlog) + return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs) + + for chunk in self.generatefiles(changedfiles, linknodes, commonrevs, + source): + yield chunk + + yield self.close() + + if clnodes: + repo.hook('outgoing', node=hex(clnodes[0]), source=source) + + def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev, mfs, + fnodes): + repo = self._repo + dirlog = repo.manifest.dirlog + tmfnodes = {'': mfs} + # Callback for the manifest, used to collect linkrevs for filelog # revisions. # Returns the linkrev node (collected in lookupcl). - if fastpathlinkrev: - lookupmflinknode = mfs.__getitem__ - else: + def makelookupmflinknode(dir): + if fastpathlinkrev: + assert not dir + return mfs.__getitem__ + def lookupmflinknode(x): """Callback for looking up the linknode for manifests. @@ -751,75 +701,36 @@ the client before you can trust the list of files and treemanifests to send. """ - clnode = mfs[x] - # We no longer actually care about reading deltas of - # the manifest here, because we already know the list - # of changed files, so for treemanifests (which - # lazily-load anyway to *generate* a readdelta) we can - # just load them with read() and then we'll actually - # be able to correctly load node IDs from the - # submanifest entries. 
- if 'treemanifest' in repo.requirements: - mdata = ml.read(x) - else: - mdata = ml.readfast(x) - for f in mfchangedfiles[x]: - try: - n = mdata[f] - except KeyError: - continue - # record the first changeset introducing this filelog - # version - fclnodes = fnodes.setdefault(f, {}) - fclnode = fclnodes.setdefault(n, clnode) - if clrevorder[clnode] < clrevorder[fclnode]: - fclnodes[n] = clnode - # gather list of changed treemanifest nodes - if 'treemanifest' in repo.requirements: - submfs = {'/': mdata} - for dn, bn in _moddirs(mfchangedfiles[x]): - try: - submf = submfs[dn] - submf = submf._dirs[bn] - except KeyError: - continue # deleted directory, so nothing to send - submfs[submf.dir()] = submf - tmfclnodes = tmfnodes.setdefault(submf.dir(), {}) - tmfclnode = tmfclnodes.setdefault(submf._node, clnode) + clnode = tmfnodes[dir][x] + mdata = dirlog(dir).readshallowfast(x) + for p, n, fl in mdata.iterentries(): + if fl == 't': # subdirectory manifest + subdir = dir + p + '/' + tmfclnodes = tmfnodes.setdefault(subdir, {}) + tmfclnode = tmfclnodes.setdefault(n, clnode) if clrevorder[clnode] < clrevorder[tmfclnode]: tmfclnodes[n] = clnode + else: + f = dir + p + fclnodes = fnodes.setdefault(f, {}) + fclnode = fclnodes.setdefault(n, clnode) + if clrevorder[clnode] < clrevorder[fclnode]: + fclnodes[n] = clnode return clnode - - mfnodes = self.prune(ml, mfs, commonrevs) - for x in self._packmanifests( - mfnodes, tmfnodes, lookupmflinknode): - yield x - - mfs.clear() - clrevs = set(cl.rev(x) for x in clnodes) + return lookupmflinknode - if not fastpathlinkrev: - def linknodes(unused, fname): - return fnodes.get(fname, {}) - else: - cln = cl.node - def linknodes(filerevlog, fname): - llr = filerevlog.linkrev - fln = filerevlog.node - revs = ((r, llr(r)) for r in filerevlog) - return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs) - - changedfiles = set() - for x in mfchangedfiles.itervalues(): - changedfiles.update(x) - for chunk in 
self.generatefiles(changedfiles, linknodes, commonrevs, - source): - yield chunk - - yield self.close() - - if clnodes: - repo.hook('outgoing', node=hex(clnodes[0]), source=source) + size = 0 + while tmfnodes: + dir = min(tmfnodes) + nodes = tmfnodes[dir] + prunednodes = self.prune(dirlog(dir), nodes, commonrevs) + for x in self._packmanifests(dir, prunednodes, + makelookupmflinknode(dir)): + size += len(x) + yield x + del tmfnodes[dir] + self._verbosenote(_('%8.i (manifests)\n') % size) + yield self._manifestsdone() # The 'source' parameter is useful for extensions def generatefiles(self, changedfiles, linknodes, commonrevs, source): @@ -920,23 +831,15 @@ version = '03' deltaheader = _CHANGEGROUPV3_DELTA_HEADER - def _packmanifests(self, mfnodes, tmfnodes, lookuplinknode): - # Note that debug prints are super confusing in this code, as - # tmfnodes gets populated by the calls to lookuplinknode in - # the superclass's manifest packer. In the future we should - # probably see if we can refactor this somehow to be less - # confusing. - for x in super(cg3packer, self)._packmanifests( - mfnodes, {}, lookuplinknode): - yield x - dirlog = self._repo.manifest.dirlog - for name, nodes in tmfnodes.iteritems(): - # For now, directory headers are simply file headers with - # a trailing '/' on the path (already in the name). 
- yield self.fileheader(name) - for chunk in self.group(nodes, dirlog(name), nodes.get): - yield chunk - yield self.close() + def _packmanifests(self, dir, mfnodes, lookuplinknode): + if dir: + yield self.fileheader(dir) + for chunk in self.group(mfnodes, self._repo.manifest.dirlog(dir), + lookuplinknode, units=_('manifests')): + yield chunk + + def _manifestsdone(self): + return self.close() def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags): return struct.pack( @@ -1109,16 +1012,18 @@ # to avoid a race we use changegroupsubset() (issue1320) return changegroupsubset(repo, basenodes, repo.heads(), source) -def _addchangegroupfiles(repo, source, revmap, trp, pr, needfiles): +def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles): revisions = 0 files = 0 while True: chunkdata = source.filelogheader() if not chunkdata: break + files += 1 f = chunkdata["filename"] repo.ui.debug("adding %s revisions\n" % f) - pr() + repo.ui.progress(_('files'), files, unit=_('files'), + total=expectedfiles) fl = repo.file(f) o = len(fl) try: @@ -1127,7 +1032,6 @@ except error.CensoredBaseError as e: raise error.Abort(_("received delta base is censored: %s") % e) revisions += len(fl) - o - files += 1 if f in needfiles: needs = needfiles[f] for new in xrange(o, len(fl)):
--- a/mercurial/changelog.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/changelog.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,6 +7,8 @@ from __future__ import absolute_import +import collections + from .i18n import _ from .node import ( bin, @@ -136,6 +138,122 @@ return appender(opener, name, mode, buf) return _delay +_changelogrevision = collections.namedtuple('changelogrevision', + ('manifest', 'user', 'date', + 'files', 'description', 'extra')) + +class changelogrevision(object): + """Holds results of a parsed changelog revision. + + Changelog revisions consist of multiple pieces of data, including + the manifest node, user, and date. This object exposes a view into + the parsed object. + """ + + __slots__ = ( + '_offsets', + '_text', + ) + + def __new__(cls, text): + if not text: + return _changelogrevision( + manifest=nullid, + user='', + date=(0, 0), + files=[], + description='', + extra=_defaultextra, + ) + + self = super(changelogrevision, cls).__new__(cls) + # We could return here and implement the following as an __init__. + # But doing it here is equivalent and saves an extra function call. + + # format used: + # nodeid\n : manifest node in ascii + # user\n : user, no \n or \r allowed + # time tz extra\n : date (time is int or float, timezone is int) + # : extra is metadata, encoded and separated by '\0' + # : older versions ignore it + # files\n\n : files modified by the cset, no \n or \r allowed + # (.*) : comment (free text, ideally utf-8) + # + # changelog v0 doesn't use extra + + nl1 = text.index('\n') + nl2 = text.index('\n', nl1 + 1) + nl3 = text.index('\n', nl2 + 1) + + # The list of files may be empty. Which means nl3 is the first of the + # double newline that precedes the description. 
+ if text[nl3 + 1] == '\n': + doublenl = nl3 + else: + doublenl = text.index('\n\n', nl3 + 1) + + self._offsets = (nl1, nl2, nl3, doublenl) + self._text = text + + return self + + @property + def manifest(self): + return bin(self._text[0:self._offsets[0]]) + + @property + def user(self): + off = self._offsets + return encoding.tolocal(self._text[off[0] + 1:off[1]]) + + @property + def _rawdate(self): + off = self._offsets + dateextra = self._text[off[1] + 1:off[2]] + return dateextra.split(' ', 2)[0:2] + + @property + def _rawextra(self): + off = self._offsets + dateextra = self._text[off[1] + 1:off[2]] + fields = dateextra.split(' ', 2) + if len(fields) != 3: + return None + + return fields[2] + + @property + def date(self): + raw = self._rawdate + time = float(raw[0]) + # Various tools did silly things with the timezone. + try: + timezone = int(raw[1]) + except ValueError: + timezone = 0 + + return time, timezone + + @property + def extra(self): + raw = self._rawextra + if raw is None: + return _defaultextra + + return decodeextra(raw) + + @property + def files(self): + off = self._offsets + if off[2] == off[3]: + return [] + + return self._text[off[2] + 1:off[3]].split('\n') + + @property + def description(self): + return encoding.tolocal(self._text[self._offsets[3] + 2:]) + class changelog(revlog.revlog): def __init__(self, opener): revlog.revlog.__init__(self, opener, "00changelog.i") @@ -323,42 +441,34 @@ revlog.revlog.checkinlinesize(self, tr, fp) def read(self, node): - """ - format used: - nodeid\n : manifest node in ascii - user\n : user, no \n or \r allowed - time tz extra\n : date (time is int or float, timezone is int) - : extra is metadata, encoded and separated by '\0' - : older versions ignore it - files\n\n : files modified by the cset, no \n or \r allowed - (.*) : comment (free text, ideally utf-8) + """Obtain data from a parsed changelog revision. 
+ + Returns a 6-tuple of: - changelog v0 doesn't use extra + - manifest node in binary + - author/user as a localstr + - date as a 2-tuple of (time, timezone) + - list of files + - commit message as a localstr + - dict of extra metadata + + Unless you need to access all fields, consider calling + ``changelogrevision`` instead, as it is faster for partial object + access. """ - text = self.revision(node) - if not text: - return (nullid, "", (0, 0), [], "", _defaultextra) - last = text.index("\n\n") - desc = encoding.tolocal(text[last + 2:]) - l = text[:last].split('\n') - manifest = bin(l[0]) - user = encoding.tolocal(l[1]) + c = changelogrevision(self.revision(node)) + return ( + c.manifest, + c.user, + c.date, + c.files, + c.description, + c.extra + ) - tdata = l[2].split(' ', 2) - if len(tdata) != 3: - time = float(tdata[0]) - try: - # various tools did silly things with the time zone field. - timezone = int(tdata[1]) - except ValueError: - timezone = 0 - extra = _defaultextra - else: - time, timezone = float(tdata[0]), int(tdata[1]) - extra = decodeextra(tdata[2]) - - files = l[3:] - return (manifest, user, (time, timezone), files, desc, extra) + def changelogrevision(self, nodeorrev): + """Obtain a ``changelogrevision`` for a node or revision.""" + return changelogrevision(self.revision(nodeorrev)) def readfiles(self, node): """
--- a/mercurial/cmdutil.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/cmdutil.py Sat Apr 16 18:06:48 2016 -0500 @@ -5,18 +5,47 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from node import hex, bin, nullid, nullrev, short -from i18n import _ -import os, sys, errno, re, tempfile, cStringIO -import util, scmutil, templater, patch, error, templatekw, revlog, copies -import match as matchmod -import repair, graphmod, revset, phases, obsolete, pathutil -import changelog -import bookmarks -import encoding -import formatter -import crecord as crecordmod -import lock as lockmod +from __future__ import absolute_import + +import errno +import os +import re +import sys +import tempfile + +from .i18n import _ +from .node import ( + bin, + hex, + nullid, + nullrev, + short, +) + +from . import ( + bookmarks, + changelog, + copies, + crecord as crecordmod, + encoding, + error, + formatter, + graphmod, + lock as lockmod, + match as matchmod, + obsolete, + patch, + pathutil, + phases, + repair, + revlog, + revset, + scmutil, + templatekw, + templater, + util, +) +stringio = util.stringio def ishunk(x): hunkclasses = (crecordmod.uihunk, patch.recordhunk) @@ -78,8 +107,7 @@ def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts): - import merge as mergemod - + from . 
import merge as mergemod if not ui.interactive(): if cmdsuggest: msg = _('running non-interactively, use %s instead') % cmdsuggest @@ -107,12 +135,24 @@ """ checkunfinished(repo, commit=True) - merge = len(repo[None].parents()) > 1 + wctx = repo[None] + merge = len(wctx.parents()) > 1 if merge: raise error.Abort(_('cannot partially commit a merge ' '(use "hg commit" instead)')) + def fail(f, msg): + raise error.Abort('%s: %s' % (f, msg)) + + force = opts.get('force') + if not force: + vdirs = [] + match.explicitdir = vdirs.append + match.bad = fail + status = repo.status(match=match) + if not force: + repo.checkcommitpatterns(wctx, vdirs, match, status, fail) diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True) diffopts.nodates = True diffopts.git = True @@ -120,7 +160,7 @@ originaldiff = patch.diff(repo, changes=status, opts=diffopts) originalchunks = patch.parsepatch(originaldiff) - # 1. filter patch, so we have intending-to apply subset of it + # 1. filter patch, since we are intending to apply subset of it try: chunks, newopts = filterfn(ui, originalchunks) except patch.PatchError as err: @@ -171,7 +211,7 @@ util.copyfile(repo.wjoin(f), tmpname, copystat=True) backups[f] = tmpname - fp = cStringIO.StringIO() + fp = stringio() for c in chunks: fname = c.filename() if fname in backups: @@ -179,6 +219,17 @@ dopatch = fp.tell() fp.seek(0) + # 2.5 optionally review / modify patch in text editor + if opts.get('review', False): + patchtext = (crecordmod.diffhelptext + + crecordmod.patchhelptext + + fp.read()) + reviewedpatch = ui.edit(patchtext, "", + extra={"suffix": ".diff"}) + fp.truncate(0) + fp.write(reviewedpatch) + fp.seek(0) + [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles] # 3a. 
apply filtered patch to clean repo (clean) if backups: @@ -758,14 +809,14 @@ fp.write(str(pid) + '\n') fp.close() - if opts['daemon'] and not opts['daemon_pipefds']: + if opts['daemon'] and not opts['daemon_postexec']: # Signal child process startup with file removal lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-') os.close(lockfd) try: if not runargs: runargs = util.hgcmd() + sys.argv[1:] - runargs.append('--daemon-pipefds=%s' % lockpath) + runargs.append('--daemon-postexec=unlink:%s' % lockpath) # Don't pass --cwd to the child process, because we've already # changed directory. for i in xrange(1, len(runargs)): @@ -796,15 +847,22 @@ initfn() if not opts['daemon']: - writepid(os.getpid()) - - if opts['daemon_pipefds']: - lockpath = opts['daemon_pipefds'] + writepid(util.getpid()) + + if opts['daemon_postexec']: try: os.setsid() except AttributeError: pass - os.unlink(lockpath) + for inst in opts['daemon_postexec']: + if inst.startswith('unlink:'): + lockpath = inst[7:] + os.unlink(lockpath) + elif inst.startswith('chdir:'): + os.chdir(inst[6:]) + elif inst != 'none': + raise error.Abort(_('invalid value for --daemon-postexec: %s') + % inst) util.hidewindow() sys.stdout.flush() sys.stderr.flush() @@ -863,7 +921,7 @@ updatefunc(<repo>, <node>) """ # avoid cycle context -> subrepo -> cmdutil - import context + from . import context extractdata = patch.extract(ui, hunk) tmpname = extractdata.get('filename') message = extractdata.get('message') @@ -1142,7 +1200,7 @@ # node2 (inclusive). Thus, ctx2's substate won't contain that # subpath. The best we can do is to ignore it. 
tempnode2 = None - submatch = matchmod.narrowmatcher(subpath, match) + submatch = matchmod.subdirmatcher(subpath, match) sub.diff(ui, diffopts, tempnode2, submatch, changes=changes, stat=stat, fp=fp, prefix=prefix) @@ -1217,10 +1275,10 @@ self.ui.write(_("branch: %s\n") % branch, label='log.branch') - for name, ns in self.repo.names.iteritems(): + for nsname, ns in self.repo.names.iteritems(): # branches has special logic already handled above, so here we just # skip it - if name == 'branches': + if nsname == 'branches': continue # we will use the templatename as the color name since those two # should be the same @@ -1420,6 +1478,7 @@ def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered): changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered) formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12]) + filters = {'formatnode': formatnode} defaulttempl = { 'parent': '{rev}:{node|formatnode} ', 'manifest': '{rev}:{node|formatnode}', @@ -1428,10 +1487,14 @@ } # filecopy is preserved for compatibility reasons defaulttempl['filecopy'] = defaulttempl['file_copy'] - self.t = templater.templater(mapfile, {'formatnode': formatnode}, - cache=defaulttempl) - if tmpl: - self.t.cache['changeset'] = tmpl + assert not (tmpl and mapfile) + if mapfile: + self.t = templater.templater.frommapfile(mapfile, filters=filters, + cache=defaulttempl) + else: + self.t = formatter.maketemplater(ui, 'changeset', tmpl, + filters=filters, + cache=defaulttempl) self.cache = {} @@ -1470,34 +1533,29 @@ props['templ'] = self.t props['ctx'] = ctx props['repo'] = self.repo + props['ui'] = self.repo.ui props['revcache'] = {'copies': copies} props['cache'] = self.cache - try: - # write header - if self._parts['header']: - h = templater.stringify(self.t(self._parts['header'], **props)) - if self.buffered: - self.header[ctx.rev()] = h - else: - if self.lastheader != h: - self.lastheader = h - self.ui.write(h) - - # write changeset metadata, then patch if 
requested - key = self._parts['changeset'] - self.ui.write(templater.stringify(self.t(key, **props))) - self.showpatch(ctx, matchfn) - - if self._parts['footer']: - if not self.footer: - self.footer = templater.stringify( - self.t(self._parts['footer'], **props)) - except KeyError as inst: - msg = _("%s: no key named '%s'") - raise error.Abort(msg % (self.t.mapfile, inst.args[0])) - except SyntaxError as inst: - raise error.Abort('%s: %s' % (self.t.mapfile, inst.args[0])) + # write header + if self._parts['header']: + h = templater.stringify(self.t(self._parts['header'], **props)) + if self.buffered: + self.header[ctx.rev()] = h + else: + if self.lastheader != h: + self.lastheader = h + self.ui.write(h) + + # write changeset metadata, then patch if requested + key = self._parts['changeset'] + self.ui.write(templater.stringify(self.t(key, **props))) + self.showpatch(ctx, matchfn) + + if self._parts['footer']: + if not self.footer: + self.footer = templater.stringify( + self.t(self._parts['footer'], **props)) def gettemplate(ui, tmpl, style): """ @@ -1508,11 +1566,7 @@ if not tmpl and not style: # template are stronger than style tmpl = ui.config('ui', 'logtemplate') if tmpl: - try: - tmpl = templater.unquotestring(tmpl) - except SyntaxError: - pass - return tmpl, None + return templater.unquotestring(tmpl), None else: style = util.expandpath(ui.config('ui', 'style', '')) @@ -1554,17 +1608,14 @@ if not tmpl and not mapfile: return changeset_printer(ui, repo, matchfn, opts, buffered) - try: - t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, - buffered) - except SyntaxError as inst: - raise error.Abort(inst.args[0]) - return t - -def showmarker(ui, marker): + return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered) + +def showmarker(ui, marker, index=None): """utility function to display obsolescence marker in a readable way To be used by debug function.""" + if index is not None: + ui.write("%i " % index) 
ui.write(hex(marker.precnode())) for repl in marker.succnodes(): ui.write(' ') @@ -2173,6 +2224,7 @@ def formatnode(repo, ctx): props['ctx'] = ctx props['repo'] = repo + props['ui'] = repo.ui props['revcache'] = {} return templater.stringify(templ('graphnode', **props)) return formatnode @@ -2180,7 +2232,23 @@ def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None, filematcher=None): formatnode = _graphnodeformatter(ui, displayer) - seen, state = [], graphmod.asciistate() + state = graphmod.asciistate() + styles = state['styles'] + edgetypes = { + 'parent': graphmod.PARENT, + 'grandparent': graphmod.GRANDPARENT, + 'missing': graphmod.MISSINGPARENT + } + for name, key in edgetypes.items(): + # experimental config: experimental.graphstyle.* + styles[key] = ui.config('experimental', 'graphstyle.%s' % name, + styles[key]) + if not styles[key]: + styles[key] = None + + # experimental config: experimental.graphshorten + state['graphshorten'] = ui.configbool('experimental', 'graphshorten') + for rev, type, ctx, parents in dag: char = formatnode(repo, ctx) copies = None @@ -2198,7 +2266,7 @@ if not lines[-1]: del lines[-1] displayer.flush(ctx) - edges = edgefn(type, char, lines, seen, rev, parents) + edges = edgefn(type, char, lines, state, rev, parents) for type, char, lines, coldata in edges: graphmod.ascii(ui, state, type, char, lines, coldata) displayer.close() @@ -2260,7 +2328,7 @@ for subpath in sorted(wctx.substate): sub = wctx.sub(subpath) try: - submatch = matchmod.narrowmatcher(subpath, match) + submatch = matchmod.subdirmatcher(subpath, match) if opts.get('subrepos'): bad.extend(sub.add(ui, submatch, prefix, False, **opts)) else: @@ -2289,7 +2357,7 @@ for subpath in sorted(wctx.substate): sub = wctx.sub(subpath) try: - submatch = matchmod.narrowmatcher(subpath, match) + submatch = matchmod.subdirmatcher(subpath, match) subbad, subforgot = sub.forget(submatch, prefix) bad.extend([subpath + '/' + f for f in subbad]) forgot.extend([subpath + '/' + f for 
f in subforgot]) @@ -2346,7 +2414,7 @@ if subrepos or matchessubrepo(subpath): sub = ctx.sub(subpath) try: - submatch = matchmod.narrowmatcher(subpath, m) + submatch = matchmod.subdirmatcher(subpath, m) recurse = m.exact(subpath) or subrepos if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0: ret = 0 @@ -2356,7 +2424,7 @@ return ret -def remove(ui, repo, m, prefix, after, force, subrepos): +def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None): join = lambda f: os.path.join(prefix, f) ret = 0 s = repo.status(match=m, clean=True) @@ -2364,7 +2432,16 @@ wctx = repo[None] - for subpath in sorted(wctx.substate): + if warnings is None: + warnings = [] + warn = True + else: + warn = False + + subs = sorted(wctx.substate) + total = len(subs) + count = 0 + for subpath in subs: def matchessubrepo(matcher, subpath): if matcher.exact(subpath): return True @@ -2373,60 +2450,91 @@ return True return False + count += 1 if subrepos or matchessubrepo(m, subpath): + ui.progress(_('searching'), count, total=total, unit=_('subrepos')) + sub = wctx.sub(subpath) try: - submatch = matchmod.narrowmatcher(subpath, m) - if sub.removefiles(submatch, prefix, after, force, subrepos): + submatch = matchmod.subdirmatcher(subpath, m) + if sub.removefiles(submatch, prefix, after, force, subrepos, + warnings): ret = 1 except error.LookupError: - ui.status(_("skipping missing subrepository: %s\n") + warnings.append(_("skipping missing subrepository: %s\n") % join(subpath)) + ui.progress(_('searching'), None) # warn about failure to delete explicit files/dirs deleteddirs = util.dirs(deleted) - for f in m.files(): + files = m.files() + total = len(files) + count = 0 + for f in files: def insubrepo(): for subpath in wctx.substate: if f.startswith(subpath): return True return False + count += 1 + ui.progress(_('deleting'), count, total=total, unit=_('files')) isdir = f in deleteddirs or wctx.hasdir(f) if f in repo.dirstate or isdir or f == '.' 
or insubrepo(): continue if repo.wvfs.exists(f): if repo.wvfs.isdir(f): - ui.warn(_('not removing %s: no tracked files\n') + warnings.append(_('not removing %s: no tracked files\n') % m.rel(f)) else: - ui.warn(_('not removing %s: file is untracked\n') + warnings.append(_('not removing %s: file is untracked\n') % m.rel(f)) # missing files will generate a warning elsewhere ret = 1 + ui.progress(_('deleting'), None) if force: list = modified + deleted + clean + added elif after: list = deleted - for f in modified + added + clean: - ui.warn(_('not removing %s: file still exists\n') % m.rel(f)) + remaining = modified + added + clean + total = len(remaining) + count = 0 + for f in remaining: + count += 1 + ui.progress(_('skipping'), count, total=total, unit=_('files')) + warnings.append(_('not removing %s: file still exists\n') + % m.rel(f)) ret = 1 + ui.progress(_('skipping'), None) else: list = deleted + clean + total = len(modified) + len(added) + count = 0 for f in modified: - ui.warn(_('not removing %s: file is modified (use -f' + count += 1 + ui.progress(_('skipping'), count, total=total, unit=_('files')) + warnings.append(_('not removing %s: file is modified (use -f' ' to force removal)\n') % m.rel(f)) ret = 1 for f in added: - ui.warn(_('not removing %s: file has been marked for add' + count += 1 + ui.progress(_('skipping'), count, total=total, unit=_('files')) + warnings.append(_('not removing %s: file has been marked for add' ' (use forget to undo)\n') % m.rel(f)) ret = 1 - - for f in sorted(list): + ui.progress(_('skipping'), None) + + list = sorted(list) + total = len(list) + count = 0 + for f in list: + count += 1 if ui.verbose or not m.exact(f): + ui.progress(_('deleting'), count, total=total, unit=_('files')) ui.status(_('removing %s\n') % m.rel(f)) + ui.progress(_('deleting'), None) with repo.wlock(): if not after: @@ -2436,6 +2544,10 @@ util.unlinkpath(repo.wjoin(f), ignoremissing=True) repo[None].forget(list) + if warn: + for warning in warnings: + 
ui.warn(warning) + return ret def cat(ui, repo, ctx, matcher, prefix, **opts): @@ -2474,7 +2586,7 @@ for subpath in sorted(ctx.substate): sub = ctx.sub(subpath) try: - submatch = matchmod.narrowmatcher(subpath, matcher) + submatch = matchmod.subdirmatcher(subpath, matcher) if not sub.cat(submatch, os.path.join(prefix, sub._path), **opts): @@ -2504,7 +2616,7 @@ def amend(ui, repo, commitfunc, old, extra, pats, opts): # avoid cycle context -> subrepo -> cmdutil - import context + from . import context # amend will reuse the existing user if not specified, but the obsolete # marker creation requires that the current user's name is specified. @@ -2748,10 +2860,7 @@ ui = repo.ui tmpl, mapfile = gettemplate(ui, tmpl, None) - try: - t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False) - except SyntaxError as inst: - raise error.Abort(inst.args[0]) + t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False) for k, v in repo.ui.configitems('committemplate'): if k != 'changeset': @@ -3129,13 +3238,26 @@ """ parent, p2 = parents node = ctx.node() + excluded_files = [] + matcher_opts = {"exclude": excluded_files} + def checkout(f): fc = ctx[f] repo.wwrite(f, fc.data(), fc.flags()) audit_path = pathutil.pathauditor(repo.root) for f in actions['forget'][0]: - repo.dirstate.drop(f) + if interactive: + choice = \ + repo.ui.promptchoice( + _("forget added file %s (yn)?$$ &Yes $$ &No") + % f) + if choice == 0: + repo.dirstate.drop(f) + else: + excluded_files.append(repo.wjoin(f)) + else: + repo.dirstate.drop(f) for f in actions['remove'][0]: audit_path(f) try: @@ -3161,7 +3283,7 @@ if interactive: # Prompt the user for changes to revert torevert = [repo.wjoin(f) for f in actions['revert'][0]] - m = scmutil.match(ctx, torevert, {}) + m = scmutil.match(ctx, torevert, matcher_opts) diffopts = patch.difffeatureopts(repo.ui, whitespace=True) diffopts.nodates = True diffopts.git = True @@ -3185,7 +3307,7 @@ newlyaddedandmodifiedfiles = newandmodified(chunks, 
originalchunks) # Apply changes - fp = cStringIO.StringIO() + fp = stringio() for c in chunks: c.write(fp) dopatch = fp.tell() @@ -3254,24 +3376,13 @@ def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False, inferrepo=False): def decorator(func): + func.norepo = norepo + func.optionalrepo = optionalrepo + func.inferrepo = inferrepo if synopsis: table[name] = func, list(options), synopsis else: table[name] = func, list(options) - - if norepo: - # Avoid import cycle. - import commands - commands.norepo += ' %s' % ' '.join(parsealiases(name)) - - if optionalrepo: - import commands - commands.optionalrepo += ' %s' % ' '.join(parsealiases(name)) - - if inferrepo: - import commands - commands.inferrepo += ' %s' % ' '.join(parsealiases(name)) - return func return decorator @@ -3333,13 +3444,56 @@ _('hg graft --continue')), ] -def checkafterresolved(repo): - contmsg = _("continue: %s\n") +def howtocontinue(repo): + '''Check for an unfinished operation and return the command to finish + it. + + afterresolvedstates tupples define a .hg/{file} and the corresponding + command needed to finish it. + + Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is + a boolean. + ''' + contmsg = _("continue: %s") for f, msg in afterresolvedstates: if repo.vfs.exists(f): - repo.ui.warn(contmsg % msg) - return - repo.ui.note(contmsg % _("hg commit")) + return contmsg % msg, True + workingctx = repo[None] + dirty = any(repo.status()) or any(workingctx.sub(s).dirty() + for s in workingctx.substate) + if dirty: + return contmsg % _("hg commit"), False + return None, None + +def checkafterresolved(repo): + '''Inform the user about the next action after completing hg resolve + + If there's a matching afterresolvedstates, howtocontinue will yield + repo.ui.warn as the reporter. + + Otherwise, it will yield repo.ui.note. 
+ ''' + msg, warning = howtocontinue(repo) + if msg is not None: + if warning: + repo.ui.warn("%s\n" % msg) + else: + repo.ui.note("%s\n" % msg) + +def wrongtooltocontinue(repo, task): + '''Raise an abort suggesting how to properly continue if there is an + active task. + + Uses howtocontinue() to find the active task. + + If there's no task (repo.ui.note for 'hg commit'), it does not offer + a hint. + ''' + after = howtocontinue(repo) + hint = None + if after[1]: + hint = after[0] + raise error.Abort(_('no %s in progress') % task, hint=hint) class dirstateguard(object): '''Restore dirstate at unexpected failure.
--- a/mercurial/commands.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/commands.py Sat Apr 16 18:06:48 2016 -0500 @@ -5,42 +5,82 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from node import hex, bin, nullhex, nullid, nullrev, short -from lock import release -from i18n import _ -import os, re, difflib, time, tempfile, errno, shlex -import sys, socket -import hg, scmutil, util, revlog, copies, error, bookmarks -import patch, help, encoding, templatekw, discovery -import archival, changegroup, cmdutil, hbisect -import sshserver, hgweb -import extensions -import merge as mergemod -import minirst, revset, fileset -import dagparser, context, simplemerge, graphmod, copies -import random, operator -import setdiscovery, treediscovery, dagutil, pvec, localrepo, destutil -import phases, obsolete, exchange, bundle2, repair, lock as lockmod -import ui as uimod -import streamclone -import commandserver +from __future__ import absolute_import + +import difflib +import errno +import operator +import os +import random +import re +import shlex +import socket +import sys +import tempfile +import time + +from .i18n import _ +from .node import ( + bin, + hex, + nullhex, + nullid, + nullrev, + short, +) +from . 
import ( + archival, + bookmarks, + bundle2, + changegroup, + cmdutil, + commandserver, + context, + copies, + dagparser, + dagutil, + destutil, + discovery, + encoding, + error, + exchange, + extensions, + fileset, + formatter, + graphmod, + hbisect, + help, + hg, + hgweb, + localrepo, + lock as lockmod, + merge as mergemod, + minirst, + obsolete, + patch, + phases, + pvec, + repair, + revlog, + revset, + scmutil, + setdiscovery, + simplemerge, + sshserver, + streamclone, + templatekw, + templater, + treediscovery, + ui as uimod, + util, +) + +release = lockmod.release table = {} command = cmdutil.command(table) -# Space delimited list of commands that don't require local repositories. -# This should be populated by passing norepo=True into the @command decorator. -norepo = '' -# Space delimited list of commands that optionally require local repositories. -# This should be populated by passing optionalrepo=True into the @command -# decorator. -optionalrepo = '' -# Space delimited list of commands that will examine arguments looking for -# a repository. This should be populated by passing inferrepo=True into the -# @command decorator. -inferrepo = '' - # label constants # until 3.5, bookmarks.current was the advertised name, not # bookmarks.active, so we must use both to avoid breaking old @@ -1347,7 +1387,7 @@ # Packed bundles are a pseudo bundle format for now. if cgversion == 's1': raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'), - hint=_('use "hg debugcreatestreamclonebundle"')) + hint=_("use 'hg debugcreatestreamclonebundle'")) if opts.get('all'): if dest: @@ -1360,6 +1400,10 @@ base = scmutil.revrange(repo, opts.get('base')) # TODO: get desired bundlecaps from command line. 
bundlecaps = None + if cgversion not in changegroup.supportedoutgoingversions(repo): + raise error.Abort(_("repository does not support bundle version %s") % + cgversion) + if base: if dest: raise error.Abort(_("--base is incompatible with specifying " @@ -1395,8 +1439,7 @@ assert cgversion == '02' bversion = 'HG20' - - changegroup.writebundle(ui, cg, fname, bversion, compression=bcompression) + bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression) @command('cat', [('o', 'output', '', @@ -1682,6 +1725,15 @@ if not allowunstable and old.children(): raise error.Abort(_('cannot amend changeset with children')) + # Currently histedit gets confused if an amend happens while histedit + # is in progress. Since we have a checkunfinished command, we are + # temporarily honoring it. + # + # Note: eventually this guard will be removed. Please do not expect + # this behavior to remain. + if not obsolete.isenabled(repo, obsolete.createmarkersopt): + cmdutil.checkunfinished(repo) + # commitfunc is used only for temporary amend commit by cmdutil.amend def commitfunc(ui, repo, message, match, opts): return repo.commit(message, @@ -2434,9 +2486,9 @@ 'gzip': 'HG10GZ', 'bundle2': 'HG20'} bundletype = btypes.get(bundletype) - if bundletype not in changegroup.bundletypes: + if bundletype not in bundle2.bundletypes: raise error.Abort(_('unknown bundle type specified with --type')) - changegroup.writebundle(ui, bundle, bundlepath, bundletype) + bundle2.writebundle(ui, bundle, bundlepath, bundletype) @command('debugignore', [], '[FILE]') def debugignore(ui, repo, *files, **opts): @@ -2457,20 +2509,21 @@ raise error.Abort(_("no ignore patterns found")) else: for f in files: + nf = util.normpath(f) ignored = None ignoredata = None - if f != '.': - if ignore(f): - ignored = f - ignoredata = repo.dirstate._ignorefileandline(f) + if nf != '.': + if ignore(nf): + ignored = nf + ignoredata = repo.dirstate._ignorefileandline(nf) else: - for p in util.finddirs(f): + for p in 
util.finddirs(nf): if ignore(p): ignored = p ignoredata = repo.dirstate._ignorefileandline(p) break if ignored: - if ignored == f: + if ignored == nf: ui.write("%s is ignored\n" % f) else: ui.write("%s is ignored because of containing folder %s\n" @@ -2652,8 +2705,8 @@ fm.end() -@command('debuginstall', [], '', norepo=True) -def debuginstall(ui): +@command('debuginstall', [] + formatteropts, '', norepo=True) +def debuginstall(ui, **opts): '''test Mercurial installation Returns 0 on success. @@ -2668,86 +2721,109 @@ problems = 0 + fm = ui.formatter('debuginstall', opts) + fm.startitem() + # encoding - ui.status(_("checking encoding (%s)...\n") % encoding.encoding) + fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding) + err = None try: encoding.fromlocal("test") except error.Abort as inst: - ui.write(" %s\n" % inst) - ui.write(_(" (check that your locale is properly set)\n")) + err = inst problems += 1 + fm.condwrite(err, 'encodingerror', _(" %s\n" + " (check that your locale is properly set)\n"), err) # Python - ui.status(_("checking Python executable (%s)\n") % sys.executable) - ui.status(_("checking Python version (%s)\n") - % ("%s.%s.%s" % sys.version_info[:3])) - ui.status(_("checking Python lib (%s)...\n") - % os.path.dirname(os.__file__)) + fm.write('pythonexe', _("checking Python executable (%s)\n"), + sys.executable) + fm.write('pythonver', _("checking Python version (%s)\n"), + ("%s.%s.%s" % sys.version_info[:3])) + fm.write('pythonlib', _("checking Python lib (%s)...\n"), + os.path.dirname(os.__file__)) # compiled modules - ui.status(_("checking installed modules (%s)...\n") - % os.path.dirname(__file__)) + fm.write('hgmodules', _("checking installed modules (%s)...\n"), + os.path.dirname(__file__)) + + err = None try: - import bdiff, mpatch, base85, osutil + from . 
import ( + base85, + bdiff, + mpatch, + osutil, + ) dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes except Exception as inst: - ui.write(" %s\n" % inst) - ui.write(_(" One or more extensions could not be found")) - ui.write(_(" (check that you compiled the extensions)\n")) + err = inst problems += 1 + fm.condwrite(err, 'extensionserror', " %s\n", err) # templates - import templater p = templater.templatepaths() - ui.status(_("checking templates (%s)...\n") % ' '.join(p)) + fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p)) + fm.condwrite(not p, '', _(" no template directories found\n")) if p: m = templater.templatepath("map-cmdline.default") if m: # template found, check if it is working + err = None try: - templater.templater(m) + templater.templater.frommapfile(m) except Exception as inst: - ui.write(" %s\n" % inst) + err = inst p = None + fm.condwrite(err, 'defaulttemplateerror', " %s\n", err) else: - ui.write(_(" template 'default' not found\n")) p = None - else: - ui.write(_(" no template directories found\n")) + fm.condwrite(p, 'defaulttemplate', + _("checking default template (%s)\n"), m) + fm.condwrite(not m, 'defaulttemplatenotfound', + _(" template '%s' not found\n"), "default") if not p: - ui.write(_(" (templates seem to have been installed incorrectly)\n")) problems += 1 + fm.condwrite(not p, '', + _(" (templates seem to have been installed incorrectly)\n")) # editor - ui.status(_("checking commit editor...\n")) editor = ui.geteditor() editor = util.expandpath(editor) + fm.write('editor', _("checking commit editor... 
(%s)\n"), editor) cmdpath = util.findexe(shlex.split(editor)[0]) - if not cmdpath: - if editor == 'vi': - ui.write(_(" No commit editor set and can't find vi in PATH\n")) - ui.write(_(" (specify a commit editor in your configuration" - " file)\n")) - else: - ui.write(_(" Can't find editor '%s' in PATH\n") % editor) - ui.write(_(" (specify a commit editor in your configuration" - " file)\n")) - problems += 1 + fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound', + _(" No commit editor set and can't find %s in PATH\n" + " (specify a commit editor in your configuration" + " file)\n"), not cmdpath and editor == 'vi' and editor) + fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound', + _(" Can't find editor '%s' in PATH\n" + " (specify a commit editor in your configuration" + " file)\n"), not cmdpath and editor) + if not cmdpath and editor != 'vi': + problems += 1 # check username - ui.status(_("checking username...\n")) + username = None + err = None try: - ui.username() + username = ui.username() except error.Abort as e: - ui.write(" %s\n" % e) - ui.write(_(" (specify a username in your configuration file)\n")) + err = e problems += 1 + fm.condwrite(username, 'username', _("checking username (%s)\n"), username) + fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n" + " (specify a username in your configuration file)\n"), err) + + fm.condwrite(not problems, '', + _("no problems detected\n")) if not problems: - ui.status(_("no problems detected\n")) - else: - ui.write(_("%s problems detected," - " please check your install!\n") % problems) + fm.data(problems=problems) + fm.condwrite(problems, 'problems', + _("%s problems detected," + " please check your install!\n"), problems) + fm.end() return problems @@ -2813,6 +2889,25 @@ % (afile, _hashornull(anode))) ui.write((' other path: %s (node %s)\n') % (ofile, _hashornull(onode))) + elif rtype == 'f': + filename, rawextras = record.split('\0', 1) + extras = rawextras.split('\0') + i = 0 + 
extrastrings = [] + while i < len(extras): + extrastrings.append('%s = %s' % (extras[i], extras[i + 1])) + i += 2 + + ui.write(('file extras: %s (%s)\n') + % (filename, ', '.join(extrastrings))) + elif rtype == 'l': + labels = record.split('\0', 2) + labels = [l for l in labels if len(l) > 0] + ui.write(('labels:\n')) + ui.write((' local: %s\n' % labels[0])) + ui.write((' other: %s\n' % labels[1])) + if len(labels) > 2: + ui.write((' base: %s\n' % labels[2])) else: ui.write(('unrecognized entry: %s\t%s\n') % (rtype, record.replace('\0', '\t'))) @@ -2825,7 +2920,7 @@ # sort so that reasonable information is on top v1records = ms._readrecordsv1() v2records = ms._readrecordsv2() - order = 'LOm' + order = 'LOml' def key(r): idx = order.find(r[0]) if idx == -1: @@ -2946,6 +3041,8 @@ ('', 'record-parents', False, _('record parent information for the precursor')), ('r', 'rev', [], _('display markers relevant to REV')), + ('', 'index', False, _('display index of the marker')), + ('', 'delete', [], _('delete markers specified by indices')), ] + commitopts2, _('[OBSOLETED [REPLACEMENT ...]]')) def debugobsolete(ui, repo, precursor=None, *successors, **opts): @@ -2966,6 +3063,25 @@ raise error.Abort('changeset references must be full hexadecimal ' 'node identifiers') + if opts.get('delete'): + indices = [] + for v in opts.get('delete'): + try: + indices.append(int(v)) + except ValueError: + raise error.Abort(_('invalid index value: %r') % v, + hint=_('use integers for indices')) + + if repo.currenttransaction(): + raise error.Abort(_('cannot delete obsmarkers in the middle ' + 'of transaction.')) + + with repo.lock(): + n = repair.deleteobsmarkers(repo.obsstore, indices) + ui.write(_('deleted %i obsolescense markers\n') % n) + + return + if precursor is not None: if opts['rev']: raise error.Abort('cannot select revision when creating marker') @@ -3008,8 +3124,25 @@ else: markers = obsolete.getmarkers(repo) - for m in markers: - cmdutil.showmarker(ui, m) + markerstoiter = 
markers + isrelevant = lambda m: True + if opts.get('rev') and opts.get('index'): + markerstoiter = obsolete.getmarkers(repo) + markerset = set(markers) + isrelevant = lambda m: m in markerset + + for i, m in enumerate(markerstoiter): + if not isrelevant(m): + # marker can be irrelevant when we're iterating over a set + # of markers (markerstoiter) which is bigger than the set + # of markers we want to display (markers) + # this can happen if both --index and --rev options are + # provided and thus we need to iterate over all of the markers + # to get the correct indices, but only display the ones that + # are relevant to --rev value + continue + ind = i if opts.get('index') else None + cmdutil.showmarker(ui, m, index=ind) @command('debugpathcomplete', [('f', 'full', None, _('complete an entire path')), @@ -3204,12 +3337,16 @@ ts = ts + rs heads -= set(r.parentrevs(rev)) heads.add(rev) + try: + compression = ts / r.end(rev) + except ZeroDivisionError: + compression = 0 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d " "%11d %5d %8d\n" % (rev, p1, p2, r.start(rev), r.end(rev), r.start(dbase), r.start(cbase), r.start(p1), r.start(p2), - rs, ts, ts / r.end(rev), len(heads), clen)) + rs, ts, compression, len(heads), clen)) return 0 v = r.version @@ -3372,13 +3509,13 @@ if ui.verbose: tree = revset.parse(expr, lookup=repo.__contains__) ui.note(revset.prettyformat(tree), "\n") - newtree = revset.findaliases(ui, tree) + newtree = revset.expandaliases(ui, tree) if newtree != tree: - ui.note(revset.prettyformat(newtree), "\n") + ui.note("* expanded:\n", revset.prettyformat(newtree), "\n") tree = newtree newtree = revset.foldconcat(tree) if newtree != tree: - ui.note(revset.prettyformat(newtree), "\n") + ui.note("* concatenated:\n", revset.prettyformat(newtree), "\n") if opts["optimize"]: weight, optimizedtree = revset.optimize(newtree, True) ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n") @@ -3405,9 +3542,7 @@ r2 = scmutil.revsingle(repo, rev2, 
'null').node() with repo.wlock(): - repo.dirstate.beginparentchange() repo.setparents(r1, r2) - repo.dirstate.endparentchange() @command('debugdirstate|debugstate', [('', 'nodates', None, _('do not display the saved mtime')), @@ -3505,6 +3640,57 @@ ui.write(node2str(node)) ui.write('\n') +@command('debugtemplate', + [('r', 'rev', [], _('apply template on changesets'), _('REV')), + ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))], + _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'), + optionalrepo=True) +def debugtemplate(ui, repo, tmpl, **opts): + """parse and apply a template + + If -r/--rev is given, the template is processed as a log template and + applied to the given changesets. Otherwise, it is processed as a generic + template. + + Use --verbose to print the parsed tree. + """ + revs = None + if opts['rev']: + if repo is None: + raise error.RepoError(_('there is no Mercurial repository here ' + '(.hg not found)')) + revs = scmutil.revrange(repo, opts['rev']) + + props = {} + for d in opts['define']: + try: + k, v = (e.strip() for e in d.split('=', 1)) + if not k: + raise ValueError + props[k] = v + except ValueError: + raise error.Abort(_('malformed keyword definition: %s') % d) + + if ui.verbose: + aliases = ui.configitems('templatealias') + tree = templater.parse(tmpl) + ui.note(templater.prettyformat(tree), '\n') + newtree = templater.expandaliases(tree, aliases) + if newtree != tree: + ui.note("* expanded:\n", templater.prettyformat(newtree), '\n') + + mapfile = None + if revs is None: + k = 'debugtemplate' + t = formatter.maketemplater(ui, k, tmpl) + ui.write(templater.stringify(t(k, **props))) + else: + displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl, + mapfile, buffered=False) + for r in revs: + displayer.show(repo[r], **props) + displayer.close() + @command('debugwalk', walkopts, _('[OPTION]... 
[FILE]...'), inferrepo=True) def debugwalk(ui, repo, *pats, **opts): """show how files match on given patterns""" @@ -3880,7 +4066,7 @@ - show revisions sorted by date:: - hg log -r 'sort(all(), date)' + hg log -r "sort(all(), date)" See :hg:`help revisions` and :hg:`help revsets` for more about specifying revisions. @@ -3917,7 +4103,7 @@ except IOError as inst: if inst.errno != errno.ENOENT: raise - raise error.Abort(_("no graft state found, can't continue")) + cmdutil.wrongtooltocontinue(repo, _('graft')) else: cmdutil.checkunfinished(repo) cmdutil.bailifchanged(repo) @@ -4050,7 +4236,7 @@ extra += ' --date %s' % opts['date'] if opts.get('log'): extra += ' --log' - hint=_('use hg resolve and hg graft --continue%s') % extra + hint=_("use 'hg resolve' and 'hg graft --continue%s'") % extra raise error.Abort( _("unresolved conflicts, can't continue"), hint=hint) @@ -4595,7 +4781,7 @@ ('', 'partial', None, _('commit even if some hunks fail')), ('', 'exact', None, - _('apply patch to the nodes from which it was generated')), + _('abort if patch would apply lossily')), ('', 'prefix', '', _('apply patch to subdirectory'), _('DIR')), ('', 'import-branch', None, @@ -4635,8 +4821,9 @@ If --exact is specified, import will set the working directory to the parent of each patch before applying it, and will abort if the resulting changeset has a different ID than the one recorded in - the patch. This may happen due to character set problems or other - deficiencies in the text patch format. + the patch. This will guard against various ways that portable + patch formats and mail systems might fail to transfer Mercurial + data or metadata. See ':hg: bundle' for lossless transmission. Use --partial to ensure a changeset will be created from the patch even if some hunks fail to apply. 
Hunks that fail to apply will be @@ -5231,7 +5418,8 @@ try: # ui.forcemerge is an internal variable, do not document repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge') - return hg.merge(repo, node, force=opts.get('force')) + force = opts.get('force') + return hg.merge(repo, node, force=force, mergeforce=force) finally: ui.setconfig('ui', 'forcemerge', '', 'merge') @@ -5526,27 +5714,25 @@ ui.warn(_('no phases changed\n')) return ret -def postincoming(ui, repo, modheads, optupdate, checkout): +def postincoming(ui, repo, modheads, optupdate, checkout, brev): + """Run after a changegroup has been added via pull/unbundle + + This takes arguments below: + + :modheads: change of heads by pull/unbundle + :optupdate: updating working directory is needed or not + :checkout: update destination revision (or None to default destination) + :brev: a name, which might be a bookmark to be activated after updating + """ if modheads == 0: return if optupdate: try: - brev = checkout - movemarkfrom = None - if not checkout: - updata = destutil.destupdate(repo) - checkout, movemarkfrom, brev = updata - ret = hg.update(repo, checkout) + return hg.updatetotally(ui, repo, checkout, brev) except error.UpdateAbort as inst: msg = _("not updating: %s") % str(inst) hint = inst.hint raise error.UpdateAbort(msg, hint=hint) - if not ret and movemarkfrom: - if movemarkfrom == repo['.'].node(): - pass # no-op update - elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()): - ui.status(_("updating bookmark %s\n") % repo._activebookmark) - return ret if modheads > 1: currentbranchheads = len(repo.branchheads()) if currentbranchheads == modheads: @@ -5634,11 +5820,28 @@ force=opts.get('force'), bookmarks=opts.get('bookmark', ()), opargs=pullopargs).cgresult + + # brev is a name, which might be a bookmark to be activated at + # the end of the update. 
In other words, it is an explicit + # destination of the update + brev = None + if checkout: checkout = str(repo.changelog.rev(checkout)) + + # order below depends on implementation of + # hg.addbranchrevs(). opts['bookmark'] is ignored, + # because 'checkout' is determined without it. + if opts.get('rev'): + brev = opts['rev'][0] + elif opts.get('branch'): + brev = opts['branch'][0] + else: + brev = branches[0] repo._subtoppath = source try: - ret = postincoming(ui, repo, modheads, opts.get('update'), checkout) + ret = postincoming(ui, repo, modheads, opts.get('update'), + checkout, brev) finally: del repo._subtoppath @@ -5687,7 +5890,8 @@ If -B/--bookmark is used, the specified bookmarked revision, its ancestors, and the bookmark will be pushed to the remote - repository. + repository. Specifying ``.`` is equivalent to specifying the active + bookmark's name. Please see :hg:`help urls` for important details about ``ssh://`` URLs. If DESTINATION is omitted, a default path will be used. @@ -5699,6 +5903,7 @@ ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push') for b in opts['bookmark']: # translate -B options to -r so changesets get pushed + b = repo._bookmarks.expandname(b) if b in repo._bookmarks: opts.setdefault('rev', []).append(b) else: @@ -5767,7 +5972,7 @@ @command('^remove|rm', [('A', 'after', None, _('record delete for missing files')), ('f', 'force', None, - _('remove (and delete) file even if added or modified')), + _('forget added files, delete modified files')), ] + subrepoopts + walkopts, _('[OPTION]... FILE...'), inferrepo=True) @@ -5891,8 +6096,9 @@ Returns 0 on success, 1 if any files fail a resolve attempt. 
""" + flaglist = 'all mark unmark list no_status'.split() all, mark, unmark, show, nostatus = \ - [opts.get(o) for o in 'all mark unmark list no_status'.split()] + [opts.get(o) for o in flaglist] if (show and (mark or unmark)) or (mark and unmark): raise error.Abort(_("too many options specified")) @@ -6021,7 +6227,22 @@ ms.recordactions() if not didwork and pats: + hint = None + if not any([p for p in pats if p.find(':') >= 0]): + pats = ['path:%s' % p for p in pats] + m = scmutil.match(wctx, pats, opts) + for f in ms: + if not m(f): + continue + flags = ''.join(['-%s ' % o[0] for o in flaglist + if opts.get(o)]) + hint = _("(try: hg resolve %s%s)\n") % ( + flags, + ' '.join(pats)) + break ui.warn(_("arguments do not match paths that need resolving\n")) + if hint: + ui.warn(hint) elif ms.mergedriver and ms.mdstate() != 's': # run conclude step when either a driver-resolved file is requested # or there are no driver-resolved files @@ -6098,7 +6319,7 @@ if not opts.get('rev') and p2 != nullid: # revert after merge is a trap for new users (issue2915) raise error.Abort(_('uncommitted merge with no revision specified'), - hint=_('use "hg update" or see "hg help revert"')) + hint=_("use 'hg update' or see 'hg help revert'")) ctx = scmutil.revsingle(repo, opts.get('rev')) @@ -6185,7 +6406,7 @@ [('A', 'accesslog', '', _('name of access log file to write to'), _('FILE')), ('d', 'daemon', None, _('run server in background')), - ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('FILE')), + ('', 'daemon-postexec', [], _('used internally by daemon mode')), ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')), # use string type, then we can check if something was passed ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')), @@ -6404,6 +6625,17 @@ pnode = parents[0].node() marks = [] + ms = None + try: + ms = mergemod.mergestate.read(repo) + except error.UnsupportedMergeRecords as e: + s = ' '.join(e.recordtypes) + ui.warn( + 
_('warning: merge state has unsupported record types: %s\n') % s) + unresolved = 0 + else: + unresolved = [f for f in ms if ms[f] == 'u'] + for p in parents: # label with log.changeset (instead of log.parent) since this # shows a working directory parent *changeset*: @@ -6459,16 +6691,6 @@ if d in status.added: status.added.remove(d) - try: - ms = mergemod.mergestate.read(repo) - except error.UnsupportedMergeRecords as e: - s = ' '.join(e.recordtypes) - ui.warn( - _('warning: merge state has unsupported record types: %s\n') % s) - unresolved = 0 - else: - unresolved = [f for f in ms if ms[f] == 'u'] - subs = [s for s in ctx.substate if ctx.sub(s).dirty()] labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified), @@ -6875,7 +7097,7 @@ else: modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname) - return postincoming(ui, repo, modheads, opts.get('update'), None) + return postincoming(ui, repo, modheads, opts.get('update'), None, None) @command('^update|up|checkout|co', [('C', 'clean', None, _('discard uncommitted changes (no backup)')), @@ -6936,62 +7158,34 @@ Returns 0 on success, 1 if there are unresolved files. 
""" - movemarkfrom = None if rev and node: raise error.Abort(_("please specify just one revision")) if rev is None or rev == '': rev = node + if date and rev is not None: + raise error.Abort(_("you can't specify a revision and a date")) + + if check and clean: + raise error.Abort(_("cannot specify both -c/--check and -C/--clean")) + with repo.wlock(): cmdutil.clearunfinished(repo) if date: - if rev is not None: - raise error.Abort(_("you can't specify a revision and a date")) rev = cmdutil.finddate(ui, repo, date) # if we defined a bookmark, we have to remember the original name brev = rev rev = scmutil.revsingle(repo, rev, rev).rev() - if check and clean: - raise error.Abort(_("cannot specify both -c/--check and -C/--clean") - ) - if check: cmdutil.bailifchanged(repo, merge=False) - if rev is None: - updata = destutil.destupdate(repo, clean=clean, check=check) - rev, movemarkfrom, brev = updata repo.ui.setconfig('ui', 'forcemerge', tool, 'update') - if clean: - ret = hg.clean(repo, rev) - else: - ret = hg.update(repo, rev) - - if not ret and movemarkfrom: - if movemarkfrom == repo['.'].node(): - pass # no-op update - elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()): - ui.status(_("updating bookmark %s\n") % repo._activebookmark) - else: - # this can happen with a non-linear update - ui.status(_("(leaving bookmark %s)\n") % - repo._activebookmark) - bookmarks.deactivate(repo) - elif brev in repo._bookmarks: - bookmarks.activate(repo, brev) - ui.status(_("(activating bookmark %s)\n") % brev) - elif brev: - if repo._activebookmark: - ui.status(_("(leaving bookmark %s)\n") % - repo._activebookmark) - bookmarks.deactivate(repo) - - return ret + return hg.updatetotally(ui, repo, rev, brev, clean=clean, check=check) @command('verify', []) def verify(ui, repo): @@ -7030,10 +7224,25 @@ # format names and versions into columns names = [] vers = [] + place = [] for name, module in extensions.extensions(): names.append(name) 
vers.append(extensions.moduleversion(module)) + if extensions.ismoduleinternal(module): + place.append(_("internal")) + else: + place.append(_("external")) if names: maxnamelen = max(len(n) for n in names) for i, name in enumerate(names): - ui.write(" %-*s %s\n" % (maxnamelen, name, vers[i])) + ui.write(" %-*s %s %s\n" % + (maxnamelen, name, place[i], vers[i])) + +def loadcmdtable(ui, name, cmdtable): + """Load command functions from specified cmdtable + """ + overrides = [cmd for cmd in cmdtable if cmd in table] + if overrides: + ui.warn(_("extension '%s' overrides commands: %s\n") + % (name, " ".join(overrides))) + table.update(cmdtable)
--- a/mercurial/commandserver.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/commandserver.py Sat Apr 16 18:06:48 2016 -0500 @@ -190,16 +190,31 @@ return data + def _readstr(self): + """read a string from the channel + + format: + data length (uint32), data + """ + length = struct.unpack('>I', self._read(4))[0] + if not length: + return '' + return self._read(length) + + def _readlist(self): + """read a list of NULL separated strings from the channel""" + s = self._readstr() + if s: + return s.split('\0') + else: + return [] + def runcommand(self): """ reads a list of \0 terminated arguments, executes and writes the return code to the result channel """ from . import dispatch # avoid cycle - length = struct.unpack('>I', self._read(4))[0] - if not length: - args = [] - else: - args = self._read(length).split('\0') + args = self._readlist() # copy the uis so changes (e.g. --config or --verbose) don't # persist between requests @@ -262,7 +277,7 @@ hellomsg += '\n' hellomsg += 'encoding: ' + encoding.encoding hellomsg += '\n' - hellomsg += 'pid: %d' % os.getpid() + hellomsg += 'pid: %d' % util.getpid() # write the hello msg in -one- chunk self.cout.write(hellomsg) @@ -323,8 +338,9 @@ def handle(self): ui = self.server.ui repo = self.server.repo - sv = server(ui, repo, self.rfile, self.wfile) + sv = None try: + sv = server(ui, repo, self.rfile, self.wfile) try: sv.serve() # handle exceptions that may be raised by command server. most of @@ -339,7 +355,11 @@ except: # re-raises # also write traceback to error channel. otherwise client cannot # see it because it is written to server's stderr by default. - traceback.print_exc(file=sv.cerr) + if sv: + cerr = sv.cerr + else: + cerr = channeledoutput(self.wfile, 'e') + traceback.print_exc(file=cerr) raise class unixservice(object):
--- a/mercurial/context.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/context.py Sat Apr 16 18:06:48 2016 -0500 @@ -259,7 +259,7 @@ if path in self._manifestdelta: return (self._manifestdelta[path], self._manifestdelta.flags(path)) - node, flag = self._repo.manifest.find(self._changeset[0], path) + node, flag = self._repo.manifest.find(self._changeset.manifest, path) if not node: raise error.ManifestLookupError(self._node, path, _('not found in manifest')) @@ -365,7 +365,7 @@ # node1 and node2 (inclusive). Thus, ctx2's substate # won't contain that subpath. The best we can do ignore it. rev2 = None - submatch = matchmod.narrowmatcher(subpath, match) + submatch = matchmod.subdirmatcher(subpath, match) s = sub.status(rev2, match=submatch, ignored=listignored, clean=listclean, unknown=listunknown, listsubrepos=True) @@ -524,15 +524,15 @@ @propertycache def _changeset(self): - return self._repo.changelog.read(self.rev()) + return self._repo.changelog.changelogrevision(self.rev()) @propertycache def _manifest(self): - return self._repo.manifest.read(self._changeset[0]) + return self._repo.manifest.read(self._changeset.manifest) @propertycache def _manifestdelta(self): - return self._repo.manifest.readdelta(self._changeset[0]) + return self._repo.manifest.readdelta(self._changeset.manifest) @propertycache def _parents(self): @@ -543,24 +543,32 @@ return [changectx(repo, p1), changectx(repo, p2)] def changeset(self): - return self._changeset + c = self._changeset + return ( + c.manifest, + c.user, + c.date, + c.files, + c.description, + c.extra, + ) def manifestnode(self): - return self._changeset[0] + return self._changeset.manifest def user(self): - return self._changeset[1] + return self._changeset.user def date(self): - return self._changeset[2] + return self._changeset.date def files(self): - return self._changeset[3] + return self._changeset.files def description(self): - return self._changeset[4] + return self._changeset.description def branch(self): - return 
encoding.tolocal(self._changeset[5].get("branch")) + return encoding.tolocal(self._changeset.extra.get("branch")) def closesbranch(self): - return 'close' in self._changeset[5] + return 'close' in self._changeset.extra def extra(self): - return self._changeset[5] + return self._changeset.extra def tags(self): return self._repo.nodetags(self._node) def bookmarks(self): @@ -789,7 +797,7 @@ if fctx._customcmp: return fctx.cmp(self) - if (fctx._filerev is None + if (fctx._filenode is None and (self._repo._encodefilterpats # if file data starts with '\1\n', empty metadata block is # prepended, which adds 4 bytes to filelog.size(). @@ -1892,9 +1900,9 @@ p2node = nullid p = pctx[f].parents() # if file isn't in pctx, check p2? if len(p) > 0: - p1node = p[0].node() + p1node = p[0].filenode() if len(p) > 1: - p2node = p[1].node() + p2node = p[1].filenode() man[f] = revlog.hash(self[f].data(), p1node, p2node) for f in self._status.added:
--- a/mercurial/copies.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/copies.py Sat Apr 16 18:06:48 2016 -0500 @@ -10,7 +10,9 @@ import heapq from . import ( + node, pathutil, + scmutil, util, ) @@ -175,7 +177,18 @@ # we currently don't try to find where old files went, too expensive # this means we can miss a case like 'hg rm b; hg cp a b' cm = {} - missing = _computeforwardmissing(a, b, match=match) + + # Computing the forward missing is quite expensive on large manifests, since + # it compares the entire manifests. We can optimize it in the common use + # case of computing what copies are in a commit versus its parent (like + # during a rebase or histedit). Note, we exclude merge commits from this + # optimization, since the ctx.files() for a merge commit is not correct for + # this comparison. + forwardmissingmatch = match + if not match and b.p1() == a and b.p2().node() == node.nullid: + forwardmissingmatch = scmutil.matchfiles(a._repo, b.files()) + missing = _computeforwardmissing(a, b, match=forwardmissingmatch) + ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True) for f in missing: fctx = b[f]
--- a/mercurial/crecord.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/crecord.py Sat Apr 16 18:06:48 2016 -0500 @@ -10,14 +10,12 @@ from __future__ import absolute_import -import cStringIO import locale import os import re import signal import struct import sys -import tempfile from .i18n import _ from . import ( @@ -26,11 +24,32 @@ patch as patchmod, util, ) +stringio = util.stringio # This is required for ncurses to display non-ASCII characters in default user # locale encoding correctly. --immerrr locale.setlocale(locale.LC_ALL, '') +# patch comments based on the git one +diffhelptext = _("""# To remove '-' lines, make them ' ' lines (context). +# To remove '+' lines, delete them. +# Lines starting with # will be removed from the patch. +""") + +hunkhelptext = _("""# +# If the patch applies cleanly, the edited hunk will immediately be +# added to the record list. If it does not apply cleanly, a rejects file +# will be generated. You can use that when you try again. If all lines +# of the hunk are removed, then the edit is aborted and the hunk is left +# unchanged. +""") + +patchhelptext = _("""# +# If the patch applies cleanly, the edited patch will immediately +# be finalised. If it does not apply cleanly, rejects files will be +# generated. You can use those when you try again. +""") + try: import curses import fcntl @@ -54,7 +73,7 @@ This method returns True if curses is found (and that python is built with it) and that the user has the correct flag for the ui. """ - return curses and ui.configbool('experimental', 'crecord', False) + return curses and ui.interface("chunkselector") == "curses" _origstdout = sys.__stdout__ # used by gethw() @@ -77,7 +96,6 @@ Return the closest next item of the same type where there are no items of different types between the current item and this closest item. If no such item exists, return None. 
- """ raise NotImplementedError("method must be implemented by subclass") @@ -86,7 +104,6 @@ Return the closest previous item of the same type where there are no items of different types between the current item and this closest item. If no such item exists, return None. - """ raise NotImplementedError("method must be implemented by subclass") @@ -109,7 +126,6 @@ the next item. If it is not possible to get the next item, return None. - """ try: itemfolded = self.folded @@ -163,7 +179,6 @@ next item. If it is not possible to get the previous item, return None. - """ if constrainlevel: return self.prevsibling() @@ -190,7 +205,6 @@ class patch(patchnode, list): # todo: rename patchroot """ list of header objects representing the patch. - """ def __init__(self, headerlist): self.extend(headerlist) @@ -224,7 +238,7 @@ def prettystr(self): - x = cStringIO.StringIO() + x = stringio() self.pretty(x) return x.getvalue() @@ -435,7 +449,7 @@ pretty = write def prettystr(self): - x = cStringIO.StringIO() + x = stringio() self.pretty(x) return x.getvalue() @@ -487,7 +501,6 @@ this is a rip-off of a rip-off - taken from the bpython code. it is useful / necessary because otherwise curses.initscr() must be called, which can leave the terminal in a nasty state after exiting. - """ h, w = struct.unpack( "hhhh", fcntl.ioctl(_origstdout, termios.TIOCGWINSZ, "\000"*8))[0:2] @@ -497,7 +510,6 @@ """ curses interface to get selection of chunks, and mark the applied flags of the chosen chunks. - """ ui.write(_('starting interactive selection\n')) chunkselector = curseschunkselector(headerlist, ui) @@ -518,7 +530,6 @@ """ test interface to get selection of chunks, and mark the applied flags of the chosen chunks. - """ chunkselector = curseschunkselector(headerlist, ui) if testfn and os.path.exists(testfn): @@ -595,7 +606,6 @@ if the currently selected item is already at the top of the screen, scroll the screen down to show the new-selected item. 
- """ currentitem = self.currentselecteditem @@ -616,7 +626,6 @@ if the currently selected item is already at the top of the screen, scroll the screen down to show the new-selected item. - """ currentitem = self.currentselecteditem nextitem = currentitem.previtem() @@ -640,7 +649,6 @@ if the currently selected item is already at the bottom of the screen, scroll the screen up to show the new-selected item. - """ #self.startprintline += 1 #debug currentitem = self.currentselecteditem @@ -657,7 +665,6 @@ if the cursor is already at the bottom chunk, scroll the screen up and move the cursor-position to the subsequent chunk. otherwise, only move the cursor position down one chunk. - """ # todo: update docstring @@ -680,7 +687,6 @@ def rightarrowevent(self): """ select (if possible) the first of this item's child-items. - """ currentitem = self.currentselecteditem nextitem = currentitem.firstchild() @@ -700,7 +706,6 @@ if the current item can be folded (i.e. it is an unfolded header or hunk), then fold it. otherwise try select (if possible) the parent of this item. - """ currentitem = self.currentselecteditem @@ -725,7 +730,6 @@ """ select the header of the current item (or fold current item if the current item is already a header). - """ currentitem = self.currentselecteditem @@ -775,7 +779,6 @@ """ toggle the applied flag of the specified item. if no item is specified, toggle the flag of the currently selected item. - """ if item is None: item = self.currentselecteditem @@ -898,7 +901,6 @@ the screen in the x direction. the current cursor position is taken into account when making this calculation. the string can span multiple lines. - """ y, xstart = window.getyx() width = self.xscreensize @@ -927,7 +929,6 @@ the string stretches to the right border of the window. if showwhtspc == True, trailing whitespace of a string is highlighted. 
- """ # preprocess the text, converting tabs to spaces text = text.expandtabs(4) @@ -1042,8 +1043,8 @@ """ create a string to prefix a line with which indicates whether 'item' is applied and/or folded. + """ - """ # create checkbox string if item.applied: if not isinstance(item, uihunkline) and item.partial: @@ -1076,8 +1077,8 @@ """ print the header to the pad. if countlines is True, don't print anything, but just count the number of lines which would be printed. + """ - """ outstr = "" text = header.prettystr() chunkindex = self.chunklist.index(header) @@ -1192,6 +1193,7 @@ if item is not specified, then print the entire patch. (hiding folded elements, etc. -- see __printitem() docstring) """ + if item is None: item = self.headerlist if recursechildren: @@ -1233,8 +1235,8 @@ if recursechildren is False, then only print the item without its child items. + """ - """ if towin and self.outofdisplayedarea(): return @@ -1281,8 +1283,8 @@ if no item is given, assume the entire patch. if ignorefolding is True, folded items will be unfolded when counting the number of lines. + """ - """ # temporarily disable printing to windows by printstring patchdisplaystring = self.printitem(item, ignorefolding, recursechildren, towin=False) @@ -1316,8 +1318,8 @@ attrlist is used to 'flavor' the returned color-pair. this information is not stored in self.colorpairs. it contains attribute values like curses.A_BOLD. + """ - """ if (name is not None) and name in self.colorpairnames: # then get the associated color pair and return it colorpair = self.colorpairnames[name] @@ -1415,11 +1417,10 @@ return response - def confirmcommit(self, review=False): + def reviewcommit(self): """ask for 'y' to be pressed to confirm selected. return True if confirmed.""" - if review: - confirmtext = ( + confirmtext = ( """if you answer yes to the following, the your currently chosen patch chunks will be loaded into an editor. 
you may modify the patch from the editor, and save the changes if you wish to change the patch. otherwise, you can just @@ -1430,10 +1431,6 @@ are you sure you want to review/edit and confirm the selected changes [yn]? """) - else: - confirmtext = ( - "are you sure you want to confirm the selected changes [yn]? ") - response = self.confirmationwindow(confirmtext) if response is None: response = "n" @@ -1448,8 +1445,8 @@ When the amend flag is set, a commit will modify the most recently committed changeset, instead of creating a new changeset. Otherwise, a new changeset will be created (the normal commit behavior). + """ - """ try: ver = float(util.version()[:3]) except ValueError: @@ -1483,7 +1480,7 @@ def toggleedit(self, item=None, test=False): """ - edit the currently selected chunk + edit the currently selected chunk """ def updateui(self): self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1 @@ -1502,49 +1499,26 @@ self.ui.write(_('cannot edit patch for binary file')) self.ui.write("\n") return None - # patch comment based on the git one (based on comment at end of - # https://mercurial-scm.org/wiki/recordextension) - phelp = '---' + _(""" - to remove '-' lines, make them ' ' lines (context). - to remove '+' lines, delete them. - lines starting with # will be removed from the patch. - if the patch applies cleanly, the edited hunk will immediately be - added to the record list. if it does not apply cleanly, a rejects - file will be generated: you can use that when you try again. if - all lines of the hunk are removed, then the edit is aborted and - the hunk is left unchanged. 
- """) - (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-", - suffix=".diff", text=True) - ncpatchfp = None + # write the initial patch + patch = stringio() + patch.write(diffhelptext + hunkhelptext) + chunk.header.write(patch) + chunk.write(patch) + + # start the editor and wait for it to complete try: - # write the initial patch - f = os.fdopen(patchfd, "w") - chunk.header.write(f) - chunk.write(f) - f.write('\n'.join(['# ' + i for i in phelp.splitlines()])) - f.close() - # start the editor and wait for it to complete - editor = self.ui.geteditor() - ret = self.ui.system("%s \"%s\"" % (editor, patchfn), - environ={'hguser': self.ui.username()}) - if ret != 0: - self.errorstr = "Editor exited with status %d" % ret - return None - # remove comment lines - patchfp = open(patchfn) - ncpatchfp = cStringIO.StringIO() - for line in patchfp: - if not line.startswith('#'): - ncpatchfp.write(line) - patchfp.close() - ncpatchfp.seek(0) - newpatches = patchmod.parsepatch(ncpatchfp) - finally: - os.unlink(patchfn) - del ncpatchfp - return newpatches + patch = self.ui.edit(patch.getvalue(), "", + extra={"suffix": ".diff"}) + except error.Abort as exc: + self.errorstr = str(exc) + return None + + # remove comment lines + patch = [line + '\n' for line in patch.splitlines() + if not line.startswith('#')] + return patchmod.parsepatch(patch) + if item is None: item = self.currentselecteditem if isinstance(item, uiheader): @@ -1597,6 +1571,11 @@ return True def handlekeypressed(self, keypressed, test=False): + """ + Perform actions based on pressed keys. + + Return true to exit the main loop. 
+ """ if keypressed in ["k", "KEY_UP"]: self.uparrowevent() if keypressed in ["K", "KEY_PPAGE"]: @@ -1616,12 +1595,15 @@ elif keypressed in ['a']: self.toggleamend(self.opts, test) elif keypressed in ["c"]: - if self.confirmcommit(): - return True + return True + elif test and keypressed in ['X']: + return True elif keypressed in ["r"]: - if self.confirmcommit(review=True): + if self.reviewcommit(): + self.opts['review'] = True return True - elif test and keypressed in ['X']: + elif test and keypressed in ['R']: + self.opts['review'] = True return True elif keypressed in [' '] or (test and keypressed in ["TOGGLE"]): self.toggleapply() @@ -1641,8 +1623,8 @@ def main(self, stdscr): """ method to be wrapped by curses.wrapper() for selecting chunks. + """ - """ signal.signal(signal.SIGWINCH, self.sigwinchhandler) self.stdscr = stdscr # error during initialization, cannot be printed in the curses
--- a/mercurial/demandimport.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/demandimport.py Sat Apr 16 18:06:48 2016 -0500 @@ -174,7 +174,12 @@ """ symbol = getattr(mod, attr, nothing) if symbol is nothing: - symbol = _demandmod(attr, mod.__dict__, locals, level=1) + mn = '%s.%s' % (mod.__name__, attr) + if mn in ignore: + importfunc = _origimport + else: + importfunc = _demandmod + symbol = importfunc(attr, mod.__dict__, locals, level=1) setattr(mod, attr, symbol) # Record the importing module references this symbol so we can @@ -252,6 +257,7 @@ '_sre', # issue4920 'rfc822', 'mimetools', + 'sqlalchemy.events', # has import-time side effects (issue5085) # setuptools 8 expects this module to explode early when not on windows 'distutils.msvc9compiler' ]
--- a/mercurial/destutil.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/destutil.py Sat Apr 16 18:06:48 2016 -0500 @@ -88,27 +88,55 @@ return node, movemark, activemark def _destupdatebranch(repo, clean, check): - """decide on an update destination from current branch""" + """decide on an update destination from current branch + + This ignores closed branch heads. + """ wc = repo[None] movemark = node = None - try: - node = repo.branchtip(wc.branch()) + currentbranch = wc.branch() + if currentbranch in repo.branchmap(): + heads = repo.branchheads(currentbranch) + if heads: + node = repo.revs('max(.::(%ln))', heads).first() if bookmarks.isactivewdirparent(repo): movemark = repo['.'].node() - except error.RepoLookupError: - if wc.branch() == 'default': # no default branch! - node = repo.lookup('tip') # update to tip - else: - raise error.Abort(_("branch %s not found") % wc.branch()) + elif currentbranch == 'default' and not wc.p1(): + # "null" parent belongs to "default" branch, but it doesn't exist, so + # update to the tipmost non-closed branch head + node = repo.revs('max(head() and not closed())').first() + else: + node = repo['.'].node() + return node, movemark, None + +def _destupdatebranchfallback(repo, clean, check): + """decide on an update destination from closed heads in current branch""" + wc = repo[None] + currentbranch = wc.branch() + movemark = None + if currentbranch in repo.branchmap(): + # here, all descendant branch heads are closed + heads = repo.branchheads(currentbranch, closed=True) + assert heads, "any branch has at least one head" + node = repo.revs('max(.::(%ln))', heads).first() + assert node is not None, ("any revision has at least " + "one descendant branch head") + if bookmarks.isactivewdirparent(repo): + movemark = repo['.'].node() + else: + # here, no "default" branch, and all branches are closed + node = repo.lookup('tip') + assert node is not None, "'tip' exists even in empty repository" return node, movemark, None # order in 
which each step should be evalutated # steps are run until one finds a destination -destupdatesteps = ['evolution', 'bookmark', 'branch'] +destupdatesteps = ['evolution', 'bookmark', 'branch', 'branchfallback'] # mapping to ease extension overriding steps. destupdatestepmap = {'evolution': _destupdateobs, 'bookmark': _destupdatebook, 'branch': _destupdatebranch, + 'branchfallback': _destupdatebranchfallback, } def destupdate(repo, clean=False, check=False): @@ -133,7 +161,102 @@ return rev, movemark, activemark -def _destmergebook(repo): +msgdestmerge = { + # too many matching divergent bookmark + 'toomanybookmarks': + {'merge': + (_("multiple matching bookmarks to merge -" + " please merge with an explicit rev or bookmark"), + _("run 'hg heads' to see all heads")), + 'rebase': + (_("multiple matching bookmarks to rebase -" + " please rebase to an explicit rev or bookmark"), + _("run 'hg heads' to see all heads")), + }, + # no other matching divergent bookmark + 'nootherbookmarks': + {'merge': + (_("no matching bookmark to merge - " + "please merge with an explicit rev or bookmark"), + _("run 'hg heads' to see all heads")), + 'rebase': + (_("no matching bookmark to rebase - " + "please rebase to an explicit rev or bookmark"), + _("run 'hg heads' to see all heads")), + }, + # branch have too many unbookmarked heads, no obvious destination + 'toomanyheads': + {'merge': + (_("branch '%s' has %d heads - please merge with an explicit rev"), + _("run 'hg heads .' to see heads")), + 'rebase': + (_("branch '%s' has %d heads - please rebase to an explicit rev"), + _("run 'hg heads .' 
to see heads")), + }, + # branch have no other unbookmarked heads + 'bookmarkedheads': + {'merge': + (_("heads are bookmarked - please merge with an explicit rev"), + _("run 'hg heads' to see all heads")), + 'rebase': + (_("heads are bookmarked - please rebase to an explicit rev"), + _("run 'hg heads' to see all heads")), + }, + # branch have just a single heads, but there is other branches + 'nootherbranchheads': + {'merge': + (_("branch '%s' has one head - please merge with an explicit rev"), + _("run 'hg heads' to see all heads")), + 'rebase': + (_("branch '%s' has one head - please rebase to an explicit rev"), + _("run 'hg heads' to see all heads")), + }, + # repository have a single head + 'nootherheads': + {'merge': + (_('nothing to merge'), + None), + 'rebase': + (_('nothing to rebase'), + None), + }, + # repository have a single head and we are not on it + 'nootherheadsbehind': + {'merge': + (_('nothing to merge'), + _("use 'hg update' instead")), + 'rebase': + (_('nothing to rebase'), + _("use 'hg update' instead")), + }, + # We are not on a head + 'notatheads': + {'merge': + (_('working directory not at a head revision'), + _("use 'hg update' or merge with an explicit revision")), + 'rebase': + (_('working directory not at a head revision'), + _("use 'hg update' or rebase to an explicit revision")) + }, + 'emptysourceset': + {'merge': + (_('source set is empty'), + None), + 'rebase': + (_('source set is empty'), + None), + }, + 'multiplebranchessourceset': + {'merge': + (_('source set is rooted in multiple branches'), + None), + 'rebase': + (_('rebaseset is rooted in multiple named branches'), + _('specify an explicit destination with --dest')), + }, + } + +def _destmergebook(repo, action='merge', sourceset=None): """find merge destination in the active bookmark case""" node = None bmheads = repo.bookmarkheads(repo._activebookmark) @@ -144,61 +267,90 @@ else: node = bmheads[0] elif len(bmheads) > 2: - raise error.Abort(_("multiple matching bookmarks to 
merge - " - "please merge with an explicit rev or bookmark"), - hint=_("run 'hg heads' to see all heads")) + msg, hint = msgdestmerge['toomanybookmarks'][action] + raise error.ManyMergeDestAbort(msg, hint=hint) elif len(bmheads) <= 1: - raise error.Abort(_("no matching bookmark to merge - " - "please merge with an explicit rev or bookmark"), - hint=_("run 'hg heads' to see all heads")) + msg, hint = msgdestmerge['nootherbookmarks'][action] + raise error.NoMergeDestAbort(msg, hint=hint) assert node is not None return node -def _destmergebranch(repo): +def _destmergebranch(repo, action='merge', sourceset=None, onheadcheck=True): """find merge destination based on branch heads""" node = None - branch = repo[None].branch() - bheads = repo.branchheads(branch) - nbhs = [bh for bh in bheads if not repo[bh].bookmarks()] - if len(nbhs) > 2: - raise error.Abort(_("branch '%s' has %d heads - " - "please merge with an explicit rev") - % (branch, len(bheads)), - hint=_("run 'hg heads .' to see heads")) + if sourceset is None: + sourceset = [repo[repo.dirstate.p1()].rev()] + branch = repo.dirstate.branch() + elif not sourceset: + msg, hint = msgdestmerge['emptysourceset'][action] + raise error.NoMergeDestAbort(msg, hint=hint) + else: + branch = None + for ctx in repo.set('roots(%ld::%ld)', sourceset, sourceset): + if branch is not None and ctx.branch() != branch: + msg, hint = msgdestmerge['multiplebranchessourceset'][action] + raise error.ManyMergeDestAbort(msg, hint=hint) + branch = ctx.branch() - parent = repo.dirstate.p1() - if len(nbhs) <= 1: - if len(bheads) > 1: - raise error.Abort(_("heads are bookmarked - " - "please merge with an explicit rev"), - hint=_("run 'hg heads' to see all heads")) - if len(repo.heads()) > 1: - raise error.Abort(_("branch '%s' has one head - " - "please merge with an explicit rev") - % branch, - hint=_("run 'hg heads' to see all heads")) - msg, hint = _('nothing to merge'), None - if parent != repo.lookup(branch): - hint = _("use 'hg update' 
instead") + bheads = repo.branchheads(branch) + onhead = repo.revs('%ld and %ln', sourceset, bheads) + if onheadcheck and not onhead: + # Case A: working copy if not on a head. (merge only) + # + # This is probably a user mistake We bailout pointing at 'hg update' + if len(repo.heads()) <= 1: + msg, hint = msgdestmerge['nootherheadsbehind'][action] + else: + msg, hint = msgdestmerge['notatheads'][action] raise error.Abort(msg, hint=hint) - - if parent not in bheads: - raise error.Abort(_('working directory not at a head revision'), - hint=_("use 'hg update' or merge with an " - "explicit revision")) - if parent == nbhs[0]: - node = nbhs[-1] + # remove heads descendants of source from the set + bheads = list(repo.revs('%ln - (%ld::)', bheads, sourceset)) + # filters out bookmarked heads + nbhs = list(repo.revs('%ld - bookmark()', bheads)) + if len(nbhs) > 1: + # Case B: There is more than 1 other anonymous heads + # + # This means that there will be more than 1 candidate. This is + # ambiguous. We abort asking the user to pick as explicit destination + # instead. + msg, hint = msgdestmerge['toomanyheads'][action] + msg %= (branch, len(bheads) + 1) + raise error.ManyMergeDestAbort(msg, hint=hint) + elif not nbhs: + # Case B: There is no other anonymous heads + # + # This means that there is no natural candidate to merge with. + # We abort, with various messages for various cases. + if bheads: + msg, hint = msgdestmerge['bookmarkedheads'][action] + elif len(repo.heads()) > 1: + msg, hint = msgdestmerge['nootherbranchheads'][action] + msg %= branch + elif not onhead: + # if 'onheadcheck == False' (rebase case), + # this was not caught in Case A. 
+ msg, hint = msgdestmerge['nootherheadsbehind'][action] + else: + msg, hint = msgdestmerge['nootherheads'][action] + raise error.NoMergeDestAbort(msg, hint=hint) else: node = nbhs[0] assert node is not None return node -def destmerge(repo): +def destmerge(repo, action='merge', sourceset=None, onheadcheck=True): + """return the default destination for a merge + + (or raise exception about why it can't pick one) + + :action: the action being performed, controls emitted error message + """ if repo._activebookmark: - node = _destmergebook(repo) + node = _destmergebook(repo, action=action, sourceset=sourceset) else: - node = _destmergebranch(repo) + node = _destmergebranch(repo, action=action, sourceset=sourceset, + onheadcheck=onheadcheck) return repo[node].rev() histeditdefaultrevset = 'reverse(only(.) and not public() and not ::merge())' @@ -218,3 +370,53 @@ return revs.first() return None + +def _statusotherbook(ui, repo): + bmheads = repo.bookmarkheads(repo._activebookmark) + curhead = repo[repo._activebookmark].node() + if repo.revs('%n and parents()', curhead): + # we are on the active bookmark + bmheads = [b for b in bmheads if curhead != b] + if bmheads: + msg = _('%i other divergent bookmarks for "%s"\n') + ui.status(msg % (len(bmheads), repo._activebookmark)) + +def _statusotherbranchheads(ui, repo): + currentbranch = repo.dirstate.branch() + allheads = repo.branchheads(currentbranch, closed=True) + heads = repo.branchheads(currentbranch) + if repo.revs('%ln and parents()', allheads): + # we are on a head, even though it might be closed + # + # on closed otherheads + # ========= ========== + # o 0 all heads for current branch are closed + # N only descendant branch heads are closed + # x 0 there is only one non-closed branch head + # N there are some non-closed branch heads + # ========= ========== + otherheads = repo.revs('%ln - parents()', heads) + if repo['.'].closesbranch(): + ui.warn(_('no open descendant heads on branch "%s", ' + 'updating to a closed 
head\n') % + (currentbranch)) + if otherheads: + ui.warn(_('(committing will reopen the head, ' + 'use `hg heads .` to see %i other heads)\n') % + (len(otherheads))) + else: + ui.warn(_('(committing will reopen branch "%s")\n') % + (currentbranch)) + elif otherheads: + ui.status(_('%i other heads for branch "%s"\n') % + (len(otherheads), currentbranch)) + +def statusotherdests(ui, repo): + """Print message about other head""" + # XXX we should probably include a hint: + # - about what to do + # - how to see such heads + if repo._activebookmark: + _statusotherbook(ui, repo) + else: + _statusotherbranchheads(ui, repo)
--- a/mercurial/dispatch.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/dispatch.py Sat Apr 16 18:06:48 2016 -0500 @@ -31,8 +31,13 @@ error, extensions, fancyopts, + fileset, hg, hook, + revset, + templatefilters, + templatekw, + templater, ui as uimod, util, ) @@ -79,6 +84,8 @@ else: write(_("hg: parse error: %s\n") % inst.args[0]) _reportsimilar(write, similar) + if inst.hint: + write(_("(%s)\n") % inst.hint) def dispatch(req): "run the command specified in req.args" @@ -109,8 +116,6 @@ return -1 except error.ParseError as inst: _formatparse(ferr.write, inst) - if inst.hint: - ferr.write(_("(%s)\n") % inst.hint) return -1 msg = ' '.join(' ' in a and repr(a) or a for a in req.args) @@ -118,11 +123,19 @@ ret = None try: ret = _runcatch(req) - return ret + except KeyboardInterrupt: + try: + req.ui.warn(_("interrupted!\n")) + except IOError as inst: + if inst.errno != errno.EPIPE: + raise + ret = -1 finally: duration = time.time() - starttime + req.ui.flush() req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n", msg, ret or 0, duration) + return ret def _runcatch(req): def catchterm(*args): @@ -206,8 +219,6 @@ (inst.args[0], " ".join(inst.args[1]))) except error.ParseError as inst: _formatparse(ui.warn, inst) - if inst.hint: - ui.warn(_("(%s)\n") % inst.hint) return -1 except error.LockHeld as inst: if inst.errno == errno.ETIMEDOUT: @@ -313,11 +324,7 @@ else: ui.warn(_("abort: %s\n") % inst.strerror) except KeyboardInterrupt: - try: - ui.warn(_("interrupted!\n")) - except IOError as inst: - if inst.errno != errno.EPIPE: - raise + raise except MemoryError: ui.warn(_("abort: out of memory\n")) except SystemExit as inst: @@ -326,61 +333,9 @@ return inst.code except socket.error as inst: ui.warn(_("abort: %s\n") % inst.args[-1]) - except: # re-raises - # For compatibility checking, we discard the portion of the hg - # version after the + on the assumption that if a "normal - # user" is running a build with a + in it the packager - # probably built from 
fairly close to a tag and anyone with a - # 'make local' copy of hg (where the version number can be out - # of date) will be clueful enough to notice the implausible - # version number and try updating. - ct = util.versiontuple(n=2) - worst = None, ct, '' - if ui.config('ui', 'supportcontact', None) is None: - for name, mod in extensions.extensions(): - testedwith = getattr(mod, 'testedwith', '') - report = getattr(mod, 'buglink', _('the extension author.')) - if not testedwith.strip(): - # We found an untested extension. It's likely the culprit. - worst = name, 'unknown', report - break - - # Never blame on extensions bundled with Mercurial. - if testedwith == 'internal': - continue - - tested = [util.versiontuple(t, 2) for t in testedwith.split()] - if ct in tested: - continue - - lower = [t for t in tested if t < ct] - nearest = max(lower or tested) - if worst[0] is None or nearest < worst[1]: - worst = name, nearest, report - if worst[0] is not None: - name, testedwith, report = worst - if not isinstance(testedwith, str): - testedwith = '.'.join([str(c) for c in testedwith]) - warning = (_('** Unknown exception encountered with ' - 'possibly-broken third-party extension %s\n' - '** which supports versions %s of Mercurial.\n' - '** Please disable %s and try your action again.\n' - '** If that fixes the bug please report it to %s\n') - % (name, testedwith, name, report)) - else: - bugtracker = ui.config('ui', 'supportcontact', None) - if bugtracker is None: - bugtracker = _("https://mercurial-scm.org/wiki/BugTracker") - warning = (_("** unknown exception encountered, " - "please report by visiting\n** ") + bugtracker + '\n') - warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) + - (_("** Mercurial Distributed SCM (version %s)\n") % - util.version()) + - (_("** Extensions loaded: %s\n") % - ", ".join([x[0] for x in extensions.extensions()]))) - ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc()) - ui.warn(warning) - raise + 
except: # perhaps re-raises + if not handlecommandexception(ui): + raise return -1 @@ -424,7 +379,7 @@ return r.sub(lambda x: replacemap[x.group()], cmd) class cmdalias(object): - def __init__(self, name, definition, cmdtable): + def __init__(self, name, definition, cmdtable, source): self.name = self.cmd = name self.cmdname = '' self.definition = definition @@ -432,11 +387,9 @@ self.args = [] self.opts = [] self.help = '' - self.norepo = True - self.optionalrepo = False - self.inferrepo = False self.badalias = None self.unknowncmd = False + self.source = source try: aliases, entry = cmdutil.findcmd(self.name, cmdtable) @@ -496,12 +449,6 @@ self.fn, self.opts = tableentry self.args = aliasargs(self.fn, args) - if cmd not in commands.norepo.split(' '): - self.norepo = False - if cmd in commands.optionalrepo.split(' '): - self.optionalrepo = True - if cmd in commands.inferrepo.split(' '): - self.inferrepo = True if self.help.startswith("hg " + cmd): # drop prefix in old-style help lines so hg shows the alias self.help = self.help[4 + len(cmd):] @@ -515,6 +462,14 @@ self.badalias = (_("alias '%s' resolves to ambiguous command '%s'") % (self.name, cmd)) + def __getattr__(self, name): + adefaults = {'norepo': True, 'optionalrepo': False, 'inferrepo': False} + if name not in adefaults: + raise AttributeError(name) + if self.badalias or util.safehasattr(self, 'shell'): + return adefaults[name] + return getattr(self.fn, name) + def __call__(self, ui, *args, **opts): if self.badalias: hint = None @@ -545,7 +500,8 @@ # may use extension commands. Aliases can also use other alias definitions, # but only if they have been defined prior to the current definition. 
for alias, definition in ui.configitems('alias'): - aliasdef = cmdalias(alias, definition, cmdtable) + source = ui.configsource('alias', alias) + aliasdef = cmdalias(alias, definition, cmdtable, source) try: olddef = cmdtable[aliasdef.cmd][0] @@ -556,12 +512,6 @@ pass cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help) - if aliasdef.norepo: - commands.norepo += ' %s' % alias - if aliasdef.optionalrepo: - commands.optionalrepo += ' %s' % alias - if aliasdef.inferrepo: - commands.inferrepo += ' %s' % alias def _parse(ui, args): options = {} @@ -609,7 +559,8 @@ for cfg in config: try: - name, value = cfg.split('=', 1) + name, value = [cfgelem.strip() + for cfgelem in cfg.split('=', 1)] section, name = name.split('.', 1) if not section or not name: raise IndexError @@ -684,16 +635,17 @@ result=ret, pats=cmdpats, opts=cmdoptions) return ret -def _getlocal(ui, rpath): +def _getlocal(ui, rpath, wd=None): """Return (path, local ui object) for the given target path. Takes paths in [cwd]/.hg/hgrc into account." 
""" - try: - wd = os.getcwd() - except OSError as e: - raise error.Abort(_("error getting current working directory: %s") % - e.strerror) + if wd is None: + try: + wd = os.getcwd() + except OSError as e: + raise error.Abort(_("error getting current working directory: %s") % + e.strerror) path = cmdutil.findrepo(wd) or "" if not path: lui = ui @@ -726,26 +678,16 @@ if precheck: strict = True - norepo = commands.norepo - optionalrepo = commands.optionalrepo - inferrepo = commands.inferrepo - def restorecommands(): - commands.norepo = norepo - commands.optionalrepo = optionalrepo - commands.inferrepo = inferrepo cmdtable = commands.table.copy() addaliases(lui, cmdtable) else: strict = False - def restorecommands(): - pass cmdtable = commands.table cmd = args[0] try: aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict) except (error.AmbiguousCommand, error.UnknownCommand): - restorecommands() return cmd = aliases[0] @@ -756,9 +698,31 @@ return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {}) - restorecommands() +def _cmdattr(ui, cmd, func, attr): + try: + return getattr(func, attr) + except AttributeError: + ui.deprecwarn("missing attribute '%s', use @command decorator " + "to register '%s'" % (attr, cmd), '3.8') + return False _loaded = set() + +# list of (objname, loadermod, loadername) tuple: +# - objname is the name of an object in extension module, from which +# extra information is loaded +# - loadermod is the module where loader is placed +# - loadername is the name of the function, which takes (ui, extensionname, +# extraobj) arguments +extraloaders = [ + ('cmdtable', commands, 'loadcmdtable'), + ('filesetpredicate', fileset, 'loadpredicate'), + ('revsetpredicate', revset, 'loadpredicate'), + ('templatefilter', templatefilters, 'loadfilter'), + ('templatefunc', templater, 'loadfunction'), + ('templatekeyword', templatekw, 'loadkeyword'), +] + def _dispatch(req): args = req.args ui = req.ui @@ -788,12 +752,10 @@ # (uisetup and extsetup are 
handled in extensions.loadall) for name, module in exts: - cmdtable = getattr(module, 'cmdtable', {}) - overrides = [cmd for cmd in cmdtable if cmd in commands.table] - if overrides: - ui.warn(_("extension '%s' overrides commands: %s\n") - % (name, " ".join(overrides))) - commands.table.update(cmdtable) + for objname, loadermod, loadername in extraloaders: + extraobj = getattr(module, objname, None) + if extraobj is not None: + getattr(loadermod, loadername)(ui, name, extraobj) _loaded.add(name) # (reposetup is handled in hg.repository) @@ -874,7 +836,7 @@ repo = None cmdpats = args[:] - if cmd not in commands.norepo.split(): + if not _cmdattr(ui, cmd, func, 'norepo'): # use the repo from the request only if we don't have -R if not rpath and not cwd: repo = req.repo @@ -895,9 +857,10 @@ except error.RepoError: if rpath and rpath[-1]: # invalid -R path raise - if cmd not in commands.optionalrepo.split(): - if (cmd in commands.inferrepo.split() and - args and not path): # try to infer -R from command args + if not _cmdattr(ui, cmd, func, 'optionalrepo'): + if (_cmdattr(ui, cmd, func, 'inferrepo') and + args and not path): + # try to infer -R from command args repos = map(cmdutil.findrepo, args) guess = repos[0] if guess and repos.count(guess) == len(repos): @@ -1027,8 +990,7 @@ output = ui.config('profiling', 'output') if output == 'blackbox': - import StringIO - fp = StringIO.StringIO() + fp = util.stringio() elif output: path = ui.expandpath(output) fp = open(path, 'wb') @@ -1053,3 +1015,70 @@ fp.close() else: return checkargs() + +def _exceptionwarning(ui): + """Produce a warning message for the current active exception""" + + # For compatibility checking, we discard the portion of the hg + # version after the + on the assumption that if a "normal + # user" is running a build with a + in it the packager + # probably built from fairly close to a tag and anyone with a + # 'make local' copy of hg (where the version number can be out + # of date) will be clueful 
enough to notice the implausible + # version number and try updating. + ct = util.versiontuple(n=2) + worst = None, ct, '' + if ui.config('ui', 'supportcontact', None) is None: + for name, mod in extensions.extensions(): + testedwith = getattr(mod, 'testedwith', '') + report = getattr(mod, 'buglink', _('the extension author.')) + if not testedwith.strip(): + # We found an untested extension. It's likely the culprit. + worst = name, 'unknown', report + break + + # Never blame on extensions bundled with Mercurial. + if testedwith == 'internal': + continue + + tested = [util.versiontuple(t, 2) for t in testedwith.split()] + if ct in tested: + continue + + lower = [t for t in tested if t < ct] + nearest = max(lower or tested) + if worst[0] is None or nearest < worst[1]: + worst = name, nearest, report + if worst[0] is not None: + name, testedwith, report = worst + if not isinstance(testedwith, str): + testedwith = '.'.join([str(c) for c in testedwith]) + warning = (_('** Unknown exception encountered with ' + 'possibly-broken third-party extension %s\n' + '** which supports versions %s of Mercurial.\n' + '** Please disable %s and try your action again.\n' + '** If that fixes the bug please report it to %s\n') + % (name, testedwith, name, report)) + else: + bugtracker = ui.config('ui', 'supportcontact', None) + if bugtracker is None: + bugtracker = _("https://mercurial-scm.org/wiki/BugTracker") + warning = (_("** unknown exception encountered, " + "please report by visiting\n** ") + bugtracker + '\n') + warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) + + (_("** Mercurial Distributed SCM (version %s)\n") % + util.version()) + + (_("** Extensions loaded: %s\n") % + ", ".join([x[0] for x in extensions.extensions()]))) + return warning + +def handlecommandexception(ui): + """Produce a warning message for broken commands + + Called when handling an exception; the exception is reraised if + this function returns False, ignored otherwise. 
+ """ + warning = _exceptionwarning(ui) + ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc()) + ui.warn(warning) + return False # re-raise the exception
--- a/mercurial/encoding.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/encoding.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,14 +7,19 @@ from __future__ import absolute_import +import array import locale import os +import sys import unicodedata from . import ( error, ) +if sys.version_info[0] >= 3: + unichr = chr + # These unicode characters are ignored by HFS+ (Apple Technote 1150, # "Unicode Subtleties"), so we need to ignore them in some places for # sanity. @@ -22,7 +27,10 @@ "200c 200d 200e 200f 202a 202b 202c 202d 202e " "206a 206b 206c 206d 206e 206f feff".split()] # verify the next function will work -assert set([i[0] for i in _ignore]) == set(["\xe2", "\xef"]) +if sys.version_info[0] >= 3: + assert set(i[0] for i in _ignore) == set([ord(b'\xe2'), ord(b'\xef')]) +else: + assert set(i[0] for i in _ignore) == set(["\xe2", "\xef"]) def hfsignoreclean(s): """Remove codepoints ignored by HFS+ from s. @@ -378,9 +386,23 @@ upper = 1 other = 0 -_jsonmap = {} +_jsonmap = [] +_jsonmap.extend("\\u%04x" % x for x in range(32)) +_jsonmap.extend(chr(x) for x in range(32, 127)) +_jsonmap.append('\\u007f') +_jsonmap[0x09] = '\\t' +_jsonmap[0x0a] = '\\n' +_jsonmap[0x22] = '\\"' +_jsonmap[0x5c] = '\\\\' +_jsonmap[0x08] = '\\b' +_jsonmap[0x0c] = '\\f' +_jsonmap[0x0d] = '\\r' +_paranoidjsonmap = _jsonmap[:] +_paranoidjsonmap[0x3c] = '\\u003c' # '<' (e.g. escape "</script>") +_paranoidjsonmap[0x3e] = '\\u003e' # '>' +_jsonmap.extend(chr(x) for x in range(128, 256)) -def jsonescape(s): +def jsonescape(s, paranoid=False): '''returns a string suitable for JSON JSON is problematic for us because it doesn't support non-Unicode @@ -405,24 +427,36 @@ 'utf-8: caf\\xc3\\xa9' >>> jsonescape('') '' + + If paranoid, non-ascii and common troublesome characters are also escaped. + This is suitable for web output. 
+ + >>> jsonescape('escape boundary: \\x7e \\x7f \\xc2\\x80', paranoid=True) + 'escape boundary: ~ \\\\u007f \\\\u0080' + >>> jsonescape('a weird byte: \\xdd', paranoid=True) + 'a weird byte: \\\\udcdd' + >>> jsonescape('utf-8: caf\\xc3\\xa9', paranoid=True) + 'utf-8: caf\\\\u00e9' + >>> jsonescape('non-BMP: \\xf0\\x9d\\x84\\x9e', paranoid=True) + 'non-BMP: \\\\ud834\\\\udd1e' + >>> jsonescape('<foo@example.org>', paranoid=True) + '\\\\u003cfoo@example.org\\\\u003e' ''' - if not _jsonmap: - for x in xrange(32): - _jsonmap[chr(x)] = "\\u%04x" % x - for x in xrange(32, 256): - c = chr(x) - _jsonmap[c] = c - _jsonmap['\x7f'] = '\\u007f' - _jsonmap['\t'] = '\\t' - _jsonmap['\n'] = '\\n' - _jsonmap['\"'] = '\\"' - _jsonmap['\\'] = '\\\\' - _jsonmap['\b'] = '\\b' - _jsonmap['\f'] = '\\f' - _jsonmap['\r'] = '\\r' + if paranoid: + jm = _paranoidjsonmap + else: + jm = _jsonmap - return ''.join(_jsonmap[c] for c in toutf8b(s)) + u8chars = toutf8b(s) + try: + return ''.join(jm[x] for x in bytearray(u8chars)) # fast path + except IndexError: + pass + # non-BMP char is represented as UTF-16 surrogate pair + u16codes = array.array('H', u8chars.decode('utf-8').encode('utf-16')) + u16codes.pop(0) # drop BOM + return ''.join(jm[x] if x < 128 else '\\u%04x' % x for x in u16codes) _utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]
--- a/mercurial/error.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/error.py Sat Apr 16 18:06:48 2016 -0500 @@ -72,6 +72,15 @@ class UpdateAbort(Abort): """Raised when an update is aborted for destination issue""" +class MergeDestAbort(Abort): + """Raised when an update is aborted for destination issues""" + +class NoMergeDestAbort(MergeDestAbort): + """Raised when an update is aborted because there is nothing to merge""" + +class ManyMergeDestAbort(MergeDestAbort): + """Raised when an update is aborted because destination is ambigious""" + class ResponseExpected(Abort): """Raised when an EOF is received for a prompt""" def __init__(self):
--- a/mercurial/exchange.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/exchange.py Sat Apr 16 18:06:48 2016 -0500 @@ -8,8 +8,6 @@ from __future__ import absolute_import import errno -import urllib -import urllib2 from .i18n import _ from .node import ( @@ -35,6 +33,9 @@ util, ) +urlerr = util.urlerr +urlreq = util.urlreq + # Maps bundle compression human names to internal representation. _bundlespeccompressions = {'none': None, 'bzip2': 'BZ', @@ -97,8 +98,8 @@ 'missing "=" in parameter: %s') % p) key, value = p.split('=', 1) - key = urllib.unquote(key) - value = urllib.unquote(value) + key = urlreq.unquote(key) + value = urlreq.unquote(value) params[key] = value return version, params @@ -236,7 +237,7 @@ elif isinstance(b, streamclone.streamcloneapplier): requirements = streamclone.readbundle1header(fh)[2] params = 'requirements=%s' % ','.join(sorted(requirements)) - return 'none-packed1;%s' % urllib.quote(params) + return 'none-packed1;%s' % urlreq.quote(params) else: raise error.Abort(_('unknown bundle type: %s') % b) @@ -266,10 +267,10 @@ class pushoperation(object): """A object that represent a single push operation - It purpose is to carry push related state and very common operation. + Its purpose is to carry push related state and very common operations. - A new should be created at the beginning of each push and discarded - afterward. + A new pushoperation should be created at the beginning of each push and + discarded afterward. 
""" def __init__(self, repo, remote, force=False, revs=None, newbranch=False, @@ -576,7 +577,8 @@ ancestors = repo.changelog.ancestors(revnums, inclusive=True) remotebookmark = remote.listkeys('bookmarks') - explicit = set(pushop.bookmarks) + explicit = set([repo._bookmarks.expandname(bookmark) + for bookmark in pushop.bookmarks]) comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex) addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp @@ -693,30 +695,25 @@ # Send known heads to the server for race detection. if not _pushcheckoutgoing(pushop): return - pushop.repo.prepushoutgoinghooks(pushop.repo, - pushop.remote, - pushop.outgoing) + pushop.repo.prepushoutgoinghooks(pushop) _pushb2ctxcheckheads(pushop, bundler) b2caps = bundle2.bundle2caps(pushop.remote) - version = None + version = '01' cgversions = b2caps.get('changegroup') - if not cgversions: # 3.1 and 3.2 ship with an empty value - cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push', - pushop.outgoing) - else: + if cgversions: # 3.1 and 3.2 ship with an empty value cgversions = [v for v in cgversions if v in changegroup.supportedoutgoingversions( pushop.repo)] if not cgversions: raise ValueError(_('no common changegroup version')) version = max(cgversions) - cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push', - pushop.outgoing, - version=version) + cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push', + pushop.outgoing, + version=version) cgpart = bundler.newpart('changegroup', data=cg) - if version is not None: + if cgversions: cgpart.addparam('version', version) if 'treemanifest' in pushop.repo.requirements: cgpart.addparam('treemanifest', '1') @@ -886,9 +883,7 @@ pushop.stepsdone.add('changesets') if not _pushcheckoutgoing(pushop): return - pushop.repo.prepushoutgoinghooks(pushop.repo, - pushop.remote, - pushop.outgoing) + pushop.repo.prepushoutgoinghooks(pushop) outgoing = pushop.outgoing unbundle = pushop.remote.capable('unbundle') # TODO: get 
bundlecaps from remote @@ -1471,7 +1466,7 @@ """return a set with appropriate options to use bundle20 during getbundle""" caps = set(['HG20']) capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo)) - caps.add('bundle2=' + urllib.quote(capsblob)) + caps.add('bundle2=' + urlreq.quote(capsblob)) return caps # List of names of steps to perform for a bundle2 for getbundle, order matters. @@ -1537,7 +1532,7 @@ b2caps = {} for bcaps in bundlecaps: if bcaps.startswith('bundle2='): - blob = urllib.unquote(bcaps[len('bundle2='):]) + blob = urlreq.unquote(bcaps[len('bundle2='):]) b2caps.update(bundle2.decodecaps(blob)) bundler = bundle2.bundle20(repo.ui, b2caps) @@ -1558,23 +1553,22 @@ cg = None if kwargs.get('cg', True): # build changegroup bundle here. - version = None + version = '01' cgversions = b2caps.get('changegroup') - getcgkwargs = {} if cgversions: # 3.1 and 3.2 ship with an empty value cgversions = [v for v in cgversions if v in changegroup.supportedoutgoingversions(repo)] if not cgversions: raise ValueError(_('no common changegroup version')) - version = getcgkwargs['version'] = max(cgversions) + version = max(cgversions) outgoing = changegroup.computeoutgoing(repo, heads, common) cg = changegroup.getlocalchangegroupraw(repo, source, outgoing, bundlecaps=bundlecaps, - **getcgkwargs) + version=version) if cg: part = bundler.newpart('changegroup', data=cg) - if version is not None: + if cgversions: part.addparam('version', version) part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False) if 'treemanifest' in repo.requirements: @@ -1807,8 +1801,8 @@ attrs = {'URL': fields[0]} for rawattr in fields[1:]: key, value = rawattr.split('=', 1) - key = urllib.unquote(key) - value = urllib.unquote(value) + key = urlreq.unquote(key) + value = urlreq.unquote(value) attrs[key] = value # Parse BUNDLESPEC into components. 
This makes client-side @@ -1924,9 +1918,9 @@ cg.apply(repo, 'clonebundles', url) tr.close() return True - except urllib2.HTTPError as e: + except urlerr.httperror as e: ui.warn(_('HTTP error fetching bundle: %s\n') % str(e)) - except urllib2.URLError as e: + except urlerr.urlerror as e: ui.warn(_('error fetching bundle: %s\n') % e.reason[1]) return False
--- a/mercurial/extensions.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/extensions.py Sat Apr 16 18:06:48 2016 -0500 @@ -71,6 +71,20 @@ exc.filename = path # python does not fill this raise +def _importh(name): + """import and return the <name> module""" + mod = __import__(name) + components = name.split('.') + for comp in components[1:]: + mod = getattr(mod, comp) + return mod + +def _reportimporterror(ui, err, failed, next): + ui.debug('could not import %s (%s): trying %s\n' + % (failed, err, next)) + if ui.debugflag: + ui.traceback() + def load(ui, name, path): if name.startswith('hgext.') or name.startswith('hgext/'): shortname = name[6:] @@ -87,20 +101,15 @@ # conflicts with other modules mod = loadpath(path, 'hgext.%s' % name) else: - def importh(name): - mod = __import__(name) - components = name.split('.') - for comp in components[1:]: - mod = getattr(mod, comp) - return mod try: - mod = importh("hgext.%s" % name) + mod = _importh("hgext.%s" % name) except ImportError as err: - ui.debug('could not import hgext.%s (%s): trying %s\n' - % (name, err, name)) - if ui.debugflag: - ui.traceback() - mod = importh(name) + _reportimporterror(ui, err, "hgext.%s" % name, name) + try: + mod = _importh("hgext3rd.%s" % name) + except ImportError as err: + _reportimporterror(ui, err, "hgext3rd.%s" % name, name) + mod = _importh(name) # Before we do anything with the extension, check against minimum stated # compatibility. 
This gives extension authors a mechanism to have their @@ -195,6 +204,12 @@ return func(*(args + a), **kw) return closure +def _updatewrapper(wrap, origfn): + '''Copy attributes to wrapper function''' + wrap.__module__ = getattr(origfn, '__module__') + wrap.__doc__ = getattr(origfn, '__doc__') + wrap.__dict__.update(getattr(origfn, '__dict__', {})) + def wrapcommand(table, command, wrapper, synopsis=None, docstring=None): '''Wrap the command named `command' in table @@ -233,13 +248,9 @@ origfn = entry[0] wrap = bind(util.checksignature(wrapper), util.checksignature(origfn)) - - wrap.__module__ = getattr(origfn, '__module__') - - doc = getattr(origfn, '__doc__') + _updatewrapper(wrap, origfn) if docstring is not None: - doc += docstring - wrap.__doc__ = doc + wrap.__doc__ += docstring newentry = list(entry) newentry[0] = wrap @@ -285,7 +296,9 @@ origfn = getattr(container, funcname) assert callable(origfn) - setattr(container, funcname, bind(wrapper, origfn)) + wrap = bind(wrapper, origfn) + _updatewrapper(wrap, origfn) + setattr(container, funcname, wrap) return origfn def _disabledpaths(strip_init=False): @@ -456,6 +469,10 @@ return exts +def notloaded(): + '''return short names of extensions that failed to load''' + return [name for name, mod in _extensions.iteritems() if mod is None] + def moduleversion(module): '''return version information from given module as a string''' if (util.safehasattr(module, 'getversion') @@ -468,3 +485,7 @@ if isinstance(version, (list, tuple)): version = '.'.join(str(o) for o in version) return version + +def ismoduleinternal(module): + exttestedwith = getattr(module, 'testedwith', None) + return exttestedwith == "internal"
--- a/mercurial/filemerge.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/filemerge.py Sat Apr 16 18:06:48 2016 -0500 @@ -17,6 +17,7 @@ from . import ( error, + formatter, match, scmutil, simplemerge, @@ -230,8 +231,8 @@ @internaltool('prompt', nomerge) def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf): - """Asks the user which of the local or the other version to keep as - the merged version.""" + """Asks the user which of the local `p1()` or the other `p2()` version to + keep as the merged version.""" ui = repo.ui fd = fcd.path() @@ -268,12 +269,12 @@ @internaltool('local', nomerge) def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf): - """Uses the local version of files as the merged version.""" + """Uses the local `p1()` version of files as the merged version.""" return 0, fcd.isabsent() @internaltool('other', nomerge) def _iother(repo, mynode, orig, fcd, fco, fca, toolconf): - """Uses the other version of files as the merged version.""" + """Uses the other `p2()` version of files as the merged version.""" if fco.isabsent(): # local changed, remote deleted -- 'deleted' picked repo.wvfs.unlinkpath(fcd.path()) @@ -411,7 +412,7 @@ def _imergelocal(*args, **kwargs): """ Like :merge, but resolve all conflicts non-interactively in favor - of the local changes.""" + of the local `p1()` changes.""" success, status = _imergeauto(localorother='local', *args, **kwargs) return success, status, False @@ -419,7 +420,7 @@ def _imergeother(*args, **kwargs): """ Like :merge, but resolve all conflicts non-interactively in favor - of the other changes.""" + of the other `p2()` changes.""" success, status = _imergeauto(localorother='other', *args, **kwargs) return success, status, False @@ -526,7 +527,7 @@ ui = repo.ui template = ui.config('ui', 'mergemarkertemplate', _defaultconflictmarker) - tmpl = templater.templater(None, cache={'conflictmarker': template}) + tmpl = formatter.maketemplater(ui, 'conflictmarker', template) pad = max(len(l) for l in labels)
--- a/mercurial/fileset.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/fileset.py Sat Apr 16 18:06:48 2016 -0500 @@ -14,6 +14,7 @@ error, merge, parser, + registrar, util, ) @@ -144,34 +145,7 @@ # filesets using matchctx.existing() _existingcallers = set() -def predicate(decl, callstatus=False, callexisting=False): - """Return a decorator for fileset predicate function - - 'decl' argument is the declaration (including argument list like - 'adds(pattern)') or the name (for internal use only) of predicate. - - Optional 'callstatus' argument indicates whether predicate implies - 'matchctx.status()' at runtime or not (False, by default). - - Optional 'callexisting' argument indicates whether predicate - implies 'matchctx.existing()' at runtime or not (False, by - default). - """ - def decorator(func): - i = decl.find('(') - if i > 0: - name = decl[:i] - else: - name = decl - symbols[name] = func - if callstatus: - _statuscallers.add(name) - if callexisting: - _existingcallers.add(name) - if func.__doc__: - func.__doc__ = "``%s``\n %s" % (decl, func.__doc__.strip()) - return func - return decorator +predicate = registrar.filesetpredicate() @predicate('modified()', callstatus=True) def modified(mctx, x): @@ -560,5 +534,18 @@ def prettyformat(tree): return parser.prettyformat(tree, ('string', 'symbol')) +def loadpredicate(ui, extname, registrarobj): + """Load fileset predicates from specified registrarobj + """ + for name, func in registrarobj._table.iteritems(): + symbols[name] = func + if func._callstatus: + _statuscallers.add(name) + if func._callexisting: + _existingcallers.add(name) + +# load built-in predicates explicitly to setup _statuscallers/_existingcallers +loadpredicate(None, None, predicate) + # tell hggettext to extract docstrings from these functions: i18nfunctions = symbols.values()
--- a/mercurial/formatter.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/formatter.py Sat Apr 16 18:06:48 2016 -0500 @@ -153,7 +153,7 @@ self._topic = topic self._t = gettemplater(ui, topic, opts.get('template', '')) def _showitem(self): - g = self._t(self._topic, **self._item) + g = self._t(self._topic, ui=self._ui, **self._item) self._ui.write(templater.stringify(g)) def lookuptemplate(ui, topic, tmpl): @@ -171,11 +171,7 @@ # perhaps it's a reference to [templates] t = ui.config('templates', tmpl) if t: - try: - tmpl = templater.unquotestring(t) - except SyntaxError: - tmpl = t - return tmpl, None + return templater.unquotestring(t), None if tmpl == 'list': ui.write(_("available styles: %s\n") % templater.stylelist()) @@ -194,7 +190,15 @@ def gettemplater(ui, topic, spec): tmpl, mapfile = lookuptemplate(ui, topic, spec) - t = templater.templater(mapfile, {}) + assert not (tmpl and mapfile) + if mapfile: + return templater.templater.frommapfile(mapfile) + return maketemplater(ui, topic, tmpl) + +def maketemplater(ui, topic, tmpl, filters=None, cache=None): + """Create a templater from a string template 'tmpl'""" + aliases = ui.configitems('templatealias') + t = templater.templater(filters=filters, cache=cache, aliases=aliases) if tmpl: t.cache[topic] = tmpl return t
--- a/mercurial/graphmod.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/graphmod.py Sat Apr 16 18:06:48 2016 -0500 @@ -28,6 +28,12 @@ ) CHANGESET = 'C' +PARENT = 'P' +GRANDPARENT = 'G' +MISSINGPARENT = 'M' +# Style of line to draw. None signals a line that ends and is removed at this +# point. +EDGES = {PARENT: '|', GRANDPARENT: ':', MISSINGPARENT: None} def groupbranchiter(revs, parentsfunc, firstbranch=()): """Yield revisions from heads to roots one (topo) branch at a time. @@ -228,12 +234,16 @@ yield r def dagwalker(repo, revs): - """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples + """cset DAG generator yielding (id, CHANGESET, ctx, [parentinfo]) tuples This generator function walks through revisions (which should be ordered - from bigger to lower). It returns a tuple for each node. The node and parent - ids are arbitrary integers which identify a node in the context of the graph + from bigger to lower). It returns a tuple for each node. + + Each parentinfo entry is a tuple with (edgetype, parentid), where edgetype + is one of PARENT, GRANDPARENT or MISSINGPARENT. The node and parent ids + are arbitrary integers which identify a node in the context of the graph returned. + """ if not revs: return @@ -252,10 +262,13 @@ for rev in revs: ctx = repo[rev] - parents = sorted(set([p.rev() for p in ctx.parents() - if p.rev() in revs])) - mpars = [p.rev() for p in ctx.parents() if - p.rev() != nullrev and p.rev() not in parents] + # partition into parents in the rev set and missing parents, then + # augment the lists with markers, to inform graph drawing code about + # what kind of edge to draw between nodes. 
+ pset = set(p.rev() for p in ctx.parents() if p.rev() in revs) + mpars = [p.rev() for p in ctx.parents() + if p.rev() != nullrev and p.rev() not in pset] + parents = [(PARENT, p) for p in sorted(pset)] for mpar in mpars: gp = gpcache.get(mpar) @@ -264,11 +277,14 @@ # through all revs (issue4782) if not isinstance(revs, revset.baseset): revs = revset.baseset(revs) - gp = gpcache[mpar] = revset.reachableroots(repo, revs, [mpar]) + gp = gpcache[mpar] = sorted(set(revset.reachableroots( + repo, revs, [mpar]))) if not gp: - parents.append(mpar) + parents.append((MISSINGPARENT, mpar)) + pset.add(mpar) else: - parents.extend(g for g in gp if g not in parents) + parents.extend((GRANDPARENT, g) for g in gp if g not in pset) + pset.update(gp) yield (ctx.rev(), CHANGESET, ctx, parents) @@ -281,7 +297,8 @@ include = set(nodes) for node in nodes: ctx = repo[node] - parents = set([p.rev() for p in ctx.parents() if p.node() in include]) + parents = set((PARENT, p.rev()) for p in ctx.parents() + if p.node() in include) yield (ctx.rev(), CHANGESET, ctx, sorted(parents)) def colored(dag, repo): @@ -330,7 +347,7 @@ next = seen[:] # Add parents to next - addparents = [p for p in parents if p not in next] + addparents = [p for pt, p in parents if p not in next] next[col:col + 1] = addparents # Set colors for the parents @@ -351,7 +368,7 @@ bconf.get('width', -1), bconf.get('color', ''))) elif eid == cur: - for p in parents: + for ptype, p in parents: bconf = getconf(p) edges.append(( ecol, next.index(p), color, @@ -362,24 +379,27 @@ yield (cur, type, data, (col, color), edges) seen = next -def asciiedges(type, char, lines, seen, rev, parents): +def asciiedges(type, char, lines, state, rev, parents): """adds edge info to changelog DAG walk suitable for ascii()""" + seen = state['seen'] if rev not in seen: seen.append(rev) nodeidx = seen.index(rev) knownparents = [] newparents = [] - for parent in parents: + for ptype, parent in parents: if parent in seen: knownparents.append(parent) 
else: newparents.append(parent) + state['edges'][parent] = state['styles'].get(ptype, '|') ncols = len(seen) nextseen = seen[:] nextseen[nodeidx:nodeidx + 1] = newparents - edges = [(nodeidx, nextseen.index(p)) for p in knownparents if p != nullrev] + edges = [(nodeidx, nextseen.index(p)) + for p in knownparents if p != nullrev] while len(newparents) > 2: # ascii() only knows how to add or remove a single column between two @@ -403,6 +423,8 @@ edges.append((nodeidx, nodeidx + 1)) nmorecols = len(nextseen) - ncols seen[:] = nextseen + # remove current node from edge characters, no longer needed + state['edges'].pop(rev, None) yield (type, char, lines, (nodeidx, edges, ncols, nmorecols)) def _fixlongrightedges(edges): @@ -411,27 +433,28 @@ edges[i] = (start, end + 1) def _getnodelineedgestail( - node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail): - if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0: + echars, idx, pidx, ncols, coldiff, pdiff, fix_tail): + if fix_tail and coldiff == pdiff and coldiff != 0: # Still going in the same non-vertical direction. 
- if n_columns_diff == -1: - start = max(node_index + 1, p_node_index) - tail = ["|", " "] * (start - node_index - 1) - tail.extend(["/", " "] * (n_columns - start)) + if coldiff == -1: + start = max(idx + 1, pidx) + tail = echars[idx * 2:(start - 1) * 2] + tail.extend(["/", " "] * (ncols - start)) return tail else: - return ["\\", " "] * (n_columns - node_index - 1) + return ["\\", " "] * (ncols - idx - 1) else: - return ["|", " "] * (n_columns - node_index - 1) + remainder = (ncols - idx - 1) + return echars[-(remainder * 2):] if remainder > 0 else [] -def _drawedges(edges, nodeline, interline): +def _drawedges(echars, edges, nodeline, interline): for (start, end) in edges: if start == end + 1: interline[2 * end + 1] = "/" elif start == end - 1: interline[2 * start + 1] = "\\" elif start == end: - interline[2 * start] = "|" + interline[2 * start] = echars[2 * start] else: if 2 * end >= len(nodeline): continue @@ -442,26 +465,86 @@ if nodeline[i] != "+": nodeline[i] = "-" -def _getpaddingline(ni, n_columns, edges): - line = [] - line.extend(["|", " "] * ni) - if (ni, ni - 1) in edges or (ni, ni) in edges: - # (ni, ni - 1) (ni, ni) +def _getpaddingline(echars, idx, ncols, edges): + # all edges up to the current node + line = echars[:idx * 2] + # an edge for the current node, if there is one + if (idx, idx - 1) in edges or (idx, idx) in edges: + # (idx, idx - 1) (idx, idx) # | | | | | | | | # +---o | | o---+ - # | | c | | c | | + # | | X | | X | | # | |/ / | |/ / # | | | | | | - c = "|" + line.extend(echars[idx * 2:(idx + 1) * 2]) else: - c = " " - line.extend([c, " "]) - line.extend(["|", " "] * (n_columns - ni - 1)) + line.extend(' ') + # all edges to the right of the current node + remainder = ncols - idx - 1 + if remainder > 0: + line.extend(echars[-(remainder * 2):]) return line +def _drawendinglines(lines, extra, edgemap, seen): + """Draw ending lines for missing parent edges + + None indicates an edge that ends at between this node and the next + Replace with 
a short line ending in ~ and add / lines to any edges to + the right. + + """ + if None not in edgemap.values(): + return + + # Check for more edges to the right of our ending edges. + # We need enough space to draw adjustment lines for these. + edgechars = extra[::2] + while edgechars and edgechars[-1] is None: + edgechars.pop() + shift_size = max((edgechars.count(None) * 2) - 1, 0) + while len(lines) < 3 + shift_size: + lines.append(extra[:]) + + if shift_size: + empties = [] + toshift = [] + first_empty = extra.index(None) + for i, c in enumerate(extra[first_empty::2], first_empty // 2): + if c is None: + empties.append(i * 2) + else: + toshift.append(i * 2) + targets = list(range(first_empty, first_empty + len(toshift) * 2, 2)) + positions = toshift[:] + for line in lines[-shift_size:]: + line[first_empty:] = [' '] * (len(line) - first_empty) + for i in range(len(positions)): + pos = positions[i] - 1 + positions[i] = max(pos, targets[i]) + line[pos] = '/' if pos > targets[i] else extra[toshift[i]] + + map = {1: '|', 2: '~'} + for i, line in enumerate(lines): + if None not in line: + continue + line[:] = [c or map.get(i, ' ') for c in line] + + # remove edges that ended + remove = [p for p, c in edgemap.items() if c is None] + for parent in remove: + del edgemap[parent] + seen.remove(parent) + def asciistate(): """returns the initial value for the "state" argument to ascii()""" - return [0, 0] + return { + 'seen': [], + 'edges': {}, + 'lastcoldiff': 0, + 'lastindex': 0, + 'styles': EDGES.copy(), + 'graphshorten': False, + } def ascii(ui, state, type, char, text, coldata): """prints an ASCII graph of the DAG @@ -483,9 +566,15 @@ in the current revision. That is: -1 means one column removed; 0 means no columns added or removed; 1 means one column added. 
""" - idx, edges, ncols, coldiff = coldata assert -2 < coldiff < 2 + + edgemap, seen = state['edges'], state['seen'] + # Be tolerant of history issues; make sure we have at least ncols + coldiff + # elements to work with. See test-glog.t for broken history test cases. + echars = [c for p in seen for c in (edgemap.get(p, '|'), ' ')] + echars.extend(('|', ' ') * max(ncols + coldiff - len(seen), 0)) + if coldiff == -1: # Transform # @@ -515,45 +604,54 @@ fix_nodeline_tail = len(text) <= 2 and not add_padding_line # nodeline is the line containing the node character (typically o) - nodeline = ["|", " "] * idx + nodeline = echars[:idx * 2] nodeline.extend([char, " "]) nodeline.extend( - _getnodelineedgestail(idx, state[1], ncols, coldiff, - state[0], fix_nodeline_tail)) + _getnodelineedgestail( + echars, idx, state['lastindex'], ncols, coldiff, + state['lastcoldiff'], fix_nodeline_tail)) # shift_interline is the line containing the non-vertical # edges between this entry and the next - shift_interline = ["|", " "] * idx + shift_interline = echars[:idx * 2] + shift_interline.extend(' ' * (2 + coldiff)) + count = ncols - idx - 1 if coldiff == -1: - n_spaces = 1 - edge_ch = "/" + shift_interline.extend('/ ' * count) elif coldiff == 0: - n_spaces = 2 - edge_ch = "|" + shift_interline.extend(echars[(idx + 1) * 2:ncols * 2]) else: - n_spaces = 3 - edge_ch = "\\" - shift_interline.extend(n_spaces * [" "]) - shift_interline.extend([edge_ch, " "] * (ncols - idx - 1)) + shift_interline.extend(r'\ ' * count) # draw edges from the current node to its parents - _drawedges(edges, nodeline, shift_interline) + _drawedges(echars, edges, nodeline, shift_interline) # lines is the list of all graph lines to print lines = [nodeline] if add_padding_line: - lines.append(_getpaddingline(idx, ncols, edges)) - lines.append(shift_interline) + lines.append(_getpaddingline(echars, idx, ncols, edges)) + + # If 'graphshorten' config, only draw shift_interline + # when there is any non vertical flow 
in graph. + if state['graphshorten']: + if any(c in '\/' for c in shift_interline if c): + lines.append(shift_interline) + # Else, no 'graphshorten' config so draw shift_interline. + else: + lines.append(shift_interline) # make sure that there are as many graph lines as there are # log strings + extra_interline = echars[:(ncols + coldiff) * 2] + if len(lines) < len(text): + while len(lines) < len(text): + lines.append(extra_interline[:]) + + _drawendinglines(lines, extra_interline, edgemap, seen) + while len(text) < len(lines): text.append("") - if len(lines) < len(text): - extra_interline = ["|", " "] * (ncols + coldiff) - while len(lines) < len(text): - lines.append(extra_interline) # print lines indentation_level = max(ncols, ncols + coldiff) @@ -562,5 +660,5 @@ ui.write(ln.rstrip() + '\n') # ... and start over - state[0] = coldiff - state[1] = idx + state['lastcoldiff'] = coldiff + state['lastindex'] = idx
--- a/mercurial/help.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/help.py Sat Apr 16 18:06:48 2016 -0500 @@ -149,6 +149,8 @@ for name, docs in itertools.chain( extensions.enabled(False).iteritems(), extensions.disabled().iteritems()): + if not docs: + continue mod = extensions.load(ui, name, '') name = name.rpartition('.')[-1] if lowercontains(name) or lowercontains(docs): @@ -186,6 +188,8 @@ loaddoc('bundles', subdir='internals')), (['changegroups'], _('representation of revlog data'), loaddoc('changegroups', subdir='internals')), + (['requirements'], _('repository requirements'), + loaddoc('requirements', subdir='internals')), (['revlogs'], _('revision storage mechanism'), loaddoc('revlogs', subdir='internals')), ]) @@ -332,10 +336,13 @@ if not doc: doc = _("(no help text available)") if util.safehasattr(entry[0], 'definition'): # aliased command + source = entry[0].source if entry[0].definition.startswith('!'): # shell alias - doc = _('shell alias for::\n\n %s') % entry[0].definition[1:] + doc = (_('shell alias for::\n\n %s\n\ndefined by: %s\n') % + (entry[0].definition[1:], source)) else: - doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc) + doc = (_('alias for: hg %s\n\n%s\n\ndefined by: %s\n') % + (entry[0].definition, doc, source)) doc = doc.splitlines(True) if ui.quiet or not full: rst.append(doc[0])
--- a/mercurial/help/config.txt Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/help/config.txt Sat Apr 16 18:06:48 2016 -0500 @@ -800,7 +800,7 @@ ``outgoing`` Run after sending changes from local repository to another. ID of first changeset sent is in ``$HG_NODE``. Source of operation is in - ``$HG_SOURCE``; Also see :hg:`help config.preoutgoing` hook. + ``$HG_SOURCE``; Also see :hg:`help config.hooks.preoutgoing` hook. ``post-<command>`` Run after successful invocations of the associated command. The @@ -881,11 +881,11 @@ ``txnclose`` Run after any repository transaction has been committed. At this point, the transaction can no longer be rolled back. The hook will run - after the lock is released. See :hg:`help config.pretxnclose` docs for + after the lock is released. See :hg:`help config.hooks.pretxnclose` docs for details about available variables. ``txnabort`` - Run when a transaction is aborted. See :hg:`help config.pretxnclose` + Run when a transaction is aborted. See :hg:`help config.hooks.pretxnclose` docs for details about available variables. ``pretxnchangegroup`` @@ -968,10 +968,16 @@ -------------------- Fingerprints of the certificates of known HTTPS servers. + A HTTPS connection to a server with a fingerprint configured here will only succeed if the servers certificate matches the fingerprint. This is very similar to how ssh known hosts works. + The fingerprint is the SHA-1 hash value of the DER encoded certificate. +Multiple values can be specified (separated by spaces or commas). This can +be used to define both old and new fingerprints while a host transitions +to a new certificate. + The CA chain and web.cacerts is not used for servers with a fingerprint. For example:: @@ -980,9 +986,6 @@ hg.intevation.de = fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33 hg.intevation.org = fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33 -This feature is only supported when using Python 2.6 or later. 
- - ``http_proxy`` -------------- @@ -1007,6 +1010,25 @@ Optional. Always use the proxy, even for localhost and any entries in ``http_proxy.no``. (default: False) +``merge`` +--------- + +This section specifies behavior during merges and updates. + +``checkignored`` + Controls behavior when an ignored file on disk has the same name as a tracked + file in the changeset being merged or updated to, and has different + contents. Options are ``abort``, ``warn`` and ``ignore``. With ``abort``, + abort on such files. With ``warn``, warn on such files and back them up as + .orig. With ``ignore``, don't print a warning and back them up as + .orig. (default: ``abort``) + +``checkunknown`` + Controls behavior when an unknown file that isn't ignored has the same name + as a tracked file in the changeset being merged or updated to, and has + different contents. Similar to ``merge.checkignored``, except for files that + are not ignored. (default: ``abort``) + ``merge-patterns`` ------------------ @@ -1466,6 +1488,11 @@ rewrite rules are then applied on the full (absolute) path. The rules are applied in definition order. +``templatealias`` +----------------- + +Alias definitions for templates. See :hg:`help templates` for details. + ``trusted`` ----------- @@ -1588,6 +1615,15 @@ ``interactive`` Allow to prompt the user. (default: True) +``interface`` + Select the default interface for interactive features (default: text). + Possible values are 'text' and 'curses'. + +``interface.chunkselector`` + Select the interface for change recording (e.g. :hg:`commit` -i). + Possible values are 'text' and 'curses'. + This config overrides the interface specified by ui.interface. + ``logtemplate`` Template string for commands that print changesets. @@ -1815,11 +1851,6 @@ client, then it will verify the identity of remote HTTPS servers with these certificates. - This feature is only supported when using Python 2.6 or later. 
If you wish - to use it with earlier versions of Python, install the backported - version of the ssl library that is available from - ``http://pypi.python.org``. - To disable SSL verification temporarily, specify ``--insecure`` from command line.
--- a/mercurial/help/environment.txt Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/help/environment.txt Sat Apr 16 18:06:48 2016 -0500 @@ -69,6 +69,10 @@ Preserve internationalization. ``revsetalias`` Don't remove revset aliases. + ``templatealias`` + Don't remove template aliases. + ``progress`` + Don't hide progress output. Setting HGPLAINEXCEPT to anything (even an empty string) will enable plain mode.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/help/internals/requirements.txt Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,110 @@ +Requirements +============ + +Repositories contain a file (``.hg/requires``) containing a list of +features/capabilities that are *required* for clients to interface +with the repository. This file has been present in Mercurial since +version 0.9.2 (released December 2006). + +One of the first things clients do when opening a repository is read +``.hg/requires`` and verify that all listed requirements are supported, +aborting if not. Requirements are therefore a strong mechanism to +prevent incompatible clients from reading from unknown repository +formats or even corrupting them by writing to them. + +Extensions may add requirements. When they do this, clients not running +an extension will be unable to read from repositories. + +The following sections describe the requirements defined by the +Mercurial core distribution. + +revlogv1 +-------- + +When present, revlogs are version 1 (RevlogNG). RevlogNG was introduced +in 2006. The ``revlogv1`` requirement has been enabled by default +since the ``requires`` file was introduced in Mercurial 0.9.2. + +If this requirement is not present, version 0 revlogs are assumed. + +store +----- + +The *store* repository layout should be used. + +This requirement has been enabled by default since the ``requires`` file +was introduced in Mercurial 0.9.2. + +fncache +------- + +The *fncache* repository layout should be used. + +The *fncache* layout hash encodes filenames with long paths and +encodes reserved filenames. + +This requirement is enabled by default when the *store* requirement is +enabled (which is the default behavior). It was introduced in Mercurial +1.1 (released December 2008). + +shared +------ + +Denotes that the store for a repository is shared from another location +(defined by the ``.hg/sharedpath`` file). 
+
+This requirement is set when a repository is created via :hg:`share`.
+
+The requirement was added in Mercurial 1.3 (released July 2009).
+
+dotencode
+---------
+
+The *dotencode* repository layout should be used.
+
+The *dotencode* layout encodes the first period or space in filenames
+to prevent issues on OS X and Windows.
+
+This requirement is enabled by default when the *store* requirement
+is enabled (which is the default behavior). It was introduced in
+Mercurial 1.7 (released November 2010).
+
+parentdelta
+-----------
+
+Denotes a revlog delta encoding format that was experimental and
+replaced by *generaldelta*. It should not be seen in the wild because
+it was never enabled by default.
+
+This requirement was added in Mercurial 1.7 and removed in Mercurial
+1.9.
+
+generaldelta
+------------
+
+Revlogs should be created with the *generaldelta* flag enabled. The
+generaldelta flag will cause deltas to be encoded against a parent
+revision instead of the previous revision in the revlog.
+
+Support for this requirement was added in Mercurial 1.9 (released
+July 2011). The requirement was disabled on new repositories by
+default until Mercurial 3.7 (released February 2016).
+
+manifestv2
+----------
+
+Denotes that version 2 of manifests are being used.
+
+Support for this requirement was added in Mercurial 3.4 (released
+May 2015). The requirement is currently experimental and is disabled
+by default.
+
+treemanifest
+------------
+
+Denotes that tree manifests are being used. Tree manifests are
+one manifest per directory (as opposed to a single flat manifest).
+
+Support for this requirement was added in Mercurial 3.4 (released
+May 2015). The requirement is currently experimental and is
+disabled by default.
--- a/mercurial/help/internals/revlogs.txt Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/help/internals/revlogs.txt Sat Apr 16 18:06:48 2016 -0500 @@ -31,7 +31,8 @@ ----------- A revlog begins with a 32-bit big endian integer holding version info -and feature flags. +and feature flags. This integer is shared with the first revision +entry. This integer is logically divided into 2 16-bit shorts. The least significant half of the integer is the format/version short. The other @@ -70,8 +71,10 @@ 00 03 00 01 RevlogNG + inline + generaldelta -Following the 32-bit header is *index* data. Inlined revision data is possibly -located between index entries. More on this layout is described below. +Following the 32-bit header is the remainder of the first index entry. +Following that are remaining *index* data. Inlined revision data is +possibly located between index entries. More on this layout is described +below. RevlogNG Format --------------- @@ -83,6 +86,8 @@ Each index entry is 64 bytes. The byte layout of each entry is as follows, with byte 0 being the first byte (all data stored as big endian): +0-3 (4 bytes) (rev 0 only) + Revlog header 0-5 (6 bytes) Absolute offset of revision data from beginning of revlog. 6-7 (2 bytes) @@ -120,6 +125,9 @@ separate byte container. The offsets from bytes 0-5 and the compressed length from bytes 8-11 define how to access this data. +The first 4 bytes of the revlog are shared between the revlog header +and the 6 byte absolute offset field from the first revlog entry. + Delta Chains ------------ @@ -190,4 +198,4 @@ 1. Hash the parent nodes 2. Hash the fulltext of the revision -The 20 byte node ids of the parents are fed into the hasher in ascending order. \ No newline at end of file +The 20 byte node ids of the parents are fed into the hasher in ascending order.
--- a/mercurial/help/templates.txt Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/help/templates.txt Sat Apr 16 18:06:48 2016 -0500 @@ -51,6 +51,26 @@ To prevent it from being interpreted, you can use an escape character ``\{`` or a raw string prefix, ``r'...'``. +New keywords and functions can be defined in the ``templatealias`` section of +a Mercurial configuration file:: + + <alias> = <definition> + +Arguments of the form `a1`, `a2`, etc. are substituted from the alias into +the definition. + +For example, + +:: + + [templatealias] + r = rev + rn = "{r}:{node|short}" + leftpad(s, w) = pad(s, w, ' ', True) + +defines two symbol aliases, ``r`` and ``rn``, and a function alias +``leftpad()``. + Some sample command line templates: - Format lists, e.g. files::
--- a/mercurial/hg.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/hg.py Sat Apr 16 18:06:48 2016 -0500 @@ -19,6 +19,7 @@ bookmarks, bundlerepo, cmdutil, + destutil, discovery, error, exchange, @@ -694,10 +695,67 @@ _showstats(repo, stats, quietempty) return stats[3] > 0 -def merge(repo, node, force=None, remind=True): +# naming conflict in updatetotally() +_clean = clean + +def updatetotally(ui, repo, checkout, brev, clean=False, check=False): + """Update the working directory with extra care for non-file components + + This takes care of non-file components below: + + :bookmark: might be advanced or (in)activated + + This takes arguments below: + + :checkout: to which revision the working directory is updated + :brev: a name, which might be a bookmark to be activated after updating + :clean: whether changes in the working directory can be discarded + :check: whether changes in the working directory should be checked + + This returns whether conflict is detected at updating or not. + """ + with repo.wlock(): + movemarkfrom = None + warndest = False + if checkout is None: + updata = destutil.destupdate(repo, clean=clean, check=check) + checkout, movemarkfrom, brev = updata + warndest = True + + if clean: + ret = _clean(repo, checkout) + else: + ret = _update(repo, checkout) + + if not ret and movemarkfrom: + if movemarkfrom == repo['.'].node(): + pass # no-op update + elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()): + ui.status(_("updating bookmark %s\n") % repo._activebookmark) + else: + # this can happen with a non-linear update + ui.status(_("(leaving bookmark %s)\n") % + repo._activebookmark) + bookmarks.deactivate(repo) + elif brev in repo._bookmarks: + if brev != repo._activebookmark: + ui.status(_("(activating bookmark %s)\n") % brev) + bookmarks.activate(repo, brev) + elif brev: + if repo._activebookmark: + ui.status(_("(leaving bookmark %s)\n") % + repo._activebookmark) + bookmarks.deactivate(repo) + + if warndest: + 
destutil.statusotherdests(ui, repo) + + return ret + +def merge(repo, node, force=None, remind=True, mergeforce=False): """Branch merge with node, resolving changes. Return true if any unresolved conflicts.""" - stats = mergemod.update(repo, node, True, force) + stats = mergemod.update(repo, node, True, force, mergeforce=mergeforce) _showstats(repo, stats) if stats[3]: repo.ui.status(_("use 'hg resolve' to retry unresolved file merges " @@ -888,6 +946,7 @@ assert isinstance(repo, localrepo.localrepository) self._repo = repo self._state, self.mtime = self._repostate() + self._filtername = repo.filtername def fetch(self): """Refresh (if necessary) and return a repository. @@ -907,7 +966,11 @@ if state == self._state: return self._repo, False - self._repo = repository(self._repo.baseui, self._repo.url()) + repo = repository(self._repo.baseui, self._repo.url()) + if self._filtername: + self._repo = repo.filtered(self._filtername) + else: + self._repo = repo.unfiltered() self._state = state self.mtime = mtime @@ -935,6 +998,10 @@ completely independent of the original. """ repo = repository(self._repo.baseui, self._repo.origroot) + if self._filtername: + repo = repo.filtered(self._filtername) + else: + repo = repo.unfiltered() c = cachedlocalrepo(repo) c._state = self._state c.mtime = self.mtime
--- a/mercurial/hgweb/hgweb_mod.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/hgweb/hgweb_mod.py Sat Apr 16 18:06:48 2016 -0500 @@ -188,20 +188,22 @@ # create the templater - tmpl = templater.templater(mapfile, - filters={'websub': websubfilter}, - defaults={'url': req.url, - 'logourl': logourl, - 'logoimg': logoimg, - 'staticurl': staticurl, - 'urlbase': urlbase, - 'repo': self.reponame, - 'encoding': encoding.encoding, - 'motd': motd, - 'sessionvars': sessionvars, - 'pathdef': makebreadcrumb(req.url), - 'style': style, - }) + defaults = { + 'url': req.url, + 'logourl': logourl, + 'logoimg': logoimg, + 'staticurl': staticurl, + 'urlbase': urlbase, + 'repo': self.reponame, + 'encoding': encoding.encoding, + 'motd': motd, + 'sessionvars': sessionvars, + 'pathdef': makebreadcrumb(req.url), + 'style': style, + } + tmpl = templater.templater.frommapfile(mapfile, + filters={'websub': websubfilter}, + defaults=defaults) return tmpl
--- a/mercurial/hgweb/hgwebdir_mod.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/hgweb/hgwebdir_mod.py Sat Apr 16 18:06:48 2016 -0500 @@ -491,16 +491,17 @@ if not staticurl.endswith('/'): staticurl += '/' - tmpl = templater.templater(mapfile, - defaults={"encoding": encoding.encoding, - "motd": motd, - "url": url, - "logourl": logourl, - "logoimg": logoimg, - "staticurl": staticurl, - "sessionvars": sessionvars, - "style": style, - }) + defaults = { + "encoding": encoding.encoding, + "motd": motd, + "url": url, + "logourl": logourl, + "logoimg": logoimg, + "staticurl": staticurl, + "sessionvars": sessionvars, + "style": style, + } + tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) return tmpl def updatereqenv(self, env):
--- a/mercurial/hgweb/protocol.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/hgweb/protocol.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,9 +7,7 @@ from __future__ import absolute_import -import cStringIO import cgi -import urllib import zlib from .common import ( @@ -20,6 +18,10 @@ util, wireproto, ) +stringio = util.stringio + +urlerr = util.urlerr +urlreq = util.urlreq HGTYPE = 'application/mercurial-0.1' HGERRTYPE = 'application/hg-error' @@ -45,6 +47,11 @@ return [data[k] for k in keys] def _args(self): args = self.req.form.copy() + postlen = int(self.req.env.get('HTTP_X_HGARGS_POST', 0)) + if postlen: + args.update(cgi.parse_qs( + self.req.read(postlen), keep_blank_values=True)) + return args chunks = [] i = 1 while True: @@ -61,7 +68,7 @@ fp.write(s) def redirect(self): self.oldio = self.ui.fout, self.ui.ferr - self.ui.ferr = self.ui.fout = cStringIO.StringIO() + self.ui.ferr = self.ui.fout = stringio() def restore(self): val = self.ui.fout.getvalue() self.ui.ferr, self.ui.fout = self.oldio @@ -77,8 +84,8 @@ def _client(self): return 'remote:%s:%s:%s' % ( self.req.env.get('wsgi.url_scheme') or 'http', - urllib.quote(self.req.env.get('REMOTE_HOST', '')), - urllib.quote(self.req.env.get('REMOTE_USER', ''))) + urlreq.quote(self.req.env.get('REMOTE_HOST', '')), + urlreq.quote(self.req.env.get('REMOTE_USER', ''))) def iscmd(cmd): return cmd in wireproto.commands
--- a/mercurial/hgweb/server.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/hgweb/server.py Sat Apr 16 18:06:48 2016 -0500 @@ -15,7 +15,6 @@ import socket import sys import traceback -import urllib from ..i18n import _ @@ -24,6 +23,9 @@ util, ) +urlerr = util.urlerr +urlreq = util.urlreq + from . import ( common, ) @@ -38,7 +40,7 @@ path, query = uri.split('?', 1) else: path, query = uri, '' - return urllib.unquote(path), query + return urlreq.unquote(path), query class _error_logger(object): def __init__(self, handler):
--- a/mercurial/hgweb/webcommands.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/hgweb/webcommands.py Sat Apr 16 18:06:48 2016 -0500 @@ -606,21 +606,28 @@ The ``bookmarks`` template is rendered. """ i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] + sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) + i = sorted(i, key=sortkey, reverse=True) parity = paritygen(web.stripecount) def entries(latestonly, **map): + t = i if latestonly: - t = [min(i)] - else: - t = sorted(i) + t = i[:1] for k, n in t: yield {"parity": parity.next(), "bookmark": k, "date": web.repo[n].date(), "node": hex(n)} + if i: + latestrev = i[0][1] + else: + latestrev = -1 + return tmpl("bookmarks", node=hex(web.repo.changelog.tip()), + lastchange=[{"date": web.repo[latestrev].date()}], entries=lambda **x: entries(latestonly=False, **x), latestentry=lambda **x: entries(latestonly=True, **x)) @@ -678,7 +685,9 @@ def bookmarks(**map): parity = paritygen(web.stripecount) marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] - for k, n in sorted(marks)[:10]: # limit to 10 bookmarks + sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) + marks = sorted(marks, key=sortkey, reverse=True) + for k, n in marks[:10]: # limit to 10 bookmarks yield {'parity': parity.next(), 'bookmark': k, 'date': web.repo[n].date(), @@ -1135,7 +1144,7 @@ max([edge[1] for edge in edges] or [0])) return cols - def graphdata(usetuples, **map): + def graphdata(usetuples, encodestr): data = [] row = 0 @@ -1143,11 +1152,11 @@ if type != graphmod.CHANGESET: continue node = str(ctx) - age = templatefilters.age(ctx.date()) - desc = templatefilters.firstline(ctx.description()) + age = encodestr(templatefilters.age(ctx.date())) + desc = templatefilters.firstline(encodestr(ctx.description())) desc = cgi.escape(templatefilters.nonempty(desc)) - user = cgi.escape(templatefilters.person(ctx.user())) - branch = cgi.escape(ctx.branch()) + user = cgi.escape(templatefilters.person(encodestr(ctx.user()))) + branch = 
cgi.escape(encodestr(ctx.branch())) try: branchnode = web.repo.branchtip(branch) except error.RepoLookupError: @@ -1156,8 +1165,9 @@ if usetuples: data.append((node, vtx, edges, desc, user, age, branch, - [cgi.escape(x) for x in ctx.tags()], - [cgi.escape(x) for x in ctx.bookmarks()])) + [cgi.escape(encodestr(x)) for x in ctx.tags()], + [cgi.escape(encodestr(x)) + for x in ctx.bookmarks()])) else: edgedata = [{'col': edge[0], 'nextcol': edge[1], 'color': (edge[2] - 1) % 6 + 1, @@ -1195,8 +1205,9 @@ canvaswidth=(cols + 1) * bg_height, truecanvasheight=rows * bg_height, canvasheight=canvasheight, bg_height=bg_height, - jsdata=lambda **x: graphdata(True, **x), - nodes=lambda **x: graphdata(False, **x), + # {jsdata} will be passed to |json, so it must be in utf-8 + jsdata=lambda **x: graphdata(True, encoding.fromlocal), + nodes=lambda **x: graphdata(False, str), node=ctx.hex(), changenav=changenav) def _getdoc(e):
--- a/mercurial/hgweb/webutil.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/hgweb/webutil.py Sat Apr 16 18:06:48 2016 -0500 @@ -341,6 +341,7 @@ entry = commonentry(repo, ctx) entry.update( + allparents=lambda **x: parents(ctx), parent=lambda **x: parents(ctx, rev - 1), child=lambda **x: children(ctx, rev + 1), changelogtag=showtags,
--- a/mercurial/hook.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/hook.py Sat Apr 16 18:06:48 2016 -0500 @@ -36,7 +36,7 @@ d = funcname.rfind('.') if d == -1: raise error.HookLoadError( - _('%s hook is invalid ("%s" not in a module)') + _('%s hook is invalid: "%s" not in a module') % (hname, funcname)) modname = funcname[:d] oldpaths = sys.path @@ -49,13 +49,13 @@ with demandimport.deactivated(): try: obj = __import__(modname) - except ImportError: - e1 = sys.exc_type, sys.exc_value, sys.exc_traceback + except (ImportError, SyntaxError): + e1 = sys.exc_info() try: # extensions are loaded with hgext_ prefix obj = __import__("hgext_%s" % modname) - except ImportError: - e2 = sys.exc_type, sys.exc_value, sys.exc_traceback + except (ImportError, SyntaxError): + e2 = sys.exc_info() if ui.tracebackflag: ui.warn(_('exception from first failed import ' 'attempt:\n')) @@ -64,20 +64,26 @@ ui.warn(_('exception from second failed import ' 'attempt:\n')) ui.traceback(e2) + + if not ui.tracebackflag: + tracebackhint = _( + 'run with --traceback for stack trace') + else: + tracebackhint = None raise error.HookLoadError( - _('%s hook is invalid (import of "%s" failed)') % - (hname, modname)) + _('%s hook is invalid: import of "%s" failed') % + (hname, modname), hint=tracebackhint) sys.path = oldpaths try: for p in funcname.split('.')[1:]: obj = getattr(obj, p) except AttributeError: raise error.HookLoadError( - _('%s hook is invalid ("%s" is not defined)') + _('%s hook is invalid: "%s" is not defined') % (hname, funcname)) if not callable(obj): raise error.HookLoadError( - _('%s hook is invalid ("%s" is not callable)') + _('%s hook is invalid: "%s" is not callable') % (hname, funcname)) ui.note(_("calling hook %s: %s\n") % (hname, funcname)) @@ -100,6 +106,8 @@ '%s\n') % (hname, exc)) if throw: raise + if not ui.tracebackflag: + ui.warn(_('(run with --traceback for stack trace)\n')) ui.traceback() return True, True finally: @@ -153,13 +161,32 @@ ui.warn(_('warning: %s hook 
%s\n') % (name, desc)) return r +# represent an untrusted hook command +_fromuntrusted = object() + def _allhooks(ui): - hooks = [] - for name, cmd in ui.configitems('hooks'): + """return a list of (hook-id, cmd) pairs sorted by priority""" + hooks = _hookitems(ui) + # Be careful in this section, propagating the real commands from untrusted + # sources would create a security vulnerability, make sure anything altered + # in that section uses "_fromuntrusted" as its command. + untrustedhooks = _hookitems(ui, _untrusted=True) + for name, value in untrustedhooks.items(): + trustedvalue = hooks.get(name, (None, None, name, _fromuntrusted)) + if value != trustedvalue: + (lp, lo, lk, lv) = trustedvalue + hooks[name] = (lp, lo, lk, _fromuntrusted) + # (end of the security sensitive section) + return [(k, v) for p, o, k, v in sorted(hooks.values())] + +def _hookitems(ui, _untrusted=False): + """return all hooks items ready to be sorted""" + hooks = {} + for name, cmd in ui.configitems('hooks', untrusted=_untrusted): if not name.startswith('priority'): priority = ui.configint('hooks', 'priority.%s' % name, 0) - hooks.append((-priority, len(hooks), name, cmd)) - return [(k, v) for p, o, k, v in sorted(hooks)] + hooks[name] = (-priority, len(hooks), name, cmd) + return hooks _redirect = False def redirect(state): @@ -200,7 +227,15 @@ # files seem to be bogus, give up on redirecting (WSGI, etc) pass - if callable(cmd): + if cmd is _fromuntrusted: + if throw: + raise error.HookAbort( + _('untrusted hook %s not executed') % name, + hint = _("see 'hg help config.trusted'")) + ui.warn(_('warning: untrusted hook %s not executed\n') % name) + r = 1 + raised = False + elif callable(cmd): r, raised = _pythonhook(ui, repo, name, hname, cmd, args, throw) elif cmd.startswith('python:'): if cmd.count(':') >= 2:
--- a/mercurial/httpclient/__init__.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/httpclient/__init__.py Sat Apr 16 18:06:48 2016 -0500 @@ -41,7 +41,6 @@ # Many functions in this file have too many arguments. # pylint: disable=R0913 -import cStringIO import errno import httplib import logging
--- a/mercurial/httpconnection.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/httpconnection.py Sat Apr 16 18:06:48 2016 -0500 @@ -13,8 +13,6 @@ import logging import os import socket -import urllib -import urllib2 from .i18n import _ from . import ( @@ -23,6 +21,9 @@ util, ) +urlerr = util.urlerr +urlreq = util.urlreq + # moved here from url.py to avoid a cycle class httpsendfile(object): """This is a wrapper around the objects returned by python's "open". @@ -33,9 +34,6 @@ """ def __init__(self, ui, *args, **kwargs): - # We can't just "self._data = open(*args, **kwargs)" here because there - # is an "open" function defined in this module that shadows the global - # one self.ui = ui self._data = open(*args, **kwargs) self.seek = self._data.seek @@ -126,10 +124,10 @@ # Subclass BOTH of these because otherwise urllib2 "helpfully" # reinserts them since it notices we don't include any subclasses of # them. -class http2handler(urllib2.HTTPHandler, urllib2.HTTPSHandler): +class http2handler(urlreq.httphandler, urlreq.httpshandler): def __init__(self, ui, pwmgr): global _configuredlogging - urllib2.AbstractHTTPHandler.__init__(self) + urlreq.abstracthttphandler.__init__(self) self.ui = ui self.pwmgr = pwmgr self._connections = {} @@ -190,7 +188,7 @@ proxy = None if not host: - raise urllib2.URLError('no host given') + raise urlerr.urlerror('no host given') connkey = use_ssl, host, proxy allconns = self._connections.get(connkey, []) @@ -220,13 +218,13 @@ h.request(req.get_method(), path, req.data, headers) r = h.getresponse() except socket.error as err: # XXX what error? - raise urllib2.URLError(err) + raise urlerr.urlerror(err) # Pick apart the HTTPResponse object to get the addinfourl # object initialized properly. r.recv = r.read - resp = urllib.addinfourl(r, r.headers, req.get_full_url()) + resp = urlreq.addinfourl(r, r.headers, req.get_full_url()) resp.code = r.status resp.msg = r.reason return resp
--- a/mercurial/httppeer.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/httppeer.py Sat Apr 16 18:06:48 2016 -0500 @@ -13,14 +13,12 @@ import os import socket import tempfile -import urllib -import urllib2 import zlib from .i18n import _ from .node import nullid from . import ( - changegroup, + bundle2, error, httpconnection, statichttprepo, @@ -29,6 +27,9 @@ wireproto, ) +urlerr = util.urlerr +urlreq = util.urlreq + def zgenerator(f): zd = zlib.decompressobj() try: @@ -59,7 +60,7 @@ self.ui.debug('using %s\n' % self._url) self.urlopener = url.opener(ui, authinfo) - self.requestbuilder = urllib2.Request + self.requestbuilder = urlreq.request def __del__(self): if self.urlopener: @@ -92,50 +93,65 @@ if cmd == 'pushkey': args['data'] = '' data = args.pop('data', None) + headers = args.pop('headers', {}) + + self.ui.debug("sending %s command\n" % cmd) + q = [('cmd', cmd)] + headersize = 0 + # Important: don't use self.capable() here or else you end up + # with infinite recursion when trying to look up capabilities + # for the first time. + postargsok = self.caps is not None and 'httppostargs' in self.caps + # TODO: support for httppostargs when data is a file-like + # object rather than a basestring + canmungedata = not data or isinstance(data, basestring) + if postargsok and canmungedata: + strargs = urlreq.urlencode(sorted(args.items())) + if strargs: + if not data: + data = strargs + elif isinstance(data, basestring): + data = strargs + data + headers['X-HgArgs-Post'] = len(strargs) + else: + if len(args) > 0: + httpheader = self.capable('httpheader') + if httpheader: + headersize = int(httpheader.split(',', 1)[0]) + if headersize > 0: + # The headers can typically carry more data than the URL. 
+ encargs = urlreq.urlencode(sorted(args.items())) + headerfmt = 'X-HgArg-%s' + contentlen = headersize - len(headerfmt % '000' + ': \r\n') + headernum = 0 + varyheaders = [] + for i in xrange(0, len(encargs), contentlen): + headernum += 1 + header = headerfmt % str(headernum) + headers[header] = encargs[i:i + contentlen] + varyheaders.append(header) + headers['Vary'] = ','.join(varyheaders) + else: + q += sorted(args.items()) + qs = '?%s' % urlreq.urlencode(q) + cu = "%s%s" % (self._url, qs) size = 0 if util.safehasattr(data, 'length'): size = data.length elif data is not None: size = len(data) - headers = args.pop('headers', {}) - if data is not None and 'Content-Type' not in headers: - headers['Content-Type'] = 'application/mercurial-0.1' - - if size and self.ui.configbool('ui', 'usehttp2', False): headers['Expect'] = '100-Continue' headers['X-HgHttp2'] = '1' - - self.ui.debug("sending %s command\n" % cmd) - q = [('cmd', cmd)] - headersize = 0 - if len(args) > 0: - httpheader = self.capable('httpheader') - if httpheader: - headersize = int(httpheader.split(',')[0]) - if headersize > 0: - # The headers can typically carry more data than the URL. 
- encargs = urllib.urlencode(sorted(args.items())) - headerfmt = 'X-HgArg-%s' - contentlen = headersize - len(headerfmt % '000' + ': \r\n') - headernum = 0 - for i in xrange(0, len(encargs), contentlen): - headernum += 1 - header = headerfmt % str(headernum) - headers[header] = encargs[i:i + contentlen] - varyheaders = [headerfmt % str(h) for h in range(1, headernum + 1)] - headers['Vary'] = ','.join(varyheaders) - else: - q += sorted(args.items()) - qs = '?%s' % urllib.urlencode(q) - cu = "%s%s" % (self._url, qs) + if data is not None and 'Content-Type' not in headers: + headers['Content-Type'] = 'application/mercurial-0.1' req = self.requestbuilder(cu, data, headers) if data is not None: self.ui.debug("sending %s bytes\n" % size) req.add_unredirected_header('Content-Length', '%d' % size) try: resp = self.urlopener.open(req) - except urllib2.HTTPError as inst: + except urlerr.httperror as inst: if inst.code == 401: raise error.Abort(_('authorization failed')) raise @@ -207,11 +223,11 @@ # bundles. types = [""] for x in types: - if x in changegroup.bundletypes: + if x in bundle2.bundletypes: type = x break - tempname = changegroup.writebundle(self.ui, cg, None, type) + tempname = bundle2.writebundle(self.ui, cg, None, type) fp = httpconnection.httpsendfile(self.ui, tempname, "rb") headers = {'Content-Type': 'application/mercurial-0.1'}
--- a/mercurial/i18n.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/i18n.py Sat Apr 16 18:06:48 2016 -0500 @@ -20,6 +20,10 @@ else: module = __file__ +try: + unicode +except NameError: + unicode = str _languages = None if (os.name == 'nt' @@ -45,7 +49,10 @@ localedir = os.path.join(datapath, 'locale') t = gettextmod.translation('hg', localedir, _languages, fallback=True) global _ugettext - _ugettext = t.ugettext + try: + _ugettext = t.ugettext + except AttributeError: + _ugettext = t.gettext _msgcache = {}
--- a/mercurial/keepalive.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/keepalive.py Sat Apr 16 18:06:48 2016 -0500 @@ -28,10 +28,10 @@ >>> import urllib2 >>> from keepalive import HTTPHandler >>> keepalive_handler = HTTPHandler() ->>> opener = urllib2.build_opener(keepalive_handler) ->>> urllib2.install_opener(opener) +>>> opener = urlreq.buildopener(keepalive_handler) +>>> urlreq.installopener(opener) >>> ->>> fo = urllib2.urlopen('http://www.python.org') +>>> fo = urlreq.urlopen('http://www.python.org') If a connection to a given host is requested, and all of the existing connections are still in use, another connection will be opened. If @@ -114,7 +114,13 @@ import socket import sys import thread -import urllib2 + +from . import ( + util, +) + +urlerr = util.urlerr +urlreq = util.urlreq DEBUG = None @@ -227,7 +233,7 @@ def do_open(self, http_class, req): host = req.get_host() if not host: - raise urllib2.URLError('no host given') + raise urlerr.urlerror('no host given') try: h = self._cm.get_ready_conn(host) @@ -254,7 +260,7 @@ self._start_transaction(h, req) r = h.getresponse() except (socket.error, httplib.HTTPException) as err: - raise urllib2.URLError(err) + raise urlerr.urlerror(err) # if not a persistent connection, don't try to reuse it if r.will_close: @@ -345,15 +351,15 @@ h.putheader('Content-length', '%d' % len(data)) else: h.putrequest('GET', req.get_selector(), **skipheaders) - except (socket.error) as err: - raise urllib2.URLError(err) + except socket.error as err: + raise urlerr.urlerror(err) for k, v in headers.items(): h.putheader(k, v) h.endheaders() if req.has_data(): h.send(data) -class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler): +class HTTPHandler(KeepAliveHandler, urlreq.httphandler): pass class HTTPResponse(httplib.HTTPResponse): @@ -593,14 +599,14 @@ global HANDLE_ERRORS orig = HANDLE_ERRORS keepalive_handler = HTTPHandler() - opener = urllib2.build_opener(keepalive_handler) - urllib2.install_opener(opener) + opener = 
urlreq.buildopener(keepalive_handler) + urlreq.installopener(opener) pos = {0: 'off', 1: 'on'} for i in (0, 1): print(" fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)) HANDLE_ERRORS = i try: - fo = urllib2.urlopen(url) + fo = urlreq.urlopen(url) fo.read() fo.close() try: @@ -623,25 +629,25 @@ format = '%25s: %s' # first fetch the file with the normal http handler - opener = urllib2.build_opener() - urllib2.install_opener(opener) - fo = urllib2.urlopen(url) + opener = urlreq.buildopener() + urlreq.installopener(opener) + fo = urlreq.urlopen(url) foo = fo.read() fo.close() m = md5(foo) print(format % ('normal urllib', m.hexdigest())) # now install the keepalive handler and try again - opener = urllib2.build_opener(HTTPHandler()) - urllib2.install_opener(opener) + opener = urlreq.buildopener(HTTPHandler()) + urlreq.installopener(opener) - fo = urllib2.urlopen(url) + fo = urlreq.urlopen(url) foo = fo.read() fo.close() m = md5(foo) print(format % ('keepalive read', m.hexdigest())) - fo = urllib2.urlopen(url) + fo = urlreq.urlopen(url) foo = '' while True: f = fo.readline() @@ -657,15 +663,15 @@ sys.stdout.write(' first using the normal urllib handlers') # first use normal opener - opener = urllib2.build_opener() - urllib2.install_opener(opener) + opener = urlreq.buildopener() + urlreq.installopener(opener) t1 = fetch(N, url) print(' TIME: %.3f s' % t1) sys.stdout.write(' now using the keepalive handler ') # now install the keepalive handler and try again - opener = urllib2.build_opener(HTTPHandler()) - urllib2.install_opener(opener) + opener = urlreq.buildopener(HTTPHandler()) + urlreq.installopener(opener) t2 = fetch(N, url) print(' TIME: %.3f s' % t2) print(' improvement factor: %.2f' % (t1 / t2)) @@ -677,7 +683,7 @@ for i in range(N): if delay and i > 0: time.sleep(delay) - fo = urllib2.urlopen(url) + fo = urlreq.urlopen(url) foo = fo.read() fo.close() lens.append(len(foo)) @@ -700,7 +706,7 @@ info = warning = error = debug DEBUG = FakeLogger() print(" 
fetching the file to establish a connection") - fo = urllib2.urlopen(url) + fo = urlreq.urlopen(url) data1 = fo.read() fo.close() @@ -714,7 +720,7 @@ sys.stderr.write('\r') print(" fetching the file a second time") - fo = urllib2.urlopen(url) + fo = urlreq.urlopen(url) data2 = fo.read() fo.close()
--- a/mercurial/localrepo.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/localrepo.py Sat Apr 16 18:06:48 2016 -0500 @@ -12,7 +12,6 @@ import os import random import time -import urllib import weakref from .i18n import _ @@ -59,6 +58,8 @@ release = lockmod.release propertycache = util.propertycache +urlerr = util.urlerr +urlreq = util.urlreq filecache = scmutil.filecache class repofilecache(filecache): @@ -242,9 +243,6 @@ # only functions defined in module of enabled extensions are invoked featuresetupfuncs = set() - def _baserequirements(self, create): - return ['revlogv1'] - def __init__(self, baseui, path=None, create=False): self.requirements = set() self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True) @@ -282,29 +280,21 @@ if not self.vfs.isdir(): if create: + self.requirements = newreporequirements(self) + if not self.wvfs.exists(): self.wvfs.makedirs() self.vfs.makedir(notindexed=True) - self.requirements.update(self._baserequirements(create)) - if self.ui.configbool('format', 'usestore', True): + + if 'store' in self.requirements: self.vfs.mkdir("store") - self.requirements.add("store") - if self.ui.configbool('format', 'usefncache', True): - self.requirements.add("fncache") - if self.ui.configbool('format', 'dotencode', True): - self.requirements.add('dotencode') + # create an invalid changelog self.vfs.append( "00changelog.i", '\0\0\0\2' # represents revlogv2 ' dummy changelog to prevent using the old repo layout' ) - if scmutil.gdinitconfig(self.ui): - self.requirements.add("generaldelta") - if self.ui.configbool('experimental', 'treemanifest', False): - self.requirements.add("treemanifest") - if self.ui.configbool('experimental', 'manifestv2', False): - self.requirements.add("manifestv2") else: raise error.RepoError(_("repository %s not found") % path) elif create: @@ -377,7 +367,7 @@ if self.ui.configbool('experimental', 'bundle2-advertise', True): caps = set(caps) capsblob = bundle2.encodecaps(bundle2.getrepocaps(self)) - 
caps.add('bundle2=' + urllib.quote(capsblob)) + caps.add('bundle2=' + urlreq.quote(capsblob)) return caps def _applyopenerreqs(self): @@ -985,7 +975,7 @@ data = self.wvfs.read(filename) return self._filter(self._encodefilterpats, filename, data) - def wwrite(self, filename, data, flags): + def wwrite(self, filename, data, flags, backgroundclose=False): """write ``data`` into ``filename`` in the working directory This returns length of written (maybe decoded) data. @@ -994,7 +984,7 @@ if 'l' in flags: self.wvfs.symlink(data, filename) else: - self.wvfs.write(filename, data) + self.wvfs.write(filename, data, backgroundclose=backgroundclose) if 'x' in flags: self.wvfs.setflags(filename, False, True) return len(data) @@ -1488,6 +1478,27 @@ return fparent1 + def checkcommitpatterns(self, wctx, vdirs, match, status, fail): + """check for commit arguments that aren't commitable""" + if match.isexact() or match.prefix(): + matched = set(status.modified + status.added + status.removed) + + for f in match.files(): + f = self.dirstate.normalize(f) + if f == '.' or f in matched or f in wctx.substate: + continue + if f in status.deleted: + fail(f, _('file not found!')) + if f in vdirs: # visited directory + d = f + '/' + for mf in matched: + if mf.startswith(d): + break + else: + fail(f, _("no match under directory!")) + elif f not in self.dirstate: + fail(f, _("file not tracked!")) + @unfilteredmethod def commit(self, text="", user=None, date=None, match=None, force=False, editor=False, extra=None): @@ -1582,24 +1593,8 @@ status.removed.insert(0, '.hgsubstate') # make sure all explicit patterns are matched - if not force and (match.isexact() or match.prefix()): - matched = set(status.modified + status.added + status.removed) - - for f in match.files(): - f = self.dirstate.normalize(f) - if f == '.' 
or f in matched or f in wctx.substate: - continue - if f in status.deleted: - fail(f, _('file not found!')) - if f in vdirs: # visited directory - d = f + '/' - for mf in matched: - if mf.startswith(d): - break - else: - fail(f, _("no match under directory!")) - elif f not in self.dirstate: - fail(f, _("file not tracked!")) + if not force: + self.checkcommitpatterns(wctx, vdirs, match, status, fail) cctx = context.workingcommitctx(self, status, text, user, date, extra) @@ -1893,8 +1888,8 @@ @unfilteredpropertycache def prepushoutgoinghooks(self): - """Return util.hooks consists of "(repo, remote, outgoing)" - functions, which are called before pushing changesets. + """Return util.hooks consists of a pushop with repo, remote, outgoing + methods, which are called before pushing changesets. """ return util.hooks() @@ -1962,3 +1957,27 @@ def islocal(path): return True + +def newreporequirements(repo): + """Determine the set of requirements for a new local repository. + + Extensions can wrap this function to specify custom requirements for + new repositories. + """ + ui = repo.ui + requirements = set(['revlogv1']) + if ui.configbool('format', 'usestore', True): + requirements.add('store') + if ui.configbool('format', 'usefncache', True): + requirements.add('fncache') + if ui.configbool('format', 'dotencode', True): + requirements.add('dotencode') + + if scmutil.gdinitconfig(ui): + requirements.add('generaldelta') + if ui.configbool('experimental', 'treemanifest', False): + requirements.add('treemanifest') + if ui.configbool('experimental', 'manifestv2', False): + requirements.add('manifestv2') + + return requirements
--- a/mercurial/lock.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/lock.py Sat Apr 16 18:06:48 2016 -0500 @@ -9,7 +9,6 @@ import contextlib import errno -import os import socket import time import warnings @@ -77,8 +76,8 @@ self.release() def _getpid(self): - # wrapper around os.getpid() to make testing easier - return os.getpid() + # wrapper around util.getpid() to make testing easier + return util.getpid() def lock(self): timeout = self.timeout @@ -235,6 +234,8 @@ if not self._parentheld: for callback in self.postrelease: callback() + # Prevent double usage and help clear cycles. + self.postrelease = None def release(*locks): for lock in locks:
--- a/mercurial/mail.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/mail.py Sat Apr 16 18:06:48 2016 -0500 @@ -48,9 +48,10 @@ This class allows to pass any keyword arguments to SSL socket creation. ''' - def __init__(self, sslkwargs, **kwargs): + def __init__(self, sslkwargs, host=None, **kwargs): smtplib.SMTP.__init__(self, **kwargs) self._sslkwargs = sslkwargs + self._host = host def starttls(self, keyfile=None, certfile=None): if not self.has_extn("starttls"): @@ -59,6 +60,7 @@ (resp, reply) = self.docmd("STARTTLS") if resp == 220: self.sock = sslutil.wrapsocket(self.sock, keyfile, certfile, + serverhostname=self._host, **self._sslkwargs) self.file = smtplib.SSLFakeFile(self.sock) self.helo_resp = None @@ -72,10 +74,12 @@ This class allows to pass any keyword arguments to SSL socket creation. ''' - def __init__(self, sslkwargs, keyfile=None, certfile=None, **kwargs): + def __init__(self, sslkwargs, keyfile=None, certfile=None, host=None, + **kwargs): self.keyfile = keyfile self.certfile = certfile smtplib.SMTP.__init__(self, **kwargs) + self._host = host self.default_port = smtplib.SMTP_SSL_PORT self._sslkwargs = sslkwargs @@ -85,6 +89,7 @@ new_socket = socket.create_connection((host, port), timeout) new_socket = sslutil.wrapsocket(new_socket, self.keyfile, self.certfile, + serverhostname=self._host, **self._sslkwargs) self.file = smtplib.SSLFakeFile(new_socket) return new_socket @@ -114,9 +119,9 @@ sslkwargs = {'ui': ui} if smtps: ui.note(_('(using smtps)\n')) - s = SMTPS(sslkwargs, local_hostname=local_hostname) + s = SMTPS(sslkwargs, local_hostname=local_hostname, host=mailhost) elif starttls: - s = STARTTLS(sslkwargs, local_hostname=local_hostname) + s = STARTTLS(sslkwargs, local_hostname=local_hostname, host=mailhost) else: s = smtplib.SMTP(local_hostname=local_hostname) if smtps: @@ -332,3 +337,21 @@ if not display: s, cs = _encode(ui, s, charsets) return mimetextqp(s, 'plain', cs) + +def headdecode(s): + '''Decodes RFC-2047 header''' + uparts = [] + 
for part, charset in email.Header.decode_header(s): + if charset is not None: + try: + uparts.append(part.decode(charset)) + continue + except UnicodeDecodeError: + pass + try: + uparts.append(part.decode('UTF-8')) + continue + except UnicodeDecodeError: + pass + uparts.append(part.decode('ISO-8859-1')) + return encoding.tolocal(u' '.join(uparts).encode('UTF-8'))
--- a/mercurial/manifest.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/manifest.py Sat Apr 16 18:06:48 2016 -0500 @@ -325,6 +325,9 @@ def iteritems(self): return (x[:2] for x in self._lm.iterentries()) + def iterentries(self): + return self._lm.iterentries() + def text(self, usemanifestv2=False): if usemanifestv2: return _textv2(self._lm.iterentries()) @@ -517,6 +520,15 @@ self._node = node self._dirty = False + def iterentries(self): + self._load() + for p, n in sorted(self._dirs.items() + self._files.items()): + if p in self._files: + yield self._subpath(p), n, self._flags.get(p, '') + else: + for x in n.iterentries(): + yield x + def iteritems(self): self._load() for p, n in sorted(self._dirs.items() + self._files.items()): @@ -627,7 +639,6 @@ def setflag(self, f, flags): """Set the flags (symlink, executable) for path f.""" - assert 't' not in flags self._load() dir, subpath = _splittopdir(f) if dir: @@ -849,9 +860,7 @@ def text(self, usemanifestv2=False): """Get the full data of this manifest as a bytestring.""" self._load() - flags = self.flags - return _text(((f, self[f], flags(f)) for f in self.keys()), - usemanifestv2) + return _text(self.iterentries(), usemanifestv2) def dirtext(self, usemanifestv2=False): """Get the full data of this directory as a bytestring. Make sure that @@ -920,7 +929,8 @@ return manifestdict(data) def dirlog(self, dir): - assert self._treeondisk + if dir: + assert self._treeondisk if dir not in self._dirlogcache: self._dirlogcache[dir] = manifest(self.opener, dir, self._dirlogcache) @@ -945,6 +955,22 @@ d = mdiff.patchtext(self.revdiff(self.deltaparent(r), r)) return self._newmanifest(d) + def readshallowdelta(self, node): + '''For flat manifests, this is the same as readdelta(). For + treemanifests, this will read the delta for this revlog's directory, + without recursively reading subdirectory manifests. Instead, any + subdirectory entry will be reported as it appears in the manifests, i.e. 
+ the subdirectory will be reported among files and distinguished only by + its 't' flag.''' + if not self._treeondisk: + return self.readdelta(node) + if self._usemanifestv2: + raise error.Abort( + "readshallowdelta() not implemented for manifestv2") + r = self.rev(node) + d = mdiff.patchtext(self.revdiff(self.deltaparent(r), r)) + return manifestdict(d) + def readfast(self, node): '''use the faster of readdelta or read @@ -959,6 +985,15 @@ return self.readdelta(node) return self.read(node) + def readshallowfast(self, node): + '''like readfast(), but calls readshallowdelta() instead of readdelta() + ''' + r = self.rev(node) + deltaparent = self.deltaparent(r) + if deltaparent != revlog.nullrev and deltaparent in self.parentrevs(r): + return self.readshallowdelta(node) + return self.readshallow(node) + def read(self, node): if node == revlog.nullid: return self._newmanifest() # don't upset local cache @@ -980,6 +1015,13 @@ self._mancache[node] = (m, arraytext) return m + def readshallow(self, node): + '''Reads the manifest in this directory. When using flat manifests, + this manifest will generally have files in subdirectories in it. Does + not cache the manifest as the callers generally do not read the same + version twice.''' + return manifestdict(self.revision(node)) + def find(self, node, f): '''look up entry for a single file efficiently. return (node, flags) pair if found, (None, None) if not.'''
--- a/mercurial/match.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/match.py Sat Apr 16 18:06:48 2016 -0500 @@ -334,13 +334,13 @@ m.bad = badfn return m -class narrowmatcher(match): +class subdirmatcher(match): """Adapt a matcher to work on a subdirectory only. The paths are remapped to remove/insert the path as needed: >>> m1 = match('root', '', ['a.txt', 'sub/b.txt']) - >>> m2 = narrowmatcher('sub', m1) + >>> m2 = subdirmatcher('sub', m1) >>> bool(m2('a.txt')) False >>> bool(m2('b.txt')) @@ -381,7 +381,16 @@ self._always = any(f == path for f in matcher._files) self._anypats = matcher._anypats + # Some information is lost in the superclass's constructor, so we + # can not accurately create the matching function for the subdirectory + # from the inputs. Instead, we override matchfn() and visitdir() to + # call the original matcher with the subdirectory path prepended. self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn) + def visitdir(dir): + if dir == '.': + return matcher.visitdir(self._path) + return matcher.visitdir(self._path + "/" + dir) + self.visitdir = visitdir self._fileroots = set(self._files) def abs(self, f):
--- a/mercurial/merge.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/merge.py Sat Apr 16 18:06:48 2016 -0500 @@ -65,8 +65,10 @@ (experimental) m: the external merge driver defined for this merge plus its run state (experimental) + f: a (filename, dictonary) tuple of optional values for a given file X: unsupported mandatory record type (used in tests) x: unsupported advisory record type (used in tests) + l: the labels for the parts of the merge. Merge driver run states (experimental): u: driver-resolved files unmarked -- needs to be run next time we're about @@ -79,11 +81,11 @@ statepathv2 = 'merge/state2' @staticmethod - def clean(repo, node=None, other=None): + def clean(repo, node=None, other=None, labels=None): """Initialize a brand new merge state, removing any existing state on disk.""" ms = mergestate(repo) - ms.reset(node, other) + ms.reset(node, other, labels) return ms @staticmethod @@ -99,11 +101,14 @@ Do not use this directly! Instead call read() or clean().""" self._repo = repo self._dirty = False + self._labels = None - def reset(self, node=None, other=None): + def reset(self, node=None, other=None, labels=None): self._state = {} + self._stateextras = {} self._local = None self._other = None + self._labels = labels for var in ('localctx', 'otherctx'): if var in vars(self): delattr(self, var) @@ -126,6 +131,7 @@ of on disk file. 
""" self._state = {} + self._stateextras = {} self._local = None self._other = None for var in ('localctx', 'otherctx'): @@ -152,6 +158,19 @@ elif rtype in 'FDC': bits = record.split('\0') self._state[bits[0]] = bits[1:] + elif rtype == 'f': + filename, rawextras = record.split('\0', 1) + extraparts = rawextras.split('\0') + extras = {} + i = 0 + while i < len(extraparts): + extras[extraparts[i]] = extraparts[i + 1] + i += 2 + + self._stateextras[filename] = extras + elif rtype == 'l': + labels = record.split('\0', 2) + self._labels = [l for l in labels if len(l) > 0] elif not rtype.islower(): unsupported.add(rtype) self._results = {} @@ -298,7 +317,7 @@ @util.propertycache def otherctx(self): if self._other is None: - raise RuntimeError("localctx accessed but self._local isn't set") + raise RuntimeError("otherctx accessed but self._other isn't set") return self._repo[self._other] def active(self): @@ -336,6 +355,13 @@ records.append(('C', '\0'.join([d] + v))) else: records.append(('F', '\0'.join([d] + v))) + for filename, extras in sorted(self._stateextras.iteritems()): + rawextras = '\0'.join('%s\0%s' % (k, v) for k, v in + extras.iteritems()) + records.append(('f', '%s\0%s' % (filename, rawextras))) + if self._labels is not None: + labels = '\0'.join(self._labels) + records.append(('l', labels)) return records def _writerecords(self, records): @@ -388,6 +414,7 @@ fca.path(), hex(fca.filenode()), fco.path(), hex(fco.filenode()), fcl.flags()] + self._stateextras[fd] = { 'ancestorlinknode' : hex(fca.node()) } self._dirty = True def __contains__(self, dfile): @@ -423,17 +450,26 @@ if entry[0] == 'd': yield f - def _resolve(self, preresolve, dfile, wctx, labels=None): + def extras(self, filename): + return self._stateextras.setdefault(filename, {}) + + def _resolve(self, preresolve, dfile, wctx): """rerun merge process for file path `dfile`""" if self[dfile] in 'rd': return True, 0 stateentry = self._state[dfile] state, hash, lfile, afile, anode, ofile, onode, flags 
= stateentry octx = self._repo[self._other] + extras = self.extras(dfile) + anccommitnode = extras.get('ancestorlinknode') + if anccommitnode: + actx = self._repo[anccommitnode] + else: + actx = None fcd = self._filectxorabsent(hash, wctx, dfile) fco = self._filectxorabsent(onode, octx, ofile) # TODO: move this to filectxorabsent - fca = self._repo.filectx(afile, fileid=anode) + fca = self._repo.filectx(afile, fileid=anode, changeid=actx) # "premerge" x flags flo = fco.flags() fla = fca.flags() @@ -454,14 +490,15 @@ self._repo.wvfs.unlinkpath(dfile, ignoremissing=True) complete, r, deleted = filemerge.premerge(self._repo, self._local, lfile, fcd, fco, fca, - labels=labels) + labels=self._labels) else: complete, r, deleted = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca, - labels=labels) + labels=self._labels) if r is None: # no real conflict del self._state[dfile] + self._stateextras.pop(dfile, None) self._dirty = True elif not r: self.mark(dfile, 'r') @@ -495,17 +532,17 @@ else: return ctx[f] - def preresolve(self, dfile, wctx, labels=None): + def preresolve(self, dfile, wctx): """run premerge process for dfile Returns whether the merge is complete, and the exit code.""" - return self._resolve(True, dfile, wctx, labels=labels) + return self._resolve(True, dfile, wctx) - def resolve(self, dfile, wctx, labels=None): + def resolve(self, dfile, wctx): """run merge process (assuming premerge was run) for dfile Returns the exit code of the merge.""" - return self._resolve(False, dfile, wctx, labels=labels)[1] + return self._resolve(False, dfile, wctx)[1] def counts(self): """return counts for updated, merged and removed files in this @@ -570,29 +607,29 @@ def _checkunknownfile(repo, wctx, mctx, f, f2=None): if f2 is None: f2 = f - return (repo.wvfs.isfileorlink(f) - and repo.wvfs.audit.check(f) + return (repo.wvfs.audit.check(f) + and repo.wvfs.isfileorlink(f) and repo.dirstate.normalize(f) not in repo.dirstate and mctx[f2].cmp(wctx[f])) -def 
_checkunknownfiles(repo, wctx, mctx, force, actions): +def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce): """ Considers any actions that care about the presence of conflicting unknown files. For some actions, the result is to abort; for others, it is to choose a different action. """ conflicts = set() + warnconflicts = set() + abortconflicts = set() + unknownconfig = _getcheckunknownconfig(repo, 'merge', 'checkunknown') + ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored') if not force: - abortconflicts = set() - warnconflicts = set() def collectconflicts(conflicts, config): if config == 'abort': abortconflicts.update(conflicts) elif config == 'warn': warnconflicts.update(conflicts) - unknownconfig = _getcheckunknownconfig(repo, 'merge', 'checkunknown') - ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored') for f, (m, args, msg) in actions.iteritems(): if m in ('c', 'dc'): if _checkunknownfile(repo, wctx, mctx, f): @@ -606,28 +643,54 @@ unknownconflicts = conflicts - ignoredconflicts collectconflicts(ignoredconflicts, ignoredconfig) collectconflicts(unknownconflicts, unknownconfig) - for f in sorted(abortconflicts): - repo.ui.warn(_("%s: untracked file differs\n") % f) - if abortconflicts: - raise error.Abort(_("untracked files in working directory " - "differ from files in requested revision")) + else: + for f, (m, args, msg) in actions.iteritems(): + if m == 'cm': + fl2, anc = args + different = _checkunknownfile(repo, wctx, mctx, f) + if repo.dirstate._ignore(f): + config = ignoredconfig + else: + config = unknownconfig - for f in sorted(warnconflicts): - repo.ui.warn(_("%s: replacing untracked file\n") % f) + # The behavior when force is True is described by this table: + # config different mergeforce | action backup + # * n * | get n + # * y y | merge - + # abort y n | merge - (1) + # warn y n | warn + get y + # ignore y n | get y + # + # (1) this is probably the wrong behavior here -- we should + # 
probably abort, but some actions like rebases currently + # don't like an abort happening in the middle of + # merge.update. + if not different: + actions[f] = ('g', (fl2, False), "remote created") + elif mergeforce or config == 'abort': + actions[f] = ('m', (f, f, None, False, anc), + "remote differs from untracked local") + elif config == 'abort': + abortconflicts.add(f) + else: + if config == 'warn': + warnconflicts.add(f) + actions[f] = ('g', (fl2, True), "remote created") + + for f in sorted(abortconflicts): + repo.ui.warn(_("%s: untracked file differs\n") % f) + if abortconflicts: + raise error.Abort(_("untracked files in working directory " + "differ from files in requested revision")) + + for f in sorted(warnconflicts): + repo.ui.warn(_("%s: replacing untracked file\n") % f) for f, (m, args, msg) in actions.iteritems(): backup = f in conflicts if m == 'c': flags, = args actions[f] = ('g', (flags, backup), msg) - elif m == 'cm': - fl2, anc = args - different = _checkunknownfile(repo, wctx, mctx, f) - if different: - actions[f] = ('m', (f, f, None, False, anc), - "remote differs from untracked local") - else: - actions[f] = ('g', (fl2, backup), "remote created") def _forgetremoved(wctx, mctx, branchmerge): """ @@ -747,10 +810,8 @@ if '.hgsubstate' in m1: # check whether sub state is modified - for s in sorted(wctx.substate): - if wctx.sub(s).dirty(): - m1['.hgsubstate'] += '+' - break + if any(wctx.sub(s).dirty() for s in wctx.substate): + m1['.hgsubstate'] += '+' # Compare manifests if matcher is not None: @@ -876,13 +937,14 @@ del actions[f] # don't get = keep local deleted def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, - acceptremote, followcopies, matcher=None): + acceptremote, followcopies, matcher=None, + mergeforce=False): "Calculate the actions needed to merge mctx into wctx using ancestors" if len(ancestors) == 1: # default actions, diverge, renamedelete = manifestmerge( repo, wctx, mctx, ancestors[0], branchmerge, force, 
matcher, acceptremote, followcopies) - _checkunknownfiles(repo, wctx, mctx, force, actions) + _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce) else: # only when merge.preferancestor=* - the default repo.ui.note( @@ -897,7 +959,7 @@ actions, diverge1, renamedelete1 = manifestmerge( repo, wctx, mctx, ancestor, branchmerge, force, matcher, acceptremote, followcopies) - _checkunknownfiles(repo, wctx, mctx, force, actions) + _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce) # Track the shortest set of warning on the theory that bid # merge will correctly incorporate more information @@ -1003,29 +1065,30 @@ wwrite = repo.wwrite ui = repo.ui i = 0 - for f, (flags, backup), msg in actions: - repo.ui.debug(" %s: %s -> g\n" % (f, msg)) - if verbose: - repo.ui.note(_("getting %s\n") % f) + with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)): + for f, (flags, backup), msg in actions: + repo.ui.debug(" %s: %s -> g\n" % (f, msg)) + if verbose: + repo.ui.note(_("getting %s\n") % f) - if backup: - absf = repo.wjoin(f) - orig = scmutil.origpath(ui, repo, absf) - try: - # TODO Mercurial has always aborted if an untracked directory - # is replaced by a tracked file, or generally with - # file/directory merges. This needs to be sorted out. - if repo.wvfs.isfileorlink(f): - util.rename(absf, orig) - except OSError as e: - if e.errno != errno.ENOENT: - raise + if backup: + absf = repo.wjoin(f) + orig = scmutil.origpath(ui, repo, absf) + try: + # TODO Mercurial has always aborted if an untracked + # directory is replaced by a tracked file, or generally + # with file/directory merges. This needs to be sorted out. 
+ if repo.wvfs.isfileorlink(f): + util.rename(absf, orig) + except OSError as e: + if e.errno != errno.ENOENT: + raise - wwrite(f, fctx(f).data(), flags) - if i == 100: - yield i, f - i = 0 - i += 1 + wwrite(f, fctx(f).data(), flags, backgroundclose=True) + if i == 100: + yield i, f + i = 0 + i += 1 if i > 0: yield i, f @@ -1040,7 +1103,7 @@ """ updated, merged, removed = 0, 0, 0 - ms = mergestate.clean(repo, wctx.p1().node(), mctx.node()) + ms = mergestate.clean(repo, wctx.p1().node(), mctx.node(), labels) moves = [] for m, l in actions.items(): l.sort() @@ -1193,7 +1256,7 @@ overwrite) continue audit(f) - complete, r = ms.preresolve(f, wctx, labels=labels) + complete, r = ms.preresolve(f, wctx) if not complete: numupdates += 1 tocomplete.append((f, args, msg)) @@ -1203,7 +1266,7 @@ repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) - ms.resolve(f, wctx, labels=labels) + ms.resolve(f, wctx) ms.commit() @@ -1315,7 +1378,7 @@ repo.dirstate.normal(f) def update(repo, node, branchmerge, force, ancestor=None, - mergeancestor=False, labels=None, matcher=None): + mergeancestor=False, labels=None, matcher=None, mergeforce=False): """ Perform a merge between the working directory and the given node @@ -1328,6 +1391,9 @@ If false, merging with an ancestor (fast-forward) is only allowed between different named branches. This flag is used by rebase extension as a temporary fix and should be avoided in general. + labels = labels to use for base, local and other + mergeforce = whether the merge was run with 'merge --force' (deprecated): if + this is True, then 'force' should be True as well. 
The table below shows all the behaviors of the update command given the -c and -C or no options, whether the working directory @@ -1463,7 +1529,7 @@ ### calculate phase actionbyfile, diverge, renamedelete = calculateupdates( repo, wc, p2, pas, branchmerge, force, mergeancestor, - followcopies, matcher=matcher) + followcopies, matcher=matcher, mergeforce=mergeforce) # Prompt and create actions. Most of this is in the resolve phase # already, but we can't handle .hgsubstate in filemerge or
--- a/mercurial/mpatch.c Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/mpatch.c Sat Apr 16 18:06:48 2016 -0500 @@ -404,7 +404,8 @@ if (m == NULL) return NULL; - mpatch_Error = PyErr_NewException("mpatch.mpatchError", NULL, NULL); + mpatch_Error = PyErr_NewException("mercurial.mpatch.mpatchError", + NULL, NULL); Py_INCREF(mpatch_Error); PyModule_AddObject(m, "mpatchError", mpatch_Error); @@ -415,6 +416,7 @@ initmpatch(void) { Py_InitModule3("mpatch", methods, mpatch_doc); - mpatch_Error = PyErr_NewException("mpatch.mpatchError", NULL, NULL); + mpatch_Error = PyErr_NewException("mercurial.mpatch.mpatchError", + NULL, NULL); } #endif
--- a/mercurial/namespaces.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/namespaces.py Sat Apr 16 18:06:48 2016 -0500 @@ -30,7 +30,7 @@ # branches) to be initialized somewhere, so that place is here bmknames = lambda repo: repo._bookmarks.keys() bmknamemap = lambda repo, name: tolist(repo._bookmarks.get(name)) - bmknodemap = lambda repo, name: repo.nodebookmarks(name) + bmknodemap = lambda repo, node: repo.nodebookmarks(node) n = namespace("bookmarks", templatename="bookmark", # i18n: column positioning for "hg log" logfmt=_("bookmark: %s\n"), @@ -40,7 +40,7 @@ tagnames = lambda repo: [t for t, n in repo.tagslist()] tagnamemap = lambda repo, name: tolist(repo._tagscache.tags.get(name)) - tagnodemap = lambda repo, name: repo.nodetags(name) + tagnodemap = lambda repo, node: repo.nodetags(node) n = namespace("tags", templatename="tag", # i18n: column positioning for "hg log" logfmt=_("tag: %s\n"), @@ -149,8 +149,8 @@ logfmt: the format to use for (i18n-ed) log output; if not specified it is composed from logname listnames: function to list all names - namemap: function that inputs a node, output name(s) - nodemap: function that inputs a name, output node(s) + namemap: function that inputs a name, output node(s) + nodemap: function that inputs a node, output name(s) deprecated: set of names to be masked for ordinary use """
--- a/mercurial/node.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/node.py Sat Apr 16 18:06:48 2016 -0500 @@ -14,13 +14,13 @@ bin = binascii.unhexlify nullrev = -1 -nullid = "\0" * 20 +nullid = b"\0" * 20 nullhex = hex(nullid) # pseudo identifiers for working directory # (they are experimental, so don't add too many dependencies on them) wdirrev = 0x7fffffff -wdirid = "\xff" * 20 +wdirid = b"\xff" * 20 def short(node): return hex(node[:6])
--- a/mercurial/osutil.c Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/osutil.c Sat Apr 16 18:06:48 2016 -0500 @@ -203,14 +203,15 @@ PyErr_NoMemory(); goto error_nomem; } - strcpy(pattern, path); + memcpy(pattern, path, plen); if (plen > 0) { char c = path[plen-1]; if (c != ':' && c != '/' && c != '\\') pattern[plen++] = '\\'; } - strcpy(pattern + plen, "*"); + pattern[plen++] = '*'; + pattern[plen] = '\0'; fh = FindFirstFileA(pattern, &fd); if (fh == INVALID_HANDLE_VALUE) {
--- a/mercurial/parser.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/parser.py Sat Apr 16 18:06:48 2016 -0500 @@ -220,3 +220,323 @@ simplified.append(simplifyinfixops(x, targetnodes)) simplified.append(op) return tuple(reversed(simplified)) + +def parseerrordetail(inst): + """Compose error message from specified ParseError object + """ + if len(inst.args) > 1: + return _('at %s: %s') % (inst.args[1], inst.args[0]) + else: + return inst.args[0] + +class alias(object): + """Parsed result of alias""" + + def __init__(self, name, args, err, replacement): + self.name = name + self.args = args + self.error = err + self.replacement = replacement + # whether own `error` information is already shown or not. + # this avoids showing same warning multiple times at each + # `expandaliases`. + self.warned = False + +class basealiasrules(object): + """Parsing and expansion rule set of aliases + + This is a helper for fileset/revset/template aliases. A concrete rule set + should be made by sub-classing this and implementing class/static methods. + + It supports alias expansion of symbol and funciton-call styles:: + + # decl = defn + h = heads(default) + b($1) = ancestors($1) - ancestors(default) + """ + # typically a config section, which will be included in error messages + _section = None + # tag of symbol node + _symbolnode = 'symbol' + + def __new__(cls): + raise TypeError("'%s' is not instantiatable" % cls.__name__) + + @staticmethod + def _parse(spec): + """Parse an alias name, arguments and definition""" + raise NotImplementedError + + @staticmethod + def _trygetfunc(tree): + """Return (name, args) if tree is a function; otherwise None""" + raise NotImplementedError + + @classmethod + def _builddecl(cls, decl): + """Parse an alias declaration into ``(name, args, errorstr)`` + + This function analyzes the parsed tree. The parsing rule is provided + by ``_parse()``. 
+ + - ``name``: of declared alias (may be ``decl`` itself at error) + - ``args``: list of argument names (or None for symbol declaration) + - ``errorstr``: detail about detected error (or None) + + >>> sym = lambda x: ('symbol', x) + >>> symlist = lambda *xs: ('list',) + tuple(sym(x) for x in xs) + >>> func = lambda n, a: ('func', sym(n), a) + >>> parsemap = { + ... 'foo': sym('foo'), + ... '$foo': sym('$foo'), + ... 'foo::bar': ('dagrange', sym('foo'), sym('bar')), + ... 'foo()': func('foo', None), + ... '$foo()': func('$foo', None), + ... 'foo($1, $2)': func('foo', symlist('$1', '$2')), + ... 'foo(bar_bar, baz.baz)': + ... func('foo', symlist('bar_bar', 'baz.baz')), + ... 'foo(bar($1, $2))': + ... func('foo', func('bar', symlist('$1', '$2'))), + ... 'foo($1, $2, nested($1, $2))': + ... func('foo', (symlist('$1', '$2') + + ... (func('nested', symlist('$1', '$2')),))), + ... 'foo("bar")': func('foo', ('string', 'bar')), + ... 'foo($1, $2': error.ParseError('unexpected token: end', 10), + ... 'foo("bar': error.ParseError('unterminated string', 5), + ... 'foo($1, $2, $1)': func('foo', symlist('$1', '$2', '$1')), + ... } + >>> def parse(expr): + ... x = parsemap[expr] + ... if isinstance(x, Exception): + ... raise x + ... return x + >>> def trygetfunc(tree): + ... if not tree or tree[0] != 'func' or tree[1][0] != 'symbol': + ... return None + ... if not tree[2]: + ... return tree[1][1], [] + ... if tree[2][0] == 'list': + ... return tree[1][1], list(tree[2][1:]) + ... return tree[1][1], [tree[2]] + >>> class aliasrules(basealiasrules): + ... _parse = staticmethod(parse) + ... 
_trygetfunc = staticmethod(trygetfunc) + >>> builddecl = aliasrules._builddecl + >>> builddecl('foo') + ('foo', None, None) + >>> builddecl('$foo') + ('$foo', None, "'$' not for alias arguments") + >>> builddecl('foo::bar') + ('foo::bar', None, 'invalid format') + >>> builddecl('foo()') + ('foo', [], None) + >>> builddecl('$foo()') + ('$foo()', None, "'$' not for alias arguments") + >>> builddecl('foo($1, $2)') + ('foo', ['$1', '$2'], None) + >>> builddecl('foo(bar_bar, baz.baz)') + ('foo', ['bar_bar', 'baz.baz'], None) + >>> builddecl('foo($1, $2, nested($1, $2))') + ('foo($1, $2, nested($1, $2))', None, 'invalid argument list') + >>> builddecl('foo(bar($1, $2))') + ('foo(bar($1, $2))', None, 'invalid argument list') + >>> builddecl('foo("bar")') + ('foo("bar")', None, 'invalid argument list') + >>> builddecl('foo($1, $2') + ('foo($1, $2', None, 'at 10: unexpected token: end') + >>> builddecl('foo("bar') + ('foo("bar', None, 'at 5: unterminated string') + >>> builddecl('foo($1, $2, $1)') + ('foo', None, 'argument names collide with each other') + """ + try: + tree = cls._parse(decl) + except error.ParseError as inst: + return (decl, None, parseerrordetail(inst)) + + if tree[0] == cls._symbolnode: + # "name = ...." style + name = tree[1] + if name.startswith('$'): + return (decl, None, _("'$' not for alias arguments")) + return (name, None, None) + + func = cls._trygetfunc(tree) + if func: + # "name(arg, ....) = ...." 
style + name, args = func + if name.startswith('$'): + return (decl, None, _("'$' not for alias arguments")) + if any(t[0] != cls._symbolnode for t in args): + return (decl, None, _("invalid argument list")) + if len(args) != len(set(args)): + return (name, None, _("argument names collide with each other")) + return (name, [t[1] for t in args], None) + + return (decl, None, _("invalid format")) + + @classmethod + def _relabelargs(cls, tree, args): + """Mark alias arguments as ``_aliasarg``""" + if not isinstance(tree, tuple): + return tree + op = tree[0] + if op != cls._symbolnode: + return (op,) + tuple(cls._relabelargs(x, args) for x in tree[1:]) + + assert len(tree) == 2 + sym = tree[1] + if sym in args: + op = '_aliasarg' + elif sym.startswith('$'): + raise error.ParseError(_("'$' not for alias arguments")) + return (op, sym) + + @classmethod + def _builddefn(cls, defn, args): + """Parse an alias definition into a tree and marks substitutions + + This function marks alias argument references as ``_aliasarg``. The + parsing rule is provided by ``_parse()``. + + ``args`` is a list of alias argument names, or None if the alias + is declared as a symbol. + + >>> parsemap = { + ... '$1 or foo': ('or', ('symbol', '$1'), ('symbol', 'foo')), + ... '$1 or $bar': ('or', ('symbol', '$1'), ('symbol', '$bar')), + ... '$10 or baz': ('or', ('symbol', '$10'), ('symbol', 'baz')), + ... '"$1" or "foo"': ('or', ('string', '$1'), ('string', 'foo')), + ... } + >>> class aliasrules(basealiasrules): + ... _parse = staticmethod(parsemap.__getitem__) + ... _trygetfunc = staticmethod(lambda x: None) + >>> builddefn = aliasrules._builddefn + >>> def pprint(tree): + ... print prettyformat(tree, ('_aliasarg', 'string', 'symbol')) + >>> args = ['$1', '$2', 'foo'] + >>> pprint(builddefn('$1 or foo', args)) + (or + ('_aliasarg', '$1') + ('_aliasarg', 'foo')) + >>> try: + ... builddefn('$1 or $bar', args) + ... except error.ParseError as inst: + ... 
print parseerrordetail(inst) + '$' not for alias arguments + >>> args = ['$1', '$10', 'foo'] + >>> pprint(builddefn('$10 or baz', args)) + (or + ('_aliasarg', '$10') + ('symbol', 'baz')) + >>> pprint(builddefn('"$1" or "foo"', args)) + (or + ('string', '$1') + ('string', 'foo')) + """ + tree = cls._parse(defn) + if args: + args = set(args) + else: + args = set() + return cls._relabelargs(tree, args) + + @classmethod + def build(cls, decl, defn): + """Parse an alias declaration and definition into an alias object""" + repl = efmt = None + name, args, err = cls._builddecl(decl) + if err: + efmt = _('failed to parse the declaration of %(section)s ' + '"%(name)s": %(error)s') + else: + try: + repl = cls._builddefn(defn, args) + except error.ParseError as inst: + err = parseerrordetail(inst) + efmt = _('failed to parse the definition of %(section)s ' + '"%(name)s": %(error)s') + if err: + err = efmt % {'section': cls._section, 'name': name, 'error': err} + return alias(name, args, err, repl) + + @classmethod + def buildmap(cls, items): + """Parse a list of alias (name, replacement) pairs into a dict of + alias objects""" + aliases = {} + for decl, defn in items: + a = cls.build(decl, defn) + aliases[a.name] = a + return aliases + + @classmethod + def _getalias(cls, aliases, tree): + """If tree looks like an unexpanded alias, return (alias, pattern-args) + pair. Return None otherwise. + """ + if not isinstance(tree, tuple): + return None + if tree[0] == cls._symbolnode: + name = tree[1] + a = aliases.get(name) + if a and a.args is None: + return a, None + func = cls._trygetfunc(tree) + if func: + name, args = func + a = aliases.get(name) + if a and a.args is not None: + return a, args + return None + + @classmethod + def _expandargs(cls, tree, args): + """Replace _aliasarg instances with the substitution value of the + same name in args, recursively. 
+ """ + if not isinstance(tree, tuple): + return tree + if tree[0] == '_aliasarg': + sym = tree[1] + return args[sym] + return tuple(cls._expandargs(t, args) for t in tree) + + @classmethod + def _expand(cls, aliases, tree, expanding, cache): + if not isinstance(tree, tuple): + return tree + r = cls._getalias(aliases, tree) + if r is None: + return tuple(cls._expand(aliases, t, expanding, cache) + for t in tree) + a, l = r + if a.error: + raise error.Abort(a.error) + if a in expanding: + raise error.ParseError(_('infinite expansion of %(section)s ' + '"%(name)s" detected') + % {'section': cls._section, 'name': a.name}) + # get cacheable replacement tree by expanding aliases recursively + expanding.append(a) + if a.name not in cache: + cache[a.name] = cls._expand(aliases, a.replacement, expanding, + cache) + result = cache[a.name] + expanding.pop() + if a.args is None: + return result + # substitute function arguments in replacement tree + if len(l) != len(a.args): + raise error.ParseError(_('invalid number of arguments: %d') + % len(l)) + l = [cls._expand(aliases, t, [], cache) for t in l] + return cls._expandargs(result, dict(zip(a.args, l))) + + @classmethod + def expand(cls, aliases, tree): + """Expand aliases in tree, recursively. + + 'aliases' is a dictionary mapping user defined aliases to alias objects. + """ + return cls._expand(aliases, tree, [], {})
--- a/mercurial/parsers.c Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/parsers.c Sat Apr 16 18:06:48 2016 -0500 @@ -1029,12 +1029,14 @@ return NULL; #define istat(__n, __d) \ - t = PyInt_FromSsize_t(self->__n); \ - if (!t) \ - goto bail; \ - if (PyDict_SetItemString(obj, __d, t) == -1) \ - goto bail; \ - Py_DECREF(t); + do { \ + t = PyInt_FromSsize_t(self->__n); \ + if (!t) \ + goto bail; \ + if (PyDict_SetItemString(obj, __d, t) == -1) \ + goto bail; \ + Py_DECREF(t); \ + } while (0) if (self->added) { Py_ssize_t len = PyList_GET_SIZE(self->added); @@ -1446,20 +1448,26 @@ goto bail; } - for (i = 0; i < len; i++) { + for (i = len - 1; i >= 0; i--) { int isfiltered; int parents[2]; - isfiltered = check_filter(filter, i); - if (isfiltered == -1) { - PyErr_SetString(PyExc_TypeError, - "unable to check filter"); - goto bail; - } - - if (isfiltered) { - nothead[i] = 1; - continue; + /* If nothead[i] == 1, it means we've seen an unfiltered child of this + * node already, and therefore this node is not filtered. So we can skip + * the expensive check_filter step. + */ + if (nothead[i] != 1) { + isfiltered = check_filter(filter, i); + if (isfiltered == -1) { + PyErr_SetString(PyExc_TypeError, + "unable to check filter"); + goto bail; + } + + if (isfiltered) { + nothead[i] = 1; + continue; + } } if (index_get_parents(self, i, parents, (int)len - 1) < 0)
--- a/mercurial/patch.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/patch.py Sat Apr 16 18:06:48 2016 -0500 @@ -8,7 +8,6 @@ from __future__ import absolute_import -import cStringIO import collections import copy import email @@ -31,11 +30,13 @@ diffhelpers, encoding, error, + mail, mdiff, pathutil, scmutil, util, ) +stringio = util.stringio gitre = re.compile('diff --git a/(.*) b/(.*)') tabsplitter = re.compile(r'(\t+|[^\t]+)') @@ -59,7 +60,7 @@ return len(l) == 2 and ' ' not in l[0] def chunk(lines): - return cStringIO.StringIO(''.join(lines)) + return stringio(''.join(lines)) def hgsplit(stream, cur): inheader = True @@ -92,7 +93,7 @@ def mimesplit(stream, cur): def msgfp(m): - fp = cStringIO.StringIO() + fp = stringio() g = email.Generator.Generator(fp, mangle_from_=False) g.flatten(m) fp.seek(0) @@ -210,8 +211,8 @@ try: msg = email.Parser.Parser().parse(fileobj) - subject = msg['Subject'] - data['user'] = msg['From'] + subject = msg['Subject'] and mail.headdecode(msg['Subject']) + data['user'] = msg['From'] and mail.headdecode(msg['From']) if not subject and not data['user']: # Not an email, restore parsed headers if any subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n' @@ -245,7 +246,7 @@ ui.debug('found patch at byte %d\n' % m.start(0)) diffs_seen += 1 - cfp = cStringIO.StringIO() + cfp = stringio() for line in payload[:m.start(0)].splitlines(): if line.startswith('# HG changeset patch') and not hgpatch: ui.debug('patch generated by hg export\n') @@ -1056,7 +1057,7 @@ continue # Remove comment lines patchfp = open(patchfn) - ncpatchfp = cStringIO.StringIO() + ncpatchfp = stringio() for line in patchfp: if not line.startswith('#'): ncpatchfp.write(line) @@ -1439,7 +1440,8 @@ ... hunkscomingfromfilterpatch.extend(h.hunks) >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch) - >>> fp = cStringIO.StringIO() + >>> from . import util + >>> fp = util.stringio() >>> for c in reversedhunks: ... 
c.write(fp) >>> fp.seek(0) @@ -1552,7 +1554,7 @@ } p = parser() - fp = cStringIO.StringIO() + fp = stringio() fp.write(''.join(originalchunks)) fp.seek(0) @@ -1731,7 +1733,7 @@ pos = lr.fp.tell() fp = lr.fp except IOError: - fp = cStringIO.StringIO(lr.fp.read()) + fp = stringio(lr.fp.read()) gitlr = linereader(fp) gitlr.push(firstline) gitpatches = readgitpatch(gitlr)
--- a/mercurial/pathutil.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/pathutil.py Sat Apr 16 18:06:48 2016 -0500 @@ -83,16 +83,17 @@ parts.pop() normparts.pop() prefixes = [] - while parts: - prefix = os.sep.join(parts) - normprefix = os.sep.join(normparts) + # It's important that we check the path parts starting from the root. + # This means we won't accidentaly traverse a symlink into some other + # filesystem (which is potentially expensive to access). + for i in range(len(parts)): + prefix = os.sep.join(parts[:i + 1]) + normprefix = os.sep.join(normparts[:i + 1]) if normprefix in self.auditeddir: - break + continue if self._realfs: self._checkfs(prefix, path) prefixes.append(normprefix) - parts.pop() - normparts.pop() self.audited.add(normpath) # only add prefixes to the cache after checking everything: we don't
--- a/mercurial/peer.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/peer.py Sat Apr 16 18:06:48 2016 -0500 @@ -39,7 +39,15 @@ return resref return call def submit(self): - pass + raise NotImplementedError() + +class iterbatcher(batcher): + + def submit(self): + raise NotImplementedError() + + def results(self): + raise NotImplementedError() class localbatch(batcher): '''performs the queued calls directly''' @@ -50,6 +58,19 @@ for name, args, opts, resref in self.calls: resref.set(getattr(self.local, name)(*args, **opts)) +class localiterbatcher(iterbatcher): + def __init__(self, local): + super(iterbatcher, self).__init__() + self.local = local + + def submit(self): + # submit for a local iter batcher is a noop + pass + + def results(self): + for name, args, opts, resref in self.calls: + yield getattr(self.local, name)(*args, **opts) + def batchable(f): '''annotation for batchable methods @@ -91,6 +112,14 @@ def batch(self): return localbatch(self) + def iterbatch(self): + """Batch requests but allow iterating over the results. + + This is to allow interleaving responses with things like + progress updates for clients. + """ + return localiterbatcher(self) + def capable(self, name): '''tell whether repo supports named capability. return False if not supported.
--- a/mercurial/phases.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/phases.py Sat Apr 16 18:06:48 2016 -0500 @@ -435,11 +435,11 @@ continue node = bin(nhex) phase = int(phase) - if phase == 0: + if phase == public: if node != nullid: repo.ui.warn(_('ignoring inconsistent public root' ' from remote: %s\n') % nhex) - elif phase == 1: + elif phase == draft: if node in nodemap: draftroots.append(node) else:
--- a/mercurial/progress.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/progress.py Sat Apr 16 18:06:48 2016 -0500 @@ -18,7 +18,7 @@ return ' '.join(s for s in args if s) def shouldprint(ui): - return not (ui.quiet or ui.plain()) and ( + return not (ui.quiet or ui.plain('progress')) and ( ui._isatty(sys.stderr) or ui.configbool('progress', 'assume-tty')) def fmtremaining(seconds):
--- a/mercurial/pure/mpatch.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/pure/mpatch.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,10 +7,14 @@ from __future__ import absolute_import -import cStringIO import struct -StringIO = cStringIO.StringIO +from . import pycompat +stringio = pycompat.stringio + +class mpatchError(Exception): + """error raised when a delta cannot be decoded + """ # This attempts to apply a series of patches in time proportional to # the total size of the patches, rather than patches * len(text). This @@ -22,6 +26,33 @@ # mmap and simply use memmove. This avoids creating a bunch of large # temporary string buffers. +def _pull(dst, src, l): # pull l bytes from src + while l: + f = src.pop() + if f[0] > l: # do we need to split? + src.append((f[0] - l, f[1] + l)) + dst.append((l, f[1])) + return + dst.append(f) + l -= f[0] + +def _move(m, dest, src, count): + """move count bytes from src to dest + + The file pointer is left at the end of dest. + """ + m.seek(src) + buf = m.read(count) + m.seek(dest) + m.write(buf) + +def _collect(m, buf, list): + start = buf + for l, p in reversed(list): + _move(m, buf, p, l) + buf += l + return (buf - start, start) + def patches(a, bins): if not bins: return a @@ -35,16 +66,7 @@ if not tl: return a - m = StringIO() - def move(dest, src, count): - """move count bytes from src to dest - - The file pointer is left at the end of dest. - """ - m.seek(src) - buf = m.read(count) - m.seek(dest) - m.write(buf) + m = stringio() # load our original text m.write(a) @@ -55,43 +77,29 @@ m.seek(pos) for p in bins: m.write(p) - def pull(dst, src, l): # pull l bytes from src - while l: - f = src.pop() - if f[0] > l: # do we need to split? 
- src.append((f[0] - l, f[1] + l)) - dst.append((l, f[1])) - return - dst.append(f) - l -= f[0] - - def collect(buf, list): - start = buf - for l, p in reversed(list): - move(buf, p, l) - buf += l - return (buf - start, start) - for plen in plens: # if our list gets too long, execute it if len(frags) > 128: b2, b1 = b1, b2 - frags = [collect(b1, frags)] + frags = [_collect(m, b1, frags)] new = [] end = pos + plen last = 0 while pos < end: m.seek(pos) - p1, p2, l = struct.unpack(">lll", m.read(12)) - pull(new, frags, p1 - last) # what didn't change - pull([], frags, p2 - p1) # what got deleted + try: + p1, p2, l = struct.unpack(">lll", m.read(12)) + except struct.error: + raise mpatchError("patch cannot be decoded") + _pull(new, frags, p1 - last) # what didn't change + _pull([], frags, p2 - p1) # what got deleted new.append((l, pos + 12)) # what got added pos += l + 12 last = p2 frags.extend(reversed(new)) # what was left at the end - t = collect(b2, frags) + t = _collect(m, b2, frags) m.seek(t[1]) return m.read(t[0]) @@ -113,7 +121,7 @@ outlen += length if bin != binend: - raise ValueError("patch cannot be decoded") + raise mpatchError("patch cannot be decoded") outlen += orig - last return outlen
--- a/mercurial/pure/parsers.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/pure/parsers.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,11 +7,12 @@ from __future__ import absolute_import -import cStringIO import struct import zlib from .node import nullid +from . import pycompat +stringio = pycompat.stringio _pack = struct.pack _unpack = struct.unpack @@ -90,7 +91,7 @@ def pack_dirstate(dmap, copymap, pl, now): now = int(now) - cs = cStringIO.StringIO() + cs = stringio() write = cs.write write("".join(pl)) for f, e in dmap.iteritems():
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/mercurial/pycompat.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,123 @@ +# pycompat.py - portability shim for python 3 +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""Mercurial portability shim for python 3. + +This contains aliases to hide python version-specific details from the core. +""" + +from __future__ import absolute_import + +try: + import cStringIO as io + stringio = io.StringIO +except ImportError: + import io + stringio = io.StringIO + +try: + import Queue as _queue + _queue.Queue +except ImportError: + import queue as _queue +empty = _queue.Empty +queue = _queue.Queue + +class _pycompatstub(object): + pass + +def _alias(alias, origin, items): + """ populate a _pycompatstub + + copies items from origin to alias + """ + def hgcase(item): + return item.replace('_', '').lower() + for item in items: + try: + setattr(alias, hgcase(item), getattr(origin, item)) + except AttributeError: + pass + +urlreq = _pycompatstub() +urlerr = _pycompatstub() +try: + import urllib2 + import urllib + _alias(urlreq, urllib, ( + "addclosehook", + "addinfourl", + "ftpwrapper", + "pathname2url", + "quote", + "splitattr", + "splitpasswd", + "splitport", + "splituser", + "unquote", + "url2pathname", + "urlencode", + "urlencode", + )) + _alias(urlreq, urllib2, ( + "AbstractHTTPHandler", + "BaseHandler", + "build_opener", + "FileHandler", + "FTPHandler", + "HTTPBasicAuthHandler", + "HTTPDigestAuthHandler", + "HTTPHandler", + "HTTPPasswordMgrWithDefaultRealm", + "HTTPSHandler", + "install_opener", + "ProxyHandler", + "Request", + "urlopen", + )) + _alias(urlerr, urllib2, ( + "HTTPError", + "URLError", + )) + +except ImportError: + import urllib.request + _alias(urlreq, urllib.request, ( + "AbstractHTTPHandler", + "addclosehook", + "addinfourl", + "BaseHandler", + "build_opener", + "FileHandler", + "FTPHandler", + "ftpwrapper", + 
"HTTPHandler", + "HTTPSHandler", + "install_opener", + "pathname2url", + "HTTPBasicAuthHandler", + "HTTPDigestAuthHandler", + "ProxyHandler", + "quote", + "Request", + "splitattr", + "splitpasswd", + "splitport", + "splituser", + "unquote", + "url2pathname", + "urlopen", + )) + import urllib.error + _alias(urlerr, urllib.error, ( + "HTTPError", + "URLError", + )) + +try: + xrange +except NameError: + import builtins + builtins.xrange = range
--- a/mercurial/registrar.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/registrar.py Sat Apr 16 18:06:48 2016 -0500 @@ -11,16 +11,20 @@ util, ) -class funcregistrar(object): +class _funcregistrarbase(object): """Base of decorator to register a fuction for specific purpose - The least derived class can be defined by overriding 'table' and - 'formatdoc', for example:: + This decorator stores decorated functions into own dict 'table'. + + The least derived class can be defined by overriding 'formatdoc', + for example:: - symbols = {} - class keyword(funcregistrar): - table = symbols - formatdoc = ":%s: %s" + class keyword(_funcregistrarbase): + _docformat = ":%s: %s" + + This should be used as below: + + keyword = registrar.keyword() @keyword('bar') def barfunc(*args, **kwargs): @@ -30,99 +34,211 @@ In this case: - - 'barfunc' is registered as 'bar' in 'symbols' - - online help uses ":bar: Explanation of bar keyword" + - 'barfunc' is stored as 'bar' in '_table' of an instance 'keyword' above + - 'barfunc.__doc__' becomes ":bar: Explanation of bar keyword" """ - - def __init__(self, decl): - """'decl' is a name or more descriptive string of a function + def __init__(self, table=None): + if table is None: + self._table = {} + else: + self._table = table - Specification of 'decl' depends on registration purpose. 
- """ - self.decl = decl + def __call__(self, decl, *args, **kwargs): + return lambda func: self._doregister(func, decl, *args, **kwargs) - table = None - - def __call__(self, func): - """Execute actual registration for specified function - """ - name = self.getname() + def _doregister(self, func, decl, *args, **kwargs): + name = self._getname(decl) if func.__doc__ and not util.safehasattr(func, '_origdoc'): doc = func.__doc__.strip() func._origdoc = doc - if callable(self.formatdoc): - func.__doc__ = self.formatdoc(doc) - else: - # convenient shortcut for simple format - func.__doc__ = self.formatdoc % (self.decl, doc) + func.__doc__ = self._formatdoc(decl, doc) - self.table[name] = func - self.extraaction(name, func) + self._table[name] = func + self._extrasetup(name, func, *args, **kwargs) return func - def getname(self): - """Return the name of the registered function from self.decl + def _parsefuncdecl(self, decl): + """Parse function declaration and return the name of function in it + """ + i = decl.find('(') + if i >= 0: + return decl[:i] + else: + return decl + + def _getname(self, decl): + """Return the name of the registered function from decl Derived class should override this, if it allows more descriptive 'decl' string than just a name. """ - return self.decl + return decl - def parsefuncdecl(self): - """Parse function declaration and return the name of function in it - """ - i = self.decl.find('(') - if i > 0: - return self.decl[:i] - else: - return self.decl + _docformat = None - def formatdoc(self, doc): + def _formatdoc(self, decl, doc): """Return formatted document of the registered function for help 'doc' is '__doc__.strip()' of the registered function. + """ + return self._docformat % (decl, doc) - If this is overridden by non-callable object in derived class, - such value is treated as "format string" and used to format - document by 'self.formatdoc % (self.decl, doc)' for convenience. 
- """ - raise NotImplementedError() - - def extraaction(self, name, func): - """Execute exra action for registered function, if needed + def _extrasetup(self, name, func): + """Execute exra setup for registered function, if needed """ pass -class delayregistrar(object): - """Decorator to delay actual registration until uisetup or so +class revsetpredicate(_funcregistrarbase): + """Decorator to register revset predicate + + Usage:: + + revsetpredicate = registrar.revsetpredicate() + + @revsetpredicate('mypredicate(arg1, arg2[, arg3])') + def mypredicatefunc(repo, subset, x): + '''Explanation of this revset predicate .... + ''' + pass + + The first string argument is used also in online help. + + Optional argument 'safe' indicates whether a predicate is safe for + DoS attack (False by default). - For example, the decorator class to delay registration by - 'keyword' funcregistrar can be defined as below:: + 'revsetpredicate' instance in example above can be used to + decorate multiple functions. + + Decorated functions are registered automatically at loading + extension, if an instance named as 'revsetpredicate' is used for + decorating in extension. + + Otherwise, explicit 'revset.loadpredicate()' is needed. + """ + _getname = _funcregistrarbase._parsefuncdecl + _docformat = "``%s``\n %s" + + def _extrasetup(self, name, func, safe=False): + func._safe = safe + +class filesetpredicate(_funcregistrarbase): + """Decorator to register fileset predicate - class extkeyword(delayregistrar): - registrar = keyword + Usage:: + + filesetpredicate = registrar.filesetpredicate() + + @filesetpredicate('mypredicate()') + def mypredicatefunc(mctx, x): + '''Explanation of this fileset predicate .... + ''' + pass + + The first string argument is used also in online help. + + Optional argument 'callstatus' indicates whether a predicate + implies 'matchctx.status()' at runtime or not (False, by + default). 
+ + Optional argument 'callexisting' indicates whether a predicate + implies 'matchctx.existing()' at runtime or not (False, by + default). + + 'filesetpredicate' instance in example above can be used to + decorate multiple functions. + + Decorated functions are registered automatically at loading + extension, if an instance named as 'filesetpredicate' is used for + decorating in extension. + + Otherwise, explicit 'fileset.loadpredicate()' is needed. """ - def __init__(self): - self._list = [] + _getname = _funcregistrarbase._parsefuncdecl + _docformat = "``%s``\n %s" - registrar = None + def _extrasetup(self, name, func, callstatus=False, callexisting=False): + func._callstatus = callstatus + func._callexisting = callexisting - def __call__(self, *args, **kwargs): - """Return the decorator to delay actual registration until setup - """ - assert self.registrar is not None - def decorator(func): - # invocation of self.registrar() here can detect argument - # mismatching immediately - self._list.append((func, self.registrar(*args, **kwargs))) - return func - return decorator +class _templateregistrarbase(_funcregistrarbase): + """Base of decorator to register functions as template specific one + """ + _docformat = ":%s: %s" + +class templatekeyword(_templateregistrarbase): + """Decorator to register template keyword + + Usage:: + + templaetkeyword = registrar.templatekeyword() + + @templatekeyword('mykeyword') + def mykeywordfunc(repo, ctx, templ, cache, revcache, **args): + '''Explanation of this template keyword .... + ''' + pass + + The first string argument is used also in online help. + + 'templatekeyword' instance in example above can be used to + decorate multiple functions. + + Decorated functions are registered automatically at loading + extension, if an instance named as 'templatekeyword' is used for + decorating in extension. + + Otherwise, explicit 'templatekw.loadkeyword()' is needed. 
+ """ + +class templatefilter(_templateregistrarbase): + """Decorator to register template filer + + Usage:: + + templatefilter = registrar.templatefilter() - def setup(self): - """Execute actual registration - """ - while self._list: - func, decorator = self._list.pop(0) - decorator(func) + @templatefilter('myfilter') + def myfilterfunc(text): + '''Explanation of this template filter .... + ''' + pass + + The first string argument is used also in online help. + + 'templatefilter' instance in example above can be used to + decorate multiple functions. + + Decorated functions are registered automatically at loading + extension, if an instance named as 'templatefilter' is used for + decorating in extension. + + Otherwise, explicit 'templatefilters.loadkeyword()' is needed. + """ + +class templatefunc(_templateregistrarbase): + """Decorator to register template function + + Usage:: + + templatefunc = registrar.templatefunc() + + @templatefunc('myfunc(arg1, arg2[, arg3])') + def myfuncfunc(context, mapping, args): + '''Explanation of this template function .... + ''' + pass + + The first string argument is used also in online help. + + 'templatefunc' instance in example above can be used to + decorate multiple functions. + + Decorated functions are registered automatically at loading + extension, if an instance named as 'templatefunc' is used for + decorating in extension. + + Otherwise, explicit 'templater.loadfunction()' is needed. + """ + _getname = _funcregistrarbase._parsefuncdecl
--- a/mercurial/repair.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/repair.py Sat Apr 16 18:06:48 2016 -0500 @@ -17,6 +17,7 @@ changegroup, error, exchange, + obsolete, util, ) @@ -46,7 +47,7 @@ bundletype = "HG10BZ" else: bundletype = "HG10UN" - return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs, + return bundle2.writebundle(repo.ui, cg, name, bundletype, vfs, compression=comp) def _collectfiles(repo, striprev): @@ -254,7 +255,8 @@ repolen = len(repo) for rev in repo: - ui.progress(_('changeset'), rev, total=repolen) + ui.progress(_('rebuilding'), rev, total=repolen, + unit=_('changesets')) ctx = repo[rev] for f in ctx.files(): @@ -271,7 +273,17 @@ if repo.store._exists(d): newentries.add(d) - ui.progress(_('changeset'), None) + ui.progress(_('rebuilding'), None) + + if 'treemanifest' in repo.requirements: # safe but unnecessary otherwise + for dir in util.dirs(seenfiles): + i = 'meta/%s/00manifest.i' % dir + d = 'meta/%s/00manifest.d' % dir + + if repo.store._exists(i): + newentries.add(i) + if repo.store._exists(d): + newentries.add(d) addcount = len(newentries - oldentries) removecount = len(oldentries - newentries) @@ -302,3 +314,32 @@ "ancestors(head() and not bookmark(%s)) - " "ancestors(bookmark() and not bookmark(%s))", mark, mark, mark) + +def deleteobsmarkers(obsstore, indices): + """Delete some obsmarkers from obsstore and return how many were deleted + + 'indices' is a list of ints which are the indices + of the markers to be deleted. + + Every invocation of this function completely rewrites the obsstore file, + skipping the markers we want to be removed. 
The new temporary file is + created, remaining markers are written there and on .close() this file + gets atomically renamed to obsstore, thus guaranteeing consistency.""" + if not indices: + # we don't want to rewrite the obsstore with the same content + return + + left = [] + current = obsstore._all + n = 0 + for i, m in enumerate(current): + if i in indices: + n += 1 + continue + left.append(m) + + newobsstorefile = obsstore.svfs('obsstore', 'w', atomictemp=True) + for bytes in obsolete.encodemarkers(left, True, obsstore._version): + newobsstorefile.write(bytes) + newobsstorefile.close() + return n
--- a/mercurial/repoview.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/repoview.py Sat Apr 16 18:06:48 2016 -0500 @@ -22,9 +22,14 @@ ) def hideablerevs(repo): - """Revisions candidates to be hidden + """Revision candidates to be hidden + + This is a standalone function to allow extensions to wrap it. - This is a standalone function to help extensions to wrap it.""" + Because we use the set of immutable changesets as a fallback subset in + branchmap (see mercurial.branchmap.subsettable), you cannot set "public" + changesets as "hideable". Doing so would break multiple code assertions and + lead to crashes.""" return obsolete.getrevs(repo, 'obsolete') def _getstatichidden(repo): @@ -315,7 +320,10 @@ revs = filterrevs(unfi, self.filtername) cl = self._clcache newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed) - if cl is not None and newkey != self._clcachekey: + # if cl.index is not unfiindex, unfi.changelog would be + # recreated, and our clcache refers to garbage object + if (cl is not None and + (cl.index is not unfiindex or newkey != self._clcachekey)): cl = None # could have been made None by the previous if if cl is None:
--- a/mercurial/revset.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/revset.py Sat Apr 16 18:06:48 2016 -0500 @@ -300,14 +300,6 @@ pos += 1 yield ('end', None, pos) -def parseerrordetail(inst): - """Compose error message from specified ParseError object - """ - if len(inst.args) > 1: - return _('at %s: %s') % (inst.args[1], inst.args[0]) - else: - return inst.args[0] - # helpers def getstring(x, err): @@ -332,40 +324,6 @@ return parser.buildargsdict(getlist(x), funcname, keys.split(), keyvaluenode='keyvalue', keynode='symbol') -def isvalidsymbol(tree): - """Examine whether specified ``tree`` is valid ``symbol`` or not - """ - return tree[0] == 'symbol' and len(tree) > 1 - -def getsymbol(tree): - """Get symbol name from valid ``symbol`` in ``tree`` - - This assumes that ``tree`` is already examined by ``isvalidsymbol``. - """ - return tree[1] - -def isvalidfunc(tree): - """Examine whether specified ``tree`` is valid ``func`` or not - """ - return tree[0] == 'func' and len(tree) > 1 and isvalidsymbol(tree[1]) - -def getfuncname(tree): - """Get function name from valid ``func`` in ``tree`` - - This assumes that ``tree`` is already examined by ``isvalidfunc``. - """ - return getsymbol(tree[1]) - -def getfuncargs(tree): - """Get list of function arguments from valid ``func`` in ``tree`` - - This assumes that ``tree`` is already examined by ``isvalidfunc``. 
- """ - if len(tree) > 2: - return getlist(tree[2]) - else: - return [] - def getset(repo, subset, x): if not x: raise error.ParseError(_("missing argument")) @@ -436,6 +394,9 @@ def andset(repo, subset, x, y): return getset(repo, getset(repo, subset, x), y) +def differenceset(repo, subset, x, y): + return getset(repo, subset, x) - getset(repo, subset, y) + def orset(repo, subset, *xs): assert xs if len(xs) == 1: @@ -479,58 +440,7 @@ # functions that just return a lot of changesets (like all) don't count here safesymbols = set() -class predicate(registrar.funcregistrar): - """Decorator to register revset predicate - - Usage:: - - @predicate('mypredicate(arg1, arg2[, arg3])') - def mypredicatefunc(repo, subset, x): - '''Explanation of this revset predicate .... - ''' - pass - - The first string argument of the constructor is used also in - online help. - - Use 'extpredicate' instead of this to register revset predicate in - extensions. - """ - table = symbols - formatdoc = "``%s``\n %s" - getname = registrar.funcregistrar.parsefuncdecl - - def __init__(self, decl, safe=False): - """'safe' indicates whether a predicate is safe for DoS attack - """ - super(predicate, self).__init__(decl) - self.safe = safe - - def extraaction(self, name, func): - if self.safe: - safesymbols.add(name) - -class extpredicate(registrar.delayregistrar): - """Decorator to register revset predicate in extensions - - Usage:: - - revsetpredicate = revset.extpredicate() - - @revsetpredicate('mypredicate(arg1, arg2[, arg3])') - def mypredicatefunc(repo, subset, x): - '''Explanation of this revset predicate .... - ''' - pass - - def uisetup(ui): - revsetpredicate.setup() - - 'revsetpredicate' instance above can be used to decorate multiple - functions, and 'setup()' on it registers all such functions at - once. 
- """ - registrar = predicate +predicate = registrar.revsetpredicate() @predicate('_destupdate') def _destupdate(repo, subset, x): @@ -541,8 +451,10 @@ @predicate('_destmerge') def _destmerge(repo, subset, x): # experimental revset for merge destination - getargs(x, 0, 0, _("_mergedefaultdest takes no arguments")) - return subset & baseset([destutil.destmerge(repo)]) + sourceset = None + if x is not None: + sourceset = getset(repo, fullreposet(repo), x) + return subset & baseset([destutil.destmerge(repo, sourceset=sourceset)]) @predicate('adds(pattern)', safe=True) def adds(repo, subset, x): @@ -624,7 +536,8 @@ # i18n: "author" is a keyword n = encoding.lower(getstring(x, _("author requires a string"))) kind, pattern, matcher = _substringmatcher(n) - return subset.filter(lambda x: matcher(encoding.lower(repo[x].user()))) + return subset.filter(lambda x: matcher(encoding.lower(repo[x].user())), + condrepr=('<user %r>', n)) @predicate('bisect(string)', safe=True) def bisect(repo, subset, x): @@ -710,19 +623,22 @@ # note: falls through to the revspec case if no branch with # this name exists and pattern kind is not specified explicitly if pattern in repo.branchmap(): - return subset.filter(lambda r: matcher(getbi(r)[0])) + return subset.filter(lambda r: matcher(getbi(r)[0]), + condrepr=('<branch %r>', b)) if b.startswith('literal:'): raise error.RepoLookupError(_("branch '%s' does not exist") % pattern) else: - return subset.filter(lambda r: matcher(getbi(r)[0])) + return subset.filter(lambda r: matcher(getbi(r)[0]), + condrepr=('<branch %r>', b)) s = getset(repo, fullreposet(repo), x) b = set() for r in s: b.add(getbi(r)[0]) c = s.__contains__ - return subset.filter(lambda r: c(r) or getbi(r)[0] in b) + return subset.filter(lambda r: c(r) or getbi(r)[0] in b, + condrepr=lambda: '<branch %r>' % sorted(b)) @predicate('bumped()', safe=True) def bumped(repo, subset, x): @@ -777,7 +693,7 @@ if m(f): return True - return subset.filter(matches) + return 
subset.filter(matches, condrepr=('<status[%r] %r>', field, pat)) def _children(repo, narrow, parentset): if not parentset: @@ -809,7 +725,8 @@ """ # i18n: "closed" is a keyword getargs(x, 0, 0, _("closed takes no arguments")) - return subset.filter(lambda r: repo[r].closesbranch()) + return subset.filter(lambda r: repo[r].closesbranch(), + condrepr='<branch closed>') @predicate('contains(pattern)') def contains(repo, subset, x): @@ -836,7 +753,7 @@ return True return False - return subset.filter(matches) + return subset.filter(matches, condrepr=('<contains %r>', pat)) @predicate('converted([id])', safe=True) def converted(repo, subset, x): @@ -858,7 +775,8 @@ source = repo[r].extra().get('convert_revision', None) return source is not None and (rev is None or source.startswith(rev)) - return subset.filter(lambda r: _matchvalue(r)) + return subset.filter(lambda r: _matchvalue(r), + condrepr=('<converted %r>', rev)) @predicate('date(interval)', safe=True) def date(repo, subset, x): @@ -867,7 +785,8 @@ # i18n: "date" is a keyword ds = getstring(x, _("date requires a string")) dm = util.matchdate(ds) - return subset.filter(lambda x: dm(repo[x].date()[0])) + return subset.filter(lambda x: dm(repo[x].date()[0]), + condrepr=('<date %r>', ds)) @predicate('desc(string)', safe=True) def desc(repo, subset, x): @@ -880,7 +799,7 @@ c = repo[x] return ds in encoding.lower(c.description()) - return subset.filter(matches) + return subset.filter(matches, condrepr=('<desc %r>', ds)) def _descendants(repo, subset, x, followfirst=False): roots = getset(repo, fullreposet(repo), x) @@ -955,7 +874,8 @@ r = src src = _getrevsource(repo, r) - return subset.filter(dests.__contains__) + return subset.filter(dests.__contains__, + condrepr=lambda: '<destination %r>' % sorted(dests)) @predicate('divergent()', safe=True) def divergent(repo, subset, x): @@ -1004,7 +924,8 @@ extra = repo[r].extra() return label in extra and (value is None or matcher(extra[label])) - return subset.filter(lambda r: 
_matchvalue(r)) + return subset.filter(lambda r: _matchvalue(r), + condrepr=('<extra[%r] %r>', label, value)) @predicate('filelog(pattern)', safe=True) def filelog(repo, subset, x): @@ -1086,13 +1007,14 @@ matcher = matchmod.match(repo.root, repo.getcwd(), [x], ctx=repo[None], default='path') + files = c.manifest().walk(matcher) + s = set() - for fname in c: - if matcher(fname): - fctx = c[fname] - s = s.union(set(c.rev() for c in fctx.ancestors(followfirst))) - # include the revision responsible for the most recent version - s.add(fctx.introrev()) + for fname in files: + fctx = c[fname] + s = s.union(set(c.rev() for c in fctx.ancestors(followfirst))) + # include the revision responsible for the most recent version + s.add(fctx.introrev()) else: s = _revancestors(repo, baseset([c.rev()]), followfirst) @@ -1141,7 +1063,7 @@ return True return False - return subset.filter(matches) + return subset.filter(matches, condrepr=('<grep %r>', gr.pattern)) @predicate('_matchfiles', safe=True) def _matchfiles(repo, subset, x): @@ -1157,13 +1079,11 @@ # initialized. Use 'd:' to set the default matching mode, default # to 'glob'. At most one 'r:' and 'd:' argument can be passed. 
- # i18n: "_matchfiles" is a keyword - l = getargs(x, 1, -1, _("_matchfiles requires at least one argument")) + l = getargs(x, 1, -1, "_matchfiles requires at least one argument") pats, inc, exc = [], [], [] rev, default = None, None for arg in l: - # i18n: "_matchfiles" is a keyword - s = getstring(arg, _("_matchfiles requires string arguments")) + s = getstring(arg, "_matchfiles requires string arguments") prefix, value = s[:2], s[2:] if prefix == 'p:': pats.append(value) @@ -1173,20 +1093,17 @@ exc.append(value) elif prefix == 'r:': if rev is not None: - # i18n: "_matchfiles" is a keyword - raise error.ParseError(_('_matchfiles expected at most one ' - 'revision')) + raise error.ParseError('_matchfiles expected at most one ' + 'revision') if value != '': # empty means working directory; leave rev as None rev = value elif prefix == 'd:': if default is not None: - # i18n: "_matchfiles" is a keyword - raise error.ParseError(_('_matchfiles expected at most one ' - 'default mode')) + raise error.ParseError('_matchfiles expected at most one ' + 'default mode') default = value else: - # i18n: "_matchfiles" is a keyword - raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix) + raise error.ParseError('invalid _matchfiles prefix: %s' % prefix) if not default: default = 'glob' @@ -1207,7 +1124,10 @@ return True return False - return subset.filter(matches) + return subset.filter(matches, + condrepr=('<matchfiles patterns=%r, include=%r ' + 'exclude=%r, default=%r, rev=%r>', + pats, inc, exc, default, rev)) @predicate('file(pattern)', safe=True) def hasfile(repo, subset, x): @@ -1268,7 +1188,7 @@ return any(kw in encoding.lower(t) for t in c.files() + [c.user(), c.description()]) - return subset.filter(matches) + return subset.filter(matches, condrepr=('<keyword %r>', kw)) @predicate('limit(set[, n[, offset]])', safe=True) def limit(repo, subset, x): @@ -1304,7 +1224,8 @@ break elif y in subset: result.append(y) - return baseset(result) + return 
baseset(result, datarepr=('<limit n=%d, offset=%d, %r, %r>', + lim, ofs, subset, os)) @predicate('last(set, [n])', safe=True) def last(repo, subset, x): @@ -1330,7 +1251,7 @@ break elif y in subset: result.append(y) - return baseset(result) + return baseset(result, datarepr=('<last n=%d, %r, %r>', lim, subset, os)) @predicate('max(set)', safe=True) def maxrev(repo, subset, x): @@ -1340,12 +1261,12 @@ try: m = os.max() if m in subset: - return baseset([m]) + return baseset([m], datarepr=('<max %r, %r>', subset, os)) except ValueError: # os.max() throws a ValueError when the collection is empty. # Same as python's max(). pass - return baseset() + return baseset(datarepr=('<max %r, %r>', subset, os)) @predicate('merge()', safe=True) def merge(repo, subset, x): @@ -1354,7 +1275,8 @@ # i18n: "merge" is a keyword getargs(x, 0, 0, _("merge takes no arguments")) cl = repo.changelog - return subset.filter(lambda r: cl.parentrevs(r)[1] != -1) + return subset.filter(lambda r: cl.parentrevs(r)[1] != -1, + condrepr='<merge>') @predicate('branchpoint()', safe=True) def branchpoint(repo, subset, x): @@ -1373,7 +1295,8 @@ for p in cl.parentrevs(r): if p >= baserev: parentscount[p - baserev] += 1 - return subset.filter(lambda r: parentscount[r - baserev] > 1) + return subset.filter(lambda r: parentscount[r - baserev] > 1, + condrepr='<branchpoint>') @predicate('min(set)', safe=True) def minrev(repo, subset, x): @@ -1383,12 +1306,12 @@ try: m = os.min() if m in subset: - return baseset([m]) + return baseset([m], datarepr=('<min %r, %r>', subset, os)) except ValueError: # os.min() throws a ValueError when the collection is empty. # Same as python's min(). 
pass - return baseset() + return baseset(datarepr=('<min %r, %r>', subset, os)) @predicate('modifies(pattern)', safe=True) def modifies(repo, subset, x): @@ -1630,7 +1553,8 @@ else: phase = repo._phasecache.phase condition = lambda r: phase(repo, r) == target - return subset.filter(condition, cache=False) + return subset.filter(condition, condrepr=('<phase %r>', target), + cache=False) @predicate('draft()', safe=True) def draft(repo, subset, x): @@ -1703,7 +1627,8 @@ phase = repo._phasecache.phase target = phases.public condition = lambda r: phase(repo, r) != target - return subset.filter(condition, cache=False) + return subset.filter(condition, condrepr=('<phase %r>', target), + cache=False) @predicate('public()', safe=True) def public(repo, subset, x): @@ -1713,7 +1638,8 @@ phase = repo._phasecache.phase target = phases.public condition = lambda r: phase(repo, r) == target - return subset.filter(condition, cache=False) + return subset.filter(condition, condrepr=('<phase %r>', target), + cache=False) @predicate('remote([id [,path]])', safe=True) def remote(repo, subset, x): @@ -1888,7 +1814,7 @@ return True return False - return subset.filter(matches) + return subset.filter(matches, condrepr=('<matching%r %r>', fields, revs)) @predicate('reverse(set)', safe=True) def reverse(repo, subset, x): @@ -1909,7 +1835,7 @@ if 0 <= p and p in s: return False return True - return subset & s.filter(filter) + return subset & s.filter(filter, condrepr='<roots>') @predicate('sort(set[, [-]key...])', safe=True) def sort(repo, subset, x): @@ -1981,6 +1907,7 @@ """ # i18n: "subrepo" is a keyword args = getargs(x, 0, 1, _('subrepo takes at most one argument')) + pat = None if len(args) != 0: pat = getstring(args[0], _("subrepo requires a pattern")) @@ -1996,7 +1923,7 @@ c = repo[x] s = repo.status(c.p1().node(), c.node(), match=m) - if len(args) == 0: + if pat is None: return s.added or s.modified or s.removed if s.added: @@ -2015,7 +1942,7 @@ return False - return 
subset.filter(matches) + return subset.filter(matches, condrepr=('<subrepo %r>', pat)) def _substringmatcher(pattern): kind, pattern, matcher = util.stringmatcher(pattern) @@ -2144,6 +2071,7 @@ "and": andset, "or": orset, "not": notset, + "difference": differenceset, "list": listset, "keyvalue": keyvaluepair, "func": func, @@ -2204,6 +2132,9 @@ if isonly(tb, ta): return w, ('func', ('symbol', 'only'), ('list', tb[2], ta[1][2])) + if tb is not None and tb[0] == 'not': + return wa, ('difference', ta, tb[1]) + if wa > wb: return w, (op, tb, ta) return w, (op, ta, tb) @@ -2288,27 +2219,6 @@ return w + wa, (op, x[1], ta) return 1, x -_aliasarg = ('func', ('symbol', '_aliasarg')) -def _getaliasarg(tree): - """If tree matches ('func', ('symbol', '_aliasarg'), ('string', X)) - return X, None otherwise. - """ - if (len(tree) == 3 and tree[:2] == _aliasarg - and tree[2][0] == 'string'): - return tree[2][1] - return None - -def _checkaliasarg(tree, known=None): - """Check tree contains no _aliasarg construct or only ones which - value is in known. Used to avoid alias placeholders injection. 
- """ - if isinstance(tree, tuple): - arg = _getaliasarg(tree) - if arg is not None and (not known or arg not in known): - raise error.UnknownIdentifier('_aliasarg', []) - for t in tree: - _checkaliasarg(t, known) - # the set of valid characters for the initial letter of symbols in # alias declarations and definitions _aliassyminitletters = set(c for c in [chr(i) for i in xrange(256)] @@ -2324,238 +2234,35 @@ return tokenize(program, lookup=lookup, syminitletters=_aliassyminitletters) -def _parsealiasdecl(decl): - """Parse alias declaration ``decl`` - - This returns ``(name, tree, args, errorstr)`` tuple: - - - ``name``: of declared alias (may be ``decl`` itself at error) - - ``tree``: parse result (or ``None`` at error) - - ``args``: list of alias argument names (or None for symbol declaration) - - ``errorstr``: detail about detected error (or None) - - >>> _parsealiasdecl('foo') - ('foo', ('symbol', 'foo'), None, None) - >>> _parsealiasdecl('$foo') - ('$foo', None, None, "'$' not for alias arguments") - >>> _parsealiasdecl('foo::bar') - ('foo::bar', None, None, 'invalid format') - >>> _parsealiasdecl('foo bar') - ('foo bar', None, None, 'at 4: invalid token') - >>> _parsealiasdecl('foo()') - ('foo', ('func', ('symbol', 'foo')), [], None) - >>> _parsealiasdecl('$foo()') - ('$foo()', None, None, "'$' not for alias arguments") - >>> _parsealiasdecl('foo($1, $2)') - ('foo', ('func', ('symbol', 'foo')), ['$1', '$2'], None) - >>> _parsealiasdecl('foo(bar_bar, baz.baz)') - ('foo', ('func', ('symbol', 'foo')), ['bar_bar', 'baz.baz'], None) - >>> _parsealiasdecl('foo($1, $2, nested($1, $2))') - ('foo($1, $2, nested($1, $2))', None, None, 'invalid argument list') - >>> _parsealiasdecl('foo(bar($1, $2))') - ('foo(bar($1, $2))', None, None, 'invalid argument list') - >>> _parsealiasdecl('foo("string")') - ('foo("string")', None, None, 'invalid argument list') - >>> _parsealiasdecl('foo($1, $2') - ('foo($1, $2', None, None, 'at 10: unexpected token: end') - >>> 
_parsealiasdecl('foo("string') - ('foo("string', None, None, 'at 5: unterminated string') - >>> _parsealiasdecl('foo($1, $2, $1)') - ('foo', None, None, 'argument names collide with each other') +def _parsealias(spec): + """Parse alias declaration/definition ``spec`` + + >>> _parsealias('foo($1)') + ('func', ('symbol', 'foo'), ('symbol', '$1')) + >>> _parsealias('foo bar') + Traceback (most recent call last): + ... + ParseError: ('invalid token', 4) """ p = parser.parser(elements) - try: - tree, pos = p.parse(_tokenizealias(decl)) - if (pos != len(decl)): - raise error.ParseError(_('invalid token'), pos) - tree = parser.simplifyinfixops(tree, ('list',)) - - if isvalidsymbol(tree): - # "name = ...." style - name = getsymbol(tree) - if name.startswith('$'): - return (decl, None, None, _("'$' not for alias arguments")) - return (name, ('symbol', name), None, None) - - if isvalidfunc(tree): - # "name(arg, ....) = ...." style - name = getfuncname(tree) - if name.startswith('$'): - return (decl, None, None, _("'$' not for alias arguments")) - args = [] - for arg in getfuncargs(tree): - if not isvalidsymbol(arg): - return (decl, None, None, _("invalid argument list")) - args.append(getsymbol(arg)) - if len(args) != len(set(args)): - return (name, None, None, - _("argument names collide with each other")) - return (name, ('func', ('symbol', name)), args, None) - - return (decl, None, None, _("invalid format")) - except error.ParseError as inst: - return (decl, None, None, parseerrordetail(inst)) - -def _parsealiasdefn(defn, args): - """Parse alias definition ``defn`` - - This function also replaces alias argument references in the - specified definition by ``_aliasarg(ARGNAME)``. - - ``args`` is a list of alias argument names, or None if the alias - is declared as a symbol. - - This returns "tree" as parsing result. 
- - >>> args = ['$1', '$2', 'foo'] - >>> print prettyformat(_parsealiasdefn('$1 or foo', args)) - (or - (func - ('symbol', '_aliasarg') - ('string', '$1')) - (func - ('symbol', '_aliasarg') - ('string', 'foo'))) - >>> try: - ... _parsealiasdefn('$1 or $bar', args) - ... except error.ParseError, inst: - ... print parseerrordetail(inst) - at 6: '$' not for alias arguments - >>> args = ['$1', '$10', 'foo'] - >>> print prettyformat(_parsealiasdefn('$10 or foobar', args)) - (or - (func - ('symbol', '_aliasarg') - ('string', '$10')) - ('symbol', 'foobar')) - >>> print prettyformat(_parsealiasdefn('"$1" or "foo"', args)) - (or - ('string', '$1') - ('string', 'foo')) - """ - def tokenizedefn(program, lookup=None): - if args: - argset = set(args) - else: - argset = set() - - for t, value, pos in _tokenizealias(program, lookup=lookup): - if t == 'symbol': - if value in argset: - # emulate tokenization of "_aliasarg('ARGNAME')": - # "_aliasarg()" is an unknown symbol only used separate - # alias argument placeholders from regular strings. - yield ('symbol', '_aliasarg', pos) - yield ('(', None, pos) - yield ('string', value, pos) - yield (')', None, pos) - continue - elif value.startswith('$'): - raise error.ParseError(_("'$' not for alias arguments"), - pos) - yield (t, value, pos) - - p = parser.parser(elements) - tree, pos = p.parse(tokenizedefn(defn)) - if pos != len(defn): + tree, pos = p.parse(_tokenizealias(spec)) + if pos != len(spec): raise error.ParseError(_('invalid token'), pos) return parser.simplifyinfixops(tree, ('list', 'or')) -class revsetalias(object): - # whether own `error` information is already shown or not. - # this avoids showing same warning multiple times at each `findaliases`. 
- warned = False - - def __init__(self, name, value): - '''Aliases like: - - h = heads(default) - b($1) = ancestors($1) - ancestors(default) - ''' - self.name, self.tree, self.args, self.error = _parsealiasdecl(name) - if self.error: - self.error = _('failed to parse the declaration of revset alias' - ' "%s": %s') % (self.name, self.error) - return - - try: - self.replacement = _parsealiasdefn(value, self.args) - # Check for placeholder injection - _checkaliasarg(self.replacement, self.args) - except error.ParseError as inst: - self.error = _('failed to parse the definition of revset alias' - ' "%s": %s') % (self.name, parseerrordetail(inst)) - -def _getalias(aliases, tree): - """If tree looks like an unexpanded alias, return it. Return None - otherwise. - """ - if isinstance(tree, tuple) and tree: - if tree[0] == 'symbol' and len(tree) == 2: - name = tree[1] - alias = aliases.get(name) - if alias and alias.args is None and alias.tree == tree: - return alias - if tree[0] == 'func' and len(tree) > 1: - if tree[1][0] == 'symbol' and len(tree[1]) == 2: - name = tree[1][1] - alias = aliases.get(name) - if alias and alias.args is not None and alias.tree == tree[:2]: - return alias - return None - -def _expandargs(tree, args): - """Replace _aliasarg instances with the substitution value of the - same name in args, recursively. - """ - if not tree or not isinstance(tree, tuple): - return tree - arg = _getaliasarg(tree) - if arg is not None: - return args[arg] - return tuple(_expandargs(t, args) for t in tree) - -def _expandaliases(aliases, tree, expanding, cache): - """Expand aliases in tree, recursively. - - 'aliases' is a dictionary mapping user defined aliases to - revsetalias objects. 
- """ - if not isinstance(tree, tuple): - # Do not expand raw strings - return tree - alias = _getalias(aliases, tree) - if alias is not None: - if alias.error: - raise error.Abort(alias.error) - if alias in expanding: - raise error.ParseError(_('infinite expansion of revset alias "%s" ' - 'detected') % alias.name) - expanding.append(alias) - if alias.name not in cache: - cache[alias.name] = _expandaliases(aliases, alias.replacement, - expanding, cache) - result = cache[alias.name] - expanding.pop() - if alias.args is not None: - l = getlist(tree[2]) - if len(l) != len(alias.args): - raise error.ParseError( - _('invalid number of arguments: %d') % len(l)) - l = [_expandaliases(aliases, a, [], cache) for a in l] - result = _expandargs(result, dict(zip(alias.args, l))) - else: - result = tuple(_expandaliases(aliases, t, expanding, cache) - for t in tree) - return result - -def findaliases(ui, tree, showwarning=None): - _checkaliasarg(tree) - aliases = {} - for k, v in ui.configitems('revsetalias'): - alias = revsetalias(k, v) - aliases[alias.name] = alias - tree = _expandaliases(aliases, tree, [], {}) +class _aliasrules(parser.basealiasrules): + """Parsing and expansion rule set of revset aliases""" + _section = _('revset alias') + _parse = staticmethod(_parsealias) + + @staticmethod + def _trygetfunc(tree): + if tree[0] == 'func' and tree[1][0] == 'symbol': + return tree[1][1], getlist(tree[2]) + +def expandaliases(ui, tree, showwarning=None): + aliases = _aliasrules.buildmap(ui.configitems('revsetalias')) + tree = _aliasrules.expand(aliases, tree) if showwarning: # warn about problematic (but not referred) aliases for name, alias in sorted(aliases.iteritems()): @@ -2625,7 +2332,7 @@ def _makematcher(ui, tree, repo): if ui: - tree = findaliases(ui, tree, showwarning=ui.warn) + tree = expandaliases(ui, tree, showwarning=ui.warn) tree = foldconcat(tree) weight, tree = optimize(tree, True) posttreebuilthook(tree, repo) @@ -2753,6 +2460,29 @@ funcs.add(tree[1][1]) 
return funcs +def _formatsetrepr(r): + """Format an optional printable representation of a set + + ======== ================================= + type(r) example + ======== ================================= + tuple ('<not %r>', other) + str '<branch closed>' + callable lambda: '<branch %r>' % sorted(b) + object other + ======== ================================= + """ + if r is None: + return '' + elif isinstance(r, tuple): + return r[0] % r[1:] + elif isinstance(r, str): + return r + elif callable(r): + return r() + else: + return repr(r) + class abstractsmartset(object): def __nonzero__(self): @@ -2833,7 +2563,7 @@ This is part of the mandatory API for smartset.""" if isinstance(other, fullreposet): return self - return self.filter(other.__contains__, cache=False) + return self.filter(other.__contains__, condrepr=other, cache=False) def __add__(self, other): """Returns a new object with the union of the two collections. @@ -2846,19 +2576,21 @@ This is part of the mandatory API for smartset.""" c = other.__contains__ - return self.filter(lambda r: not c(r), cache=False) - - def filter(self, condition, cache=True): + return self.filter(lambda r: not c(r), condrepr=('<not %r>', other), + cache=False) + + def filter(self, condition, condrepr=None, cache=True): """Returns this smartset filtered by condition as a new smartset. `condition` is a callable which takes a revision number and returns a - boolean. + boolean. Optional `condrepr` provides a printable representation of + the given `condition`. This is part of the mandatory API for smartset.""" # builtin cannot be cached. but do not needs to if cache and util.safehasattr(condition, 'func_code'): condition = util.cachefunc(condition) - return filteredset(self, condition) + return filteredset(self, condition, condrepr) class baseset(abstractsmartset): """Basic data structure that represents a revset and contains the basic @@ -2866,13 +2598,20 @@ Every method in this class should be implemented by any smartset class. 
""" - def __init__(self, data=()): + def __init__(self, data=(), datarepr=None): + """ + datarepr: a tuple of (format, obj, ...), a function or an object that + provides a printable representation of the given data. + """ + self._ascending = None if not isinstance(data, list): if isinstance(data, set): self._set = data + # set has no order we pick one for stability purpose + self._ascending = True data = list(data) self._list = data - self._ascending = None + self._datarepr = datarepr @util.propertycache def _set(self): @@ -2955,20 +2694,32 @@ def __repr__(self): d = {None: '', False: '-', True: '+'}[self._ascending] - return '<%s%s %r>' % (type(self).__name__, d, self._list) + s = _formatsetrepr(self._datarepr) + if not s: + l = self._list + # if _list has been built from a set, it might have a different + # order from one python implementation to another. + # We fallback to the sorted version for a stable output. + if self._ascending is not None: + l = self._asclist + s = repr(l) + return '<%s%s %s>' % (type(self).__name__, d, s) class filteredset(abstractsmartset): """Duck type for baseset class which iterates lazily over the revisions in the subset and contains a function which tests for membership in the revset """ - def __init__(self, subset, condition=lambda x: True): + def __init__(self, subset, condition=lambda x: True, condrepr=None): """ condition: a function that decide whether a revision in the subset belongs to the revset or not. + condrepr: a tuple of (format, obj, ...), a function or an object that + provides a printable representation of the given condition. """ self._subset = subset self._condition = condition + self._condrepr = condrepr def __contains__(self, x): return x in self._subset and self._condition(x) @@ -3011,7 +2762,10 @@ def __len__(self): # Basic implementation to be changed in future patches. 
- l = baseset([r for r in self]) + # until this gets improved, we use generator expression + # here, since list compr is free to call __len__ again + # causing infinite recursion + l = baseset(r for r in self) return len(l) def sort(self, reverse=False): @@ -3048,7 +2802,11 @@ return x def __repr__(self): - return '<%s %r>' % (type(self).__name__, self._subset) + xs = [repr(self._subset)] + s = _formatsetrepr(self._condrepr) + if s: + xs.append(s) + return '<%s %s>' % (type(self).__name__, ', '.join(xs)) def _iterordered(ascending, iter1, iter2): """produce an ordered iteration from two iterators with the same order @@ -3111,7 +2869,8 @@ iterate unsorted: >>> rs = addset(xs, ys) - >>> [x for x in rs] # without _genlist + >>> # (use generator because pypy could call len()) + >>> list(x for x in rs) # without _genlist [0, 3, 2, 5, 4] >>> assert not rs._genlist >>> len(rs) @@ -3122,7 +2881,8 @@ iterate ascending: >>> rs = addset(xs, ys, ascending=True) - >>> [x for x in rs], [x for x in rs.fastasc()] # without _asclist + >>> # (use generator because pypy could call len()) + >>> list(x for x in rs), list(x for x in rs.fastasc()) # without _asclist ([0, 2, 3, 4, 5], [0, 2, 3, 4, 5]) >>> assert not rs._asclist >>> len(rs) @@ -3133,7 +2893,8 @@ iterate descending: >>> rs = addset(xs, ys, ascending=False) - >>> [x for x in rs], [x for x in rs.fastdesc()] # without _asclist + >>> # (use generator because pypy could call len()) + >>> list(x for x in rs), list(x for x in rs.fastdesc()) # without _asclist ([5, 4, 3, 2, 0], [5, 4, 3, 2, 0]) >>> assert not rs._asclist >>> len(rs) @@ -3622,5 +3383,16 @@ p = q return '\n'.join(' ' * l + s for l, s in lines) +def loadpredicate(ui, extname, registrarobj): + """Load revset predicates from specified registrarobj + """ + for name, func in registrarobj._table.iteritems(): + symbols[name] = func + if func._safe: + safesymbols.add(name) + +# load built-in predicates explicitly to setup safesymbols +loadpredicate(None, None, predicate) + # 
tell hggettext to extract docstrings from these functions: i18nfunctions = symbols.values()
--- a/mercurial/scmutil.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/scmutil.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,7 +7,6 @@ from __future__ import absolute_import -import Queue import contextlib import errno import glob @@ -276,8 +275,8 @@ with self(path, mode=mode) as fp: return fp.readlines() - def write(self, path, data): - with self(path, 'wb') as fp: + def write(self, path, data, backgroundclose=False): + with self(path, 'wb', backgroundclose=backgroundclose) as fp: return fp.write(data) def writelines(self, path, data, mode='wb', notindexed=False): @@ -913,7 +912,7 @@ if opts.get('subrepos') or matchessubrepo(m, subpath): sub = wctx.sub(subpath) try: - submatch = matchmod.narrowmatcher(subpath, m) + submatch = matchmod.subdirmatcher(subpath, m) if sub.addremove(submatch, prefix, opts, dry_run, similarity): ret = 1 except error.LookupError: @@ -1320,7 +1319,7 @@ ui.debug('starting %d threads for background file closing\n' % threadcount) - self._queue = Queue.Queue(maxsize=maxqueue) + self._queue = util.queue(maxsize=maxqueue) self._running = True for i in range(threadcount): @@ -1352,7 +1351,7 @@ except Exception as e: # Stash so can re-raise from main thread later. self._threadexception = e - except Queue.Empty: + except util.empty: if not self._running: break
--- a/mercurial/setdiscovery.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/setdiscovery.py Sat Apr 16 18:06:48 2016 -0500 @@ -147,12 +147,11 @@ sample = _limitsample(ownheads, initialsamplesize) # indices between sample and externalized version must match sample = list(sample) - batch = remote.batch() - srvheadhashesref = batch.heads() - yesnoref = batch.known(dag.externalizeall(sample)) + batch = remote.iterbatch() + batch.heads() + batch.known(dag.externalizeall(sample)) batch.submit() - srvheadhashes = srvheadhashesref.value - yesno = yesnoref.value + srvheadhashes, yesno = batch.results() if cl.tip() == nullid: if srvheadhashes != [nullid]:
--- a/mercurial/similar.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/similar.py Sat Apr 16 18:06:48 2016 -0500 @@ -25,14 +25,15 @@ # Get hashes of removed files. hashes = {} for i, fctx in enumerate(removed): - repo.ui.progress(_('searching for exact renames'), i, total=numfiles) + repo.ui.progress(_('searching for exact renames'), i, total=numfiles, + unit=_('files')) h = util.sha1(fctx.data()).digest() hashes[h] = fctx # For each added file, see if it corresponds to a removed file. for i, fctx in enumerate(added): repo.ui.progress(_('searching for exact renames'), i + len(removed), - total=numfiles) + total=numfiles, unit=_('files')) h = util.sha1(fctx.data()).digest() if h in hashes: yield (hashes[h], fctx) @@ -49,7 +50,7 @@ copies = {} for i, r in enumerate(removed): repo.ui.progress(_('searching for similar files'), i, - total=len(removed)) + total=len(removed), unit=_('files')) # lazily load text @util.cachefunc
--- a/mercurial/simplemerge.py	Tue Mar 29 11:54:46 2016 -0500
+++ b/mercurial/simplemerge.py	Sat Apr 16 18:06:48 2016 -0500
@@ -92,7 +92,8 @@
                     mid_marker='=======',
                     end_marker='>>>>>>>',
                     base_marker=None,
-                    localorother=None):
+                    localorother=None,
+                    minimize=False):
         """Return merge in cvs-like form.
         """
         self.conflicts = False
@@ -109,6 +110,8 @@
         if name_base and base_marker:
             base_marker = base_marker + ' ' + name_base
         merge_regions = self.merge_regions()
+        if minimize:
+            merge_regions = self.minimize(merge_regions)
         for t in merge_regions:
             what = t[0]
             if what == 'unchanged':
@@ -195,6 +198,9 @@
         'a', start, end
              Non-clashing insertion from a[start:end]
 
+        'conflict', zstart, zend, astart, aend, bstart, bend
+            Conflict between a and b, with z as common ancestor
+
         Method is as follows:
 
         The two sequences align only on regions which match the base
@@ -266,6 +272,45 @@
                 ia = aend
                 ib = bend
 
+    def minimize(self, merge_regions):
+        """Trim conflict regions of lines where A and B sides match.
+
+        Lines where both A and B have made the same changes at the beginning
+        or the end of each merge region are eliminated from the conflict
+        region and are instead considered the same.
+        """
+        for region in merge_regions:
+            if region[0] != "conflict":
+                yield region
+                continue
+            issue, z1, z2, a1, a2, b1, b2 = region
+            alen = a2 - a1
+            blen = b2 - b1
+
+            # find matches at the front
+            ii = 0
+            while ii < alen and ii < blen and \
+                   self.a[a1 + ii] == self.b[b1 + ii]:
+                ii += 1
+            startmatches = ii
+
+            # find matches at the end
+            ii = 0
+            while ii < alen and ii < blen and \
+                   self.a[a2 - ii - 1] == self.b[b2 - ii - 1]:
+                ii += 1
+            endmatches = ii
+
+            if startmatches > 0:
+                yield 'same', a1, a1 + startmatches
+
+            yield ('conflict', z1, z2,
+                   a1 + startmatches, a2 - endmatches,
+                   b1 + startmatches, b2 - endmatches)
+
+            if endmatches > 0:
+                yield 'same', a2 - endmatches, a2
+
     def find_sync_regions(self):
         """Return a list of sync regions, where both descendants match the base. 
@@ -399,7 +444,10 @@ out = sys.stdout m3 = Merge3Text(basetext, localtext, othertext) - extrakwargs = {"localorother": opts.get("localorother", None)} + extrakwargs = { + "localorother": opts.get("localorother", None), + 'minimize': True, + } if mode == 'union': extrakwargs['start_marker'] = None extrakwargs['mid_marker'] = None @@ -407,6 +455,7 @@ elif name_base is not None: extrakwargs['base_marker'] = '|||||||' extrakwargs['name_base'] = name_base + extrakwargs['minimize'] = False for line in m3.merge_lines(name_a=name_a, name_b=name_b, **extrakwargs): out.write(line)
--- a/mercurial/sshpeer.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/sshpeer.py Sat Apr 16 18:06:48 2016 -0500 @@ -231,6 +231,31 @@ __del__ = cleanup + def _submitbatch(self, req): + cmds = [] + for op, argsdict in req: + args = ','.join('%s=%s' % (wireproto.escapearg(k), + wireproto.escapearg(v)) + for k, v in argsdict.iteritems()) + cmds.append('%s %s' % (op, args)) + rsp = self._callstream("batch", cmds=';'.join(cmds)) + available = self._getamount() + # TODO this response parsing is probably suboptimal for large + # batches with large responses. + toread = min(available, 1024) + work = rsp.read(toread) + available -= toread + chunk = work + while chunk: + while ';' in work: + one, work = work.split(';', 1) + yield wireproto.unescapearg(one) + toread = min(available, 1024) + chunk = rsp.read(toread) + available -= toread + work += chunk + yield wireproto.unescapearg(work) + def _callstream(self, cmd, **args): self.ui.debug("sending %s command\n" % cmd) self.pipeo.write("%s\n" % cmd) @@ -291,7 +316,7 @@ self._send("", flush=True) return self.pipei - def _recv(self): + def _getamount(self): l = self.pipei.readline() if l == '\n': self.readerr() @@ -299,10 +324,12 @@ self._abort(error.OutOfBandError(hint=msg)) self.readerr() try: - l = int(l) + return int(l) except ValueError: self._abort(error.ResponseError(_("unexpected response:"), l)) - return self.pipei.read(l) + + def _recv(self): + return self.pipei.read(self._getamount()) def _send(self, data, flush=False): self.pipeo.write("%d\n" % len(data))
--- a/mercurial/sslutil.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/sslutil.py Sat Apr 16 18:06:48 2016 -0500 @@ -14,58 +14,161 @@ import sys from .i18n import _ -from . import error, util +from . import ( + error, + util, +) + +# Python 2.7.9+ overhauled the built-in SSL/TLS features of Python. It added +# support for TLS 1.1, TLS 1.2, SNI, system CA stores, etc. These features are +# all exposed via the "ssl" module. +# +# Depending on the version of Python being used, SSL/TLS support is either +# modern/secure or legacy/insecure. Many operations in this module have +# separate code paths depending on support in Python. hassni = getattr(ssl, 'HAS_SNI', False) -_canloaddefaultcerts = False +try: + OP_NO_SSLv2 = ssl.OP_NO_SSLv2 + OP_NO_SSLv3 = ssl.OP_NO_SSLv3 +except AttributeError: + OP_NO_SSLv2 = 0x1000000 + OP_NO_SSLv3 = 0x2000000 + try: - ssl_context = ssl.SSLContext - _canloaddefaultcerts = util.safehasattr(ssl_context, 'load_default_certs') + # ssl.SSLContext was added in 2.7.9 and presence indicates modern + # SSL/TLS features are available. + SSLContext = ssl.SSLContext + modernssl = True + _canloaddefaultcerts = util.safehasattr(SSLContext, 'load_default_certs') +except AttributeError: + modernssl = False + _canloaddefaultcerts = False + + # We implement SSLContext using the interface from the standard library. + class SSLContext(object): + # ssl.wrap_socket gained the "ciphers" named argument in 2.7. + _supportsciphers = sys.version_info >= (2, 7) + + def __init__(self, protocol): + # From the public interface of SSLContext + self.protocol = protocol + self.check_hostname = False + self.options = 0 + self.verify_mode = ssl.CERT_NONE + + # Used by our implementation. 
+ self._certfile = None + self._keyfile = None + self._certpassword = None + self._cacerts = None + self._ciphers = None + + def load_cert_chain(self, certfile, keyfile=None, password=None): + self._certfile = certfile + self._keyfile = keyfile + self._certpassword = password + + def load_default_certs(self, purpose=None): + pass + + def load_verify_locations(self, cafile=None, capath=None, cadata=None): + if capath: + raise error.Abort('capath not supported') + if cadata: + raise error.Abort('cadata not supported') + + self._cacerts = cafile + + def set_ciphers(self, ciphers): + if not self._supportsciphers: + raise error.Abort('setting ciphers not supported') + + self._ciphers = ciphers + + def wrap_socket(self, socket, server_hostname=None, server_side=False): + # server_hostname is unique to SSLContext.wrap_socket and is used + # for SNI in that context. So there's nothing for us to do with it + # in this legacy code since we don't support SNI. - def wrapsocket(sock, keyfile, certfile, ui, cert_reqs=ssl.CERT_NONE, - ca_certs=None, serverhostname=None): - # Allow any version of SSL starting with TLSv1 and - # up. Note that specifying TLSv1 here prohibits use of - # newer standards (like TLSv1_2), so this is the right way - # to do this. Note that in the future it'd be better to - # support using ssl.create_default_context(), which sets - # up a bunch of things in smart ways (strong ciphers, - # protocol versions, etc) and is upgraded by Python - # maintainers for us, but that breaks too many things to - # do it in a hurry. 
- sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - sslcontext.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 - if certfile is not None: - def password(): - f = keyfile or certfile - return ui.getpass(_('passphrase for %s: ') % f, '') - sslcontext.load_cert_chain(certfile, keyfile, password) - sslcontext.verify_mode = cert_reqs - if ca_certs is not None: - sslcontext.load_verify_locations(cafile=ca_certs) - elif _canloaddefaultcerts: - sslcontext.load_default_certs() + args = { + 'keyfile': self._keyfile, + 'certfile': self._certfile, + 'server_side': server_side, + 'cert_reqs': self.verify_mode, + 'ssl_version': self.protocol, + 'ca_certs': self._cacerts, + } + + if self._supportsciphers: + args['ciphers'] = self._ciphers + + return ssl.wrap_socket(socket, **args) + +def wrapsocket(sock, keyfile, certfile, ui, cert_reqs=ssl.CERT_NONE, + ca_certs=None, serverhostname=None): + """Add SSL/TLS to a socket. + + This is a glorified wrapper for ``ssl.wrap_socket()``. It makes sane + choices based on what security options are available. + + In addition to the arguments supported by ``ssl.wrap_socket``, we allow + the following additional arguments: + + * serverhostname - The expected hostname of the remote server. If the + server (and client) support SNI, this tells the server which certificate + to use. 
+    """
+    if not serverhostname:
+        raise error.Abort('serverhostname argument required')
 
-        sslsocket = sslcontext.wrap_socket(sock, server_hostname=serverhostname)
-        # check if wrap_socket failed silently because socket had been
-        # closed
-        # - see http://bugs.python.org/issue13721
-        if not sslsocket.cipher():
-            raise error.Abort(_('ssl connection failed'))
-        return sslsocket
-except AttributeError:
-    def wrapsocket(sock, keyfile, certfile, ui, cert_reqs=ssl.CERT_NONE,
-                   ca_certs=None, serverhostname=None):
-        sslsocket = ssl.wrap_socket(sock, keyfile, certfile,
-                                    cert_reqs=cert_reqs, ca_certs=ca_certs,
-                                    ssl_version=ssl.PROTOCOL_TLSv1)
-        # check if wrap_socket failed silently because socket had been
-        # closed
-        # - see http://bugs.python.org/issue13721
-        if not sslsocket.cipher():
-            raise error.Abort(_('ssl connection failed'))
-        return sslsocket
+    # Despite its name, PROTOCOL_SSLv23 selects the highest protocol
+    # that both ends support, including TLS protocols. On legacy stacks,
+    # the highest it likely goes is TLS 1.0. On modern stacks, it can
+    # support TLS 1.2.
+    #
+    # The PROTOCOL_TLSv* constants select a specific TLS version
+    # only (as opposed to multiple versions). So the method for
+    # supporting multiple TLS versions is to use PROTOCOL_SSLv23 and
+    # disable protocols via SSLContext.options and OP_NO_* constants.
+    # However, SSLContext.options doesn't work unless we have the
+    # full/real SSLContext available to us.
+    #
+    # SSLv2 and SSLv3 are broken. We ban them outright.
+    if modernssl:
+        protocol = ssl.PROTOCOL_SSLv23
+    else:
+        protocol = ssl.PROTOCOL_TLSv1
+
+    # TODO use ssl.create_default_context() on modernssl.
+    sslcontext = SSLContext(protocol)
+
+    # This is a no-op on old Python.
+    sslcontext.options |= OP_NO_SSLv2 | OP_NO_SSLv3
+
+    # This still works on our fake SSLContext. 
+ sslcontext.verify_mode = cert_reqs + + if certfile is not None: + def password(): + f = keyfile or certfile + return ui.getpass(_('passphrase for %s: ') % f, '') + sslcontext.load_cert_chain(certfile, keyfile, password) + + if ca_certs is not None: + sslcontext.load_verify_locations(cafile=ca_certs) + else: + # This is a no-op on old Python. + sslcontext.load_default_certs() + + sslsocket = sslcontext.wrap_socket(sock, server_hostname=serverhostname) + # check if wrap_socket failed silently because socket had been + # closed + # - see http://bugs.python.org/issue13721 + if not sslsocket.cipher(): + raise error.Abort(_('ssl connection failed')) + return sslsocket def _verifycert(cert, hostname): '''Verify that cert (in socket.getpeercert() format) matches hostname. @@ -161,8 +264,6 @@ def __call__(self, sock, strict=False): host = self.host - cacerts = self.ui.config('web', 'cacerts') - hostfingerprint = self.ui.config('hostfingerprints', host) if not sock.cipher(): # work around http://bugs.python.org/issue13721 raise error.Abort(_('%s ssl connection error') % host) @@ -175,18 +276,31 @@ if not peercert: raise error.Abort(_('%s certificate error: ' 'no certificate received') % host) + + # If a certificate fingerprint is pinned, use it and only it to + # validate the remote cert. 
+ hostfingerprints = self.ui.configlist('hostfingerprints', host) peerfingerprint = util.sha1(peercert).hexdigest() nicefingerprint = ":".join([peerfingerprint[x:x + 2] for x in xrange(0, len(peerfingerprint), 2)]) - if hostfingerprint: - if peerfingerprint.lower() != \ - hostfingerprint.replace(':', '').lower(): + if hostfingerprints: + fingerprintmatch = False + for hostfingerprint in hostfingerprints: + if peerfingerprint.lower() == \ + hostfingerprint.replace(':', '').lower(): + fingerprintmatch = True + break + if not fingerprintmatch: raise error.Abort(_('certificate for %s has unexpected ' 'fingerprint %s') % (host, nicefingerprint), hint=_('check hostfingerprint configuration')) self.ui.debug('%s certificate matched fingerprint %s\n' % (host, nicefingerprint)) - elif cacerts != '!': + return + + # No pinned fingerprint. Establish trust by looking at the CAs. + cacerts = self.ui.config('web', 'cacerts') + if cacerts != '!': msg = _verifycert(peercert2, host) if msg: raise error.Abort(_('%s certificate error: %s') % (host, msg),
--- a/mercurial/statichttprepo.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/statichttprepo.py Sat Apr 16 18:06:48 2016 -0500 @@ -11,8 +11,6 @@ import errno import os -import urllib -import urllib2 from .i18n import _ from . import ( @@ -28,6 +26,9 @@ util, ) +urlerr = util.urlerr +urlreq = util.urlreq + class httprangereader(object): def __init__(self, url, opener): # we assume opener has HTTPRangeHandler @@ -45,7 +46,7 @@ def seek(self, pos): self.pos = pos def read(self, bytes=None): - req = urllib2.Request(self.url) + req = urlreq.request(self.url) end = '' if bytes: end = self.pos + bytes - 1 @@ -56,10 +57,10 @@ f = self.opener.open(req) data = f.read() code = f.code - except urllib2.HTTPError as inst: + except urlerr.httperror as inst: num = inst.code == 404 and errno.ENOENT or None raise IOError(num, inst) - except urllib2.URLError as inst: + except urlerr.urlerror as inst: raise IOError(None, inst.reason[1]) if code == 200: @@ -92,7 +93,7 @@ def __call__(self, path, mode='r', *args, **kw): if mode not in ('r', 'rb'): raise IOError('Permission denied') - f = "/".join((self.base, urllib.quote(path))) + f = "/".join((self.base, urlreq.quote(path))) return httprangereader(f, urlopener) def join(self, path):
--- a/mercurial/store.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/store.py Sat Apr 16 18:06:48 2016 -0500 @@ -290,7 +290,7 @@ mode = None return mode -_data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i' +_data = ('data meta 00manifest.d 00manifest.i 00changelog.d 00changelog.i' ' phaseroots obsstore') class basicstore(object): @@ -330,7 +330,7 @@ return l def datafiles(self): - return self._walk('data', True) + return self._walk('data', True) + self._walk('meta', True) def topfiles(self): # yield manifest before changelog @@ -378,7 +378,7 @@ self.opener = self.vfs def datafiles(self): - for a, b, size in self._walk('data', True): + for a, b, size in super(encodedstore, self).datafiles(): try: a = decodefilename(a) except KeyError: @@ -460,7 +460,8 @@ self.encode = encode def __call__(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rb') and path.startswith('data/'): + if mode not in ('r', 'rb') and (path.startswith('data/') or + path.startswith('meta/')): self.fncache.add(path) return self.vfs(self.encode(path), mode, *args, **kw) @@ -504,7 +505,7 @@ raise def copylist(self): - d = ('data dh fncache phaseroots obsstore' + d = ('data meta dh fncache phaseroots obsstore' ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i') return (['requires', '00changelog.i'] + ['store/' + f for f in d.split()])
--- a/mercurial/streamclone.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/streamclone.py Sat Apr 16 18:06:48 2016 -0500 @@ -271,11 +271,12 @@ assert compression == 'UN' seen = 0 - repo.ui.progress(_('bundle'), 0, total=bytecount) + repo.ui.progress(_('bundle'), 0, total=bytecount, unit=_('bytes')) for chunk in it: seen += len(chunk) - repo.ui.progress(_('bundle'), seen, total=bytecount) + repo.ui.progress(_('bundle'), seen, total=bytecount, + unit=_('bytes')) yield chunk repo.ui.progress(_('bundle'), None) @@ -295,7 +296,7 @@ repo.ui.status(_('%d files to transfer, %s of data\n') % (filecount, util.bytecount(bytecount))) handled_bytes = 0 - repo.ui.progress(_('clone'), 0, total=bytecount) + repo.ui.progress(_('clone'), 0, total=bytecount, unit=_('bytes')) start = time.time() with repo.transaction('clone'): @@ -318,7 +319,7 @@ for chunk in util.filechunkiter(fp, limit=size): handled_bytes += len(chunk) repo.ui.progress(_('clone'), handled_bytes, - total=bytecount) + total=bytecount, unit=_('bytes')) ofp.write(chunk) # Writing straight to files circumvented the inmemory caches
--- a/mercurial/subrepo.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/subrepo.py Sat Apr 16 18:06:48 2016 -0500 @@ -575,11 +575,13 @@ def forget(self, match, prefix): return ([], []) - def removefiles(self, matcher, prefix, after, force, subrepos): + def removefiles(self, matcher, prefix, after, force, subrepos, warnings): """remove the matched files from the subrepository and the filesystem, possibly by force and/or after the file has been removed from the filesystem. Return 0 on success, 1 on any warning. """ + warnings.append(_("warning: removefiles not implemented (%s)") + % self._path) return 1 def revert(self, substate, *pats, **opts): @@ -774,7 +776,7 @@ ctx = self._repo[rev] for subpath in ctx.substate: s = subrepo(ctx, subpath, True) - submatch = matchmod.narrowmatcher(subpath, match) + submatch = matchmod.subdirmatcher(subpath, match) total += s.archive(archiver, prefix + self._path + '/', submatch) return total @@ -991,7 +993,7 @@ self.wvfs.reljoin(prefix, self._path), True) @annotatesubrepoerror - def removefiles(self, matcher, prefix, after, force, subrepos): + def removefiles(self, matcher, prefix, after, force, subrepos, warnings): return cmdutil.remove(self.ui, self._repo, matcher, self.wvfs.reljoin(prefix, self._path), after, force, subrepos) @@ -1385,6 +1387,8 @@ self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands))) if env is None: env = os.environ.copy() + # disable localization for Git output (issue5176) + env['LC_ALL'] = 'C' # fix for Git CVE-2015-7545 if 'GIT_ALLOW_PROTOCOL' not in env: env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
--- a/mercurial/templatefilters.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/templatefilters.py Sat Apr 16 18:06:48 2016 -0500 @@ -11,18 +11,30 @@ import os import re import time -import urllib from . import ( encoding, hbisect, node, + registrar, templatekw, util, ) +urlerr = util.urlerr +urlreq = util.urlreq + +# filters are callables like: +# fn(obj) +# with: +# obj - object to be filtered (text, date, list and so on) +filters = {} + +templatefilter = registrar.templatefilter(filters) + +@templatefilter('addbreaks') def addbreaks(text): - """:addbreaks: Any text. Add an XHTML "<br />" tag before the end of + """Any text. Add an XHTML "<br />" tag before the end of every line except the last. """ return text.replace('\n', '<br/>\n') @@ -35,8 +47,9 @@ ("minute", 60, 'm'), ("second", 1, 's')] +@templatefilter('age') def age(date, abbrev=False): - """:age: Date. Returns a human-readable date/time difference between the + """Date. Returns a human-readable date/time difference between the given date/time and the current date/time. """ @@ -69,20 +82,23 @@ return '%s from now' % fmt(t, n, a) return '%s ago' % fmt(t, n, a) +@templatefilter('basename') def basename(path): - """:basename: Any text. Treats the text as a path, and returns the last + """Any text. Treats the text as a path, and returns the last component of the path after splitting by the path separator (ignoring trailing separators). For example, "foo/bar/baz" becomes "baz" and "foo/bar//" becomes "bar". """ return os.path.basename(path) +@templatefilter('count') def count(i): - """:count: List or text. Returns the length as an integer.""" + """List or text. Returns the length as an integer.""" return len(i) +@templatefilter('domain') def domain(author): - """:domain: Any text. Finds the first string that looks like an email + """Any text. Finds the first string that looks like an email address, and extracts just the domain component. Example: ``User <user@example.com>`` becomes ``example.com``. 
""" @@ -95,15 +111,17 @@ author = author[:f] return author +@templatefilter('email') def email(text): - """:email: Any text. Extracts the first string that looks like an email + """Any text. Extracts the first string that looks like an email address. Example: ``User <user@example.com>`` becomes ``user@example.com``. """ return util.email(text) +@templatefilter('escape') def escape(text): - """:escape: Any text. Replaces the special XML/XHTML characters "&", "<" + """Any text. Replaces the special XML/XHTML characters "&", "<" and ">" with XML entities, and filters out NUL characters. """ return cgi.escape(text.replace('\0', ''), True) @@ -137,41 +155,48 @@ width, initindent, hangindent) + rest for para, rest in findparas()]) +@templatefilter('fill68') def fill68(text): - """:fill68: Any text. Wraps the text to fit in 68 columns.""" + """Any text. Wraps the text to fit in 68 columns.""" return fill(text, 68) +@templatefilter('fill76') def fill76(text): - """:fill76: Any text. Wraps the text to fit in 76 columns.""" + """Any text. Wraps the text to fit in 76 columns.""" return fill(text, 76) +@templatefilter('firstline') def firstline(text): - """:firstline: Any text. Returns the first line of text.""" + """Any text. Returns the first line of text.""" try: return text.splitlines(True)[0].rstrip('\r\n') except IndexError: return '' +@templatefilter('hex') def hexfilter(text): - """:hex: Any text. Convert a binary Mercurial node identifier into + """Any text. Convert a binary Mercurial node identifier into its long hexadecimal representation. """ return node.hex(text) +@templatefilter('hgdate') def hgdate(text): - """:hgdate: Date. Returns the date as a pair of numbers: "1157407993 + """Date. Returns the date as a pair of numbers: "1157407993 25200" (Unix timestamp, timezone offset). """ return "%d %d" % text +@templatefilter('isodate') def isodate(text): - """:isodate: Date. Returns the date in ISO 8601 format: "2009-08-18 13:00 + """Date. 
Returns the date in ISO 8601 format: "2009-08-18 13:00 +0200". """ return util.datestr(text, '%Y-%m-%d %H:%M %1%2') +@templatefilter('isodatesec') def isodatesec(text): - """:isodatesec: Date. Returns the date in ISO 8601 format, including + """Date. Returns the date in ISO 8601 format, including seconds: "2009-08-18 13:00:13 +0200". See also the rfc3339date filter. """ @@ -192,20 +217,14 @@ yield '\n' return "".join(indenter()) +@templatefilter('json') def json(obj): if obj is None or obj is False or obj is True: return {None: 'null', False: 'false', True: 'true'}[obj] elif isinstance(obj, int) or isinstance(obj, float): return str(obj) - elif isinstance(obj, encoding.localstr): - u = encoding.fromlocal(obj).decode('utf-8') # can round-trip - return '"%s"' % jsonescape(u) elif isinstance(obj, str): - # no encoding.fromlocal() because it may abort if obj can't be decoded - u = unicode(obj, encoding.encoding, 'replace') - return '"%s"' % jsonescape(u) - elif isinstance(obj, unicode): - return '"%s"' % jsonescape(obj) + return '"%s"' % encoding.jsonescape(obj, paranoid=True) elif util.safehasattr(obj, 'keys'): out = [] for k, v in sorted(obj.iteritems()): @@ -222,38 +241,25 @@ else: raise TypeError('cannot encode type %s' % obj.__class__.__name__) -def _uescape(c): - if 0x20 <= ord(c) < 0x80: - return c - else: - return '\\u%04x' % ord(c) - -_escapes = [ - ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'), - ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'), - ('<', '\\u003c'), ('>', '\\u003e'), ('\0', '\\u0000') -] - -def jsonescape(s): - for k, v in _escapes: - s = s.replace(k, v) - return ''.join(_uescape(c) for c in s) - +@templatefilter('lower') def lower(text): - """:lower: Any text. Converts the text to lowercase.""" + """Any text. Converts the text to lowercase.""" return encoding.lower(text) +@templatefilter('nonempty') def nonempty(str): - """:nonempty: Any text. Returns '(none)' if the string is empty.""" + """Any text. 
Returns '(none)' if the string is empty.""" return str or "(none)" +@templatefilter('obfuscate') def obfuscate(text): - """:obfuscate: Any text. Returns the input text rendered as a sequence of + """Any text. Returns the input text rendered as a sequence of XML entities. """ text = unicode(text, encoding.encoding, 'replace') return ''.join(['&#%d;' % ord(c) for c in text]) +@templatefilter('permissions') def permissions(flags): if "l" in flags: return "lrwxrwxrwx" @@ -261,8 +267,9 @@ return "-rwxr-xr-x" return "-rw-r--r--" +@templatefilter('person') def person(author): - """:person: Any text. Returns the name before an email address, + """Any text. Returns the name before an email address, interpreting it as per RFC 5322. >>> person('foo@bar') @@ -288,52 +295,61 @@ f = author.find('@') return author[:f].replace('.', ' ') +@templatefilter('revescape') def revescape(text): - """:revescape: Any text. Escapes all "special" characters, except @. + """Any text. Escapes all "special" characters, except @. Forward slashes are escaped twice to prevent web servers from prematurely unescaping them. For example, "@foo bar/baz" becomes "@foo%20bar%252Fbaz". """ - return urllib.quote(text, safe='/@').replace('/', '%252F') + return urlreq.quote(text, safe='/@').replace('/', '%252F') +@templatefilter('rfc3339date') def rfc3339date(text): - """:rfc3339date: Date. Returns a date using the Internet date format + """Date. Returns a date using the Internet date format specified in RFC 3339: "2009-08-18T13:00:13+02:00". """ return util.datestr(text, "%Y-%m-%dT%H:%M:%S%1:%2") +@templatefilter('rfc822date') def rfc822date(text): - """:rfc822date: Date. Returns a date using the same format used in email + """Date. Returns a date using the same format used in email headers: "Tue, 18 Aug 2009 13:00:13 +0200". """ return util.datestr(text, "%a, %d %b %Y %H:%M:%S %1%2") +@templatefilter('short') def short(text): - """:short: Changeset hash. 
Returns the short form of a changeset hash, + """Changeset hash. Returns the short form of a changeset hash, i.e. a 12 hexadecimal digit string. """ return text[:12] +@templatefilter('shortbisect') def shortbisect(text): - """:shortbisect: Any text. Treats `text` as a bisection status, and + """Any text. Treats `text` as a bisection status, and returns a single-character representing the status (G: good, B: bad, S: skipped, U: untested, I: ignored). Returns single space if `text` is not a valid bisection status. """ return hbisect.shortlabel(text) or ' ' +@templatefilter('shortdate') def shortdate(text): - """:shortdate: Date. Returns a date like "2006-09-18".""" + """Date. Returns a date like "2006-09-18".""" return util.shortdate(text) +@templatefilter('splitlines') def splitlines(text): - """:splitlines: Any text. Split text into a list of lines.""" + """Any text. Split text into a list of lines.""" return templatekw.showlist('line', text.splitlines(), 'lines') +@templatefilter('stringescape') def stringescape(text): return text.encode('string_escape') +@templatefilter('stringify') def stringify(thing): - """:stringify: Any type. Turns the value into text by converting values into + """Any type. Turns the value into text by converting values into text and concatenating them. """ if util.safehasattr(thing, '__iter__') and not isinstance(thing, str): @@ -342,8 +358,9 @@ return "" return str(thing) +@templatefilter('stripdir') def stripdir(text): - """:stripdir: Treat the text as path and strip a directory level, if + """Treat the text as path and strip a directory level, if possible. For example, "foo" and "foo/bar" becomes "foo". """ dir = os.path.dirname(text) @@ -352,31 +369,42 @@ else: return dir +@templatefilter('tabindent') def tabindent(text): - """:tabindent: Any text. Returns the text, with every non-empty line + """Any text. Returns the text, with every non-empty line except the first starting with a tab character. 
""" return indent(text, '\t') +@templatefilter('upper') def upper(text): - """:upper: Any text. Converts the text to uppercase.""" + """Any text. Converts the text to uppercase.""" return encoding.upper(text) +@templatefilter('urlescape') def urlescape(text): - """:urlescape: Any text. Escapes all "special" characters. For example, + """Any text. Escapes all "special" characters. For example, "foo bar" becomes "foo%20bar". """ - return urllib.quote(text) + return urlreq.quote(text) +@templatefilter('user') def userfilter(text): - """:user: Any text. Returns a short representation of a user name or email + """Any text. Returns a short representation of a user name or email address.""" return util.shortuser(text) +@templatefilter('emailuser') def emailuser(text): - """:emailuser: Any text. Returns the user portion of an email address.""" + """Any text. Returns the user portion of an email address.""" return util.emailuser(text) +@templatefilter('utf8') +def utf8(text): + """Any text. Converts from the local character encoding to UTF-8.""" + return encoding.fromlocal(text) + +@templatefilter('xmlescape') def xmlescape(text): text = (text .replace('&', '&') @@ -386,46 +414,6 @@ .replace("'", ''')) # ' invalid in HTML return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text) -filters = { - "addbreaks": addbreaks, - "age": age, - "basename": basename, - "count": count, - "domain": domain, - "email": email, - "escape": escape, - "fill68": fill68, - "fill76": fill76, - "firstline": firstline, - "hex": hexfilter, - "hgdate": hgdate, - "isodate": isodate, - "isodatesec": isodatesec, - "json": json, - "jsonescape": jsonescape, - "lower": lower, - "nonempty": nonempty, - "obfuscate": obfuscate, - "permissions": permissions, - "person": person, - "revescape": revescape, - "rfc3339date": rfc3339date, - "rfc822date": rfc822date, - "short": short, - "shortbisect": shortbisect, - "shortdate": shortdate, - "splitlines": splitlines, - "stringescape": stringescape, - "stringify": 
stringify, - "stripdir": stripdir, - "tabindent": tabindent, - "upper": upper, - "urlescape": urlescape, - "user": userfilter, - "emailuser": emailuser, - "xmlescape": xmlescape, -} - def websub(text, websubtable): """:websub: Any text. Only applies to hgweb. Applies the regular expression replacements defined in the websub section. @@ -435,5 +423,11 @@ text = regexp.sub(format, text) return text +def loadfilter(ui, extname, registrarobj): + """Load template filter from specified registrarobj + """ + for name, func in registrarobj._table.iteritems(): + filters[name] = func + # tell hggettext to extract docstrings from these functions: i18nfunctions = filters.values()
--- a/mercurial/templatekw.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/templatekw.py Sat Apr 16 18:06:48 2016 -0500 @@ -9,9 +9,11 @@ from .node import hex, nullid from . import ( + encoding, error, hbisect, patch, + registrar, scmutil, util, ) @@ -195,23 +197,38 @@ return getrenamed +# keywords are callables like: +# fn(repo, ctx, templ, cache, revcache, **args) +# with: +# repo - current repository instance +# ctx - the changectx being displayed +# templ - the templater instance +# cache - a cache dictionary for the whole templater run +# revcache - a cache dictionary for the current revision +keywords = {} +templatekeyword = registrar.templatekeyword(keywords) + +@templatekeyword('author') def showauthor(repo, ctx, templ, **args): - """:author: String. The unmodified author of the changeset.""" + """String. The unmodified author of the changeset.""" return ctx.user() +@templatekeyword('bisect') def showbisect(repo, ctx, templ, **args): - """:bisect: String. The changeset bisection status.""" + """String. The changeset bisection status.""" return hbisect.label(repo, ctx.node()) +@templatekeyword('branch') def showbranch(**args): - """:branch: String. The name of the branch on which the changeset was + """String. The name of the branch on which the changeset was committed. """ return args['ctx'].branch() +@templatekeyword('branches') def showbranches(**args): - """:branches: List of strings. The name of the branch on which the + """List of strings. The name of the branch on which the changeset was committed. Will be empty if the branch name was default. (DEPRECATED) """ @@ -220,8 +237,9 @@ return showlist('branch', [branch], plural='branches', **args) return showlist('branch', [], plural='branches', **args) +@templatekeyword('bookmarks') def showbookmarks(**args): - """:bookmarks: List of strings. Any bookmarks associated with the + """List of strings. Any bookmarks associated with the changeset. Also sets 'active', the name of the active bookmark. 
""" repo = args['ctx']._repo @@ -231,44 +249,56 @@ f = _showlist('bookmark', bookmarks, **args) return _hybrid(f, bookmarks, makemap, lambda x: x['bookmark']) +@templatekeyword('children') def showchildren(**args): - """:children: List of strings. The children of the changeset.""" + """List of strings. The children of the changeset.""" ctx = args['ctx'] childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()] return showlist('children', childrevs, element='child', **args) # Deprecated, but kept alive for help generation a purpose. +@templatekeyword('currentbookmark') def showcurrentbookmark(**args): - """:currentbookmark: String. The active bookmark, if it is + """String. The active bookmark, if it is associated with the changeset (DEPRECATED)""" return showactivebookmark(**args) +@templatekeyword('activebookmark') def showactivebookmark(**args): - """:activebookmark: String. The active bookmark, if it is + """String. The active bookmark, if it is associated with the changeset""" active = args['repo']._activebookmark if active and active in args['ctx'].bookmarks(): return active return '' +@templatekeyword('date') def showdate(repo, ctx, templ, **args): - """:date: Date information. The date when the changeset was committed.""" + """Date information. The date when the changeset was committed.""" return ctx.date() +@templatekeyword('desc') def showdescription(repo, ctx, templ, **args): - """:desc: String. The text of the changeset description.""" - return ctx.description().strip() + """String. The text of the changeset description.""" + s = ctx.description() + if isinstance(s, encoding.localstr): + # try hard to preserve utf-8 bytes + return encoding.tolocal(encoding.fromlocal(s).strip()) + else: + return s.strip() +@templatekeyword('diffstat') def showdiffstat(repo, ctx, templ, **args): - """:diffstat: String. Statistics of changes with the following format: + """String. 
Statistics of changes with the following format: "modified files: +added/-removed lines" """ stats = patch.diffstatdata(util.iterlines(ctx.diff())) maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats) return '%s: +%s/-%s' % (len(stats), adds, removes) +@templatekeyword('extras') def showextras(**args): - """:extras: List of dicts with key, value entries of the 'extras' + """List of dicts with key, value entries of the 'extras' field of this changeset.""" extras = args['ctx'].extra() extras = util.sortdict((k, extras[k]) for k in sorted(extras)) @@ -278,14 +308,16 @@ return _hybrid(f, extras, makemap, lambda x: '%s=%s' % (x['key'], x['value'])) +@templatekeyword('file_adds') def showfileadds(**args): - """:file_adds: List of strings. Files added by this changeset.""" + """List of strings. Files added by this changeset.""" repo, ctx, revcache = args['repo'], args['ctx'], args['revcache'] return showlist('file_add', getfiles(repo, ctx, revcache)[1], element='file', **args) +@templatekeyword('file_copies') def showfilecopies(**args): - """:file_copies: List of strings. Files copied in this changeset with + """List of strings. Files copied in this changeset with their sources. """ cache, ctx = args['cache'], args['ctx'] @@ -310,8 +342,9 @@ # showfilecopiesswitch() displays file copies only if copy records are # provided before calling the templater, usually with a --copies # command line switch. +@templatekeyword('file_copies_switch') def showfilecopiesswitch(**args): - """:file_copies_switch: List of strings. Like "file_copies" but displayed + """List of strings. Like "file_copies" but displayed only if the --copied switch is set. """ copies = args['revcache'].get('copies') or [] @@ -322,26 +355,30 @@ return _hybrid(f, copies, makemap, lambda x: '%s (%s)' % (x['name'], x['source'])) +@templatekeyword('file_dels') def showfiledels(**args): - """:file_dels: List of strings. Files removed by this changeset.""" + """List of strings. 
Files removed by this changeset.""" repo, ctx, revcache = args['repo'], args['ctx'], args['revcache'] return showlist('file_del', getfiles(repo, ctx, revcache)[2], element='file', **args) +@templatekeyword('file_mods') def showfilemods(**args): - """:file_mods: List of strings. Files modified by this changeset.""" + """List of strings. Files modified by this changeset.""" repo, ctx, revcache = args['repo'], args['ctx'], args['revcache'] return showlist('file_mod', getfiles(repo, ctx, revcache)[0], element='file', **args) +@templatekeyword('files') def showfiles(**args): - """:files: List of strings. All files modified, added, or removed by this + """List of strings. All files modified, added, or removed by this changeset. """ return showlist('file', args['ctx'].files(), **args) +@templatekeyword('graphnode') def showgraphnode(repo, ctx, **args): - """:graphnode: String. The character representing the changeset node in + """String. The character representing the changeset node in an ASCII revision graph""" wpnodes = repo.dirstate.parents() if wpnodes[1] == nullid: @@ -355,8 +392,9 @@ else: return 'o' +@templatekeyword('latesttag') def showlatesttag(**args): - """:latesttag: List of strings. The global tags on the most recent globally + """List of strings. The global tags on the most recent globally tagged ancestor of this changeset. """ return showlatesttags(None, **args) @@ -381,12 +419,14 @@ f = _showlist('latesttag', tags, separator=':', **args) return _hybrid(f, tags, makemap, lambda x: x['latesttag']) +@templatekeyword('latesttagdistance') def showlatesttagdistance(repo, ctx, templ, cache, **args): - """:latesttagdistance: Integer. Longest path to the latest tag.""" + """Integer. Longest path to the latest tag.""" return getlatesttags(repo, ctx, cache)[1] +@templatekeyword('changessincelatesttag') def showchangessincelatesttag(repo, ctx, templ, cache, **args): - """:changessincelatesttag: Integer. All ancestors not in the latest tag.""" + """Integer. 
All ancestors not in the latest tag.""" latesttag = getlatesttags(repo, ctx, cache)[2][0] return _showchangessincetag(repo, ctx, tag=latesttag, **args) @@ -403,6 +443,7 @@ return len(repo.revs('only(%ld, %s)', revs, tag)) + offset +@templatekeyword('manifest') def showmanifest(**args): repo, ctx, templ = args['repo'], args['ctx'], args['templ'] mnode = ctx.manifestnode() @@ -421,8 +462,9 @@ names = ns.names(repo, ctx.node()) return showlist(ns.templatename, names, plural=namespace, **args) +@templatekeyword('namespaces') def shownamespaces(**args): - """:namespaces: Dict of lists. Names attached to this changeset per + """Dict of lists. Names attached to this changeset per namespace.""" ctx = args['ctx'] repo = ctx.repo() @@ -434,68 +476,82 @@ lambda k: {'namespace': k, 'names': namespaces[k]}, lambda x: x['namespace']) +@templatekeyword('node') def shownode(repo, ctx, templ, **args): - """:node: String. The changeset identification hash, as a 40 hexadecimal + """String. The changeset identification hash, as a 40 hexadecimal digit string. """ return ctx.hex() +@templatekeyword('p1rev') def showp1rev(repo, ctx, templ, **args): - """:p1rev: Integer. The repository-local revision number of the changeset's + """Integer. The repository-local revision number of the changeset's first parent, or -1 if the changeset has no parents.""" return ctx.p1().rev() +@templatekeyword('p2rev') def showp2rev(repo, ctx, templ, **args): - """:p2rev: Integer. The repository-local revision number of the changeset's + """Integer. The repository-local revision number of the changeset's second parent, or -1 if the changeset has no second parent.""" return ctx.p2().rev() +@templatekeyword('p1node') def showp1node(repo, ctx, templ, **args): - """:p1node: String. The identification hash of the changeset's first parent, + """String. The identification hash of the changeset's first parent, as a 40 digit hexadecimal string. 
If the changeset has no parents, all digits are 0.""" return ctx.p1().hex() +@templatekeyword('p2node') def showp2node(repo, ctx, templ, **args): - """:p2node: String. The identification hash of the changeset's second + """String. The identification hash of the changeset's second parent, as a 40 digit hexadecimal string. If the changeset has no second parent, all digits are 0.""" return ctx.p2().hex() +@templatekeyword('parents') def showparents(**args): - """:parents: List of strings. The parents of the changeset in "rev:node" + """List of strings. The parents of the changeset in "rev:node" format. If the changeset has only one "natural" parent (the predecessor revision) nothing is shown.""" repo = args['repo'] ctx = args['ctx'] + pctxs = scmutil.meaningfulparents(repo, ctx) + prevs = [str(p.rev()) for p in pctxs] # ifcontains() needs a list of str parents = [[('rev', p.rev()), ('node', p.hex()), ('phase', p.phasestr())] - for p in scmutil.meaningfulparents(repo, ctx)] - return showlist('parent', parents, **args) + for p in pctxs] + f = _showlist('parent', parents, **args) + return _hybrid(f, prevs, lambda x: {'ctx': repo[int(x)], 'revcache': {}}) +@templatekeyword('phase') def showphase(repo, ctx, templ, **args): - """:phase: String. The changeset phase name.""" + """String. The changeset phase name.""" return ctx.phasestr() +@templatekeyword('phaseidx') def showphaseidx(repo, ctx, templ, **args): - """:phaseidx: Integer. The changeset phase index.""" + """Integer. The changeset phase index.""" return ctx.phase() +@templatekeyword('rev') def showrev(repo, ctx, templ, **args): - """:rev: Integer. The repository-local changeset revision number.""" + """Integer. 
The repository-local changeset revision number.""" return scmutil.intrev(ctx.rev()) def showrevslist(name, revs, **args): """helper to generate a list of revisions in which a mapped template will be evaluated""" repo = args['ctx'].repo() + revs = [str(r) for r in revs] # ifcontains() needs a list of str f = _showlist(name, revs, **args) return _hybrid(f, revs, - lambda x: {name: x, 'ctx': repo[x], 'revcache': {}}) + lambda x: {name: x, 'ctx': repo[int(x)], 'revcache': {}}) +@templatekeyword('subrepos') def showsubrepos(**args): - """:subrepos: List of strings. Updated subrepositories in the changeset.""" + """List of strings. Updated subrepositories in the changeset.""" ctx = args['ctx'] substate = ctx.substate if not substate: @@ -513,56 +569,16 @@ # don't remove "showtags" definition, even though namespaces will put # a helper function for "tags" keyword into "keywords" map automatically, # because online help text is built without namespaces initialization +@templatekeyword('tags') def showtags(**args): - """:tags: List of strings. Any tags associated with the changeset.""" + """List of strings. 
Any tags associated with the changeset.""" return shownames('tags', **args) -# keywords are callables like: -# fn(repo, ctx, templ, cache, revcache, **args) -# with: -# repo - current repository instance -# ctx - the changectx being displayed -# templ - the templater instance -# cache - a cache dictionary for the whole templater run -# revcache - a cache dictionary for the current revision -keywords = { - 'activebookmark': showactivebookmark, - 'author': showauthor, - 'bisect': showbisect, - 'branch': showbranch, - 'branches': showbranches, - 'bookmarks': showbookmarks, - 'changessincelatesttag': showchangessincelatesttag, - 'children': showchildren, - # currentbookmark is deprecated - 'currentbookmark': showcurrentbookmark, - 'date': showdate, - 'desc': showdescription, - 'diffstat': showdiffstat, - 'extras': showextras, - 'file_adds': showfileadds, - 'file_copies': showfilecopies, - 'file_copies_switch': showfilecopiesswitch, - 'file_dels': showfiledels, - 'file_mods': showfilemods, - 'files': showfiles, - 'graphnode': showgraphnode, - 'latesttag': showlatesttag, - 'latesttagdistance': showlatesttagdistance, - 'manifest': showmanifest, - 'namespaces': shownamespaces, - 'node': shownode, - 'p1rev': showp1rev, - 'p1node': showp1node, - 'p2rev': showp2rev, - 'p2node': showp2node, - 'parents': showparents, - 'phase': showphase, - 'phaseidx': showphaseidx, - 'rev': showrev, - 'subrepos': showsubrepos, - 'tags': showtags, -} +def loadkeyword(ui, extname, registrarobj): + """Load template keyword from specified registrarobj + """ + for name, func in registrarobj._table.iteritems(): + keywords[name] = func # tell hggettext to extract docstrings from these functions: i18nfunctions = keywords.values()
--- a/mercurial/templater.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/templater.py Sat Apr 16 18:06:48 2016 -0500 @@ -17,6 +17,7 @@ error, minirst, parser, + registrar, revset as revsetmod, templatefilters, templatekw, @@ -39,7 +40,9 @@ "end": (0, None, None, None, None), } -def tokenize(program, start, end): +def tokenize(program, start, end, term=None): + """Parse a template expression into a stream of tokens, which must end + with term if specified""" pos = start while pos < end: c = program[pos] @@ -126,13 +129,15 @@ sym = program[s:pos] yield ('symbol', sym, s) pos -= 1 - elif c == '}': + elif c == term: yield ('end', None, pos + 1) return else: raise error.ParseError(_("syntax error"), pos) pos += 1 - raise error.ParseError(_("unterminated template expansion"), start) + if term: + raise error.ParseError(_("unterminated template expansion"), start) + yield ('end', None, pos) def _parsetemplate(tmpl, start, stop, quote=''): r""" @@ -170,18 +175,80 @@ if c == quote: return parsed, n + 1 - parseres, pos = p.parse(tokenize(tmpl, n + 1, stop)) + parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}')) parsed.append(parseres) if quote: raise error.ParseError(_("unterminated string"), start) return parsed, pos -def compiletemplate(tmpl, context): +def _unnesttemplatelist(tree): + """Expand list of templates to node tuple + + >>> def f(tree): + ... 
print prettyformat(_unnesttemplatelist(tree)) + >>> f(('template', [])) + ('string', '') + >>> f(('template', [('string', 'foo')])) + ('string', 'foo') + >>> f(('template', [('string', 'foo'), ('symbol', 'rev')])) + (template + ('string', 'foo') + ('symbol', 'rev')) + >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str + (template + ('symbol', 'rev')) + >>> f(('template', [('template', [('string', 'foo')])])) + ('string', 'foo') + """ + if not isinstance(tree, tuple): + return tree + op = tree[0] + if op != 'template': + return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:]) + + assert len(tree) == 2 + xs = tuple(_unnesttemplatelist(x) for x in tree[1]) + if not xs: + return ('string', '') # empty template "" + elif len(xs) == 1 and xs[0][0] == 'string': + return xs[0] # fast path for string with no template fragment "x" + else: + return (op,) + xs + +def parse(tmpl): + """Parse template string into tree""" parsed, pos = _parsetemplate(tmpl, 0, len(tmpl)) - return [compileexp(e, context, methods) for e in parsed] + assert pos == len(tmpl), 'unquoted template should be consumed' + return _unnesttemplatelist(('template', parsed)) + +def _parseexpr(expr): + """Parse a template expression into tree + + >>> _parseexpr('"foo"') + ('string', 'foo') + >>> _parseexpr('foo(bar)') + ('func', ('symbol', 'foo'), ('symbol', 'bar')) + >>> _parseexpr('foo(') + Traceback (most recent call last): + ... + ParseError: ('not a prefix: end', 4) + >>> _parseexpr('"foo" "bar"') + Traceback (most recent call last): + ... 
+ ParseError: ('invalid token', 7) + """ + p = parser.parser(elements) + tree, pos = p.parse(tokenize(expr, 0, len(expr))) + if pos != len(expr): + raise error.ParseError(_('invalid token'), pos) + return _unnesttemplatelist(tree) + +def prettyformat(tree): + return parser.prettyformat(tree, ('integer', 'string', 'symbol')) def compileexp(exp, context, curmethods): + """Compile parsed template tree to (func, data) pair""" t = exp[0] if t in curmethods: return curmethods[t](exp, context) @@ -202,8 +269,10 @@ return [x] def gettemplate(exp, context): - if exp[0] == 'template': - return [compileexp(e, context, methods) for e in exp[1]] + """Compile given template tree or load named template from map file; + returns (func, data) pair""" + if exp[0] in ('template', 'string'): + return compileexp(exp, context, methods) if exp[0] == 'symbol': # unlike runsymbol(), here 'symbol' is always taken as template name # even if it exists in mapping. this allows us to override mapping @@ -220,6 +289,27 @@ thing = stringify(thing) return thing +def evalinteger(context, mapping, arg, err): + v = evalfuncarg(context, mapping, arg) + try: + return int(v) + except (TypeError, ValueError): + raise error.ParseError(err) + +def evalstring(context, mapping, arg): + func, data = arg + return stringify(func(context, mapping, data)) + +def evalstringliteral(context, mapping, arg): + """Evaluate given argument as string template, but returns symbol name + if it is unknown""" + func, data = arg + if func is runsymbol: + thing = func(context, mapping, data, default=data) + else: + thing = func(context, mapping, data) + return stringify(thing) + def runinteger(context, mapping, data): return int(data) @@ -234,7 +324,7 @@ def _runrecursivesymbol(context, mapping, key): raise error.Abort(_("recursive reference '%s' in template") % key) -def runsymbol(context, mapping, key): +def runsymbol(context, mapping, key, default=''): v = mapping.get(key) if v is None: v = context._defaults.get(key) @@ 
-246,15 +336,13 @@ try: v = context.process(key, safemapping) except TemplateNotFound: - v = '' + v = default if callable(v): return v(**mapping) return v def buildtemplate(exp, context): - ctmpl = [compileexp(e, context, methods) for e in exp[1]] - if len(ctmpl) == 1: - return ctmpl[0] # fast path for string with no template fragment + ctmpl = [compileexp(e, context, methods) for e in exp[1:]] return (runtemplate, ctmpl) def runtemplate(context, mapping, template): @@ -287,22 +375,29 @@ def buildmap(exp, context): func, data = compileexp(exp[1], context, methods) - ctmpl = gettemplate(exp[2], context) - return (runmap, (func, data, ctmpl)) + tfunc, tdata = gettemplate(exp[2], context) + return (runmap, (func, data, tfunc, tdata)) def runmap(context, mapping, data): - func, data, ctmpl = data + func, data, tfunc, tdata = data d = func(context, mapping, data) if util.safehasattr(d, 'itermaps'): - d = d.itermaps() + diter = d.itermaps() + else: + try: + diter = iter(d) + except TypeError: + if func is runsymbol: + raise error.ParseError(_("keyword '%s' is not iterable") % data) + else: + raise error.ParseError(_("%r is not iterable") % d) - lm = mapping.copy() - - for i in d: + for i in diter: + lm = mapping.copy() if isinstance(i, dict): lm.update(i) lm['originalnode'] = mapping.get('node') - yield runtemplate(context, lm, ctmpl) + yield tfunc(context, lm, tdata) else: # v is not an iterable of dicts, this happen when 'key' # has been fully expanded already and format is useless. @@ -322,18 +417,24 @@ return (runfilter, (args[0], f)) raise error.ParseError(_("unknown function '%s'") % n) +# dict of template built-in functions +funcs = {} + +templatefunc = registrar.templatefunc(funcs) + +@templatefunc('date(date[, fmt])') def date(context, mapping, args): - """:date(date[, fmt]): Format a date. See :hg:`help dates` for formatting + """Format a date. See :hg:`help dates` for formatting strings. 
The default is a Unix date format, including the timezone: "Mon Sep 04 15:13:13 2006 0700".""" if not (1 <= len(args) <= 2): # i18n: "date" is a keyword raise error.ParseError(_("date expects one or two arguments")) - date = args[0][0](context, mapping, args[0][1]) + date = evalfuncarg(context, mapping, args[0]) fmt = None if len(args) == 2: - fmt = stringify(args[1][0](context, mapping, args[1][1])) + fmt = evalstring(context, mapping, args[1]) try: if fmt is None: return util.datestr(date) @@ -343,8 +444,9 @@ # i18n: "date" is a keyword raise error.ParseError(_("date expects a date information")) +@templatefunc('diff([includepattern [, excludepattern]])') def diff(context, mapping, args): - """:diff([includepattern [, excludepattern]]): Show a diff, optionally + """Show a diff, optionally specifying files to include or exclude.""" if len(args) > 2: # i18n: "diff" is a keyword @@ -352,7 +454,7 @@ def getpatterns(i): if i < len(args): - s = stringify(args[i][0](context, mapping, args[i][1])).strip() + s = evalstring(context, mapping, args[i]).strip() if s: return [s] return [] @@ -362,46 +464,48 @@ return ''.join(chunks) +@templatefunc('fill(text[, width[, initialident[, hangindent]]])') def fill(context, mapping, args): - """:fill(text[, width[, initialident[, hangindent]]]): Fill many + """Fill many paragraphs with optional indentation. 
See the "fill" filter.""" if not (1 <= len(args) <= 4): # i18n: "fill" is a keyword raise error.ParseError(_("fill expects one to four arguments")) - text = stringify(args[0][0](context, mapping, args[0][1])) + text = evalstring(context, mapping, args[0]) width = 76 initindent = '' hangindent = '' if 2 <= len(args) <= 4: + width = evalinteger(context, mapping, args[1], + # i18n: "fill" is a keyword + _("fill expects an integer width")) try: - width = int(stringify(args[1][0](context, mapping, args[1][1]))) - except ValueError: - # i18n: "fill" is a keyword - raise error.ParseError(_("fill expects an integer width")) - try: - initindent = stringify(args[2][0](context, mapping, args[2][1])) - hangindent = stringify(args[3][0](context, mapping, args[3][1])) + initindent = evalstring(context, mapping, args[2]) + hangindent = evalstring(context, mapping, args[3]) except IndexError: pass return templatefilters.fill(text, width, initindent, hangindent) +@templatefunc('pad(text, width[, fillchar=\' \'[, right=False]])') def pad(context, mapping, args): - """:pad(text, width[, fillchar=' '[, right=False]]): Pad text with a + """Pad text with a fill character.""" if not (2 <= len(args) <= 4): # i18n: "pad" is a keyword raise error.ParseError(_("pad() expects two to four arguments")) - width = int(args[1][1]) + width = evalinteger(context, mapping, args[1], + # i18n: "pad" is a keyword + _("pad() expects an integer width")) - text = stringify(args[0][0](context, mapping, args[0][1])) + text = evalstring(context, mapping, args[0]) right = False fillchar = ' ' if len(args) > 2: - fillchar = stringify(args[2][0](context, mapping, args[2][1])) + fillchar = evalstring(context, mapping, args[2]) if len(args) > 3: right = util.parsebool(args[3][1]) @@ -410,8 +514,9 @@ else: return text.ljust(width, fillchar) +@templatefunc('indent(text, indentchars[, firstline])') def indent(context, mapping, args): - """:indent(text, indentchars[, firstline]): Indents all non-empty lines + 
"""Indents all non-empty lines with the characters given in the indentchars string. An optional third parameter will override the indent for the first line only if present.""" @@ -419,77 +524,82 @@ # i18n: "indent" is a keyword raise error.ParseError(_("indent() expects two or three arguments")) - text = stringify(args[0][0](context, mapping, args[0][1])) - indent = stringify(args[1][0](context, mapping, args[1][1])) + text = evalstring(context, mapping, args[0]) + indent = evalstring(context, mapping, args[1]) if len(args) == 3: - firstline = stringify(args[2][0](context, mapping, args[2][1])) + firstline = evalstring(context, mapping, args[2]) else: firstline = indent # the indent function doesn't indent the first line, so we do it here return templatefilters.indent(firstline + text, indent) +@templatefunc('get(dict, key)') def get(context, mapping, args): - """:get(dict, key): Get an attribute/key from an object. Some keywords + """Get an attribute/key from an object. Some keywords are complex types. 
This function allows you to obtain the value of an attribute on these types.""" if len(args) != 2: # i18n: "get" is a keyword raise error.ParseError(_("get() expects two arguments")) - dictarg = args[0][0](context, mapping, args[0][1]) + dictarg = evalfuncarg(context, mapping, args[0]) if not util.safehasattr(dictarg, 'get'): # i18n: "get" is a keyword raise error.ParseError(_("get() expects a dict as first argument")) - key = args[1][0](context, mapping, args[1][1]) + key = evalfuncarg(context, mapping, args[1]) return dictarg.get(key) +@templatefunc('if(expr, then[, else])') def if_(context, mapping, args): - """:if(expr, then[, else]): Conditionally execute based on the result of + """Conditionally execute based on the result of an expression.""" if not (2 <= len(args) <= 3): # i18n: "if" is a keyword raise error.ParseError(_("if expects two or three arguments")) - test = stringify(args[0][0](context, mapping, args[0][1])) + test = evalstring(context, mapping, args[0]) if test: yield args[1][0](context, mapping, args[1][1]) elif len(args) == 3: yield args[2][0](context, mapping, args[2][1]) +@templatefunc('ifcontains(search, thing, then[, else])') def ifcontains(context, mapping, args): - """:ifcontains(search, thing, then[, else]): Conditionally execute based + """Conditionally execute based on whether the item "search" is in "thing".""" if not (3 <= len(args) <= 4): # i18n: "ifcontains" is a keyword raise error.ParseError(_("ifcontains expects three or four arguments")) - item = stringify(args[0][0](context, mapping, args[0][1])) - items = args[1][0](context, mapping, args[1][1]) + item = evalstring(context, mapping, args[0]) + items = evalfuncarg(context, mapping, args[1]) if item in items: yield args[2][0](context, mapping, args[2][1]) elif len(args) == 4: yield args[3][0](context, mapping, args[3][1]) +@templatefunc('ifeq(expr1, expr2, then[, else])') def ifeq(context, mapping, args): - """:ifeq(expr1, expr2, then[, else]): Conditionally execute based on + 
"""Conditionally execute based on whether 2 items are equivalent.""" if not (3 <= len(args) <= 4): # i18n: "ifeq" is a keyword raise error.ParseError(_("ifeq expects three or four arguments")) - test = stringify(args[0][0](context, mapping, args[0][1])) - match = stringify(args[1][0](context, mapping, args[1][1])) + test = evalstring(context, mapping, args[0]) + match = evalstring(context, mapping, args[1]) if test == match: yield args[2][0](context, mapping, args[2][1]) elif len(args) == 4: yield args[3][0](context, mapping, args[3][1]) +@templatefunc('join(list, sep)') def join(context, mapping, args): - """:join(list, sep): Join items in a list with a delimiter.""" + """Join items in a list with a delimiter.""" if not (1 <= len(args) <= 2): # i18n: "join" is a keyword raise error.ParseError(_("join expects one or two arguments")) @@ -501,7 +611,7 @@ joiner = " " if len(args) > 1: - joiner = stringify(args[1][0](context, mapping, args[1][1])) + joiner = evalstring(context, mapping, args[1]) first = True for x in joinset: @@ -511,19 +621,26 @@ yield joiner yield x +@templatefunc('label(label, expr)') def label(context, mapping, args): - """:label(label, expr): Apply a label to generated content. Content with + """Apply a label to generated content. Content with a label applied can result in additional post-processing, such as automatic colorization.""" if len(args) != 2: # i18n: "label" is a keyword raise error.ParseError(_("label expects two arguments")) - # ignore args[0] (the label string) since this is supposed to be a a no-op - yield args[1][0](context, mapping, args[1][1]) + ui = mapping['ui'] + thing = evalstring(context, mapping, args[1]) + # preserve unknown symbol as literal so effects like 'red', 'bold', + # etc. 
don't need to be quoted + label = evalstringliteral(context, mapping, args[0]) + return ui.label(thing, label) + +@templatefunc('latesttag([pattern])') def latesttag(context, mapping, args): - """:latesttag([pattern]): The global tags matching the given pattern on the + """The global tags matching the given pattern on the most recent globally tagged ancestor of this changeset.""" if len(args) > 1: # i18n: "latesttag" is a keyword @@ -531,12 +648,13 @@ pattern = None if len(args) == 1: - pattern = stringify(args[0][0](context, mapping, args[0][1])) + pattern = evalstring(context, mapping, args[0]) return templatekw.showlatesttags(pattern, **mapping) +@templatefunc('localdate(date[, tz])') def localdate(context, mapping, args): - """:localdate(date[, tz]): Converts a date to the specified timezone. + """Converts a date to the specified timezone. The default is local date.""" if not (1 <= len(args) <= 2): # i18n: "localdate" is a keyword @@ -563,14 +681,15 @@ tzoffset = util.makedate()[1] return (date[0], tzoffset) +@templatefunc('revset(query[, formatargs...])') def revset(context, mapping, args): - """:revset(query[, formatargs...]): Execute a revision set query. See + """Execute a revision set query. 
See :hg:`help revset`.""" if not len(args) > 0: # i18n: "revset" is a keyword raise error.ParseError(_("revset expects one or more arguments")) - raw = stringify(args[0][0](context, mapping, args[0][1])) + raw = evalstring(context, mapping, args[0]) ctx = mapping['ctx'] repo = ctx.repo() @@ -579,43 +698,47 @@ return m(repo) if len(args) > 1: - formatargs = list([a[0](context, mapping, a[1]) for a in args[1:]]) + formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]] revs = query(revsetmod.formatspec(raw, *formatargs)) - revs = list([str(r) for r in revs]) + revs = list(revs) else: revsetcache = mapping['cache'].setdefault("revsetcache", {}) if raw in revsetcache: revs = revsetcache[raw] else: revs = query(raw) - revs = list([str(r) for r in revs]) + revs = list(revs) revsetcache[raw] = revs return templatekw.showrevslist("revision", revs, **mapping) +@templatefunc('rstdoc(text, style)') def rstdoc(context, mapping, args): - """:rstdoc(text, style): Format ReStructuredText.""" + """Format ReStructuredText.""" if len(args) != 2: # i18n: "rstdoc" is a keyword raise error.ParseError(_("rstdoc expects two arguments")) - text = stringify(args[0][0](context, mapping, args[0][1])) - style = stringify(args[1][0](context, mapping, args[1][1])) + text = evalstring(context, mapping, args[0]) + style = evalstring(context, mapping, args[1]) return minirst.format(text, style=style, keep=['verbose']) +@templatefunc('shortest(node, minlength=4)') def shortest(context, mapping, args): - """:shortest(node, minlength=4): Obtain the shortest representation of + """Obtain the shortest representation of a node.""" if not (1 <= len(args) <= 2): # i18n: "shortest" is a keyword raise error.ParseError(_("shortest() expects one or two arguments")) - node = stringify(args[0][0](context, mapping, args[0][1])) + node = evalstring(context, mapping, args[0]) minlength = 4 if len(args) > 1: - minlength = int(args[1][1]) + minlength = evalinteger(context, mapping, args[1], + # i18n: 
"shortest" is a keyword + _("shortest() expects an integer minlength")) cl = mapping['ctx']._repo.changelog def isvalid(test): @@ -656,29 +779,31 @@ if len(shortest) <= length: return shortest +@templatefunc('strip(text[, chars])') def strip(context, mapping, args): - """:strip(text[, chars]): Strip characters from a string. By default, + """Strip characters from a string. By default, strips all leading and trailing whitespace.""" if not (1 <= len(args) <= 2): # i18n: "strip" is a keyword raise error.ParseError(_("strip expects one or two arguments")) - text = stringify(args[0][0](context, mapping, args[0][1])) + text = evalstring(context, mapping, args[0]) if len(args) == 2: - chars = stringify(args[1][0](context, mapping, args[1][1])) + chars = evalstring(context, mapping, args[1]) return text.strip(chars) return text.strip() +@templatefunc('sub(pattern, replacement, expression)') def sub(context, mapping, args): - """:sub(pattern, replacement, expression): Perform text substitution + """Perform text substitution using regular expressions.""" if len(args) != 3: # i18n: "sub" is a keyword raise error.ParseError(_("sub expects three arguments")) - pat = stringify(args[0][0](context, mapping, args[0][1])) - rpl = stringify(args[1][0](context, mapping, args[1][1])) - src = stringify(args[2][0](context, mapping, args[2][1])) + pat = evalstring(context, mapping, args[0]) + rpl = evalstring(context, mapping, args[1]) + src = evalstring(context, mapping, args[2]) try: patre = re.compile(pat) except re.error: @@ -690,35 +815,34 @@ # i18n: "sub" is a keyword raise error.ParseError(_("sub got an invalid replacement: %s") % rpl) +@templatefunc('startswith(pattern, text)') def startswith(context, mapping, args): - """:startswith(pattern, text): Returns the value from the "text" argument + """Returns the value from the "text" argument if it begins with the content from the "pattern" argument.""" if len(args) != 2: # i18n: "startswith" is a keyword raise 
error.ParseError(_("startswith expects two arguments")) - patn = stringify(args[0][0](context, mapping, args[0][1])) - text = stringify(args[1][0](context, mapping, args[1][1])) + patn = evalstring(context, mapping, args[0]) + text = evalstring(context, mapping, args[1]) if text.startswith(patn): return text return '' - +@templatefunc('word(number, text[, separator])') def word(context, mapping, args): - """:word(number, text[, separator]): Return the nth word from a string.""" + """Return the nth word from a string.""" if not (2 <= len(args) <= 3): # i18n: "word" is a keyword raise error.ParseError(_("word expects two or three arguments, got %d") % len(args)) - try: - num = int(stringify(args[0][0](context, mapping, args[0][1]))) - except ValueError: - # i18n: "word" is a keyword - raise error.ParseError(_("word expects an integer index")) - text = stringify(args[1][0](context, mapping, args[1][1])) + num = evalinteger(context, mapping, args[0], + # i18n: "word" is a keyword + _("word expects an integer index")) + text = evalstring(context, mapping, args[1]) if len(args) == 3: - splitter = stringify(args[2][0](context, mapping, args[2][1])) + splitter = evalstring(context, mapping, args[2]) else: splitter = None @@ -745,28 +869,24 @@ methods = exprmethods.copy() methods["integer"] = exprmethods["symbol"] # '{1}' as variable -funcs = { - "date": date, - "diff": diff, - "fill": fill, - "get": get, - "if": if_, - "ifcontains": ifcontains, - "ifeq": ifeq, - "indent": indent, - "join": join, - "label": label, - "latesttag": latesttag, - "localdate": localdate, - "pad": pad, - "revset": revset, - "rstdoc": rstdoc, - "shortest": shortest, - "startswith": startswith, - "strip": strip, - "sub": sub, - "word": word, -} +class _aliasrules(parser.basealiasrules): + """Parsing and expansion rule set of template aliases""" + _section = _('template alias') + _parse = staticmethod(_parseexpr) + + @staticmethod + def _trygetfunc(tree): + """Return (name, args) if tree is func(...) 
or ...|filter; otherwise + None""" + if tree[0] == 'func' and tree[1][0] == 'symbol': + return tree[1][1], getlist(tree[2]) + if tree[0] == '|' and tree[2][0] == 'symbol': + return tree[2][1], [tree[1]] + +def expandaliases(tree, aliases): + """Return new tree of aliases are expanded""" + aliasmap = _aliasrules.buildmap(aliases) + return _aliasrules.expand(aliasmap, tree) # template engine @@ -791,9 +911,9 @@ yield j def unquotestring(s): - '''unwrap quotes''' - if len(s) < 2 or s[0] != s[-1]: - raise SyntaxError(_('unmatched quotes')) + '''unwrap quotes if any; otherwise returns unmodified string''' + if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]: + return s return s[1:-1] class engine(object): @@ -816,7 +936,7 @@ filter uses function to transform value. syntax is {key|filter1|filter2|...}.''' - def __init__(self, loader, filters=None, defaults=None): + def __init__(self, loader, filters=None, defaults=None, aliases=()): self._loader = loader if filters is None: filters = {} @@ -824,15 +944,19 @@ if defaults is None: defaults = {} self._defaults = defaults - self._cache = {} + self._aliasmap = _aliasrules.buildmap(aliases) + self._cache = {} # key: (func, data) def _load(self, t): '''load, parse, and cache a template''' if t not in self._cache: # put poison to cut recursion while compiling 't' - self._cache[t] = [(_runrecursivesymbol, t)] + self._cache[t] = (_runrecursivesymbol, t) try: - self._cache[t] = compiletemplate(self._loader(t), self) + x = parse(self._loader(t)) + if self._aliasmap: + x = _aliasrules.expand(self._aliasmap, x) + self._cache[t] = compileexp(x, self, methods) except: # re-raises del self._cache[t] raise @@ -842,7 +966,8 @@ '''Perform expansion. t is name of map element to expand. mapping contains added elements for use during expansion. 
Is a generator.''' - return _flatten(runtemplate(self, mapping, self._load(t))) + func, data = self._load(t) + return _flatten(func(self, mapping, data)) engines = {'default': engine} @@ -854,63 +979,75 @@ stylelist = [] for file in dirlist: split = file.split(".") + if split[-1] in ('orig', 'rej'): + continue if split[0] == "map-cmdline": stylelist.append(split[1]) return ", ".join(sorted(stylelist)) +def _readmapfile(mapfile): + """Load template elements from the given map file""" + if not os.path.exists(mapfile): + raise error.Abort(_("style '%s' not found") % mapfile, + hint=_("available styles: %s") % stylelist()) + + base = os.path.dirname(mapfile) + conf = config.config(includepaths=templatepaths()) + conf.read(mapfile) + + cache = {} + tmap = {} + for key, val in conf[''].items(): + if not val: + raise error.ParseError(_('missing value'), conf.source('', key)) + if val[0] in "'\"": + if val[0] != val[-1]: + raise error.ParseError(_('unmatched quotes'), + conf.source('', key)) + cache[key] = unquotestring(val) + else: + val = 'default', val + if ':' in val[1]: + val = val[1].split(':', 1) + tmap[key] = val[0], os.path.join(base, val[1]) + return cache, tmap + class TemplateNotFound(error.Abort): pass class templater(object): - def __init__(self, mapfile, filters=None, defaults=None, cache=None, + def __init__(self, filters=None, defaults=None, cache=None, aliases=(), minchunk=1024, maxchunk=65536): '''set up template engine. - mapfile is name of file to read map definitions from. filters is dict of functions. each transforms a value into another. - defaults is dict of default map definitions.''' + defaults is dict of default map definitions. + aliases is list of alias (name, replacement) pairs. 
+ ''' if filters is None: filters = {} if defaults is None: defaults = {} if cache is None: cache = {} - self.mapfile = mapfile or 'template' self.cache = cache.copy() self.map = {} - if mapfile: - self.base = os.path.dirname(mapfile) - else: - self.base = '' self.filters = templatefilters.filters.copy() self.filters.update(filters) self.defaults = defaults + self._aliases = aliases self.minchunk, self.maxchunk = minchunk, maxchunk self.ecache = {} - if not mapfile: - return - if not os.path.exists(mapfile): - raise error.Abort(_("style '%s' not found") % mapfile, - hint=_("available styles: %s") % stylelist()) - - conf = config.config(includepaths=templatepaths()) - conf.read(mapfile) - - for key, val in conf[''].items(): - if not val: - raise SyntaxError(_('%s: missing value') % conf.source('', key)) - if val[0] in "'\"": - try: - self.cache[key] = unquotestring(val) - except SyntaxError as inst: - raise SyntaxError('%s: %s' % - (conf.source('', key), inst.args[0])) - else: - val = 'default', val - if ':' in val[1]: - val = val[1].split(':', 1) - self.map[key] = val[0], os.path.join(self.base, val[1]) + @classmethod + def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None, + minchunk=1024, maxchunk=65536): + """Create templater from the specified map file""" + t = cls(filters, defaults, cache, [], minchunk, maxchunk) + cache, tmap = _readmapfile(mapfile) + t.cache.update(cache) + t.map = tmap + return t def __contains__(self, key): return key in self.cache or key in self.map @@ -931,8 +1068,12 @@ def __call__(self, t, **mapping): ttype = t in self.map and self.map[t][0] or 'default' if ttype not in self.ecache: - self.ecache[ttype] = engines[ttype](self.load, - self.filters, self.defaults) + try: + ecls = engines[ttype] + except KeyError: + raise error.Abort(_('invalid template engine: %s') % ttype) + self.ecache[ttype] = ecls(self.load, self.filters, self.defaults, + self._aliases) proc = self.ecache[ttype] stream = proc.process(t, mapping) @@ 
-991,5 +1132,11 @@ raise RuntimeError("No hgweb templates found in %r" % paths) +def loadfunction(ui, extname, registrarobj): + """Load template function from specified registrarobj + """ + for name, func in registrarobj._table.iteritems(): + funcs[name] = func + # tell hggettext to extract docstrings from these functions: i18nfunctions = funcs.values()
--- a/mercurial/templates/atom/bookmarks.tmpl Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/templates/atom/bookmarks.tmpl Sat Apr 16 18:06:48 2016 -0500 @@ -5,7 +5,7 @@ <title>{repo|escape}: bookmarks</title> <summary>{repo|escape} bookmark history</summary> <author><name>Mercurial SCM</name></author> - {latestentry%feedupdated} + {lastchange%feedupdated} {entries%bookmarkentry} </feed>
--- a/mercurial/templates/atom/branches.tmpl Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/templates/atom/branches.tmpl Sat Apr 16 18:06:48 2016 -0500 @@ -1,7 +1,7 @@ {header} <id>{urlbase}{url|urlescape}</id> - <link rel="self" href="{urlbase}{url|urlescape}atom-tags"/> - <link rel="alternate" href="{urlbase}{url|urlescape}tags"/> + <link rel="self" href="{urlbase}{url|urlescape}atom-branches"/> + <link rel="alternate" href="{urlbase}{url|urlescape}branches"/> <title>{repo|escape}: branches</title> <summary>{repo|escape} branch history</summary> <author><name>Mercurial SCM</name></author>
--- a/mercurial/templates/json/map Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/templates/json/map Sat Apr 16 18:06:48 2016 -0500 @@ -8,26 +8,27 @@ changelistentry = '\{ "node": {node|json}, "date": {date|json}, - "desc": {desc|json}, + "desc": {desc|utf8|json}, "bookmarks": [{join(bookmarks%changelistentryname, ", ")}], "tags": [{join(tags%changelistentryname, ", ")}], - "user": {author|json} + "user": {author|utf8|json}, + "parents": [{join(allparents%changesetparent, ", ")}] }' -changelistentryname = '{name|json}' +changelistentryname = '{name|utf8|json}' changeset = '\{ "node": {node|json}, "date": {date|json}, - "desc": {desc|json}, + "desc": {desc|utf8|json}, "branch": {if(branch, branch%changesetbranch, "default"|json)}, "bookmarks": [{join(changesetbookmark, ", ")}], "tags": [{join(changesettag, ", ")}], - "user": {author|json}, + "user": {author|utf8|json}, "parents": [{join(parent%changesetparent, ", ")}], "phase": {phase|json} }' -changesetbranch = '{name|json}' -changesetbookmark = '{bookmark|json}' -changesettag = '{tag|json}' +changesetbranch = '{name|utf8|json}' +changesetbookmark = '{bookmark|utf8|json}' +changesettag = '{tag|utf8|json}' changesetparent = '{node|json}' manifest = '\{ "node": {node|json}, @@ -37,7 +38,7 @@ "bookmarks": [{join(bookmarks%name, ", ")}], "tags": [{join(tags%name, ", ")}] }' -name = '{name|json}' +name = '{name|utf8|json}' direntry = '\{ "abspath": {path|json}, "basename": {basename|json}, @@ -55,7 +56,7 @@ "tags": [{join(entriesnotip%tagentry, ", ")}] }' tagentry = '\{ - "tag": {tag|json}, + "tag": {tag|utf8|json}, "node": {node|json}, "date": {date|json} }' @@ -64,7 +65,7 @@ "bookmarks": [{join(entries%bookmarkentry, ", ")}] }' bookmarkentry = '\{ - "bookmark": {bookmark|json}, + "bookmark": {bookmark|utf8|json}, "node": {node|json}, "date": {date|json} }' @@ -72,7 +73,7 @@ "branches": [{join(entries%branchentry, ", ")}] }' branchentry = '\{ - "branch": {branch|json}, + "branch": {branch|utf8|json}, "node": 
{node|json}, "date": {date|json}, "status": {status|json} @@ -82,8 +83,8 @@ "path": {file|json}, "node": {node|json}, "date": {date|json}, - "desc": {desc|json}, - "author": {author|json}, + "desc": {desc|utf8|json}, + "author": {author|utf8|json}, "parents": [{join(parent%changesetparent, ", ")}], "children": [{join(child%changesetparent, ", ")}], "diff": [{join(diff%diffblock, ", ")}] @@ -116,8 +117,8 @@ "path": {file|json}, "node": {node|json}, "date": {date|json}, - "desc": {desc|json}, - "author": {author|json}, + "desc": {desc|utf8|json}, + "author": {author|utf8|json}, "parents": [{join(parent%changesetparent, ", ")}], "children": [{join(child%changesetparent, ", ")}], "leftnode": {leftnode|json}, @@ -137,9 +138,9 @@ fileannotate = '\{ "abspath": {file|json}, "node": {node|json}, - "author": {author|json}, + "author": {author|utf8|json}, "date": {date|json}, - "desc": {desc|json}, + "desc": {desc|utf8|json}, "parents": [{join(parent%changesetparent, ", ")}], "children": [{join(child%changesetparent, ", ")}], "permissions": {permissions|json}, @@ -147,8 +148,8 @@ }' fileannotation = '\{ "node": {node|json}, - "author": {author|json}, - "desc": {desc|json}, + "author": {author|utf8|json}, + "desc": {desc|utf8|json}, "abspath": {file|json}, "targetline": {targetline|json}, "line": {line|json}, @@ -163,12 +164,21 @@ "othercommands": [{join(othercommands%helptopicentry, ", ")}] }' helptopicentry = '\{ - "topic": {topic|json}, - "summary": {summary|json} + "topic": {topic|utf8|json}, + "summary": {summary|utf8|json} }' help = '\{ - "topic": {topic|json}, - "rawdoc": {doc|json} + "topic": {topic|utf8|json}, + "rawdoc": {doc|utf8|json} }' filenodelink = '' filenolink = '' +index = '\{ + "entries": [{join(entries%indexentry, ", ")}] + }' +indexentry = '\{ + "name": {name|utf8|json}, + "description": {description|utf8|json}, + "contact": {contact|utf8|json}, + "lastchange": {lastchange|json} + }'
--- a/mercurial/templates/map-cmdline.compact Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/templates/map-cmdline.compact Sat Apr 16 18:06:48 2016 -0500 @@ -1,12 +1,30 @@ -changeset = '{rev}{tags}{bookmarks}{parents} {node|short} {date|isodate} {author|user}\n {desc|firstline|strip}\n\n' -changeset_quiet = '{rev}:{node|short}\n' -changeset_verbose = '{rev}{tags}{parents} {node|short} {date|isodate} {author}\n {desc|strip}\n\n' +ldate = '{label("log.date", + "{date|isodate}")}' + +ldesc = '{label('ui.note log.description', + '{desc|strip}')}' +ldescfirst = '{label('ui.note log.description', + '{desc|firstline|strip}')}' + +changeset = '{lrev}{tags}{bookmarks}{parents} {lnode} {ldate} {luser}\n {ldescfirst}\n\n' +changeset_quiet = '{lrev}:{lnode}\n' +changeset_verbose = '{lrev}{tags}{parents} {lnode} {ldate} {lauthor}\n {ldesc}\n\n' +lrev = '{label("log.changeset changeset.{phase}", + "{rev}")}' +lnode = '{label("log.node", + "{node|short}")}' +lauthor = '{label("log.user", + "{author}")}' +luser = '{label("log.user", + "{author|user}")}' start_tags = '[' -tag = '{tag},' +tag = '{label("log.tag", + "{tag},")}' last_tag = '{tag}]' start_parents = ':' -parent = '{rev},' -last_parent = '{rev}' +parent = '{lrev},' +last_parent = '{lrev}' start_bookmarks = '[' -bookmark = '{bookmark},' +bookmark = '{label("log.bookmark", + "{bookmark},")}' last_bookmark = '{bookmark}]'
--- a/mercurial/transaction.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/transaction.py Sat Apr 16 18:06:48 2016 -0500 @@ -23,6 +23,19 @@ version = 2 +# These are the file generators that should only be executed after the +# finalizers are done, since they rely on the output of the finalizers (like +# the changelog having been written). +postfinalizegenerators = set([ + 'bookmarks', + 'dirstate' +]) + +class GenerationGroup(object): + ALL='all' + PREFINALIZE='prefinalize' + POSTFINALIZE='postfinalize' + def active(func): def _active(self, *args, **kwds): if self.count == 0: @@ -276,12 +289,19 @@ # but for bookmarks that are handled outside this mechanism. self._filegenerators[genid] = (order, filenames, genfunc, location) - def _generatefiles(self, suffix=''): + def _generatefiles(self, suffix='', group=GenerationGroup.ALL): # write files registered for generation any = False - for entry in sorted(self._filegenerators.values()): + for id, entry in sorted(self._filegenerators.iteritems()): any = True order, filenames, genfunc, location = entry + + # for generation at closing, check if it's before or after finalize + postfinalize = group == GenerationGroup.POSTFINALIZE + if (group != GenerationGroup.ALL and + (id in postfinalizegenerators) != (postfinalize)): + continue + vfs = self._vfsmap[location] files = [] try: @@ -407,10 +427,13 @@ '''commit the transaction''' if self.count == 1: self.validator(self) # will raise exception if needed - self._generatefiles() + self._generatefiles(group=GenerationGroup.PREFINALIZE) categories = sorted(self._finalizecallback) for cat in categories: self._finalizecallback[cat](self) + # Prevent double usage and help clear cycles. + self._finalizecallback = None + self._generatefiles(group=GenerationGroup.POSTFINALIZE) self.count -= 1 if self.count != 0: @@ -465,6 +488,8 @@ categories = sorted(self._postclosecallback) for cat in categories: self._postclosecallback[cat](self) + # Prevent double usage and help clear cycles. 
+ self._postclosecallback = None @active def abort(self): @@ -518,6 +543,8 @@ try: for cat in sorted(self._abortcallback): self._abortcallback[cat](self) + # Prevent double usage and help clear cycles. + self._abortcallback = None _playback(self.journal, self.report, self.opener, self._vfsmap, self.entries, self._backupentries, False) self.report(_("rollback completed\n"))
--- a/mercurial/ui.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/ui.py Sat Apr 16 18:06:48 2016 -0500 @@ -42,7 +42,6 @@ # (see "hg help extensions" for more info) # # pager = -# progress = # color =""", 'cloned': @@ -86,7 +85,6 @@ # (see "hg help extensions" for more info) # # blackbox = -# progress = # color = # pager =""", } @@ -194,6 +192,9 @@ if self.plain('revsetalias'): for k, v in cfg.items('revsetalias'): del cfg['revsetalias'][k] + if self.plain('templatealias'): + for k, v in cfg.items('templatealias'): + del cfg['templatealias'][k] if trusted: self._tcfg.update(cfg) @@ -582,7 +583,7 @@ pass if not user: raise error.Abort(_('no username supplied'), - hint=_('use "hg config --edit" ' + hint=_("use 'hg config --edit' " 'to set your username')) if "\n" in user: raise error.Abort(_("username %s contains a newline\n") @@ -664,7 +665,7 @@ "cmdname.type" is recommended. For example, status issues a label of "status.modified" for modified files. ''' - if self._buffers: + if self._buffers and not opts.get('prompt', False): self._buffers[-1].extend(a for a in args) else: self._progclear() @@ -699,6 +700,77 @@ return False return util.isatty(fh) + def interface(self, feature): + """what interface to use for interactive console features? + + The interface is controlled by the value of `ui.interface` but also by + the value of feature-specific configuration. For example: + + ui.interface.histedit = text + ui.interface.chunkselector = curses + + Here the features are "histedit" and "chunkselector". + + The configuration above means that the default interfaces for commands + is curses, the interface for histedit is text and the interface for + selecting chunk is crecord (the best curses interface available). + + Consider the following exemple: + ui.interface = curses + ui.interface.histedit = text + + Then histedit will use the text interface and chunkselector will use + the default curses interface (crecord at the moment). 
+ """ + alldefaults = frozenset(["text", "curses"]) + + featureinterfaces = { + "chunkselector": [ + "text", + "curses", + ] + } + + # Feature-specific interface + if feature not in featureinterfaces.keys(): + # Programming error, not user error + raise ValueError("Unknown feature requested %s" % feature) + + availableinterfaces = frozenset(featureinterfaces[feature]) + if alldefaults > availableinterfaces: + # Programming error, not user error. We need a use case to + # define the right thing to do here. + raise ValueError( + "Feature %s does not handle all default interfaces" % + feature) + + if self.plain(): + return "text" + + # Default interface for all the features + defaultinterface = "text" + i = self.config("ui", "interface", None) + if i in alldefaults: + defaultinterface = i + + choseninterface = defaultinterface + f = self.config("ui", "interface.%s" % feature, None) + if f in availableinterfaces: + choseninterface = f + + if i is not None and defaultinterface != i: + if f is not None: + self.warn(_("invalid value for ui.interface: %s\n") % + (i,)) + else: + self.warn(_("invalid value for ui.interface: %s (using %s)\n") % + (i, choseninterface)) + if f is not None and choseninterface != f: + self.warn(_("invalid value for ui.interface.%s: %s (using %s)\n") % + (feature, f, choseninterface)) + + return choseninterface + def interactive(self): '''is interactive input allowed? @@ -773,7 +845,7 @@ # call write() so output goes through subclassed implementation # e.g. 
color extension on Windows - self.write(prompt) + self.write(prompt, prompt=True) # instead of trying to emulate raw_input, swap (self.fin, # self.fout) with (sys.stdin, sys.stdout) @@ -902,12 +974,15 @@ self.write(*msg, **opts) def edit(self, text, user, extra=None, editform=None, pending=None): - extra_defaults = { 'prefix': 'editor' } + extra_defaults = { + 'prefix': 'editor', + 'suffix': '.txt', + } if extra is not None: extra_defaults.update(extra) extra = extra_defaults (fd, name) = tempfile.mkstemp(prefix='hg-' + extra['prefix'] + '-', - suffix=".txt", text=True) + suffix=extra['suffix'], text=True) try: f = os.fdopen(fd, "w") f.write(text) @@ -1006,9 +1081,8 @@ def progress(self, topic, pos, item="", unit="", total=None): '''show a progress message - With stock hg, this is simply a debug message that is hidden - by default, but with extensions or GUI tools it may be - visible. 'topic' is the current operation, 'item' is a + By default a textual progress bar will be displayed if an operation + takes too long. 'topic' is the current operation, 'item' is a non-numeric marker of the current position (i.e. the currently in-process file), 'pos' is the current numeric position (i.e. revision, bytes, etc.), unit is a corresponding unit label, @@ -1071,11 +1145,15 @@ stacklevel += 1 # get in develwarn if self.tracebackflag: util.debugstacktrace(msg, stacklevel, self.ferr, self.fout) + self.log('develwarn', '%s at:\n%s' % + (msg, ''.join(util.getstackframes(stacklevel)))) else: curframe = inspect.currentframe() calframe = inspect.getouterframes(curframe, 2) self.write_err('%s at: %s:%s (%s)\n' % ((msg,) + calframe[stacklevel][1:4])) + self.log('develwarn', '%s at: %s:%s (%s)\n', + msg, *calframe[stacklevel][1:4]) def deprecwarn(self, msg, version): """issue a deprecation warning
--- a/mercurial/url.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/url.py Sat Apr 16 18:06:48 2016 -0500 @@ -10,12 +10,9 @@ from __future__ import absolute_import import base64 -import cStringIO import httplib import os import socket -import urllib -import urllib2 from .i18n import _ from . import ( @@ -25,14 +22,18 @@ sslutil, util, ) +stringio = util.stringio -class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm): +urlerr = util.urlerr +urlreq = util.urlreq + +class passwordmgr(urlreq.httppasswordmgrwithdefaultrealm): def __init__(self, ui): - urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self) + urlreq.httppasswordmgrwithdefaultrealm.__init__(self) self.ui = ui def find_user_password(self, realm, authuri): - authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password( + authinfo = urlreq.httppasswordmgrwithdefaultrealm.find_user_password( self, realm, authuri) user, passwd = authinfo if user and passwd: @@ -72,10 +73,10 @@ self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) def find_stored_password(self, authuri): - return urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password( + return urlreq.httppasswordmgrwithdefaultrealm.find_user_password( self, None, authuri) -class proxyhandler(urllib2.ProxyHandler): +class proxyhandler(urlreq.proxyhandler): def __init__(self, ui): proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy') # XXX proxyauthinfo = None @@ -121,7 +122,7 @@ except OSError: pass - urllib2.ProxyHandler.__init__(self, proxies) + urlreq.proxyhandler.__init__(self, proxies) self.ui = ui def proxy_open(self, req, proxy, type_): @@ -134,7 +135,7 @@ if e.startswith('.') and host.endswith(e[1:]): return None - return urllib2.ProxyHandler.proxy_open(self, req, proxy, type_) + return urlreq.proxyhandler.proxy_open(self, req, proxy, type_) def _gen_sendfile(orgsend): def _sendfile(self, data): @@ -148,7 +149,7 @@ orgsend(self, data) return _sendfile -has_https = util.safehasattr(urllib2, 
'HTTPSHandler') +has_https = util.safehasattr(urlreq, 'httpshandler') if has_https: try: _create_connection = socket.create_connection @@ -273,7 +274,7 @@ res.length = None res.chunked = 0 res.will_close = 1 - res.msg = httplib.HTTPMessage(cStringIO.StringIO()) + res.msg = httplib.HTTPMessage(stringio()) return False res.msg = httplib.HTTPMessage(res.fp) @@ -357,10 +358,10 @@ **sslutil.sslkwargs(self.ui, host)) sslutil.validator(self.ui, host)(self.sock) - class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler): + class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler): def __init__(self, ui): keepalive.KeepAliveHandler.__init__(self) - urllib2.HTTPSHandler.__init__(self) + urlreq.httpshandler.__init__(self) self.ui = ui self.pwmgr = passwordmgr(self.ui) @@ -403,9 +404,9 @@ conn.ui = self.ui return conn -class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler): +class httpdigestauthhandler(urlreq.httpdigestauthhandler): def __init__(self, *args, **kwargs): - urllib2.HTTPDigestAuthHandler.__init__(self, *args, **kwargs) + urlreq.httpdigestauthhandler.__init__(self, *args, **kwargs) self.retried_req = None def reset_retry_count(self): @@ -419,13 +420,13 @@ if req is not self.retried_req: self.retried_req = req self.retried = 0 - return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed( + return urlreq.httpdigestauthhandler.http_error_auth_reqed( self, auth_header, host, req, headers) -class httpbasicauthhandler(urllib2.HTTPBasicAuthHandler): +class httpbasicauthhandler(urlreq.httpbasicauthhandler): def __init__(self, *args, **kwargs): self.auth = None - urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs) + urlreq.httpbasicauthhandler.__init__(self, *args, **kwargs) self.retried_req = None def http_request(self, request): @@ -451,7 +452,7 @@ if req is not self.retried_req: self.retried_req = req self.retried = 0 - return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed( + return urlreq.httpbasicauthhandler.http_error_auth_reqed( 
self, auth_header, host, req, headers) def retry_http_basic_auth(self, host, req, realm): @@ -494,7 +495,7 @@ handlers.extend((httpbasicauthhandler(passmgr), httpdigestauthhandler(passmgr))) handlers.extend([h(ui, passmgr) for h in handlerfuncs]) - opener = urllib2.build_opener(*handlers) + opener = urlreq.buildopener(*handlers) # 1.0 here is the _protocol_ version opener.addheaders = [('User-agent', 'mercurial/proto-1.0')] @@ -508,6 +509,6 @@ url_, authinfo = u.authinfo() else: path = util.normpath(os.path.abspath(url_)) - url_ = 'file://' + urllib.pathname2url(path) + url_ = 'file://' + urlreq.pathname2url(path) authinfo = None return opener(ui, authinfo).open(url_, data)
--- a/mercurial/util.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/util.py Sat Apr 16 18:06:48 2016 -0500 @@ -34,7 +34,6 @@ import textwrap import time import traceback -import urllib import zlib from . import ( @@ -43,8 +42,22 @@ i18n, osutil, parsers, + pycompat, ) +for attr in ( + 'empty', + 'queue', + 'urlerr', + # we do import urlreq, but we do it outside the loop + #'urlreq', + 'stringio', +): + globals()[attr] = getattr(pycompat, attr) + +# This line is to make pyflakes happy: +urlreq = pycompat.urlreq + if os.name == 'nt': from . import windows as platform else: @@ -65,6 +78,7 @@ findexe = platform.findexe gethgcmd = platform.gethgcmd getuser = platform.getuser +getpid = os.getpid groupmembers = platform.groupmembers groupname = platform.groupname hidewindow = platform.hidewindow @@ -442,7 +456,7 @@ def cachefunc(func): '''cache the result of function calls''' # XXX doesn't handle keywords args - if func.func_code.co_argcount == 0: + if func.__code__.co_argcount == 0: cache = [] def f(): if len(cache) == 0: @@ -450,7 +464,7 @@ return cache[0] return f cache = {} - if func.func_code.co_argcount == 1: + if func.__code__.co_argcount == 1: # we gain a small amount of time because # we don't need to pack/unpack the list def f(arg): @@ -692,7 +706,7 @@ '''cache most recent results of function calls''' cache = {} order = collections.deque() - if func.func_code.co_argcount == 1: + if func.__code__.co_argcount == 1: def f(arg): if arg not in cache: if len(cache) > 20: @@ -1574,12 +1588,20 @@ def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'): """represent a (unixtime, offset) tuple as a localized time. unixtime is seconds since the epoch, and offset is the time zone's - number of seconds away from UTC. if timezone is false, do not - append time zone to string.""" + number of seconds away from UTC. 
+ + >>> datestr((0, 0)) + 'Thu Jan 01 00:00:00 1970 +0000' + >>> datestr((42, 0)) + 'Thu Jan 01 00:00:42 1970 +0000' + >>> datestr((-42, 0)) + 'Wed Dec 31 23:59:18 1969 +0000' + >>> datestr((0x7fffffff, 0)) + 'Tue Jan 19 03:14:07 2038 +0000' + >>> datestr((-0x80000000, 0)) + 'Fri Dec 13 20:45:52 1901 +0000' + """ t, tz = date or makedate() - if t < 0: - t = 0 # time.gmtime(lt) fails on Windows for lt < -43200 - tz = 0 if "%1" in format or "%2" in format or "%z" in format: sign = (tz > 0) and "-" or "+" minutes = abs(tz) // 60 @@ -1587,12 +1609,16 @@ format = format.replace("%z", "%1%2") format = format.replace("%1", "%c%02d" % (sign, q)) format = format.replace("%2", "%02d" % r) - try: - t = time.gmtime(float(t) - tz) - except ValueError: - # time was out of range - t = time.gmtime(sys.maxint) - s = time.strftime(format, t) + d = t - tz + if d > 0x7fffffff: + d = 0x7fffffff + elif d < -0x80000000: + d = -0x80000000 + # Never use time.gmtime() and datetime.datetime.fromtimestamp() + # because they use the gmtime() system call which is buggy on Windows + # for negative values. + t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d) + s = t.strftime(format) return s def shortdate(date=None): @@ -1711,10 +1737,8 @@ # time zone offset. values must fit in signed 32 bits for # current 32-bit linux runtimes. 
timezones go from UTC-12 # to UTC+14 - if abs(when) > 0x7fffffff: + if when < -0x80000000 or when > 0x7fffffff: raise Abort(_('date exceeds 32 bits: %d') % when) - if when < 0: - raise Abort(_('negative date value: %d') % when) if offset < -50400 or offset > 43200: raise Abort(_('impossible time zone offset: %d') % offset) return when, offset @@ -2367,30 +2391,30 @@ if hasdriveletter(self.path): s += '/' if self.user: - s += urllib.quote(self.user, safe=self._safechars) + s += urlreq.quote(self.user, safe=self._safechars) if self.passwd: - s += ':' + urllib.quote(self.passwd, safe=self._safechars) + s += ':' + urlreq.quote(self.passwd, safe=self._safechars) if self.user or self.passwd: s += '@' if self.host: if not (self.host.startswith('[') and self.host.endswith(']')): - s += urllib.quote(self.host) + s += urlreq.quote(self.host) else: s += self.host if self.port: - s += ':' + urllib.quote(self.port) + s += ':' + urlreq.quote(self.port) if self.host: s += '/' if self.path: # TODO: similar to the query string, we should not unescape the # path when we store it, the path might contain '%2f' = '/', # which we should *not* escape. - s += urllib.quote(self.path, safe=self._safepchars) + s += urlreq.quote(self.path, safe=self._safepchars) if self.query: # we store the query in escaped form. s += '?' + self.query if self.fragment is not None: - s += '#' + urllib.quote(self.fragment, safe=self._safepchars) + s += '#' + urlreq.quote(self.fragment, safe=self._safepchars) return s def authinfo(self): @@ -2549,21 +2573,39 @@ results.append(hook(*args)) return results +def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s'): + '''Yields lines for a nicely formatted stacktrace. + Skips the 'skip' last entries. + Each file+linenumber is formatted according to fileline. + Each line is formatted according to line. 
+ If line is None, it yields: + length of longest filepath+line number, + filepath+linenumber, + function + + Not be used in production code but very convenient while developing. + ''' + entries = [(fileline % (fn, ln), func) + for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]] + if entries: + fnmax = max(len(entry[0]) for entry in entries) + for fnln, func in entries: + if line is None: + yield (fnmax, fnln, func) + else: + yield line % (fnmax, fnln, func) + def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout): '''Writes a message to f (stderr) with a nicely formatted stacktrace. Skips the 'skip' last entries. By default it will flush stdout first. - It can be used everywhere and do intentionally not require an ui object. + It can be used everywhere and intentionally does not require an ui object. Not be used in production code but very convenient while developing. ''' if otherf: otherf.flush() f.write('%s at:\n' % msg) - entries = [('%s:%s' % (fn, ln), func) - for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]] - if entries: - fnmax = max(len(entry[0]) for entry in entries) - for fnln, func in entries: - f.write(' %-*s in %s\n' % (fnmax, fnln, func)) + for line in getstackframes(skip + 1): + f.write(line) f.flush() class dirs(object):
--- a/mercurial/verify.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/verify.py Sat Apr 16 18:06:48 2016 -0500 @@ -147,9 +147,9 @@ mflinkrevs, filelinkrevs = self._verifychangelog() filenodes = self._verifymanifest(mflinkrevs) + del mflinkrevs - self._crosscheckfiles(mflinkrevs, filelinkrevs, filenodes) - del mflinkrevs + self._crosscheckfiles(filelinkrevs, filenodes) totalfiles, filerevisions = self._verifyfiles(filenodes, filelinkrevs) @@ -197,60 +197,111 @@ ui.progress(_('checking'), None) return mflinkrevs, filelinkrevs - def _verifymanifest(self, mflinkrevs): + def _verifymanifest(self, mflinkrevs, dir="", storefiles=None, + progress=None): repo = self.repo ui = self.ui - mf = self.repo.manifest + mf = self.repo.manifest.dirlog(dir) + + if not dir: + self.ui.status(_("checking manifests\n")) - ui.status(_("checking manifests\n")) filenodes = {} + subdirnodes = {} seen = {} + label = "manifest" + if dir: + label = dir + revlogfiles = mf.files() + storefiles.difference_update(revlogfiles) + if progress: # should be true since we're in a subdirectory + progress() if self.refersmf: # Do not check manifest if there are only changelog entries with # null manifests. 
- self.checklog(mf, "manifest", 0) + self.checklog(mf, label, 0) total = len(mf) for i in mf: - ui.progress(_('checking'), i, total=total, unit=_('manifests')) + if not dir: + ui.progress(_('checking'), i, total=total, unit=_('manifests')) n = mf.node(i) - lr = self.checkentry(mf, i, n, seen, mflinkrevs.get(n, []), - "manifest") + lr = self.checkentry(mf, i, n, seen, mflinkrevs.get(n, []), label) if n in mflinkrevs: del mflinkrevs[n] + elif dir: + self.err(lr, _("%s not in parent-directory manifest") % + short(n), label) else: - self.err(lr, _("%s not in changesets") % short(n), "manifest") + self.err(lr, _("%s not in changesets") % short(n), label) try: - for f, fn in mf.readdelta(n).iteritems(): + for f, fn, fl in mf.readshallowdelta(n).iterentries(): if not f: - self.err(lr, _("file without name in manifest")) - elif f != "/dev/null": # ignore this in very old repos - if _validpath(repo, f): - filenodes.setdefault( - _normpath(f), {}).setdefault(fn, lr) + self.err(lr, _("entry without name in manifest")) + elif f == "/dev/null": # ignore this in very old repos + continue + fullpath = dir + _normpath(f) + if not _validpath(repo, fullpath): + continue + if fl == 't': + subdirnodes.setdefault(fullpath + '/', {}).setdefault( + fn, []).append(lr) + else: + filenodes.setdefault(fullpath, {}).setdefault(fn, lr) except Exception as inst: - self.exc(lr, _("reading manifest delta %s") % short(n), inst) - ui.progress(_('checking'), None) + self.exc(lr, _("reading delta %s") % short(n), inst, label) + if not dir: + ui.progress(_('checking'), None) + + if self.havemf: + for c, m in sorted([(c, m) for m in mflinkrevs + for c in mflinkrevs[m]]): + if dir: + self.err(c, _("parent-directory manifest refers to unknown " + "revision %s") % short(m), label) + else: + self.err(c, _("changeset refers to unknown revision %s") % + short(m), label) + + if not dir and subdirnodes: + self.ui.status(_("checking directory manifests\n")) + storefiles = set() + subdirs = set() + revlogv1 = 
self.revlogv1 + for f, f2, size in repo.store.datafiles(): + if not f: + self.err(None, _("cannot decode filename '%s'") % f2) + elif (size > 0 or not revlogv1) and f.startswith('meta/'): + storefiles.add(_normpath(f)) + subdirs.add(os.path.dirname(f)) + subdircount = len(subdirs) + currentsubdir = [0] + def progress(): + currentsubdir[0] += 1 + ui.progress(_('checking'), currentsubdir[0], total=subdircount, + unit=_('manifests')) + + for subdir, linkrevs in subdirnodes.iteritems(): + subdirfilenodes = self._verifymanifest(linkrevs, subdir, storefiles, + progress) + for f, onefilenodes in subdirfilenodes.iteritems(): + filenodes.setdefault(f, {}).update(onefilenodes) + + if not dir and subdirnodes: + ui.progress(_('checking'), None) + for f in sorted(storefiles): + self.warn(_("warning: orphan revlog '%s'") % f) return filenodes - def _crosscheckfiles(self, mflinkrevs, filelinkrevs, filenodes): + def _crosscheckfiles(self, filelinkrevs, filenodes): repo = self.repo ui = self.ui ui.status(_("crosschecking files in changesets and manifests\n")) - total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes) + total = len(filelinkrevs) + len(filenodes) count = 0 if self.havemf: - for c, m in sorted([(c, m) for m in mflinkrevs - for c in mflinkrevs[m]]): - count += 1 - if m == nullid: - continue - ui.progress(_('crosschecking'), count, total=total) - self.err(c, _("changeset refers to unknown manifest %s") % - short(m)) - for f in sorted(filelinkrevs): count += 1 ui.progress(_('crosschecking'), count, total=total) @@ -284,14 +335,14 @@ for f, f2, size in repo.store.datafiles(): if not f: self.err(None, _("cannot decode filename '%s'") % f2) - elif size > 0 or not revlogv1: + elif (size > 0 or not revlogv1) and f.startswith('data/'): storefiles.add(_normpath(f)) files = sorted(set(filenodes) | set(filelinkrevs)) total = len(files) revisions = 0 for i, f in enumerate(files): - ui.progress(_('checking'), i, item=f, total=total) + ui.progress(_('checking'), i, item=f, 
total=total, unit=_('files')) try: linkrevs = filelinkrevs[f] except KeyError: @@ -374,11 +425,11 @@ if f in filenodes: fns = [(lr, n) for n, lr in filenodes[f].iteritems()] for lr, node in sorted(fns): - self.err(lr, _("%s in manifests not found") % short(node), - f) + self.err(lr, _("manifest refers to unknown revision %s") % + short(node), f) ui.progress(_('checking'), None) - for f in storefiles: + for f in sorted(storefiles): self.warn(_("warning: orphan revlog '%s'") % f) return len(files), revisions
--- a/mercurial/wireproto.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/wireproto.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,10 +7,10 @@ from __future__ import absolute_import +import itertools import os import sys import tempfile -import urllib from .i18n import _ from .node import ( @@ -30,6 +30,9 @@ util, ) +urlerr = util.urlerr +urlreq = util.urlreq + bundle2required = _( 'incompatible Mercurial client; bundle2 required\n' '(see https://www.mercurial-scm.org/wiki/IncompatibleClient)\n') @@ -114,6 +117,41 @@ encresref.set(encres) resref.set(batchable.next()) +class remoteiterbatcher(peer.iterbatcher): + def __init__(self, remote): + super(remoteiterbatcher, self).__init__() + self._remote = remote + + def __getattr__(self, name): + if not getattr(self._remote, name, False): + raise AttributeError( + 'Attempted to iterbatch non-batchable call to %r' % name) + return super(remoteiterbatcher, self).__getattr__(name) + + def submit(self): + """Break the batch request into many patch calls and pipeline them. + + This is mostly valuable over http where request sizes can be + limited, but can be used in other places as well. + """ + req, rsp = [], [] + for name, args, opts, resref in self.calls: + mtd = getattr(self._remote, name) + batchable = mtd.batchable(mtd.im_self, *args, **opts) + encargsorres, encresref = batchable.next() + assert encresref + req.append((name, encargsorres)) + rsp.append((batchable, encresref)) + if req: + self._resultiter = self._remote._submitbatch(req) + self._rsp = rsp + + def results(self): + for (batchable, encresref), encres in itertools.izip( + self._rsp, self._resultiter): + encresref.set(encres) + yield batchable.next() + # Forward a couple of names from peer to make wireproto interactions # slightly more sensible. batchable = peer.batchable @@ -183,16 +221,34 @@ else: return peer.localbatch(self) def _submitbatch(self, req): + """run batch request <req> on the server + + Returns an iterator of the raw responses from the server. 
+ """ cmds = [] for op, argsdict in req: args = ','.join('%s=%s' % (escapearg(k), escapearg(v)) for k, v in argsdict.iteritems()) cmds.append('%s %s' % (op, args)) - rsp = self._call("batch", cmds=';'.join(cmds)) - return [unescapearg(r) for r in rsp.split(';')] + rsp = self._callstream("batch", cmds=';'.join(cmds)) + # TODO this response parsing is probably suboptimal for large + # batches with large responses. + work = rsp.read(1024) + chunk = work + while chunk: + while ';' in work: + one, work = work.split(';', 1) + yield unescapearg(one) + chunk = rsp.read(1024) + work += chunk + yield unescapearg(work) + def _submitone(self, op, args): return self._call(op, **args) + def iterbatch(self): + return remoteiterbatcher(self) + @batchable def lookup(self, key): self.requirecap('lookup', _('look up remote revision')) @@ -233,7 +289,7 @@ branchmap = {} for branchpart in d.splitlines(): branchname, branchheads = branchpart.split(' ', 1) - branchname = encoding.tolocal(urllib.unquote(branchname)) + branchname = encoding.tolocal(urlreq.unquote(branchname)) branchheads = decodelist(branchheads) branchmap[branchname] = branchheads yield branchmap @@ -396,9 +452,12 @@ def _callstream(self, cmd, **args): """execute <cmd> on the server - The command is expected to return a stream. + The command is expected to return a stream. Note that if the + command doesn't return a stream, _callstream behaves + differently for ssh and http peers. - returns the server reply as a file like object.""" + returns the server reply as a file like object. 
+ """ raise NotImplementedError() def _callcompressable(self, cmd, **args): @@ -575,7 +634,7 @@ branchmap = repo.branchmap() heads = [] for branch, nodes in branchmap.iteritems(): - branchname = urllib.quote(encoding.fromlocal(branch)) + branchname = urlreq.quote(encoding.fromlocal(branch)) branchnodes = encodelist(nodes) heads.append('%s %s' % (branchname, branchnodes)) return '\n'.join(heads) @@ -627,10 +686,12 @@ caps.append('streamreqs=%s' % ','.join(sorted(requiredformats))) if repo.ui.configbool('experimental', 'bundle2-advertise', True): capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo)) - caps.append('bundle2=' + urllib.quote(capsblob)) - caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority)) + caps.append('bundle2=' + urlreq.quote(capsblob)) + caps.append('unbundle=%s' % ','.join(bundle2.bundlepriority)) caps.append( 'httpheader=%d' % repo.ui.configint('server', 'maxhttpheaderlen', 1024)) + if repo.ui.configbool('experimental', 'httppostargs', False): + caps.append('httppostargs') return caps # If you are writing an extension and consider wrapping this function. Wrap
--- a/mercurial/worker.py Tue Mar 29 11:54:46 2016 -0500 +++ b/mercurial/worker.py Sat Apr 16 18:06:48 2016 -0500 @@ -152,14 +152,33 @@ _exitstatus = _posixexitstatus def partition(lst, nslices): - '''partition a list into N slices of equal size''' - n = len(lst) - chunk, slop = n / nslices, n % nslices - end = 0 - for i in xrange(nslices): - start = end - end = start + chunk - if slop: - end += 1 - slop -= 1 - yield lst[start:end] + '''partition a list into N slices of roughly equal size + + The current strategy takes every Nth element from the input. If + we ever write workers that need to preserve grouping in input + we should consider allowing callers to specify a partition strategy. + + mpm is not a fan of this partitioning strategy when files are involved. + In his words: + + Single-threaded Mercurial makes a point of creating and visiting + files in a fixed order (alphabetical). When creating files in order, + a typical filesystem is likely to allocate them on nearby regions on + disk. Thus, when revisiting in the same order, locality is maximized + and various forms of OS and disk-level caching and read-ahead get a + chance to work. + + This effect can be quite significant on spinning disks. I discovered it + circa Mercurial v0.4 when revlogs were named by hashes of filenames. + Tarring a repo and copying it to another disk effectively randomized + the revlog ordering on disk by sorting the revlogs by hash and suddenly + performance of my kernel checkout benchmark dropped by ~10x because the + "working set" of sectors visited no longer fit in the drive's cache and + the workload switched from streaming to random I/O. + + What we should really be doing is have workers read filenames from a + ordered queue. This preserves locality and also keeps any worker from + getting more than one file out of balance. + ''' + for i in range(nslices): + yield lst[i::nslices]
--- a/setup.py Tue Mar 29 11:54:46 2016 -0500 +++ b/setup.py Sat Apr 16 18:06:48 2016 -0500 @@ -84,19 +84,6 @@ from distutils.sysconfig import get_python_inc, get_config_var from distutils.version import StrictVersion -convert2to3 = '--c2to3' in sys.argv -if convert2to3: - try: - from distutils.command.build_py import build_py_2to3 as build_py - from lib2to3.refactor import get_fixers_from_package as getfixers - except ImportError: - if sys.version_info[0] < 3: - raise SystemExit("--c2to3 is only compatible with python3.") - raise - sys.path.append('contrib') -elif sys.version_info[0] >= 3: - raise SystemExit("setup.py with python3 needs --c2to3 (experimental)") - scripts = ['hg'] if os.name == 'nt': # We remove hg.bat if we are able to build hg.exe. @@ -220,30 +207,27 @@ version = kw.get('node', '')[:12] if version: - f = open("mercurial/__version__.py", "w") - f.write('# this file is autogenerated by setup.py\n') - f.write('version = "%s"\n' % version) - f.close() - + with open("mercurial/__version__.py", "w") as f: + f.write('# this file is autogenerated by setup.py\n') + f.write('version = "%s"\n' % version) try: + oldpolicy = os.environ.get('HGMODULEPOLICY', None) + os.environ['HGMODULEPOLICY'] = 'py' from mercurial import __version__ version = __version__.version except ImportError: version = 'unknown' +finally: + if oldpolicy is None: + del os.environ['HGMODULEPOLICY'] + else: + os.environ['HGMODULEPOLICY'] = oldpolicy class hgbuild(build): # Insert hgbuildmo first so that files in mercurial/locale/ are found # when build_py is run next. - sub_commands = [('build_mo', None), - - # We also need build_ext before build_py. Otherwise, when 2to3 is - # called (in build_py), it will not find osutil & friends, - # thinking that those modules are global and, consequently, making - # a mess, now that all module imports are global. 
- - ('build_ext', build.has_ext_modules), - ] + build.sub_commands + sub_commands = [('build_mo', None)] + build.sub_commands class hgbuildmo(build): @@ -282,8 +266,6 @@ global_options = Distribution.global_options + \ [('pure', None, "use pure (slow) Python " "code instead of C extensions"), - ('c2to3', None, "(experimental!) convert " - "code with 2to3"), ] def has_ext_modules(self): @@ -328,10 +310,6 @@ return build_scripts.run(self) class hgbuildpy(build_py): - if convert2to3: - fixer_names = sorted(set(getfixers("lib2to3.fixes") + - getfixers("hgfixes"))) - def finalize_options(self): build_py.finalize_options(self) @@ -343,21 +321,16 @@ raise SystemExit('Python headers are required to build ' 'Mercurial but weren\'t found in %s' % h) - def copy_file(self, *args, **kwargs): - dst, copied = build_py.copy_file(self, *args, **kwargs) + def run(self): + if self.distribution.pure: + modulepolicy = 'py' + else: + modulepolicy = 'c' + with open("mercurial/__modulepolicy__.py", "w") as f: + f.write('# this file is autogenerated by setup.py\n') + f.write('modulepolicy = "%s"\n' % modulepolicy) - if copied and dst.endswith('__init__.py'): - if self.distribution.pure: - modulepolicy = 'py' - else: - modulepolicy = 'c' - content = open(dst, 'rb').read() - content = content.replace(b'@MODULELOADPOLICY@', - modulepolicy.encode(libdir_escape)) - with open(dst, 'wb') as fh: - fh.write(content) - - return dst, copied + build_py.run(self) class buildhgextindex(Command): description = 'generate prebuilt index of hgext (for frozen package)' @@ -372,9 +345,8 @@ def run(self): if os.path.exists(self._indexfilename): - f = open(self._indexfilename, 'w') - f.write('# empty\n') - f.close() + with open(self._indexfilename, 'w') as f: + f.write('# empty\n') # here no extension enabled, disabled() lists up everything code = ('import pprint; from mercurial import extensions; ' @@ -383,11 +355,10 @@ if err: raise DistutilsExecError(err) - f = open(self._indexfilename, 'w') - f.write('# 
this file is autogenerated by setup.py\n') - f.write('docs = ') - f.write(out) - f.close() + with open(self._indexfilename, 'w') as f: + f.write('# this file is autogenerated by setup.py\n') + f.write('docs = ') + f.write(out) class buildhgexe(build_ext): description = 'compile hg.exe from mercurial/exewrapper.c' @@ -400,10 +371,9 @@ self.compiler.dll_libraries = [] # no -lmsrvc90 hv = sys.hexversion pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xff) - f = open('mercurial/hgpythonlib.h', 'wb') - f.write('/* this file is autogenerated by setup.py */\n') - f.write('#define HGPYTHONLIB "%s"\n' % pythonlib) - f.close() + with open('mercurial/hgpythonlib.h', 'wb') as f: + f.write('/* this file is autogenerated by setup.py */\n') + f.write('#define HGPYTHONLIB "%s"\n' % pythonlib) objects = self.compiler.compile(['mercurial/exewrapper.c'], output_dir=self.build_temp) dir = os.path.dirname(self.get_ext_fullpath('dummy')) @@ -503,9 +473,8 @@ libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common):] for outfile in self.outfiles: - fp = open(outfile, 'rb') - data = fp.read() - fp.close() + with open(outfile, 'rb') as fp: + data = fp.read() # skip binary files if b'\0' in data: @@ -520,9 +489,8 @@ continue data = data.replace(b'@LIBDIR@', libdir.encode(libdir_escape)) - fp = open(outfile, 'wb') - fp.write(data) - fp.close() + with open(outfile, 'wb') as fp: + fp.write(data) cmdclass = {'build': hgbuild, 'build_mo': hgbuildmo, @@ -537,8 +505,9 @@ packages = ['mercurial', 'mercurial.hgweb', 'mercurial.httpclient', 'mercurial.pure', - 'hgext', 'hgext.convert', 'hgext.highlight', 'hgext.zeroconf', - 'hgext.largefiles'] + 'hgext', 'hgext.convert', 'hgext.fsmonitor', + 'hgext.fsmonitor.pywatchman', 'hgext.highlight', + 'hgext.largefiles', 'hgext.zeroconf', 'hgext3rd'] common_depends = ['mercurial/util.h'] @@ -564,6 +533,8 @@ Extension('mercurial.osutil', ['mercurial/osutil.c'], extra_link_args=osutil_ldflags, depends=common_depends), + 
Extension('hgext.fsmonitor.pywatchman.bser', + ['hgext/fsmonitor/pywatchman/bser.c']), ] try:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/blackbox-readonly-dispatch.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,36 @@ +from __future__ import absolute_import, print_function +import os +from mercurial import ( + dispatch, +) + +def testdispatch(cmd): + """Simple wrapper around dispatch.dispatch() + + Prints command and result value, but does not handle quoting. + """ + print("running: %s" % (cmd,)) + req = dispatch.request(cmd.split()) + result = dispatch.dispatch(req) + print("result: %r" % (result,)) + +# create file 'foo', add and commit +f = open('foo', 'wb') +f.write('foo\n') +f.close() +testdispatch("add foo") +testdispatch("commit -m commit1 -d 2000-01-01 foo") + +# append to file 'foo' and commit +f = open('foo', 'ab') +f.write('bar\n') +f.close() +# remove blackbox.log directory (proxy for readonly log file) +os.rmdir(".hg/blackbox.log") +# replace it with the real blackbox.log file +os.rename(".hg/blackbox.log-", ".hg/blackbox.log") +testdispatch("commit -m commit2 -d 2000-01-02 foo") + +# check 88803a69b24 (fancyopts modified command table) +testdispatch("log -r 0") +testdispatch("log -r tip")
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/blacklists/fsmonitor Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,52 @@ +# Blacklist for a full testsuite run with fsmonitor enabled. +# Use with +# run-tests --blacklist=blacklists/fsmonitor \ +# --extra-config="extensions.fsmonitor=" +# The following tests all fail because they either use extensions that conflict +# with fsmonitor, use subrepositories, or don't anticipate the extra file in +# the .hg directory that fsmonitor adds. +test-basic.t +test-blackbox.t +test-check-commit.t +test-commandserver.t +test-copy.t +test-debugextensions.t +test-eol-add.t +test-eol-clone.t +test-eol-hook.t +test-eol-patch.t +test-eol-tag.t +test-eol-update.t +test-eol.t +test-eolfilename.t +test-extension.t +test-fncache.t +test-hardlinks.t +test-help.t +test-inherit-mode.t +test-issue3084.t +test-largefiles-cache.t +test-largefiles-misc.t +test-largefiles-small-disk.t +test-largefiles-update.t +test-largefiles-wireproto.t +test-largefiles.t +test-lfconvert.t +test-merge-tools.t +test-nested-repo.t +test-permissions.t +test-push-warn.t +test-subrepo-deep-nested-change.t +test-subrepo-recursion.t +test-subrepo.t +test-tags.t + +# The following tests remain enabled; they fail *too*, but only because they +# occasionally use blacklisted extensions and don't anticipate the warning +# generated. +#test-log.t +#test-hook.t +#test-rename.t +#test-histedit-fold.t +#test-fileset-generated.t +#test-init.t
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/check-gendoc Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,11 @@ +#!/bin/sh +HGENCODING=UTF-8 +export HGENCODING + +echo ".. -*- coding: utf-8 -*-" > gendoc.txt +echo "" >> gendoc.txt +LANGUAGE=$1 python "$TESTDIR/../doc/gendoc.py" >> gendoc.txt 2> /dev/null || exit + +echo "checking for parse errors" +python "$TESTDIR/../doc/docchecker" gendoc.txt +python "$TESTDIR/../doc/runrst" html gendoc.txt /dev/null
--- a/tests/dumbhttp.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/dumbhttp.py Sat Apr 16 18:06:48 2016 -0500 @@ -6,10 +6,10 @@ Small and dumb HTTP server for use in tests. """ +import BaseHTTPServer +import SimpleHTTPServer import optparse -import BaseHTTPServer import signal -import SimpleHTTPServer import sys from mercurial import ( @@ -38,7 +38,7 @@ parser.add_option('-f', '--foreground', dest='foreground', action='store_true', help='do not start the HTTP server in the background') - parser.add_option('--daemon-pipefds') + parser.add_option('--daemon-postexec', action='append') (options, args) = parser.parse_args() @@ -49,7 +49,7 @@ opts = {'pid_file': options.pid, 'daemon': not options.foreground, - 'daemon_pipefds': options.daemon_pipefds} + 'daemon_postexec': options.daemon_postexec} service = simplehttpservice(options.host, options.port) cmdutil.service(opts, initfn=service.init, runfn=service.run, runargs=[sys.executable, __file__] + sys.argv[1:])
--- a/tests/f Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/f Sat Apr 16 18:06:48 2016 -0500 @@ -64,7 +64,7 @@ if opts.size and not isdir: facts.append('size=%s' % stat.st_size) if opts.mode and not islink: - facts.append('mode=%o' % (stat.st_mode & 0777)) + facts.append('mode=%o' % (stat.st_mode & 0o777)) if opts.links: facts.append('links=%s' % stat.st_nlink) if opts.newer: @@ -106,7 +106,7 @@ chunk = chunk[opts.bytes:] if opts.hexdump: for i in range(0, len(chunk), 16): - s = chunk[i:i+16] + s = chunk[i:i + 16] outfile.write('%04x: %-47s |%s|\n' % (i, ' '.join('%02x' % ord(c) for c in s), re.sub('[^ -~]', '.', s)))
--- a/tests/failfilemerge.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/failfilemerge.py Sat Apr 16 18:06:48 2016 -0500 @@ -3,9 +3,9 @@ from __future__ import absolute_import from mercurial import ( - filemerge, + error, extensions, - error, + filemerge, ) def failfilemerge(filemergefn,
--- a/tests/filterpyflakes.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/filterpyflakes.py Sat Apr 16 18:06:48 2016 -0500 @@ -2,7 +2,7 @@ # Filter output by pyflakes to control which warnings we check -from __future__ import absolute_import +from __future__ import absolute_import, print_function import re import sys @@ -54,8 +54,8 @@ for msgtype, line in sorted(lines, key=makekey): sys.stdout.write(line) -print +print() # self test of "undefined name" detection for other than 'memoryview' if False: - print undefinedname + print(undefinedname)
--- a/tests/generate-working-copy-states.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/generate-working-copy-states.py Sat Apr 16 18:06:48 2016 -0500 @@ -29,7 +29,7 @@ # $ hg forget *_*_*-untracked # $ rm *_*_missing-* -from __future__ import absolute_import +from __future__ import absolute_import, print_function import os import sys @@ -66,7 +66,7 @@ content = [] for filename, states in combinations: if target == 'filelist': - print filename + print(filename) elif target == 'state': if depth == 'wc': # Make sure there is content so the file gets written and can be @@ -75,7 +75,7 @@ else: content.append((filename, states[int(depth) - 1])) else: - print >> sys.stderr, "unknown target:", target + print("unknown target:", target, file=sys.stderr) sys.exit(1) # write actual content
--- a/tests/get-with-headers.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/get-with-headers.py Sat Apr 16 18:06:48 2016 -0500 @@ -3,7 +3,7 @@ """This does HTTP GET requests given a host:port and path and returns a subset of the headers plus the body of the result.""" -from __future__ import absolute_import +from __future__ import absolute_import, print_function import httplib import json @@ -41,15 +41,15 @@ conn = httplib.HTTPConnection(host) conn.request("GET", '/' + path, None, headers) response = conn.getresponse() - print response.status, response.reason + print(response.status, response.reason) if show[:1] == ['-']: show = sorted(h for h, v in response.getheaders() if h.lower() not in show) for h in [h.lower() for h in show]: if response.getheader(h, None) is not None: - print "%s: %s" % (h, response.getheader(h)) + print("%s: %s" % (h, response.getheader(h))) if not headeronly: - print + print() data = response.read() # Pretty print JSON. This also has the beneficial side-effect @@ -60,7 +60,7 @@ data = json.loads(data) lines = json.dumps(data, sort_keys=True, indent=2).splitlines() for line in lines: - print line.rstrip() + print(line.rstrip()) else: sys.stdout.write(data)
--- a/tests/hghave Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/hghave Sat Apr 16 18:06:48 2016 -0500 @@ -3,6 +3,9 @@ if all features are there, non-zero otherwise. If a feature name is prefixed with "no-", the absence of feature is tested. """ + +from __future__ import print_function + import optparse import os, sys import hghave @@ -12,7 +15,7 @@ def list_features(): for name, feature in sorted(checks.iteritems()): desc = feature[1] - print name + ':', desc + print(name + ':', desc) def test_features(): failed = 0 @@ -20,8 +23,8 @@ check, _ = feature try: check() - except Exception, e: - print "feature %s failed: %s" % (name, e) + except Exception as e: + print("feature %s failed: %s" % (name, e)) failed += 1 return failed @@ -45,7 +48,7 @@ sys.path.insert(0, path) try: import hghaveaddon - except BaseException, inst: + except BaseException as inst: sys.stderr.write('failed to import hghaveaddon.py from %r: %s\n' % (path, inst)) sys.exit(2)
--- a/tests/hghave.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/hghave.py Sat Apr 16 18:06:48 2016 -0500 @@ -17,11 +17,28 @@ } def check(name, desc): + """Registers a check function for a feature.""" def decorator(func): checks[name] = (func, desc) return func return decorator +def checkvers(name, desc, vers): + """Registers a check function for each of a series of versions. + + vers can be a list or an iterator""" + def decorator(func): + def funcv(v): + def f(): + return func(v) + return f + for v in vers: + v = str(v) + f = funcv(v) + checks['%s%s' % (name, v.replace('.', ''))] = (f, desc % v) + return func + return decorator + def checkfeatures(features): result = { 'error': [], @@ -97,25 +114,35 @@ except ImportError: return False -@check("bzr114", "Canonical's Bazaar client >= 1.14") -def has_bzr114(): +@checkvers("bzr", "Canonical's Bazaar client >= %s", (1.14,)) +def has_bzr_range(v): + major, minor = v.split('.')[0:2] try: import bzrlib return (bzrlib.__doc__ is not None - and bzrlib.version_info[:2] >= (1, 14)) + and bzrlib.version_info[:2] >= (int(major), int(minor))) except ImportError: return False +@check("chg", "running with chg") +def has_chg(): + return 'CHGHG' in os.environ + @check("cvs", "cvs client/server") def has_cvs(): re = r'Concurrent Versions System.*?server' return matchoutput('cvs --version 2>&1', re) and not has_msys() -@check("cvs112", "cvs client/server >= 1.12") +@check("cvs112", "cvs client/server 1.12.* (not cvsnt)") def has_cvs112(): re = r'Concurrent Versions System \(CVS\) 1.12.*?server' return matchoutput('cvs --version 2>&1', re) and not has_msys() +@check("cvsnt", "cvsnt client/server") +def has_cvsnt(): + re = r'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)' + return matchoutput('cvsnt --version 2>&1', re) + @check("darcs", "darcs client") def has_darcs(): return matchoutput('darcs --version', r'2\.[2-9]', True) @@ -208,6 +235,36 @@ except ImportError: return False +def gethgversion(): + m = 
matchoutput('hg --version --quiet 2>&1', r'(\d+)\.(\d+)') + if not m: + return (0, 0) + return (int(m.group(1)), int(m.group(2))) + +@checkvers("hg", "Mercurial >= %s", + list([(1.0 * x) / 10 for x in range(9, 40)])) +def has_hg_range(v): + major, minor = v.split('.')[0:2] + return gethgversion() >= (int(major), int(minor)) + +@check("hg08", "Mercurial >= 0.8") +def has_hg08(): + if checks["hg09"][0](): + return True + return matchoutput('hg help annotate 2>&1', '--date') + +@check("hg07", "Mercurial >= 0.7") +def has_hg07(): + if checks["hg08"][0](): + return True + return matchoutput('hg --version --quiet 2>&1', 'Mercurial Distributed SCM') + +@check("hg06", "Mercurial >= 0.6") +def has_hg06(): + if checks["hg07"][0](): + return True + return matchoutput('hg --version --quiet 2>&1', 'Mercurial version') + @check("gettext", "GNU Gettext (msgfmt)") def has_gettext(): return matchoutput('msgfmt --version', 'GNU gettext-tools') @@ -219,8 +276,8 @@ @check("docutils", "Docutils text processing library") def has_docutils(): try: - from docutils.core import publish_cmdline - publish_cmdline # silence unused import + import docutils.core + docutils.core.publish_cmdline # silence unused import return True except ImportError: return False @@ -231,13 +288,10 @@ return (0, 0) return (int(m.group(1)), int(m.group(2))) -@check("svn15", "subversion client and admin tools >= 1.5") -def has_svn15(): - return getsvnversion() >= (1, 5) - -@check("svn13", "subversion client and admin tools >= 1.3") -def has_svn13(): - return getsvnversion() >= (1, 3) +@checkvers("svn", "subversion client and admin tools >= %s", (1.3, 1.5)) +def has_svn_range(v): + major, minor = v.split('.')[0:2] + return getsvnversion() >= (int(major), int(minor)) @check("svn", "subversion client and admin tools") def has_svn(): @@ -335,36 +389,17 @@ except ImportError: return False -@check("json", "some json module available") -def has_json(): - try: - import json - json.dumps - return True - except ImportError: - 
try: - import simplejson as json - json.dumps - return True - except ImportError: - pass - return False - @check("outer-repo", "outer repo") def has_outer_repo(): # failing for other reasons than 'no repo' imply that there is a repo return not matchoutput('hg root 2>&1', r'abort: no repository found', True) -@check("ssl", ("(python >= 2.6 ssl module and python OpenSSL) " - "OR python >= 2.7.9 ssl")) +@check("ssl", "ssl module available") def has_ssl(): try: import ssl - if getattr(ssl, 'create_default_context', False): - return True - import OpenSSL - OpenSSL.SSL.Context + ssl.CERT_NONE return True except ImportError: return False @@ -423,7 +458,7 @@ @check("docker", "docker support") def has_docker(): - pat = r'A self-sufficient runtime for linux containers\.' + pat = r'A self-sufficient runtime for' if matchoutput('docker --help', pat): if 'linux' not in sys.platform: # TODO: in theory we should be able to test docker-based @@ -458,6 +493,10 @@ def has_py3k(): return 3 == sys.version_info[0] +@check("py3exe", "a Python 3.x interpreter is available") +def has_python3exe(): + return 'PYTHON3' in os.environ + @check("pure", "running with pure Python code") def has_pure(): return any([ @@ -469,7 +508,7 @@ def has_slow(): return os.environ.get('HGTEST_SLOW') == 'slow' -@check("hypothesis", "is Hypothesis installed") +@check("hypothesis", "Hypothesis automated test generation") def has_hypothesis(): try: import hypothesis
--- a/tests/hypothesishelpers.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/hypothesishelpers.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,6 +4,7 @@ # # For details see http://hypothesis.readthedocs.org +from __future__ import absolute_import, print_function import os import sys import traceback @@ -33,7 +34,8 @@ # Fixed in version 1.13 (released 2015 october 29th) f.__module__ = '__anon__' try: - given(*args, settings=settings(max_examples=2000), **kwargs)(f)() + with settings(max_examples=2000): + given(*args, **kwargs)(f)() except Exception: traceback.print_exc(file=sys.stdout) sys.exit(1)
--- a/tests/killdaemons.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/killdaemons.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,6 +1,11 @@ #!/usr/bin/env python -import os, sys, time, errno, signal +from __future__ import absolute_import +import errno +import os +import signal +import sys +import time if os.name =='nt': import ctypes
--- a/tests/mockblackbox.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/mockblackbox.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,11 +1,17 @@ -from mercurial import util +from __future__ import absolute_import +from mercurial import ( + util, +) def makedate(): return 0, 0 def getuser(): return 'bob' +def getpid(): + return 5000 # mock the date and user apis so the output is always the same def uisetup(ui): util.makedate = makedate util.getuser = getuser + util.getpid = getpid
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/pdiff Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,58 @@ +#!/bin/sh + +# Script to get stable diff output on any platform. +# +# Output of this script is almost equivalent to GNU diff with "-Nru". +# +# Use this script as "hg pdiff" via extdiff extension with preparation +# below in test scripts: +# +# $ cat >> $HGRCPATH <<EOF +# > [extdiff] +# > pdiff = sh "$RUNTESTDIR/pdiff" +# > EOF + +filediff(){ + # USAGE: filediff file1 file2 [header] + + # compare with /dev/null if file doesn't exist (as "-N" option) + file1="$1" + if test ! -f "$file1"; then + file1=/dev/null + fi + file2="$2" + if test ! -f "$file2"; then + file2=/dev/null + fi + + if cmp -s "$file1" "$file2" 2> /dev/null; then + # Return immediately, because comparison isn't needed. This + # also avoids redundant message of diff like "No differences + # encountered" (on Solaris) + return + fi + + if test -n "$3"; then + # show header only in recursive case + echo "$3" + fi + + # replace "/dev/null" by corresponded filename (as "-N" option) + diff -u "$file1" "$file2" | + sed "s@^--- /dev/null\(.*\)\$@--- $1\1@" | + sed "s@^\+\+\+ /dev/null\(.*\)\$@+++ $2\1@" +} + +if test -d "$1" -o -d "$2"; then + # ensure comparison in dictionary order + ( + if test -d "$1"; then (cd "$1" && find . -type f); fi + if test -d "$2"; then (cd "$2" && find . -type f); fi + ) | + sed 's@^\./@@g' | sort | uniq | + while read file; do + filediff "$1/$file" "$2/$file" "diff -Nru $1/$file $2/$file" + done +else + filediff "$1" "$2" +fi
--- a/tests/printenv.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/printenv.py Sat Apr 16 18:06:48 2016 -0500 @@ -12,6 +12,7 @@ # - [output] is the name of the output file (default: use sys.stdout) # the file will be opened in append mode. # +from __future__ import absolute_import import os import sys
--- a/tests/revlog-formatv0.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/revlog-formatv0.py Sat Apr 16 18:06:48 2016 -0500 @@ -17,7 +17,9 @@ empty file """ -import os, sys +from __future__ import absolute_import +import os +import sys files = [ ('formatv0/.hg/00changelog.i',
--- a/tests/run-tests.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/run-tests.py Sat Apr 16 18:06:48 2016 -0500 @@ -48,6 +48,7 @@ from distutils import version import difflib import errno +import json import optparse import os import shutil @@ -69,15 +70,6 @@ import unittest osenvironb = getattr(os, 'environb', os.environ) - -try: - import json -except ImportError: - try: - import simplejson as json - except ImportError: - json = None - processlock = threading.Lock() if sys.version_info > (3, 5, 0): @@ -159,6 +151,9 @@ 'shell': ('HGTEST_SHELL', 'sh'), } +def canonpath(path): + return os.path.realpath(os.path.expanduser(path)) + def parselistfiles(files, listtype, warn=True): entries = dict() for filename in files: @@ -257,8 +252,17 @@ metavar="HG", help="test using specified hg script rather than a " "temporary installation") + parser.add_option("--chg", action="store_true", + help="install and use chg wrapper in place of hg") + parser.add_option("--with-chg", metavar="CHG", + help="use specified chg wrapper in place of hg") parser.add_option("-3", "--py3k-warnings", action="store_true", help="enable Py3k warnings on Python 2.6+") + # This option should be deleted once test-check-py3-compat.t and other + # Python 3 tests run with Python 3. 
+ parser.add_option("--with-python3", metavar="PYTHON3", + help="Python 3 interpreter (if running under Python 2)" + " (TEMPORARY)") parser.add_option('--extra-config-opt', action="append", help='set the given config opt in the test hgrc') parser.add_option('--random', action="store_true", @@ -269,6 +273,10 @@ help='allow extremely slow tests') parser.add_option('--showchannels', action='store_true', help='show scheduling channels') + parser.add_option('--known-good-rev', type="string", + metavar="known_good_rev", + help=("Automatically bisect any failures using this " + "revision as a known-good revision.")) for option, (envvar, default) in defaults.items(): defaults[option] = type(default)(os.environ.get(envvar, default)) @@ -285,20 +293,33 @@ options.pure = True if options.with_hg: - options.with_hg = os.path.expanduser(options.with_hg) + options.with_hg = canonpath(_bytespath(options.with_hg)) if not (os.path.isfile(options.with_hg) and os.access(options.with_hg, os.X_OK)): parser.error('--with-hg must specify an executable hg script') - if not os.path.basename(options.with_hg) == 'hg': + if not os.path.basename(options.with_hg) == b'hg': sys.stderr.write('warning: --with-hg should specify an hg script\n') if options.local: - testdir = os.path.dirname(_bytespath(os.path.realpath(sys.argv[0]))) + testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0]))) hgbin = os.path.join(os.path.dirname(testdir), b'hg') if os.name != 'nt' and not os.access(hgbin, os.X_OK): parser.error('--local specified, but %r not found or not executable' % hgbin) options.with_hg = hgbin + if (options.chg or options.with_chg) and os.name == 'nt': + parser.error('chg does not work on %s' % os.name) + if options.with_chg: + options.chg = False # no installation to temporary location + options.with_chg = canonpath(_bytespath(options.with_chg)) + if not (os.path.isfile(options.with_chg) and + os.access(options.with_chg, os.X_OK)): + parser.error('--with-chg must specify a chg executable') + 
if options.chg and options.with_hg: + # chg shares installation location with hg + parser.error('--chg does not work when --with-hg is specified ' + '(use --with-chg instead)') + options.anycoverage = options.cover or options.annotate or options.htmlcov if options.anycoverage: try: @@ -323,7 +344,7 @@ verbose = '' if options.tmpdir: - options.tmpdir = os.path.expanduser(options.tmpdir) + options.tmpdir = canonpath(options.tmpdir) if options.jobs < 1: parser.error('--jobs must be positive') @@ -342,6 +363,28 @@ if PYTHON3: parser.error( '--py3k-warnings can only be used on Python 2.6 and 2.7') + if options.with_python3: + if PYTHON3: + parser.error('--with-python3 cannot be used when executing with ' + 'Python 3') + + options.with_python3 = canonpath(options.with_python3) + # Verify Python3 executable is acceptable. + proc = subprocess.Popen([options.with_python3, b'--version'], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + out, _err = proc.communicate() + ret = proc.wait() + if ret != 0: + parser.error('could not determine version of python 3') + if not out.startswith('Python '): + parser.error('unexpected output from python3 --version: %s' % + out) + vers = version.LooseVersion(out[len('Python '):]) + if vers < version.LooseVersion('3.5.0'): + parser.error('--with-python3 version must be 3.5.0 or greater; ' + 'got %s' % out) + if options.blacklist: options.blacklist = parselistfiles(options.blacklist, 'blacklist') if options.whitelist: @@ -443,8 +486,8 @@ debug=False, timeout=defaults['timeout'], startport=defaults['port'], extraconfigopts=None, - py3kwarnings=False, shell=None, - slowtimeout=defaults['slowtimeout']): + py3kwarnings=False, shell=None, hgcommand=None, + slowtimeout=defaults['slowtimeout'], usechg=False): """Create a test from parameters. path is the full path to the file defining the test. 
@@ -490,6 +533,8 @@ self._extraconfigopts = extraconfigopts or [] self._py3kwarnings = py3kwarnings self._shell = _bytespath(shell) + self._hgcommand = hgcommand or b'hg' + self._usechg = usechg self._aborted = False self._daemonpids = [] @@ -498,6 +543,7 @@ self._out = None self._skipped = None self._testtmp = None + self._chgsockdir = None # If we're not in --debug mode and reference output file exists, # check test output against it. @@ -534,8 +580,8 @@ if e.errno != errno.EEXIST: raise - self._testtmp = os.path.join(self._threadtmp, - os.path.basename(self.path)) + name = os.path.basename(self.path) + self._testtmp = os.path.join(self._threadtmp, name) os.mkdir(self._testtmp) # Remove any previous output files. @@ -549,6 +595,11 @@ if e.errno != errno.ENOENT: raise + if self._usechg: + self._chgsockdir = os.path.join(self._threadtmp, + b'%s.chgsock' % name) + os.mkdir(self._chgsockdir) + def run(self, result): """Run this test and report results against a TestResult instance.""" # This function is extremely similar to unittest.TestCase.run(). Once @@ -686,11 +737,17 @@ if self._keeptmpdir: log('\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s' % - (self._testtmp, self._threadtmp)) + (self._testtmp.decode('utf-8'), + self._threadtmp.decode('utf-8'))) else: shutil.rmtree(self._testtmp, True) shutil.rmtree(self._threadtmp, True) + if self._usechg: + # chgservers will stop automatically after they find the socket + # files are deleted + shutil.rmtree(self._chgsockdir, True) + if (self._ret != 0 or self._out != self._refout) and not self._skipped \ and not self._debug and self._out: f = open(self.errpath, 'wb') @@ -708,6 +765,10 @@ """Terminate execution of this test.""" self._aborted = True + def _portmap(self, i): + offset = b'' if i == 0 else b'%d' % i + return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset) + def _getreplacements(self): """Obtain a mapping of text replacements to apply to test output. @@ -716,31 +777,39 @@ occur. 
""" r = [ - (br':%d\b' % self._startport, b':$HGPORT'), - (br':%d\b' % (self._startport + 1), b':$HGPORT1'), - (br':%d\b' % (self._startport + 2), b':$HGPORT2'), + # This list should be parallel to defineport in _getenv + self._portmap(0), + self._portmap(1), + self._portmap(2), (br'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$', br'\1 (glob)'), ] - - if os.name == 'nt': - r.append( - (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or - c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c - for c in self._testtmp), b'$TESTTMP')) - else: - r.append((re.escape(self._testtmp), b'$TESTTMP')) + r.append((self._escapepath(self._testtmp), b'$TESTTMP')) return r + def _escapepath(self, p): + if os.name == 'nt': + return ( + (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or + c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c + for c in p)) + ) + else: + return re.escape(p) + def _getenv(self): """Obtain environment variables to use during test execution.""" + def defineport(i): + offset = '' if i == 0 else '%s' % i + env["HGPORT%s" % offset] = '%s' % (self._startport + i) env = os.environ.copy() env['TESTTMP'] = self._testtmp env['HOME'] = self._testtmp - env["HGPORT"] = str(self._startport) - env["HGPORT1"] = str(self._startport + 1) - env["HGPORT2"] = str(self._startport + 2) + # This number should match portneeded in _getport + for port in xrange(3): + # This list should be parallel to _portmap in _getreplacements + defineport(port) env["HGRCPATH"] = os.path.join(self._threadtmp, b'.hgrc') env["DAEMON_PIDS"] = os.path.join(self._threadtmp, b'daemon.pids') env["HGEDITOR"] = ('"' + sys.executable + '"' @@ -768,6 +837,9 @@ if k.startswith('HG_'): del env[k] + if self._usechg: + env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server') + return env def _createhgrc(self, path): @@ -885,8 +957,8 @@ class TTest(Test): """A "t test" is a test backed by a .t file.""" - SKIPPED_PREFIX = 'skipped: ' - FAILED_PREFIX = 'hghave 
check failed: ' + SKIPPED_PREFIX = b'skipped: ' + FAILED_PREFIX = b'hghave check failed: ' NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub @@ -938,7 +1010,7 @@ if wifexited(ret): ret = os.WEXITSTATUS(ret) if ret == 2: - print(stdout) + print(stdout.decode('utf-8')) sys.exit(1) if ret != 0: @@ -981,9 +1053,12 @@ if self._debug: script.append(b'set -x\n') + if self._hgcommand != b'hg': + script.append(b'alias hg="%s"\n' % self._hgcommand) if os.getenv('MSYSTEM'): script.append(b'alias pwd="pwd -W"\n') + n = 0 for n, l in enumerate(lines): if not l.endswith(b'\n'): l += b'\n' @@ -1081,25 +1156,45 @@ lout += b' (no-eol)\n' # Find the expected output at the current position. - el = None + els = [None] if expected.get(pos, None): - el = expected[pos].pop(0) + els = expected[pos] + + i = 0 + optional = [] + while i < len(els): + el = els[i] - r = TTest.linematch(el, lout) - if isinstance(r, str): - if r == '+glob': - lout = el[:-1] + ' (glob)\n' - r = '' # Warn only this line. - elif r == '-glob': - lout = ''.join(el.rsplit(' (glob)', 1)) - r = '' # Warn only this line. - elif r == "retry": - postout.append(b' ' + el) + r = TTest.linematch(el, lout) + if isinstance(r, str): + if r == '+glob': + lout = el[:-1] + ' (glob)\n' + r = '' # Warn only this line. + elif r == '-glob': + lout = ''.join(el.rsplit(' (glob)', 1)) + r = '' # Warn only this line. 
+ elif r == "retry": + postout.append(b' ' + el) + els.pop(i) + break + else: + log('\ninfo, unknown linematch result: %r\n' % r) + r = False + if r: + els.pop(i) + break + if el and el.endswith(b" (?)\n"): + optional.append(i) + i += 1 + + if r: + if r == "retry": continue - else: - log('\ninfo, unknown linematch result: %r\n' % r) - r = False - if r: + # clean up any optional leftovers + for i in optional: + postout.append(b' ' + els[i]) + for i in reversed(optional): + del els[i] postout.append(b' ' + el) else: if self.NEEDESCAPE(lout): @@ -1111,14 +1206,13 @@ elif warnonly == 1: # Is "not yet" and line is warn only. warnonly = 2 # Yes do warn. break - - # clean up any optional leftovers - while expected.get(pos, None): - el = expected[pos].pop(0) - if not el.endswith(b" (?)\n"): - expected[pos].insert(0, el) - break - postout.append(b' ' + el) + else: + # clean up any optional leftovers + while expected.get(pos, None): + el = expected[pos].pop(0) + if el and not el.endswith(b" (?)\n"): + break + postout.append(b' ' + el) if lcmd: # Add on last return code. 
@@ -1187,7 +1281,7 @@ if el: if el.endswith(b" (?)\n"): retry = "retry" - el = el[:-5] + "\n" + el = el[:-5] + b"\n" if el.endswith(b" (esc)\n"): if PYTHON3: el = el[:-7].decode('unicode_escape') + '\n' @@ -1219,10 +1313,10 @@ for line in lines: if line.startswith(TTest.SKIPPED_PREFIX): line = line.splitlines()[0] - missing.append(line[len(TTest.SKIPPED_PREFIX):]) + missing.append(line[len(TTest.SKIPPED_PREFIX):].decode('utf-8')) elif line.startswith(TTest.FAILED_PREFIX): line = line.splitlines()[0] - failed.append(line[len(TTest.FAILED_PREFIX):]) + failed.append(line[len(TTest.FAILED_PREFIX):].decode('utf-8')) return missing, failed @@ -1347,7 +1441,6 @@ return accepted = False - failed = False lines = [] with iolock: @@ -1388,7 +1481,7 @@ else: rename(test.errpath, '%s.out' % test.path) accepted = True - if not accepted and not failed: + if not accepted: self.faildata[test.name] = b''.join(lines) return accepted @@ -1481,7 +1574,7 @@ def get(): num_tests[0] += 1 if getattr(test, 'should_reload', False): - return self._loadtest(test.bname, num_tests[0]) + return self._loadtest(test.path, num_tests[0]) return test if not os.path.exists(test.path): result.addSkip(test, "Doesn't exist") @@ -1617,7 +1710,7 @@ def loadtimes(testdir): times = [] try: - with open(os.path.join(testdir, '.testtimes-')) as fp: + with open(os.path.join(testdir, b'.testtimes-')) as fp: for line in fp: ts = line.split() times.append((ts[0], [float(t) for t in ts[1:]])) @@ -1637,12 +1730,12 @@ ts.append(real) ts[:] = ts[-maxruns:] - fd, tmpname = tempfile.mkstemp(prefix='.testtimes', + fd, tmpname = tempfile.mkstemp(prefix=b'.testtimes', dir=testdir, text=True) with os.fdopen(fd, 'w') as fp: - for name, ts in sorted(saved.iteritems()): + for name, ts in sorted(saved.items()): fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts]))) - timepath = os.path.join(testdir, '.testtimes') + timepath = os.path.join(testdir, b'.testtimes') try: os.unlink(timepath) except OSError: @@ -1714,9 
+1807,7 @@ xuf.write(doc.toprettyxml(indent=' ', encoding='utf-8')) if self._runner.options.json: - if json is None: - raise ImportError("json module not installed") - jsonpath = os.path.join(self._runner._testdir, 'report.json') + jsonpath = os.path.join(self._runner._testdir, b'report.json') with open(jsonpath, 'w') as fp: timesd = {} for tdata in result.times: @@ -1731,14 +1822,14 @@ for res, testcases in groups: for tc, __ in testcases: if tc.name in timesd: + diff = result.faildata.get(tc.name, b'') tres = {'result': res, 'time': ('%0.3f' % timesd[tc.name][2]), 'cuser': ('%0.3f' % timesd[tc.name][0]), 'csys': ('%0.3f' % timesd[tc.name][1]), 'start': ('%0.3f' % timesd[tc.name][3]), 'end': ('%0.3f' % timesd[tc.name][4]), - 'diff': result.faildata.get(tc.name, - ''), + 'diff': diff.decode('unicode_escape'), } else: # blacklisted test @@ -1751,6 +1842,40 @@ self._runner._checkhglib('Tested') savetimes(self._runner._testdir, result) + + if failed and self._runner.options.known_good_rev: + def nooutput(args): + p = subprocess.Popen(args, stderr=subprocess.STDOUT, + stdout=subprocess.PIPE) + p.stdout.read() + p.wait() + for test, msg in result.failures: + nooutput(['hg', 'bisect', '--reset']), + nooutput(['hg', 'bisect', '--bad', '.']) + nooutput(['hg', 'bisect', '--good', + self._runner.options.known_good_rev]) + # TODO: we probably need to forward some options + # that alter hg's behavior inside the tests. 
+ rtc = '%s %s %s' % (sys.executable, sys.argv[0], test) + sub = subprocess.Popen(['hg', 'bisect', '--command', rtc], + stderr=subprocess.STDOUT, + stdout=subprocess.PIPE) + data = sub.stdout.read() + sub.wait() + m = re.search( + (r'\nThe first (?P<goodbad>bad|good) revision ' + r'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n' + r'summary: +(?P<summary>[^\n]+)\n'), + data, (re.MULTILINE | re.DOTALL)) + if m is None: + self.stream.writeln( + 'Failed to identify failure point for %s' % test) + continue + dat = m.groupdict() + verb = 'broken' if dat['goodbad'] == 'bad' else 'fixed' + self.stream.writeln( + '%s %s by %s (%s)' % ( + test, verb, dat['node'], dat['summary'])) self.stream.writeln( '# Ran %d tests, %d skipped, %d warned, %d failed.' % (result.testsRun, @@ -1809,6 +1934,7 @@ self._pythondir = None self._coveragefile = None self._createdfiles = [] + self._hgcommand = None self._hgpath = None self._portoffset = 0 self._ports = {} @@ -1871,7 +1997,7 @@ for kw, mul in slow.items(): if kw in f: val *= mul - if f.endswith('.py'): + if f.endswith(b'.py'): val /= 10.0 perf[f] = val / 1000.0 return perf[f] @@ -1915,14 +2041,9 @@ if self.options.with_hg: self._installdir = None whg = self.options.with_hg - # If --with-hg is not specified, we have bytes already, - # but if it was specified in python3 we get a str, so we - # have to encode it back into a bytes. 
- if PYTHON3: - if not isinstance(whg, bytes): - whg = _bytespath(whg) self._bindir = os.path.dirname(os.path.realpath(whg)) assert isinstance(self._bindir, bytes) + self._hgcommand = os.path.basename(whg) self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin') os.makedirs(self._tmpbindir) @@ -1934,14 +2055,29 @@ self._pythondir = self._bindir else: self._installdir = os.path.join(self._hgtmp, b"install") - self._bindir = osenvironb[b"BINDIR"] = \ - os.path.join(self._installdir, b"bin") + self._bindir = os.path.join(self._installdir, b"bin") + self._hgcommand = b'hg' self._tmpbindir = self._bindir self._pythondir = os.path.join(self._installdir, b"lib", b"python") + # set CHGHG, then replace "hg" command by "chg" + chgbindir = self._bindir + if self.options.chg or self.options.with_chg: + osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand) + else: + osenvironb.pop(b'CHGHG', None) # drop flag for hghave + if self.options.chg: + self._hgcommand = b'chg' + elif self.options.with_chg: + chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg)) + self._hgcommand = os.path.basename(self.options.with_chg) + osenvironb[b"BINDIR"] = self._bindir osenvironb[b"PYTHON"] = PYTHON + if self.options.with_python3: + osenvironb[b'PYTHON3'] = self.options.with_python3 + fileb = _bytespath(__file__) runtestdir = os.path.abspath(os.path.dirname(fileb)) osenvironb[b'RUNTESTDIR'] = runtestdir @@ -1955,6 +2091,8 @@ realfile = os.path.realpath(fileb) realdir = os.path.abspath(os.path.dirname(realfile)) path.insert(2, realdir) + if chgbindir != self._bindir: + path.insert(1, chgbindir) if self._testdir != runtestdir: path = [self._testdir] + path if self._tmpbindir != self._bindir: @@ -1977,6 +2115,7 @@ if self.options.pure: os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure" + os.environ["HGMODULEPOLICY"] = "py" if self.options.allow_slow_tests: os.environ["HGTEST_SLOW"] = "slow" @@ -2023,6 +2162,9 @@ self._checkhglib("Testing") else: self._usecorrectpython() 
+ if self.options.chg: + assert self._installdir + self._installchg() if self.options.restart: orig = list(tests) @@ -2116,13 +2258,14 @@ startport=self._getport(count), extraconfigopts=self.options.extra_config_opt, py3kwarnings=self.options.py3k_warnings, - shell=self.options.shell) + shell=self.options.shell, + hgcommand=self._hgcommand, + usechg=bool(self.options.with_chg or self.options.chg)) t.should_reload = True return t def _cleanup(self): """Clean up state from this test invocation.""" - if self.options.keep_tmpdir: return @@ -2174,7 +2317,7 @@ This will also configure hg with the appropriate testing settings. """ vlog("# Performing temporary installation of HG") - installerrs = os.path.join(b"tests", b"install.err") + installerrs = os.path.join(self._hgtmp, b"install.err") compiler = '' if self.options.compiler: compiler = '--compiler ' + self.options.compiler @@ -2182,13 +2325,11 @@ pure = b"--pure" else: pure = b"" - py3 = '' # Run installer in hg root script = os.path.realpath(sys.argv[0]) exe = sys.executable if PYTHON3: - py3 = b'--c2to3' compiler = _bytespath(compiler) script = _bytespath(script) exe = _bytespath(exe) @@ -2202,12 +2343,12 @@ # least on Windows for now, deal with .pydistutils.cfg bugs # when they happen. 
nohome = b'' - cmd = (b'%(exe)s setup.py %(py3)s %(pure)s clean --all' + cmd = (b'%(exe)s setup.py %(pure)s clean --all' b' build %(compiler)s --build-base="%(base)s"' b' install --force --prefix="%(prefix)s"' b' --install-lib="%(libdir)s"' b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1' - % {b'exe': exe, b'py3': py3, b'pure': pure, + % {b'exe': exe, b'pure': pure, b'compiler': compiler, b'base': os.path.join(self._hgtmp, b"build"), b'prefix': self._installdir, b'libdir': self._pythondir, @@ -2321,6 +2462,27 @@ return self._hgpath + def _installchg(self): + """Install chg into the test environment""" + vlog('# Performing temporary installation of CHG') + assert os.path.dirname(self._bindir) == self._installdir + assert self._hgroot, 'must be called after _installhg()' + cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"' + % {b'make': 'make', # TODO: switch by option or environment? + b'prefix': self._installdir}) + cwd = os.path.join(self._hgroot, b'contrib', b'chg') + vlog("# Running", cmd) + proc = subprocess.Popen(cmd, shell=True, cwd=cwd, + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + out, _err = proc.communicate() + if proc.returncode != 0: + if PYTHON3: + sys.stdout.buffer.write(out) + else: + sys.stdout.write(out) + sys.exit(1) + def _outputcoverage(self): """Produce code coverage output.""" from coverage import coverage @@ -2368,7 +2530,8 @@ if found: vlog("# Found prerequisite", p, "at", found) else: - print("WARNING: Did not find prerequisite tool: %s " % p) + print("WARNING: Did not find prerequisite tool: %s " % + p.decode("utf-8")) if __name__ == '__main__': runner = TestRunner()
--- a/tests/seq.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/seq.py Sat Apr 16 18:06:48 2016 -0500 @@ -7,6 +7,7 @@ # seq START STOP [START, STOP] stepping by 1 # seq START STEP STOP [START, STOP] stepping by STEP +from __future__ import absolute_import, print_function import sys start = 1 @@ -20,4 +21,4 @@ stop = int(sys.argv[-1]) + 1 for i in xrange(start, stop, step): - print i + print(i)
--- a/tests/silenttestrunner.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/silenttestrunner.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,4 +1,7 @@ -import unittest, sys, os +from __future__ import absolute_import, print_function +import os +import sys +import unittest def main(modulename): '''run the tests found in module, printing nothing when all tests pass''' @@ -8,12 +11,12 @@ suite.run(results) if results.errors or results.failures: for tc, exc in results.errors: - print 'ERROR:', tc - print + print('ERROR:', tc) + print() sys.stdout.write(exc) for tc, exc in results.failures: - print 'FAIL:', tc - print + print('FAIL:', tc) + print() sys.stdout.write(exc) sys.exit(1)
--- a/tests/sitecustomize.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/sitecustomize.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,3 +1,4 @@ +from __future__ import absolute_import import os if os.environ.get('COVERAGE_PROCESS_START'):
--- a/tests/svnxml.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/svnxml.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,7 +1,9 @@ # Read the output of a "svn log --xml" command on stdin, parse it and # print a subset of attributes common to all svn versions tested by # hg. -import xml.dom.minidom, sys +from __future__ import absolute_import +import sys +import xml.dom.minidom def xmltext(e): return ''.join(c.data for c
--- a/tests/test-1102.t Tue Mar 29 11:54:46 2016 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ - $ rm -rf a - $ hg init a - $ cd a - $ echo a > a - $ hg ci -Am0 - adding a - $ hg tag t1 # 1 - $ hg tag --remove t1 # 2 - - $ hg co 1 - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg tag -f -r0 t1 - $ hg tags - tip 3:a49829c4fc11 - t1 0:f7b1eb17ad24 - - $ cd ..
--- a/tests/test-1993.t Tue Mar 29 11:54:46 2016 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,48 +0,0 @@ - $ hg init a - $ cd a - $ echo a > a - $ hg ci -Am0 - adding a - $ echo b > b - $ hg ci -Am1 - adding b - $ hg tag -r0 default - warning: tag default conflicts with existing branch name - $ hg log - changeset: 2:30a83d1e4a1e - tag: tip - user: test - date: Thu Jan 01 00:00:00 1970 +0000 - summary: Added tag default for changeset f7b1eb17ad24 - - changeset: 1:925d80f479bb - user: test - date: Thu Jan 01 00:00:00 1970 +0000 - summary: 1 - - changeset: 0:f7b1eb17ad24 - tag: default - user: test - date: Thu Jan 01 00:00:00 1970 +0000 - summary: 0 - - $ hg update 'tag(default)' - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved - $ hg parents - changeset: 0:f7b1eb17ad24 - tag: default - user: test - date: Thu Jan 01 00:00:00 1970 +0000 - summary: 0 - - $ hg update 'branch(default)' - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg parents - changeset: 2:30a83d1e4a1e - tag: tip - user: test - date: Thu Jan 01 00:00:00 1970 +0000 - summary: Added tag default for changeset f7b1eb17ad24 - - - $ cd ..
--- a/tests/test-586.t Tue Mar 29 11:54:46 2016 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,92 +0,0 @@ -Issue586: removing remote files after merge appears to corrupt the -dirstate - - $ hg init a - $ cd a - $ echo a > a - $ hg ci -Ama - adding a - - $ hg init ../b - $ cd ../b - $ echo b > b - $ hg ci -Amb - adding b - - $ hg pull -f ../a - pulling from ../a - searching for changes - warning: repository is unrelated - requesting all changes - adding changesets - adding manifests - adding file changes - added 1 changesets with 1 changes to 1 files (+1 heads) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg merge - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - (branch merge, don't forget to commit) - $ hg rm -f a - $ hg ci -Amc - - $ hg st -A - C b - $ cd .. - -Issue1433: Traceback after two unrelated pull, two move, a merge and -a commit (related to issue586) - -create test repos - - $ hg init repoa - $ touch repoa/a - $ hg -R repoa ci -Am adda - adding a - - $ hg init repob - $ touch repob/b - $ hg -R repob ci -Am addb - adding b - - $ hg init repoc - $ cd repoc - $ hg pull ../repoa - pulling from ../repoa - requesting all changes - adding changesets - adding manifests - adding file changes - added 1 changesets with 1 changes to 1 files - (run 'hg update' to get a working copy) - $ hg update - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ mkdir tst - $ hg mv * tst - $ hg ci -m "import a in tst" - $ hg pull -f ../repob - pulling from ../repob - searching for changes - warning: repository is unrelated - requesting all changes - adding changesets - adding manifests - adding file changes - added 1 changesets with 1 changes to 1 files (+1 heads) - (run 'hg heads' to see heads, 'hg merge' to merge) - -merge both repos - - $ hg merge - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - (branch merge, don't forget to commit) - $ mkdir src - -move b content - - $ hg mv b src - $ 
hg ci -m "import b in src" - $ hg manifest - src/b - tst/a - - $ cd ..
--- a/tests/test-add.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-add.t Sat Apr 16 18:06:48 2016 -0500 @@ -226,7 +226,7 @@ $ hg diff capsdir1/capsdir diff -r * CapsDir1/CapsDir/SubDir/Def.txt (glob) --- a/CapsDir1/CapsDir/SubDir/Def.txt Thu Jan 01 00:00:00 1970 +0000 - +++ b/CapsDir1/CapsDir/SubDir/Def.txt * +0000 (glob) + +++ b/CapsDir1/CapsDir/SubDir/Def.txt * (glob) @@ -1,1 +1,1 @@ -xyz +def
--- a/tests/test-ancestor.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-ancestor.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,4 +1,4 @@ -from __future__ import absolute_import +from __future__ import absolute_import, print_function import binascii import getopt @@ -13,7 +13,7 @@ ancestor, commands, hg, - ui, + ui as uimod, util, ) @@ -94,13 +94,13 @@ def err(seed, graph, bases, seq, output, expected): if nerrs[0] == 0: - print >> sys.stderr, 'seed:', hex(seed)[:-1] + print('seed:', hex(seed)[:-1], file=sys.stderr) if gerrs[0] == 0: - print >> sys.stderr, 'graph:', graph - print >> sys.stderr, '* bases:', bases - print >> sys.stderr, '* seq: ', seq - print >> sys.stderr, '* output: ', output - print >> sys.stderr, '* expected:', expected + print('graph:', graph, file=sys.stderr) + print('* bases:', bases, file=sys.stderr) + print('* seq: ', seq, file=sys.stderr) + print('* output: ', output, file=sys.stderr) + print('* expected:', expected, file=sys.stderr) nerrs[0] += 1 gerrs[0] += 1 @@ -178,14 +178,14 @@ 13: [8]} def genlazyancestors(revs, stoprev=0, inclusive=False): - print ("%% lazy ancestor set for %s, stoprev = %s, inclusive = %s" % - (revs, stoprev, inclusive)) + print(("%% lazy ancestor set for %s, stoprev = %s, inclusive = %s" % + (revs, stoprev, inclusive))) return ancestor.lazyancestors(graph.get, revs, stoprev=stoprev, inclusive=inclusive) def printlazyancestors(s, l): - print 'membership: %r' % [n for n in l if n in s] - print 'iteration: %r' % list(s) + print('membership: %r' % [n for n in l if n in s]) + print('iteration: %r' % list(s)) def test_lazyancestors(): # Empty revs @@ -218,7 +218,7 @@ '+3*3/*2*2/*4*4/*4/2*4/2*2', ] def test_gca(): - u = ui.ui() + u = uimod.ui() for i, dag in enumerate(dagtests): repo = hg.repository(u, 'gca%d' % i, create=1) cl = repo.changelog @@ -235,9 +235,10 @@ cgcas = sorted(cl.index.ancestors(a, b)) pygcas = sorted(ancestor.ancestors(cl.parentrevs, a, b)) if cgcas != pygcas: - print "test_gca: for dag %s, gcas for %d, 
%d:" % (dag, a, b) - print " C returned: %s" % cgcas - print " Python returned: %s" % pygcas + print("test_gca: for dag %s, gcas for %d, %d:" + % (dag, a, b)) + print(" C returned: %s" % cgcas) + print(" Python returned: %s" % pygcas) def main(): seed = None
--- a/tests/test-archive.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-archive.t Sat Apr 16 18:06:48 2016 -0500 @@ -269,8 +269,6 @@ $ cp $HGRCPATH $HGRCPATH.no-progress $ cat >> $HGRCPATH <<EOF - > [extensions] - > progress = > [progress] > assume-tty = 1 > format = topic bar number
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-automv.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,338 @@ +Tests for the automv extension; detect moved files at commit time. + + $ cat >> $HGRCPATH << EOF + > [extensions] + > automv= + > rebase= + > EOF + +Setup repo + + $ hg init repo + $ cd repo + +Test automv command for commit + + $ printf 'foo\nbar\nbaz\n' > a.txt + $ hg add a.txt + $ hg commit -m 'init repo with a' + +mv/rm/add + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit -m 'msg' + detected move of 1 files + $ hg status --change . -C + A b.txt + a.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + +mv/rm/add/modif + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ printf '\n' >> b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit -m 'msg' + detected move of 1 files + created new head + $ hg status --change . -C + A b.txt + a.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + +mv/rm/add/modif + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ printf '\nfoo\n' >> b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit -m 'msg' + created new head + $ hg status --change . -C + A b.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + +mv/rm/add/modif/changethreshold + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ printf '\nfoo\n' >> b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit --config automv.similarity='60' -m 'msg' + detected move of 1 files + created new head + $ hg status --change . -C + A b.txt + a.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + +mv + $ mv a.txt b.txt + $ hg status -C + ! a.txt + ? b.txt + $ hg commit -m 'msg' + nothing changed (1 missing files, see 'hg status') + [1] + $ hg status -C + ! a.txt + ? 
b.txt + $ hg revert -aqC + $ rm b.txt + +mv/rm/add/notincommitfiles + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ echo 'bar' > c.txt + $ hg add c.txt + $ hg status -C + A b.txt + A c.txt + R a.txt + $ hg commit c.txt -m 'msg' + created new head + $ hg status --change . -C + A c.txt + $ hg status -C + A b.txt + R a.txt + $ hg up -r 0 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg rm a.txt + $ echo 'bar' > c.txt + $ hg add c.txt + $ hg commit -m 'msg' + detected move of 1 files + created new head + $ hg status --change . -C + A b.txt + a.txt + A c.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + +mv/rm/add/--no-automv + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit --no-automv -m 'msg' + created new head + $ hg status --change . -C + A b.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + +Test automv command for commit --amend + +mv/rm/add + $ echo 'c' > c.txt + $ hg add c.txt + $ hg commit -m 'revision to amend to' + created new head + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit --amend -m 'amended' + detected move of 1 files + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob) + $ hg status --change . -C + A b.txt + a.txt + A c.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + +mv/rm/add/modif + $ echo 'c' > c.txt + $ hg add c.txt + $ hg commit -m 'revision to amend to' + created new head + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ printf '\n' >> b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit --amend -m 'amended' + detected move of 1 files + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob) + $ hg status --change . 
-C + A b.txt + a.txt + A c.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + +mv/rm/add/modif + $ echo 'c' > c.txt + $ hg add c.txt + $ hg commit -m 'revision to amend to' + created new head + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ printf '\nfoo\n' >> b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit --amend -m 'amended' + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob) + $ hg status --change . -C + A b.txt + A c.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + +mv/rm/add/modif/changethreshold + $ echo 'c' > c.txt + $ hg add c.txt + $ hg commit -m 'revision to amend to' + created new head + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ printf '\nfoo\n' >> b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit --amend --config automv.similarity='60' -m 'amended' + detected move of 1 files + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob) + $ hg status --change . -C + A b.txt + a.txt + A c.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + +mv + $ echo 'c' > c.txt + $ hg add c.txt + $ hg commit -m 'revision to amend to' + created new head + $ mv a.txt b.txt + $ hg status -C + ! a.txt + ? b.txt + $ hg commit --amend -m 'amended' + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob) + $ hg status -C + ! a.txt + ? 
b.txt + $ hg up -Cr 0 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + +mv/rm/add/notincommitfiles + $ echo 'c' > c.txt + $ hg add c.txt + $ hg commit -m 'revision to amend to' + created new head + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ echo 'bar' > d.txt + $ hg add d.txt + $ hg status -C + A b.txt + A d.txt + R a.txt + $ hg commit --amend -m 'amended' d.txt + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob) + $ hg status --change . -C + A c.txt + A d.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit --amend -m 'amended' + detected move of 1 files + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob) + $ hg status --change . -C + A b.txt + a.txt + A c.txt + A d.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 3 files removed, 0 files unresolved + +mv/rm/add/--no-automv + $ echo 'c' > c.txt + $ hg add c.txt + $ hg commit -m 'revision to amend to' + created new head + $ mv a.txt b.txt + $ hg rm a.txt + $ hg add b.txt + $ hg status -C + A b.txt + R a.txt + $ hg commit --amend -m 'amended' --no-automv + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob) + $ hg status --change . -C + A b.txt + A c.txt + R a.txt + $ hg up -r 0 + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + +mv/rm/commit/add/amend + $ echo 'c' > c.txt + $ hg add c.txt + $ hg commit -m 'revision to amend to' + created new head + $ mv a.txt b.txt + $ hg rm a.txt + $ hg status -C + R a.txt + ? b.txt + $ hg commit -m "removed a" + $ hg add b.txt + $ hg commit --amend -m 'amended' + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob) + $ hg status --change . -C + A b.txt + R a.txt + +error conditions + + $ cat >> $HGRCPATH << EOF + > [automv] + > similarity=110 + > EOF + $ hg commit -m 'revision to amend to' + abort: automv.similarity must be between 0 and 100 + [255]
--- a/tests/test-backout.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-backout.t Sat Apr 16 18:06:48 2016 -0500 @@ -686,6 +686,7 @@ * version 2 records local: b71750c4b0fdf719734971e3ef90dbeab5919a2d other: a30dd8addae3ce71b8667868478542bc417439e6 + file extras: foo (ancestorlinknode = 91360952243723bd5b1138d5f26bd8c8564cb553) file: foo (record type "F", state "u", hash 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33) local path: foo (flags "") ancestor path: foo (node f89532f44c247a0e993d63e3a734dd781ab04708)
--- a/tests/test-bad-extension.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-bad-extension.t Sat Apr 16 18:06:48 2016 -0500 @@ -1,11 +1,19 @@ $ echo 'raise Exception("bit bucket overflow")' > badext.py - $ abspath=`pwd`/badext.py + $ abspathexc=`pwd`/badext.py + + $ cat >baddocext.py <<EOF + > """ + > baddocext is bad + > """ + > EOF + $ abspathdoc=`pwd`/baddocext.py $ cat <<EOF >> $HGRCPATH > [extensions] > gpg = > hgext.gpg = - > badext = $abspath + > badext = $abspathexc + > baddocext = $abspathdoc > badext2 = > EOF @@ -23,6 +31,19 @@ Traceback (most recent call last): ImportError: No module named badext2 +names of extensions failed to load can be accessed via extensions.notloaded() + + $ cat <<EOF > showbadexts.py + > from mercurial import cmdutil, commands, extensions + > cmdtable = {} + > command = cmdutil.command(cmdtable) + > @command('showbadexts', norepo=True) + > def showbadexts(ui, *pats, **opts): + > ui.write('BADEXTS: %s\n' % ' '.join(sorted(extensions.notloaded()))) + > EOF + $ hg --config extensions.badexts=showbadexts.py showbadexts 2>&1 | grep '^BADEXTS' + BADEXTS: badext badext2 + show traceback for ImportError of hgext.name if debug is set (note that --debug option isn't applied yet when loading extensions) @@ -35,6 +56,18 @@ could not import hgext.badext2 (No module named *badext2): trying badext2 (glob) Traceback (most recent call last): ImportError: No module named *badext2 (glob) + could not import hgext3rd.badext2 (No module named *badext2): trying badext2 (glob) + Traceback (most recent call last): + ImportError: No module named *badext2 (glob) *** failed to import extension badext2: No module named badext2 Traceback (most recent call last): ImportError: No module named badext2 + +confirm that there's no crash when an extension's documentation is bad + + $ hg help --keyword baddocext + *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow + *** failed to import extension badext2: No module named badext2 + 
Topics: + + extensions Using Additional Features
--- a/tests/test-basic.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-basic.t Sat Apr 16 18:06:48 2016 -0500 @@ -6,6 +6,7 @@ defaults.shelve=--date "0 0" defaults.tag=-d "0 0" devel.all-warnings=true + extensions.chgserver= (?) largefiles.usercache=$TESTTMP/.cache/largefiles (glob) ui.slash=True ui.interactive=False
--- a/tests/test-batching.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-batching.py Sat Apr 16 18:06:48 2016 -0500 @@ -5,8 +5,12 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. -from mercurial.peer import localbatch, batchable, future -from mercurial.wireproto import remotebatch +from __future__ import absolute_import, print_function + +from mercurial import ( + peer, + wireproto, +) # equivalent of repo.repository class thing(object): @@ -25,17 +29,17 @@ return "Hello, %s" % name def batch(self): '''Support for local batching.''' - return localbatch(self) + return peer.localbatch(self) # usage of "thing" interface def use(it): # Direct call to base method shared between client and server. - print it.hello() + print(it.hello()) # Direct calls to proxied methods. They cause individual roundtrips. - print it.foo("Un", two="Deux") - print it.bar("Eins", "Zwei") + print(it.foo("Un", two="Deux")) + print(it.bar("Eins", "Zwei")) # Batched call to a couple of (possibly proxied) methods. batch = it.batch() @@ -53,17 +57,17 @@ # as possible. batch.submit() # After the call to submit, the futures actually contain values. 
- print foo.value - print foo2.value - print bar.value - print greet.value - print hello.value - print bar2.value + print(foo.value) + print(foo2.value) + print(bar.value) + print(greet.value) + print(hello.value) + print(bar2.value) # local usage mylocal = localthing() -print -print "== Local" +print() +print("== Local") use(mylocal) # demo remoting; mimicks what wireproto and HTTP/SSH do @@ -93,12 +97,12 @@ args = dict(arg.split('=', 1) for arg in args) return getattr(self, name)(**args) def perform(self, req): - print "REQ:", req + print("REQ:", req) name, args = req.split('?', 1) args = args.split('&') vals = dict(arg.split('=', 1) for arg in args) res = getattr(self, name)(**vals) - print " ->", res + print(" ->", res) return res def batch(self, cmds): res = [] @@ -145,20 +149,20 @@ return res.split(';') def batch(self): - return remotebatch(self) + return wireproto.remotebatch(self) - @batchable + @peer.batchable def foo(self, one, two=None): if not one: yield "Nope", None encargs = [('one', mangle(one),), ('two', mangle(two),)] - encresref = future() + encresref = peer.future() yield encargs, encresref yield unmangle(encresref.value) - @batchable + @peer.batchable def bar(self, b, a): - encresref = future() + encresref = peer.future() yield [('b', mangle(b),), ('a', mangle(a),)], encresref yield unmangle(encresref.value) @@ -171,6 +175,6 @@ # demo remote usage myproxy = remotething(myserver) -print -print "== Remote" +print() +print("== Remote") use(myproxy)
--- a/tests/test-bdiff.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-bdiff.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,5 +1,9 @@ +from __future__ import absolute_import, print_function import struct -from mercurial import bdiff, mpatch +from mercurial import ( + bdiff, + mpatch, +) def test1(a, b): d = bdiff.bdiff(a, b) @@ -7,13 +11,13 @@ if d: c = mpatch.patches(a, [d]) if c != b: - print "***", repr(a), repr(b) - print "bad:" - print repr(c)[:200] - print repr(d) + print("***", repr(a), repr(b)) + print("bad:") + print(repr(c)[:200]) + print(repr(d)) def test(a, b): - print "***", repr(a), repr(b) + print("***", repr(a), repr(b)) test1(a, b) test1(b, a) @@ -44,23 +48,23 @@ while pos < len(bin): p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12]) pos += 12 - print p1, p2, repr(bin[pos:pos + l]) + print(p1, p2, repr(bin[pos:pos + l])) pos += l showdiff("x\n\nx\n\nx\n\nx\n\nz\n", "x\n\nx\n\ny\n\nx\n\nx\n\nz\n") showdiff("x\n\nx\n\nx\n\nx\n\nz\n", "x\n\nx\n\ny\n\nx\n\ny\n\nx\n\nz\n") -print "done" +print("done") def testfixws(a, b, allws): c = bdiff.fixws(a, allws) if c != b: - print "*** fixws", repr(a), repr(b), allws - print "got:" - print repr(c) + print("*** fixws", repr(a), repr(b), allws) + print("got:") + print(repr(c)) testfixws(" \ta\r b\t\n", "ab\n", 1) testfixws(" \ta\r b\t\n", " a b\n", 0) testfixws("", "", 1) testfixws("", "", 0) -print "done" +print("done")
--- a/tests/test-bisect2.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-bisect2.t Sat Apr 16 18:06:48 2016 -0500 @@ -244,6 +244,7 @@ $ hg up -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 3 other heads for branch "default" complex bisect test 1 # first bad rev is 9
--- a/tests/test-blackbox.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-blackbox.t Sat Apr 16 18:06:48 2016 -0500 @@ -12,9 +12,10 @@ $ echo a > a $ hg add a - $ hg blackbox - 1970/01/01 00:00:00 bob (*)> add a (glob) - 1970/01/01 00:00:00 bob (*)> add a exited 0 after * seconds (glob) + $ hg blackbox --config blackbox.dirty=True + 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> add a + 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> add a exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000+ (5000)> blackbox incoming change tracking @@ -43,22 +44,23 @@ adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) - $ hg blackbox -l 5 - 1970/01/01 00:00:00 bob (*)> pull (glob) - 1970/01/01 00:00:00 bob (*)> updated served branch cache in ?.???? seconds (glob) - 1970/01/01 00:00:00 bob (*)> wrote served branch cache with 1 labels and 2 nodes (glob) - 1970/01/01 00:00:00 bob (*)> 1 incoming changes - new heads: d02f48003e62 (glob) - 1970/01/01 00:00:00 bob (*)> pull exited 0 after * seconds (glob) + $ hg blackbox -l 6 + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pull + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated served branch cache in * seconds (glob) + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote served branch cache with 1 labels and 2 nodes + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> 1 incoming changes - new heads: d02f48003e62 + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pull exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6 we must not cause a failure if we cannot write to the log $ hg rollback repository tip rolled back to revision 1 (undo pull) -#if unix-permissions no-root - $ chmod 
000 .hg/blackbox.log + $ mv .hg/blackbox.log .hg/blackbox.log- + $ mkdir .hg/blackbox.log $ hg --debug incoming - warning: cannot write to blackbox.log: Permission denied + warning: cannot write to blackbox.log: * (glob) comparing with $TESTTMP/blackboxtest (glob) query 1; heads searching for changes @@ -77,7 +79,6 @@ c -#endif $ hg pull pulling from $TESTTMP/blackboxtest (glob) searching for changes @@ -87,14 +88,14 @@ added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) -a failure reading from the log is fine -#if unix-permissions no-root +a failure reading from the log is fatal + $ hg blackbox -l 3 - abort: Permission denied: $TESTTMP/blackboxtest2/.hg/blackbox.log + abort: *$TESTTMP/blackboxtest2/.hg/blackbox.log* (glob) [255] - $ chmod 600 .hg/blackbox.log -#endif + $ rmdir .hg/blackbox.log + $ mv .hg/blackbox.log- .hg/blackbox.log backup bundles get logged @@ -105,12 +106,13 @@ $ hg strip tip 0 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob) - $ hg blackbox -l 5 - 1970/01/01 00:00:00 bob (*)> strip tip (glob) - 1970/01/01 00:00:00 bob (*)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob) - 1970/01/01 00:00:00 bob (*)> updated base branch cache in ?.???? 
seconds (glob) - 1970/01/01 00:00:00 bob (*)> wrote base branch cache with 1 labels and 2 nodes (glob) - 1970/01/01 00:00:00 bob (*)> strip tip exited 0 after * seconds (glob) + $ hg blackbox -l 6 + 1970/01/01 00:00:00 bob @73f6ee326b27d820b0472f1a825e3a50f3dc489b (5000)> strip tip + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/73f6ee326b27-7612e004-backup.hg (glob) + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated base branch cache in * seconds (glob) + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote base branch cache with 1 labels and 2 nodes + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> strip tip exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6 extension and python hooks - use the eol extension for a pythonhook @@ -121,12 +123,14 @@ $ hg update hooked 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg blackbox -l 5 - 1970/01/01 00:00:00 bob (*)> update (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 0 tags (glob) - 1970/01/01 00:00:00 bob (*)> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> exthook-update: echo hooked finished in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> update exited 0 after * seconds (glob) + 1 other heads for branch "default" + $ hg blackbox -l 6 + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> update + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> writing .hg/cache/tags2-visible with 0 tags + 1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob) + 1970/01/01 00:00:00 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> 
exthook-update: echo hooked finished in * seconds (glob) + 1970/01/01 00:00:00 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> update exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> blackbox -l 6 log rotation @@ -142,6 +146,64 @@ .hg/blackbox.log .hg/blackbox.log.1 .hg/blackbox.log.2 + $ cd .. + + $ hg init blackboxtest3 + $ cd blackboxtest3 + $ hg blackbox + 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> blackbox + $ mv .hg/blackbox.log .hg/blackbox.log- + $ mkdir .hg/blackbox.log + $ sed -e 's/\(.*test1.*\)/#\1/; s#\(.*commit2.*\)#os.rmdir(".hg/blackbox.log")\ + > os.rename(".hg/blackbox.log-", ".hg/blackbox.log")\ + > \1#' $TESTDIR/test-dispatch.py > ../test-dispatch.py + $ python $TESTDIR/blackbox-readonly-dispatch.py + running: add foo + result: 0 + running: commit -m commit1 -d 2000-01-01 foo + result: None + running: commit -m commit2 -d 2000-01-02 foo + result: None + running: log -r 0 + changeset: 0:0e4634943879 + user: test + date: Sat Jan 01 00:00:00 2000 +0000 + summary: commit1 + + result: None + running: log -r tip + changeset: 1:45589e459b2e + tag: tip + user: test + date: Sun Jan 02 00:00:00 2000 +0000 + summary: commit2 + + result: None + $ hg blackbox + 1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> commit -m commit2 -d 2000-01-02 foo + 1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> updated served branch cache in * seconds (glob) + 1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> wrote served branch cache with 1 labels and 1 nodes + 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> commit -m commit2 -d 2000-01-02 foo exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r 0 + 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> writing .hg/cache/tags2-visible with 0 tags + 
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r 0 exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r tip + 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r tip exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> blackbox + +Test log recursion from dirty status check + + $ cat > ../r.py <<EOF + > from mercurial import context, error, extensions + > x=[False] + > def status(orig, *args, **opts): + > args[0].repo().ui.log("broken", "recursion?") + > return orig(*args, **opts) + > def reposetup(ui, repo): + > extensions.wrapfunction(context.basectx, 'status', status) + > EOF + $ hg id --config extensions.x=../r.py --config blackbox.dirty=True + 45589e459b2e tip cleanup $ cd ..
--- a/tests/test-bookmarks-current.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-bookmarks-current.t Sat Apr 16 18:06:48 2016 -0500 @@ -22,9 +22,10 @@ update to bookmark X + $ hg bookmarks + * X -1:000000000000 $ hg update X 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - (activating bookmark X) list bookmarks @@ -202,3 +203,22 @@ Z $ hg log -T '{bookmarks % "{active}\n"}' -r Z Z + +test that updating to closed branch head also advances active bookmark + + $ hg commit --close-branch -m "closed" + $ hg update -q ".^1" + $ hg bookmark Y + $ hg bookmarks + X 3:4d6bd4bfb1ae + * Y 3:4d6bd4bfb1ae + Z 0:719295282060 + $ hg update + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + updating bookmark Y + $ hg bookmarks + X 3:4d6bd4bfb1ae + * Y 4:8fa964221e8e + Z 0:719295282060 + $ hg parents -q + 4:8fa964221e8e
--- a/tests/test-bookmarks-pushpull.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-bookmarks-pushpull.t Sat Apr 16 18:06:48 2016 -0500 @@ -103,6 +103,29 @@ deleting remote bookmark W [1] +export the active bookmark + + $ hg bookmark V + $ hg push -B . ../a + pushing to ../a + searching for changes + no changes found + exporting bookmark V + [1] + +delete the bookmark + + $ hg book -d V + $ hg push -B V ../a + pushing to ../a + searching for changes + no changes found + deleting remote bookmark V + [1] + $ hg up foobar + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + (activating bookmark foobar) + push/pull name that doesn't exist $ hg push -B badname ../a @@ -267,7 +290,7 @@ We want to use http because it is stateless and therefore more susceptible to race conditions - $ hg -R pull-race serve -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log + $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log $ cat pull-race.pid >> $DAEMON_PIDS $ hg clone -q http://localhost:$HGPORT/ pull-race2 @@ -285,7 +308,7 @@ $ cd .. 
$ killdaemons.py - $ hg -R pull-race serve -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log + $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log $ cat pull-race.pid >> $DAEMON_PIDS $ cd pull-race2 $ hg -R $TESTTMP/pull-race book @@ -322,7 +345,7 @@ (new config need server restart) $ killdaemons.py - $ hg -R ../pull-race serve -p $HGPORT -d --pid-file=../pull-race.pid -E main-error.log + $ hg serve -R ../pull-race -p $HGPORT -d --pid-file=../pull-race.pid -E main-error.log $ cat ../pull-race.pid >> $DAEMON_PIDS $ hg -R $TESTTMP/pull-race book @@ -381,7 +404,7 @@ > allow_push = * > EOF - $ hg -R ../a serve -p $HGPORT2 -d --pid-file=../hg2.pid + $ hg serve -R ../a -p $HGPORT2 -d --pid-file=../hg2.pid $ cat ../hg2.pid >> $DAEMON_PIDS $ hg push http://localhost:$HGPORT2/ @@ -680,12 +703,12 @@ pushing an existing but divergent bookmark with -B still requires -f - $ hg clone -q . r + $ hg clone -q . ../r $ hg up -q X $ echo 1 > f2 $ hg ci -qAml - $ cd r + $ cd ../r $ hg up -q X $ echo 2 > f2 $ hg ci -qAmr @@ -696,7 +719,7 @@ abort: push creates new remote head 54694f811df9 with bookmark 'X'! (pull and merge or see "hg help push" for details about pushing new heads) [255] - $ cd .. + $ cd ../addmarks Check summary output for incoming/outgoing bookmarks @@ -764,7 +787,7 @@ > allow_push = * > EOF $ killdaemons.py - $ hg -R ../issue4455-dest serve -p $HGPORT -d --pid-file=../issue4455.pid -E ../issue4455-error.log + $ hg serve -R ../issue4455-dest -p $HGPORT -d --pid-file=../issue4455.pid -E ../issue4455-error.log $ cat ../issue4455.pid >> $DAEMON_PIDS Local push
--- a/tests/test-bookmarks.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-bookmarks.t Sat Apr 16 18:06:48 2016 -0500 @@ -573,6 +573,7 @@ $ hg bookmark -r3 Y moving bookmark 'Y' forward from db815d6d32e6 $ cp -r ../cloned-bookmarks-update ../cloned-bookmarks-manual-update + $ cp -r ../cloned-bookmarks-update ../cloned-bookmarks-manual-update-with-divergence (manual version) @@ -598,7 +599,6 @@ $ hg -R ../cloned-bookmarks-manual-update update updating to active bookmark Y 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - (activating bookmark Y) (all in one version) @@ -617,6 +617,33 @@ updating to active bookmark Y 1 files updated, 0 files merged, 0 files removed, 0 files unresolved +We warn about divergent during bare update to the active bookmark + + $ hg -R ../cloned-bookmarks-manual-update-with-divergence update Y + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + (activating bookmark Y) + $ hg -R ../cloned-bookmarks-manual-update-with-divergence bookmarks -r X2 Y@1 + $ hg -R ../cloned-bookmarks-manual-update-with-divergence bookmarks + X2 1:925d80f479bb + * Y 2:db815d6d32e6 + Y@1 1:925d80f479bb + Z 2:db815d6d32e6 + x y 2:db815d6d32e6 + $ hg -R ../cloned-bookmarks-manual-update-with-divergence pull + pulling from $TESTTMP + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + updating bookmark Y + updating bookmark Z + (run 'hg heads' to see heads, 'hg merge' to merge) + $ hg -R ../cloned-bookmarks-manual-update-with-divergence update + updating to active bookmark Y + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other divergent bookmarks for "Y" + test wrongly formated bookmark $ echo '' >> .hg/bookmarks @@ -706,34 +733,14 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 -test non-linear update not clearing active bookmark - - $ hg up 1 - 1 files updated, 0 files merged, 2 files removed, 0 files 
unresolved - (leaving bookmark four) - $ hg book drop - $ hg up -C - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - (leaving bookmark drop) - $ hg sum - parent: 2:db815d6d32e6 - 2 - branch: default - bookmarks: should-end-on-two - commit: 2 unknown (clean) - update: 1 new changesets, 2 branch heads (merge) - phases: 4 draft - $ hg book - drop 1:925d80f479bb - four 3:9ba5f110a0b3 - should-end-on-two 2:db815d6d32e6 - $ hg book -d drop - $ hg up four - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - (activating bookmark four) no-op update doesn't deactive bookmarks + $ hg bookmarks + * four 3:9ba5f110a0b3 + should-end-on-two 2:db815d6d32e6 + $ hg up four + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg sum @@ -839,3 +846,52 @@ 6:81dcce76aa0b $ hg -R ../cloned-bookmarks-update bookmarks | grep ' Y ' * Y 6:81dcce76aa0b + + $ cd .. + +ensure changelog is written before bookmarks + $ hg init orderrepo + $ cd orderrepo + $ touch a + $ hg commit -Aqm one + $ hg book mybook + $ echo a > a + + $ cat > $TESTTMP/pausefinalize.py <<EOF + > from mercurial import extensions, localrepo + > import os, time + > def transaction(orig, self, desc, report=None): + > tr = orig(self, desc, report) + > def sleep(*args, **kwargs): + > retry = 20 + > while retry > 0 and not os.path.exists("$TESTTMP/unpause"): + > retry -= 1 + > time.sleep(0.5) + > if os.path.exists("$TESTTMP/unpause"): + > os.remove("$TESTTMP/unpause") + > # It is important that this finalizer start with 'a', so it runs before + > # the changelog finalizer appends to the changelog. + > tr.addfinalize('a-sleep', sleep) + > return tr + > + > def extsetup(ui): + > # This extension inserts an artifical pause during the transaction + > # finalizer, so we can run commands mid-transaction-close. 
+ > extensions.wrapfunction(localrepo.localrepository, 'transaction', + > transaction) + > EOF + $ hg commit -qm two --config extensions.pausefinalize=$TESTTMP/pausefinalize.py & + $ sleep 2 + $ hg log -r . + changeset: 0:867bc5792c8c + bookmark: mybook + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: one + + $ hg bookmarks + * mybook 0:867bc5792c8c + $ touch $TESTTMP/unpause + + $ cd ..
--- a/tests/test-branches.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-branches.t Sat Apr 16 18:06:48 2016 -0500 @@ -544,15 +544,15 @@ 0060: e3 d4 9c 05 80 00 00 02 e2 3b 55 05 00 00 00 02 |.........;U.....| 0070: f8 94 c2 56 80 00 00 03 |...V....| -#if unix-permissions no-root no errors when revbranchcache is not writable $ echo >> .hg/cache/rbc-revs-v1 - $ chmod a-w .hg/cache/rbc-revs-v1 + $ mv .hg/cache/rbc-revs-v1 .hg/cache/rbc-revs-v1_ + $ mkdir .hg/cache/rbc-revs-v1 $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' 5 - $ chmod a+w .hg/cache/rbc-revs-v1 -#endif + $ rmdir .hg/cache/rbc-revs-v1 + $ mv .hg/cache/rbc-revs-v1_ .hg/cache/rbc-revs-v1 recovery from invalid cache revs file with trailing data $ echo >> .hg/cache/rbc-revs-v1 @@ -629,4 +629,25 @@ $ f --size .hg/cache/rbc-revs* .hg/cache/rbc-revs-v1: size=112 +cache is rebuilt when corruption is detected + $ echo > .hg/cache/rbc-names-v1 + $ hg log -r '5:&branch(.)' -T '{rev} ' --debug + rebuilding corrupted revision branch cache + 8 9 10 11 12 13 truncating cache/rbc-revs-v1 to 40 + $ f --size --hexdump .hg/cache/rbc-* + .hg/cache/rbc-names-v1: size=79 + 0000: 62 00 61 00 63 00 61 20 62 72 61 6e 63 68 20 6e |b.a.c.a branch n| + 0010: 61 6d 65 20 6d 75 63 68 20 6c 6f 6e 67 65 72 20 |ame much longer | + 0020: 74 68 61 6e 20 74 68 65 20 64 65 66 61 75 6c 74 |than the default| + 0030: 20 6a 75 73 74 69 66 69 63 61 74 69 6f 6e 20 75 | justification u| + 0040: 73 65 64 20 62 79 20 62 72 61 6e 63 68 65 73 |sed by branches| + .hg/cache/rbc-revs-v1: size=112 + 0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| + 0010: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| + 0020: 00 00 00 00 00 00 00 00 d8 cb c6 1d 00 00 00 01 |................| + 0030: 58 97 36 a2 00 00 00 02 10 ff 58 95 00 00 00 03 |X.6.......X.....| + 0040: ee bb 94 44 00 00 00 00 5f 40 61 bb 00 00 00 00 |...D...._@a.....| + 0050: bf be 84 1b 00 00 00 00 d3 f1 63 45 80 00 00 00 |..........cE....| + 0060: 
e3 d4 9c 05 80 00 00 00 e2 3b 55 05 00 00 00 00 |.........;U.....| + $ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-bugzilla.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,97 @@ +mock bugzilla driver for testing template output: + + $ cat <<EOF > bzmock.py + > from __future__ import absolute_import + > from mercurial import extensions + > + > def extsetup(ui): + > bugzilla = extensions.find('bugzilla') + > class bzmock(bugzilla.bzaccess): + > def __init__(self, ui): + > super(bzmock, self).__init__(ui) + > self._logfile = ui.config('bugzilla', 'mocklog') + > def updatebug(self, bugid, newstate, text, committer): + > with open(self._logfile, 'a') as f: + > f.write('update bugid=%r, newstate=%r, committer=%r\n' + > % (bugid, newstate, committer)) + > f.write('----\n' + text + '\n----\n') + > def notify(self, bugs, committer): + > with open(self._logfile, 'a') as f: + > f.write('notify bugs=%r, committer=%r\n' + > % (bugs, committer)) + > bugzilla.bugzilla._versions['mock'] = bzmock + > EOF + +set up mock repository: + + $ hg init mockremote + $ cat <<EOF > mockremote/.hg/hgrc + > [extensions] + > bugzilla = + > bzmock = $TESTTMP/bzmock.py + > + > [bugzilla] + > version = mock + > mocklog = $TESTTMP/bzmock.log + > + > [hooks] + > incoming.bugzilla = python:hgext.bugzilla.hook + > + > [web] + > baseurl=http://example.org/hg + > + > %include $TESTTMP/bzstyle.hgrc + > EOF + + $ hg clone -q mockremote mocklocal + +push with default template: + + $ echo '[bugzilla]' > bzstyle.hgrc + $ echo foo > mocklocal/foo + $ hg ci -R mocklocal -Aqm 'Fixes bug 123' + $ hg -R mocklocal push -q + $ cat bzmock.log && rm bzmock.log + update bugid=123, newstate={}, committer='test' + ---- + changeset 7875a8342c6f in repo $TESTTMP/mockremote refers to bug 123. + details: + Fixes bug 123 + ---- + notify bugs={123: {}}, committer='test' + +push with style: + + $ cat <<EOF > bzstyle.map + > changeset = "{node|short} refers to bug {bug}." 
+ > EOF + $ echo "style = $TESTTMP/bzstyle.map" >> bzstyle.hgrc + $ echo foo >> mocklocal/foo + $ hg ci -R mocklocal -qm 'Fixes bug 456' + $ hg -R mocklocal push -q + $ cat bzmock.log && rm bzmock.log + update bugid=456, newstate={}, committer='test' + ---- + 2808b172464b refers to bug 456. + ---- + notify bugs={456: {}}, committer='test' + +push with template (overrides style): + + $ cat <<EOF >> bzstyle.hgrc + > template = Changeset {node|short} in {root|basename}. + > {hgweb}/rev/{node|short}\n + > {desc} + > EOF + $ echo foo >> mocklocal/foo + $ hg ci -R mocklocal -qm 'Fixes bug 789' + $ hg -R mocklocal push -q + $ cat bzmock.log && rm bzmock.log + update bugid=789, newstate={}, committer='test' + ---- + Changeset a770f3e409f2 in mockremote. + http://example.org/hg/rev/a770f3e409f2 + + Fixes bug 789 + ---- + notify bugs={789: {}}, committer='test'
--- a/tests/test-bundle.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-bundle.t Sat Apr 16 18:06:48 2016 -0500 @@ -260,7 +260,7 @@ $ hg -R test bundle -t packed1 packed.hg abort: packed bundles cannot be produced by "hg bundle" - (use "hg debugcreatestreamclonebundle") + (use 'hg debugcreatestreamclonebundle') [255] packed1 is produced properly @@ -733,3 +733,77 @@ $ hg bundle -r 'public()' no-output.hg abort: no commits to bundle [255] + + $ cd .. + +When user merges to the revision existing only in the bundle, +it should show warning that second parent of the working +directory does not exist + + $ hg init update2bundled + $ cd update2bundled + $ cat <<EOF >> .hg/hgrc + > [extensions] + > strip = + > EOF + $ echo "aaa" >> a + $ hg commit -A -m 0 + adding a + $ echo "bbb" >> b + $ hg commit -A -m 1 + adding b + $ echo "ccc" >> c + $ hg commit -A -m 2 + adding c + $ hg update -r 1 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo "ddd" >> d + $ hg commit -A -m 3 + adding d + created new head + $ hg update -r 2 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg log -G + o changeset: 3:8bd3e1f196af + | tag: tip + | parent: 1:a01eca7af26d + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: 3 + | + | @ changeset: 2:4652c276ac4f + |/ user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: 2 + | + o changeset: 1:a01eca7af26d + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: 1 + | + o changeset: 0:4fe08cd4693e + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: 0 + + $ hg bundle --base 1 -r 3 ../update2bundled.hg + 1 changesets found + $ hg strip -r 3 + saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg (glob) + $ hg merge -R ../update2bundled.hg -r 3 + setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle + 1 files updated, 0 files merged, 0 files removed, 0 files 
unresolved + (branch merge, don't forget to commit) + +When user updates to the revision existing only in the bundle, +it should show warning + + $ hg update -R ../update2bundled.hg --clean -r 3 + setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + +When user updates to the revision existing in the local repository +the warning shouldn't be emitted + + $ hg update -R ../update2bundled.hg -r 0 + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
--- a/tests/test-bundle2-exchange.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-bundle2-exchange.t Sat Apr 16 18:06:48 2016 -0500 @@ -301,7 +301,7 @@ pull over http - $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log + $ hg serve -R main -p $HGPORT -d --pid-file=main.pid -E main-error.log $ cat main.pid >> $DAEMON_PIDS $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc @@ -372,7 +372,7 @@ push over http - $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log + $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS $ hg -R main phase --public 32af7686d403 @@ -498,7 +498,7 @@ > EOF $ killdaemons.py - $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log + $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS Doing the actual push: Abort error @@ -596,7 +596,7 @@ > EOF $ killdaemons.py - $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log + $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS $ hg -R main push other -r e7ec4e813ba6 @@ -661,7 +661,7 @@ > pretxnchangegroup = sh -c "echo 'Fail early!'; false" > EOF $ killdaemons.py # reload http config - $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log + $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS $ hg -R main push other -r e7ec4e813ba6 @@ -785,7 +785,7 @@ > mandatorypart=$TESTTMP/mandatorypart.py > EOF $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config - $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log + $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS (Failure from a hook) @@ -857,7 +857,7 @@ > prepushkey.failpush = > EOF $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # 
reload http config - $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log + $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS $ hg -R main push other -r e7ec4e813ba6
--- a/tests/test-bundle2-format.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-bundle2-format.t Sat Apr 16 18:06:48 2016 -0500 @@ -9,8 +9,8 @@ $ cat > bundle2.py << EOF > """A small extension to test bundle2 implementation > - > Current bundle2 implementation is far too limited to be used in any core - > code. We still need to be able to test it while it grow up. + > This extension allows detailed testing of the various bundle2 API and + > behaviors. > """ > > import sys, os, gc @@ -166,6 +166,8 @@ > file.write(chunk) > except RuntimeError, exc: > raise error.Abort(exc) + > finally: + > file.flush() > > @command('unbundle2', [], '') > def cmdunbundle2(ui, repo, replypath=None): @@ -194,9 +196,9 @@ > for rec in op.records['changegroup']: > ui.write('addchangegroup return: %i\n' % rec['return']) > if op.reply is not None and replypath is not None: - > file = open(replypath, 'wb') - > for chunk in op.reply.getchunks(): - > file.write(chunk) + > with open(replypath, 'wb') as file: + > for chunk in op.reply.getchunks(): + > file.write(chunk) > > @command('statbundle2', [], '') > def cmdstatbundle2(ui, repo):
--- a/tests/test-check-code.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-check-code.t Sat Apr 16 18:06:48 2016 -0500 @@ -8,7 +8,11 @@ $ hg locate | sed 's-\\-/-g' | > xargs "$check_code" --warnings --per-file=0 || false - Skipping hgext/zeroconf/Zeroconf.py it has no-che?k-code (glob) + Skipping hgext/fsmonitor/pywatchman/__init__.py it has no-che?k-code (glob) + Skipping hgext/fsmonitor/pywatchman/bser.c it has no-che?k-code (glob) + Skipping hgext/fsmonitor/pywatchman/capabilities.py it has no-che?k-code (glob) + Skipping hgext/fsmonitor/pywatchman/msc_stdint.h it has no-che?k-code (glob) + Skipping hgext/fsmonitor/pywatchman/pybser.py it has no-che?k-code (glob) Skipping i18n/polib.py it has no-che?k-code (glob) Skipping mercurial/httpclient/__init__.py it has no-che?k-code (glob) Skipping mercurial/httpclient/_readers.py it has no-che?k-code (glob)
--- a/tests/test-check-commit.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-check-commit.t Sat Apr 16 18:06:48 2016 -0500 @@ -14,7 +14,7 @@ $ for node in `hg log --rev 'not public() and ::.' --template '{node|short}\n'`; do > hg export $node | contrib/check-commit > ${TESTTMP}/check-commit.out > if [ $? -ne 0 ]; then - > echo "Revision $node does not comply to rules" + > echo "Revision $node does not comply with rules" > echo '------------------------------------------------------' > cat ${TESTTMP}/check-commit.out > echo
--- a/tests/test-check-config.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-check-config.t Sat Apr 16 18:06:48 2016 -0500 @@ -5,4 +5,4 @@ New errors are not allowed. Warnings are strongly discouraged. $ hg files "set:(**.py or **.txt) - tests/**" | sed 's|\\|/|g' | - > xargs python contrib/check-config.py + > python contrib/check-config.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-check-module-imports.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,166 @@ +#require test-repo + + $ import_checker="$TESTDIR"/../contrib/import-checker.py + +Run the doctests from the import checker, and make sure +it's working correctly. + $ TERM=dumb + $ export TERM + $ python -m doctest $import_checker + +Run additional tests for the import checker + + $ mkdir testpackage + + $ cat > testpackage/multiple.py << EOF + > from __future__ import absolute_import + > import os, sys + > EOF + + $ cat > testpackage/unsorted.py << EOF + > from __future__ import absolute_import + > import sys + > import os + > EOF + + $ cat > testpackage/stdafterlocal.py << EOF + > from __future__ import absolute_import + > from . import unsorted + > import os + > EOF + + $ cat > testpackage/requirerelative.py << EOF + > from __future__ import absolute_import + > import testpackage.unsorted + > EOF + + $ cat > testpackage/importalias.py << EOF + > from __future__ import absolute_import + > import ui + > EOF + + $ cat > testpackage/relativestdlib.py << EOF + > from __future__ import absolute_import + > from .. import os + > EOF + + $ cat > testpackage/symbolimport.py << EOF + > from __future__ import absolute_import + > from .unsorted import foo + > EOF + + $ cat > testpackage/latesymbolimport.py << EOF + > from __future__ import absolute_import + > from . import unsorted + > from mercurial.node import hex + > EOF + + $ cat > testpackage/multiplegroups.py << EOF + > from __future__ import absolute_import + > from . import unsorted + > from . import more + > EOF + + $ mkdir testpackage/subpackage + $ cat > testpackage/subpackage/levelpriority.py << EOF + > from __future__ import absolute_import + > from . import foo + > from .. import parent + > EOF + + $ touch testpackage/subpackage/foo.py + $ cat > testpackage/subpackage/__init__.py << EOF + > from __future__ import absolute_import + > from . 
import levelpriority # should not cause cycle + > EOF + + $ cat > testpackage/subpackage/localimport.py << EOF + > from __future__ import absolute_import + > from . import foo + > def bar(): + > # should not cause "higher-level import should come first" + > from .. import unsorted + > # but other errors should be detected + > from .. import more + > import testpackage.subpackage.levelpriority + > EOF + + $ cat > testpackage/importmodulefromsub.py << EOF + > from __future__ import absolute_import + > from .subpackage import foo # not a "direct symbol import" + > EOF + + $ cat > testpackage/importsymbolfromsub.py << EOF + > from __future__ import absolute_import + > from .subpackage import foo, nonmodule + > EOF + + $ cat > testpackage/sortedentries.py << EOF + > from __future__ import absolute_import + > from . import ( + > foo, + > bar, + > ) + > EOF + + $ cat > testpackage/importfromalias.py << EOF + > from __future__ import absolute_import + > from . import ui + > EOF + + $ cat > testpackage/importfromrelative.py << EOF + > from __future__ import absolute_import + > from testpackage.unsorted import foo + > EOF + + $ python "$import_checker" testpackage/*.py testpackage/subpackage/*.py + testpackage/importalias.py:2: ui module must be "as" aliased to uimod + testpackage/importfromalias.py:2: ui from testpackage must be "as" aliased to uimod + testpackage/importfromrelative.py:2: import should be relative: testpackage.unsorted + testpackage/importfromrelative.py:2: direct symbol import foo from testpackage.unsorted + testpackage/importsymbolfromsub.py:2: direct symbol import nonmodule from testpackage.subpackage + testpackage/latesymbolimport.py:3: symbol import follows non-symbol import: mercurial.node + testpackage/multiple.py:2: multiple imported names: os, sys + testpackage/multiplegroups.py:3: multiple "from . 
import" statements + testpackage/relativestdlib.py:2: relative import of stdlib module + testpackage/requirerelative.py:2: import should be relative: testpackage.unsorted + testpackage/sortedentries.py:2: imports from testpackage not lexically sorted: bar < foo + testpackage/stdafterlocal.py:3: stdlib import "os" follows local import: testpackage + testpackage/subpackage/levelpriority.py:3: higher-level import should come first: testpackage + testpackage/subpackage/localimport.py:7: multiple "from .. import" statements + testpackage/subpackage/localimport.py:8: import should be relative: testpackage.subpackage.levelpriority + testpackage/symbolimport.py:2: direct symbol import foo from testpackage.unsorted + testpackage/unsorted.py:3: imports not lexically sorted: os < sys + [1] + + $ cd "$TESTDIR"/.. + +There are a handful of cases here that require renaming a module so it +doesn't overlap with a stdlib module name. There are also some cycles +here that we should still endeavor to fix, and some cycles will be +hidden by deduplication algorithm in the cycle detector, so fixing +these may expose other cycles. + +Known-bad files are excluded by -X as some of them would produce unstable +outputs, which should be fixed later. 
+ + $ hg locate 'mercurial/**.py' 'hgext/**.py' 'tests/**.py' \ + > 'tests/**.t' \ + > -X tests/test-hgweb-auth.py \ + > -X tests/hypothesishelpers.py \ + > -X tests/test-ctxmanager.py \ + > -X tests/test-lock.py \ + > -X tests/test-verify-repo-operations.py \ + > -X tests/test-hook.t \ + > -X tests/test-import.t \ + > -X tests/test-check-module-imports.t \ + > -X tests/test-commit-interactive.t \ + > -X tests/test-contrib-check-code.t \ + > -X tests/test-extension.t \ + > -X tests/test-hghave.t \ + > -X tests/test-hgweb-no-path-info.t \ + > -X tests/test-hgweb-no-request-uri.t \ + > -X tests/test-hgweb-non-interactive.t \ + > | sed 's-\\-/-g' | python "$import_checker" - + Import cycle: hgext.largefiles.basestore -> hgext.largefiles.localstore -> hgext.largefiles.basestore + [1]
--- a/tests/test-check-py3-compat.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-check-py3-compat.t Sat Apr 16 18:06:48 2016 -0500 @@ -3,62 +3,11 @@ $ cd "$TESTDIR"/.. $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py - contrib/casesmash.py not using absolute_import - contrib/check-code.py not using absolute_import - contrib/check-code.py requires print_function - contrib/check-config.py not using absolute_import - contrib/check-config.py requires print_function - contrib/debugcmdserver.py not using absolute_import - contrib/debugcmdserver.py requires print_function - contrib/debugshell.py not using absolute_import - contrib/fixpax.py not using absolute_import - contrib/fixpax.py requires print_function - contrib/hgclient.py not using absolute_import - contrib/hgclient.py requires print_function - contrib/hgfixes/fix_bytes.py not using absolute_import - contrib/hgfixes/fix_bytesmod.py not using absolute_import - contrib/hgfixes/fix_leftover_imports.py not using absolute_import - contrib/import-checker.py not using absolute_import - contrib/import-checker.py requires print_function - contrib/memory.py not using absolute_import - contrib/perf.py not using absolute_import - contrib/python-hook-examples.py not using absolute_import - contrib/revsetbenchmarks.py not using absolute_import - contrib/revsetbenchmarks.py requires print_function - contrib/showstack.py not using absolute_import - contrib/synthrepo.py not using absolute_import - contrib/win32/hgwebdir_wsgi.py not using absolute_import - doc/check-seclevel.py not using absolute_import - doc/gendoc.py not using absolute_import - doc/hgmanpage.py not using absolute_import - hgext/__init__.py not using absolute_import - hgext/acl.py not using absolute_import - hgext/blackbox.py not using absolute_import - hgext/bugzilla.py not using absolute_import - hgext/censor.py not using absolute_import - hgext/children.py not using absolute_import - hgext/churn.py not using 
absolute_import - hgext/clonebundles.py not using absolute_import - hgext/color.py not using absolute_import - hgext/convert/__init__.py not using absolute_import - hgext/convert/bzr.py not using absolute_import - hgext/convert/common.py not using absolute_import - hgext/convert/convcmd.py not using absolute_import - hgext/convert/cvs.py not using absolute_import - hgext/convert/cvsps.py not using absolute_import - hgext/convert/darcs.py not using absolute_import - hgext/convert/filemap.py not using absolute_import - hgext/convert/git.py not using absolute_import - hgext/convert/gnuarch.py not using absolute_import - hgext/convert/hg.py not using absolute_import - hgext/convert/monotone.py not using absolute_import - hgext/convert/p4.py not using absolute_import - hgext/convert/subversion.py not using absolute_import - hgext/convert/transport.py not using absolute_import - hgext/eol.py not using absolute_import - hgext/extdiff.py not using absolute_import - hgext/factotum.py not using absolute_import hgext/fetch.py not using absolute_import + hgext/fsmonitor/pywatchman/__init__.py not using absolute_import + hgext/fsmonitor/pywatchman/__init__.py requires print_function + hgext/fsmonitor/pywatchman/capabilities.py not using absolute_import + hgext/fsmonitor/pywatchman/pybser.py not using absolute_import hgext/gpg.py not using absolute_import hgext/graphlog.py not using absolute_import hgext/hgcia.py not using absolute_import @@ -66,7 +15,6 @@ hgext/highlight/__init__.py not using absolute_import hgext/highlight/highlight.py not using absolute_import hgext/histedit.py not using absolute_import - hgext/keyword.py not using absolute_import hgext/largefiles/__init__.py not using absolute_import hgext/largefiles/basestore.py not using absolute_import hgext/largefiles/lfcommands.py not using absolute_import @@ -79,103 +27,150 @@ hgext/largefiles/uisetup.py not using absolute_import hgext/largefiles/wirestore.py not using absolute_import hgext/mq.py not using 
absolute_import - hgext/notify.py not using absolute_import - hgext/pager.py not using absolute_import - hgext/patchbomb.py not using absolute_import - hgext/purge.py not using absolute_import hgext/rebase.py not using absolute_import - hgext/record.py not using absolute_import - hgext/relink.py not using absolute_import - hgext/schemes.py not using absolute_import hgext/share.py not using absolute_import - hgext/shelve.py not using absolute_import - hgext/strip.py not using absolute_import - hgext/transplant.py not using absolute_import - hgext/win32mbcs.py not using absolute_import hgext/win32text.py not using absolute_import - hgext/zeroconf/Zeroconf.py not using absolute_import - hgext/zeroconf/Zeroconf.py requires print_function - hgext/zeroconf/__init__.py not using absolute_import i18n/check-translation.py not using absolute_import i18n/polib.py not using absolute_import - mercurial/cmdutil.py not using absolute_import - mercurial/commands.py not using absolute_import setup.py not using absolute_import - tests/filterpyflakes.py requires print_function - tests/generate-working-copy-states.py requires print_function - tests/get-with-headers.py requires print_function tests/heredoctest.py requires print_function - tests/hypothesishelpers.py not using absolute_import - tests/hypothesishelpers.py requires print_function - tests/killdaemons.py not using absolute_import tests/md5sum.py not using absolute_import - tests/mockblackbox.py not using absolute_import - tests/printenv.py not using absolute_import tests/readlink.py not using absolute_import tests/readlink.py requires print_function - tests/revlog-formatv0.py not using absolute_import tests/run-tests.py not using absolute_import - tests/seq.py not using absolute_import - tests/seq.py requires print_function - tests/silenttestrunner.py not using absolute_import - tests/silenttestrunner.py requires print_function - tests/sitecustomize.py not using absolute_import tests/svn-safe-append.py not using 
absolute_import - tests/svnxml.py not using absolute_import - tests/test-ancestor.py requires print_function tests/test-atomictempfile.py not using absolute_import - tests/test-batching.py not using absolute_import - tests/test-batching.py requires print_function - tests/test-bdiff.py not using absolute_import - tests/test-bdiff.py requires print_function - tests/test-context.py not using absolute_import - tests/test-context.py requires print_function tests/test-demandimport.py not using absolute_import - tests/test-demandimport.py requires print_function - tests/test-dispatch.py not using absolute_import - tests/test-dispatch.py requires print_function - tests/test-doctest.py not using absolute_import - tests/test-duplicateoptions.py not using absolute_import - tests/test-duplicateoptions.py requires print_function - tests/test-filecache.py not using absolute_import - tests/test-filecache.py requires print_function - tests/test-filelog.py not using absolute_import - tests/test-filelog.py requires print_function - tests/test-hg-parseurl.py not using absolute_import - tests/test-hg-parseurl.py requires print_function - tests/test-hgweb-auth.py not using absolute_import - tests/test-hgweb-auth.py requires print_function - tests/test-hgwebdir-paths.py not using absolute_import - tests/test-hybridencode.py not using absolute_import - tests/test-hybridencode.py requires print_function - tests/test-lrucachedict.py not using absolute_import - tests/test-lrucachedict.py requires print_function - tests/test-manifest.py not using absolute_import - tests/test-minirst.py not using absolute_import - tests/test-minirst.py requires print_function - tests/test-parseindex2.py not using absolute_import - tests/test-parseindex2.py requires print_function - tests/test-pathencode.py not using absolute_import - tests/test-pathencode.py requires print_function - tests/test-propertycache.py not using absolute_import - tests/test-propertycache.py requires print_function - 
tests/test-revlog-ancestry.py not using absolute_import - tests/test-revlog-ancestry.py requires print_function - tests/test-run-tests.py not using absolute_import - tests/test-simplemerge.py not using absolute_import - tests/test-status-inprocess.py not using absolute_import - tests/test-status-inprocess.py requires print_function - tests/test-symlink-os-yes-fs-no.py not using absolute_import - tests/test-trusted.py not using absolute_import - tests/test-trusted.py requires print_function - tests/test-ui-color.py not using absolute_import - tests/test-ui-color.py requires print_function - tests/test-ui-config.py not using absolute_import - tests/test-ui-config.py requires print_function - tests/test-ui-verbosity.py not using absolute_import - tests/test-ui-verbosity.py requires print_function - tests/test-url.py not using absolute_import - tests/test-url.py requires print_function - tests/test-walkrepo.py requires print_function - tests/test-wireproto.py requires print_function - tests/tinyproxy.py requires print_function + +#if py3exe + $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py + contrib/check-code.py: invalid syntax: (unicode error) 'unicodeescape' codec can't decode bytes in position *-*: malformed \N character escape (<unknown>, line *) (glob) + doc/hgmanpage.py: invalid syntax: invalid syntax (<unknown>, line *) (glob) + hgext/automv.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob) + hgext/blackbox.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/bugzilla.py: error importing module: <ImportError> No module named 'urlparse' (line *) (glob) + hgext/censor.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/chgserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob) + hgext/children.py: error 
importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/churn.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/clonebundles.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/color.py: invalid syntax: invalid syntax (<unknown>, line *) (glob) + hgext/convert/bzr.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob) + hgext/convert/common.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob) + hgext/convert/convcmd.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob) + hgext/convert/cvs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob) + hgext/convert/cvsps.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob) + hgext/convert/darcs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob) + hgext/convert/filemap.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob) + hgext/convert/git.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob) + hgext/convert/gnuarch.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob) + hgext/convert/hg.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/convert/monotone.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob) + 
hgext/convert/p*.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob) + hgext/convert/subversion.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob) + hgext/convert/transport.py: error importing module: <ImportError> No module named 'svn.client' (line *) (glob) + hgext/eol.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/extdiff.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob) + hgext/factotum.py: error importing: <ImportError> No module named 'httplib' (error at __init__.py:*) (glob) + hgext/fetch.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob) + hgext/fsmonitor/watchmanclient.py: error importing module: <SystemError> Parent module 'hgext.fsmonitor' not loaded, cannot perform relative import (line *) (glob) + hgext/gpg.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob) + hgext/graphlog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/hgcia.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/hgk.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/histedit.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob) + hgext/keyword.py: error importing: <ImportError> No module named 'BaseHTTPServer' (error at common.py:*) (glob) + hgext/largefiles/basestore.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob) + hgext/largefiles/lfcommands.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob) + hgext/largefiles/lfutil.py: 
error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/largefiles/localstore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob) + hgext/largefiles/overrides.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob) + hgext/largefiles/proto.py: error importing: <ImportError> No module named 'httplib' (error at httppeer.py:*) (glob) + hgext/largefiles/remotestore.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at wireproto.py:*) (glob) + hgext/largefiles/reposetup.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/largefiles/uisetup.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob) + hgext/largefiles/wirestore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob) + hgext/mq.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob) + hgext/notify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/pager.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/patchbomb.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/purge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/rebase.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob) + hgext/record.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/relink.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/schemes.py: error 
importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/share.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/shelve.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob) + hgext/strip.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + hgext/transplant.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob) + mercurial/archival.py: invalid syntax: invalid syntax (<unknown>, line *) (glob) + mercurial/branchmap.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/bundle*.py: invalid syntax: invalid syntax (<unknown>, line *) (glob) + mercurial/bundlerepo.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob) + mercurial/changegroup.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/changelog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/cmdutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/commands.py: invalid syntax: invalid syntax (<unknown>, line *) (glob) + mercurial/commandserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob) + mercurial/context.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/copies.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/crecord.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + 
mercurial/dirstate.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/discovery.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/dispatch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/exchange.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob) + mercurial/extensions.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/filelog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/filemerge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/fileset.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/formatter.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob) + mercurial/graphmod.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/help.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/hg.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob) + mercurial/hgweb/common.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob) + mercurial/hgweb/hgweb_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob) + mercurial/hgweb/hgwebdir_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob) + mercurial/hgweb/protocol.py: 
error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob) + mercurial/hgweb/request.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob) + mercurial/hgweb/server.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob) + mercurial/hgweb/webcommands.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob) + mercurial/hgweb/webutil.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob) + mercurial/hgweb/wsgicgi.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob) + mercurial/hook.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/httpclient/_readers.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob) + mercurial/httpconnection.py: error importing: <ImportError> No module named 'httplib' (error at __init__.py:*) (glob) + mercurial/httppeer.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob) + mercurial/keepalive.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob) + mercurial/localrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/mail.py: error importing module: <AttributeError> module 'email' has no attribute 'Header' (line *) (glob) + mercurial/manifest.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/merge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/namespaces.py: 
error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/patch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/pure/mpatch.py: error importing module: <ImportError> cannot import name 'pycompat' (line *) (glob) + mercurial/pure/parsers.py: error importing module: <ImportError> No module named 'mercurial.pure.node' (line *) (glob) + mercurial/repair.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob) + mercurial/revlog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/revset.py: error importing module: <AttributeError> 'dict' object has no attribute 'iteritems' (line *) (glob) + mercurial/scmutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/scmwindows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob) + mercurial/simplemerge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/sshpeer.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at wireproto.py:*) (glob) + mercurial/sshserver.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/statichttprepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/store.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/streamclone.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/subrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + 
mercurial/templatefilters.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/templatekw.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/templater.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/ui.py: error importing: <ImportError> No module named 'cPickle' (error at formatter.py:*) (glob) + mercurial/unionrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/url.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob) + mercurial/verify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob) + mercurial/win*.py: error importing module: <ImportError> No module named 'msvcrt' (line *) (glob) + mercurial/windows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob) + mercurial/wireproto.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob) + tests/readlink.py: invalid syntax: invalid syntax (<unknown>, line *) (glob) + +#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-check-shbang.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,13 @@ +#require test-repo + + $ cd "`dirname "$TESTDIR"`" + +look for python scripts that do not use /usr/bin/env + + $ hg files 'set:grep(r"^#!.*?python") and not grep(r"^#!/usr/bin/env python")' + [1] + +look for shell scripts that do not use /bin/sh + + $ hg files 'set:grep(r"^#!.*/bin/sh") and not grep(r"^#!/bin/sh")' + [1]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-chg.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,12 @@ +init repo + + $ hg init foo + $ cd foo + +ill-formed config + + $ hg status + $ echo '=brokenconfig' >> $HGRCPATH + $ hg status + hg: parse error at * (glob) + [255]
--- a/tests/test-clonebundles.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-clonebundles.t Sat Apr 16 18:06:48 2016 -0500 @@ -52,7 +52,7 @@ $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest $ hg clone http://localhost:$HGPORT 404-url applying clone bundle from http://does.not.exist/bundle.hg - error fetching bundle: (.* not known|getaddrinfo failed) (re) + error fetching bundle: (.* not known|getaddrinfo failed|No address associated with hostname) (re) abort: error applying bundle (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false") [255]
--- a/tests/test-command-template.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-command-template.t Sat Apr 16 18:06:48 2016 -0500 @@ -71,6 +71,8 @@ 8 $ hg tip --config 'ui.logtemplate="{rev}\n"' 8 + $ hg tip --config 'ui.logtemplate=n{rev}\n' + n8 Make sure user/global hgrc does not affect tests @@ -1008,7 +1010,7 @@ $ echo 'changeset =' > t $ hg log --style t - abort: t:1: missing value + hg: parse error at t:1: missing value [255] Error if include fails: @@ -1018,7 +1020,7 @@ $ hg log --style ./t abort: template file ./q: Permission denied [255] - $ rm q + $ rm -f q #endif Include works: @@ -2508,7 +2510,7 @@ $ echo 'x = "f' >> t $ hg log - abort: t:3: unmatched quotes + hg: parse error at t:3: unmatched quotes [255] $ hg log -T '{date' @@ -2790,6 +2792,14 @@ $ hg log -R latesttag -r tip --template 'modified files: {file_mods % " {file}\n"}\n' modified files: .hgtags + + $ hg log -R latesttag -r tip -T '{rev % "a"}\n' + hg: parse error: keyword 'rev' is not iterable + [255] + $ hg log -R latesttag -r tip -T '{get(extras, "unknown") % "a"}\n' + hg: parse error: None is not iterable + [255] + Test the sub function of templating for expansion: $ hg log -R latesttag -r 10 --template '{sub("[0-9]", "x", "{rev}")}\n' @@ -2853,27 +2863,55 @@ Test integer literal: - $ hg log -Ra -r0 -T '{(0)}\n' + $ hg debugtemplate -v '{(0)}\n' + (template + (group + ('integer', '0')) + ('string', '\n')) 0 - $ hg log -Ra -r0 -T '{(123)}\n' + $ hg debugtemplate -v '{(123)}\n' + (template + (group + ('integer', '123')) + ('string', '\n')) 123 - $ hg log -Ra -r0 -T '{(-4)}\n' + $ hg debugtemplate -v '{(-4)}\n' + (template + (group + ('integer', '-4')) + ('string', '\n')) -4 - $ hg log -Ra -r0 -T '{(-)}\n' + $ hg debugtemplate '{(-)}\n' hg: parse error at 2: integer literal without digits [255] - $ hg log -Ra -r0 -T '{(-a)}\n' + $ hg debugtemplate '{(-a)}\n' hg: parse error at 2: integer literal without digits [255] top-level integer literal is interpreted as symbol (i.e. 
variable name): - $ hg log -Ra -r0 -T '{1}\n' - - $ hg log -Ra -r0 -T '{if("t", "{1}")}\n' - - $ hg log -Ra -r0 -T '{1|stringify}\n' - + $ hg debugtemplate -D 1=one -v '{1}\n' + (template + ('integer', '1') + ('string', '\n')) + one + $ hg debugtemplate -D 1=one -v '{if("t", "{1}")}\n' + (template + (func + ('symbol', 'if') + (list + ('string', 't') + (template + ('integer', '1')))) + ('string', '\n')) + one + $ hg debugtemplate -D 1=one -v '{1|stringify}\n' + (template + (| + ('integer', '1') + ('symbol', 'stringify')) + ('string', '\n')) + one unless explicit symbol is expected: @@ -2886,18 +2924,30 @@ Test string literal: - $ hg log -Ra -r0 -T '{"string with no template fragment"}\n' + $ hg debugtemplate -Ra -r0 -v '{"string with no template fragment"}\n' + (template + ('string', 'string with no template fragment') + ('string', '\n')) string with no template fragment - $ hg log -Ra -r0 -T '{"template: {rev}"}\n' + $ hg debugtemplate -Ra -r0 -v '{"template: {rev}"}\n' + (template + (template + ('string', 'template: ') + ('symbol', 'rev')) + ('string', '\n')) template: 0 - $ hg log -Ra -r0 -T '{r"rawstring: {rev}"}\n' + $ hg debugtemplate -Ra -r0 -v '{r"rawstring: {rev}"}\n' + (template + ('string', 'rawstring: {rev}') + ('string', '\n')) rawstring: {rev} - -because map operation requires template, raw string can't be used - - $ hg log -Ra -r0 -T '{files % r"rawstring"}\n' - hg: parse error: expected template specifier - [255] + $ hg debugtemplate -Ra -r0 -v '{files % r"rawstring: {file}"}\n' + (template + (% + ('symbol', 'files') + ('string', 'rawstring: {file}')) + ('string', '\n')) + rawstring: {file} Test string escaping: @@ -3148,6 +3198,9 @@ text.1:be wrapped text.1:desc to be text.1:wrapped (no-eol) + $ hg log -l1 -T '{fill(desc, date, "", "")}\n' + hg: parse error: fill expects an integer width + [255] $ hg log -l 1 --template '{sub(r"[0-9]", "-", author)}' {node|short} (no-eol) @@ -3167,6 +3220,18 @@ $ hg log --color=always -l 1 --template 
'{label("text.{rev}", "text\n")}' \x1b[0;32mtext\x1b[0m (esc) +color effect can be specified without quoting: + + $ hg log --color=always -l 1 --template '{label(red, "text\n")}' + \x1b[0;31mtext\x1b[0m (esc) + +label should be no-op if color is disabled: + + $ hg log --color=never -l 1 --template '{label(red, "text\n")}' + text + $ hg log --config extensions.color=! -l 1 --template '{label(red, "text\n")}' + text + Test branches inside if statement: $ hg log -r 0 --template '{if(branches, "yes", "no")}\n' @@ -3176,6 +3241,8 @@ $ hg log -r 0 --template '{get(extras, "branch")}\n' default + $ hg log -r 0 --template '{get(extras, "br{"anch"}")}\n' + default $ hg log -r 0 --template '{get(files, "should_fail")}\n' hg: parse error: get() expects a dict as first argument [255] @@ -3214,6 +3281,12 @@ $ hg log --template '{node|shortest}\n' -l1 e777 + $ hg log -r 0 -T '{shortest(node, "1{"0"}")}\n' + f7769ec2ab + $ hg log -r 0 -T '{shortest(node, "not an int")}\n' + hg: parse error: shortest() expects an integer minlength + [255] + Test pad function $ hg log --template '{pad(rev, 20)} {author|user}\n' @@ -3239,6 +3312,14 @@ $ hg log -r 0 -T '{pad(r"\{rev}", 10)} {author|user}\n' \{rev} test +Test width argument passed to pad function + + $ hg log -r 0 -T '{pad(rev, "1{"0"}")} {author|user}\n' + 0 test + $ hg log -r 0 -T '{pad(rev, "not an int")}\n' + hg: parse error: pad() expects an integer width + [255] + Test ifcontains function $ hg log --template '{rev} {ifcontains(rev, "2 two 0", "is in the string", "is not")}\n' @@ -3246,11 +3327,21 @@ 1 is not 0 is in the string + $ hg log -T '{rev} {ifcontains(rev, "2 two{" 0"}", "is in the string", "is not")}\n' + 2 is in the string + 1 is not + 0 is in the string + $ hg log --template '{rev} {ifcontains("a", file_adds, "added a", "did not add a")}\n' 2 did not add a 1 did not add a 0 added a + $ hg log --debug -T '{rev}{ifcontains(1, parents, " is parent of 1")}\n' + 2 is parent of 1 + 1 + 0 + Test revset function $ hg log 
--template '{rev} {ifcontains(rev, revset("."), "current rev", "not current rev")}\n' @@ -3293,13 +3384,21 @@ $ hg log --template '{revset("TIP"|lower)}\n' -l1 2 - a list template is evaluated for each item of revset + $ hg log -T '{revset("%s", "t{"ip"}")}\n' -l1 + 2 + + a list template is evaluated for each item of revset/parents $ hg log -T '{rev} p: {revset("p1(%s)", rev) % "{rev}:{node|short}"}\n' 2 p: 1:bcc7ff960b8e 1 p: 0:f7769ec2ab97 0 p: + $ hg log --debug -T '{rev} p:{parents % " {rev}:{node|short}"}\n' + 2 p: 1:bcc7ff960b8e -1:000000000000 + 1 p: 0:f7769ec2ab97 -1:000000000000 + 0 p: -1:000000000000 -1:000000000000 + therefore, 'revcache' should be recreated for each rev $ hg log -T '{rev} {file_adds}\np {revset("p1(%s)", rev) % "{file_adds}"}\n' @@ -3310,6 +3409,21 @@ 0 a p + $ hg log --debug -T '{rev} {file_adds}\np {parents % "{file_adds}"}\n' + 2 aa b + p + 1 + p a + 0 a + p + +a revset item must be evaluated as an integer revision, not an offset from tip + + $ hg log -l 1 -T '{revset("null") % "{rev}:{node|short}"}\n' + -1:000000000000 + $ hg log -l 1 -T '{revset("%s", "null") % "{rev}:{node|short}"}\n' + -1:000000000000 + Test active bookmark templating $ hg book foo @@ -3535,25 +3649,192 @@ hg: parse error: invalid \x escape [255] +json filter should escape HTML tags so that the output can be embedded in hgweb: + + $ hg log -T "{'<foo@example.org>'|json}\n" -R a -l1 + "\u003cfoo@example.org\u003e" + +Templater supports aliases of symbol and func() styles: + + $ hg clone -q a aliases + $ cd aliases + $ cat <<EOF >> .hg/hgrc + > [templatealias] + > r = rev + > rn = "{r}:{node|short}" + > status(c, files) = files % "{c} {file}\n" + > utcdate(d) = localdate(d, "UTC") + > EOF + + $ hg debugtemplate -vr0 '{rn} {utcdate(date)|isodate}\n' + (template + ('symbol', 'rn') + ('string', ' ') + (| + (func + ('symbol', 'utcdate') + ('symbol', 'date')) + ('symbol', 'isodate')) + ('string', '\n')) + * expanded: + (template + (template + ('symbol', 'rev') + 
('string', ':') + (| + ('symbol', 'node') + ('symbol', 'short'))) + ('string', ' ') + (| + (func + ('symbol', 'localdate') + (list + ('symbol', 'date') + ('string', 'UTC'))) + ('symbol', 'isodate')) + ('string', '\n')) + 0:1e4e1b8f71e0 1970-01-12 13:46 +0000 + + $ hg debugtemplate -vr0 '{status("A", file_adds)}' + (template + (func + ('symbol', 'status') + (list + ('string', 'A') + ('symbol', 'file_adds')))) + * expanded: + (template + (% + ('symbol', 'file_adds') + (template + ('string', 'A') + ('string', ' ') + ('symbol', 'file') + ('string', '\n')))) + A a + +A unary function alias can be called as a filter: + + $ hg debugtemplate -vr0 '{date|utcdate|isodate}\n' + (template + (| + (| + ('symbol', 'date') + ('symbol', 'utcdate')) + ('symbol', 'isodate')) + ('string', '\n')) + * expanded: + (template + (| + (func + ('symbol', 'localdate') + (list + ('symbol', 'date') + ('string', 'UTC'))) + ('symbol', 'isodate')) + ('string', '\n')) + 1970-01-12 13:46 +0000 + +Aliases should be applied only to command arguments and templates in hgrc. 
+Otherwise, our stock styles and web templates could be corrupted: + + $ hg log -r0 -T '{rn} {utcdate(date)|isodate}\n' + 0:1e4e1b8f71e0 1970-01-12 13:46 +0000 + + $ hg log -r0 --config ui.logtemplate='"{rn} {utcdate(date)|isodate}\n"' + 0:1e4e1b8f71e0 1970-01-12 13:46 +0000 + + $ cat <<EOF > tmpl + > changeset = 'nothing expanded:{rn}\n' + > EOF + $ hg log -r0 --style ./tmpl + nothing expanded: + +Aliases in formatter: + + $ hg branches -T '{pad(branch, 7)} {rn}\n' + default 6:d41e714fe50d + foo 4:bbe44766e73d + +Aliases should honor HGPLAIN: + + $ HGPLAIN= hg log -r0 -T 'nothing expanded:{rn}\n' + nothing expanded: + $ HGPLAINEXCEPT=templatealias hg log -r0 -T '{rn}\n' + 0:1e4e1b8f71e0 + +Unparsable alias: + + $ hg debugtemplate --config templatealias.bad='x(' -v '{bad}' + (template + ('symbol', 'bad')) + abort: failed to parse the definition of template alias "bad": at 2: not a prefix: end + [255] + $ hg log --config templatealias.bad='x(' -T '{bad}' + abort: failed to parse the definition of template alias "bad": at 2: not a prefix: end + [255] + + $ cd .. + Set up repository for non-ascii encoding tests: $ hg init nonascii $ cd nonascii $ python <<EOF + > open('latin1', 'w').write('\xe9') > open('utf-8', 'w').write('\xc3\xa9') > EOF $ HGENCODING=utf-8 hg branch -q `cat utf-8` - $ HGENCODING=utf-8 hg ci -qAm 'non-ascii branch' utf-8 + $ HGENCODING=utf-8 hg ci -qAm "non-ascii branch: `cat utf-8`" utf-8 json filter should try round-trip conversion to utf-8: $ HGENCODING=ascii hg log -T "{branch|json}\n" -r0 "\u00e9" - -json filter should not abort if it can't decode bytes: -(not sure the current behavior is right; we might want to use utf-8b encoding?) 
+ $ HGENCODING=ascii hg log -T "{desc|json}\n" -r0 + "non-ascii branch: \u00e9" + +json filter takes input as utf-8b: $ HGENCODING=ascii hg log -T "{'`cat utf-8`'|json}\n" -l1 - "\ufffd\ufffd" + "\u00e9" + $ HGENCODING=ascii hg log -T "{'`cat latin1`'|json}\n" -l1 + "\udce9" + +utf8 filter: + + $ HGENCODING=ascii hg log -T "round-trip: {branch|utf8|hex}\n" -r0 + round-trip: c3a9 + $ HGENCODING=latin1 hg log -T "decoded: {'`cat latin1`'|utf8|hex}\n" -l1 + decoded: c3a9 + $ HGENCODING=ascii hg log -T "replaced: {'`cat latin1`'|utf8|hex}\n" -l1 + abort: decoding near * (glob) + [255] + $ hg log -T "invalid type: {rev|utf8}\n" -r0 + abort: template filter 'utf8' is not compatible with keyword 'rev' + [255] $ cd .. + +Test that template function in extension is registered as expected + + $ cd a + + $ cat <<EOF > $TESTTMP/customfunc.py + > from mercurial import registrar + > + > templatefunc = registrar.templatefunc() + > + > @templatefunc('custom()') + > def custom(context, mapping, args): + > return 'custom' + > EOF + $ cat <<EOF > .hg/hgrc + > [extensions] + > customfunc = $TESTTMP/customfunc.py + > EOF + + $ hg log -r . -T "{custom()}\n" --config customfunc.enabled=true + custom + + $ cd ..
--- a/tests/test-commandserver.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-commandserver.t Sat Apr 16 18:06:48 2016 -0500 @@ -102,8 +102,7 @@ ... print 'server exit code =', server.wait() server exit code = 1 - >>> import cStringIO - >>> from hgclient import readchannel, runcommand, check + >>> from hgclient import readchannel, runcommand, check, stringio >>> @check ... def serverinput(server): ... readchannel(server) @@ -123,7 +122,7 @@ ... +1 ... """ ... - ... runcommand(server, ['import', '-'], input=cStringIO.StringIO(patch)) + ... runcommand(server, ['import', '-'], input=stringio(patch)) ... runcommand(server, ['log']) *** runcommand import - applying patch from stdin @@ -211,15 +210,14 @@ > print 'now try to read something: %r' % sys.stdin.read() > EOF - >>> import cStringIO - >>> from hgclient import readchannel, runcommand, check + >>> from hgclient import readchannel, runcommand, check, stringio >>> @check ... def hookoutput(server): ... readchannel(server) ... runcommand(server, ['--config', ... 'hooks.pre-identify=python:hook.hook', ... 'id'], - ... input=cStringIO.StringIO('some input')) + ... input=stringio('some input')) *** runcommand --config hooks.pre-identify=python:hook.hook id hook talking now try to read something: 'some input' @@ -587,17 +585,16 @@ > dbgui = dbgui.py > EOF - >>> import cStringIO - >>> from hgclient import readchannel, runcommand, check + >>> from hgclient import readchannel, runcommand, check, stringio >>> @check ... def getpass(server): ... readchannel(server) ... runcommand(server, ['debuggetpass', '--config', ... 'ui.interactive=True'], - ... input=cStringIO.StringIO('1234\n')) + ... input=stringio('1234\n')) ... runcommand(server, ['debugprompt', '--config', ... 'ui.interactive=True'], - ... input=cStringIO.StringIO('5678\n')) + ... input=stringio('5678\n')) ... runcommand(server, ['debugreadstdin']) ... 
runcommand(server, ['debugwritestdout']) *** runcommand debuggetpass --config ui.interactive=True @@ -611,14 +608,13 @@ run commandserver in commandserver, which is silly but should work: - >>> import cStringIO - >>> from hgclient import readchannel, runcommand, check + >>> from hgclient import readchannel, runcommand, check, stringio >>> @check ... def nested(server): ... print '%c, %r' % readchannel(server) ... class nestedserver(object): - ... stdin = cStringIO.StringIO('getencoding\n') - ... stdout = cStringIO.StringIO() + ... stdin = stringio('getencoding\n') + ... stdout = stringio() ... runcommand(server, ['serve', '--cmdserver', 'pipe'], ... output=nestedserver.stdout, input=nestedserver.stdin) ... nestedserver.stdout.seek(0) @@ -674,8 +670,7 @@ #if unix-socket unix-permissions - >>> import cStringIO - >>> from hgclient import unixserver, readchannel, runcommand, check + >>> from hgclient import unixserver, readchannel, runcommand, check, stringio >>> server = unixserver('.hg/server.sock', '.hg/server.log') >>> def hellomessage(conn): ... ch, data = readchannel(conn) @@ -704,7 +699,7 @@ ... 1 ... +2 ... """ - ... runcommand(conn, ['import', '-'], input=cStringIO.StringIO(patch)) + ... runcommand(conn, ['import', '-'], input=stringio(patch)) ... runcommand(conn, ['log', '-rtip', '-q']) >>> check(serverinput, server.connect) *** runcommand import - @@ -717,6 +712,35 @@ listening at .hg/server.sock abort: unknown command unknowncommand killed! + $ rm .hg/server.log + + if server crashed before hello, traceback will be sent to 'e' channel as + last ditch: + + $ cat <<EOF >> .hg/hgrc + > [cmdserver] + > log = inexistent/path.log + > EOF + >>> from hgclient import unixserver, readchannel, check + >>> server = unixserver('.hg/server.sock', '.hg/server.log') + >>> def earlycrash(conn): + ... while True: + ... try: + ... ch, data = readchannel(conn) + ... if not data.startswith(' '): + ... print '%c, %r' % (ch, data) + ... except EOFError: + ... 
break + >>> check(earlycrash, server.connect) + e, 'Traceback (most recent call last):\n' + e, "IOError: *" (glob) + >>> server.shutdown() + + $ cat .hg/server.log | grep -v '^ ' + listening at .hg/server.sock + Traceback (most recent call last): + IOError: * (glob) + killed! #endif #if no-unix-socket @@ -725,3 +749,133 @@ [255] #endif + + $ cd .. + +Test that accessing to invalid changelog cache is avoided at +subsequent operations even if repo object is reused even after failure +of transaction (see 0a7610758c42 also) + +"hg log" after failure of transaction is needed to detect invalid +cache in repoview: this can't detect by "hg verify" only. + +Combination of "finalization" and "empty-ness of changelog" (2 x 2 = +4) are tested, because '00changelog.i' are differently changed in each +cases. + + $ cat > $TESTTMP/failafterfinalize.py <<EOF + > # extension to abort transaction after finalization forcibly + > from mercurial import commands, error, extensions, lock as lockmod + > def fail(tr): + > raise error.Abort('fail after finalization') + > def reposetup(ui, repo): + > class failrepo(repo.__class__): + > def commitctx(self, ctx, error=False): + > if self.ui.configbool('failafterfinalize', 'fail'): + > # 'sorted()' by ASCII code on category names causes + > # invoking 'fail' after finalization of changelog + > # using "'cl-%i' % id(self)" as category name + > self.currenttransaction().addfinalize('zzzzzzzz', fail) + > return super(failrepo, self).commitctx(ctx, error) + > repo.__class__ = failrepo + > EOF + + $ hg init repo3 + $ cd repo3 + + $ cat <<EOF >> $HGRCPATH + > [ui] + > logtemplate = {rev} {desc|firstline} ({files})\n + > + > [extensions] + > failafterfinalize = $TESTTMP/failafterfinalize.py + > EOF + +- test failure with "empty changelog" + + $ echo foo > foo + $ hg add foo + +(failuer before finalization) + + >>> from hgclient import readchannel, runcommand, check + >>> @check + ... def abort(server): + ... readchannel(server) + ... 
runcommand(server, ['commit', + ... '--config', 'hooks.pretxncommit=false', + ... '-mfoo']) + ... runcommand(server, ['log']) + ... runcommand(server, ['verify', '-q']) + *** runcommand commit --config hooks.pretxncommit=false -mfoo + transaction abort! + rollback completed + abort: pretxncommit hook exited with status 1 + [255] + *** runcommand log + *** runcommand verify -q + +(failuer after finalization) + + >>> from hgclient import readchannel, runcommand, check + >>> @check + ... def abort(server): + ... readchannel(server) + ... runcommand(server, ['commit', + ... '--config', 'failafterfinalize.fail=true', + ... '-mfoo']) + ... runcommand(server, ['log']) + ... runcommand(server, ['verify', '-q']) + *** runcommand commit --config failafterfinalize.fail=true -mfoo + transaction abort! + rollback completed + abort: fail after finalization + [255] + *** runcommand log + *** runcommand verify -q + +- test failure with "not-empty changelog" + + $ echo bar > bar + $ hg add bar + $ hg commit -mbar bar + +(failure before finalization) + + >>> from hgclient import readchannel, runcommand, check + >>> @check + ... def abort(server): + ... readchannel(server) + ... runcommand(server, ['commit', + ... '--config', 'hooks.pretxncommit=false', + ... '-mfoo', 'foo']) + ... runcommand(server, ['log']) + ... runcommand(server, ['verify', '-q']) + *** runcommand commit --config hooks.pretxncommit=false -mfoo foo + transaction abort! + rollback completed + abort: pretxncommit hook exited with status 1 + [255] + *** runcommand log + 0 bar (bar) + *** runcommand verify -q + +(failure after finalization) + + >>> from hgclient import readchannel, runcommand, check + >>> @check + ... def abort(server): + ... readchannel(server) + ... runcommand(server, ['commit', + ... '--config', 'failafterfinalize.fail=true', + ... '-mfoo', 'foo']) + ... runcommand(server, ['log']) + ... 
runcommand(server, ['verify', '-q']) + *** runcommand commit --config failafterfinalize.fail=true -mfoo foo + transaction abort! + rollback completed + abort: fail after finalization + [255] + *** runcommand log + 0 bar (bar) + *** runcommand verify -q
--- a/tests/test-commit-amend.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-commit-amend.t Sat Apr 16 18:06:48 2016 -0500 @@ -540,10 +540,10 @@ | | o 12:0 2647734878ef 1970-01-01 00:00 +0000 test | | fork - | | - o | 11 3334b7925910 1970-01-01 00:00 +0000 test - | | a'' - | | + | ~ + o 11 3334b7925910 1970-01-01 00:00 +0000 test + | a'' + ~ $ hg log -Gl 4 --hidden --style=compact @ 14[tip]:11 b650e6ee8614 1970-01-01 00:00 +0000 test | babar @@ -553,10 +553,10 @@ | | o 12:0 2647734878ef 1970-01-01 00:00 +0000 test | | fork - | | - o | 11 3334b7925910 1970-01-01 00:00 +0000 test - | | a'' - | | + | ~ + o 11 3334b7925910 1970-01-01 00:00 +0000 test + | a'' + ~ Amend with files changes @@ -580,10 +580,10 @@ | | o 12:0 2647734878ef 1970-01-01 00:00 +0000 test | | fork - | | - o | 11 3334b7925910 1970-01-01 00:00 +0000 test - | | a'' - | | + | ~ + o 11 3334b7925910 1970-01-01 00:00 +0000 test + | a'' + ~ Test that amend does not make it easy to create obsolescence cycle
--- a/tests/test-commit-interactive-curses.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-commit-interactive-curses.t Sat Apr 16 18:06:48 2016 -0500 @@ -1,10 +1,11 @@ Set up a repo + $ cp $HGRCPATH $HGRCPATH.pretest $ cat <<EOF >> $HGRCPATH > [ui] > interactive = true + > interface = curses > [experimental] - > crecord = true > crecordtest = testModeCommands > EOF @@ -222,4 +223,166 @@ foo hello world +Testing the review option. The entire final filtered patch should show +up in the editor and be editable. We will unselect the second file and +the first hunk of the third file. During review, we will decide that +"lower" sounds better than "bottom", and the final commit should +reflect this edition. + $ hg update -C . + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo "top" > c + $ cat x >> c + $ echo "bottom" >> c + $ mv c x + $ echo "third a" >> a + $ echo "we will unselect this" >> b + + $ cat > editor.sh <<EOF + > cat "\$1" + > cat "\$1" | sed s/bottom/lower/ > tmp + > mv tmp "\$1" + > EOF + $ cat > testModeCommands <<EOF + > KEY_DOWN + > TOGGLE + > KEY_DOWN + > f + > KEY_DOWN + > TOGGLE + > R + > EOF + + $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -m "review hunks" -d "0 0" + # To remove '-' lines, make them ' ' lines (context). + # To remove '+' lines, delete them. + # Lines starting with # will be removed from the patch. + # + # If the patch applies cleanly, the edited patch will immediately + # be finalised. If it does not apply cleanly, rejects files will be + # generated. You can use those when you try again. + diff --git a/a b/a + --- a/a + +++ b/a + @@ -1,2 +1,3 @@ + a + a + +third a + diff --git a/x b/x + --- a/x + +++ b/x + @@ -1,2 +1,3 @@ + foo + hello world + +bottom + + $ hg cat -r . a + a + a + third a + + $ hg cat -r . b + x + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + y + + $ hg cat -r . 
x + foo + hello world + lower +Check ui.interface logic for the chunkselector + +The default interface is text + $ cp $HGRCPATH.pretest $HGRCPATH + $ chunkselectorinterface() { + > python <<EOF + > from mercurial import hg, ui, parsers;\ + > repo = hg.repository(ui.ui(), ".");\ + > print repo.ui.interface("chunkselector") + > EOF + > } + $ chunkselectorinterface + text + +If only the default is set, we'll use that for the feature, too + $ cp $HGRCPATH.pretest $HGRCPATH + $ cat <<EOF >> $HGRCPATH + > [ui] + > interface = curses + > EOF + $ chunkselectorinterface + curses + +It is possible to override the default interface with a feature specific +interface + $ cp $HGRCPATH.pretest $HGRCPATH + $ cat <<EOF >> $HGRCPATH + > [ui] + > interface = text + > interface.chunkselector = curses + > EOF + + $ chunkselectorinterface + curses + + $ cp $HGRCPATH.pretest $HGRCPATH + $ cat <<EOF >> $HGRCPATH + > [ui] + > interface = curses + > interface.chunkselector = text + > EOF + + $ chunkselectorinterface + text + +If a bad interface name is given, we use the default value (with a nice +error message to suggest that the configuration needs to be fixed) + + $ cp $HGRCPATH.pretest $HGRCPATH + $ cat <<EOF >> $HGRCPATH + > [ui] + > interface = blah + > EOF + $ chunkselectorinterface + invalid value for ui.interface: blah (using text) + text + + $ cp $HGRCPATH.pretest $HGRCPATH + $ cat <<EOF >> $HGRCPATH + > [ui] + > interface = curses + > interface.chunkselector = blah + > EOF + $ chunkselectorinterface + invalid value for ui.interface.chunkselector: blah (using curses) + curses + + $ cp $HGRCPATH.pretest $HGRCPATH + $ cat <<EOF >> $HGRCPATH + > [ui] + > interface = blah + > interface.chunkselector = curses + > EOF + $ chunkselectorinterface + invalid value for ui.interface: blah + curses + + $ cp $HGRCPATH.pretest $HGRCPATH + $ cat <<EOF >> $HGRCPATH + > [ui] + > interface = blah + > interface.chunkselector = blah + > EOF + $ chunkselectorinterface + invalid value for ui.interface: 
blah + invalid value for ui.interface.chunkselector: blah (using text) + text
--- a/tests/test-commit-interactive.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-commit-interactive.t Sat Apr 16 18:06:48 2016 -0500 @@ -59,6 +59,14 @@ +Abort for untracked + + $ touch untracked + $ hg commit -i -m should-fail empty-rw untracked + abort: untracked: file not tracked! + [255] + $ rm untracked + Record empty file $ hg commit -i -d '0 0' -m empty empty-rw<<EOF @@ -283,7 +291,7 @@ $ unset HGUSER $ hg commit -i --config ui.username= -d '8 0' -m end plain abort: no username supplied - (use "hg config --edit" to set your username) + (use 'hg config --edit' to set your username) [255]
--- a/tests/test-commit.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-commit.t Sat Apr 16 18:06:48 2016 -0500 @@ -27,8 +27,21 @@ $ hg commit -d '111111111111 0' -m commit-7 abort: date exceeds 32 bits: 111111111111 [255] - $ hg commit -d '-7654321 3600' -m commit-7 - abort: negative date value: -7654321 + $ hg commit -d '-111111111111 0' -m commit-7 + abort: date exceeds 32 bits: -111111111111 + [255] + $ echo foo >> foo + $ hg commit -d '1901-12-13 20:45:52 +0000' -m commit-7-2 + $ echo foo >> foo + $ hg commit -d '-2147483648 0' -m commit-7-3 + $ hg log -T '{rev} {date|isodatesec}\n' -l2 + 3 1901-12-13 20:45:52 +0000 + 2 1901-12-13 20:45:52 +0000 + $ hg commit -d '1901-12-13 20:45:51 +0000' -m commit-7 + abort: date exceeds 32 bits: -2147483649 + [255] + $ hg commit -d '-2147483649 0' -m commit-7 + abort: date exceeds 32 bits: -2147483649 [255] commit added file that has been deleted @@ -54,7 +67,7 @@ dir/file committing manifest committing changelog - committed changeset 2:d2a76177cb42 + committed changeset 4:1957363f1ced $ echo > dir.file $ hg add @@ -78,7 +91,7 @@ dir/file committing manifest committing changelog - committed changeset 3:1cd62a2d8db5 + committed changeset 5:a31d8f87544a $ cd .. $ hg commit -m commit-14 does-not-exist @@ -102,7 +115,7 @@ dir/file committing manifest committing changelog - committed changeset 4:49176991390e + committed changeset 6:32d054c9d085 An empty date was interpreted as epoch origin
--- a/tests/test-committer.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-committer.t Sat Apr 16 18:06:48 2016 -0500 @@ -50,7 +50,7 @@ $ echo "username = " >> .hg/hgrc $ hg commit -m commit-1 abort: no username supplied - (use "hg config --edit" to set your username) + (use 'hg config --edit' to set your username) [255] # test alternate config var
--- a/tests/test-completion.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-completion.t Sat Apr 16 18:06:48 2016 -0500 @@ -108,6 +108,7 @@ debugsetparents debugsub debugsuccessorssets + debugtemplate debugwalk debugwireargs @@ -158,7 +159,7 @@ --config --cwd --daemon - --daemon-pipefds + --daemon-postexec --debug --debugger --encoding @@ -218,7 +219,7 @@ pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure remove: after, force, subrepos, include, exclude - serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate + serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template summary: remote update: clean, check, date, rev, tool @@ -254,13 +255,13 @@ debugignore: debugindex: changelog, manifest, dir, format debugindexdot: changelog, manifest, dir - debuginstall: + debuginstall: template debugknown: debuglabelcomplete: debuglocks: force-lock, force-wlock debugmergestate: debugnamecomplete: - debugobsolete: flags, record-parents, rev, date, user + debugobsolete: flags, record-parents, rev, index, delete, date, user debugpathcomplete: full, normal, added, removed debugpushkey: debugpvec: @@ -272,6 +273,7 @@ debugsetparents: debugsub: rev debugsuccessorssets: + debugtemplate: rev, define debugwalk: include, exclude debugwireargs: three, four, five, ssh, remotecmd, insecure files: rev, print0, include, exclude, template, subrepos
--- a/tests/test-conflict.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-conflict.t Sat Apr 16 18:06:48 2016 -0500 @@ -46,16 +46,13 @@ $ cat a Small Mathematical Series. - <<<<<<< local: 618808747361 - test: branch2 1 2 3 + <<<<<<< local: 618808747361 - test: branch2 6 8 ======= - 1 - 2 - 3 4 5 >>>>>>> other: c0c68e4fe667 - test: branch1 @@ -79,16 +76,13 @@ $ cat a Small Mathematical Series. - <<<<<<< local: test 2 1 2 3 + <<<<<<< local: test 2 6 8 ======= - 1 - 2 - 3 4 5 >>>>>>> other: test 1 @@ -108,16 +102,13 @@ $ cat a Small Mathematical Series. - <<<<<<< local: test 2 1 2 3 + <<<<<<< local: test 2 6 8 ======= - 1 - 2 - 3 4 5 >>>>>>> other: test 1 @@ -150,16 +141,13 @@ $ cat a Small Mathematical Series. - <<<<<<< local: 123456789012345678901234567890123456789012345678901234567890\xe3\x81\x82... (esc) 1 2 3 + <<<<<<< local: 123456789012345678901234567890123456789012345678901234567890\xe3\x81\x82... (esc) 6 8 ======= - 1 - 2 - 3 4 5 >>>>>>> other: branch1 @@ -179,16 +167,13 @@ $ cat a Small Mathematical Series. - <<<<<<< local 1 2 3 + <<<<<<< local 6 8 ======= - 1 - 2 - 3 4 5 >>>>>>> other @@ -232,6 +217,7 @@ $ hg up -C 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ printf "\n\nEnd of file\n" >> a $ hg ci -m "Add some stuff at the end" $ hg up -r 1 @@ -269,6 +255,7 @@ $ hg up -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --tool :merge-local merging a 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-confused-revert.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-confused-revert.t Sat Apr 16 18:06:48 2016 -0500 @@ -60,7 +60,7 @@ $ hg revert abort: uncommitted merge with no revision specified - (use "hg update" or see "hg help revert") + (use 'hg update' or see 'hg help revert') [255] Revert should be ok now:
--- a/tests/test-context.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-context.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,7 +1,13 @@ +from __future__ import absolute_import, print_function import os -from mercurial import hg, ui, context, encoding +from mercurial import ( + context, + encoding, + hg, + ui as uimod, +) -u = ui.ui() +u = uimod.ui() repo = hg.repository(u, 'test1', create=1) os.chdir('test1') @@ -18,9 +24,9 @@ if os.name == 'nt': d = repo[None]['foo'].date() - print "workingfilectx.date = (%d, %d)" % (d[0], d[1]) + print("workingfilectx.date = (%d, %d)" % (d[0], d[1])) else: - print "workingfilectx.date =", repo[None]['foo'].date() + print("workingfilectx.date =", repo[None]['foo'].date()) # test memctx with non-ASCII commit message @@ -33,7 +39,7 @@ ctx.commit() for enc in "ASCII", "Latin-1", "UTF-8": encoding.encoding = enc - print "%-8s: %s" % (enc, repo["tip"].description()) + print("%-8s: %s" % (enc, repo["tip"].description())) # test performing a status @@ -48,15 +54,15 @@ ctxb = context.memctx(repo, [ctxa.node(), None], "test diff", ["foo"], getfilectx, ctxa.user(), ctxa.date()) -print ctxb.status(ctxa) +print(ctxb.status(ctxa)) # test performing a diff on a memctx for d in ctxb.diff(ctxa, git=True): - print d + print(d) # test safeness and correctness of "ctx.status()" -print '= checking context.status():' +print('= checking context.status():') # ancestor "wcctx ~ 2" actx2 = repo['.'] @@ -82,26 +88,26 @@ from mercurial import scmutil -print '== checking workingctx.status:' +print('== checking workingctx.status:') wctx = repo[None] -print 'wctx._status=%s' % (str(wctx._status)) +print('wctx._status=%s' % (str(wctx._status))) -print '=== with "pattern match":' -print actx1.status(other=wctx, - match=scmutil.matchfiles(repo, ['bar-m', 'foo'])) -print 'wctx._status=%s' % (str(wctx._status)) -print actx2.status(other=wctx, - match=scmutil.matchfiles(repo, ['bar-m', 'foo'])) -print 'wctx._status=%s' % (str(wctx._status)) +print('=== with "pattern 
match":') +print(actx1.status(other=wctx, + match=scmutil.matchfiles(repo, ['bar-m', 'foo']))) +print('wctx._status=%s' % (str(wctx._status))) +print(actx2.status(other=wctx, + match=scmutil.matchfiles(repo, ['bar-m', 'foo']))) +print('wctx._status=%s' % (str(wctx._status))) -print '=== with "always match" and "listclean=True":' -print actx1.status(other=wctx, listclean=True) -print 'wctx._status=%s' % (str(wctx._status)) -print actx2.status(other=wctx, listclean=True) -print 'wctx._status=%s' % (str(wctx._status)) +print('=== with "always match" and "listclean=True":') +print(actx1.status(other=wctx, listclean=True)) +print('wctx._status=%s' % (str(wctx._status))) +print(actx2.status(other=wctx, listclean=True)) +print('wctx._status=%s' % (str(wctx._status))) -print "== checking workingcommitctx.status:" +print("== checking workingcommitctx.status:") wcctx = context.workingcommitctx(repo, scmutil.status(['bar-m'], @@ -109,34 +115,34 @@ [], [], [], [], []), text='', date='0 0') -print 'wcctx._status=%s' % (str(wcctx._status)) +print('wcctx._status=%s' % (str(wcctx._status))) -print '=== with "always match":' -print actx1.status(other=wcctx) -print 'wcctx._status=%s' % (str(wcctx._status)) -print actx2.status(other=wcctx) -print 'wcctx._status=%s' % (str(wcctx._status)) +print('=== with "always match":') +print(actx1.status(other=wcctx)) +print('wcctx._status=%s' % (str(wcctx._status))) +print(actx2.status(other=wcctx)) +print('wcctx._status=%s' % (str(wcctx._status))) -print '=== with "always match" and "listclean=True":' -print actx1.status(other=wcctx, listclean=True) -print 'wcctx._status=%s' % (str(wcctx._status)) -print actx2.status(other=wcctx, listclean=True) -print 'wcctx._status=%s' % (str(wcctx._status)) +print('=== with "always match" and "listclean=True":') +print(actx1.status(other=wcctx, listclean=True)) +print('wcctx._status=%s' % (str(wcctx._status))) +print(actx2.status(other=wcctx, listclean=True)) +print('wcctx._status=%s' % (str(wcctx._status))) 
-print '=== with "pattern match":' -print actx1.status(other=wcctx, - match=scmutil.matchfiles(repo, ['bar-m', 'foo'])) -print 'wcctx._status=%s' % (str(wcctx._status)) -print actx2.status(other=wcctx, - match=scmutil.matchfiles(repo, ['bar-m', 'foo'])) -print 'wcctx._status=%s' % (str(wcctx._status)) +print('=== with "pattern match":') +print(actx1.status(other=wcctx, + match=scmutil.matchfiles(repo, ['bar-m', 'foo']))) +print('wcctx._status=%s' % (str(wcctx._status))) +print(actx2.status(other=wcctx, + match=scmutil.matchfiles(repo, ['bar-m', 'foo']))) +print('wcctx._status=%s' % (str(wcctx._status))) -print '=== with "pattern match" and "listclean=True":' -print actx1.status(other=wcctx, +print('=== with "pattern match" and "listclean=True":') +print(actx1.status(other=wcctx, match=scmutil.matchfiles(repo, ['bar-r', 'foo']), - listclean=True) -print 'wcctx._status=%s' % (str(wcctx._status)) -print actx2.status(other=wcctx, + listclean=True)) +print('wcctx._status=%s' % (str(wcctx._status))) +print(actx2.status(other=wcctx, match=scmutil.matchfiles(repo, ['bar-r', 'foo']), - listclean=True) -print 'wcctx._status=%s' % (str(wcctx._status)) + listclean=True)) +print('wcctx._status=%s' % (str(wcctx._status)))
--- a/tests/test-contrib-check-code.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-contrib-check-code.t Sat Apr 16 18:06:48 2016 -0500 @@ -69,6 +69,22 @@ dict() is different in Py2 and 3 and is slower than {} [1] + $ cat > foo.c <<EOF + > void narf() { + > strcpy(foo, bar); + > // strcpy_s is okay, but this comment is not + > strcpy_s(foo, bar); + > } + > EOF + $ "$check_code" ./foo.c + ./foo.c:2: + > strcpy(foo, bar); + don't use strcpy, use strlcpy or memcpy + ./foo.c:3: + > // strcpy_s is okay, but this comment is not + don't use //-style comments + [1] + $ cat > is-op.py <<EOF > # is-operator comparing number or string literal > x = None
--- a/tests/test-contrib-check-commit.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-contrib-check-commit.t Sat Apr 16 18:06:48 2016 -0500 @@ -87,6 +87,10 @@ > @@ -599,7 +599,7 @@ > if opts.get('all'): > + > + > + + > + some = otherjunk + > + > + > + def blah_blah(x): > + pass @@ -102,10 +106,10 @@ This has no topic and ends with a period. 7: don't add trailing period on summary line This has no topic and ends with a period. - 15: adds double empty line - + - 16: adds a function with foo_bar naming - + def blah_blah(x): 19: adds double empty line + + 20: adds a function with foo_bar naming + + def blah_blah(x): + 23: adds double empty line + + [1]
--- a/tests/test-contrib.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-contrib.t Sat Apr 16 18:06:48 2016 -0500 @@ -148,11 +148,10 @@ base <<<<<<< conflict-local not other - end ======= other + >>>>>>> conflict-other end - >>>>>>> conflict-other [1] 1 label @@ -161,11 +160,10 @@ base <<<<<<< foo not other - end ======= other + >>>>>>> conflict-other end - >>>>>>> conflict-other [1] 2 labels @@ -174,11 +172,10 @@ base <<<<<<< foo not other - end ======= other + >>>>>>> bar end - >>>>>>> bar [1] 3 labels
--- a/tests/test-convert-git.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-convert-git.t Sat Apr 16 18:06:48 2016 -0500 @@ -730,6 +730,8 @@ $ hg convert git-repo4 git-repo4-broken-hg 2>&1 | grep 'abort:' abort: cannot read changes in 1c0ce3c5886f83a1d78a7b517cdff5cf9ca17bdd +#if no-windows + test for escaping the repo name (CVE-2016-3069) $ git init '`echo pwned >COMMAND-INJECTION`' @@ -746,3 +748,6 @@ updating bookmarks $ test -f COMMAND-INJECTION [1] + +#endif +
--- a/tests/test-convert-hg-sink.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-convert-hg-sink.t Sat Apr 16 18:06:48 2016 -0500 @@ -371,6 +371,7 @@ |/ o 6 0613c8e59a3d (public) "6: change a" files: a | + ~ $ cd .. $ hg convert --filemap filemap source dest --config convert.hg.revs=3:
--- a/tests/test-convert-hg-startrev.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-convert-hg-startrev.t Sat Apr 16 18:06:48 2016 -0500 @@ -221,3 +221,25 @@ |/ o 0 "0: add a b f" files: a b f +Convert in multiple steps that doesn't overlap - the link to the parent is +preserved anyway + + $ hg convert --config convert.hg.revs=::1 source multistep + initializing destination multistep repository + scanning source... + sorting... + converting... + 1 0: add a b f + 0 1: add c, move f to d + $ hg convert --config convert.hg.revs=2 source multistep + scanning source... + sorting... + converting... + 0 2: copy e from a, change b + $ glog multistep + o 2 "2: copy e from a, change b" files: b e + | + o 1 "1: add c, move f to d" files: c d f + | + o 0 "0: add a b f" files: a b f +
--- a/tests/test-convert-hg-svn.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-convert-hg-svn.t Sat Apr 16 18:06:48 2016 -0500 @@ -1,5 +1,9 @@ #require svn svn-bindings + $ filter_svn_output () { + > egrep -v 'Committing|Updating|(^$)' | sed -e 's/done$//' || true + > } + $ cat <<EOF >> $HGRCPATH > [extensions] > convert = @@ -37,7 +41,7 @@ $ echo a > a $ svn add a A a - $ svn ci -m'added a' a + $ svn ci -m'added a' a | filter_svn_output Adding a Transmitting file data . Committed revision 1.
--- a/tests/test-convert-mtn.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-convert-mtn.t Sat Apr 16 18:06:48 2016 -0500 @@ -260,6 +260,8 @@ $ cd repo.mtn-hg $ hg up -C 12 files updated, 0 files merged, 0 files removed, 0 files unresolved + no open descendant heads on branch "com.selenic.test", updating to a closed head + (committing will reopen branch "com.selenic.test") $ glog @ 14 "largefile" files: large-file |
--- a/tests/test-convert-p4-filetypes.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-convert-p4-filetypes.t Sat Apr 16 18:06:48 2016 -0500 @@ -317,7 +317,7 @@ 1 initial 0 keywords $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n' - rev=1 desc="keywords" tags="tip" files="crazy_symlink+k file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k" + rev=1 desc="keywords" tags="tip" files="crazy_symlink+k file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_tempobj file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtempobj file_xtext target_symlink target_symlink+k" rev=0 desc="initial" tags="" files="file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_symlink file_symlink+k file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k" revision 0 @@ -406,7 +406,7 @@ revision 1 $ hg -R dst update 1 - 30 files updated, 0 files merged, 0 files removed, 0 files unresolved + 32 files updated, 0 files merged, 0 files removed, 0 files unresolved $ head dst/file_* | cat -v ==> dst/file_binary <== this is binary$Id$ @@ -525,6 +525,17 @@ $Revision$ $Header$$Header$Header$ + ==> dst/file_tempobj <== + this is tempobj + $Id$ + $Header$ + $Date$ + $DateTime$ + $Change$ + $File$ + 
$Revision$ + $Header$$Header$Header$ + ==> dst/file_text <== this is text $Id$ @@ -712,6 +723,17 @@ $Revision$ $Header$$Header$Header$ + ==> dst/file_xtempobj <== + this is xtempobj + $Id$ + $Header$ + $Date$ + $DateTime$ + $Change$ + $File$ + $Revision$ + $Header$$Header$Header$ + ==> dst/file_xtext <== this is xtext $Id$
--- a/tests/test-convert-svn-encoding.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-convert-svn-encoding.t Sat Apr 16 18:06:48 2016 -0500 @@ -57,55 +57,55 @@ source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@2 converting: 1/6 revisions (16.67%) reparent to file://*/svn-repo/trunk (glob) - scanning paths: /trunk/\xc3\xa0 0/3 (0.00%) (esc) - scanning paths: /trunk/\xc3\xa0/e\xcc\x81 1/3 (33.33%) (esc) - scanning paths: /trunk/\xc3\xa9 2/3 (66.67%) (esc) + scanning paths: /trunk/\xc3\xa0 0/3 paths (0.00%) (esc) + scanning paths: /trunk/\xc3\xa0/e\xcc\x81 1/3 paths (33.33%) (esc) + scanning paths: /trunk/\xc3\xa9 2/3 paths (66.67%) (esc) committing files: \xc3\xa0/e\xcc\x81 (esc) - getting files: \xc3\xa0/e\xcc\x81 1/2 (50.00%) (esc) + getting files: \xc3\xa0/e\xcc\x81 1/2 files (50.00%) (esc) \xc3\xa9 (esc) - getting files: \xc3\xa9 2/2 (100.00%) (esc) + getting files: \xc3\xa9 2/2 files (100.00%) (esc) committing manifest committing changelog 3 copy files source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@3 converting: 2/6 revisions (33.33%) - scanning paths: /trunk/\xc3\xa0 0/4 (0.00%) (esc) + scanning paths: /trunk/\xc3\xa0 0/4 paths (0.00%) (esc) gone from -1 reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/trunk (glob) - scanning paths: /trunk/\xc3\xa8 1/4 (25.00%) (esc) + scanning paths: /trunk/\xc3\xa8 1/4 paths (25.00%) (esc) copied to \xc3\xa8 from \xc3\xa9@2 (esc) - scanning paths: /trunk/\xc3\xa9 2/4 (50.00%) (esc) + scanning paths: /trunk/\xc3\xa9 2/4 paths (50.00%) (esc) gone from -1 reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/trunk (glob) - scanning paths: /trunk/\xc3\xb9 3/4 (75.00%) (esc) + scanning paths: /trunk/\xc3\xb9 3/4 paths (75.00%) (esc) mark /trunk/\xc3\xb9 came from \xc3\xa0:2 (esc) - getting files: \xc3\xa0/e\xcc\x81 1/4 (25.00%) (esc) - getting files: \xc3\xa9 2/4 (50.00%) (esc) + getting files: \xc3\xa0/e\xcc\x81 1/4 files (25.00%) (esc) + getting files: \xc3\xa9 2/4 files (50.00%) 
(esc) committing files: \xc3\xa8 (esc) - getting files: \xc3\xa8 3/4 (75.00%) (esc) + getting files: \xc3\xa8 3/4 files (75.00%) (esc) \xc3\xa8: copy \xc3\xa9:6b67ccefd5ce6de77e7ead4f5292843a0255329f (esc) \xc3\xb9/e\xcc\x81 (esc) - getting files: \xc3\xb9/e\xcc\x81 4/4 (100.00%) (esc) + getting files: \xc3\xb9/e\xcc\x81 4/4 files (100.00%) (esc) \xc3\xb9/e\xcc\x81: copy \xc3\xa0/e\xcc\x81:a9092a3d84a37b9993b5c73576f6de29b7ea50f6 (esc) committing manifest committing changelog 2 remove files source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@4 converting: 3/6 revisions (50.00%) - scanning paths: /trunk/\xc3\xa8 0/2 (0.00%) (esc) + scanning paths: /trunk/\xc3\xa8 0/2 paths (0.00%) (esc) gone from -1 reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/trunk (glob) - scanning paths: /trunk/\xc3\xb9 1/2 (50.00%) (esc) + scanning paths: /trunk/\xc3\xb9 1/2 paths (50.00%) (esc) gone from -1 reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/trunk (glob) - getting files: \xc3\xa8 1/2 (50.00%) (esc) - getting files: \xc3\xb9/e\xcc\x81 2/2 (100.00%) (esc) + getting files: \xc3\xa8 1/2 files (50.00%) (esc) + getting files: \xc3\xb9/e\xcc\x81 2/2 files (100.00%) (esc) committing files: committing manifest committing changelog @@ -113,13 +113,13 @@ source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?@5 converting: 4/6 revisions (66.67%) reparent to file://*/svn-repo/branches/branch%C3%A9 (glob) - scanning paths: /branches/branch\xc3\xa9 0/1 (0.00%) (esc) + scanning paths: /branches/branch\xc3\xa9 0/1 paths (0.00%) (esc) committing changelog 0 branch to branch?e source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?e@6 converting: 5/6 revisions (83.33%) reparent to file://*/svn-repo/branches/branch%C3%A9e (glob) - scanning paths: /branches/branch\xc3\xa9e 0/1 (0.00%) (esc) + scanning paths: /branches/branch\xc3\xa9e 0/1 paths (0.00%) (esc) committing changelog reparent to file://*/svn-repo (glob) reparent to 
file://*/svn-repo/branches/branch%C3%A9e (glob)
--- a/tests/test-convert-svn-move.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-convert-svn-move.t Sat Apr 16 18:06:48 2016 -0500 @@ -152,8 +152,6 @@ Test convert progress bar $ cat >> $HGRCPATH <<EOF - > [extensions] - > progress = > [progress] > assume-tty = 1 > delay = 0
--- a/tests/test-convert-svn-source.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-convert-svn-source.t Sat Apr 16 18:06:48 2016 -0500 @@ -1,5 +1,9 @@ #require svn svn-bindings + $ filter_svn_output () { + > egrep -v 'Committing|Updating|(^$)' | sed -e 's/done$//' || true + > } + $ cat >> $HGRCPATH <<EOF > [extensions] > convert = @@ -27,37 +31,35 @@ $ mkdir tags $ cd .. - $ svn import -m "init projB" projB "$SVNREPOURL/proj%20B" | sort - + $ svn import -m "init projB" projB "$SVNREPOURL/proj%20B" | filter_svn_output | sort Adding projB/mytrunk (glob) Adding projB/tags (glob) Committed revision 1. Update svn repository - $ svn co "$SVNREPOURL/proj%20B/mytrunk" B + $ svn co "$SVNREPOURL/proj%20B/mytrunk" B | filter_svn_output Checked out revision 1. $ cd B $ echo hello > 'letter .txt' - $ svn add 'letter .txt' + $ svn add 'letter .txt' | filter_svn_output A letter .txt - $ svn ci -m hello + $ svn ci -m hello | filter_svn_output Adding letter .txt Transmitting file data . Committed revision 2. $ svn-safe-append.py world 'letter .txt' - $ svn ci -m world + $ svn ci -m world | filter_svn_output Sending letter .txt Transmitting file data . Committed revision 3. - $ svn copy -m "tag v0.1" "$SVNREPOURL/proj%20B/mytrunk" "$SVNREPOURL/proj%20B/tags/v0.1" - + $ svn copy -m "tag v0.1" "$SVNREPOURL/proj%20B/mytrunk" "$SVNREPOURL/proj%20B/tags/v0.1" | filter_svn_output Committed revision 4. $ svn-safe-append.py 'nice day today!' 'letter .txt' - $ svn ci -m "nice day" + $ svn ci -m "nice day" | filter_svn_output Sending letter .txt Transmitting file data . Committed revision 5. @@ -88,20 +90,19 @@ $ cd B $ svn-safe-append.py "see second letter" 'letter .txt' $ echo "nice to meet you" > letter2.txt - $ svn add letter2.txt + $ svn add letter2.txt | filter_svn_output A letter2.txt - $ svn ci -m "second letter" + $ svn ci -m "second letter" | filter_svn_output Sending letter .txt Adding letter2.txt Transmitting file data .. Committed revision 6. 
- $ svn copy -m "tag v0.2" "$SVNREPOURL/proj%20B/mytrunk" "$SVNREPOURL/proj%20B/tags/v0.2" - + $ svn copy -m "tag v0.2" "$SVNREPOURL/proj%20B/mytrunk" "$SVNREPOURL/proj%20B/tags/v0.2" | filter_svn_output Committed revision 7. $ svn-safe-append.py "blah-blah-blah" letter2.txt - $ svn ci -m "work in progress" + $ svn ci -m "work in progress" | filter_svn_output Sending letter2.txt Transmitting file data . Committed revision 8. @@ -172,7 +173,7 @@ $ cd B $ echo >> "letter .txt" - $ svn ci -m 'nothing' + $ svn ci -m 'nothing' | filter_svn_output Sending letter .txt Transmitting file data . Committed revision 9.
--- a/tests/test-copy-move-merge.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-copy-move-merge.t Sat Apr 16 18:06:48 2016 -0500 @@ -34,6 +34,7 @@ preserving a for resolve of b preserving a for resolve of c removing a + starting 4 threads for background file closing (?) b: remote moved from a -> m (premerge) picked tool ':merge' for b (binary False symlink False changedelete False) merging a and b to b @@ -155,11 +156,11 @@ $ hg log -G -f b @ changeset: 3:76024fb4b05b - | tag: tip - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: copy a->b (2) - | + : tag: tip + : user: test + : date: Thu Jan 01 00:00:00 1970 +0000 + : summary: copy a->b (2) + : o changeset: 0:ac82d8b1f7c4 user: test date: Thu Jan 01 00:00:00 1970 +0000
--- a/tests/test-ctxmanager.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-ctxmanager.py Sat Apr 16 18:06:48 2016 -0500 @@ -3,7 +3,7 @@ import silenttestrunner import unittest -from mercurial.util import ctxmanager +from mercurial import util class contextmanager(object): def __init__(self, name, trace): @@ -44,7 +44,7 @@ def test_basics(self): trace = [] addtrace = trace.append - with ctxmanager(ctxmgr('a', addtrace), ctxmgr('b', addtrace)) as c: + with util.ctxmanager(ctxmgr('a', addtrace), ctxmgr('b', addtrace)) as c: a, b = c.enter() c.atexit(addtrace, ('atexit', 'x')) c.atexit(addtrace, ('atexit', 'y')) @@ -56,8 +56,8 @@ trace = [] addtrace = trace.append def go(): - with ctxmanager(ctxmgr('a', addtrace), - lambda: raise_on_enter('b', addtrace)) as c: + with util.ctxmanager(ctxmgr('a', addtrace), + lambda: raise_on_enter('b', addtrace)) as c: c.enter() addtrace('unreachable') self.assertRaises(ctxerror, go) @@ -67,8 +67,8 @@ trace = [] addtrace = trace.append def go(): - with ctxmanager(ctxmgr('a', addtrace), - lambda: raise_on_exit('b', addtrace)) as c: + with util.ctxmanager(ctxmgr('a', addtrace), + lambda: raise_on_exit('b', addtrace)) as c: c.enter() addtrace('running') self.assertRaises(ctxerror, go)
--- a/tests/test-debugextensions.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-debugextensions.t Sat Apr 16 18:06:48 2016 -0500 @@ -25,21 +25,21 @@ $ hg debugextensions -v color - location: */hgext/color.pyc (glob) + location: */hgext/color.py* (glob) tested with: internal ext1 - location: */extwithoutinfos.pyc (glob) + location: */extwithoutinfos.py* (glob) histedit - location: */hgext/histedit.pyc (glob) + location: */hgext/histedit.py* (glob) tested with: internal mq - location: */hgext/mq.pyc (glob) + location: */hgext/mq.py* (glob) tested with: internal patchbomb - location: */hgext/patchbomb.pyc (glob) + location: */hgext/patchbomb.py* (glob) tested with: internal rebase - location: */hgext/rebase.pyc (glob) + location: */hgext/rebase.py* (glob) tested with: internal $ hg debugextensions -Tjson | sed 's|\\\\|/|g' @@ -47,37 +47,37 @@ { "buglink": "", "name": "color", - "source": "*/hgext/color.pyc", (glob) + "source": "*/hgext/color.py*", (glob) "testedwith": "internal" }, { "buglink": "", "name": "ext1", - "source": "*/extwithoutinfos.pyc", (glob) + "source": "*/extwithoutinfos.py*", (glob) "testedwith": "" }, { "buglink": "", "name": "histedit", - "source": "*/hgext/histedit.pyc", (glob) + "source": "*/hgext/histedit.py*", (glob) "testedwith": "internal" }, { "buglink": "", "name": "mq", - "source": "*/hgext/mq.pyc", (glob) + "source": "*/hgext/mq.py*", (glob) "testedwith": "internal" }, { "buglink": "", "name": "patchbomb", - "source": "*/hgext/patchbomb.pyc", (glob) + "source": "*/hgext/patchbomb.py*", (glob) "testedwith": "internal" }, { "buglink": "", "name": "rebase", - "source": "*/hgext/rebase.pyc", (glob) + "source": "*/hgext/rebase.py*", (glob) "testedwith": "internal" } ]
--- a/tests/test-demandimport.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-demandimport.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,3 +1,5 @@ +from __future__ import print_function + from mercurial import demandimport demandimport.enable() @@ -5,8 +7,8 @@ if os.name != 'nt': try: import distutils.msvc9compiler - print ('distutils.msvc9compiler needs to be an immediate ' - 'importerror on non-windows platforms') + print('distutils.msvc9compiler needs to be an immediate ' + 'importerror on non-windows platforms') distutils.msvc9compiler except ImportError: pass @@ -23,38 +25,38 @@ import os -print "os =", f(os) -print "os.system =", f(os.system) -print "os =", f(os) +print("os =", f(os)) +print("os.system =", f(os.system)) +print("os =", f(os)) from mercurial import util -print "util =", f(util) -print "util.system =", f(util.system) -print "util =", f(util) -print "util.system =", f(util.system) +print("util =", f(util)) +print("util.system =", f(util.system)) +print("util =", f(util)) +print("util.system =", f(util.system)) from mercurial import hgweb -print "hgweb =", f(hgweb) -print "hgweb_mod =", f(hgweb.hgweb_mod) -print "hgweb =", f(hgweb) +print("hgweb =", f(hgweb)) +print("hgweb_mod =", f(hgweb.hgweb_mod)) +print("hgweb =", f(hgweb)) import re as fred -print "fred =", f(fred) +print("fred =", f(fred)) import sys as re -print "re =", f(re) +print("re =", f(re)) -print "fred =", f(fred) -print "fred.sub =", f(fred.sub) -print "fred =", f(fred) +print("fred =", f(fred)) +print("fred.sub =", f(fred.sub)) +print("fred =", f(fred)) -print "re =", f(re) -print "re.stderr =", f(re.stderr) -print "re =", f(re) +print("re =", f(re)) +print("re.stderr =", f(re.stderr)) +print("re =", f(re)) demandimport.disable() os.environ['HGDEMANDIMPORT'] = 'disable' demandimport.enable() from mercurial import node -print "node =", f(node) +print("node =", f(node))
--- a/tests/test-devel-warnings.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-devel-warnings.t Sat Apr 16 18:06:48 2016 -0500 @@ -11,6 +11,8 @@ > @command('buggylocking', [], '') > def buggylocking(ui, repo): > tr = repo.transaction('buggy') + > # make sure we rollback the transaction as we don't want to rely on the__del__ + > tr.release() > lo = repo.lock() > wl = repo.wlock() > wl.release() @@ -63,6 +65,8 @@ $ cat << EOF >> $HGRCPATH > [extensions] > buggylocking=$TESTTMP/buggylocking.py + > mock=$TESTDIR/mockblackbox.py + > blackbox= > [devel] > all-warnings=1 > EOF @@ -70,16 +74,16 @@ $ hg init lock-checker $ cd lock-checker $ hg buggylocking - devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking) - devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking) + devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:* (buggylocking) (glob) + devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob) $ cat << EOF >> $HGRCPATH > [devel] > all=0 > check-locks=1 > EOF $ hg buggylocking - devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking) - devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking) + devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:* (buggylocking) (glob) + devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob) $ hg buggylocking --traceback devel-warn: transaction with no lock at: */hg:* in * (glob) @@ -112,7 +116,7 @@ $ hg add a $ hg commit -m a $ hg stripintr - saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-backup.hg (glob) + saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/*-backup.hg (glob) abort: programming error: cannot strip from inside a transaction (contact your extension maintainer) [255] @@ -122,7 +126,7 @@ 0 $ hg oldanddeprecated devel-warn: foorbar is deprecated, 
go shopping - (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:53 (oldanddeprecated) + (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob) $ hg oldanddeprecated --traceback devel-warn: foorbar is deprecated, go shopping @@ -138,4 +142,27 @@ */mercurial/dispatch.py:* in <lambda> (glob) */mercurial/util.py:* in check (glob) $TESTTMP/buggylocking.py:* in oldanddeprecated (glob) + $ hg blackbox -l 9 + 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: revset "oldstyle" use list instead of smartset, (upgrade your code) at: */mercurial/revset.py:* (mfunc) (glob) + 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> log -r oldstyle() -T {rev}\n exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated + 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping + (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob) + 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback + 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping + (compatibility will be dropped after Mercurial-42.1337, update your code.) 
at: + */hg:* in <module> (glob) + */mercurial/dispatch.py:* in run (glob) + */mercurial/dispatch.py:* in dispatch (glob) + */mercurial/dispatch.py:* in _runcatch (glob) + */mercurial/dispatch.py:* in _dispatch (glob) + */mercurial/dispatch.py:* in runcommand (glob) + */mercurial/dispatch.py:* in _runcommand (glob) + */mercurial/dispatch.py:* in checkargs (glob) + */mercurial/dispatch.py:* in <lambda> (glob) + */mercurial/util.py:* in check (glob) + $TESTTMP/buggylocking.py:* in oldanddeprecated (glob) + 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> blackbox -l 9 $ cd ..
--- a/tests/test-dispatch.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-dispatch.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,16 +1,18 @@ +from __future__ import absolute_import, print_function import os -from mercurial import dispatch +from mercurial import ( + dispatch, +) def testdispatch(cmd): """Simple wrapper around dispatch.dispatch() Prints command and result value, but does not handle quoting. """ - print "running: %s" % (cmd,) + print("running: %s" % (cmd,)) req = dispatch.request(cmd.split()) result = dispatch.dispatch(req) - print "result: %r" % (result,) - + print("result: %r" % (result,)) testdispatch("init test1") os.chdir('test1')
--- a/tests/test-docker-packaging.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-docker-packaging.t Sat Apr 16 18:06:48 2016 -0500 @@ -24,4 +24,5 @@ mercurial-common should have .py but no .so or .pyc $ ar x mercurial-common_*.deb $ tar tf data.tar* | egrep '(localrepo|parsers)' + ./usr/lib/python2.7/dist-packages/mercurial/pure/parsers.py ./usr/lib/python2.7/dist-packages/mercurial/localrepo.py
--- a/tests/test-doctest.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-doctest.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,8 +1,12 @@ # this is hack to make sure no escape characters are inserted into the output -import os, sys + +from __future__ import absolute_import + +import doctest +import os +import sys if 'TERM' in os.environ: del os.environ['TERM'] -import doctest def testmod(name, optionflags=0, testtarget=None): __import__(name)
--- a/tests/test-double-merge.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-double-merge.t Sat Apr 16 18:06:48 2016 -0500 @@ -37,6 +37,7 @@ ancestor: e6dc8efe11cc, local: 6a0df1dad128+, remote: 484bf6903104 preserving foo for resolve of bar preserving foo for resolve of foo + starting 4 threads for background file closing (?) bar: remote copied from foo -> m (premerge) picked tool ':merge' for bar (binary False symlink False changedelete False) merging foo and bar to bar
--- a/tests/test-duplicateoptions.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-duplicateoptions.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,5 +1,10 @@ +from __future__ import absolute_import, print_function import os -from mercurial import ui, commands, extensions +from mercurial import ( + commands, + extensions, + ui as uimod, +) ignore = set(['highlight', 'win32text', 'factotum']) @@ -16,7 +21,7 @@ hgrc.close() -u = ui.ui() +u = uimod.ui() extensions.loadall(u) globalshort = set() @@ -31,6 +36,6 @@ for option in entry[1]: if (option[0] and option[0] in seenshort) or \ (option[1] and option[1] in seenlong): - print "command '" + cmd + "' has duplicate option " + str(option) + print("command '" + cmd + "' has duplicate option " + str(option)) seenshort.add(option[0]) seenlong.add(option[1])
--- a/tests/test-extdiff.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-extdiff.t Sat Apr 16 18:06:48 2016 -0500 @@ -72,7 +72,7 @@ Specifying an empty revision should abort. - $ hg extdiff --patch --rev 'ancestor()' --rev 1 + $ hg extdiff -p diff --patch --rev 'ancestor()' --rev 1 abort: empty revision on one side of range [255]
--- a/tests/test-extension.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-extension.t Sat Apr 16 18:06:48 2016 -0500 @@ -7,9 +7,11 @@ > command = cmdutil.command(cmdtable) > def uisetup(ui): > ui.write("uisetup called\\n") + > ui.flush() > def reposetup(ui, repo): > ui.write("reposetup called for %s\\n" % os.path.basename(repo.root)) > ui.write("ui %s= repo.ui\\n" % (ui == repo.ui and "=" or "!")) + > ui.flush() > @command('foo', [], 'hg foo') > def foo(ui, *args, **kwargs): > ui.write("Foo\\n") @@ -183,6 +185,7 @@ > import foo > def extsetup(ui): > ui.write('(extroot) ', foo.func(), '\n') + > ui.flush() > EOF $ cat > $TESTTMP/extroot/foo.py <<EOF @@ -1003,7 +1006,7 @@ Enabled extensions: - throw 1.2.3 + throw external 1.2.3 $ echo 'getversion = lambda: "1.twentythree"' >> throw.py $ rm -f throw.pyc throw.pyo $ hg version -v --config extensions.throw=throw.py @@ -1016,7 +1019,7 @@ Enabled extensions: - throw 1.twentythree + throw external 1.twentythree Refuse to load extensions with minimum version requirements @@ -1077,6 +1080,7 @@ > from mercurial import extensions > def reposetup(ui, repo): > ui.write('reposetup() for %s\n' % (repo.root)) + > ui.flush() > EOF $ hg init src $ echo a > src/a @@ -1199,6 +1203,48 @@ $ cd .. +Test compatibility with extension commands that don't use @command (issue5137) + + $ hg init deprecated + $ cd deprecated + + $ cat <<EOF > deprecatedcmd.py + > def deprecatedcmd(repo, ui): + > pass + > cmdtable = { + > 'deprecatedcmd': (deprecatedcmd, [], ''), + > } + > EOF + $ cat <<EOF > .hg/hgrc + > [extensions] + > deprecatedcmd = `pwd`/deprecatedcmd.py + > mq = ! + > hgext.mq = ! + > hgext/mq = ! + > [alias] + > deprecatedalias = deprecatedcmd + > EOF + + $ hg deprecatedcmd + devel-warn: missing attribute 'norepo', use @command decorator to register 'deprecatedcmd' + (compatibility will be dropped after Mercurial-3.8, update your code.) 
at: * (glob) + + $ hg deprecatedalias + devel-warn: missing attribute 'norepo', use @command decorator to register 'deprecatedalias' + (compatibility will be dropped after Mercurial-3.8, update your code.) at: * (glob) + + no warning unless command is executed: + + $ hg paths + + but mq iterates over command table: + + $ hg --config extensions.mq= paths + devel-warn: missing attribute 'norepo', use @command decorator to register 'deprecatedcmd' + (compatibility will be dropped after Mercurial-3.8, update your code.) at: * (glob) + + $ cd .. + Test synopsis and docstring extending $ hg init exthelp
--- a/tests/test-fetch.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-fetch.t Sat Apr 16 18:06:48 2016 -0500 @@ -64,7 +64,7 @@ a b c - $ hg --cwd a serve -a localhost -p $HGPORT -d --pid-file=hg.pid + $ hg serve --cwd a -a localhost -p $HGPORT -d --pid-file=hg.pid $ cat a/hg.pid >> "$DAEMON_PIDS" fetch over http, no auth @@ -339,7 +339,7 @@ (branches are permanent and global, did you want a bookmark?) $ hg -R n2 fetch -m merge n1 abort: working directory not at branch tip - (use "hg update" to check out branch tip) + (use 'hg update' to check out branch tip) [255] parent should be 0 (fetch did not update or merge anything)
--- a/tests/test-filecache.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-filecache.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,10 +1,19 @@ -import sys, os, subprocess +from __future__ import absolute_import, print_function +import os +import subprocess +import sys if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']): sys.exit(80) -from mercurial import util, scmutil, extensions, hg, ui +from mercurial import ( + extensions, + hg, + scmutil, + ui as uimod, + util, +) filecache = scmutil.filecache @@ -20,7 +29,7 @@ @filecache('x', 'y') def cached(self): - print 'creating' + print('creating') return 'string from function' def invalidate(self): @@ -31,12 +40,12 @@ pass def basic(repo): - print "* neither file exists" + print("* neither file exists") # calls function repo.cached repo.invalidate() - print "* neither file still exists" + print("* neither file still exists") # uses cache repo.cached @@ -44,7 +53,7 @@ f = open('x', 'w') f.close() repo.invalidate() - print "* empty file x created" + print("* empty file x created") # should recreate the object repo.cached @@ -52,12 +61,12 @@ f.write('a') f.close() repo.invalidate() - print "* file x changed size" + print("* file x changed size") # should recreate the object repo.cached repo.invalidate() - print "* nothing changed with either file" + print("* nothing changed with either file") # stats file again, reuses object repo.cached @@ -69,14 +78,14 @@ f.close() repo.invalidate() - print "* file x changed inode" + print("* file x changed inode") repo.cached # create empty file y f = open('y', 'w') f.close() repo.invalidate() - print "* empty file y created" + print("* empty file y created") # should recreate the object repo.cached @@ -84,7 +93,7 @@ f.write('A') f.close() repo.invalidate() - print "* file y changed size" + print("* file y changed size") # should recreate the object repo.cached @@ -93,7 +102,7 @@ f.close() repo.invalidate() - print "* file y changed inode" + print("* file y 
changed inode") repo.cached f = scmutil.opener('.')('x', 'w', atomictemp=True) @@ -104,7 +113,7 @@ f.close() repo.invalidate() - print "* both files changed inode" + print("* both files changed inode") repo.cached def fakeuncacheable(): @@ -132,7 +141,7 @@ def test_filecache_synced(): # test old behavior that caused filecached properties to go out of sync os.system('hg init && echo a >> a && hg ci -qAm.') - repo = hg.repository(ui.ui()) + repo = hg.repository(uimod.ui()) # first rollback clears the filecache, but changelog to stays in __dict__ repo.rollback() repo.commit('.') @@ -149,36 +158,36 @@ os.remove('y') repo.cached = 'string set externally' repo.invalidate() - print "* neither file exists" - print repo.cached + print("* neither file exists") + print(repo.cached) repo.invalidate() f = open('x', 'w') f.write('a') f.close() - print "* file x created" - print repo.cached + print("* file x created") + print(repo.cached) repo.cached = 'string 2 set externally' repo.invalidate() - print "* string set externally again" - print repo.cached + print("* string set externally again") + print(repo.cached) repo.invalidate() f = open('y', 'w') f.write('b') f.close() - print "* file y created" - print repo.cached + print("* file y created") + print(repo.cached) -print 'basic:' -print +print('basic:') +print() basic(fakerepo()) -print -print 'fakeuncacheable:' -print +print() +print('fakeuncacheable:') +print() fakeuncacheable() test_filecache_synced() -print -print 'setbeforeget:' -print +print() +print('setbeforeget:') +print() setbeforeget(fakerepo())
--- a/tests/test-filelog.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-filelog.py Sat Apr 16 18:06:48 2016 -0500 @@ -2,10 +2,17 @@ """ Tests the behavior of filelog w.r.t. data starting with '\1\n' """ -from mercurial import ui, hg -from mercurial.node import nullid, hex +from __future__ import absolute_import, print_function +from mercurial import ( + hg, + ui as uimod, +) +from mercurial.node import ( + hex, + nullid, +) -myui = ui.ui() +myui = uimod.ui() repo = hg.repository(myui, path='.', create=True) fl = repo.file('foobar') @@ -30,7 +37,7 @@ lock.release() def error(text): - print 'ERROR: ' + text + print('ERROR: ' + text) textwith = '\1\nfoo' without = 'foo' @@ -52,4 +59,4 @@ if fl.size(1) != len(textwith): error('filelog.size for a renaming + data starting with \\1\\n') -print 'OK.' +print('OK.')
--- a/tests/test-fileset.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-fileset.t Sat Apr 16 18:06:48 2016 -0500 @@ -351,9 +351,10 @@ Test detection of unintentional 'matchctx.existing()' invocation $ cat > $TESTTMP/existingcaller.py <<EOF - > from mercurial import fileset + > from mercurial import registrar > - > @fileset.predicate('existingcaller()', callexisting=False) + > filesetpredicate = registrar.filesetpredicate() + > @filesetpredicate('existingcaller()', callexisting=False) > def existingcaller(mctx, x): > # this 'mctx.existing()' invocation is unintentional > return [f for f in mctx.existing()]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-da.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc da + checking for parse errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-de.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,6 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc de + checking for parse errors + Die Dateien werden dem Projektarchiv beim n\xc3\xa4chsten \xc3\x9cbernehmen (commit) hinzugef\xc3\xbcgt. Um dies vorher r\xc3\xbcckg\xc3\xa4ngig zu machen, siehe:hg:`forget`. (esc) + warning: please have a space before :hg:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-el.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc el + checking for parse errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-fr.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc fr + checking for parse errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-it.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc it + checking for parse errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-ja.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,6 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc ja + checking for parse errors + \xe8\xa4\x87\xe8\xa3\xbd\xef\xbc\x8f\xe6\x94\xb9\xe5\x90\x8d\xe3\x81\xab\xe9\x96\xa2\xe3\x81\x99\xe3\x82\x8b\xe5\xb1\xa5\xe6\xad\xb4\xe4\xbf\x9d\xe5\xad\x98\xe3\x81\xae\xe3\x81\x9f\xe3\x82\x81\xe3\x81\xab git \xe5\xb7\xae\xe5\x88\x86\xe5\xbd\xa2\xe5\xbc\x8f\xe3\x82\x92\xe4\xbd\xbf\xe7\x94\xa8(-g/--git \xe6\x8c\x87\xe5\xae\x9a\xe3\x82\x84 \xe8\xa8\xad\xe5\xae\x9a\xe3\x83\x95\xe3\x82\xa1\xe3\x82\xa4\xe3\x83\xab\xe3\x81\xa7\xe3\x81\xae [diff] git=1 \xe8\xa8\x98\xe8\xbf\xb0)\xe3\x81\x99\xe3\x82\x8b\xe3\x81\xae\xe3\x81\xa7\xe3\x81\x82\xe3\x82\x8c\xe3\x81\xb0\xe3\x80\x81 add/remove/copy/rename \xe3\x81\xa8\xe3\x81\x84\xe3\x81\xa3\xe3\x81\x9f hg \xe3\x81\xae\xe3\x82\xb3\xe3\x83\x9e\xe3\x83\xb3\xe3\x83\x89\xe3\x81\xab\xe3\x82\x88\xe3\x82\x8b\xe5\xb1\xa5\xe6\xad\xb4\xe8\xa8\x98\xe9\x8c\xb2\xe3\x82\x82\xe3\x80\x81 \xe9\x80\x9a\xe5\xb8\xb8\xe3\x81\xa8\xe5\xa4\x89\xe3\x82\x8f\xe3\x82\x8a\xe3\x81\xaa\xe3\x81\x8f\xe6\xa9\x9f\xe8\x83\xbd\xe3\x81\x97\xe3\x81\xbe\xe3\x81\x99\xe3\x80\x82 git \xe5\xb7\xae\xe5\x88\x86\xe5\xbd\xa2\xe5\xbc\x8f\xe3\x81\xae\xe8\xa9\xb3\xe7\xb4\xb0\xe3\x81\xab\xe9\x96\xa2\xe3\x81\x97\xe3\x81\xa6\xe3\x81\xaf\xe3\x80\x81 'help diffs' \xe3\x82\x92\xe5\x8f\x82\xe7\x85\xa7\xe3\x81\x97\xe3\x81\xa6\xe3\x81\x8f\xe3\x81\xa0\xe3\x81\x95\xe3\x81\x84\xe3\x80\x82 (esc) + warning: please use " instead of ' for hg ... "..."
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-pt_BR.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc pt_BR + checking for parse errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-ro.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc ro + checking for parse errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-ru.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc ru + checking for parse errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-sv.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc sv + checking for parse errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-zh_CN.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc zh_CN + checking for parse errors
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-gendoc-zh_TW.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,4 @@ +#require docutils gettext + + $ $TESTDIR/check-gendoc zh_TW + checking for parse errors
--- a/tests/test-gendoc.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-gendoc.t Sat Apr 16 18:06:48 2016 -0500 @@ -7,60 +7,26 @@ $ export HGENCODING $ { echo C; ls "$TESTDIR/../i18n"/*.po | sort; } | while read PO; do > LOCALE=`basename "$PO" .po` - > echo > echo "% extracting documentation from $LOCALE" - > echo ".. -*- coding: utf-8 -*-" > gendoc-$LOCALE.txt - > echo "" >> gendoc-$LOCALE.txt > LANGUAGE=$LOCALE python "$TESTDIR/../doc/gendoc.py" >> gendoc-$LOCALE.txt 2> /dev/null || exit > > if [ $LOCALE != C ]; then - > cmp -s gendoc-C.txt gendoc-$LOCALE.txt && echo '** NOTHING TRANSLATED **' + > if [ ! -f $TESTDIR/test-gendoc-$LOCALE.t ]; then + > echo missing test-gendoc-$LOCALE.t + > fi + > cmp -s gendoc-C.txt gendoc-$LOCALE.txt && echo "** NOTHING TRANSLATED ($LOCALE) **" > fi - > - > echo "checking for parse errors" - > python "$TESTDIR/../doc/docchecker" gendoc-$LOCALE.txt - > # We call runrst without adding "--halt warning" to make it report - > # all errors instead of stopping on the first one. - > python "$TESTDIR/../doc/runrst" html gendoc-$LOCALE.txt /dev/null - > done - + > done; true % extracting documentation from C - checking for parse errors - % extracting documentation from da - checking for parse errors - % extracting documentation from de - checking for parse errors - Die Dateien werden dem Projektarchiv beim n\xc3\xa4chsten \xc3\x9cbernehmen (commit) hinzugef\xc3\xbcgt. Um dies vorher r\xc3\xbcckg\xc3\xa4ngig zu machen, siehe:hg:`forget`. 
(esc) - warning: please have a space before :hg: - % extracting documentation from el - checking for parse errors - % extracting documentation from fr - checking for parse errors - % extracting documentation from it - checking for parse errors - % extracting documentation from ja - checking for parse errors - % extracting documentation from pt_BR - checking for parse errors - % extracting documentation from ro - checking for parse errors - % extracting documentation from ru - checking for parse errors - % extracting documentation from sv - checking for parse errors - % extracting documentation from zh_CN - checking for parse errors - % extracting documentation from zh_TW - checking for parse errors
--- a/tests/test-glog-topological.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-glog-topological.t Sat Apr 16 18:06:48 2016 -0500 @@ -66,11 +66,11 @@ o 8 | o 3 - | + : o 1 | | o 7 - | | + | : | o 5 | | | o 4
--- a/tests/test-glog.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-glog.t Sat Apr 16 18:06:48 2016 -0500 @@ -940,193 +940,193 @@ $ hg log -G -r 'file("a")' -m o changeset: 32:d06dffa21a31 |\ parent: 27:886ed638191b - | | parent: 31:621d83e11f67 - | | user: test - | | date: Thu Jan 01 00:00:32 1970 +0000 - | | summary: (32) expand + | : parent: 31:621d83e11f67 + | : user: test + | : date: Thu Jan 01 00:00:32 1970 +0000 + | : summary: (32) expand + | : + o : changeset: 31:621d83e11f67 + |\: parent: 21:d42a756af44d + | : parent: 30:6e11cd4b648f + | : user: test + | : date: Thu Jan 01 00:00:31 1970 +0000 + | : summary: (31) expand + | : + o : changeset: 30:6e11cd4b648f + |\ \ parent: 28:44ecd0b9ae99 + | ~ : parent: 29:cd9bb2be7593 + | : user: test + | : date: Thu Jan 01 00:00:30 1970 +0000 + | : summary: (30) expand + | / + o : changeset: 28:44ecd0b9ae99 + |\ \ parent: 1:6db2ef61d156 + | ~ : parent: 26:7f25b6c2f0b9 + | : user: test + | : date: Thu Jan 01 00:00:28 1970 +0000 + | : summary: (28) merge zero known + | / + o : changeset: 26:7f25b6c2f0b9 + |\ \ parent: 18:1aa84d96232a + | | : parent: 25:91da8ed57247 + | | : user: test + | | : date: Thu Jan 01 00:00:26 1970 +0000 + | | : summary: (26) merge one known; far right + | | : + | o : changeset: 25:91da8ed57247 + | |\: parent: 21:d42a756af44d + | | : parent: 24:a9c19a3d96b7 + | | : user: test + | | : date: Thu Jan 01 00:00:25 1970 +0000 + | | : summary: (25) merge one known; far left + | | : + | o : changeset: 24:a9c19a3d96b7 + | |\ \ parent: 0:e6eb3150255d + | | ~ : parent: 23:a01cddf0766d + | | : user: test + | | : date: Thu Jan 01 00:00:24 1970 +0000 + | | : summary: (24) merge one known; immediate right + | | / + | o : changeset: 23:a01cddf0766d + | |\ \ parent: 1:6db2ef61d156 + | | ~ : parent: 22:e0d9cccacb5d + | | : user: test + | | : date: Thu Jan 01 00:00:23 1970 +0000 + | | : summary: (23) merge one known; immediate left + | | / + | o : changeset: 22:e0d9cccacb5d + |/:/ parent: 18:1aa84d96232a + | : 
parent: 21:d42a756af44d + | : user: test + | : date: Thu Jan 01 00:00:22 1970 +0000 + | : summary: (22) merge two known; one far left, one far right + | : + | o changeset: 21:d42a756af44d + | |\ parent: 19:31ddc2c1573b + | | | parent: 20:d30ed6450e32 + | | | user: test + | | | date: Thu Jan 01 00:00:21 1970 +0000 + | | | summary: (21) expand + | | | + +---o changeset: 20:d30ed6450e32 + | | | parent: 0:e6eb3150255d + | | ~ parent: 18:1aa84d96232a + | | user: test + | | date: Thu Jan 01 00:00:20 1970 +0000 + | | summary: (20) merge two known; two far right | | - o | changeset: 31:621d83e11f67 - |\| parent: 21:d42a756af44d - | | parent: 30:6e11cd4b648f - | | user: test - | | date: Thu Jan 01 00:00:31 1970 +0000 - | | summary: (31) expand - | | - o | changeset: 30:6e11cd4b648f - |\ \ parent: 28:44ecd0b9ae99 - | | | parent: 29:cd9bb2be7593 + | o changeset: 19:31ddc2c1573b + | |\ parent: 15:1dda3f72782d + | | | parent: 17:44765d7c06e0 | | | user: test - | | | date: Thu Jan 01 00:00:30 1970 +0000 - | | | summary: (30) expand + | | | date: Thu Jan 01 00:00:19 1970 +0000 + | | | summary: (19) expand | | | - o | | changeset: 28:44ecd0b9ae99 - |\ \ \ parent: 1:6db2ef61d156 - | | | | parent: 26:7f25b6c2f0b9 - | | | | user: test - | | | | date: Thu Jan 01 00:00:28 1970 +0000 - | | | | summary: (28) merge zero known - | | | | - o | | | changeset: 26:7f25b6c2f0b9 - |\ \ \ \ parent: 18:1aa84d96232a - | | | | | parent: 25:91da8ed57247 - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:26 1970 +0000 - | | | | | summary: (26) merge one known; far right - | | | | | - | o-----+ changeset: 25:91da8ed57247 - | | | | | parent: 21:d42a756af44d - | | | | | parent: 24:a9c19a3d96b7 - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:25 1970 +0000 - | | | | | summary: (25) merge one known; far left - | | | | | - | o | | | changeset: 24:a9c19a3d96b7 - | |\ \ \ \ parent: 0:e6eb3150255d - | | | | | | parent: 23:a01cddf0766d - | | | | | | user: test - | | | | | | date: Thu Jan 01 
00:00:24 1970 +0000 - | | | | | | summary: (24) merge one known; immediate right - | | | | | | - | o---+ | | changeset: 23:a01cddf0766d - | | | | | | parent: 1:6db2ef61d156 - | | | | | | parent: 22:e0d9cccacb5d - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:23 1970 +0000 - | | | | | | summary: (23) merge one known; immediate left - | | | | | | - | o-------+ changeset: 22:e0d9cccacb5d - | | | | | | parent: 18:1aa84d96232a - |/ / / / / parent: 21:d42a756af44d - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:22 1970 +0000 - | | | | | summary: (22) merge two known; one far left, one far right - | | | | | - | | | | o changeset: 21:d42a756af44d - | | | | |\ parent: 19:31ddc2c1573b - | | | | | | parent: 20:d30ed6450e32 - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:21 1970 +0000 - | | | | | | summary: (21) expand - | | | | | | - +-+-------o changeset: 20:d30ed6450e32 - | | | | | parent: 0:e6eb3150255d - | | | | | parent: 18:1aa84d96232a - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:20 1970 +0000 - | | | | | summary: (20) merge two known; two far right - | | | | | - | | | | o changeset: 19:31ddc2c1573b - | | | | |\ parent: 15:1dda3f72782d - | | | | | | parent: 17:44765d7c06e0 - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:19 1970 +0000 - | | | | | | summary: (19) expand - | | | | | | - o---+---+ | changeset: 18:1aa84d96232a - | | | | | parent: 1:6db2ef61d156 - / / / / / parent: 15:1dda3f72782d - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:18 1970 +0000 - | | | | | summary: (18) merge two known; two far left - | | | | | - | | | | o changeset: 17:44765d7c06e0 - | | | | |\ parent: 12:86b91144a6e9 - | | | | | | parent: 16:3677d192927d - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:17 1970 +0000 - | | | | | | summary: (17) expand - | | | | | | - +-+-------o changeset: 16:3677d192927d - | | | | | parent: 0:e6eb3150255d - | | | | | parent: 1:6db2ef61d156 - | | | | | user: test - | | | | | 
date: Thu Jan 01 00:00:16 1970 +0000 - | | | | | summary: (16) merge two known; one immediate right, one near right - | | | | | - | | | o | changeset: 15:1dda3f72782d - | | | |\ \ parent: 13:22d8966a97e3 - | | | | | | parent: 14:8eac370358ef - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:15 1970 +0000 - | | | | | | summary: (15) expand - | | | | | | - +-------o | changeset: 14:8eac370358ef - | | | | |/ parent: 0:e6eb3150255d - | | | | | parent: 12:86b91144a6e9 - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:14 1970 +0000 - | | | | | summary: (14) merge two known; one immediate right, one far right - | | | | | - | | | o | changeset: 13:22d8966a97e3 - | | | |\ \ parent: 9:7010c0af0a35 - | | | | | | parent: 11:832d76e6bdf2 - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:13 1970 +0000 - | | | | | | summary: (13) expand - | | | | | | - | +---+---o changeset: 12:86b91144a6e9 - | | | | | parent: 1:6db2ef61d156 - | | | | | parent: 9:7010c0af0a35 - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:12 1970 +0000 - | | | | | summary: (12) merge two known; one immediate right, one far left - | | | | | - | | | | o changeset: 11:832d76e6bdf2 - | | | | |\ parent: 6:b105a072e251 - | | | | | | parent: 10:74c64d036d72 - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:11 1970 +0000 - | | | | | | summary: (11) expand - | | | | | | - +---------o changeset: 10:74c64d036d72 - | | | | |/ parent: 0:e6eb3150255d - | | | | | parent: 6:b105a072e251 - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:10 1970 +0000 - | | | | | summary: (10) merge two known; one immediate left, one near right - | | | | | - | | | o | changeset: 9:7010c0af0a35 - | | | |\ \ parent: 7:b632bb1b1224 - | | | | | | parent: 8:7a0b11f71937 - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:09 1970 +0000 - | | | | | | summary: (9) expand - | | | | | | - +-------o | changeset: 8:7a0b11f71937 - | | | |/ / parent: 0:e6eb3150255d - | | | | | parent: 
7:b632bb1b1224 - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:08 1970 +0000 - | | | | | summary: (8) merge two known; one immediate left, one far right - | | | | | - | | | o | changeset: 7:b632bb1b1224 - | | | |\ \ parent: 2:3d9a33b8d1e1 - | | | | | | parent: 5:4409d547b708 - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:07 1970 +0000 - | | | | | | summary: (7) expand - | | | | | | - | | | +---o changeset: 6:b105a072e251 - | | | | |/ parent: 2:3d9a33b8d1e1 - | | | | | parent: 5:4409d547b708 - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:06 1970 +0000 - | | | | | summary: (6) merge two known; one immediate left, one far left - | | | | | - | | | o | changeset: 5:4409d547b708 - | | | |\ \ parent: 3:27eef8ed80b4 - | | | | | | parent: 4:26a8bac39d9f - | | | | | | user: test - | | | | | | date: Thu Jan 01 00:00:05 1970 +0000 - | | | | | | summary: (5) expand - | | | | | | - | +---o | | changeset: 4:26a8bac39d9f - | | | |/ / parent: 1:6db2ef61d156 - | | | | | parent: 3:27eef8ed80b4 - | | | | | user: test - | | | | | date: Thu Jan 01 00:00:04 1970 +0000 - | | | | | summary: (4) merge two known; one immediate left, one immediate right - | | | | | + o | | changeset: 18:1aa84d96232a + |\| | parent: 1:6db2ef61d156 + ~ | | parent: 15:1dda3f72782d + | | user: test + | | date: Thu Jan 01 00:00:18 1970 +0000 + | | summary: (18) merge two known; two far left + / / + | o changeset: 17:44765d7c06e0 + | |\ parent: 12:86b91144a6e9 + | | | parent: 16:3677d192927d + | | | user: test + | | | date: Thu Jan 01 00:00:17 1970 +0000 + | | | summary: (17) expand + | | | + | | o changeset: 16:3677d192927d + | | |\ parent: 0:e6eb3150255d + | | ~ ~ parent: 1:6db2ef61d156 + | | user: test + | | date: Thu Jan 01 00:00:16 1970 +0000 + | | summary: (16) merge two known; one immediate right, one near right + | | + o | changeset: 15:1dda3f72782d + |\ \ parent: 13:22d8966a97e3 + | | | parent: 14:8eac370358ef + | | | user: test + | | | date: Thu Jan 01 00:00:15 1970 
+0000 + | | | summary: (15) expand + | | | + | o | changeset: 14:8eac370358ef + | |\| parent: 0:e6eb3150255d + | ~ | parent: 12:86b91144a6e9 + | | user: test + | | date: Thu Jan 01 00:00:14 1970 +0000 + | | summary: (14) merge two known; one immediate right, one far right + | / + o | changeset: 13:22d8966a97e3 + |\ \ parent: 9:7010c0af0a35 + | | | parent: 11:832d76e6bdf2 + | | | user: test + | | | date: Thu Jan 01 00:00:13 1970 +0000 + | | | summary: (13) expand + | | | + +---o changeset: 12:86b91144a6e9 + | | | parent: 1:6db2ef61d156 + | | ~ parent: 9:7010c0af0a35 + | | user: test + | | date: Thu Jan 01 00:00:12 1970 +0000 + | | summary: (12) merge two known; one immediate right, one far left + | | + | o changeset: 11:832d76e6bdf2 + | |\ parent: 6:b105a072e251 + | | | parent: 10:74c64d036d72 + | | | user: test + | | | date: Thu Jan 01 00:00:11 1970 +0000 + | | | summary: (11) expand + | | | + | | o changeset: 10:74c64d036d72 + | |/| parent: 0:e6eb3150255d + | | ~ parent: 6:b105a072e251 + | | user: test + | | date: Thu Jan 01 00:00:10 1970 +0000 + | | summary: (10) merge two known; one immediate left, one near right + | | + o | changeset: 9:7010c0af0a35 + |\ \ parent: 7:b632bb1b1224 + | | | parent: 8:7a0b11f71937 + | | | user: test + | | | date: Thu Jan 01 00:00:09 1970 +0000 + | | | summary: (9) expand + | | | + | o | changeset: 8:7a0b11f71937 + |/| | parent: 0:e6eb3150255d + | ~ | parent: 7:b632bb1b1224 + | | user: test + | | date: Thu Jan 01 00:00:08 1970 +0000 + | | summary: (8) merge two known; one immediate left, one far right + | / + o | changeset: 7:b632bb1b1224 + |\ \ parent: 2:3d9a33b8d1e1 + | ~ | parent: 5:4409d547b708 + | | user: test + | | date: Thu Jan 01 00:00:07 1970 +0000 + | | summary: (7) expand + | / + | o changeset: 6:b105a072e251 + |/| parent: 2:3d9a33b8d1e1 + | ~ parent: 5:4409d547b708 + | user: test + | date: Thu Jan 01 00:00:06 1970 +0000 + | summary: (6) merge two known; one immediate left, one far left + | + o changeset: 5:4409d547b708 + 
|\ parent: 3:27eef8ed80b4 + | ~ parent: 4:26a8bac39d9f + | user: test + | date: Thu Jan 01 00:00:05 1970 +0000 + | summary: (5) expand + | + o changeset: 4:26a8bac39d9f + |\ parent: 1:6db2ef61d156 + ~ ~ parent: 3:27eef8ed80b4 + user: test + date: Thu Jan 01 00:00:04 1970 +0000 + summary: (4) merge two known; one immediate left, one immediate right + Empty revision range - display nothing: @@ -1140,19 +1140,19 @@ $ hg log -G -l1 repo @ changeset: 34:fea3ac5810e0 | tag: tip - | parent: 32:d06dffa21a31 - | user: test - | date: Thu Jan 01 00:00:34 1970 +0000 - | summary: (34) head - | + ~ parent: 32:d06dffa21a31 + user: test + date: Thu Jan 01 00:00:34 1970 +0000 + summary: (34) head + $ hg log -G -l1 repo/a @ changeset: 34:fea3ac5810e0 | tag: tip - | parent: 32:d06dffa21a31 - | user: test - | date: Thu Jan 01 00:00:34 1970 +0000 - | summary: (34) head - | + ~ parent: 32:d06dffa21a31 + user: test + date: Thu Jan 01 00:00:34 1970 +0000 + summary: (34) head + $ hg log -G -l1 repo/missing #endif @@ -1177,9 +1177,9 @@ | o changeset: 1:5ac72c0599bf | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: two - | + ~ date: Thu Jan 01 00:00:00 1970 +0000 + summary: two + Issue1896: File log with explicit style $ hg log -G --style=default one @@ -1278,16 +1278,16 @@ $ hg log -G -l2 a o changeset: 34:fea3ac5810e0 | parent: 32:d06dffa21a31 - | user: test - | date: Thu Jan 01 00:00:34 1970 +0000 - | summary: (34) head - | - | o changeset: 33:68608f5145f9 - | | parent: 18:1aa84d96232a - | | user: test - | | date: Thu Jan 01 00:00:33 1970 +0000 - | | summary: (33) head - | | + ~ user: test + date: Thu Jan 01 00:00:34 1970 +0000 + summary: (34) head + + o changeset: 33:68608f5145f9 + | parent: 18:1aa84d96232a + ~ user: test + date: Thu Jan 01 00:00:33 1970 +0000 + summary: (33) head + File + limit + -ra:b, (b - a) < limit: $ hg log -G -l3000 -r32:tip a @@ -1299,66 +1299,66 @@ | | o changeset: 33:68608f5145f9 | | parent: 18:1aa84d96232a - | | user: test - | | date: Thu Jan 
01 00:00:33 1970 +0000 - | | summary: (33) head - | | - o | changeset: 32:d06dffa21a31 - |\ \ parent: 27:886ed638191b - | | | parent: 31:621d83e11f67 - | | | user: test - | | | date: Thu Jan 01 00:00:32 1970 +0000 - | | | summary: (32) expand - | | | + | ~ user: test + | date: Thu Jan 01 00:00:33 1970 +0000 + | summary: (33) head + | + o changeset: 32:d06dffa21a31 + |\ parent: 27:886ed638191b + ~ ~ parent: 31:621d83e11f67 + user: test + date: Thu Jan 01 00:00:32 1970 +0000 + summary: (32) expand + Point out a common and an uncommon unshown parent $ hg log -G -r 'rev(8) or rev(9)' o changeset: 9:7010c0af0a35 |\ parent: 7:b632bb1b1224 - | | parent: 8:7a0b11f71937 - | | user: test - | | date: Thu Jan 01 00:00:09 1970 +0000 - | | summary: (9) expand - | | - o | changeset: 8:7a0b11f71937 - |\| parent: 0:e6eb3150255d - | | parent: 7:b632bb1b1224 - | | user: test - | | date: Thu Jan 01 00:00:08 1970 +0000 - | | summary: (8) merge two known; one immediate left, one far right - | | + | ~ parent: 8:7a0b11f71937 + | user: test + | date: Thu Jan 01 00:00:09 1970 +0000 + | summary: (9) expand + | + o changeset: 8:7a0b11f71937 + |\ parent: 0:e6eb3150255d + ~ ~ parent: 7:b632bb1b1224 + user: test + date: Thu Jan 01 00:00:08 1970 +0000 + summary: (8) merge two known; one immediate left, one far right + File + limit + -ra:b, b < tip: $ hg log -G -l1 -r32:34 a o changeset: 34:fea3ac5810e0 | parent: 32:d06dffa21a31 - | user: test - | date: Thu Jan 01 00:00:34 1970 +0000 - | summary: (34) head - | + ~ user: test + date: Thu Jan 01 00:00:34 1970 +0000 + summary: (34) head + file(File) + limit + -ra:b, b < tip: $ hg log -G -l1 -r32:34 -r 'file("a")' o changeset: 34:fea3ac5810e0 | parent: 32:d06dffa21a31 - | user: test - | date: Thu Jan 01 00:00:34 1970 +0000 - | summary: (34) head - | + ~ user: test + date: Thu Jan 01 00:00:34 1970 +0000 + summary: (34) head + limit(file(File) and a::b), b < tip: $ hg log -G -r 'limit(file("a") and 32::34, 1)' o changeset: 32:d06dffa21a31 |\ parent: 
27:886ed638191b - | | parent: 31:621d83e11f67 - | | user: test - | | date: Thu Jan 01 00:00:32 1970 +0000 - | | summary: (32) expand - | | + ~ ~ parent: 31:621d83e11f67 + user: test + date: Thu Jan 01 00:00:32 1970 +0000 + summary: (32) expand + File + limit + -ra:b, b < tip: @@ -1369,16 +1369,16 @@ $ hg log -G -l10 -r33:34 a o changeset: 34:fea3ac5810e0 | parent: 32:d06dffa21a31 - | user: test - | date: Thu Jan 01 00:00:34 1970 +0000 - | summary: (34) head - | - | o changeset: 33:68608f5145f9 - | | parent: 18:1aa84d96232a - | | user: test - | | date: Thu Jan 01 00:00:33 1970 +0000 - | | summary: (33) head - | | + ~ user: test + date: Thu Jan 01 00:00:34 1970 +0000 + summary: (34) head + + o changeset: 33:68608f5145f9 + | parent: 18:1aa84d96232a + ~ user: test + date: Thu Jan 01 00:00:33 1970 +0000 + summary: (33) head + Do not crash or produce strange graphs if history is buggy @@ -1409,17 +1409,17 @@ | | o changeset: 33:68608f5145f9 | | parent: 18:1aa84d96232a - | | user: test - | | date: Thu Jan 01 00:00:33 1970 +0000 - | | summary: (33) head - | | - o | changeset: 32:d06dffa21a31 - |\ \ parent: 27:886ed638191b - | | | parent: 31:621d83e11f67 - | | | user: test - | | | date: Thu Jan 01 00:00:32 1970 +0000 - | | | summary: (32) expand - | | | + | ~ user: test + | date: Thu Jan 01 00:00:33 1970 +0000 + | summary: (33) head + | + o changeset: 32:d06dffa21a31 + |\ parent: 27:886ed638191b + ~ ~ parent: 31:621d83e11f67 + user: test + date: Thu Jan 01 00:00:32 1970 +0000 + summary: (32) expand + Test log -G options @@ -1796,8 +1796,10 @@ $ hg log -G --follow-first e --template '{rev} {desc|firstline}\n' @ 6 merge 5 and 4 |\ - o | 5 add another e - | | + | ~ + o 5 add another e + | + ~ Test --copies @@ -1876,23 +1878,23 @@ $ hg log -G --git --patch b o changeset: 1:216d4c92cf98 | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: copy a b - | - | diff --git a/a b/b - | copy from a - | copy to b - | + ~ date: Thu Jan 01 00:00:00 1970 +0000 + summary: copy a 
b + + diff --git a/a b/b + copy from a + copy to b + $ hg log -G --git --stat b o changeset: 1:216d4c92cf98 | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: copy a b - | - | b | 0 - | 1 files changed, 0 insertions(+), 0 deletions(-) - | + ~ date: Thu Jan 01 00:00:00 1970 +0000 + summary: copy a b + + b | 0 + 1 files changed, 0 insertions(+), 0 deletions(-) + $ hg log -G --git --patch --follow b o changeset: 1:216d4c92cf98 @@ -1939,32 +1941,32 @@ $ hg log -G --git --patch --follow-first e @ changeset: 6:fc281d8ff18d |\ tag: tip - | | parent: 5:99b31f1c2782 - | | parent: 4:17d952250a9d - | | user: test - | | date: Thu Jan 01 00:00:00 1970 +0000 - | | summary: merge 5 and 4 - | | - | | diff --git a/e b/e - | | --- a/e - | | +++ b/e - | | @@ -1,1 +1,1 @@ - | | -ee - | | +merge - | | - o | changeset: 5:99b31f1c2782 - | | parent: 3:5918b8d165d1 - | | user: test - | | date: Thu Jan 01 00:00:00 1970 +0000 - | | summary: add another e - | | - | | diff --git a/e b/e - | | new file mode 100644 - | | --- /dev/null - | | +++ b/e - | | @@ -0,0 +1,1 @@ - | | +ee - | | + | ~ parent: 5:99b31f1c2782 + | parent: 4:17d952250a9d + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: merge 5 and 4 + | + | diff --git a/e b/e + | --- a/e + | +++ b/e + | @@ -1,1 +1,1 @@ + | -ee + | +merge + | + o changeset: 5:99b31f1c2782 + | parent: 3:5918b8d165d1 + ~ user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add another e + + diff --git a/e b/e + new file mode 100644 + --- /dev/null + +++ b/e + @@ -0,0 +1,1 @@ + +ee + Test old-style --rev @@ -2385,6 +2387,7 @@ | @ 3:5918b8d165d1 | + ~ node template with changeset_printer: @@ -2393,8 +2396,10 @@ | 6 6:fc281d8ff18d |\ - 5 | 5:99b31f1c2782 - | | + | ~ + 5 5:99b31f1c2782 + | + ~ node template with changeset_templater (shared cache variable): @@ -2404,7 +2409,330 @@ | # 6 foo-bar+0 |\ - o | 5 null+5 - | | + | ~ + o 5 null+5 + | + ~ + +label() should just work in node template: + + $ hg log -Gqr 7 --config 
extensions.color= --color=debug \ + > --config ui.graphnodetemplate='{label("branch.{branch}", rev)}' + [branch.default|7] [log.node|7:02dbb8e276b8] + | + ~ $ cd .. + +change graph edge styling + + $ cd repo + $ cat << EOF >> $HGRCPATH + > [experimental] + > graphstyle.parent = | + > graphstyle.grandparent = : + > graphstyle.missing = + > EOF + $ hg log -G -r 'file("a")' -m + @ changeset: 36:08a19a744424 + : branch: branch + : tag: tip + : parent: 35:9159c3644c5e + : parent: 35:9159c3644c5e + : user: test + : date: Thu Jan 01 00:00:36 1970 +0000 + : summary: (36) buggy merge: identical parents + : + o changeset: 32:d06dffa21a31 + |\ parent: 27:886ed638191b + | : parent: 31:621d83e11f67 + | : user: test + | : date: Thu Jan 01 00:00:32 1970 +0000 + | : summary: (32) expand + | : + o : changeset: 31:621d83e11f67 + |\: parent: 21:d42a756af44d + | : parent: 30:6e11cd4b648f + | : user: test + | : date: Thu Jan 01 00:00:31 1970 +0000 + | : summary: (31) expand + | : + o : changeset: 30:6e11cd4b648f + |\ \ parent: 28:44ecd0b9ae99 + | ~ : parent: 29:cd9bb2be7593 + | : user: test + | : date: Thu Jan 01 00:00:30 1970 +0000 + | : summary: (30) expand + | / + o : changeset: 28:44ecd0b9ae99 + |\ \ parent: 1:6db2ef61d156 + | ~ : parent: 26:7f25b6c2f0b9 + | : user: test + | : date: Thu Jan 01 00:00:28 1970 +0000 + | : summary: (28) merge zero known + | / + o : changeset: 26:7f25b6c2f0b9 + |\ \ parent: 18:1aa84d96232a + | | : parent: 25:91da8ed57247 + | | : user: test + | | : date: Thu Jan 01 00:00:26 1970 +0000 + | | : summary: (26) merge one known; far right + | | : + | o : changeset: 25:91da8ed57247 + | |\: parent: 21:d42a756af44d + | | : parent: 24:a9c19a3d96b7 + | | : user: test + | | : date: Thu Jan 01 00:00:25 1970 +0000 + | | : summary: (25) merge one known; far left + | | : + | o : changeset: 24:a9c19a3d96b7 + | |\ \ parent: 0:e6eb3150255d + | | ~ : parent: 23:a01cddf0766d + | | : user: test + | | : date: Thu Jan 01 00:00:24 1970 +0000 + | | : summary: (24) merge one 
known; immediate right + | | / + | o : changeset: 23:a01cddf0766d + | |\ \ parent: 1:6db2ef61d156 + | | ~ : parent: 22:e0d9cccacb5d + | | : user: test + | | : date: Thu Jan 01 00:00:23 1970 +0000 + | | : summary: (23) merge one known; immediate left + | | / + | o : changeset: 22:e0d9cccacb5d + |/:/ parent: 18:1aa84d96232a + | : parent: 21:d42a756af44d + | : user: test + | : date: Thu Jan 01 00:00:22 1970 +0000 + | : summary: (22) merge two known; one far left, one far right + | : + | o changeset: 21:d42a756af44d + | |\ parent: 19:31ddc2c1573b + | | | parent: 20:d30ed6450e32 + | | | user: test + | | | date: Thu Jan 01 00:00:21 1970 +0000 + | | | summary: (21) expand + | | | + +---o changeset: 20:d30ed6450e32 + | | | parent: 0:e6eb3150255d + | | ~ parent: 18:1aa84d96232a + | | user: test + | | date: Thu Jan 01 00:00:20 1970 +0000 + | | summary: (20) merge two known; two far right + | | + | o changeset: 19:31ddc2c1573b + | |\ parent: 15:1dda3f72782d + | | | parent: 17:44765d7c06e0 + | | | user: test + | | | date: Thu Jan 01 00:00:19 1970 +0000 + | | | summary: (19) expand + | | | + o | | changeset: 18:1aa84d96232a + |\| | parent: 1:6db2ef61d156 + ~ | | parent: 15:1dda3f72782d + | | user: test + | | date: Thu Jan 01 00:00:18 1970 +0000 + | | summary: (18) merge two known; two far left + / / + | o changeset: 17:44765d7c06e0 + | |\ parent: 12:86b91144a6e9 + | | | parent: 16:3677d192927d + | | | user: test + | | | date: Thu Jan 01 00:00:17 1970 +0000 + | | | summary: (17) expand + | | | + | | o changeset: 16:3677d192927d + | | |\ parent: 0:e6eb3150255d + | | ~ ~ parent: 1:6db2ef61d156 + | | user: test + | | date: Thu Jan 01 00:00:16 1970 +0000 + | | summary: (16) merge two known; one immediate right, one near right + | | + o | changeset: 15:1dda3f72782d + |\ \ parent: 13:22d8966a97e3 + | | | parent: 14:8eac370358ef + | | | user: test + | | | date: Thu Jan 01 00:00:15 1970 +0000 + | | | summary: (15) expand + | | | + | o | changeset: 14:8eac370358ef + | |\| parent: 
0:e6eb3150255d + | ~ | parent: 12:86b91144a6e9 + | | user: test + | | date: Thu Jan 01 00:00:14 1970 +0000 + | | summary: (14) merge two known; one immediate right, one far right + | / + o | changeset: 13:22d8966a97e3 + |\ \ parent: 9:7010c0af0a35 + | | | parent: 11:832d76e6bdf2 + | | | user: test + | | | date: Thu Jan 01 00:00:13 1970 +0000 + | | | summary: (13) expand + | | | + +---o changeset: 12:86b91144a6e9 + | | | parent: 1:6db2ef61d156 + | | ~ parent: 9:7010c0af0a35 + | | user: test + | | date: Thu Jan 01 00:00:12 1970 +0000 + | | summary: (12) merge two known; one immediate right, one far left + | | + | o changeset: 11:832d76e6bdf2 + | |\ parent: 6:b105a072e251 + | | | parent: 10:74c64d036d72 + | | | user: test + | | | date: Thu Jan 01 00:00:11 1970 +0000 + | | | summary: (11) expand + | | | + | | o changeset: 10:74c64d036d72 + | |/| parent: 0:e6eb3150255d + | | ~ parent: 6:b105a072e251 + | | user: test + | | date: Thu Jan 01 00:00:10 1970 +0000 + | | summary: (10) merge two known; one immediate left, one near right + | | + o | changeset: 9:7010c0af0a35 + |\ \ parent: 7:b632bb1b1224 + | | | parent: 8:7a0b11f71937 + | | | user: test + | | | date: Thu Jan 01 00:00:09 1970 +0000 + | | | summary: (9) expand + | | | + | o | changeset: 8:7a0b11f71937 + |/| | parent: 0:e6eb3150255d + | ~ | parent: 7:b632bb1b1224 + | | user: test + | | date: Thu Jan 01 00:00:08 1970 +0000 + | | summary: (8) merge two known; one immediate left, one far right + | / + o | changeset: 7:b632bb1b1224 + |\ \ parent: 2:3d9a33b8d1e1 + | ~ | parent: 5:4409d547b708 + | | user: test + | | date: Thu Jan 01 00:00:07 1970 +0000 + | | summary: (7) expand + | / + | o changeset: 6:b105a072e251 + |/| parent: 2:3d9a33b8d1e1 + | ~ parent: 5:4409d547b708 + | user: test + | date: Thu Jan 01 00:00:06 1970 +0000 + | summary: (6) merge two known; one immediate left, one far left + | + o changeset: 5:4409d547b708 + |\ parent: 3:27eef8ed80b4 + | ~ parent: 4:26a8bac39d9f + | user: test + | date: Thu Jan 01 
00:00:05 1970 +0000 + | summary: (5) expand + | + o changeset: 4:26a8bac39d9f + |\ parent: 1:6db2ef61d156 + ~ ~ parent: 3:27eef8ed80b4 + user: test + date: Thu Jan 01 00:00:04 1970 +0000 + summary: (4) merge two known; one immediate left, one immediate right + + + $ cd .. + +Change graph shorten, test better with graphstyle.missing not none + + $ cd repo + $ cat << EOF >> $HGRCPATH + > [experimental] + > graphstyle.parent = | + > graphstyle.grandparent = : + > graphstyle.missing = ' + > graphshorten = true + > EOF + $ hg log -G -r 'file("a")' -m -T '{rev} {desc}' + @ 36 (36) buggy merge: identical parents + o 32 (32) expand + |\ + o : 31 (31) expand + |\: + o : 30 (30) expand + |\ \ + o \ \ 28 (28) merge zero known + |\ \ \ + o \ \ \ 26 (26) merge one known; far right + |\ \ \ \ + | o-----+ 25 (25) merge one known; far left + | o ' ' : 24 (24) merge one known; immediate right + | |\ \ \ \ + | o---+ ' : 23 (23) merge one known; immediate left + | o-------+ 22 (22) merge two known; one far left, one far right + |/ / / / / + | ' ' ' o 21 (21) expand + | ' ' ' |\ + +-+-------o 20 (20) merge two known; two far right + | ' ' ' o 19 (19) expand + | ' ' ' |\ + o---+---+ | 18 (18) merge two known; two far left + / / / / / + ' ' ' | o 17 (17) expand + ' ' ' | |\ + +-+-------o 16 (16) merge two known; one immediate right, one near right + ' ' ' o | 15 (15) expand + ' ' ' |\ \ + +-------o | 14 (14) merge two known; one immediate right, one far right + ' ' ' | |/ + ' ' ' o | 13 (13) expand + ' ' ' |\ \ + ' +---+---o 12 (12) merge two known; one immediate right, one far left + ' ' ' | o 11 (11) expand + ' ' ' | |\ + +---------o 10 (10) merge two known; one immediate left, one near right + ' ' ' | |/ + ' ' ' o | 9 (9) expand + ' ' ' |\ \ + +-------o | 8 (8) merge two known; one immediate left, one far right + ' ' ' |/ / + ' ' ' o | 7 (7) expand + ' ' ' |\ \ + ' ' ' +---o 6 (6) merge two known; one immediate left, one far left + ' ' ' | '/ + ' ' ' o ' 5 (5) expand + ' ' ' |\ \ + ' 
+---o ' ' 4 (4) merge two known; one immediate left, one immediate right + ' ' ' '/ / + +behavior with newlines + + $ hg log -G -r ::2 -T '{rev} {desc}' + o 2 (2) collapse + o 1 (1) collapse + o 0 (0) root + + $ hg log -G -r ::2 -T '{rev} {desc}\n' + o 2 (2) collapse + o 1 (1) collapse + o 0 (0) root + + $ hg log -G -r ::2 -T '{rev} {desc}\n\n' + o 2 (2) collapse + | + o 1 (1) collapse + | + o 0 (0) root + + + $ hg log -G -r ::2 -T '\n{rev} {desc}' + o + | 2 (2) collapse + o + | 1 (1) collapse + o + 0 (0) root + + $ hg log -G -r ::2 -T '{rev} {desc}\n\n\n' + o 2 (2) collapse + | + | + o 1 (1) collapse + | + | + o 0 (0) root + + + $ cd ..
--- a/tests/test-graft.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-graft.t Sat Apr 16 18:06:48 2016 -0500 @@ -1,3 +1,9 @@ + $ cat >> $HGRCPATH <<EOF + > [extdiff] + > # for portability: + > pdiff = sh "$RUNTESTDIR/pdiff" + > EOF + Create a repo with some stuff in it: $ hg init a @@ -40,6 +46,13 @@ | o test@0.public: 0 +Can't continue without starting: + + $ hg rm -q e + $ hg graft --continue + abort: no graft in progress + [255] + $ hg revert -r . -q e Need to specify a rev: @@ -154,6 +167,7 @@ branchmerge: True, force: True, partial: False ancestor: 68795b066622, local: ef0ef43d49e7+, remote: 5d205f8b35b6 preserving b for resolve of b + starting 4 threads for background file closing (?) b: local copied/moved from a -> m (premerge) picked tool ':merge' for b (binary False symlink False changedelete False) merging b and a to b @@ -189,13 +203,13 @@ e: versions differ -> m (premerge) picked tool ':merge' for e (binary False symlink False changedelete False) merging e - my e@1905859650ec+ other e@9c233e8e184d ancestor e@68795b066622 + my e@1905859650ec+ other e@9c233e8e184d ancestor e@4c60f11aa304 e: versions differ -> m (merge) picked tool ':merge' for e (binary False symlink False changedelete False) - my e@1905859650ec+ other e@9c233e8e184d ancestor e@68795b066622 + my e@1905859650ec+ other e@9c233e8e184d ancestor e@4c60f11aa304 warning: conflicts while merging e! (edit, then use 'hg resolve --mark') abort: unresolved conflicts, can't continue - (use hg resolve and hg graft --continue --log) + (use 'hg resolve' and 'hg graft --continue --log') [255] Summary should mention graft: @@ -232,7 +246,7 @@ merging e warning: conflicts while merging e! 
(edit, then use 'hg resolve --mark') abort: unresolved conflicts, can't continue - (use hg resolve and hg graft --continue) + (use 'hg resolve' and 'hg graft --continue') [255] Continue without resolve should fail: @@ -342,9 +356,9 @@ skipping already grafted revision 7:ef0ef43d49e7 (was grafted from 2:5c095ad7e90f) [255] - $ hg extdiff --config extensions.extdiff= --patch -r 2 -r 13 - --- */hg-5c095ad7e90f.patch * +0000 (glob) - +++ */hg-7a4785234d87.patch * +0000 (glob) + $ hg pdiff --config extensions.extdiff= --patch -r 2 -r 13 + --- */hg-5c095ad7e90f.patch * (glob) + +++ */hg-7a4785234d87.patch * (glob) @@ -1,18 +1,18 @@ # HG changeset patch -# User test @@ -373,9 +387,9 @@ ++a [1] - $ hg extdiff --config extensions.extdiff= --patch -r 2 -r 13 -X . - --- */hg-5c095ad7e90f.patch * +0000 (glob) - +++ */hg-7a4785234d87.patch * +0000 (glob) + $ hg pdiff --config extensions.extdiff= --patch -r 2 -r 13 -X . + --- */hg-5c095ad7e90f.patch * (glob) + +++ */hg-7a4785234d87.patch * (glob) @@ -1,8 +1,8 @@ # HG changeset patch -# User test @@ -427,7 +441,7 @@ $ hg graft 1 --tool internal:fail grafting 1:5d205f8b35b6 "1" abort: unresolved conflicts, can't continue - (use hg resolve and hg graft --continue) + (use 'hg resolve' and 'hg graft --continue') [255] $ hg resolve --all merging a @@ -438,7 +452,7 @@ c ======= b - >>>>>>> other: 5d205f8b35b6 - bar: 1 + >>>>>>> graft: 5d205f8b35b6 - bar: 1 $ echo b > a $ hg resolve -m a (no more unresolved files) @@ -467,7 +481,7 @@ $ hg graft 2 --tool internal:fail grafting 2:5c095ad7e90f "2" abort: unresolved conflicts, can't continue - (use hg resolve and hg graft --continue) + (use 'hg resolve' and 'hg graft --continue') [255] $ hg resolve --all merging a and b to b @@ -754,7 +768,7 @@ $ hg graft 28 --force --tool internal:fail grafting 28:50a516bb8b57 "28" abort: unresolved conflicts, can't continue - (use hg resolve and hg graft --continue) + (use 'hg resolve' and 'hg graft --continue') [255] $ hg resolve --all merging a
--- a/tests/test-help.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-help.t Sat Apr 16 18:06:48 2016 -0500 @@ -241,6 +241,7 @@ enabled extensions: + chgserver command server extension for cHg (EXPERIMENTAL) (?) children command to display child changesets (DEPRECATED) rebase command to move sets of revisions to a different ancestor @@ -271,8 +272,6 @@ patchbomb command to send changesets as (a series of) patch emails purge command to delete untracked files from the working directory - record commands to interactively select changes for - commit/qrefresh relink recreates hardlinks between repository clones schemes extend schemes with shortcuts to repository swarms share share a common history between several working directories @@ -674,10 +673,50 @@ > def nohelp(ui, *args, **kwargs): > pass > + > def uisetup(ui): + > ui.setconfig('alias', 'shellalias', '!echo hi', 'helpext') + > ui.setconfig('alias', 'hgalias', 'summary', 'helpext') + > > EOF $ echo '[extensions]' >> $HGRCPATH $ echo "helpext = `pwd`/helpext.py" >> $HGRCPATH +Test for aliases + + $ hg help hgalias + hg hgalias [--remote] + + alias for: hg summary + + summarize working directory state + + This generates a brief summary of the working directory state, including + parents, branch, commit status, phase and available updates. + + With the --remote option, this will check the default paths for incoming + and outgoing changes. This can be time-consuming. + + Returns 0 on success. 
+ + defined by: helpext + + options: + + --remote check for push and pull + + (some details hidden, use --verbose to show complete help) + + $ hg help shellalias + hg shellalias + + shell alias for: + + echo hi + + defined by: helpext + + (some details hidden, use --verbose to show complete help) + Test command with no help text $ hg help nohelp @@ -861,6 +900,8 @@ debugsub (no help text available) debugsuccessorssets show set of successors for revision + debugtemplate + parse and apply a template debugwalk show how files match on given patterns debugwireargs (no help text available) @@ -875,6 +916,7 @@ bundles container for exchange of repository data changegroups representation of revlog data + requirements repository requirements revlogs revision storage mechanism sub-topics can be accessed @@ -1536,7 +1578,7 @@ to resolve these conflicts. ":local" - Uses the local version of files as the merged version. + Uses the local 'p1()' version of files as the merged version. ":merge" Uses the internal non-interactive simple merge algorithm for merging @@ -1546,11 +1588,11 @@ ":merge-local" Like :merge, but resolve all conflicts non-interactively in favor of the - local changes. + local 'p1()' changes. ":merge-other" Like :merge, but resolve all conflicts non-interactively in favor of the - other changes. + other 'p2()' changes. ":merge3" Uses the internal non-interactive simple merge algorithm for merging @@ -1559,11 +1601,11 @@ side of the merge and one for the base content. ":other" - Uses the other version of files as the merged version. + Uses the other 'p2()' version of files as the merged version. ":prompt" - Asks the user which of the local or the other version to keep as the - merged version. + Asks the user which of the local 'p1()' or the other 'p2()' version to + keep as the merged version. ":tagmerge" Uses the internal tag merge algorithm (experimental). 
@@ -2062,6 +2104,13 @@ show help for a given topic or a help overview </td></tr> <tr><td> + <a href="/help/hgalias"> + hgalias + </a> + </td><td> + summarize working directory state + </td></tr> + <tr><td> <a href="/help/identify"> identify </a> @@ -2153,6 +2202,13 @@ print the root (top) of the current working directory </td></tr> <tr><td> + <a href="/help/shellalias"> + shellalias + </a> + </td><td> + (no help text available) + </td></tr> + <tr><td> <a href="/help/tag"> tag </a> @@ -2495,7 +2551,7 @@ <td>record delete for missing files</td></tr> <tr><td>-f</td> <td>--force</td> - <td>remove (and delete) file even if added or modified</td></tr> + <td>forget added files, delete modified files</td></tr> <tr><td>-S</td> <td>--subrepos</td> <td>recurse into subrepositories</td></tr> @@ -2727,6 +2783,13 @@ representation of revlog data </td></tr> <tr><td> + <a href="/help/internals.requirements"> + requirements + </a> + </td><td> + repository requirements + </td></tr> + <tr><td> <a href="/help/internals.revlogs"> revlogs </a>
--- a/tests/test-hg-parseurl.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hg-parseurl.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,7 +1,11 @@ -from mercurial.hg import parseurl +from __future__ import absolute_import, print_function + +from mercurial import ( + hg, +) def testparse(url, branch=[]): - print '%s, branches: %r' % parseurl(url, branch) + print('%s, branches: %r' % hg.parseurl(url, branch)) testparse('http://example.com/no/anchor') testparse('http://example.com/an/anchor#foo')
--- a/tests/test-hgignore.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgignore.t Sat Apr 16 18:06:48 2016 -0500 @@ -286,3 +286,16 @@ $ hg debugignore dir1/file2 dir1/file2 is ignored (ignore rule in dir2/.hgignore, line 1: 'file*2') + +#if windows + +Windows paths are accepted on input + + $ rm dir1/.hgignore + $ echo "dir1/file*" >> .hgignore + $ hg debugignore "dir1\file2" + dir1\file2 is ignored + (ignore rule in $TESTTMP\ignorerepo\.hgignore, line 4: 'dir1/file*') + $ hg up -qC . + +#endif
--- a/tests/test-hgrc.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgrc.t Sat Apr 16 18:06:48 2016 -0500 @@ -46,6 +46,7 @@ default = $TESTTMP/foo%bar (glob) $ hg showconfig bundle.mainreporoot=$TESTTMP/foobar (glob) + extensions.chgserver= (?) paths.default=$TESTTMP/foo%bar (glob) $ cd .. @@ -80,6 +81,7 @@ $ hg showconfig --config ui.verbose=True --quiet bundle.mainreporoot=$TESTTMP + extensions.chgserver= (?) ui.verbose=False ui.debug=False ui.quiet=True @@ -111,6 +113,7 @@ $ hg showconfig bundle.mainreporoot=$TESTTMP + extensions.chgserver= (?) ui.username=$FAKEUSER $ unset FAKEUSER @@ -156,6 +159,7 @@ $TESTTMP/hgrc:13: alias.log=log -g repo: bundle.mainreporoot=$TESTTMP $TESTTMP/hgrc:11: defaults.identify=-n + --config: extensions.chgserver= (?) $TESTTMP/hgrc:2: ui.debug=true $TESTTMP/hgrc:3: ui.fallbackencoding=ASCII $TESTTMP/hgrc:4: ui.quiet=true @@ -171,6 +175,7 @@ $ hg showconfig --config ui.traceback=True --debug read config from: $TESTTMP/hgrc repo: bundle.mainreporoot=$TESTTMP + --config: extensions.chgserver= (?) --config: ui.traceback=True --verbose: ui.verbose=False --debug: ui.debug=True @@ -179,8 +184,12 @@ plain mode with exceptions $ cat > plain.py <<EOF + > from mercurial import commands, extensions + > def _config(orig, ui, repo, *values, **opts): + > ui.write('plain: %r\n' % ui.plain()) + > return orig(ui, repo, *values, **opts) > def uisetup(ui): - > ui.write('plain: %r\n' % ui.plain()) + > extensions.wrapcommand(commands.table, 'config', _config) > EOF $ echo "[extensions]" >> $HGRC $ echo "plain=./plain.py" >> $HGRC @@ -190,6 +199,7 @@ read config from: $TESTTMP/hgrc repo: bundle.mainreporoot=$TESTTMP $TESTTMP/hgrc:15: extensions.plain=./plain.py + --config: extensions.chgserver= (?) --config: ui.traceback=True --verbose: ui.verbose=False --debug: ui.debug=True @@ -200,6 +210,7 @@ read config from: $TESTTMP/hgrc repo: bundle.mainreporoot=$TESTTMP $TESTTMP/hgrc:15: extensions.plain=./plain.py + --config: extensions.chgserver= (?) 
--config: ui.traceback=True --verbose: ui.verbose=False --debug: ui.debug=True @@ -210,6 +221,7 @@ read config from: $TESTTMP/hgrc repo: bundle.mainreporoot=$TESTTMP $TESTTMP/hgrc:15: extensions.plain=./plain.py + --config: extensions.chgserver= (?) --config: ui.traceback=True --verbose: ui.verbose=False --debug: ui.debug=True
--- a/tests/test-hgweb-auth.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgweb-auth.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,10 +1,17 @@ +from __future__ import absolute_import, print_function + from mercurial import demandimport; demandimport.enable() -import urllib2 -from mercurial import ui, util -from mercurial import url -from mercurial.error import Abort +from mercurial import ( + error, + ui as uimod, + url, + util, +) -class myui(ui.ui): +urlerr = util.urlerr +urlreq = util.urlreq + +class myui(uimod.ui): def interactive(self): return False @@ -21,7 +28,7 @@ for k in sorted(dict.iterkeys())]) + '}' def test(auth, urls=None): - print 'CFG:', dumpdict(auth) + print('CFG:', dumpdict(auth)) prefixes = set() for k in auth: prefixes.add(k.split('.', 1)[0]) @@ -34,15 +41,15 @@ ui = writeauth(auth) def _test(uri): - print 'URI:', uri + print('URI:', uri) try: pm = url.passwordmgr(ui) u, authinfo = util.url(uri).authinfo() if authinfo is not None: pm.add_password(*authinfo) - print ' ', pm.find_user_password('test', u) - except Abort: - print 'abort' + print(' ', pm.find_user_password('test', u)) + except error.Abort: + print(' ','abort') if not urls: urls = [ @@ -59,25 +66,25 @@ _test(u) -print '\n*** Test in-uri schemes\n' +print('\n*** Test in-uri schemes\n') test({'x.prefix': 'http://example.org'}) test({'x.prefix': 'https://example.org'}) test({'x.prefix': 'http://example.org', 'x.schemes': 'https'}) test({'x.prefix': 'https://example.org', 'x.schemes': 'http'}) -print '\n*** Test separately configured schemes\n' +print('\n*** Test separately configured schemes\n') test({'x.prefix': 'example.org', 'x.schemes': 'http'}) test({'x.prefix': 'example.org', 'x.schemes': 'https'}) test({'x.prefix': 'example.org', 'x.schemes': 'http https'}) -print '\n*** Test prefix matching\n' +print('\n*** Test prefix matching\n') test({'x.prefix': 'http://example.org/foo', 'y.prefix': 'http://example.org/bar'}) test({'x.prefix': 'http://example.org/foo', 'y.prefix': 
'http://example.org/foo/bar'}) test({'x.prefix': '*', 'y.prefix': 'https://example.org/bar'}) -print '\n*** Test user matching\n' +print('\n*** Test user matching\n') test({'x.prefix': 'http://example.org/foo', 'x.username': None, 'x.password': 'xpassword'}, @@ -98,10 +105,10 @@ urls=['http://y@example.org/foo/bar']) def testauthinfo(fullurl, authurl): - print 'URIs:', fullurl, authurl - pm = urllib2.HTTPPasswordMgrWithDefaultRealm() + print('URIs:', fullurl, authurl) + pm = urlreq.httppasswordmgrwithdefaultrealm() pm.add_password(*util.url(fullurl).authinfo()[1]) - print pm.find_user_password('test', authurl) + print(pm.find_user_password('test', authurl)) -print '\n*** Test urllib2 and util.url\n' +print('\n*** Test urllib2 and util.url\n') testauthinfo('http://user@example.com:8080/foo', 'http://example.com:8080/foo')
--- a/tests/test-hgweb-commands.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgweb-commands.t Sat Apr 16 18:06:48 2016 -0500 @@ -1496,8 +1496,8 @@ $ get-with-headers.py 127.0.0.1:$HGPORT 'raw-bookmarks' 200 Script output follows + something cad8025a2e87f88c06259790adfa15acb4080123 anotherthing 2ef0ac749a14e4f57a5a822464a0902c6f7f448f - something cad8025a2e87f88c06259790adfa15acb4080123 $ get-with-headers.py 127.0.0.1:$HGPORT 'summary/?style=gitweb' 200 Script output follows @@ -1631,6 +1631,15 @@ <tr class="parity0"> <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td> + <td><a class="list" href="/rev/something?style=gitweb"><b>something</b></a></td> + <td class="link"> + <a href="/rev/cad8025a2e87?style=gitweb">changeset</a> | + <a href="/log/cad8025a2e87?style=gitweb">changelog</a> | + <a href="/file/cad8025a2e87?style=gitweb">files</a> + </td> + </tr> + <tr class="parity1"> + <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td> <td><a class="list" href="/rev/anotherthing?style=gitweb"><b>anotherthing</b></a></td> <td class="link"> <a href="/rev/2ef0ac749a14?style=gitweb">changeset</a> | @@ -1638,15 +1647,6 @@ <a href="/file/2ef0ac749a14?style=gitweb">files</a> </td> </tr> - <tr class="parity1"> - <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td> - <td><a class="list" href="/rev/something?style=gitweb"><b>something</b></a></td> - <td class="link"> - <a href="/rev/cad8025a2e87?style=gitweb">changeset</a> | - <a href="/log/cad8025a2e87?style=gitweb">changelog</a> | - <a href="/file/cad8025a2e87?style=gitweb">files</a> - </td> - </tr> <tr class="light"><td colspan="3"><a class="list" href="/bookmarks?style=gitweb">...</a></td></tr> </table>
--- a/tests/test-hgweb-empty.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgweb-empty.t Sat Apr 16 18:06:48 2016 -0500 @@ -461,4 +461,20 @@ </html> + $ (get-with-headers.py localhost:$HGPORT 'atom-bookmarks') + 200 Script output follows + + <?xml version="1.0" encoding="ascii"?> + <feed xmlns="http://www.w3.org/2005/Atom"> + <id>http://*:$HGPORT/</id> (glob) + <link rel="self" href="http://*:$HGPORT/atom-bookmarks"/> (glob) + <link rel="alternate" href="http://*:$HGPORT/bookmarks"/> (glob) + <title>test: bookmarks</title> + <summary>test bookmark history</summary> + <author><name>Mercurial SCM</name></author> + <updated>1970-01-01T00:00:00+00:00</updated> + + + </feed> + $ cd ..
--- a/tests/test-hgweb-json.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgweb-json.t Sat Apr 16 18:06:48 2016 -0500 @@ -1,4 +1,3 @@ -#require json #require serve $ request() { @@ -176,6 +175,10 @@ ], "desc": "merge test-branch into default", "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7", + "parents": [ + "ceed296fe500c3fac9541e31dad860cb49c89e45", + "ed66c30e87eb65337c05a4229efaa5f1d5285a90" + ], "tags": [ "tip" ], @@ -189,6 +192,9 @@ ], "desc": "another commit in test-branch", "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90", + "parents": [ + "6ab967a8ab3489227a83f80e920faa039a71819f" + ], "tags": [], "user": "test" }, @@ -200,6 +206,9 @@ ], "desc": "create test branch", "node": "6ab967a8ab3489227a83f80e920faa039a71819f", + "parents": [ + "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e" + ], "tags": [], "user": "test" }, @@ -213,6 +222,9 @@ ], "desc": "create tag2", "node": "ceed296fe500c3fac9541e31dad860cb49c89e45", + "parents": [ + "f2890a05fea49bfaf9fb27ed5490894eba32da78" + ], "tags": [], "user": "test" }, @@ -224,6 +236,9 @@ ], "desc": "another commit to da/foo", "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78", + "parents": [ + "93a8ce14f89156426b7fa981af8042da53f03aa0" + ], "tags": [ "tag2" ], @@ -237,6 +252,9 @@ ], "desc": "create tag", "node": "93a8ce14f89156426b7fa981af8042da53f03aa0", + "parents": [ + "78896eb0e102174ce9278438a95e12543e4367a7" + ], "tags": [], "user": "test" }, @@ -248,6 +266,9 @@ ], "desc": "move foo", "node": "78896eb0e102174ce9278438a95e12543e4367a7", + "parents": [ + "8d7c456572acf3557e8ed8a07286b10c408bcec5" + ], "tags": [ "tag1" ], @@ -263,6 +284,9 @@ ], "desc": "modify da/foo", "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5", + "parents": [ + "f8bbb9024b10f93cdbb8d940337398291d40dea8" + ], "tags": [], "user": "test" }, @@ -274,6 +298,9 @@ ], "desc": "modify foo", "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", + "parents": [ + "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e" + ], "tags": [], "user": "test" }, @@ -285,6 
+312,7 @@ ], "desc": "initial", "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e", + "parents": [], "tags": [], "user": "test" } @@ -308,6 +336,9 @@ ], "desc": "modify foo", "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", + "parents": [ + "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e" + ], "tags": [], "user": "test" }, @@ -319,6 +350,7 @@ ], "desc": "initial", "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e", + "parents": [], "tags": [], "user": "test" } @@ -342,6 +374,10 @@ ], "desc": "merge test-branch into default", "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7", + "parents": [ + "ceed296fe500c3fac9541e31dad860cb49c89e45", + "ed66c30e87eb65337c05a4229efaa5f1d5285a90" + ], "tags": [ "tip" ], @@ -355,6 +391,9 @@ ], "desc": "another commit in test-branch", "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90", + "parents": [ + "6ab967a8ab3489227a83f80e920faa039a71819f" + ], "tags": [], "user": "test" }, @@ -366,6 +405,9 @@ ], "desc": "create test branch", "node": "6ab967a8ab3489227a83f80e920faa039a71819f", + "parents": [ + "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e" + ], "tags": [], "user": "test" }, @@ -379,6 +421,9 @@ ], "desc": "create tag2", "node": "ceed296fe500c3fac9541e31dad860cb49c89e45", + "parents": [ + "f2890a05fea49bfaf9fb27ed5490894eba32da78" + ], "tags": [], "user": "test" }, @@ -390,6 +435,9 @@ ], "desc": "another commit to da/foo", "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78", + "parents": [ + "93a8ce14f89156426b7fa981af8042da53f03aa0" + ], "tags": [ "tag2" ], @@ -403,6 +451,9 @@ ], "desc": "create tag", "node": "93a8ce14f89156426b7fa981af8042da53f03aa0", + "parents": [ + "78896eb0e102174ce9278438a95e12543e4367a7" + ], "tags": [], "user": "test" }, @@ -414,6 +465,9 @@ ], "desc": "move foo", "node": "78896eb0e102174ce9278438a95e12543e4367a7", + "parents": [ + "8d7c456572acf3557e8ed8a07286b10c408bcec5" + ], "tags": [ "tag1" ], @@ -429,6 +483,9 @@ ], "desc": "modify da/foo", "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5", + "parents": [ + 
"f8bbb9024b10f93cdbb8d940337398291d40dea8" + ], "tags": [], "user": "test" }, @@ -440,6 +497,9 @@ ], "desc": "modify foo", "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", + "parents": [ + "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e" + ], "tags": [], "user": "test" }, @@ -451,6 +511,7 @@ ], "desc": "initial", "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e", + "parents": [], "tags": [], "user": "test" } @@ -619,20 +680,20 @@ { "bookmarks": [ { + "bookmark": "bookmark2", + "date": [ + 0.0, + 0 + ], + "node": "ceed296fe500c3fac9541e31dad860cb49c89e45" + }, + { "bookmark": "bookmark1", "date": [ 0.0, 0 ], "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5" - }, - { - "bookmark": "bookmark2", - "date": [ - 0.0, - 0 - ], - "node": "ceed296fe500c3fac9541e31dad860cb49c89e45" } ], "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7"
--- a/tests/test-hgweb-no-path-info.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgweb-no-path-info.t Sat Apr 16 18:06:48 2016 -0500 @@ -15,12 +15,20 @@ summary: test $ cat > request.py <<EOF - > from mercurial.hgweb import hgweb, hgwebdir - > from StringIO import StringIO - > import os, sys + > from __future__ import absolute_import + > import os + > import sys + > from mercurial.hgweb import ( + > hgweb, + > hgwebdir, + > ) + > from mercurial import ( + > util, + > ) + > stringio = util.stringio > - > errors = StringIO() - > input = StringIO() + > errors = stringio() + > input = stringio() > > def startrsp(status, headers): > print '---- STATUS' @@ -54,11 +62,11 @@ > print '---- ERRORS' > print errors.getvalue() > - > output = StringIO() + > output = stringio() > env['QUERY_STRING'] = 'style=atom' > process(hgweb('.', name='repo')) > - > output = StringIO() + > output = stringio() > env['QUERY_STRING'] = 'style=raw' > process(hgwebdir({'repo': '.'})) > EOF
--- a/tests/test-hgweb-no-request-uri.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgweb-no-request-uri.t Sat Apr 16 18:06:48 2016 -0500 @@ -15,12 +15,20 @@ summary: test $ cat > request.py <<EOF - > from mercurial.hgweb import hgweb, hgwebdir - > from StringIO import StringIO - > import os, sys + > from __future__ import absolute_import + > import os + > import sys + > from mercurial.hgweb import ( + > hgweb, + > hgwebdir, + > ) + > from mercurial import ( + > util, + > ) + > stringio = util.stringio > - > errors = StringIO() - > input = StringIO() + > errors = stringio() + > input = stringio() > > def startrsp(status, headers): > print '---- STATUS' @@ -53,22 +61,22 @@ > print '---- ERRORS' > print errors.getvalue() > - > output = StringIO() + > output = stringio() > env['PATH_INFO'] = '/' > env['QUERY_STRING'] = 'style=atom' > process(hgweb('.', name = 'repo')) > - > output = StringIO() + > output = stringio() > env['PATH_INFO'] = '/file/tip/' > env['QUERY_STRING'] = 'style=raw' > process(hgweb('.', name = 'repo')) > - > output = StringIO() + > output = stringio() > env['PATH_INFO'] = '/' > env['QUERY_STRING'] = 'style=raw' > process(hgwebdir({'repo': '.'})) > - > output = StringIO() + > output = stringio() > env['PATH_INFO'] = '/repo/file/tip/' > env['QUERY_STRING'] = 'style=raw' > process(hgwebdir({'repo': '.'}))
--- a/tests/test-hgweb-non-interactive.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgweb-non-interactive.t Sat Apr 16 18:06:48 2016 -0500 @@ -7,12 +7,20 @@ $ hg add bar $ hg commit -m "test" $ cat > request.py <<EOF - > from mercurial import dispatch - > from mercurial.hgweb.hgweb_mod import hgweb - > from mercurial.ui import ui - > from mercurial import hg - > from StringIO import StringIO - > import os, sys + > from __future__ import absolute_import + > import os + > import sys + > from mercurial import ( + > dispatch, + > hg, + > ui as uimod, + > util, + > ) + > ui = uimod.ui + > from mercurial.hgweb.hgweb_mod import ( + > hgweb, + > ) + > stringio = util.stringio > > class FileLike(object): > def __init__(self, real): @@ -28,9 +36,9 @@ > return self.real.readline() > > sys.stdin = FileLike(sys.stdin) - > errors = StringIO() - > input = StringIO() - > output = StringIO() + > errors = stringio() + > input = stringio() + > output = stringio() > > def startrsp(status, headers): > print '---- STATUS'
--- a/tests/test-hgweb.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgweb.t Sat Apr 16 18:06:48 2016 -0500 @@ -700,7 +700,7 @@ Uncaught exceptions result in a logged error and canned HTTP response $ killdaemons.py - $ hg --config extensions.hgweberror=$TESTDIR/hgweberror.py serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log + $ hg serve --config extensions.hgweberror=$TESTDIR/hgweberror.py -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT 'raiseerror' transfer-encoding content-type @@ -716,7 +716,7 @@ Uncaught exception after partial content sent - $ hg --config extensions.hgweberror=$TESTDIR/hgweberror.py serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log + $ hg serve --config extensions.hgweberror=$TESTDIR/hgweberror.py -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT 'raiseerror?partialresponse=1' transfer-encoding content-type 200 Script output follows
--- a/tests/test-hgwebdir-paths.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgwebdir-paths.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,13 +1,21 @@ +from __future__ import absolute_import + import os -from mercurial import hg, ui -from mercurial.hgweb.hgwebdir_mod import hgwebdir +from mercurial import ( + hg, + ui as uimod, +) +from mercurial.hgweb import ( + hgwebdir_mod, +) +hgwebdir = hgwebdir_mod.hgwebdir os.mkdir('webdir') os.chdir('webdir') webdir = os.path.realpath('.') -u = ui.ui() +u = uimod.ui() hg.repository(u, 'a', create=1) hg.repository(u, 'b', create=1) os.chdir('b')
--- a/tests/test-hgwebdir.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hgwebdir.t Sat Apr 16 18:06:48 2016 -0500 @@ -100,6 +100,23 @@ /a/ /b/ + $ get-with-headers.py localhost:$HGPORT '?style=json' + 200 Script output follows + + { + "entries": [{ + "name": "a", + "description": "unknown", + "contact": "Foo Bar \u003cfoo.bar@example.com\u003e", + "lastchange": [*, *] (glob) + }, { + "name": "b", + "description": "unknown", + "contact": "Foo Bar \u003cfoo.bar@example.com\u003e", + "lastchange": [*, *] (glob) + }] + } (no-eol) + $ get-with-headers.py localhost:$HGPORT 'a/file/tip/a?style=raw' 200 Script output follows
--- a/tests/test-highlight.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-highlight.t Sat Apr 16 18:06:48 2016 -0500 @@ -10,6 +10,12 @@ $ hg init test $ cd test + $ filterhtml () { + > sed -e "s/class=\"k\"/class=\"kn\"/g" \ + > -e "s/class=\"mf\"/class=\"mi\"/g" \ + > -e "s/class=\"\([cs]\)[h12]\"/class=\"\1\"/g" + > } + create random Python file to exercise Pygments $ cat <<EOF > primes.py @@ -57,8 +63,7 @@ hgweb filerevision, html - $ (get-with-headers.py localhost:$HGPORT 'file/tip/primes.py') \ - > | sed "s/class=\"k\"/class=\"kn\"/g" | sed "s/class=\"mf\"/class=\"mi\"/g" + $ (get-with-headers.py localhost:$HGPORT 'file/tip/primes.py') | filterhtml 200 Script output follows <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"> @@ -190,8 +195,7 @@ hgweb fileannotate, html - $ (get-with-headers.py localhost:$HGPORT 'annotate/tip/primes.py') \ - > | sed "s/class=\"k\"/class=\"kn\"/g" | sed "s/class=\"mi\"/class=\"mf\"/g" + $ (get-with-headers.py localhost:$HGPORT 'annotate/tip/primes.py') | filterhtml 200 Script output follows <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"> @@ -408,7 +412,7 @@ <a href="/annotate/06824edf55d0/primes.py#l18" title="06824edf55d0: a">test@0</a> </td> - <td class="source"><a href="#l18"> 18</a> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mf">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></td> + <td class="source"><a href="#l18"> 18</a> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span 
class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></td> </tr> <tr id="l19"> <td class="annotate"> @@ -436,14 +440,14 @@ <a href="/annotate/06824edf55d0/primes.py#l22" title="06824edf55d0: a">test@0</a> </td> - <td class="source"><a href="#l22"> 22</a> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mf">2</span> <span class="o">==</span> <span class="mf">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></td> + <td class="source"><a href="#l22"> 22</a> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></td> </tr> <tr id="l23"> <td class="annotate"> <a href="/annotate/06824edf55d0/primes.py#l23" title="06824edf55d0: a">test@0</a> </td> - <td class="source"><a href="#l23"> 23</a> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mf">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mf">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></td> + <td class="source"><a href="#l23"> 23</a> <span class="kn">return</span> <span class="n">chain</span><span 
class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></td> </tr> <tr id="l24"> <td class="annotate"> @@ -478,7 +482,7 @@ <a href="/annotate/06824edf55d0/primes.py#l28" title="06824edf55d0: a">test@0</a> </td> - <td class="source"><a href="#l28"> 28</a> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mf">1</span><span class="p">])</span></td> + <td class="source"><a href="#l28"> 28</a> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></td> </tr> <tr id="l29"> <td class="annotate"> @@ -492,7 +496,7 @@ <a href="/annotate/06824edf55d0/primes.py#l30" title="06824edf55d0: a">test@0</a> </td> - <td class="source"><a href="#l30"> 30</a> <span class="n">n</span> <span class="o">=</span> <span class="mf">10</span></td> + <td class="source"><a href="#l30"> 30</a> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></td> </tr> <tr id="l31"> <td class="annotate">
--- a/tests/test-histedit-arguments.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-arguments.t Sat Apr 16 18:06:48 2016 -0500 @@ -63,6 +63,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending @@ -124,6 +126,7 @@ | o 2 eb57 three | + ~ $ HGEDITOR=cat hg histedit -r 4 --commands - << EOF > edit 08d98a8350f3 4 five > EOF @@ -133,6 +136,11 @@ (hg histedit --continue to resume) [1] + $ hg graft --continue + abort: no graft in progress + (continue: hg histedit --continue) + [255] + $ mv .hg/histedit-state .hg/histedit-state.back $ hg update --quiet --clean 2 $ echo alpha >> alpha @@ -147,6 +155,7 @@ |/ o 2 eb57 three | + ~ $ hg unbundle -q $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-backup.hg $ hg strip -q -r f5ed --config extensions.strip= @@ -243,9 +252,6 @@ > p c8e68270e35a 3 four > f 08d98a8350f3 4 five > EOF - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - reverting alpha - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved four *** five @@ -258,7 +264,6 @@ HG: user: test HG: branch 'default' HG: changed alpha - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/foo/.hg/strip-backup/*-backup.hg (glob) saved backup bundle to $TESTTMP/foo/.hg/strip-backup/*-backup.hg (glob) @@ -294,6 +299,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending @@ -449,3 +456,46 @@ > pick 6f2f0241f119 > pick 8cde254db839 > EOF + +commit --amend should abort if histedit is in progress +(issue4800) and markers are not being created. +Eventually, histedit could perhaps look at `source` extra, +in which case this test should be revisited. 
+ + $ hg -q up 8cde254db839 + $ hg histedit 6f2f0241f119 --commands - <<EOF + > pick 8cde254db839 + > edit 6f2f0241f119 + > EOF + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + merging foo + warning: conflicts while merging foo! (edit, then use 'hg resolve --mark') + Fix up the change (pick 8cde254db839) + (hg histedit --continue to resume) + [1] + $ hg resolve -m --all + (no more unresolved files) + continue: hg histedit --continue + $ hg histedit --cont + merging foo + warning: conflicts while merging foo! (edit, then use 'hg resolve --mark') + Editing (6f2f0241f119), you may commit or record as needed now. + (hg histedit --continue to resume) + [1] + $ hg resolve -m --all + (no more unresolved files) + continue: hg histedit --continue + $ hg commit --amend -m 'reject this fold' + abort: histedit in progress + (use 'hg histedit --continue' or 'hg histedit --abort') + [255] + +With markers enabled, histedit does not get confused, and +amend should not be blocked by the ongoing histedit. + + $ cat >>$HGRCPATH <<EOF + > [experimental] + > evolution=createmarkers,allowunstable + > EOF + $ hg commit --amend -m 'allow this fold' + $ hg histedit --continue
--- a/tests/test-histedit-bookmark-motion.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-bookmark-motion.t Sat Apr 16 18:06:48 2016 -0500 @@ -69,6 +69,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending @@ -130,6 +132,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending
--- a/tests/test-histedit-commute.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-commute.t Sat Apr 16 18:06:48 2016 -0500 @@ -63,6 +63,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending @@ -104,7 +106,6 @@ > pick 055a42cdd887 d > EOF $ HGEDITOR="cat \"$EDITED\" > " hg histedit 177f92b77385 2>&1 | fixbundle - 0 files updated, 0 files merged, 3 files removed, 0 files unresolved log after edit $ hg log --graph @@ -148,7 +149,6 @@ > pick d8249471110a e > pick 8ade9693061e f > EOF - 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg log --graph @ changeset: 5:7eca9b5b1148 @@ -191,7 +191,6 @@ > pick 915da888f2de e > pick 177f92b77385 c > EOF - 0 files updated, 0 files merged, 4 files removed, 0 files unresolved $ hg log --graph @ changeset: 5:38b92f448761 | tag: tip @@ -232,7 +231,6 @@ > pick 38b92f448761 c > pick de71b079d9ce e > EOF - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log --graph @ changeset: 7:803ef1c6fcfd | tag: tip @@ -343,6 +341,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending @@ -417,11 +417,6 @@ > EOF $ HGEDITOR="sh ./editor.sh" hg histedit 0 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - adding another-dir/initial-file (glob) - removing initial-dir/initial-file (glob) - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/*-backup.hg (glob) saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/*-backup.hg (glob)
--- a/tests/test-histedit-drop.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-drop.t Sat Apr 16 18:06:48 2016 -0500 @@ -59,7 +59,6 @@ > pick 652413bf663e f > pick 055a42cdd887 d > EOF - 0 files updated, 0 files merged, 4 files removed, 0 files unresolved log after edit $ hg log --graph @@ -124,7 +123,6 @@ > pick a4f7421b80f7 f > drop f518305ce889 d > EOF - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log --graph @ changeset: 3:a4f7421b80f7 | tag: tip @@ -155,10 +153,13 @@ hg: parse error: missing rules for changeset a4f7421b80f7 (use "drop a4f7421b80f7" to discard, see also: "hg help -e histedit.config") $ hg --config histedit.dropmissing=True histedit cb9a9f314b8b --commands - 2>&1 << EOF | fixbundle + > EOF + hg: parse error: no rules provided + (use strip extension to remove commits) + $ hg --config histedit.dropmissing=True histedit cb9a9f314b8b --commands - 2>&1 << EOF | fixbundle > pick cb9a9f314b8b a > pick ee283cb5f2d5 e > EOF - 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg log --graph @ changeset: 1:e99c679bf03e | tag: tip
--- a/tests/test-histedit-edit.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-edit.t Sat Apr 16 18:06:48 2016 -0500 @@ -77,6 +77,12 @@ Editing (e860deea161a), you may commit or record as needed now. (hg histedit --continue to resume) +try to update and get an error + $ hg update tip + abort: histedit in progress + (use 'hg histedit --continue' or 'hg histedit --abort') + [255] + edit the plan via the editor $ cat >> $TESTTMP/editplan.sh <<EOF > cat > \$1 <<EOF2 @@ -286,7 +292,6 @@ > mv tmp "\$1" > EOF $ HGEDITOR="sh ../edit.sh" hg histedit tip 2>&1 | fixbundle - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg status $ hg log --limit 1 changeset: 6:1fd3b2fe7754 @@ -327,7 +332,6 @@ $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit tip --commands - 2>&1 << EOF | fixbundle > mess 1fd3b2fe7754 f > EOF - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved abort: emulating unexpected abort $ test -f .hg/last-message.txt [1] @@ -354,8 +358,6 @@ $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit tip --commands - 2>&1 << EOF > mess 1fd3b2fe7754 f > EOF - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - adding f ==== before editing f @@ -408,7 +410,6 @@ $ hg histedit tip --commands - 2>&1 << EOF | fixbundle > mess 1fd3b2fe7754 f > EOF - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg status $ hg log --limit 1 changeset: 6:62feedb1200e @@ -468,6 +469,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending
--- a/tests/test-histedit-fold-non-commute.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-fold-non-commute.t Sat Apr 16 18:06:48 2016 -0500 @@ -97,14 +97,7 @@ $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue - $ cat > cat.py <<EOF - > import sys - > print open(sys.argv[1]).read() - > print - > print - > EOF - $ HGEDITOR="python cat.py" hg histedit --continue 2>&1 | fixbundle | grep -v '2 files removed' - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ HGEDITOR=cat hg histedit --continue 2>&1 | fixbundle | grep -v '2 files removed' d *** does not commute with e @@ -118,22 +111,20 @@ HG: branch 'default' HG: changed d HG: changed e - - - - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved merging e warning: conflicts while merging e! (edit, then use 'hg resolve --mark') Fix up the change (pick 7b4e2f4b7bcd) (hg histedit --continue to resume) just continue this time +keep the non-commuting change, and thus the pending change will be dropped $ hg revert -r 'p1()' e $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue + $ hg diff $ hg histedit --continue 2>&1 | fixbundle - 7b4e2f4b7bcd: empty changeset + 7b4e2f4b7bcd: skipping changeset (no changes) log after edit $ hg log --graph @@ -262,8 +253,6 @@ (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle | grep -v '2 files removed' - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved merging e warning: conflicts while merging e! (edit, then use 'hg resolve --mark') Fix up the change (pick 7b4e2f4b7bcd) @@ -275,7 +264,7 @@ (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle - 7b4e2f4b7bcd: empty changeset + 7b4e2f4b7bcd: skipping changeset (no changes) log after edit $ hg log --graph
--- a/tests/test-histedit-fold.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-fold.t Sat Apr 16 18:06:48 2016 -0500 @@ -54,9 +54,6 @@ > fold 177f92b77385 c > pick 055a42cdd887 d > EOF - 0 files updated, 0 files merged, 4 files removed, 0 files unresolved - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved log after edit $ hg logt --graph @@ -111,9 +108,6 @@ > pick 6de59d13424a f > pick 9c277da72c9b d > EOF - 0 files updated, 0 files merged, 4 files removed, 0 files unresolved - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ HGEDITOR=$OLDHGEDITOR @@ -177,10 +171,7 @@ > pick 8e03a72b6f83 f > fold c4a9eb7989fc d > EOF - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - adding d allow non-folding commit - 0 files updated, 0 files merged, 3 files removed, 0 files unresolved ==== before editing f *** @@ -242,9 +233,6 @@ > EOF editing: pick e860deea161a 4 e 1/2 changes (50.00%) editing: fold a00ad806cb55 5 f 2/2 changes (100.00%) - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved tip after edit $ hg log --rev . 
@@ -372,7 +360,6 @@ created new head $ echo 6 >> file $ HGEDITOR=cat hg histedit --continue - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved +4 *** +5.2 @@ -387,7 +374,6 @@ HG: user: test HG: branch 'default' HG: changed file - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/55c8d8dc79ce-4066cd98-backup.hg (glob) saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-a35700fc-backup.hg (glob) $ hg logt -G @@ -443,10 +429,6 @@ > pick 1c4f440a8085 rename > fold e0371e0426bc b > EOF - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - reverting b.txt - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg logt --follow b.txt 1:cf858d235c76 rename @@ -489,9 +471,6 @@ > fold a1a953ffb4b0 c > pick 6c795aa153cb a > EOF - 0 files updated, 0 files merged, 3 files removed, 0 files unresolved - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved commit 9599899f62c05f4377548c32bf1c9f1a39634b0c $ hg logt @@ -530,13 +509,6 @@ > fold b7389cc4d66e 3 foo2 > fold 21679ff7675c 4 foo3 > EOF - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - reverting foo - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - merging foo - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg logt 2:e8bedbda72c1 merged foos 1:578c7455730c a
--- a/tests/test-histedit-non-commute-abort.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-non-commute-abort.t Sat Apr 16 18:06:48 2016 -0500 @@ -69,7 +69,6 @@ > pick e860deea161a e > pick 652413bf663e f > EOF - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved merging e warning: conflicts while merging e! (edit, then use 'hg resolve --mark') Fix up the change (pick e860deea161a) @@ -81,7 +80,11 @@ * version 2 records local: 8f7551c7e4a2f2efe0bc8c741baf7f227d65d758 other: e860deea161a2f77de56603b340ebbb4536308ae + labels: + local: local + other: histedit unrecognized entry: x advisory record + file extras: e (ancestorlinknode = 0000000000000000000000000000000000000000) file: e (record type "F", state "u", hash 58e6b3a414a1e090dfc6029add0f3555ccba127f) local path: e (flags "") ancestor path: e (node null) @@ -95,6 +98,10 @@ * version 2 records local: 8f7551c7e4a2f2efe0bc8c741baf7f227d65d758 other: e860deea161a2f77de56603b340ebbb4536308ae + labels: + local: local + other: histedit + file extras: e (ancestorlinknode = 0000000000000000000000000000000000000000) file: e (record type "F", state "u", hash 58e6b3a414a1e090dfc6029add0f3555ccba127f) local path: e (flags "") ancestor path: e (node null)
--- a/tests/test-histedit-non-commute.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-non-commute.t Sat Apr 16 18:06:48 2016 -0500 @@ -171,7 +171,7 @@ (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle - 7b4e2f4b7bcd: empty changeset + 7b4e2f4b7bcd: skipping changeset (no changes) log after edit $ hg log --graph @@ -254,7 +254,7 @@ (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle - 7b4e2f4b7bcd: empty changeset + 7b4e2f4b7bcd: skipping changeset (no changes) post message fix $ hg log --graph
--- a/tests/test-histedit-obsolete.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-obsolete.t Sat Apr 16 18:06:48 2016 -0500 @@ -14,6 +14,85 @@ > rebase= > EOF +Test that histedit learns about obsolescence not stored in histedit state + $ hg init boo + $ cd boo + $ echo a > a + $ hg ci -Am a + adding a + $ echo a > b + $ echo a > c + $ echo a > c + $ hg ci -Am b + adding b + adding c + $ echo a > d + $ hg ci -Am c + adding d + $ echo "pick `hg log -r 0 -T '{node|short}'`" > plan + $ echo "pick `hg log -r 2 -T '{node|short}'`" >> plan + $ echo "edit `hg log -r 1 -T '{node|short}'`" >> plan + $ hg histedit -r 'all()' --commands plan + Editing (1b2d564fad96), you may commit or record as needed now. + (hg histedit --continue to resume) + [1] + $ hg st + A b + A c + ? plan + $ hg commit --amend b + $ hg histedit --continue + $ hg log -G + @ 6:46abc7c4d873 b + | + o 5:49d44ab2be1b c + | + o 0:cb9a9f314b8b a + + $ hg debugobsolete + e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (*) {'user': 'test'} (glob) + 3e30a45cf2f719e96ab3922dfe039cfd047956ce 0 {e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf} (*) {'user': 'test'} (glob) + 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (*) {'user': 'test'} (glob) + 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (*) {'user': 'test'} (glob) + +With some node gone missing during the edit. + + $ echo "pick `hg log -r 0 -T '{node|short}'`" > plan + $ echo "pick `hg log -r 6 -T '{node|short}'`" >> plan + $ echo "edit `hg log -r 5 -T '{node|short}'`" >> plan + $ hg histedit -r 'all()' --commands plan + Editing (49d44ab2be1b), you may commit or record as needed now. + (hg histedit --continue to resume) + [1] + $ hg st + A b + A d + ? plan + $ hg commit --amend -X . -m XXXXXX + $ hg commit --amend -X . 
-m b2 + $ hg --hidden --config extensions.strip= strip 'desc(XXXXXX)' --no-backup + $ hg histedit --continue + $ hg log -G + @ 9:273c1f3b8626 c + | + o 8:aba7da937030 b2 + | + o 0:cb9a9f314b8b a + + $ hg debugobsolete + e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf 49d44ab2be1b67a79127568a67c9c99430633b48 0 (*) {'user': 'test'} (glob) + 3e30a45cf2f719e96ab3922dfe039cfd047956ce 0 {e72d22b19f8ecf4150ab4f91d0973fd9955d3ddf} (*) {'user': 'test'} (glob) + 1b2d564fad96311b45362f17c2aa855150efb35f 46abc7c4d8738e8563e577f7889e1b6db3da4199 0 (*) {'user': 'test'} (glob) + 114f4176969ef342759a8a57e6bccefc4234829b 49d44ab2be1b67a79127568a67c9c99430633b48 0 (*) {'user': 'test'} (glob) + 76f72745eac0643d16530e56e2f86e36e40631f1 2ca853e48edbd6453a0674dc0fe28a0974c51b9c 0 (*) {'user': 'test'} (glob) + 2ca853e48edbd6453a0674dc0fe28a0974c51b9c aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (*) {'user': 'test'} (glob) + 49d44ab2be1b67a79127568a67c9c99430633b48 273c1f3b86267ed3ec684bb13af1fa4d6ba56e02 0 (*) {'user': 'test'} (glob) + 46abc7c4d8738e8563e577f7889e1b6db3da4199 aba7da93703075eec9fb1dbaf143ff2bc1c49d46 0 (*) {'user': 'test'} (glob) + $ cd .. 
+ +Base setup for the rest of the testing +====================================== + $ hg init base $ cd base @@ -48,6 +127,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending @@ -108,7 +189,6 @@ > drop 59d9f330561f 7 d > pick cacdfd884a93 8 f > EOF - 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg log --graph @ 11:c13eb81022ca f | @@ -167,7 +247,6 @@ > pick 40db8afa467b 10 c > drop b449568bf7fc 11 f > EOF - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -G @ 12:40db8afa467b c | @@ -187,7 +266,6 @@ > pick 40db8afa467b 10 c > drop 1b3b05f35ff0 13 h > EOF - 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg log -G @ 17:ee6544123ab8 c | @@ -298,7 +376,7 @@ $ cd .. -New-commit as draft (default) +New-commit as secret (config) $ cp -r base simple-secret $ cd simple-secret @@ -357,7 +435,6 @@ > pick 7395e1ff83bd 13 h > pick ee118ab9fa44 16 k > EOF - 0 files updated, 0 files merged, 5 files removed, 0 files unresolved $ hg log -G @ 23:558246857888 (secret) k | @@ -399,13 +476,6 @@ > pick b605fb7503f2 14 i > fold ee118ab9fa44 16 k > EOF - 0 files updated, 0 files merged, 6 files removed, 0 files unresolved - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - 0 files updated, 0 files merged, 2 files removed, 0 files unresolved - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -G @ 27:f9daec13fb98 (secret) i |
--- a/tests/test-histedit-outgoing.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-histedit-outgoing.t Sat Apr 16 18:06:48 2016 -0500 @@ -45,6 +45,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending @@ -77,6 +79,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending @@ -101,6 +105,8 @@ # # Commits are listed from least to most recent # + # You can reorder changesets by reordering the lines + # # Commands: # # e, edit = use commit, but stop for amending
--- a/tests/test-hook.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hook.t Sat Apr 16 18:06:48 2016 -0500 @@ -436,6 +436,10 @@ > unreachable = 1 > EOF + $ cat > syntaxerror.py << EOF + > (foo + > EOF + test python hooks #if windows @@ -480,7 +484,7 @@ $ hg pull ../a pulling from ../a searching for changes - abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable) + abort: preoutgoing.uncallable hook is invalid: "hooktests.uncallable" is not callable [255] $ echo '[hooks]' > ../a/.hg/hgrc @@ -488,7 +492,7 @@ $ hg pull ../a pulling from ../a searching for changes - abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined) + abort: preoutgoing.nohook hook is invalid: "hooktests.nohook" is not defined [255] $ echo '[hooks]' > ../a/.hg/hgrc @@ -496,7 +500,7 @@ $ hg pull ../a pulling from ../a searching for changes - abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module) + abort: preoutgoing.nomodule hook is invalid: "nomodule" not in a module [255] $ echo '[hooks]' > ../a/.hg/hgrc @@ -504,7 +508,8 @@ $ hg pull ../a pulling from ../a searching for changes - abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed) + abort: preoutgoing.badmodule hook is invalid: import of "nomodule" failed + (run with --traceback for stack trace) [255] $ echo '[hooks]' > ../a/.hg/hgrc @@ -512,9 +517,34 @@ $ hg pull ../a pulling from ../a searching for changes - abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed) + abort: preoutgoing.unreachable hook is invalid: import of "hooktests.container" failed + (run with --traceback for stack trace) + [255] + + $ echo '[hooks]' > ../a/.hg/hgrc + $ echo 'preoutgoing.syntaxerror = python:syntaxerror.syntaxerror' >> ../a/.hg/hgrc + $ hg pull ../a + pulling from ../a + searching for changes + abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed + (run with --traceback for stack trace) [255] +The second 
egrep is to filter out lines like ' ^', which are slightly +different between Python 2.6 and Python 2.7. + $ hg pull ../a --traceback 2>&1 | egrep -v '^( +File| [_a-zA-Z*(])' | egrep -v '^( )+(\^)?$' + pulling from ../a + searching for changes + exception from first failed import attempt: + Traceback (most recent call last): + SyntaxError: * (glob) + exception from second failed import attempt: + Traceback (most recent call last): + ImportError: No module named hgext_syntaxerror + Traceback (most recent call last): + HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed + abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed + $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc $ hg pull ../a @@ -530,6 +560,46 @@ adding remote bookmark quux (run 'hg update' to get a working copy) +post- python hooks that fail to *run* don't cause an abort + $ rm ../a/.hg/hgrc + $ echo '[hooks]' > .hg/hgrc + $ echo 'post-pull.broken = python:hooktests.brokenhook' >> .hg/hgrc + $ hg pull ../a + pulling from ../a + searching for changes + no changes found + error: post-pull.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict' + (run with --traceback for stack trace) + +but post- python hooks that fail to *load* do + $ echo '[hooks]' > .hg/hgrc + $ echo 'post-pull.nomodule = python:nomodule' >> .hg/hgrc + $ hg pull ../a + pulling from ../a + searching for changes + no changes found + abort: post-pull.nomodule hook is invalid: "nomodule" not in a module + [255] + + $ echo '[hooks]' > .hg/hgrc + $ echo 'post-pull.badmodule = python:nomodule.nowhere' >> .hg/hgrc + $ hg pull ../a + pulling from ../a + searching for changes + no changes found + abort: post-pull.badmodule hook is invalid: import of "nomodule" failed + (run with --traceback for stack trace) + [255] + + $ echo '[hooks]' > .hg/hgrc + $ echo 'post-pull.nohook = python:hooktests.nohook' >> 
.hg/hgrc + $ hg pull ../a + pulling from ../a + searching for changes + no changes found + abort: post-pull.nohook hook is invalid: "hooktests.nohook" is not defined + [255] + make sure --traceback works $ echo '[hooks]' > .hg/hgrc @@ -628,8 +698,8 @@ Traceback (most recent call last): ImportError: No module named hgext_importfail Traceback (most recent call last): - HookLoadError: precommit.importfail hook is invalid (import of "importfail" failed) - abort: precommit.importfail hook is invalid (import of "importfail" failed) + HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed + abort: precommit.importfail hook is invalid: import of "importfail" failed Issue1827: Hooks Update & Commit not completely post operation @@ -724,7 +794,6 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: b - $ cd .. pretxnclose hook failure should abort the transaction @@ -746,3 +815,62 @@ $ hg recover no interrupted transaction available [1] + $ cd .. + +Hook from untrusted hgrc are reported as failure +================================================ + + $ cat << EOF > $TESTTMP/untrusted.py + > from mercurial import scmutil, util + > def uisetup(ui): + > class untrustedui(ui.__class__): + > def _trusted(self, fp, f): + > if util.normpath(fp.name).endswith('untrusted/.hg/hgrc'): + > return False + > return super(untrustedui, self)._trusted(fp, f) + > ui.__class__ = untrustedui + > EOF + $ cat << EOF >> $HGRCPATH + > [extensions] + > untrusted=$TESTTMP/untrusted.py + > EOF + $ hg init untrusted + $ cd untrusted + +Non-blocking hook +----------------- + + $ cat << EOF >> .hg/hgrc + > [hooks] + > txnclose.testing=echo txnclose hook called + > EOF + $ touch a && hg commit -Aqm a + warning: untrusted hook txnclose not executed + $ hg log + changeset: 0:3903775176ed + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a + + +Non-blocking hook +----------------- + + $ cat << EOF >> .hg/hgrc + > [hooks] + > pretxnclose.testing=echo pre-txnclose 
hook called + > EOF + $ touch b && hg commit -Aqm a + transaction abort! + rollback completed + abort: untrusted hook pretxnclose not executed + (see 'hg help config.trusted') + [255] + $ hg log + changeset: 0:3903775176ed + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a +
--- a/tests/test-http-bundle1.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-http-bundle1.t Sat Apr 16 18:06:48 2016 -0500 @@ -22,7 +22,7 @@ adding foo.d/baR.d.hg/bAR adding foo.d/foo $ hg serve -p $HGPORT -d --pid-file=../hg1.pid -E ../error.log - $ hg --config server.uncompressed=False serve -p $HGPORT1 -d --pid-file=../hg2.pid + $ hg serve --config server.uncompressed=False -p $HGPORT1 -d --pid-file=../hg2.pid Test server address cannot be reused @@ -163,7 +163,7 @@ > def extsetup(): > common.permhooks.insert(0, perform_authentication) > EOT - $ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid \ + $ hg serve --config extensions.x=userpass.py -p $HGPORT2 -d --pid-file=pid \ > --config server.preferuncompressed=True \ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log $ cat pid >> $DAEMON_PIDS
--- a/tests/test-http-proxy.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-http-proxy.t Sat Apr 16 18:06:48 2016 -0500 @@ -11,7 +11,7 @@ $ echo a > a $ hg ci -Ama -d '1123456789 0' adding a - $ hg --config server.uncompressed=True serve -p $HGPORT -d --pid-file=hg.pid + $ hg serve --config server.uncompressed=True -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ cd .. $ tinyproxy.py $HGPORT1 localhost >proxy.log 2>&1 </dev/null &
--- a/tests/test-http.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-http.t Sat Apr 16 18:06:48 2016 -0500 @@ -13,7 +13,7 @@ adding foo.d/baR.d.hg/bAR adding foo.d/foo $ hg serve -p $HGPORT -d --pid-file=../hg1.pid -E ../error.log - $ hg --config server.uncompressed=False serve -p $HGPORT1 -d --pid-file=../hg2.pid + $ hg serve --config server.uncompressed=False -p $HGPORT1 -d --pid-file=../hg2.pid Test server address cannot be reused @@ -154,7 +154,7 @@ > def extsetup(): > common.permhooks.insert(0, perform_authentication) > EOT - $ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid \ + $ hg serve --config extensions.x=userpass.py -p $HGPORT2 -d --pid-file=pid \ > --config server.preferuncompressed=True \ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log $ cat pid >> $DAEMON_PIDS
--- a/tests/test-https.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-https.t Sat Apr 16 18:06:48 2016 -0500 @@ -171,7 +171,7 @@ abort: error: *certificate verify failed* (glob) [255] - $ DISABLEOSXDUMMYCERT="--config=web.cacerts=!" + $ DISABLEOSXDUMMYCERT="--insecure" #endif clone via pull @@ -264,7 +264,7 @@ Test server cert which isn't valid yet - $ hg -R test serve -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem + $ hg serve -R test -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem $ cat hg1.pid >> $DAEMON_PIDS $ hg -R copy-pull pull --config web.cacerts=pub-not-yet.pem https://localhost:$HGPORT1/ pulling from https://localhost:$HGPORT1/ @@ -273,7 +273,7 @@ Test server cert which no longer is valid - $ hg -R test serve -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem + $ hg serve -R test -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem $ cat hg2.pid >> $DAEMON_PIDS $ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/ pulling from https://localhost:$HGPORT2/ @@ -287,9 +287,24 @@ $ echo "127.0.0.1 = 914f1aff87249c09b6859b88b1906d30756491ca" >> copy-pull/.hg/hgrc - works without cacerts - $ hg -R copy-pull id https://localhost:$HGPORT/ --config web.cacerts=! 
+ $ hg -R copy-pull id https://localhost:$HGPORT/ --insecure + 5fed3813f7f5 + +- multiple fingerprints specified and first matches + $ hg --config 'hostfingerprints.localhost=914f1aff87249c09b6859b88b1906d30756491ca, deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure 5fed3813f7f5 +- multiple fingerprints specified and last matches + $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, 914f1aff87249c09b6859b88b1906d30756491ca' -R copy-pull id https://localhost:$HGPORT/ --insecure + 5fed3813f7f5 + +- multiple fingerprints specified and none match + + $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, aeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure + abort: certificate for localhost has unexpected fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca + (check hostfingerprint configuration) + [255] + - fails when cert doesn't match hostname (port is ignored) $ hg -R copy-pull id https://localhost:$HGPORT1/ abort: certificate for localhost has unexpected fingerprint 28:ff:71:bf:65:31:14:23:ad:62:92:b4:0e:31:99:18:fc:83:e3:9b
--- a/tests/test-hybridencode.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-hybridencode.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,35 +1,38 @@ -from mercurial import store +from __future__ import absolute_import, print_function +from mercurial import ( + store, +) def show(s): # show test input - print "A = '%s'" % s.encode("string_escape") + print("A = '%s'" % s.encode("string_escape")) # show the result of the C implementation, if available h = store._pathencode(s) - print "B = '%s'" % h.encode("string_escape") + print("B = '%s'" % h.encode("string_escape")) # compare it with reference implementation in Python r = store._hybridencode(s, True) if h != r: - print "R = '%s'" % r.encode("string_escape") - print + print("R = '%s'" % r.encode("string_escape")) + print() show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=[]^`{}") -print "uppercase char X is encoded as _x" +print("uppercase char X is encoded as _x") show("data/ABCDEFGHIJKLMNOPQRSTUVWXYZ") -print "underbar is doubled" +print("underbar is doubled") show("data/_") -print "tilde is character-encoded" +print("tilde is character-encoded") show("data/~") -print "characters in ASCII code range 1..31" +print("characters in ASCII code range 1..31") show('data/\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f' '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f') -print "characters in ASCII code range 126..255" +print("characters in ASCII code range 126..255") show('data/\x7e\x7f' '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f' '\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f') @@ -40,18 +43,18 @@ show('data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef' '\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff') -print "Windows reserved characters" +print("Windows reserved characters") show('data/less <, greater >, colon :, double-quote ", backslash \\' ', pipe |, question-mark ?, asterisk *') -print "encoding 
directories ending in .hg, .i or .d with '.hg' suffix" +print("encoding directories ending in .hg, .i or .d with '.hg' suffix") show('data/x.h.i/x.hg/x.i/x.d/foo') show('data/a.hg/a.i/a.d/foo') show('data/au.hg/au.i/au.d/foo') show('data/aux.hg/aux.i/aux.d/foo') show('data/auxy.hg/auxy.i/auxy.d/foo') -print "but these are not encoded on *filenames*" +print("but these are not encoded on *filenames*") show('data/foo/x.hg') show('data/foo/x.i') show('data/foo/x.d') @@ -68,7 +71,7 @@ show('data/foo/auxy.i') show('data/foo/auxy.d') -print "plain .hg, .i and .d directories have the leading dot encoded" +print("plain .hg, .i and .d directories have the leading dot encoded") show('data/.hg/.i/.d/foo') show('data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i') @@ -126,22 +129,22 @@ show('data/a /au /aux /auxy /aux ') -print "largest unhashed path" +print("largest unhashed path") show('data/123456789-123456789-123456789-123456789-123456789-' 'unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "shortest hashed path" +print("shortest hashed path") show('data/123456789-123456789-123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "changing one char in part that's hashed away produces a different hash" +print("changing one char in part that's hashed away produces a different hash") show('data/123456789-123456789-123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-' '123456789-123456') -print "uppercase hitting length limit due to encoding" +print("uppercase hitting length limit due to encoding") show('data/A23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') @@ -149,7 +152,7 @@ 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "compare with lowercase not hitting limit" +print("compare with lowercase not hitting limit") 
show('data/a23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') @@ -157,22 +160,22 @@ 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "not hitting limit with any of these" +print("not hitting limit with any of these") show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=" "[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-" "123456789-12345") -print "underbar hitting length limit due to encoding" +print("underbar hitting length limit due to encoding") show('data/_23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "tilde hitting length limit due to encoding" +print("tilde hitting length limit due to encoding") show('data/~23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "Windows reserved characters hitting length limit" +print("Windows reserved characters hitting length limit") show('data/<23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') @@ -198,47 +201,47 @@ 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "initial space hitting length limit" +print("initial space hitting length limit") show('data/ 23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "initial dot hitting length limit" +print("initial dot hitting length limit") show('data/.23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "trailing space in filename hitting length limit" +print("trailing space in filename hitting length limit") show('data/123456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-1234 ') -print "trailing 
dot in filename hitting length limit" +print("trailing dot in filename hitting length limit") show('data/123456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-1234.') -print "initial space in directory hitting length limit" +print("initial space in directory hitting length limit") show('data/ x/456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "initial dot in directory hitting length limit" +print("initial dot in directory hitting length limit") show('data/.x/456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "trailing space in directory hitting length limit" +print("trailing space in directory hitting length limit") show('data/x /456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "trailing dot in directory hitting length limit" +print("trailing dot in directory hitting length limit") show('data/x./456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "with directories that need direncoding, hitting length limit" +print("with directories that need direncoding, hitting length limit") show('data/x.i/56789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') @@ -249,7 +252,7 @@ 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "Windows reserved filenames, hitting length limit" +print("Windows reserved filenames, hitting length limit") show('data/con/56789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') @@ -275,100 +278,100 @@ 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "non-reserved names, just not hitting limit" 
+print("non-reserved names, just not hitting limit") show('data/123456789-123456789-123456789-123456789-123456789-' '/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') -print "hashed path with largest untruncated 1st dir" +print("hashed path with largest untruncated 1st dir") show('data/12345678/-123456789-123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with smallest truncated 1st dir" +print("hashed path with smallest truncated 1st dir") show('data/123456789/123456789-123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with largest untruncated two dirs" +print("hashed path with largest untruncated two dirs") show('data/12345678/12345678/9-123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with smallest truncated two dirs" +print("hashed path with smallest truncated two dirs") show('data/123456789/123456789/123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with largest untruncated three dirs" +print("hashed path with largest untruncated three dirs") show('data/12345678/12345678/12345678/89-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with smallest truncated three dirs" +print("hashed path with smallest truncated three dirs") show('data/123456789/123456789/123456789/123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with largest untruncated four dirs" +print("hashed path with largest untruncated four dirs") show('data/12345678/12345678/12345678/12345678/789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with smallest truncated four dirs" +print("hashed path with 
smallest truncated four dirs") show('data/123456789/123456789/123456789/123456789/123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with largest untruncated five dirs" +print("hashed path with largest untruncated five dirs") show('data/12345678/12345678/12345678/12345678/12345678/6789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with smallest truncated five dirs" +print("hashed path with smallest truncated five dirs") show('data/123456789/123456789/123456789/123456789/123456789/' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with largest untruncated six dirs" +print("hashed path with largest untruncated six dirs") show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with smallest truncated six dirs" +print("hashed path with smallest truncated six dirs") show('data/123456789/123456789/123456789/123456789/123456789/' '123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with largest untruncated seven dirs" +print("hashed path with largest untruncated seven dirs") show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with smallest truncated seven dirs" +print("hashed path with smallest truncated seven dirs") show('data/123456789/123456789/123456789/123456789/123456789/' '123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with largest untruncated eight dirs" -print "(directory 8 is dropped because it hits _maxshortdirslen)" +print("hashed path with largest untruncated eight dirs") +print("(directory 8 is dropped because it hits _maxshortdirslen)") show('data/12345678/12345678/12345678/12345678/12345678/12345' 
'678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with smallest truncated eight dirs" -print "(directory 8 is dropped because it hits _maxshortdirslen)" +print("hashed path with smallest truncated eight dirs") +print("(directory 8 is dropped because it hits _maxshortdirslen)") show('data/123456789/123456789/123456789/123456789/123456789/' '123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with largest non-dropped directory 8" -print "(just not hitting the _maxshortdirslen boundary)" +print("hashed path with largest non-dropped directory 8") +print("(just not hitting the _maxshortdirslen boundary)") show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "...adding one truncated char to dir 1..7 won't drop dir 8" +print("...adding one truncated char to dir 1..7 won't drop dir 8") show('data/12345678x/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') @@ -391,14 +394,14 @@ '678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path with shortest dropped directory 8" -print "(just hitting the _maxshortdirslen boundary)" +print("hashed path with shortest dropped directory 8") +print("(just hitting the _maxshortdirslen boundary)") show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "hashed path that drops dir 8 due to dot or space at end is" -print "encoded, and thus causing to hit _maxshortdirslen" +print("hashed path that drops dir 8 due to dot or space at end is") +print("encoded, and thus causing to hit _maxshortdirslen") show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') @@ -406,7 +409,7 @@ '678/12345678/1234 
/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print "... with dir 8 short enough for encoding" +print("... with dir 8 short enough for encoding") show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') @@ -414,9 +417,9 @@ '678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') -print '''Extensions are replicated on hashed paths. Note that +print('''Extensions are replicated on hashed paths. Note that we only get to encode files that end in .i or .d inside the -store. Encoded filenames are thus bound in length.''' +store. Encoded filenames are thus bound in length.''') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.345.i') @@ -461,7 +464,7 @@ 'xxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwww' 'wwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i') -print "paths outside data/ can be encoded" +print("paths outside data/ can be encoded") show('metadata/dir/00manifest.i') show('metadata/12345678/12345678/12345678/12345678/12345678/' '12345678/12345678/12345678/12345678/12345678/12345678/'
--- a/tests/test-import-git.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-import-git.t Sat Apr 16 18:06:48 2016 -0500 @@ -822,4 +822,27 @@ > EOF applying patch from stdin +Test email metadata + + $ hg revert -qa + $ hg --encoding utf-8 import - <<EOF + > From: =?UTF-8?q?Rapha=C3=ABl=20Hertzog?= <hertzog@debian.org> + > Subject: [PATCH] =?UTF-8?q?=C5=A7=E2=82=AC=C3=9F=E1=B9=AA?= + > + > diff --git a/a b/a + > --- a/a + > +++ b/a + > @@ -1,1 +1,2 @@ + > a + > +a + > EOF + applying patch from stdin + $ hg --encoding utf-8 log -r . + changeset: *:* (glob) + tag: tip + user: Rapha\xc3\xabl Hertzog <hertzog@debian.org> (esc) + date: * (glob) + summary: \xc5\xa7\xe2\x82\xac\xc3\x9f\xe1\xb9\xaa (esc) + + $ cd ..
--- a/tests/test-install.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-install.t Sat Apr 16 18:06:48 2016 -0500 @@ -6,10 +6,35 @@ checking Python lib (*lib*)... (glob) checking installed modules (*mercurial)... (glob) checking templates (*mercurial?templates)... (glob) - checking commit editor... - checking username... + checking default template (*mercurial?templates?map-cmdline.default) (glob) + checking commit editor... (* -c "import sys; sys.exit(0)") (glob) + checking username (test) no problems detected +hg debuginstall JSON + $ hg debuginstall -Tjson | sed 's|\\\\|\\|g' + [ + { + "defaulttemplate": "*mercurial?templates?map-cmdline.default", (glob) + "defaulttemplateerror": null, + "defaulttemplatenotfound": "default", + "editor": "* -c \"import sys; sys.exit(0)\"", (glob) + "editornotfound": false, + "encoding": "ascii", + "encodingerror": null, + "extensionserror": null, + "hgmodules": "*mercurial", (glob) + "problems": 0, + "pythonexe": "*", (glob) + "pythonlib": "*", (glob) + "pythonver": "*.*.*", (glob) + "templatedirs": "*mercurial?templates", (glob) + "username": "test", + "usernameerror": null, + "vinotfound": false + } + ] + hg debuginstall with no username $ HGUSER= hg debuginstall checking encoding (ascii)... @@ -18,7 +43,8 @@ checking Python lib (*lib*)... (glob) checking installed modules (*mercurial)... (glob) checking templates (*mercurial?templates)... (glob) - checking commit editor... + checking default template (*mercurial?templates?map-cmdline.default) (glob) + checking commit editor... (* -c "import sys; sys.exit(0)") (glob) checking username... no username supplied (specify a username in your configuration file) @@ -38,8 +64,9 @@ checking Python lib (*lib*)... (glob) checking installed modules (*mercurial)... (glob) checking templates (*mercurial?templates)... (glob) - checking commit editor... - checking username... + checking default template (*mercurial?templates?map-cmdline.default) (glob) + checking commit editor... 
(* -c "import sys; sys.exit(0)") (glob) + checking username (test) no problems detected #if test-repo
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-issue1102.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,17 @@ + $ rm -rf a + $ hg init a + $ cd a + $ echo a > a + $ hg ci -Am0 + adding a + $ hg tag t1 # 1 + $ hg tag --remove t1 # 2 + + $ hg co 1 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg tag -f -r0 t1 + $ hg tags + tip 3:a49829c4fc11 + t1 0:f7b1eb17ad24 + + $ cd ..
--- a/tests/test-issue1175.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-issue1175.t Sat Apr 16 18:06:48 2016 -0500 @@ -72,7 +72,7 @@ merging b warning: conflicts while merging b! (edit, then use 'hg resolve --mark') abort: unresolved conflicts, can't continue - (use hg resolve and hg graft --continue) + (use 'hg resolve' and 'hg graft --continue') [255] $ echo a > b $ echo b3 >> b
--- a/tests/test-issue1502.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-issue1502.t Sat Apr 16 18:06:48 2016 -0500 @@ -12,16 +12,14 @@ $ echo "bar" > foo1/a && hg -R foo1 commit -m "edit a in foo1" $ echo "hi" > foo/a && hg -R foo commit -m "edited a foo" - $ hg -R foo1 pull -u + $ hg -R foo1 pull pulling from $TESTTMP/foo (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) - abort: not updating: not a linear update - (merge or update --check to force update) - [255] + (run 'hg heads' to see heads, 'hg merge' to merge) $ hg -R foo1 book branchy $ hg -R foo1 book
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-issue1993.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,48 @@ + $ hg init a + $ cd a + $ echo a > a + $ hg ci -Am0 + adding a + $ echo b > b + $ hg ci -Am1 + adding b + $ hg tag -r0 default + warning: tag default conflicts with existing branch name + $ hg log + changeset: 2:30a83d1e4a1e + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Added tag default for changeset f7b1eb17ad24 + + changeset: 1:925d80f479bb + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: 1 + + changeset: 0:f7b1eb17ad24 + tag: default + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: 0 + + $ hg update 'tag(default)' + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ hg parents + changeset: 0:f7b1eb17ad24 + tag: default + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: 0 + + $ hg update 'branch(default)' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg parents + changeset: 2:30a83d1e4a1e + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Added tag default for changeset f7b1eb17ad24 + + + $ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-issue586.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,92 @@ +Issue586: removing remote files after merge appears to corrupt the +dirstate + + $ hg init a + $ cd a + $ echo a > a + $ hg ci -Ama + adding a + + $ hg init ../b + $ cd ../b + $ echo b > b + $ hg ci -Amb + adding b + + $ hg pull -f ../a + pulling from ../a + searching for changes + warning: repository is unrelated + requesting all changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + (run 'hg heads' to see heads, 'hg merge' to merge) + $ hg merge + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg rm -f a + $ hg ci -Amc + + $ hg st -A + C b + $ cd .. + +Issue1433: Traceback after two unrelated pull, two move, a merge and +a commit (related to issue586) + +create test repos + + $ hg init repoa + $ touch repoa/a + $ hg -R repoa ci -Am adda + adding a + + $ hg init repob + $ touch repob/b + $ hg -R repob ci -Am addb + adding b + + $ hg init repoc + $ cd repoc + $ hg pull ../repoa + pulling from ../repoa + requesting all changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + (run 'hg update' to get a working copy) + $ hg update + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ mkdir tst + $ hg mv * tst + $ hg ci -m "import a in tst" + $ hg pull -f ../repob + pulling from ../repob + searching for changes + warning: repository is unrelated + requesting all changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + (run 'hg heads' to see heads, 'hg merge' to merge) + +merge both repos + + $ hg merge + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ mkdir src + +move b content + + $ hg mv b src 
+ $ hg ci -m "import b in src" + $ hg manifest + src/b + tst/a + + $ cd ..
--- a/tests/test-issue672.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-issue672.t Sat Apr 16 18:06:48 2016 -0500 @@ -65,10 +65,11 @@ branchmerge: True, force: False, partial: False ancestor: c64f439569a9, local: e327dca35ac8+, remote: 746e9549ea96 preserving 1a for resolve of 1a + starting 4 threads for background file closing (?) 1a: local copied/moved from 1 -> m (premerge) picked tool ':merge' for 1a (binary False symlink False changedelete False) merging 1a and 1 to 1a - my 1a@e327dca35ac8+ other 1@746e9549ea96 ancestor 1@81f4b099af3d + my 1a@e327dca35ac8+ other 1@746e9549ea96 ancestor 1@c64f439569a9 premerge successful 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) @@ -88,10 +89,11 @@ ancestor: c64f439569a9, local: 746e9549ea96+, remote: e327dca35ac8 preserving 1 for resolve of 1a removing 1 + starting 4 threads for background file closing (?) 1a: remote moved from 1 -> m (premerge) picked tool ':merge' for 1a (binary False symlink False changedelete False) merging 1 and 1a to 1a - my 1a@746e9549ea96+ other 1a@e327dca35ac8 ancestor 1@81f4b099af3d + my 1a@746e9549ea96+ other 1a@e327dca35ac8 ancestor 1@c64f439569a9 premerge successful 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit)
--- a/tests/test-keyword.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-keyword.t Sat Apr 16 18:06:48 2016 -0500 @@ -51,6 +51,31 @@ Branch = {branches} $Branch: demobranch $ +(test template filter svnisodate and svnutcdate) + + $ hg --quiet kwdemo --config keywordset.svn=True + [extensions] + keyword = + [keyword] + demo.txt = + [keywordset] + svn = True + [keywordmaps] + Author = {author|user} + Date = {date|svnisodate} + Id = {file|basename},v {node|short} {date|svnutcdate} {author|user} + LastChangedBy = {author|user} + LastChangedDate = {date|svnisodate} + LastChangedRevision = {node|short} + Revision = {node|short} + $Author: test $ + $Date: ????-??-?? ??:??:?? ????? (???, ?? ??? ????) $ (glob) + $Id: demo.txt,v ???????????? ????-??-?? ??:??:??Z test $ (glob) + $LastChangedBy: test $ + $LastChangedDate: ????-??-?? ??:??:?? ????? (???, ?? ??? ????) $ (glob) + $LastChangedRevision: ???????????? $ (glob) + $Revision: ???????????? $ (glob) + $ cat <<EOF >> $HGRCPATH > [keyword] > ** =
--- a/tests/test-largefiles-cache.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-largefiles-cache.t Sat Apr 16 18:06:48 2016 -0500 @@ -189,7 +189,7 @@ Inject corruption into the largefiles store and see how update handles that: $ cd src - $ hg up -qC + $ hg up -qC tip $ cat large modified $ rm large @@ -202,6 +202,7 @@ large: data corruption in $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 with hash 6a7bb2556144babe3899b25e5428123735bb1e27 (glob) 0 largefiles updated, 0 removed 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + [12] other heads for branch "default" (re) $ hg st ! large ? z @@ -234,4 +235,20 @@ abort: largefile e2fb5f2139d086ded2cb600d5a91a196e76bf020 missing from store (needs to be uploaded) [255] +Verify that --lfrev controls which revisions are checked for largefiles to push + + $ hg push http://localhost:$HGPORT2 -f --config largefiles.usercache=nocache --lfrev tip + pushing to http://localhost:$HGPORT2/ + searching for changes + abort: largefile e2fb5f2139d086ded2cb600d5a91a196e76bf020 missing from store (needs to be uploaded) + [255] + + $ hg push http://localhost:$HGPORT2 -f --config largefiles.usercache=nocache --lfrev null + pushing to http://localhost:$HGPORT2/ + searching for changes + remote: adding changesets + remote: adding manifests + remote: adding file changes + remote: added 1 changesets with 1 changes to 1 files (+1 heads) + #endif
--- a/tests/test-largefiles-misc.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-largefiles-misc.t Sat Apr 16 18:06:48 2016 -0500 @@ -473,10 +473,10 @@ $ hg log -G anotherlarge @ changeset: 1:9627a577c5e9 | tag: tip - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: anotherlarge - | + ~ user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: anotherlarge + $ hg log glob:another* changeset: 1:9627a577c5e9 @@ -489,6 +489,7 @@ updated patterns: ['glob:../.hglf/sub/another*', 'glob:another*'] @ 1: anotherlarge | + ~ #if no-msys $ hg --debug log -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys @@ -499,6 +500,7 @@ updated patterns: ['glob:../.hglf/sub/another*'] @ 1: anotherlarge | + ~ #endif $ echo more >> anotherlarge @@ -534,10 +536,10 @@ $ hg log -G glob:**another* @ changeset: 1:9627a577c5e9 | tag: tip - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: anotherlarge - | + ~ user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: anotherlarge + $ cd .. 
@@ -682,8 +684,8 @@ all remote heads known locally 1:1acbe71ce432 2:6095d0695d70 - finding outgoing largefiles: 0/2 revision (0.00%) - finding outgoing largefiles: 1/2 revision (50.00%) + finding outgoing largefiles: 0/2 revisions (0.00%) + finding outgoing largefiles: 1/2 revisions (50.00%) largefiles to upload (1 entities): b 89e6c98d92887913cadf06b2adb97f26cde4849b @@ -740,11 +742,11 @@ 3:7983dce246cc 4:233f12ada4ae 5:036794ea641c - finding outgoing largefiles: 0/5 revision (0.00%) - finding outgoing largefiles: 1/5 revision (20.00%) - finding outgoing largefiles: 2/5 revision (40.00%) - finding outgoing largefiles: 3/5 revision (60.00%) - finding outgoing largefiles: 4/5 revision (80.00%) + finding outgoing largefiles: 0/5 revisions (0.00%) + finding outgoing largefiles: 1/5 revisions (20.00%) + finding outgoing largefiles: 2/5 revisions (40.00%) + finding outgoing largefiles: 3/5 revisions (60.00%) + finding outgoing largefiles: 4/5 revisions (80.00%) largefiles to upload (3 entities): b 13f9ed0898e315bf59dc2973fec52037b6f441a2 @@ -791,10 +793,10 @@ 3:7983dce246cc 4:233f12ada4ae 5:036794ea641c - finding outgoing largefiles: 0/4 revision (0.00%) - finding outgoing largefiles: 1/4 revision (25.00%) - finding outgoing largefiles: 2/4 revision (50.00%) - finding outgoing largefiles: 3/4 revision (75.00%) + finding outgoing largefiles: 0/4 revisions (0.00%) + finding outgoing largefiles: 1/4 revisions (25.00%) + finding outgoing largefiles: 2/4 revisions (50.00%) + finding outgoing largefiles: 3/4 revisions (75.00%) largefiles to upload (2 entities): b 13f9ed0898e315bf59dc2973fec52037b6f441a2 @@ -1095,7 +1097,7 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files - nothing to rebase - working directory parent is already an ancestor of destination bf5e395ced2c + nothing to rebase - updating instead 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ..
--- a/tests/test-largefiles-update.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-largefiles-update.t Sat Apr 16 18:06:48 2016 -0500 @@ -6,6 +6,9 @@ > merge = internal:fail > [extensions] > largefiles = + > [extdiff] + > # for portability: + > pdiff = sh "$RUNTESTDIR/pdiff" > EOF $ hg init repo @@ -20,17 +23,17 @@ $ echo 'large1 in #1' > large1 $ echo 'normal1 in #1' > normal1 $ hg commit -m '#1' - $ hg extdiff -r '.^' --config extensions.extdiff= - diff -Npru repo.0d9d9b8dc9a3/.hglf/large1 repo/.hglf/large1 + $ hg pdiff -r '.^' --config extensions.extdiff= + diff -Nru repo.0d9d9b8dc9a3/.hglf/large1 repo/.hglf/large1 --- repo.0d9d9b8dc9a3/.hglf/large1 * (glob) +++ repo/.hglf/large1 * (glob) - @@ -1 +1 @@ + @@ -1* +1* @@ (glob) -4669e532d5b2c093a78eca010077e708a071bb64 +58e24f733a964da346e2407a2bee99d9001184f5 - diff -Npru repo.0d9d9b8dc9a3/normal1 repo/normal1 + diff -Nru repo.0d9d9b8dc9a3/normal1 repo/normal1 --- repo.0d9d9b8dc9a3/normal1 * (glob) +++ repo/normal1 * (glob) - @@ -1 +1 @@ + @@ -1* +1* @@ (glob) -normal1 +normal1 in #1 [1] @@ -68,6 +71,7 @@ $ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg debugdirstate --large --nodate n 644 7 set large1 n 644 13 set large2 @@ -82,6 +86,7 @@ n 644 13 set large2 $ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg debugdirstate --large --nodate n 644 7 set large1 n 644 13 set large2 @@ -463,6 +468,7 @@ keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l 2 files updated, 1 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg status -A large1 M large1 @@ -496,6 +502,7 @@ keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? 
l 2 files updated, 1 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg status -A large1 M large1
--- a/tests/test-largefiles-wireproto.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-largefiles-wireproto.t Sat Apr 16 18:06:48 2016 -0500 @@ -45,7 +45,7 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved largefiles clients still work with vanilla servers - $ hg --config extensions.largefiles=! serve -R r1 -d -p $HGPORT1 --pid-file hg.pid + $ hg serve --config extensions.largefiles=! -R r1 -d -p $HGPORT1 --pid-file hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg clone http://localhost:$HGPORT1 r3 requesting all changes @@ -149,7 +149,7 @@ $ hg commit -m "m2" Invoking status precommit hook A f2 - $ hg --config extensions.largefiles=! -R ../r6 serve -d -p $HGPORT --pid-file ../hg.pid + $ hg serve --config extensions.largefiles=! -R ../r6 -d -p $HGPORT --pid-file ../hg.pid $ cat ../hg.pid >> $DAEMON_PIDS $ hg push http://localhost:$HGPORT pushing to http://localhost:$HGPORT/ @@ -291,7 +291,7 @@ using http://localhost:$HGPORT2/ sending capabilities command sending batch command - getting largefiles: 0/1 lfile (0.00%) + getting largefiles: 0/1 files (0.00%) getting f1:02a439e5c31c526465ab1a0ca1f431f76b827b90 sending getlfile command found 02a439e5c31c526465ab1a0ca1f431f76b827b90 in store
--- a/tests/test-largefiles.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-largefiles.t Sat Apr 16 18:06:48 2016 -0500 @@ -931,6 +931,7 @@ $ hg log -Gqf sub2/large7 @ 7:daea875e9014 | + ~ $ cd .. Test log from outside repo @@ -1285,13 +1286,14 @@ 4:74c02385b94c move files $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub/large4 o 8:a381d2c8c80e modify normal file and largefile in repo b - | + : o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files | + ~ $ hg log --template '{rev}:{node|short} {desc|firstline}\n' sub/large4 8:a381d2c8c80e modify normal file and largefile in repo b 6:4355d653f84f edit files yet again @@ -1299,13 +1301,14 @@ 4:74c02385b94c move files $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub/large4 o 8:a381d2c8c80e modify normal file and largefile in repo b - | + : o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files | + ~ - .hglf only matches largefiles, without .hglf it matches 9 bco sub/normal $ hg log --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub @@ -1317,13 +1320,13 @@ 0:30d30fe6a5be add files $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub o 8:a381d2c8c80e modify normal file and largefile in repo b - | + : o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files - | + : o 1:ce8896473775 edit files | o 0:30d30fe6a5be add files @@ -1340,13 +1343,13 @@ @ 9:598410d3eb9a modify normal file largefile in repo d | o 8:a381d2c8c80e modify normal file and largefile in repo b - | + : o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files - | + : o 1:ce8896473775 edit files | o 0:30d30fe6a5be add files @@ -1364,13 +1367,13 @@ @ 9:598410d3eb9a modify normal file largefile in repo d | o 8:a381d2c8c80e modify normal file and largefile in repo b - | + : o 6:4355d653f84f 
edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files - | + : o 1:ce8896473775 edit files | o 0:30d30fe6a5be add files
--- a/tests/test-lock.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-lock.py Sat Apr 16 18:06:48 2016 -0500 @@ -28,7 +28,7 @@ self._pidoffset = pidoffset super(lockwrapper, self).__init__(*args, **kwargs) def _getpid(self): - return os.getpid() + self._pidoffset + return super(lockwrapper, self)._getpid() + self._pidoffset class teststate(object): def __init__(self, testcase, dir, pidoffset=0):
--- a/tests/test-log.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-log.t Sat Apr 16 18:06:48 2016 -0500 @@ -1777,6 +1777,37 @@ working-directory revision requires special treatment +clean: + + $ hg log -r 'wdir()' --debug + changeset: 2147483647:ffffffffffffffffffffffffffffffffffffffff + phase: draft + parent: 0:65624cd9070a035fa7191a54f2b8af39f16b0c08 + parent: -1:0000000000000000000000000000000000000000 + user: test + date: [A-Za-z0-9:+ ]+ (re) + extra: branch=default + + $ hg log -r 'wdir()' -p --stat + changeset: 2147483647:ffffffffffff + parent: 0:65624cd9070a + user: test + date: [A-Za-z0-9:+ ]+ (re) + + + + +dirty: + + $ echo 2 >> d1/f1 + $ echo 2 > d1/f2 + $ hg add d1/f2 + $ hg remove .d6/f1 + $ hg status + M d1/f1 + A d1/f2 + R .d6/f1 + $ hg log -r 'wdir()' changeset: 2147483647:ffffffffffff parent: 0:65624cd9070a @@ -1793,8 +1824,41 @@ parent: -1:0000000000000000000000000000000000000000 user: test date: [A-Za-z0-9:+ ]+ (re) + files: d1/f1 + files+: d1/f2 + files-: .d6/f1 extra: branch=default + $ hg log -r 'wdir()' -p --stat --git + changeset: 2147483647:ffffffffffff + parent: 0:65624cd9070a + user: test + date: [A-Za-z0-9:+ ]+ (re) + + .d6/f1 | 1 - + d1/f1 | 1 + + d1/f2 | 1 + + 3 files changed, 2 insertions(+), 1 deletions(-) + + diff --git a/.d6/f1 b/.d6/f1 + deleted file mode 100644 + --- a/.d6/f1 + +++ /dev/null + @@ -1,1 +0,0 @@ + -1 + diff --git a/d1/f1 b/d1/f1 + --- a/d1/f1 + +++ b/d1/f1 + @@ -1,1 +1,2 @@ + 1 + +2 + diff --git a/d1/f2 b/d1/f2 + new file mode 100644 + --- /dev/null + +++ b/d1/f2 + @@ -0,0 +1,1 @@ + +2 + $ hg log -r 'wdir()' -Tjson [ { @@ -1834,12 +1898,14 @@ "parents": ["65624cd9070a035fa7191a54f2b8af39f16b0c08"], "manifest": null, "extra": {"branch": "default"}, - "modified": [], - "added": [], - "removed": [] + "modified": ["d1/f1"], + "added": ["d1/f2"], + "removed": [".d6/f1"] } ] + $ hg revert -aqC + Check that adding an arbitrary name shows up in log automatically $ cat > ../names.py <<EOF @@ -1970,10 +2036,10 @@ | 
summary: content3 | o changeset: 3:15b2327059e5 - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: content2 - | + : user: test + : date: Thu Jan 01 00:00:00 1970 +0000 + : summary: content2 + : o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 @@ -1985,16 +2051,16 @@ $ hg log -G a @ changeset: 4:50b9b36e9c5d - | tag: tip - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: content3 - | - | o changeset: 1:2294ae80ad84 - |/ user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: content2 - | + : tag: tip + : user: test + : date: Thu Jan 01 00:00:00 1970 +0000 + : summary: content3 + : + : o changeset: 1:2294ae80ad84 + :/ user: test + : date: Thu Jan 01 00:00:00 1970 +0000 + : summary: content2 + : o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 @@ -2008,10 +2074,10 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -Gf a @ changeset: 3:15b2327059e5 - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: content2 - | + : user: test + : date: Thu Jan 01 00:00:00 1970 +0000 + : summary: content2 + : o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 @@ -2059,10 +2125,10 @@ | summary: content3 | @ changeset: 3:15b2327059e5 - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: content2 - | + : user: test + : date: Thu Jan 01 00:00:00 1970 +0000 + : summary: content2 + : o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 @@ -2076,11 +2142,11 @@ $ hg debugobsolete 50b9b36e9c5df2c6fc6dcefa8ad0da929e84aed2 $ hg log -G a @ changeset: 3:15b2327059e5 - | tag: tip - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: content2 - | + : tag: tip + : user: test + : date: Thu Jan 01 00:00:00 1970 +0000 + : summary: content2 + : o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 @@ -2133,17 +2199,17 @@ $ hg log -f -G b @ changeset: 
3:9bc8ce7f9356 | parent: 0:f7b1eb17ad24 - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: 1 - | + ~ user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: 1 + $ hg log -G b @ changeset: 3:9bc8ce7f9356 | parent: 0:f7b1eb17ad24 - | user: test - | date: Thu Jan 01 00:00:00 1970 +0000 - | summary: 1 - | + ~ user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: 1 + $ cd .. Check proper report when the manifest changes but not the file issue4499
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-logtoprocess.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,54 @@ +Test if logtoprocess correctly captures command-related log calls. + + $ hg init + $ cat > $TESTTMP/foocommand.py << EOF + > from mercurial import cmdutil + > from time import sleep + > cmdtable = {} + > command = cmdutil.command(cmdtable) + > @command('foo', []) + > def foo(ui, repo): + > ui.log('foo', 'a message: %(bar)s\n', bar='spam') + > EOF + $ cat >> $HGRCPATH << EOF + > [extensions] + > logtoprocess= + > foocommand=$TESTTMP/foocommand.py + > [logtoprocess] + > command=echo 'logtoprocess command output:'; + > echo "\$EVENT"; + > echo "\$MSG1"; + > echo "\$MSG2" + > commandfinish=echo 'logtoprocess commandfinish output:'; + > echo "\$EVENT"; + > echo "\$MSG1"; + > echo "\$MSG2"; + > echo "\$MSG3" + > foo=echo 'logtoprocess foo output:'; + > echo "\$EVENT"; + > echo "\$MSG1"; + > echo "\$OPT_BAR" + > EOF + +Running a command triggers both a ui.log('command') and a +ui.log('commandfinish') call. The foo command also uses ui.log. + +Use head to ensure we wait for all lines to be produced, and sort to avoid +ordering issues between the various processes we spawn: + $ hg foo | head -n 17 | sort + + + + 0 + a message: spam + command + commandfinish + foo + foo + foo + foo + foo exited 0 after * seconds (glob) + logtoprocess command output: + logtoprocess commandfinish output: + logtoprocess foo output: + spam
--- a/tests/test-lrucachedict.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-lrucachedict.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,11 +1,15 @@ -from mercurial import util +from __future__ import absolute_import, print_function + +from mercurial import ( + util, +) def printifpresent(d, xs, name='d'): for x in xs: present = x in d - print "'%s' in %s: %s" % (x, name, present) + print("'%s' in %s: %s" % (x, name, present)) if present: - print "%s['%s']: %s" % (name, x, d[x]) + print("%s['%s']: %s" % (name, x, d[x])) def test_lrucachedict(): d = util.lrucachedict(4) @@ -52,19 +56,19 @@ dc = d.copy() # all of these should be present - print "\nAll of these should be present:" + print("\nAll of these should be present:") printifpresent(dc, ['a', 'b', 'c', 'd'], 'dc') # 'a' should be dropped because it was least recently used - print "\nAll of these except 'a' should be present:" + print("\nAll of these except 'a' should be present:") dc['e'] = 've3' printifpresent(dc, ['a', 'b', 'c', 'd', 'e'], 'dc') # contents and order of original dict should remain unchanged - print "\nThese should be in reverse alphabetical order and read 'v?3':" + print("\nThese should be in reverse alphabetical order and read 'v?3':") dc['b'] = 'vb3_new' for k in list(iter(d)): - print "d['%s']: %s" % (k, d[k]) + print("d['%s']: %s" % (k, d[k])) if __name__ == '__main__': test_lrucachedict()
--- a/tests/test-manifest.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-manifest.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,11 +1,14 @@ -import binascii -import unittest -import itertools +from __future__ import absolute_import +import binascii +import itertools import silenttestrunner +import unittest -from mercurial import manifest as manifestmod -from mercurial import match as matchmod +from mercurial import ( + manifest as manifestmod, + match as matchmod, +) EMTPY_MANIFEST = '' EMTPY_MANIFEST_V2 = '\0\n'
--- a/tests/test-merge-changedelete.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-merge-changedelete.t Sat Apr 16 18:06:48 2016 -0500 @@ -77,14 +77,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -108,6 +111,7 @@ $ hg co -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --config ui.interactive=true <<EOF > c @@ -136,14 +140,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) 
other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -165,6 +172,7 @@ $ hg co -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --config ui.interactive=true <<EOF > foo @@ -205,14 +213,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -234,6 +245,7 @@ $ hg co -C 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --config ui.interactive=true <<EOF > d @@ -261,14 +273,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "r", hash 
60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -289,6 +304,7 @@ $ hg co -C 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --tool :local 0 files updated, 3 files merged, 0 files removed, 0 files unresolved @@ -306,14 +322,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "r", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -330,6 +349,7 @@ $ hg 
co -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --tool :other 0 files updated, 2 files merged, 1 files removed, 0 files unresolved @@ -347,14 +367,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "r", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -371,6 +394,7 @@ $ hg co -C 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --tool :fail 0 files updated, 0 files merged, 0 files removed, 3 files unresolved @@ -389,14 +413,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state 
"u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -415,6 +442,7 @@ $ hg co -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --config ui.interactive=True --tool :prompt local changed file1 which remote deleted @@ -439,14 +467,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -467,6 +498,7 @@ $ hg co -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --tool :prompt local changed file1 which remote deleted @@ -491,14 +523,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 
10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -517,6 +552,7 @@ $ hg co -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg merge --tool :merge3 local changed file1 which remote deleted @@ -541,14 +577,17 @@ * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) + file extras: file3 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file3 (record type "F", state "u", hash 
d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) @@ -697,10 +736,15 @@ * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + labels: + local: working copy + other: destination + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) @@ -735,10 +779,15 @@ * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + labels: + local: working copy + other: destination + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) @@ -771,10 +820,15 @@ * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + labels: + local: working copy + other: destination + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 
(node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) @@ -809,10 +863,15 @@ * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + labels: + local: working copy + other: destination + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) @@ -853,10 +912,15 @@ * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + labels: + local: working copy + other: destination + file extras: file1 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) @@ -898,10 +962,15 @@ * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 + labels: + local: working copy + other: destination + file extras: file1 (ancestorlinknode = 
ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) + file extras: file2 (ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e)
--- a/tests/test-merge-commit.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-merge-commit.t Sat Apr 16 18:06:48 2016 -0500 @@ -72,6 +72,7 @@ branchmerge: True, force: False, partial: False ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 0555950ead28 preserving bar for resolve of bar + starting 4 threads for background file closing (?) bar: versions differ -> m (premerge) picked tool ':merge' for bar (binary False symlink False changedelete False) merging bar @@ -158,6 +159,7 @@ branchmerge: True, force: False, partial: False ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 3ffa6b9e35f0 preserving bar for resolve of bar + starting 4 threads for background file closing (?) bar: versions differ -> m (premerge) picked tool ':merge' for bar (binary False symlink False changedelete False) merging bar
--- a/tests/test-merge-criss-cross.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-merge-criss-cross.t Sat Apr 16 18:06:48 2016 -0500 @@ -85,10 +85,10 @@ f2: versions differ -> m (premerge) picked tool ':dump' for f2 (binary False symlink False changedelete False) merging f2 - my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@40494bf2444c + my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527 f2: versions differ -> m (merge) picked tool ':dump' for f2 (binary False symlink False changedelete False) - my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@40494bf2444c + my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527 1 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] @@ -212,7 +212,7 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg up -qC + $ hg up -qC tip $ hg merge -v note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
--- a/tests/test-merge-default.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-merge-default.t Sat Apr 16 18:06:48 2016 -0500 @@ -27,12 +27,13 @@ Should fail because not at a head: $ hg merge - abort: branch 'default' has 3 heads - please merge with an explicit rev - (run 'hg heads .' to see heads) + abort: working directory not at a head revision + (use 'hg update' or merge with an explicit revision) [255] $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 other heads for branch "default" Should fail because > 2 heads: @@ -115,3 +116,36 @@ (run 'hg heads' to see all heads) [255] +(on a branch with a two heads) + + $ hg up 5 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo f >> a + $ hg commit -mf + created new head + $ hg log -r '_destmerge()' + changeset: 6:e88e33f3bf62 + parent: 5:a431fabd6039 + parent: 3:ea9ff125ff88 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: m2 + + +(from the other head) + + $ hg log -r '_destmerge(e88e33f3bf62)' + changeset: 8:b613918999e2 + tag: tip + parent: 5:a431fabd6039 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: f + + +(from unrelated branch) + + $ hg log -r '_destmerge(foobranch)' + abort: branch 'foobranch' has one head - please merge with an explicit rev + (run 'hg heads' to see all heads) + [255]
--- a/tests/test-merge-force.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-merge-force.t Sat Apr 16 18:06:48 2016 -0500 @@ -141,7 +141,7 @@ # - local and remote changed content1_content2_*_content2-untracked # in the same way, so it could potentially be left alone - $ hg merge -f --tool internal:merge3 'desc("remote")' + $ hg merge -f --tool internal:merge3 'desc("remote")' 2>&1 | tee $TESTTMP/merge-output-1 local changed content1_missing_content1_content4-tracked which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u local changed content1_missing_content3_content3-tracked which remote deleted @@ -217,7 +217,6 @@ warning: conflicts while merging missing_content2_missing_content4-untracked! (edit, then use 'hg resolve --mark') 18 files updated, 3 files merged, 8 files removed, 35 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon - [1] Check which files need to be resolved (should correspond to the output above). This should be the files for which the base (1st filename segment), the remote @@ -780,3 +779,17 @@ [1] $ checkstatus > $TESTTMP/status2 2>&1 $ cmp $TESTTMP/status1 $TESTTMP/status2 || diff -U8 $TESTTMP/status1 $TESTTMP/status2 + +Set up working directory again + + $ hg -q update --clean 2 + $ hg --config extensions.purge= purge + $ python $TESTDIR/generate-working-copy-states.py state 3 wc + $ hg addremove -q --similarity 0 + $ hg forget *_*_*_*-untracked + $ rm *_*_*_missing-* + +Merge with checkunknown = warn, see that behavior is the same as before + $ hg merge -f --tool internal:merge3 'desc("remote")' --config merge.checkunknown=warn > $TESTTMP/merge-output-2 2>&1 + [1] + $ cmp $TESTTMP/merge-output-1 $TESTTMP/merge-output-2 || diff -U8 $TESTTMP/merge-output-1 $TESTTMP/merge-output-2
--- a/tests/test-merge-types.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-merge-types.t Sat Apr 16 18:06:48 2016 -0500 @@ -155,6 +155,7 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg st ? a.orig @@ -175,6 +176,7 @@ keep (l)ocal, take (o)ther, or leave (u)nresolved? u 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges + 1 other heads for branch "default" [1] $ hg diff --git diff --git a/a b/a
--- a/tests/test-merge5.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-merge5.t Sat Apr 16 18:06:48 2016 -0500 @@ -13,16 +13,12 @@ created new head $ hg update 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg update - abort: not a linear update - (merge or update --check to force update) - [255] $ rm b - $ hg update -c + $ hg update -c 2 abort: uncommitted changes [255] $ hg revert b - $ hg update -c + $ hg update -c 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mv a c
--- a/tests/test-merge7.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-merge7.t Sat Apr 16 18:06:48 2016 -0500 @@ -84,6 +84,7 @@ branchmerge: True, force: False, partial: False ancestor: 96b70246a118, local: 50c3a7e29886+, remote: 40d11a4173a8 preserving test.txt for resolve of test.txt + starting 4 threads for background file closing (?) test.txt: versions differ -> m (premerge) picked tool ':merge' for test.txt (binary False symlink False changedelete False) merging test.txt
--- a/tests/test-minirst.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-minirst.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,26 +1,29 @@ -from pprint import pprint -from mercurial import minirst +from __future__ import absolute_import, print_function +import pprint +from mercurial import ( + minirst, +) def debugformat(text, form, **kwargs): if form == 'html': - print "html format:" + print("html format:") out = minirst.format(text, style=form, **kwargs) else: - print "%d column format:" % form + print("%d column format:" % form) out = minirst.format(text, width=form, **kwargs) - print "-" * 70 + print("-" * 70) if type(out) == tuple: - print out[0][:-1] - print "-" * 70 - pprint(out[1]) + print(out[0][:-1]) + print("-" * 70) + pprint.pprint(out[1]) else: - print out[:-1] - print "-" * 70 - print + print(out[:-1]) + print("-" * 70) + print() def debugformats(title, text, **kwargs): - print "== %s ==" % title + print("== %s ==" % title) debugformat(text, 60, **kwargs) debugformat(text, 30, **kwargs) debugformat(text, 'html', **kwargs) @@ -241,7 +244,7 @@ rst = minirst.maketable(data, 2, True) table = ''.join(rst) -print table +print(table) debugformats('table', table) @@ -251,7 +254,7 @@ rst = minirst.maketable(data, 1, False) table = ''.join(rst) -print table +print(table) debugformats('table+nl', table)
--- a/tests/test-module-imports.t Tue Mar 29 11:54:46 2016 -0500 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,146 +0,0 @@ -#require test-repo - - $ import_checker="$TESTDIR"/../contrib/import-checker.py - -Run the doctests from the import checker, and make sure -it's working correctly. - $ TERM=dumb - $ export TERM - $ python -m doctest $import_checker - -Run additional tests for the import checker - - $ mkdir testpackage - - $ cat > testpackage/multiple.py << EOF - > from __future__ import absolute_import - > import os, sys - > EOF - - $ cat > testpackage/unsorted.py << EOF - > from __future__ import absolute_import - > import sys - > import os - > EOF - - $ cat > testpackage/stdafterlocal.py << EOF - > from __future__ import absolute_import - > from . import unsorted - > import os - > EOF - - $ cat > testpackage/requirerelative.py << EOF - > from __future__ import absolute_import - > import testpackage.unsorted - > EOF - - $ cat > testpackage/importalias.py << EOF - > from __future__ import absolute_import - > import ui - > EOF - - $ cat > testpackage/relativestdlib.py << EOF - > from __future__ import absolute_import - > from .. import os - > EOF - - $ cat > testpackage/symbolimport.py << EOF - > from __future__ import absolute_import - > from .unsorted import foo - > EOF - - $ cat > testpackage/latesymbolimport.py << EOF - > from __future__ import absolute_import - > from . import unsorted - > from mercurial.node import hex - > EOF - - $ cat > testpackage/multiplegroups.py << EOF - > from __future__ import absolute_import - > from . import unsorted - > from . import more - > EOF - - $ mkdir testpackage/subpackage - $ cat > testpackage/subpackage/levelpriority.py << EOF - > from __future__ import absolute_import - > from . import foo - > from .. import parent - > EOF - - $ touch testpackage/subpackage/foo.py - $ cat > testpackage/subpackage/__init__.py << EOF - > from __future__ import absolute_import - > from . 
import levelpriority # should not cause cycle - > EOF - - $ cat > testpackage/subpackage/localimport.py << EOF - > from __future__ import absolute_import - > from . import foo - > def bar(): - > # should not cause "higher-level import should come first" - > from .. import unsorted - > # but other errors should be detected - > from .. import more - > import testpackage.subpackage.levelpriority - > EOF - - $ cat > testpackage/importmodulefromsub.py << EOF - > from __future__ import absolute_import - > from .subpackage import foo # not a "direct symbol import" - > EOF - - $ cat > testpackage/importsymbolfromsub.py << EOF - > from __future__ import absolute_import - > from .subpackage import foo, nonmodule - > EOF - - $ cat > testpackage/sortedentries.py << EOF - > from __future__ import absolute_import - > from . import ( - > foo, - > bar, - > ) - > EOF - - $ cat > testpackage/importfromalias.py << EOF - > from __future__ import absolute_import - > from . import ui - > EOF - - $ cat > testpackage/importfromrelative.py << EOF - > from __future__ import absolute_import - > from testpackage.unsorted import foo - > EOF - - $ python "$import_checker" testpackage/*.py testpackage/subpackage/*.py - testpackage/importalias.py:2: ui module must be "as" aliased to uimod - testpackage/importfromalias.py:2: ui from testpackage must be "as" aliased to uimod - testpackage/importfromrelative.py:2: import should be relative: testpackage.unsorted - testpackage/importfromrelative.py:2: direct symbol import foo from testpackage.unsorted - testpackage/importsymbolfromsub.py:2: direct symbol import nonmodule from testpackage.subpackage - testpackage/latesymbolimport.py:3: symbol import follows non-symbol import: mercurial.node - testpackage/multiple.py:2: multiple imported names: os, sys - testpackage/multiplegroups.py:3: multiple "from . 
import" statements - testpackage/relativestdlib.py:2: relative import of stdlib module - testpackage/requirerelative.py:2: import should be relative: testpackage.unsorted - testpackage/sortedentries.py:2: imports from testpackage not lexically sorted: bar < foo - testpackage/stdafterlocal.py:3: stdlib import follows local import: os - testpackage/subpackage/levelpriority.py:3: higher-level import should come first: testpackage - testpackage/subpackage/localimport.py:7: multiple "from .. import" statements - testpackage/subpackage/localimport.py:8: import should be relative: testpackage.subpackage.levelpriority - testpackage/symbolimport.py:2: direct symbol import foo from testpackage.unsorted - testpackage/unsorted.py:3: imports not lexically sorted: os < sys - [1] - - $ cd "$TESTDIR"/.. - -There are a handful of cases here that require renaming a module so it -doesn't overlap with a stdlib module name. There are also some cycles -here that we should still endeavor to fix, and some cycles will be -hidden by deduplication algorithm in the cycle detector, so fixing -these may expose other cycles. - - $ hg locate 'mercurial/**.py' 'hgext/**.py' | sed 's-\\-/-g' | python "$import_checker" - - Import cycle: hgext.largefiles.basestore -> hgext.largefiles.localstore -> hgext.largefiles.basestore - [1]
--- a/tests/test-newbranch.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-newbranch.t Sat Apr 16 18:06:48 2016 -0500 @@ -211,8 +211,7 @@ marked working directory as branch foobar $ hg up - abort: branch foobar not found - [255] + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved Fast-forward merge: @@ -345,3 +344,122 @@ (branch merge, don't forget to commit) $ cd .. + +We expect that bare update on new branch, updates to parent + + $ hg init bareupdateonnewbranch + $ cd bareupdateonnewbranch + $ hg update + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ touch a + $ hg commit -A -m "a" + adding a + $ touch b + $ hg commit -A -m "b" + adding b + $ touch c + $ hg commit -A -m "c" + adding c + $ hg update -r 1 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg log -G + o changeset: 2:991a3460af53 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: c + | + @ changeset: 1:0e067c57feba + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: b + | + o changeset: 0:3903775176ed + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a + + $ hg branch dev + marked working directory as branch dev + (branches are permanent and global, did you want a bookmark?) + $ hg update + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg summary + parent: 1:0e067c57feba + b + branch: dev + commit: (new branch) + update: (current) + phases: 3 draft + + $ cd .. + +We need special handling for repositories with no "default" branch because +"null" revision belongs to non-existent "default" branch. 
+ + $ hg init nodefault + $ cd nodefault + $ hg branch -q foo + $ touch 0 + $ hg ci -Aqm0 + $ touch 1 + $ hg ci -Aqm1 + $ hg update -qr0 + $ hg branch -q bar + $ touch 2 + $ hg ci -Aqm2 + $ hg update -qr0 + $ hg branch -q baz + $ touch 3 + $ hg ci -Aqm3 + $ hg ci --close-branch -m 'close baz' + $ hg update -q null + $ hg log -GT'{rev} {branch}\n' + _ 4 baz + | + o 3 baz + | + | o 2 bar + |/ + | o 1 foo + |/ + o 0 foo + + + a) updating from "null" should bring us to the tip-most branch head as + there is no "default" branch: + + $ hg update -q null + $ hg id -bn + -1 default + $ hg update + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -bn + 2 bar + + b) but if we are at uncommitted "default" branch, we should stick to the + current revision: + + $ hg update -q 0 + $ hg branch default + marked working directory as branch default + $ hg id -bn + 0 default + $ hg update + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -bn + 0 default + + c) also, if we have uncommitted branch at "null", we should stick to it: + + $ hg update -q null + $ hg branch new + marked working directory as branch new + $ hg id -bn + -1 new + $ hg update + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg id -bn + -1 new + + $ cd ..
--- a/tests/test-notify.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-notify.t Sat Apr 16 18:06:48 2016 -0500 @@ -553,3 +553,76 @@ description: test (run 'hg heads' to see heads) +default template: + + $ grep -v '^template =' $HGRCPATH > "$HGRCPATH.new" + $ mv "$HGRCPATH.new" $HGRCPATH + $ echo a >> a/a + $ hg --cwd a commit -m 'default template' + $ hg --cwd b pull ../a -q | \ + > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' + Content-Type: text/plain; charset="us-ascii" + MIME-Version: 1.0 + Content-Transfer-Encoding: 7bit + Date: * (glob) + Subject: changeset in b: default template + From: test@test.com + X-Hg-Notification: changeset 3548c9e294b6 + Message-Id: <hg.3548c9e294b6.*.*@*> (glob) + To: baz@test.com, foo@bar + + changeset 3548c9e294b6 in $TESTTMP/b + details: http://test/b?cmd=changeset;node=3548c9e294b6 + description: default template + +with style: + + $ cat <<EOF > notifystyle.map + > changeset = "Subject: {desc|firstline|strip} + > From: {author} + > {""} + > changeset {node|short}" + > EOF + $ cat <<EOF >> $HGRCPATH + > [notify] + > style = $TESTTMP/notifystyle.map + > EOF + $ echo a >> a/a + $ hg --cwd a commit -m 'with style' + $ hg --cwd b pull ../a -q | \ + > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' + Content-Type: text/plain; charset="us-ascii" + MIME-Version: 1.0 + Content-Transfer-Encoding: 7bit + Date: * (glob) + Subject: with style + From: test@test.com + X-Hg-Notification: changeset e917dbd961d3 + Message-Id: <hg.e917dbd961d3.*.*@*> (glob) + To: baz@test.com, foo@bar + + changeset e917dbd961d3 + +with template (overrides style): + + $ cat <<EOF >> $HGRCPATH + > template = Subject: {node|short}: {desc|firstline|strip} + > From: {author} + > {""} + > {desc} + > EOF + $ echo a >> a/a + $ hg --cwd a commit -m 'with template' + $ hg --cwd b pull ../a -q | \ + > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' + Content-Type: text/plain; charset="us-ascii" 
+ MIME-Version: 1.0 + Content-Transfer-Encoding: 7bit + Date: * (glob) + Subject: a09743fd3edd: with template + From: test@test.com + X-Hg-Notification: changeset a09743fd3edd + Message-Id: <hg.a09743fd3edd.*.*@*> (glob) + To: baz@test.com, foo@bar + + with template
--- a/tests/test-obsolete-tag-cache.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-obsolete-tag-cache.t Sat Apr 16 18:06:48 2016 -0500 @@ -67,11 +67,12 @@ 042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2 55482a6fb4b1881fa8f746fd52cf6f096bb21c89 test1 - $ hg blackbox -l 4 - 1970/01/01 00:00:00 bob (*)> tags (glob) - 1970/01/01 00:00:00 bob (*)> 2/2 cache hits/lookups in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 2 tags (glob) - 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) + $ hg blackbox -l 5 + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> tags + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> 2/2 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> writing .hg/cache/tags2-visible with 2 tags + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> tags exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> blackbox -l 5 Hiding another changeset should cause the filtered hash to change @@ -86,11 +87,12 @@ 5 2942a772f72a444bef4bef13874d515f50fa27b6 2fce1eec33263d08a4d04293960fc73a555230e4 042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2 - $ hg blackbox -l 4 - 1970/01/01 00:00:00 bob (*)> tags (glob) - 1970/01/01 00:00:00 bob (*)> 1/1 cache hits/lookups in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) - 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) + $ hg blackbox -l 5 + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> tags + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> 1/1 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> writing .hg/cache/tags2-visible with 1 tags + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 
(5000)> tags exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> blackbox -l 5 Resolving tags on an unfiltered repo writes a separate tags cache @@ -106,8 +108,9 @@ 55482a6fb4b1881fa8f746fd52cf6f096bb21c89 test1 d75775ffbc6bca1794d300f5571272879bd280da test2 - $ hg blackbox -l 4 - 1970/01/01 00:00:00 bob (*)> --hidden tags (glob) - 1970/01/01 00:00:00 bob (*)> 2/2 cache hits/lookups in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2 with 3 tags (glob) - 1970/01/01 00:00:00 bob (*)> --hidden tags exited 0 after * seconds (glob) + $ hg blackbox -l 5 + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> --hidden tags + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> 2/2 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> writing .hg/cache/tags2 with 3 tags + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> --hidden tags exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @2942a772f72a444bef4bef13874d515f50fa27b6 (5000)> blackbox -l 5
--- a/tests/test-obsolete.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-obsolete.t Sat Apr 16 18:06:48 2016 -0500 @@ -129,6 +129,13 @@ ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} +Test the --index option of debugobsolete command + $ hg debugobsolete --index + 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} + 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} + 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} + 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} + Refuse pathological nullid successors $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000 transaction abort! @@ -935,7 +942,7 @@ > opts['message'] = 'Test' > opts['logfile'] = None > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts) - > print repo.changelog.headrevs() + > ui.write('%s\n' % repo.changelog.headrevs()) > EOF $ cat >> $HGRCPATH << EOF > [extensions] @@ -960,6 +967,7 @@ $ hg log -r . 
-T '{node}' --debug 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol) +#if unix-permissions Check that wrong hidden cache permission does not crash $ chmod 000 .hg/cache/hidden @@ -967,6 +975,7 @@ cannot read hidden cache error writing hidden changesets cache 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol) +#endif Test cache consistency for the visible filter 1) We want to make sure that the cached filtered revs are invalidated when @@ -1074,4 +1083,40 @@ | @ 0:a78f55e5508c (draft) [ ] 0 +Test that 'hg debugobsolete --index --rev' can show indices of obsmarkers when +only a subset of those are displayed (because of --rev option) + $ hg init doindexrev + $ cd doindexrev + $ echo a > a + $ hg ci -Am a + adding a + $ hg ci --amend -m aa + $ echo b > b + $ hg ci -Am b + adding b + $ hg ci --amend -m bb + $ echo c > c + $ hg ci -Am c + adding c + $ hg ci --amend -m cc + $ echo d > d + $ hg ci -Am d + adding d + $ hg ci --amend -m dd + $ hg debugobsolete --index --rev "3+7" + 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re) + 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'user': 'test'} (re) +Test the --delete option of debugobsolete command + $ hg debugobsolete --index + 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 \(.*\) {'user': 'test'} (re) + 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re) + 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 \(.*\) {'user': 'test'} (re) + 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'user': 'test'} (re) + $ hg debugobsolete --delete 1 --delete 3 + deleted 2 obsolescense markers + $ hg debugobsolete + cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 \(.*\) {'user': 'test'} (re) + 
1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 \(.*\) {'user': 'test'} (re) + $ cd .. +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-pager.t Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,179 @@ + $ cat >> fakepager.py <<EOF + > import sys + > for line in sys.stdin: + > sys.stdout.write('paged! %r\n' % line) + > EOF + +Enable ui.formatted because pager won't fire without it, and set up +pager and tell it to use our fake pager that lets us see when the +pager was running. + $ cat >> $HGRCPATH <<EOF + > [ui] + > formatted = yes + > [extensions] + > pager= + > [pager] + > pager = python $TESTTMP/fakepager.py + > EOF + + $ hg init repo + $ cd repo + $ echo a >> a + $ hg add a + $ hg ci -m 'add a' + $ for x in `python $TESTDIR/seq.py 1 10`; do + > echo a $x >> a + > hg ci -m "modify a $x" + > done + +By default diff and log are paged, but summary is not: + + $ hg diff -c 2 --pager=yes + paged! 'diff -r f4be7687d414 -r bce265549556 a\n' + paged! '--- a/a\tThu Jan 01 00:00:00 1970 +0000\n' + paged! '+++ b/a\tThu Jan 01 00:00:00 1970 +0000\n' + paged! '@@ -1,2 +1,3 @@\n' + paged! ' a\n' + paged! ' a 1\n' + paged! '+a 2\n' + + $ hg log --limit 2 + paged! 'changeset: 10:46106edeeb38\n' + paged! 'tag: tip\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 10\n' + paged! '\n' + paged! 'changeset: 9:6dd8ea7dd621\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 9\n' + paged! '\n' + + $ hg summary + parent: 10:46106edeeb38 tip + modify a 10 + branch: default + commit: (clean) + update: (current) + phases: 11 draft + +We can enable the pager on summary: + + $ hg --config pager.attend-summary=yes summary + paged! 'parent: 10:46106edeeb38 tip\n' + paged! ' modify a 10\n' + paged! 'branch: default\n' + paged! 'commit: (clean)\n' + paged! 'update: (current)\n' + paged! 
'phases: 11 draft\n' + +If we completely change the attend list that's respected: + + $ hg --config pager.attend-diff=no diff -c 2 + diff -r f4be7687d414 -r bce265549556 a + --- a/a Thu Jan 01 00:00:00 1970 +0000 + +++ b/a Thu Jan 01 00:00:00 1970 +0000 + @@ -1,2 +1,3 @@ + a + a 1 + +a 2 + + $ hg --config pager.attend=summary diff -c 2 + diff -r f4be7687d414 -r bce265549556 a + --- a/a Thu Jan 01 00:00:00 1970 +0000 + +++ b/a Thu Jan 01 00:00:00 1970 +0000 + @@ -1,2 +1,3 @@ + a + a 1 + +a 2 + +If 'log' is in attend, then 'history' should also be paged: + $ hg history --limit 2 --config pager.attend=log + paged! 'changeset: 10:46106edeeb38\n' + paged! 'tag: tip\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 10\n' + paged! '\n' + paged! 'changeset: 9:6dd8ea7dd621\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 9\n' + paged! '\n' + +Possible bug: history is explicitly ignored in pager config, but +because log is in the attend list it still gets pager treatment. + + $ hg history --limit 2 --config pager.attend=log \ + > --config pager.ignore=history + paged! 'changeset: 10:46106edeeb38\n' + paged! 'tag: tip\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 10\n' + paged! '\n' + paged! 'changeset: 9:6dd8ea7dd621\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 9\n' + paged! '\n' + +Possible bug: history is explicitly marked as attend-history=no, but +it doesn't fail to get paged because log is still in the attend list. + + $ hg history --limit 2 --config pager.attend-history=no + paged! 'changeset: 10:46106edeeb38\n' + paged! 'tag: tip\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 10\n' + paged! '\n' + paged! 'changeset: 9:6dd8ea7dd621\n' + paged! 'user: test\n' + paged! 
'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 9\n' + paged! '\n' + +Possible bug: disabling pager for log but enabling it for history +doesn't result in history being paged. + + $ hg history --limit 2 --config pager.attend-log=no \ + > --config pager.attend-history=yes + changeset: 10:46106edeeb38 + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: modify a 10 + + changeset: 9:6dd8ea7dd621 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: modify a 9 + + +Pager with color enabled allows colors to come through by default, +even though stdout is no longer a tty. + $ cat >> $HGRCPATH <<EOF + > [extensions] + > color= + > [color] + > mode = ansi + > EOF + $ hg log --limit 3 + paged! '\x1b[0;33mchangeset: 10:46106edeeb38\x1b[0m\n' + paged! 'tag: tip\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 10\n' + paged! '\n' + paged! '\x1b[0;33mchangeset: 9:6dd8ea7dd621\x1b[0m\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 9\n' + paged! '\n' + paged! '\x1b[0;33mchangeset: 8:cff05a6312fe\x1b[0m\n' + paged! 'user: test\n' + paged! 'date: Thu Jan 01 00:00:00 1970 +0000\n' + paged! 'summary: modify a 8\n' + paged! '\n'
--- a/tests/test-parse-date.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-parse-date.t Sat Apr 16 18:06:48 2016 -0500 @@ -28,10 +28,12 @@ Check with local timezone other than GMT and with DST - $ TZ="PST+8PDT" + $ TZ="PST+8PDT+7,M4.1.0/02:00:00,M10.5.0/02:00:00" $ export TZ PST=UTC-8 / PDT=UTC-7 +Summer time begins on April's first Sunday at 2:00am, +and ends on October's last Sunday at 2:00am. $ hg debugrebuildstate $ echo "a" > a
--- a/tests/test-parseindex2.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-parseindex2.py Sat Apr 16 18:06:48 2016 -0500 @@ -3,12 +3,20 @@ It also checks certain aspects of the parsers module as a whole. """ -from mercurial import parsers -from mercurial.node import nullid, nullrev +from __future__ import absolute_import, print_function + import struct import subprocess import sys +from mercurial import ( + parsers, +) +from mercurial.node import ( + nullid, + nullrev, +) + # original python implementation def gettype(q): return int(q & 0xFFFF) @@ -122,10 +130,10 @@ hexstring = hex(hexversion) except TypeError: hexstring = None - print ("FAILED: version test #%s with Python %s and patched " - "sys.hexversion %r (%r):\n Expected %s but got:\n-->'%s'\n" % - (testnumber, sys.version_info, hexversion, hexstring, expected, - stdout)) + print("FAILED: version test #%s with Python %s and patched " + "sys.hexversion %r (%r):\n Expected %s but got:\n-->'%s'\n" % + (testnumber, sys.version_info, hexversion, hexstring, expected, + stdout)) def testversionokay(testnumber, hexversion): stdout, stderr = importparsers(hexversion) @@ -170,7 +178,7 @@ except TypeError: pass else: - print "Expected to get TypeError." + print("Expected to get TypeError.") # Check parsers.parse_index2() on an index file against the original # Python implementation of parseindex, both with and without inlined data. @@ -182,10 +190,10 @@ c_res_2 = parse_index2(data_non_inlined, False) if py_res_1 != c_res_1: - print "Parse index result (with inlined data) differs!" + print("Parse index result (with inlined data) differs!") if py_res_2 != c_res_2: - print "Parse index result (no inlined data) differs!" 
+ print("Parse index result (no inlined data) differs!") ix = parsers.parse_index2(data_inlined, True)[0] for i, r in enumerate(ix): @@ -193,11 +201,12 @@ i = -1 try: if ix[r[7]] != i: - print 'Reverse lookup inconsistent for %r' % r[7].encode('hex') + print('Reverse lookup inconsistent for %r' + % r[7].encode('hex')) except TypeError: # pure version doesn't support this break - print "done" + print("done") runtest()
--- a/tests/test-patchbomb.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-patchbomb.t Sat Apr 16 18:06:48 2016 -0500 @@ -28,6 +28,9 @@ $ echo "[extensions]" >> $HGRCPATH $ echo "patchbomb=" >> $HGRCPATH +Ensure hg email output is sent to stdout + $ unset PAGER + $ hg init t $ cd t $ echo a > a @@ -2874,7 +2877,7 @@ (use "hg push $TESTTMP/t2 -r 3b6f1ec9dde9") [1] -remote missing +public missing $ echo 'publicurl=$TESTTMP/missing' >> $HGRCPATH $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' @@ -2882,7 +2885,7 @@ abort: repository $TESTTMP/missing not found! [255] -node missing at remote +node missing at public $ hg clone -r '9' . ../t3 adding changesets @@ -2896,3 +2899,11 @@ abort: public url $TESTTMP/t3 is missing 3b6f1ec9dde9 (use "hg push $TESTTMP/t3 -r 3b6f1ec9dde9") [255] + +multiple heads are missing at public + + $ hg email --date '1980-1-1 0:1' -t foo -s test -r '2+10' + abort: public "$TESTTMP/t3" is missing ff2c9fa2018b and 1 others + (use "hg push $TESTTMP/t3 -r ff2c9fa2018b -r 3b6f1ec9dde9") + [255] +
--- a/tests/test-pathencode.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-pathencode.py Sat Apr 16 18:06:48 2016 -0500 @@ -5,9 +5,19 @@ # that have proven likely to expose bugs and divergent behavior in # different encoding implementations. -from mercurial import store -import binascii, itertools, math, os, random, sys, time +from __future__ import absolute_import, print_function + +import binascii import collections +import itertools +import math +import os +import random +import sys +import time +from mercurial import ( + store, +) validchars = set(map(chr, range(0, 256))) alphanum = range(ord('A'), ord('Z')) @@ -154,10 +164,10 @@ r = store._hybridencode(p, True) # reference implementation in Python if h != r: if nerrs == 0: - print >> sys.stderr, 'seed:', hex(seed)[:-1] - print >> sys.stderr, "\np: '%s'" % p.encode("string_escape") - print >> sys.stderr, "h: '%s'" % h.encode("string_escape") - print >> sys.stderr, "r: '%s'" % r.encode("string_escape") + print('seed:', hex(seed)[:-1], file=sys.stderr) + print("\np: '%s'" % p.encode("string_escape"), file=sys.stderr) + print("h: '%s'" % h.encode("string_escape"), file=sys.stderr) + print("r: '%s'" % r.encode("string_escape"), file=sys.stderr) nerrs += 1 return nerrs
--- a/tests/test-paths.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-paths.t Sat Apr 16 18:06:48 2016 -0500 @@ -59,24 +59,24 @@ formatter output with paths: $ echo 'dupe:pushurl = https://example.com/dupe' >> .hg/hgrc - $ hg paths -Tjson + $ hg paths -Tjson | sed 's|\\\\|\\|g' [ { "name": "dupe", "pushurl": "https://example.com/dupe", - "url": "$TESTTMP/b#tip" + "url": "$TESTTMP/b#tip" (glob) }, { "name": "expand", - "url": "$TESTTMP/a/$SOMETHING/bar" + "url": "$TESTTMP/a/$SOMETHING/bar" (glob) } ] - $ hg paths -Tjson dupe + $ hg paths -Tjson dupe | sed 's|\\\\|\\|g' [ { "name": "dupe", "pushurl": "https://example.com/dupe", - "url": "$TESTTMP/b#tip" + "url": "$TESTTMP/b#tip" (glob) } ] $ hg paths -Tjson -q unknown
--- a/tests/test-progress.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-progress.t Sat Apr 16 18:06:48 2016 -0500 @@ -79,6 +79,12 @@ no progress with --quiet $ hg -y loop 3 --quiet +test plain mode exception + $ HGPLAINEXCEPT=progress hg -y loop 1 + \r (no-eol) (esc) + loop [ ] 0/1\r (no-eol) (esc) + \r (no-eol) (esc) + test nested short-lived topics (which shouldn't display with nestdelay): $ hg -y loop 3 --nested @@ -176,6 +182,8 @@ test delay time estimates +#if no-chg + $ cat > mocktime.py <<EOF > import os > import time @@ -244,6 +252,8 @@ loop [ <=> ] 3\r (no-eol) (esc) \r (no-eol) (esc) +#endif + test line trimming by '[progress] width', when progress topic contains multi-byte characters, of which length of byte sequence and columns in display are different from each other.
--- a/tests/test-propertycache.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-propertycache.py Sat Apr 16 18:06:48 2016 -0500 @@ -4,18 +4,21 @@ property cache of both localrepo and repoview to prevent regression.""" -import os, subprocess -import mercurial.localrepo -import mercurial.repoview -import mercurial.util -import mercurial.hg -import mercurial.ui as uimod +from __future__ import absolute_import, print_function +import os +import subprocess +from mercurial import ( + hg, + localrepo, + ui as uimod, + util, +) # create some special property cache that trace they call calllog = [] -@mercurial.util.propertycache +@util.propertycache def testcachedfoobar(repo): name = repo.filtername if name is None: @@ -25,7 +28,7 @@ return val unficalllog = [] -@mercurial.localrepo.unfilteredpropertycache +@localrepo.unfilteredpropertycache def testcachedunfifoobar(repo): name = repo.filtername if name is None: @@ -35,8 +38,8 @@ return val #plug them on repo -mercurial.localrepo.localrepository.testcachedfoobar = testcachedfoobar -mercurial.localrepo.localrepository.testcachedunfifoobar = testcachedunfifoobar +localrepo.localrepository.testcachedfoobar = testcachedfoobar +localrepo.localrepository.testcachedunfifoobar = testcachedunfifoobar # Create an empty repo and instantiate it. 
It is important to run @@ -44,136 +47,136 @@ repopath = os.path.join(os.environ['TESTTMP'], 'repo') assert subprocess.call(['hg', 'init', repopath]) == 0 ui = uimod.ui() -repo = mercurial.hg.repository(ui, path=repopath).unfiltered() +repo = hg.repository(ui, path=repopath).unfiltered() -print '' -print '=== property cache ===' -print '' -print 'calllog:', calllog -print 'cached value (unfiltered):', -print vars(repo).get('testcachedfoobar', 'NOCACHE') +print('') +print('=== property cache ===') +print('') +print('calllog:', calllog) +print('cached value (unfiltered):', + vars(repo).get('testcachedfoobar', 'NOCACHE')) -print '' -print '= first access on unfiltered, should do a call' -print 'access:', repo.testcachedfoobar -print 'calllog:', calllog -print 'cached value (unfiltered):', -print vars(repo).get('testcachedfoobar', 'NOCACHE') +print('') +print('= first access on unfiltered, should do a call') +print('access:', repo.testcachedfoobar) +print('calllog:', calllog) +print('cached value (unfiltered):', + vars(repo).get('testcachedfoobar', 'NOCACHE')) -print '' -print '= second access on unfiltered, should not do call' -print 'access', repo.testcachedfoobar -print 'calllog:', calllog -print 'cached value (unfiltered):', -print vars(repo).get('testcachedfoobar', 'NOCACHE') +print('') +print('= second access on unfiltered, should not do call') +print('access', repo.testcachedfoobar) +print('calllog:', calllog) +print('cached value (unfiltered):', + vars(repo).get('testcachedfoobar', 'NOCACHE')) -print '' -print '= first access on "visible" view, should do a call' +print('') +print('= first access on "visible" view, should do a call') visibleview = repo.filtered('visible') -print 'cached value ("visible" view):', -print vars(visibleview).get('testcachedfoobar', 'NOCACHE') -print 'access:', visibleview.testcachedfoobar -print 'calllog:', calllog -print 'cached value (unfiltered):', -print vars(repo).get('testcachedfoobar', 'NOCACHE') -print 'cached value ("visible" 
view):', -print vars(visibleview).get('testcachedfoobar', 'NOCACHE') +print('cached value ("visible" view):', + vars(visibleview).get('testcachedfoobar', 'NOCACHE')) +print('access:', visibleview.testcachedfoobar) +print('calllog:', calllog) +print('cached value (unfiltered):', + vars(repo).get('testcachedfoobar', 'NOCACHE')) +print('cached value ("visible" view):', + vars(visibleview).get('testcachedfoobar', 'NOCACHE')) -print '' -print '= second access on "visible view", should not do call' -print 'access:', visibleview.testcachedfoobar -print 'calllog:', calllog -print 'cached value (unfiltered):', -print vars(repo).get('testcachedfoobar', 'NOCACHE') -print 'cached value ("visible" view):', -print vars(visibleview).get('testcachedfoobar', 'NOCACHE') +print('') +print('= second access on "visible view", should not do call') +print('access:', visibleview.testcachedfoobar) +print('calllog:', calllog) +print('cached value (unfiltered):', + vars(repo).get('testcachedfoobar', 'NOCACHE')) +print('cached value ("visible" view):', + vars(visibleview).get('testcachedfoobar', 'NOCACHE')) -print '' -print '= no effect on other view' +print('') +print('= no effect on other view') immutableview = repo.filtered('immutable') -print 'cached value ("immutable" view):', -print vars(immutableview).get('testcachedfoobar', 'NOCACHE') -print 'access:', immutableview.testcachedfoobar -print 'calllog:', calllog -print 'cached value (unfiltered):', -print vars(repo).get('testcachedfoobar', 'NOCACHE') -print 'cached value ("visible" view):', -print vars(visibleview).get('testcachedfoobar', 'NOCACHE') -print 'cached value ("immutable" view):', -print vars(immutableview).get('testcachedfoobar', 'NOCACHE') +print('cached value ("immutable" view):', + vars(immutableview).get('testcachedfoobar', 'NOCACHE')) +print('access:', immutableview.testcachedfoobar) +print('calllog:', calllog) +print('cached value (unfiltered):', + vars(repo).get('testcachedfoobar', 'NOCACHE')) +print('cached value 
("visible" view):', + vars(visibleview).get('testcachedfoobar', 'NOCACHE')) +print('cached value ("immutable" view):', + vars(immutableview).get('testcachedfoobar', 'NOCACHE')) # unfiltered property cache test -print '' -print '' -print '=== unfiltered property cache ===' -print '' -print 'unficalllog:', unficalllog -print 'cached value (unfiltered): ', -print vars(repo).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("visible" view): ', -print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("immutable" view):', -print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') +print('') +print('') +print('=== unfiltered property cache ===') +print('') +print('unficalllog:', unficalllog) +print('cached value (unfiltered): ', + vars(repo).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("visible" view): ', + vars(visibleview).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("immutable" view):', + vars(immutableview).get('testcachedunfifoobar', 'NOCACHE')) -print '' -print '= first access on unfiltered, should do a call' -print 'access (unfiltered):', repo.testcachedunfifoobar -print 'unficalllog:', unficalllog -print 'cached value (unfiltered): ', -print vars(repo).get('testcachedunfifoobar', 'NOCACHE') +print('') +print('= first access on unfiltered, should do a call') +print('access (unfiltered):', repo.testcachedunfifoobar) +print('unficalllog:', unficalllog) +print('cached value (unfiltered): ', + vars(repo).get('testcachedunfifoobar', 'NOCACHE')) -print '' -print '= second access on unfiltered, should not do call' -print 'access (unfiltered):', repo.testcachedunfifoobar -print 'unficalllog:', unficalllog -print 'cached value (unfiltered): ', -print vars(repo).get('testcachedunfifoobar', 'NOCACHE') +print('') +print('= second access on unfiltered, should not do call') +print('access (unfiltered):', repo.testcachedunfifoobar) +print('unficalllog:', unficalllog) +print('cached value 
(unfiltered): ', + vars(repo).get('testcachedunfifoobar', 'NOCACHE')) -print '' -print '= access on view should use the unfiltered cache' -print 'access (unfiltered): ', repo.testcachedunfifoobar -print 'access ("visible" view): ', visibleview.testcachedunfifoobar -print 'access ("immutable" view):', immutableview.testcachedunfifoobar -print 'unficalllog:', unficalllog -print 'cached value (unfiltered): ', -print vars(repo).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("visible" view): ', -print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("immutable" view):', -print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') +print('') +print('= access on view should use the unfiltered cache') +print('access (unfiltered): ', repo.testcachedunfifoobar) +print('access ("visible" view): ', visibleview.testcachedunfifoobar) +print('access ("immutable" view):', immutableview.testcachedunfifoobar) +print('unficalllog:', unficalllog) +print('cached value (unfiltered): ', + vars(repo).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("visible" view): ', + vars(visibleview).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("immutable" view):', + vars(immutableview).get('testcachedunfifoobar', 'NOCACHE')) -print '' -print '= even if we clear the unfiltered cache' +print('') +print('= even if we clear the unfiltered cache') del repo.__dict__['testcachedunfifoobar'] -print 'cached value (unfiltered): ', -print vars(repo).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("visible" view): ', -print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("immutable" view):', -print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') -print 'unficalllog:', unficalllog -print 'access ("visible" view): ', visibleview.testcachedunfifoobar -print 'unficalllog:', unficalllog -print 'cached value (unfiltered): ', -print vars(repo).get('testcachedunfifoobar', 'NOCACHE') 
-print 'cached value ("visible" view): ', -print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("immutable" view):', -print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') -print 'access ("immutable" view):', immutableview.testcachedunfifoobar -print 'unficalllog:', unficalllog -print 'cached value (unfiltered): ', -print vars(repo).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("visible" view): ', -print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("immutable" view):', -print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') -print 'access (unfiltered): ', repo.testcachedunfifoobar -print 'unficalllog:', unficalllog -print 'cached value (unfiltered): ', -print vars(repo).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("visible" view): ', -print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') -print 'cached value ("immutable" view):', -print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') +print('cached value (unfiltered): ', + vars(repo).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("visible" view): ', + vars(visibleview).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("immutable" view):', + vars(immutableview).get('testcachedunfifoobar', 'NOCACHE')) +print('unficalllog:', unficalllog) +print('access ("visible" view): ', visibleview.testcachedunfifoobar) +print('unficalllog:', unficalllog) +print('cached value (unfiltered): ', + vars(repo).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("visible" view): ', + vars(visibleview).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("immutable" view):', + vars(immutableview).get('testcachedunfifoobar', 'NOCACHE')) +print('access ("immutable" view):', immutableview.testcachedunfifoobar) +print('unficalllog:', unficalllog) +print('cached value (unfiltered): ', + vars(repo).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value 
("visible" view): ', + vars(visibleview).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("immutable" view):', + vars(immutableview).get('testcachedunfifoobar', 'NOCACHE')) +print('access (unfiltered): ', repo.testcachedunfifoobar) +print('unficalllog:', unficalllog) +print('cached value (unfiltered): ', + vars(repo).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("visible" view): ', + vars(visibleview).get('testcachedunfifoobar', 'NOCACHE')) +print('cached value ("immutable" view):', + vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'))
--- a/tests/test-pull-branch.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-pull-branch.t Sat Apr 16 18:06:48 2016 -0500 @@ -133,6 +133,7 @@ adding file changes added 4 changesets with 4 changes to 1 files (+1 heads) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "branchA" Make changes on new branch on tt
--- a/tests/test-pull-r.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-pull-r.t Sat Apr 16 18:06:48 2016 -0500 @@ -112,7 +112,7 @@ > [hooks] > outgoing.makecommit = hg ci -Am 'racy commit'; echo committed in pull-race > EOF - $ hg -R ../repo serve -p $HGPORT2 -d --pid-file=../repo.pid + $ hg serve -R ../repo -p $HGPORT2 -d --pid-file=../repo.pid $ cat ../repo.pid >> $DAEMON_PIDS $ hg pull --rev default --update http://localhost:$HGPORT2/ pulling from http://localhost:$HGPORT2/
--- a/tests/test-pull-update.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-pull-update.t Sat Apr 16 18:06:48 2016 -0500 @@ -16,7 +16,7 @@ $ echo 1.2 > foo $ hg ci -Am m -Should not update: +Should not update to the other topological branch: $ hg pull -u ../tt pulling from ../tt @@ -25,13 +25,12 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) - abort: not updating: not a linear update - (merge or update --check to force update) - [255] + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ cd ../tt -Should not update: +Should not update to the other branch: $ hg pull -u ../t pulling from ../t @@ -40,9 +39,8 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) - abort: not updating: not a linear update - (merge or update --check to force update) - [255] + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ HGMERGE=true hg merge merging foo @@ -63,4 +61,154 @@ added 1 changesets with 1 changes to 1 files (-1 heads) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved +Similarity between "hg update" and "hg pull -u" in handling bookmark +==================================================================== + +Test that updating activates the bookmark, which matches with the +explicit destination of the update. 
+ + $ echo 4 >> foo + $ hg commit -m "#4" + $ hg bookmark active-after-pull + $ cd ../tt + +(1) activating by --rev BOOKMARK + + $ hg bookmark -f active-before-pull + $ hg bookmarks + * active-before-pull 3:483b76ad4309 + + $ hg pull -u -r active-after-pull + pulling from $TESTTMP/t (glob) + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + adding remote bookmark active-after-pull + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (activating bookmark active-after-pull) + + $ hg parents -q + 4:f815b3da6163 + $ hg bookmarks + * active-after-pull 4:f815b3da6163 + active-before-pull 3:483b76ad4309 + +(discard pulled changes) + + $ hg update -q 483b76ad4309 + $ hg rollback -q + +(2) activating by URL#BOOKMARK + + $ hg bookmark -f active-before-pull + $ hg bookmarks + * active-before-pull 3:483b76ad4309 + + $ hg pull -u $TESTTMP/t#active-after-pull + pulling from $TESTTMP/t (glob) + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + adding remote bookmark active-after-pull + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (activating bookmark active-after-pull) + + $ hg parents -q + 4:f815b3da6163 + $ hg bookmarks + * active-after-pull 4:f815b3da6163 + active-before-pull 3:483b76ad4309 + +(discard pulled changes) + + $ hg update -q 483b76ad4309 + $ hg rollback -q + +Test that updating deactivates current active bookmark, if the +destination of the update is explicitly specified, and it doesn't +match with the name of any existing bookmarks. 
+ + $ cd ../t + $ hg bookmark -d active-after-pull + $ hg branch bar -q + $ hg commit -m "#5 (bar #1)" + $ cd ../tt + +(1) deactivating by --rev REV + + $ hg bookmark -f active-before-pull + $ hg bookmarks + * active-before-pull 3:483b76ad4309 + + $ hg pull -u -r b5e4babfaaa7 + pulling from $TESTTMP/t (glob) + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 1 changes to 1 files + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (leaving bookmark active-before-pull) + + $ hg parents -q + 5:b5e4babfaaa7 + $ hg bookmarks + active-before-pull 3:483b76ad4309 + +(discard pulled changes) + + $ hg update -q 483b76ad4309 + $ hg rollback -q + +(2) deactivating by --branch BRANCH + + $ hg bookmark -f active-before-pull + $ hg bookmarks + * active-before-pull 3:483b76ad4309 + + $ hg pull -u -b bar + pulling from $TESTTMP/t (glob) + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 1 changes to 1 files + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (leaving bookmark active-before-pull) + + $ hg parents -q + 5:b5e4babfaaa7 + $ hg bookmarks + active-before-pull 3:483b76ad4309 + +(discard pulled changes) + + $ hg update -q 483b76ad4309 + $ hg rollback -q + +(3) deactivating by URL#ANOTHER-BRANCH + + $ hg bookmark -f active-before-pull + $ hg bookmarks + * active-before-pull 3:483b76ad4309 + + $ hg pull -u $TESTTMP/t#bar + pulling from $TESTTMP/t (glob) + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 1 changes to 1 files + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (leaving bookmark active-before-pull) + + $ hg parents -q + 5:b5e4babfaaa7 + $ hg bookmarks + active-before-pull 3:483b76ad4309 + $ cd ..
--- a/tests/test-push-validation.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-push-validation.t Sat Apr 16 18:06:48 2016 -0500 @@ -72,7 +72,7 @@ checking manifests crosschecking files in changesets and manifests checking files - beta@1: dddc47b3ba30 in manifests not found + beta@1: manifest refers to unknown revision dddc47b3ba30 2 files, 2 changesets, 2 total revisions 1 integrity errors encountered! (first damaged changeset appears to be 1)
--- a/tests/test-qrecord.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-qrecord.t Sat Apr 16 18:06:48 2016 -0500 @@ -7,7 +7,7 @@ $ hg help record record extension - commands to interactively select changes for - commit/qrefresh + commit/qrefresh (DEPRECATED) (use "hg help extensions" for information on enabling extensions) @@ -17,6 +17,7 @@ 'qrecord' is provided by the following extension: record commands to interactively select changes for commit/qrefresh + (DEPRECATED) (use "hg help extensions" for information on enabling extensions) @@ -54,6 +55,8 @@ This command is not available when committing a merge. + (use "hg help -e record" to show help for the record extension) + options ([+] can be repeated): -A --addremove mark new/missing files as added/removed before
--- a/tests/test-rebase-abort.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-abort.t Sat Apr 16 18:06:48 2016 -0500 @@ -75,7 +75,11 @@ * version 2 records local: 3e046f2ecedb793b97ed32108086edd1a162f8bc other: 46f0b057b5c061d276b91491c22151f78698abd2 + labels: + local: dest + other: source unrecognized entry: x advisory record + file extras: common (ancestorlinknode = 3163e20567cc93074fbb7a53c8b93312e59dbf2c) file: common (record type "F", state "u", hash 94c8c21d08740f5da9eaa38d1f175c592692f0d1) local path: common (flags "") ancestor path: common (node de0a666fdd9c1a0b0698b90d85064d8bd34f74b6) @@ -90,6 +94,10 @@ * version 2 records local: 3e046f2ecedb793b97ed32108086edd1a162f8bc other: 46f0b057b5c061d276b91491c22151f78698abd2 + labels: + local: dest + other: source + file extras: common (ancestorlinknode = 3163e20567cc93074fbb7a53c8b93312e59dbf2c) file: common (record type "F", state "u", hash 94c8c21d08740f5da9eaa38d1f175c592692f0d1) local path: common (flags "") ancestor path: common (node de0a666fdd9c1a0b0698b90d85064d8bd34f74b6) @@ -428,6 +436,7 @@ commit: (clean) update: 1 new changesets, 2 branch heads (merge) phases: 4 draft + $ cd .. test aborting a rebase succeeds after rebasing with skipped commits onto a public changeset (issue4896) @@ -461,4 +470,5 @@ [1] $ hg rebase --abort rebase aborted + $ cd ..
--- a/tests/test-rebase-bookmarks.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-bookmarks.t Sat Apr 16 18:06:48 2016 -0500 @@ -167,7 +167,7 @@ created new head $ hg up 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg rebase + $ hg rebase --dest 4 rebasing 3:3d5fa227f4b5 "C" (Y Z) merging c warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
--- a/tests/test-rebase-collapse.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-collapse.t Sat Apr 16 18:06:48 2016 -0500 @@ -58,7 +58,7 @@ > echo "====" > echo "edited manually" >> \$1 > EOF - $ HGEDITOR="sh $TESTTMP/editor.sh" hg rebase --collapse --keepbranches -e + $ HGEDITOR="sh $TESTTMP/editor.sh" hg rebase --collapse --keepbranches -e --dest 7 rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" @@ -115,7 +115,7 @@ $ cd a2 $ hg phase --force --secret 6 - $ hg rebase --source 4 --collapse + $ hg rebase --source 4 --collapse --dest 7 rebasing 4:9520eea781bc "E" rebasing 6:eea13746799a "G" saved backup bundle to $TESTTMP/a2/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob) @@ -157,7 +157,7 @@ > env | grep HGEDITFORM > true > EOF - $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg rebase --source 4 --collapse -m 'custom message' -e + $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg rebase --source 4 --collapse -m 'custom message' -e --dest 7 rebasing 4:9520eea781bc "E" rebasing 6:eea13746799a "G" HGEDITFORM=rebase.collapse @@ -261,13 +261,13 @@ $ hg clone -q -u . b b1 $ cd b1 - $ hg rebase -s 2 --collapse + $ hg rebase -s 2 --dest 7 --collapse abort: unable to collapse on top of 7, there is more than one external parent: 1, 5 [255] Rebase and collapse - E onto H: - $ hg rebase -s 4 --collapse # root (4) is not a merge + $ hg rebase -s 4 --dest 7 --collapse # root (4) is not a merge rebasing 4:8a5212ebc852 "E" rebasing 5:7f219660301f "F" rebasing 6:c772a8b2dc17 "G" @@ -418,7 +418,7 @@ $ hg clone -q -u . c c1 $ cd c1 - $ hg rebase -s 4 --collapse # root (4) is not a merge + $ hg rebase -s 4 --dest 8 --collapse # root (4) is not a merge rebasing 4:8a5212ebc852 "E" rebasing 5:dca5924bb570 "F" merging E @@ -512,7 +512,7 @@ $ hg clone -q -u . 
d d1 $ cd d1 - $ hg rebase -s 1 --collapse + $ hg rebase -s 1 --collapse --dest 5 rebasing 1:27547f69f254 "B" rebasing 2:f838bfaca5c7 "C" rebasing 3:7bbcd6078bcc "D" @@ -804,3 +804,52 @@ base $ cd .. + +Test that rebase --collapse will remember message after +running into merge conflict and invoking rebase --continue. + + $ hg init collapse_remember_message + $ cd collapse_remember_message + $ touch a + $ hg add a + $ hg commit -m "a" + $ echo "a-default" > a + $ hg commit -m "a-default" + $ hg update -r 0 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg branch dev + marked working directory as branch dev + (branches are permanent and global, did you want a bookmark?) + $ echo "a-dev" > a + $ hg commit -m "a-dev" + $ hg rebase --collapse -m "a-default-dev" -d 1 + rebasing 2:b8d8db2b242d "a-dev" (tip) + merging a + warning: conflicts while merging a! (edit, then use 'hg resolve --mark') + unresolved conflicts (see hg resolve, then hg rebase --continue) + [1] + $ rm a.orig + $ hg resolve --mark a + (no more unresolved files) + continue: hg rebase --continue + $ hg rebase --continue + rebasing 2:b8d8db2b242d "a-dev" (tip) + saved backup bundle to $TESTTMP/collapse_remember_message/.hg/strip-backup/b8d8db2b242d-f474c19a-backup.hg (glob) + $ hg log + changeset: 2:12bb766dceb1 + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a-default-dev + + changeset: 1:3c8db56a44bc + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a-default + + changeset: 0:3903775176ed + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a + + $ cd ..
--- a/tests/test-rebase-conflicts.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-conflicts.t Sat Apr 16 18:06:48 2016 -0500 @@ -305,3 +305,55 @@ rebase completed updating the branch cache truncating cache/rbc-revs-v1 to 72 + +Test minimization of merge conflicts + $ hg up -q null + $ echo a > a + $ hg add a + $ hg commit -q -m 'a' + $ echo b >> a + $ hg commit -q -m 'ab' + $ hg bookmark ab + $ hg up -q '.^' + $ echo b >> a + $ echo c >> a + $ hg commit -q -m 'abc' + $ hg rebase -s 7bc217434fc1 -d ab --keep + rebasing 13:7bc217434fc1 "abc" (tip) + merging a + warning: conflicts while merging a! (edit, then use 'hg resolve --mark') + unresolved conflicts (see hg resolve, then hg rebase --continue) + [1] + $ hg diff + diff -r 328e4ab1f7cc a + --- a/a Thu Jan 01 00:00:00 1970 +0000 + +++ b/a * (glob) + @@ -1,2 +1,6 @@ + a + b + +<<<<<<< dest: 328e4ab1f7cc ab - test: ab + +======= + +c + +>>>>>>> source: 7bc217434fc1 - test: abc + $ hg rebase --abort + rebase aborted + $ hg up -q -C 7bc217434fc1 + $ hg rebase -s . -d ab --keep -t internal:merge3 + rebasing 13:7bc217434fc1 "abc" (tip) + merging a + warning: conflicts while merging a! (edit, then use 'hg resolve --mark') + unresolved conflicts (see hg resolve, then hg rebase --continue) + [1] + $ hg diff + diff -r 328e4ab1f7cc a + --- a/a Thu Jan 01 00:00:00 1970 +0000 + +++ b/a * (glob) + @@ -1,2 +1,8 @@ + a + +<<<<<<< dest: 328e4ab1f7cc ab - test: ab + b + +||||||| base + +======= + +b + +c + +>>>>>>> source: 7bc217434fc1 - test: abc
--- a/tests/test-rebase-named-branches.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-named-branches.t Sat Apr 16 18:06:48 2016 -0500 @@ -327,14 +327,13 @@ $ hg up -qr 2 $ hg rebase - nothing to rebase - working directory parent is also destination - [1] + rebasing 2:792845bb77ee "b2" + note: rebase of 2:792845bb77ee created no changes to commit + saved backup bundle to $TESTTMP/case1/.hg/strip-backup/792845bb77ee-627120ee-backup.hg (glob) $ hg tglog - o 3: 'c1' c + o 2: 'c1' c | - | @ 2: 'b2' b - |/ - | o 1: 'b1' b + | @ 1: 'b1' b |/ o 0: '0' @@ -373,8 +372,9 @@ o 0: '0' $ hg rebase - nothing to rebase - working directory parent is also destination - [1] + abort: branch 'c' has one head - please rebase to an explicit rev + (run 'hg heads' to see all heads) + [255] $ hg tglog _ 4: 'c2 closed' c |
--- a/tests/test-rebase-obsolete.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-obsolete.t Sat Apr 16 18:06:48 2016 -0500 @@ -527,7 +527,7 @@ $ hg commit -m J $ hg debugobsolete `hg log --rev . -T '{node}'` - $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback + $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off rebasing 9:4bde274eefcf "I" rebasing 13:06edfc82198f "J" (tip) $ hg log -G @@ -771,8 +771,8 @@ phases: 8 draft unstable: 1 changesets $ hg rebase -s 10 -d 12 - abort: this rebase will cause divergence - (to force the rebase please set rebase.allowdivergence=True) + abort: this rebase will cause divergences from: 121d9e3bc4c6 + (to force the rebase please set experimental.allowdivergence=True) [255] $ hg log -G @ 15:73568ab6879d bar foo @@ -791,9 +791,9 @@ | o 0:4a2df7238c3b A -With rebase.allowdivergence=True, rebase can create divergence +With experimental.allowdivergence=True, rebase can create divergence - $ hg rebase -s 10 -d 12 --config rebase.allowdivergence=True + $ hg rebase -s 10 -d 12 --config experimental.allowdivergence=True rebasing 10:121d9e3bc4c6 "P" rebasing 15:73568ab6879d "bar foo" (tip) $ hg summary @@ -805,3 +805,61 @@ phases: 8 draft divergent: 2 changesets +rebase --continue + skipped rev because their successors are in destination +we make a change in trunk and work on conflicting changes to make rebase abort. 
+ + $ hg log -G -r 17:: + @ 17:61bd55f69bc4 bar foo + | + ~ + +Create the two changes in trunk + $ printf "a" > willconflict + $ hg add willconflict + $ hg commit -m "willconflict first version" + + $ printf "dummy" > C + $ hg commit -m "dummy change successor" + +Create the changes that we will rebase + $ hg update -C 17 -q + $ printf "b" > willconflict + $ hg add willconflict + $ hg commit -m "willconflict second version" + created new head + $ printf "dummy" > K + $ hg add K + $ hg commit -m "dummy change" + $ printf "dummy" > L + $ hg add L + $ hg commit -m "dummy change" + $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 19 -T '{node}'` --config experimental.evolution=all + + $ hg log -G -r 17:: + @ 22:7bdc8a87673d dummy change + | + x 21:8b31da3c4919 dummy change + | + o 20:b82fb57ea638 willconflict second version + | + | o 19:601db7a18f51 dummy change successor + | | + | o 18:357ddf1602d5 willconflict first version + |/ + o 17:61bd55f69bc4 bar foo + | + ~ + $ hg rebase -r ".^^ + .^ + ." -d 19 + rebasing 20:b82fb57ea638 "willconflict second version" + merging willconflict + warning: conflicts while merging willconflict! (edit, then use 'hg resolve --mark') + unresolved conflicts (see hg resolve, then hg rebase --continue) + [1] + + $ hg resolve --mark willconflict + (no more unresolved files) + continue: hg rebase --continue + $ hg rebase --continue + rebasing 20:b82fb57ea638 "willconflict second version" + note: not rebasing 21:8b31da3c4919 "dummy change", already in destination as 19:601db7a18f51 "dummy change successor" + rebasing 22:7bdc8a87673d "dummy change" (tip)
--- a/tests/test-rebase-parameters.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-parameters.t Sat Apr 16 18:06:48 2016 -0500 @@ -46,11 +46,14 @@ $ cd .. +Version with only two heads (to allow default destination to work) + + $ hg clone -q -u . a a2heads -r 3 -r 8 These fail: - $ hg clone -q -u . a a1 - $ cd a1 + $ hg clone -q -u . a a0 + $ cd a0 $ hg rebase -s 8 -d 7 nothing to rebase @@ -79,33 +82,38 @@ abort: cannot specify both a revision and a base [255] - $ hg rebase --rev '1 & !1' + $ hg rebase --base 6 + abort: branch 'default' has 3 heads - please rebase to an explicit rev + (run 'hg heads .' to see heads) + [255] + + $ hg rebase --rev '1 & !1' --dest 8 empty "rev" revision set - nothing to rebase [1] - $ hg rebase --source '1 & !1' + $ hg rebase --source '1 & !1' --dest 8 empty "source" revision set - nothing to rebase [1] - $ hg rebase --base '1 & !1' + $ hg rebase --base '1 & !1' --dest 8 empty "base" revision set - can't compute rebase set [1] - $ hg rebase + $ hg rebase --dest 8 nothing to rebase - working directory parent is also destination [1] - $ hg rebase -b. + $ hg rebase -b . --dest 8 nothing to rebase - e7ec4e813ba6 is both "base" and destination [1] $ hg up -q 7 - $ hg rebase --traceback + $ hg rebase --dest 8 --traceback nothing to rebase - working directory parent is already an ancestor of destination e7ec4e813ba6 [1] - $ hg rebase -b. + $ hg rebase --dest 8 -b. nothing to rebase - "base" 02de42196ebe is already an ancestor of destination e7ec4e813ba6 [1] @@ -117,6 +125,9 @@ Rebase with no arguments (from 3 onto 8): + $ cd .. + $ hg clone -q -u . 
a2heads a1 + $ cd a1 $ hg up -q -C 3 $ hg rebase @@ -126,22 +137,18 @@ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog - @ 8: 'D' - | - o 7: 'C' + @ 6: 'D' | - o 6: 'B' + o 5: 'C' | - o 5: 'I' + o 4: 'B' | - o 4: 'H' + o 3: 'I' | - | o 3: 'G' - |/| - o | 2: 'F' - | | - | o 1: 'E' - |/ + o 2: 'H' + | + o 1: 'F' + | o 0: 'A' Try to rollback after a rebase (fail): @@ -154,7 +161,7 @@ Rebase with base == '.' => same as no arguments (from 3 onto 8): - $ hg clone -q -u 3 a a2 + $ hg clone -q -u 3 a2heads a2 $ cd a2 $ hg rebase --base . @@ -164,22 +171,18 @@ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog - @ 8: 'D' - | - o 7: 'C' + @ 6: 'D' | - o 6: 'B' + o 5: 'C' | - o 5: 'I' + o 4: 'B' | - o 4: 'H' + o 3: 'I' | - | o 3: 'G' - |/| - o | 2: 'F' - | | - | o 1: 'E' - |/ + o 2: 'H' + | + o 1: 'F' + | o 0: 'A' $ cd .. @@ -220,7 +223,7 @@ Specify only source (from 2 onto 8): - $ hg clone -q -u . a a4 + $ hg clone -q -u . a2heads a4 $ cd a4 $ hg rebase --source 'desc("C")' @@ -229,20 +232,16 @@ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob) $ hg tglog - o 8: 'D' + o 6: 'D' | - o 7: 'C' - | - @ 6: 'I' + o 5: 'C' | - o 5: 'H' + @ 4: 'I' | - | o 4: 'G' - |/| - o | 3: 'F' - | | - | o 2: 'E' - |/ + o 3: 'H' + | + o 2: 'F' + | | o 1: 'B' |/ o 0: 'A' @@ -285,7 +284,7 @@ Specify only base (from 1 onto 8): - $ hg clone -q -u . a a6 + $ hg clone -q -u . a2heads a6 $ cd a6 $ hg rebase --base 'desc("D")' @@ -295,22 +294,18 @@ saved backup bundle to $TESTTMP/a6/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog - o 8: 'D' - | - o 7: 'C' + o 6: 'D' | - o 6: 'B' + o 5: 'C' | - @ 5: 'I' + o 4: 'B' | - o 4: 'H' + @ 3: 'I' | - | o 3: 'G' - |/| - o | 2: 'F' - | | - | o 1: 'E' - |/ + o 2: 'H' + | + o 1: 'F' + | o 0: 'A' $ cd .. @@ -383,7 +378,7 @@ Specify only revs (from 2 onto 8) - $ hg clone -q -u . a a9 + $ hg clone -q -u . 
a2heads a9 $ cd a9 $ hg rebase --rev 'desc("C")::' @@ -392,20 +387,16 @@ saved backup bundle to $TESTTMP/a9/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob) $ hg tglog - o 8: 'D' + o 6: 'D' | - o 7: 'C' - | - @ 6: 'I' + o 5: 'C' | - o 5: 'H' + @ 4: 'I' | - | o 4: 'G' - |/| - o | 3: 'F' - | | - | o 2: 'E' - |/ + o 3: 'H' + | + o 2: 'F' + | | o 1: 'B' |/ o 0: 'A' @@ -416,7 +407,7 @@ $ hg clone -q -u . a aX $ cd aX - $ hg rebase -r 3 -r 6 + $ hg rebase -r 3 -r 6 --dest 8 rebasing 3:32af7686d403 "D" rebasing 6:eea13746799a "G" saved backup bundle to $TESTTMP/aX/.hg/strip-backup/eea13746799a-ad273fd6-backup.hg (glob) @@ -495,6 +486,10 @@ $ hg resolve -m c2 (no more unresolved files) continue: hg rebase --continue + $ hg graft --continue + abort: no graft in progress + (continue: hg rebase --continue) + [255] $ hg rebase -c --tool internal:fail rebasing 2:e4e3f3546619 "c2b" (tip) note: rebase of 2:e4e3f3546619 created no changes to commit
--- a/tests/test-rebase-pull.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-pull.t Sat Apr 16 18:06:48 2016 -0500 @@ -85,13 +85,14 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files - nothing to rebase - working directory parent is already an ancestor of destination 77ae9631bcca + nothing to rebase - updating instead 1 files updated, 0 files merged, 0 files removed, 0 files unresolved updating bookmark norebase $ hg tglog -l 1 @ 2: 'R1' | + ~ pull --rebase --update should ignore --update: @@ -112,6 +113,7 @@ $ hg tglog -l 1 o 2: 'R1' | + ~ $ cd .. @@ -209,3 +211,103 @@ | o 0: 'C1' + +pull --rebase only update if there is nothing to rebase + + $ cd ../a + $ echo R5 > R5 + $ hg ci -Am R5 + adding R5 + $ hg tglog + @ 6: 'R5' + | + o 5: 'R4' + | + o 4: 'R3' + | + o 3: 'R2' + | + o 2: 'R1' + | + o 1: 'C2' + | + o 0: 'C1' + + $ cd ../c + $ echo L2 > L2 + $ hg ci -Am L2 + adding L2 + $ hg up 'desc(L1)' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg pull --rebase + pulling from $TESTTMP/a (glob) + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + rebasing 6:0d0727eb7ce0 "L1" + rebasing 7:c1f58876e3bf "L2" + saved backup bundle to $TESTTMP/c/.hg/strip-backup/0d0727eb7ce0-ef61ccb2-backup.hg (glob) + $ hg tglog + o 8: 'L2' + | + @ 7: 'L1' + | + o 6: 'R5' + | + o 5: 'R4' + | + o 4: 'R3' + | + o 3: 'R2' + | + o 2: 'R1' + | + o 1: 'C2' + | + o 0: 'C1' + + +pull --rebase update (no rebase) use proper update: + +- warn about other head. 
+ + $ cd ../a + $ echo R6 > R6 + $ hg ci -Am R6 + adding R6 + $ cd ../c + $ hg up 'desc(R5)' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg pull --rebase + pulling from $TESTTMP/a (glob) + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + nothing to rebase - updating instead + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" + $ hg tglog + @ 9: 'R6' + | + | o 8: 'L2' + | | + | o 7: 'L1' + |/ + o 6: 'R5' + | + o 5: 'R4' + | + o 4: 'R3' + | + o 3: 'R2' + | + o 2: 'R1' + | + o 1: 'C2' + | + o 0: 'C1' +
--- a/tests/test-rebase-rename.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-rename.t Sat Apr 16 18:06:48 2016 -0500 @@ -298,7 +298,7 @@ o 3: 'File c created as copy of b and modified' | o 2: 'File b created as copy of a and modified' - | + : o 0: 'File a created' Same steps as above, but with --collapse on rebase to make sure the @@ -323,9 +323,9 @@ $ hg tglog --follow d @ 3: 'Collapsed revision - | * File b created as copy of a and modified - | * File c created as copy of b and modified - | * File d created as copy of c and modified' + : * File b created as copy of a and modified + : * File c created as copy of b and modified + : * File d created as copy of c and modified' o 0: 'File a created'
--- a/tests/test-rebase-scenario-global.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rebase-scenario-global.t Sat Apr 16 18:06:48 2016 -0500 @@ -25,7 +25,8 @@ Rebasing D onto H - simple rebase: -(this also tests that editor is invoked if '--edit' is specified) +(this also tests that editor is invoked if '--edit' is specified, and that we +can abort or warn for colliding untracked files) $ hg clone -q -u . a a1 $ cd a1 @@ -50,8 +51,10 @@ $ hg status --rev "3^1" --rev 3 A D - $ HGEDITOR=cat hg rebase -s 3 -d 7 --edit + $ echo collide > D + $ HGEDITOR=cat hg rebase -s 3 -d 7 --edit --config merge.checkunknown=warn rebasing 3:32af7686d403 "D" + D: replacing untracked file D @@ -62,6 +65,9 @@ HG: branch 'default' HG: added D saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob) + $ cat D.orig + collide + $ rm D.orig $ hg tglog o 7: 'D' @@ -84,14 +90,19 @@ D onto F - intermediate point: -(this also tests that editor is not invoked if '--edit' is not specified) +(this also tests that editor is not invoked if '--edit' is not specified, and +that we can ignore for colliding untracked files) $ hg clone -q -u . a a2 $ cd a2 + $ echo collide > D - $ HGEDITOR=cat hg rebase -s 3 -d 5 + $ HGEDITOR=cat hg rebase -s 3 -d 5 --config merge.checkunknown=ignore rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a2/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob) + $ cat D.orig + collide + $ rm D.orig $ hg tglog o 7: 'D' @@ -114,15 +125,21 @@ E onto H - skip of G: +(this also tests that we can overwrite untracked files and don't create backups +if they have the same contents) $ hg clone -q -u . 
a a3 $ cd a3 + $ hg cat -r 4 E | tee E + E $ hg rebase -s 4 -d 7 rebasing 4:9520eea781bc "E" rebasing 6:eea13746799a "G" note: rebase of 6:eea13746799a created no changes to commit saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob) + $ f E.orig + E.orig: file not found $ hg tglog o 6: 'E' @@ -745,12 +762,73 @@ saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-backup.hg (glob) Test experimental revset +======================== $ cd .. + +Make the repo a bit more interresting + + $ hg up 1 + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ echo aaa > aaa + $ hg add aaa + $ hg commit -m aaa + created new head + $ hg log -G + @ changeset: 4:5f7bc9025ed2 + | tag: tip + | parent: 1:58d79cc1cf43 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: aaa + | + | o changeset: 3:1910d5ff34ea + | | user: test + | | date: Thu Jan 01 00:00:00 1970 +0000 + | | summary: second source with subdir + | | + | o changeset: 2:82901330b6ef + |/ user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: first source commit + | + o changeset: 1:58d79cc1cf43 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: dest commit + | + o changeset: 0:e94b687f7da3 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: initial commit + + +Testing from lower head + + $ hg up 3 + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -r '_destrebase()' + changeset: 4:5f7bc9025ed2 + tag: tip + parent: 1:58d79cc1cf43 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: aaa + + +Testing from upper head + + $ hg log -r '_destrebase(4)' changeset: 3:1910d5ff34ea - tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: second source with subdir + $ hg up 4 + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ hg log -r '_destrebase()' + changeset: 3:1910d5ff34ea + user: test + date: Thu Jan 01 00:00:00 
1970 +0000 + summary: second source with subdir +
--- a/tests/test-record.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-record.t Sat Apr 16 18:06:48 2016 -0500 @@ -41,6 +41,8 @@ This command is not available when committing a merge. + (use "hg help -e record" to show help for the record extension) + options ([+] can be repeated): -A --addremove mark new/missing files as added/removed before
--- a/tests/test-remove.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-remove.t Sat Apr 16 18:06:48 2016 -0500 @@ -7,6 +7,18 @@ > hg up -C > } + $ cat >> $HGRCPATH <<EOF + > [progress] + > disable=False + > assume-tty = 1 + > delay = 0 + > # set changedelay really large so we don't see nested topics + > changedelay = 30000 + > format = topic bar number + > refresh = 0 + > width = 60 + > EOF + $ hg init a $ cd a $ echo a > foo @@ -14,6 +26,9 @@ file not managed $ remove foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) not removing foo: file is untracked exit code: 1 ? foo @@ -29,42 +44,72 @@ $ echo b > bar $ hg add bar $ remove bar + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + skipping [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) not removing bar: file has been marked for add (use forget to undo) exit code: 1 A bar ./bar ./foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 0 files updated, 0 files merged, 0 files removed, 0 files unresolved 01 state clean, options none $ remove foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 R foo ? 
bar ./bar + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 02 state modified, options none $ echo b >> foo $ remove foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + skipping [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) not removing foo: file is modified (use -f to force removal) exit code: 1 M foo ? bar ./bar ./foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 03 state missing, options none $ rm foo $ remove foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 R foo ? bar ./bar + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 10 state added, options -f @@ -72,6 +117,9 @@ $ echo b > bar $ hg add bar $ remove -f bar + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 ? 
bar ./bar @@ -82,24 +130,42 @@ 11 state clean, options -f $ remove -f foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 R foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 12 state modified, options -f $ echo b >> foo $ remove -f foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 R foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 13 state missing, options -f $ rm foo $ remove -f foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 R foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 20 state added, options -A @@ -107,16 +173,31 @@ $ echo b > bar $ hg add bar $ remove -A bar + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + skipping [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) not removing bar: file still exists exit code: 1 A bar ./bar ./foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 0 files updated, 0 files merged, 0 files removed, 0 files unresolved 21 state clean, options -A $ remove -A foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + skipping [===========================================>] 1/1\r 
(no-eol) (esc) + \r (no-eol) (esc) not removing foo: file still exists exit code: 1 ? bar @@ -128,22 +209,37 @@ $ echo b >> foo $ remove -A foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + skipping [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) not removing foo: file still exists exit code: 1 M foo ? bar ./bar ./foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 23 state missing, options -A $ rm foo $ remove -A foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 R foo ? bar ./bar + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 30 state added, options -Af @@ -151,6 +247,9 @@ $ echo b > bar $ hg add bar $ remove -Af bar + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 ? 
bar ./bar @@ -161,26 +260,44 @@ 31 state clean, options -Af $ remove -Af foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 R foo ./foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 32 state modified, options -Af $ echo b >> foo $ remove -Af foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 R foo ./foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 33 state missing, options -Af $ rm foo $ remove -Af foo + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) exit code: 0 R foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved test some directory stuff @@ -196,42 +313,95 @@ $ rm test/bar $ remove test + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [=====================> ] 1/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) removing test/bar (glob) removing test/foo (glob) exit code: 0 R test/bar R test/foo ./foo + \r (no-eol) (esc) + updating [===========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved dir, options -f $ rm test/bar $ remove -f test + \r (no-eol) (esc) + deleting 
[===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [=====================> ] 1/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) removing test/bar (glob) removing test/foo (glob) exit code: 0 R test/bar R test/foo ./foo + \r (no-eol) (esc) + updating [===========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved dir, options -A $ rm test/bar $ remove -A test + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + skipping [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + removing test/bar (glob) not removing test/foo: file still exists (glob) - removing test/bar (glob) exit code: 1 R test/bar ./foo ./test/foo + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved dir, options -Af $ rm test/bar $ remove -Af test + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [=====================> ] 1/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) removing test/bar (glob) removing test/foo (glob) exit code: 0 @@ -239,6 +409,9 @@ R test/foo ./foo ./test/foo + \r (no-eol) (esc) + updating 
[===========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved test remove dropping empty trees (issue1861) @@ -250,6 +423,14 @@ adding issue1861/b/c/y adding issue1861/x $ hg rm issue1861/b + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) removing issue1861/b/c/y (glob) $ hg ci -m remove $ ls issue1861 @@ -270,18 +451,27 @@ $ mkdir d1 $ echo a > d1/a $ hg rm --after d1 + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) not removing d1: no tracked files [1] $ hg add d1/a $ rm d1/a $ hg rm --after d1 + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) removing d1/a (glob) -#if windows + $ hg rm --after nosuch nosuch: * (glob) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) [1] -#else - $ hg rm --after nosuch - nosuch: No such file or directory - [1] -#endif
--- a/tests/test-rename-dir-merge.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rename-dir-merge.t Sat Apr 16 18:06:48 2016 -0500 @@ -85,6 +85,7 @@ resolving manifests branchmerge: True, force: False, partial: False ancestor: f9b20c0d4c51, local: 397f8b00a740+, remote: ce36d17b18fb + starting 4 threads for background file closing (?) b/c: local directory rename - get from a/c -> dg getting a/c to b/c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-rename-merge2.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-rename-merge2.t Sat Apr 16 18:06:48 2016 -0500 @@ -88,6 +88,7 @@ ancestor: 924404dff337, local: e300d1c794ec+, remote: 4ce40f5aca24 preserving a for resolve of b preserving rev for resolve of rev + starting 4 threads for background file closing (?) a: remote unchanged -> k b: remote copied from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) @@ -168,6 +169,7 @@ preserving a for resolve of b preserving rev for resolve of rev removing a + starting 4 threads for background file closing (?) b: remote moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging a and b to b @@ -205,6 +207,7 @@ ancestor: 924404dff337, local: 02963e448370+, remote: f4db7e329e71 preserving b for resolve of b preserving rev for resolve of rev + starting 4 threads for background file closing (?) b: local copied/moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b and a to b @@ -274,6 +277,7 @@ branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 97c705ade336 preserving rev for resolve of rev + starting 4 threads for background file closing (?) rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev @@ -339,6 +343,7 @@ branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 02963e448370+, remote: 97c705ade336 preserving rev for resolve of rev + starting 4 threads for background file closing (?) 
rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev @@ -367,6 +372,7 @@ ancestor: 924404dff337, local: 62e7bf090eba+, remote: 49b6d8032493 preserving b for resolve of b preserving rev for resolve of rev + starting 4 threads for background file closing (?) b: both renamed from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b @@ -446,6 +452,7 @@ ancestor: 924404dff337, local: 86a2aa42fc76+, remote: af30c7647fc7 preserving b for resolve of b preserving rev for resolve of rev + starting 4 threads for background file closing (?) b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b @@ -486,6 +493,7 @@ preserving rev for resolve of rev a: other deleted -> r removing a + starting 4 threads for background file closing (?) b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b @@ -565,6 +573,7 @@ preserving rev for resolve of rev a: other deleted -> r removing a + starting 4 threads for background file closing (?) b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b @@ -642,6 +651,7 @@ ancestor: 924404dff337, local: 0b76e65c8289+, remote: 4ce40f5aca24 preserving b for resolve of b preserving rev for resolve of rev + starting 4 threads for background file closing (?) a: remote unchanged -> k b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) @@ -681,6 +691,7 @@ ancestor: 924404dff337, local: 02963e448370+, remote: 8dbce441892a preserving b for resolve of b preserving rev for resolve of rev + starting 4 threads for background file closing (?) 
a: prompt deleted/changed -> m (premerge) picked tool ':prompt' for a (binary False symlink False changedelete True) remote changed a which local deleted @@ -725,6 +736,7 @@ preserving a for resolve of a preserving b for resolve of b preserving rev for resolve of rev + starting 4 threads for background file closing (?) a: prompt changed/deleted -> m (premerge) picked tool ':prompt' for a (binary False symlink False changedelete True) local changed a which remote deleted @@ -772,6 +784,7 @@ preserving a for resolve of b preserving rev for resolve of rev removing a + starting 4 threads for background file closing (?) b: remote moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging a and b to b @@ -813,6 +826,7 @@ ancestor: 924404dff337, local: 62e7bf090eba+, remote: f4db7e329e71 preserving b for resolve of b preserving rev for resolve of rev + starting 4 threads for background file closing (?) b: local copied/moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b and a to b
--- a/tests/test-repair-strip.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-repair-strip.t Sat Apr 16 18:06:48 2016 -0500 @@ -1,5 +1,12 @@ #require unix-permissions no-root + $ cat > $TESTTMP/dumpjournal.py <<EOF + > import sys + > for entry in sys.stdin.read().split('\n'): + > if entry: + > print entry.split('\x00')[0] + > EOF + $ echo "[extensions]" >> $HGRCPATH $ echo "mq=">> $HGRCPATH @@ -14,7 +21,7 @@ > hg verify > echo % journal contents > if [ -f .hg/store/journal ]; then - > sed -e 's/\.i[^\n]*/\.i/' .hg/store/journal + > cat .hg/store/journal | python $TESTTMP/dumpjournal.py > else > echo "(no journal)" > fi
--- a/tests/test-resolve.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-resolve.t Sat Apr 16 18:06:48 2016 -0500 @@ -53,6 +53,34 @@ arguments do not match paths that need resolving $ hg resolve -l does-not-exist +tell users how they could have used resolve + + $ mkdir nested + $ cd nested + $ hg resolve -m file1 + arguments do not match paths that need resolving + (try: hg resolve -m path:file1) + $ hg resolve -m file1 filez + arguments do not match paths that need resolving + (try: hg resolve -m path:file1 path:filez) + $ hg resolve -m path:file1 path:filez + $ hg resolve -l + R file1 + U file2 + $ hg resolve -m filez file2 + arguments do not match paths that need resolving + (try: hg resolve -m path:filez path:file2) + $ hg resolve -m path:filez path:file2 + (no more unresolved files) + $ hg resolve -l + R file1 + R file2 + +cleanup + $ hg resolve -u + $ cd .. + $ rmdir nested + don't allow marking or unmarking driver-resolved files $ cat > $TESTTMP/markdriver.py << EOF @@ -263,10 +291,12 @@ local: 57653b9f834a4493f7240b0681efcb9ae7cab745 other: dc77451844e37f03f5c559e3b8529b2b48d381d1 unrecognized entry: x advisory record + file extras: file1 (ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac) file: file1 (record type "F", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d) + file extras: file2 (ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac) file: file2 (record type "F", state "u", hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523) local path: file2 (flags "") ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) @@ -282,10 +312,12 @@ * version 2 records local: 57653b9f834a4493f7240b0681efcb9ae7cab745 other: dc77451844e37f03f5c559e3b8529b2b48d381d1 + file extras: file1 (ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac) file: file1 (record type 
"F", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d) + file extras: file2 (ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac) file: file2 (record type "F", state "u", hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523) local path: file2 (flags "") ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) @@ -300,12 +332,12 @@ (see https://mercurial-scm.org/wiki/MergeStateRecords for more information) [255] $ hg summary + warning: merge state has unsupported record types: X parent: 2:57653b9f834a append baz to files parent: 1:dc77451844e3 append bar to files branch: default - warning: merge state has unsupported record types: X commit: 2 modified, 2 unknown (merge) update: 2 new changesets (update) phases: 5 draft
--- a/tests/test-revert-interactive.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-revert-interactive.t Sat Apr 16 18:06:48 2016 -0500 @@ -15,6 +15,7 @@ > interactive = true > [extensions] > record = + > purge = > EOF @@ -377,3 +378,26 @@ 5 d +lastline + + $ hg update -C . + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg purge + $ touch newfile + $ hg add newfile + $ hg status + A newfile + $ hg revert -i <<EOF + > n + > EOF + forgetting newfile + forget added file newfile (yn)? n + $ hg status + A newfile + $ hg revert -i <<EOF + > y + > EOF + forgetting newfile + forget added file newfile (yn)? y + $ hg status + ? newfile +
--- a/tests/test-revlog-ancestry.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-revlog-ancestry.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,7 +1,12 @@ +from __future__ import absolute_import, print_function import os -from mercurial import hg, ui, merge +from mercurial import ( + hg, + merge, + ui as uimod, +) -u = ui.ui() +u = uimod.ui() repo = hg.repository(u, 'test1', create=1) os.chdir('test1') @@ -46,39 +51,38 @@ addcommit("I", 8) # Ancestors - print 'Ancestors of 5' + print('Ancestors of 5') for r in repo.changelog.ancestors([5]): - print r, + print(r, end=' ') - print '\nAncestors of 6 and 5' + print('\nAncestors of 6 and 5') for r in repo.changelog.ancestors([6, 5]): - print r, + print(r, end=' ') - print '\nAncestors of 5 and 4' + print('\nAncestors of 5 and 4') for r in repo.changelog.ancestors([5, 4]): - print r, + print(r, end=' ') - print '\nAncestors of 7, stop at 6' + print('\nAncestors of 7, stop at 6') for r in repo.changelog.ancestors([7], 6): - print r, + print(r, end=' ') - print '\nAncestors of 7, including revs' + print('\nAncestors of 7, including revs') for r in repo.changelog.ancestors([7], inclusive=True): - print r, + print(r, end=' ') - print '\nAncestors of 7, 5 and 3, including revs' + print('\nAncestors of 7, 5 and 3, including revs') for r in repo.changelog.ancestors([7, 5, 3], inclusive=True): - print r, + print(r, end=' ') # Descendants - print '\n\nDescendants of 5' + print('\n\nDescendants of 5') for r in repo.changelog.descendants([5]): - print r, + print(r, end=' ') - print '\nDescendants of 5 and 3' + print('\nDescendants of 5 and 3') for r in repo.changelog.descendants([5, 3]): - print r, + print(r, end=' ') - print '\nDescendants of 5 and 4' - for r in repo.changelog.descendants([5, 4]): - print r, + print('\nDescendants of 5 and 4') + print(*repo.changelog.descendants([5, 4]), sep=' ')
--- a/tests/test-revlog.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-revlog.t Sat Apr 16 18:06:48 2016 -0500 @@ -11,5 +11,5 @@ rev offset length delta linkrev nodeid p1 p2 0 0 19 -1 2 99e0332bd498 000000000000 000000000000 1 19 12 0 3 6674f57a23d8 99e0332bd498 000000000000 - $ hg debugdata a.i 1 2>&1 | grep decoded - mpatch.mpatchError: patch cannot be decoded + $ hg debugdata a.i 1 2>&1 | egrep 'Error:.*decoded' + (mercurial.mpatch.)?mpatchError: patch cannot be decoded (re)
--- a/tests/test-revset.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-revset.t Sat Apr 16 18:06:48 2016 -0500 @@ -169,7 +169,9 @@ ('symbol', 'a')) * set: <filteredset - <baseset [1]>> + <baseset [1]>, + <not + <baseset [0]>>> 1 $ try _a_b_c_ ('symbol', '_a_b_c_') @@ -182,7 +184,9 @@ ('symbol', 'a')) * set: <filteredset - <baseset [6]>> + <baseset [6]>, + <not + <baseset [0]>>> 6 $ try .a.b.c. ('symbol', '.a.b.c.') @@ -195,7 +199,9 @@ ('symbol', 'a')) * set: <filteredset - <baseset [7]>> + <baseset [7]>, + <not + <baseset [0]>>> 7 names that should be caught by fallback mechanism @@ -278,7 +284,9 @@ ('symbol', 'a')) * set: <filteredset - <baseset [4]>> + <baseset [4]>, + <not + <baseset [0]>>> 4 $ log '1 or 2' @@ -537,14 +545,16 @@ ('string', '\x08issue\\d+')) * set: <filteredset - <fullreposet+ 0:9>> + <fullreposet+ 0:9>, + <grep '\x08issue\\d+'>> $ try 'grep(r"\bissue\d+")' (func ('symbol', 'grep') ('string', '\\bissue\\d+')) * set: <filteredset - <fullreposet+ 0:9>> + <fullreposet+ 0:9>, + <grep '\\bissue\\d+'>> 6 $ try 'grep(r"\")' hg: parse error at 7: unterminated string @@ -693,12 +703,11 @@ * optimized: (func ('symbol', 'only') - (and + (difference (range ('symbol', '8') ('symbol', '9')) - (not - ('symbol', '8')))) + ('symbol', '8'))) * set: <baseset+ [8, 9]> 8 @@ -716,7 +725,7 @@ ('symbol', '9') ('symbol', '5'))) * set: - <baseset+ [8, 9, 2, 4]> + <baseset+ [2, 4, 8, 9]> 2 4 8 @@ -1230,7 +1239,7 @@ test that chained `or` operations never eat up stack (issue4624) (uses `0:1` instead of `0` to avoid future optimization of trivial revisions) - $ hg log -T '{rev}\n' -r "`python -c "print '|'.join(['0:1'] * 500)"`" + $ hg log -T '{rev}\n' -r `python -c "print '+'.join(['0:1'] * 500)"` 0 1 @@ -1581,12 +1590,14 @@ $ try m ('symbol', 'm') + * expanded: (func ('symbol', 'merge') None) * set: <filteredset - <fullreposet+ 0:9>> + <fullreposet+ 0:9>, + <merge>> 6 $ HGPLAIN=1 @@ -1600,12 +1611,14 @@ $ export HGPLAINEXCEPT $ try m ('symbol', 'm') + * expanded: (func 
('symbol', 'merge') None) * set: <filteredset - <fullreposet+ 0:9>> + <fullreposet+ 0:9>, + <merge>> 6 $ unset HGPLAIN @@ -1615,6 +1628,7 @@ (func ('symbol', 'p2') ('symbol', '.')) + * expanded: (func ('symbol', 'p1') ('symbol', '.')) @@ -1637,6 +1651,7 @@ (func ('symbol', 'p2') ('symbol', '.')) + * expanded: (func ('symbol', 'p1') ('symbol', '.')) @@ -1651,6 +1666,7 @@ $ try sincem ('symbol', 'sincem') + * expanded: (func ('symbol', 'descendants') (func @@ -1659,7 +1675,8 @@ * set: <addset+ <filteredset - <fullreposet+ 0:9>>, + <fullreposet+ 0:9>, + <merge>>, <generatorset+>> 6 7 @@ -1685,6 +1702,7 @@ ('symbol', '1') ('symbol', '2'))) ('symbol', '3'))) + * expanded: (or ('symbol', '3') (or @@ -1709,13 +1727,17 @@ (range ('symbol', '2') ('symbol', '5'))) + * expanded: (func ('symbol', 'max') (range ('symbol', '2') ('symbol', '5'))) * set: - <baseset [5]> + <baseset + <max + <fullreposet+ 0:9>, + <spanset+ 2:5>>> 5 test chained `or` operations are flattened at parsing phase @@ -1734,6 +1756,7 @@ (range ('symbol', '2') ('symbol', '3')))) + * expanded: (or (range ('symbol', '0') @@ -1767,6 +1790,7 @@ (range ('symbol', '2') ('symbol', '5'))) + * expanded: (func ('symbol', 'descendants') (func @@ -1775,25 +1799,42 @@ abort: unknown revision '$1'! [255] - $ echo 'injectparamasstring2 = max(_aliasarg("$1"))' >> .hg/hgrc - $ echo 'callinjection2($1) = descendants(injectparamasstring2)' >> .hg/hgrc - $ try 'callinjection2(2:5)' +test scope of alias expansion: 'universe' is expanded prior to 'shadowall(0)', +but 'all()' should never be substituded to '0()'. 
+ + $ echo 'universe = all()' >> .hg/hgrc + $ echo 'shadowall(all) = all and universe' >> .hg/hgrc + $ try 'shadowall(0)' (func - ('symbol', 'callinjection2') - (range - ('symbol', '2') - ('symbol', '5'))) - abort: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg + ('symbol', 'shadowall') + ('symbol', '0')) + * expanded: + (and + ('symbol', '0') + (func + ('symbol', 'all') + None)) + * set: + <filteredset + <baseset [0]>, + <spanset+ 0:9>> + 0 + +test unknown reference: + + $ try "unknownref(0)" --config 'revsetalias.unknownref($1)=$1:$2' + (func + ('symbol', 'unknownref') + ('symbol', '0')) + abort: failed to parse the definition of revset alias "unknownref": '$' not for alias arguments [255] + $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip" ('symbol', 'tip') warning: failed to parse the definition of revset alias "anotherbadone": at 7: not a prefix: end - warning: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg * set: <baseset [9]> 9 - >>> data = file('.hg/hgrc', 'rb').read() - >>> file('.hg/hgrc', 'wb').write(data.replace('_aliasarg', '')) $ try 'tip' ('symbol', 'tip') @@ -1814,6 +1855,7 @@ (list ('string', 'foo') ('symbol', 'tip'))) + * expanded: (or ('symbol', 'tip') (func @@ -1823,7 +1865,8 @@ <addset <baseset [9]>, <filteredset - <fullreposet+ 0:9>>> + <fullreposet+ 0:9>, + <desc '$1'>>> 9 $ try 'd(2:5)' @@ -1832,6 +1875,7 @@ (range ('symbol', '2') ('symbol', '5'))) + * expanded: (func ('symbol', 'reverse') (func @@ -1855,6 +1899,7 @@ ('symbol', '2') ('symbol', '3')) ('symbol', 'date'))) + * expanded: (func ('symbol', 'reverse') (func @@ -1899,6 +1944,7 @@ ('symbol', 'x') ('symbol', 'x') ('symbol', 'date'))) + * expanded: (func ('symbol', 'reverse') (func @@ -1971,11 +2017,80 @@ issue2549 - correct optimizations - $ log 'limit(1 or 2 or 3, 2) and not 2' + $ try 'limit(1 or 2 or 3, 2) and not 2' + (and + (func + ('symbol', 
'limit') + (list + (or + ('symbol', '1') + ('symbol', '2') + ('symbol', '3')) + ('symbol', '2'))) + (not + ('symbol', '2'))) + * set: + <filteredset + <baseset + <limit n=2, offset=0, + <fullreposet+ 0:9>, + <baseset [1, 2, 3]>>>, + <not + <baseset [2]>>> 1 - $ log 'max(1 or 2) and not 2' - $ log 'min(1 or 2) and not 1' - $ log 'last(1 or 2, 1) and not 2' + $ try 'max(1 or 2) and not 2' + (and + (func + ('symbol', 'max') + (or + ('symbol', '1') + ('symbol', '2'))) + (not + ('symbol', '2'))) + * set: + <filteredset + <baseset + <max + <fullreposet+ 0:9>, + <baseset [1, 2]>>>, + <not + <baseset [2]>>> + $ try 'min(1 or 2) and not 1' + (and + (func + ('symbol', 'min') + (or + ('symbol', '1') + ('symbol', '2'))) + (not + ('symbol', '1'))) + * set: + <filteredset + <baseset + <min + <fullreposet+ 0:9>, + <baseset [1, 2]>>>, + <not + <baseset [1]>>> + $ try 'last(1 or 2, 1) and not 2' + (and + (func + ('symbol', 'last') + (list + (or + ('symbol', '1') + ('symbol', '2')) + ('symbol', '1'))) + (not + ('symbol', '2'))) + * set: + <filteredset + <baseset + <last n=1, + <fullreposet+ 0:9>, + <baseset [2, 1]>>>, + <not + <baseset [2]>>> issue4289 - ordering of built-ins $ hg log -M -q -r 3:2 @@ -2049,6 +2164,7 @@ ('string', '5f5')) ('symbol', '1ee')) ('string', 'ce5')) + * concatenated: ('string', '2785f51eece5') * set: <baseset [0]> @@ -2063,6 +2179,7 @@ ('string', '5f5') ('symbol', '1ee') ('string', 'ce5'))) + * expanded: (_concat (_concat (_concat @@ -2070,6 +2187,7 @@ ('string', '5f5')) ('symbol', '1ee')) ('string', 'ce5')) + * concatenated: ('string', '2785f51eece5') * set: <baseset [0]> @@ -2192,28 +2310,21 @@ $ cd .. -Test registrar.delayregistrar via revset.extpredicate - -'extpredicate' decorator shouldn't register any functions until -'setup()' on it. 
+Test that revset predicate of extension isn't loaded at failure of +loading it $ cd repo $ cat <<EOF > $TESTTMP/custompredicate.py - > from mercurial import revset + > from mercurial import error, registrar, revset > - > revsetpredicate = revset.extpredicate() + > revsetpredicate = registrar.revsetpredicate() > > @revsetpredicate('custom1()') > def custom1(repo, subset, x): > return revset.baseset([1]) - > @revsetpredicate('custom2()') - > def custom2(repo, subset, x): - > return revset.baseset([2]) > - > def uisetup(ui): - > if ui.configbool('custompredicate', 'enabled'): - > revsetpredicate.setup() + > raise error.Abort('intentional failure of loading extension') > EOF $ cat <<EOF > .hg/hgrc > [extensions] @@ -2221,13 +2332,8 @@ > EOF $ hg debugrevspec "custom1()" + *** failed to import extension custompredicate from $TESTTMP/custompredicate.py: intentional failure of loading extension hg: parse error: unknown identifier: custom1 [255] - $ hg debugrevspec "custom2()" - hg: parse error: unknown identifier: custom2 - [255] - $ hg debugrevspec "custom1() or custom2()" --config custompredicate.enabled=true - 1 - 2 $ cd ..
--- a/tests/test-run-tests.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-run-tests.py Sat Apr 16 18:06:48 2016 -0500 @@ -3,13 +3,14 @@ run-test.t only checks positive matches and can not see warnings (both by design) """ -from __future__ import print_function +from __future__ import absolute_import, print_function -import os, re +import doctest +import os +import re # this is hack to make sure no escape characters are inserted into the output if 'TERM' in os.environ: del os.environ['TERM'] -import doctest run_tests = __import__('run-tests') def prn(ex):
--- a/tests/test-run-tests.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-run-tests.t Sat Apr 16 18:06:48 2016 -0500 @@ -21,6 +21,36 @@ > run-tests.py --with-hg=`which hg` "$@" > } +error paths + +#if symlink + $ ln -s `which true` hg + $ run-tests.py --with-hg=./hg + warning: --with-hg should specify an hg script + + # Ran 0 tests, 0 skipped, 0 warned, 0 failed. + $ rm hg +#endif + +#if execbit + $ touch hg + $ run-tests.py --with-hg=./hg + Usage: run-tests.py [options] [tests] + + run-tests.py: error: --with-hg must specify an executable hg script + [2] + $ rm hg +#endif + +an empty test +======================= + + $ touch test-empty.t + $ rt + . + # Ran 1 tests, 0 skipped, 0 warned, 0 failed. + $ rm test-empty.t + a succesful test ======================= @@ -28,9 +58,15 @@ > $ echo babar > babar > $ echo xyzzy - > never happens (?) + > never*happens (glob) (?) > xyzzy > nor this (?) + > $ printf 'abc\ndef\nxyz\n' + > 123 (?) + > abc + > def (?) + > 456 (?) + > xyz > EOF $ rt @@ -40,11 +76,37 @@ failing test ================== +test churn with globs + $ cat > test-failure.t <<EOF + > $ echo "bar-baz"; echo "bar-bad" + > bar*bad (glob) + > bar*baz (glob) + > EOF + $ rt test-failure.t + + --- $TESTTMP/test-failure.t + +++ $TESTTMP/test-failure.t.err + @@ -1,3 +1,3 @@ + $ echo "bar-baz"; echo "bar-bad" + + bar*baz (glob) + bar*bad (glob) + - bar*baz (glob) + + ERROR: test-failure.t output changed + ! + Failed test-failure.t: output changed + # Ran 1 tests, 0 skipped, 0 warned, 1 failed. + python hash seed: * (glob) + [1] + +basic failing test $ cat > test-failure.t << EOF > $ echo babar > rataxes > This is a noop statement so that > this test is still more bytes than success. + > pad pad pad pad............................................................ + > pad pad pad pad............................................................ 
> EOF >>> fh = open('test-failure-unicode.t', 'wb') @@ -55,12 +117,13 @@ --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ ERROR: test-failure.t output changed !. @@ -84,12 +147,13 @@ --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ ERROR: test-failure.t output changed !. @@ -122,12 +186,13 @@ <testcase name="test-failure.t" time="*"> (glob) <![CDATA[--- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ ]]> </testcase> </testsuite> @@ -140,12 +205,13 @@ --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ ERROR: test-failure.t output changed ! @@ -174,12 +240,13 @@ --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ ERROR: test-failure.t output changed ! 
@@ -193,12 +260,13 @@ --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ ERROR: test-failure.t output changed ! @@ -245,8 +313,8 @@ *SALT* 0 0 (glob) + echo babar babar - + echo *SALT* 4 0 (glob) - *SALT* 4 0 (glob) + + echo *SALT* 6 0 (glob) + *SALT* 6 0 (glob) *+ echo *SALT* 0 0 (glob) *SALT* 0 0 (glob) + echo babar @@ -257,6 +325,12 @@ xyzzy + echo *SALT* 6 0 (glob) *SALT* 6 0 (glob) + + printf *abc\ndef\nxyz\n* (glob) + abc + def + xyz + + echo *SALT* 12 0 (glob) + *SALT* 12 0 (glob) . # Ran 2 tests, 0 skipped, 0 warned, 0 failed. @@ -282,12 +356,13 @@ --- $TESTTMP/test-failure*.t (glob) +++ $TESTTMP/test-failure*.t.err (glob) - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ Failed test-failure*.t: output changed (glob) Failed test-nothing.t: output changed @@ -312,12 +387,13 @@ --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ Accept this change? [n] ERROR: test-failure.t output changed !. @@ -331,6 +407,8 @@ rataxes This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ + pad pad pad pad............................................................ 
Interactive with custom view @@ -368,12 +446,14 @@ --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,9 +1,9 @@ + @@ -1,11 +1,11 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ + pad pad pad pad............................................................ $ echo 'saved backup bundle to $TESTTMP/foo.hg' - saved backup bundle to $TESTTMP/foo.hg + saved backup bundle to $TESTTMP/foo.hg* (glob) @@ -388,6 +468,8 @@ babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ + pad pad pad pad............................................................ $ echo 'saved backup bundle to $TESTTMP/foo.hg' saved backup bundle to $TESTTMP/foo.hg (glob)< $ echo 'saved backup bundle to $TESTTMP/foo.hg' @@ -509,8 +591,6 @@ "result": "skip" } } (no-eol) -#if json - test for --json ================== @@ -518,12 +598,13 @@ --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ ERROR: test-failure.t output changed !.s @@ -571,12 +652,13 @@ --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err - @@ -1,4 +1,4 @@ + @@ -1,5 +1,5 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. + pad pad pad pad............................................................ Accept this change? [n] ..s Skipped test-skip.t: missing feature: nail clipper # Ran 2 tests, 1 skipped, 0 warned, 0 failed. 
@@ -613,8 +695,6 @@ } (no-eol) $ mv backup test-failure.t -#endif - backslash on end of line with glob matching is handled properly $ cat > test-glob-backslash.t << EOF @@ -642,7 +722,7 @@ > $ echo foo > foo > EOF - $ rt $HGTEST_RUN_TESTS_PURE test-hghave.t + $ rt test-hghave.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. @@ -665,7 +745,7 @@ > # > # check-code - a style and portability checker for Mercurial > EOF - $ rt $HGTEST_RUN_TESTS_PURE test-runtestdir.t + $ rt test-runtestdir.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. @@ -682,7 +762,7 @@ > $ custom-command.sh > hello world > EOF - $ rt $HGTEST_RUN_TESTS_PURE test-testdir-path.t + $ rt test-testdir-path.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. @@ -694,10 +774,53 @@ > $ echo pass > pass > EOF - $ rt $HGTEST_RUN_TESTS_PURE test-very-slow-test.t + $ rt test-very-slow-test.t s Skipped test-very-slow-test.t: missing feature: allow slow tests # Ran 0 tests, 1 skipped, 0 warned, 0 failed. $ rt $HGTEST_RUN_TESTS_PURE --allow-slow-tests test-very-slow-test.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. + +support for running a test outside the current directory + $ mkdir nonlocal + $ cat > nonlocal/test-is-not-here.t << EOF + > $ echo pass + > pass + > EOF + $ rt nonlocal/test-is-not-here.t + . + # Ran 1 tests, 0 skipped, 0 warned, 0 failed. + +support for bisecting failed tests automatically + $ hg init bisect + $ cd bisect + $ cat >> test-bisect.t <<EOF + > $ echo pass + > pass + > EOF + $ hg add test-bisect.t + $ hg ci -m 'good' + $ cat >> test-bisect.t <<EOF + > $ echo pass + > fail + > EOF + $ hg ci -m 'bad' + $ rt --known-good-rev=0 test-bisect.t + + --- $TESTTMP/anothertests/bisect/test-bisect.t + +++ $TESTTMP/anothertests/bisect/test-bisect.t.err + @@ -1,4 +1,4 @@ + $ echo pass + pass + $ echo pass + - fail + + pass + + ERROR: test-bisect.t output changed + ! + Failed test-bisect.t: output changed + test-bisect.t broken by 72cbf122d116 (bad) + # Ran 1 tests, 0 skipped, 0 warned, 1 failed. 
+ python hash seed: * (glob) + [1]
--- a/tests/test-schemes.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-schemes.t Sat Apr 16 18:06:48 2016 -0500 @@ -52,6 +52,21 @@ no changes found [1] +check that debugexpandscheme outputs the canonical form + + $ hg debugexpandscheme bb://user/repo + https://bitbucket.org/user/repo + +expanding an unknown scheme emits the input + + $ hg debugexpandscheme foobar://this/that + foobar://this/that + +expanding a canonical URL emits the input + + $ hg debugexpandscheme https://bitbucket.org/user/repo + https://bitbucket.org/user/repo + errors $ cat errors.log
--- a/tests/test-setdiscovery.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-setdiscovery.t Sat Apr 16 18:06:48 2016 -0500 @@ -317,13 +317,13 @@ query 3; still undecided: 1140, sample size is: 200 sampling from both directions searching: 4 queries - query 4; still undecided: 592, sample size is: 200 + query 4; still undecided: \d+, sample size is: 200 (re) sampling from both directions searching: 5 queries - query 5; still undecided: 292, sample size is: 200 + query 5; still undecided: \d+, sample size is: 200 (re) sampling from both directions searching: 6 queries - query 6; still undecided: 51, sample size is: 51 + query 6; still undecided: \d+, sample size is: \d+ (re) 6 total queries common heads: 3ee37d65064a
--- a/tests/test-shelve.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-shelve.t Sat Apr 16 18:06:48 2016 -0500 @@ -36,6 +36,12 @@ specific files or directories are named, only changes to those files are shelved. + In bare shelve(when no files are specified, without interactive, include + and exclude option), shelving remembers information if the working + directory was on newly created branch, in other words working directory + was on different branch than its first parent. In this situation + unshelving restores branch information to the working directory. + Each shelved change has a name that makes it easier to find later. The name of a shelved change defaults to being based on the active bookmark, or if there is no active bookmark, the current named branch. To specify a @@ -373,7 +379,7 @@ try to continue with no unshelve underway $ hg unshelve -c - abort: no unshelve operation underway + abort: no unshelve in progress [255] $ hg status A foo/foo @@ -403,6 +409,10 @@ (use 'hg unshelve --continue' or 'hg unshelve --abort') [255] + $ hg graft --continue + abort: no graft in progress + (continue: hg unshelve --continue) + [255] $ hg unshelve -c rebasing 5:32c69314e062 "changes to: [mq]: second.patch" (tip) unshelve of 'default' complete @@ -715,6 +725,7 @@ $ hg log -G --template '{rev} {desc|firstline} {author}' -R bundle://.hg/shelved/default.hg -r 'bundle()' o 4 changes to: commit stuff shelve@localhost | + ~ $ hg log -G --template '{rev} {desc|firstline} {author}' @ 3 commit stuff test | @@ -1314,3 +1325,263 @@ $ hg commit -qm "Remove unknown" $ cd .. + +We expects that non-bare shelve keeps newly created branch in +working directory. 
+ + $ hg init shelve-preserve-new-branch + $ cd shelve-preserve-new-branch + $ echo "a" >> a + $ hg add a + $ echo "b" >> b + $ hg add b + $ hg commit -m "ab" + $ echo "aa" >> a + $ echo "bb" >> b + $ hg branch new-branch + marked working directory as branch new-branch + (branches are permanent and global, did you want a bookmark?) + $ hg status + M a + M b + $ hg branch + new-branch + $ hg shelve a + shelved as default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg branch + new-branch + $ hg status + M b + $ touch "c" >> c + $ hg add c + $ hg status + M b + A c + $ hg shelve --exclude c + shelved as default-01 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg branch + new-branch + $ hg status + A c + $ hg shelve --include c + shelved as default-02 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg branch + new-branch + $ hg status + $ echo "d" >> d + $ hg add d + $ hg status + A d + +We expect that bare-shelve will not keep branch in current working directory. + + $ hg shelve + shelved as default-03 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg branch + default + +When i shelve commit on newly created branch i expect +that after unshelve newly created branch will be preserved. + + $ hg init shelve_on_new_branch_simple + $ cd shelve_on_new_branch_simple + $ echo "aaa" >> a + $ hg commit -A -m "a" + adding a + $ hg branch + default + $ hg branch test + marked working directory as branch test + (branches are permanent and global, did you want a bookmark?) + $ echo "bbb" >> a + $ hg status + M a + $ hg shelve + shelved as default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg branch + default + $ echo "bbb" >> b + $ hg status + ? b + $ hg unshelve + unshelving change 'default' + marked working directory as branch test + $ hg status + M a + ? 
b + $ hg branch + test + +When i shelve commit on newly created branch, make +some changes, unshelve it and running into merge +conflicts i expect that after fixing them and +running unshelve --continue newly created branch +will be preserved. + + $ hg init shelve_on_new_branch_conflict + $ cd shelve_on_new_branch_conflict + $ echo "aaa" >> a + $ hg commit -A -m "a" + adding a + $ hg branch + default + $ hg branch test + marked working directory as branch test + (branches are permanent and global, did you want a bookmark?) + $ echo "bbb" >> a + $ hg status + M a + $ hg shelve + shelved as default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg branch + default + $ echo "ccc" >> a + $ hg status + M a + $ hg unshelve + unshelving change 'default' + temporarily committing pending changes (restore with 'hg unshelve --abort') + rebasing shelved changes + rebasing 2:425c97ef07f3 "changes to: a" (tip) + merging a + warning: conflicts while merging a! (edit, then use 'hg resolve --mark') + unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue') + [1] + $ echo "aaabbbccc" > a + $ rm a.orig + $ hg resolve --mark a + (no more unresolved files) + continue: hg unshelve --continue + $ hg unshelve --continue + rebasing 2:425c97ef07f3 "changes to: a" (tip) + marked working directory as branch test + unshelve of 'default' complete + $ cat a + aaabbbccc + $ hg status + M a + $ hg branch + test + $ hg commit -m "test-commit" + +When i shelve on test branch, update to default branch +and unshelve i expect that it will not preserve previous +test branch. 
+ + $ echo "xxx" > b + $ hg add b + $ hg shelve + shelved as test + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg update -r default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg unshelve + unshelving change 'test' + rebasing shelved changes + rebasing 2:357525f34729 "changes to: test-commit" (tip) + $ hg status + A b + $ hg branch + default + +When i unshelve resulting in merge conflicts and makes saved +file shelvedstate looks like in previous versions in +mercurial(without restore branch information in 7th line) i +expect that after resolving conflicts and succesfully +running 'shelve --continue' the branch information won't be +restored and branch will be unchanged. + +shelve on new branch, conflict with previous shelvedstate + + $ hg init conflict + $ cd conflict + $ echo "aaa" >> a + $ hg commit -A -m "a" + adding a + $ hg branch + default + $ hg branch test + marked working directory as branch test + (branches are permanent and global, did you want a bookmark?) + $ echo "bbb" >> a + $ hg status + M a + $ hg shelve + shelved as default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg branch + default + $ echo "ccc" >> a + $ hg status + M a + $ hg unshelve + unshelving change 'default' + temporarily committing pending changes (restore with 'hg unshelve --abort') + rebasing shelved changes + rebasing 2:425c97ef07f3 "changes to: a" (tip) + merging a + warning: conflicts while merging a! 
(edit, then use 'hg resolve --mark') + unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue') + [1] + +Removing restore branch information from shelvedstate file(making it looks like +in previous versions) and running unshelve --continue + + $ head -n 6 < .hg/shelvedstate > .hg/shelvedstate_oldformat + $ rm .hg/shelvedstate + $ mv .hg/shelvedstate_oldformat .hg/shelvedstate + + $ echo "aaabbbccc" > a + $ rm a.orig + $ hg resolve --mark a + (no more unresolved files) + continue: hg unshelve --continue + $ hg unshelve --continue + rebasing 2:425c97ef07f3 "changes to: a" (tip) + unshelve of 'default' complete + $ cat a + aaabbbccc + $ hg status + M a + $ hg branch + default + +On non bare shelve the branch information shouldn't be restored + + $ hg init bare_shelve_on_new_branch + $ cd bare_shelve_on_new_branch + $ echo "aaa" >> a + $ hg commit -A -m "a" + adding a + $ hg branch + default + $ hg branch test + marked working directory as branch test + (branches are permanent and global, did you want a bookmark?) + $ echo "bbb" >> a + $ hg status + M a + $ hg shelve a + shelved as default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg branch + test + $ hg branch default + marked working directory as branch default + (branches are permanent and global, did you want a bookmark?) + $ echo "bbb" >> b + $ hg status + ? b + $ hg unshelve + unshelving change 'default' + $ hg status + M a + ? b + $ hg branch + default
--- a/tests/test-simplemerge.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-simplemerge.py Sat Apr 16 18:06:48 2016 -0500 @@ -13,10 +13,16 @@ # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. +from __future__ import absolute_import + import unittest -from unittest import TestCase -from mercurial import util, simplemerge, error +from mercurial import ( + error, + simplemerge, + util, +) +TestCase = unittest.TestCase # bzr compatible interface, for the tests class Merge3(simplemerge.Merge3Text): """3-way merge of texts. @@ -36,8 +42,7 @@ CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase def split_lines(t): - from cStringIO import StringIO - return StringIO(t).readlines() + return util.stringio(t).readlines() ############################################################ # test case data from the gnu diffutils manual
--- a/tests/test-ssh-bundle1.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-ssh-bundle1.t Sat Apr 16 18:06:48 2016 -0500 @@ -377,8 +377,8 @@ abort: no suitable response from remote hg! [255] - $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh" - Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation + $ SSH_ORIGINAL_COMMAND="'hg' serve -R 'a'repo' --stdio" python "$TESTDIR/../contrib/hg-ssh" + Illegal command "'hg' serve -R 'a'repo' --stdio": No closing quotation [255] Test hg-ssh in read-only mode:
--- a/tests/test-status-color.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-status-color.t Sat Apr 16 18:06:48 2016 -0500 @@ -30,6 +30,15 @@ [status.unknown|? ][status.unknown|b/in_b] [status.unknown|? ][status.unknown|in_root] +hg status with template + $ hg status -T "{label('red', path)}\n" --color=debug + [red|a/1/in_a_1] + [red|a/in_a] + [red|b/1/in_b_1] + [red|b/2/in_b_2] + [red|b/in_b] + [red|in_root] + hg status . in repo root: $ hg status --color=always . @@ -154,6 +163,10 @@ [log.date|date: Thu Jan 01 00:00:00 1970 +0000] [log.summary|summary: initial checkin] + $ hg log -Tcompact --color=debug + [log.changeset changeset.draft|0][tip] [log.node|389aef86a55e] [log.date|1970-01-01 00:00 +0000] [log.user|test] + [ui.note log.description|initial checkin] + Labels on empty strings should not be displayed, labels on custom templates should be.
--- a/tests/test-status-inprocess.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-status-inprocess.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,12 +1,16 @@ -#!/usr/bin/python -from mercurial.ui import ui -from mercurial.localrepo import localrepository -from mercurial.commands import add, commit, status +#!/usr/bin/env python +from __future__ import absolute_import, print_function -u = ui() +from mercurial import ( + commands, + localrepo, + ui as uimod, +) -print '% creating repo' -repo = localrepository(u, '.', create=True) +u = uimod.ui() + +print('% creating repo') +repo = localrepo.localrepository(u, '.', create=True) f = open('test.py', 'w') try: @@ -14,13 +18,13 @@ finally: f.close -print '% add and commit' -add(u, repo, 'test.py') -commit(u, repo, message='*') -status(u, repo, clean=True) +print('% add and commit') +commands.add(u, repo, 'test.py') +commands.commit(u, repo, message='*') +commands.status(u, repo, clean=True) -print '% change' +print('% change') f = open('test.py', 'w') try: f.write('bar\n') @@ -28,4 +32,4 @@ f.close() # this would return clean instead of changed before the fix -status(u, repo, clean=True, modified=True) +commands.status(u, repo, clean=True, modified=True)
--- a/tests/test-strip.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-strip.t Sat Apr 16 18:06:48 2016 -0500 @@ -287,6 +287,7 @@ $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ hg log -G @ changeset: 4:264128213d29 | tag: tip @@ -791,6 +792,7 @@ removing c d: other deleted -> r removing d + starting 4 threads for background file closing (?) 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 2 changesets found list of changesets:
--- a/tests/test-subrepo-deep-nested-change.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-subrepo-deep-nested-change.t Sat Apr 16 18:06:48 2016 -0500 @@ -1,3 +1,18 @@ + $ cat >> $HGRCPATH <<EOF + > [extdiff] + > # for portability: + > pdiff = sh "$RUNTESTDIR/pdiff" + > [progress] + > disable=False + > assume-tty = 1 + > delay = 0 + > # set changedelay really large so we don't see nested topics + > changedelay = 30000 + > format = topic bar number + > refresh = 0 + > width = 60 + > EOF + Preparing the subrepository 'sub2' $ hg init sub2 @@ -12,6 +27,17 @@ $ echo sub1 > sub1/sub1 $ echo "sub2 = ../sub2" > sub1/.hgsub $ hg clone sub2 sub1/sub2 + \r (no-eol) (esc) + linking [ <=> ] 1\r (no-eol) (esc) + linking [ <=> ] 2\r (no-eol) (esc) + linking [ <=> ] 3\r (no-eol) (esc) + linking [ <=> ] 4\r (no-eol) (esc) + linking [ <=> ] 5\r (no-eol) (esc) + linking [ <=> ] 6\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg add -R sub1 @@ -25,6 +51,20 @@ $ echo main > main/main $ echo "sub1 = ../sub1" > main/.hgsub $ hg clone sub1 main/sub1 + \r (no-eol) (esc) + linking [ <=> ] 1\r (no-eol) (esc) + linking [ <=> ] 2\r (no-eol) (esc) + linking [ <=> ] 3\r (no-eol) (esc) + linking [ <=> ] 4\r (no-eol) (esc) + linking [ <=> ] 5\r (no-eol) (esc) + linking [ <=> ] 6\r (no-eol) (esc) + linking [ <=> ] 7\r (no-eol) (esc) + linking [ <=> ] 8\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + updating [===========================================>] 3/3\r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) updating to branch default cloning subrepo sub2 from $TESTTMP/sub2 3 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -36,10 +76,28 @@ Cleaning both repositories, just as a clone -U $ hg up 
-C -R sub2 null + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg up -C -R sub1 null + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + updating [===========================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg up -C -R main null + \r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + updating [===========================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + updating [===========================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ rm -rf main/sub1 $ rm -rf sub1/sub2 @@ -47,6 +105,21 @@ Clone main $ hg --config extensions.largefiles= clone main cloned + \r (no-eol) (esc) + linking [ <=> ] 1\r (no-eol) (esc) + linking [ <=> ] 2\r (no-eol) (esc) + linking [ <=> ] 3\r (no-eol) (esc) + linking [ <=> ] 4\r (no-eol) (esc) + linking [ <=> ] 5\r (no-eol) (esc) + linking [ <=> ] 6\r (no-eol) (esc) + linking [ <=> ] 7\r (no-eol) (esc) + linking [ <=> ] 8\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + updating [===========================================>] 3/3\r (no-eol) (esc) + updating [===========================================>] 3/3\r (no-eol) (esc) + updating [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) updating to branch default cloning subrepo sub1 from $TESTTMP/sub1 cloning subrepo sub1/sub2 from $TESTTMP/sub2 (glob) @@ -131,6 +204,18 @@ .. but first take a detour through some deep removal testing $ hg remove -S -I 're:.*.txt' . 
+ \r (no-eol) (esc) + searching [==========================================>] 1/1\r (no-eol) (esc) + searching [==========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [=====================> ] 1/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) removing sub1/sub2/folder/test.txt (glob) removing sub1/sub2/test.txt (glob) $ hg status -S @@ -138,9 +223,37 @@ R sub1/sub2/test.txt $ hg update -Cq $ hg remove -I 're:.*.txt' sub1 + \r (no-eol) (esc) + searching [==========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) $ hg status -S $ hg remove sub1/sub2/folder/test.txt + \r (no-eol) (esc) + searching [==========================================>] 1/1\r (no-eol) (esc) + searching [==========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) $ hg remove sub1/.hgsubstate + \r (no-eol) (esc) + searching [==========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) $ mv sub1/.hgsub sub1/x.hgsub $ hg status -S warning: subrepo spec file 'sub1/.hgsub' not found @@ -159,6 +272,9 @@ $ cd foo $ 
touch bar/abc $ hg addremove -S .. + \r (no-eol) (esc) + searching for exact renames [ ] 0/1\r (no-eol) (esc) + \r (no-eol) (esc) adding ../sub1/sub2/folder/test.txt (glob) removing ../sub1/sub2/test.txt (glob) adding ../sub1/foo (glob) @@ -171,7 +287,28 @@ Archive wdir() with subrepos $ hg rm main + \r (no-eol) (esc) + deleting [===========================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) $ hg archive -S -r 'wdir()' ../wdir + \r (no-eol) (esc) + archiving [ ] 0/3\r (no-eol) (esc) + archiving [=============> ] 1/3\r (no-eol) (esc) + archiving [===========================> ] 2/3\r (no-eol) (esc) + archiving [==========================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1) [ ] 0/4\r (no-eol) (esc) + archiving (sub1) [========> ] 1/4\r (no-eol) (esc) + archiving (sub1) [=================> ] 2/4\r (no-eol) (esc) + archiving (sub1) [==========================> ] 3/4\r (no-eol) (esc) + archiving (sub1) [===================================>] 4/4\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc) + archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc) + archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) $ diff -r . 
../wdir | egrep -v '\.hg$|^Common subdirectories:' Only in ../wdir: .hg_archival.txt @@ -199,6 +336,23 @@ $ rm sub1/sub1 $ rm -r ../wdir $ hg archive -v -S -r 'wdir()' ../wdir + \r (no-eol) (esc) + archiving [ ] 0/3\r (no-eol) (esc) + archiving [=============> ] 1/3\r (no-eol) (esc) + archiving [===========================> ] 2/3\r (no-eol) (esc) + archiving [==========================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1) [ ] 0/3\r (no-eol) (esc) + archiving (sub1) [===========> ] 1/3\r (no-eol) (esc) + archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc) + archiving (sub1) [===================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc) + archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc) + archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) $ find ../wdir -type f | sort ../wdir/.hg_archival.txt ../wdir/.hgsub @@ -214,6 +368,24 @@ $ hg update -Cq $ rm -r ../wdir $ hg archive -S -r 'wdir()' ../wdir + \r (no-eol) (esc) + archiving [ ] 0/3\r (no-eol) (esc) + archiving [=============> ] 1/3\r (no-eol) (esc) + archiving [===========================> ] 2/3\r (no-eol) (esc) + archiving [==========================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1) [ ] 0/3\r (no-eol) (esc) + archiving (sub1) [===========> ] 1/3\r (no-eol) (esc) + archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc) + archiving (sub1) [===================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc) + archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc) + archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc) + archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) $ cat 
../wdir/.hg_archival.txt repo: 7f491f53a367861f47ee64a80eb997d1f341b77a node: 9bb10eebee29dc0f1201dcf5977b811a540255fd @@ -327,6 +499,24 @@ $ hg up -Cq $ hg --config extensions.largefiles=! archive -S ../archive_all + \r (no-eol) (esc) + archiving [ ] 0/3\r (no-eol) (esc) + archiving [=============> ] 1/3\r (no-eol) (esc) + archiving [===========================> ] 2/3\r (no-eol) (esc) + archiving [==========================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1) [ ] 0/3\r (no-eol) (esc) + archiving (sub1) [===========> ] 1/3\r (no-eol) (esc) + archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc) + archiving (sub1) [===================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc) + archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc) + archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc) + archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) $ find ../archive_all | sort ../archive_all ../archive_all/.hg_archival.txt @@ -346,6 +536,22 @@ Check that archive -X works in deep subrepos $ hg --config extensions.largefiles=! 
archive -S -X '**test*' ../archive_exclude + \r (no-eol) (esc) + archiving [ ] 0/3\r (no-eol) (esc) + archiving [=============> ] 1/3\r (no-eol) (esc) + archiving [===========================> ] 2/3\r (no-eol) (esc) + archiving [==========================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1) [ ] 0/3\r (no-eol) (esc) + archiving (sub1) [===========> ] 1/3\r (no-eol) (esc) + archiving (sub1) [=======================> ] 2/3\r (no-eol) (esc) + archiving (sub1) [===================================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc) + archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) $ find ../archive_exclude | sort ../archive_exclude ../archive_exclude/.hg_archival.txt @@ -360,6 +566,14 @@ ../archive_exclude/sub1/sub2/sub2 $ hg --config extensions.largefiles=! archive -S -I '**test*' ../archive_include + \r (no-eol) (esc) + archiving (sub1) [ <=> ] 0\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ ] 0/2\r (no-eol) (esc) + archiving (sub1/sub2) [==============> ] 1/2\r (no-eol) (esc) + archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) $ find ../archive_include | sort ../archive_include ../archive_include/sub1 @@ -682,6 +896,10 @@ Test .hgsubstate in the R state $ hg rm .hgsub .hgsubstate + \r (no-eol) (esc) + deleting [=====================> ] 1/2\r (no-eol) (esc) + deleting [===========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) $ hg ci -m 'trash subrepo tracking' $ hg log -r "subrepo('re:sub\d+')" --style compact @@ -717,92 +935,163 @@ Interaction with extdiff, largefiles and subrepos - $ hg --config extensions.extdiff= extdiff -S + $ hg --config extensions.extdiff= pdiff -S - $ hg --config extensions.extdiff= extdiff -r '.^' -S - diff -Npru cloned.*/.hgsub cloned/.hgsub 
(glob) - --- cloned.*/.hgsub * +0000 (glob) - +++ cloned/.hgsub * +0000 (glob) - @@ -1,2 +1 @@ + $ hg --config extensions.extdiff= pdiff -r '.^' -S + \r (no-eol) (esc) + archiving [ ] 0/2\r (no-eol) (esc) + archiving [====================> ] 1/2\r (no-eol) (esc) + archiving [==========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1) [ <=> ] 0\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub3) [ <=> ] 0\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving [ ] 0/2\r (no-eol) (esc) + archiving [====================> ] 1/2\r (no-eol) (esc) + archiving [==========================================>] 2/2\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1) [ <=> ] 0\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ <=> ] 0\r (no-eol) (esc) + \r (no-eol) (esc) + diff -Nru cloned.*/.hgsub cloned/.hgsub (glob) + --- cloned.*/.hgsub * (glob) + +++ cloned/.hgsub * (glob) + @@ -1,2 +1* @@ (glob) sub1 = ../sub1 -sub3 = sub3 - diff -Npru cloned.*/.hgsubstate cloned/.hgsubstate (glob) - --- cloned.*/.hgsubstate * +0000 (glob) - +++ cloned/.hgsubstate * +0000 (glob) - @@ -1,2 +1 @@ + diff -Nru cloned.*/.hgsubstate cloned/.hgsubstate (glob) + --- cloned.*/.hgsubstate * (glob) + +++ cloned/.hgsubstate * (glob) + @@ -1,2 +1* @@ (glob) 7a36fa02b66e61f27f3d4a822809f159479b8ab2 sub1 -b1a26de6f2a045a9f079323693614ee322f1ff7e sub3 [1] - $ hg --config extensions.extdiff= extdiff -r 0 -r '.^' -S - diff -Npru cloned.*/.hglf/b.dat cloned.*/.hglf/b.dat (glob) + $ hg --config extensions.extdiff= pdiff -r 0 -r '.^' -S + \r (no-eol) (esc) + archiving [ ] 0/3\r (no-eol) (esc) + archiving [=============> ] 1/3\r (no-eol) (esc) + archiving [===========================> ] 2/3\r (no-eol) (esc) + archiving [==========================================>] 
3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1) [ ] 0/1\r (no-eol) (esc) + archiving (sub1) [===================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc) + archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving [ ] 0/8\r (no-eol) (esc) + archiving [====> ] 1/8\r (no-eol) (esc) + archiving [=========> ] 2/8\r (no-eol) (esc) + archiving [===============> ] 3/8\r (no-eol) (esc) + archiving [====================> ] 4/8\r (no-eol) (esc) + archiving [=========================> ] 5/8\r (no-eol) (esc) + archiving [===============================> ] 6/8\r (no-eol) (esc) + archiving [====================================> ] 7/8\r (no-eol) (esc) + archiving [==========================================>] 8/8\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1) [ ] 0/1\r (no-eol) (esc) + archiving (sub1) [===================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ ] 0/3\r (no-eol) (esc) + archiving (sub1/sub2) [=========> ] 1/3\r (no-eol) (esc) + archiving (sub1/sub2) [===================> ] 2/3\r (no-eol) (esc) + archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub3) [ ] 0/1\r (no-eol) (esc) + archiving (sub3) [===================================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) + diff -Nru cloned.*/.hglf/b.dat cloned.*/.hglf/b.dat (glob) --- cloned.*/.hglf/b.dat * (glob) +++ cloned.*/.hglf/b.dat * (glob) - @@ -0,0 +1 @@ + @@ -*,0 +1* @@ (glob) +da39a3ee5e6b4b0d3255bfef95601890afd80709 - diff -Npru cloned.*/.hglf/foo/bar/large.dat cloned.*/.hglf/foo/bar/large.dat (glob) + diff -Nru cloned.*/.hglf/foo/bar/large.dat cloned.*/.hglf/foo/bar/large.dat (glob) --- cloned.*/.hglf/foo/bar/large.dat * (glob) +++ 
cloned.*/.hglf/foo/bar/large.dat * (glob) - @@ -0,0 +1 @@ + @@ -*,0 +1* @@ (glob) +2f6933b5ee0f5fdd823d9717d8729f3c2523811b - diff -Npru cloned.*/.hglf/large.bin cloned.*/.hglf/large.bin (glob) + diff -Nru cloned.*/.hglf/large.bin cloned.*/.hglf/large.bin (glob) --- cloned.*/.hglf/large.bin * (glob) +++ cloned.*/.hglf/large.bin * (glob) - @@ -0,0 +1 @@ + @@ -*,0 +1* @@ (glob) +7f7097b041ccf68cc5561e9600da4655d21c6d18 - diff -Npru cloned.*/.hgsub cloned.*/.hgsub (glob) + diff -Nru cloned.*/.hgsub cloned.*/.hgsub (glob) --- cloned.*/.hgsub * (glob) +++ cloned.*/.hgsub * (glob) - @@ -1 +1,2 @@ + @@ -1* +1,2 @@ (glob) sub1 = ../sub1 +sub3 = sub3 - diff -Npru cloned.*/.hgsubstate cloned.*/.hgsubstate (glob) + diff -Nru cloned.*/.hgsubstate cloned.*/.hgsubstate (glob) --- cloned.*/.hgsubstate * (glob) +++ cloned.*/.hgsubstate * (glob) - @@ -1 +1,2 @@ + @@ -1* +1,2 @@ (glob) -fc3b4ce2696f7741438c79207583768f2ce6b0dd sub1 +7a36fa02b66e61f27f3d4a822809f159479b8ab2 sub1 +b1a26de6f2a045a9f079323693614ee322f1ff7e sub3 - diff -Npru cloned.*/foo/bar/def cloned.*/foo/bar/def (glob) + diff -Nru cloned.*/foo/bar/def cloned.*/foo/bar/def (glob) --- cloned.*/foo/bar/def * (glob) +++ cloned.*/foo/bar/def * (glob) - @@ -0,0 +1 @@ + @@ -*,0 +1* @@ (glob) +changed - diff -Npru cloned.*/main cloned.*/main (glob) + diff -Nru cloned.*/main cloned.*/main (glob) --- cloned.*/main * (glob) +++ cloned.*/main * (glob) - @@ -1 +1 @@ + @@ -1* +1* @@ (glob) -main +foo - diff -Npru cloned.*/sub1/.hgsubstate cloned.*/sub1/.hgsubstate (glob) + diff -Nru cloned.*/sub1/.hgsubstate cloned.*/sub1/.hgsubstate (glob) --- cloned.*/sub1/.hgsubstate * (glob) +++ cloned.*/sub1/.hgsubstate * (glob) - @@ -1 +1 @@ + @@ -1* +1* @@ (glob) -c57a0840e3badd667ef3c3ef65471609acb2ba3c sub2 +c77908c81ccea3794a896c79e98b0e004aee2e9e sub2 - diff -Npru cloned.*/sub1/sub2/folder/test.txt cloned.*/sub1/sub2/folder/test.txt (glob) + diff -Nru cloned.*/sub1/sub2/folder/test.txt cloned.*/sub1/sub2/folder/test.txt (glob) --- 
cloned.*/sub1/sub2/folder/test.txt * (glob) +++ cloned.*/sub1/sub2/folder/test.txt * (glob) - @@ -0,0 +1 @@ + @@ -*,0 +1* @@ (glob) +subfolder - diff -Npru cloned.*/sub1/sub2/sub2 cloned.*/sub1/sub2/sub2 (glob) + diff -Nru cloned.*/sub1/sub2/sub2 cloned.*/sub1/sub2/sub2 (glob) --- cloned.*/sub1/sub2/sub2 * (glob) +++ cloned.*/sub1/sub2/sub2 * (glob) - @@ -1 +1 @@ + @@ -1* +1* @@ (glob) -sub2 +modified - diff -Npru cloned.*/sub3/a.txt cloned.*/sub3/a.txt (glob) + diff -Nru cloned.*/sub3/a.txt cloned.*/sub3/a.txt (glob) --- cloned.*/sub3/a.txt * (glob) +++ cloned.*/sub3/a.txt * (glob) - @@ -0,0 +1 @@ + @@ -*,0 +1* @@ (glob) +xyz [1] $ echo mod > sub1/sub2/sub2 - $ hg --config extensions.extdiff= extdiff -S + $ hg --config extensions.extdiff= pdiff -S + \r (no-eol) (esc) + archiving (sub1) [ <=> ] 0\r (no-eol) (esc) + \r (no-eol) (esc) + \r (no-eol) (esc) + archiving (sub1/sub2) [ ] 0/1\r (no-eol) (esc) + archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc) + \r (no-eol) (esc) --- */cloned.*/sub1/sub2/sub2 * (glob) +++ */cloned/sub1/sub2/sub2 * (glob) - @@ -1 +1 @@ + @@ -1* +1* @@ (glob) -modified +mod [1]
--- a/tests/test-subrepo-git.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-subrepo-git.t Sat Apr 16 18:06:48 2016 -0500 @@ -785,7 +785,7 @@ index 0000000..257cc56 --- /dev/null +++ b/s/barfoo - @@ -0,0 +1 @@ + @@ -0,0 +1* @@ (glob) +foo $ hg diff --subrepos s/foobar diff --git a/s/foobar b/s/foobar @@ -827,7 +827,7 @@ index 0000000..257cc56 --- /dev/null +++ b/s/barfoo - @@ -0,0 +1 @@ + @@ -0,0 +1* @@ (glob) +foo moving a file should show a removal and an add @@ -1146,7 +1146,7 @@ $ hg commit -m "add subrepo" $ cd .. $ env -u GIT_ALLOW_PROTOCOL hg clone malicious-subrepository malicious-subrepository-protected - Cloning into '$TESTTMP/tc/malicious-subrepository-protected/s'... + Cloning into '$TESTTMP/tc/malicious-subrepository-protected/s'... (glob) fatal: transport 'ext' not allowed updating to branch default cloning subrepo s from ext::sh -c echo% pwned% >&2 @@ -1155,7 +1155,7 @@ whitelisting of ext should be respected (that's the git submodule behaviour) $ env GIT_ALLOW_PROTOCOL=ext hg clone malicious-subrepository malicious-subrepository-clone-allowed - Cloning into '$TESTTMP/tc/malicious-subrepository-clone-allowed/s'... + Cloning into '$TESTTMP/tc/malicious-subrepository-clone-allowed/s'... (glob) pwned fatal: Could not read from remote repository.
--- a/tests/test-subrepo-recursion.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-subrepo-recursion.t Sat Apr 16 18:06:48 2016 -0500 @@ -255,8 +255,6 @@ $ cp $HGRCPATH $HGRCPATH.no-progress $ cat >> $HGRCPATH <<EOF - > [extensions] - > progress = > [progress] > disable=False > assume-tty = 1
--- a/tests/test-subrepo-svn.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-subrepo-svn.t Sat Apr 16 18:06:48 2016 -0500 @@ -8,7 +8,7 @@ #endif $ filter_svn_output () { - > egrep -v 'Committing|Transmitting|Updating' || true + > egrep -v 'Committing|Transmitting|Updating|(^$)' || true > } create subversion repo @@ -97,10 +97,8 @@ committing subrepository s Sending*s/alpha (glob) Committed revision 3. - Fetching external item into '*s/externals'* (glob) External at revision 1. - At revision 3. $ hg debugsub path s
--- a/tests/test-subrepo.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-subrepo.t Sat Apr 16 18:06:48 2016 -0500 @@ -259,6 +259,7 @@ resolving manifests branchmerge: True, force: False, partial: False ancestor: 1f14a2e2d3ec, local: f0d2028bf86d+, remote: 1831e14459c4 + starting 4 threads for background file closing (?) .hgsubstate: versions differ -> m (premerge) subrepo merge f0d2028bf86d+ 1831e14459c4 1f14a2e2d3ec subrepo t: other changed, get t:6747d179aa9a688023c4b0cad32e4c92bb7f34ad:hg @@ -285,6 +286,7 @@ resolving manifests branchmerge: True, force: False, partial: False ancestor: 1831e14459c4, local: e45c8b14af55+, remote: f94576341bcf + starting 4 threads for background file closing (?) .hgsubstate: versions differ -> m (premerge) subrepo merge e45c8b14af55+ f94576341bcf 1831e14459c4 subrepo t: both sides changed @@ -296,6 +298,7 @@ branchmerge: True, force: False, partial: False ancestor: 6747d179aa9a, local: 20a0db6fbf6c+, remote: 7af322bc1198 preserving t for resolve of t + starting 4 threads for background file closing (?) t: versions differ -> m (premerge) picked tool ':merge' for t (binary False symlink False changedelete False) merging t @@ -664,6 +667,7 @@ $ cd ../t $ hg up -C # discard our earlier merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 other heads for branch "default" $ echo blah > t/t $ hg ci -m13 committing subrepository t @@ -677,6 +681,7 @@ $ hg up -C # discard changes 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 other heads for branch "default" pull @@ -718,6 +723,7 @@ adding file changes added 1 changesets with 1 changes to 1 files 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 other heads for branch "default" $ cat t/t blah @@ -1185,6 +1191,7 @@ ? s/c $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 other heads for branch "default" $ hg status -S ? s/b ? s/c
--- a/tests/test-symlink-os-yes-fs-no.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-symlink-os-yes-fs-no.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,5 +1,14 @@ -import os, sys, time -from mercurial import hg, ui, commands, util +from __future__ import absolute_import + +import os +import sys +import time +from mercurial import ( + commands, + hg, + ui as uimod, + util, +) TESTDIR = os.environ["TESTDIR"] BUNDLEPATH = os.path.join(TESTDIR, 'bundles', 'test-no-symlinks.hg') @@ -8,7 +17,7 @@ if not getattr(os, "symlink", False): sys.exit(80) # SKIPPED_STATUS defined in run-tests.py -u = ui.ui() +u = uimod.ui() # hide outer repo hg.peer(u, {}, '.', create=True) @@ -36,10 +45,10 @@ fp.close() # reload repository -u = ui.ui() +u = uimod.ui() repo = hg.repository(u, 'test0') commands.status(u, repo) # try cloning a repo which contains symlinks -u = ui.ui() +u = uimod.ui() hg.clone(u, {}, BUNDLEPATH, 'test1')
--- a/tests/test-tags.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-tags.t Sat Apr 16 18:06:48 2016 -0500 @@ -136,12 +136,13 @@ $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1 $ hg identify b9154636be93 tip - $ hg blackbox -l 5 - 1970/01/01 00:00:00 bob (*)> identify (glob) - 1970/01/01 00:00:00 bob (*)> writing 48 bytes to cache/hgtagsfnodes1 (glob) - 1970/01/01 00:00:00 bob (*)> 0/1 cache hits/lookups in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) - 1970/01/01 00:00:00 bob (*)> identify exited 0 after ?.?? seconds (glob) + $ hg blackbox -l 6 + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing 48 bytes to cache/hgtagsfnodes1 + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/1 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6 Failure to acquire lock results in no write @@ -149,12 +150,13 @@ $ echo 'foo:1' > .hg/wlock $ hg identify b9154636be93 tip - $ hg blackbox -l 5 - 1970/01/01 00:00:00 bob (*)> identify (glob) - 1970/01/01 00:00:00 bob (*)> not writing .hg/cache/hgtagsfnodes1 because lock cannot be acquired (glob) - 1970/01/01 00:00:00 bob (*)> 0/1 cache hits/lookups in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) - 1970/01/01 00:00:00 bob (*)> identify exited 0 after * seconds (glob) + $ hg blackbox -l 6 + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> not writing 
.hg/cache/hgtagsfnodes1 because lock cannot be acquired + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/1 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6 $ fnodescacheexists no fnodes cache @@ -214,6 +216,10 @@ $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) + $ hg blackbox -l3 + 1970/01/01 00:00:00 bob @c8edf04160c7f731e4589d66ab3ab3486a64ac28 (5000)> merge 1 + 1970/01/01 00:00:00 bob @c8edf04160c7f731e4589d66ab3ab3486a64ac28+b9154636be938d3d431e75a7c906504a079bfe07 (5000)> merge 1 exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @c8edf04160c7f731e4589d66ab3ab3486a64ac28+b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l3 $ hg id c8edf04160c7+b9154636be93+ tip $ hg status @@ -348,12 +354,13 @@ tip 5:8dbfe60eff30 bar 1:78391a272241 - $ hg blackbox -l 5 - 1970/01/01 00:00:00 bob (*)> tags (glob) - 1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob) - 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) - 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) + $ hg blackbox -l 6 + 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags + 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing 24 bytes to cache/hgtagsfnodes1 + 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> 2/3 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing .hg/cache/tags2-visible with 1 
tags + 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> blackbox -l 6 #if unix-permissions no-root Errors writing to .hgtags fnodes cache are silently ignored @@ -368,12 +375,13 @@ tip 6:b968051b5cf3 bar 1:78391a272241 - $ hg blackbox -l 5 - 1970/01/01 00:00:00 bob (*)> tags (glob) - 1970/01/01 00:00:00 bob (*)> couldn't write cache/hgtagsfnodes1: [Errno 13] Permission denied: '$TESTTMP/t2/.hg/cache/hgtagsfnodes1' (glob) - 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) - 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) + $ hg blackbox -l 6 + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> couldn't write cache/hgtagsfnodes1: [Errno 13] Permission denied: '$TESTTMP/t2/.hg/cache/hgtagsfnodes1' + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/3 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6 $ chmod a+w .hg/cache/hgtagsfnodes1 @@ -382,12 +390,13 @@ tip 6:b968051b5cf3 bar 1:78391a272241 - $ hg blackbox -l 5 - 1970/01/01 00:00:00 bob (*)> tags (glob) - 1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob) - 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) - 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) + $ hg blackbox -l 6 + 
1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing 24 bytes to cache/hgtagsfnodes1 + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/3 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6 $ f --size .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=168 @@ -410,11 +419,12 @@ tip 4:0c192d7d5e6b bar 1:78391a272241 - $ hg blackbox -l 4 - 1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob) - 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) - 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) + $ hg blackbox -l 5 + 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing 24 bytes to cache/hgtagsfnodes1 + 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> 2/3 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing .hg/cache/tags2-visible with 1 tags + 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> tags exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> blackbox -l 5 $ f --size .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=120 @@ -426,12 +436,13 @@ tip 5:035f65efb448 bar 1:78391a272241 - $ hg blackbox -l 5 - 1970/01/01 00:00:00 bob (*)> tags (glob) - 1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob) - 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * 
seconds (glob) - 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) - 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) + $ hg blackbox -l 6 + 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags + 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing 24 bytes to cache/hgtagsfnodes1 + 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> 2/3 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing .hg/cache/tags2-visible with 1 tags + 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> blackbox -l 6 $ f --size .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=144
--- a/tests/test-template-engine.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-template-engine.t Sat Apr 16 18:06:48 2016 -0500 @@ -4,7 +4,7 @@ > from mercurial import templater > > class mytemplater(object): - > def __init__(self, loader, filters, defaults): + > def __init__(self, loader, filters, defaults, aliases): > self.loader = loader > > def process(self, t, map): @@ -44,17 +44,11 @@ 0 97e5f848f0936960273bbf75be6388cd0350a32b -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 -Fuzzing the unicode escaper to ensure it produces valid data - -#if hypothesis +invalid engine type: - >>> from hypothesishelpers import * - >>> import mercurial.templatefilters as tf - >>> import json - >>> @check(st.text().map(lambda s: s.encode('utf-8'))) - ... def testtfescapeproducesvalidjson(text): - ... json.loads('"' + tf.jsonescape(text) + '"') - -#endif + $ echo 'changeset = unknown:changeset.txt' > unknownenginemap + $ hg log --style=./unknownenginemap + abort: invalid template engine: unknown + [255] $ cd ..
--- a/tests/test-transplant.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-transplant.t Sat Apr 16 18:06:48 2016 -0500 @@ -321,7 +321,7 @@ remote transplant with pull - $ hg -R ../t serve -p $HGPORT -d --pid-file=../t.pid + $ hg serve -R ../t -p $HGPORT -d --pid-file=../t.pid $ cat ../t.pid >> $DAEMON_PIDS $ hg clone -r 0 ../t ../rp @@ -409,6 +409,7 @@ $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" $ rm added $ hg transplant --continue abort: no transplant to continue
--- a/tests/test-treemanifest.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-treemanifest.t Sat Apr 16 18:06:48 2016 -0500 @@ -1,3 +1,5 @@ +#require killdaemons + $ cat << EOF >> $HGRCPATH > [format] > usegeneraldelta=yes @@ -361,13 +363,20 @@ added 11 changesets with 15 changes to 10 files (+3 heads) $ grep treemanifest clone/.hg/requires treemanifest + $ hg -R clone verify + checking changesets + checking manifests + checking directory manifests + crosschecking files in changesets and manifests + checking files + 10 files, 11 changesets, 15 total revisions Create deeper repo with tree manifests. $ hg --config experimental.treemanifest=True init deeprepo $ cd deeprepo - $ mkdir a + $ mkdir .A $ mkdir b $ mkdir b/bar $ mkdir b/bar/orange @@ -376,8 +385,8 @@ $ mkdir b/foo/apple $ mkdir b/foo/apple/bees - $ touch a/one.txt - $ touch a/two.txt + $ touch .A/one.txt + $ touch .A/two.txt $ touch b/bar/fruits.txt $ touch b/bar/orange/fly/gnat.py $ touch b/bar/orange/fly/housefly.txt @@ -393,8 +402,8 @@ Test files from the root. $ hg files -r . - a/one.txt (glob) - a/two.txt (glob) + .A/one.txt (glob) + .A/two.txt (glob) b/bar/fruits.txt (glob) b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) @@ -408,61 +417,56 @@ b/bar/fruits.txt (glob) b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) + $ cp -r .hg/store .hg/store-copy Test files for a subdirectory. - $ mv .hg/store/meta/a oldmf + $ rm -r .hg/store/meta/~2e_a $ hg files -r . b b/bar/fruits.txt (glob) b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) b/foo/apple/bees/flower.py (glob) - $ mv oldmf .hg/store/meta/a + $ cp -r .hg/store-copy/. .hg/store Test files with just includes and excludes. - $ mv .hg/store/meta/a oldmf - $ mv .hg/store/meta/b/bar/orange/fly oldmf2 - $ mv .hg/store/meta/b/foo/apple/bees oldmf3 + $ rm -r .hg/store/meta/~2e_a + $ rm -r .hg/store/meta/b/bar/orange/fly + $ rm -r .hg/store/meta/b/foo/apple/bees $ hg files -r . 
-I path:b/bar -X path:b/bar/orange/fly -I path:b/foo -X path:b/foo/apple/bees b/bar/fruits.txt (glob) - $ mv oldmf .hg/store/meta/a - $ mv oldmf2 .hg/store/meta/b/bar/orange/fly - $ mv oldmf3 .hg/store/meta/b/foo/apple/bees + $ cp -r .hg/store-copy/. .hg/store Test files for a subdirectory, excluding a directory within it. - $ mv .hg/store/meta/a oldmf - $ mv .hg/store/meta/b/foo oldmf2 + $ rm -r .hg/store/meta/~2e_a + $ rm -r .hg/store/meta/b/foo $ hg files -r . -X path:b/foo b b/bar/fruits.txt (glob) b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) - $ mv oldmf .hg/store/meta/a - $ mv oldmf2 .hg/store/meta/b/foo + $ cp -r .hg/store-copy/. .hg/store Test files for a sub directory, including only a directory within it, and including an unrelated directory. - $ mv .hg/store/meta/a oldmf - $ mv .hg/store/meta/b/foo oldmf2 + $ rm -r .hg/store/meta/~2e_a + $ rm -r .hg/store/meta/b/foo $ hg files -r . -I path:b/bar/orange -I path:a b b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) - $ mv oldmf .hg/store/meta/a - $ mv oldmf2 .hg/store/meta/b/foo + $ cp -r .hg/store-copy/. .hg/store Test files for a pattern, including a directory, and excluding a directory within that. - $ mv .hg/store/meta/a oldmf - $ mv .hg/store/meta/b/foo oldmf2 - $ mv .hg/store/meta/b/bar/orange oldmf3 + $ rm -r .hg/store/meta/~2e_a + $ rm -r .hg/store/meta/b/foo + $ rm -r .hg/store/meta/b/bar/orange $ hg files -r . glob:**.txt -I path:b/bar -X path:b/bar/orange b/bar/fruits.txt (glob) - $ mv oldmf .hg/store/meta/a - $ mv oldmf2 .hg/store/meta/b/foo - $ mv oldmf3 .hg/store/meta/b/bar/orange + $ cp -r .hg/store-copy/. 
.hg/store Add some more changes to the deep repo $ echo narf >> b/bar/fruits.txt @@ -470,14 +474,108 @@ $ echo troz >> b/bar/orange/fly/gnat.py $ hg ci -m troz +Verify works + $ hg verify + checking changesets + checking manifests + checking directory manifests + crosschecking files in changesets and manifests + checking files + 8 files, 3 changesets, 10 total revisions + +Dirlogs are included in fncache + $ grep meta/.A/00manifest.i .hg/store/fncache + meta/.A/00manifest.i + +Rebuilt fncache includes dirlogs + $ rm .hg/store/fncache + $ hg debugrebuildfncache + adding data/.A/one.txt.i + adding data/.A/two.txt.i + adding data/b/bar/fruits.txt.i + adding data/b/bar/orange/fly/gnat.py.i + adding data/b/bar/orange/fly/housefly.txt.i + adding data/b/foo/apple/bees/flower.py.i + adding data/c.txt.i + adding data/d.py.i + adding meta/.A/00manifest.i + adding meta/b/00manifest.i + adding meta/b/bar/00manifest.i + adding meta/b/bar/orange/00manifest.i + adding meta/b/bar/orange/fly/00manifest.i + adding meta/b/foo/00manifest.i + adding meta/b/foo/apple/00manifest.i + adding meta/b/foo/apple/bees/00manifest.i + 16 items added, 0 removed from fncache + +Finish first server + $ killdaemons.py + +Back up the recently added revlogs + $ cp -r .hg/store .hg/store-newcopy + +Verify reports missing dirlog + $ rm .hg/store/meta/b/00manifest.* + $ hg verify + checking changesets + checking manifests + checking directory manifests + 0: empty or missing b/ + b/@0: parent-directory manifest refers to unknown revision 67688a370455 + b/@1: parent-directory manifest refers to unknown revision f38e85d334c5 + b/@2: parent-directory manifest refers to unknown revision 99c9792fd4b0 + warning: orphan revlog 'meta/b/bar/00manifest.i' + warning: orphan revlog 'meta/b/bar/orange/00manifest.i' + warning: orphan revlog 'meta/b/bar/orange/fly/00manifest.i' + warning: orphan revlog 'meta/b/foo/00manifest.i' + warning: orphan revlog 'meta/b/foo/apple/00manifest.i' + warning: orphan revlog 
'meta/b/foo/apple/bees/00manifest.i' + crosschecking files in changesets and manifests + b/bar/fruits.txt@0: in changeset but not in manifest + b/bar/orange/fly/gnat.py@0: in changeset but not in manifest + b/bar/orange/fly/housefly.txt@0: in changeset but not in manifest + b/foo/apple/bees/flower.py@0: in changeset but not in manifest + checking files + 8 files, 3 changesets, 10 total revisions + 6 warnings encountered! + 8 integrity errors encountered! + (first damaged changeset appears to be 0) + [1] + $ cp -r .hg/store-newcopy/. .hg/store + +Verify reports missing dirlog entry + $ mv -f .hg/store-copy/meta/b/00manifest.* .hg/store/meta/b/ + $ hg verify + checking changesets + checking manifests + checking directory manifests + b/@1: parent-directory manifest refers to unknown revision f38e85d334c5 + b/@2: parent-directory manifest refers to unknown revision 99c9792fd4b0 + b/bar/@?: rev 1 points to unexpected changeset 1 + b/bar/@?: 5e03c4ee5e4a not in parent-directory manifest + b/bar/@?: rev 2 points to unexpected changeset 2 + b/bar/@?: 1b16940d66d6 not in parent-directory manifest + b/bar/orange/@?: rev 1 points to unexpected changeset 2 + (expected None) + b/bar/orange/fly/@?: rev 1 points to unexpected changeset 2 + (expected None) + crosschecking files in changesets and manifests + checking files + 8 files, 3 changesets, 10 total revisions + 2 warnings encountered! + 8 integrity errors encountered! + (first damaged changeset appears to be 1) + [1] + $ cp -r .hg/store-newcopy/. .hg/store + Test cloning a treemanifest repo over http. - $ hg serve -p $HGPORT2 -d --pid-file=hg.pid --errorlog=errors.log + $ hg serve -p $HGPORT -d --pid-file=hg.pid --errorlog=errors.log $ cat hg.pid >> $DAEMON_PIDS $ cd .. We can clone even with the knob turned off and we'll get a treemanifest repo. 
$ hg clone --config experimental.treemanifest=False \ > --config experimental.changegroup3=True \ - > http://localhost:$HGPORT2 deepclone + > http://localhost:$HGPORT deepclone requesting all changes adding changesets adding manifests @@ -493,8 +591,6 @@ Tree manifest revlogs exist. $ find deepclone/.hg/store/meta | sort deepclone/.hg/store/meta - deepclone/.hg/store/meta/a - deepclone/.hg/store/meta/a/00manifest.i deepclone/.hg/store/meta/b deepclone/.hg/store/meta/b/00manifest.i deepclone/.hg/store/meta/b/bar @@ -509,12 +605,140 @@ deepclone/.hg/store/meta/b/foo/apple/00manifest.i deepclone/.hg/store/meta/b/foo/apple/bees deepclone/.hg/store/meta/b/foo/apple/bees/00manifest.i + deepclone/.hg/store/meta/~2e_a + deepclone/.hg/store/meta/~2e_a/00manifest.i Verify passes. $ cd deepclone $ hg verify checking changesets checking manifests + checking directory manifests crosschecking files in changesets and manifests checking files 8 files, 3 changesets, 10 total revisions $ cd .. + +Create clones using old repo formats to use in later tests + $ hg clone --config format.usestore=False \ + > --config experimental.changegroup3=True \ + > http://localhost:$HGPORT deeprepo-basicstore + requesting all changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 10 changes to 8 files + updating to branch default + 8 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd deeprepo-basicstore + $ grep store .hg/requires + [1] + $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --errorlog=errors.log + $ cat hg.pid >> $DAEMON_PIDS + $ cd .. 
+ $ hg clone --config format.usefncache=False \ + > --config experimental.changegroup3=True \ + > http://localhost:$HGPORT deeprepo-encodedstore + requesting all changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 10 changes to 8 files + updating to branch default + 8 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd deeprepo-encodedstore + $ grep fncache .hg/requires + [1] + $ hg serve -p $HGPORT2 -d --pid-file=hg.pid --errorlog=errors.log + $ cat hg.pid >> $DAEMON_PIDS + $ cd .. + +Local clone with basicstore + $ hg clone -U deeprepo-basicstore local-clone-basicstore + $ hg -R local-clone-basicstore verify + checking changesets + checking manifests + checking directory manifests + crosschecking files in changesets and manifests + checking files + 8 files, 3 changesets, 10 total revisions + +Local clone with encodedstore + $ hg clone -U deeprepo-encodedstore local-clone-encodedstore + $ hg -R local-clone-encodedstore verify + checking changesets + checking manifests + checking directory manifests + crosschecking files in changesets and manifests + checking files + 8 files, 3 changesets, 10 total revisions + +Local clone with fncachestore + $ hg clone -U deeprepo local-clone-fncachestore + $ hg -R local-clone-fncachestore verify + checking changesets + checking manifests + checking directory manifests + crosschecking files in changesets and manifests + checking files + 8 files, 3 changesets, 10 total revisions + +Stream clone with basicstore + $ hg clone --config experimental.changegroup3=True --uncompressed -U \ + > http://localhost:$HGPORT1 stream-clone-basicstore + streaming all changes + 18 files to transfer, * of data (glob) + transferred * in * seconds (*) (glob) + searching for changes + no changes found + $ hg -R stream-clone-basicstore verify + checking changesets + checking manifests + checking directory manifests + crosschecking files in changesets and manifests + checking files + 8 
files, 3 changesets, 10 total revisions + +Stream clone with encodedstore + $ hg clone --config experimental.changegroup3=True --uncompressed -U \ + > http://localhost:$HGPORT2 stream-clone-encodedstore + streaming all changes + 18 files to transfer, * of data (glob) + transferred * in * seconds (*) (glob) + searching for changes + no changes found + $ hg -R stream-clone-encodedstore verify + checking changesets + checking manifests + checking directory manifests + crosschecking files in changesets and manifests + checking files + 8 files, 3 changesets, 10 total revisions + +Stream clone with fncachestore + $ hg clone --config experimental.changegroup3=True --uncompressed -U \ + > http://localhost:$HGPORT stream-clone-fncachestore + streaming all changes + 18 files to transfer, * of data (glob) + transferred * in * seconds (*) (glob) + searching for changes + no changes found + $ hg -R stream-clone-fncachestore verify + checking changesets + checking manifests + checking directory manifests + crosschecking files in changesets and manifests + checking files + 8 files, 3 changesets, 10 total revisions + +Packed bundle + $ hg -R deeprepo debugcreatestreamclonebundle repo-packed.hg + writing 3349 bytes for 18 files + bundle requirements: generaldelta, revlogv1, treemanifest + $ hg debugbundle --spec repo-packed.hg + none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Ctreemanifest + +Bundle with changegroup2 is not supported + + $ hg -R deeprepo bundle --all -t v2 deeprepo.bundle + abort: repository does not support bundle version 02 + [255]
--- a/tests/test-trusted.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-trusted.py Sat Apr 16 18:06:48 2016 -0500 @@ -2,8 +2,14 @@ # with files from different users/groups, we cheat a bit by # monkey-patching some functions in the util module +from __future__ import absolute_import, print_function + import os -from mercurial import ui, util, error +from mercurial import ( + error, + ui as uimod, + util, +) hgrc = os.environ['HGRCPATH'] f = open(hgrc) @@ -57,26 +63,26 @@ trusted = who[(user in tusers) + 2*(group in tgroups)] if trusted: trusted = ', but we trust the ' + trusted - print '# %s user, %s group%s' % (kind[user == cuser], kind[group == cgroup], - trusted) + print('# %s user, %s group%s' % (kind[user == cuser], kind[group == cgroup], + trusted)) - u = ui.ui() + u = uimod.ui() u.setconfig('ui', 'debug', str(bool(debug))) u.setconfig('ui', 'report_untrusted', str(bool(report))) u.readconfig('.hg/hgrc') if silent: return u - print 'trusted' + print('trusted') for name, path in u.configitems('paths'): - print ' ', name, '=', path - print 'untrusted' + print(' ', name, '=', path) + print('untrusted') for name, path in u.configitems('paths', untrusted=True): - print '.', + print('.', end=' ') u.config('paths', name) # warning with debug=True - print '.', + print('.', end=' ') u.config('paths', name, untrusted=True) # no warnings - print name, '=', path - print + print(name, '=', path) + print() return u @@ -107,50 +113,50 @@ # ... but we trust the user and the group testui(user='abc', group='def', tusers=['abc'], tgroups=['def']) # ... but we trust all users -print '# we trust all users' +print('# we trust all users') testui(user='abc', group='def', tusers=['*']) # ... but we trust all groups -print '# we trust all groups' +print('# we trust all groups') testui(user='abc', group='def', tgroups=['*']) # ... 
but we trust the whole universe -print '# we trust all users and groups' +print('# we trust all users and groups') testui(user='abc', group='def', tusers=['*'], tgroups=['*']) # ... check that users and groups are in different namespaces -print "# we don't get confused by users and groups with the same name" +print("# we don't get confused by users and groups with the same name") testui(user='abc', group='def', tusers=['def'], tgroups=['abc']) # ... lists of user names work -print "# list of user names" +print("# list of user names") testui(user='abc', group='def', tusers=['foo', 'xyz', 'abc', 'bleh'], tgroups=['bar', 'baz', 'qux']) # ... lists of group names work -print "# list of group names" +print("# list of group names") testui(user='abc', group='def', tusers=['foo', 'xyz', 'bleh'], tgroups=['bar', 'def', 'baz', 'qux']) -print "# Can't figure out the name of the user running this process" +print("# Can't figure out the name of the user running this process") testui(user='abc', group='def', cuser=None) -print "# prints debug warnings" +print("# prints debug warnings") u = testui(user='abc', group='def', cuser='foo', debug=True) -print "# report_untrusted enabled without debug hides warnings" +print("# report_untrusted enabled without debug hides warnings") u = testui(user='abc', group='def', cuser='foo', report=False) -print "# report_untrusted enabled with debug shows warnings" +print("# report_untrusted enabled with debug shows warnings") u = testui(user='abc', group='def', cuser='foo', debug=True, report=False) -print "# ui.readconfig sections" +print("# ui.readconfig sections") filename = 'foobar' f = open(filename, 'w') f.write('[foobar]\n') f.write('baz = quux\n') f.close() u.readconfig(filename, sections=['foobar']) -print u.config('foobar', 'baz') +print(u.config('foobar', 'baz')) -print -print "# read trusted, untrusted, new ui, trusted" -u = ui.ui() +print() +print("# read trusted, untrusted, new ui, trusted") +u = uimod.ui() u.setconfig('ui', 
'debug', 'on') u.readconfig(filename) u2 = u.copy() @@ -158,30 +164,30 @@ return 'foo' util.username = username u2.readconfig('.hg/hgrc') -print 'trusted:' -print u2.config('foobar', 'baz') -print 'untrusted:' -print u2.config('foobar', 'baz', untrusted=True) +print('trusted:') +print(u2.config('foobar', 'baz')) +print('untrusted:') +print(u2.config('foobar', 'baz', untrusted=True)) -print -print "# error handling" +print() +print("# error handling") def assertraises(f, exc=error.Abort): try: f() except exc as inst: - print 'raised', inst.__class__.__name__ + print('raised', inst.__class__.__name__) else: - print 'no exception?!' + print('no exception?!') -print "# file doesn't exist" +print("# file doesn't exist") os.unlink('.hg/hgrc') assert not os.path.exists('.hg/hgrc') testui(debug=True, silent=True) testui(user='abc', group='def', debug=True, silent=True) -print -print "# parse error" +print() +print("# parse error") f = open('.hg/hgrc', 'w') f.write('foo') f.close() @@ -189,9 +195,9 @@ try: testui(user='abc', group='def', silent=True) except error.ParseError as inst: - print inst + print(inst) try: testui(debug=True, silent=True) except error.ParseError as inst: - print inst + print(inst)
--- a/tests/test-trusted.py.out Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-trusted.py.out Sat Apr 16 18:06:48 2016 -0500 @@ -133,7 +133,7 @@ global = /some/path untrusted . . global = /some/path -.ignoring untrusted configuration option paths.local = /another/path +. ignoring untrusted configuration option paths.local = /another/path . local = /another/path # report_untrusted enabled without debug hides warnings @@ -152,7 +152,7 @@ global = /some/path untrusted . . global = /some/path -.ignoring untrusted configuration option paths.local = /another/path +. ignoring untrusted configuration option paths.local = /another/path . local = /another/path # ui.readconfig sections
--- a/tests/test-ui-color.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-ui-color.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,6 +1,13 @@ +from __future__ import absolute_import, print_function + import os -from hgext import color -from mercurial import dispatch, ui +from hgext import ( + color, +) +from mercurial import ( + dispatch, + ui as uimod, +) # ensure errors aren't buffered testui = color.colorui() @@ -8,7 +15,7 @@ testui.write(('buffered\n')) testui.warn(('warning\n')) testui.write_err('error\n') -print repr(testui.popbuffer()) +print(repr(testui.popbuffer())) # test dispatch.dispatch with the same ui object hgrc = open(os.environ["HGRCPATH"], 'w') @@ -16,7 +23,7 @@ hgrc.write('color=\n') hgrc.close() -ui_ = ui.ui() +ui_ = uimod.ui() ui_.setconfig('ui', 'formatted', 'True') # we're not interested in the output, so write that to devnull @@ -28,6 +35,6 @@ dispatch.dispatch(dispatch.request(['version', '-q'], ui_)) runcmd() -print "colored? " + str(issubclass(ui_.__class__, color.colorui)) +print("colored? " + str(issubclass(ui_.__class__, color.colorui))) runcmd() -print "colored? " + str(issubclass(ui_.__class__, color.colorui)) +print("colored? " + str(issubclass(ui_.__class__, color.colorui)))
--- a/tests/test-ui-config.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-ui-config.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,6 +1,11 @@ -from mercurial import ui, dispatch, error +from __future__ import absolute_import, print_function +from mercurial import ( + dispatch, + error, + ui as uimod, +) -testui = ui.ui() +testui = uimod.ui() parsed = dispatch._parseconfig(testui, [ 'values.string=string value', 'values.bool1=true', @@ -29,70 +34,70 @@ 'lists.list18=\n \n\nding\ndong', ]) -print repr(testui.configitems('values')) -print repr(testui.configitems('lists')) -print "---" -print repr(testui.config('values', 'string')) -print repr(testui.config('values', 'bool1')) -print repr(testui.config('values', 'bool2')) -print repr(testui.config('values', 'unknown')) -print "---" +print(repr(testui.configitems('values'))) +print(repr(testui.configitems('lists'))) +print("---") +print(repr(testui.config('values', 'string'))) +print(repr(testui.config('values', 'bool1'))) +print(repr(testui.config('values', 'bool2'))) +print(repr(testui.config('values', 'unknown'))) +print("---") try: - print repr(testui.configbool('values', 'string')) + print(repr(testui.configbool('values', 'string'))) except error.ConfigError as inst: - print inst -print repr(testui.configbool('values', 'bool1')) -print repr(testui.configbool('values', 'bool2')) -print repr(testui.configbool('values', 'bool2', True)) -print repr(testui.configbool('values', 'unknown')) -print repr(testui.configbool('values', 'unknown', True)) -print "---" -print repr(testui.configint('values', 'int1')) -print repr(testui.configint('values', 'int2')) -print "---" -print repr(testui.configlist('lists', 'list1')) -print repr(testui.configlist('lists', 'list2')) -print repr(testui.configlist('lists', 'list3')) -print repr(testui.configlist('lists', 'list4')) -print repr(testui.configlist('lists', 'list4', ['foo'])) -print repr(testui.configlist('lists', 'list5')) -print repr(testui.configlist('lists', 'list6')) -print 
repr(testui.configlist('lists', 'list7')) -print repr(testui.configlist('lists', 'list8')) -print repr(testui.configlist('lists', 'list9')) -print repr(testui.configlist('lists', 'list10')) -print repr(testui.configlist('lists', 'list11')) -print repr(testui.configlist('lists', 'list12')) -print repr(testui.configlist('lists', 'list13')) -print repr(testui.configlist('lists', 'list14')) -print repr(testui.configlist('lists', 'list15')) -print repr(testui.configlist('lists', 'list16')) -print repr(testui.configlist('lists', 'list17')) -print repr(testui.configlist('lists', 'list18')) -print repr(testui.configlist('lists', 'unknown')) -print repr(testui.configlist('lists', 'unknown', '')) -print repr(testui.configlist('lists', 'unknown', 'foo')) -print repr(testui.configlist('lists', 'unknown', ['foo'])) -print repr(testui.configlist('lists', 'unknown', 'foo bar')) -print repr(testui.configlist('lists', 'unknown', 'foo, bar')) -print repr(testui.configlist('lists', 'unknown', ['foo bar'])) -print repr(testui.configlist('lists', 'unknown', ['foo', 'bar'])) + print(inst) +print(repr(testui.configbool('values', 'bool1'))) +print(repr(testui.configbool('values', 'bool2'))) +print(repr(testui.configbool('values', 'bool2', True))) +print(repr(testui.configbool('values', 'unknown'))) +print(repr(testui.configbool('values', 'unknown', True))) +print("---") +print(repr(testui.configint('values', 'int1'))) +print(repr(testui.configint('values', 'int2'))) +print("---") +print(repr(testui.configlist('lists', 'list1'))) +print(repr(testui.configlist('lists', 'list2'))) +print(repr(testui.configlist('lists', 'list3'))) +print(repr(testui.configlist('lists', 'list4'))) +print(repr(testui.configlist('lists', 'list4', ['foo']))) +print(repr(testui.configlist('lists', 'list5'))) +print(repr(testui.configlist('lists', 'list6'))) +print(repr(testui.configlist('lists', 'list7'))) +print(repr(testui.configlist('lists', 'list8'))) +print(repr(testui.configlist('lists', 'list9'))) 
+print(repr(testui.configlist('lists', 'list10'))) +print(repr(testui.configlist('lists', 'list11'))) +print(repr(testui.configlist('lists', 'list12'))) +print(repr(testui.configlist('lists', 'list13'))) +print(repr(testui.configlist('lists', 'list14'))) +print(repr(testui.configlist('lists', 'list15'))) +print(repr(testui.configlist('lists', 'list16'))) +print(repr(testui.configlist('lists', 'list17'))) +print(repr(testui.configlist('lists', 'list18'))) +print(repr(testui.configlist('lists', 'unknown'))) +print(repr(testui.configlist('lists', 'unknown', ''))) +print(repr(testui.configlist('lists', 'unknown', 'foo'))) +print(repr(testui.configlist('lists', 'unknown', ['foo']))) +print(repr(testui.configlist('lists', 'unknown', 'foo bar'))) +print(repr(testui.configlist('lists', 'unknown', 'foo, bar'))) +print(repr(testui.configlist('lists', 'unknown', ['foo bar']))) +print(repr(testui.configlist('lists', 'unknown', ['foo', 'bar']))) -print repr(testui.config('values', 'String')) +print(repr(testui.config('values', 'String'))) def function(): pass # values that aren't strings should work testui.setconfig('hook', 'commit', function) -print function == testui.config('hook', 'commit') +print(function == testui.config('hook', 'commit')) # invalid values try: testui.configbool('values', 'boolinvalid') except error.ConfigError: - print 'boolinvalid' + print('boolinvalid') try: testui.configint('values', 'intinvalid') except error.ConfigError: - print 'intinvalid' + print('intinvalid')
--- a/tests/test-ui-config.py.out Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-ui-config.py.out Sat Apr 16 18:06:48 2016 -0500 @@ -1,5 +1,5 @@ [('string', 'string value'), ('bool1', 'true'), ('bool2', 'false'), ('boolinvalid', 'foo'), ('int1', '42'), ('int2', '-42'), ('intinvalid', 'foo')] -[('list1', 'foo'), ('list2', 'foo bar baz'), ('list3', 'alice, bob'), ('list4', 'foo bar baz alice, bob'), ('list5', 'abc d"ef"g "hij def"'), ('list6', '"hello world", "how are you?"'), ('list7', 'Do"Not"Separate'), ('list8', '"Do"Separate'), ('list9', '"Do\\"NotSeparate"'), ('list10', 'string "with extraneous" quotation mark"'), ('list11', 'x, y'), ('list12', '"x", "y"'), ('list13', '""" key = "x", "y" """'), ('list14', ',,,, '), ('list15', '" just with starting quotation'), ('list16', '"longer quotation" with "no ending quotation'), ('list17', 'this is \\" "not a quotation mark"'), ('list18', '\n \n\nding\ndong')] +[('list1', 'foo'), ('list2', 'foo bar baz'), ('list3', 'alice, bob'), ('list4', 'foo bar baz alice, bob'), ('list5', 'abc d"ef"g "hij def"'), ('list6', '"hello world", "how are you?"'), ('list7', 'Do"Not"Separate'), ('list8', '"Do"Separate'), ('list9', '"Do\\"NotSeparate"'), ('list10', 'string "with extraneous" quotation mark"'), ('list11', 'x, y'), ('list12', '"x", "y"'), ('list13', '""" key = "x", "y" """'), ('list14', ',,,,'), ('list15', '" just with starting quotation'), ('list16', '"longer quotation" with "no ending quotation'), ('list17', 'this is \\" "not a quotation mark"'), ('list18', 'ding\ndong')] --- 'string value' 'true'
--- a/tests/test-ui-verbosity.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-ui-verbosity.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,13 +1,17 @@ +from __future__ import absolute_import, print_function + import os -from mercurial import ui +from mercurial import ( + ui as uimod, +) hgrc = os.environ['HGRCPATH'] f = open(hgrc) basehgrc = f.read() f.close() -print ' hgrc settings command line options final result ' -print ' quiet verbo debug quiet verbo debug quiet verbo debug' +print(' hgrc settings command line options final result ') +print(' quiet verbo debug quiet verbo debug quiet verbo debug') for i in xrange(64): hgrc_quiet = bool(i & 1<<0) @@ -28,7 +32,7 @@ f.write('debug = True\n') f.close() - u = ui.ui() + u = uimod.ui() if cmd_quiet or cmd_debug or cmd_verbose: u.setconfig('ui', 'quiet', str(bool(cmd_quiet))) u.setconfig('ui', 'verbose', str(bool(cmd_verbose))) @@ -41,7 +45,7 @@ elif u.verbose and u.quiet: check = ' +' - print ('%2d %5s %5s %5s %5s %5s %5s -> %5s %5s %5s%s' + print(('%2d %5s %5s %5s %5s %5s %5s -> %5s %5s %5s%s' % (i, hgrc_quiet, hgrc_verbose, hgrc_debug, cmd_quiet, cmd_verbose, cmd_debug, - u.quiet, u.verbose, u.debugflag, check)) + u.quiet, u.verbose, u.debugflag, check)))
--- a/tests/test-up-local-change.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-up-local-change.t Sat Apr 16 18:06:48 2016 -0500 @@ -73,6 +73,7 @@ preserving a for resolve of a b: other deleted -> r removing b + starting 4 threads for background file closing (?) a: versions differ -> m (premerge) picked tool 'true' for a (binary False symlink False changedelete False) merging a @@ -172,9 +173,8 @@ summary: 2 $ hg --debug up - abort: uncommitted changes - (commit and merge, or update --clean to discard changes) - [255] + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" test conflicting untracked files
--- a/tests/test-update-branches.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-update-branches.t Sat Apr 16 18:06:48 2016 -0500 @@ -93,8 +93,8 @@ parent=5 $ norevtest 'none clean same' clean 2 - abort: not a linear update - (merge or update --check to force update) + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" parent=2 @@ -140,8 +140,8 @@ M foo $ norevtest 'none dirty cross' dirty 2 - abort: uncommitted changes - (commit and merge, or update --clean to discard changes) + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" parent=2 M foo @@ -166,14 +166,128 @@ M sub/suba $ norevtest '-c clean same' clean 2 -c - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - parent=3 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + 1 other heads for branch "default" + parent=2 $ revtest '-cC dirty linear' dirty 1 2 -cC abort: cannot specify both -c/--check and -C/--clean parent=1 M foo + $ cd .. + +Test updating with closed head +--------------------------------------------------------------------- + + $ hg clone -U -q b1 closed-heads + $ cd closed-heads + +Test updating if at least one non-closed branch head exists + +if on the closed branch head: +- update to "." +- "updated to a closed branch head ...." message is displayed +- "N other heads for ...." message is displayed + + $ hg update -q -C 3 + $ hg commit --close-branch -m 6 + $ norevtest "on closed branch head" clean 6 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + no open descendant heads on branch "default", updating to a closed head + (committing will reopen the head, use `hg heads .` to see 1 other heads) + parent=6 + +if descendant non-closed branch head exists, and it is only one branch head: +- update to it, even if its revision is less than closed one +- "N other heads for ...." 
message isn't displayed + + $ norevtest "non-closed 2 should be chosen" clean 1 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + parent=2 + +if all descendant branch heads are closed, but there is another branch head: +- update to the tipmost descendant head +- "updated to a closed branch head ...." message is displayed +- "N other heads for ...." message is displayed + + $ norevtest "all descendant branch heads are closed" clean 3 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + no open descendant heads on branch "default", updating to a closed head + (committing will reopen the head, use `hg heads .` to see 1 other heads) + parent=6 + +Test updating if all branch heads are closed + +if on the closed branch head: +- update to "." +- "updated to a closed branch head ...." message is displayed +- "all heads of branch ...." message is displayed + + $ hg update -q -C 2 + $ hg commit --close-branch -m 7 + $ norevtest "all heads of branch default are closed" clean 6 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + no open descendant heads on branch "default", updating to a closed head + (committing will reopen branch "default") + parent=6 + +if not on the closed branch head: +- update to the tipmost descendant (closed) head +- "updated to a closed branch head ...." message is displayed +- "all heads of branch ...." message is displayed + + $ norevtest "all heads of branch default are closed" clean 1 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + no open descendant heads on branch "default", updating to a closed head + (committing will reopen branch "default") + parent=7 + + $ cd .. 
+ +Test updating if "default" branch doesn't exist and no revision is +checked out (= "default" is used as current branch) + + $ hg init no-default-branch + $ cd no-default-branch + + $ hg branch foobar + marked working directory as branch foobar + (branches are permanent and global, did you want a bookmark?) + $ echo a > a + $ hg commit -m "#0" -A + adding a + $ echo 1 >> a + $ hg commit -m "#1" + $ hg update -q 0 + $ echo 3 >> a + $ hg commit -m "#2" + created new head + $ hg commit --close-branch -m "#3" + +if there is at least one non-closed branch head: +- update to the tipmost branch head + + $ norevtest "non-closed 1 should be chosen" clean null + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + parent=1 + +if all branch heads are closed +- update to "tip" +- "updated to a closed branch head ...." message is displayed +- "all heads for branch "XXXX" are closed" message is displayed + + $ hg update -q -C 1 + $ hg commit --close-branch -m "#4" + + $ norevtest "all branches are closed" clean null + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + no open descendant heads on branch "foobar", updating to a closed head + (committing will reopen branch "foobar") + parent=4 + + $ cd ../b1 + Test obsolescence behavior ---------------------------------------------------------------------
--- a/tests/test-url.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-url.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,14 +1,20 @@ +from __future__ import absolute_import, print_function + +import doctest import os def check(a, b): if a != b: - print (a, b) + print((a, b)) def cert(cn): return {'subject': ((('commonName', cn),),)} -from mercurial.sslutil import _verifycert +from mercurial import ( + sslutil, +) +_verifycert = sslutil._verifycert # Test non-wildcard certificates check(_verifycert(cert('example.com'), 'example.com'), None) @@ -57,8 +63,6 @@ check(_verifycert(cert(u'\u4f8b.jp'), 'example.jp'), 'IDN in certificate not supported') -import doctest - def test_url(): """ >>> from mercurial.util import url
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-verify-repo-operations.py Sat Apr 16 18:06:48 2016 -0500 @@ -0,0 +1,603 @@ +from __future__ import print_function, absolute_import + +"""Fuzz testing for operations against a Mercurial repository + +This uses Hypothesis's stateful testing to generate random repository +operations and test Mercurial using them, both to see if there are any +unexpected errors and to compare different versions of it.""" + +import os +import subprocess +import sys + +# Only run if slow tests are allowed +if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], + 'slow']): + sys.exit(80) + +# These tests require Hypothesis and pytz to be installed. +# Running 'pip install hypothesis pytz' will achieve that. +# Note: This won't work if you're running Python < 2.7. +try: + from hypothesis.extra.datetime import datetimes +except ImportError: + sys.stderr.write("skipped: hypothesis or pytz not installed" + os.linesep) + sys.exit(80) + +# If you are running an old version of pip you may find that the enum34 +# backport is not installed automatically. If so 'pip install enum34' will +# fix this problem. 
+try: + import enum + assert enum # Silence pyflakes +except ImportError: + sys.stderr.write("skipped: enum34 not installed" + os.linesep) + sys.exit(80) + +import binascii +from contextlib import contextmanager +import errno +import pipes +import shutil +import silenttestrunner +import subprocess + +from hypothesis.errors import HypothesisException +from hypothesis.stateful import ( + rule, RuleBasedStateMachine, Bundle, precondition) +from hypothesis import settings, note, strategies as st +from hypothesis.configuration import set_hypothesis_home_dir +from hypothesis.database import ExampleDatabase + +testdir = os.path.abspath(os.environ["TESTDIR"]) + +# We store Hypothesis examples here rather in the temporary test directory +# so that when rerunning a failing test this always results in refinding the +# previous failure. This directory is in .hgignore and should not be checked in +# but is useful to have for development. +set_hypothesis_home_dir(os.path.join(testdir, ".hypothesis")) + +runtests = os.path.join(os.environ["RUNTESTDIR"], "run-tests.py") +testtmp = os.environ["TESTTMP"] +assert os.path.isdir(testtmp) + +generatedtests = os.path.join(testdir, "hypothesis-generated") + +try: + os.makedirs(generatedtests) +except OSError: + pass + +# We write out generated .t files to a file in order to ease debugging and to +# give a starting point for turning failures Hypothesis finds into normal +# tests. In order to ensure that multiple copies of this test can be run in +# parallel we use atomic file create to ensure that we always get a unique +# name. 
+file_index = 0 +while True: + file_index += 1 + savefile = os.path.join(generatedtests, "test-generated-%d.t" % ( + file_index, + )) + try: + os.close(os.open(savefile, os.O_CREAT | os.O_EXCL | os.O_WRONLY)) + break + except OSError as e: + if e.errno != errno.EEXIST: + raise +assert os.path.exists(savefile) + +hgrc = os.path.join(".hg", "hgrc") + +filecharacters = ( + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + "[]^_`;=@{}~ !#$%&'()+,-" +) + +files = st.text(filecharacters, min_size=1).map(lambda x: x.strip()).filter( + bool).map(lambda s: s.encode('ascii')) + +safetext = st.text(st.characters( + min_codepoint=1, max_codepoint=127, + blacklist_categories=('Cc', 'Cs')), min_size=1).map( + lambda s: s.encode('utf-8') +) + +extensions = st.sampled_from(('shelve', 'mq', 'blackbox',)) + +@contextmanager +def acceptableerrors(*args): + """Sometimes we know an operation we're about to perform might fail, and + we're OK with some of the failures. In those cases this may be used as a + context manager and will swallow expected failures, as identified by + substrings of the error message Mercurial emits.""" + try: + yield + except subprocess.CalledProcessError as e: + if not any(a in e.output for a in args): + note(e.output) + raise + +reponames = st.text("abcdefghijklmnopqrstuvwxyz01234556789", min_size=1).map( + lambda s: s.encode('ascii') +) + +class verifyingstatemachine(RuleBasedStateMachine): + """This defines the set of acceptable operations on a Mercurial repository + using Hypothesis's RuleBasedStateMachine. + + The general concept is that we manage multiple repositories inside a + repos/ directory in our temporary test location. Some of these are freshly + inited, some are clones of the others. Our current working directory is + always inside one of these repositories while the tests are running. 
+ + Hypothesis then performs a series of operations against these repositories, + including hg commands, generating contents and editing the .hgrc file. + If these operations fail in unexpected ways or behave differently in + different configurations of Mercurial, the test will fail and a minimized + .t test file will be written to the hypothesis-generated directory to + exhibit that failure. + + Operations are defined as methods with @rule() decorators. See the + Hypothesis documentation at + http://hypothesis.readthedocs.org/en/release/stateful.html for more + details.""" + + # A bundle is a reusable collection of previously generated data which may + # be provided as arguments to future operations. + repos = Bundle('repos') + paths = Bundle('paths') + contents = Bundle('contents') + branches = Bundle('branches') + committimes = Bundle('committimes') + + def __init__(self): + super(verifyingstatemachine, self).__init__() + self.repodir = os.path.join(testtmp, "repos") + if os.path.exists(self.repodir): + shutil.rmtree(self.repodir) + os.chdir(testtmp) + self.log = [] + self.failed = False + self.configperrepo = {} + self.all_extensions = set() + self.non_skippable_extensions = set() + + self.mkdirp("repos") + self.cd("repos") + self.mkdirp("repo1") + self.cd("repo1") + self.hg("init") + + def teardown(self): + """On teardown we clean up after ourselves as usual, but we also + do some additional testing: We generate a .t file based on our test + run using run-test.py -i to get the correct output. 
+ + We then test it in a number of other configurations, verifying that + each passes the same test.""" + super(verifyingstatemachine, self).teardown() + try: + shutil.rmtree(self.repodir) + except OSError: + pass + ttest = os.linesep.join(" " + l for l in self.log) + os.chdir(testtmp) + path = os.path.join(testtmp, "test-generated.t") + with open(path, 'w') as o: + o.write(ttest + os.linesep) + with open(os.devnull, "w") as devnull: + rewriter = subprocess.Popen( + [runtests, "--local", "-i", path], stdin=subprocess.PIPE, + stdout=devnull, stderr=devnull, + ) + rewriter.communicate("yes") + with open(path, 'r') as i: + ttest = i.read() + + e = None + if not self.failed: + try: + output = subprocess.check_output([ + runtests, path, "--local", "--pure" + ], stderr=subprocess.STDOUT) + assert "Ran 1 test" in output, output + for ext in ( + self.all_extensions - self.non_skippable_extensions + ): + tf = os.path.join(testtmp, "test-generated-no-%s.t" % ( + ext, + )) + with open(tf, 'w') as o: + for l in ttest.splitlines(): + if l.startswith(" $ hg"): + l = l.replace( + "--config %s=" % ( + extensionconfigkey(ext),), "") + o.write(l + os.linesep) + with open(tf, 'r') as r: + t = r.read() + assert ext not in t, t + output = subprocess.check_output([ + runtests, tf, "--local", + ], stderr=subprocess.STDOUT) + assert "Ran 1 test" in output, output + except subprocess.CalledProcessError as e: + note(e.output) + if self.failed or e is not None: + with open(savefile, "wb") as o: + o.write(ttest) + if e is not None: + raise e + + def execute_step(self, step): + try: + return super(verifyingstatemachine, self).execute_step(step) + except (HypothesisException, KeyboardInterrupt): + raise + except Exception: + self.failed = True + raise + + # Section: Basic commands. 
+ def mkdirp(self, path): + if os.path.exists(path): + return + self.log.append( + "$ mkdir -p -- %s" % (pipes.quote(os.path.relpath(path)),)) + os.makedirs(path) + + def cd(self, path): + path = os.path.relpath(path) + if path == ".": + return + os.chdir(path) + self.log.append("$ cd -- %s" % (pipes.quote(path),)) + + def hg(self, *args): + extra_flags = [] + for key, value in self.config.items(): + extra_flags.append("--config") + extra_flags.append("%s=%s" % (key, value)) + self.command("hg", *(tuple(extra_flags) + args)) + + def command(self, *args): + self.log.append("$ " + ' '.join(map(pipes.quote, args))) + subprocess.check_output(args, stderr=subprocess.STDOUT) + + # Section: Set up basic data + # This section has no side effects but generates data that we will want + # to use later. + @rule( + target=paths, + source=st.lists(files, min_size=1).map(lambda l: os.path.join(*l))) + def genpath(self, source): + return source + + @rule( + target=committimes, + when=datetimes(min_year=1970, max_year=2038) | st.none()) + def gentime(self, when): + return when + + @rule( + target=contents, + content=st.one_of( + st.binary(), + st.text().map(lambda x: x.encode('utf-8')) + )) + def gencontent(self, content): + return content + + @rule( + target=branches, + name=safetext, + ) + def genbranch(self, name): + return name + + @rule(target=paths, source=paths) + def lowerpath(self, source): + return source.lower() + + @rule(target=paths, source=paths) + def upperpath(self, source): + return source.upper() + + # Section: Basic path operations + @rule(path=paths, content=contents) + def writecontent(self, path, content): + self.unadded_changes = True + if os.path.isdir(path): + return + parent = os.path.dirname(path) + if parent: + try: + self.mkdirp(parent) + except OSError: + # It may be the case that there is a regular file that has + # previously been created that has the same name as an ancestor + # of the current path. 
This will cause mkdirp to fail with this + # error. We just turn this into a no-op in that case. + return + with open(path, 'wb') as o: + o.write(content) + self.log.append(( + "$ python -c 'import binascii; " + "print(binascii.unhexlify(\"%s\"))' > %s") % ( + binascii.hexlify(content), + pipes.quote(path), + )) + + @rule(path=paths) + def addpath(self, path): + if os.path.exists(path): + self.hg("add", "--", path) + + @rule(path=paths) + def forgetpath(self, path): + if os.path.exists(path): + with acceptableerrors( + "file is already untracked", + ): + self.hg("forget", "--", path) + + @rule(s=st.none() | st.integers(0, 100)) + def addremove(self, s): + args = ["addremove"] + if s is not None: + args.extend(["-s", str(s)]) + self.hg(*args) + + @rule(path=paths) + def removepath(self, path): + if os.path.exists(path): + with acceptableerrors( + 'file is untracked', + 'file has been marked for add', + 'file is modified', + ): + self.hg("remove", "--", path) + + @rule( + message=safetext, + amend=st.booleans(), + when=committimes, + addremove=st.booleans(), + secret=st.booleans(), + close_branch=st.booleans(), + ) + def maybecommit( + self, message, amend, when, addremove, secret, close_branch + ): + command = ["commit"] + errors = ["nothing changed"] + if amend: + errors.append("cannot amend public changesets") + command.append("--amend") + command.append("-m" + pipes.quote(message)) + if secret: + command.append("--secret") + if close_branch: + command.append("--close-branch") + errors.append("can only close branch heads") + if addremove: + command.append("--addremove") + if when is not None: + if when.year == 1970: + errors.append('negative date value') + if when.year == 2038: + errors.append('exceeds 32 bits') + command.append("--date=%s" % ( + when.strftime('%Y-%m-%d %H:%M:%S %z'),)) + + with acceptableerrors(*errors): + self.hg(*command) + + # Section: Repository management + @property + def currentrepo(self): + return os.path.basename(os.getcwd()) + + 
@property + def config(self): + return self.configperrepo.setdefault(self.currentrepo, {}) + + @rule( + target=repos, + source=repos, + name=reponames, + ) + def clone(self, source, name): + if not os.path.exists(os.path.join("..", name)): + self.cd("..") + self.hg("clone", source, name) + self.cd(name) + return name + + @rule( + target=repos, + name=reponames, + ) + def fresh(self, name): + if not os.path.exists(os.path.join("..", name)): + self.cd("..") + self.mkdirp(name) + self.cd(name) + self.hg("init") + return name + + @rule(name=repos) + def switch(self, name): + self.cd(os.path.join("..", name)) + assert self.currentrepo == name + assert os.path.exists(".hg") + + @rule(target=repos) + def origin(self): + return "repo1" + + @rule() + def pull(self, repo=repos): + with acceptableerrors( + "repository default not found", + "repository is unrelated", + ): + self.hg("pull") + + @rule(newbranch=st.booleans()) + def push(self, newbranch): + with acceptableerrors( + "default repository not configured", + "no changes found", + ): + if newbranch: + self.hg("push", "--new-branch") + else: + with acceptableerrors( + "creates new branches" + ): + self.hg("push") + + # Section: Simple side effect free "check" operations + @rule() + def log(self): + self.hg("log") + + @rule() + def verify(self): + self.hg("verify") + + @rule() + def diff(self): + self.hg("diff", "--nodates") + + @rule() + def status(self): + self.hg("status") + + @rule() + def export(self): + self.hg("export") + + # Section: Branch management + @rule() + def checkbranch(self): + self.hg("branch") + + @rule(branch=branches) + def switchbranch(self, branch): + with acceptableerrors( + 'cannot use an integer as a name', + 'cannot be used in a name', + 'a branch of the same name already exists', + 'is reserved', + ): + self.hg("branch", "--", branch) + + @rule(branch=branches, clean=st.booleans()) + def update(self, branch, clean): + with acceptableerrors( + 'unknown revision', + 'parse error', + ): + if 
clean: + self.hg("update", "-C", "--", branch) + else: + self.hg("update", "--", branch) + + # Section: Extension management + def hasextension(self, extension): + return extensionconfigkey(extension) in self.config + + def commandused(self, extension): + assert extension in self.all_extensions + self.non_skippable_extensions.add(extension) + + @rule(extension=extensions) + def addextension(self, extension): + self.all_extensions.add(extension) + self.config[extensionconfigkey(extension)] = "" + + @rule(extension=extensions) + def removeextension(self, extension): + self.config.pop(extensionconfigkey(extension), None) + + # Section: Commands from the shelve extension + @rule() + @precondition(lambda self: self.hasextension("shelve")) + def shelve(self): + self.commandused("shelve") + with acceptableerrors("nothing changed"): + self.hg("shelve") + + @rule() + @precondition(lambda self: self.hasextension("shelve")) + def unshelve(self): + self.commandused("shelve") + with acceptableerrors("no shelved changes to apply"): + self.hg("unshelve") + +class writeonlydatabase(ExampleDatabase): + def __init__(self, underlying): + super(ExampleDatabase, self).__init__() + self.underlying = underlying + + def fetch(self, key): + return () + + def save(self, key, value): + self.underlying.save(key, value) + + def delete(self, key, value): + self.underlying.delete(key, value) + + def close(self): + self.underlying.close() + +def extensionconfigkey(extension): + return "extensions." 
+ extension + +settings.register_profile( + 'default', settings( + timeout=300, + stateful_step_count=50, + max_examples=10, + ) +) + +settings.register_profile( + 'fast', settings( + timeout=10, + stateful_step_count=20, + max_examples=5, + min_satisfying_examples=1, + max_shrinks=0, + ) +) + +settings.register_profile( + 'continuous', settings( + timeout=-1, + stateful_step_count=1000, + max_examples=10 ** 8, + max_iterations=10 ** 8, + database=writeonlydatabase(settings.default.database) + ) +) + +settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default')) + +verifyingtest = verifyingstatemachine.TestCase + +verifyingtest.settings = settings.default + +if __name__ == '__main__': + try: + silenttestrunner.main(__name__) + finally: + # So as to prevent proliferation of useless test files, if we never + # actually wrote a failing test we clean up after ourselves and delete + # the file for doing so that we owned. + if os.path.exists(savefile) and os.path.getsize(savefile) == 0: + os.unlink(savefile)
--- a/tests/test-verify.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-verify.t Sat Apr 16 18:06:48 2016 -0500 @@ -46,13 +46,13 @@ checking files warning: revlog 'data/FOO.txt.i' not in fncache! 0: empty or missing FOO.txt - FOO.txt@0: f62022d3d590 in manifests not found + FOO.txt@0: manifest refers to unknown revision f62022d3d590 warning: revlog 'data/QUICK.txt.i' not in fncache! 0: empty or missing QUICK.txt - QUICK.txt@0: 88b857db8eba in manifests not found + QUICK.txt@0: manifest refers to unknown revision 88b857db8eba warning: revlog 'data/bar.txt.i' not in fncache! 0: empty or missing bar.txt - bar.txt@0: 256559129457 in manifests not found + bar.txt@0: manifest refers to unknown revision 256559129457 3 files, 1 changesets, 0 total revisions 3 warnings encountered! hint: run "hg debugrebuildfncache" to recover from corrupt fncache @@ -63,6 +63,208 @@ $ cd ../../.. $ cd .. +Set up a repo for testing missing revlog entries + + $ hg init missing-entries + $ cd missing-entries + $ echo 0 > file + $ hg ci -Aqm0 + $ cp -r .hg/store .hg/store-partial + $ echo 1 > file + $ hg ci -Aqm1 + $ cp -r .hg/store .hg/store-full + +Entire changelog missing + + $ rm .hg/store/00changelog.* + $ hg verify -q + 0: empty or missing changelog + manifest@0: d0b6632564d4 not in changesets + manifest@1: 941fc4534185 not in changesets + 3 integrity errors encountered! + (first damaged changeset appears to be 0) + [1] + $ cp -r .hg/store-full/. .hg/store + +Entire manifest log missing + + $ rm .hg/store/00manifest.* + $ hg verify -q + 0: empty or missing manifest + 1 integrity errors encountered! + (first damaged changeset appears to be 0) + [1] + $ cp -r .hg/store-full/. .hg/store + +Entire filelog missing + + $ rm .hg/store/data/file.* + $ hg verify -q + warning: revlog 'data/file.i' not in fncache! + 0: empty or missing file + file@0: manifest refers to unknown revision 362fef284ce2 + file@1: manifest refers to unknown revision c10f2164107d + 1 warnings encountered! 
+ hint: run "hg debugrebuildfncache" to recover from corrupt fncache + 3 integrity errors encountered! + (first damaged changeset appears to be 0) + [1] + $ cp -r .hg/store-full/. .hg/store + +Entire changelog and manifest log missing + + $ rm .hg/store/00changelog.* + $ rm .hg/store/00manifest.* + $ hg verify -q + warning: orphan revlog 'data/file.i' + 1 warnings encountered! + $ cp -r .hg/store-full/. .hg/store + +Entire changelog and filelog missing + + $ rm .hg/store/00changelog.* + $ rm .hg/store/data/file.* + $ hg verify -q + 0: empty or missing changelog + manifest@0: d0b6632564d4 not in changesets + manifest@1: 941fc4534185 not in changesets + warning: revlog 'data/file.i' not in fncache! + ?: empty or missing file + file@0: manifest refers to unknown revision 362fef284ce2 + file@1: manifest refers to unknown revision c10f2164107d + 1 warnings encountered! + hint: run "hg debugrebuildfncache" to recover from corrupt fncache + 6 integrity errors encountered! + (first damaged changeset appears to be 0) + [1] + $ cp -r .hg/store-full/. .hg/store + +Entire manifest log and filelog missing + + $ rm .hg/store/00manifest.* + $ rm .hg/store/data/file.* + $ hg verify -q + 0: empty or missing manifest + warning: revlog 'data/file.i' not in fncache! + 0: empty or missing file + 1 warnings encountered! + hint: run "hg debugrebuildfncache" to recover from corrupt fncache + 2 integrity errors encountered! + (first damaged changeset appears to be 0) + [1] + $ cp -r .hg/store-full/. .hg/store + +Changelog missing entry + + $ cp -f .hg/store-partial/00changelog.* .hg/store + $ hg verify -q + manifest@?: rev 1 points to nonexistent changeset 1 + manifest@?: 941fc4534185 not in changesets + file@?: rev 1 points to nonexistent changeset 1 + (expected 0) + 1 warnings encountered! + 3 integrity errors encountered! + [1] + $ cp -r .hg/store-full/. 
.hg/store + +Manifest log missing entry + + $ cp -f .hg/store-partial/00manifest.* .hg/store + $ hg verify -q + manifest@1: changeset refers to unknown revision 941fc4534185 + file@1: c10f2164107d not in manifests + 2 integrity errors encountered! + (first damaged changeset appears to be 1) + [1] + $ cp -r .hg/store-full/. .hg/store + +Filelog missing entry + + $ cp -f .hg/store-partial/data/file.* .hg/store/data + $ hg verify -q + file@1: manifest refers to unknown revision c10f2164107d + 1 integrity errors encountered! + (first damaged changeset appears to be 1) + [1] + $ cp -r .hg/store-full/. .hg/store + +Changelog and manifest log missing entry + + $ cp -f .hg/store-partial/00changelog.* .hg/store + $ cp -f .hg/store-partial/00manifest.* .hg/store + $ hg verify -q + file@?: rev 1 points to nonexistent changeset 1 + (expected 0) + file@?: c10f2164107d not in manifests + 1 warnings encountered! + 2 integrity errors encountered! + [1] + $ cp -r .hg/store-full/. .hg/store + +Changelog and filelog missing entry + + $ cp -f .hg/store-partial/00changelog.* .hg/store + $ cp -f .hg/store-partial/data/file.* .hg/store/data + $ hg verify -q + manifest@?: rev 1 points to nonexistent changeset 1 + manifest@?: 941fc4534185 not in changesets + file@?: manifest refers to unknown revision c10f2164107d + 3 integrity errors encountered! + [1] + $ cp -r .hg/store-full/. .hg/store + +Manifest and filelog missing entry + + $ cp -f .hg/store-partial/00manifest.* .hg/store + $ cp -f .hg/store-partial/data/file.* .hg/store/data + $ hg verify -q + manifest@1: changeset refers to unknown revision 941fc4534185 + 1 integrity errors encountered! + (first damaged changeset appears to be 1) + [1] + $ cp -r .hg/store-full/. 
.hg/store + +Corrupt changelog base node to cause failure to read revision + + $ printf abcd | dd conv=notrunc of=.hg/store/00changelog.i bs=1 seek=16 \ + > 2> /dev/null + $ hg verify -q + 0: unpacking changeset 08b1860757c2: * (glob) + manifest@?: rev 0 points to unexpected changeset 0 + manifest@?: d0b6632564d4 not in changesets + file@?: rev 0 points to unexpected changeset 0 + (expected 1) + 1 warnings encountered! + 4 integrity errors encountered! + (first damaged changeset appears to be 0) + [1] + $ cp -r .hg/store-full/. .hg/store + +Corrupt manifest log base node to cause failure to read revision + + $ printf abcd | dd conv=notrunc of=.hg/store/00manifest.i bs=1 seek=16 \ + > 2> /dev/null + $ hg verify -q + manifest@0: reading delta d0b6632564d4: * (glob) + file@0: 362fef284ce2 not in manifests + 2 integrity errors encountered! + (first damaged changeset appears to be 0) + [1] + $ cp -r .hg/store-full/. .hg/store + +Corrupt filelog base node to cause failure to read revision + + $ printf abcd | dd conv=notrunc of=.hg/store/data/file.i bs=1 seek=16 \ + > 2> /dev/null + $ hg verify -q + file@0: unpacking 362fef284ce2: * (glob) + 1 integrity errors encountered! + (first damaged changeset appears to be 0) + [1] + $ cp -r .hg/store-full/. .hg/store + + $ cd .. + test changelog without a manifest $ hg init b
--- a/tests/test-walkrepo.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-walkrepo.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,11 +1,11 @@ -from __future__ import absolute_import +from __future__ import absolute_import, print_function import os from mercurial import ( hg, scmutil, - ui, + ui as uimod, util, ) @@ -16,7 +16,7 @@ walkrepos = scmutil.walkrepos checklink = util.checklink -u = ui.ui() +u = uimod.ui() sym = checklink('.') hg.repository(u, 'top1', create=1) @@ -34,29 +34,29 @@ def runtest(): reposet = frozenset(walkrepos('.', followsym=True)) if sym and (len(reposet) != 3): - print "reposet = %r" % (reposet,) - print ("Found %d repositories when I should have found 3" - % (len(reposet),)) + print("reposet = %r" % (reposet,)) + print(("Found %d repositories when I should have found 3" + % (len(reposet),))) if (not sym) and (len(reposet) != 2): - print "reposet = %r" % (reposet,) - print ("Found %d repositories when I should have found 2" - % (len(reposet),)) + print("reposet = %r" % (reposet,)) + print(("Found %d repositories when I should have found 2" + % (len(reposet),))) sub1set = frozenset((pjoin('.', 'sub1'), pjoin('.', 'circle', 'subdir', 'sub1'))) if len(sub1set & reposet) != 1: - print "sub1set = %r" % (sub1set,) - print "reposet = %r" % (reposet,) - print "sub1set and reposet should have exactly one path in common." + print("sub1set = %r" % (sub1set,)) + print("reposet = %r" % (reposet,)) + print("sub1set and reposet should have exactly one path in common.") sub2set = frozenset((pjoin('.', 'subsub1'), pjoin('.', 'subsubdir', 'subsub1'))) if len(sub2set & reposet) != 1: - print "sub2set = %r" % (sub2set,) - print "reposet = %r" % (reposet,) - print "sub2set and reposet should have exactly one path in common." 
+ print("sub2set = %r" % (sub2set,)) + print("reposet = %r" % (reposet,)) + print("sub2set and reposet should have exactly one path in common.") sub3 = pjoin('.', 'circle', 'top1') if sym and sub3 not in reposet: - print "reposet = %r" % (reposet,) - print "Symbolic links are supported and %s is not in reposet" % (sub3,) + print("reposet = %r" % (reposet,)) + print("Symbolic links are supported and %s is not in reposet" % (sub3,)) runtest() if sym:
--- a/tests/test-wireproto.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-wireproto.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,6 +1,10 @@ -from __future__ import absolute_import +from __future__ import absolute_import, print_function -from mercurial import wireproto +from mercurial import ( + util, + wireproto, +) +stringio = util.stringio class proto(object): def __init__(self, args): @@ -21,6 +25,9 @@ def _call(self, cmd, **args): return wireproto.dispatch(self.serverrepo, proto(args), cmd) + def _callstream(self, cmd, **args): + return stringio(self._call(cmd, **args)) + @wireproto.batchable def greet(self, name): f = wireproto.future() @@ -47,8 +54,8 @@ srv = serverrepo() clt = clientpeer(srv) -print clt.greet("Foobar") +print(clt.greet("Foobar")) b = clt.batch() fs = [b.greet(s) for s in ["Fo, =;:<o", "Bar"]] b.submit() -print [f.value for f in fs] +print([f.value for f in fs])
--- a/tests/test-wireproto.t Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/test-wireproto.t Sat Apr 16 18:06:48 2016 -0500 @@ -19,7 +19,9 @@ HTTP: - $ hg serve -R repo -p $HGPORT -d --pid-file=hg1.pid -E error.log -A access.log + $ hg serve -R repo -p $HGPORT -d --pid-file=hg1.pid \ + > -E error.log -A access.log \ + > --config experimental.httppostargs=yes $ cat hg1.pid >> $DAEMON_PIDS $ hg debugwireargs http://localhost:$HGPORT/ un deux trois quatre @@ -37,6 +39,61 @@ $ cat error.log $ cat access.log * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:39 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:39 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:43 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:43 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:27 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:27 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:17 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:17 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:17 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:17 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:1033 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:1033 (glob) + +HTTP without args-in-POST: + $ hg serve -R repo -p $HGPORT1 -d --pid-file=hg1.pid -E error.log -A access.log + $ cat hg1.pid >> $DAEMON_PIDS + + $ hg debugwireargs http://localhost:$HGPORT1/ un deux 
trois quatre + un deux trois quatre None + $ hg debugwireargs http://localhost:$HGPORT1/ \ un deux trois\ qu\ \ atre + un deux trois qu atre None + $ hg debugwireargs http://localhost:$HGPORT1/ eins zwei --four vier + eins zwei None vier None + $ hg debugwireargs http://localhost:$HGPORT1/ eins zwei + eins zwei None None None + $ hg debugwireargs http://localhost:$HGPORT1/ eins zwei --five fuenf + eins zwei None None None + $ hg debugwireargs http://localhost:$HGPORT1/ un deux trois onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + un deux trois 
onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx None + $ cat error.log + $ cat access.log + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:39 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:39 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:43 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:43 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:27 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:27 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:17 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:17 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs 
HTTP/1.1" 200 - x-hgargs-post:17 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:17 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:1033 (glob) + * - - [*] "POST /?cmd=debugwireargs HTTP/1.1" 200 - x-hgargs-post:1033 (glob) + * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=quatre&one=un&three=trois&two=deux (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=quatre&one=un&three=trois&two=deux (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
--- a/tests/tinyproxy.py Tue Mar 29 11:54:46 2016 -0500 +++ b/tests/tinyproxy.py Sat Apr 16 18:06:48 2016 -0500 @@ -1,6 +1,6 @@ #!/usr/bin/env python -from __future__ import absolute_import +from __future__ import absolute_import, print_function __doc__ = """Tiny HTTP Proxy. @@ -15,10 +15,11 @@ __version__ = "0.2.1" import BaseHTTPServer +import SocketServer import os import select import socket -import SocketServer +import sys import urlparse class ProxyHandler (BaseHTTPServer.BaseHTTPRequestHandler): @@ -50,7 +51,7 @@ host_port = netloc[:i], int(netloc[i + 1:]) else: host_port = netloc, 80 - print "\t" "connect to %s:%d" % host_port + print("\t" "connect to %s:%d" % host_port) try: soc.connect(host_port) except socket.error as arg: try: msg = arg[1] @@ -70,7 +71,7 @@ self.wfile.write("\r\n") self._read_write(soc, 300) finally: - print "\t" "bye" + print("\t" "bye") soc.close() self.connection.close() @@ -95,7 +96,7 @@ soc.send("\r\n") self._read_write(soc) finally: - print "\t" "bye" + print("\t" "bye") soc.close() self.connection.close() @@ -122,7 +123,7 @@ out.send(data) count = 0 else: - print "\t" "idle", count + print("\t" "idle", count) if count == max_idling: break @@ -140,18 +141,18 @@ a.close() if __name__ == '__main__': - from sys import argv + argv = sys.argv if argv[1:] and argv[1] in ('-h', '--help'): - print argv[0], "[port [allowed_client_name ...]]" + print(argv[0], "[port [allowed_client_name ...]]") else: if argv[2:]: allowed = [] for name in argv[2:]: client = socket.gethostbyname(name) allowed.append(client) - print "Accept: %s (%s)" % (client, name) + print("Accept: %s (%s)" % (client, name)) ProxyHandler.allowed_clients = allowed del argv[2:] else: - print "Any clients will be served..." + print("Any clients will be served...") BaseHTTPServer.test(ProxyHandler, ThreadingHTTPServer)