Mercurial > hg
changeset 41985:b1bc6e5f5249
merge with stable
author | Pulkit Goyal <pulkit@yandex-team.ru> |
---|---|
date | Tue, 19 Mar 2019 16:36:59 +0300 |
parents | f8c5225b9054 (diff) d1c33b2442a7 (current diff) |
children | 95e4ae86329f |
files | mercurial/cmdutil.py tests/test-https.t |
diffstat | 405 files changed, 14231 insertions(+), 6630 deletions(-) [+] |
line wrap: on
line diff
--- a/contrib/bdiff-torture.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/bdiff-torture.py Tue Mar 19 16:36:59 2019 +0300 @@ -25,7 +25,7 @@ try: test1(a, b) - except Exception as inst: + except Exception: reductions += 1 tries = 0 a = a2
--- a/contrib/check-code.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/check-code.py Tue Mar 19 16:36:59 2019 +0300 @@ -40,6 +40,8 @@ except ImportError: re2 = None +import testparseutil + def compilere(pat, multiline=False): if multiline: pat = '(?m)' + pat @@ -231,8 +233,10 @@ (r"( +)(#([^!][^\n]*\S)?)", repcomment), ] -pypats = [ +# common patterns to check *.py +commonpypats = [ [ + (r'\\$', 'Use () to wrap long lines in Python, not \\'), (r'^\s*def\s*\w+\s*\(.*,\s*\(', "tuple parameter unpacking not available in Python 3+"), (r'lambda\s*\(.*,.*\)', @@ -261,7 +265,6 @@ # a pass at the same indent level, which is bogus r'(?P=indent)pass[ \t\n#]' ), 'omit superfluous pass'), - (r'.{81}', "line too long"), (r'[^\n]\Z', "no trailing newline"), (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"), # (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=', @@ -299,7 +302,6 @@ "wrong whitespace around ="), (r'\([^()]*( =[^=]|[^<>!=]= )', "no whitespace around = for named parameters"), - (r'raise Exception', "don't raise generic exceptions"), (r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$', "don't use old-style two-argument raise, use Exception(message)"), (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"), @@ -315,21 +317,12 @@ "use opener.read() instead"), (r'opener\([^)]*\).write\(', "use opener.write() instead"), - (r'[\s\(](open|file)\([^)]*\)\.read\(', - "use util.readfile() instead"), - (r'[\s\(](open|file)\([^)]*\)\.write\(', - "use util.writefile() instead"), - (r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))', - "always assign an opened file to a variable, and close it afterwards"), - (r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))', - "always assign an opened file to a variable, and close it afterwards"), (r'(?i)descend[e]nt', "the proper spelling is descendAnt"), (r'\.debug\(\_', "don't mark debug messages for translation"), (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"), (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'), 
(r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,', 'legacy exception syntax; use "as" instead of ","'), - (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"), (r'release\(.*wlock, .*lock\)', "wrong lock release order"), (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"), (r'os\.path\.join\(.*, *(""|\'\')\)', @@ -339,7 +332,6 @@ (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"), (r'\butil\.Abort\b', "directly use error.Abort"), (r'^@(\w*\.)?cachefunc', "module-level @cachefunc is risky, please avoid"), - (r'^import atexit', "don't use atexit, use ui.atexit"), (r'^import Queue', "don't use Queue, use pycompat.queue.Queue + " "pycompat.queue.Empty"), (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"), @@ -358,6 +350,34 @@ "don't convert rev to node before passing to revision(nodeorrev)"), (r'platform\.system\(\)', "don't use platform.system(), use pycompat"), + ], + # warnings + [ + ] +] + +# patterns to check normal *.py files +pypats = [ + [ + # Ideally, these should be placed in "commonpypats" for + # consistency of coding rules in Mercurial source tree. + # But on the other hand, these are not so seriously required for + # python code fragments embedded in test scripts. Fixing test + # scripts for these patterns requires many changes, and has less + # profit than effort. 
+ (r'.{81}', "line too long"), + (r'raise Exception', "don't raise generic exceptions"), + (r'[\s\(](open|file)\([^)]*\)\.read\(', + "use util.readfile() instead"), + (r'[\s\(](open|file)\([^)]*\)\.write\(', + "use util.writefile() instead"), + (r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))', + "always assign an opened file to a variable, and close it afterwards"), + (r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))', + "always assign an opened file to a variable, and close it afterwards"), + (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"), + (r'^import atexit', "don't use atexit, use ui.atexit"), + # rules depending on implementation of repquote() (r' x+[xpqo%APM][\'"]\n\s+[\'"]x', 'string join across lines with no space'), @@ -376,21 +396,35 @@ # because _preparepats forcibly adds "\n" into [^...], # even though this regexp wants match it against "\n")''', "missing _() in ui message (use () to hide false-positives)"), - ], + ] + commonpypats[0], # warnings [ # rules depending on implementation of repquote() (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"), - ] + ] + commonpypats[1] ] -pyfilters = [ +# patterns to check *.py for embedded ones in test script +embeddedpypats = [ + [ + ] + commonpypats[0], + # warnings + [ + ] + commonpypats[1] +] + +# common filters to convert *.py +commonpyfilters = [ (r"""(?msx)(?P<comment>\#.*?$)| ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!"))) (?P<text>(([^\\]|\\.)*?)) (?P=quote))""", reppython), ] +# filters to convert normal *.py files +pyfilters = [ +] + commonpyfilters + # non-filter patterns pynfpats = [ [ @@ -403,6 +437,10 @@ [], ] +# filters to convert *.py for embedded ones in test script +embeddedpyfilters = [ +] + commonpyfilters + # extension non-filter patterns pyextnfpats = [ [(r'^"""\n?[A-Z]', "don't capitalize docstring title")], @@ -414,7 +452,7 @@ txtpats = [ [ - ('\s$', 'trailing whitespace'), + (r'\s$', 'trailing whitespace'), ('.. 
note::[ \n][^\n]', 'add two newlines after note::') ], [] @@ -537,9 +575,17 @@ allfilesfilters, allfilespats), ] +# (desc, +# func to pick up embedded code fragments, +# list of patterns to convert target files +# list of patterns to detect errors/warnings) +embeddedchecks = [ + ('embedded python', + testparseutil.pyembedded, embeddedpyfilters, embeddedpypats) +] + def _preparepats(): - for c in checks: - failandwarn = c[-1] + def preparefailandwarn(failandwarn): for pats in failandwarn: for i, pseq in enumerate(pats): # fix-up regexes for multi-line searches @@ -553,10 +599,19 @@ p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p) pats[i] = (re.compile(p, re.MULTILINE),) + pseq[1:] - filters = c[3] + + def preparefilters(filters): for i, flt in enumerate(filters): filters[i] = re.compile(flt[0]), flt[1] + for cs in (checks, embeddedchecks): + for c in cs: + failandwarn = c[-1] + preparefailandwarn(failandwarn) + + filters = c[-2] + preparefilters(filters) + class norepeatlogger(object): def __init__(self): self._lastseen = None @@ -604,13 +659,12 @@ return True if no error is found, False otherwise. 
""" - blamecache = None result = True try: with opentext(f) as fp: try: - pre = post = fp.read() + pre = fp.read() except UnicodeDecodeError as e: print("%s while reading %s" % (e, f)) return result @@ -618,11 +672,12 @@ print("Skipping %s, %s" % (f, str(e).split(':', 1)[0])) return result + # context information shared while single checkfile() invocation + context = {'blamecache': None} + for name, match, magic, filters, pats in checks: - post = pre # discard filtering result of previous check if debug: print(name, f) - fc = 0 if not (re.match(match, f) or (magic and re.search(magic, pre))): if debug: print("Skipping %s for %s it doesn't match %s" % ( @@ -637,6 +692,74 @@ # tests/test-check-code.t print("Skipping %s it has no-che?k-code (glob)" % f) return "Skip" # skip checking this file + + fc = _checkfiledata(name, f, pre, filters, pats, context, + logfunc, maxerr, warnings, blame, debug, lineno) + if fc: + result = False + + if f.endswith('.t') and "no-" "check-code" not in pre: + if debug: + print("Checking embedded code in %s" % (f)) + + prelines = pre.splitlines() + embeddederros = [] + for name, embedded, filters, pats in embeddedchecks: + # "reset curmax at each repetition" treats maxerr as "max + # nubmer of errors in an actual file per entry of + # (embedded)checks" + curmaxerr = maxerr + + for found in embedded(f, prelines, embeddederros): + filename, starts, ends, code = found + fc = _checkfiledata(name, f, code, filters, pats, context, + logfunc, curmaxerr, warnings, blame, debug, + lineno, offset=starts - 1) + if fc: + result = False + if curmaxerr: + if fc >= curmaxerr: + break + curmaxerr -= fc + + return result + +def _checkfiledata(name, f, filedata, filters, pats, context, + logfunc, maxerr, warnings, blame, debug, lineno, + offset=None): + """Execute actual error check for file data + + :name: of the checking category + :f: filepath + :filedata: content of a file + :filters: to be applied before checking + :pats: to detect errors + :context: a 
dict of information shared while single checkfile() invocation + Valid keys: 'blamecache'. + :logfunc: function used to report error + logfunc(filename, linenumber, linecontent, errormessage) + :maxerr: number of error to display before aborting, or False to + report all errors + :warnings: whether warning level checks should be applied + :blame: whether blame information should be displayed at error reporting + :debug: whether debug information should be displayed + :lineno: whether lineno should be displayed at error reporting + :offset: line number offset of 'filedata' in 'f' for checking + an embedded code fragment, or None (offset=0 is different + from offset=None) + + returns number of detected errors. + """ + blamecache = context['blamecache'] + if offset is None: + lineoffset = 0 + else: + lineoffset = offset + + fc = 0 + pre = post = filedata + + if True: # TODO: get rid of this redundant 'if' block for p, r in filters: post = re.sub(p, r, post) nerrs = len(pats[0]) # nerr elements are errors @@ -679,20 +802,30 @@ if ignore and re.search(ignore, l, re.MULTILINE): if debug: print("Skipping %s for %s:%s (ignore pattern)" % ( - name, f, n)) + name, f, (n + lineoffset))) continue bd = "" if blame: bd = 'working directory' - if not blamecache: + if blamecache is None: blamecache = getblame(f) - if n < len(blamecache): - bl, bu, br = blamecache[n] - if bl == l: + context['blamecache'] = blamecache + if (n + lineoffset) < len(blamecache): + bl, bu, br = blamecache[(n + lineoffset)] + if offset is None and bl == l: bd = '%s@%s' % (bu, br) + elif offset is not None and bl.endswith(l): + # "offset is not None" means "checking + # embedded code fragment". In this case, + # "l" does not have information about the + # beginning of an *original* line in the + # file (e.g. ' > '). + # Therefore, use "str.endswith()", and + # show "maybe" for a little loose + # examination. 
+ bd = '%s@%s, maybe' % (bu, br) - errors.append((f, lineno and n + 1, l, msg, bd)) - result = False + errors.append((f, lineno and (n + lineoffset + 1), l, msg, bd)) errors.sort() for e in errors: @@ -702,7 +835,7 @@ print(" (too many errors, giving up)") break - return result + return fc def main(): parser = optparse.OptionParser("%prog [options] [files | -]")
--- a/contrib/check-commit Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/check-commit Tue Mar 19 16:36:59 2019 +0300 @@ -47,7 +47,7 @@ "adds a function with foo_bar naming"), ] -word = re.compile('\S') +word = re.compile(r'\S') def nonempty(first, second): if word.search(first): return first
--- a/contrib/check-config.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/check-config.py Tue Mar 19 16:36:59 2019 +0300 @@ -25,7 +25,7 @@ (?:default=)?(?P<default>\S+?))? \)''', re.VERBOSE | re.MULTILINE) -configwithre = re.compile(b''' +configwithre = re.compile(br''' ui\.config(?P<ctype>with)\( # First argument is callback function. This doesn't parse robustly # if it is e.g. a function call. @@ -61,10 +61,10 @@ linenum += 1 # check topic-like bits - m = re.match(b'\s*``(\S+)``', l) + m = re.match(br'\s*``(\S+)``', l) if m: prevname = m.group(1) - if re.match(b'^\s*-+$', l): + if re.match(br'^\s*-+$', l): sect = prevname prevname = b''
--- a/contrib/check-py3-compat.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/check-py3-compat.py Tue Mar 19 16:36:59 2019 +0300 @@ -14,6 +14,7 @@ import os import sys import traceback +import warnings def check_compat_py2(f): """Check Python 3 compatibility for a file with Python 2""" @@ -45,7 +46,7 @@ content = fh.read() try: - ast.parse(content) + ast.parse(content, filename=f) except SyntaxError as e: print('%s: invalid syntax: %s' % (f, e)) return @@ -91,6 +92,11 @@ fn = check_compat_py3 for f in sys.argv[1:]: - fn(f) + with warnings.catch_warnings(record=True) as warns: + fn(f) + + for w in warns: + print(warnings.formatwarning(w.message, w.category, + w.filename, w.lineno).rstrip()) sys.exit(0)
--- a/contrib/chg/hgclient.c Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/chg/hgclient.c Tue Mar 19 16:36:59 2019 +0300 @@ -84,8 +84,9 @@ static void enlargecontext(context_t *ctx, size_t newsize) { - if (newsize <= ctx->maxdatasize) + if (newsize <= ctx->maxdatasize) { return; + } newsize = defaultdatasize * ((newsize + defaultdatasize - 1) / defaultdatasize); @@ -117,22 +118,25 @@ uint32_t datasize_n; rsize = recv(hgc->sockfd, &datasize_n, sizeof(datasize_n), 0); - if (rsize != sizeof(datasize_n)) + if (rsize != sizeof(datasize_n)) { abortmsg("failed to read data size"); + } /* datasize denotes the maximum size to write if input request */ hgc->ctx.datasize = ntohl(datasize_n); enlargecontext(&hgc->ctx, hgc->ctx.datasize); - if (isupper(hgc->ctx.ch) && hgc->ctx.ch != 'S') + if (isupper(hgc->ctx.ch) && hgc->ctx.ch != 'S') { return; /* assumes input request */ + } size_t cursize = 0; while (cursize < hgc->ctx.datasize) { rsize = recv(hgc->sockfd, hgc->ctx.data + cursize, hgc->ctx.datasize - cursize, 0); - if (rsize < 1) + if (rsize < 1) { abortmsg("failed to read data block"); + } cursize += rsize; } } @@ -143,8 +147,9 @@ const char *const endp = p + datasize; while (p < endp) { ssize_t r = send(sockfd, p, endp - p, 0); - if (r < 0) + if (r < 0) { abortmsgerrno("cannot communicate"); + } p += r; } } @@ -186,8 +191,9 @@ ctx->datasize += n; } - if (ctx->datasize > 0) + if (ctx->datasize > 0) { --ctx->datasize; /* strip last '\0' */ + } } /* Extract '\0'-separated list of args to new buffer, terminated by NULL */ @@ -205,8 +211,9 @@ args[nargs] = s; nargs++; s = memchr(s, '\0', e - s); - if (!s) + if (!s) { break; + } s++; } args[nargs] = NULL; @@ -225,8 +232,9 @@ static void handlereadlinerequest(hgclient_t *hgc) { context_t *ctx = &hgc->ctx; - if (!fgets(ctx->data, ctx->datasize, stdin)) + if (!fgets(ctx->data, ctx->datasize, stdin)) { ctx->data[0] = '\0'; + } ctx->datasize = strlen(ctx->data); writeblock(hgc); } @@ -239,8 +247,9 @@ ctx->data[ctx->datasize] = '\0'; 
/* terminate last string */ const char **args = unpackcmdargsnul(ctx); - if (!args[0] || !args[1] || !args[2]) + if (!args[0] || !args[1] || !args[2]) { abortmsg("missing type or command or cwd in system request"); + } if (strcmp(args[0], "system") == 0) { debugmsg("run '%s' at '%s'", args[1], args[2]); int32_t r = runshellcmd(args[1], args + 3, args[2]); @@ -252,8 +261,9 @@ writeblock(hgc); } else if (strcmp(args[0], "pager") == 0) { setuppager(args[1], args + 3); - if (hgc->capflags & CAP_ATTACHIO) + if (hgc->capflags & CAP_ATTACHIO) { attachio(hgc); + } /* unblock the server */ static const char emptycmd[] = "\n"; sendall(hgc->sockfd, emptycmd, sizeof(emptycmd) - 1); @@ -296,9 +306,10 @@ handlesystemrequest(hgc); break; default: - if (isupper(ctx->ch)) + if (isupper(ctx->ch)) { abortmsg("cannot handle response (ch = %c)", ctx->ch); + } } } } @@ -308,8 +319,9 @@ unsigned int flags = 0; while (s < e) { const char *t = strchr(s, ' '); - if (!t || t > e) + if (!t || t > e) { t = e; + } const cappair_t *cap; for (cap = captable; cap->flag; ++cap) { size_t n = t - s; @@ -346,11 +358,13 @@ const char *const dataend = ctx->data + ctx->datasize; while (s < dataend) { const char *t = strchr(s, ':'); - if (!t || t[1] != ' ') + if (!t || t[1] != ' ') { break; + } const char *u = strchr(t + 2, '\n'); - if (!u) + if (!u) { u = dataend; + } if (strncmp(s, "capabilities:", t - s + 1) == 0) { hgc->capflags = parsecapabilities(t + 2, u); } else if (strncmp(s, "pgid:", t - s + 1) == 0) { @@ -367,8 +381,9 @@ { int r = snprintf(hgc->ctx.data, hgc->ctx.maxdatasize, "chg[worker/%d]", (int)getpid()); - if (r < 0 || (size_t)r >= hgc->ctx.maxdatasize) + if (r < 0 || (size_t)r >= hgc->ctx.maxdatasize) { abortmsg("insufficient buffer to write procname (r = %d)", r); + } hgc->ctx.datasize = (size_t)r; writeblockrequest(hgc, "setprocname"); } @@ -380,8 +395,9 @@ sendall(hgc->sockfd, chcmd, sizeof(chcmd) - 1); readchannel(hgc); context_t *ctx = &hgc->ctx; - if (ctx->ch != 'I') + if (ctx->ch 
!= 'I') { abortmsg("unexpected response for attachio (ch = %c)", ctx->ch); + } static const int fds[3] = {STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO}; struct msghdr msgh; @@ -399,23 +415,27 @@ memcpy(CMSG_DATA(cmsg), fds, sizeof(fds)); msgh.msg_controllen = cmsg->cmsg_len; ssize_t r = sendmsg(hgc->sockfd, &msgh, 0); - if (r < 0) + if (r < 0) { abortmsgerrno("sendmsg failed"); + } handleresponse(hgc); int32_t n; - if (ctx->datasize != sizeof(n)) + if (ctx->datasize != sizeof(n)) { abortmsg("unexpected size of attachio result"); + } memcpy(&n, ctx->data, sizeof(n)); n = ntohl(n); - if (n != sizeof(fds) / sizeof(fds[0])) + if (n != sizeof(fds) / sizeof(fds[0])) { abortmsg("failed to send fds (n = %d)", n); + } } static void chdirtocwd(hgclient_t *hgc) { - if (!getcwd(hgc->ctx.data, hgc->ctx.maxdatasize)) + if (!getcwd(hgc->ctx.data, hgc->ctx.maxdatasize)) { abortmsgerrno("failed to getcwd"); + } hgc->ctx.datasize = strlen(hgc->ctx.data); writeblockrequest(hgc, "chdir"); } @@ -440,8 +460,9 @@ hgclient_t *hgc_open(const char *sockname) { int fd = socket(AF_UNIX, SOCK_STREAM, 0); - if (fd < 0) + if (fd < 0) { abortmsgerrno("cannot create socket"); + } /* don't keep fd on fork(), so that it can be closed when the parent * process get terminated. 
*/ @@ -456,34 +477,39 @@ { const char *split = strrchr(sockname, '/'); if (split && split != sockname) { - if (split[1] == '\0') + if (split[1] == '\0') { abortmsg("sockname cannot end with a slash"); + } size_t len = split - sockname; char sockdir[len + 1]; memcpy(sockdir, sockname, len); sockdir[len] = '\0'; bakfd = open(".", O_DIRECTORY); - if (bakfd == -1) + if (bakfd == -1) { abortmsgerrno("cannot open cwd"); + } int r = chdir(sockdir); - if (r != 0) + if (r != 0) { abortmsgerrno("cannot chdir %s", sockdir); + } basename = split + 1; } } - if (strlen(basename) >= sizeof(addr.sun_path)) + if (strlen(basename) >= sizeof(addr.sun_path)) { abortmsg("sockname is too long: %s", basename); + } strncpy(addr.sun_path, basename, sizeof(addr.sun_path)); addr.sun_path[sizeof(addr.sun_path) - 1] = '\0'; /* real connect */ int r = connect(fd, (struct sockaddr *)&addr, sizeof(addr)); if (r < 0) { - if (errno != ENOENT && errno != ECONNREFUSED) + if (errno != ENOENT && errno != ECONNREFUSED) { abortmsgerrno("cannot connect to %s", sockname); + } } if (bakfd != -1) { fchdirx(bakfd); @@ -501,16 +527,21 @@ initcontext(&hgc->ctx); readhello(hgc); - if (!(hgc->capflags & CAP_RUNCOMMAND)) + if (!(hgc->capflags & CAP_RUNCOMMAND)) { abortmsg("insufficient capability: runcommand"); - if (hgc->capflags & CAP_SETPROCNAME) + } + if (hgc->capflags & CAP_SETPROCNAME) { updateprocname(hgc); - if (hgc->capflags & CAP_ATTACHIO) + } + if (hgc->capflags & CAP_ATTACHIO) { attachio(hgc); - if (hgc->capflags & CAP_CHDIR) + } + if (hgc->capflags & CAP_CHDIR) { chdirtocwd(hgc); - if (hgc->capflags & CAP_SETUMASK2) + } + if (hgc->capflags & CAP_SETUMASK2) { forwardumask(hgc); + } return hgc; } @@ -555,16 +586,18 @@ size_t argsize) { assert(hgc); - if (!(hgc->capflags & CAP_VALIDATE)) + if (!(hgc->capflags & CAP_VALIDATE)) { return NULL; + } packcmdargs(&hgc->ctx, args, argsize); writeblockrequest(hgc, "validate"); handleresponse(hgc); /* the server returns '\0' if it can handle our request */ - if 
(hgc->ctx.datasize <= 1) + if (hgc->ctx.datasize <= 1) { return NULL; + } /* make sure the buffer is '\0' terminated */ enlargecontext(&hgc->ctx, hgc->ctx.datasize + 1); @@ -599,8 +632,9 @@ void hgc_attachio(hgclient_t *hgc) { assert(hgc); - if (!(hgc->capflags & CAP_ATTACHIO)) + if (!(hgc->capflags & CAP_ATTACHIO)) { return; + } attachio(hgc); } @@ -613,8 +647,9 @@ void hgc_setenv(hgclient_t *hgc, const char *const envp[]) { assert(hgc && envp); - if (!(hgc->capflags & CAP_SETENV)) + if (!(hgc->capflags & CAP_SETENV)) { return; + } packcmdargs(&hgc->ctx, envp, /*argsize*/ -1); writeblockrequest(hgc, "setenv"); }
--- a/contrib/chg/procutil.c Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/chg/procutil.c Tue Mar 19 16:36:59 2019 +0300 @@ -25,8 +25,9 @@ static void forwardsignal(int sig) { assert(peerpid > 0); - if (kill(peerpid, sig) < 0) + if (kill(peerpid, sig) < 0) { abortmsgerrno("cannot kill %d", peerpid); + } debugmsg("forward signal %d", sig); } @@ -34,8 +35,9 @@ { /* prefer kill(-pgid, sig), fallback to pid if pgid is invalid */ pid_t killpid = peerpgid > 1 ? -peerpgid : peerpid; - if (kill(killpid, sig) < 0) + if (kill(killpid, sig) < 0) { abortmsgerrno("cannot kill %d", killpid); + } debugmsg("forward signal %d to %d", sig, killpid); } @@ -43,28 +45,36 @@ { sigset_t unblockset, oldset; struct sigaction sa, oldsa; - if (sigemptyset(&unblockset) < 0) + if (sigemptyset(&unblockset) < 0) { goto error; - if (sigaddset(&unblockset, sig) < 0) + } + if (sigaddset(&unblockset, sig) < 0) { goto error; + } memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { goto error; + } forwardsignal(sig); - if (raise(sig) < 0) /* resend to self */ + if (raise(sig) < 0) { /* resend to self */ goto error; - if (sigaction(sig, &sa, &oldsa) < 0) + } + if (sigaction(sig, &sa, &oldsa) < 0) { goto error; - if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) + } + if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) { goto error; + } /* resent signal will be handled before sigprocmask() returns */ - if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) + if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) { goto error; - if (sigaction(sig, &oldsa, NULL) < 0) + } + if (sigaction(sig, &oldsa, NULL) < 0) { goto error; + } return; error: @@ -73,19 +83,22 @@ static void handlechildsignal(int sig UNUSED_) { - if (peerpid == 0 || pagerpid == 0) + if (peerpid == 0 || pagerpid == 0) { return; + } /* if pager exits, notify the server with SIGPIPE immediately. 
* otherwise the server won't get SIGPIPE if it does not write * anything. (issue5278) */ - if (waitpid(pagerpid, NULL, WNOHANG) == pagerpid) + if (waitpid(pagerpid, NULL, WNOHANG) == pagerpid) { kill(peerpid, SIGPIPE); + } } void setupsignalhandler(pid_t pid, pid_t pgid) { - if (pid <= 0) + if (pid <= 0) { return; + } peerpid = pid; peerpgid = (pgid <= 1 ? 0 : pgid); @@ -98,42 +111,52 @@ * - SIGINT: usually generated by the terminal */ sa.sa_handler = forwardsignaltogroup; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { + goto error; + } + if (sigaction(SIGHUP, &sa, NULL) < 0) { goto error; - if (sigaction(SIGHUP, &sa, NULL) < 0) + } + if (sigaction(SIGINT, &sa, NULL) < 0) { goto error; - if (sigaction(SIGINT, &sa, NULL) < 0) - goto error; + } /* terminate frontend by double SIGTERM in case of server freeze */ sa.sa_handler = forwardsignal; sa.sa_flags |= SA_RESETHAND; - if (sigaction(SIGTERM, &sa, NULL) < 0) + if (sigaction(SIGTERM, &sa, NULL) < 0) { goto error; + } /* notify the worker about window resize events */ sa.sa_flags = SA_RESTART; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + if (sigaction(SIGWINCH, &sa, NULL) < 0) { goto error; + } /* forward user-defined signals */ - if (sigaction(SIGUSR1, &sa, NULL) < 0) + if (sigaction(SIGUSR1, &sa, NULL) < 0) { goto error; - if (sigaction(SIGUSR2, &sa, NULL) < 0) + } + if (sigaction(SIGUSR2, &sa, NULL) < 0) { goto error; + } /* propagate job control requests to worker */ sa.sa_handler = forwardsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGCONT, &sa, NULL) < 0) + if (sigaction(SIGCONT, &sa, NULL) < 0) { goto error; + } sa.sa_handler = handlestopsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + if (sigaction(SIGTSTP, &sa, NULL) < 0) { goto error; + } /* get notified when pager exits */ sa.sa_handler = handlechildsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGCHLD, &sa, NULL) < 0) + if (sigaction(SIGCHLD, &sa, NULL) < 0) { goto 
error; + } return; @@ -147,26 +170,34 @@ memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { goto error; + } - if (sigaction(SIGHUP, &sa, NULL) < 0) + if (sigaction(SIGHUP, &sa, NULL) < 0) { goto error; - if (sigaction(SIGTERM, &sa, NULL) < 0) + } + if (sigaction(SIGTERM, &sa, NULL) < 0) { goto error; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + } + if (sigaction(SIGWINCH, &sa, NULL) < 0) { goto error; - if (sigaction(SIGCONT, &sa, NULL) < 0) + } + if (sigaction(SIGCONT, &sa, NULL) < 0) { goto error; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + } + if (sigaction(SIGTSTP, &sa, NULL) < 0) { goto error; - if (sigaction(SIGCHLD, &sa, NULL) < 0) + } + if (sigaction(SIGCHLD, &sa, NULL) < 0) { goto error; + } /* ignore Ctrl+C while shutting down to make pager exits cleanly */ sa.sa_handler = SIG_IGN; - if (sigaction(SIGINT, &sa, NULL) < 0) + if (sigaction(SIGINT, &sa, NULL) < 0) { goto error; + } peerpid = 0; return; @@ -180,22 +211,27 @@ pid_t setuppager(const char *pagercmd, const char *envp[]) { assert(pagerpid == 0); - if (!pagercmd) + if (!pagercmd) { return 0; + } int pipefds[2]; - if (pipe(pipefds) < 0) + if (pipe(pipefds) < 0) { return 0; + } pid_t pid = fork(); - if (pid < 0) + if (pid < 0) { goto error; + } if (pid > 0) { close(pipefds[0]); - if (dup2(pipefds[1], fileno(stdout)) < 0) + if (dup2(pipefds[1], fileno(stdout)) < 0) { goto error; + } if (isatty(fileno(stderr))) { - if (dup2(pipefds[1], fileno(stderr)) < 0) + if (dup2(pipefds[1], fileno(stderr)) < 0) { goto error; + } } close(pipefds[1]); pagerpid = pid; @@ -222,16 +258,18 @@ void waitpager(void) { - if (pagerpid == 0) + if (pagerpid == 0) { return; + } /* close output streams to notify the pager its input ends */ fclose(stdout); fclose(stderr); while (1) { pid_t ret = waitpid(pagerpid, NULL, 0); - if (ret == -1 && errno == EINTR) + if (ret == -1 && errno == EINTR) { continue; + } break; } }
--- a/contrib/chg/util.c Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/chg/util.c Tue Mar 19 16:36:59 2019 +0300 @@ -25,8 +25,9 @@ static inline void fsetcolor(FILE *fp, const char *code) { - if (!colorenabled) + if (!colorenabled) { return; + } fprintf(fp, "\033[%sm", code); } @@ -35,8 +36,9 @@ fsetcolor(stderr, "1;31"); fputs("chg: abort: ", stderr); vfprintf(stderr, fmt, args); - if (no != 0) + if (no != 0) { fprintf(stderr, " (errno = %d, %s)", no, strerror(no)); + } fsetcolor(stderr, ""); fputc('\n', stderr); exit(255); @@ -82,8 +84,9 @@ void debugmsg(const char *fmt, ...) { - if (!debugmsgenabled) + if (!debugmsgenabled) { return; + } va_list args; va_start(args, fmt); @@ -98,32 +101,37 @@ void fchdirx(int dirfd) { int r = fchdir(dirfd); - if (r == -1) + if (r == -1) { abortmsgerrno("failed to fchdir"); + } } void fsetcloexec(int fd) { int flags = fcntl(fd, F_GETFD); - if (flags < 0) + if (flags < 0) { abortmsgerrno("cannot get flags of fd %d", fd); - if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0) + } + if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0) { abortmsgerrno("cannot set flags of fd %d", fd); + } } void *mallocx(size_t size) { void *result = malloc(size); - if (!result) + if (!result) { abortmsg("failed to malloc"); + } return result; } void *reallocx(void *ptr, size_t size) { void *result = realloc(ptr, size); - if (!result) + if (!result) { abortmsg("failed to realloc"); + } return result; } @@ -144,30 +152,37 @@ memset(&newsa, 0, sizeof(newsa)); newsa.sa_handler = SIG_IGN; newsa.sa_flags = 0; - if (sigemptyset(&newsa.sa_mask) < 0) + if (sigemptyset(&newsa.sa_mask) < 0) { goto done; - if (sigaction(SIGINT, &newsa, &oldsaint) < 0) + } + if (sigaction(SIGINT, &newsa, &oldsaint) < 0) { goto done; + } doneflags |= F_SIGINT; - if (sigaction(SIGQUIT, &newsa, &oldsaquit) < 0) + if (sigaction(SIGQUIT, &newsa, &oldsaquit) < 0) { goto done; + } doneflags |= F_SIGQUIT; - if (sigaddset(&newsa.sa_mask, SIGCHLD) < 0) + if (sigaddset(&newsa.sa_mask, SIGCHLD) < 0) { 
goto done; - if (sigprocmask(SIG_BLOCK, &newsa.sa_mask, &oldmask) < 0) + } + if (sigprocmask(SIG_BLOCK, &newsa.sa_mask, &oldmask) < 0) { goto done; + } doneflags |= F_SIGMASK; pid_t pid = fork(); - if (pid < 0) + if (pid < 0) { goto done; + } if (pid == 0) { sigaction(SIGINT, &oldsaint, NULL); sigaction(SIGQUIT, &oldsaquit, NULL); sigprocmask(SIG_SETMASK, &oldmask, NULL); - if (cwd && chdir(cwd) < 0) + if (cwd && chdir(cwd) < 0) { _exit(127); + } const char *argv[] = {"sh", "-c", cmd, NULL}; if (envp) { execve("/bin/sh", (char **)argv, (char **)envp); @@ -176,25 +191,32 @@ } _exit(127); } else { - if (waitpid(pid, &status, 0) < 0) + if (waitpid(pid, &status, 0) < 0) { goto done; + } doneflags |= F_WAITPID; } done: - if (doneflags & F_SIGINT) + if (doneflags & F_SIGINT) { sigaction(SIGINT, &oldsaint, NULL); - if (doneflags & F_SIGQUIT) + } + if (doneflags & F_SIGQUIT) { sigaction(SIGQUIT, &oldsaquit, NULL); - if (doneflags & F_SIGMASK) + } + if (doneflags & F_SIGMASK) { sigprocmask(SIG_SETMASK, &oldmask, NULL); + } /* no way to report other errors, use 127 (= shell termination) */ - if (!(doneflags & F_WAITPID)) + if (!(doneflags & F_WAITPID)) { return 127; - if (WIFEXITED(status)) + } + if (WIFEXITED(status)) { return WEXITSTATUS(status); - if (WIFSIGNALED(status)) + } + if (WIFSIGNALED(status)) { return -WTERMSIG(status); + } return 127; }
--- a/contrib/debugshell.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/debugshell.py Tue Mar 19 16:36:59 2019 +0300 @@ -7,6 +7,7 @@ import sys from mercurial import ( demandimport, + pycompat, registrar, ) @@ -32,28 +33,30 @@ IPython.embed() -@command('debugshell|dbsh', []) +@command(b'debugshell|dbsh', []) def debugshell(ui, repo, **opts): - bannermsg = "loaded repo : %s\n" \ - "using source: %s" % (repo.root, - mercurial.__path__[0]) + bannermsg = ("loaded repo : %s\n" + "using source: %s" % (pycompat.sysstr(repo.root), + mercurial.__path__[0])) pdbmap = { 'pdb' : 'code', 'ipdb' : 'IPython' } - debugger = ui.config("ui", "debugger") + debugger = ui.config(b"ui", b"debugger") if not debugger: debugger = 'pdb' + else: + debugger = pycompat.sysstr(debugger) # if IPython doesn't exist, fallback to code.interact try: with demandimport.deactivated(): __import__(pdbmap[debugger]) except ImportError: - ui.warn(("%s debugger specified but %s module was not found\n") + ui.warn((b"%s debugger specified but %s module was not found\n") % (debugger, pdbmap[debugger])) - debugger = 'pdb' + debugger = b'pdb' getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts)
--- a/contrib/discovery-helper.sh Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,64 +0,0 @@ -#!/bin/bash -# -# produces two repositories with different common and missing subsets -# -# $ discovery-helper.sh REPO NBHEADS DEPT -# -# The Goal is to produce two repositories with some common part and some -# exclusive part on each side. Provide a source repository REPO, it will -# produce two repositories REPO-left and REPO-right. -# -# Each repository will be missing some revisions exclusive to NBHEADS of the -# repo topological heads. These heads and revisions exclusive to them (up to -# DEPTH depth) are stripped. -# -# The "left" repository will use the NBHEADS first heads (sorted by -# description). The "right" use the last NBHEADS one. -# -# To find out how many topological heads a repo has, use: -# -# $ hg heads -t -T '{rev}\n' | wc -l -# -# Example: -# -# The `pypy-2018-09-01` repository has 192 heads. To produce two repositories -# with 92 common heads and ~50 exclusive heads on each side. 
-# -# $ ./discovery-helper.sh pypy-2018-08-01 50 10 - -set -euo pipefail - -if [ $# -lt 3 ]; then - echo "usage: `basename $0` REPO NBHEADS DEPTH" - exit 64 -fi - -repo="$1" -shift - -nbheads="$1" -shift - -depth="$1" -shift - -leftrepo="${repo}-left" -rightrepo="${repo}-right" - -left="first(sort(heads(all()), 'desc'), $nbheads)" -right="last(sort(heads(all()), 'desc'), $nbheads)" - -leftsubset="ancestors($left, $depth) and only($left, heads(all() - $left))" -rightsubset="ancestors($right, $depth) and only($right, heads(all() - $right))" - -echo '### building left repository:' $left-repo -echo '# cloning' -hg clone --noupdate "${repo}" "${leftrepo}" -echo '# stripping' '"'${leftsubset}'"' -hg -R "${leftrepo}" --config extensions.strip= strip --rev "$leftsubset" --no-backup - -echo '### building right repository:' $right-repo -echo '# cloning' -hg clone --noupdate "${repo}" "${rightrepo}" -echo '# stripping:' '"'${rightsubset}'"' -hg -R "${rightrepo}" --config extensions.strip= strip --rev "$rightsubset" --no-backup
--- a/contrib/fuzz/manifest.cc Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/fuzz/manifest.cc Tue Mar 19 16:36:59 2019 +0300 @@ -20,11 +20,19 @@ lm = lazymanifest(mdata) # iterate the whole thing, which causes the code to fully parse # every line in the manifest - list(lm.iterentries()) + for e, _, _ in lm.iterentries(): + # also exercise __getitem__ et al + lm[e] + e in lm + (e + 'nope') in lm lm[b'xyzzy'] = (b'\0' * 20, 'x') # do an insert, text should change assert lm.text() != mdata, "insert should change text and didn't: %r %r" % (lm.text(), mdata) + cloned = lm.filtercopy(lambda x: x != 'xyzzy') + assert cloned.text() == mdata, 'cloned text should equal mdata' + cloned.diff(lm) del lm[b'xyzzy'] + cloned.diff(lm) # should be back to the same assert lm.text() == mdata, "delete should have restored text but didn't: %r %r" % (lm.text(), mdata) except Exception as e: @@ -39,6 +47,11 @@ int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) { + // Don't allow fuzzer inputs larger than 100k, since we'll just bog + // down and not accomplish much. + if (Size > 100000) { + return 0; + } PyObject *mtext = PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size); PyObject *locals = PyDict_New();
--- a/contrib/fuzz/revlog.cc Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/fuzz/revlog.cc Tue Mar 19 16:36:59 2019 +0300 @@ -19,6 +19,11 @@ for inline in (True, False): try: index, cache = parse_index2(data, inline) + index.slicechunktodensity(list(range(len(index))), 0.5, 262144) + for rev in range(len(index)): + node = index[rev][7] + partial = index.shortest(node) + index.partialmatch(node[:partial]) except Exception as e: pass # uncomment this print if you're editing this Python code @@ -31,6 +36,11 @@ int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) { + // Don't allow fuzzer inputs larger than 60k, since we'll just bog + // down and not accomplish much. + if (Size > 60000) { + return 0; + } PyObject *text = PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size); PyObject *locals = PyDict_New();
--- a/contrib/hg-test-mode.el Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/hg-test-mode.el Tue Mar 19 16:36:59 2019 +0300 @@ -53,4 +53,45 @@ (setq mode-name "hg-test") (run-hooks 'hg-test-mode-hook)) +(with-eval-after-load "compile" + ;; Link to Python sources in tracebacks in .t failures. + (add-to-list 'compilation-error-regexp-alist-alist + '(hg-test-output-python-tb + "^\\+ +File ['\"]\\([^'\"]+\\)['\"], line \\([0-9]+\\)," 1 2)) + (add-to-list 'compilation-error-regexp-alist 'hg-test-output-python-tb) + ;; Link to source files in test-check-code.t violations. + (add-to-list 'compilation-error-regexp-alist-alist + '(hg-test-check-code-output + "\\+ \\([^:\n]+\\):\\([0-9]+\\):$" 1 2)) + (add-to-list 'compilation-error-regexp-alist 'hg-test-check-code-output)) + +(defun hg-test-mode--test-one-error-line-regexp (test) + (erase-buffer) + (setq compilation-locs (make-hash-table)) + (insert (car test)) + (compilation-parse-errors (point-min) (point-max)) + (let ((msg (get-text-property 1 'compilation-message))) + (should msg) + (let ((loc (compilation--message->loc msg)) + (line (nth 1 test)) + (file (nth 2 test))) + (should (equal (compilation--loc->line loc) line)) + (should (equal (caar (compilation--loc->file-struct loc)) file))) + msg)) + +(require 'ert) +(ert-deftest hg-test-mode--compilation-mode-support () + "Test hg-specific compilation-mode regular expressions" + (require 'compile) + (with-temp-buffer + (font-lock-mode -1) + (mapc 'hg-test-mode--test-one-error-line-regexp + '( + ("+ contrib/debugshell.py:37:" 37 "contrib/debugshell.py") + ("+ File \"/tmp/hg/mercurial/commands.py\", line 3115, in help_" + 3115 "/tmp/hg/mercurial/commands.py") + ("+ File \"mercurial/dispatch.py\", line 225, in dispatch" + 225 "mercurial/dispatch.py"))))) + + (provide 'hg-test-mode)
--- a/contrib/packaging/hg-docker Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/packaging/hg-docker Tue Mar 19 16:36:59 2019 +0300 @@ -76,7 +76,7 @@ p.communicate(input=dockerfile) if p.returncode: raise subprocess.CalledProcessException( - p.returncode, 'failed to build docker image: %s %s' \ + p.returncode, 'failed to build docker image: %s %s' % (p.stdout, p.stderr)) def command_build(args):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/downloads.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,175 @@ +# downloads.py - Code for downloading dependencies. +# +# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import gzip +import hashlib +import pathlib +import urllib.request + + +DOWNLOADS = { + 'gettext': { + 'url': 'https://versaweb.dl.sourceforge.net/project/gnuwin32/gettext/0.14.4/gettext-0.14.4-bin.zip', + 'size': 1606131, + 'sha256': '60b9ef26bc5cceef036f0424e542106cf158352b2677f43a01affd6d82a1d641', + 'version': '0.14.4', + }, + 'gettext-dep': { + 'url': 'https://versaweb.dl.sourceforge.net/project/gnuwin32/gettext/0.14.4/gettext-0.14.4-dep.zip', + 'size': 715086, + 'sha256': '411f94974492fd2ecf52590cb05b1023530aec67e64154a88b1e4ebcd9c28588', + }, + 'py2exe': { + 'url': 'https://versaweb.dl.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.zip', + 'size': 149687, + 'sha256': '6bd383312e7d33eef2e43a5f236f9445e4f3e0f6b16333c6f183ed445c44ddbd', + 'version': '0.6.9', + }, + # The VC9 CRT merge modules aren't readily available on most systems because + # they are only installed as part of a full Visual Studio 2008 install. + # While we could potentially extract them from a Visual Studio 2008 + # installer, it is easier to just fetch them from a known URL. 
+ 'vc9-crt-x86-msm': { + 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/Microsoft_VC90_CRT_x86.msm', + 'size': 615424, + 'sha256': '837e887ef31b332feb58156f429389de345cb94504228bb9a523c25a9dd3d75e', + }, + 'vc9-crt-x86-msm-policy': { + 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/policy_9_0_Microsoft_VC90_CRT_x86.msm', + 'size': 71168, + 'sha256': '3fbcf92e3801a0757f36c5e8d304e134a68d5cafd197a6df7734ae3e8825c940', + }, + 'vc9-crt-x64-msm': { + 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/Microsoft_VC90_CRT_x86_x64.msm', + 'size': 662528, + 'sha256': '50d9639b5ad4844a2285269c7551bf5157ec636e32396ddcc6f7ec5bce487a7c', + }, + 'vc9-crt-x64-msm-policy': { + 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/policy_9_0_Microsoft_VC90_CRT_x86_x64.msm', + 'size': 71168, + 'sha256': '0550ea1929b21239134ad3a678c944ba0f05f11087117b6cf0833e7110686486', + }, + 'virtualenv': { + 'url': 'https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/virtualenv-16.4.3.tar.gz', + 'size': 3713208, + 'sha256': '984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39', + 'version': '16.4.3', + }, + 'wix': { + 'url': 'https://github.com/wixtoolset/wix3/releases/download/wix3111rtm/wix311-binaries.zip', + 'size': 34358269, + 'sha256': '37f0a533b0978a454efb5dc3bd3598becf9660aaf4287e55bf68ca6b527d051d', + 'version': '3.11.1', + }, +} + + +def hash_path(p: pathlib.Path): + h = hashlib.sha256() + + with p.open('rb') as fh: + while True: + chunk = fh.read(65536) + if not chunk: + break + + h.update(chunk) + + return h.hexdigest() + + +class IntegrityError(Exception): + """Represents an integrity error when downloading a URL.""" + + +def secure_download_stream(url, size, sha256): + """Securely download a URL to a stream of chunks. 
+ + If the integrity of the download fails, an IntegrityError is + raised. + """ + h = hashlib.sha256() + length = 0 + + with urllib.request.urlopen(url) as fh: + if not url.endswith('.gz') and fh.info().get('Content-Encoding') == 'gzip': + fh = gzip.GzipFile(fileobj=fh) + + while True: + chunk = fh.read(65536) + if not chunk: + break + + h.update(chunk) + length += len(chunk) + + yield chunk + + digest = h.hexdigest() + + if length != size: + raise IntegrityError('size mismatch on %s: wanted %d; got %d' % ( + url, size, length)) + + if digest != sha256: + raise IntegrityError('sha256 mismatch on %s: wanted %s; got %s' % ( + url, sha256, digest)) + + +def download_to_path(url: str, path: pathlib.Path, size: int, sha256: str): + """Download a URL to a filesystem path, possibly with verification.""" + + # We download to a temporary file and rename at the end so there's + # no chance of the final file being partially written or containing + # bad data. + print('downloading %s to %s' % (url, path)) + + if path.exists(): + good = True + + if path.stat().st_size != size: + print('existing file size is wrong; removing') + good = False + + if good: + if hash_path(path) != sha256: + print('existing file hash is wrong; removing') + good = False + + if good: + print('%s exists and passes integrity checks' % path) + return + + path.unlink() + + tmp = path.with_name('%s.tmp' % path.name) + + try: + with tmp.open('wb') as fh: + for chunk in secure_download_stream(url, size, sha256): + fh.write(chunk) + except IntegrityError: + tmp.unlink() + raise + + tmp.rename(path) + print('successfully downloaded %s' % url) + + +def download_entry(name: dict, dest_path: pathlib.Path, local_name=None) -> pathlib.Path: + entry = DOWNLOADS[name] + + url = entry['url'] + + local_name = local_name or url[url.rindex('/') + 1:] + + local_path = dest_path / local_name + download_to_path(url, local_path, entry['size'], entry['sha256']) + + return local_path, entry
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/inno.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,78 @@ +# inno.py - Inno Setup functionality. +# +# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import os +import pathlib +import shutil +import subprocess + +from .py2exe import ( + build_py2exe, +) +from .util import ( + find_vc_runtime_files, +) + + +EXTRA_PACKAGES = { + 'dulwich', + 'keyring', + 'pygments', + 'win32ctypes', +} + + +def build(source_dir: pathlib.Path, build_dir: pathlib.Path, + python_exe: pathlib.Path, iscc_exe: pathlib.Path, + version=None): + """Build the Inno installer. + + Build files will be placed in ``build_dir``. + + py2exe's setup.py doesn't use setuptools. It doesn't have modern logic + for finding the Python 2.7 toolchain. So, we require the environment + to already be configured with an active toolchain. + """ + if not iscc_exe.exists(): + raise Exception('%s does not exist' % iscc_exe) + + vc_x64 = r'\x64' in os.environ.get('LIB', '') + + requirements_txt = (source_dir / 'contrib' / 'packaging' / + 'inno' / 'requirements.txt') + + build_py2exe(source_dir, build_dir, python_exe, 'inno', + requirements_txt, extra_packages=EXTRA_PACKAGES) + + # hg.exe depends on VC9 runtime DLLs. Copy those into place. 
+ for f in find_vc_runtime_files(vc_x64): + if f.name.endswith('.manifest'): + basename = 'Microsoft.VC90.CRT.manifest' + else: + basename = f.name + + dest_path = source_dir / 'dist' / basename + + print('copying %s to %s' % (f, dest_path)) + shutil.copyfile(f, dest_path) + + print('creating installer') + + args = [str(iscc_exe)] + + if vc_x64: + args.append('/dARCH=x64') + + if version: + args.append('/dVERSION=%s' % version) + + args.append('/Odist') + args.append('contrib/packaging/inno/mercurial.iss') + + subprocess.run(args, cwd=str(source_dir), check=True)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/py2exe.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,135 @@ +# py2exe.py - Functionality for performing py2exe builds. +# +# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import os +import pathlib +import subprocess + +from .downloads import ( + download_entry, +) +from .util import ( + extract_tar_to_directory, + extract_zip_to_directory, + python_exe_info, +) + + +def build_py2exe(source_dir: pathlib.Path, build_dir: pathlib.Path, + python_exe: pathlib.Path, build_name: str, + venv_requirements_txt: pathlib.Path, + extra_packages=None, extra_excludes=None, + extra_dll_excludes=None): + """Build Mercurial with py2exe. + + Build files will be placed in ``build_dir``. + + py2exe's setup.py doesn't use setuptools. It doesn't have modern logic + for finding the Python 2.7 toolchain. So, we require the environment + to already be configured with an active toolchain. + """ + if 'VCINSTALLDIR' not in os.environ: + raise Exception('not running from a Visual C++ build environment; ' + 'execute the "Visual C++ <version> Command Prompt" ' + 'application shortcut or a vcsvarsall.bat file') + + # Identity x86/x64 and validate the environment matches the Python + # architecture. 
+ vc_x64 = r'\x64' in os.environ['LIB'] + + py_info = python_exe_info(python_exe) + + if vc_x64: + if py_info['arch'] != '64bit': + raise Exception('architecture mismatch: Visual C++ environment ' + 'is configured for 64-bit but Python is 32-bit') + else: + if py_info['arch'] != '32bit': + raise Exception('architecture mismatch: Visual C++ environment ' + 'is configured for 32-bit but Python is 64-bit') + + if py_info['py3']: + raise Exception('Only Python 2 is currently supported') + + build_dir.mkdir(exist_ok=True) + + gettext_pkg, gettext_entry = download_entry('gettext', build_dir) + gettext_dep_pkg = download_entry('gettext-dep', build_dir)[0] + virtualenv_pkg, virtualenv_entry = download_entry('virtualenv', build_dir) + py2exe_pkg, py2exe_entry = download_entry('py2exe', build_dir) + + venv_path = build_dir / ('venv-%s-%s' % (build_name, + 'x64' if vc_x64 else 'x86')) + + gettext_root = build_dir / ( + 'gettext-win-%s' % gettext_entry['version']) + + if not gettext_root.exists(): + extract_zip_to_directory(gettext_pkg, gettext_root) + extract_zip_to_directory(gettext_dep_pkg, gettext_root) + + # This assumes Python 2. We don't need virtualenv on Python 3. 
+ virtualenv_src_path = build_dir / ( + 'virtualenv-%s' % virtualenv_entry['version']) + virtualenv_py = virtualenv_src_path / 'virtualenv.py' + + if not virtualenv_src_path.exists(): + extract_tar_to_directory(virtualenv_pkg, build_dir) + + py2exe_source_path = build_dir / ('py2exe-%s' % py2exe_entry['version']) + + if not py2exe_source_path.exists(): + extract_zip_to_directory(py2exe_pkg, build_dir) + + if not venv_path.exists(): + print('creating virtualenv with dependencies') + subprocess.run( + [str(python_exe), str(virtualenv_py), str(venv_path)], + check=True) + + venv_python = venv_path / 'Scripts' / 'python.exe' + venv_pip = venv_path / 'Scripts' / 'pip.exe' + + subprocess.run([str(venv_pip), 'install', '-r', str(venv_requirements_txt)], + check=True) + + # Force distutils to use VC++ settings from environment, which was + # validated above. + env = dict(os.environ) + env['DISTUTILS_USE_SDK'] = '1' + env['MSSdk'] = '1' + + if extra_packages: + env['HG_PY2EXE_EXTRA_PACKAGES'] = ' '.join(sorted(extra_packages)) + if extra_excludes: + env['HG_PY2EXE_EXTRA_EXCLUDES'] = ' '.join(sorted(extra_excludes)) + if extra_dll_excludes: + env['HG_PY2EXE_EXTRA_DLL_EXCLUDES'] = ' '.join( + sorted(extra_dll_excludes)) + + py2exe_py_path = venv_path / 'Lib' / 'site-packages' / 'py2exe' + if not py2exe_py_path.exists(): + print('building py2exe') + subprocess.run([str(venv_python), 'setup.py', 'install'], + cwd=py2exe_source_path, + env=env, + check=True) + + # Register location of msgfmt and other binaries. + env['PATH'] = '%s%s%s' % ( + env['PATH'], os.pathsep, str(gettext_root / 'bin')) + + print('building Mercurial') + subprocess.run( + [str(venv_python), 'setup.py', + 'py2exe', + 'build_doc', '--html'], + cwd=str(source_dir), + env=env, + check=True)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/util.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,157 @@ +# util.py - Common packaging utility code. +# +# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import distutils.version +import getpass +import os +import pathlib +import subprocess +import tarfile +import zipfile + + +def extract_tar_to_directory(source: pathlib.Path, dest: pathlib.Path): + with tarfile.open(source, 'r') as tf: + tf.extractall(dest) + + +def extract_zip_to_directory(source: pathlib.Path, dest: pathlib.Path): + with zipfile.ZipFile(source, 'r') as zf: + zf.extractall(dest) + + +def find_vc_runtime_files(x64=False): + """Finds Visual C++ Runtime DLLs to include in distribution.""" + winsxs = pathlib.Path(os.environ['SYSTEMROOT']) / 'WinSxS' + + prefix = 'amd64' if x64 else 'x86' + + candidates = sorted(p for p in os.listdir(winsxs) + if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix)) + + for p in candidates: + print('found candidate VC runtime: %s' % p) + + # Take the newest version. + version = candidates[-1] + + d = winsxs / version + + return [ + d / 'msvcm90.dll', + d / 'msvcp90.dll', + d / 'msvcr90.dll', + winsxs / 'Manifests' / ('%s.manifest' % version), + ] + + +def windows_10_sdk_info(): + """Resolves information about the Windows 10 SDK.""" + + base = pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Windows Kits' / '10' + + if not base.is_dir(): + raise Exception('unable to find Windows 10 SDK at %s' % base) + + # Find the latest version. 
+ bin_base = base / 'bin' + + versions = [v for v in os.listdir(bin_base) if v.startswith('10.')] + version = sorted(versions, reverse=True)[0] + + bin_version = bin_base / version + + return { + 'root': base, + 'version': version, + 'bin_root': bin_version, + 'bin_x86': bin_version / 'x86', + 'bin_x64': bin_version / 'x64' + } + + +def find_signtool(): + """Find signtool.exe from the Windows SDK.""" + sdk = windows_10_sdk_info() + + for key in ('bin_x64', 'bin_x86'): + p = sdk[key] / 'signtool.exe' + + if p.exists(): + return p + + raise Exception('could not find signtool.exe in Windows 10 SDK') + + +def sign_with_signtool(file_path, description, subject_name=None, + cert_path=None, cert_password=None, + timestamp_url=None): + """Digitally sign a file with signtool.exe. + + ``file_path`` is file to sign. + ``description`` is text that goes in the signature. + + The signing certificate can be specified by ``cert_path`` or + ``subject_name``. These correspond to the ``/f`` and ``/n`` arguments + to signtool.exe, respectively. + + The certificate password can be specified via ``cert_password``. If + not provided, you will be prompted for the password. + + ``timestamp_url`` is the URL of a RFC 3161 timestamp server (``/tr`` + argument to signtool.exe). 
+ """ + if cert_path and subject_name: + raise ValueError('cannot specify both cert_path and subject_name') + + while cert_path and not cert_password: + cert_password = getpass.getpass('password for %s: ' % cert_path) + + args = [ + str(find_signtool()), 'sign', + '/v', + '/fd', 'sha256', + '/d', description, + ] + + if cert_path: + args.extend(['/f', str(cert_path), '/p', cert_password]) + elif subject_name: + args.extend(['/n', subject_name]) + + if timestamp_url: + args.extend(['/tr', timestamp_url, '/td', 'sha256']) + + args.append(str(file_path)) + + print('signing %s' % file_path) + subprocess.run(args, check=True) + + +PRINT_PYTHON_INFO = ''' +import platform; print("%s:%s" % (platform.architecture()[0], platform.python_version())) +'''.strip() + + +def python_exe_info(python_exe: pathlib.Path): + """Obtain information about a Python executable.""" + + res = subprocess.run( + [str(python_exe), '-c', PRINT_PYTHON_INFO], + capture_output=True, check=True) + + arch, version = res.stdout.decode('utf-8').split(':') + + version = distutils.version.LooseVersion(version) + + return { + 'arch': arch, + 'version': version, + 'py3': version >= distutils.version.LooseVersion('3'), + }
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/wix.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,301 @@ +# wix.py - WiX installer functionality +# +# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import os +import pathlib +import re +import subprocess +import tempfile +import xml.dom.minidom + +from .downloads import ( + download_entry, +) +from .py2exe import ( + build_py2exe, +) +from .util import ( + extract_zip_to_directory, + sign_with_signtool, +) + + +SUPPORT_WXS = [ + ('contrib.wxs', r'contrib'), + ('dist.wxs', r'dist'), + ('doc.wxs', r'doc'), + ('help.wxs', r'mercurial\help'), + ('i18n.wxs', r'i18n'), + ('locale.wxs', r'mercurial\locale'), + ('templates.wxs', r'mercurial\templates'), +] + + +EXTRA_PACKAGES = { + 'distutils', + 'pygments', +} + + +def find_version(source_dir: pathlib.Path): + version_py = source_dir / 'mercurial' / '__version__.py' + + with version_py.open('r', encoding='utf-8') as fh: + source = fh.read().strip() + + m = re.search('version = b"(.*)"', source) + return m.group(1) + + +def normalize_version(version): + """Normalize Mercurial version string so WiX accepts it. + + Version strings have to be numeric X.Y.Z. 
+ """ + + if '+' in version: + version, extra = version.split('+', 1) + else: + extra = None + + # 4.9rc0 + if version[:-1].endswith('rc'): + version = version[:-3] + + versions = [int(v) for v in version.split('.')] + while len(versions) < 3: + versions.append(0) + + major, minor, build = versions[:3] + + if extra: + # <commit count>-<hash>+<date> + build = int(extra.split('-')[0]) + + return '.'.join('%d' % x for x in (major, minor, build)) + + +def ensure_vc90_merge_modules(build_dir): + x86 = ( + download_entry('vc9-crt-x86-msm', build_dir, + local_name='microsoft.vcxx.crt.x86_msm.msm')[0], + download_entry('vc9-crt-x86-msm-policy', build_dir, + local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm')[0] + ) + + x64 = ( + download_entry('vc9-crt-x64-msm', build_dir, + local_name='microsoft.vcxx.crt.x64_msm.msm')[0], + download_entry('vc9-crt-x64-msm-policy', build_dir, + local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm')[0] + ) + return { + 'x86': x86, + 'x64': x64, + } + + +def run_candle(wix, cwd, wxs, source_dir, defines=None): + args = [ + str(wix / 'candle.exe'), + '-nologo', + str(wxs), + '-dSourceDir=%s' % source_dir, + ] + + if defines: + args.extend('-d%s=%s' % define for define in sorted(defines.items())) + + subprocess.run(args, cwd=str(cwd), check=True) + + +def make_post_build_signing_fn(name, subject_name=None, cert_path=None, + cert_password=None, timestamp_url=None): + """Create a callable that will use signtool to sign hg.exe.""" + + def post_build_sign(source_dir, build_dir, dist_dir, version): + description = '%s %s' % (name, version) + + sign_with_signtool(dist_dir / 'hg.exe', description, + subject_name=subject_name, cert_path=cert_path, + cert_password=cert_password, + timestamp_url=timestamp_url) + + return post_build_sign + + +LIBRARIES_XML = ''' +<?xml version="1.0" encoding="utf-8"?> +<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> + + <?include {wix_dir}/guids.wxi ?> + <?include {wix_dir}/defines.wxi ?> + + <Fragment> + 
<DirectoryRef Id="INSTALLDIR" FileSource="$(var.SourceDir)"> + <Directory Id="libdir" Name="lib" FileSource="$(var.SourceDir)/lib"> + <Component Id="libOutput" Guid="$(var.lib.guid)" Win64='$(var.IsX64)'> + </Component> + </Directory> + </DirectoryRef> + </Fragment> +</Wix> +'''.lstrip() + + +def make_libraries_xml(wix_dir: pathlib.Path, dist_dir: pathlib.Path): + """Make XML data for library components WXS.""" + # We can't use ElementTree because it doesn't handle the + # <?include ?> directives. + doc = xml.dom.minidom.parseString( + LIBRARIES_XML.format(wix_dir=str(wix_dir))) + + component = doc.getElementsByTagName('Component')[0] + + f = doc.createElement('File') + f.setAttribute('Name', 'library.zip') + f.setAttribute('KeyPath', 'yes') + component.appendChild(f) + + lib_dir = dist_dir / 'lib' + + for p in sorted(lib_dir.iterdir()): + if not p.name.endswith(('.dll', '.pyd')): + continue + + f = doc.createElement('File') + f.setAttribute('Name', p.name) + component.appendChild(f) + + return doc.toprettyxml() + + +def build_installer(source_dir: pathlib.Path, python_exe: pathlib.Path, + msi_name='mercurial', version=None, post_build_fn=None): + """Build a WiX MSI installer. + + ``source_dir`` is the path to the Mercurial source tree to use. + ``arch`` is the target architecture. either ``x86`` or ``x64``. + ``python_exe`` is the path to the Python executable to use/bundle. + ``version`` is the Mercurial version string. If not defined, + ``mercurial/__version__.py`` will be consulted. + ``post_build_fn`` is a callable that will be called after building + Mercurial but before invoking WiX. It can be used to e.g. facilitate + signing. It is passed the paths to the Mercurial source, build, and + dist directories and the resolved Mercurial version. 
+ """ + arch = 'x64' if r'\x64' in os.environ.get('LIB', '') else 'x86' + + hg_build_dir = source_dir / 'build' + dist_dir = source_dir / 'dist' + wix_dir = source_dir / 'contrib' / 'packaging' / 'wix' + + requirements_txt = wix_dir / 'requirements.txt' + + build_py2exe(source_dir, hg_build_dir, + python_exe, 'wix', requirements_txt, + extra_packages=EXTRA_PACKAGES) + + version = version or normalize_version(find_version(source_dir)) + print('using version string: %s' % version) + + if post_build_fn: + post_build_fn(source_dir, hg_build_dir, dist_dir, version) + + build_dir = hg_build_dir / ('wix-%s' % arch) + + build_dir.mkdir(exist_ok=True) + + wix_pkg, wix_entry = download_entry('wix', hg_build_dir) + wix_path = hg_build_dir / ('wix-%s' % wix_entry['version']) + + if not wix_path.exists(): + extract_zip_to_directory(wix_pkg, wix_path) + + ensure_vc90_merge_modules(hg_build_dir) + + source_build_rel = pathlib.Path(os.path.relpath(source_dir, build_dir)) + + defines = {'Platform': arch} + + for wxs, rel_path in SUPPORT_WXS: + wxs = wix_dir / wxs + wxs_source_dir = source_dir / rel_path + run_candle(wix_path, build_dir, wxs, wxs_source_dir, defines=defines) + + # candle.exe doesn't like when we have an open handle on the file. + # So use TemporaryDirectory() instead of NamedTemporaryFile(). 
+ with tempfile.TemporaryDirectory() as td: + td = pathlib.Path(td) + + tf = td / 'library.wxs' + with tf.open('w') as fh: + fh.write(make_libraries_xml(wix_dir, dist_dir)) + + run_candle(wix_path, build_dir, tf, dist_dir, defines=defines) + + source = wix_dir / 'mercurial.wxs' + defines['Version'] = version + defines['Comments'] = 'Installs Mercurial version %s' % version + defines['VCRedistSrcDir'] = str(hg_build_dir) + + run_candle(wix_path, build_dir, source, source_build_rel, defines=defines) + + msi_path = source_dir / 'dist' / ( + '%s-%s-%s.msi' % (msi_name, version, arch)) + + args = [ + str(wix_path / 'light.exe'), + '-nologo', + '-ext', 'WixUIExtension', + '-sw1076', + '-spdb', + '-o', str(msi_path), + ] + + for source, rel_path in SUPPORT_WXS: + assert source.endswith('.wxs') + args.append(str(build_dir / ('%s.wixobj' % source[:-4]))) + + args.extend([ + str(build_dir / 'library.wixobj'), + str(build_dir / 'mercurial.wixobj'), + ]) + + subprocess.run(args, cwd=str(source_dir), check=True) + + print('%s created' % msi_path) + + return { + 'msi_path': msi_path, + } + + +def build_signed_installer(source_dir: pathlib.Path, python_exe: pathlib.Path, + name: str, version=None, subject_name=None, + cert_path=None, cert_password=None, + timestamp_url=None): + """Build an installer with signed executables.""" + + post_build_fn = make_post_build_signing_fn( + name, + subject_name=subject_name, + cert_path=cert_path, + cert_password=cert_password, + timestamp_url=timestamp_url) + + info = build_installer(source_dir, python_exe=python_exe, + msi_name=name.lower(), version=version, + post_build_fn=post_build_fn) + + description = '%s %s' % (name, version) + + sign_with_signtool(info['msi_path'], description, + subject_name=subject_name, cert_path=cert_path, + cert_password=cert_password, timestamp_url=timestamp_url)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/build.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 +# build.py - Inno installer build script. +# +# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# This script automates the building of the Inno MSI installer for Mercurial. + +# no-check-code because Python 3 native. + +import argparse +import os +import pathlib +import sys + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + + parser.add_argument('--python', + required=True, + help='path to python.exe to use') + parser.add_argument('--iscc', + help='path to iscc.exe to use') + parser.add_argument('--version', + help='Mercurial version string to use ' + '(detected from __version__.py if not defined') + + args = parser.parse_args() + + if args.iscc: + iscc = pathlib.Path(args.iscc) + else: + iscc = (pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Inno Setup 5' / + 'ISCC.exe') + + here = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) + source_dir = here.parent.parent.parent + build_dir = source_dir / 'build' + + sys.path.insert(0, str(source_dir / 'contrib' / 'packaging')) + + from hgpackaging.inno import build + + build(source_dir, build_dir, pathlib.Path(args.python), iscc, + version=args.version)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/mercurial.iss Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,124 @@ +; Script generated by the Inno Setup Script Wizard. +; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES! + +#ifndef VERSION +#define FileHandle +#define FileLine +#define VERSION = "unknown" +#if FileHandle = FileOpen(SourcePath + "\..\..\..\mercurial\__version__.py") + #expr FileLine = FileRead(FileHandle) + #expr FileLine = FileRead(FileHandle) + #define VERSION = Copy(FileLine, Pos('"', FileLine)+1, Len(FileLine)-Pos('"', FileLine)-1) +#endif +#if FileHandle + #expr FileClose(FileHandle) +#endif +#pragma message "Detected Version: " + VERSION +#endif + +#ifndef ARCH +#define ARCH = "x86" +#endif + +[Setup] +AppCopyright=Copyright 2005-2019 Matt Mackall and others +AppName=Mercurial +AppVersion={#VERSION} +#if ARCH == "x64" +AppVerName=Mercurial {#VERSION} (64-bit) +OutputBaseFilename=Mercurial-{#VERSION}-x64 +ArchitecturesAllowed=x64 +ArchitecturesInstallIn64BitMode=x64 +#else +AppVerName=Mercurial {#VERSION} +OutputBaseFilename=Mercurial-{#VERSION} +#endif +InfoAfterFile=contrib/win32/postinstall.txt +LicenseFile=COPYING +ShowLanguageDialog=yes +AppPublisher=Matt Mackall and others +AppPublisherURL=https://mercurial-scm.org/ +AppSupportURL=https://mercurial-scm.org/ +AppUpdatesURL=https://mercurial-scm.org/ +AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3} +AppContact=mercurial@mercurial-scm.org +DefaultDirName={pf}\Mercurial +SourceDir=..\..\.. 
+VersionInfoDescription=Mercurial distributed SCM (version {#VERSION}) +VersionInfoCopyright=Copyright 2005-2019 Matt Mackall and others +VersionInfoCompany=Matt Mackall and others +InternalCompressLevel=max +SolidCompression=true +SetupIconFile=contrib\win32\mercurial.ico +AllowNoIcons=true +DefaultGroupName=Mercurial +PrivilegesRequired=none +ChangesEnvironment=true + +[Files] +Source: contrib\mercurial.el; DestDir: {app}/Contrib +Source: contrib\vim\*.*; DestDir: {app}/Contrib/Vim +Source: contrib\zsh_completion; DestDir: {app}/Contrib +Source: contrib\bash_completion; DestDir: {app}/Contrib +Source: contrib\tcsh_completion; DestDir: {app}/Contrib +Source: contrib\tcsh_completion_build.sh; DestDir: {app}/Contrib +Source: contrib\hgk; DestDir: {app}/Contrib; DestName: hgk.tcl +Source: contrib\xml.rnc; DestDir: {app}/Contrib +Source: contrib\mercurial.el; DestDir: {app}/Contrib +Source: contrib\mq.el; DestDir: {app}/Contrib +Source: contrib\hgweb.fcgi; DestDir: {app}/Contrib +Source: contrib\hgweb.wsgi; DestDir: {app}/Contrib +Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme +Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt +Source: dist\hg.exe; DestDir: {app}; AfterInstall: Touch('{app}\hg.exe.local') +Source: dist\lib\*.dll; Destdir: {app}\lib +Source: dist\lib\*.pyd; Destdir: {app}\lib +Source: dist\python*.dll; Destdir: {app}; Flags: skipifsourcedoesntexist +Source: dist\msvc*.dll; DestDir: {app}; Flags: skipifsourcedoesntexist +Source: dist\Microsoft.VC*.CRT.manifest; DestDir: {app}; Flags: skipifsourcedoesntexist +Source: dist\lib\library.zip; DestDir: {app}\lib +Source: doc\*.html; DestDir: {app}\Docs +Source: doc\style.css; DestDir: {app}\Docs +Source: mercurial\help\*.txt; DestDir: {app}\help +Source: mercurial\help\internals\*.txt; DestDir: {app}\help\internals +Source: mercurial\default.d\*.rc; DestDir: {app}\default.d +Source: mercurial\locale\*.*; DestDir: {app}\locale; Flags: recursesubdirs 
createallsubdirs skipifsourcedoesntexist +Source: mercurial\templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs +Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt +Source: COPYING; DestDir: {app}; DestName: Copying.txt + +[INI] +Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: https://mercurial-scm.org/ +Filename: {app}\default.d\editor.rc; Section: ui; Key: editor; String: notepad + +[UninstallDelete] +Type: files; Name: {app}\Mercurial.url +Type: filesandordirs; Name: {app}\default.d +Type: files; Name: "{app}\hg.exe.local" + +[Icons] +Name: {group}\Uninstall Mercurial; Filename: {uninstallexe} +Name: {group}\Mercurial Command Reference; Filename: {app}\Docs\hg.1.html +Name: {group}\Mercurial Configuration Files; Filename: {app}\Docs\hgrc.5.html +Name: {group}\Mercurial Ignore Files; Filename: {app}\Docs\hgignore.5.html +Name: {group}\Mercurial Web Site; Filename: {app}\Mercurial.url + +[Tasks] +Name: modifypath; Description: Add the installation path to the search path; Flags: unchecked + +[Code] +procedure Touch(fn: String); +begin + SaveStringToFile(ExpandConstant(fn), '', False); +end; + +const + ModPathName = 'modifypath'; + ModPathType = 'user'; + +function ModPathDir(): TArrayOfString; +begin + setArrayLength(Result, 1) + Result[0] := ExpandConstant('{app}'); +end; +#include "modpath.iss"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/modpath.iss Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,219 @@ +// ---------------------------------------------------------------------------- +// +// Inno Setup Ver: 5.4.2 +// Script Version: 1.4.2 +// Author: Jared Breland <jbreland@legroom.net> +// Homepage: http://www.legroom.net/software +// License: GNU Lesser General Public License (LGPL), version 3 +// http://www.gnu.org/licenses/lgpl.html +// +// Script Function: +// Allow modification of environmental path directly from Inno Setup installers +// +// Instructions: +// Copy modpath.iss to the same directory as your setup script +// +// Add this statement to your [Setup] section +// ChangesEnvironment=true +// +// Add this statement to your [Tasks] section +// You can change the Description or Flags +// You can change the Name, but it must match the ModPathName setting below +// Name: modifypath; Description: &Add application directory to your environmental path; Flags: unchecked +// +// Add the following to the end of your [Code] section +// ModPathName defines the name of the task defined above +// ModPathType defines whether the 'user' or 'system' path will be modified; +// this will default to user if anything other than system is set +// setArrayLength must specify the total number of dirs to be added +// Result[0] contains first directory, Result[1] contains second, etc. 
+// const +// ModPathName = 'modifypath'; +// ModPathType = 'user'; +// +// function ModPathDir(): TArrayOfString; +// begin +// setArrayLength(Result, 1); +// Result[0] := ExpandConstant('{app}'); +// end; +// #include "modpath.iss" +// ---------------------------------------------------------------------------- + +procedure ModPath(); +var + oldpath: String; + newpath: String; + updatepath: Boolean; + pathArr: TArrayOfString; + aExecFile: String; + aExecArr: TArrayOfString; + i, d: Integer; + pathdir: TArrayOfString; + regroot: Integer; + regpath: String; + +begin + // Get constants from main script and adjust behavior accordingly + // ModPathType MUST be 'system' or 'user'; force 'user' if invalid + if ModPathType = 'system' then begin + regroot := HKEY_LOCAL_MACHINE; + regpath := 'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'; + end else begin + regroot := HKEY_CURRENT_USER; + regpath := 'Environment'; + end; + + // Get array of new directories and act on each individually + pathdir := ModPathDir(); + for d := 0 to GetArrayLength(pathdir)-1 do begin + updatepath := true; + + // Modify WinNT path + if UsingWinNT() = true then begin + + // Get current path, split into an array + RegQueryStringValue(regroot, regpath, 'Path', oldpath); + oldpath := oldpath + ';'; + i := 0; + + while (Pos(';', oldpath) > 0) do begin + SetArrayLength(pathArr, i+1); + pathArr[i] := Copy(oldpath, 0, Pos(';', oldpath)-1); + oldpath := Copy(oldpath, Pos(';', oldpath)+1, Length(oldpath)); + i := i + 1; + + // Check if current directory matches app dir + if pathdir[d] = pathArr[i-1] then begin + // if uninstalling, remove dir from path + if IsUninstaller() = true then begin + continue; + // if installing, flag that dir already exists in path + end else begin + updatepath := false; + end; + end; + + // Add current directory to new path + if i = 1 then begin + newpath := pathArr[i-1]; + end else begin + newpath := newpath + ';' + pathArr[i-1]; + end; + end; + + // Append app 
dir to path if not already included + if (IsUninstaller() = false) AND (updatepath = true) then + newpath := newpath + ';' + pathdir[d]; + + // Write new path + RegWriteStringValue(regroot, regpath, 'Path', newpath); + + // Modify Win9x path + end else begin + + // Convert to shortened dirname + pathdir[d] := GetShortName(pathdir[d]); + + // If autoexec.bat exists, check if app dir already exists in path + aExecFile := 'C:\AUTOEXEC.BAT'; + if FileExists(aExecFile) then begin + LoadStringsFromFile(aExecFile, aExecArr); + for i := 0 to GetArrayLength(aExecArr)-1 do begin + if IsUninstaller() = false then begin + // If app dir already exists while installing, skip add + if (Pos(pathdir[d], aExecArr[i]) > 0) then + updatepath := false; + break; + end else begin + // If app dir exists and = what we originally set, then delete at uninstall + if aExecArr[i] = 'SET PATH=%PATH%;' + pathdir[d] then + aExecArr[i] := ''; + end; + end; + end; + + // If app dir not found, or autoexec.bat didn't exist, then (create and) append to current path + if (IsUninstaller() = false) AND (updatepath = true) then begin + SaveStringToFile(aExecFile, #13#10 + 'SET PATH=%PATH%;' + pathdir[d], True); + + // If uninstalling, write the full autoexec out + end else begin + SaveStringsToFile(aExecFile, aExecArr, False); + end; + end; + end; +end; + +// Split a string into an array using passed delimeter +procedure MPExplode(var Dest: TArrayOfString; Text: String; Separator: String); +var + i: Integer; +begin + i := 0; + repeat + SetArrayLength(Dest, i+1); + if Pos(Separator,Text) > 0 then begin + Dest[i] := Copy(Text, 1, Pos(Separator, Text)-1); + Text := Copy(Text, Pos(Separator,Text) + Length(Separator), Length(Text)); + i := i + 1; + end else begin + Dest[i] := Text; + Text := ''; + end; + until Length(Text)=0; +end; + + +procedure CurStepChanged(CurStep: TSetupStep); +var + taskname: String; +begin + taskname := ModPathName; + if CurStep = ssPostInstall then + if IsTaskSelected(taskname) then + 
ModPath(); +end; + +procedure CurUninstallStepChanged(CurUninstallStep: TUninstallStep); +var + aSelectedTasks: TArrayOfString; + i: Integer; + taskname: String; + regpath: String; + regstring: String; + appid: String; +begin + // only run during actual uninstall + if CurUninstallStep = usUninstall then begin + // get list of selected tasks saved in registry at install time + appid := '{#emit SetupSetting("AppId")}'; + if appid = '' then appid := '{#emit SetupSetting("AppName")}'; + regpath := ExpandConstant('Software\Microsoft\Windows\CurrentVersion\Uninstall\'+appid+'_is1'); + RegQueryStringValue(HKLM, regpath, 'Inno Setup: Selected Tasks', regstring); + if regstring = '' then RegQueryStringValue(HKCU, regpath, 'Inno Setup: Selected Tasks', regstring); + + // check each task; if matches modpath taskname, trigger patch removal + if regstring <> '' then begin + taskname := ModPathName; + MPExplode(aSelectedTasks, regstring, ','); + if GetArrayLength(aSelectedTasks) > 0 then begin + for i := 0 to GetArrayLength(aSelectedTasks)-1 do begin + if comparetext(aSelectedTasks[i], taskname) = 0 then + ModPath(); + end; + end; + end; + end; +end; + +function NeedRestart(): Boolean; +var + taskname: String; +begin + taskname := ModPathName; + if IsTaskSelected(taskname) and not UsingWinNT() then begin + Result := True; + end else begin + Result := False; + end; +end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/readme.rst Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,61 @@ +Requirements +============ + +Building the Inno installer requires a Windows machine. + +The following system dependencies must be installed: + +* Python 2.7 (download from https://www.python.org/downloads/) +* Microsoft Visual C++ Compiler for Python 2.7 + (https://www.microsoft.com/en-us/download/details.aspx?id=44266) +* Inno Setup (http://jrsoftware.org/isdl.php) version 5.4 or newer. + Be sure to install the optional Inno Setup Preprocessor feature, + which is required. +* Python 3.5+ (to run the ``build.py`` script) + +Building +======== + +The ``build.py`` script automates the process of producing an +Inno installer. It manages fetching and configuring the +non-system dependencies (such as py2exe, gettext, and various +Python packages). + +The script requires an activated ``Visual C++ 2008`` command prompt. +A shortcut to such a prompt was installed with ``Microsoft Visual C++ +Compiler for Python 2.7``. From your Start Menu, look for +``Microsoft Visual C++ Compiler Package for Python 2.7`` then launch +either ``Visual C++ 2008 32-bit Command Prompt`` or +``Visual C++ 2008 64-bit Command Prompt``. + +From the prompt, change to the Mercurial source directory. e.g. +``cd c:\src\hg``. + +Next, invoke ``build.py`` to produce an Inno installer. You will +need to supply the path to the Python interpreter to use.: + + $ python3.exe contrib\packaging\inno\build.py \ + --python c:\python27\python.exe + +.. note:: + + The script validates that the Visual C++ environment is + active and that the architecture of the specified Python + interpreter matches the Visual C++ environment and errors + if not. + +If everything runs as intended, dependencies will be fetched and +configured into the ``build`` sub-directory, Mercurial will be built, +and an installer placed in the ``dist`` sub-directory. 
The final +line of output should print the name of the generated installer. + +Additional options may be configured. Run ``build.py --help`` to +see a list of program flags. + +MinGW +===== + +It is theoretically possible to generate an installer that uses +MinGW. This isn't well tested and ``build.py`` may not properly +support it. See old versions of this file in version control for +potentially useful hints as to how to achieve this.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/requirements.txt Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes contrib/packaging/inno/requirements.txt.in -o contrib/packaging/inno/requirements.txt +# +certifi==2018.11.29 \ + --hash=sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7 \ + --hash=sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033 \ + # via dulwich +configparser==3.7.3 \ + --hash=sha256:27594cf4fc279f321974061ac69164aaebd2749af962ac8686b20503ac0bcf2d \ + --hash=sha256:9d51fe0a382f05b6b117c5e601fc219fede4a8c71703324af3f7d883aef476a3 \ + # via entrypoints +docutils==0.14 \ + --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \ + --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \ + --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 +dulwich==0.19.11 \ + --hash=sha256:afbe070f6899357e33f63f3f3696e601731fef66c64a489dea1bc9f539f4a725 +entrypoints==0.3 \ + --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ + --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \ + # via keyring +keyring==18.0.0 \ + --hash=sha256:12833d2b05d2055e0e25931184af9cd6a738f320a2264853cabbd8a3a0f0b65d \ + --hash=sha256:ca33f5ccc542b9ffaa196ee9a33488069e5e7eac77d5b81969f8a3ce74d0230c +pygments==2.3.1 \ + --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \ + --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d +pywin32-ctypes==0.2.0 \ + --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \ + --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 \ + # via keyring +urllib3==1.24.1 \ + --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \ + 
--hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 \ + # via dulwich
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/requirements.txt.in Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,4 @@ +docutils +dulwich +keyring +pygments
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/build.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 +# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +"""Code to build Mercurial WiX installer.""" + +import argparse +import os +import pathlib +import sys + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + + parser.add_argument('--name', + help='Application name', + default='Mercurial') + parser.add_argument('--python', + help='Path to Python executable to use', + required=True) + parser.add_argument('--sign-sn', + help='Subject name (or fragment thereof) of certificate ' + 'to use for signing') + parser.add_argument('--sign-cert', + help='Path to certificate to use for signing') + parser.add_argument('--sign-password', + help='Password for signing certificate') + parser.add_argument('--sign-timestamp-url', + help='URL of timestamp server to use for signing') + parser.add_argument('--version', + help='Version string to use') + + args = parser.parse_args() + + here = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) + source_dir = here.parent.parent.parent + + sys.path.insert(0, str(source_dir / 'contrib' / 'packaging')) + + from hgpackaging.wix import ( + build_installer, + build_signed_installer, + ) + + fn = build_installer + kwargs = { + 'source_dir': source_dir, + 'python_exe': pathlib.Path(args.python), + 'version': args.version, + } + + if args.sign_sn or args.sign_cert: + fn = build_signed_installer + kwargs['name'] = args.name + kwargs['subject_name'] = args.sign_sn + kwargs['cert_path'] = args.sign_cert + kwargs['cert_password'] = args.sign_password + kwargs['timestamp_url'] = args.sign_timestamp_url + + fn(**kwargs)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/contrib.wxs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,43 @@ +<?xml version="1.0" encoding="utf-8"?> +<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> + + <?include guids.wxi ?> + <?include defines.wxi ?> + + <Fragment> + <ComponentGroup Id="contribFolder"> + <ComponentRef Id="contrib" /> + <ComponentRef Id="contrib.vim" /> + </ComponentGroup> + </Fragment> + + <Fragment> + <DirectoryRef Id="INSTALLDIR"> + <Directory Id="contribdir" Name="contrib" FileSource="$(var.SourceDir)"> + <Component Id="contrib" Guid="$(var.contrib.guid)" Win64='$(var.IsX64)'> + <File Name="bash_completion" KeyPath="yes" /> + <File Name="hgk" /> + <File Name="hgweb.fcgi" /> + <File Name="hgweb.wsgi" /> + <File Name="logo-droplets.svg" /> + <File Name="mercurial.el" /> + <File Name="tcsh_completion" /> + <File Name="tcsh_completion_build.sh" /> + <File Name="xml.rnc" /> + <File Name="zsh_completion" /> + </Component> + <Directory Id="vimdir" Name="vim"> + <Component Id="contrib.vim" Guid="$(var.contrib.vim.guid)" Win64='$(var.IsX64)'> + <File Name="hg-menu.vim" KeyPath="yes" /> + <File Name="HGAnnotate.vim" /> + <File Name="hgcommand.vim" /> + <File Name="patchreview.txt" /> + <File Name="patchreview.vim" /> + <File Name="hgtest.vim" /> + </Component> + </Directory> + </Directory> + </DirectoryRef> + </Fragment> + +</Wix>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/defines.wxi Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,9 @@ +<Include> + + <?if $(var.Platform) = "x64" ?> + <?define IsX64 = yes ?> + <?else?> + <?define IsX64 = no ?> + <?endif?> + +</Include>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/dist.wxs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,15 @@ +<?xml version="1.0" encoding="utf-8"?> +<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> + + <?include guids.wxi ?> + <?include defines.wxi ?> + + <Fragment> + <DirectoryRef Id="INSTALLDIR" FileSource="$(var.SourceDir)"> + <Component Id="distOutput" Guid="$(var.dist.guid)" Win64='$(var.IsX64)'> + <File Name="python27.dll" KeyPath="yes" /> + </Component> + </DirectoryRef> + </Fragment> + +</Wix>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/doc.wxs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,50 @@ +<?xml version="1.0" encoding="utf-8"?> +<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> + + <?include guids.wxi ?> + <?include defines.wxi ?> + + <Fragment> + <ComponentGroup Id="docFolder"> + <ComponentRef Id="doc.hg.1.html" /> + <ComponentRef Id="doc.hgignore.5.html" /> + <ComponentRef Id="doc.hgrc.5.html" /> + <ComponentRef Id="doc.style.css" /> + </ComponentGroup> + </Fragment> + + <Fragment> + <DirectoryRef Id="INSTALLDIR"> + <Directory Id="docdir" Name="doc" FileSource="$(var.SourceDir)"> + <Component Id="doc.hg.1.html" Guid="$(var.doc.hg.1.html.guid)" Win64='$(var.IsX64)'> + <File Name="hg.1.html" KeyPath="yes"> + <Shortcut Id="hg1StartMenu" Directory="ProgramMenuDir" + Name="Mercurial Command Reference" + Icon="hgIcon.ico" IconIndex="0" Advertise="yes" + /> + </File> + </Component> + <Component Id="doc.hgignore.5.html" Guid="$(var.doc.hgignore.5.html.guid)" Win64='$(var.IsX64)'> + <File Name="hgignore.5.html" KeyPath="yes"> + <Shortcut Id="hgignore5StartMenu" Directory="ProgramMenuDir" + Name="Mercurial Ignore Files" + Icon="hgIcon.ico" IconIndex="0" Advertise="yes" + /> + </File> + </Component> + <Component Id="doc.hgrc.5.html" Guid="$(var.doc.hgrc.5.html)" Win64='$(var.IsX64)'> + <File Name="hgrc.5.html" KeyPath="yes"> + <Shortcut Id="hgrc5StartMenu" Directory="ProgramMenuDir" + Name="Mercurial Configuration Files" + Icon="hgIcon.ico" IconIndex="0" Advertise="yes" + /> + </File> + </Component> + <Component Id="doc.style.css" Guid="$(var.doc.style.css)" Win64='$(var.IsX64)'> + <File Name="style.css" KeyPath="yes" /> + </Component> + </Directory> + </DirectoryRef> + </Fragment> + +</Wix>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/guids.wxi Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,52 @@ +<Include> + <!-- These are component GUIDs used for Mercurial installers. + YOU MUST CHANGE ALL GUIDs below when copying this file + and replace 'Mercurial' in this notice with the name of + your project. Component GUIDs have global namespace! --> + + <!-- contrib.wxs --> + <?define contrib.guid = {4E11FFC2-E2F7-482A-8460-9394B5489F02} ?> + <?define contrib.vim.guid = {BB04903A-652D-4C4F-9590-2BD07A2304F2} ?> + + <!-- dist.wxs --> + <?define dist.guid = {CE405FE6-CD1E-4873-9C9A-7683AE5A3D90} ?> + <?define lib.guid = {877633b5-0b7e-4b46-8f1c-224a61733297} ?> + + <!-- doc.wxs --> + <?define doc.hg.1.html.guid = {AAAA3FDA-EDC5-4220-B59D-D342722358A2} ?> + <?define doc.hgignore.5.html.guid = {AA9118C4-F3A0-4429-A5F4-5A1906B2D67F} ?> + <?define doc.hgrc.5.html = {E0CEA1EB-FA01-408c-844B-EE5965165BAE} ?> + <?define doc.style.css = {172F8262-98E0-4711-BD39-4DAE0D77EF05} ?> + + <!-- help.wxs --> + <?define help.root.guid = {9FA957DB-6DFE-44f2-AD03-293B2791CF17} ?> + <?define help.internals.guid = {2DD7669D-0DB8-4C39-9806-78E6475E7ACC} ?> + + <!-- i18n.wxs --> + <?define i18nFolder.guid = {1BF8026D-CF7C-4174-AEE6-D6B7BF119248} ?> + + <!-- templates.wxs --> + <?define templates.root.guid = {437FD55C-7756-4EA0-87E5-FDBE75DC8595} ?> + <?define templates.atom.guid = {D30E14A5-8AF0-4268-8B00-00BEE9E09E39} ?> + <?define templates.coal.guid = {B63CCAAB-4EAF-43b4-901E-4BD13F5B78FC} ?> + <?define templates.gitweb.guid = {827334AF-1EFD-421B-962C-5660A068F612} ?> + <?define templates.json.guid = {F535BE7A-EC34-46E0-B9BE-013F3DBAFB19} ?> + <?define templates.monoblue.guid = {8060A1E4-BD4C-453E-92CB-9536DC44A9E3} ?> + <?define templates.paper.guid = {61AB1DE9-645F-46ED-8AF8-0CF02267FFBB} ?> + <?define templates.raw.guid = {834DF8D7-9784-43A6-851D-A96CE1B3575B} ?> + <?define templates.rss.guid = {9338FA09-E128-4B1C-B723-1142DBD09E14} ?> + <?define 
templates.spartan.guid = {80222625-FA8F-44b1-86CE-1781EF375D09} ?> + <?define templates.static.guid = {6B3D7C24-98DA-4B67-9F18-35F77357B0B4} ?> + + <!-- mercurial.wxs --> + <?define ProductUpgradeCode = {A1CC6134-E945-4399-BE36-EB0017FDF7CF} ?> + + <?define ComponentMainExecutableGUID = {D102B8FA-059B-4ACC-9FA3-8C78C3B58EEF} ?> + + <?define ReadMe.guid = {56A8E372-991D-4DCA-B91D-93D775974CF5} ?> + <?define COPYING.guid = {B7801DBA-1C49-4BF4-91AD-33C65F5C7895} ?> + <?define mercurial.rc.guid = {1D5FAEEE-7E6E-43B1-9F7F-802714316B15} ?> + <?define mergetools.rc.guid = {E8A1DC29-FF40-4B5F-BD12-80B9F7BF0CCD} ?> + <?define ProgramMenuDir.guid = {D5A63320-1238-489B-B68B-CF053E9577CA} ?> + +</Include>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/help.wxs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,64 @@ +<?xml version="1.0" encoding="utf-8"?> +<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> + + <?include guids.wxi ?> + <?include defines.wxi ?> + + <Fragment> + <ComponentGroup Id='helpFolder'> + <ComponentRef Id='help.root' /> + <ComponentRef Id='help.internals' /> + </ComponentGroup> + </Fragment> + + <Fragment> + <DirectoryRef Id="INSTALLDIR"> + <Directory Id="helpdir" Name="help" FileSource="$(var.SourceDir)"> + <Component Id="help.root" Guid="$(var.help.root.guid)" Win64='$(var.IsX64)'> + <File Name="bundlespec.txt" /> + <File Name="color.txt" /> + <File Name="config.txt" KeyPath="yes" /> + <File Name="dates.txt" /> + <File Name="deprecated.txt" /> + <File Name="diffs.txt" /> + <File Name="environment.txt" /> + <File Name="extensions.txt" /> + <File Name="filesets.txt" /> + <File Name="flags.txt" /> + <File Name="glossary.txt" /> + <File Name="hgignore.txt" /> + <File Name="hgweb.txt" /> + <File Name="merge-tools.txt" /> + <File Name="pager.txt" /> + <File Name="patterns.txt" /> + <File Name="phases.txt" /> + <File Name="revisions.txt" /> + <File Name="scripting.txt" /> + <File Name="subrepos.txt" /> + <File Name="templates.txt" /> + <File Name="urls.txt" /> + </Component> + + <Directory Id="help.internaldir" Name="internals"> + <Component Id="help.internals" Guid="$(var.help.internals.guid)" Win64='$(var.IsX64)'> + <File Id="internals.bundle2.txt" Name="bundle2.txt" /> + <File Id="internals.bundles.txt" Name="bundles.txt" KeyPath="yes" /> + <File Id="internals.cbor.txt" Name="cbor.txt" /> + <File Id="internals.censor.txt" Name="censor.txt" /> + <File Id="internals.changegroups.txt" Name="changegroups.txt" /> + <File Id="internals.config.txt" Name="config.txt" /> + <File Id="internals.extensions.txt" Name="extensions.txt" /> + <File Id="internals.linelog.txt" Name="linelog.txt" /> + <File Id="internals.requirements.txt" 
Name="requirements.txt" /> + <File Id="internals.revlogs.txt" Name="revlogs.txt" /> + <File Id="internals.wireprotocol.txt" Name="wireprotocol.txt" /> + <File Id="internals.wireprotocolrpc.txt" Name="wireprotocolrpc.txt" /> + <File Id="internals.wireprotocolv2.txt" Name="wireprotocolv2.txt" /> + </Component> + </Directory> + + </Directory> + </DirectoryRef> + </Fragment> + +</Wix>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/i18n.wxs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,26 @@ +<?xml version="1.0" encoding="utf-8"?> +<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> + + <?include guids.wxi ?> + <?include defines.wxi ?> + + <?define hg_po_langs = + da;de;el;fr;it;ja;pt_BR;ro;ru;sv;zh_CN;zh_TW + ?> + + <Fragment> + <DirectoryRef Id="INSTALLDIR"> + <Directory Id="i18ndir" Name="i18n" FileSource="$(var.SourceDir)"> + <Component Id="i18nFolder" Guid="$(var.i18nFolder.guid)" Win64='$(var.IsX64)'> + <File Name="hggettext" KeyPath="yes" /> + <?foreach LANG in $(var.hg_po_langs) ?> + <File Id="hg.$(var.LANG).po" + Name="$(var.LANG).po" + /> + <?endforeach?> + </Component> + </Directory> + </DirectoryRef> + </Fragment> + +</Wix>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/locale.wxs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,34 @@ +<?xml version="1.0" encoding="utf-8"?> +<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> + + <?include defines.wxi ?> + + <?define hglocales = + da;de;el;fr;it;ja;pt_BR;ro;ru;sv;zh_CN;zh_TW + ?> + + <Fragment> + <ComponentGroup Id="localeFolder"> + <?foreach LOC in $(var.hglocales) ?> + <ComponentRef Id="hg.locale.$(var.LOC)"/> + <?endforeach?> + </ComponentGroup> + </Fragment> + + <Fragment> + <DirectoryRef Id="INSTALLDIR"> + <Directory Id="localedir" Name="locale" FileSource="$(var.SourceDir)"> + <?foreach LOC in $(var.hglocales) ?> + <Directory Id="hg.locale.$(var.LOC)" Name="$(var.LOC)"> + <Directory Id="hg.locale.$(var.LOC).LC_MESSAGES" Name="LC_MESSAGES"> + <Component Id="hg.locale.$(var.LOC)" Guid="*" Win64='$(var.IsX64)'> + <File Id="hg.mo.$(var.LOC)" Name="hg.mo" KeyPath="yes" /> + </Component> + </Directory> + </Directory> + <?endforeach?> + </Directory> + </DirectoryRef> + </Fragment> + +</Wix>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/mercurial.wxs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,162 @@ +<?xml version='1.0' encoding='windows-1252'?> +<Wix xmlns='http://schemas.microsoft.com/wix/2006/wi'> + + <!-- Copyright 2010 Steve Borho <steve@borho.org> + + This software may be used and distributed according to the terms of the + GNU General Public License version 2 or any later version. --> + + <?include guids.wxi ?> + <?include defines.wxi ?> + + <?if $(var.Platform) = "x64" ?> + <?define PFolder = ProgramFiles64Folder ?> + <?else?> + <?define PFolder = ProgramFilesFolder ?> + <?endif?> + + <Product Id='*' + Name='Mercurial $(var.Version) ($(var.Platform))' + UpgradeCode='$(var.ProductUpgradeCode)' + Language='1033' Codepage='1252' Version='$(var.Version)' + Manufacturer='Matt Mackall and others'> + + <Package Id='*' + Keywords='Installer' + Description="Mercurial distributed SCM (version $(var.Version))" + Comments='$(var.Comments)' + Platform='$(var.Platform)' + Manufacturer='Matt Mackall and others' + InstallerVersion='300' Languages='1033' Compressed='yes' SummaryCodepage='1252' /> + + <Media Id='1' Cabinet='mercurial.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1' + CompressionLevel='high' /> + <Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" /> + + <Condition Message='Mercurial MSI installers require Windows XP or higher'> + VersionNT >= 501 + </Condition> + + <Property Id="INSTALLDIR"> + <ComponentSearch Id='SearchForMainExecutableComponent' + Guid='$(var.ComponentMainExecutableGUID)' /> + </Property> + + <!--Property Id='ARPCOMMENTS'>any comments</Property--> + <Property Id='ARPCONTACT'>mercurial@mercurial-scm.org</Property> + <Property Id='ARPHELPLINK'>https://mercurial-scm.org/wiki/</Property> + <Property Id='ARPURLINFOABOUT'>https://mercurial-scm.org/about/</Property> + <Property Id='ARPURLUPDATEINFO'>https://mercurial-scm.org/downloads/</Property> + <Property 
Id='ARPHELPTELEPHONE'>https://mercurial-scm.org/wiki/Support</Property> + <Property Id='ARPPRODUCTICON'>hgIcon.ico</Property> + + <Property Id='INSTALLEDMERCURIALPRODUCTS' Secure='yes'></Property> + <Property Id='REINSTALLMODE'>amus</Property> + + <!--Auto-accept the license page--> + <Property Id='LicenseAccepted'>1</Property> + + <Directory Id='TARGETDIR' Name='SourceDir'> + <Directory Id='$(var.PFolder)' Name='PFiles'> + <Directory Id='INSTALLDIR' Name='Mercurial'> + <Component Id='MainExecutable' Guid='$(var.ComponentMainExecutableGUID)' Win64='$(var.IsX64)'> + <File Id='hgEXE' Name='hg.exe' Source='dist\hg.exe' KeyPath='yes' /> + <Environment Id="Environment" Name="PATH" Part="last" System="yes" + Permanent="no" Value="[INSTALLDIR]" Action="set" /> + </Component> + <Component Id='ReadMe' Guid='$(var.ReadMe.guid)' Win64='$(var.IsX64)'> + <File Id='ReadMe' Name='ReadMe.html' Source='contrib\win32\ReadMe.html' + KeyPath='yes'/> + </Component> + <Component Id='COPYING' Guid='$(var.COPYING.guid)' Win64='$(var.IsX64)'> + <File Id='COPYING' Name='COPYING.rtf' Source='contrib\packaging\wix\COPYING.rtf' + KeyPath='yes'/> + </Component> + + <Directory Id='HGRCD' Name='hgrc.d'> + <Component Id='mercurial.rc' Guid='$(var.mercurial.rc.guid)' Win64='$(var.IsX64)'> + <File Id='mercurial.rc' Name='Mercurial.rc' Source='contrib\win32\mercurial.ini' + ReadOnly='yes' KeyPath='yes'/> + </Component> + <Component Id='mergetools.rc' Guid='$(var.mergetools.rc.guid)' Win64='$(var.IsX64)'> + <File Id='mergetools.rc' Name='MergeTools.rc' Source='mercurial\default.d\mergetools.rc' + ReadOnly='yes' KeyPath='yes'/> + </Component> + </Directory> + + </Directory> + </Directory> + + <Directory Id="ProgramMenuFolder" Name="Programs"> + <Directory Id="ProgramMenuDir" Name="Mercurial $(var.Version)"> + <Component Id="ProgramMenuDir" Guid="$(var.ProgramMenuDir.guid)" Win64='$(var.IsX64)'> + <RemoveFolder Id='ProgramMenuDir' On='uninstall' /> + <RegistryValue Root='HKCU' 
Key='Software\Mercurial\InstallDir' Type='string' + Value='[INSTALLDIR]' KeyPath='yes' /> + <Shortcut Id='UrlShortcut' Directory='ProgramMenuDir' Name='Mercurial Web Site' + Target='[ARPHELPLINK]' Icon="hgIcon.ico" IconIndex='0' /> + </Component> + </Directory> + </Directory> + + <?if $(var.Platform) = "x86" ?> + <Merge Id='VCRuntime' DiskId='1' Language='1033' + SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x86_msm.msm' /> + <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' + SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x86_msm.msm' /> + <?else?> + <Merge Id='VCRuntime' DiskId='1' Language='1033' + SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x64_msm.msm' /> + <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' + SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x64_msm.msm' /> + <?endif?> + </Directory> + + <Feature Id='Complete' Title='Mercurial' Description='The complete package' + Display='expand' Level='1' ConfigurableDirectory='INSTALLDIR' > + <Feature Id='MainProgram' Title='Program' Description='Mercurial command line app' + Level='1' Absent='disallow' > + <ComponentRef Id='MainExecutable' /> + <ComponentRef Id='distOutput' /> + <ComponentRef Id='libOutput' /> + <ComponentRef Id='ProgramMenuDir' /> + <ComponentRef Id='ReadMe' /> + <ComponentRef Id='COPYING' /> + <ComponentRef Id='mercurial.rc' /> + <ComponentRef Id='mergetools.rc' /> + <ComponentGroupRef Id='helpFolder' /> + <ComponentGroupRef Id='templatesFolder' /> + <MergeRef Id='VCRuntime' /> + <MergeRef Id='VCRuntimePolicy' /> + </Feature> + <Feature Id='Locales' Title='Translations' Description='Translations' Level='1'> + <ComponentGroupRef Id='localeFolder' /> + <ComponentRef Id='i18nFolder' /> + </Feature> + <Feature Id='Documentation' Title='Documentation' Description='HTML man pages' Level='1'> + <ComponentGroupRef Id='docFolder' /> + </Feature> + <Feature Id='Misc' Title='Miscellaneous' Description='Contributed scripts' Level='1'> + 
<ComponentGroupRef Id='contribFolder' /> + </Feature> + </Feature> + + <UIRef Id="WixUI_FeatureTree" /> + <UIRef Id="WixUI_ErrorProgressText" /> + + <WixVariable Id="WixUILicenseRtf" Value="contrib\packaging\wix\COPYING.rtf" /> + + <Icon Id="hgIcon.ico" SourceFile="contrib/win32/mercurial.ico" /> + + <Upgrade Id='$(var.ProductUpgradeCode)'> + <UpgradeVersion + IncludeMinimum='yes' Minimum='0.0.0' IncludeMaximum='no' OnlyDetect='no' + Property='INSTALLEDMERCURIALPRODUCTS' /> + </Upgrade> + + <InstallExecuteSequence> + <RemoveExistingProducts After='InstallInitialize'/> + </InstallExecuteSequence> + + </Product> +</Wix>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/readme.rst Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,71 @@ +WiX Installer +============= + +The files in this directory are used to produce an MSI installer using +the WiX Toolset (http://wixtoolset.org/). + +The MSI installers require elevated (admin) privileges due to the +installation of MSVC CRT libraries into the Windows system store. See +the Inno Setup installers in the ``inno`` sibling directory for installers +that do not have this requirement. + +Requirements +============ + +Building the WiX installers requires a Windows machine. The following +dependencies must be installed: + +* Python 2.7 (download from https://www.python.org/downloads/) +* Microsoft Visual C++ Compiler for Python 2.7 + (https://www.microsoft.com/en-us/download/details.aspx?id=44266) +* Python 3.5+ (to run the ``build.py`` script) + +Building +======== + +The ``build.py`` script automates the process of producing an MSI +installer. It manages fetching and configuring non-system dependencies +(such as py2exe, gettext, and various Python packages). + +The script requires an activated ``Visual C++ 2008`` command prompt. +A shortcut to such a prompt was installed with ``Microsoft Visual +C++ Compiler for Python 2.7``. From your Start Menu, look for +``Microsoft Visual C++ Compiler Package for Python 2.7`` then +launch either ``Visual C++ 2008 32-bit Command Prompt`` or +``Visual C++ 2008 64-bit Command Prompt``. + +From the prompt, change to the Mercurial source directory. e.g. +``cd c:\src\hg``. + +Next, invoke ``build.py`` to produce an MSI installer. You will need +to supply the path to the Python interpreter to use.:: + + $ python3 contrib\packaging\wix\build.py \ + --python c:\python27\python.exe + +.. note:: + + The script validates that the Visual C++ environment is active and + that the architecture of the specified Python interpreter matches the + Visual C++ environment. An error is raised otherwise. 
+ +If everything runs as intended, dependencies will be fetched and +configured into the ``build`` sub-directory, Mercurial will be built, +and an installer placed in the ``dist`` sub-directory. The final line +of output should print the name of the generated installer. + +Additional options may be configured. Run ``build.py --help`` to see +a list of program flags. + +Relationship to TortoiseHG +========================== + +TortoiseHG uses the WiX files in this directory. + +The code for building TortoiseHG installers lives at +https://bitbucket.org/tortoisehg/thg-winbuild and is maintained by +Steve Borho (steve@borho.org). + +When changing behavior of the WiX installer, be sure to notify +the TortoiseHG Project of the changes so they have ample time +to provide feedback and react to those changes.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/requirements.txt Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,13 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes contrib/packaging/wix/requirements.txt.in -o contrib/packaging/wix/requirements.txt +# +docutils==0.14 \ + --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \ + --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \ + --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 +pygments==2.3.1 \ + --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \ + --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/requirements.txt.in Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,2 @@ +docutils +pygments
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/templates.wxs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,251 @@ +<?xml version="1.0" encoding="utf-8"?> +<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> + + <?include guids.wxi ?> + <?include defines.wxi ?> + + <Fragment> + <ComponentGroup Id="templatesFolder"> + + <ComponentRef Id="templates.root" /> + + <ComponentRef Id="templates.atom" /> + <ComponentRef Id="templates.coal" /> + <ComponentRef Id="templates.gitweb" /> + <ComponentRef Id="templates.json" /> + <ComponentRef Id="templates.monoblue" /> + <ComponentRef Id="templates.paper" /> + <ComponentRef Id="templates.raw" /> + <ComponentRef Id="templates.rss" /> + <ComponentRef Id="templates.spartan" /> + <ComponentRef Id="templates.static" /> + + </ComponentGroup> + </Fragment> + + <Fragment> + <DirectoryRef Id="INSTALLDIR"> + + <Directory Id="templatesdir" Name="templates" FileSource="$(var.SourceDir)"> + + <Component Id="templates.root" Guid="$(var.templates.root.guid)" Win64='$(var.IsX64)'> + <File Name="map-cmdline.changelog" KeyPath="yes" /> + <File Name="map-cmdline.compact" /> + <File Name="map-cmdline.default" /> + <File Name="map-cmdline.show" /> + <File Name="map-cmdline.bisect" /> + <File Name="map-cmdline.xml" /> + <File Name="map-cmdline.status" /> + <File Name="map-cmdline.phases" /> + </Component> + + <Directory Id="templates.jsondir" Name="json"> + <Component Id="templates.json" Guid="$(var.templates.json.guid)" Win64='$(var.IsX64)'> + <File Id="json.changelist.tmpl" Name="changelist.tmpl" KeyPath="yes" /> + <File Id="json.graph.tmpl" Name="graph.tmpl" /> + <File Id="json.map" Name="map" /> + </Component> + </Directory> + + <Directory Id="templates.atomdir" Name="atom"> + <Component Id="templates.atom" Guid="$(var.templates.atom.guid)" Win64='$(var.IsX64)'> + <File Id="atom.changelog.tmpl" Name="changelog.tmpl" KeyPath="yes" /> + <File Id="atom.changelogentry.tmpl" Name="changelogentry.tmpl" /> + <File 
Id="atom.error.tmpl" Name="error.tmpl" /> + <File Id="atom.filelog.tmpl" Name="filelog.tmpl" /> + <File Id="atom.header.tmpl" Name="header.tmpl" /> + <File Id="atom.map" Name="map" /> + <File Id="atom.tagentry.tmpl" Name="tagentry.tmpl" /> + <File Id="atom.tags.tmpl" Name="tags.tmpl" /> + <File Id="atom.branchentry.tmpl" Name="branchentry.tmpl" /> + <File Id="atom.branches.tmpl" Name="branches.tmpl" /> + <File Id="atom.bookmarks.tmpl" Name="bookmarks.tmpl" /> + <File Id="atom.bookmarkentry.tmpl" Name="bookmarkentry.tmpl" /> + </Component> + </Directory> + + <Directory Id="templates.coaldir" Name="coal"> + <Component Id="templates.coal" Guid="$(var.templates.coal.guid)" Win64='$(var.IsX64)'> + <File Id="coal.header.tmpl" Name="header.tmpl" KeyPath="yes" /> + <File Id="coal.map" Name="map" /> + </Component> + </Directory> + + <Directory Id="templates.gitwebdir" Name="gitweb"> + <Component Id="templates.gitweb" Guid="$(var.templates.gitweb.guid)" Win64='$(var.IsX64)'> + <File Id="gitweb.branches.tmpl" Name="branches.tmpl" KeyPath="yes" /> + <File Id="gitweb.bookmarks.tmpl" Name="bookmarks.tmpl" /> + <File Id="gitweb.changelog.tmpl" Name="changelog.tmpl" /> + <File Id="gitweb.changelogentry.tmpl" Name="changelogentry.tmpl" /> + <File Id="gitweb.changeset.tmpl" Name="changeset.tmpl" /> + <File Id="gitweb.error.tmpl" Name="error.tmpl" /> + <File Id="gitweb.fileannotate.tmpl" Name="fileannotate.tmpl" /> + <File Id="gitweb.filecomparison.tmpl" Name="filecomparison.tmpl" /> + <File Id="gitweb.filediff.tmpl" Name="filediff.tmpl" /> + <File Id="gitweb.filelog.tmpl" Name="filelog.tmpl" /> + <File Id="gitweb.filerevision.tmpl" Name="filerevision.tmpl" /> + <File Id="gitweb.footer.tmpl" Name="footer.tmpl" /> + <File Id="gitweb.graph.tmpl" Name="graph.tmpl" /> + <File Id="gitweb.graphentry.tmpl" Name="graphentry.tmpl" /> + <File Id="gitweb.header.tmpl" Name="header.tmpl" /> + <File Id="gitweb.index.tmpl" Name="index.tmpl" /> + <File Id="gitweb.manifest.tmpl" Name="manifest.tmpl" 
/> + <File Id="gitweb.map" Name="map" /> + <File Id="gitweb.notfound.tmpl" Name="notfound.tmpl" /> + <File Id="gitweb.search.tmpl" Name="search.tmpl" /> + <File Id="gitweb.shortlog.tmpl" Name="shortlog.tmpl" /> + <File Id="gitweb.summary.tmpl" Name="summary.tmpl" /> + <File Id="gitweb.tags.tmpl" Name="tags.tmpl" /> + <File Id="gitweb.help.tmpl" Name="help.tmpl" /> + <File Id="gitweb.helptopics.tmpl" Name="helptopics.tmpl" /> + </Component> + </Directory> + + <Directory Id="templates.monobluedir" Name="monoblue"> + <Component Id="templates.monoblue" Guid="$(var.templates.monoblue.guid)" Win64='$(var.IsX64)'> + <File Id="monoblue.branches.tmpl" Name="branches.tmpl" KeyPath="yes" /> + <File Id="monoblue.bookmarks.tmpl" Name="bookmarks.tmpl" /> + <File Id="monoblue.changelog.tmpl" Name="changelog.tmpl" /> + <File Id="monoblue.changelogentry.tmpl" Name="changelogentry.tmpl" /> + <File Id="monoblue.changeset.tmpl" Name="changeset.tmpl" /> + <File Id="monoblue.error.tmpl" Name="error.tmpl" /> + <File Id="monoblue.fileannotate.tmpl" Name="fileannotate.tmpl" /> + <File Id="monoblue.filecomparison.tmpl" Name="filecomparison.tmpl" /> + <File Id="monoblue.filediff.tmpl" Name="filediff.tmpl" /> + <File Id="monoblue.filelog.tmpl" Name="filelog.tmpl" /> + <File Id="monoblue.filerevision.tmpl" Name="filerevision.tmpl" /> + <File Id="monoblue.footer.tmpl" Name="footer.tmpl" /> + <File Id="monoblue.graph.tmpl" Name="graph.tmpl" /> + <File Id="monoblue.graphentry.tmpl" Name="graphentry.tmpl" /> + <File Id="monoblue.header.tmpl" Name="header.tmpl" /> + <File Id="monoblue.index.tmpl" Name="index.tmpl" /> + <File Id="monoblue.manifest.tmpl" Name="manifest.tmpl" /> + <File Id="monoblue.map" Name="map" /> + <File Id="monoblue.notfound.tmpl" Name="notfound.tmpl" /> + <File Id="monoblue.search.tmpl" Name="search.tmpl" /> + <File Id="monoblue.shortlog.tmpl" Name="shortlog.tmpl" /> + <File Id="monoblue.summary.tmpl" Name="summary.tmpl" /> + <File Id="monoblue.tags.tmpl" Name="tags.tmpl" /> + 
<File Id="monoblue.help.tmpl" Name="help.tmpl" /> + <File Id="monoblue.helptopics.tmpl" Name="helptopics.tmpl" /> + </Component> + </Directory> + + <Directory Id="templates.paperdir" Name="paper"> + <Component Id="templates.paper" Guid="$(var.templates.paper.guid)" Win64='$(var.IsX64)'> + <File Id="paper.branches.tmpl" Name="branches.tmpl" KeyPath="yes" /> + <File Id="paper.bookmarks.tmpl" Name="bookmarks.tmpl" /> + <File Id="paper.changeset.tmpl" Name="changeset.tmpl" /> + <File Id="paper.diffstat.tmpl" Name="diffstat.tmpl" /> + <File Id="paper.error.tmpl" Name="error.tmpl" /> + <File Id="paper.fileannotate.tmpl" Name="fileannotate.tmpl" /> + <File Id="paper.filecomparison.tmpl" Name="filecomparison.tmpl" /> + <File Id="paper.filediff.tmpl" Name="filediff.tmpl" /> + <File Id="paper.filelog.tmpl" Name="filelog.tmpl" /> + <File Id="paper.filelogentry.tmpl" Name="filelogentry.tmpl" /> + <File Id="paper.filerevision.tmpl" Name="filerevision.tmpl" /> + <File Id="paper.footer.tmpl" Name="footer.tmpl" /> + <File Id="paper.graph.tmpl" Name="graph.tmpl" /> + <File Id="paper.graphentry.tmpl" Name="graphentry.tmpl" /> + <File Id="paper.header.tmpl" Name="header.tmpl" /> + <File Id="paper.index.tmpl" Name="index.tmpl" /> + <File Id="paper.manifest.tmpl" Name="manifest.tmpl" /> + <File Id="paper.map" Name="map" /> + <File Id="paper.notfound.tmpl" Name="notfound.tmpl" /> + <File Id="paper.search.tmpl" Name="search.tmpl" /> + <File Id="paper.shortlog.tmpl" Name="shortlog.tmpl" /> + <File Id="paper.shortlogentry.tmpl" Name="shortlogentry.tmpl" /> + <File Id="paper.tags.tmpl" Name="tags.tmpl" /> + <File Id="paper.help.tmpl" Name="help.tmpl" /> + <File Id="paper.helptopics.tmpl" Name="helptopics.tmpl" /> + </Component> + </Directory> + + <Directory Id="templates.rawdir" Name="raw"> + <Component Id="templates.raw" Guid="$(var.templates.raw.guid)" Win64='$(var.IsX64)'> + <File Id="raw.changeset.tmpl" Name="changeset.tmpl" KeyPath="yes" /> + <File Id="raw.error.tmpl" Name="error.tmpl" 
/> + <File Id="raw.fileannotate.tmpl" Name="fileannotate.tmpl" /> + <File Id="raw.filediff.tmpl" Name="filediff.tmpl" /> + <File Id="raw.graph.tmpl" Name="graph.tmpl" /> + <File Id="raw.graphedge.tmpl" Name="graphedge.tmpl" /> + <File Id="raw.graphnode.tmpl" Name="graphnode.tmpl" /> + <File Id="raw.index.tmpl" Name="index.tmpl" /> + <File Id="raw.manifest.tmpl" Name="manifest.tmpl" /> + <File Id="raw.map" Name="map" /> + <File Id="raw.notfound.tmpl" Name="notfound.tmpl" /> + <File Id="raw.search.tmpl" Name="search.tmpl" /> + <File Id="raw.logentry.tmpl" Name="logentry.tmpl" /> + <File Id="raw.changelog.tmpl" Name="changelog.tmpl" /> + </Component> + </Directory> + + <Directory Id="templates.rssdir" Name="rss"> + <Component Id="templates.rss" Guid="$(var.templates.rss.guid)" Win64='$(var.IsX64)'> + <File Id="rss.changelog.tmpl" Name="changelog.tmpl" KeyPath="yes" /> + <File Id="rss.changelogentry.tmpl" Name="changelogentry.tmpl" /> + <File Id="rss.error.tmpl" Name="error.tmpl" /> + <File Id="rss.filelog.tmpl" Name="filelog.tmpl" /> + <File Id="rss.filelogentry.tmpl" Name="filelogentry.tmpl" /> + <File Id="rss.header.tmpl" Name="header.tmpl" /> + <File Id="rss.map" Name="map" /> + <File Id="rss.tagentry.tmpl" Name="tagentry.tmpl" /> + <File Id="rss.tags.tmpl" Name="tags.tmpl" /> + <File Id="rss.bookmarks.tmpl" Name="bookmarks.tmpl" /> + <File Id="rss.bookmarkentry.tmpl" Name="bookmarkentry.tmpl" /> + <File Id="rss.branchentry.tmpl" Name="branchentry.tmpl" /> + <File Id="rss.branches.tmpl" Name="branches.tmpl" /> + </Component> + </Directory> + + <Directory Id="templates.spartandir" Name="spartan"> + <Component Id="templates.spartan" Guid="$(var.templates.spartan.guid)" Win64='$(var.IsX64)'> + <File Id="spartan.branches.tmpl" Name="branches.tmpl" KeyPath="yes" /> + <File Id="spartan.changelog.tmpl" Name="changelog.tmpl" /> + <File Id="spartan.changelogentry.tmpl" Name="changelogentry.tmpl" /> + <File Id="spartan.changeset.tmpl" Name="changeset.tmpl" /> + <File 
Id="spartan.error.tmpl" Name="error.tmpl" /> + <File Id="spartan.fileannotate.tmpl" Name="fileannotate.tmpl" /> + <File Id="spartan.filediff.tmpl" Name="filediff.tmpl" /> + <File Id="spartan.filelog.tmpl" Name="filelog.tmpl" /> + <File Id="spartan.filelogentry.tmpl" Name="filelogentry.tmpl" /> + <File Id="spartan.filerevision.tmpl" Name="filerevision.tmpl" /> + <File Id="spartan.footer.tmpl" Name="footer.tmpl" /> + <File Id="spartan.graph.tmpl" Name="graph.tmpl" /> + <File Id="spartan.graphentry.tmpl" Name="graphentry.tmpl" /> + <File Id="spartan.header.tmpl" Name="header.tmpl" /> + <File Id="spartan.index.tmpl" Name="index.tmpl" /> + <File Id="spartan.manifest.tmpl" Name="manifest.tmpl" /> + <File Id="spartan.map" Name="map" /> + <File Id="spartan.notfound.tmpl" Name="notfound.tmpl" /> + <File Id="spartan.search.tmpl" Name="search.tmpl" /> + <File Id="spartan.shortlog.tmpl" Name="shortlog.tmpl" /> + <File Id="spartan.shortlogentry.tmpl" Name="shortlogentry.tmpl" /> + <File Id="spartan.tags.tmpl" Name="tags.tmpl" /> + </Component> + </Directory> + + <Directory Id="templates.staticdir" Name="static"> + <Component Id="templates.static" Guid="$(var.templates.static.guid)" Win64='$(var.IsX64)'> + <File Id="static.background.png" Name="background.png" KeyPath="yes" /> + <File Id="static.coal.file.png" Name="coal-file.png" /> + <File Id="static.coal.folder.png" Name="coal-folder.png" /> + <File Id="static.followlines.js" Name="followlines.js" /> + <File Id="static.mercurial.js" Name="mercurial.js" /> + <File Id="static.hgicon.png" Name="hgicon.png" /> + <File Id="static.hglogo.png" Name="hglogo.png" /> + <File Id="static.style.coal.css" Name="style-extra-coal.css" /> + <File Id="static.style.gitweb.css" Name="style-gitweb.css" /> + <File Id="static.style.monoblue.css" Name="style-monoblue.css" /> + <File Id="static.style.paper.css" Name="style-paper.css" /> + <File Id="static.style.css" Name="style.css" /> + <File Id="static.feed.icon" Name="feed-icon-14x14.png" /> + 
</Component> + </Directory> + + </Directory> + + </DirectoryRef> + </Fragment> + + </Wix>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/perf-utils/discovery-helper.sh Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,107 @@ +#!/bin/bash +# +# produces two repositories with different common and missing subsets +# +# $ discovery-helper.sh REPO NBHEADS DEPTH +# +# The goal is to produce two repositories with some common part and some +# exclusive part on each side. Given a source repository REPO, it will +# produce two repositories REPO-left and REPO-right. +# +# Each repository will be missing some revisions exclusive to NBHEADS of the +# repo topological heads. These heads and revisions exclusive to them (up to +# DEPTH depth) are stripped. +# +# The "left" repository will use the NBHEADS first heads (sorted by +# description). The "right" one uses the last NBHEADS. +# +# To find out how many topological heads a repo has, use: +# +# $ hg heads -t -T '{rev}\n' | wc -l +# +# Example: +# +# The `pypy-2018-08-01` repository has 192 heads. To produce two repositories +# with 92 common heads and ~50 exclusive heads on each side. 
+# +# $ ./discovery-helper.sh pypy-2018-08-01 50 10 + +set -euo pipefail + +printusage () { + echo "usage: `basename $0` REPO NBHEADS DEPTH [left|right]" >&2 +} + +if [ $# -lt 3 ]; then + printusage + exit 64 +fi + +repo="$1" +shift + +nbheads="$1" +shift + +depth="$1" +shift + +doleft=1 +doright=1 +if [ $# -gt 1 ]; then + printusage + exit 64 +elif [ $# -eq 1 ]; then + if [ "$1" == "left" ]; then + doleft=1 + doright=0 + elif [ "$1" == "right" ]; then + doleft=0 + doright=1 + else + printusage + exit 64 + fi +fi + +leftrepo="${repo}-${nbheads}h-${depth}d-left" +rightrepo="${repo}-${nbheads}h-${depth}d-right" + +left="first(sort(heads(all()), 'desc'), $nbheads)" +right="last(sort(heads(all()), 'desc'), $nbheads)" + +leftsubset="ancestors($left, $depth) and only($left, heads(all() - $left))" +rightsubset="ancestors($right, $depth) and only($right, heads(all() - $right))" + +echo '### creating left/right repositories with missing changesets:' +if [ $doleft -eq 1 ]; then + echo '# left revset:' '"'${leftsubset}'"' +fi +if [ $doright -eq 1 ]; then + echo '# right revset:' '"'${rightsubset}'"' +fi + +buildone() { + side="$1" + dest="$2" + revset="$3" + echo "### building $side repository: $dest" + if [ -e "$dest" ]; then + echo "destination repo already exists: $dest" >&2 + exit 1 + fi + echo '# cloning' + if ! cp --recursive --reflink=always ${repo} ${dest}; then + hg clone --noupdate "${repo}" "${dest}" + fi + echo '# stripping' '"'${revset}'"' + hg -R "${dest}" --config extensions.strip= strip --rev "$revset" --no-backup +} + +if [ $doleft -eq 1 ]; then + buildone left "$leftrepo" "$leftsubset" +fi + +if [ $doright -eq 1 ]; then + buildone right "$rightrepo" "$rightsubset" +fi
--- a/contrib/perf.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/perf.py Tue Mar 19 16:36:59 2019 +0300 @@ -519,7 +519,11 @@ repo.ui.quiet = True matcher = scmutil.match(repo[None]) opts[b'dry_run'] = True - timer(lambda: scmutil.addremove(repo, matcher, b"", opts)) + if b'uipathfn' in getargspec(scmutil.addremove).args: + uipathfn = scmutil.getuipathfn(repo) + timer(lambda: scmutil.addremove(repo, matcher, b"", uipathfn, opts)) + else: + timer(lambda: scmutil.addremove(repo, matcher, b"", opts)) finally: repo.ui.quiet = oldquiet fm.end() @@ -535,13 +539,15 @@ @command(b'perfheads', formatteropts) def perfheads(ui, repo, **opts): + """benchmark the computation of a changelog heads""" opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) cl = repo.changelog + def s(): + clearcaches(cl) def d(): len(cl.headrevs()) - clearcaches(cl) - timer(d) + timer(d, setup=s) fm.end() @command(b'perftags', formatteropts+ @@ -911,9 +917,7 @@ raise error.Abort((b'default repository not configured!'), hint=(b"see 'hg help config.paths'")) dest = path.pushloc or path.loc - branches = (path.branch, opts.get(b'branch') or []) ui.status((b'analysing phase of %s\n') % util.hidepassword(dest)) - revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev')) other = hg.peer(repo, opts, dest) # easier to perform discovery through the operation @@ -1014,18 +1018,44 @@ fm.end() @command(b'perfindex', [ - (b'', b'rev', b'', b'revision to be looked up (default tip)'), + (b'', b'rev', [], b'revision to be looked up (default tip)'), + (b'', b'no-lookup', None, b'do not revision lookup post creation'), ] + formatteropts) def perfindex(ui, repo, **opts): + """benchmark index creation time followed by a lookup + + The default is to look `tip` up. Depending on the index implementation, + the revision looked up can matters. For example, an implementation + scanning the index will have a faster lookup time for `--rev tip` than for + `--rev 0`. 
The number of looked up revisions and their order can also + matters. + + Example of useful set to test: + * tip + * 0 + * -10: + * :10 + * -10: + :10 + * :10: + -10: + * -10000: + * -10000: + 0 + + It is not currently possible to check for lookup of a missing node. For + deeper lookup benchmarking, checkout the `perfnodemap` command.""" import mercurial.revlog opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg - if opts[b'rev'] is None: - n = repo[b"tip"].node() + if opts[b'no_lookup']: + if opts['rev']: + raise error.Abort('--no-lookup and --rev are mutually exclusive') + nodes = [] + elif not opts[b'rev']: + nodes = [repo[b"tip"].node()] else: - rev = scmutil.revsingle(repo, opts[b'rev']) - n = repo[rev].node() + revs = scmutil.revrange(repo, opts[b'rev']) + cl = repo.changelog + nodes = [cl.node(r) for r in revs] unfi = repo.unfiltered() # find the filecache func directly @@ -1036,7 +1066,67 @@ clearchangelog(unfi) def d(): cl = makecl(unfi) - cl.rev(n) + for n in nodes: + cl.rev(n) + timer(d, setup=setup) + fm.end() + +@command(b'perfnodemap', [ + (b'', b'rev', [], b'revision to be looked up (default tip)'), + (b'', b'clear-caches', True, b'clear revlog cache between calls'), + ] + formatteropts) +def perfnodemap(ui, repo, **opts): + """benchmark the time necessary to look up revision from a cold nodemap + + Depending on the implementation, the amount and order of revision we look + up can varies. Example of useful set to test: + * tip + * 0 + * -10: + * :10 + * -10: + :10 + * :10: + -10: + * -10000: + * -10000: + 0 + + The command currently focus on valid binary lookup. Benchmarking for + hexlookup, prefix lookup and missing lookup would also be valuable. 
+ """ + import mercurial.revlog + opts = _byteskwargs(opts) + timer, fm = gettimer(ui, opts) + mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg + + unfi = repo.unfiltered() + clearcaches = opts['clear_caches'] + # find the filecache func directly + # This avoid polluting the benchmark with the filecache logic + makecl = unfi.__class__.changelog.func + if not opts[b'rev']: + raise error.Abort('use --rev to specify revisions to look up') + revs = scmutil.revrange(repo, opts[b'rev']) + cl = repo.changelog + nodes = [cl.node(r) for r in revs] + + # use a list to pass reference to a nodemap from one closure to the next + nodeget = [None] + def setnodeget(): + # probably not necessary, but for good measure + clearchangelog(unfi) + nodeget[0] = makecl(unfi).nodemap.get + + def d(): + get = nodeget[0] + for n in nodes: + get(n) + + setup = None + if clearcaches: + def setup(): + setnodeget() + else: + setnodeget() + d() # prewarm the data structure timer(d, setup=setup) fm.end() @@ -2290,13 +2380,18 @@ view = repo else: view = repo.filtered(filtername) + if util.safehasattr(view._branchcaches, '_per_filter'): + filtered = view._branchcaches._per_filter + else: + # older versions + filtered = view._branchcaches def d(): if clear_revbranch: repo.revbranchcache()._clear() if full: view._branchcaches.clear() else: - view._branchcaches.pop(filtername, None) + filtered.pop(filtername, None) view.branchmap() return d # add filter in smaller subset to bigger subset @@ -2323,10 +2418,15 @@ # add unfiltered allfilters.append(None) - branchcacheread = safeattrsetter(branchmap, b'read') + if util.safehasattr(branchmap.branchcache, 'fromfile'): + branchcacheread = safeattrsetter(branchmap.branchcache, b'fromfile') + branchcacheread.set(classmethod(lambda *args: None)) + else: + # older versions + branchcacheread = safeattrsetter(branchmap, b'read') + branchcacheread.set(lambda *args: None) branchcachewrite = safeattrsetter(branchmap.branchcache, b'write') - 
branchcacheread.set(lambda repo: None) - branchcachewrite.set(lambda bc, repo: None) + branchcachewrite.set(lambda *args: None) try: for name in allfilters: printname = name @@ -2470,9 +2570,15 @@ repo.branchmap() # make sure we have a relevant, up to date branchmap + try: + fromfile = branchmap.branchcache.fromfile + except AttributeError: + # older versions + fromfile = branchmap.read + currentfilter = filter # try once without timer, the filter may not be cached - while branchmap.read(repo) is None: + while fromfile(repo) is None: currentfilter = subsettable.get(currentfilter) if currentfilter is None: raise error.Abort(b'No branchmap cached for %s repo' @@ -2483,7 +2589,7 @@ if clearrevlogs: clearchangelog(repo) def bench(): - branchmap.read(repo) + fromfile(repo) timer(bench, setup=setup) fm.end()
--- a/contrib/python3-whitelist Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/python3-whitelist Tue Mar 19 16:36:59 2019 +0300 @@ -5,6 +5,7 @@ test-absorb-rename.t test-absorb-strip.t test-absorb.t +test-acl.t test-add.t test-addremove-similar.t test-addremove.t @@ -14,6 +15,7 @@ test-ancestor.py test-annotate.py test-annotate.t +test-arbitraryfilectx.t test-archive-symlinks.t test-archive.t test-atomictempfile.py @@ -25,6 +27,7 @@ test-bad-extension.t test-bad-pull.t test-basic.t +test-batching.py test-bdiff.py test-bheads.t test-bisect.t @@ -42,6 +45,7 @@ test-branch-option.t test-branch-tag-confict.t test-branches.t +test-bugzilla.t test-bundle-phases.t test-bundle-r.t test-bundle-type.t @@ -54,14 +58,15 @@ test-bundle2-remote-changegroup.t test-cache-abuse.t test-cappedreader.py +test-casecollision-merge.t test-casecollision.t +test-casefolding.t test-cat.t test-cbor.py test-censor.t test-changelog-exec.t test-check-code.t test-check-commit.t -test-check-config.py test-check-config.t test-check-execute.t test-check-help.t @@ -83,6 +88,7 @@ test-close-head.t test-commandserver.t test-commit-amend.t +test-commit-interactive-curses.t test-commit-interactive.t test-commit-multiple.t test-commit-unresolved.t @@ -111,11 +117,16 @@ test-convert-cvsnt-mergepoints.t test-convert-datesort.t test-convert-filemap.t +test-convert-git.t test-convert-hg-sink.t test-convert-hg-source.t test-convert-hg-startrev.t +test-convert-mtn.t test-convert-splicemap.t +test-convert-svn-sink.t test-convert-tagsbranch-topology.t +test-convert.t +test-copies.t test-copy-move-merge.t test-copy.t test-copytrace-heuristics.t @@ -127,6 +138,7 @@ test-debugindexdot.t test-debugrename.t test-default-push.t +test-demandimport.py test-diff-antipatience.t test-diff-binary-file.t test-diff-change.t @@ -149,6 +161,7 @@ test-dirstate-race.t test-dirstate.t test-dispatch.py +test-dispatch.t test-doctest.py test-double-merge.t test-drawdag.t @@ -159,6 +172,7 @@ test-empty-group.t test-empty.t test-encode.t 
+test-encoding-align.t test-encoding-func.py test-encoding-textwrap.t test-encoding.t @@ -198,6 +212,7 @@ test-extdata.t test-extdiff.t test-extension-timing.t +test-extension.t test-extensions-afterloaded.t test-extensions-wrapfunction.py test-extra-filelog-entry.t @@ -217,6 +232,7 @@ test-fileset.t test-fix-topology.t test-fix.t +test-flagprocessor.t test-flags.t test-fncache.t test-gendoc-da.t @@ -235,6 +251,7 @@ test-generaldelta.t test-getbundle.t test-git-export.t +test-githelp.t test-globalopts.t test-glog-beautifygraph.t test-glog-topological.t @@ -251,17 +268,24 @@ test-hgk.t test-hgrc.t test-hgweb-annotate-whitespace.t +test-hgweb-auth.py test-hgweb-bundle.t +test-hgweb-commands.t test-hgweb-csp.t test-hgweb-descend-empties.t test-hgweb-diffs.t test-hgweb-empty.t test-hgweb-filelog.t +test-hgweb-json.t +test-hgweb-no-path-info.t +test-hgweb-no-request-uri.t test-hgweb-non-interactive.t test-hgweb-raw.t test-hgweb-removed.t +test-hgweb-symrev.t test-hgweb.t test-hgwebdir-paths.py +test-hgwebdir.t test-hgwebdirsym.t test-histedit-arguments.t test-histedit-base.t @@ -278,11 +302,17 @@ test-histedit-obsolete.t test-histedit-outgoing.t test-histedit-templates.t +test-http-api-httpv2.t +test-http-api.t +test-http-bad-server.t test-http-branchmap.t test-http-bundle1.t test-http-clone-r.t test-http-permissions.t +test-http-protocol.t +test-http-proxy.t test-http.t +test-https.t test-hybridencode.py test-i18n.t test-identify.t @@ -290,6 +320,7 @@ test-import-bypass.t test-import-context.t test-import-eol.t +test-import-git.t test-import-merge.t test-import-unknown.t test-import.t @@ -300,6 +331,7 @@ test-infinitepush.t test-inherit-mode.t test-init.t +test-install.t test-issue1089.t test-issue1102.t test-issue1175.t @@ -335,11 +367,14 @@ test-lfs-bundle.t test-lfs-largefiles.t test-lfs-pointer.py +test-lfs-serve.t +test-lfs-test-server.t test-lfs.t test-linelog.py test-linerange.py test-locate.t test-lock-badness.t +test-lock.py test-log-exthook.t 
test-log-linerange.t test-log.t @@ -381,11 +416,14 @@ test-merge9.t test-minifileset.py test-minirst.py +test-missing-capability.t +test-mq-eol.t test-mq-git.t test-mq-guards.t test-mq-header-date.t test-mq-header-from.t test-mq-merge.t +test-mq-missingfiles.t test-mq-pull-from-bundle.t test-mq-qclone-http.t test-mq-qdelete.t @@ -393,6 +431,7 @@ test-mq-qfold.t test-mq-qgoto.t test-mq-qimport-fail-cleanup.t +test-mq-qimport.t test-mq-qnew.t test-mq-qpush-exact.t test-mq-qpush-fail.t @@ -403,6 +442,7 @@ test-mq-qrename.t test-mq-qsave.t test-mq-safety.t +test-mq-subrepo-svn.t test-mq-subrepo.t test-mq-symlinks.t test-mq.t @@ -438,8 +478,10 @@ test-narrow.t test-nested-repo.t test-newbranch.t +test-newcgi.t test-newercgi.t test-nointerrupt.t +test-notify-changegroup.t test-obshistory.t test-obsmarker-template.t test-obsmarkers-effectflag.t @@ -451,11 +493,13 @@ test-obsolete-divergent.t test-obsolete-tag-cache.t test-obsolete.t +test-oldcgi.t test-origbackup-conflict.t test-pager-legacy.t test-pager.t test-parents.t test-parse-date.t +test-parseindex.t test-parseindex2.py test-patch-offset.t test-patch.t @@ -468,12 +512,15 @@ test-pathencode.py test-pending.t test-permissions.t +test-phabricator.t +test-phase-archived.t test-phases-exchange.t test-phases.t test-profile.t test-progress.t test-propertycache.py test-pull-branch.t +test-pull-bundle.t test-pull-http.t test-pull-permission.t test-pull-pull-corruption.t @@ -557,16 +604,23 @@ test-remotefilelog-cacheprocess.t test-remotefilelog-clone-tree.t test-remotefilelog-clone.t +test-remotefilelog-corrupt-cache.t +test-remotefilelog-datapack.py +test-remotefilelog-gc.t test-remotefilelog-gcrepack.t +test-remotefilelog-histpack.py test-remotefilelog-http.t test-remotefilelog-keepset.t +test-remotefilelog-linknodes.t test-remotefilelog-local.t test-remotefilelog-log.t test-remotefilelog-partial-shallow.t test-remotefilelog-permissions.t -test-remotefilelog-permisssions.t test-remotefilelog-prefetch.t 
test-remotefilelog-pull-noshallow.t +test-remotefilelog-push-pull.t +test-remotefilelog-repack-fast.t +test-remotefilelog-repack.t test-remotefilelog-share.t test-remotefilelog-sparse.t test-remotefilelog-tags.t @@ -597,12 +651,15 @@ test-revset-dirstate-parents.t test-revset-legacy-lookup.t test-revset-outgoing.t +test-revset.t +test-revset2.t test-rollback.t test-run-tests.py test-run-tests.t test-rust-ancestor.py test-schemes.t test-serve.t +test-server-view.t test-setdiscovery.t test-share.t test-shelve.t @@ -631,6 +688,7 @@ test-ssh.t test-sshserver.py test-stack.t +test-static-http.t test-status-color.t test-status-inprocess.py test-status-rev.t @@ -642,10 +700,12 @@ test-strip-cross.t test-strip.t test-subrepo-deep-nested-change.t +test-subrepo-git.t test-subrepo-missing.t test-subrepo-paths.t test-subrepo-recursion.t test-subrepo-relative-path.t +test-subrepo-svn.t test-subrepo.t test-symlink-os-yes-fs-no.py test-symlink-placeholder.t @@ -658,7 +718,10 @@ test-template-map.t test-tools.t test-transplant.t +test-treediscovery-legacy.t +test-treediscovery.t test-treemanifest.t +test-trusted.py test-ui-color.py test-ui-config.py test-ui-verbosity.py @@ -669,6 +732,7 @@ test-unionrepo.t test-unrelated-pull.t test-up-local-change.t +test-update-atomic.t test-update-branches.t test-update-dest.t test-update-issue1456.t @@ -685,19 +749,26 @@ test-walkrepo.py test-websub.t test-win32text.t +test-wireproto-caching.t test-wireproto-clientreactor.py test-wireproto-command-branchmap.t +test-wireproto-command-capabilities.t test-wireproto-command-changesetdata.t test-wireproto-command-filedata.t test-wireproto-command-filesdata.t test-wireproto-command-heads.t +test-wireproto-command-known.t test-wireproto-command-listkeys.t test-wireproto-command-lookup.t test-wireproto-command-manifestdata.t test-wireproto-command-pushkey.t test-wireproto-command-rawstorefiledata.t +test-wireproto-content-redirects.t +test-wireproto-exchangev2.t test-wireproto-framing.py 
test-wireproto-serverreactor.py test-wireproto.py +test-wireproto.t +test-worker.t test-wsgirequest.py test-xdg.t
--- a/contrib/relnotes Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/relnotes Tue Mar 19 16:36:59 2019 +0300 @@ -14,6 +14,7 @@ r"\(issue": 100, r"\(BC\)": 100, r"\(API\)": 100, + r"\(SEC\)": 100, # core commands, bump up r"(commit|files|log|pull|push|patch|status|tag|summary)(|s|es):": 20, r"(annotate|alias|branch|bookmark|clone|graft|import|verify).*:": 20, @@ -21,6 +22,7 @@ r"(mq|shelve|rebase):": 20, # newsy r": deprecate": 20, + r": new.*(extension|flag|module)": 10, r"( ability|command|feature|option|support)": 10, # experimental r"hg-experimental": 20, @@ -29,22 +31,23 @@ # bug-like? r"(fix|don't break|improve)": 7, r"(not|n't|avoid|fix|prevent).*crash": 10, + r"vulnerab": 10, # boring stuff, bump down r"^contrib": -5, r"debug": -5, r"help": -5, + r"minor": -5, r"(doc|metavar|bundle2|obsolete|obsmarker|rpm|setup|debug\S+:)": -15, r"(check-code|check-commit|check-config|import-checker)": -20, r"(flake8|lintian|pyflakes|pylint)": -20, # cleanups and refactoring - r"(cleanup|white ?space|spelling|quoting)": -20, + r"(clean ?up|white ?space|spelling|quoting)": -20, r"(flatten|dedent|indent|nesting|unnest)": -20, r"(typo|hint|note|comment|TODO|FIXME)": -20, r"(style:|convention|one-?liner)": -20, - r"_": -10, r"(argument|absolute_import|attribute|assignment|mutable)": -15, r"(scope|True|False)": -10, - r"(unused|useless|unnecessary|superfluous|duplicate|deprecated)": -10, + r"(unused|useless|unnecessar|superfluous|duplicate|deprecated)": -10, r"(redundant|pointless|confusing|uninitialized|meaningless|dead)": -10, r": (drop|remove|delete|rip out)": -10, r": (inherit|rename|simplify|naming|inline)": -10, @@ -54,9 +57,12 @@ r": (move|extract) .* (to|into|from|out of)": -20, r": implement ": -5, r": use .* implementation": -20, + r": use .* instead of": -20, + # code + r"_": -10, + r"__": -5, + r"\(\)": -5, r"\S\S\S+\.\S\S\S\S+": -5, - r": use .* instead of": -20, - r"__": -5, # dumb keywords r"\S+/\S+:": -10, r"\S+\.\S+:": -10, @@ -92,6 +98,15 @@ (r"shelve|unshelve", 
"extensions"), ] +def wikify(desc): + desc = desc.replace("(issue", "(Bts:issue") + desc = re.sub(r"\b([0-9a-f]{12})\b", r"Cset:\1", desc) + # stop ParseError from being recognized as a (nonexistent) wiki page + desc = re.sub(r" ([A-Z][a-z]+[A-Z][a-z]+)\b", r" !\1", desc) + # prevent wiki markup of magic methods + desc = re.sub(r"\b(\S*__\S*)\b", r"`\1`", desc) + return desc + def main(): desc = "example: %(prog)s 4.7.2 --stoprev 4.8rc0" ap = argparse.ArgumentParser(description=desc) @@ -148,10 +163,8 @@ if re.search(rule, desc): score += val - desc = desc.replace("(issue", "(Bts:issue") - if score >= cutoff: - commits.append(desc) + commits.append(wikify(desc)) # Group unflagged notes. groups = {} bcs = []
--- a/contrib/revsetbenchmarks.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/revsetbenchmarks.py Tue Mar 19 16:36:59 2019 +0300 @@ -71,8 +71,8 @@ print(exc.output, file=sys.stderr) return None -outputre = re.compile(r'! wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) ' - 'sys (\d+.\d+) \(best of (\d+)\)') +outputre = re.compile(br'! wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) ' + br'sys (\d+.\d+) \(best of (\d+)\)') def parseoutput(output): """parse a textual output into a dict
--- a/contrib/showstack.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/showstack.py Tue Mar 19 16:36:59 2019 +0300 @@ -1,7 +1,7 @@ # showstack.py - extension to dump a Python stack trace on signal # # binds to both SIGQUIT (Ctrl-\) and SIGINFO (Ctrl-T on BSDs) -"""dump stack trace when receiving SIGQUIT (Ctrl-\) and SIGINFO (Ctrl-T on BSDs) +r"""dump stack trace when receiving SIGQUIT (Ctrl-\) or SIGINFO (Ctrl-T on BSDs) """ from __future__ import absolute_import, print_function
--- a/contrib/synthrepo.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/synthrepo.py Tue Mar 19 16:36:59 2019 +0300 @@ -349,7 +349,7 @@ # to the modeled directory structure. initcount = int(opts['initfiles']) if initcount and initdirs: - pctx = repo[None].parents()[0] + pctx = repo['.'] dirs = set(pctx.dirs()) files = {} @@ -450,7 +450,6 @@ path = fctx.path() changes[path] = '\n'.join(lines) + '\n' for __ in xrange(pick(filesremoved)): - path = random.choice(mfk) for __ in xrange(10): path = random.choice(mfk) if path not in changes:
--- a/contrib/testparseutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/testparseutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -265,7 +265,7 @@ class fileheredocmatcher(embeddedmatcher): """Detect "cat > FILE << LIMIT" style embedded code - >>> matcher = fileheredocmatcher(b'heredoc .py file', br'[^<]+\.py') + >>> matcher = fileheredocmatcher(b'heredoc .py file', br'[^<]+\\.py') >>> b2s(matcher.startsat(b' $ cat > file.py << EOF\\n')) ('file.py', ' > EOF\\n') >>> b2s(matcher.startsat(b' $ cat >>file.py <<EOF\\n'))
--- a/contrib/win32/hgwebdir_wsgi.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/win32/hgwebdir_wsgi.py Tue Mar 19 16:36:59 2019 +0300 @@ -6,7 +6,6 @@ # # Requirements: # - Python 2.7, preferably 64 bit -# - PyWin32 for Python 2.7 (32 or 64 bit) # - Mercurial installed from source (python setup.py install) or download the # python module installer from https://www.mercurial-scm.org/wiki/Download # - IIS 7 or newer
--- a/contrib/win32/mercurial.iss Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,120 +0,0 @@ -; Script generated by the Inno Setup Script Wizard. -; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES! - -#ifndef VERSION -#define FileHandle -#define FileLine -#define VERSION = "unknown" -#if FileHandle = FileOpen(SourcePath + "\..\..\mercurial\__version__.py") - #expr FileLine = FileRead(FileHandle) - #expr FileLine = FileRead(FileHandle) - #define VERSION = Copy(FileLine, Pos('"', FileLine)+1, Len(FileLine)-Pos('"', FileLine)-1) -#endif -#if FileHandle - #expr FileClose(FileHandle) -#endif -#pragma message "Detected Version: " + VERSION -#endif - -#ifndef ARCH -#define ARCH = "x86" -#endif - -[Setup] -AppCopyright=Copyright 2005-2019 Matt Mackall and others -AppName=Mercurial -AppVersion={#VERSION} -#if ARCH == "x64" -AppVerName=Mercurial {#VERSION} (64-bit) -OutputBaseFilename=Mercurial-{#VERSION}-x64 -ArchitecturesAllowed=x64 -ArchitecturesInstallIn64BitMode=x64 -#else -AppVerName=Mercurial {#VERSION} -OutputBaseFilename=Mercurial-{#VERSION} -#endif -InfoAfterFile=contrib/win32/postinstall.txt -LicenseFile=COPYING -ShowLanguageDialog=yes -AppPublisher=Matt Mackall and others -AppPublisherURL=https://mercurial-scm.org/ -AppSupportURL=https://mercurial-scm.org/ -AppUpdatesURL=https://mercurial-scm.org/ -AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3} -AppContact=mercurial@mercurial-scm.org -DefaultDirName={pf}\Mercurial -SourceDir=..\.. 
-VersionInfoDescription=Mercurial distributed SCM (version {#VERSION}) -VersionInfoCopyright=Copyright 2005-2019 Matt Mackall and others -VersionInfoCompany=Matt Mackall and others -InternalCompressLevel=max -SolidCompression=true -SetupIconFile=contrib\win32\mercurial.ico -AllowNoIcons=true -DefaultGroupName=Mercurial -PrivilegesRequired=none - -[Files] -Source: contrib\mercurial.el; DestDir: {app}/Contrib -Source: contrib\vim\*.*; DestDir: {app}/Contrib/Vim -Source: contrib\zsh_completion; DestDir: {app}/Contrib -Source: contrib\bash_completion; DestDir: {app}/Contrib -Source: contrib\tcsh_completion; DestDir: {app}/Contrib -Source: contrib\tcsh_completion_build.sh; DestDir: {app}/Contrib -Source: contrib\hgk; DestDir: {app}/Contrib; DestName: hgk.tcl -Source: contrib\xml.rnc; DestDir: {app}/Contrib -Source: contrib\mercurial.el; DestDir: {app}/Contrib -Source: contrib\mq.el; DestDir: {app}/Contrib -Source: contrib\hgweb.fcgi; DestDir: {app}/Contrib -Source: contrib\hgweb.wsgi; DestDir: {app}/Contrib -Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme -Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt -Source: dist\hg.exe; DestDir: {app}; AfterInstall: Touch('{app}\hg.exe.local') -#if ARCH == "x64" -Source: dist\lib\*.dll; Destdir: {app}\lib -Source: dist\lib\*.pyd; Destdir: {app}\lib -#else -Source: dist\w9xpopen.exe; DestDir: {app} -#endif -Source: dist\python*.dll; Destdir: {app}; Flags: skipifsourcedoesntexist -Source: dist\msvc*.dll; DestDir: {app}; Flags: skipifsourcedoesntexist -Source: dist\Microsoft.VC*.CRT.manifest; DestDir: {app}; Flags: skipifsourcedoesntexist -Source: dist\lib\library.zip; DestDir: {app}\lib -Source: dist\add_path.exe; DestDir: {app} -Source: doc\*.html; DestDir: {app}\Docs -Source: doc\style.css; DestDir: {app}\Docs -Source: mercurial\help\*.txt; DestDir: {app}\help -Source: mercurial\help\internals\*.txt; DestDir: {app}\help\internals -Source: mercurial\default.d\*.rc; DestDir: 
{app}\default.d -Source: mercurial\locale\*.*; DestDir: {app}\locale; Flags: recursesubdirs createallsubdirs skipifsourcedoesntexist -Source: mercurial\templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs -Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt -Source: COPYING; DestDir: {app}; DestName: Copying.txt - -[INI] -Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: https://mercurial-scm.org/ -Filename: {app}\default.d\editor.rc; Section: ui; Key: editor; String: notepad - -[UninstallDelete] -Type: files; Name: {app}\Mercurial.url -Type: filesandordirs; Name: {app}\default.d -Type: files; Name: "{app}\hg.exe.local" - -[Icons] -Name: {group}\Uninstall Mercurial; Filename: {uninstallexe} -Name: {group}\Mercurial Command Reference; Filename: {app}\Docs\hg.1.html -Name: {group}\Mercurial Configuration Files; Filename: {app}\Docs\hgrc.5.html -Name: {group}\Mercurial Ignore Files; Filename: {app}\Docs\hgignore.5.html -Name: {group}\Mercurial Web Site; Filename: {app}\Mercurial.url - -[Run] -Filename: "{app}\add_path.exe"; Parameters: "{app}"; Flags: postinstall; Description: "Add the installation path to the search path" - -[UninstallRun] -Filename: "{app}\add_path.exe"; Parameters: "/del {app}" - -[Code] -procedure Touch(fn: String); -begin - SaveStringToFile(ExpandConstant(fn), '', False); -end;
--- a/contrib/win32/win32-build.txt Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,130 +0,0 @@ -The standalone Windows installer for Mercurial is built in a somewhat -jury-rigged fashion. - -It has the following prerequisites. Ensure to take the packages -matching the mercurial version you want to build (32-bit or 64-bit). - - Python 2.6 for Windows - http://www.python.org/download/releases/ - - A compiler: - either MinGW - http://www.mingw.org/ - or Microsoft Visual C++ 2008 SP1 Express Edition - http://www.microsoft.com/express/Downloads/Download-2008.aspx - - Python for Windows Extensions - http://sourceforge.net/projects/pywin32/ - - mfc71.dll (just download, don't install; not needed for Python 2.6) - http://starship.python.net/crew/mhammond/win32/ - - Visual C++ 2008 redistributable package (needed for >= Python 2.6 or if you compile with MSVC) - for 32-bit: - http://www.microsoft.com/downloads/details.aspx?FamilyID=9b2da534-3e03-4391-8a4d-074b9f2bc1bf - for 64-bit: - http://www.microsoft.com/downloads/details.aspx?familyid=bd2a6171-e2d6-4230-b809-9a8d7548c1b6 - - The py2exe distutils extension - http://sourceforge.net/projects/py2exe/ - - GnuWin32 gettext utility (if you want to build translations) - http://gnuwin32.sourceforge.net/packages/gettext.htm - - Inno Setup - http://www.jrsoftware.org/isdl.php#qsp - - Get and install ispack-5.3.10.exe or later (includes Inno Setup Processor), - which is necessary to package Mercurial. - - ISTool - optional - http://www.istool.org/default.aspx/ - - add_path (you need only add_path.exe in the zip file) - http://www.barisione.org/apps.html#add_path - - Docutils - http://docutils.sourceforge.net/ - - CA Certs file - http://curl.haxx.se/ca/cacert.pem - -And, of course, Mercurial itself. - -Once you have all this installed and built, clone a copy of the -Mercurial repository you want to package, and name the repo -C:\hg\hg-release. 
- -In a shell, build a standalone copy of the hg.exe program. - -Building instructions for MinGW: - python setup.py build -c mingw32 - python setup.py py2exe -b 2 -Note: the previously suggested combined command of "python setup.py build -c -mingw32 py2exe -b 2" doesn't work correctly anymore as it doesn't include the -extensions in the mercurial subdirectory. -If you want to create a file named setup.cfg with the contents: -[build] -compiler=mingw32 -you can skip the first build step. - -Building instructions with MSVC 2008 Express Edition: - for 32-bit: - "C:\Program Files\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" x86 - python setup.py py2exe -b 2 - for 64-bit: - "C:\Program Files\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" x86_amd64 - python setup.py py2exe -b 3 - -Copy add_path.exe and cacert.pem files into the dist directory that just got created. - -If you are using Python 2.6 or later, or if you are using MSVC 2008 to compile -mercurial, you must include the C runtime libraries in the installer. To do so, -install the Visual C++ 2008 redistributable package. Then in your windows\winsxs -folder, locate the folder containing the dlls version 9.0.21022.8. -For x86, it should be named like x86_Microsoft.VC90.CRT_(...)_9.0.21022.8(...). -For x64, it should be named like amd64_Microsoft.VC90.CRT_(...)_9.0.21022.8(...). -Copy the files named msvcm90.dll, msvcp90.dll and msvcr90.dll into the dist -directory. -Then in the windows\winsxs\manifests folder, locate the corresponding manifest -file (x86_Microsoft.VC90.CRT_(...)_9.0.21022.8(...).manifest for x86, -amd64_Microsoft.VC90.CRT_(...)_9.0.21022.8(...).manifest for x64), copy it in the -dist directory and rename it to Microsoft.VC90.CRT.manifest. - -Before building the installer, you have to build Mercurial HTML documentation -(or fix mercurial.iss to not reference the doc directory): - - cd doc - mingw32-make html - cd .. 
- -If you use ISTool, you open the C:\hg\hg-release\contrib\win32\mercurial.iss -file and type Ctrl-F9 to compile the installer file. - -Otherwise you run the Inno Setup compiler. Assuming it's in the path -you should execute: - - iscc contrib\win32\mercurial.iss /dVERSION=foo - -Where 'foo' is the version number you would like to see in the -'Add/Remove Applications' tool. The installer will be placed into -a directory named Output/ at the root of your repository. -If the /dVERSION=foo parameter is not given in the command line, the -installer will retrieve the version information from the __version__.py file. - -If you want to build an installer for a 64-bit mercurial, add /dARCH=x64 to -your command line: - iscc contrib\win32\mercurial.iss /dARCH=x64 - -To automate the steps above you may want to create a batchfile based on the -following (MinGW build chain): - - echo [build] > setup.cfg - echo compiler=mingw32 >> setup.cfg - python setup.py py2exe -b 2 - cd doc - mingw32-make html - cd .. - iscc contrib\win32\mercurial.iss /dVERSION=snapshot - -and run it from the root of the hg repository (c:\hg\hg-release).
--- a/contrib/wix/README.txt Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,31 +0,0 @@ -WiX installer source files -========================== - -The files in this folder are used by the thg-winbuild [1] package -building architecture to create a Mercurial MSI installer. These files -are versioned within the Mercurial source tree because the WXS files -must kept up to date with distribution changes within their branch. In -other words, the default branch WXS files are expected to diverge from -the stable branch WXS files. Storing them within the same repository is -the only sane way to keep the source tree and the installer in sync. - -The MSI installer builder uses only the mercurial.ini file from the -contrib/win32 folder, the contents of which have been historically used -to create an InnoSetup based installer. The rest of the files there are -ignored. - -The MSI packages built by thg-winbuild require elevated (admin) -privileges to be installed due to the installation of MSVC CRT libraries -under the C:\WINDOWS\WinSxS folder. Thus the InnoSetup installers may -still be useful to some users. - -To build your own MSI packages, clone the thg-winbuild [1] repository -and follow the README.txt [2] instructions closely. There are fewer -prerequisites for a WiX [3] installer than an InnoSetup installer, but -they are more specific. - -Direct questions or comments to Steve Borho <steve@borho.org> - -[1] http://bitbucket.org/tortoisehg/thg-winbuild -[2] http://bitbucket.org/tortoisehg/thg-winbuild/src/tip/README.txt -[3] http://wix.sourceforge.net/
--- a/contrib/wix/contrib.wxs Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,43 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> - - <?include guids.wxi ?> - <?include defines.wxi ?> - - <Fragment> - <ComponentGroup Id="contribFolder"> - <ComponentRef Id="contrib" /> - <ComponentRef Id="contrib.vim" /> - </ComponentGroup> - </Fragment> - - <Fragment> - <DirectoryRef Id="INSTALLDIR"> - <Directory Id="contribdir" Name="contrib" FileSource="$(var.SourceDir)"> - <Component Id="contrib" Guid="$(var.contrib.guid)" Win64='$(var.IsX64)'> - <File Name="bash_completion" KeyPath="yes" /> - <File Name="hgk" /> - <File Name="hgweb.fcgi" /> - <File Name="hgweb.wsgi" /> - <File Name="logo-droplets.svg" /> - <File Name="mercurial.el" /> - <File Name="tcsh_completion" /> - <File Name="tcsh_completion_build.sh" /> - <File Name="xml.rnc" /> - <File Name="zsh_completion" /> - </Component> - <Directory Id="vimdir" Name="vim"> - <Component Id="contrib.vim" Guid="$(var.contrib.vim.guid)" Win64='$(var.IsX64)'> - <File Name="hg-menu.vim" KeyPath="yes" /> - <File Name="HGAnnotate.vim" /> - <File Name="hgcommand.vim" /> - <File Name="patchreview.txt" /> - <File Name="patchreview.vim" /> - <File Name="hgtest.vim" /> - </Component> - </Directory> - </Directory> - </DirectoryRef> - </Fragment> - -</Wix>
--- a/contrib/wix/defines.wxi Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -<Include> - - <?if $(var.Platform) = "x64" ?> - <?define IsX64 = yes ?> - <?else?> - <?define IsX64 = no ?> - <?endif?> - -</Include>
--- a/contrib/wix/dist.wxs Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,37 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> - - <?include guids.wxi ?> - <?include defines.wxi ?> - - <Fragment> - <DirectoryRef Id="INSTALLDIR" FileSource="$(var.SourceDir)"> - <Component Id="distOutput" Guid="$(var.dist.guid)" Win64='$(var.IsX64)'> - <File Name="python27.dll" KeyPath="yes" /> - </Component> - <Directory Id="libdir" Name="lib" FileSource="$(var.SourceDir)/lib"> - <Component Id="libOutput" Guid="$(var.lib.guid)" Win64='$(var.IsX64)'> - <File Name="library.zip" KeyPath="yes" /> - <File Name="mercurial.cext.base85.pyd" /> - <File Name="mercurial.cext.bdiff.pyd" /> - <File Name="mercurial.cext.mpatch.pyd" /> - <File Name="mercurial.cext.osutil.pyd" /> - <File Name="mercurial.cext.parsers.pyd" /> - <File Name="mercurial.zstd.pyd" /> - <File Name="hgext.fsmonitor.pywatchman.bser.pyd" /> - <File Name="pyexpat.pyd" /> - <File Name="bz2.pyd" /> - <File Name="select.pyd" /> - <File Name="unicodedata.pyd" /> - <File Name="_ctypes.pyd" /> - <File Name="_elementtree.pyd" /> - <File Name="_testcapi.pyd" /> - <File Name="_hashlib.pyd" /> - <File Name="_socket.pyd" /> - <File Name="_ssl.pyd" /> - </Component> - </Directory> - </DirectoryRef> - </Fragment> - -</Wix>
--- a/contrib/wix/doc.wxs Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,50 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> - - <?include guids.wxi ?> - <?include defines.wxi ?> - - <Fragment> - <ComponentGroup Id="docFolder"> - <ComponentRef Id="doc.hg.1.html" /> - <ComponentRef Id="doc.hgignore.5.html" /> - <ComponentRef Id="doc.hgrc.5.html" /> - <ComponentRef Id="doc.style.css" /> - </ComponentGroup> - </Fragment> - - <Fragment> - <DirectoryRef Id="INSTALLDIR"> - <Directory Id="docdir" Name="doc" FileSource="$(var.SourceDir)"> - <Component Id="doc.hg.1.html" Guid="$(var.doc.hg.1.html.guid)" Win64='$(var.IsX64)'> - <File Name="hg.1.html" KeyPath="yes"> - <Shortcut Id="hg1StartMenu" Directory="ProgramMenuDir" - Name="Mercurial Command Reference" - Icon="hgIcon.ico" IconIndex="0" Advertise="yes" - /> - </File> - </Component> - <Component Id="doc.hgignore.5.html" Guid="$(var.doc.hgignore.5.html.guid)" Win64='$(var.IsX64)'> - <File Name="hgignore.5.html" KeyPath="yes"> - <Shortcut Id="hgignore5StartMenu" Directory="ProgramMenuDir" - Name="Mercurial Ignore Files" - Icon="hgIcon.ico" IconIndex="0" Advertise="yes" - /> - </File> - </Component> - <Component Id="doc.hgrc.5.html" Guid="$(var.doc.hgrc.5.html)" Win64='$(var.IsX64)'> - <File Name="hgrc.5.html" KeyPath="yes"> - <Shortcut Id="hgrc5StartMenu" Directory="ProgramMenuDir" - Name="Mercurial Configuration Files" - Icon="hgIcon.ico" IconIndex="0" Advertise="yes" - /> - </File> - </Component> - <Component Id="doc.style.css" Guid="$(var.doc.style.css)" Win64='$(var.IsX64)'> - <File Name="style.css" KeyPath="yes" /> - </Component> - </Directory> - </DirectoryRef> - </Fragment> - -</Wix>
--- a/contrib/wix/guids.wxi Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,52 +0,0 @@ -<Include> - <!-- These are component GUIDs used for Mercurial installers. - YOU MUST CHANGE ALL GUIDs below when copying this file - and replace 'Mercurial' in this notice with the name of - your project. Component GUIDs have global namespace! --> - - <!-- contrib.wxs --> - <?define contrib.guid = {4E11FFC2-E2F7-482A-8460-9394B5489F02} ?> - <?define contrib.vim.guid = {BB04903A-652D-4C4F-9590-2BD07A2304F2} ?> - - <!-- dist.wxs --> - <?define dist.guid = {CE405FE6-CD1E-4873-9C9A-7683AE5A3D90} ?> - <?define lib.guid = {877633b5-0b7e-4b46-8f1c-224a61733297} ?> - - <!-- doc.wxs --> - <?define doc.hg.1.html.guid = {AAAA3FDA-EDC5-4220-B59D-D342722358A2} ?> - <?define doc.hgignore.5.html.guid = {AA9118C4-F3A0-4429-A5F4-5A1906B2D67F} ?> - <?define doc.hgrc.5.html = {E0CEA1EB-FA01-408c-844B-EE5965165BAE} ?> - <?define doc.style.css = {172F8262-98E0-4711-BD39-4DAE0D77EF05} ?> - - <!-- help.wxs --> - <?define help.root.guid = {9FA957DB-6DFE-44f2-AD03-293B2791CF17} ?> - <?define help.internals.guid = {2DD7669D-0DB8-4C39-9806-78E6475E7ACC} ?> - - <!-- i18n.wxs --> - <?define i18nFolder.guid = {1BF8026D-CF7C-4174-AEE6-D6B7BF119248} ?> - - <!-- templates.wxs --> - <?define templates.root.guid = {437FD55C-7756-4EA0-87E5-FDBE75DC8595} ?> - <?define templates.atom.guid = {D30E14A5-8AF0-4268-8B00-00BEE9E09E39} ?> - <?define templates.coal.guid = {B63CCAAB-4EAF-43b4-901E-4BD13F5B78FC} ?> - <?define templates.gitweb.guid = {827334AF-1EFD-421B-962C-5660A068F612} ?> - <?define templates.json.guid = {F535BE7A-EC34-46E0-B9BE-013F3DBAFB19} ?> - <?define templates.monoblue.guid = {8060A1E4-BD4C-453E-92CB-9536DC44A9E3} ?> - <?define templates.paper.guid = {61AB1DE9-645F-46ED-8AF8-0CF02267FFBB} ?> - <?define templates.raw.guid = {834DF8D7-9784-43A6-851D-A96CE1B3575B} ?> - <?define templates.rss.guid = {9338FA09-E128-4B1C-B723-1142DBD09E14} ?> - <?define 
templates.spartan.guid = {80222625-FA8F-44b1-86CE-1781EF375D09} ?> - <?define templates.static.guid = {6B3D7C24-98DA-4B67-9F18-35F77357B0B4} ?> - - <!-- mercurial.wxs --> - <?define ProductUpgradeCode = {A1CC6134-E945-4399-BE36-EB0017FDF7CF} ?> - - <?define ComponentMainExecutableGUID = {D102B8FA-059B-4ACC-9FA3-8C78C3B58EEF} ?> - - <?define ReadMe.guid = {56A8E372-991D-4DCA-B91D-93D775974CF5} ?> - <?define COPYING.guid = {B7801DBA-1C49-4BF4-91AD-33C65F5C7895} ?> - <?define mercurial.rc.guid = {1D5FAEEE-7E6E-43B1-9F7F-802714316B15} ?> - <?define mergetools.rc.guid = {E8A1DC29-FF40-4B5F-BD12-80B9F7BF0CCD} ?> - <?define ProgramMenuDir.guid = {D5A63320-1238-489B-B68B-CF053E9577CA} ?> - -</Include>
--- a/contrib/wix/help.wxs Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,64 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> - - <?include guids.wxi ?> - <?include defines.wxi ?> - - <Fragment> - <ComponentGroup Id='helpFolder'> - <ComponentRef Id='help.root' /> - <ComponentRef Id='help.internals' /> - </ComponentGroup> - </Fragment> - - <Fragment> - <DirectoryRef Id="INSTALLDIR"> - <Directory Id="helpdir" Name="help" FileSource="$(var.SourceDir)"> - <Component Id="help.root" Guid="$(var.help.root.guid)" Win64='$(var.IsX64)'> - <File Name="bundlespec.txt" /> - <File Name="color.txt" /> - <File Name="config.txt" KeyPath="yes" /> - <File Name="dates.txt" /> - <File Name="deprecated.txt" /> - <File Name="diffs.txt" /> - <File Name="environment.txt" /> - <File Name="extensions.txt" /> - <File Name="filesets.txt" /> - <File Name="flags.txt" /> - <File Name="glossary.txt" /> - <File Name="hgignore.txt" /> - <File Name="hgweb.txt" /> - <File Name="merge-tools.txt" /> - <File Name="pager.txt" /> - <File Name="patterns.txt" /> - <File Name="phases.txt" /> - <File Name="revisions.txt" /> - <File Name="scripting.txt" /> - <File Name="subrepos.txt" /> - <File Name="templates.txt" /> - <File Name="urls.txt" /> - </Component> - - <Directory Id="help.internaldir" Name="internals"> - <Component Id="help.internals" Guid="$(var.help.internals.guid)" Win64='$(var.IsX64)'> - <File Id="internals.bundle2.txt" Name="bundle2.txt" /> - <File Id="internals.bundles.txt" Name="bundles.txt" KeyPath="yes" /> - <File Id="internals.cbor.txt" Name="cbor.txt" /> - <File Id="internals.censor.txt" Name="censor.txt" /> - <File Id="internals.changegroups.txt" Name="changegroups.txt" /> - <File Id="internals.config.txt" Name="config.txt" /> - <File Id="internals.extensions.txt" Name="extensions.txt" /> - <File Id="internals.linelog.txt" Name="linelog.txt" /> - <File Id="internals.requirements.txt" 
Name="requirements.txt" /> - <File Id="internals.revlogs.txt" Name="revlogs.txt" /> - <File Id="internals.wireprotocol.txt" Name="wireprotocol.txt" /> - <File Id="internals.wireprotocolrpc.txt" Name="wireprotocolrpc.txt" /> - <File Id="internals.wireprotocolv2.txt" Name="wireprotocolv2.txt" /> - </Component> - </Directory> - - </Directory> - </DirectoryRef> - </Fragment> - -</Wix>
--- a/contrib/wix/hg.cmd Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -@echo off -rem launch hg.exe from parent folder -"%~dp0\..\hg.exe" %*
--- a/contrib/wix/i18n.wxs Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,26 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> - - <?include guids.wxi ?> - <?include defines.wxi ?> - - <?define hg_po_langs = - da;de;el;fr;it;ja;pt_BR;ro;ru;sv;zh_CN;zh_TW - ?> - - <Fragment> - <DirectoryRef Id="INSTALLDIR"> - <Directory Id="i18ndir" Name="i18n" FileSource="$(var.SourceDir)"> - <Component Id="i18nFolder" Guid="$(var.i18nFolder.guid)" Win64='$(var.IsX64)'> - <File Name="hggettext" KeyPath="yes" /> - <?foreach LANG in $(var.hg_po_langs) ?> - <File Id="hg.$(var.LANG).po" - Name="$(var.LANG).po" - /> - <?endforeach?> - </Component> - </Directory> - </DirectoryRef> - </Fragment> - -</Wix>
--- a/contrib/wix/locale.wxs Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,34 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> - - <?include defines.wxi ?> - - <?define hglocales = - da;de;el;fr;it;ja;pt_BR;ro;ru;sv;zh_CN;zh_TW - ?> - - <Fragment> - <ComponentGroup Id="localeFolder"> - <?foreach LOC in $(var.hglocales) ?> - <ComponentRef Id="hg.locale.$(var.LOC)"/> - <?endforeach?> - </ComponentGroup> - </Fragment> - - <Fragment> - <DirectoryRef Id="INSTALLDIR"> - <Directory Id="localedir" Name="locale" FileSource="$(var.SourceDir)"> - <?foreach LOC in $(var.hglocales) ?> - <Directory Id="hg.locale.$(var.LOC)" Name="$(var.LOC)"> - <Directory Id="hg.locale.$(var.LOC).LC_MESSAGES" Name="LC_MESSAGES"> - <Component Id="hg.locale.$(var.LOC)" Guid="*" Win64='$(var.IsX64)'> - <File Id="hg.mo.$(var.LOC)" Name="hg.mo" KeyPath="yes" /> - </Component> - </Directory> - </Directory> - <?endforeach?> - </Directory> - </DirectoryRef> - </Fragment> - -</Wix>
--- a/contrib/wix/mercurial.wxs Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,162 +0,0 @@ -<?xml version='1.0' encoding='windows-1252'?> -<Wix xmlns='http://schemas.microsoft.com/wix/2006/wi'> - - <!-- Copyright 2010 Steve Borho <steve@borho.org> - - This software may be used and distributed according to the terms of the - GNU General Public License version 2 or any later version. --> - - <?include guids.wxi ?> - <?include defines.wxi ?> - - <?if $(var.Platform) = "x64" ?> - <?define PFolder = ProgramFiles64Folder ?> - <?else?> - <?define PFolder = ProgramFilesFolder ?> - <?endif?> - - <Product Id='*' - Name='Mercurial $(var.Version) ($(var.Platform))' - UpgradeCode='$(var.ProductUpgradeCode)' - Language='1033' Codepage='1252' Version='$(var.Version)' - Manufacturer='Matt Mackall and others'> - - <Package Id='*' - Keywords='Installer' - Description="Mercurial distributed SCM (version $(var.Version))" - Comments='$(var.Comments)' - Platform='$(var.Platform)' - Manufacturer='Matt Mackall and others' - InstallerVersion='300' Languages='1033' Compressed='yes' SummaryCodepage='1252' /> - - <Media Id='1' Cabinet='mercurial.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1' - CompressionLevel='high' /> - <Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" /> - - <Condition Message='Mercurial MSI installers require Windows XP or higher'> - VersionNT >= 501 - </Condition> - - <Property Id="INSTALLDIR"> - <ComponentSearch Id='SearchForMainExecutableComponent' - Guid='$(var.ComponentMainExecutableGUID)' /> - </Property> - - <!--Property Id='ARPCOMMENTS'>any comments</Property--> - <Property Id='ARPCONTACT'>mercurial@mercurial-scm.org</Property> - <Property Id='ARPHELPLINK'>https://mercurial-scm.org/wiki/</Property> - <Property Id='ARPURLINFOABOUT'>https://mercurial-scm.org/about/</Property> - <Property Id='ARPURLUPDATEINFO'>https://mercurial-scm.org/downloads/</Property> - <Property 
Id='ARPHELPTELEPHONE'>https://mercurial-scm.org/wiki/Support</Property> - <Property Id='ARPPRODUCTICON'>hgIcon.ico</Property> - - <Property Id='INSTALLEDMERCURIALPRODUCTS' Secure='yes'></Property> - <Property Id='REINSTALLMODE'>amus</Property> - - <!--Auto-accept the license page--> - <Property Id='LicenseAccepted'>1</Property> - - <Directory Id='TARGETDIR' Name='SourceDir'> - <Directory Id='$(var.PFolder)' Name='PFiles'> - <Directory Id='INSTALLDIR' Name='Mercurial'> - <Component Id='MainExecutable' Guid='$(var.ComponentMainExecutableGUID)' Win64='$(var.IsX64)'> - <File Id='hgEXE' Name='hg.exe' Source='dist\hg.exe' KeyPath='yes' /> - <Environment Id="Environment" Name="PATH" Part="last" System="yes" - Permanent="no" Value="[INSTALLDIR]" Action="set" /> - </Component> - <Component Id='ReadMe' Guid='$(var.ReadMe.guid)' Win64='$(var.IsX64)'> - <File Id='ReadMe' Name='ReadMe.html' Source='contrib\win32\ReadMe.html' - KeyPath='yes'/> - </Component> - <Component Id='COPYING' Guid='$(var.COPYING.guid)' Win64='$(var.IsX64)'> - <File Id='COPYING' Name='COPYING.rtf' Source='contrib\wix\COPYING.rtf' - KeyPath='yes'/> - </Component> - - <Directory Id='HGRCD' Name='hgrc.d'> - <Component Id='mercurial.rc' Guid='$(var.mercurial.rc.guid)' Win64='$(var.IsX64)'> - <File Id='mercurial.rc' Name='Mercurial.rc' Source='contrib\win32\mercurial.ini' - ReadOnly='yes' KeyPath='yes'/> - </Component> - <Component Id='mergetools.rc' Guid='$(var.mergetools.rc.guid)' Win64='$(var.IsX64)'> - <File Id='mergetools.rc' Name='MergeTools.rc' Source='mercurial\default.d\mergetools.rc' - ReadOnly='yes' KeyPath='yes'/> - </Component> - </Directory> - - </Directory> - </Directory> - - <Directory Id="ProgramMenuFolder" Name="Programs"> - <Directory Id="ProgramMenuDir" Name="Mercurial $(var.Version)"> - <Component Id="ProgramMenuDir" Guid="$(var.ProgramMenuDir.guid)" Win64='$(var.IsX64)'> - <RemoveFolder Id='ProgramMenuDir' On='uninstall' /> - <RegistryValue Root='HKCU' Key='Software\Mercurial\InstallDir' 
Type='string' - Value='[INSTALLDIR]' KeyPath='yes' /> - <Shortcut Id='UrlShortcut' Directory='ProgramMenuDir' Name='Mercurial Web Site' - Target='[ARPHELPLINK]' Icon="hgIcon.ico" IconIndex='0' /> - </Component> - </Directory> - </Directory> - - <?if $(var.Platform) = "x86" ?> - <Merge Id='VCRuntime' DiskId='1' Language='1033' - SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x86_msm.msm' /> - <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' - SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x86_msm.msm' /> - <?else?> - <Merge Id='VCRuntime' DiskId='1' Language='1033' - SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x64_msm.msm' /> - <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' - SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x64_msm.msm' /> - <?endif?> - </Directory> - - <Feature Id='Complete' Title='Mercurial' Description='The complete package' - Display='expand' Level='1' ConfigurableDirectory='INSTALLDIR' > - <Feature Id='MainProgram' Title='Program' Description='Mercurial command line app' - Level='1' Absent='disallow' > - <ComponentRef Id='MainExecutable' /> - <ComponentRef Id='distOutput' /> - <ComponentRef Id='libOutput' /> - <ComponentRef Id='ProgramMenuDir' /> - <ComponentRef Id='ReadMe' /> - <ComponentRef Id='COPYING' /> - <ComponentRef Id='mercurial.rc' /> - <ComponentRef Id='mergetools.rc' /> - <ComponentGroupRef Id='helpFolder' /> - <ComponentGroupRef Id='templatesFolder' /> - <MergeRef Id='VCRuntime' /> - <MergeRef Id='VCRuntimePolicy' /> - </Feature> - <Feature Id='Locales' Title='Translations' Description='Translations' Level='1'> - <ComponentGroupRef Id='localeFolder' /> - <ComponentRef Id='i18nFolder' /> - </Feature> - <Feature Id='Documentation' Title='Documentation' Description='HTML man pages' Level='1'> - <ComponentGroupRef Id='docFolder' /> - </Feature> - <Feature Id='Misc' Title='Miscellaneous' Description='Contributed scripts' Level='1'> - <ComponentGroupRef Id='contribFolder' /> 
- </Feature> - </Feature> - - <UIRef Id="WixUI_FeatureTree" /> - <UIRef Id="WixUI_ErrorProgressText" /> - - <WixVariable Id="WixUILicenseRtf" Value="contrib\wix\COPYING.rtf" /> - - <Icon Id="hgIcon.ico" SourceFile="contrib/win32/mercurial.ico" /> - - <Upgrade Id='$(var.ProductUpgradeCode)'> - <UpgradeVersion - IncludeMinimum='yes' Minimum='0.0.0' IncludeMaximum='no' OnlyDetect='no' - Property='INSTALLEDMERCURIALPRODUCTS' /> - </Upgrade> - - <InstallExecuteSequence> - <RemoveExistingProducts After='InstallInitialize'/> - </InstallExecuteSequence> - - </Product> -</Wix>
--- a/contrib/wix/templates.wxs Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,251 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"> - - <?include guids.wxi ?> - <?include defines.wxi ?> - - <Fragment> - <ComponentGroup Id="templatesFolder"> - - <ComponentRef Id="templates.root" /> - - <ComponentRef Id="templates.atom" /> - <ComponentRef Id="templates.coal" /> - <ComponentRef Id="templates.gitweb" /> - <ComponentRef Id="templates.json" /> - <ComponentRef Id="templates.monoblue" /> - <ComponentRef Id="templates.paper" /> - <ComponentRef Id="templates.raw" /> - <ComponentRef Id="templates.rss" /> - <ComponentRef Id="templates.spartan" /> - <ComponentRef Id="templates.static" /> - - </ComponentGroup> - </Fragment> - - <Fragment> - <DirectoryRef Id="INSTALLDIR"> - - <Directory Id="templatesdir" Name="templates" FileSource="$(var.SourceDir)"> - - <Component Id="templates.root" Guid="$(var.templates.root.guid)" Win64='$(var.IsX64)'> - <File Name="map-cmdline.changelog" KeyPath="yes" /> - <File Name="map-cmdline.compact" /> - <File Name="map-cmdline.default" /> - <File Name="map-cmdline.show" /> - <File Name="map-cmdline.bisect" /> - <File Name="map-cmdline.xml" /> - <File Name="map-cmdline.status" /> - <File Name="map-cmdline.phases" /> - </Component> - - <Directory Id="templates.jsondir" Name="json"> - <Component Id="templates.json" Guid="$(var.templates.json.guid)" Win64='$(var.IsX64)'> - <File Id="json.changelist.tmpl" Name="changelist.tmpl" KeyPath="yes" /> - <File Id="json.graph.tmpl" Name="graph.tmpl" /> - <File Id="json.map" Name="map" /> - </Component> - </Directory> - - <Directory Id="templates.atomdir" Name="atom"> - <Component Id="templates.atom" Guid="$(var.templates.atom.guid)" Win64='$(var.IsX64)'> - <File Id="atom.changelog.tmpl" Name="changelog.tmpl" KeyPath="yes" /> - <File Id="atom.changelogentry.tmpl" Name="changelogentry.tmpl" /> - <File Id="atom.error.tmpl" 
Name="error.tmpl" /> - <File Id="atom.filelog.tmpl" Name="filelog.tmpl" /> - <File Id="atom.header.tmpl" Name="header.tmpl" /> - <File Id="atom.map" Name="map" /> - <File Id="atom.tagentry.tmpl" Name="tagentry.tmpl" /> - <File Id="atom.tags.tmpl" Name="tags.tmpl" /> - <File Id="atom.branchentry.tmpl" Name="branchentry.tmpl" /> - <File Id="atom.branches.tmpl" Name="branches.tmpl" /> - <File Id="atom.bookmarks.tmpl" Name="bookmarks.tmpl" /> - <File Id="atom.bookmarkentry.tmpl" Name="bookmarkentry.tmpl" /> - </Component> - </Directory> - - <Directory Id="templates.coaldir" Name="coal"> - <Component Id="templates.coal" Guid="$(var.templates.coal.guid)" Win64='$(var.IsX64)'> - <File Id="coal.header.tmpl" Name="header.tmpl" KeyPath="yes" /> - <File Id="coal.map" Name="map" /> - </Component> - </Directory> - - <Directory Id="templates.gitwebdir" Name="gitweb"> - <Component Id="templates.gitweb" Guid="$(var.templates.gitweb.guid)" Win64='$(var.IsX64)'> - <File Id="gitweb.branches.tmpl" Name="branches.tmpl" KeyPath="yes" /> - <File Id="gitweb.bookmarks.tmpl" Name="bookmarks.tmpl" /> - <File Id="gitweb.changelog.tmpl" Name="changelog.tmpl" /> - <File Id="gitweb.changelogentry.tmpl" Name="changelogentry.tmpl" /> - <File Id="gitweb.changeset.tmpl" Name="changeset.tmpl" /> - <File Id="gitweb.error.tmpl" Name="error.tmpl" /> - <File Id="gitweb.fileannotate.tmpl" Name="fileannotate.tmpl" /> - <File Id="gitweb.filecomparison.tmpl" Name="filecomparison.tmpl" /> - <File Id="gitweb.filediff.tmpl" Name="filediff.tmpl" /> - <File Id="gitweb.filelog.tmpl" Name="filelog.tmpl" /> - <File Id="gitweb.filerevision.tmpl" Name="filerevision.tmpl" /> - <File Id="gitweb.footer.tmpl" Name="footer.tmpl" /> - <File Id="gitweb.graph.tmpl" Name="graph.tmpl" /> - <File Id="gitweb.graphentry.tmpl" Name="graphentry.tmpl" /> - <File Id="gitweb.header.tmpl" Name="header.tmpl" /> - <File Id="gitweb.index.tmpl" Name="index.tmpl" /> - <File Id="gitweb.manifest.tmpl" Name="manifest.tmpl" /> - <File 
Id="gitweb.map" Name="map" /> - <File Id="gitweb.notfound.tmpl" Name="notfound.tmpl" /> - <File Id="gitweb.search.tmpl" Name="search.tmpl" /> - <File Id="gitweb.shortlog.tmpl" Name="shortlog.tmpl" /> - <File Id="gitweb.summary.tmpl" Name="summary.tmpl" /> - <File Id="gitweb.tags.tmpl" Name="tags.tmpl" /> - <File Id="gitweb.help.tmpl" Name="help.tmpl" /> - <File Id="gitweb.helptopics.tmpl" Name="helptopics.tmpl" /> - </Component> - </Directory> - - <Directory Id="templates.monobluedir" Name="monoblue"> - <Component Id="templates.monoblue" Guid="$(var.templates.monoblue.guid)" Win64='$(var.IsX64)'> - <File Id="monoblue.branches.tmpl" Name="branches.tmpl" KeyPath="yes" /> - <File Id="monoblue.bookmarks.tmpl" Name="bookmarks.tmpl" /> - <File Id="monoblue.changelog.tmpl" Name="changelog.tmpl" /> - <File Id="monoblue.changelogentry.tmpl" Name="changelogentry.tmpl" /> - <File Id="monoblue.changeset.tmpl" Name="changeset.tmpl" /> - <File Id="monoblue.error.tmpl" Name="error.tmpl" /> - <File Id="monoblue.fileannotate.tmpl" Name="fileannotate.tmpl" /> - <File Id="monoblue.filecomparison.tmpl" Name="filecomparison.tmpl" /> - <File Id="monoblue.filediff.tmpl" Name="filediff.tmpl" /> - <File Id="monoblue.filelog.tmpl" Name="filelog.tmpl" /> - <File Id="monoblue.filerevision.tmpl" Name="filerevision.tmpl" /> - <File Id="monoblue.footer.tmpl" Name="footer.tmpl" /> - <File Id="monoblue.graph.tmpl" Name="graph.tmpl" /> - <File Id="monoblue.graphentry.tmpl" Name="graphentry.tmpl" /> - <File Id="monoblue.header.tmpl" Name="header.tmpl" /> - <File Id="monoblue.index.tmpl" Name="index.tmpl" /> - <File Id="monoblue.manifest.tmpl" Name="manifest.tmpl" /> - <File Id="monoblue.map" Name="map" /> - <File Id="monoblue.notfound.tmpl" Name="notfound.tmpl" /> - <File Id="monoblue.search.tmpl" Name="search.tmpl" /> - <File Id="monoblue.shortlog.tmpl" Name="shortlog.tmpl" /> - <File Id="monoblue.summary.tmpl" Name="summary.tmpl" /> - <File Id="monoblue.tags.tmpl" Name="tags.tmpl" /> - <File 
Id="monoblue.help.tmpl" Name="help.tmpl" /> - <File Id="monoblue.helptopics.tmpl" Name="helptopics.tmpl" /> - </Component> - </Directory> - - <Directory Id="templates.paperdir" Name="paper"> - <Component Id="templates.paper" Guid="$(var.templates.paper.guid)" Win64='$(var.IsX64)'> - <File Id="paper.branches.tmpl" Name="branches.tmpl" KeyPath="yes" /> - <File Id="paper.bookmarks.tmpl" Name="bookmarks.tmpl" /> - <File Id="paper.changeset.tmpl" Name="changeset.tmpl" /> - <File Id="paper.diffstat.tmpl" Name="diffstat.tmpl" /> - <File Id="paper.error.tmpl" Name="error.tmpl" /> - <File Id="paper.fileannotate.tmpl" Name="fileannotate.tmpl" /> - <File Id="paper.filecomparison.tmpl" Name="filecomparison.tmpl" /> - <File Id="paper.filediff.tmpl" Name="filediff.tmpl" /> - <File Id="paper.filelog.tmpl" Name="filelog.tmpl" /> - <File Id="paper.filelogentry.tmpl" Name="filelogentry.tmpl" /> - <File Id="paper.filerevision.tmpl" Name="filerevision.tmpl" /> - <File Id="paper.footer.tmpl" Name="footer.tmpl" /> - <File Id="paper.graph.tmpl" Name="graph.tmpl" /> - <File Id="paper.graphentry.tmpl" Name="graphentry.tmpl" /> - <File Id="paper.header.tmpl" Name="header.tmpl" /> - <File Id="paper.index.tmpl" Name="index.tmpl" /> - <File Id="paper.manifest.tmpl" Name="manifest.tmpl" /> - <File Id="paper.map" Name="map" /> - <File Id="paper.notfound.tmpl" Name="notfound.tmpl" /> - <File Id="paper.search.tmpl" Name="search.tmpl" /> - <File Id="paper.shortlog.tmpl" Name="shortlog.tmpl" /> - <File Id="paper.shortlogentry.tmpl" Name="shortlogentry.tmpl" /> - <File Id="paper.tags.tmpl" Name="tags.tmpl" /> - <File Id="paper.help.tmpl" Name="help.tmpl" /> - <File Id="paper.helptopics.tmpl" Name="helptopics.tmpl" /> - </Component> - </Directory> - - <Directory Id="templates.rawdir" Name="raw"> - <Component Id="templates.raw" Guid="$(var.templates.raw.guid)" Win64='$(var.IsX64)'> - <File Id="raw.changeset.tmpl" Name="changeset.tmpl" KeyPath="yes" /> - <File Id="raw.error.tmpl" Name="error.tmpl" /> - 
<File Id="raw.fileannotate.tmpl" Name="fileannotate.tmpl" /> - <File Id="raw.filediff.tmpl" Name="filediff.tmpl" /> - <File Id="raw.graph.tmpl" Name="graph.tmpl" /> - <File Id="raw.graphedge.tmpl" Name="graphedge.tmpl" /> - <File Id="raw.graphnode.tmpl" Name="graphnode.tmpl" /> - <File Id="raw.index.tmpl" Name="index.tmpl" /> - <File Id="raw.manifest.tmpl" Name="manifest.tmpl" /> - <File Id="raw.map" Name="map" /> - <File Id="raw.notfound.tmpl" Name="notfound.tmpl" /> - <File Id="raw.search.tmpl" Name="search.tmpl" /> - <File Id="raw.logentry.tmpl" Name="logentry.tmpl" /> - <File Id="raw.changelog.tmpl" Name="changelog.tmpl" /> - </Component> - </Directory> - - <Directory Id="templates.rssdir" Name="rss"> - <Component Id="templates.rss" Guid="$(var.templates.rss.guid)" Win64='$(var.IsX64)'> - <File Id="rss.changelog.tmpl" Name="changelog.tmpl" KeyPath="yes" /> - <File Id="rss.changelogentry.tmpl" Name="changelogentry.tmpl" /> - <File Id="rss.error.tmpl" Name="error.tmpl" /> - <File Id="rss.filelog.tmpl" Name="filelog.tmpl" /> - <File Id="rss.filelogentry.tmpl" Name="filelogentry.tmpl" /> - <File Id="rss.header.tmpl" Name="header.tmpl" /> - <File Id="rss.map" Name="map" /> - <File Id="rss.tagentry.tmpl" Name="tagentry.tmpl" /> - <File Id="rss.tags.tmpl" Name="tags.tmpl" /> - <File Id="rss.bookmarks.tmpl" Name="bookmarks.tmpl" /> - <File Id="rss.bookmarkentry.tmpl" Name="bookmarkentry.tmpl" /> - <File Id="rss.branchentry.tmpl" Name="branchentry.tmpl" /> - <File Id="rss.branches.tmpl" Name="branches.tmpl" /> - </Component> - </Directory> - - <Directory Id="templates.spartandir" Name="spartan"> - <Component Id="templates.spartan" Guid="$(var.templates.spartan.guid)" Win64='$(var.IsX64)'> - <File Id="spartan.branches.tmpl" Name="branches.tmpl" KeyPath="yes" /> - <File Id="spartan.changelog.tmpl" Name="changelog.tmpl" /> - <File Id="spartan.changelogentry.tmpl" Name="changelogentry.tmpl" /> - <File Id="spartan.changeset.tmpl" Name="changeset.tmpl" /> - <File 
Id="spartan.error.tmpl" Name="error.tmpl" /> - <File Id="spartan.fileannotate.tmpl" Name="fileannotate.tmpl" /> - <File Id="spartan.filediff.tmpl" Name="filediff.tmpl" /> - <File Id="spartan.filelog.tmpl" Name="filelog.tmpl" /> - <File Id="spartan.filelogentry.tmpl" Name="filelogentry.tmpl" /> - <File Id="spartan.filerevision.tmpl" Name="filerevision.tmpl" /> - <File Id="spartan.footer.tmpl" Name="footer.tmpl" /> - <File Id="spartan.graph.tmpl" Name="graph.tmpl" /> - <File Id="spartan.graphentry.tmpl" Name="graphentry.tmpl" /> - <File Id="spartan.header.tmpl" Name="header.tmpl" /> - <File Id="spartan.index.tmpl" Name="index.tmpl" /> - <File Id="spartan.manifest.tmpl" Name="manifest.tmpl" /> - <File Id="spartan.map" Name="map" /> - <File Id="spartan.notfound.tmpl" Name="notfound.tmpl" /> - <File Id="spartan.search.tmpl" Name="search.tmpl" /> - <File Id="spartan.shortlog.tmpl" Name="shortlog.tmpl" /> - <File Id="spartan.shortlogentry.tmpl" Name="shortlogentry.tmpl" /> - <File Id="spartan.tags.tmpl" Name="tags.tmpl" /> - </Component> - </Directory> - - <Directory Id="templates.staticdir" Name="static"> - <Component Id="templates.static" Guid="$(var.templates.static.guid)" Win64='$(var.IsX64)'> - <File Id="static.background.png" Name="background.png" KeyPath="yes" /> - <File Id="static.coal.file.png" Name="coal-file.png" /> - <File Id="static.coal.folder.png" Name="coal-folder.png" /> - <File Id="static.followlines.js" Name="followlines.js" /> - <File Id="static.mercurial.js" Name="mercurial.js" /> - <File Id="static.hgicon.png" Name="hgicon.png" /> - <File Id="static.hglogo.png" Name="hglogo.png" /> - <File Id="static.style.coal.css" Name="style-extra-coal.css" /> - <File Id="static.style.gitweb.css" Name="style-gitweb.css" /> - <File Id="static.style.monoblue.css" Name="style-monoblue.css" /> - <File Id="static.style.paper.css" Name="style-paper.css" /> - <File Id="static.style.css" Name="style.css" /> - <File Id="static.feed.icon" Name="feed-icon-14x14.png" /> - 
</Component> - </Directory> - - </Directory> - - </DirectoryRef> - </Fragment> - - </Wix>
--- a/contrib/zsh_completion Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/zsh_completion Tue Mar 19 16:36:59 2019 +0300 @@ -248,7 +248,7 @@ [[ -d $PREFIX ]] || PREFIX=$PREFIX:h - _hg_cmd resolve -l ./$PREFIX | while read rstate rpath + _hg_cmd resolve -l ./$PREFIX -T '{mergestatus}\ {relpath\(path\)}\\n' | while read rstate rpath do [[ $rstate == 'R' ]] && resolved_files+=($rpath) [[ $rstate == 'U' ]] && unresolved_files+=($rpath)
--- a/doc/Makefile Tue Mar 19 09:23:35 2019 -0400 +++ b/doc/Makefile Tue Mar 19 16:36:59 2019 +0300 @@ -17,6 +17,7 @@ html: $(HTML) +# This logic is duplicated in setup.py:hgbuilddoc() common.txt $(SOURCES) $(SOURCES:%.txt=%.gendoc.txt): $(GENDOC) ${PYTHON} gendoc.py "$(basename $@)" > $@.tmp mv $@.tmp $@
--- a/doc/check-seclevel.py Tue Mar 19 09:23:35 2019 -0400 +++ b/doc/check-seclevel.py Tue Mar 19 16:36:59 2019 +0300 @@ -163,8 +163,8 @@ (options, args) = optparser.parse_args() ui = uimod.ui.load() - ui.setconfig('ui', 'verbose', options.verbose, '--verbose') - ui.setconfig('ui', 'debug', options.debug, '--debug') + ui.setconfig(b'ui', b'verbose', options.verbose, b'--verbose') + ui.setconfig(b'ui', b'debug', options.debug, b'--debug') if options.file: if checkfile(ui, options.file, options.initlevel):
--- a/doc/hgmanpage.py Tue Mar 19 09:23:35 2019 -0400 +++ b/doc/hgmanpage.py Tue Mar 19 16:36:59 2019 +0300 @@ -376,7 +376,7 @@ tmpl = (".TH %(title_upper)s %(manual_section)s" " \"%(date)s\" \"%(version)s\" \"%(manual_group)s\"\n" ".SH NAME\n" - "%(title)s \- %(subtitle)s\n") + "%(title)s \\- %(subtitle)s\n") return tmpl % self._docinfo def append_header(self):
--- a/hgext/absorb.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/absorb.py Tue Mar 19 16:36:59 2019 +0300 @@ -50,7 +50,6 @@ phases, pycompat, registrar, - repair, scmutil, util, ) @@ -191,9 +190,9 @@ pctx = None # do not add another immutable fctx break fctxmap[ctx] = fctx # only for mutable fctxs - renamed = fctx.renamed() - if renamed: - path = renamed[0] # follow rename + copy = fctx.copysource() + if copy: + path = copy # follow rename if path in ctx: # but do not follow copy pctx = ctx.p1() break @@ -232,8 +231,8 @@ else: content = fctx.data() mode = (fctx.islink(), fctx.isexec()) - renamed = fctx.renamed() # False or (path, node) - return content, mode, (renamed and renamed[0]) + copy = fctx.copysource() + return content, mode, copy def overlaycontext(memworkingcopy, ctx, parents=None, extra=None): """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx @@ -689,10 +688,7 @@ self._movebookmarks(tr) if self.repo['.'].node() in self.replacemap: self._moveworkingdirectoryparent() - if self._useobsolete: - self._obsoleteoldcommits() - if not self._useobsolete: # strip must be outside transactions - self._stripoldcommits() + self._cleanupoldcommits() return self.finalnode def printchunkstats(self): @@ -726,7 +722,6 @@ # nothing changed, nothing commited nextp1 = ctx continue - msg = '' if self._willbecomenoop(memworkingcopy, ctx, nextp1): # changeset is no longer necessary self.replacemap[ctx.node()] = None @@ -850,31 +845,19 @@ if self._useobsolete and self.ui.configbool('absorb', 'add-noise'): extra['absorb_source'] = ctx.hex() mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra) - # preserve phase - with mctx.repo().ui.configoverride({ - ('phases', 'new-commit'): ctx.phase()}): - return mctx.commit() + return mctx.commit() @util.propertycache def _useobsolete(self): """() -> bool""" return obsolete.isenabled(self.repo, obsolete.createmarkersopt) - def _obsoleteoldcommits(self): - relations = [(self.repo[k], v and (self.repo[v],) or ()) - for k, v 
in self.replacemap.iteritems()] - if relations: - obsolete.createmarkers(self.repo, relations) - - def _stripoldcommits(self): - nodelist = self.replacemap.keys() - # make sure we don't strip innocent children - revs = self.repo.revs('%ln - (::(heads(%ln::)-%ln))', nodelist, - nodelist, nodelist) - tonode = self.repo.changelog.node - nodelist = [tonode(r) for r in revs] - if nodelist: - repair.strip(self.repo.ui, self.repo, nodelist) + def _cleanupoldcommits(self): + replacements = {k: ([v] if v is not None else []) + for k, v in self.replacemap.iteritems()} + if replacements: + scmutil.cleanupnodes(self.repo, replacements, operation='absorb', + fixphase=True) def _parsechunk(hunk): """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
--- a/hgext/acl.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/acl.py Tue Mar 19 16:36:59 2019 +0300 @@ -293,15 +293,15 @@ # if ug is a user name: !username # if ug is a group name: !@groupname ug = ug[1:] - if not ug.startswith('@') and user != ug \ - or ug.startswith('@') and user not in _getusers(ui, ug[1:]): + if (not ug.startswith('@') and user != ug + or ug.startswith('@') and user not in _getusers(ui, ug[1:])): return True # Test for user or group. Format: # if ug is a user name: username # if ug is a group name: @groupname - elif user == ug \ - or ug.startswith('@') and user in _getusers(ui, ug[1:]): + elif (user == ug + or ug.startswith('@') and user in _getusers(ui, ug[1:])): return True return False
--- a/hgext/automv.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/automv.py Tue Mar 19 16:36:59 2019 +0300 @@ -64,7 +64,8 @@ if threshold > 0: match = scmutil.match(repo[None], pats, opts) added, removed = _interestingfiles(repo, match) - renames = _findrenames(repo, match, added, removed, + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + renames = _findrenames(repo, uipathfn, added, removed, threshold / 100.0) with repo.wlock(): @@ -89,7 +90,7 @@ return added, removed -def _findrenames(repo, matcher, added, removed, similarity): +def _findrenames(repo, uipathfn, added, removed, similarity): """Find what files in added are really moved files. Any file named in removed that is at least similarity% similar to a file @@ -103,7 +104,7 @@ if repo.ui.verbose: repo.ui.status( _('detected move of %s as %s (%d%% similar)\n') % ( - matcher.rel(src), matcher.rel(dst), score * 100)) + uipathfn(src), uipathfn(dst), score * 100)) renames[dst] = src if renames: repo.ui.status(_('detected move of %d files\n') % len(renames))
--- a/hgext/blackbox.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/blackbox.py Tue Mar 19 16:36:59 2019 +0300 @@ -118,7 +118,6 @@ date = dateutil.datestr(default, ui.config('blackbox', 'date-format')) user = procutil.getuser() pid = '%d' % procutil.getpid() - rev = '(unknown)' changed = '' ctx = self._repo[None] parents = ctx.parents() @@ -191,7 +190,7 @@ break # count the commands by matching lines like: 2013/01/23 19:13:36 root> - if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line): + if re.match(br'^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line): count += 1 output.append(line)
--- a/hgext/bugzilla.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/bugzilla.py Tue Mar 19 16:36:59 2019 +0300 @@ -303,6 +303,7 @@ error, logcmdutil, mail, + pycompat, registrar, url, util, @@ -342,10 +343,10 @@ default='bugs', ) configitem('bugzilla', 'fixregexp', - default=(r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*' - r'(?:nos?\.?|num(?:ber)?s?)?\s*' - r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)' - r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?') + default=(br'fix(?:es)?\s*(?:bugs?\s*)?,?\s*' + br'(?:nos?\.?|num(?:ber)?s?)?\s*' + br'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)' + br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?') ) configitem('bugzilla', 'fixresolution', default='FIXED', @@ -363,9 +364,9 @@ default=None, ) configitem('bugzilla', 'regexp', - default=(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*' - r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)' - r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?') + default=(br'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*' + br'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)' + br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?') ) configitem('bugzilla', 'strip', default=0, @@ -599,8 +600,8 @@ def __init__(self, ui): bzmysql.__init__(self, ui) - self.default_notify = \ - "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s" + self.default_notify = ( + "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s") class bzmysql_3_0(bzmysql_2_18): '''support for bugzilla 3.0 series.''' @@ -733,7 +734,7 @@ c = self.bzproxy.Bug.comments({'ids': [id], 'include_fields': ['text'], 'token': self.bztoken}) - return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']]) + return ''.join([t['text'] for t in c['bugs']['%d' % id]['comments']]) def filter_real_bug_ids(self, bugs): probe = self.bzproxy.Bug.get({'ids': sorted(bugs.keys()), @@ -804,11 +805,11 @@ def makecommandline(self, fieldname, value): if self.bzvermajor >= 4: - return "@%s %s" % (fieldname, str(value)) + return "@%s %s" % (fieldname, 
pycompat.bytestr(value)) else: if fieldname == "id": fieldname = "bug_id" - return "@%s = %s" % (fieldname, str(value)) + return "@%s = %s" % (fieldname, pycompat.bytestr(value)) def send_bug_modify_email(self, bugid, commands, comment, committer): '''send modification message to Bugzilla bug via email. @@ -873,7 +874,7 @@ self.fixresolution = self.ui.config('bugzilla', 'fixresolution') def apiurl(self, targets, include_fields=None): - url = '/'.join([self.bzroot] + [str(t) for t in targets]) + url = '/'.join([self.bzroot] + [pycompat.bytestr(t) for t in targets]) qv = {} if self.apikey: qv['api_key'] = self.apikey @@ -938,7 +939,7 @@ for bugid in bugs.keys(): burl = self.apiurl(('bug', bugid, 'comment'), include_fields='text') result = self._fetch(burl) - comments = result['bugs'][str(bugid)]['comments'] + comments = result['bugs'][pycompat.bytestr(bugid)]['comments'] if any(sn in c['text'] for c in comments): self.ui.status(_('bug %d already knows about changeset %s\n') % (bugid, sn)) @@ -1011,7 +1012,7 @@ self.ui.config('bugzilla', 'regexp'), re.IGNORECASE) self.fix_re = re.compile( self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE) - self.split_re = re.compile(r'\D+') + self.split_re = re.compile(br'\D+') def find_bugs(self, ctx): '''return bugs dictionary created from commit comment. @@ -1098,7 +1099,7 @@ t = logcmdutil.changesettemplater(self.ui, self.repo, spec) self.ui.pushbuffer() t.show(ctx, changes=ctx.changeset(), - bug=str(bugid), + bug=pycompat.bytestr(bugid), hgweb=self.ui.config('web', 'baseurl'), root=self.repo.root, webroot=webroot(self.repo.root))
--- a/hgext/commitextras.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/commitextras.py Tue Mar 19 16:36:59 2019 +0300 @@ -58,7 +58,7 @@ if not k: msg = _("unable to parse '%s', keys can't be empty") raise error.Abort(msg % raw) - if re.search('[^\w-]', k): + if re.search(br'[^\w-]', k): msg = _("keys can only contain ascii letters, digits," " '_' and '-'") raise error.Abort(msg)
--- a/hgext/convert/convcmd.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/convert/convcmd.py Tue Mar 19 16:36:59 2019 +0300 @@ -123,7 +123,7 @@ exceptions.append(inst) if not ui.quiet: for inst in exceptions: - ui.write("%s\n" % pycompat.bytestr(inst)) + ui.write("%s\n" % pycompat.bytestr(inst.args[0])) raise error.Abort(_('%s: missing or unsupported repository') % path) def convertsink(ui, path, type):
--- a/hgext/convert/cvs.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/convert/cvs.py Tue Mar 19 16:36:59 2019 +0300 @@ -76,7 +76,6 @@ d = encoding.getcwd() try: os.chdir(self.path) - id = None cache = 'update' if not self.ui.configbool('convert', 'cvsps.cache'): @@ -219,7 +218,7 @@ if "UseUnchanged" in r: self.writep.write("UseUnchanged\n") self.writep.flush() - r = self.readp.readline() + self.readp.readline() def getheads(self): self._parse()
--- a/hgext/convert/cvsps.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/convert/cvsps.py Tue Mar 19 16:36:59 2019 +0300 @@ -122,7 +122,7 @@ re_31 = re.compile(b'----------------------------$') re_32 = re.compile(b'=======================================' b'======================================$') - re_50 = re.compile(b'revision ([\\d.]+)(\s+locked by:\s+.+;)?$') + re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$') re_60 = re.compile(br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);' br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?' br'(\s+commitid:\s+([^;]+);)?' @@ -776,8 +776,8 @@ # Ensure no changeset has a synthetic changeset as a parent. while p.synthetic: - assert len(p.parents) <= 1, \ - _('synthetic changeset cannot have multiple parents') + assert len(p.parents) <= 1, ( + _('synthetic changeset cannot have multiple parents')) if p.parents: p = p.parents[0] else: @@ -954,12 +954,12 @@ # have we seen the start tag? if revisions and off: - if revisions[0] == (b"%d" % cs.id) or \ - revisions[0] in cs.tags: + if (revisions[0] == (b"%d" % cs.id) or + revisions[0] in cs.tags): off = False # see if we reached the end tag if len(revisions) > 1 and not off: - if revisions[1] == (b"%d" % cs.id) or \ - revisions[1] in cs.tags: + if (revisions[1] == (b"%d" % cs.id) or + revisions[1] in cs.tags): break
--- a/hgext/convert/git.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/convert/git.py Tue Mar 19 16:36:59 2019 +0300 @@ -13,6 +13,7 @@ config, error, node as nodemod, + pycompat, ) from . import ( @@ -175,7 +176,8 @@ self.catfilepipe[0].flush() info = self.catfilepipe[1].readline().split() if info[1] != ftype: - raise error.Abort(_('cannot read %r object at %s') % (ftype, rev)) + raise error.Abort(_('cannot read %r object at %s') % ( + pycompat.bytestr(ftype), rev)) size = int(info[2]) data = self.catfilepipe[1].read(size) if len(data) < size: @@ -294,7 +296,7 @@ if not entry: if not l.startswith(':'): continue - entry = l.split() + entry = tuple(pycompat.bytestr(p) for p in l.split()) continue f = l if entry[4][0] == 'C': @@ -385,7 +387,7 @@ def numcommits(self): output, ret = self.gitrunlines('rev-list', '--all') if ret: - raise error.Abort(_('cannot retrieve number of commits in %s') \ + raise error.Abort(_('cannot retrieve number of commits in %s') % self.path) return len(output)
--- a/hgext/convert/hg.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/convert/hg.py Tue Mar 19 16:36:59 2019 +0300 @@ -105,10 +105,6 @@ if not branch: branch = 'default' pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches] - if pbranches: - pbranch = pbranches[0][1] - else: - pbranch = 'default' branchpath = os.path.join(self.path, branch) if setbranch: @@ -561,7 +557,7 @@ if name in self.ignored: continue try: - copysource, _copynode = ctx.filectx(name).renamed() + copysource = ctx.filectx(name).copysource() if copysource in self.ignored: continue # Ignore copy sources not in parent revisions
--- a/hgext/convert/monotone.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/convert/monotone.py Tue Mar 19 16:36:59 2019 +0300 @@ -93,16 +93,16 @@ kwargs = pycompat.byteskwargs(kwargs) command = [] for k, v in kwargs.iteritems(): - command.append("%s:%s" % (len(k), k)) + command.append("%d:%s" % (len(k), k)) if v: - command.append("%s:%s" % (len(v), v)) + command.append("%d:%s" % (len(v), v)) if command: command.insert(0, 'o') command.append('e') command.append('l') for arg in args: - command += "%d:%s" % (len(arg), arg) + command.append("%d:%s" % (len(arg), arg)) command.append('e') command = ''.join(command) @@ -138,7 +138,7 @@ raise error.Abort(_('bad mtn packet - no end of packet size')) lengthstr += read try: - length = long(lengthstr[:-1]) + length = pycompat.long(lengthstr[:-1]) except TypeError: raise error.Abort(_('bad mtn packet - bad packet size %s') % lengthstr) @@ -154,7 +154,7 @@ retval = [] while True: commandnbr, stream, length, output = self.mtnstdioreadpacket() - self.ui.debug('mtn: read packet %s:%s:%s\n' % + self.ui.debug('mtn: read packet %s:%s:%d\n' % (commandnbr, stream, length)) if stream == 'l': @@ -214,13 +214,13 @@ # key "test@selenic.com" # mtn >= 0.45: # key [ff58a7ffb771907c4ff68995eada1c4da068d328] - certlist = re.split('\n\n key ["\[]', certlist) + certlist = re.split(br'\n\n key ["\[]', certlist) for e in certlist: m = self.cert_re.match(e) if m: name, value = m.groups() - value = value.replace(r'\"', '"') - value = value.replace(r'\\', '\\') + value = value.replace(br'\"', '"') + value = value.replace(br'\\', '\\') certs[name] = value # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306 # and all times are stored in UTC @@ -335,7 +335,6 @@ def before(self): # Check if we have a new enough version to use automate stdio - version = 0.0 try: versionstr = self.mtnrunsingle("interface_version") version = float(versionstr)
--- a/hgext/convert/p4.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/convert/p4.py Tue Mar 19 16:36:59 2019 +0300 @@ -64,12 +64,12 @@ self.encoding = self.ui.config('convert', 'p4.encoding', convcmd.orig_encoding) self.re_type = re.compile( - "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)" - "(\+\w+)?$") + br"([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)" + br"(\+\w+)?$") self.re_keywords = re.compile( - r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)" - r":[^$\n]*\$") - self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$") + br"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)" + br":[^$\n]*\$") + self.re_keywords_old = re.compile(br"\$(Id|Header):[^$\n]*\$") if revs and len(revs) > 1: raise error.Abort(_("p4 source does not support specifying " @@ -198,8 +198,8 @@ for filename in copiedfiles: oldname = depotname[filename] - flcmd = 'p4 -G filelog %s' \ - % procutil.shellquote(oldname) + flcmd = ('p4 -G filelog %s' + % procutil.shellquote(oldname)) flstdout = procutil.popen(flcmd, mode='rb') copiedfilename = None @@ -272,8 +272,8 @@ return self.heads def getfile(self, name, rev): - cmd = 'p4 -G print %s' \ - % procutil.shellquote("%s#%s" % (self.depotname[name], rev)) + cmd = ('p4 -G print %s' + % procutil.shellquote("%s#%s" % (self.depotname[name], rev))) lasterror = None while True:
--- a/hgext/convert/subversion.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/convert/subversion.py Tue Mar 19 16:36:59 2019 +0300 @@ -790,7 +790,7 @@ if childpath: removed.add(self.recode(childpath)) else: - self.ui.debug('unknown path in revision %d: %s\n' % \ + self.ui.debug('unknown path in revision %d: %s\n' % (revnum, path)) elif kind == svn.core.svn_node_dir: if ent.action == 'M': @@ -984,7 +984,6 @@ # TODO: ra.get_file transmits the whole file instead of diffs. if file in self.removed: return None, None - mode = '' try: new_module, revnum = revsplit(rev)[1:] if self.module != new_module: @@ -1183,12 +1182,12 @@ m = set() output = self.run0('ls', recursive=True, xml=True) doc = xml.dom.minidom.parseString(output) - for e in doc.getElementsByTagName('entry'): + for e in doc.getElementsByTagName(r'entry'): for n in e.childNodes: - if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name': + if n.nodeType != n.ELEMENT_NODE or n.tagName != r'name': continue - name = ''.join(c.data for c in n.childNodes - if c.nodeType == c.TEXT_NODE) + name = r''.join(c.data for c in n.childNodes + if c.nodeType == c.TEXT_NODE) # Entries are compared with names coming from # mercurial, so bytes with undefined encoding. Our # best bet is to assume they are in local @@ -1207,10 +1206,18 @@ os.unlink(filename) except OSError: pass + + if self.is_exec: + # We need to check executability of the file before the change, + # because `vfs.write` is able to reset exec bit. 
+ wasexec = False + if os.path.exists(self.wjoin(filename)): + wasexec = self.is_exec(self.wjoin(filename)) + self.wopener.write(filename, data) if self.is_exec: - if self.is_exec(self.wjoin(filename)): + if wasexec: if 'x' not in flags: self.delexec.append(filename) else: @@ -1325,8 +1332,8 @@ try: rev = self.commit_re.search(output).group(1) except AttributeError: - if parents and not files: - return parents[0] + if not files: + return parents[0] if parents else None self.ui.warn(_('unexpected svn output:\n')) self.ui.warn(output) raise error.Abort(_('unable to cope with svn output'))
--- a/hgext/extdiff.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/extdiff.py Tue Mar 19 16:36:59 2019 +0300 @@ -59,6 +59,22 @@ [diff-tools] kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child +If a program has a graphical interface, it might be interesting to tell +Mercurial about it. It will prevent the program from being mistakenly +used in a terminal-only environment (such as an SSH terminal session), +and will make :hg:`extdiff --per-file` open multiple file diffs at once +instead of one by one (if you still want to open file diffs one by one, +you can use the --confirm option). + +Declaring that a tool has a graphical interface can be done with the +``gui`` flag next to where ``diffargs`` are specified: + +:: + + [diff-tools] + kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child + kdiff3.gui = true + You can use -I/-X and list of file or directory names like normal :hg:`diff` command. The extdiff extension makes snapshots of only needed files, so running the external diff program will actually be @@ -71,6 +87,7 @@ import re import shutil import stat +import subprocess from mercurial.i18n import _ from mercurial.node import ( @@ -80,6 +97,7 @@ from mercurial import ( archival, cmdutil, + encoding, error, filemerge, formatter, @@ -104,11 +122,19 @@ generic=True, ) +configitem('extdiff', br'gui\..*', + generic=True, +) + configitem('diff-tools', br'.*\.diffargs$', default=None, generic=True, ) +configitem('diff-tools', br'.*\.gui$', + generic=True, +) + # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for # extensions which SHIP WITH MERCURIAL. 
Non-mainline extensions should # be specifying the version(s) of Mercurial they are tested with, or @@ -175,7 +201,97 @@ cmdline += ' $parent1 $child' return re.sub(regex, quote, cmdline) -def dodiff(ui, repo, cmdline, pats, opts): +def _systembackground(cmd, environ=None, cwd=None): + ''' like 'procutil.system', but returns the Popen object directly + so we don't have to wait on it. + ''' + cmd = procutil.quotecommand(cmd) + env = procutil.shellenviron(environ) + proc = subprocess.Popen(procutil.tonativestr(cmd), + shell=True, close_fds=procutil.closefds, + env=procutil.tonativeenv(env), + cwd=pycompat.rapply(procutil.tonativestr, cwd)) + return proc + +def _runperfilediff(cmdline, repo_root, ui, guitool, do3way, confirm, + commonfiles, tmproot, dir1a, dir1b, + dir2root, dir2, + rev1a, rev1b, rev2): + # Note that we need to sort the list of files because it was + # built in an "unstable" way and it's annoying to get files in a + # random order, especially when "confirm" mode is enabled. + waitprocs = [] + totalfiles = len(commonfiles) + for idx, commonfile in enumerate(sorted(commonfiles)): + path1a = os.path.join(tmproot, dir1a, commonfile) + label1a = commonfile + rev1a + if not os.path.isfile(path1a): + path1a = os.devnull + + path1b = '' + label1b = '' + if do3way: + path1b = os.path.join(tmproot, dir1b, commonfile) + label1b = commonfile + rev1b + if not os.path.isfile(path1b): + path1b = os.devnull + + path2 = os.path.join(dir2root, dir2, commonfile) + label2 = commonfile + rev2 + + if confirm: + # Prompt before showing this diff + difffiles = _('diff %s (%d of %d)') % (commonfile, idx + 1, + totalfiles) + responses = _('[Yns?]' + '$$ &Yes, show diff' + '$$ &No, skip this diff' + '$$ &Skip remaining diffs' + '$$ &? (display help)') + r = ui.promptchoice('%s %s' % (difffiles, responses)) + if r == 3: # ? 
+ while r == 3: + for c, t in ui.extractchoices(responses)[1]: + ui.write('%s - %s\n' % (c, encoding.lower(t))) + r = ui.promptchoice('%s %s' % (difffiles, responses)) + if r == 0: # yes + pass + elif r == 1: # no + continue + elif r == 2: # skip + break + + curcmdline = formatcmdline( + cmdline, repo_root, do3way=do3way, + parent1=path1a, plabel1=label1a, + parent2=path1b, plabel2=label1b, + child=path2, clabel=label2) + + if confirm or not guitool: + # Run the comparison program and wait for it to exit + # before we show the next file. + # This is because either we need to wait for confirmation + # from the user between each invocation, or because, as far + # as we know, the tool doesn't have a GUI, in which case + # we can't run multiple CLI programs at the same time. + ui.debug('running %r in %s\n' % + (pycompat.bytestr(curcmdline), tmproot)) + ui.system(curcmdline, cwd=tmproot, blockedtag='extdiff') + else: + # Run the comparison program but don't wait, as we're + # going to rapid-fire each file diff and then wait on + # the whole group. 
+ ui.debug('running %r in %s (backgrounded)\n' % + (pycompat.bytestr(curcmdline), tmproot)) + proc = _systembackground(curcmdline, cwd=tmproot) + waitprocs.append(proc) + + if waitprocs: + with ui.timeblockedsection('extdiff'): + for proc in waitprocs: + proc.wait() + +def dodiff(ui, repo, cmdline, pats, opts, guitool=False): '''Do the actual diff: - copy to a temp structure if diffing 2 internal revisions @@ -201,6 +317,9 @@ else: ctx1b = repo[nullid] + perfile = opts.get('per_file') + confirm = opts.get('confirm') + node1a = ctx1a.node() node1b = ctx1b.node() node2 = ctx2.node() @@ -217,6 +336,8 @@ if opts.get('patch'): if subrepos: raise error.Abort(_('--patch cannot be used with --subrepos')) + if perfile: + raise error.Abort(_('--patch cannot be used with --per-file')) if node2 is None: raise error.Abort(_('--patch requires two revisions')) else: @@ -304,15 +425,24 @@ label1b = None fnsandstat = [] - # Run the external tool on the 2 temp directories or the patches - cmdline = formatcmdline( - cmdline, repo.root, do3way=do3way, - parent1=dir1a, plabel1=label1a, - parent2=dir1b, plabel2=label1b, - child=dir2, clabel=label2) - ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline), - tmproot)) - ui.system(cmdline, cwd=tmproot, blockedtag='extdiff') + if not perfile: + # Run the external tool on the 2 temp directories or the patches + cmdline = formatcmdline( + cmdline, repo.root, do3way=do3way, + parent1=dir1a, plabel1=label1a, + parent2=dir1b, plabel2=label1b, + child=dir2, clabel=label2) + ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline), + tmproot)) + ui.system(cmdline, cwd=tmproot, blockedtag='extdiff') + else: + # Run the external tool once for each pair of files + _runperfilediff( + cmdline, repo.root, ui, guitool=guitool, + do3way=do3way, confirm=confirm, + commonfiles=common, tmproot=tmproot, dir1a=dir1a, dir1b=dir1b, + dir2root=dir2root, dir2=dir2, + rev1a=rev1a, rev1b=rev1b, rev2=rev2) for copy_fn, working_fn, st in fnsandstat: cpstat = 
os.lstat(copy_fn) @@ -340,6 +470,10 @@ _('pass option to comparison program'), _('OPT')), ('r', 'rev', [], _('revision'), _('REV')), ('c', 'change', '', _('change made by revision'), _('REV')), + ('', 'per-file', False, + _('compare each file instead of revision snapshots')), + ('', 'confirm', False, + _('prompt user before each external program invocation')), ('', 'patch', None, _('compare patches for two revisions')) ] + cmdutil.walkopts + cmdutil.subrepoopts @@ -357,15 +491,29 @@ default options "-Npru". To select a different program, use the -p/--program option. The - program will be passed the names of two directories to compare. To - pass additional options to the program, use -o/--option. These - will be passed before the names of the directories to compare. + program will be passed the names of two directories to compare, + unless the --per-file option is specified (see below). To pass + additional options to the program, use -o/--option. These will be + passed before the names of the directories or files to compare. When two revision arguments are given, then changes are shown between those revisions. If only one revision is specified then that revision is compared to the working directory, and, when no revisions are specified, the working directory files are compared - to its parent.''' + to its parent. + + The --per-file option runs the external program repeatedly on each + file to diff, instead of once on two directories. By default, + this happens one by one, where the next file diff is open in the + external program only once the previous external program (for the + previous file diff) has exited. If the external program has a + graphical interface, it can open all the file diffs at once instead + of one by one. See :hg:`help -e extdiff` for information about how + to tell Mercurial that a given program has a graphical interface. + + The --confirm option will prompt the user before each invocation of + the external program. 
It is ignored if --per-file isn't specified. + ''' opts = pycompat.byteskwargs(opts) program = opts.get('program') option = opts.get('option') @@ -390,20 +538,22 @@ to its parent. """ - def __init__(self, path, cmdline): + def __init__(self, path, cmdline, isgui): # We can't pass non-ASCII through docstrings (and path is # in an unknown encoding anyway), but avoid double separators on # Windows docpath = stringutil.escapestr(path).replace(b'\\\\', b'\\') self.__doc__ %= {r'path': pycompat.sysstr(stringutil.uirepr(docpath))} self._cmdline = cmdline + self._isgui = isgui def __call__(self, ui, repo, *pats, **opts): opts = pycompat.byteskwargs(opts) options = ' '.join(map(procutil.shellquote, opts['option'])) if options: options = ' ' + options - return dodiff(ui, repo, self._cmdline + options, pats, opts) + return dodiff(ui, repo, self._cmdline + options, pats, opts, + guitool=self._isgui) def uisetup(ui): for cmd, path in ui.configitems('extdiff'): @@ -418,7 +568,8 @@ cmdline = procutil.shellquote(path) if diffopts: cmdline += ' ' + diffopts - elif cmd.startswith('opts.'): + isgui = ui.configbool('extdiff', 'gui.' + cmd) + elif cmd.startswith('opts.') or cmd.startswith('gui.'): continue else: if path: @@ -432,15 +583,20 @@ path = filemerge.findexternaltool(ui, cmd) or cmd cmdline = procutil.shellquote(path) diffopts = False + isgui = ui.configbool('extdiff', 'gui.' + cmd) # look for diff arguments in [diff-tools] then [merge-tools] if not diffopts: - args = ui.config('diff-tools', cmd+'.diffargs') or \ - ui.config('merge-tools', cmd+'.diffargs') - if args: - cmdline += ' ' + args + key = cmd + '.diffargs' + for section in ('diff-tools', 'merge-tools'): + args = ui.config(section, key) + if args: + cmdline += ' ' + args + if isgui is None: + isgui = ui.configbool(section, cmd + '.gui') or False + break command(cmd, extdiffopts[:], _('hg %s [OPTION]... 
[FILE]...') % cmd, helpcategory=command.CATEGORY_FILE_CONTENTS, - inferrepo=True)(savedcmd(path, cmdline)) + inferrepo=True)(savedcmd(path, cmdline, isgui)) # tell hggettext to extract docstrings from these functions: i18nfunctions = [savedcmd]
--- a/hgext/fastannotate/formatter.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fastannotate/formatter.py Tue Mar 19 16:36:59 2019 +0300 @@ -38,8 +38,8 @@ if self.opts.get('rev') == 'wdir()': orig = hexfunc hexfunc = lambda x: None if x is None else orig(x) - wnode = hexfunc(repo[None].p1().node()) + '+' - wrev = '%d' % repo[None].p1().rev() + wnode = hexfunc(repo['.'].node()) + '+' + wrev = '%d' % repo['.'].rev() wrevpad = '' if not opts.get('changeset'): # only show + if changeset is hidden wrev += '+'
--- a/hgext/fastannotate/protocol.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fastannotate/protocol.py Tue Mar 19 16:36:59 2019 +0300 @@ -71,7 +71,6 @@ for p in [actx.revmappath, actx.linelogpath]: if not os.path.exists(p): continue - content = '' with open(p, 'rb') as f: content = f.read() vfsbaselen = len(repo.vfs.base + '/')
--- a/hgext/fastannotate/support.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fastannotate/support.py Tue Mar 19 16:36:59 2019 +0300 @@ -109,7 +109,6 @@ def _remotefctxannotate(orig, self, follow=False, skiprevs=None, diffopts=None): # skipset: a set-like used to test if a fctx needs to be downloaded - skipset = None with context.fctxannotatecontext(self, follow, diffopts) as ac: skipset = revmap.revmap(ac.revmappath) return orig(self, follow, skiprevs=skiprevs, diffopts=diffopts,
--- a/hgext/fetch.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fetch.py Tue Mar 19 16:36:59 2019 +0300 @@ -68,7 +68,7 @@ if date: opts['date'] = dateutil.parsedate(date) - parent, _p2 = repo.dirstate.parents() + parent = repo.dirstate.p1() branch = repo.dirstate.branch() try: branchnode = repo.branchtip(branch)
--- a/hgext/fix.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fix.py Tue Mar 19 16:36:59 2019 +0300 @@ -601,9 +601,7 @@ if path not in ctx: return None fctx = ctx[path] - copied = fctx.renamed() - if copied: - copied = copied[0] + copied = fctx.copysource() return context.memfilectx( repo, memctx,
--- a/hgext/fsmonitor/__init__.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fsmonitor/__init__.py Tue Mar 19 16:36:59 2019 +0300 @@ -161,6 +161,12 @@ configitem('fsmonitor', 'blacklistusers', default=list, ) +configitem('fsmonitor', 'watchman_exe', + default='watchman', +) +configitem('fsmonitor', 'verbose', + default=True, +) configitem('experimental', 'fsmonitor.transaction_notify', default=False, ) @@ -172,11 +178,15 @@ def _handleunavailable(ui, state, ex): """Exception handler for Watchman interaction exceptions""" if isinstance(ex, watchmanclient.Unavailable): - if ex.warn: - ui.warn(str(ex) + '\n') + # experimental config: fsmonitor.verbose + if ex.warn and ui.configbool('fsmonitor', 'verbose'): + if 'illegal_fstypes' not in str(ex): + ui.warn(str(ex) + '\n') if ex.invalidate: state.invalidate() - ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg) + # experimental config: fsmonitor.verbose + if ui.configbool('fsmonitor', 'verbose'): + ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg) else: ui.log('fsmonitor', 'Watchman exception: %s\n', ex) @@ -240,24 +250,6 @@ clock = 'c:0:0' notefiles = [] - def fwarn(f, msg): - self._ui.warn('%s: %s\n' % (self.pathto(f), msg)) - return False - - def badtype(mode): - kind = _('unknown') - if stat.S_ISCHR(mode): - kind = _('character device') - elif stat.S_ISBLK(mode): - kind = _('block device') - elif stat.S_ISFIFO(mode): - kind = _('fifo') - elif stat.S_ISSOCK(mode): - kind = _('socket') - elif stat.S_ISDIR(mode): - kind = _('directory') - return _('unsupported file type (type is %s)') % kind - ignore = self._ignore dirignore = self._dirignore if unknown: @@ -379,6 +371,9 @@ fexists = entry['exists'] kind = getkind(fmode) + if '/.hg/' in fname or fname.endswith('/.hg'): + return bail('nested-repo-detected') + if not fexists: # if marked as deleted and we don't already have a change # record, mark it as deleted. 
If we already have an entry @@ -485,7 +480,7 @@ working = ctx2.rev() is None parentworking = working and ctx1 == self['.'] - match = match or matchmod.always(self.root, self.getcwd()) + match = match or matchmod.always() # Maybe we can use this opportunity to update Watchman's state. # Mercurial uses workingcommitctx and/or memctx to represent the part of @@ -752,6 +747,14 @@ repo, node, branchmerge, force, ancestor, mergeancestor, labels, matcher, **kwargs) +def repo_has_depth_one_nested_repo(repo): + for f in repo.wvfs.listdir(): + if os.path.isdir(os.path.join(repo.root, f, '.hg')): + msg = 'fsmonitor: sub-repository %r detected, fsmonitor disabled\n' + repo.ui.debug(msg % f) + return True + return False + def reposetup(ui, repo): # We don't work with largefiles or inotify exts = extensions.enabled() @@ -769,6 +772,9 @@ if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'): return + if repo_has_depth_one_nested_repo(repo): + return + fsmonitorstate = state.state(repo) if fsmonitorstate.mode == 'off': return
--- a/hgext/fsmonitor/pywatchman/__init__.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fsmonitor/pywatchman/__init__.py Tue Mar 19 16:36:59 2019 +0300 @@ -317,7 +317,7 @@ """ local unix domain socket transport """ sock = None - def __init__(self, sockpath, timeout): + def __init__(self, sockpath, timeout, watchman_exe): self.sockpath = sockpath self.timeout = timeout @@ -397,7 +397,7 @@ class WindowsNamedPipeTransport(Transport): """ connect to a named pipe """ - def __init__(self, sockpath, timeout): + def __init__(self, sockpath, timeout, watchman_exe): self.sockpath = sockpath self.timeout = int(math.ceil(timeout * 1000)) self._iobuf = None @@ -563,9 +563,10 @@ proc = None closed = True - def __init__(self, sockpath, timeout): + def __init__(self, sockpath, timeout, watchman_exe): self.sockpath = sockpath self.timeout = timeout + self.watchman_exe = watchman_exe def close(self): if self.proc: @@ -579,7 +580,7 @@ if self.proc: return self.proc args = [ - 'watchman', + self.watchman_exe, '--sockname={0}'.format(self.sockpath), '--logfile=/BOGUS', '--statefile=/BOGUS', @@ -756,6 +757,7 @@ unilateral = ['log', 'subscription'] tport = None useImmutableBser = None + watchman_exe = None def __init__(self, sockpath=None, @@ -763,10 +765,12 @@ transport=None, sendEncoding=None, recvEncoding=None, - useImmutableBser=False): + useImmutableBser=False, + watchman_exe=None): self.sockpath = sockpath self.timeout = timeout self.useImmutableBser = useImmutableBser + self.watchman_exe = watchman_exe if inspect.isclass(transport) and issubclass(transport, Transport): self.transport = transport @@ -817,7 +821,7 @@ if path: return path - cmd = ['watchman', '--output-encoding=bser', 'get-sockname'] + cmd = [self.watchman_exe, '--output-encoding=bser', 'get-sockname'] try: args = dict(stdout=subprocess.PIPE, stderr=subprocess.PIPE, @@ -858,7 +862,7 @@ if self.sockpath is None: self.sockpath = self._resolvesockname() - self.tport = self.transport(self.sockpath, self.timeout) + 
self.tport = self.transport(self.sockpath, self.timeout, self.watchman_exe) self.sendConn = self.sendCodec(self.tport) self.recvConn = self.recvCodec(self.tport)
--- a/hgext/fsmonitor/pywatchman/capabilities.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fsmonitor/pywatchman/capabilities.py Tue Mar 19 16:36:59 2019 +0300 @@ -62,7 +62,6 @@ vers['capabilities'] = {} for name in opts['optional']: vers['capabilities'][name] = check(parsed_version, name) - failed = False for name in opts['required']: have = check(parsed_version, name) vers['capabilities'][name] = have
--- a/hgext/fsmonitor/pywatchman/pybser.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fsmonitor/pywatchman/pybser.py Tue Mar 19 16:36:59 2019 +0300 @@ -267,7 +267,7 @@ key = key[3:] try: return self._values[self._keys.index(key)] - except ValueError as ex: + except ValueError: raise KeyError('_BunserDict has no key %s' % key) def __len__(self): @@ -420,7 +420,6 @@ def _pdu_info_helper(buf): - bser_version = -1 if buf[0:2] == EMPTY_HEADER[0:2]: bser_version = 1 bser_capabilities = 0
--- a/hgext/fsmonitor/watchmanclient.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/fsmonitor/watchmanclient.py Tue Mar 19 16:36:59 2019 +0300 @@ -82,9 +82,11 @@ try: if self._watchmanclient is None: self._firsttime = False + watchman_exe = self._ui.configpath('fsmonitor', 'watchman_exe') self._watchmanclient = pywatchman.client( timeout=self._timeout, - useImmutableBser=True) + useImmutableBser=True, + watchman_exe=watchman_exe) return self._watchmanclient.query(*watchmanargs) except pywatchman.CommandError as ex: if 'unable to resolve root' in ex.msg:
--- a/hgext/githelp.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/githelp.py Tue Mar 19 16:36:59 2019 +0300 @@ -25,6 +25,7 @@ encoding, error, fancyopts, + pycompat, registrar, scmutil, ) @@ -83,21 +84,22 @@ args = fancyopts.fancyopts(list(args), cmdoptions, opts, True) break except getopt.GetoptError as ex: - flag = None - if "requires argument" in ex.msg: + if r"requires argument" in ex.msg: raise - if ('--' + ex.opt) in ex.msg: - flag = '--' + ex.opt - elif ('-' + ex.opt) in ex.msg: - flag = '-' + ex.opt + if (r'--' + ex.opt) in ex.msg: + flag = '--' + pycompat.bytestr(ex.opt) + elif (r'-' + ex.opt) in ex.msg: + flag = '-' + pycompat.bytestr(ex.opt) else: - raise error.Abort(_("unknown option %s") % ex.opt) + raise error.Abort(_("unknown option %s") % + pycompat.bytestr(ex.opt)) try: args.remove(flag) except Exception: msg = _("unknown option '%s' packed with other options") hint = _("please try passing the option as its own flag: -%s") - raise error.Abort(msg % ex.opt, hint=hint % ex.opt) + raise error.Abort(msg % pycompat.bytestr(ex.opt), + hint=hint % pycompat.bytestr(ex.opt)) ui.warn(_("ignoring unknown option %s\n") % flag) @@ -119,7 +121,12 @@ for k, values in sorted(self.opts.iteritems()): for v in values: if v: - cmd += " %s %s" % (k, v) + if isinstance(v, int): + fmt = ' %s %d' + else: + fmt = ' %s %s' + + cmd += fmt % (k, v) else: cmd += " %s" % (k,) if self.args:
--- a/hgext/gpg.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/gpg.py Tue Mar 19 16:36:59 2019 +0300 @@ -297,7 +297,7 @@ return if not opts["force"]: - msigs = match.exact(repo.root, '', ['.hgsigs']) + msigs = match.exact(['.hgsigs']) if any(repo.status(match=msigs, unknown=True, ignored=True)): raise error.Abort(_("working copy of .hgsigs is changed "), hint=_("please commit .hgsigs manually"))
--- a/hgext/histedit.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/histedit.py Tue Mar 19 16:36:59 2019 +0300 @@ -156,6 +156,15 @@ [histedit] linelen = 120 # truncate rule lines at 120 characters +The summary of a change can be customized as well:: + + [histedit] + summary-template = '{rev} {bookmarks} {desc|firstline}' + +The customized summary should be kept short enough that rule lines +will fit in the configured line length. See above if that requires +customization. + ``hg histedit`` attempts to automatically choose an appropriate base revision to use. To change which base revision is used, define a revset in your configuration file:: @@ -248,6 +257,8 @@ configitem('ui', 'interface.histedit', default=None, ) +configitem('histedit', 'summary-template', + default='{rev} {desc|firstline}') # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should @@ -480,8 +491,11 @@ <hash> <rev> <summary> """ ctx = self.repo[self.node] - summary = _getsummary(ctx) - line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary) + ui = self.repo.ui + summary = cmdutil.rendertemplate( + ctx, ui.config('histedit', 'summary-template')) or '' + summary = summary.splitlines()[0] + line = '%s %s %s' % (self.verb, ctx, summary) # trim to 75 columns by default so it's not stupidly wide in my editor # (the 5 more are left for verb) maxlen = self.repo.ui.configint('histedit', 'linelen') @@ -575,7 +589,7 @@ def applychanges(ui, repo, ctx, opts): """Merge changeset from ctx (only) in the current working directory""" - wcpar = repo.dirstate.parents()[0] + wcpar = repo.dirstate.p1() if ctx.p1().node() == wcpar: # edits are "in place" we do not need to make any merge, # just applies changes on parent for editing @@ -608,7 +622,7 @@ if not c.mutable(): raise error.ParseError( _("cannot fold into public change %s") % node.short(c.node())) - base = firstctx.parents()[0] + base = firstctx.p1() # commit a new 
version of the old changeset, including the update # collect all files which might be affected @@ -693,7 +707,7 @@ class pick(histeditaction): def run(self): rulectx = self.repo[self.node] - if rulectx.parents()[0].node() == self.state.parentctxnode: + if rulectx.p1().node() == self.state.parentctxnode: self.repo.ui.debug('node %s unchanged\n' % node.short(self.node)) return rulectx, [] @@ -724,7 +738,7 @@ super(fold, self).verify(prev, expected, seen) repo = self.repo if not prev: - c = repo[self.node].parents()[0] + c = repo[self.node].p1() elif not prev.verb in ('pick', 'base'): return else: @@ -795,7 +809,7 @@ return False def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges): - parent = ctx.parents()[0].node() + parent = ctx.p1().node() hg.updaterepo(repo, parent, overwrite=False) ### prepare new commit data commitopts = {} @@ -934,6 +948,12 @@ # Curses Support try: import curses + + # Curses requires setting the locale or it will default to the C + # locale. This sets the locale to the user's default system + # locale. 
+ import locale + locale.setlocale(locale.LC_ALL, r'') except ImportError: curses = None @@ -943,7 +963,7 @@ 'roll': '^roll', } -COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN = 1, 2, 3, 4 +COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5 E_QUIT, E_HISTEDIT = 1, 2 E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7 @@ -1223,6 +1243,7 @@ curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE) curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW) curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN) + curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA) # don't display the cursor try: @@ -1246,7 +1267,7 @@ line = "changeset: {0}:{1:<12}".format(ctx.rev(), ctx) win.addstr(1, 1, line[:length]) - line = "user: {0}".format(stringutil.shortuser(ctx.user())) + line = "user: {0}".format(ctx.user()) win.addstr(2, 1, line[:length]) bms = repo.nodebookmarks(ctx.node()) @@ -1313,7 +1334,8 @@ if y + start == selected: addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED)) elif y + start == pos: - addln(rulesscr, y, 2, rule, curses.A_BOLD) + addln(rulesscr, y, 2, rule, + curses.color_pair(COLOR_CURRENT) | curses.A_BOLD) else: addln(rulesscr, y, 2, rule) rulesscr.noutrefresh() @@ -1459,7 +1481,7 @@ 'exactly one common root')) root = rr[0].node() - topmost, empty = repo.dirstate.parents() + topmost = repo.dirstate.p1() revs = between(repo, root, topmost, keep) if not revs: raise error.Abort(_('%s is not an ancestor of working directory') % @@ -1472,7 +1494,7 @@ curses.echo() curses.endwin() if rc is False: - ui.write(_("chistedit aborted\n")) + ui.write(_("histedit aborted\n")) return 0 if type(rc) is list: ui.status(_("running histedit\n")) @@ -1760,7 +1782,7 @@ state.write(tr=tr) actobj = state.actions[0] progress.increment(item=actobj.torule()) - ui.debug('histedit: processing %s %s\n' % (actobj.verb,\ + ui.debug('histedit: processing %s %s\n' % 
(actobj.verb, actobj.torule())) parentctx, replacement_ = actobj.run() state.parentctxnode = parentctx.node() @@ -1859,7 +1881,7 @@ else: rules = _readfile(ui, rules) actions = parserules(rules, state) - ctxs = [repo[act.node] \ + ctxs = [repo[act.node] for act in state.actions if act.node] warnverifyactions(ui, repo, actions, state, ctxs) state.actions = actions @@ -1873,7 +1895,7 @@ cmdutil.checkunfinished(repo) cmdutil.bailifchanged(repo) - topmost, empty = repo.dirstate.parents() + topmost = repo.dirstate.p1() if outg: if freeargs: remote = freeargs[0] @@ -1902,7 +1924,7 @@ actions = parserules(rules, state) warnverifyactions(ui, repo, actions, state, ctxs) - parentctxnode = repo[root].parents()[0].node() + parentctxnode = repo[root].p1().node() state.parentctxnode = parentctxnode state.actions = actions
--- a/hgext/infinitepush/__init__.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/infinitepush/__init__.py Tue Mar 19 16:36:59 2019 +0300 @@ -282,8 +282,8 @@ scratchbranchpat = ui.config('infinitepush', 'branchpattern') if scratchbranchpat: global _scratchbranchmatcher - kind, pat, _scratchbranchmatcher = \ - stringutil.stringmatcher(scratchbranchpat) + kind, pat, _scratchbranchmatcher = ( + stringutil.stringmatcher(scratchbranchpat)) def serverextsetup(ui): origpushkeyhandler = bundle2.parthandlermapping['pushkey'] @@ -294,8 +294,8 @@ bundle2.parthandlermapping['pushkey'] = newpushkeyhandler orighandlephasehandler = bundle2.parthandlermapping['phase-heads'] - newphaseheadshandler = lambda *args, **kwargs: \ - bundle2handlephases(orighandlephasehandler, *args, **kwargs) + newphaseheadshandler = lambda *args, **kwargs: bundle2handlephases( + orighandlephasehandler, *args, **kwargs) newphaseheadshandler.params = orighandlephasehandler.params bundle2.parthandlermapping['phase-heads'] = newphaseheadshandler @@ -754,10 +754,10 @@ nametype_idx = 1 remote_idx = 2 name_idx = 3 - remotenames = [remotename for remotename in \ - remotenamesext.readremotenames(repo) \ + remotenames = [remotename for remotename in + remotenamesext.readremotenames(repo) if remotename[remote_idx] == path] - remote_bm_names = [remotename[name_idx] for remotename in \ + remote_bm_names = [remotename[name_idx] for remotename in remotenames if remotename[nametype_idx] == "bookmarks"] for name in names:
--- a/hgext/journal.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/journal.py Tue Mar 19 16:36:59 2019 +0300 @@ -194,8 +194,8 @@ return orig(ui, repo, repopath) class journalentry(collections.namedtuple( - u'journalentry', - u'timestamp user command namespace name oldhashes newhashes')): + r'journalentry', + r'timestamp user command namespace name oldhashes newhashes')): """Individual journal entry * timestamp: a mercurial (time, timezone) tuple @@ -348,7 +348,6 @@ def _write(self, vfs, entry): with self.jlock(vfs): - version = None # open file in amend mode to ensure it is created if missing with vfs('namejournal', mode='a+b') as f: f.seek(0, os.SEEK_SET)
--- a/hgext/largefiles/lfcommands.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/largefiles/lfcommands.py Tue Mar 19 16:36:59 2019 +0300 @@ -207,12 +207,12 @@ # the largefile-ness of its predecessor if f in ctx.manifest(): fctx = ctx.filectx(f) - renamed = fctx.renamed() + renamed = fctx.copysource() if renamed is None: # the code below assumes renamed to be a boolean or a list # and won't quite work with the value None renamed = False - renamedlfile = renamed and renamed[0] in lfiles + renamedlfile = renamed and renamed in lfiles islfile |= renamedlfile if 'l' in fctx.flags(): if renamedlfile: @@ -232,8 +232,8 @@ if f in ctx.manifest(): fctx = ctx.filectx(f) if 'l' in fctx.flags(): - renamed = fctx.renamed() - if renamed and renamed[0] in lfiles: + renamed = fctx.copysource() + if renamed and renamed in lfiles: raise error.Abort(_('largefile %s becomes symlink') % f) # largefile was modified, update standins @@ -259,11 +259,11 @@ fctx = ctx.filectx(srcfname) except error.LookupError: return None - renamed = fctx.renamed() + renamed = fctx.copysource() if renamed: # standin is always a largefile because largefile-ness # doesn't change after rename or copy - renamed = lfutil.standin(renamed[0]) + renamed = lfutil.standin(renamed) return context.memfilectx(repo, memctx, f, lfiletohash[srcfname] + '\n', @@ -288,12 +288,9 @@ files = set(ctx.files()) if node.nullid not in parents: mc = ctx.manifest() - mp1 = ctx.parents()[0].manifest() - mp2 = ctx.parents()[1].manifest() - files |= (set(mp1) | set(mp2)) - set(mc) - for f in mc: - if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None): - files.add(f) + for pctx in ctx.parents(): + for fn in pctx.manifest().diff(mc): + files.add(fn) return files # Convert src parents to dst parents @@ -311,9 +308,7 @@ fctx = ctx.filectx(f) except error.LookupError: return None - renamed = fctx.renamed() - if renamed: - renamed = renamed[0] + renamed = fctx.copysource() data = fctx.data() if f == '.hgtags': @@ -467,27 +462,26 @@ wvfs = 
repo.wvfs wctx = repo[None] for lfile in lfiles: - rellfile = lfile - rellfileorig = os.path.relpath( - scmutil.origpath(ui, repo, wvfs.join(rellfile)), + lfileorig = os.path.relpath( + scmutil.backuppath(ui, repo, lfile), start=repo.root) - relstandin = lfutil.standin(lfile) - relstandinorig = os.path.relpath( - scmutil.origpath(ui, repo, wvfs.join(relstandin)), + standin = lfutil.standin(lfile) + standinorig = os.path.relpath( + scmutil.backuppath(ui, repo, standin), start=repo.root) - if wvfs.exists(relstandin): - if (wvfs.exists(relstandinorig) and - wvfs.exists(rellfile)): - shutil.copyfile(wvfs.join(rellfile), - wvfs.join(rellfileorig)) - wvfs.unlinkpath(relstandinorig) - expecthash = lfutil.readasstandin(wctx[relstandin]) + if wvfs.exists(standin): + if (wvfs.exists(standinorig) and + wvfs.exists(lfile)): + shutil.copyfile(wvfs.join(lfile), + wvfs.join(lfileorig)) + wvfs.unlinkpath(standinorig) + expecthash = lfutil.readasstandin(wctx[standin]) if expecthash != '': if lfile not in wctx: # not switched to normal file - if repo.dirstate[relstandin] != '?': - wvfs.unlinkpath(rellfile, ignoremissing=True) + if repo.dirstate[standin] != '?': + wvfs.unlinkpath(lfile, ignoremissing=True) else: - dropped.add(rellfile) + dropped.add(lfile) # use normallookup() to allocate an entry in largefiles # dirstate to prevent lfilesrepo.status() from reporting @@ -499,9 +493,9 @@ # lfile is added to the repository again. This happens when a # largefile is converted back to a normal file: the standin # disappears, but a new (normal) file appears as the lfile. - if (wvfs.exists(rellfile) and + if (wvfs.exists(lfile) and repo.dirstate.normalize(lfile) not in wctx): - wvfs.unlinkpath(rellfile) + wvfs.unlinkpath(lfile) removed += 1 # largefile processing might be slow and be interrupted - be prepared @@ -535,19 +529,18 @@ # copy the exec mode of largefile standin from the repository's # dirstate to its state in the lfdirstate. 
- rellfile = lfile - relstandin = lfutil.standin(lfile) - if wvfs.exists(relstandin): + standin = lfutil.standin(lfile) + if wvfs.exists(standin): # exec is decided by the users permissions using mask 0o100 - standinexec = wvfs.stat(relstandin).st_mode & 0o100 - st = wvfs.stat(rellfile) + standinexec = wvfs.stat(standin).st_mode & 0o100 + st = wvfs.stat(lfile) mode = st.st_mode if standinexec != mode & 0o100: # first remove all X bits, then shift all R bits to X mode &= ~0o111 if standinexec: mode |= (mode >> 2) & 0o111 & ~util.umask - wvfs.chmod(rellfile, mode) + wvfs.chmod(lfile, mode) update1 = 1 updated += update1
--- a/hgext/largefiles/lfutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/largefiles/lfutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -76,8 +76,8 @@ if path: return path if pycompat.iswindows: - appdata = encoding.environ.get('LOCALAPPDATA',\ - encoding.environ.get('APPDATA')) + appdata = encoding.environ.get('LOCALAPPDATA', + encoding.environ.get('APPDATA')) if appdata: return os.path.join(appdata, name) elif pycompat.isdarwin: @@ -168,7 +168,7 @@ def lfdirstatestatus(lfdirstate, repo): pctx = repo['.'] - match = matchmod.always(repo.root, repo.getcwd()) + match = matchmod.always() unsure, s = lfdirstate.status(match, subrepos=[], ignored=False, clean=False, unknown=False) modified, clean = s.modified, s.clean @@ -518,8 +518,8 @@ files = set(ctx.files()) if len(parents) == 2: mc = ctx.manifest() - mp1 = ctx.parents()[0].manifest() - mp2 = ctx.parents()[1].manifest() + mp1 = ctx.p1().manifest() + mp2 = ctx.p2().manifest() for f in mp1: if f not in mc: files.add(f) @@ -552,7 +552,7 @@ # otherwise to update all standins if the largefiles are # large. lfdirstate = openlfdirstate(ui, repo) - dirtymatch = matchmod.always(repo.root, repo.getcwd()) + dirtymatch = matchmod.always() unsure, s = lfdirstate.status(dirtymatch, subrepos=[], ignored=False, clean=False, unknown=False) modifiedfiles = unsure + s.modified + s.added + s.removed
--- a/hgext/largefiles/overrides.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/largefiles/overrides.py Tue Mar 19 16:36:59 2019 +0300 @@ -24,6 +24,7 @@ copies as copiesmod, error, exchange, + extensions, exthelper, filemerge, hg, @@ -77,49 +78,7 @@ m.matchfn = lambda f: notlfile(f) and origmatchfn(f) return m -def installnormalfilesmatchfn(manifest): - '''installmatchfn with a matchfn that ignores all largefiles''' - def overridematch(ctx, pats=(), opts=None, globbed=False, - default='relpath', badfn=None): - if opts is None: - opts = {} - match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn) - return composenormalfilematcher(match, manifest) - oldmatch = installmatchfn(overridematch) - -def installmatchfn(f): - '''monkey patch the scmutil module with a custom match function. - Warning: it is monkey patching the _module_ on runtime! Not thread safe!''' - oldmatch = scmutil.match - setattr(f, 'oldmatch', oldmatch) - scmutil.match = f - return oldmatch - -def restorematchfn(): - '''restores scmutil.match to what it was before installmatchfn - was called. no-op if scmutil.match is its original function. - - Note that n calls to installmatchfn will require n calls to - restore the original matchfn.''' - scmutil.match = getattr(scmutil.match, 'oldmatch') - -def installmatchandpatsfn(f): - oldmatchandpats = scmutil.matchandpats - setattr(f, 'oldmatchandpats', oldmatchandpats) - scmutil.matchandpats = f - return oldmatchandpats - -def restorematchandpatsfn(): - '''restores scmutil.matchandpats to what it was before - installmatchandpatsfn was called. No-op if scmutil.matchandpats - is its original function. 
- - Note that n calls to installmatchandpatsfn will require n calls - to restore the original matchfn.''' - scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats', - scmutil.matchandpats) - -def addlargefiles(ui, repo, isaddremove, matcher, **opts): +def addlargefiles(ui, repo, isaddremove, matcher, uipathfn, **opts): large = opts.get(r'large') lfsize = lfutil.getminsize( ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize')) @@ -140,17 +99,11 @@ nfile = f in wctx exists = lfile or nfile - # addremove in core gets fancy with the name, add doesn't - if isaddremove: - name = m.uipath(f) - else: - name = m.rel(f) - # Don't warn the user when they attempt to add a normal tracked file. # The normal add code will do that for us. if exact and exists: if lfile: - ui.warn(_('%s already a largefile\n') % name) + ui.warn(_('%s already a largefile\n') % uipathfn(f)) continue if (exact or not exists) and not lfutil.isstandin(f): @@ -164,7 +117,7 @@ if large or abovemin or (lfmatcher and lfmatcher(f)): lfnames.append(f) if ui.verbose or not exact: - ui.status(_('adding %s as a largefile\n') % name) + ui.status(_('adding %s as a largefile\n') % uipathfn(f)) bad = [] @@ -191,7 +144,7 @@ added = [f for f in lfnames if f not in bad] return added, bad -def removelargefiles(ui, repo, isaddremove, matcher, dryrun, **opts): +def removelargefiles(ui, repo, isaddremove, matcher, uipathfn, dryrun, **opts): after = opts.get(r'after') m = composelargefilematcher(matcher, repo[None].manifest()) try: @@ -207,11 +160,9 @@ def warn(files, msg): for f in files: - ui.warn(msg % m.rel(f)) + ui.warn(msg % uipathfn(f)) return int(len(files) > 0) - result = 0 - if after: remove = deleted result = warn(modified + added + clean, @@ -229,12 +180,7 @@ lfdirstate = lfutil.openlfdirstate(ui, repo) for f in sorted(remove): if ui.verbose or not m.exact(f): - # addremove in core gets fancy with the name, remove doesn't - if isaddremove: - name = m.uipath(f) - else: - name = m.rel(f) - 
ui.status(_('removing %s\n') % name) + ui.status(_('removing %s\n') % uipathfn(f)) if not dryrun: if not after: @@ -278,27 +224,27 @@ return orig(ui, repo, *pats, **opts) @eh.wrapfunction(cmdutil, 'add') -def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts): +def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts): # The --normal flag short circuits this override if opts.get(r'normal'): - return orig(ui, repo, matcher, prefix, explicitonly, **opts) + return orig(ui, repo, matcher, prefix, uipathfn, explicitonly, **opts) - ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts) + ladded, lbad = addlargefiles(ui, repo, False, matcher, uipathfn, **opts) normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(), ladded) - bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts) + bad = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, **opts) bad.extend(f for f in lbad) return bad @eh.wrapfunction(cmdutil, 'remove') -def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos, - dryrun): +def cmdutilremove(orig, ui, repo, matcher, prefix, uipathfn, after, force, + subrepos, dryrun): normalmatcher = composenormalfilematcher(matcher, repo[None].manifest()) - result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos, - dryrun) - return removelargefiles(ui, repo, False, matcher, dryrun, after=after, - force=force) or result + result = orig(ui, repo, normalmatcher, prefix, uipathfn, after, force, + subrepos, dryrun) + return removelargefiles(ui, repo, False, matcher, uipathfn, dryrun, + after=after, force=force) or result @eh.wrapfunction(subrepo.hgsubrepo, 'status') def overridestatusfn(orig, repo, rev2, **opts): @@ -326,7 +272,7 @@ @eh.wrapcommand('log') def overridelog(orig, ui, repo, *pats, **opts): - def overridematchandpats(ctx, pats=(), opts=None, globbed=False, + def overridematchandpats(orig, ctx, pats=(), opts=None, globbed=False, default='relpath', 
badfn=None): """Matcher that merges root directory with .hglf, suitable for log. It is still possible to match .hglf directly. @@ -335,8 +281,7 @@ """ if opts is None: opts = {} - matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default, - badfn=badfn) + matchandpats = orig(ctx, pats, opts, globbed, default, badfn=badfn) m, p = copy.copy(matchandpats) if m.always(): @@ -356,9 +301,10 @@ return kindpat[0] + ':' + tostandin(kindpat[1]) return tostandin(kindpat[1]) - if m._cwd: + cwd = repo.getcwd() + if cwd: hglf = lfutil.shortname - back = util.pconvert(m.rel(hglf)[:-len(hglf)]) + back = util.pconvert(repo.pathto(hglf)[:-len(hglf)]) def tostandin(f): # The file may already be a standin, so truncate the back @@ -371,10 +317,10 @@ # path to the root before building the standin. Otherwise cwd # is somewhere in the repo, relative to root, and needs to be # prepended before building the standin. - if os.path.isabs(m._cwd): + if os.path.isabs(cwd): f = f[len(back):] else: - f = m._cwd + '/' + f + f = cwd + '/' + f return back + lfutil.standin(f) else: def tostandin(f): @@ -416,20 +362,18 @@ # (2) to determine what files to print out diffs for. # The magic matchandpats override should be used for case (1) but not for # case (2). 
- def overridemakefilematcher(repo, pats, opts, badfn=None): + oldmatchandpats = scmutil.matchandpats + def overridemakefilematcher(orig, repo, pats, opts, badfn=None): wctx = repo[None] match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn) return lambda ctx: match - oldmatchandpats = installmatchandpatsfn(overridematchandpats) - oldmakefilematcher = logcmdutil._makenofollowfilematcher - setattr(logcmdutil, '_makenofollowfilematcher', overridemakefilematcher) - - try: + wrappedmatchandpats = extensions.wrappedfunction(scmutil, 'matchandpats', + overridematchandpats) + wrappedmakefilematcher = extensions.wrappedfunction( + logcmdutil, '_makenofollowfilematcher', overridemakefilematcher) + with wrappedmatchandpats, wrappedmakefilematcher: return orig(ui, repo, *pats, **opts) - finally: - restorematchandpatsfn() - setattr(logcmdutil, '_makenofollowfilematcher', oldmakefilematcher) @eh.wrapcommand('verify', opts=[('', 'large', None, @@ -636,17 +580,22 @@ # match largefiles and run it again. nonormalfiles = False nolfiles = False - installnormalfilesmatchfn(repo[None].manifest()) - try: - result = orig(ui, repo, pats, opts, rename) - except error.Abort as e: - if pycompat.bytestr(e) != _('no files to copy'): - raise e - else: - nonormalfiles = True - result = 0 - finally: - restorematchfn() + manifest = repo[None].manifest() + def normalfilesmatchfn(orig, ctx, pats=(), opts=None, globbed=False, + default='relpath', badfn=None): + if opts is None: + opts = {} + match = orig(ctx, pats, opts, globbed, default, badfn=badfn) + return composenormalfilematcher(match, manifest) + with extensions.wrappedfunction(scmutil, 'match', normalfilesmatchfn): + try: + result = orig(ui, repo, pats, opts, rename) + except error.Abort as e: + if pycompat.bytestr(e) != _('no files to copy'): + raise e + else: + nonormalfiles = True + result = 0 # The first rename can cause our current working directory to be removed. # In that case there is nothing left to copy/rename so just quit. 
@@ -672,7 +621,7 @@ wlock = repo.wlock() manifest = repo[None].manifest() - def overridematch(ctx, pats=(), opts=None, globbed=False, + def overridematch(orig, ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None): if opts is None: opts = {} @@ -684,7 +633,7 @@ newpats.append(pat.replace(lfutil.shortname, '')) else: newpats.append(pat) - match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn) + match = orig(ctx, newpats, opts, globbed, default, badfn=badfn) m = copy.copy(match) lfile = lambda f: lfutil.standin(f) in manifest m._files = [lfutil.standin(f) for f in m._files if lfile(f)] @@ -698,7 +647,6 @@ None) m.matchfn = matchfn return m - oldmatch = installmatchfn(overridematch) listpats = [] for pat in pats: if matchmod.patkind(pat) is not None: @@ -706,23 +654,19 @@ else: listpats.append(makestandin(pat)) - try: - origcopyfile = util.copyfile - copiedfiles = [] - def overridecopyfile(src, dest, *args, **kwargs): - if (lfutil.shortname in src and - dest.startswith(repo.wjoin(lfutil.shortname))): - destlfile = dest.replace(lfutil.shortname, '') - if not opts['force'] and os.path.exists(destlfile): - raise IOError('', - _('destination largefile already exists')) - copiedfiles.append((src, dest)) - origcopyfile(src, dest, *args, **kwargs) - - util.copyfile = overridecopyfile - result += orig(ui, repo, listpats, opts, rename) - finally: - util.copyfile = origcopyfile + copiedfiles = [] + def overridecopyfile(orig, src, dest, *args, **kwargs): + if (lfutil.shortname in src and + dest.startswith(repo.wjoin(lfutil.shortname))): + destlfile = dest.replace(lfutil.shortname, '') + if not opts['force'] and os.path.exists(destlfile): + raise IOError('', + _('destination largefile already exists')) + copiedfiles.append((src, dest)) + orig(src, dest, *args, **kwargs) + with extensions.wrappedfunction(util, 'copyfile', overridecopyfile): + with extensions.wrappedfunction(scmutil, 'match', overridematch): + result += orig(ui, repo, listpats, opts, 
rename) lfdirstate = lfutil.openlfdirstate(ui, repo) for (src, dest) in copiedfiles: @@ -752,7 +696,6 @@ else: nolfiles = True finally: - restorematchfn() wlock.release() if nolfiles and nonormalfiles: @@ -787,11 +730,11 @@ oldstandins = lfutil.getstandinsstate(repo) - def overridematch(mctx, pats=(), opts=None, globbed=False, + def overridematch(orig, mctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None): if opts is None: opts = {} - match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn) + match = orig(mctx, pats, opts, globbed, default, badfn=badfn) m = copy.copy(match) # revert supports recursing into subrepos, and though largefiles @@ -822,11 +765,8 @@ return origmatchfn(f) m.matchfn = matchfn return m - oldmatch = installmatchfn(overridematch) - try: + with extensions.wrappedfunction(scmutil, 'match', overridematch): orig(ui, repo, ctx, parents, *pats, **opts) - finally: - restorematchfn() newstandins = lfutil.getstandinsstate(repo) filelist = lfutil.getlfilestoupdate(oldstandins, newstandins) @@ -1048,8 +988,9 @@ for subpath in sorted(ctx.substate): sub = ctx.workingsub(subpath) submatch = matchmod.subdirmatcher(subpath, match) + subprefix = prefix + subpath + '/' sub._repo.lfstatus = True - sub.archive(archiver, prefix, submatch) + sub.archive(archiver, subprefix, submatch) archiver.done() @@ -1075,7 +1016,7 @@ if decode: data = repo._repo.wwritedata(name, data) - archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data) + archiver.addfile(prefix + name, mode, islink, data) for f in ctx: ff = ctx.flags(f) @@ -1101,8 +1042,9 @@ for subpath in sorted(ctx.substate): sub = ctx.workingsub(subpath) submatch = matchmod.subdirmatcher(subpath, match) + subprefix = prefix + subpath + '/' sub._repo.lfstatus = True - sub.archive(archiver, prefix + repo._path + '/', submatch, decode) + sub.archive(archiver, subprefix, submatch, decode) # If a largefile is modified, the change is not reflected in its # standin until a commit. 
cmdutil.bailifchanged() raises an exception @@ -1126,11 +1068,11 @@ repo.lfstatus = False @eh.wrapfunction(cmdutil, 'forget') -def cmdutilforget(orig, ui, repo, match, prefix, explicitonly, dryrun, +def cmdutilforget(orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive): normalmatcher = composenormalfilematcher(match, repo[None].manifest()) - bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly, dryrun, - interactive) + bad, forgot = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, + dryrun, interactive) m = composelargefilematcher(match, repo[None].manifest()) try: @@ -1146,12 +1088,12 @@ fstandin = lfutil.standin(f) if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin): ui.warn(_('not removing %s: file is already untracked\n') - % m.rel(f)) + % uipathfn(f)) bad.append(f) for f in forget: if ui.verbose or not m.exact(f): - ui.status(_('removing %s\n') % m.rel(f)) + ui.status(_('removing %s\n') % uipathfn(f)) # Need to lock because standin files are deleted then removed from the # repository and we could race in-between. @@ -1273,16 +1215,15 @@ repo.lfstatus = False @eh.wrapfunction(scmutil, 'addremove') -def scmutiladdremove(orig, repo, matcher, prefix, opts=None): +def scmutiladdremove(orig, repo, matcher, prefix, uipathfn, opts=None): if opts is None: opts = {} if not lfutil.islfilesrepo(repo): - return orig(repo, matcher, prefix, opts) + return orig(repo, matcher, prefix, uipathfn, opts) # Get the list of missing largefiles so we can remove them lfdirstate = lfutil.openlfdirstate(repo.ui, repo) - unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()), - subrepos=[], ignored=False, clean=False, - unknown=False) + unsure, s = lfdirstate.status(matchmod.always(), subrepos=[], + ignored=False, clean=False, unknown=False) # Call into the normal remove code, but the removing of the standin, we want # to have handled by original addremove. 
Monkey patching here makes sure @@ -1298,17 +1239,17 @@ matchfn = m.matchfn m.matchfn = lambda f: f in s.deleted and matchfn(f) - removelargefiles(repo.ui, repo, True, m, opts.get('dry_run'), + removelargefiles(repo.ui, repo, True, m, uipathfn, opts.get('dry_run'), **pycompat.strkwargs(opts)) # Call into the normal add code, and any files that *should* be added as # largefiles will be - added, bad = addlargefiles(repo.ui, repo, True, matcher, + added, bad = addlargefiles(repo.ui, repo, True, matcher, uipathfn, **pycompat.strkwargs(opts)) # Now that we've handled largefiles, hand off to the original addremove # function to take care of the rest. Make sure it doesn't do anything with # largefiles by passing a matcher that will ignore them. matcher = composenormalfilematcher(matcher, repo[None].manifest(), added) - return orig(repo, matcher, prefix, opts) + return orig(repo, matcher, prefix, uipathfn, opts) # Calling purge with --all will cause the largefiles to be deleted. # Override repo.status to prevent this from happening. @@ -1472,10 +1413,8 @@ # (*1) deprecated, but used internally (e.g: "rebase --collapse") lfdirstate = lfutil.openlfdirstate(repo.ui, repo) - unsure, s = lfdirstate.status(matchmod.always(repo.root, - repo.getcwd()), - subrepos=[], ignored=False, - clean=True, unknown=False) + unsure, s = lfdirstate.status(matchmod.always(), subrepos=[], + ignored=False, clean=True, unknown=False) oldclean = set(s.clean) pctx = repo['.'] dctx = repo[node]
--- a/hgext/largefiles/reposetup.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/largefiles/reposetup.py Tue Mar 19 16:36:59 2019 +0300 @@ -103,7 +103,7 @@ parentworking = working and ctx1 == self['.'] if match is None: - match = matchmod.always(self.root, self.getcwd()) + match = matchmod.always() wlock = None try: @@ -174,8 +174,8 @@ if standin not in ctx1: # from second parent modified.append(lfile) - elif lfutil.readasstandin(ctx1[standin]) \ - != lfutil.hashfile(self.wjoin(lfile)): + elif (lfutil.readasstandin(ctx1[standin]) + != lfutil.hashfile(self.wjoin(lfile))): modified.append(lfile) else: if listclean:
--- a/hgext/largefiles/storefactory.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/largefiles/storefactory.py Tue Mar 19 16:36:59 2019 +0300 @@ -43,7 +43,6 @@ path, _branches = hg.parseurl(path) remote = hg.peer(repo or ui, {}, path) elif path == 'default-push' or path == 'default': - path = '' remote = repo else: path, _branches = hg.parseurl(path)
--- a/hgext/lfs/blobstore.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/lfs/blobstore.py Tue Mar 19 16:36:59 2019 +0300 @@ -42,7 +42,7 @@ def join(self, path): """split the path at first two characters, like: XX/XXXXX...""" if not _lfsre.match(path): - raise error.ProgrammingError('unexpected lfs path: %s' % path) + raise error.ProgrammingError(b'unexpected lfs path: %s' % path) return super(lfsvfs, self).join(path[0:2], path[2:]) def walk(self, path=None, onerror=None): @@ -56,7 +56,8 @@ prefixlen = len(pathutil.normasprefix(root)) oids = [] - for dirpath, dirs, files in os.walk(self.reljoin(self.base, path or ''), + for dirpath, dirs, files in os.walk(self.reljoin(self.base, path + or b''), onerror=onerror): dirpath = dirpath[prefixlen:] @@ -79,10 +80,11 @@ # self.vfs. Raise the same error as a normal vfs when asked to read a # file that doesn't exist. The only difference is the full file path # isn't available in the error. - raise IOError(errno.ENOENT, '%s: No such file or directory' % oid) + raise IOError(errno.ENOENT, + pycompat.sysstr(b'%s: No such file or directory' % oid)) def walk(self, path=None, onerror=None): - return ('', [], []) + return (b'', [], []) def write(self, oid, data): pass @@ -123,13 +125,13 @@ """ def __init__(self, repo): - fullpath = repo.svfs.join('lfs/objects') + fullpath = repo.svfs.join(b'lfs/objects') self.vfs = lfsvfs(fullpath) - if repo.ui.configbool('experimental', 'lfs.disableusercache'): + if repo.ui.configbool(b'experimental', b'lfs.disableusercache'): self.cachevfs = nullvfs() else: - usercache = lfutil._usercachedir(repo.ui, 'lfs') + usercache = lfutil._usercachedir(repo.ui, b'lfs') self.cachevfs = lfsvfs(usercache) self.ui = repo.ui @@ -143,23 +145,23 @@ # the usercache is the only place it _could_ be. If not present, the # missing file msg here will indicate the local repo, not the usercache. 
if self.cachevfs.exists(oid): - return self.cachevfs(oid, 'rb') + return self.cachevfs(oid, b'rb') - return self.vfs(oid, 'rb') + return self.vfs(oid, b'rb') def download(self, oid, src): """Read the blob from the remote source in chunks, verify the content, and write to this local blobstore.""" sha256 = hashlib.sha256() - with self.vfs(oid, 'wb', atomictemp=True) as fp: + with self.vfs(oid, b'wb', atomictemp=True) as fp: for chunk in util.filechunkiter(src, size=1048576): fp.write(chunk) sha256.update(chunk) realoid = node.hex(sha256.digest()) if realoid != oid: - raise LfsCorruptionError(_('corrupt remote lfs object: %s') + raise LfsCorruptionError(_(b'corrupt remote lfs object: %s') % oid) self._linktousercache(oid) @@ -170,7 +172,7 @@ This should only be called from the filelog during a commit or similar. As such, there is no need to verify the data. Imports from a remote store must use ``download()`` instead.""" - with self.vfs(oid, 'wb', atomictemp=True) as fp: + with self.vfs(oid, b'wb', atomictemp=True) as fp: fp.write(data) self._linktousercache(oid) @@ -186,7 +188,7 @@ """ if (not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid)): - self.ui.note(_('lfs: found %s in the usercache\n') % oid) + self.ui.note(_(b'lfs: found %s in the usercache\n') % oid) lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid)) def _linktousercache(self, oid): @@ -194,7 +196,7 @@ # the local store on success, but truncate, write and link on failure? if (not self.cachevfs.exists(oid) and not isinstance(self.cachevfs, nullvfs)): - self.ui.note(_('lfs: adding %s to the usercache\n') % oid) + self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid) lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid)) def read(self, oid, verify=True): @@ -208,10 +210,10 @@ # give more useful info about the corruption- simply don't add the # hardlink. 
if verify or node.hex(hashlib.sha256(blob).digest()) == oid: - self.ui.note(_('lfs: found %s in the usercache\n') % oid) + self.ui.note(_(b'lfs: found %s in the usercache\n') % oid) lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid)) else: - self.ui.note(_('lfs: found %s in the local lfs store\n') % oid) + self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid) blob = self._read(self.vfs, oid, verify) return blob @@ -262,26 +264,45 @@ else: return stringutil.forcebytestr(urlerror) +class lfsauthhandler(util.urlreq.basehandler): + handler_order = 480 # Before HTTPDigestAuthHandler (== 490) + + def http_error_401(self, req, fp, code, msg, headers): + """Enforces that any authentication performed is HTTP Basic + Authentication. No authentication is also acceptable. + """ + authreq = headers.get(r'www-authenticate', None) + if authreq: + scheme = authreq.split()[0] + + if scheme.lower() != r'basic': + msg = _(b'the server must support Basic Authentication') + raise util.urlerr.httperror(req.get_full_url(), code, + encoding.strfromlocal(msg), headers, + fp) + return None + class _gitlfsremote(object): def __init__(self, repo, url): ui = repo.ui self.ui = ui baseurl, authinfo = url.authinfo() - self.baseurl = baseurl.rstrip('/') - useragent = repo.ui.config('experimental', 'lfs.user-agent') + self.baseurl = baseurl.rstrip(b'/') + useragent = repo.ui.config(b'experimental', b'lfs.user-agent') if not useragent: - useragent = 'git-lfs/2.3.4 (Mercurial %s)' % util.version() + useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version() self.urlopener = urlmod.opener(ui, authinfo, useragent) - self.retry = ui.configint('lfs', 'retry') + self.urlopener.add_handler(lfsauthhandler()) + self.retry = ui.configint(b'lfs', b'retry') def writebatch(self, pointers, fromstore): """Batch upload from local to remote blobstore.""" - self._batch(_deduplicate(pointers), fromstore, 'upload') + self._batch(_deduplicate(pointers), fromstore, b'upload') def readbatch(self, 
pointers, tostore): """Batch download from remote to local blostore.""" - self._batch(_deduplicate(pointers), tostore, 'download') + self._batch(_deduplicate(pointers), tostore, b'download') def _batchrequest(self, pointers, action): """Get metadata about objects pointed by pointers for given action @@ -289,52 +310,63 @@ Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]} See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md """ - objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers] - requestdata = json.dumps({ - 'objects': objects, - 'operation': action, - }) - url = '%s/objects/batch' % self.baseurl - batchreq = util.urlreq.request(url, data=requestdata) - batchreq.add_header('Accept', 'application/vnd.git-lfs+json') - batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json') + objects = [{r'oid': pycompat.strurl(p.oid()), + r'size': p.size()} for p in pointers] + requestdata = pycompat.bytesurl(json.dumps({ + r'objects': objects, + r'operation': pycompat.strurl(action), + })) + url = b'%s/objects/batch' % self.baseurl + batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata) + batchreq.add_header(r'Accept', r'application/vnd.git-lfs+json') + batchreq.add_header(r'Content-Type', r'application/vnd.git-lfs+json') try: with contextlib.closing(self.urlopener.open(batchreq)) as rsp: rawjson = rsp.read() except util.urlerr.httperror as ex: hints = { - 400: _('check that lfs serving is enabled on %s and "%s" is ' - 'supported') % (self.baseurl, action), - 404: _('the "lfs.url" config may be used to override %s') + 400: _(b'check that lfs serving is enabled on %s and "%s" is ' + b'supported') % (self.baseurl, action), + 404: _(b'the "lfs.url" config may be used to override %s') % self.baseurl, } - hint = hints.get(ex.code, _('api=%s, action=%s') % (url, action)) - raise LfsRemoteError(_('LFS HTTP error: %s') % ex, hint=hint) + hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action)) + raise 
LfsRemoteError( + _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex), + hint=hint) except util.urlerr.urlerror as ex: - hint = (_('the "lfs.url" config may be used to override %s') + hint = (_(b'the "lfs.url" config may be used to override %s') % self.baseurl) - raise LfsRemoteError(_('LFS error: %s') % _urlerrorreason(ex), + raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex), hint=hint) try: response = json.loads(rawjson) except ValueError: - raise LfsRemoteError(_('LFS server returns invalid JSON: %s') - % rawjson) + raise LfsRemoteError(_(b'LFS server returns invalid JSON: %s') + % rawjson.encode("utf-8")) if self.ui.debugflag: - self.ui.debug('Status: %d\n' % rsp.status) + self.ui.debug(b'Status: %d\n' % rsp.status) # lfs-test-server and hg serve return headers in different order - self.ui.debug('%s\n' - % '\n'.join(sorted(str(rsp.info()).splitlines()))) + headers = pycompat.bytestr(rsp.info()).strip() + self.ui.debug(b'%s\n' + % b'\n'.join(sorted(headers.splitlines()))) - if 'objects' in response: - response['objects'] = sorted(response['objects'], - key=lambda p: p['oid']) - self.ui.debug('%s\n' - % json.dumps(response, indent=2, - separators=('', ': '), sort_keys=True)) + if r'objects' in response: + response[r'objects'] = sorted(response[r'objects'], + key=lambda p: p[r'oid']) + self.ui.debug(b'%s\n' + % pycompat.bytesurl( + json.dumps(response, indent=2, + separators=(r'', r': '), + sort_keys=True))) - return response + def encodestr(x): + if isinstance(x, pycompat.unicode): + return x.encode(u'utf-8') + return x + + return pycompat.rapply(encodestr, response) def _checkforservererror(self, pointers, responses, action): """Scans errors from objects @@ -345,34 +377,34 @@ # server implementation (ex. lfs-test-server) does not set "error" # but just removes "download" from "actions". Treat that case # as the same as 404 error. 
- if 'error' not in response: - if (action == 'download' - and action not in response.get('actions', [])): + if b'error' not in response: + if (action == b'download' + and action not in response.get(b'actions', [])): code = 404 else: continue else: # An error dict without a code doesn't make much sense, so # treat as a server error. - code = response.get('error').get('code', 500) + code = response.get(b'error').get(b'code', 500) ptrmap = {p.oid(): p for p in pointers} - p = ptrmap.get(response['oid'], None) + p = ptrmap.get(response[b'oid'], None) if p: - filename = getattr(p, 'filename', 'unknown') + filename = getattr(p, 'filename', b'unknown') errors = { - 404: 'The object does not exist', - 410: 'The object was removed by the owner', - 422: 'Validation error', - 500: 'Internal server error', + 404: b'The object does not exist', + 410: b'The object was removed by the owner', + 422: b'Validation error', + 500: b'Internal server error', } - msg = errors.get(code, 'status code %d' % code) - raise LfsRemoteError(_('LFS server error for "%s": %s') + msg = errors.get(code, b'status code %d' % code) + raise LfsRemoteError(_(b'LFS server error for "%s": %s') % (filename, msg)) else: raise LfsRemoteError( - _('LFS server error. Unsolicited response for oid %s') - % response['oid']) + _(b'LFS server error. Unsolicited response for oid %s') + % response[b'oid']) def _extractobjects(self, response, pointers, action): """extract objects from response of the batch API @@ -382,12 +414,13 @@ raise if any object has an error """ # Scan errors from objects - fail early - objects = response.get('objects', []) + objects = response.get(b'objects', []) self._checkforservererror(pointers, objects, action) # Filter objects with given action. Practically, this skips uploading # objects which exist in the server. 
- filteredobjects = [o for o in objects if action in o.get('actions', [])] + filteredobjects = [o for o in objects + if action in o.get(b'actions', [])] return filteredobjects @@ -401,36 +434,37 @@ See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\ basic-transfers.md """ - oid = pycompat.bytestr(obj['oid']) + oid = obj[b'oid'] + href = obj[b'actions'][action].get(b'href') + headers = obj[b'actions'][action].get(b'header', {}).items() - href = pycompat.bytestr(obj['actions'][action].get('href')) - headers = obj['actions'][action].get('header', {}).items() - - request = util.urlreq.request(href) - if action == 'upload': + request = util.urlreq.request(pycompat.strurl(href)) + if action == b'upload': # If uploading blobs, read data from local blobstore. if not localstore.verify(oid): - raise error.Abort(_('detected corrupt lfs object: %s') % oid, - hint=_('run hg verify')) + raise error.Abort(_(b'detected corrupt lfs object: %s') % oid, + hint=_(b'run hg verify')) request.data = filewithprogress(localstore.open(oid), None) - request.get_method = lambda: 'PUT' - request.add_header('Content-Type', 'application/octet-stream') + request.get_method = lambda: r'PUT' + request.add_header(r'Content-Type', r'application/octet-stream') + request.add_header(r'Content-Length', len(request.data)) for k, v in headers: - request.add_header(k, v) + request.add_header(pycompat.strurl(k), pycompat.strurl(v)) response = b'' try: with contextlib.closing(self.urlopener.open(request)) as req: ui = self.ui # Shorten debug lines if self.ui.debugflag: - ui.debug('Status: %d\n' % req.status) + ui.debug(b'Status: %d\n' % req.status) # lfs-test-server and hg serve return headers in different # order - ui.debug('%s\n' - % '\n'.join(sorted(str(req.info()).splitlines()))) + headers = pycompat.bytestr(req.info()).strip() + ui.debug(b'%s\n' + % b'\n'.join(sorted(headers.splitlines()))) - if action == 'download': + if action == b'download': # If downloading blobs, store downloaded data to 
local # blobstore localstore.download(oid, req) @@ -441,65 +475,65 @@ break response += data if response: - ui.debug('lfs %s response: %s' % (action, response)) + ui.debug(b'lfs %s response: %s' % (action, response)) except util.urlerr.httperror as ex: if self.ui.debugflag: - self.ui.debug('%s: %s\n' % (oid, ex.read())) - raise LfsRemoteError(_('LFS HTTP error: %s (oid=%s, action=%s)') - % (ex, oid, action)) + self.ui.debug(b'%s: %s\n' % (oid, ex.read())) # XXX: also bytes? + raise LfsRemoteError(_(b'LFS HTTP error: %s (oid=%s, action=%s)') + % (stringutil.forcebytestr(ex), oid, action)) except util.urlerr.urlerror as ex: - hint = (_('attempted connection to %s') - % util.urllibcompat.getfullurl(request)) - raise LfsRemoteError(_('LFS error: %s') % _urlerrorreason(ex), + hint = (_(b'attempted connection to %s') + % pycompat.bytesurl(util.urllibcompat.getfullurl(request))) + raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex), hint=hint) def _batch(self, pointers, localstore, action): - if action not in ['upload', 'download']: - raise error.ProgrammingError('invalid Git-LFS action: %s' % action) + if action not in [b'upload', b'download']: + raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action) response = self._batchrequest(pointers, action) objects = self._extractobjects(response, pointers, action) - total = sum(x.get('size', 0) for x in objects) + total = sum(x.get(b'size', 0) for x in objects) sizes = {} for obj in objects: - sizes[obj.get('oid')] = obj.get('size', 0) - topic = {'upload': _('lfs uploading'), - 'download': _('lfs downloading')}[action] + sizes[obj.get(b'oid')] = obj.get(b'size', 0) + topic = {b'upload': _(b'lfs uploading'), + b'download': _(b'lfs downloading')}[action] if len(objects) > 1: - self.ui.note(_('lfs: need to transfer %d objects (%s)\n') + self.ui.note(_(b'lfs: need to transfer %d objects (%s)\n') % (len(objects), util.bytecount(total))) def transfer(chunk): for obj in chunk: - objsize = obj.get('size', 0) + 
objsize = obj.get(b'size', 0) if self.ui.verbose: - if action == 'download': - msg = _('lfs: downloading %s (%s)\n') - elif action == 'upload': - msg = _('lfs: uploading %s (%s)\n') - self.ui.note(msg % (obj.get('oid'), + if action == b'download': + msg = _(b'lfs: downloading %s (%s)\n') + elif action == b'upload': + msg = _(b'lfs: uploading %s (%s)\n') + self.ui.note(msg % (obj.get(b'oid'), util.bytecount(objsize))) retry = self.retry while True: try: self._basictransfer(obj, action, localstore) - yield 1, obj.get('oid') + yield 1, obj.get(b'oid') break except socket.error as ex: if retry > 0: self.ui.note( - _('lfs: failed: %r (remaining retry %d)\n') - % (ex, retry)) + _(b'lfs: failed: %r (remaining retry %d)\n') + % (stringutil.forcebytestr(ex), retry)) retry -= 1 continue raise # Until https multiplexing gets sorted out - if self.ui.configbool('experimental', 'lfs.worker-enable'): + if self.ui.configbool(b'experimental', b'lfs.worker-enable'): oids = worker.worker(self.ui, 0.1, transfer, (), - sorted(objects, key=lambda o: o.get('oid'))) + sorted(objects, key=lambda o: o.get(b'oid'))) else: - oids = transfer(sorted(objects, key=lambda o: o.get('oid'))) + oids = transfer(sorted(objects, key=lambda o: o.get(b'oid'))) with self.ui.makeprogress(topic, total=total) as progress: progress.update(0) @@ -509,14 +543,14 @@ processed += sizes[oid] blobs += 1 progress.update(processed) - self.ui.note(_('lfs: processed: %s\n') % oid) + self.ui.note(_(b'lfs: processed: %s\n') % oid) if blobs > 0: - if action == 'upload': - self.ui.status(_('lfs: uploaded %d files (%s)\n') + if action == b'upload': + self.ui.status(_(b'lfs: uploaded %d files (%s)\n') % (blobs, util.bytecount(processed))) - elif action == 'download': - self.ui.status(_('lfs: downloaded %d files (%s)\n') + elif action == b'download': + self.ui.status(_(b'lfs: downloaded %d files (%s)\n') % (blobs, util.bytecount(processed))) def __del__(self): @@ -531,18 +565,18 @@ """Dummy store storing blobs to temp 
directory.""" def __init__(self, repo, url): - fullpath = repo.vfs.join('lfs', url.path) + fullpath = repo.vfs.join(b'lfs', url.path) self.vfs = lfsvfs(fullpath) def writebatch(self, pointers, fromstore): for p in _deduplicate(pointers): content = fromstore.read(p.oid(), verify=True) - with self.vfs(p.oid(), 'wb', atomictemp=True) as fp: + with self.vfs(p.oid(), b'wb', atomictemp=True) as fp: fp.write(content) def readbatch(self, pointers, tostore): for p in _deduplicate(pointers): - with self.vfs(p.oid(), 'rb') as fp: + with self.vfs(p.oid(), b'rb') as fp: tostore.download(p.oid(), fp) class _nullremote(object): @@ -570,13 +604,13 @@ self._prompt() def _prompt(self): - raise error.Abort(_('lfs.url needs to be configured')) + raise error.Abort(_(b'lfs.url needs to be configured')) _storemap = { - 'https': _gitlfsremote, - 'http': _gitlfsremote, - 'file': _dummyremote, - 'null': _nullremote, + b'https': _gitlfsremote, + b'http': _gitlfsremote, + b'file': _dummyremote, + b'null': _nullremote, None: _promptremote, } @@ -590,8 +624,8 @@ def _verify(oid, content): realoid = node.hex(hashlib.sha256(content).digest()) if realoid != oid: - raise LfsCorruptionError(_('detected corrupt lfs object: %s') % oid, - hint=_('run hg verify')) + raise LfsCorruptionError(_(b'detected corrupt lfs object: %s') % oid, + hint=_(b'run hg verify')) def remote(repo, remote=None): """remotestore factory. return a store in _storemap depending on config @@ -603,7 +637,7 @@ https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md """ - lfsurl = repo.ui.config('lfs', 'url') + lfsurl = repo.ui.config(b'lfs', b'url') url = util.url(lfsurl or '') if lfsurl is None: if remote: @@ -616,7 +650,7 @@ else: # TODO: investigate 'paths.remote:lfsurl' style path customization, # and fall back to inferring from 'paths.remote' if unspecified. 
- path = repo.ui.config('paths', 'default') or '' + path = repo.ui.config(b'paths', b'default') or b'' defaulturl = util.url(path) @@ -628,11 +662,11 @@ defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs' url = util.url(bytes(defaulturl)) - repo.ui.note(_('lfs: assuming remote store: %s\n') % url) + repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url) scheme = url.scheme if scheme not in _storemap: - raise error.Abort(_('lfs: unknown url scheme: %s') % scheme) + raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme) return _storemap[scheme](repo, url) class LfsRemoteError(error.StorageError):
--- a/hgext/lfs/wireprotolfsserver.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/lfs/wireprotolfsserver.py Tue Mar 19 16:36:59 2019 +0300 @@ -43,7 +43,7 @@ if orig(rctx, req, res, checkperm): return True - if not rctx.repo.ui.configbool('experimental', 'lfs.serve'): + if not rctx.repo.ui.configbool(b'experimental', b'lfs.serve'): return False if not util.safehasattr(rctx.repo.svfs, 'lfslocalblobstore'): @@ -54,7 +54,7 @@ try: if req.dispatchpath == b'.git/info/lfs/objects/batch': - checkperm(rctx, req, 'pull') + checkperm(rctx, req, b'pull') return _processbatchrequest(rctx.repo, req, res) # TODO: reserve and use a path in the proposed http wireprotocol /api/ # namespace? @@ -81,7 +81,7 @@ def _logexception(req): """Write information about the current exception to wsgi.errors.""" tb = pycompat.sysbytes(traceback.format_exc()) - errorlog = req.rawenv[r'wsgi.errors'] + errorlog = req.rawenv[b'wsgi.errors'] uri = b'' if req.apppath: @@ -133,25 +133,27 @@ lfsreq = json.loads(req.bodyfh.read()) # If no transfer handlers are explicitly requested, 'basic' is assumed. 
- if 'basic' not in lfsreq.get('transfers', ['basic']): + if r'basic' not in lfsreq.get(r'transfers', [r'basic']): _sethttperror(res, HTTP_BAD_REQUEST, b'Only the basic LFS transfer handler is supported') return True - operation = lfsreq.get('operation') - if operation not in ('upload', 'download'): + operation = lfsreq.get(r'operation') + operation = pycompat.bytestr(operation) + + if operation not in (b'upload', b'download'): _sethttperror(res, HTTP_BAD_REQUEST, b'Unsupported LFS transfer operation: %s' % operation) return True localstore = repo.svfs.lfslocalblobstore - objects = [p for p in _batchresponseobjects(req, lfsreq.get('objects', []), + objects = [p for p in _batchresponseobjects(req, lfsreq.get(r'objects', []), operation, localstore)] rsp = { - 'transfer': 'basic', - 'objects': objects, + r'transfer': r'basic', + r'objects': objects, } res.status = hgwebcommon.statusmessage(HTTP_OK) @@ -190,11 +192,12 @@ for obj in objects: # Convert unicode to ASCII to create a filesystem path - oid = obj.get('oid').encode('ascii') + soid = obj.get(r'oid') + oid = soid.encode(r'ascii') rsp = { - 'oid': oid, - 'size': obj.get('size'), # XXX: should this check the local size? - #'authenticated': True, + r'oid': soid, + r'size': obj.get(r'size'), # XXX: should this check the local size? + #r'authenticated': True, } exists = True @@ -209,7 +212,7 @@ # verified as the file is streamed to the caller. try: verifies = store.verify(oid) - if verifies and action == 'upload': + if verifies and action == b'upload': # The client will skip this upload, but make sure it remains # available locally. store.linkfromusercache(oid) @@ -217,9 +220,9 @@ if inst.errno != errno.ENOENT: _logexception(req) - rsp['error'] = { - 'code': 500, - 'message': inst.strerror or 'Internal Server Server' + rsp[r'error'] = { + r'code': 500, + r'message': inst.strerror or r'Internal Server Server' } yield rsp continue @@ -228,19 +231,19 @@ # Items are always listed for downloads. 
They are dropped for uploads # IFF they already exist locally. - if action == 'download': + if action == b'download': if not exists: - rsp['error'] = { - 'code': 404, - 'message': "The object does not exist" + rsp[r'error'] = { + r'code': 404, + r'message': r"The object does not exist" } yield rsp continue elif not verifies: - rsp['error'] = { - 'code': 422, # XXX: is this the right code? - 'message': "The object is corrupt" + rsp[r'error'] = { + r'code': 422, # XXX: is this the right code? + r'message': r"The object is corrupt" } yield rsp continue @@ -256,22 +259,22 @@ # a gratuitous deviation from lfs-test-server in the test # output. hdr = { - 'Accept': 'application/vnd.git-lfs' + r'Accept': r'application/vnd.git-lfs' } - auth = req.headers.get('Authorization', '') - if auth.startswith('Basic '): - hdr['Authorization'] = auth + auth = req.headers.get(b'Authorization', b'') + if auth.startswith(b'Basic '): + hdr[r'Authorization'] = pycompat.strurl(auth) return hdr - rsp['actions'] = { - '%s' % action: { - 'href': '%s%s/.hg/lfs/objects/%s' - % (req.baseurl, req.apppath, oid), + rsp[r'actions'] = { + r'%s' % pycompat.strurl(action): { + r'href': pycompat.strurl(b'%s%s/.hg/lfs/objects/%s' + % (req.baseurl, req.apppath, oid)), # datetime.isoformat() doesn't include the 'Z' suffix - "expires_at": expiresat.strftime('%Y-%m-%dT%H:%M:%SZ'), - 'header': _buildheader(), + r"expires_at": expiresat.strftime(r'%Y-%m-%dT%H:%M:%SZ'), + r'header': _buildheader(), } } @@ -297,7 +300,7 @@ return True if method == b'PUT': - checkperm('upload') + checkperm(b'upload') # TODO: verify Content-Type? @@ -324,7 +327,7 @@ return True elif method == b'GET': - checkperm('pull') + checkperm(b'pull') res.status = hgwebcommon.statusmessage(HTTP_OK) res.headers[b'Content-Type'] = b'application/octet-stream'
--- a/hgext/mq.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/mq.py Tue Mar 19 16:36:59 2019 +0300 @@ -738,10 +738,10 @@ for f in sorted(files): absf = repo.wjoin(f) if os.path.lexists(absf): + absorig = scmutil.backuppath(self.ui, repo, f) self.ui.note(_('saving current version of %s as %s\n') % - (f, scmutil.origpath(self.ui, repo, f))) - - absorig = scmutil.origpath(self.ui, repo, absf) + (f, os.path.relpath(absorig))) + if copy: util.copyfile(absf, absorig) else: @@ -970,7 +970,7 @@ repo.dirstate.remove(f) for f in merged: repo.dirstate.merge(f) - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() repo.setparents(p1, merge) if all_files and '.hgsubstate' in all_files: @@ -1181,7 +1181,7 @@ def makepatchname(self, title, fallbackname): """Return a suitable filename for title, adding a suffix to make it unique in the existing list""" - namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_') + namebase = re.sub(br'[\s\W_]+', b'_', title.lower()).strip(b'_') namebase = namebase[:75] # avoid too long name (issue5117) if namebase: try: @@ -1700,8 +1700,7 @@ # but we do it backwards to take advantage of manifest/changelog # caching against the next repo.status call mm, aa, dd = repo.status(patchparent, top)[:3] - changes = repo.changelog.read(top) - man = repo.manifestlog[changes[0]].read() + ctx = repo[top] aaa = aa[:] match1 = scmutil.match(repo[None], pats, opts) # in short mode, we only diff the files included in the @@ -1778,13 +1777,12 @@ repo.dirstate.add(dst) # remember the copies between patchparent and qtip for dst in aaa: - f = repo.file(dst) - src = f.renamed(man[dst]) + src = ctx[dst].copysource() if src: - copies.setdefault(src[0], []).extend( + copies.setdefault(src, []).extend( copies.get(dst, [])) if dst in a: - copies[src[0]].append(dst) + copies[src].append(dst) # we can't copy a file created by the patch itself if dst in copies: del copies[dst] @@ -1813,7 +1811,7 @@ for f in forget: repo.dirstate.drop(f) - user = ph.user or changes[1] + 
user = ph.user or ctx.user() oldphase = repo[top].phase() @@ -3521,7 +3519,7 @@ if self.mq.applied and self.mq.checkapplied and not force: parents = self.dirstate.parents() patches = [s.node for s in self.mq.applied] - if parents[0] in patches or parents[1] in patches: + if any(p in patches for p in parents): raise error.Abort(errmsg) def commit(self, text="", user=None, date=None, match=None,
--- a/hgext/narrow/narrowcommands.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/narrow/narrowcommands.py Tue Mar 19 16:36:59 2019 +0300 @@ -278,9 +278,9 @@ p1, p2 = ds.p1(), ds.p2() with ds.parentchange(): ds.setparents(node.nullid, node.nullid) - with wrappedextraprepare,\ - repo.ui.configoverride(overrides, 'widen'): - exchange.pull(repo, remote, heads=common) + with wrappedextraprepare: + with repo.ui.configoverride(overrides, 'widen'): + exchange.pull(repo, remote, heads=common) with ds.parentchange(): ds.setparents(p1, p2) else: @@ -296,11 +296,11 @@ 'ellipses': False, }).result() - with repo.transaction('widening') as tr,\ - repo.ui.configoverride(overrides, 'widen'): - tgetter = lambda: tr - bundle2.processbundle(repo, bundle, - transactiongetter=tgetter) + with repo.transaction('widening') as tr: + with repo.ui.configoverride(overrides, 'widen'): + tgetter = lambda: tr + bundle2.processbundle(repo, bundle, + transactiongetter=tgetter) with repo.transaction('widening'): repo.setnewnarrowpats() @@ -345,10 +345,14 @@ and replaced by the new ones specified to --addinclude and --addexclude. If --clear is specified without any further options, the narrowspec will be empty and will not match any files. + + --import-rules accepts a path to a file containing rules, allowing you to + add --addinclude, --addexclude rules in bulk. Like the other include and + exclude switches, the changes are applied immediately. """ opts = pycompat.byteskwargs(opts) if repository.NARROW_REQUIREMENT not in repo.requirements: - raise error.Abort(_('the narrow command is only supported on ' + raise error.Abort(_('the tracked command is only supported on ' 'respositories cloned with --narrow')) # Before supporting, decide whether it "hg tracked --clear" should mean
--- a/hgext/notify.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/notify.py Tue Mar 19 16:36:59 2019 +0300 @@ -367,8 +367,12 @@ raise error.Abort(inst) # store sender and subject - sender = encoding.strtolocal(msg[r'From']) - subject = encoding.strtolocal(msg[r'Subject']) + sender = msg[r'From'] + subject = msg[r'Subject'] + if sender is not None: + sender = encoding.strtolocal(sender) + if subject is not None: + subject = encoding.strtolocal(subject) del msg[r'From'], msg[r'Subject'] if not msg.is_multipart():
--- a/hgext/phabricator.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/phabricator.py Tue Mar 19 16:36:59 2019 +0300 @@ -60,6 +60,7 @@ parser, patch, phases, + pycompat, registrar, scmutil, smartset, @@ -127,7 +128,7 @@ fullflags = flags + _VCR_FLAGS def decorate(fn): def inner(*args, **kwargs): - cassette = kwargs.pop(r'test_vcr', None) + cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None)) if cassette: import hgdemandimport with hgdemandimport.deactivated(): @@ -136,8 +137,9 @@ vcr = vcrmod.VCR( serializer=r'json', custom_patches=[ - (urlmod, 'httpconnection', stubs.VCRHTTPConnection), - (urlmod, 'httpsconnection', + (urlmod, r'httpconnection', + stubs.VCRHTTPConnection), + (urlmod, r'httpsconnection', stubs.VCRHTTPSConnection), ]) with vcr.use_cassette(cassette): @@ -159,7 +161,8 @@ def process(prefix, obj): if isinstance(obj, bool): obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form - items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj)) + lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)] + items = {list: lister, dict: lambda x: x.items()}.get(type(obj)) if items is None: flatparams[prefix] = obj else: @@ -202,7 +205,7 @@ """call Conduit API, params is a dict. 
return json.loads result, or None""" host, token = readurltoken(repo) url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo() - repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params)) + repo.ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params))) params = params.copy() params[b'api.token'] = token data = urlencodenested(params) @@ -215,16 +218,20 @@ body = sout.read() else: urlopener = urlmod.opener(repo.ui, authinfo) - request = util.urlreq.request(url, data=data) + request = util.urlreq.request(pycompat.strurl(url), data=data) with contextlib.closing(urlopener.open(request)) as rsp: body = rsp.read() repo.ui.debug(b'Conduit Response: %s\n' % body) - parsed = json.loads(body) - if parsed.get(r'error_code'): + parsed = pycompat.rapply( + lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode) + else x, + json.loads(body) + ) + if parsed.get(b'error_code'): msg = (_(b'Conduit Error (%s): %s') - % (parsed[r'error_code'], parsed[r'error_info'])) + % (parsed[b'error_code'], parsed[b'error_info'])) raise error.Abort(msg) - return parsed[r'result'] + return parsed[b'result'] @vcrcommand(b'debugcallconduit', [], _(b'METHOD')) def debugcallconduit(ui, repo, name): @@ -233,10 +240,20 @@ Call parameters are read from stdin as a JSON blob. Result will be written to stdout as a JSON blob. 
""" - params = json.loads(ui.fin.read()) - result = callconduit(repo, name, params) - s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': ')) - ui.write(b'%s\n' % s) + # json.loads only accepts bytes from 3.6+ + rawparams = encoding.unifromlocal(ui.fin.read()) + # json.loads only returns unicode strings + params = pycompat.rapply(lambda x: + encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x, + json.loads(rawparams) + ) + # json.dumps only accepts unicode strings + result = pycompat.rapply(lambda x: + encoding.unifromlocal(x) if isinstance(x, bytes) else x, + callconduit(repo, name, params) + ) + s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': ')) + ui.write(b'%s\n' % encoding.unitolocal(s)) def getrepophid(repo): """given callsign, return repository PHID or None""" @@ -249,15 +266,15 @@ return None query = callconduit(repo, b'diffusion.repository.search', {b'constraints': {b'callsigns': [callsign]}}) - if len(query[r'data']) == 0: + if len(query[b'data']) == 0: return None - repophid = encoding.strtolocal(query[r'data'][0][r'phid']) + repophid = query[b'data'][0][b'phid'] repo.ui.setconfig(b'phabricator', b'repophid', repophid) return repophid -_differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z') +_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z') _differentialrevisiondescre = re.compile( - b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M) + br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M) def getoldnodedrevmap(repo, nodelist): """find previous nodes that has been sent to Phabricator @@ -277,7 +294,6 @@ The ``old node``, if not None, is guaranteed to be the last diff of corresponding Differential Revision, and exist in the repo. 
""" - url, token = readurltoken(repo) unfi = repo.unfiltered() nodemap = unfi.changelog.nodemap @@ -298,7 +314,7 @@ # Check commit message m = _differentialrevisiondescre.search(ctx.description()) if m: - toconfirm[node] = (1, set(precnodes), int(m.group(b'id'))) + toconfirm[node] = (1, set(precnodes), int(m.group(r'id'))) # Double check if tags are genuine by collecting all old nodes from # Phabricator, and expect precursors overlap with it. @@ -306,11 +322,11 @@ drevs = [drev for force, precs, drev in toconfirm.values()] alldiffs = callconduit(unfi, b'differential.querydiffs', {b'revisionIDs': drevs}) - getnode = lambda d: bin(encoding.unitolocal( - getdiffmeta(d).get(r'node', b''))) or None + getnode = lambda d: bin( + getdiffmeta(d).get(b'node', b'')) or None for newnode, (force, precset, drev) in toconfirm.items(): diffs = [d for d in alldiffs.values() - if int(d[r'revisionID']) == drev] + if int(d[b'revisionID']) == drev] # "precursors" as known by Phabricator phprecset = set(getnode(d) for d in diffs) @@ -329,7 +345,7 @@ # exists in the repo oldnode = lastdiff = None if diffs: - lastdiff = max(diffs, key=lambda d: int(d[r'id'])) + lastdiff = max(diffs, key=lambda d: int(d[b'id'])) oldnode = getnode(lastdiff) if oldnode and oldnode not in nodemap: oldnode = None @@ -362,25 +378,26 @@ def writediffproperties(ctx, diff): """write metadata to diff so patches could be applied losslessly""" params = { - b'diff_id': diff[r'id'], + b'diff_id': diff[b'id'], b'name': b'hg:meta', b'data': json.dumps({ - b'user': ctx.user(), - b'date': b'%d %d' % ctx.date(), - b'node': ctx.hex(), - b'parent': ctx.p1().hex(), + u'user': encoding.unifromlocal(ctx.user()), + u'date': u'{:.0f} {}'.format(*ctx.date()), + u'node': encoding.unifromlocal(ctx.hex()), + u'parent': encoding.unifromlocal(ctx.p1().hex()), }), } callconduit(ctx.repo(), b'differential.setdiffproperty', params) params = { - b'diff_id': diff[r'id'], + b'diff_id': diff[b'id'], b'name': b'local:commits', b'data': 
json.dumps({ - ctx.hex(): { - b'author': stringutil.person(ctx.user()), - b'authorEmail': stringutil.email(ctx.user()), - b'time': ctx.date()[0], + encoding.unifromlocal(ctx.hex()): { + u'author': encoding.unifromlocal(stringutil.person(ctx.user())), + u'authorEmail': encoding.unifromlocal( + stringutil.email(ctx.user())), + u'time': u'{:.0f}'.format(ctx.date()[0]), }, }), } @@ -409,7 +426,7 @@ transactions = [] if neednewdiff: diff = creatediff(ctx) - transactions.append({b'type': b'update', b'value': diff[r'phid']}) + transactions.append({b'type': b'update', b'value': diff[b'phid']}) else: # Even if we don't need to upload a new diff because the patch content # does not change. We might still need to update its metadata so @@ -423,7 +440,7 @@ # existing revision (revid is not None) since that introduces visible # churns (someone edited "Summary" twice) on the web page. if parentrevid and revid is None: - summary = b'Depends on D%s' % parentrevid + summary = b'Depends on D%d' % parentrevid transactions += [{b'type': b'summary', b'value': summary}, {b'type': b'summary', b'value': b' '}] @@ -434,7 +451,7 @@ desc = ctx.description() info = callconduit(repo, b'differential.parsecommitmessage', {b'corpus': desc}) - for k, v in info[r'fields'].items(): + for k, v in info[b'fields'].items(): if k in [b'title', b'summary', b'testPlan']: transactions.append({b'type': k, b'value': v}) @@ -451,17 +468,18 @@ def userphids(repo, names): """convert user names to PHIDs""" + names = [name.lower() for name in names] query = {b'constraints': {b'usernames': names}} result = callconduit(repo, b'user.search', query) # username not found is not an error of the API. So check if we have missed # some names here. 
- data = result[r'data'] - resolved = set(entry[r'fields'][r'username'] for entry in data) + data = result[b'data'] + resolved = set(entry[b'fields'][b'username'].lower() for entry in data) unresolved = set(names) - resolved if unresolved: raise error.Abort(_(b'unknown username: %s') % b' '.join(sorted(unresolved))) - return [entry[r'phid'] for entry in data] + return [entry[b'phid'] for entry in data] @vcrcommand(b'phabsend', [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')), @@ -497,6 +515,7 @@ phabsend will check obsstore and the above association to decide whether to update an existing Differential Revision, or create a new one. """ + opts = pycompat.byteskwargs(opts) revs = list(revs) + opts.get(b'rev', []) revs = scmutil.revrange(repo, revs) @@ -538,7 +557,7 @@ revision, diff = createdifferentialrevision( ctx, revid, lastrevid, oldnode, olddiff, actions) diffmap[ctx.node()] = diff - newrevid = int(revision[r'object'][r'id']) + newrevid = int(revision[b'object'][b'id']) if revid: action = b'updated' else: @@ -547,7 +566,7 @@ # Create a local tag to note the association, if commit message # does not have it already m = _differentialrevisiondescre.search(ctx.description()) - if not m or int(m.group(b'id')) != newrevid: + if not m or int(m.group(r'id')) != newrevid: tagname = b'D%d' % newrevid tags.tag(repo, tagname, ctx.node(), message=None, user=None, date=None, local=True) @@ -562,7 +581,7 @@ b'skipped': _(b'skipped'), b'updated': _(b'updated')}[action], b'phabricator.action.%s' % action) - drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev') + drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev') nodedesc = ui.label(bytes(ctx), b'phabricator.node') desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc') ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, @@ -580,9 +599,8 @@ for i, rev in enumerate(revs): old = unfi[rev] drevid = drevids[i] - drev = [d for d in drevs if int(d[r'id']) == drevid][0] + drev = [d 
for d in drevs if int(d[b'id']) == drevid][0] newdesc = getdescfromdrev(drev) - newdesc = encoding.unitolocal(newdesc) # Make sure commit message contain "Differential Revision" if old.description() != newdesc: if old.phase() == phases.public: @@ -613,8 +631,8 @@ # Map from "hg:meta" keys to header understood by "hg import". The order is # consistent with "hg export" output. -_metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'), - (r'node', b'Node ID'), (r'parent', b'Parent ')]) +_metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'), + (b'node', b'Node ID'), (b'parent', b'Parent ')]) def _confirmbeforesend(repo, revs, oldmap): url, token = readurltoken(repo) @@ -644,7 +662,7 @@ def _getstatusname(drev): """get normalized status name from a Differential Revision""" - return drev[r'statusName'].replace(b' ', b'').lower() + return drev[b'statusName'].replace(b' ', b'').lower() # Small language to specify differential revisions. Support symbols: (), :X, # +, and -. 
@@ -668,7 +686,7 @@ length = len(text) while pos < length: symbol = b''.join(itertools.takewhile(lambda ch: ch not in special, - view[pos:])) + pycompat.iterbytestr(view[pos:]))) if symbol: yield (b'symbol', symbol, pos) pos += len(symbol) @@ -756,14 +774,14 @@ """ def fetch(params): """params -> single drev or None""" - key = (params.get(r'ids') or params.get(r'phids') or [None])[0] + key = (params.get(b'ids') or params.get(b'phids') or [None])[0] if key in prefetched: return prefetched[key] drevs = callconduit(repo, b'differential.query', params) # Fill prefetched with the result for drev in drevs: - prefetched[drev[r'phid']] = drev - prefetched[int(drev[r'id'])] = drev + prefetched[drev[b'phid']] = drev + prefetched[int(drev[b'id'])] = drev if key not in prefetched: raise error.Abort(_(b'cannot get Differential Revision %r') % params) @@ -773,16 +791,16 @@ """given a top, get a stack from the bottom, [id] -> [id]""" visited = set() result = [] - queue = [{r'ids': [i]} for i in topdrevids] + queue = [{b'ids': [i]} for i in topdrevids] while queue: params = queue.pop() drev = fetch(params) - if drev[r'id'] in visited: + if drev[b'id'] in visited: continue - visited.add(drev[r'id']) - result.append(int(drev[r'id'])) - auxiliary = drev.get(r'auxiliary', {}) - depends = auxiliary.get(r'phabricator:depends-on', []) + visited.add(drev[b'id']) + result.append(int(drev[b'id'])) + auxiliary = drev.get(b'auxiliary', {}) + depends = auxiliary.get(b'phabricator:depends-on', []) for phid in depends: queue.append({b'phids': [phid]}) result.reverse() @@ -802,7 +820,7 @@ for r in ancestordrevs: tofetch.update(range(max(1, r - batchsize), r + 1)) if drevs: - fetch({r'ids': list(tofetch)}) + fetch({b'ids': list(tofetch)}) validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs)) # Walk through the tree, return smartsets @@ -836,12 +854,12 @@ This is similar to differential.getcommitmessage API. But we only care about limited fields: title, summary, test plan, and URL. 
""" - title = drev[r'title'] - summary = drev[r'summary'].rstrip() - testplan = drev[r'testPlan'].rstrip() + title = drev[b'title'] + summary = drev[b'summary'].rstrip() + testplan = drev[b'testPlan'].rstrip() if testplan: testplan = b'Test Plan:\n%s' % testplan - uri = b'Differential Revision: %s' % drev[r'uri'] + uri = b'Differential Revision: %s' % drev[b'uri'] return b'\n\n'.join(filter(None, [title, summary, testplan, uri])) def getdiffmeta(diff): @@ -881,17 +899,17 @@ Note: metadata extracted from "local:commits" will lose time zone information. """ - props = diff.get(r'properties') or {} - meta = props.get(r'hg:meta') - if not meta and props.get(r'local:commits'): - commit = sorted(props[r'local:commits'].values())[0] + props = diff.get(b'properties') or {} + meta = props.get(b'hg:meta') + if not meta and props.get(b'local:commits'): + commit = sorted(props[b'local:commits'].values())[0] meta = { - r'date': r'%d 0' % commit[r'time'], - r'node': commit[r'rev'], - r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']), + b'date': b'%d 0' % commit[b'time'], + b'node': commit[b'rev'], + b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']), } - if len(commit.get(r'parents', ())) >= 1: - meta[r'parent'] = commit[r'parents'][0] + if len(commit.get(b'parents', ())) >= 1: + meta[b'parent'] = commit[b'parents'][0] return meta or {} def readpatch(repo, drevs, write): @@ -901,14 +919,14 @@ "differential.query". 
""" # Prefetch hg:meta property for all diffs - diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs)) + diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs)) diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids}) # Generate patch for each drev for drev in drevs: - repo.ui.note(_(b'reading D%s\n') % drev[r'id']) + repo.ui.note(_(b'reading D%s\n') % drev[b'id']) - diffid = max(int(v) for v in drev[r'diffs']) + diffid = max(int(v) for v in drev[b'diffs']) body = callconduit(repo, b'differential.getrawdiff', {b'diffID': diffid}) desc = getdescfromdrev(drev) @@ -917,13 +935,13 @@ # Try to preserve metadata from hg:meta property. Write hg patch # headers that can be read by the "import" command. See patchheadermap # and extract in mercurial/patch.py for supported headers. - meta = getdiffmeta(diffs[str(diffid)]) + meta = getdiffmeta(diffs[b'%d' % diffid]) for k in _metanamemap.keys(): if k in meta: header += b'# %s %s\n' % (_metanamemap[k], meta[k]) content = b'%s%s\n%s' % (header, desc, body) - write(encoding.unitolocal(content)) + write(content) @vcrcommand(b'phabread', [(b'', b'stack', False, _(b'read dependencies'))], @@ -948,6 +966,7 @@ If --stack is given, follow dependencies information and read all patches. It is equivalent to the ``:`` operator. """ + opts = pycompat.byteskwargs(opts) if opts.get(b'stack'): spec = b':(%s)' % spec drevs = querydrev(repo, spec) @@ -966,6 +985,7 @@ DREVSPEC selects revisions. See :hg:`help phabread` for its usage. 
""" + opts = pycompat.byteskwargs(opts) flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)] if len(flags) > 1: raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags)) @@ -979,7 +999,7 @@ if i + 1 == len(drevs) and opts.get(b'comment'): actions.append({b'type': b'comment', b'value': opts[b'comment']}) if actions: - params = {b'objectIdentifier': drev[r'phid'], + params = {b'objectIdentifier': drev[b'phid'], b'transactions': actions} callconduit(repo, b'differential.revision.edit', params) @@ -994,8 +1014,8 @@ m = _differentialrevisiondescre.search(ctx.description()) if m: return templateutil.hybriddict({ - b'url': m.group(b'url'), - b'id': b"D{}".format(m.group(b'id')), + b'url': m.group(r'url'), + b'id': b"D%s" % m.group(r'id'), }) else: tags = ctx.repo().nodetags(ctx.node())
--- a/hgext/rebase.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/rebase.py Tue Mar 19 16:36:59 2019 +0300 @@ -1278,7 +1278,7 @@ return stats def adjustdest(repo, rev, destmap, state, skipped): - """adjust rebase destination given the current rebase state + r"""adjust rebase destination given the current rebase state rev is what is being rebased. Return a list of two revs, which are the adjusted destinations for rev's p1 and p2, respectively. If a parent is @@ -1804,7 +1804,6 @@ def pullrebase(orig, ui, repo, *args, **opts): 'Call rebase after pull if the latter has been invoked with --rebase' - ret = None if opts.get(r'rebase'): if ui.configbool('commands', 'rebase.requiredest'): msg = _('rebase destination required by configuration')
--- a/hgext/record.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/record.py Tue Mar 19 16:36:59 2019 +0300 @@ -119,6 +119,7 @@ overrides = {('experimental', 'crecord'): False} with ui.configoverride(overrides, 'record'): + cmdutil.checkunfinished(repo) cmdutil.dorecord(ui, repo, committomq, cmdsuggest, False, cmdutil.recordfilter, *pats, **opts) @@ -134,12 +135,12 @@ except KeyError: return - cmdtable["qrecord"] = \ - (qrecord, - # same options as qnew, but copy them so we don't get - # -i/--interactive for qrecord and add white space diff options - mq.cmdtable['qnew'][1][:] + cmdutil.diffwsopts, - _('hg qrecord [OPTION]... PATCH [FILE]...')) + cmdtable["qrecord"] = ( + qrecord, + # same options as qnew, but copy them so we don't get + # -i/--interactive for qrecord and add white space diff options + mq.cmdtable['qnew'][1][:] + cmdutil.diffwsopts, + _('hg qrecord [OPTION]... PATCH [FILE]...')) _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch")) _wrapcmd('qrefresh', mq.cmdtable, qrefresh,
--- a/hgext/releasenotes.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/releasenotes.py Tue Mar 19 16:36:59 2019 +0300 @@ -55,7 +55,7 @@ ('api', _('API Changes')), ] -RE_DIRECTIVE = re.compile('^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$') +RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$') RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b' BULLET_SECTION = _('Other Changes') @@ -107,8 +107,9 @@ "releasenotes is disabled\n")) for section in other: - existingnotes = converttitled(self.titledforsection(section)) + \ - convertnontitled(self.nontitledforsection(section)) + existingnotes = ( + converttitled(self.titledforsection(section)) + + convertnontitled(self.nontitledforsection(section))) for title, paragraphs in other.titledforsection(section): if self.hastitledinsection(section, title): # TODO prompt for resolution if different and running in
--- a/hgext/remotefilelog/__init__.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/__init__.py Tue Mar 19 16:36:59 2019 +0300 @@ -159,7 +159,6 @@ scmutil, smartset, streamclone, - templatekw, util, ) from . import ( @@ -557,7 +556,7 @@ extensions.wrapfunction(dispatch, 'runcommand', runcommand) # disappointing hacks below - templatekw.getrenamedfn = getrenamedfn + scmutil.getrenamedfn = getrenamedfn extensions.wrapfunction(revset, 'filelog', filelogrevset) revset.symbols['filelog'] = revset.filelog extensions.wrapfunction(cmdutil, 'walkfilerevs', walkfilerevs) @@ -902,8 +901,7 @@ # If this is a non-follow log without any revs specified, recommend that # the user add -f to speed it up. if not follow and not revs: - match, pats = scmutil.matchandpats(repo['.'], pats, - pycompat.byteskwargs(opts)) + match = scmutil.match(repo['.'], pats, pycompat.byteskwargs(opts)) isfile = not match.anypats() if isfile: for file in match.files():
--- a/hgext/remotefilelog/basepack.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/basepack.py Tue Mar 19 16:36:59 2019 +0300 @@ -270,9 +270,9 @@ # only affect this instance self.VERSION = version elif self.VERSION != version: - raise RuntimeError('inconsistent version: %s' % version) + raise RuntimeError('inconsistent version: %d' % version) else: - raise RuntimeError('unsupported version: %s' % version) + raise RuntimeError('unsupported version: %d' % version) class basepack(versionmixin): # The maximum amount we should read via mmap before remmaping so the old @@ -457,8 +457,6 @@ pass def writeindex(self): - rawindex = '' - largefanout = len(self.entries) > SMALLFANOUTCUTOFF if largefanout: params = indexparams(LARGEFANOUTPREFIX, self.VERSION)
--- a/hgext/remotefilelog/basestore.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/basestore.py Tue Mar 19 16:36:59 2019 +0300 @@ -410,16 +410,18 @@ def wrapped(self, *args, **kwargs): retrylog = self.retrylog or noop funcname = fn.__name__ - for i in pycompat.xrange(self.numattempts): + i = 0 + while i < self.numattempts: if i > 0: retrylog('re-attempting (n=%d) %s\n' % (i, funcname)) self.markforrefresh() + i += 1 try: return fn(self, *args, **kwargs) except KeyError: - pass - # retries exhausted - retrylog('retries exhausted in %s, raising KeyError\n' % - pycompat.sysbytes(funcname)) - raise + if i == self.numattempts: + # retries exhausted + retrylog('retries exhausted in %s, raising KeyError\n' % + pycompat.sysbytes(funcname)) + raise return wrapped
--- a/hgext/remotefilelog/datapack.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/datapack.py Tue Mar 19 16:36:59 2019 +0300 @@ -242,7 +242,7 @@ entry = index[end:end + entrylen] else: while start < end - entrylen: - mid = start + (end - start) / 2 + mid = start + (end - start) // 2 mid = mid - ((mid - params.indexstart) % entrylen) midnode = index[mid:mid + NODELENGTH] if midnode == node: @@ -250,10 +250,8 @@ break if node > midnode: start = mid - startnode = midnode elif node < midnode: end = mid - endnode = midnode else: return None
--- a/hgext/remotefilelog/debugcommands.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/debugcommands.py Tue Mar 19 16:36:59 2019 +0300 @@ -16,6 +16,7 @@ error, filelog, node as nodemod, + pycompat, revlog, ) from . import ( @@ -175,7 +176,6 @@ return zlib.decompress(raw) def parsefileblob(path, decompress): - raw = None f = open(path, "rb") try: raw = f.read() @@ -277,11 +277,11 @@ totalblobsize += blobsize else: blobsize = "(missing)" - ui.write("%s %s %s%d\n" % ( + ui.write("%s %s %s%s\n" % ( hashformatter(node), hashformatter(deltabase), ('%d' % deltalen).ljust(14), - blobsize)) + pycompat.bytestr(blobsize))) if filename is not None: printtotals()
--- a/hgext/remotefilelog/fileserverclient.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/fileserverclient.py Tue Mar 19 16:36:59 2019 +0300 @@ -138,8 +138,8 @@ def connect(self, cachecommand): if self.pipeo: raise error.Abort(_("cache connection already open")) - self.pipei, self.pipeo, self.pipee, self.subprocess = \ - procutil.popen4(cachecommand) + self.pipei, self.pipeo, self.pipee, self.subprocess = ( + procutil.popen4(cachecommand)) self.connected = True def close(self):
--- a/hgext/remotefilelog/historypack.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/historypack.py Tue Mar 19 16:36:59 2019 +0300 @@ -259,10 +259,8 @@ return self._index[mid:mid + entrylen] if node > midnode: start = mid - startnode = midnode elif node < midnode: end = mid - endnode = midnode return None def markledger(self, ledger, options=None): @@ -514,7 +512,6 @@ fileindexentries.append(rawentry) - nodecountraw = '' nodecountraw = struct.pack('!Q', nodecount) return (''.join(fileindexentries) + nodecountraw + ''.join(nodeindexentries))
--- a/hgext/remotefilelog/remotefilectx.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/remotefilectx.py Tue Mar 19 16:36:59 2019 +0300 @@ -15,7 +15,6 @@ context, error, phases, - pycompat, util, ) from . import shallowutil @@ -39,11 +38,11 @@ @propertycache def _changeid(self): - if '_changeid' in self.__dict__: + if r'_changeid' in self.__dict__: return self._changeid - elif '_changectx' in self.__dict__: + elif r'_changectx' in self.__dict__: return self._changectx.rev() - elif '_descendantrev' in self.__dict__: + elif r'_descendantrev' in self.__dict__: # this file context was created from a revision with a known # descendant, we can (lazily) correct for linkrev aliases linknode = self._adjustlinknode(self._path, self._filelog, @@ -102,7 +101,7 @@ """ lkr = self.linkrev() attrs = vars(self) - noctx = not ('_changeid' in attrs or '_changectx' in attrs) + noctx = not (r'_changeid' in attrs or r'_changectx' in attrs) if noctx or self.rev() == lkr: return lkr linknode = self._adjustlinknode(self._path, self._filelog, @@ -137,6 +136,10 @@ pass return renamed + def copysource(self): + copy = self.renamed() + return copy and copy[0] + def ancestormap(self): if not self._ancestormap: self._ancestormap = self.filelog().ancestormap(self._filenode) @@ -316,7 +319,7 @@ finally: elapsed = time.time() - start repo.ui.log('linkrevfixup', logmsg + '\n', elapsed=elapsed * 1000, - **pycompat.strkwargs(commonlogkwargs)) + **commonlogkwargs) def _verifylinknode(self, revs, linknode): """ @@ -452,8 +455,8 @@ class remoteworkingfilectx(context.workingfilectx, remotefilectx): def __init__(self, repo, path, filelog=None, workingctx=None): self._ancestormap = None - return super(remoteworkingfilectx, self).__init__(repo, path, - filelog, workingctx) + super(remoteworkingfilectx, self).__init__(repo, path, filelog, + workingctx) def parents(self): return remotefilectx.parents(self)
--- a/hgext/remotefilelog/remotefilelog.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/remotefilelog.py Tue Mar 19 16:36:59 2019 +0300 @@ -61,8 +61,6 @@ return t[s + 2:] def add(self, text, meta, transaction, linknode, p1=None, p2=None): - hashtext = text - # hash with the metadata, like in vanilla filelogs hashtext = shallowutil.createrevlogtext(text, meta.get('copy'), meta.get('copyrev'))
--- a/hgext/remotefilelog/remotefilelogserver.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/remotefilelogserver.py Tue Mar 19 16:36:59 2019 +0300 @@ -54,7 +54,7 @@ elif cap.startswith("excludepattern="): excludepattern = cap[len("excludepattern="):].split('\0') - m = match.always(repo.root, '') + m = match.always() if includepattern or excludepattern: m = match.match(repo.root, '', None, includepattern, excludepattern) @@ -104,7 +104,7 @@ oldnoflatmf = state.noflatmf try: state.shallowremote = True - state.match = match.always(repo.root, '') + state.match = match.always() state.noflatmf = other.get('noflatmanifest') == 'True' if includepattern or excludepattern: state.match = match.match(repo.root, '', None,
--- a/hgext/remotefilelog/repack.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/repack.py Tue Mar 19 16:36:59 2019 +0300 @@ -338,7 +338,7 @@ packer = repacker(repo, data, history, fullhistory, category, gc=garbagecollect, isold=isold, options=options) - with datapack.mutabledatapack(repo.ui, packpath, version=2) as dpack: + with datapack.mutabledatapack(repo.ui, packpath) as dpack: with historypack.mutablehistorypack(repo.ui, packpath) as hpack: try: packer.run(dpack, hpack) @@ -601,7 +601,6 @@ # TODO: Optimize the deltachain fetching. Since we're # iterating over the different version of the file, we may # be fetching the same deltachain over and over again. - meta = None if deltabase != nullid: deltaentry = self.data.getdelta(filename, node) delta, deltabasename, origdeltabase, meta = deltaentry
--- a/hgext/remotefilelog/shallowbundle.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/shallowbundle.py Tue Mar 19 16:36:59 2019 +0300 @@ -162,7 +162,7 @@ repo.shallowmatch = match.match(repo.root, '', None, includepattern, excludepattern) else: - repo.shallowmatch = match.always(repo.root, '') + repo.shallowmatch = match.always() return orig(repo, outgoing, version, source, *args, **kwargs) finally: repo.shallowmatch = original
--- a/hgext/remotefilelog/shallowrepo.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/shallowrepo.py Tue Mar 19 16:36:59 2019 +0300 @@ -289,7 +289,7 @@ repo.__class__ = shallowrepository - repo.shallowmatch = match.always(repo.root, '') + repo.shallowmatch = match.always() makeunionstores(repo)
--- a/hgext/remotefilelog/shallowutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/remotefilelog/shallowutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -237,9 +237,9 @@ # v0, str(int(size)) is the header size = int(header) except ValueError: - raise RuntimeError("unexpected remotefilelog header: illegal format") + raise RuntimeError(r"unexpected remotefilelog header: illegal format") if size is None: - raise RuntimeError("unexpected remotefilelog header: no size found") + raise RuntimeError(r"unexpected remotefilelog header: no size found") return index + 1, size, flags def buildfileblobheader(size, flags, version=None):
--- a/hgext/shelve.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/shelve.py Tue Mar 19 16:36:59 2019 +0300 @@ -248,8 +248,8 @@ if version < cls._version: d = cls._readold(repo) elif version == cls._version: - d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\ - .read(firstlinenonkeyval=True) + d = scmutil.simplekeyvaluefile( + repo.vfs, cls._filename).read(firstlinenonkeyval=True) else: raise error.Abort(_('this version of shelve is incompatible ' 'with the version used in this repo')) @@ -287,8 +287,9 @@ "keep": cls._keep if keep else cls._nokeep, "activebook": activebook or cls._noactivebook } - scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\ - .write(info, firstline=("%d" % cls._version)) + scmutil.simplekeyvaluefile( + repo.vfs, cls._filename).write(info, + firstline=("%d" % cls._version)) @classmethod def clear(cls, repo): @@ -674,12 +675,13 @@ hg.update(repo, wctx.node()) files = [] files.extend(shelvectx.files()) - files.extend(shelvectx.parents()[0].files()) + files.extend(shelvectx.p1().files()) # revert will overwrite unknown files, so move them out of the way for file in repo.status(unknown=True).unknown: if file in files: - util.rename(file, scmutil.origpath(ui, repo, file)) + util.rename(repo.wjoin(file), + scmutil.backuppath(ui, repo, file)) ui.pushbuffer(True) cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(), *pathtofiles(repo, files), @@ -809,7 +811,7 @@ """Rebase restored commit from its original location to a destination""" # If the shelve is not immediately on top of the commit # we'll be merging with, rebase it to be on top. - if tmpwctx.node() == shelvectx.parents()[0].node(): + if tmpwctx.node() == shelvectx.p1().node(): return shelvectx overrides = {
--- a/hgext/sparse.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/sparse.py Tue Mar 19 16:36:59 2019 +0300 @@ -199,7 +199,7 @@ def walk(orig, self, match, subrepos, unknown, ignored, full=True): # hack to not exclude explicitly-specified paths so that they can # be warned later on e.g. dirstate.add() - em = matchmod.exact(match._root, match._cwd, match.files()) + em = matchmod.exact(match.files()) sm = matchmod.unionmatcher([self._sparsematcher, em]) match = matchmod.intersectmatchers(match, sm) return orig(self, match, subrepos, unknown, ignored, full)
--- a/hgext/split.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/split.py Tue Mar 19 16:36:59 2019 +0300 @@ -134,13 +134,10 @@ committed = [] # [ctx] # Set working parent to ctx.p1(), and keep working copy as ctx's content - # NOTE: if we can have "update without touching working copy" API, the - # revert step could be cheaper. - hg.clean(repo, ctx.p1().node(), show_stats=False) - parents = repo.changelog.parents(ctx.node()) - ui.pushbuffer() - cmdutil.revert(ui, repo, ctx, parents) - ui.popbuffer() # discard "reverting ..." messages + if ctx.node() != repo.dirstate.p1(): + hg.clean(repo, ctx.node(), show_stats=False) + with repo.dirstate.parentchange(): + scmutil.movedirstate(repo, ctx.p1()) # Any modified, added, removed, deleted result means split is incomplete incomplete = lambda repo: any(repo.status()[:4])
--- a/hgext/strip.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/strip.py Tue Mar 19 16:36:59 2019 +0300 @@ -39,7 +39,7 @@ if baserev: bctx = repo[baserev] else: - bctx = wctx.parents()[0] + bctx = wctx.p1() for s in sorted(wctx.substate): wctx.sub(s).bailifchanged(True) if s not in bctx.substate or bctx.sub(s).dirty(): @@ -76,7 +76,8 @@ return unode -def strip(ui, repo, revs, update=True, backup=True, force=None, bookmarks=None): +def strip(ui, repo, revs, update=True, backup=True, force=None, bookmarks=None, + soft=False): with repo.wlock(), repo.lock(): if update: @@ -85,7 +86,10 @@ hg.clean(repo, urev) repo.dirstate.write(repo.currenttransaction()) - repair.strip(ui, repo, revs, backup) + if soft: + repair.softstrip(ui, repo, revs, backup) + else: + repair.strip(ui, repo, revs, backup) repomarks = repo._bookmarks if bookmarks: @@ -110,7 +114,10 @@ ('k', 'keep', None, _("do not modify working directory during " "strip")), ('B', 'bookmark', [], _("remove revs only reachable from given" - " bookmark"), _('BOOKMARK'))], + " bookmark"), _('BOOKMARK')), + ('', 'soft', None, + _("simply drop changesets from visible history (EXPERIMENTAL)")), + ], _('hg strip [-k] [-f] [-B bookmark] [-r] REV...'), helpcategory=command.CATEGORY_MAINTENANCE) def stripcmd(ui, repo, *revs, **opts): @@ -235,6 +242,7 @@ strip(ui, repo, revs, backup=backup, update=update, - force=opts.get('force'), bookmarks=bookmarks) + force=opts.get('force'), bookmarks=bookmarks, + soft=opts['soft']) return 0
--- a/hgext/transplant.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/transplant.py Tue Mar 19 16:36:59 2019 +0300 @@ -155,7 +155,7 @@ if opts is None: opts = {} revs = sorted(revmap) - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() pulls = [] diffopts = patch.difffeatureopts(self.ui, opts) diffopts.git = True @@ -186,7 +186,7 @@ exchange.pull(repo, source.peer(), heads=pulls) merge.update(repo, pulls[-1], branchmerge=False, force=False) - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() pulls = [] domerge = False @@ -323,11 +323,11 @@ else: files = None if merge: - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() repo.setparents(p1, node) - m = match.always(repo.root, '') + m = match.always() else: - m = match.exact(repo.root, '', files) + m = match.exact(files) n = repo.commit(message, user, date, extra=extra, match=m, editor=self.getcommiteditor()) @@ -387,7 +387,7 @@ extra = {'transplant_source': node} try: - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() if p1 != parent: raise error.Abort(_('working directory not at transplant ' 'parent %s') % nodemod.hex(parent)) @@ -668,7 +668,7 @@ tp = transplanter(ui, repo, opts) - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() if len(repo) > 0 and p1 == revlog.nullid: raise error.Abort(_('no revision checked out')) if opts.get('continue'): @@ -676,11 +676,7 @@ raise error.Abort(_('no transplant to continue')) else: cmdutil.checkunfinished(repo) - if p2 != revlog.nullid: - raise error.Abort(_('outstanding uncommitted merges')) - m, a, r, d = repo.status()[:4] - if m or a or r or d: - raise error.Abort(_('outstanding local changes')) + cmdutil.bailifchanged(repo) sourcerepo = opts.get('source') if sourcerepo:
--- a/hgext/uncommit.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/uncommit.py Tue Mar 19 16:36:59 2019 +0300 @@ -25,7 +25,7 @@ cmdutil, commands, context, - copies, + copies as copiesmod, error, node, obsutil, @@ -44,6 +44,9 @@ configitem('experimental', 'uncommitondirtywdir', default=False, ) +configitem('experimental', 'uncommit.keep', + default=False, +) # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should @@ -64,13 +67,13 @@ if not exclude: return None - files = (initialfiles - exclude) # return the p1 so that we don't create an obsmarker later if not keepcommit: - return ctx.parents()[0].node() + return ctx.p1().node() + files = (initialfiles - exclude) # Filter copies - copied = copies.pathcopies(base, ctx) + copied = copiesmod.pathcopies(base, ctx) copied = dict((dst, src) for dst, src in copied.iteritems() if dst in files) def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()): @@ -83,6 +86,9 @@ copied=copied.get(path)) return mctx + if not files: + repo.ui.status(_("note: keeping empty commit\n")) + new = context.memctx(repo, parents=[base.node(), node.nullid], text=ctx.description(), @@ -93,50 +99,10 @@ extra=ctx.extra()) return repo.commitctx(new) -def _fixdirstate(repo, oldctx, newctx, status): - """ fix the dirstate after switching the working directory from oldctx to - newctx which can be result of either unamend or uncommit. 
- """ - ds = repo.dirstate - copies = dict(ds.copies()) - s = status - for f in s.modified: - if ds[f] == 'r': - # modified + removed -> removed - continue - ds.normallookup(f) - - for f in s.added: - if ds[f] == 'r': - # added + removed -> unknown - ds.drop(f) - elif ds[f] != 'a': - ds.add(f) - - for f in s.removed: - if ds[f] == 'a': - # removed + added -> normal - ds.normallookup(f) - elif ds[f] != 'r': - ds.remove(f) - - # Merge old parent and old working dir copies - oldcopies = {} - for f in (s.modified + s.added): - src = oldctx[f].renamed() - if src: - oldcopies[f] = src[0] - oldcopies.update(copies) - copies = dict((dst, oldcopies.get(src, src)) - for dst, src in oldcopies.iteritems()) - # Adjust the dirstate copies - for dst, src in copies.iteritems(): - if (src not in newctx or dst in newctx or ds[dst] != 'a'): - src = None - ds.copy(src, dst) - @command('uncommit', - [('', 'keep', False, _('allow an empty commit after uncommiting')), + [('', 'keep', None, _('allow an empty commit after uncommiting')), + ('', 'allow-dirty-working-copy', False, + _('allow uncommit with outstanding changes')) ] + commands.walkopts, _('[OPTION]... 
[FILE]...'), helpcategory=command.CATEGORY_CHANGE_MANAGEMENT) @@ -155,9 +121,13 @@ with repo.wlock(), repo.lock(): - if not pats and not repo.ui.configbool('experimental', - 'uncommitondirtywdir'): - cmdutil.bailifchanged(repo) + m, a, r, d = repo.status()[:4] + isdirtypath = any(set(m + a + r + d) & set(pats)) + allowdirtywcopy = (opts['allow_dirty_working_copy'] or + repo.ui.configbool('experimental', 'uncommitondirtywdir')) + if not allowdirtywcopy and (not pats or isdirtypath): + cmdutil.bailifchanged(repo, hint=_('requires ' + '--allow-dirty-working-copy to uncommit')) old = repo['.'] rewriteutil.precheck(repo, [old.rev()], 'uncommit') if len(old.parents()) > 1: @@ -165,7 +135,12 @@ with repo.transaction('uncommit'): match = scmutil.match(old, pats, opts) - keepcommit = opts.get('keep') or pats + keepcommit = pats + if not keepcommit: + if opts.get('keep') is not None: + keepcommit = opts.get('keep') + else: + keepcommit = ui.configbool('experimental', 'uncommit.keep') newid = _commitfiltered(repo, old, match, keepcommit) if newid is None: ui.status(_("nothing to uncommit\n")) @@ -179,12 +154,10 @@ # Fully removed the old commit mapping[old.node()] = () - scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True) + with repo.dirstate.parentchange(): + scmutil.movedirstate(repo, repo[newid], match) - with repo.dirstate.parentchange(): - repo.dirstate.setparents(newid, node.nullid) - s = old.p1().status(old, match=match) - _fixdirstate(repo, old, repo[newid], s) + scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True) def predecessormarkers(ctx): """yields the obsolete markers marking the given changeset as a successor""" @@ -244,9 +217,7 @@ dirstate = repo.dirstate with dirstate.parentchange(): - dirstate.setparents(newprednode, node.nullid) - s = repo.status(predctx, curctx) - _fixdirstate(repo, curctx, newpredctx, s) + scmutil.movedirstate(repo, newpredctx) mapping = {curctx.node(): (newprednode,)} scmutil.cleanupnodes(repo, mapping, 'unamend', 
fixphase=True)
--- a/hgext/zeroconf/Zeroconf.py Tue Mar 19 09:23:35 2019 -0400 +++ b/hgext/zeroconf/Zeroconf.py Tue Mar 19 16:36:59 2019 +0300 @@ -84,7 +84,6 @@ import itertools import select import socket -import string import struct import threading import time @@ -106,7 +105,7 @@ # Some DNS constants -_MDNS_ADDR = '224.0.0.251' +_MDNS_ADDR = r'224.0.0.251' _MDNS_PORT = 5353 _DNS_PORT = 53 _DNS_TTL = 60 * 60 # one hour default TTL @@ -221,7 +220,7 @@ """A DNS entry""" def __init__(self, name, type, clazz): - self.key = string.lower(name) + self.key = name.lower() self.name = name self.type = type self.clazz = clazz & _CLASS_MASK @@ -620,7 +619,7 @@ first = off while True: - len = ord(self.data[off]) + len = ord(self.data[off:off + 1]) off += 1 if len == 0: break @@ -631,7 +630,7 @@ elif t == 0xC0: if next < 0: next = off + 1 - off = ((len & 0x3F) << 8) | ord(self.data[off]) + off = ((len & 0x3F) << 8) | ord(self.data[off:off + 1]) if off >= first: raise BadDomainNameCircular(off) first = off @@ -938,7 +937,6 @@ self.zeroconf.engine.addReader(self, self.zeroconf.socket) def handle_read(self): - data = addr = port = None sock = self.zeroconf.socket try: data, (addr, port) = sock.recvfrom(_MAX_MSG_ABSOLUTE) @@ -1230,7 +1228,6 @@ delay = _LISTENER_TIME next = now + delay last = now + timeout - result = 0 try: zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN)) @@ -1335,7 +1332,7 @@ # SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it pass self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, - socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) + socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0')) self.listeners = [] self.browsers = [] @@ -1659,7 +1656,7 @@ self.engine.notify() self.unregisterAllServices() self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP, - socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) + socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0')) self.socket.close() # Test a few 
module features, including service registration, service
--- a/i18n/posplit Tue Mar 19 09:23:35 2019 -0400 +++ b/i18n/posplit Tue Mar 19 16:36:59 2019 +0300 @@ -77,7 +77,7 @@ continue else: # lines following directly, unexpected - print('Warning: text follows line with directive' \ + print('Warning: text follows line with directive' ' %s' % directive) comment = 'do not translate: .. %s::' % directive if not newentry.comment:
--- a/mercurial/archival.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/archival.py Tue Mar 19 16:36:59 2019 +0300 @@ -340,7 +340,8 @@ for subpath in sorted(ctx.substate): sub = ctx.workingsub(subpath) submatch = matchmod.subdirmatcher(subpath, match) - total += sub.archive(archiver, prefix, submatch, decode) + subprefix = prefix + subpath + '/' + total += sub.archive(archiver, subprefix, submatch, decode) if total == 0: raise error.Abort(_('no files match the archive pattern'))
--- a/mercurial/bdiff.c Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/bdiff.c Tue Mar 19 16:36:59 2019 +0300 @@ -35,15 +35,19 @@ /* count the lines */ i = 1; /* extra line for sentinel */ - for (p = a; p < plast; p++) - if (*p == '\n') + for (p = a; p < plast; p++) { + if (*p == '\n') { i++; - if (p == plast) + } + } + if (p == plast) { i++; + } *lr = l = (struct bdiff_line *)calloc(i, sizeof(struct bdiff_line)); - if (!l) + if (!l) { return -1; + } /* build the line array and calculate hashes */ hash = 0; @@ -90,18 +94,21 @@ struct pos *h = NULL; /* build a hash table of the next highest power of 2 */ - while (buckets < bn + 1) + while (buckets < bn + 1) { buckets *= 2; + } /* try to allocate a large hash table to avoid collisions */ for (scale = 4; scale; scale /= 2) { h = (struct pos *)calloc(buckets, scale * sizeof(struct pos)); - if (h) + if (h) { break; + } } - if (!h) + if (!h) { return 0; + } buckets = buckets * scale - 1; @@ -115,9 +122,11 @@ for (i = 0; i < bn; i++) { /* find the equivalence class */ for (j = b[i].hash & buckets; h[j].pos != -1; - j = (j + 1) & buckets) - if (!cmp(b + i, b + h[j].pos)) + j = (j + 1) & buckets) { + if (!cmp(b + i, b + h[j].pos)) { break; + } + } /* add to the head of the equivalence class */ b[i].n = h[j].pos; @@ -133,15 +142,18 @@ for (i = 0; i < an; i++) { /* find the equivalence class */ for (j = a[i].hash & buckets; h[j].pos != -1; - j = (j + 1) & buckets) - if (!cmp(a + i, b + h[j].pos)) + j = (j + 1) & buckets) { + if (!cmp(a + i, b + h[j].pos)) { break; + } + } a[i].e = j; /* use equivalence class for quick compare */ - if (h[j].len <= t) + if (h[j].len <= t) { a[i].n = h[j].pos; /* point to head of match list */ - else + } else { a[i].n = -1; /* too popular */ + } } /* discard hash tables */ @@ -158,16 +170,18 @@ /* window our search on large regions to better bound worst-case performance. by choosing a window at the end, we reduce skipping overhead on the b chains. 
*/ - if (a2 - a1 > 30000) + if (a2 - a1 > 30000) { a1 = a2 - 30000; + } half = (a1 + a2 - 1) / 2; bhalf = (b1 + b2 - 1) / 2; for (i = a1; i < a2; i++) { /* skip all lines in b after the current block */ - for (j = a[i].n; j >= b2; j = b[j].n) + for (j = a[i].n; j >= b2; j = b[j].n) { ; + } /* loop through all lines match a[i] in b */ for (; j >= b1; j = b[j].n) { @@ -179,8 +193,9 @@ break; } /* previous line mismatch? */ - if (a[i - k].e != b[j - k].e) + if (a[i - k].e != b[j - k].e) { break; + } } pos[j].pos = i; @@ -212,8 +227,9 @@ } /* expand match to include subsequent popular lines */ - while (mi + mk < a2 && mj + mk < b2 && a[mi + mk].e == b[mj + mk].e) + while (mi + mk < a2 && mj + mk < b2 && a[mi + mk].e == b[mj + mk].e) { mk++; + } *omi = mi; *omj = mj; @@ -230,18 +246,21 @@ while (1) { /* find the longest match in this chunk */ k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j); - if (!k) + if (!k) { return l; + } /* and recurse on the remaining chunks on either side */ l = recurse(a, b, pos, a1, i, b1, j, l); - if (!l) + if (!l) { return NULL; + } l->next = (struct bdiff_hunk *)malloc(sizeof(struct bdiff_hunk)); - if (!l->next) + if (!l->next) { return NULL; + } l = l->next; l->a1 = i; @@ -271,14 +290,16 @@ /* generate the matching block list */ curr = recurse(a, b, pos, 0, an, 0, bn, base); - if (!curr) + if (!curr) { return -1; + } /* sentinel end hunk */ curr->next = (struct bdiff_hunk *)malloc(sizeof(struct bdiff_hunk)); - if (!curr->next) + if (!curr->next) { return -1; + } curr = curr->next; curr->a1 = curr->a2 = an; curr->b1 = curr->b2 = bn; @@ -291,10 +312,11 @@ for (curr = base->next; curr; curr = curr->next) { struct bdiff_hunk *next = curr->next; - if (!next) + if (!next) { break; + } - if (curr->a2 == next->a1 || curr->b2 == next->b1) + if (curr->a2 == next->a1 || curr->b2 == next->b1) { while (curr->a2 < an && curr->b2 < bn && next->a1 < next->a2 && next->b1 < next->b2 && !cmp(a + curr->a2, b + curr->b2)) { @@ -303,10 +325,12 @@ 
curr->b2++; next->b1++; } + } } - for (curr = base->next; curr; curr = curr->next) + for (curr = base->next; curr; curr = curr->next) { count++; + } return count; }
--- a/mercurial/bookmarks.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/bookmarks.py Tue Mar 19 16:36:59 2019 +0300 @@ -44,7 +44,7 @@ return fp class bmstore(object): - """Storage for bookmarks. + r"""Storage for bookmarks. This object should do all bookmark-related reads and writes, so that it's fairly simple to replace the storage underlying @@ -306,7 +306,6 @@ itself as we commit. This function returns the name of that bookmark. It is stored in .hg/bookmarks.current """ - mark = None try: file = repo.vfs('bookmarks.current') except IOError as inst:
--- a/mercurial/branchmap.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/branchmap.py Tue Mar 19 16:36:59 2019 +0300 @@ -30,63 +30,6 @@ pack_into = struct.pack_into unpack_from = struct.unpack_from -def _filename(repo): - """name of a branchcache file for a given repo or repoview""" - filename = "branch2" - if repo.filtername: - filename = '%s-%s' % (filename, repo.filtername) - return filename - -def read(repo): - f = None - try: - f = repo.cachevfs(_filename(repo)) - lineiter = iter(f) - cachekey = next(lineiter).rstrip('\n').split(" ", 2) - last, lrev = cachekey[:2] - last, lrev = bin(last), int(lrev) - filteredhash = None - if len(cachekey) > 2: - filteredhash = bin(cachekey[2]) - partial = branchcache(tipnode=last, tiprev=lrev, - filteredhash=filteredhash) - if not partial.validfor(repo): - # invalidate the cache - raise ValueError(r'tip differs') - cl = repo.changelog - for l in lineiter: - l = l.rstrip('\n') - if not l: - continue - node, state, label = l.split(" ", 2) - if state not in 'oc': - raise ValueError(r'invalid branch state') - label = encoding.tolocal(label.strip()) - node = bin(node) - if not cl.hasnode(node): - raise ValueError( - r'node %s does not exist' % pycompat.sysstr(hex(node))) - partial.setdefault(label, []).append(node) - if state == 'c': - partial._closednodes.add(node) - - except (IOError, OSError): - return None - - except Exception as inst: - if repo.ui.debugflag: - msg = 'invalid branchheads cache' - if repo.filtername is not None: - msg += ' (%s)' % repo.filtername - msg += ': %s\n' - repo.ui.debug(msg % pycompat.bytestr(inst)) - partial = None - - finally: - if f: - f.close() - - return partial ### Nearest subset relation # Nearest subset of filter X is a filter Y so that: @@ -100,65 +43,89 @@ 'served': 'immutable', 'immutable': 'base'} -def updatecache(repo): - cl = repo.changelog - filtername = repo.filtername - partial = repo._branchcaches.get(filtername) + +class BranchMapCache(object): + """mapping of filtered views of 
repo with their branchcache""" + def __init__(self): + self._per_filter = {} + + def __getitem__(self, repo): + self.updatecache(repo) + return self._per_filter[repo.filtername] - revs = [] - if partial is None or not partial.validfor(repo): - partial = read(repo) - if partial is None: + def updatecache(self, repo): + """Update the cache for the given filtered view on a repository""" + # This can trigger updates for the caches for subsets of the filtered + # view, e.g. when there is no cache for this filtered view or the cache + # is stale. + + cl = repo.changelog + filtername = repo.filtername + bcache = self._per_filter.get(filtername) + if bcache is None or not bcache.validfor(repo): + # cache object missing or cache object stale? Read from disk + bcache = branchcache.fromfile(repo) + + revs = [] + if bcache is None: + # no (fresh) cache available anymore, perhaps we can re-use + # the cache for a subset, then extend that to add info on missing + # revisions. subsetname = subsettable.get(filtername) - if subsetname is None: - partial = branchcache() - else: + if subsetname is not None: subset = repo.filtered(subsetname) - partial = subset.branchmap().copy() + bcache = self[subset].copy() extrarevs = subset.changelog.filteredrevs - cl.filteredrevs - revs.extend(r for r in extrarevs if r <= partial.tiprev) - revs.extend(cl.revs(start=partial.tiprev + 1)) - if revs: - partial.update(repo, revs) - partial.write(repo) + revs.extend(r for r in extrarevs if r <= bcache.tiprev) + else: + # nothing to fall back on, start empty. + bcache = branchcache() - assert partial.validfor(repo), filtername - repo._branchcaches[repo.filtername] = partial - -def replacecache(repo, bm): - """Replace the branchmap cache for a repo with a branch mapping. + revs.extend(cl.revs(start=bcache.tiprev + 1)) + if revs: + bcache.update(repo, revs) - This is likely only called during clone with a branch map from a remote. 
- """ - cl = repo.changelog - clrev = cl.rev - clbranchinfo = cl.branchinfo - rbheads = [] - closed = [] - for bheads in bm.itervalues(): - rbheads.extend(bheads) - for h in bheads: - r = clrev(h) - b, c = clbranchinfo(r) - if c: - closed.append(h) + assert bcache.validfor(repo), filtername + self._per_filter[repo.filtername] = bcache + + def replace(self, repo, remotebranchmap): + """Replace the branchmap cache for a repo with a branch mapping. + + This is likely only called during clone with a branch map from a + remote. - if rbheads: - rtiprev = max((int(clrev(node)) - for node in rbheads)) - cache = branchcache(bm, - repo[rtiprev].node(), - rtiprev, - closednodes=closed) + """ + cl = repo.changelog + clrev = cl.rev + clbranchinfo = cl.branchinfo + rbheads = [] + closed = [] + for bheads in remotebranchmap.itervalues(): + rbheads += bheads + for h in bheads: + r = clrev(h) + b, c = clbranchinfo(r) + if c: + closed.append(h) - # Try to stick it as low as possible - # filter above served are unlikely to be fetch from a clone - for candidate in ('base', 'immutable', 'served'): - rview = repo.filtered(candidate) - if cache.validfor(rview): - repo._branchcaches[candidate] = cache - cache.write(rview) - break + if rbheads: + rtiprev = max((int(clrev(node)) for node in rbheads)) + cache = branchcache( + remotebranchmap, repo[rtiprev].node(), rtiprev, + closednodes=closed) + + # Try to stick it as low as possible + # filter above served are unlikely to be fetch from a clone + for candidate in ('base', 'immutable', 'served'): + rview = repo.filtered(candidate) + if cache.validfor(rview): + self._per_filter[candidate] = cache + cache.write(rview) + return + + def clear(self): + self._per_filter.clear() + class branchcache(dict): """A dict like object that hold branches heads cache. 
@@ -196,6 +163,69 @@ else: self._closednodes = closednodes + @classmethod + def fromfile(cls, repo): + f = None + try: + f = repo.cachevfs(cls._filename(repo)) + lineiter = iter(f) + cachekey = next(lineiter).rstrip('\n').split(" ", 2) + last, lrev = cachekey[:2] + last, lrev = bin(last), int(lrev) + filteredhash = None + if len(cachekey) > 2: + filteredhash = bin(cachekey[2]) + bcache = cls(tipnode=last, tiprev=lrev, filteredhash=filteredhash) + if not bcache.validfor(repo): + # invalidate the cache + raise ValueError(r'tip differs') + bcache.load(repo, lineiter) + except (IOError, OSError): + return None + + except Exception as inst: + if repo.ui.debugflag: + msg = 'invalid branchheads cache' + if repo.filtername is not None: + msg += ' (%s)' % repo.filtername + msg += ': %s\n' + repo.ui.debug(msg % pycompat.bytestr(inst)) + bcache = None + + finally: + if f: + f.close() + + return bcache + + def load(self, repo, lineiter): + """ fully loads the branchcache by reading from the file using the line + iterator passed""" + cl = repo.changelog + for line in lineiter: + line = line.rstrip('\n') + if not line: + continue + node, state, label = line.split(" ", 2) + if state not in 'oc': + raise ValueError(r'invalid branch state') + label = encoding.tolocal(label.strip()) + node = bin(node) + if not cl.hasnode(node): + raise ValueError( + r'node %s does not exist' % pycompat.sysstr(hex(node))) + self.setdefault(label, []).append(node) + if state == 'c': + self._closednodes.add(node) + + @staticmethod + def _filename(repo): + """name of a branchcache file for a given repo or repoview""" + filename = "branch2" + if repo.filtername: + filename = '%s-%s' % (filename, repo.filtername) + return filename + def validfor(self, repo): """Is the cache content valid regarding a repo @@ -203,7 +233,7 @@ - True when cache is up to date or a subset of current repo.""" try: return ((self.tipnode == repo.changelog.node(self.tiprev)) - and (self.filteredhash == \ + and (self.filteredhash 
== scmutil.filteredhash(repo, self.tiprev))) except IndexError: return False @@ -241,26 +271,27 @@ def copy(self): """return an deep copy of the branchcache object""" - return branchcache(self, self.tipnode, self.tiprev, self.filteredhash, - self._closednodes) + return type(self)( + self, self.tipnode, self.tiprev, self.filteredhash, + self._closednodes) def write(self, repo): try: - f = repo.cachevfs(_filename(repo), "w", atomictemp=True) + f = repo.cachevfs(self._filename(repo), "w", atomictemp=True) cachekey = [hex(self.tipnode), '%d' % self.tiprev] if self.filteredhash is not None: cachekey.append(hex(self.filteredhash)) f.write(" ".join(cachekey) + '\n') nodecount = 0 for label, nodes in sorted(self.iteritems()): + label = encoding.fromlocal(label) for node in nodes: nodecount += 1 if node in self._closednodes: state = 'c' else: state = 'o' - f.write("%s %s %s\n" % (hex(node), state, - encoding.fromlocal(label))) + f.write("%s %s %s\n" % (hex(node), state, label)) f.close() repo.ui.log('branchcache', 'wrote %s branch cache with %d labels and %d nodes\n', @@ -329,7 +360,16 @@ duration = util.timer() - starttime repo.ui.log('branchcache', 'updated %s branch cache in %.4f seconds\n', - repo.filtername, duration) + repo.filtername or b'None', duration) + + self.write(repo) + + +class remotebranchcache(branchcache): + """Branchmap info for a remote connection, should not write locally""" + def write(self, repo): + pass + # Revision branch info cache
--- a/mercurial/bundle2.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/bundle2.py Tue Mar 19 16:36:59 2019 +0300 @@ -1397,8 +1397,8 @@ assert chunknum == 0, 'Must start with chunk 0' self._chunkindex.append((0, self._tellfp())) else: - assert chunknum < len(self._chunkindex), \ - 'Unknown chunk %d' % chunknum + assert chunknum < len(self._chunkindex), ( + 'Unknown chunk %d' % chunknum) self._seekfp(self._chunkindex[chunknum][1]) pos = self._chunkindex[chunknum][0] @@ -1664,6 +1664,7 @@ mandatory=False) def _formatrequirementsspec(requirements): + requirements = [req for req in requirements if req != "shared"] return urlreq.quote(','.join(sorted(requirements))) def _formatrequirementsparams(requirements):
--- a/mercurial/cext/base85.c Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/cext/base85.c Tue Mar 19 16:36:59 2019 +0300 @@ -24,8 +24,9 @@ unsigned i; memset(b85dec, 0, sizeof(b85dec)); - for (i = 0; i < sizeof(b85chars); i++) + for (i = 0; i < sizeof(b85chars); i++) { b85dec[(int)(b85chars[i])] = i + 1; + } } static PyObject *b85encode(PyObject *self, PyObject *args) @@ -37,19 +38,22 @@ unsigned int acc, val, ch; int pad = 0; - if (!PyArg_ParseTuple(args, PY23("s#|i", "y#|i"), &text, &len, &pad)) + if (!PyArg_ParseTuple(args, PY23("s#|i", "y#|i"), &text, &len, &pad)) { return NULL; + } - if (pad) + if (pad) { olen = ((len + 3) / 4 * 5) - 3; - else { + } else { olen = len % 4; - if (olen) + if (olen) { olen++; + } olen += len / 4 * 5; } - if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3))) + if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3))) { return NULL; + } dst = PyBytes_AsString(out); @@ -58,8 +62,9 @@ for (i = 24; i >= 0; i -= 8) { ch = *text++; acc |= ch << i; - if (--len == 0) + if (--len == 0) { break; + } } for (i = 4; i >= 0; i--) { val = acc % 85; @@ -69,8 +74,9 @@ dst += 5; } - if (!pad) + if (!pad) { _PyBytes_Resize(&out, olen); + } return out; } @@ -84,15 +90,18 @@ int c; unsigned int acc; - if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &len)) + if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &len)) { return NULL; + } olen = len / 5 * 4; i = len % 5; - if (i) + if (i) { olen += i - 1; - if (!(out = PyBytes_FromStringAndSize(NULL, olen))) + } + if (!(out = PyBytes_FromStringAndSize(NULL, olen))) { return NULL; + } dst = PyBytes_AsString(out); @@ -100,8 +109,9 @@ while (i < len) { acc = 0; cap = len - i - 1; - if (cap > 4) + if (cap > 4) { cap = 4; + } for (j = 0; j < cap; i++, j++) { c = b85dec[(int)*text++] - 1; if (c < 0) { @@ -136,10 +146,12 @@ cap = olen < 4 ? 
olen : 4; olen -= cap; - for (j = 0; j < 4 - cap; j++) + for (j = 0; j < 4 - cap; j++) { acc *= 85; - if (cap && cap < 4) + } + if (cap && cap < 4) { acc += 0xffffff >> (cap - 1) * 8; + } for (j = 0; j < cap; j++) { acc = (acc << 8) | (acc >> 24); *dst++ = acc;
--- a/mercurial/cext/bdiff.c Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/cext/bdiff.c Tue Mar 19 16:36:59 2019 +0300 @@ -29,22 +29,26 @@ l.next = NULL; - if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb)) + if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb)) { return NULL; + } an = bdiff_splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a); bn = bdiff_splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b); - if (!a || !b) + if (!a || !b) { goto nomem; + } count = bdiff_diff(a, an, b, bn, &l); - if (count < 0) + if (count < 0) { goto nomem; + } rl = PyList_New(count); - if (!rl) + if (!rl) { goto nomem; + } for (h = l.next; h; h = h->next) { m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2); @@ -72,8 +76,10 @@ l.next = NULL; - if (!PyArg_ParseTuple(args, PY23("s*s*:bdiff", "y*y*:bdiff"), &ba, &bb)) + if (!PyArg_ParseTuple(args, PY23("s*s*:bdiff", "y*y*:bdiff"), &ba, + &bb)) { return NULL; + } if (!PyBuffer_IsContiguous(&ba, 'C') || ba.ndim > 1) { PyErr_SetString(PyExc_ValueError, "bdiff input not contiguous"); @@ -98,8 +104,9 @@ lmax = la > lb ? 
lb : la; for (ia = ba.buf, ib = bb.buf; li < lmax && *ia == *ib; ++li, ++ia, ++ib) { - if (*ia == '\n') + if (*ia == '\n') { lcommon = li + 1; + } } /* we can almost add: if (li == lmax) lcommon = li; */ @@ -119,8 +126,9 @@ /* calculate length of output */ la = lb = 0; for (h = l.next; h; h = h->next) { - if (h->a1 != la || h->b1 != lb) + if (h->a1 != la || h->b1 != lb) { len += 12 + bl[h->b1].l - bl[lb].l; + } la = h->a2; lb = h->b2; } @@ -129,8 +137,9 @@ result = PyBytes_FromStringAndSize(NULL, len); - if (!result) + if (!result) { goto cleanup; + } /* build binary patch */ rb = PyBytes_AsString(result); @@ -151,8 +160,9 @@ } cleanup: - if (_save) + if (_save) { PyEval_RestoreThread(_save); + } PyBuffer_Release(&ba); PyBuffer_Release(&bb); free(al); @@ -174,20 +184,23 @@ Py_ssize_t i, rlen, wlen = 0; char *w; - if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws)) + if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws)) { return NULL; + } r = PyBytes_AsString(s); rlen = PyBytes_Size(s); w = (char *)PyMem_Malloc(rlen ? 
rlen : 1); - if (!w) + if (!w) { goto nomem; + } for (i = 0; i != rlen; i++) { c = r[i]; if (c == ' ' || c == '\t' || c == '\r') { - if (!allws && (wlen == 0 || w[wlen - 1] != ' ')) + if (!allws && (wlen == 0 || w[wlen - 1] != ' ')) { w[wlen++] = ' '; + } } else if (c == '\n' && !allws && wlen > 0 && w[wlen - 1] == ' ') { w[wlen - 1] = '\n'; @@ -207,8 +220,9 @@ const char *source, Py_ssize_t len) { PyObject *sliced = PyBytes_FromStringAndSize(source, len); - if (sliced == NULL) + if (sliced == NULL) { return false; + } PyList_SET_ITEM(list, destidx, sliced); return true; } @@ -232,19 +246,22 @@ ++nelts; } } - if ((result = PyList_New(nelts + 1)) == NULL) + if ((result = PyList_New(nelts + 1)) == NULL) { goto abort; + } nelts = 0; for (i = 0; i < size - 1; ++i) { if (text[i] == '\n') { if (!sliceintolist(result, nelts++, text + start, - i - start + 1)) + i - start + 1)) { goto abort; + } start = i + 1; } } - if (!sliceintolist(result, nelts++, text + start, size - start)) + if (!sliceintolist(result, nelts++, text + start, size - start)) { goto abort; + } return result; abort: Py_XDECREF(result); @@ -257,8 +274,9 @@ PyObject *rl = (PyObject *)priv; PyObject *m = Py_BuildValue("LLLL", a1, a2, b1, b2); int r; - if (!m) + if (!m) { return -1; + } r = PyList_Append(rl, m); Py_DECREF(m); return r; @@ -282,15 +300,17 @@ }; if (!PyArg_ParseTuple(args, PY23("s#s#", "y#y#"), &a.ptr, &la, &b.ptr, - &lb)) + &lb)) { return NULL; + } a.size = la; b.size = lb; rl = PyList_New(0); - if (!rl) + if (!rl) { return PyErr_NoMemory(); + } ecb.priv = rl;
--- a/mercurial/cext/charencode.c Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/cext/charencode.c Tue Mar 19 16:36:59 2019 +0300 @@ -114,8 +114,9 @@ ret = PyBytes_FromStringAndSize(NULL, len / 2); - if (!ret) + if (!ret) { return NULL; + } d = PyBytes_AsString(ret); @@ -133,21 +134,24 @@ const char *buf; Py_ssize_t i, len; if (!PyArg_ParseTuple(args, PY23("s#:isasciistr", "y#:isasciistr"), - &buf, &len)) + &buf, &len)) { return NULL; + } i = 0; /* char array in PyStringObject should be at least 4-byte aligned */ if (((uintptr_t)buf & 3) == 0) { const uint32_t *p = (const uint32_t *)buf; for (; i < len / 4; i++) { - if (p[i] & 0x80808080U) + if (p[i] & 0x80808080U) { Py_RETURN_FALSE; + } } i *= 4; } for (; i < len; i++) { - if (buf[i] & 0x80) + if (buf[i] & 0x80) { Py_RETURN_FALSE; + } } Py_RETURN_TRUE; } @@ -164,8 +168,9 @@ len = PyBytes_GET_SIZE(str_obj); newobj = PyBytes_FromStringAndSize(NULL, len); - if (!newobj) + if (!newobj) { goto quit; + } newstr = PyBytes_AS_STRING(newobj); @@ -197,16 +202,18 @@ PyObject *asciilower(PyObject *self, PyObject *args) { PyObject *str_obj; - if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj)) + if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj)) { return NULL; + } return _asciitransform(str_obj, lowertable, NULL); } PyObject *asciiupper(PyObject *self, PyObject *args) { PyObject *str_obj; - if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj)) + if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj)) { return NULL; + } return _asciitransform(str_obj, uppertable, NULL); } @@ -222,8 +229,9 @@ if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap", &PyDict_Type, &dmap, &PyInt_Type, &spec_obj, &PyFunction_Type, - &normcase_fallback)) + &normcase_fallback)) { goto quit; + } spec = (int)PyInt_AS_LONG(spec_obj); switch (spec) { @@ -244,8 +252,9 @@ /* Add some more entries to deal with additions outside this function. 
*/ file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11); - if (file_foldmap == NULL) + if (file_foldmap == NULL) { goto quit; + } while (PyDict_Next(dmap, &pos, &k, &v)) { if (!dirstate_tuple_check(v)) { @@ -265,8 +274,9 @@ normcase_fallback, k, NULL); } - if (normed == NULL) + if (normed == NULL) { goto quit; + } if (PyDict_SetItem(file_foldmap, normed, k) == -1) { Py_DECREF(normed); goto quit; @@ -377,22 +387,25 @@ Py_ssize_t origlen, esclen; int paranoid; if (!PyArg_ParseTuple(args, "O!i:jsonescapeu8fast", &PyBytes_Type, - &origstr, ¶noid)) + &origstr, ¶noid)) { return NULL; + } origbuf = PyBytes_AS_STRING(origstr); origlen = PyBytes_GET_SIZE(origstr); esclen = jsonescapelen(origbuf, origlen, paranoid); - if (esclen < 0) + if (esclen < 0) { return NULL; /* unsupported char found or overflow */ + } if (origlen == esclen) { Py_INCREF(origstr); return origstr; } escstr = PyBytes_FromStringAndSize(NULL, esclen); - if (!escstr) + if (!escstr) { return NULL; + } encodejsonescape(PyBytes_AS_STRING(escstr), esclen, origbuf, origlen, paranoid);
--- a/mercurial/cext/mpatch.c Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/cext/mpatch.c Tue Mar 19 16:36:59 2019 +0300 @@ -55,13 +55,16 @@ int r; PyObject *tmp = PyList_GetItem((PyObject *)bins, pos); - if (!tmp) + if (!tmp) { return NULL; - if (PyObject_GetBuffer(tmp, &buffer, PyBUF_CONTIG_RO)) + } + if (PyObject_GetBuffer(tmp, &buffer, PyBUF_CONTIG_RO)) { return NULL; + } if ((r = mpatch_decode(buffer.buf, buffer.len, &res)) < 0) { - if (!PyErr_Occurred()) + if (!PyErr_Occurred()) { setpyerr(r); + } res = NULL; } @@ -78,8 +81,9 @@ char *out; Py_ssize_t len, outlen; - if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins)) + if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins)) { return NULL; + } len = PyList_Size(bins); if (!len) { @@ -94,8 +98,9 @@ patch = mpatch_fold(bins, cpygetitem, 0, len); if (!patch) { /* error already set or memory error */ - if (!PyErr_Occurred()) + if (!PyErr_Occurred()) { PyErr_NoMemory(); + } result = NULL; goto cleanup; } @@ -126,8 +131,9 @@ cleanup: mpatch_lfree(patch); PyBuffer_Release(&buffer); - if (!result && !PyErr_Occurred()) + if (!result && !PyErr_Occurred()) { setpyerr(r); + } return result; } @@ -138,15 +144,18 @@ Py_ssize_t patchlen; char *bin; - if (!PyArg_ParseTuple(args, PY23("ls#", "ly#"), &orig, &bin, &patchlen)) + if (!PyArg_ParseTuple(args, PY23("ls#", "ly#"), &orig, &bin, + &patchlen)) { return NULL; + } while (pos >= 0 && pos < patchlen) { start = getbe32(bin + pos); end = getbe32(bin + pos + 4); len = getbe32(bin + pos + 8); - if (start > end) + if (start > end) { break; /* sanity check */ + } pos += 12 + len; outlen += start - last; last = end; @@ -154,9 +163,10 @@ } if (pos != patchlen) { - if (!PyErr_Occurred()) + if (!PyErr_Occurred()) { PyErr_SetString(mpatch_Error, "patch cannot be decoded"); + } return NULL; }
--- a/mercurial/cext/parsers.c Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/cext/parsers.c Tue Mar 19 16:36:59 2019 +0300 @@ -32,8 +32,9 @@ { Py_ssize_t expected_size; - if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) + if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) { return NULL; + } return _dict_new_presized(expected_size); } @@ -43,8 +44,9 @@ { dirstateTupleObject *t = PyObject_New(dirstateTupleObject, &dirstateTupleType); - if (!t) + if (!t) { return NULL; + } t->state = state; t->mode = mode; t->size = size; @@ -60,12 +62,14 @@ dirstateTupleObject *t; char state; int size, mode, mtime; - if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) + if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) { return NULL; + } t = (dirstateTupleObject *)subtype->tp_alloc(subtype, 1); - if (!t) + if (!t) { return NULL; + } t->state = state; t->mode = mode; t->size = size; @@ -165,8 +169,9 @@ if (!PyArg_ParseTuple( args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"), - &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) + &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) { goto quit; + } len = readlen; @@ -178,8 +183,9 @@ } parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, 20, str + 20, 20); - if (!parents) + if (!parents) { goto quit; + } /* read filenames */ while (pos >= 40 && pos < len) { @@ -212,13 +218,16 @@ cpos + 1, flen - (cpos - cur) - 1); if (!fname || !cname || PyDict_SetItem(cmap, fname, cname) == -1 || - PyDict_SetItem(dmap, fname, entry) == -1) + PyDict_SetItem(dmap, fname, entry) == -1) { goto quit; + } Py_DECREF(cname); } else { fname = PyBytes_FromStringAndSize(cur, flen); - if (!fname || PyDict_SetItem(dmap, fname, entry) == -1) + if (!fname || + PyDict_SetItem(dmap, fname, entry) == -1) { goto quit; + } } Py_DECREF(fname); Py_DECREF(entry); @@ -245,16 +254,20 @@ PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL; Py_ssize_t pos; - if 
(!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type, &dmap)) + if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type, + &dmap)) { goto bail; + } nonnset = PySet_New(NULL); - if (nonnset == NULL) + if (nonnset == NULL) { goto bail; + } otherpset = PySet_New(NULL); - if (otherpset == NULL) + if (otherpset == NULL) { goto bail; + } pos = 0; while (PyDict_Next(dmap, &pos, &fname, &v)) { @@ -272,15 +285,18 @@ } } - if (t->state == 'n' && t->mtime != -1) + if (t->state == 'n' && t->mtime != -1) { continue; - if (PySet_Add(nonnset, fname) == -1) + } + if (PySet_Add(nonnset, fname) == -1) { goto bail; + } } result = Py_BuildValue("(OO)", nonnset, otherpset); - if (result == NULL) + if (result == NULL) { goto bail; + } Py_DECREF(nonnset); Py_DECREF(otherpset); return result; @@ -304,8 +320,10 @@ int now; if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map, - &PyDict_Type, ©map, &PyTuple_Type, &pl, &now)) + &PyDict_Type, ©map, &PyTuple_Type, &pl, + &now)) { return NULL; + } if (PyTuple_Size(pl) != 2) { PyErr_SetString(PyExc_TypeError, "expected 2-element tuple"); @@ -332,8 +350,9 @@ } packobj = PyBytes_FromStringAndSize(NULL, nbytes); - if (packobj == NULL) + if (packobj == NULL) { goto bail; + } p = PyBytes_AS_STRING(packobj); @@ -377,10 +396,12 @@ mtime = -1; mtime_unset = (PyObject *)make_dirstate_tuple( state, mode, size, mtime); - if (!mtime_unset) + if (!mtime_unset) { goto bail; - if (PyDict_SetItem(map, k, mtime_unset) == -1) + } + if (PyDict_SetItem(map, k, mtime_unset) == -1) { goto bail; + } Py_DECREF(mtime_unset); mtime_unset = NULL; } @@ -664,8 +685,9 @@ manifest_module_init(mod); revlog_module_init(mod); - if (PyType_Ready(&dirstateTupleType) < 0) + if (PyType_Ready(&dirstateTupleType) < 0) { return; + } Py_INCREF(&dirstateTupleType); PyModule_AddObject(mod, "dirstatetuple", (PyObject *)&dirstateTupleType); @@ -675,12 +697,14 @@ { PyObject *sys = PyImport_ImportModule("sys"), *ver; long hexversion; - if (!sys) + if (!sys) 
{ return -1; + } ver = PyObject_GetAttrString(sys, "hexversion"); Py_DECREF(sys); - if (!ver) + if (!ver) { return -1; + } hexversion = PyInt_AsLong(ver); Py_DECREF(ver); /* sys.hexversion is a 32-bit number by default, so the -1 case @@ -720,8 +744,9 @@ { PyObject *mod; - if (check_python_version() == -1) + if (check_python_version() == -1) { return; + } mod = Py_InitModule3("parsers", methods, parsers_doc); module_init(mod); }
--- a/mercurial/cext/pathencode.c Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/cext/pathencode.c Tue Mar 19 16:36:59 2019 +0300 @@ -126,8 +126,9 @@ if (src[i] == 'g') { state = DHGDI; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DDEFAULT; + } break; case DHGDI: if (src[i] == '/') { @@ -137,8 +138,9 @@ state = DDEFAULT; break; case DDEFAULT: - if (src[i] == '.') + if (src[i] == '.') { state = DDOT; + } charcopy(dest, &destlen, destsize, src[i++]); break; } @@ -153,8 +155,9 @@ PyObject *pathobj, *newobj; char *path; - if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj)) + if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj)) { return NULL; + } if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) { PyErr_SetString(PyExc_TypeError, "expected a string"); @@ -235,15 +238,17 @@ if (src[i] == 'u') { state = AU; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case AU: if (src[i] == 'x') { state = THIRD; i++; - } else + } else { state = DEFAULT; + } break; case THIRD: state = DEFAULT; @@ -262,8 +267,9 @@ if (src[i] == 'o') { state = CO; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case CO: if (src[i] == 'm') { @@ -272,8 +278,9 @@ } else if (src[i] == 'n') { state = THIRD; i++; - } else + } else { state = DEFAULT; + } break; case COMLPT: switch (src[i]) { @@ -314,43 +321,49 @@ if (src[i] == 'p') { state = LP; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case LP: if (src[i] == 't') { state = COMLPT; i++; - } else + } else { state = DEFAULT; + } break; case N: if (src[i] == 'u') { state = NU; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case NU: if (src[i] == 'l') { state = THIRD; i++; - } else + } else { state = DEFAULT; + } break; case P: if (src[i] == 'r') { state = PR; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = 
DEFAULT; + } break; case PR: if (src[i] == 'n') { state = THIRD; i++; - } else + } else { state = DEFAULT; + } break; case LDOT: switch (src[i]) { @@ -397,18 +410,21 @@ if (src[i] == 'g') { state = HGDI; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case HGDI: if (src[i] == '/') { state = START; - if (encodedir) + if (encodedir) { memcopy(dest, &destlen, destsize, ".hg", 3); + } charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case SPACE: switch (src[i]) { @@ -427,8 +443,9 @@ case DEFAULT: while (inset(onebyte, src[i])) { charcopy(dest, &destlen, destsize, src[i++]); - if (i == len) + if (i == len) { goto done; + } } switch (src[i]) { case '.': @@ -456,9 +473,10 @@ charcopy(dest, &destlen, destsize, '_'); charcopy(dest, &destlen, destsize, c == '_' ? '_' : c + 32); - } else + } else { escape3(dest, &destlen, destsize, src[i++]); + } break; } break; @@ -498,12 +516,13 @@ Py_ssize_t i, destlen = 0; for (i = 0; i < len; i++) { - if (inset(onebyte, src[i])) + if (inset(onebyte, src[i])) { charcopy(dest, &destlen, destsize, src[i]); - else if (inset(lower, src[i])) + } else if (inset(lower, src[i])) { charcopy(dest, &destlen, destsize, src[i] + 32); - else + } else { escape3(dest, &destlen, destsize, src[i]); + } } return destlen; @@ -516,13 +535,15 @@ PyObject *ret; if (!PyArg_ParseTuple(args, PY23("s#:lowerencode", "y#:lowerencode"), - &path, &len)) + &path, &len)) { return NULL; + } newlen = _lowerencode(NULL, 0, path, len); ret = PyBytes_FromStringAndSize(NULL, newlen); - if (ret) + if (ret) { _lowerencode(PyBytes_AS_STRING(ret), newlen, path, len); + } return ret; } @@ -551,8 +572,9 @@ Py_ssize_t destsize, destlen = 0, slop, used; while (lastslash >= 0 && src[lastslash] != '/') { - if (src[lastslash] == '.' && lastdot == -1) + if (src[lastslash] == '.' 
&& lastdot == -1) { lastdot = lastslash; + } lastslash--; } @@ -570,12 +592,14 @@ /* If src contains a suffix, we will append it to the end of the new string, so make room. */ destsize = 120; - if (lastdot >= 0) + if (lastdot >= 0) { destsize += len - lastdot - 1; + } ret = PyBytes_FromStringAndSize(NULL, destsize); - if (ret == NULL) + if (ret == NULL) { return NULL; + } dest = PyBytes_AS_STRING(ret); memcopy(dest, &destlen, destsize, "dh/", 3); @@ -587,30 +611,36 @@ char d = dest[destlen - 1]; /* After truncation, a directory name may end in a space or dot, which are unportable. */ - if (d == '.' || d == ' ') + if (d == '.' || d == ' ') { dest[destlen - 1] = '_'; - /* The + 3 is to account for "dh/" in the beginning */ - if (destlen > maxshortdirslen + 3) + /* The + 3 is to account for "dh/" in the + * beginning */ + } + if (destlen > maxshortdirslen + 3) { break; + } charcopy(dest, &destlen, destsize, src[i]); p = -1; - } else if (p < dirprefixlen) + } else if (p < dirprefixlen) { charcopy(dest, &destlen, destsize, src[i]); + } } /* Rewind to just before the last slash copied. */ - if (destlen > maxshortdirslen + 3) + if (destlen > maxshortdirslen + 3) { do { destlen--; } while (destlen > 0 && dest[destlen] != '/'); + } if (destlen > 3) { if (lastslash > 0) { char d = dest[destlen - 1]; /* The last directory component may be truncated, so make it safe. */ - if (d == '.' || d == ' ') + if (d == '.' || d == ' ') { dest[destlen - 1] = '_'; + } } charcopy(dest, &destlen, destsize, '/'); @@ -620,27 +650,32 @@ depends on the number of bytes left after accounting for hash and suffix. */ used = destlen + 40; - if (lastdot >= 0) + if (lastdot >= 0) { used += len - lastdot - 1; + } slop = maxstorepathlen - used; if (slop > 0) { Py_ssize_t basenamelen = lastslash >= 0 ? 
len - lastslash - 2 : len - 1; - if (basenamelen > slop) + if (basenamelen > slop) { basenamelen = slop; - if (basenamelen > 0) + } + if (basenamelen > 0) { memcopy(dest, &destlen, destsize, &src[lastslash + 1], basenamelen); + } } /* Add hash and suffix. */ - for (i = 0; i < 20; i++) + for (i = 0; i < 20; i++) { hexencode(dest, &destlen, destsize, sha[i]); + } - if (lastdot >= 0) + if (lastdot >= 0) { memcopy(dest, &destlen, destsize, &src[lastdot], len - lastdot - 1); + } assert(PyBytes_Check(ret)); Py_SIZE(ret) = destlen; @@ -677,13 +712,15 @@ shaobj = PyObject_CallFunction(shafunc, PY23("s#", "y#"), str, len); - if (shaobj == NULL) + if (shaobj == NULL) { return -1; + } hashobj = PyObject_CallMethod(shaobj, "digest", ""); Py_DECREF(shaobj); - if (hashobj == NULL) + if (hashobj == NULL) { return -1; + } if (!PyBytes_Check(hashobj) || PyBytes_GET_SIZE(hashobj) != 20) { PyErr_SetString(PyExc_TypeError, @@ -714,8 +751,9 @@ } dirlen = _encodedir(dired, baselen, src, len); - if (sha1hash(sha, dired, dirlen - 1) == -1) + if (sha1hash(sha, dired, dirlen - 1) == -1) { return NULL; + } lowerlen = _lowerencode(lowered, baselen, dired + 5, dirlen - 5); auxlen = auxencode(auxed, baselen, lowered, lowerlen); return hashmangle(auxed, auxlen, sha); @@ -727,18 +765,20 @@ PyObject *pathobj, *newobj; char *path; - if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj)) + if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj)) { return NULL; + } if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) { PyErr_SetString(PyExc_TypeError, "expected a string"); return NULL; } - if (len > maxstorepathlen) + if (len > maxstorepathlen) { newlen = maxstorepathlen + 2; - else + } else { newlen = len ? basicencode(NULL, 0, path, len + 1) : 1; + } if (newlen <= maxstorepathlen + 1) { if (newlen == len + 1) { @@ -754,8 +794,9 @@ basicencode(PyBytes_AS_STRING(newobj), newlen, path, len + 1); } - } else + } else { newobj = hashencode(path, len + 1); + } return newobj; }
--- a/mercurial/changegroup.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/changegroup.py Tue Mar 19 16:36:59 2019 +0300 @@ -275,7 +275,7 @@ # because we need to use the top level value (if they exist) # in this function. srctype = tr.hookargs.setdefault('source', srctype) - url = tr.hookargs.setdefault('url', url) + tr.hookargs.setdefault('url', url) repo.hook('prechangegroup', throw=True, **pycompat.strkwargs(tr.hookargs)) @@ -817,13 +817,13 @@ self._verbosenote(_('uncompressed size of bundle content:\n')) size = 0 - clstate, deltas = self._generatechangelog(cl, clnodes) + clstate, deltas = self._generatechangelog(cl, clnodes, + generate=changelog) for delta in deltas: - if changelog: - for chunk in _revisiondeltatochunks(delta, - self._builddeltaheader): - size += len(chunk) - yield chunk + for chunk in _revisiondeltatochunks(delta, + self._builddeltaheader): + size += len(chunk) + yield chunk close = closechunk() size += len(close) @@ -917,12 +917,15 @@ if clnodes: repo.hook('outgoing', node=hex(clnodes[0]), source=source) - def _generatechangelog(self, cl, nodes): + def _generatechangelog(self, cl, nodes, generate=True): """Generate data for changelog chunks. Returns a 2-tuple of a dict containing state and an iterable of byte chunks. The state will not be fully populated until the chunk stream has been fully consumed. 
+ + if generate is False, the state will be fully populated and no chunk + stream will be yielded """ clrevorder = {} manifests = {} @@ -930,6 +933,27 @@ changedfiles = set() clrevtomanifestrev = {} + state = { + 'clrevorder': clrevorder, + 'manifests': manifests, + 'changedfiles': changedfiles, + 'clrevtomanifestrev': clrevtomanifestrev, + } + + if not (generate or self._ellipses): + # sort the nodes in storage order + nodes = sorted(nodes, key=cl.rev) + for node in nodes: + c = cl.changelogrevision(node) + clrevorder[node] = len(clrevorder) + # record the first changeset introducing this manifest version + manifests.setdefault(c.manifest, node) + # Record a complete list of potentially-changed files in + # this manifest. + changedfiles.update(c.files) + + return state, () + # Callback for the changelog, used to collect changed files and # manifest nodes. # Returns the linkrev node (identity in the changelog case). @@ -970,13 +994,6 @@ return x - state = { - 'clrevorder': clrevorder, - 'manifests': manifests, - 'changedfiles': changedfiles, - 'clrevtomanifestrev': clrevtomanifestrev, - } - gen = deltagroup( self._repo, cl, nodes, True, lookupcl, self._forcedeltaparentprev, @@ -1088,6 +1105,11 @@ yield tree, [] def _prunemanifests(self, store, nodes, commonrevs): + if not self._ellipses: + # In non-ellipses case and large repositories, it is better to + # prevent calling of store.rev and store.linkrev on a lot of + # nodes as compared to sending some extra data + return nodes.copy() # This is split out as a separate method to allow filtering # commonrevs in extension code. # @@ -1296,9 +1318,9 @@ assert version in supportedoutgoingversions(repo) if matcher is None: - matcher = matchmod.alwaysmatcher(repo.root, '') + matcher = matchmod.always() if oldmatcher is None: - oldmatcher = matchmod.nevermatcher(repo.root, '') + oldmatcher = matchmod.never() if version == '01' and not matcher.always(): raise error.ProgrammingError('version 01 changegroups do not support '
--- a/mercurial/changelog.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/changelog.py Tue Mar 19 16:36:59 2019 +0300 @@ -22,6 +22,7 @@ error, pycompat, revlog, + util, ) from .utils import ( dateutil, @@ -70,7 +71,10 @@ def encodeextra(d): # keys must be sorted to produce a deterministic changelog entry - items = [_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)] + items = [ + _string_escape('%s:%s' % (k, pycompat.bytestr(d[k]))) + for k in sorted(d) + ] return "\0".join(items) def stripdesc(desc): @@ -179,8 +183,8 @@ """ __slots__ = ( - u'_offsets', - u'_text', + r'_offsets', + r'_text', ) def __new__(cls, text): @@ -347,6 +351,27 @@ def reachableroots(self, minroot, heads, roots, includepath=False): return self.index.reachableroots2(minroot, heads, roots, includepath) + def _checknofilteredinrevs(self, revs): + """raise the appropriate error if 'revs' contains a filtered revision + + This returns a version of 'revs' to be used thereafter by the caller. + In particular, if revs is an iterator, it is converted into a set. + """ + safehasattr = util.safehasattr + if safehasattr(revs, '__next__'): + # Note that inspect.isgenerator() is not true for iterators, + revs = set(revs) + + filteredrevs = self.filteredrevs + if safehasattr(revs, 'first'): # smartset + offenders = revs & filteredrevs + else: + offenders = filteredrevs.intersection(revs) + + for rev in offenders: + raise error.FilteredIndexError(rev) + return revs + def headrevs(self, revs=None): if revs is None and self.filteredrevs: try: @@ -356,6 +381,8 @@ except AttributeError: return self._headrevs() + if self.filteredrevs: + revs = self._checknofilteredinrevs(revs) return super(changelog, self).headrevs(revs) def strip(self, *args, **kwargs):
--- a/mercurial/chgserver.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/chgserver.py Tue Mar 19 16:36:59 2019 +0300 @@ -64,11 +64,12 @@ from .utils import ( procutil, + stringutil, ) def _hashlist(items): """return sha1 hexdigest for a list""" - return node.hex(hashlib.sha1(str(items)).digest()) + return node.hex(hashlib.sha1(stringutil.pprint(items)).digest()) # sensitive config sections affecting confighash _configsections = [ @@ -83,7 +84,7 @@ ] # sensitive environment variables affecting confighash -_envre = re.compile(r'''\A(?: +_envre = re.compile(br'''\A(?: CHGHG |HG(?:DEMANDIMPORT|EMITWARNINGS|MODULEPOLICY|PROF|RCPATH)? |HG(?:ENCODING|PLAIN).* @@ -140,7 +141,7 @@ files = [pycompat.sysexecutable] for m in modules: try: - files.append(inspect.getabsfile(m)) + files.append(pycompat.fsencode(inspect.getabsfile(m))) except TypeError: pass return sorted(set(files)) @@ -449,7 +450,7 @@ if newhash.confighash != self.hashstate.confighash: addr = _hashaddress(self.baseaddress, newhash.confighash) insts.append('redirect %s' % addr) - self.ui.log('chgserver', 'validate: %s\n', insts) + self.ui.log('chgserver', 'validate: %s\n', stringutil.pprint(insts)) self.cresult.write('\0'.join(insts) or '\0') def chdir(self):
--- a/mercurial/cmdutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/cmdutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -180,8 +180,8 @@ def newandmodified(chunks, originalchunks): newlyaddedandmodifiedfiles = set() for chunk in chunks: - if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \ - originalchunks: + if (ishunk(chunk) and chunk.header.isnewfile() and chunk not in + originalchunks): newlyaddedandmodifiedfiles.add(chunk.header.filename()) return newlyaddedandmodifiedfiles @@ -304,7 +304,9 @@ if not force: repo.checkcommitpatterns(wctx, vdirs, match, status, fail) - diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True) + diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True, + section='commands', + configprefix='commit.interactive.') diffopts.nodates = True diffopts.git = True diffopts.showfunc = True @@ -342,8 +344,8 @@ if backupall: tobackup = changed else: - tobackup = [f for f in newfiles if f in modified or f in \ - newlyaddedandmodifiedfiles] + tobackup = [f for f in newfiles if f in modified or f in + newlyaddedandmodifiedfiles] backups = {} if tobackup: backupdir = repo.vfs.join('record-backups') @@ -629,11 +631,9 @@ return _helpmessage('hg unshelve --continue', 'hg unshelve --abort') def _graftmsg(): - # tweakdefaults requires `update` to have a rev hence the `.` return _helpmessage('hg graft --continue', 'hg graft --abort') def _mergemsg(): - # tweakdefaults requires `update` to have a rev hence the `.` return _helpmessage('hg commit', 'hg merge --abort') def _bisectmsg(): @@ -1157,6 +1157,7 @@ dryrun = opts.get("dry_run") wctx = repo[None] + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) def walkpat(pat): srcs = [] if after: @@ -1166,7 +1167,7 @@ m = scmutil.match(wctx, [pat], opts, globbed=True) for abs in wctx.walk(m): state = repo.dirstate[abs] - rel = m.rel(abs) + rel = uipathfn(abs) exact = m.exact(abs) if state in badstates: if exact and state == '?': @@ -1273,10 +1274,6 @@ else: ui.warn(_('%s: 
cannot copy - %s\n') % (relsrc, encoding.strtolocal(inst.strerror))) - if rename: - hint = _("('hg rename --after' to record the rename)\n") - else: - hint = _("('hg copy --after' to record the copy)\n") return True # report a failure if ui.verbose or not exact: @@ -1787,7 +1784,7 @@ wanted = set() copies = [] minrev, maxrev = min(revs), max(revs) - def filerevgen(filelog, last): + def filerevs(filelog, last): """ Only files, no patterns. Check the history of each file. @@ -1850,7 +1847,7 @@ ancestors = {filelog.linkrev(last)} # iterate from latest to oldest revision - for rev, flparentlinkrevs, copied in filerevgen(filelog, last): + for rev, flparentlinkrevs, copied in filerevs(filelog, last): if not follow: if rev > maxrev: continue @@ -1986,7 +1983,10 @@ else: self.revs.discard(value) ctx = change(value) - matches = [f for f in ctx.files() if match(f)] + if allfiles: + matches = list(ctx.manifest().walk(match)) + else: + matches = [f for f in ctx.files() if match(f)] if matches: fncache[value] = matches self.set.add(value) @@ -2053,8 +2053,7 @@ return iterate() -def add(ui, repo, match, prefix, explicitonly, **opts): - join = lambda f: os.path.join(prefix, f) +def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts): bad = [] badfn = lambda x, y: bad.append(x) or match.bad(x, y) @@ -2078,20 +2077,24 @@ cca(f) names.append(f) if ui.verbose or not exact: - ui.status(_('adding %s\n') % match.rel(f), + ui.status(_('adding %s\n') % uipathfn(f), label='ui.addremove.added') for subpath in sorted(wctx.substate): sub = wctx.sub(subpath) try: submatch = matchmod.subdirmatcher(subpath, match) + subprefix = repo.wvfs.reljoin(prefix, subpath) + subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn) if opts.get(r'subrepos'): - bad.extend(sub.add(ui, submatch, prefix, False, **opts)) + bad.extend(sub.add(ui, submatch, subprefix, subuipathfn, False, + **opts)) else: - bad.extend(sub.add(ui, submatch, prefix, True, **opts)) + bad.extend(sub.add(ui, submatch, subprefix, 
subuipathfn, True, + **opts)) except error.LookupError: ui.status(_("skipping missing subrepository: %s\n") - % join(subpath)) + % uipathfn(subpath)) if not opts.get(r'dry_run'): rejected = wctx.add(names, prefix) @@ -2107,10 +2110,10 @@ for subpath in ctx.substate: ctx.sub(subpath).addwebdirpath(serverpath, webconf) -def forget(ui, repo, match, prefix, explicitonly, dryrun, interactive): +def forget(ui, repo, match, prefix, uipathfn, explicitonly, dryrun, + interactive): if dryrun and interactive: raise error.Abort(_("cannot specify both --dry-run and --interactive")) - join = lambda f: os.path.join(prefix, f) bad = [] badfn = lambda x, y: bad.append(x) or match.bad(x, y) wctx = repo[None] @@ -2123,15 +2126,18 @@ for subpath in sorted(wctx.substate): sub = wctx.sub(subpath) + submatch = matchmod.subdirmatcher(subpath, match) + subprefix = repo.wvfs.reljoin(prefix, subpath) + subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn) try: - submatch = matchmod.subdirmatcher(subpath, match) - subbad, subforgot = sub.forget(submatch, prefix, dryrun=dryrun, + subbad, subforgot = sub.forget(submatch, subprefix, subuipathfn, + dryrun=dryrun, interactive=interactive) bad.extend([subpath + '/' + f for f in subbad]) forgot.extend([subpath + '/' + f for f in subforgot]) except error.LookupError: ui.status(_("skipping missing subrepository: %s\n") - % join(subpath)) + % uipathfn(subpath)) if not explicitonly: for f in match.files(): @@ -2146,7 +2152,7 @@ continue ui.warn(_('not removing %s: ' 'file is already untracked\n') - % match.rel(f)) + % uipathfn(f)) bad.append(f) if interactive: @@ -2157,13 +2163,14 @@ '$$ Include &all remaining files' '$$ &? (display help)') for filename in forget[:]: - r = ui.promptchoice(_('forget %s %s') % (filename, responses)) + r = ui.promptchoice(_('forget %s %s') % + (uipathfn(filename), responses)) if r == 4: # ? 
while r == 4: for c, t in ui.extractchoices(responses)[1]: ui.write('%s - %s\n' % (c, encoding.lower(t))) - r = ui.promptchoice(_('forget %s %s') % (filename, - responses)) + r = ui.promptchoice(_('forget %s %s') % + (uipathfn(filename), responses)) if r == 0: # yes continue elif r == 1: # no @@ -2177,7 +2184,7 @@ for f in forget: if ui.verbose or not match.exact(f) or interactive: - ui.status(_('removing %s\n') % match.rel(f), + ui.status(_('removing %s\n') % uipathfn(f), label='ui.addremove.removed') if not dryrun: @@ -2186,7 +2193,7 @@ forgot.extend(f for f in forget if f not in rejected) return bad, forgot -def files(ui, ctx, m, fm, fmt, subrepos): +def files(ui, ctx, m, uipathfn, fm, fmt, subrepos): ret = 1 needsfctx = ui.verbose or {'size', 'flags'} & fm.datahint() @@ -2197,25 +2204,27 @@ fc = ctx[f] fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags()) fm.data(path=f) - fm.plain(fmt % m.rel(f)) + fm.plain(fmt % uipathfn(f)) ret = 0 for subpath in sorted(ctx.substate): submatch = matchmod.subdirmatcher(subpath, m) + subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn) if (subrepos or m.exact(subpath) or any(submatch.files())): sub = ctx.sub(subpath) try: recurse = m.exact(subpath) or subrepos - if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0: + if sub.printfiles(ui, submatch, subuipathfn, fm, fmt, + recurse) == 0: ret = 0 except error.LookupError: ui.status(_("skipping missing subrepository: %s\n") - % m.abs(subpath)) + % uipathfn(subpath)) return ret -def remove(ui, repo, m, prefix, after, force, subrepos, dryrun, warnings=None): - join = lambda f: os.path.join(prefix, f) +def remove(ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, + warnings=None): ret = 0 s = repo.status(match=m, clean=True) modified, added, deleted, clean = s[0], s[1], s[3], s[6] @@ -2233,16 +2242,18 @@ unit=_('subrepos')) for subpath in subs: submatch = matchmod.subdirmatcher(subpath, m) + subprefix = repo.wvfs.reljoin(prefix, subpath) + subuipathfn = 
scmutil.subdiruipathfn(subpath, uipathfn) if subrepos or m.exact(subpath) or any(submatch.files()): progress.increment() sub = wctx.sub(subpath) try: - if sub.removefiles(submatch, prefix, after, force, subrepos, - dryrun, warnings): + if sub.removefiles(submatch, subprefix, subuipathfn, after, + force, subrepos, dryrun, warnings): ret = 1 except error.LookupError: warnings.append(_("skipping missing subrepository: %s\n") - % join(subpath)) + % uipathfn(subpath)) progress.complete() # warn about failure to delete explicit files/dirs @@ -2266,10 +2277,10 @@ if repo.wvfs.exists(f): if repo.wvfs.isdir(f): warnings.append(_('not removing %s: no tracked files\n') - % m.rel(f)) + % uipathfn(f)) else: warnings.append(_('not removing %s: file is untracked\n') - % m.rel(f)) + % uipathfn(f)) # missing files will generate a warning elsewhere ret = 1 progress.complete() @@ -2285,7 +2296,7 @@ progress.increment() if ui.verbose or (f in files): warnings.append(_('not removing %s: file still exists\n') - % m.rel(f)) + % uipathfn(f)) ret = 1 progress.complete() else: @@ -2296,12 +2307,12 @@ for f in modified: progress.increment() warnings.append(_('not removing %s: file is modified (use -f' - ' to force removal)\n') % m.rel(f)) + ' to force removal)\n') % uipathfn(f)) ret = 1 for f in added: progress.increment() warnings.append(_("not removing %s: file has been marked for add" - " (use 'hg forget' to undo add)\n") % m.rel(f)) + " (use 'hg forget' to undo add)\n") % uipathfn(f)) ret = 1 progress.complete() @@ -2311,7 +2322,7 @@ for f in list: if ui.verbose or not m.exact(f): progress.increment() - ui.status(_('removing %s\n') % m.rel(f), + ui.status(_('removing %s\n') % uipathfn(f), label='ui.addremove.removed') progress.complete() @@ -2382,18 +2393,18 @@ write(abs) err = 0 + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) for subpath in sorted(ctx.substate): sub = ctx.sub(subpath) try: submatch = matchmod.subdirmatcher(subpath, matcher) - - if not sub.cat(submatch, 
basefm, fntemplate, - os.path.join(prefix, sub._path), + subprefix = os.path.join(prefix, subpath) + if not sub.cat(submatch, basefm, fntemplate, subprefix, **pycompat.strkwargs(opts)): err = 0 except error.RepoLookupError: - ui.status(_("skipping missing subrepository: %s\n") - % os.path.join(prefix, subpath)) + ui.status(_("skipping missing subrepository: %s\n") % + uipathfn(subpath)) return err @@ -2412,7 +2423,9 @@ dsguard = dirstateguard.dirstateguard(repo, 'commit') with dsguard or util.nullcontextmanager(): if dsguard: - if scmutil.addremove(repo, matcher, "", opts) != 0: + relative = scmutil.anypats(pats, opts) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative) + if scmutil.addremove(repo, matcher, "", uipathfn, opts) != 0: raise error.Abort( _("failed to mark all new/missing files as added/removed")) @@ -2490,8 +2503,10 @@ # add/remove the files to the working copy if the "addremove" option # was specified. matcher = scmutil.match(wctx, pats, opts) + relative = scmutil.anypats(pats, opts) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative) if (opts.get('addremove') - and scmutil.addremove(repo, matcher, "", opts)): + and scmutil.addremove(repo, matcher, "", uipathfn, opts)): raise error.Abort( _("failed to mark all new/missing files as added/removed")) @@ -2807,6 +2822,7 @@ # The mapping is in the form: # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>) names = {} + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) with repo.wlock(): ## filling of the `names` mapping @@ -2822,7 +2838,7 @@ if not m.always(): matcher = matchmod.badmatch(m, lambda x, y: False) for abs in wctx.walk(matcher): - names[abs] = m.rel(abs), m.exact(abs) + names[abs] = m.exact(abs) # walk target manifest to fill `names` @@ -2835,11 +2851,11 @@ for f in names: if f.startswith(path_): return - ui.warn("%s: %s\n" % (m.rel(path), msg)) + ui.warn("%s: %s\n" % (uipathfn(path), msg)) for abs in 
ctx.walk(matchmod.badmatch(m, badfn)): if abs not in names: - names[abs] = m.rel(abs), m.exact(abs) + names[abs] = m.exact(abs) # Find status of all file in `names`. m = scmutil.matchfiles(repo, names) @@ -2850,7 +2866,7 @@ changes = repo.status(node1=node, match=m) for kind in changes: for abs in kind: - names[abs] = m.rel(abs), m.exact(abs) + names[abs] = m.exact(abs) m = scmutil.matchfiles(repo, names) @@ -2912,13 +2928,12 @@ dsmodified -= mergeadd # if f is a rename, update `names` to also revert the source - cwd = repo.getcwd() for f in localchanges: src = repo.dirstate.copied(f) # XXX should we check for rename down to target node? if src and src not in names and repo.dirstate[src] == 'r': dsremoved.add(src) - names[src] = (repo.pathto(src, cwd), True) + names[src] = True # determine the exact nature of the deleted changesets deladded = set(_deleted) @@ -3025,7 +3040,7 @@ (unknown, actions['unknown'], discard), ) - for abs, (rel, exact) in sorted(names.items()): + for abs, exact in sorted(names.items()): # target file to be touch on disk (relative to cwd) target = repo.wjoin(abs) # search the entry in the dispatch table. 
@@ -3042,19 +3057,21 @@ if dobackup == backupinteractive: tobackup.add(abs) elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])): - bakname = scmutil.origpath(ui, repo, rel) + absbakname = scmutil.backuppath(ui, repo, abs) + bakname = os.path.relpath(absbakname, + start=repo.root) ui.note(_('saving current version of %s as %s\n') % - (rel, bakname)) + (uipathfn(abs), uipathfn(bakname))) if not opts.get('dry_run'): if interactive: - util.copyfile(target, bakname) + util.copyfile(target, absbakname) else: - util.rename(target, bakname) + util.rename(target, absbakname) if opts.get('dry_run'): if ui.verbose or not exact: - ui.status(msg % rel) + ui.status(msg % uipathfn(abs)) elif exact: - ui.warn(msg % rel) + ui.warn(msg % uipathfn(abs)) break if not opts.get('dry_run'): @@ -3065,8 +3082,8 @@ prefetch(repo, [ctx.rev()], matchfiles(repo, [f for sublist in oplist for f in sublist])) - _performrevert(repo, parents, ctx, names, actions, interactive, - tobackup) + _performrevert(repo, parents, ctx, names, uipathfn, actions, + interactive, tobackup) if targetsubs: # Revert the subrepos on the revert list @@ -3078,8 +3095,8 @@ raise error.Abort("subrepository '%s' does not exist in %s!" 
% (sub, short(ctx.node()))) -def _performrevert(repo, parents, ctx, names, actions, interactive=False, - tobackup=None): +def _performrevert(repo, parents, ctx, names, uipathfn, actions, + interactive=False, tobackup=None): """function that actually perform all the actions computed for revert This is an independent function to let extension to plug in and react to @@ -3104,15 +3121,15 @@ repo.dirstate.remove(f) def prntstatusmsg(action, f): - rel, exact = names[f] + exact = names[f] if repo.ui.verbose or not exact: - repo.ui.status(actions[action][1] % rel) + repo.ui.status(actions[action][1] % uipathfn(f)) audit_path = pathutil.pathauditor(repo.root, cached=True) for f in actions['forget'][0]: if interactive: choice = repo.ui.promptchoice( - _("forget added file %s (Yn)?$$ &Yes $$ &No") % f) + _("forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)) if choice == 0: prntstatusmsg('forget', f) repo.dirstate.drop(f) @@ -3125,7 +3142,7 @@ audit_path(f) if interactive: choice = repo.ui.promptchoice( - _("remove added file %s (Yn)?$$ &Yes $$ &No") % f) + _("remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)) if choice == 0: prntstatusmsg('remove', f) doremove(f) @@ -3154,7 +3171,9 @@ # Prompt the user for changes to revert torevert = [f for f in actions['revert'][0] if f not in excluded_files] m = scmutil.matchfiles(repo, torevert) - diffopts = patch.difffeatureopts(repo.ui, whitespace=True) + diffopts = patch.difffeatureopts(repo.ui, whitespace=True, + section='commands', + configprefix='revert.interactive.') diffopts.nodates = True diffopts.git = True operation = 'discard' @@ -3192,7 +3211,7 @@ # Create a backup file only if this hunk should be backed up if c.header.filename() in tobackup: target = repo.wjoin(abs) - bakname = scmutil.origpath(repo.ui, repo, m.rel(abs)) + bakname = scmutil.backuppath(repo.ui, repo, abs) util.copyfile(target, bakname) tobackup.remove(abs) c.write(fp) @@ -3222,9 +3241,19 @@ if node == parent and p2 == nullid: normal = 
repo.dirstate.normal for f in actions['undelete'][0]: - prntstatusmsg('undelete', f) - checkout(f) - normal(f) + if interactive: + choice = repo.ui.promptchoice( + _("add back removed file %s (Yn)?$$ &Yes $$ &No") % f) + if choice == 0: + prntstatusmsg('undelete', f) + checkout(f) + normal(f) + else: + excluded_files.append(f) + else: + prntstatusmsg('undelete', f) + checkout(f) + normal(f) copied = copies.pathcopies(repo[parent], ctx)
--- a/mercurial/color.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/color.py Tue Mar 19 16:36:59 2019 +0300 @@ -77,12 +77,13 @@ _defaultstyles = { 'grep.match': 'red bold', 'grep.linenumber': 'green', - 'grep.rev': 'green', - 'grep.change': 'green', + 'grep.rev': 'blue', 'grep.sep': 'cyan', 'grep.filename': 'magenta', 'grep.user': 'magenta', 'grep.date': 'magenta', + 'grep.inserted': 'green bold', + 'grep.deleted': 'red bold', 'bookmarks.active': 'green', 'branches.active': 'none', 'branches.closed': 'black bold', @@ -169,7 +170,7 @@ ui._terminfoparams[key[9:]] = newval try: curses.setupterm() - except curses.error as e: + except curses.error: ui._terminfoparams.clear() return @@ -484,7 +485,7 @@ w32effects = None else: origattr = csbi.wAttributes - ansire = re.compile(b'\033\[([^m]*)m([^\033]*)(.*)', + ansire = re.compile(br'\033\[([^m]*)m([^\033]*)(.*)', re.MULTILINE | re.DOTALL) def win32print(ui, writefunc, text, **opts):
--- a/mercurial/commands.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/commands.py Tue Mar 19 16:36:59 2019 +0300 @@ -61,7 +61,6 @@ state as statemod, streamclone, tags as tagsmod, - templatekw, ui as uimod, util, wireprotoserver, @@ -180,7 +179,8 @@ """ m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts)) - rejected = cmdutil.add(ui, repo, m, "", False, **opts) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + rejected = cmdutil.add(ui, repo, m, "", uipathfn, False, **opts) return rejected and 1 or 0 @command('addremove', @@ -254,7 +254,9 @@ if not opts.get('similarity'): opts['similarity'] = '100' matcher = scmutil.match(repo[None], pats, opts) - return scmutil.addremove(repo, matcher, "", opts) + relative = scmutil.anypats(pats, opts) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative) + return scmutil.addremove(repo, matcher, "", uipathfn, opts) @command('annotate|blame', [('r', 'rev', '', _('annotate the specified revision'), _('REV')), @@ -407,12 +409,13 @@ if skiprevs: skiprevs = scmutil.revrange(repo, skiprevs) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) for abs in ctx.walk(m): fctx = ctx[abs] rootfm.startitem() rootfm.data(path=abs) if not opts.get('text') and fctx.isbinary(): - rootfm.plain(_("%s: binary file\n") % m.rel(abs)) + rootfm.plain(_("%s: binary file\n") % uipathfn(abs)) continue fm = rootfm.nested('lines', tmpl='{rev}: {line}') @@ -1102,7 +1105,7 @@ with repo.wlock(): if opts.get('clean'): - label = repo[None].p1().branch() + label = repo['.'].branch() repo.dirstate.setbranch(label) ui.status(_('reset working directory to branch %s\n') % label) elif label: @@ -1672,8 +1675,8 @@ if not bheads: raise error.Abort(_('can only close branch heads')) elif opts.get('amend'): - if repo[None].parents()[0].p1().branch() != branch and \ - repo[None].parents()[0].p2().branch() != branch: + if (repo['.'].p1().branch() != branch and + repo['.'].p2().branch() != branch): raise 
error.Abort(_('can only close branch heads')) if opts.get('amend'): @@ -2209,8 +2212,10 @@ m = scmutil.match(ctx, pats, opts) ui.pager('files') + uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True) with ui.formatter('files', opts) as fm: - return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos')) + return cmdutil.files(ui, ctx, m, uipathfn, fm, fmt, + opts.get('subrepos')) @command( 'forget', @@ -2254,7 +2259,8 @@ m = scmutil.match(repo[None], pats, opts) dryrun, interactive = opts.get('dry_run'), opts.get('interactive') - rejected = cmdutil.forget(ui, repo, m, prefix="", + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + rejected = cmdutil.forget(ui, repo, m, prefix="", uipathfn=uipathfn, explicitonly=False, dryrun=dryrun, interactive=interactive)[0] return rejected and 1 or 0 @@ -2633,7 +2639,6 @@ raise error.Abort(_("cannot abort using an old graftstate")) # changeset from which graft operation was started - startctx = None if len(newnodes) > 0: startctx = repo[newnodes[0]].p1() else: @@ -2849,6 +2854,7 @@ for i in pycompat.xrange(blo, bhi): yield ('+', b[i]) + uipathfn = scmutil.getuipathfn(repo) def display(fm, fn, ctx, pstates, states): rev = scmutil.intrev(ctx) if fm.isplain(): @@ -2868,7 +2874,7 @@ except error.WdirUnsupported: return ctx[fn].isbinary() - fieldnamemap = {'filename': 'path', 'linenumber': 'lineno'} + fieldnamemap = {'linenumber': 'lineno'} if diff: iter = difflinestates(pstates, states) else: @@ -2876,27 +2882,29 @@ for change, l in iter: fm.startitem() fm.context(ctx=ctx) - fm.data(node=fm.hexfunc(scmutil.binnode(ctx))) + fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn) + fm.plain(uipathfn(fn), label='grep.filename') cols = [ - ('filename', '%s', fn, True), - ('rev', '%d', rev, not plaingrep), - ('linenumber', '%d', l.linenum, opts.get('line_number')), + ('rev', '%d', rev, not plaingrep, ''), + ('linenumber', '%d', l.linenum, opts.get('line_number'), ''), ] if diff: - cols.append(('change', 
'%s', change, True)) + cols.append( + ('change', '%s', change, True, + 'grep.inserted ' if change == '+' else 'grep.deleted ') + ) cols.extend([ - ('user', '%s', formatuser(ctx.user()), opts.get('user')), + ('user', '%s', formatuser(ctx.user()), opts.get('user'), ''), ('date', '%s', fm.formatdate(ctx.date(), datefmt), - opts.get('date')), + opts.get('date'), ''), ]) - lastcol = next( - name for name, fmt, data, cond in reversed(cols) if cond) - for name, fmt, data, cond in cols: + for name, fmt, data, cond, extra_label in cols: + if cond: + fm.plain(sep, label='grep.sep') field = fieldnamemap.get(name, name) - fm.condwrite(cond, field, fmt, data, label='grep.%s' % name) - if cond and name != lastcol: - fm.plain(sep, label='grep.sep') + label = extra_label + ('grep.%s' % name) + fm.condwrite(cond, field, fmt, data, label=label) if not opts.get('files_with_matches'): fm.plain(sep, label='grep.sep') if not opts.get('text') and binary(): @@ -2926,12 +2934,13 @@ fm.data(matched=False) fm.end() - skip = {} + skip = set() revfiles = {} match = scmutil.match(repo[None], pats, opts) found = False follow = opts.get('follow') + getrenamed = scmutil.getrenamedfn(repo) def prep(ctx, fns): rev = ctx.rev() pctx = ctx.p1() @@ -2945,16 +2954,15 @@ fnode = ctx.filenode(fn) except error.LookupError: continue - try: - copied = flog.renamed(fnode) - except error.WdirUnsupported: - copied = ctx[fn].renamed() - copy = follow and copied and copied[0] - if copy: - copies.setdefault(rev, {})[fn] = copy + + copy = None + if follow: + copy = getrenamed(fn, rev) + if copy: + copies.setdefault(rev, {})[fn] = copy + if fn in skip: + skip.add(copy) if fn in skip: - if copy: - skip[copy] = True continue files.append(fn) @@ -2983,16 +2991,16 @@ copy = copies.get(rev, {}).get(fn) if fn in skip: if copy: - skip[copy] = True + skip.add(copy) continue pstates = matches.get(parent, {}).get(copy or fn, []) if pstates or states: r = display(fm, fn, ctx, pstates, states) found = found or r if r and not 
diff and not all_files: - skip[fn] = True + skip.add(fn) if copy: - skip[copy] = True + skip.add(copy) del revfiles[rev] # We will keep the matches dict for the duration of the window # clear the matches dict once the window is over @@ -3683,11 +3691,12 @@ filesgen = sorted(repo.dirstate.matches(m)) else: filesgen = ctx.matches(m) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats)) for abs in filesgen: if opts.get('fullpath'): ui.write(repo.wjoin(abs), end) else: - ui.write(((pats and m.rel(abs)) or abs), end) + ui.write(uipathfn(abs), end) ret = 0 return ret @@ -3872,7 +3881,7 @@ endrev = None if revs: endrev = revs.max() + 1 - getrenamed = templatekw.getrenamedfn(repo, endrev=endrev) + getrenamed = scmutil.getrenamedfn(repo, endrev=endrev) ui.pager('log') displayer = logcmdutil.changesetdisplayer(ui, repo, opts, differ, @@ -4361,7 +4370,7 @@ msg = _("not updating: %s") % stringutil.forcebytestr(inst) hint = inst.hint raise error.UpdateAbort(msg, hint=hint) - if modheads > 1: + if modheads is not None and modheads > 1: currentbranchheads = len(repo.branchheads()) if currentbranchheads == modheads: ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) @@ -4714,7 +4723,8 @@ m = scmutil.match(repo[None], pats, opts) subrepos = opts.get('subrepos') - return cmdutil.remove(ui, repo, m, "", after, force, subrepos, + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + return cmdutil.remove(ui, repo, m, "", uipathfn, after, force, subrepos, dryrun=dryrun) @command('rename|move|mv', @@ -4809,8 +4819,8 @@ opts = pycompat.byteskwargs(opts) confirm = ui.configbool('commands', 'resolve.confirm') flaglist = 'all mark unmark list no_status re_merge'.split() - all, mark, unmark, show, nostatus, remerge = \ - [opts.get(o) for o in flaglist] + all, mark, unmark, show, nostatus, remerge = [ + opts.get(o) for o in flaglist] actioncount = len(list(filter(None, [show, mark, unmark, remerge]))) if actioncount > 1: @@ -4839,6 +4849,8 @@ b'$$ 
&Yes $$ &No')): raise error.Abort(_('user quit')) + uipathfn = scmutil.getuipathfn(repo) + if show: ui.pager('resolve') fm = ui.formatter('resolve', opts) @@ -4866,7 +4878,8 @@ fm.startitem() fm.context(ctx=wctx) fm.condwrite(not nostatus, 'mergestatus', '%s ', key, label=label) - fm.write('path', '%s\n', f, label=label) + fm.data(path=f) + fm.plain('%s\n' % uipathfn(f), label=label) fm.end() return 0 @@ -4912,11 +4925,11 @@ if mark: if exact: ui.warn(_('not marking %s as it is driver-resolved\n') - % f) + % uipathfn(f)) elif unmark: if exact: ui.warn(_('not unmarking %s as it is driver-resolved\n') - % f) + % uipathfn(f)) else: runconclude = True continue @@ -4930,14 +4943,14 @@ ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED_PATH) elif ms[f] == mergemod.MERGE_RECORD_UNRESOLVED_PATH: ui.warn(_('%s: path conflict must be resolved manually\n') - % f) + % uipathfn(f)) continue if mark: if markcheck: fdata = repo.wvfs.tryread(f) - if filemerge.hasconflictmarkers(fdata) and \ - ms[f] != mergemod.MERGE_RECORD_RESOLVED: + if (filemerge.hasconflictmarkers(fdata) and + ms[f] != mergemod.MERGE_RECORD_RESOLVED): hasconflictmarkers.append(f) ms.mark(f, mergemod.MERGE_RECORD_RESOLVED) elif unmark: @@ -4968,14 +4981,15 @@ if complete: try: util.rename(a + ".resolve", - scmutil.origpath(ui, repo, a)) + scmutil.backuppath(ui, repo, f)) except OSError as inst: if inst.errno != errno.ENOENT: raise if hasconflictmarkers: ui.warn(_('warning: the following files still have conflict ' - 'markers:\n ') + '\n '.join(hasconflictmarkers) + '\n') + 'markers:\n') + ''.join(' ' + uipathfn(f) + '\n' + for f in hasconflictmarkers)) if markcheck == 'abort' and not all and not pats: raise error.Abort(_('conflict markers detected'), hint=_('use --all to mark anyway')) @@ -4994,7 +5008,7 @@ # replace filemerge's .orig file with our resolve file a = repo.wjoin(f) try: - util.rename(a + ".resolve", scmutil.origpath(ui, repo, a)) + util.rename(a + ".resolve", scmutil.backuppath(ui, repo, f)) except 
OSError as inst: if inst.errno != errno.ENOENT: raise @@ -5413,10 +5427,11 @@ repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn') ctx1, ctx2 = scmutil.revpair(repo, revs) - if pats or ui.configbool('commands', 'status.relative'): - cwd = repo.getcwd() - else: - cwd = '' + forcerelativevalue = None + if ui.hasconfig('commands', 'status.relative'): + forcerelativevalue = ui.configbool('commands', 'status.relative') + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats), + forcerelativevalue=forcerelativevalue) if opts.get('print0'): end = '\0' @@ -5467,10 +5482,10 @@ fm.context(ctx=ctx2) fm.data(path=f) fm.condwrite(showchar, 'status', '%s ', char, label=label) - fm.plain(fmt % repo.pathto(f, cwd), label=label) + fm.plain(fmt % uipathfn(f), label=label) if f in copy: fm.data(source=copy[f]) - fm.plain((' %s' + end) % repo.pathto(copy[f], cwd), + fm.plain((' %s' + end) % uipathfn(copy[f]), label='status.copied') if ((ui.verbose or ui.configbool('commands', 'status.verbose')) @@ -5503,7 +5518,6 @@ pnode = parents[0].node() marks = [] - ms = None try: ms = mergemod.mergestate.read(repo) except error.UnsupportedMergeRecords as e: @@ -5830,6 +5844,10 @@ expectedtype = 'global' for n in names: + if repo.tagtype(n) == 'global': + alltags = tagsmod.findglobaltags(ui, repo) + if alltags[n][0] == nullid: + raise error.Abort(_("tag '%s' is already removed") % n) if not repo.tagtype(n): raise error.Abort(_("tag '%s' does not exist") % n) if repo.tagtype(n) != expectedtype: @@ -5908,7 +5926,6 @@ ui.pager('tags') fm = ui.formatter('tags', opts) hexfunc = fm.hexfunc - tagtype = "" for t, n in reversed(repo.tagslist()): hn = hexfunc(n)
--- a/mercurial/config.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/config.py Tue Mar 19 16:36:59 2019 +0300 @@ -78,6 +78,10 @@ return list(self._data.get(section, {}).iteritems()) def set(self, section, item, value, source=""): if pycompat.ispy3: + assert not isinstance(section, str), ( + 'config section may not be unicode strings on Python 3') + assert not isinstance(item, str), ( + 'config item may not be unicode strings on Python 3') assert not isinstance(value, str), ( 'config values may not be unicode strings on Python 3') if section not in self:
--- a/mercurial/configitems.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/configitems.py Tue Mar 19 16:36:59 2019 +0300 @@ -113,46 +113,49 @@ coreconfigitem = getitemregister(coreitems) +def _registerdiffopts(section, configprefix=''): + coreconfigitem(section, configprefix + 'nodates', + default=False, + ) + coreconfigitem(section, configprefix + 'showfunc', + default=False, + ) + coreconfigitem(section, configprefix + 'unified', + default=None, + ) + coreconfigitem(section, configprefix + 'git', + default=False, + ) + coreconfigitem(section, configprefix + 'ignorews', + default=False, + ) + coreconfigitem(section, configprefix + 'ignorewsamount', + default=False, + ) + coreconfigitem(section, configprefix + 'ignoreblanklines', + default=False, + ) + coreconfigitem(section, configprefix + 'ignorewseol', + default=False, + ) + coreconfigitem(section, configprefix + 'nobinary', + default=False, + ) + coreconfigitem(section, configprefix + 'noprefix', + default=False, + ) + coreconfigitem(section, configprefix + 'word-diff', + default=False, + ) + coreconfigitem('alias', '.*', default=dynamicdefault, generic=True, ) -coreconfigitem('annotate', 'nodates', - default=False, -) -coreconfigitem('annotate', 'showfunc', - default=False, -) -coreconfigitem('annotate', 'unified', - default=None, -) -coreconfigitem('annotate', 'git', - default=False, -) -coreconfigitem('annotate', 'ignorews', - default=False, -) -coreconfigitem('annotate', 'ignorewsamount', - default=False, -) -coreconfigitem('annotate', 'ignoreblanklines', - default=False, -) -coreconfigitem('annotate', 'ignorewseol', - default=False, -) -coreconfigitem('annotate', 'nobinary', - default=False, -) -coreconfigitem('annotate', 'noprefix', - default=False, -) -coreconfigitem('annotate', 'word-diff', - default=False, -) coreconfigitem('auth', 'cookiefile', default=None, ) +_registerdiffopts(section='annotate') # bookmarks.pushing: internal hack for discovery coreconfigitem('bookmarks', 'pushing', default=list, 
@@ -198,6 +201,7 @@ coreconfigitem('color', 'pagermode', default=dynamicdefault, ) +_registerdiffopts(section='commands', configprefix='commit.interactive.') coreconfigitem('commands', 'grep.all-files', default=False, ) @@ -210,6 +214,7 @@ coreconfigitem('commands', 'resolve.mark-check', default='none', ) +_registerdiffopts(section='commands', configprefix='revert.interactive.') coreconfigitem('commands', 'show.aliasprefix', default=list, ) @@ -404,39 +409,7 @@ coreconfigitem('devel', 'debug.peer-request', default=False, ) -coreconfigitem('diff', 'nodates', - default=False, -) -coreconfigitem('diff', 'showfunc', - default=False, -) -coreconfigitem('diff', 'unified', - default=None, -) -coreconfigitem('diff', 'git', - default=False, -) -coreconfigitem('diff', 'ignorews', - default=False, -) -coreconfigitem('diff', 'ignorewsamount', - default=False, -) -coreconfigitem('diff', 'ignoreblanklines', - default=False, -) -coreconfigitem('diff', 'ignorewseol', - default=False, -) -coreconfigitem('diff', 'nobinary', - default=False, -) -coreconfigitem('diff', 'noprefix', - default=False, -) -coreconfigitem('diff', 'word-diff', - default=False, -) +_registerdiffopts(section='diff') coreconfigitem('email', 'bcc', default=None, ) @@ -497,6 +470,9 @@ coreconfigitem('experimental', 'changegroup3', default=False, ) +coreconfigitem('experimental', 'cleanup-as-archived', + default=False, +) coreconfigitem('experimental', 'clientcompressionengines', default=list, ) @@ -509,6 +485,9 @@ coreconfigitem('experimental', 'copytrace.sourcecommitlimit', default=100, ) +coreconfigitem('experimental', 'copies.read-from', + default="filelog-only", +) coreconfigitem('experimental', 'crecordtest', default=None, ) @@ -720,11 +699,11 @@ coreconfigitem('fsmonitor', 'warn_update_file_count', default=50000, ) -coreconfigitem('help', 'hidden-command\..*', +coreconfigitem('help', br'hidden-command\..*', default=False, generic=True, ) -coreconfigitem('help', 'hidden-topic\..*', +coreconfigitem('help', 
br'hidden-topic\..*', default=False, generic=True, ) @@ -1004,6 +983,12 @@ default=True, alias=[('format', 'aggressivemergedeltas')], ) +coreconfigitem('storage', 'revlog.reuse-external-delta', + default=True, +) +coreconfigitem('storage', 'revlog.reuse-external-delta-parent', + default=None, +) coreconfigitem('server', 'bookmarks-pushkey-compat', default=True, ) @@ -1056,6 +1041,9 @@ coreconfigitem('server', 'uncompressedallowsecret', default=False, ) +coreconfigitem('server', 'view', + default='served', +) coreconfigitem('server', 'validate', default=False, ) @@ -1108,6 +1096,10 @@ default=None, generic=True, ) +coreconfigitem('templateconfig', '.*', + default=dynamicdefault, + generic=True, +) coreconfigitem('trusted', 'groups', default=list, ) @@ -1233,6 +1225,9 @@ coreconfigitem('ui', 'quietbookmarkmove', default=False, ) +coreconfigitem('ui', 'relative-paths', + default='legacy', +) coreconfigitem('ui', 'remotecmd', default='hg', )
--- a/mercurial/context.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/context.py Tue Mar 19 16:36:59 2019 +0300 @@ -21,7 +21,7 @@ nullrev, short, wdirfilenodeids, - wdirid, + wdirhex, ) from . import ( dagop, @@ -294,16 +294,16 @@ listsubrepos=listsubrepos, badfn=badfn) def diff(self, ctx2=None, match=None, changes=None, opts=None, - losedatafn=None, prefix='', relroot='', copy=None, - hunksfilterfn=None): + losedatafn=None, pathfn=None, copy=None, + copysourcematch=None, hunksfilterfn=None): """Returns a diff generator for the given contexts and matcher""" if ctx2 is None: ctx2 = self.p1() if ctx2 is not None: ctx2 = self._repo[ctx2] return patch.diff(self._repo, ctx2, self, match=match, changes=changes, - opts=opts, losedatafn=losedatafn, prefix=prefix, - relroot=relroot, copy=copy, + opts=opts, losedatafn=losedatafn, pathfn=pathfn, + copy=copy, copysourcematch=copysourcematch, hunksfilterfn=hunksfilterfn) def dirs(self): @@ -439,6 +439,29 @@ return self._changeset.date def files(self): return self._changeset.files + @propertycache + def _copies(self): + p1copies = {} + p2copies = {} + p1 = self.p1() + p2 = self.p2() + narrowmatch = self._repo.narrowmatch() + for dst in self.files(): + if not narrowmatch(dst) or dst not in self: + continue + copied = self[dst].renamed() + if not copied: + continue + src, srcnode = copied + if src in p1 and p1[src].filenode() == srcnode: + p1copies[dst] = src + elif src in p2 and p2[src].filenode() == srcnode: + p2copies[dst] = src + return p1copies, p2copies + def p1copies(self): + return self._copies[0] + def p2copies(self): + return self._copies[1] def description(self): return self._changeset.description def branch(self): @@ -668,6 +691,8 @@ return self._changectx def renamed(self): return self._copied + def copysource(self): + return self._copied and self._copied[0] def repo(self): return self._repo def size(self): @@ -960,9 +985,9 @@ assert (changeid is not None or fileid is not None - or changectx is not None), \ - 
("bad args: changeid=%r, fileid=%r, changectx=%r" - % (changeid, fileid, changectx)) + or changectx is not None), ( + "bad args: changeid=%r, fileid=%r, changectx=%r" + % (changeid, fileid, changectx)) if filelog is not None: self._filelog = filelog @@ -1158,7 +1183,6 @@ def files(self): return sorted(self._status.modified + self._status.added + self._status.removed) - def modified(self): return self._status.modified def added(self): @@ -1167,6 +1191,26 @@ return self._status.removed def deleted(self): return self._status.deleted + @propertycache + def _copies(self): + p1copies = {} + p2copies = {} + parents = self._repo.dirstate.parents() + p1manifest = self._repo[parents[0]].manifest() + p2manifest = self._repo[parents[1]].manifest() + narrowmatch = self._repo.narrowmatch() + for dst, src in self._repo.dirstate.copies().items(): + if not narrowmatch(dst): + continue + if src in p1manifest: + p1copies[dst] = src + elif src in p2manifest: + p2copies[dst] = src + return p1copies, p2copies + def p1copies(self): + return self._copies[0] + def p2copies(self): + return self._copies[1] def branch(self): return encoding.tolocal(self._extra['branch']) def closesbranch(self): @@ -1280,7 +1324,7 @@ return self._repo.dirstate[key] not in "?r" def hex(self): - return hex(wdirid) + return wdirhex @propertycache def _parents(self): @@ -1355,28 +1399,15 @@ uipath = lambda f: ds.pathto(pathutil.join(prefix, f)) rejected = [] for f in files: - if f not in self._repo.dirstate: + if f not in ds: self._repo.ui.warn(_("%s not tracked!\n") % uipath(f)) rejected.append(f) - elif self._repo.dirstate[f] != 'a': - self._repo.dirstate.remove(f) + elif ds[f] != 'a': + ds.remove(f) else: - self._repo.dirstate.drop(f) + ds.drop(f) return rejected - def undelete(self, list): - pctxs = self.parents() - with self._repo.wlock(): - ds = self._repo.dirstate - for f in list: - if self._repo.dirstate[f] != 'r': - self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f)) - else: - fctx = f in pctxs[0] 
and pctxs[0][f] or pctxs[1][f] - t = fctx.data() - self._repo.wwrite(f, t, fctx.flags()) - self._repo.dirstate.normal(f) - def copy(self, source, dest): try: st = self._repo.wvfs.lstat(dest) @@ -1392,11 +1423,12 @@ % self._repo.dirstate.pathto(dest)) else: with self._repo.wlock(): - if self._repo.dirstate[dest] in '?': - self._repo.dirstate.add(dest) - elif self._repo.dirstate[dest] in 'r': - self._repo.dirstate.normallookup(dest) - self._repo.dirstate.copy(source, dest) + ds = self._repo.dirstate + if ds[dest] in '?': + ds.add(dest) + elif ds[dest] in 'r': + ds.normallookup(dest) + ds.copy(source, dest) def match(self, pats=None, include=None, exclude=None, default='glob', listsubrepos=False, badfn=None): @@ -1632,6 +1664,12 @@ # linked to self._changectx no matter if file is modified or not return self.rev() + def renamed(self): + path = self.copysource() + if not path: + return None + return path, self._changectx._parents[0]._manifest.get(path, nullid) + def parents(self): '''return parent filectxs, following copies if necessary''' def filenode(ctx, path): @@ -1668,11 +1706,8 @@ def data(self): return self._repo.wread(self._path) - def renamed(self): - rp = self._repo.dirstate.copied(self._path) - if not rp: - return None - return rp, self._changectx._parents[0]._manifest.get(rp, nullid) + def copysource(self): + return self._repo.dirstate.copied(self._path) def size(self): return self._repo.wvfs.lstat(self._path).st_size @@ -1822,6 +1857,30 @@ return [f for f in self._cache.keys() if not self._cache[f]['exists'] and self._existsinparent(f)] + def p1copies(self): + copies = self._repo._wrappedctx.p1copies().copy() + narrowmatch = self._repo.narrowmatch() + for f in self._cache.keys(): + if not narrowmatch(f): + continue + copies.pop(f, None) # delete if it exists + source = self._cache[f]['copied'] + if source: + copies[f] = source + return copies + + def p2copies(self): + copies = self._repo._wrappedctx.p2copies().copy() + narrowmatch = self._repo.narrowmatch() 
+ for f in self._cache.keys(): + if not narrowmatch(f): + continue + copies.pop(f, None) # delete if it exists + source = self._cache[f]['copied'] + if source: + copies[f] = source + return copies + def isinmemory(self): return True @@ -1832,10 +1891,8 @@ return self._wrappedctx[path].date() def markcopied(self, path, origin): - if self.isdirty(path): - self._cache[path]['copied'] = origin - else: - raise error.ProgrammingError('markcopied() called on clean context') + self._markdirty(path, exists=True, date=self.filedate(path), + flags=self.flags(path), copied=origin) def copydata(self, path): if self.isdirty(path): @@ -1897,7 +1954,7 @@ # Test the other direction -- that this path from p2 isn't a directory # in p1 (test that p1 doesn't have any paths matching `path/*`). - match = self.match(pats=[path + '/'], default=b'path') + match = self.match(include=[path + '/'], default=b'path') matches = self.p1().manifest().matches(match) mfiles = matches.keys() if len(mfiles) > 0: @@ -2039,7 +2096,8 @@ del self._cache[path] return keys - def _markdirty(self, path, exists, data=None, date=None, flags=''): + def _markdirty(self, path, exists, data=None, date=None, flags='', + copied=None): # data not provided, let's see if we already have some; if not, let's # grab it from our underlying context, so that we always have data if # the file is marked as existing. 
@@ -2052,7 +2110,7 @@ 'data': data, 'date': date, 'flags': flags, - 'copied': None, + 'copied': copied, } def filectx(self, path, filelog=None): @@ -2088,11 +2146,8 @@ def lexists(self): return self._parent.exists(self._path) - def renamed(self): - path = self._parent.copydata(self._path) - if not path: - return None - return path, self._changectx._parents[0]._manifest.get(path, nullid) + def copysource(self): + return self._parent.copydata(self._path) def size(self): return self._parent.size(self._path) @@ -2178,11 +2233,7 @@ """ def getfilectx(repo, memctx, path): fctx = ctx[path] - # this is weird but apparently we only keep track of one parent - # (why not only store that instead of a tuple?) - copied = fctx.renamed() - if copied: - copied = copied[0] + copied = fctx.copysource() return memfilectx(repo, memctx, path, fctx.data(), islink=fctx.islink(), isexec=fctx.isexec(), copied=copied)
--- a/mercurial/copies.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/copies.py Tue Mar 19 16:36:59 2019 +0300 @@ -17,21 +17,19 @@ match as matchmod, node, pathutil, - scmutil, util, ) from .utils import ( stringutil, ) -def _findlimit(repo, a, b): +def _findlimit(repo, ctxa, ctxb): """ Find the last revision that needs to be checked to ensure that a full transitive closure for file copies can be properly calculated. Generally, this means finding the earliest revision number that's an ancestor of a or b but not both, except when a or b is a direct descendent of the other, in which case we can return the minimum revnum of a and b. - None if no such revision exists. """ # basic idea: @@ -46,27 +44,32 @@ # - quit when interesting revs is zero cl = repo.changelog + wdirparents = None + a = ctxa.rev() + b = ctxb.rev() if a is None: + wdirparents = (ctxa.p1(), ctxa.p2()) a = node.wdirrev if b is None: + assert not wdirparents + wdirparents = (ctxb.p1(), ctxb.p2()) b = node.wdirrev side = {a: -1, b: 1} visit = [-a, -b] heapq.heapify(visit) interesting = len(visit) - hascommonancestor = False limit = node.wdirrev while interesting: r = -heapq.heappop(visit) if r == node.wdirrev: - parents = [cl.rev(p) for p in repo.dirstate.parents()] + parents = [pctx.rev() for pctx in wdirparents] else: parents = cl.parentrevs(r) + if parents[1] == node.nullrev: + parents = parents[:1] for p in parents: - if p < 0: - continue if p not in side: # first time we see p; add it to visit side[p] = side[r] @@ -77,14 +80,10 @@ # p was interesting but now we know better side[p] = 0 interesting -= 1 - hascommonancestor = True if side[r]: limit = r # lowest rev visited interesting -= 1 - if not hascommonancestor: - return None - # Consider the following flow (see test-commit-amend.t under issue4405): # 1/ File 'a0' committed # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1') @@ -124,10 +123,13 @@ # file is a copy of an existing file t[k] = v - # remove criss-crossed copies for k, 
v in list(t.items()): + # remove criss-crossed copies if k in src and v in dst: del t[k] + # remove copies to files that were then removed + elif k not in dst: + del t[k] return t @@ -141,8 +143,8 @@ if limit >= 0 and not f.isintroducedafter(limit): return None -def _dirstatecopies(d, match=None): - ds = d._repo.dirstate +def _dirstatecopies(repo, match=None): + ds = repo.dirstate c = ds.copies().copy() for k in list(c): if ds[k] not in 'anm' or (match and not match(k)): @@ -163,14 +165,16 @@ # files might have to be traced back to the fctx parent of the last # one-side-only changeset, but not further back than that repo = a._repo + + if repo.ui.config('experimental', 'copies.read-from') == 'compatibility': + return _changesetforwardcopies(a, b, match) + debug = repo.ui.debugflag and repo.ui.configbool('devel', 'debug.copies') dbg = repo.ui.debug if debug: dbg('debug.copies: looking into rename from %s to %s\n' % (a, b)) - limit = _findlimit(repo, a.rev(), b.rev()) - if limit is None: - limit = node.nullrev + limit = _findlimit(repo, a, b) if debug: dbg('debug.copies: search limit: %d\n' % limit) am = a.manifest() @@ -188,7 +192,7 @@ # this comparison. 
forwardmissingmatch = match if b.p1() == a and b.p2().node() == node.nullid: - filesmatcher = scmutil.matchfiles(a._repo, b.files()) + filesmatcher = matchmod.exact(b.files()) forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher) missing = _computeforwardmissing(a, b, match=forwardmissingmatch) @@ -215,6 +219,76 @@ % (util.timer() - start)) return cm +def _changesetforwardcopies(a, b, match): + if a.rev() == node.nullrev: + return {} + + repo = a.repo() + children = {} + cl = repo.changelog + missingrevs = cl.findmissingrevs(common=[a.rev()], heads=[b.rev()]) + for r in missingrevs: + for p in cl.parentrevs(r): + if p == node.nullrev: + continue + if p not in children: + children[p] = [r] + else: + children[p].append(r) + + roots = set(children) - set(missingrevs) + # 'work' contains 3-tuples of a (revision number, parent number, copies). + # The parent number is only used for knowing which parent the copies dict + # came from. + work = [(r, 1, {}) for r in roots] + heapq.heapify(work) + while work: + r, i1, copies1 = heapq.heappop(work) + if work and work[0][0] == r: + # We are tracing copies from both parents + r, i2, copies2 = heapq.heappop(work) + copies = {} + ctx = repo[r] + p1man, p2man = ctx.p1().manifest(), ctx.p2().manifest() + allcopies = set(copies1) | set(copies2) + # TODO: perhaps this filtering should be done as long as ctx + # is merge, whether or not we're tracing from both parent. 
+ for dst in allcopies: + if not match(dst): + continue + if dst not in copies2: + # Copied on p1 side: mark as copy from p1 side if it didn't + # already exist on p2 side + if dst not in p2man: + copies[dst] = copies1[dst] + elif dst not in copies1: + # Copied on p2 side: mark as copy from p2 side if it didn't + # already exist on p1 side + if dst not in p1man: + copies[dst] = copies2[dst] + else: + # Copied on both sides: mark as copy from p1 side + copies[dst] = copies1[dst] + else: + copies = copies1 + if r == b.rev(): + return copies + for c in children[r]: + childctx = repo[c] + if r == childctx.p1().rev(): + parent = 1 + childcopies = childctx.p1copies() + else: + assert r == childctx.p2().rev() + parent = 2 + childcopies = childctx.p2copies() + if not match.always(): + childcopies = {dst: src for dst, src in childcopies.items() + if match(dst)} + childcopies = _chain(a, childctx, copies, childcopies) + heapq.heappush(work, (c, parent, childcopies)) + assert False + def _forwardcopies(a, b, match=None): """find {dst@b: src@a} copy mapping where a is an ancestor of b""" @@ -223,23 +297,28 @@ if b.rev() is None: if a == b.p1(): # short-circuit to avoid issues with merge states - return _dirstatecopies(b, match) + return _dirstatecopies(b._repo, match) cm = _committedforwardcopies(a, b.p1(), match) # combine copies from dirstate if necessary - return _chain(a, b, cm, _dirstatecopies(b, match)) + return _chain(a, b, cm, _dirstatecopies(b._repo, match)) return _committedforwardcopies(a, b, match) -def _backwardrenames(a, b): +def _backwardrenames(a, b, match): if a._repo.ui.config('experimental', 'copytrace') == 'off': return {} # Even though we're not taking copies into account, 1:n rename situations # can still exist (e.g. hg cp a b; hg mv a c). In those cases we # arbitrarily pick one of the renames. + # We don't want to pass in "match" here, since that would filter + # the destination by it. 
Since we're reversing the copies, we want + # to filter the source instead. f = _forwardcopies(b, a) r = {} for k, v in sorted(f.iteritems()): + if match and not match(v): + continue # remove copies if v in a: continue @@ -263,10 +342,10 @@ if a == y: if debug: repo.ui.debug('debug.copies: search mode: backward\n') - return _backwardrenames(x, y) + return _backwardrenames(x, y, match=match) if debug: repo.ui.debug('debug.copies: search mode: combined\n') - return _chain(x, y, _backwardrenames(x, a), + return _chain(x, y, _backwardrenames(x, a, match=match), _forwardcopies(a, y, match=match)) def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2, baselabel=''): @@ -393,9 +472,11 @@ if not c1 or not c2 or c1 == c2: return {}, {}, {}, {}, {} + narrowmatch = c1.repo().narrowmatch() + # avoid silly behavior for parent -> working dir if c2.node() is None and c1.node() == repo.dirstate.p1(): - return repo.dirstate.copies(), {}, {}, {}, {} + return _dirstatecopies(repo, narrowmatch), {}, {}, {}, {} copytracing = repo.ui.config('experimental', 'copytrace') boolctrace = stringutil.parsebool(copytracing) @@ -464,10 +545,7 @@ if graft: tca = _c1.ancestor(_c2) - limit = _findlimit(repo, c1.rev(), c2.rev()) - if limit is None: - # no common ancestor, no copies - return {}, {}, {}, {}, {} + limit = _findlimit(repo, c1, c2) repo.ui.debug(" searching for copies back to rev %d\n" % limit) m1 = c1.manifest() @@ -529,7 +607,7 @@ if dirtyc1: _combinecopies(data2['incomplete'], data1['incomplete'], copy, diverge, incompletediverge) - else: + if dirtyc2: _combinecopies(data1['incomplete'], data2['incomplete'], copy, diverge, incompletediverge) @@ -568,7 +646,13 @@ for f in bothnew: _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, both1) _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, both2) - if dirtyc1: + if dirtyc1 and dirtyc2: + remainder = _combinecopies(both2['incomplete'], both1['incomplete'], + copy, bothdiverge, bothincompletediverge) + remainder1 = 
_combinecopies(both1['incomplete'], both2['incomplete'], + copy, bothdiverge, bothincompletediverge) + remainder.update(remainder1) + elif dirtyc1: # incomplete copies may only be found on the "dirty" side for bothnew assert not both2['incomplete'] remainder = _combinecopies({}, both1['incomplete'], copy, bothdiverge, @@ -781,7 +865,7 @@ """ if f1 == f2: - return f1 # a match + return True # a match g1, g2 = f1.ancestors(), f2.ancestors() try:
--- a/mercurial/crecord.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/crecord.py Tue Mar 19 16:36:59 2019 +0300 @@ -20,6 +20,7 @@ encoding, error, patch as patchmod, + pycompat, scmutil, util, ) @@ -30,7 +31,7 @@ # This is required for ncurses to display non-ASCII characters in default user # locale encoding correctly. --immerrr -locale.setlocale(locale.LC_ALL, u'') +locale.setlocale(locale.LC_ALL, r'') # patch comments based on the git one diffhelptext = _("""# To remove '-' lines, make them ' ' lines (context). @@ -377,9 +378,9 @@ def countchanges(self): """changedlines -> (n+,n-)""" add = len([l for l in self.changedlines if l.applied - and l.prettystr()[0] == '+']) + and l.prettystr().startswith('+')]) rem = len([l for l in self.changedlines if l.applied - and l.prettystr()[0] == '-']) + and l.prettystr().startswith('-')]) return add, rem def getfromtoline(self): @@ -423,7 +424,7 @@ changedlinestr = changedline.prettystr() if changedline.applied: hunklinelist.append(changedlinestr) - elif changedlinestr[0] == "-": + elif changedlinestr.startswith("-"): hunklinelist.append(" " + changedlinestr[1:]) fp.write(''.join(self.before + hunklinelist + self.after)) @@ -471,11 +472,11 @@ for line in self.changedlines: text = line.linetext if line.applied: - if text[0] == '+': + if text.startswith('+'): dels.append(text[1:]) - elif text[0] == '-': + elif text.startswith('-'): adds.append(text[1:]) - elif text[0] == '+': + elif text.startswith('+'): dels.append(text[1:]) adds.append(text[1:]) hunk = ['-%s' % l for l in dels] + ['+%s' % l for l in adds] @@ -487,7 +488,7 @@ return getattr(self._hunk, name) def __repr__(self): - return '<hunk %r@%d>' % (self.filename(), self.fromline) + return r'<hunk %r@%d>' % (self.filename(), self.fromline) def filterpatch(ui, chunks, chunkselector, operation=None): """interactively filter patch chunks into applied-only chunks""" @@ -1519,10 +1520,10 @@ """ask for 'y' to be pressed to confirm selected. 
return True if confirmed.""" confirmtext = _( -"""if you answer yes to the following, the your currently chosen patch chunks -will be loaded into an editor. you may modify the patch from the editor, and -save the changes if you wish to change the patch. otherwise, you can just -close the editor without saving to accept the current patch as-is. +"""If you answer yes to the following, your currently chosen patch chunks +will be loaded into an editor. To modify the patch, make the changes in your +editor and save. To accept the current patch as-is, close the editor without +saving. note: don't add/remove lines unless you also modify the range information. failing to follow this rule will result in the commit aborting. @@ -1546,14 +1547,7 @@ new changeset will be created (the normal commit behavior). """ - try: - ver = float(util.version()[:3]) - except ValueError: - ver = 1 - if ver < 2.19: - msg = _("The amend option is unavailable with hg versions < 2.2\n\n" - "Press any key to continue.") - elif opts.get('amend') is None: + if opts.get('amend') is None: opts['amend'] = True msg = _("Amend option is turned on -- committing the currently " "selected changes will not create a new changeset, but " @@ -1674,6 +1668,7 @@ Return true to exit the main loop. """ + keypressed = pycompat.bytestr(keypressed) if keypressed in ["k", "KEY_UP"]: self.uparrowevent() if keypressed in ["K", "KEY_PPAGE"]: @@ -1799,6 +1794,7 @@ break if self.commenttext != "": - whitespaceremoved = re.sub("(?m)^\s.*(\n|$)", "", self.commenttext) + whitespaceremoved = re.sub(br"(?m)^\s.*(\n|$)", b"", + self.commenttext) if whitespaceremoved != "": self.opts['message'] = self.commenttext
--- a/mercurial/dagop.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/dagop.py Tue Mar 19 16:36:59 2019 +0300 @@ -28,7 +28,7 @@ generatorset = smartset.generatorset # possible maximum depth between null and wdir() -_maxlogdepth = 0x80000000 +maxlogdepth = 0x80000000 def _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse): """Walk DAG using 'pfunc' from the given 'revs' nodes @@ -42,7 +42,7 @@ if startdepth is None: startdepth = 0 if stopdepth is None: - stopdepth = _maxlogdepth + stopdepth = maxlogdepth if stopdepth == 0: return if stopdepth < 0: @@ -142,7 +142,7 @@ def revancestors(repo, revs, followfirst=False, startdepth=None, stopdepth=None, cutfunc=None): - """Like revlog.ancestors(), but supports additional options, includes + r"""Like revlog.ancestors(), but supports additional options, includes the given revs themselves, and returns a smartset Scan ends at the stopdepth (exclusive) if specified. Revisions found @@ -221,7 +221,7 @@ Scan ends at the stopdepth (exclusive) if specified. Revisions found earlier than the startdepth are omitted. """ - if startdepth is None and stopdepth is None: + if startdepth is None and (stopdepth is None or stopdepth >= maxlogdepth): gen = _genrevdescendants(repo, revs, followfirst) else: gen = _genrevdescendantsofdepth(repo, revs, followfirst,
--- a/mercurial/debugcommands.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/debugcommands.py Tue Mar 19 16:36:59 2019 +0300 @@ -38,6 +38,7 @@ cmdutil, color, context, + copies, dagparser, encoding, error, @@ -745,7 +746,6 @@ nodates = True datesort = opts.get(r'datesort') - timestr = "" if datesort: keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename else: @@ -1086,6 +1086,7 @@ ui.write("%s\n" % pycompat.byterepr(ignore)) else: m = scmutil.match(repo[None], pats=files) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) for f in m.files(): nf = util.normpath(f) ignored = None @@ -1102,16 +1103,16 @@ break if ignored: if ignored == nf: - ui.write(_("%s is ignored\n") % m.uipath(f)) + ui.write(_("%s is ignored\n") % uipathfn(f)) else: ui.write(_("%s is ignored because of " "containing folder %s\n") - % (m.uipath(f), ignored)) + % (uipathfn(f), ignored)) ignorefile, lineno, line = ignoredata ui.write(_("(ignore rule in %s, line %d: '%s')\n") % (ignorefile, lineno, line)) else: - ui.write(_("%s is not ignored\n") % m.uipath(f)) + ui.write(_("%s is not ignored\n") % uipathfn(f)) @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts, _('-c|-m|FILE')) @@ -1182,13 +1183,6 @@ ''' opts = pycompat.byteskwargs(opts) - def writetemp(contents): - (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-") - f = os.fdopen(fd, r"wb") - f.write(contents) - f.close() - return name - problems = 0 fm = ui.formatter('debuginstall', opts) @@ -1448,8 +1442,8 @@ if host == socket.gethostname(): locker = 'user %s, process %s' % (user or b'None', pid) else: - locker = 'user %s, process %s, host %s' \ - % (user or b'None', pid, host) + locker = ('user %s, process %s, host %s' + % (user or b'None', pid, host)) ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age)) return 1 except OSError as e: @@ -1466,50 +1460,59 @@ @command('debugmanifestfulltextcache', [ ('', 'clear', False, _('clear the cache')), - ('a', 'add', '', _('add the given 
manifest node to the cache'), + ('a', 'add', [], _('add the given manifest nodes to the cache'), _('NODE')) ], '') -def debugmanifestfulltextcache(ui, repo, add=None, **opts): +def debugmanifestfulltextcache(ui, repo, add=(), **opts): """show, clear or amend the contents of the manifest fulltext cache""" - with repo.lock(): + + def getcache(): r = repo.manifestlog.getstorage(b'') try: - cache = r._fulltextcache + return r._fulltextcache except AttributeError: - ui.warn(_( - "Current revlog implementation doesn't appear to have a " - 'manifest fulltext cache\n')) + msg = _("Current revlog implementation doesn't appear to have a " + "manifest fulltext cache\n") + raise error.Abort(msg) + + if opts.get(r'clear'): + with repo.wlock(): + cache = getcache() + cache.clear(clear_persisted_data=True) return - if opts.get(r'clear'): - cache.clear() - - if add: - try: - manifest = repo.manifestlog[r.lookup(add)] - except error.LookupError as e: - raise error.Abort(e, hint="Check your manifest node id") - manifest.read() # stores revisision in cache too - - if not len(cache): - ui.write(_('Cache empty')) - else: - ui.write( - _('Cache contains %d manifest entries, in order of most to ' - 'least recent:\n') % (len(cache),)) - totalsize = 0 - for nodeid in cache: - # Use cache.get to not update the LRU order - data = cache.get(nodeid) - size = len(data) - totalsize += size + 24 # 20 bytes nodeid, 4 bytes size - ui.write(_('id: %s, size %s\n') % ( - hex(nodeid), util.bytecount(size))) - ondisk = cache._opener.stat('manifestfulltextcache').st_size - ui.write( - _('Total cache data size %s, on-disk %s\n') % ( - util.bytecount(totalsize), util.bytecount(ondisk)) - ) + if add: + with repo.wlock(): + m = repo.manifestlog + store = m.getstorage(b'') + for n in add: + try: + manifest = m[store.lookup(n)] + except error.LookupError as e: + raise error.Abort(e, hint="Check your manifest node id") + manifest.read() # stores revisision in cache too + return + + cache = getcache() + if not 
len(cache): + ui.write(_('cache empty\n')) + else: + ui.write( + _('cache contains %d manifest entries, in order of most to ' + 'least recent:\n') % (len(cache),)) + totalsize = 0 + for nodeid in cache: + # Use cache.get to not update the LRU order + data = cache.peek(nodeid) + size = len(data) + totalsize += size + 24 # 20 bytes nodeid, 4 bytes size + ui.write(_('id: %s, size %s\n') % ( + hex(nodeid), util.bytecount(size))) + ondisk = cache._opener.stat('manifestfulltextcache').st_size + ui.write( + _('total cache data size %s, on-disk %s\n') % ( + util.bytecount(totalsize), util.bytecount(ondisk)) + ) @command('debugmergestate', [], '') def debugmergestate(ui, repo, *args): @@ -1747,6 +1750,28 @@ cmdutil.showmarker(fm, m, index=ind) fm.end() +@command('debugp1copies', + [('r', 'rev', '', _('revision to debug'), _('REV'))], + _('[-r REV]')) +def debugp1copies(ui, repo, **opts): + """dump copy information compared to p1""" + + opts = pycompat.byteskwargs(opts) + ctx = scmutil.revsingle(repo, opts.get('rev'), default=None) + for dst, src in ctx.p1copies().items(): + ui.write('%s -> %s\n' % (src, dst)) + +@command('debugp2copies', + [('r', 'rev', '', _('revision to debug'), _('REV'))], + _('[-r REV]')) +def debugp1copies(ui, repo, **opts): + """dump copy information compared to p2""" + + opts = pycompat.byteskwargs(opts) + ctx = scmutil.revsingle(repo, opts.get('rev'), default=None) + for dst, src in ctx.p2copies().items(): + ui.write('%s -> %s\n' % (src, dst)) + @command('debugpathcomplete', [('f', 'full', None, _('complete an entire path')), ('n', 'normal', None, _('show only normal files')), @@ -1812,6 +1837,18 @@ ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files))) ui.write('\n') +@command('debugpathcopies', + cmdutil.walkopts, + 'hg debugpathcopies REV1 REV2 [FILE]', + inferrepo=True) +def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts): + """show copies between two revisions""" + ctx1 = scmutil.revsingle(repo, rev1) + ctx2 = 
scmutil.revsingle(repo, rev2) + m = scmutil.match(ctx1, pats, opts) + for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()): + ui.write('%s -> %s\n' % (src, dst)) + @command('debugpeer', [], _('PATH'), norepo=True) def debugpeer(ui, path): """establish a connection to a peer repository""" @@ -2004,17 +2041,17 @@ @command('debugrename', [('r', 'rev', '', _('revision to debug'), _('REV'))], - _('[-r REV] FILE')) -def debugrename(ui, repo, file1, *pats, **opts): + _('[-r REV] [FILE]...')) +def debugrename(ui, repo, *pats, **opts): """dump rename information""" opts = pycompat.byteskwargs(opts) ctx = scmutil.revsingle(repo, opts.get('rev')) - m = scmutil.match(ctx, (file1,) + pats, opts) + m = scmutil.match(ctx, pats, opts) for abs in ctx.walk(m): fctx = ctx[abs] o = fctx.filelog().renamed(fctx.filenode()) - rel = m.rel(abs) + rel = repo.pathto(abs) if o: ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) else: @@ -2468,15 +2505,15 @@ ui.write(('+++ optimized\n'), label='diff.file_b') sm = difflib.SequenceMatcher(None, arevs, brevs) for tag, alo, ahi, blo, bhi in sm.get_opcodes(): - if tag in ('delete', 'replace'): + if tag in (r'delete', r'replace'): for c in arevs[alo:ahi]: - ui.write('-%s\n' % c, label='diff.deleted') - if tag in ('insert', 'replace'): + ui.write('-%d\n' % c, label='diff.deleted') + if tag in (r'insert', r'replace'): for c in brevs[blo:bhi]: - ui.write('+%s\n' % c, label='diff.inserted') - if tag == 'equal': + ui.write('+%d\n' % c, label='diff.inserted') + if tag == r'equal': for c in arevs[alo:ahi]: - ui.write(' %s\n' % c) + ui.write(' %d\n' % c) return 1 func = revset.makematcher(tree) @@ -2569,7 +2606,6 @@ source, branches = hg.parseurl(ui.expandpath(source)) url = util.url(source) - addr = None defaultport = {'https': 443, 'ssh': 22} if url.scheme in defaultport: @@ -2791,9 +2827,9 @@ f = lambda fn: util.normpath(fn) fmt = 'f %%-%ds %%-%ds %%s' % ( max([len(abs) for abs in items]), - max([len(m.rel(abs)) for abs in 
items])) + max([len(repo.pathto(abs)) for abs in items])) for abs in items: - line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '') + line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '') ui.write("%s\n" % line.rstrip()) @command('debugwhyunstable', [], _('REV'))
--- a/mercurial/diffutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/diffutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -16,13 +16,15 @@ pycompat, ) -def diffallopts(ui, opts=None, untrusted=False, section='diff'): +def diffallopts(ui, opts=None, untrusted=False, section='diff', + configprefix=''): '''return diffopts with all features supported and parsed''' return difffeatureopts(ui, opts=opts, untrusted=untrusted, section=section, - git=True, whitespace=True, formatchanging=True) + git=True, whitespace=True, formatchanging=True, + configprefix=configprefix) def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False, - whitespace=False, formatchanging=False): + whitespace=False, formatchanging=False, configprefix=''): '''return diffopts with only opted-in features parsed Features: @@ -45,7 +47,8 @@ return v if forceplain is not None and ui.plain(): return forceplain - return getter(section, name or key, untrusted=untrusted) + return getter(section, configprefix + (name or key), + untrusted=untrusted) # core options, expected to be understood by every diff parser buildopts = {
--- a/mercurial/dirstate.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/dirstate.py Tue Mar 19 16:36:59 2019 +0300 @@ -81,6 +81,10 @@ self._origpl = None self._updatedfiles = set() self._mapcls = dirstatemap + # Access and cache cwd early, so we don't access it for the first time + # after a working-copy update caused it to not exist (accessing it then + # raises an exception). + self._cwd @contextlib.contextmanager def parentchange(self): @@ -144,7 +148,7 @@ def _ignore(self): files = self._ignorefiles() if not files: - return matchmod.never(self._root, '') + return matchmod.never() pats = ['include:%s' % f for f in files] return matchmod.match(self._root, '', [], pats, warn=self._ui.warn) @@ -285,8 +289,8 @@ See localrepo.setparents() """ if self._parentwriters == 0: - raise ValueError("cannot set dirstate parent without " - "calling dirstate.beginparentchange") + raise ValueError("cannot set dirstate parent outside of " + "dirstate.parentchange context manager") self._dirty = True oldp2 = self._pl[1]
--- a/mercurial/discovery.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/discovery.py Tue Mar 19 16:36:59 2019 +0300 @@ -238,7 +238,7 @@ # D. Update newmap with outgoing changes. # This will possibly add new heads and remove existing ones. - newmap = branchmap.branchcache((branch, heads[1]) + newmap = branchmap.remotebranchcache((branch, heads[1]) for branch, heads in headssum.iteritems() if heads[0] is not None) newmap.update(repo, (ctx.rev() for ctx in missingctx))
--- a/mercurial/encoding.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/encoding.py Tue Mar 19 16:36:59 2019 +0300 @@ -65,7 +65,7 @@ else: # preferred encoding isn't known yet; use utf-8 to avoid unicode error # and recreate it once encoding is settled - environ = dict((k.encode(u'utf-8'), v.encode(u'utf-8')) + environ = dict((k.encode(r'utf-8'), v.encode(r'utf-8')) for k, v in os.environ.items()) # re-exports _encodingrewrites = { @@ -152,7 +152,7 @@ if encoding == 'UTF-8': # fast path return s - r = u.encode(_sysstr(encoding), u"replace") + r = u.encode(_sysstr(encoding), r"replace") if u == r.decode(_sysstr(encoding)): # r is a safe, non-lossy encoding of s return safelocalstr(r) @@ -161,7 +161,7 @@ # we should only get here if we're looking at an ancient changeset try: u = s.decode(_sysstr(fallbackencoding)) - r = u.encode(_sysstr(encoding), u"replace") + r = u.encode(_sysstr(encoding), r"replace") if u == r.decode(_sysstr(encoding)): # r is a safe, non-lossy encoding of s return safelocalstr(r) @@ -169,7 +169,7 @@ except UnicodeDecodeError: u = s.decode("utf-8", "replace") # last ditch # can't round-trip - return u.encode(_sysstr(encoding), u"replace") + return u.encode(_sysstr(encoding), r"replace") except LookupError as k: raise error.Abort(k, hint="please check your locale settings") @@ -230,7 +230,7 @@ if not _nativeenviron: # now encoding and helper functions are available, recreate the environ # dict to be exported to other modules - environ = dict((tolocal(k.encode(u'utf-8')), tolocal(v.encode(u'utf-8'))) + environ = dict((tolocal(k.encode(r'utf-8')), tolocal(v.encode(r'utf-8'))) for k, v in os.environ.items()) # re-exports if pycompat.ispy3: @@ -251,7 +251,7 @@ def colwidth(s): "Find the column width of a string for display in the local encoding" - return ucolwidth(s.decode(_sysstr(encoding), u'replace')) + return ucolwidth(s.decode(_sysstr(encoding), r'replace')) def ucolwidth(d): "Find the column width of a Unicode string for display"
--- a/mercurial/exchange.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/exchange.py Tue Mar 19 16:36:59 2019 +0300 @@ -297,7 +297,6 @@ 'client')) elif part.type == 'stream2' and version is None: # A stream2 part requires to be part of a v2 bundle - version = "v2" requirements = urlreq.unquote(part.params['requirements']) splitted = requirements.split() params = bundle2._formatrequirementsparams(splitted) @@ -557,18 +556,18 @@ % stringutil.forcebytestr(err)) pushop.ui.debug(msg) - with wlock or util.nullcontextmanager(), \ - lock or util.nullcontextmanager(), \ - pushop.trmanager or util.nullcontextmanager(): - pushop.repo.checkpush(pushop) - _checkpublish(pushop) - _pushdiscovery(pushop) - if not _forcebundle1(pushop): - _pushbundle2(pushop) - _pushchangeset(pushop) - _pushsyncphase(pushop) - _pushobsolete(pushop) - _pushbookmark(pushop) + with wlock or util.nullcontextmanager(): + with lock or util.nullcontextmanager(): + with pushop.trmanager or util.nullcontextmanager(): + pushop.repo.checkpush(pushop) + _checkpublish(pushop) + _pushdiscovery(pushop) + if not _forcebundle1(pushop): + _pushbundle2(pushop) + _pushchangeset(pushop) + _pushsyncphase(pushop) + _pushobsolete(pushop) + _pushbookmark(pushop) if repo.ui.configbool('experimental', 'remotenames'): logexchange.pullremotenames(repo, remote) @@ -921,7 +920,7 @@ if v in changegroup.supportedoutgoingversions( pushop.repo)] if not cgversions: - raise ValueError(_('no common changegroup version')) + raise error.Abort(_('no common changegroup version')) version = max(cgversions) cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version, 'push') @@ -2185,7 +2184,7 @@ cgversions = [v for v in cgversions if v in changegroup.supportedoutgoingversions(repo)] if not cgversions: - raise ValueError(_('no common changegroup version')) + raise error.Abort(_('no common changegroup version')) version = max(cgversions) outgoing = _computeoutgoing(repo, heads, common) @@ -2229,7 +2228,7 @@ if not 
kwargs.get(r'bookmarks', False): return if 'bookmarks' not in b2caps: - raise ValueError(_('no common bookmarks exchange method')) + raise error.Abort(_('no common bookmarks exchange method')) books = bookmod.listbinbookmarks(repo) data = bookmod.binaryencode(books) if data: @@ -2264,7 +2263,7 @@ """add phase heads part to the requested bundle""" if kwargs.get(r'phases', False): if not 'heads' in b2caps.get('phases'): - raise ValueError(_('no common phases exchange method')) + raise error.Abort(_('no common phases exchange method')) if heads is None: heads = repo.heads() @@ -2548,8 +2547,8 @@ return True # Stream clone v2 - if (bundlespec.wirecompression == 'UN' and \ - bundlespec.wireversion == '02' and \ + if (bundlespec.wirecompression == 'UN' and + bundlespec.wireversion == '02' and bundlespec.contentopts.get('streamv2')): return True
--- a/mercurial/filemerge.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/filemerge.py Tue Mar 19 16:36:59 2019 +0300 @@ -279,6 +279,7 @@ keep as the merged version.""" ui = repo.ui fd = fcd.path() + uipathfn = scmutil.getuipathfn(repo) # Avoid prompting during an in-memory merge since it doesn't support merge # conflicts. @@ -287,7 +288,7 @@ 'support file conflicts') prompts = partextras(labels) - prompts['fd'] = fd + prompts['fd'] = uipathfn(fd) try: if fco.isabsent(): index = ui.promptchoice( @@ -394,13 +395,14 @@ def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf): tool, toolpath, binary, symlink, scriptfn = toolconf + uipathfn = scmutil.getuipathfn(repo) if symlink: repo.ui.warn(_('warning: internal %s cannot merge symlinks ' - 'for %s\n') % (tool, fcd.path())) + 'for %s\n') % (tool, uipathfn(fcd.path()))) return False if fcd.isabsent() or fco.isabsent(): repo.ui.warn(_('warning: internal %s cannot merge change/delete ' - 'conflict for %s\n') % (tool, fcd.path())) + 'conflict for %s\n') % (tool, uipathfn(fcd.path()))) return False return True @@ -462,7 +464,6 @@ Generic driver for _imergelocal and _imergeother """ assert localorother is not None - tool, toolpath, binary, symlink, scriptfn = toolconf r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels, localorother=localorother) return True, r @@ -581,9 +582,10 @@ def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): tool, toolpath, binary, symlink, scriptfn = toolconf + uipathfn = scmutil.getuipathfn(repo) if fcd.isabsent() or fco.isabsent(): repo.ui.warn(_('warning: %s cannot merge change/delete conflict ' - 'for %s\n') % (tool, fcd.path())) + 'for %s\n') % (tool, uipathfn(fcd.path()))) return False, 1, None unused, unused, unused, back = files localpath = _workingpath(repo, fcd) @@ -623,7 +625,7 @@ lambda s: procutil.shellquote(util.localpath(s))) if _toolbool(ui, tool, "gui"): repo.ui.status(_('running merge tool %s for file %s\n') % - (tool, fcd.path())) + 
(tool, uipathfn(fcd.path()))) if scriptfn is None: cmd = toolpath + ' ' + args repo.ui.debug('launching merge tool: %s\n' % cmd) @@ -741,8 +743,7 @@ # TODO: Break this import cycle somehow. (filectx -> ctx -> fileset -> # merge -> filemerge). (I suspect the fileset import is the weakest link) from . import context - a = _workingpath(repo, fcd) - back = scmutil.origpath(ui, repo, a) + back = scmutil.backuppath(ui, repo, fcd.path()) inworkingdir = (back.startswith(repo.wvfs.base) and not back.startswith(repo.vfs.base)) if isinstance(fcd, context.overlayworkingfilectx) and inworkingdir: @@ -762,6 +763,7 @@ if isinstance(fcd, context.overlayworkingfilectx): util.writefile(back, fcd.data()) else: + a = _workingpath(repo, fcd) util.copyfile(a, back) # A arbitraryfilectx is returned, so we can run the same functions on # the backup context regardless of where it lives. @@ -842,6 +844,8 @@ ui = repo.ui fd = fcd.path() + uipathfn = scmutil.getuipathfn(repo) + fduipath = uipathfn(fd) binary = fcd.isbinary() or fco.isbinary() or fca.isbinary() symlink = 'l' in fcd.flags() + fco.flags() changedelete = fcd.isabsent() or fco.isabsent() @@ -865,8 +869,8 @@ raise error.Abort(_("invalid 'python:' syntax: %s") % toolpath) toolpath = script ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n" - % (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink), - pycompat.bytestr(changedelete))) + % (tool, fduipath, pycompat.bytestr(binary), + pycompat.bytestr(symlink), pycompat.bytestr(changedelete))) if tool in internals: func = internals[tool] @@ -892,9 +896,10 @@ if premerge: if orig != fco.path(): - ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd)) + ui.status(_("merging %s and %s to %s\n") % + (uipathfn(orig), uipathfn(fco.path()), fduipath)) else: - ui.status(_("merging %s\n") % fd) + ui.status(_("merging %s\n") % fduipath) ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca)) @@ -905,7 +910,7 @@ raise 
error.InMemoryMergeConflictsError('in-memory merge does ' 'not support merge ' 'conflicts') - ui.warn(onfailure % fd) + ui.warn(onfailure % fduipath) return True, 1, False back = _makebackup(repo, ui, wctx, fcd, premerge) @@ -958,7 +963,7 @@ raise error.InMemoryMergeConflictsError('in-memory merge ' 'does not support ' 'merge conflicts') - ui.warn(onfailure % fd) + ui.warn(onfailure % fduipath) _onfilemergefailure(ui) return True, r, deleted @@ -986,6 +991,7 @@ def _check(repo, r, ui, tool, fcd, files): fd = fcd.path() + uipathfn = scmutil.getuipathfn(repo) unused, unused, unused, back = files if not r and (_toolbool(ui, tool, "checkconflicts") or @@ -997,7 +1003,7 @@ if 'prompt' in _toollist(ui, tool, "check"): checked = True if ui.promptchoice(_("was merge of '%s' successful (yn)?" - "$$ &Yes $$ &No") % fd, 1): + "$$ &Yes $$ &No") % uipathfn(fd), 1): r = 1 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or @@ -1006,7 +1012,7 @@ if back is not None and not fcd.cmp(back): if ui.promptchoice(_(" output file %s appears unchanged\n" "was merge successful (yn)?" - "$$ &Yes $$ &No") % fd, 1): + "$$ &Yes $$ &No") % uipathfn(fd), 1): r = 1 if back is not None and _toolbool(ui, tool, "fixeol"):
--- a/mercurial/fileset.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/fileset.py Tue Mar 19 16:36:59 2019 +0300 @@ -499,9 +499,8 @@ """Create a matcher to select files by predfn(filename)""" if cache: predfn = util.cachefunc(predfn) - repo = self.ctx.repo() - return matchmod.predicatematcher(repo.root, repo.getcwd(), predfn, - predrepr=predrepr, badfn=self._badfn) + return matchmod.predicatematcher(predfn, predrepr=predrepr, + badfn=self._badfn) def fpredicate(self, predfn, predrepr=None, cache=False): """Create a matcher to select files by predfn(fctx) at the current @@ -539,9 +538,7 @@ def never(self): """Create a matcher to select nothing""" - repo = self.ctx.repo() - return matchmod.nevermatcher(repo.root, repo.getcwd(), - badfn=self._badfn) + return matchmod.never(badfn=self._badfn) def match(ctx, expr, badfn=None): """Create a matcher for a single fileset expression"""
--- a/mercurial/graphmod.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/graphmod.py Tue Mar 19 16:36:59 2019 +0300 @@ -451,7 +451,7 @@ # If 'graphshorten' config, only draw shift_interline # when there is any non vertical flow in graph. if state['graphshorten']: - if any(c in '\/' for c in shift_interline if c): + if any(c in br'\/' for c in shift_interline if c): lines.append(shift_interline) # Else, no 'graphshorten' config so draw shift_interline. else:
--- a/mercurial/help/config.txt Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/help/config.txt Tue Mar 19 16:36:59 2019 +0300 @@ -1843,6 +1843,44 @@ Turning this option off can result in large increase of repository size for repository with many merges. +``revlog.reuse-external-delta-parent`` + Control the order in which delta parents are considered when adding new + revisions from an external source. + (typically: apply bundle from `hg pull` or `hg push`). + + New revisions are usually provided as a delta against other revisions. By + default, Mercurial will try to reuse this delta first, therefore using the + same "delta parent" as the source. Directly using deltas from the source + reduces CPU usage and usually speeds up operation. However, in some cases, + the source might have sub-optimal delta bases and forcing their reevaluation + is useful. For example, pushes from an old client could have sub-optimal + delta parents that the server wants to optimize. (lack of general delta, bad + parent choices, lack of sparse-revlog, etc). + + This option is enabled by default. Turning it off will ensure bad delta + parent choices from older clients do not propagate to this repository, at + the cost of a small increase in CPU consumption. + + Note: this option only controls the order in which delta parents are + considered. Even when disabled, the existing delta from the source will be + reused if the same delta parent is selected. + +``revlog.reuse-external-delta`` + Control the reuse of deltas from an external source. + (typically: apply bundle from `hg pull` or `hg push`). + + New revisions are usually provided as a delta against another revision. By + default, Mercurial will not recompute the same delta again, trusting + externally provided deltas. There have been rare cases of small adjustments + to the diffing algorithm in the past. So in some rare cases, recomputing + deltas provided by ancient clients can provide better results.
Disabling + this option means going through a full delta recomputation for all incoming + revisions. It means a large increase in CPU usage and will slow operations + down. + + This option is enabled by default. When disabled, it also disables the + related ``storage.revlog.reuse-external-delta-parent`` option. + ``server`` ---------- @@ -1990,6 +2028,12 @@ See also ``server.zliblevel``. +``view`` + Repository filter used when exchanging revisions with the peer. + + The default view (``served``) excludes secret and hidden changesets. + Another useful value is ``immutable`` (no draft, secret or hidden changesets). + ``smtp`` -------- @@ -2341,6 +2385,9 @@ Reduce the amount of output printed. (default: False) +``relative-paths`` + Prefer relative paths in the UI. + ``remotecmd`` Remote command to use for clone/push/pull operations. (default: ``hg``)
--- a/mercurial/hg.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/hg.py Tue Mar 19 16:36:59 2019 +0300 @@ -38,6 +38,7 @@ narrowspec, node, phases, + pycompat, repository as repositorymod, scmutil, sshpeer, @@ -57,7 +58,15 @@ def _local(path): path = util.expandpath(util.urllocalpath(path)) - return (os.path.isfile(path) and bundlerepo or localrepo) + + try: + isfile = os.path.isfile(path) + # Python 2 raises TypeError, Python 3 ValueError. + except (TypeError, ValueError) as e: + raise error.Abort(_('invalid path %s: %s') % ( + path, pycompat.bytestr(e))) + + return isfile and bundlerepo or localrepo def addbranchrevs(lrepo, other, branches, revs): peer = other.peer() # a courtesy to callers using a localrepo for other @@ -282,25 +291,20 @@ called. """ - destlock = lock = None - lock = repo.lock() - try: + with repo.lock(): # we use locks here because if we race with commit, we # can end up with extra data in the cloned revlogs that's # not pointed to by changesets, thus causing verify to # fail - destlock = copystore(ui, repo, repo.path) - - sharefile = repo.vfs.join('sharedpath') - util.rename(sharefile, sharefile + '.old') + with destlock or util.nullcontextmanager(): - repo.requirements.discard('shared') - repo.requirements.discard('relshared') - repo._writerequirements() - finally: - destlock and destlock.release() - lock and lock.release() + sharefile = repo.vfs.join('sharedpath') + util.rename(sharefile, sharefile + '.old') + + repo.requirements.discard('shared') + repo.requirements.discard('relshared') + repo._writerequirements() # Removing share changes some fundamental properties of the repo instance. # So we instantiate a new repo object and operate on it rather than
--- a/mercurial/hgweb/hgwebdir_mod.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/hgweb/hgwebdir_mod.py Tue Mar 19 16:36:59 2019 +0300 @@ -143,7 +143,7 @@ path = path[:-len(discarded) - 1] try: - r = hg.repository(ui, path) + hg.repository(ui, path) directory = False except (IOError, error.RepoError): pass @@ -510,7 +510,7 @@ if style == styles[0]: vars['style'] = style - sessionvars = webutil.sessionvars(vars, r'?') + sessionvars = webutil.sessionvars(vars, '?') logourl = config('web', 'logourl') logoimg = config('web', 'logoimg') staticurl = (config('web', 'staticurl')
--- a/mercurial/hgweb/server.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/hgweb/server.py Tue Mar 19 16:36:59 2019 +0300 @@ -54,7 +54,7 @@ self.writelines(str.split('\n')) def writelines(self, seq): for msg in seq: - self.handler.log_error("HG error: %s", msg) + self.handler.log_error(r"HG error: %s", encoding.strfromlocal(msg)) class _httprequesthandler(httpservermod.basehttprequesthandler): @@ -100,17 +100,22 @@ def do_POST(self): try: self.do_write() - except Exception: + except Exception as e: + # I/O below could raise another exception. So log the original + # exception first to ensure it is recorded. + if not (isinstance(e, (OSError, socket.error)) + and e.errno == errno.ECONNRESET): + tb = r"".join(traceback.format_exception(*sys.exc_info())) + # We need a native-string newline to poke in the log + # message, because we won't get a newline when using an + # r-string. This is the easy way out. + newline = chr(10) + self.log_error(r"Exception happened during processing " + r"request '%s':%s%s", self.path, newline, tb) + self._start_response(r"500 Internal Server Error", []) self._write(b"Internal Server Error") self._done() - tb = r"".join(traceback.format_exception(*sys.exc_info())) - # We need a native-string newline to poke in the log - # message, because we won't get a newline when using an - # r-string. This is the easy way out. - newline = chr(10) - self.log_error(r"Exception happened during processing " - r"request '%s':%s%s", self.path, newline, tb) def do_PUT(self): self.do_POST() @@ -165,7 +170,7 @@ if length: env[r'CONTENT_LENGTH'] = length for header in [h for h in self.headers.keys() - if h not in (r'content-type', r'content-length')]: + if h.lower() not in (r'content-type', r'content-length')]: hkey = r'HTTP_' + header.replace(r'-', r'_').upper() hval = self.headers.get(header) hval = hval.replace(r'\n', r'').strip()
--- a/mercurial/hgweb/webcommands.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/hgweb/webcommands.py Tue Mar 19 16:36:59 2019 +0300 @@ -884,7 +884,7 @@ leftlines = filelines(pfctx) else: rightlines = () - pfctx = ctx.parents()[0][path] + pfctx = ctx.p1()[path] leftlines = filelines(pfctx) comparison = webutil.compare(context, leftlines, rightlines)
--- a/mercurial/hgweb/webutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/hgweb/webutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -456,13 +456,13 @@ files = listfilediffs(ctx.files(), n, web.maxfiles) entry = commonentry(repo, ctx) - entry.update( - allparents=_kwfunc(lambda context, mapping: parents(ctx)), - parent=_kwfunc(lambda context, mapping: parents(ctx, rev - 1)), - child=_kwfunc(lambda context, mapping: children(ctx, rev + 1)), - changelogtag=showtags, - files=files, - ) + entry.update({ + 'allparents': _kwfunc(lambda context, mapping: parents(ctx)), + 'parent': _kwfunc(lambda context, mapping: parents(ctx, rev - 1)), + 'child': _kwfunc(lambda context, mapping: children(ctx, rev + 1)), + 'changelogtag': showtags, + 'files': files, + }) return entry def changelistentries(web, revs, maxcount, parityfn): @@ -565,16 +565,14 @@ def _diffsgen(context, repo, ctx, basectx, files, style, stripecount, linerange, lineidprefix): if files: - m = match.exact(repo.root, repo.getcwd(), files) + m = match.exact(files) else: - m = match.always(repo.root, repo.getcwd()) + m = match.always() diffopts = patch.diffopts(repo.ui, untrusted=True) - node1 = basectx.node() - node2 = ctx.node() parity = paritygen(stripecount) - diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts) + diffhunks = patch.diffhunks(repo, basectx, ctx, m, opts=diffopts) for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1): if style != 'raw': header = header[1:]
--- a/mercurial/hgweb/wsgiheaders.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/hgweb/wsgiheaders.py Tue Mar 19 16:36:59 2019 +0300 @@ -127,7 +127,7 @@ return self._headers[:] def __repr__(self): - return "%s(%r)" % (self.__class__.__name__, self._headers) + return r"%s(%r)" % (self.__class__.__name__, self._headers) def __str__(self): """str() returns the formatted headers, complete with end line,
--- a/mercurial/httpconnection.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/httpconnection.py Tue Mar 19 16:36:59 2019 +0300 @@ -109,10 +109,10 @@ schemes, prefix = [p[0]], p[1] else: schemes = (auth.get('schemes') or 'https').split() - if (prefix == '*' or hostpath.startswith(prefix)) and \ - (len(prefix) > bestlen or (len(prefix) == bestlen and \ - not bestuser and 'username' in auth)) \ - and scheme in schemes: + if ((prefix == '*' or hostpath.startswith(prefix)) and + (len(prefix) > bestlen or (len(prefix) == bestlen and + not bestuser and 'username' in auth)) + and scheme in schemes): bestlen = len(prefix) bestauth = group, auth bestuser = auth.get('username')
--- a/mercurial/httppeer.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/httppeer.py Tue Mar 19 16:36:59 2019 +0300 @@ -816,8 +816,8 @@ return raise error.CapabilityError( - _('cannot %s; client or remote repository does not support the %r ' - 'capability') % (purpose, name)) + _('cannot %s; client or remote repository does not support the ' + '\'%s\' capability') % (purpose, name)) # End of ipeercapabilities.
--- a/mercurial/keepalive.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/keepalive.py Tue Mar 19 16:36:59 2019 +0300 @@ -84,6 +84,7 @@ from __future__ import absolute_import, print_function +import collections import errno import hashlib import socket @@ -114,15 +115,13 @@ """ def __init__(self): self._lock = threading.Lock() - self._hostmap = {} # map hosts to a list of connections + self._hostmap = collections.defaultdict(list) # host -> [connection] self._connmap = {} # map connections to host self._readymap = {} # map connection to ready state def add(self, host, connection, ready): self._lock.acquire() try: - if host not in self._hostmap: - self._hostmap[host] = [] self._hostmap[host].append(connection) self._connmap[connection] = host self._readymap[connection] = ready @@ -155,19 +154,18 @@ conn = None self._lock.acquire() try: - if host in self._hostmap: - for c in self._hostmap[host]: - if self._readymap[c]: - self._readymap[c] = 0 - conn = c - break + for c in self._hostmap[host]: + if self._readymap[c]: + self._readymap[c] = False + conn = c + break finally: self._lock.release() return conn def get_all(self, host=None): if host: - return list(self._hostmap.get(host, [])) + return list(self._hostmap[host]) else: return dict(self._hostmap) @@ -202,7 +200,7 @@ def _request_closed(self, request, host, connection): """tells us that this request is now closed and that the connection is ready for another request""" - self._cm.set_ready(connection, 1) + self._cm.set_ready(connection, True) def _remove_connection(self, host, connection, close=0): if close: @@ -239,7 +237,7 @@ if DEBUG: DEBUG.info("creating new connection to %s (%d)", host, id(h)) - self._cm.add(host, h, 0) + self._cm.add(host, h, False) self._start_transaction(h, req) r = h.getresponse() # The string form of BadStatusLine is the status line. 
@@ -405,6 +403,11 @@ _raw_read = httplib.HTTPResponse.read _raw_readinto = getattr(httplib.HTTPResponse, 'readinto', None) + # Python 2.7 has a single close() which closes the socket handle. + # This method was effectively renamed to _close_conn() in Python 3. But + # there is also a close(). _close_conn() is called by methods like + # read(). + def close(self): if self.fp: self.fp.close() @@ -413,6 +416,9 @@ self._handler._request_closed(self, self._host, self._connection) + def _close_conn(self): + self.close() + def close_connection(self): self._handler._remove_connection(self._host, self._connection, close=1) self.close()
--- a/mercurial/localrepo.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/localrepo.py Tue Mar 19 16:36:59 2019 +0300 @@ -752,7 +752,15 @@ b'revlog.optimize-delta-parent-choice') options[b'deltabothparents'] = deltabothparents - options[b'lazydeltabase'] = not scmutil.gddeltaconfig(ui) + lazydelta = ui.configbool(b'storage', b'revlog.reuse-external-delta') + lazydeltabase = False + if lazydelta: + lazydeltabase = ui.configbool(b'storage', + b'revlog.reuse-external-delta-parent') + if lazydeltabase is None: + lazydeltabase = not scmutil.gddeltaconfig(ui) + options[b'lazydelta'] = lazydelta + options[b'lazydeltabase'] = lazydeltabase chainspan = ui.configbytes(b'experimental', b'maxdeltachainspan') if 0 <= chainspan: @@ -992,7 +1000,7 @@ self._dirstatevalidatewarned = False - self._branchcaches = {} + self._branchcaches = branchmap.BranchMapCache() self._revbranchcache = None self._filterpats = {} self._datafilters = {} @@ -1227,14 +1235,14 @@ @storecache(narrowspec.FILENAME) def _storenarrowmatch(self): if repository.NARROW_REQUIREMENT not in self.requirements: - return matchmod.always(self.root, '') + return matchmod.always() include, exclude = self.narrowpats return narrowspec.match(self.root, include=include, exclude=exclude) @storecache(narrowspec.FILENAME) def _narrowmatch(self): if repository.NARROW_REQUIREMENT not in self.requirements: - return matchmod.always(self.root, '') + return matchmod.always() narrowspec.checkworkingcopynarrowspec(self) include, exclude = self.narrowpats return narrowspec.match(self.root, include=include, exclude=exclude) @@ -1252,7 +1260,7 @@ if includeexact and not self._narrowmatch.always(): # do not exclude explicitly-specified paths so that they can # be warned later on - em = matchmod.exact(match._root, match._cwd, match.files()) + em = matchmod.exact(match.files()) nm = matchmod.unionmatcher([self._narrowmatch, em]) return matchmod.intersectmatchers(match, nm) return matchmod.intersectmatchers(match, self._narrowmatch) @@ 
-1520,8 +1528,7 @@ def branchmap(self): '''returns a dictionary {branch: [branchheads]} with branchheads ordered by increasing revision number''' - branchmap.updatecache(self) - return self._branchcaches[self.filtername] + return self._branchcaches[self] @unfilteredmethod def revbranchcache(self): @@ -1811,7 +1818,6 @@ args = tr.hookargs.copy() args.update(bookmarks.preparehookargs(name, old, new)) repo.hook('pretxnclose-bookmark', throw=True, - txnname=desc, **pycompat.strkwargs(args)) if hook.hashook(repo.ui, 'pretxnclose-phase'): cl = repo.unfiltered().changelog @@ -1819,11 +1825,11 @@ args = tr.hookargs.copy() node = hex(cl.node(rev)) args.update(phases.preparehookargs(node, old, new)) - repo.hook('pretxnclose-phase', throw=True, txnname=desc, + repo.hook('pretxnclose-phase', throw=True, **pycompat.strkwargs(args)) repo.hook('pretxnclose', throw=True, - txnname=desc, **pycompat.strkwargs(tr.hookargs)) + **pycompat.strkwargs(tr.hookargs)) def releasefn(tr, success): repo = reporef() if success: @@ -1857,6 +1863,7 @@ tr.changes['bookmarks'] = {} tr.hookargs['txnid'] = txnid + tr.hookargs['txnname'] = desc # note: writing the fncache only during finalize mean that the file is # outdated when running hooks. As fncache is used for streaming clone, # this is not expected to break anything that happen during the hooks. 
@@ -1878,7 +1885,7 @@ args = tr.hookargs.copy() args.update(bookmarks.preparehookargs(name, old, new)) repo.hook('txnclose-bookmark', throw=False, - txnname=desc, **pycompat.strkwargs(args)) + **pycompat.strkwargs(args)) if hook.hashook(repo.ui, 'txnclose-phase'): cl = repo.unfiltered().changelog @@ -1887,10 +1894,10 @@ args = tr.hookargs.copy() node = hex(cl.node(rev)) args.update(phases.preparehookargs(node, old, new)) - repo.hook('txnclose-phase', throw=False, txnname=desc, + repo.hook('txnclose-phase', throw=False, **pycompat.strkwargs(args)) - repo.hook('txnclose', throw=False, txnname=desc, + repo.hook('txnclose', throw=False, **pycompat.strkwargs(hookargs)) reporef()._afterlock(hookfunc) tr.addfinalize('txnclose-hook', txnclosehook) @@ -1902,7 +1909,7 @@ def txnaborthook(tr2): """To be run if transaction is aborted """ - reporef().hook('txnabort', throw=False, txnname=desc, + reporef().hook('txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)) tr.addabort('txnabort-hook', txnaborthook) # avoid eager cache invalidation. 
in-memory data should be identical @@ -2011,8 +2018,7 @@ self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True) self.invalidate() - parentgone = (parents[0] not in self.changelog.nodemap or - parents[1] not in self.changelog.nodemap) + parentgone = any(p not in self.changelog.nodemap for p in parents) if parentgone: # prevent dirstateguard from overwriting already restored one dsguard.close() @@ -2074,13 +2080,14 @@ return if tr is None or tr.changes['origrepolen'] < len(self): - # updating the unfiltered branchmap should refresh all the others, + # accessing the 'served' branchmap should refresh all the others, self.ui.debug('updating the branch cache\n') - branchmap.updatecache(self.filtered('served')) + self.filtered('served').branchmap() if full: - rbc = self.revbranchcache() - for r in self.changelog: + unfi = self.unfiltered() + rbc = unfi.revbranchcache() + for r in unfi.changelog: rbc.branchinfo(r) rbc.write() @@ -2088,13 +2095,17 @@ for ctx in self['.'].parents(): ctx.manifest() # accessing the manifest is enough + # accessing tags warm the cache + self.tags() + self.filtered('served').tags() + def invalidatecaches(self): if r'_tagscache' in vars(self): # can't use delattr on proxy del self.__dict__[r'_tagscache'] - self.unfiltered()._branchcaches.clear() + self._branchcaches.clear() self.invalidatevolatilesets() self._sparsesignaturecache.clear() @@ -2218,8 +2229,12 @@ l.lock() return l - l = self._lock(self.svfs, "lock", wait, None, - self.invalidate, _('repository %s') % self.origroot) + l = self._lock(vfs=self.svfs, + lockname="lock", + wait=wait, + releasefn=None, + acquirefn=self.invalidate, + desc=_('repository %s') % self.origroot) self._lockref = weakref.ref(l) return l @@ -2295,8 +2310,8 @@ flog = self.file(fname) meta = {} - copy = fctx.renamed() - if copy and copy[0] != fname: + cfname = fctx.copysource() + if cfname and cfname != fname: # Mark the new revision of this file as a copy of another # file.
This copy data will effectively act as a parent # of this new revision. If this is a merge, the first @@ -2316,7 +2331,6 @@ # \- 2 --- 4 as the merge base # - cfname = copy[0] crev = manifest1.get(cfname) newfparent = fparent2 @@ -2402,18 +2416,15 @@ raise error.Abort('%s: %s' % (f, msg)) if not match: - match = matchmod.always(self.root, '') + match = matchmod.always() if not force: vdirs = [] match.explicitdir = vdirs.append match.bad = fail - wlock = lock = tr = None - try: - wlock = self.wlock() - lock = self.lock() # for recent changelog (see issue4368) - + # lock() for recent changelog (see issue4368) + with self.wlock(), self.lock(): wctx = self[None] merge = len(wctx.parents()) > 1 @@ -2460,10 +2471,11 @@ # commit subs and write new state if subs: + uipathfn = scmutil.getuipathfn(self) for s in sorted(commitsubs): sub = wctx.sub(s) self.ui.status(_('committing subrepository %s\n') % - subrepoutil.subrelpath(sub)) + uipathfn(subrepoutil.subrelpath(sub))) sr = sub.commit(cctx._text, user, date) newstate[s] = (newstate[s][0], sr) subrepoutil.writestate(self, newstate) @@ -2473,21 +2485,17 @@ try: self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2) - tr = self.transaction('commit') - ret = self.commitctx(cctx, True) + with self.transaction('commit'): + ret = self.commitctx(cctx, True) + # update bookmarks, dirstate and mergestate + bookmarks.update(self, [p1, p2], ret) + cctx.markcommitted(ret) + ms.reset() except: # re-raises if edited: self.ui.write( _('note: commit message saved in %s\n') % msgfn) raise - # update bookmarks, dirstate and mergestate - bookmarks.update(self, [p1, p2], ret) - cctx.markcommitted(ret) - ms.reset() - tr.close() - - finally: - lockmod.release(tr, lock, wlock) def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2): # hack for command that use a temporary commit (eg: histedit) @@ -2509,13 +2517,10 @@ from p1 or p2 are excluded from the committed ctx.files(). 
""" - tr = None p1, p2 = ctx.p1(), ctx.p2() user = ctx.user() - lock = self.lock() - try: - tr = self.transaction("commit") + with self.lock(), self.transaction("commit") as tr: trp = weakref.proxy(tr) if ctx.manifestnode(): @@ -2538,8 +2543,9 @@ removed = list(ctx.removed()) linkrev = len(self) self.ui.note(_("committing files:\n")) + uipathfn = scmutil.getuipathfn(self) for f in sorted(ctx.modified() + ctx.added()): - self.ui.note(f + "\n") + self.ui.note(uipathfn(f) + "\n") try: fctx = ctx[f] if fctx is None: @@ -2549,13 +2555,15 @@ m[f] = self._filecommit(fctx, m1, m2, linkrev, trp, changed) m.setflag(f, fctx.flags()) - except OSError as inst: - self.ui.warn(_("trouble committing %s!\n") % f) + except OSError: + self.ui.warn(_("trouble committing %s!\n") % + uipathfn(f)) raise except IOError as inst: errcode = getattr(inst, 'errno', errno.ENOENT) if error or errcode and errcode != errno.ENOENT: - self.ui.warn(_("trouble committing %s!\n") % f) + self.ui.warn(_("trouble committing %s!\n") % + uipathfn(f)) raise # update manifest @@ -2612,12 +2620,7 @@ # # if minimal phase was 0 we don't need to retract anything phases.registernew(self, tr, targetphase, [n]) - tr.close() return n - finally: - if tr: - tr.release() - lock.release() @unfilteredmethod def destroying(self):
--- a/mercurial/logcmdutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/logcmdutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -9,6 +9,7 @@ import itertools import os +import posixpath from .i18n import _ from .node import ( @@ -58,29 +59,53 @@ changes=None, stat=False, fp=None, graphwidth=0, prefix='', root='', listsubrepos=False, hunksfilterfn=None): '''show diff or diffstat.''' + ctx1 = repo[node1] + ctx2 = repo[node2] if root: relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) else: relroot = '' + copysourcematch = None + def compose(f, g): + return lambda x: f(g(x)) + def pathfn(f): + return posixpath.join(prefix, f) if relroot != '': # XXX relative roots currently don't work if the root is within a # subrepo - uirelroot = match.uipath(relroot) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + uirelroot = uipathfn(pathfn(relroot)) relroot += '/' for matchroot in match.files(): if not matchroot.startswith(relroot): - ui.warn(_('warning: %s not inside relative root %s\n') % ( - match.uipath(matchroot), uirelroot)) + ui.warn(_('warning: %s not inside relative root %s\n') % + (uipathfn(pathfn(matchroot)), uirelroot)) + + relrootmatch = scmutil.match(ctx2, pats=[relroot], default='path') + match = matchmod.intersectmatchers(match, relrootmatch) + copysourcematch = relrootmatch + + checkroot = (repo.ui.configbool('devel', 'all-warnings') or + repo.ui.configbool('devel', 'check-relroot')) + def relrootpathfn(f): + if checkroot and not f.startswith(relroot): + raise AssertionError( + "file %s doesn't start with relroot %s" % (f, relroot)) + return f[len(relroot):] + pathfn = compose(relrootpathfn, pathfn) if stat: diffopts = diffopts.copy(context=0, noprefix=False) width = 80 if not ui.plain(): width = ui.termwidth() - graphwidth + # If an explicit --root was given, don't respect ui.relative-paths + if not relroot: + pathfn = compose(scmutil.getuipathfn(repo), pathfn) - chunks = repo[node2].diff(repo[node1], match, changes, opts=diffopts, - 
prefix=prefix, relroot=relroot, - hunksfilterfn=hunksfilterfn) + chunks = ctx2.diff(ctx1, match, changes, opts=diffopts, pathfn=pathfn, + copysourcematch=copysourcematch, + hunksfilterfn=hunksfilterfn) if fp is not None or ui.canwritewithoutlabels(): out = fp or ui @@ -105,8 +130,6 @@ ui.write(chunk, label=label) if listsubrepos: - ctx1 = repo[node1] - ctx2 = repo[node2] for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): tempnode2 = node2 try: @@ -118,8 +141,9 @@ # subpath. The best we can do is to ignore it. tempnode2 = None submatch = matchmod.subdirmatcher(subpath, match) + subprefix = repo.wvfs.reljoin(prefix, subpath) sub.diff(ui, diffopts, tempnode2, submatch, changes=changes, - stat=stat, fp=fp, prefix=prefix) + stat=stat, fp=fp, prefix=subprefix) class changesetdiffer(object): """Generate diff of changeset with pre-configured filtering functions"""
--- a/mercurial/logexchange.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/logexchange.py Tue Mar 19 16:36:59 2019 +0300 @@ -97,7 +97,6 @@ def activepath(repo, remote): """returns remote path""" - local = None # is the remote a local peer local = remote.local()
--- a/mercurial/mail.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/mail.py Tue Mar 19 16:36:59 2019 +0300 @@ -243,6 +243,13 @@ cs.body_encoding = email.charset.QP break + # On Python 2, this simply assigns a value. Python 3 inspects + # body and does different things depending on whether it has + # encode() or decode() attributes. We can get the old behavior + # if we pass a str and charset is None and we call set_charset(). + # But we may get into trouble later due to Python attempting to + # encode/decode using the registered charset (or attempting to + # use ascii in the absence of a charset). msg.set_payload(body, cs) return msg
--- a/mercurial/manifest.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/manifest.py Tue Mar 19 16:36:59 2019 +0300 @@ -283,7 +283,6 @@ if len(self.extradata) == 0: return l = [] - last_cut = 0 i = 0 offset = 0 self.extrainfo = [0] * len(self.positions) @@ -1277,6 +1276,9 @@ These are written in reverse cache order (oldest to newest). """ + + _file = 'manifestfulltextcache' + def __init__(self, max): super(manifestfulltextcache, self).__init__(max) self._dirty = False @@ -1288,7 +1290,7 @@ return try: - with self._opener('manifestfulltextcache') as fp: + with self._opener(self._file) as fp: set = super(manifestfulltextcache, self).__setitem__ # ignore trailing data, this is a cache, corruption is skipped while True: @@ -1314,8 +1316,7 @@ if not self._dirty or self._opener is None: return # rotate backwards to the first used node - with self._opener( - 'manifestfulltextcache', 'w', atomictemp=True, checkambig=True + with self._opener(self._file, 'w', atomictemp=True, checkambig=True ) as fp: node = self._head.prev while True: @@ -1434,10 +1435,13 @@ def _setupmanifestcachehooks(self, repo): """Persist the manifestfulltextcache on lock release""" - if not util.safehasattr(repo, '_lockref'): + if not util.safehasattr(repo, '_wlockref'): return - self._fulltextcache._opener = repo.cachevfs + self._fulltextcache._opener = repo.wcachevfs + if repo._currentlock(repo._wlockref) is None: + return + reporef = weakref.ref(repo) manifestrevlogref = weakref.ref(self) @@ -1451,8 +1455,7 @@ return self._fulltextcache.write() - if repo._currentlock(repo._lockref) is not None: - repo._afterlock(persistmanifestcache) + repo._afterlock(persistmanifestcache) @property def fulltextcache(self):
--- a/mercurial/match.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/match.py Tue Mar 19 16:36:59 2019 +0300 @@ -42,7 +42,7 @@ except AttributeError: return m.match -def _expandsets(root, cwd, kindpats, ctx, listsubrepos, badfn): +def _expandsets(kindpats, ctx, listsubrepos, badfn): '''Returns the kindpats list with the 'set' patterns expanded to matchers''' matchers = [] other = [] @@ -57,7 +57,7 @@ if listsubrepos: for subpath in ctx.substate: sm = ctx.sub(subpath).matchfileset(pat, badfn=badfn) - pm = prefixdirmatcher(root, cwd, subpath, sm, badfn=badfn) + pm = prefixdirmatcher(subpath, sm, badfn=badfn) matchers.append(pm) continue @@ -97,25 +97,24 @@ return False return True -def _buildkindpatsmatcher(matchercls, root, cwd, kindpats, ctx=None, +def _buildkindpatsmatcher(matchercls, root, kindpats, ctx=None, listsubrepos=False, badfn=None): matchers = [] - fms, kindpats = _expandsets(root, cwd, kindpats, ctx=ctx, + fms, kindpats = _expandsets(kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn) if kindpats: - m = matchercls(root, cwd, kindpats, listsubrepos=listsubrepos, - badfn=badfn) + m = matchercls(root, kindpats, badfn=badfn) matchers.append(m) if fms: matchers.extend(fms) if not matchers: - return nevermatcher(root, cwd, badfn=badfn) + return nevermatcher(badfn=badfn) if len(matchers) == 1: return matchers[0] return unionmatcher(matchers) def match(root, cwd, patterns=None, include=None, exclude=None, default='glob', - exact=False, auditor=None, ctx=None, listsubrepos=False, warn=None, + auditor=None, ctx=None, listsubrepos=False, warn=None, badfn=None, icasefs=False): """build an object to match a set of file patterns @@ -126,7 +125,6 @@ include - patterns to include (unless they are excluded) exclude - patterns to exclude (even if they are included) default - if a pattern in patterns has no explicit type, assume this one - exact - patterns are actually filenames (include/exclude still apply) warn - optional function used for printing warnings 
badfn - optional bad() callback for this matcher instead of the default icasefs - make a matcher for wdir on case insensitive filesystems, which @@ -150,9 +148,6 @@ """ normalize = _donormalize if icasefs: - if exact: - raise error.ProgrammingError("a case-insensitive exact matcher " - "doesn't make sense") dirstate = ctx.repo().dirstate dsnormalize = dirstate.normalize @@ -171,41 +166,38 @@ kindpats.append((kind, pats, source)) return kindpats - if exact: - m = exactmatcher(root, cwd, patterns, badfn) - elif patterns: + if patterns: kindpats = normalize(patterns, default, root, cwd, auditor, warn) if _kindpatsalwaysmatch(kindpats): - m = alwaysmatcher(root, cwd, badfn, relativeuipath=True) + m = alwaysmatcher(badfn) else: - m = _buildkindpatsmatcher(patternmatcher, root, cwd, kindpats, - ctx=ctx, listsubrepos=listsubrepos, - badfn=badfn) + m = _buildkindpatsmatcher(patternmatcher, root, kindpats, ctx=ctx, + listsubrepos=listsubrepos, badfn=badfn) else: # It's a little strange that no patterns means to match everything. # Consider changing this to match nothing (probably using nevermatcher). 
- m = alwaysmatcher(root, cwd, badfn) + m = alwaysmatcher(badfn) if include: kindpats = normalize(include, 'glob', root, cwd, auditor, warn) - im = _buildkindpatsmatcher(includematcher, root, cwd, kindpats, ctx=ctx, + im = _buildkindpatsmatcher(includematcher, root, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=None) m = intersectmatchers(m, im) if exclude: kindpats = normalize(exclude, 'glob', root, cwd, auditor, warn) - em = _buildkindpatsmatcher(includematcher, root, cwd, kindpats, ctx=ctx, + em = _buildkindpatsmatcher(includematcher, root, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=None) m = differencematcher(m, em) return m -def exact(root, cwd, files, badfn=None): - return exactmatcher(root, cwd, files, badfn=badfn) +def exact(files, badfn=None): + return exactmatcher(files, badfn=badfn) -def always(root, cwd): - return alwaysmatcher(root, cwd) +def always(badfn=None): + return alwaysmatcher(badfn) -def never(root, cwd): - return nevermatcher(root, cwd) +def never(badfn=None): + return nevermatcher(badfn) def badmatch(match, badfn): """Make a copy of the given matcher, replacing its bad method with the given @@ -258,12 +250,9 @@ class basematcher(object): - def __init__(self, root, cwd, badfn=None, relativeuipath=True): - self._root = root - self._cwd = cwd + def __init__(self, badfn=None): if badfn is not None: self.bad = badfn - self._relativeuipath = relativeuipath def __call__(self, fn): return self.matchfn(fn) @@ -284,21 +273,6 @@ # by recursive traversal is visited. traversedir = None - def abs(self, f): - '''Convert a repo path back to path that is relative to the root of the - matcher.''' - return f - - def rel(self, f): - '''Convert repo path back to path that is relative to cwd of matcher.''' - return util.pathto(self._root, self._cwd, f) - - def uipath(self, f): - '''Convert repo path to a display path. If patterns or -I/-X were used - to create this matcher, the display path will be relative to cwd. 
- Otherwise it is relative to the root of the repo.''' - return (self._relativeuipath and self.rel(f)) or self.abs(f) - @propertycache def _files(self): return [] @@ -399,9 +373,8 @@ class alwaysmatcher(basematcher): '''Matches everything.''' - def __init__(self, root, cwd, badfn=None, relativeuipath=False): - super(alwaysmatcher, self).__init__(root, cwd, badfn, - relativeuipath=relativeuipath) + def __init__(self, badfn=None): + super(alwaysmatcher, self).__init__(badfn) def always(self): return True @@ -421,8 +394,8 @@ class nevermatcher(basematcher): '''Matches nothing.''' - def __init__(self, root, cwd, badfn=None): - super(nevermatcher, self).__init__(root, cwd, badfn) + def __init__(self, badfn=None): + super(nevermatcher, self).__init__(badfn) # It's a little weird to say that the nevermatcher is an exact matcher # or a prefix matcher, but it seems to make sense to let callers take @@ -447,8 +420,8 @@ class predicatematcher(basematcher): """A matcher adapter for a simple boolean function""" - def __init__(self, root, cwd, predfn, predrepr=None, badfn=None): - super(predicatematcher, self).__init__(root, cwd, badfn) + def __init__(self, predfn, predrepr=None, badfn=None): + super(predicatematcher, self).__init__(badfn) self.matchfn = predfn self._predrepr = predrepr @@ -460,13 +433,12 @@ class patternmatcher(basematcher): - def __init__(self, root, cwd, kindpats, listsubrepos=False, badfn=None): - super(patternmatcher, self).__init__(root, cwd, badfn) + def __init__(self, root, kindpats, badfn=None): + super(patternmatcher, self).__init__(badfn) self._files = _explicitfiles(kindpats) self._prefix = _prefix(kindpats) - self._pats, self.matchfn = _buildmatch(kindpats, '$', listsubrepos, - root) + self._pats, self.matchfn = _buildmatch(kindpats, '$', root) @propertycache def _dirs(self): @@ -539,11 +511,10 @@ class includematcher(basematcher): - def __init__(self, root, cwd, kindpats, listsubrepos=False, badfn=None): - super(includematcher, self).__init__(root, 
cwd, badfn) + def __init__(self, root, kindpats, badfn=None): + super(includematcher, self).__init__(badfn) - self._pats, self.matchfn = _buildmatch(kindpats, '(?:/|$)', - listsubrepos, root) + self._pats, self.matchfn = _buildmatch(kindpats, '(?:/|$)', root) self._prefix = _prefix(kindpats) roots, dirs, parents = _rootsdirsandparents(kindpats) # roots are directories which are recursively included. @@ -601,8 +572,8 @@ patterns (so no kind-prefixes). ''' - def __init__(self, root, cwd, files, badfn=None): - super(exactmatcher, self).__init__(root, cwd, badfn) + def __init__(self, files, badfn=None): + super(exactmatcher, self).__init__(badfn) if isinstance(files, list): self._files = files @@ -649,11 +620,11 @@ '''Composes two matchers by matching if the first matches and the second does not. - The second matcher's non-matching-attributes (root, cwd, bad, explicitdir, + The second matcher's non-matching-attributes (bad, explicitdir, traversedir) are ignored. ''' def __init__(self, m1, m2): - super(differencematcher, self).__init__(m1._root, m1._cwd) + super(differencematcher, self).__init__() self._m1 = m1 self._m2 = m2 self.bad = m1.bad @@ -677,6 +648,9 @@ def visitdir(self, dir): if self._m2.visitdir(dir) == 'all': return False + elif not self._m2.visitdir(dir): + # m2 does not match dir, we can return 'all' here if possible + return self._m1.visitdir(dir) return bool(self._m1.visitdir(dir)) def visitchildrenset(self, dir): @@ -714,7 +688,7 @@ def intersectmatchers(m1, m2): '''Composes two matchers by matching if both of them match. - The second matcher's non-matching-attributes (root, cwd, bad, explicitdir, + The second matcher's non-matching-attributes (bad, explicitdir, traversedir) are ignored. 
''' if m1 is None or m2 is None: @@ -726,19 +700,15 @@ m.bad = m1.bad m.explicitdir = m1.explicitdir m.traversedir = m1.traversedir - m.abs = m1.abs - m.rel = m1.rel - m._relativeuipath |= m1._relativeuipath return m if m2.always(): m = copy.copy(m1) - m._relativeuipath |= m2._relativeuipath return m return intersectionmatcher(m1, m2) class intersectionmatcher(basematcher): def __init__(self, m1, m2): - super(intersectionmatcher, self).__init__(m1._root, m1._cwd) + super(intersectionmatcher, self).__init__() self._m1 = m1 self._m2 = m2 self.bad = m1.bad @@ -817,19 +787,15 @@ ['b.txt'] >>> m2.exact(b'b.txt') True - >>> util.pconvert(m2.rel(b'b.txt')) - 'sub/b.txt' >>> def bad(f, msg): ... print(pycompat.sysstr(b"%s: %s" % (f, msg))) >>> m1.bad = bad >>> m2.bad(b'x.txt', b'No such file') sub/x.txt: No such file - >>> m2.abs(b'c.txt') - 'sub/c.txt' """ def __init__(self, path, matcher): - super(subdirmatcher, self).__init__(matcher._root, matcher._cwd) + super(subdirmatcher, self).__init__() self._path = path self._matcher = matcher self._always = matcher.always() @@ -845,15 +811,6 @@ def bad(self, f, msg): self._matcher.bad(self._path + "/" + f, msg) - def abs(self, f): - return self._matcher.abs(self._path + "/" + f) - - def rel(self, f): - return self._matcher.rel(self._path + "/" + f) - - def uipath(self, f): - return self._matcher.uipath(self._path + "/" + f) - def matchfn(self, f): # Some information is lost in the superclass's constructor, so we # can not accurately create the matching function for the subdirectory @@ -889,14 +846,14 @@ class prefixdirmatcher(basematcher): """Adapt a matcher to work on a parent directory. - The matcher's non-matching-attributes (root, cwd, bad, explicitdir, - traversedir) are ignored. + The matcher's non-matching-attributes (bad, explicitdir, traversedir) are + ignored. The prefix path should usually be the relative path from the root of this matcher to the root of the wrapped matcher. 
>>> m1 = match(util.localpath(b'root/d/e'), b'f', [b'../a.txt', b'b.txt']) - >>> m2 = prefixdirmatcher(b'root', b'd/e/f', b'd/e', m1) + >>> m2 = prefixdirmatcher(b'd/e', m1) >>> bool(m2(b'a.txt'),) False >>> bool(m2(b'd/e/a.txt')) @@ -919,8 +876,8 @@ False """ - def __init__(self, root, cwd, path, matcher, badfn=None): - super(prefixdirmatcher, self).__init__(root, cwd, badfn) + def __init__(self, path, matcher, badfn=None): + super(prefixdirmatcher, self).__init__(badfn) if not path: raise error.ProgrammingError('prefix path must not be empty') self._path = path @@ -970,13 +927,13 @@ class unionmatcher(basematcher): """A matcher that is the union of several matchers. - The non-matching-attributes (root, cwd, bad, explicitdir, traversedir) are - taken from the first matcher. + The non-matching-attributes (bad, explicitdir, traversedir) are taken from + the first matcher. """ def __init__(self, matchers): m1 = matchers[0] - super(unionmatcher, self).__init__(m1._root, m1._cwd) + super(unionmatcher, self).__init__() self.explicitdir = m1.explicitdir self.traversedir = m1.traversedir self._matchers = matchers @@ -1142,7 +1099,7 @@ return _globre(pat) + globsuffix raise error.ProgrammingError('not a regex pattern: %s:%s' % (kind, pat)) -def _buildmatch(kindpats, globsuffix, listsubrepos, root): +def _buildmatch(kindpats, globsuffix, root): '''Return regexp string and a matcher function for kindpats. globsuffix is appended to the regexp of globs.''' matchfuncs = []
--- a/mercurial/merge.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/merge.py Tue Mar 19 16:36:59 2019 +0300 @@ -391,9 +391,9 @@ """ # Check local variables before looking at filesystem for performance # reasons. - return bool(self._local) or bool(self._state) or \ - self._repo.vfs.exists(self.statepathv1) or \ - self._repo.vfs.exists(self.statepathv2) + return (bool(self._local) or bool(self._state) or + self._repo.vfs.exists(self.statepathv1) or + self._repo.vfs.exists(self.statepathv2)) def commit(self): """Write current state on disk (if necessary)""" @@ -1186,9 +1186,6 @@ diff = m1.diff(m2, match=matcher) - if matcher is None: - matcher = matchmod.always('', '') - actions = {} for f, ((n1, fl1), (n2, fl2)) in diff.iteritems(): if n1 and n2: # file exists on both local and remote side @@ -1502,15 +1499,15 @@ # If a file or directory exists with the same name, back that # up. Otherwise, look to see if there is a file that conflicts # with a directory this file is in, and if so, back that up. - absf = repo.wjoin(f) + conflicting = f if not repo.wvfs.lexists(f): for p in util.finddirs(f): if repo.wvfs.isfileorlink(p): - absf = repo.wjoin(p) + conflicting = p break - orig = scmutil.origpath(ui, repo, absf) - if repo.wvfs.lexists(absf): - util.rename(absf, orig) + if repo.wvfs.lexists(conflicting): + orig = scmutil.backuppath(ui, repo, conflicting) + util.rename(repo.wjoin(conflicting), orig) wctx[f].clearunknown() atomictemp = ui.configbool("experimental", "update.atomic-file") wctx[f].write(fctx(f).data(), flags, backgroundclose=True,
--- a/mercurial/minirst.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/minirst.py Tue Mar 19 16:36:59 2019 +0300 @@ -114,9 +114,9 @@ # Partially minimized form: remove space and both # colons. blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-3] - elif len(blocks[i]['lines']) == 1 and \ - blocks[i]['lines'][0].lstrip(' ').startswith('.. ') and \ - blocks[i]['lines'][0].find(' ', 3) == -1: + elif (len(blocks[i]['lines']) == 1 and + blocks[i]['lines'][0].lstrip(' ').startswith('.. ') and + blocks[i]['lines'][0].find(' ', 3) == -1): # directive on its own line, not a literal block i += 1 continue @@ -641,7 +641,6 @@ def parse(text, indent=0, keep=None, admonitions=None): """Parse text into a list of blocks""" - pruned = [] blocks = findblocks(text) for b in blocks: b['indent'] += indent @@ -736,7 +735,6 @@ '''return a list of (section path, nesting level, blocks) tuples''' nest = "" names = () - level = 0 secs = [] def getname(b): @@ -792,8 +790,8 @@ if section['type'] != 'margin': sindent = section['indent'] if len(section['lines']) > 1: - sindent += len(section['lines'][1]) - \ - len(section['lines'][1].lstrip(' ')) + sindent += (len(section['lines'][1]) - + len(section['lines'][1].lstrip(' '))) if bindent >= sindent: break pointer += 1
--- a/mercurial/mpatch.c Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/mpatch.c Tue Mar 19 16:36:59 2019 +0300 @@ -41,8 +41,9 @@ { struct mpatch_flist *a = NULL; - if (size < 1) + if (size < 1) { size = 1; + } a = (struct mpatch_flist *)malloc(sizeof(struct mpatch_flist)); if (a) { @@ -110,10 +111,12 @@ while (s != src->tail) { int soffset = s->start; - if (!safeadd(offset, &soffset)) + if (!safeadd(offset, &soffset)) { break; /* add would overflow, oh well */ - if (soffset >= cut) + } + if (soffset >= cut) { break; /* we've gone far enough */ + } postend = offset; if (!safeadd(s->start, &postend) || @@ -139,11 +142,13 @@ if (!safesub(offset, &c)) { break; } - if (s->end < c) + if (s->end < c) { c = s->end; + } l = cut - offset - s->start; - if (s->len < l) + if (s->len < l) { l = s->len; + } offset += s->start + l - c; @@ -176,8 +181,9 @@ if (!safeadd(offset, &cmpcut)) { break; } - if (cmpcut >= cut) + if (cmpcut >= cut) { break; + } postend = offset; if (!safeadd(s->start, &postend)) { @@ -205,11 +211,13 @@ if (!safesub(offset, &c)) { break; } - if (s->end < c) + if (s->end < c) { c = s->end; + } l = cut - offset - s->start; - if (s->len < l) + if (s->len < l) { l = s->len; + } offset += s->start + l - c; s->start = c; @@ -233,8 +241,9 @@ struct mpatch_frag *bh, *ct; int offset = 0, post; - if (a && b) + if (a && b) { c = lalloc((lsize(a) + lsize(b)) * 2); + } if (c) { @@ -284,8 +293,9 @@ /* assume worst case size, we won't have many of these lists */ l = lalloc(len / 12 + 1); - if (!l) + if (!l) { return MPATCH_ERR_NO_MEM; + } lt = l->tail; @@ -295,8 +305,9 @@ lt->start = getbe32(bin + pos); lt->end = getbe32(bin + pos + 4); lt->len = getbe32(bin + pos + 8); - if (lt->start < 0 || lt->start > lt->end || lt->len < 0) + if (lt->start < 0 || lt->start > lt->end || lt->len < 0) { break; /* sanity check */ + } if (!safeadd(12, &pos)) { break; }
--- a/mercurial/narrowspec.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/narrowspec.py Tue Mar 19 16:36:59 2019 +0300 @@ -127,7 +127,7 @@ # Passing empty include and empty exclude to matchmod.match() # gives a matcher that matches everything, so explicitly use # the nevermatcher. - return matchmod.never(root, '') + return matchmod.never() return matchmod.match(root, '', [], include=include or [], exclude=exclude or [])
--- a/mercurial/obsutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/obsutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -397,14 +397,17 @@ This is a first and basic implementation, with many shortcoming. """ - # lefctx.repo() and rightctx.repo() are the same here - repo = leftctx.repo() - diffopts = diffutil.diffallopts(repo.ui, {'git': True}) + diffopts = diffutil.diffallopts(leftctx.repo().ui, {'git': True}) + # Leftctx or right ctx might be filtered, so we need to use the contexts # with an unfiltered repository to safely compute the diff - leftunfi = repo.unfiltered()[leftctx.rev()] + + # leftctx and rightctx can be from different repository views in case of + # hgsubversion, do don't try to access them from same repository + # rightctx.repo() and leftctx.repo() are not always the same + leftunfi = leftctx._repo.unfiltered()[leftctx.rev()] leftdiff = leftunfi.diff(opts=diffopts) - rightunfi = repo.unfiltered()[rightctx.rev()] + rightunfi = rightctx._repo.unfiltered()[rightctx.rev()] rightdiff = rightunfi.diff(opts=diffopts) left, right = (0, 0)
--- a/mercurial/patch.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/patch.py Tue Mar 19 16:36:59 2019 +0300 @@ -15,7 +15,6 @@ import errno import hashlib import os -import posixpath import re import shutil import zlib @@ -363,7 +362,7 @@ return self._ispatchinga(afile) and self._ispatchingb(bfile) def __repr__(self): - return "<patchmeta %s %r>" % (self.op, self.path) + return r"<patchmeta %s %r>" % (self.op, self.path) def readgitpatch(lr): """extract git-style metadata about patches from <patchname>""" @@ -637,8 +636,8 @@ return self.changed | self.removed # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1 -unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@') -contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)') +unidesc = re.compile(br'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@') +contextdesc = re.compile(br'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)') eolmodes = ['strict', 'crlf', 'lf', 'auto'] class patchfile(object): @@ -752,7 +751,7 @@ for l in x.hunk: lines.append(l) if l[-1:] != '\n': - lines.append("\n\ No newline at end of file\n") + lines.append("\n\\ No newline at end of file\n") self.backend.writerej(self.fname, len(self.rej), self.hunks, lines) def apply(self, h): @@ -864,7 +863,7 @@ diff_re = re.compile('diff -r .* (.*)$') allhunks_re = re.compile('(?:index|deleted file) ') pretty_re = re.compile('(?:new file|deleted file) ') - special_re = re.compile('(?:index|deleted|copy|rename) ') + special_re = re.compile('(?:index|deleted|copy|rename|new mode) ') newfile_re = re.compile('(?:new file)') def __init__(self, header): @@ -926,8 +925,8 @@ # if they have some content as we want to be able to change it nocontent = len(self.header) == 2 emptynewfile = self.isnewfile() and nocontent - return emptynewfile or \ - any(self.special_re.match(h) for h in self.header) + return (emptynewfile + or any(self.special_re.match(h) for h in self.header)) class recordhunk(object): """patch hunk @@ -1304,7 
+1303,7 @@ self.hunk.append(u) l = lr.readline() - if l.startswith('\ '): + if l.startswith(br'\ '): s = self.a[-1][:-1] self.a[-1] = s self.hunk[-1] = s @@ -1322,7 +1321,7 @@ hunki = 1 for x in pycompat.xrange(self.lenb): l = lr.readline() - if l.startswith('\ '): + if l.startswith(br'\ '): # XXX: the only way to hit this is with an invalid line range. # The no-eol marker is not counted in the line range, but I # guess there are diff(1) out there which behave differently. @@ -1379,7 +1378,7 @@ def _fixnewline(self, lr): l = lr.readline() - if l.startswith('\ '): + if l.startswith(br'\ '): diffhelper.fixnewline(self.hunk, self.a, self.b) else: lr.push(l) @@ -1448,7 +1447,6 @@ hunk.append(l) return l.rstrip('\r\n') - size = 0 while True: line = getline(lr, self.hunk) if not line: @@ -1610,6 +1608,7 @@ self.headers = [] def addrange(self, limits): + self.addcontext([]) fromstart, fromend, tostart, toend, proc = limits self.fromline = int(fromstart) self.toline = int(tostart) @@ -1630,6 +1629,8 @@ if self.context: self.before = self.context self.context = [] + if self.hunk: + self.addcontext([]) self.hunk = hunk def newfile(self, hdr): @@ -1903,7 +1904,6 @@ if not gitpatches: raise PatchError(_('failed to synchronize metadata for "%s"') % afile[2:]) - gp = gitpatches[-1] newfile = True elif x.startswith('---'): # check for a unified diff @@ -2238,8 +2238,8 @@ difffeatureopts = diffutil.difffeatureopts def diff(repo, node1=None, node2=None, match=None, changes=None, - opts=None, losedatafn=None, prefix='', relroot='', copy=None, - hunksfilterfn=None): + opts=None, losedatafn=None, pathfn=None, copy=None, + copysourcematch=None, hunksfilterfn=None): '''yields diff of changes to files between two nodes, or node and working directory. @@ -2263,20 +2263,28 @@ copy, if not empty, should contain mappings {dst@y: src@x} of copy information. 
+ if copysourcematch is not None, then copy sources will be filtered by this + matcher + hunksfilterfn, if not None, should be a function taking a filectx and hunks generator that may yield filtered hunks. ''' + if not node1 and not node2: + node1 = repo.dirstate.p1() + + ctx1 = repo[node1] + ctx2 = repo[node2] + for fctx1, fctx2, hdr, hunks in diffhunks( - repo, node1=node1, node2=node2, - match=match, changes=changes, opts=opts, - losedatafn=losedatafn, prefix=prefix, relroot=relroot, copy=copy, - ): + repo, ctx1=ctx1, ctx2=ctx2, match=match, changes=changes, opts=opts, + losedatafn=losedatafn, pathfn=pathfn, copy=copy, + copysourcematch=copysourcematch): if hunksfilterfn is not None: # If the file has been removed, fctx2 is None; but this should # not occur here since we catch removed files early in # logcmdutil.getlinerangerevs() for 'hg log -L'. - assert fctx2 is not None, \ - 'fctx2 unexpectly None in diff hunks filtering' + assert fctx2 is not None, ( + 'fctx2 unexpectly None in diff hunks filtering') hunks = hunksfilterfn(fctx2, hunks) text = ''.join(sum((list(hlines) for hrange, hlines in hunks), [])) if hdr and (text or len(hdr) > 1): @@ -2284,8 +2292,8 @@ if text: yield text -def diffhunks(repo, node1=None, node2=None, match=None, changes=None, - opts=None, losedatafn=None, prefix='', relroot='', copy=None): +def diffhunks(repo, ctx1, ctx2, match=None, changes=None, opts=None, + losedatafn=None, pathfn=None, copy=None, copysourcematch=None): """Yield diff of changes to files in the form of (`header`, `hunks`) tuples where `header` is a list of diff headers and `hunks` is an iterable of (`hunkrange`, `hunklines`) tuples. 
@@ -2296,9 +2304,6 @@ if opts is None: opts = mdiff.defaultopts - if not node1 and not node2: - node1 = repo.dirstate.p1() - def lrugetfilectx(): cache = {} order = collections.deque() @@ -2315,16 +2320,6 @@ return getfilectx getfilectx = lrugetfilectx() - ctx1 = repo[node1] - ctx2 = repo[node2] - - relfiltered = False - if relroot != '' and match.always(): - # as a special case, create a new matcher with just the relroot - pats = [relroot] - match = scmutil.match(ctx2, pats, default='path') - relfiltered = True - if not changes: changes = ctx1.status(ctx2, match=match) modified, added, removed = changes[:3] @@ -2343,21 +2338,11 @@ if opts.git or opts.upgrade: copy = copies.pathcopies(ctx1, ctx2, match=match) - if relroot is not None: - if not relfiltered: - # XXX this would ideally be done in the matcher, but that is - # generally meant to 'or' patterns, not 'and' them. In this case we - # need to 'and' all the patterns from the matcher with relroot. - def filterrel(l): - return [f for f in l if f.startswith(relroot)] - modified = filterrel(modified) - added = filterrel(added) - removed = filterrel(removed) - relfiltered = True - # filter out copies where either side isn't inside the relative root - copy = dict(((dst, src) for (dst, src) in copy.iteritems() - if dst.startswith(relroot) - and src.startswith(relroot))) + if copysourcematch: + # filter out copies where source side isn't inside the matcher + # (copies.pathcopies() already filtered out the destination) + copy = {dst: src for dst, src in copy.iteritems() + if copysourcematch(src)} modifiedset = set(modified) addedset = set(added) @@ -2388,7 +2373,7 @@ def difffn(opts, losedata): return trydiff(repo, revs, ctx1, ctx2, modified, added, removed, - copy, getfilectx, opts, losedata, prefix, relroot) + copy, getfilectx, opts, losedata, pathfn) if opts.upgrade and not opts.git: try: def losedata(fn): @@ -2603,16 +2588,14 @@ yield f1, f2, copyop def trydiff(repo, revs, ctx1, ctx2, modified, added, removed, - 
copy, getfilectx, opts, losedatafn, prefix, relroot): + copy, getfilectx, opts, losedatafn, pathfn): '''given input data, generate a diff and yield it in blocks If generating a diff would lose data like flags or binary data and losedatafn is not None, it will be called. - relroot is removed and prefix is added to every path in the diff output. - - If relroot is not empty, this function expects every path in modified, - added, removed and copy to start with it.''' + pathfn is applied to every path in the diff output. + ''' def gitindex(text): if not text: @@ -2640,12 +2623,8 @@ gitmode = {'l': '120000', 'x': '100755', '': '100644'} - if relroot != '' and (repo.ui.configbool('devel', 'all-warnings') - or repo.ui.configbool('devel', 'check-relroot')): - for f in modified + added + removed + list(copy) + list(copy.values()): - if f is not None and not f.startswith(relroot): - raise AssertionError( - "file %s doesn't start with relroot %s" % (f, relroot)) + if not pathfn: + pathfn = lambda f: f for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts): content1 = None @@ -2682,10 +2661,8 @@ (f1 and f2 and flag1 != flag2)): losedatafn(f2 or f1) - path1 = f1 or f2 - path2 = f2 or f1 - path1 = posixpath.join(prefix, path1[len(relroot):]) - path2 = posixpath.join(prefix, path2[len(relroot):]) + path1 = pathfn(f1 or f2) + path2 = pathfn(f2 or f1) header = [] if opts.git: header.append('diff --git %s%s %s%s' % @@ -2705,7 +2682,7 @@ header.append('similarity index %d%%' % sim) header.append('%s from %s' % (copyop, path1)) header.append('%s to %s' % (copyop, path2)) - elif revs and not repo.ui.quiet: + elif revs: header.append(diffline(path1, revs)) # fctx.is | diffopts | what to | is fctx.data() @@ -2773,7 +2750,7 @@ return maxfile, maxtotal, addtotal, removetotal, binary def diffstatdata(lines): - diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$') + diffre = re.compile(br'^diff .*-r [a-z0-9]+\s(.*)$') results = [] filename, adds, removes, isbinary = None, 0, 0, 
False @@ -2808,6 +2785,10 @@ elif (line.startswith('GIT binary patch') or line.startswith('Binary file')): isbinary = True + elif line.startswith('rename from'): + filename = line[12:] + elif line.startswith('rename to'): + filename += ' => %s' % line[10:] addresult() return results
--- a/mercurial/posix.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/posix.py Tue Mar 19 16:36:59 2019 +0300 @@ -575,15 +575,16 @@ if gid is None: gid = os.getgid() try: - return grp.getgrgid(gid)[0] + return pycompat.fsencode(grp.getgrgid(gid)[0]) except KeyError: - return str(gid) + return pycompat.bytestr(gid) def groupmembers(name): """Return the list of members of the group with the given name, KeyError if the group does not exist. """ - return list(grp.getgrnam(name).gr_mem) + name = pycompat.fsdecode(name) + return pycompat.rapply(pycompat.fsencode, list(grp.getgrnam(name).gr_mem)) def spawndetached(args): return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
--- a/mercurial/repair.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/repair.py Tue Mar 19 16:36:59 2019 +0300 @@ -252,6 +252,24 @@ # extensions can use it return backupfile +def softstrip(ui, repo, nodelist, backup=True, topic='backup'): + """perform a "soft" strip using the archived phase""" + tostrip = [c.node() for c in repo.set('sort(%ln::)', nodelist)] + if not tostrip: + return None + + newbmtarget, updatebm = _bookmarkmovements(repo, tostrip) + if backup: + node = tostrip[0] + backupfile = _createstripbackup(repo, tostrip, node, topic) + + with repo.transaction('strip') as tr: + phases.retractboundary(repo, tr, phases.archived, tostrip) + bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm] + repo._bookmarks.applychanges(repo, tr, bmchanges) + return backupfile + + def _bookmarkmovements(repo, tostrip): # compute necessary bookmark movement bm = repo._bookmarks
--- a/mercurial/repository.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/repository.py Tue Mar 19 16:36:59 2019 +0300 @@ -346,8 +346,8 @@ return raise error.CapabilityError( - _('cannot %s; remote repository does not support the %r ' - 'capability') % (purpose, name)) + _('cannot %s; remote repository does not support the ' + '\'%s\' capability') % (purpose, name)) class iverifyproblem(interfaceutil.Interface): """Represents a problem with the integrity of the repository.
--- a/mercurial/revlog.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/revlog.py Tue Mar 19 16:36:59 2019 +0300 @@ -410,7 +410,10 @@ self._maxchainlen = opts['maxchainlen'] if 'deltabothparents' in opts: self._deltabothparents = opts['deltabothparents'] - self._lazydeltabase = bool(opts.get('lazydeltabase', False)) + self._lazydelta = bool(opts.get('lazydelta', True)) + self._lazydeltabase = False + if self._lazydelta: + self._lazydeltabase = bool(opts.get('lazydeltabase', False)) if 'compengine' in opts: self._compengine = opts['compengine'] if 'maxdeltachainspan' in opts: @@ -610,6 +613,9 @@ self._pcache = {} try: + # If we are using the native C version, you are in a fun case + # where self.index, self.nodemap and self._nodecaches is the same + # object. self._nodecache.clearcaches() except AttributeError: self._nodecache = {nullid: nullrev} @@ -1118,7 +1124,9 @@ return self.index.headrevs() except AttributeError: return self._headrevs() - return dagop.headrevs(revs, self.parentrevs) + if rustext is not None: + return rustext.dagop.headrevs(self.index, revs) + return dagop.headrevs(revs, self._uncheckedparentrevs) def computephases(self, roots): return self.index.computephasesmapsets(roots) @@ -1337,7 +1345,7 @@ return True def maybewdir(prefix): - return all(c == 'f' for c in prefix) + return all(c == 'f' for c in pycompat.iterbytestr(prefix)) hexnode = hex(node) @@ -2264,6 +2272,14 @@ self._nodepos = None def checksize(self): + """Check size of index and data files + + return a (dd, di) tuple. + - dd: extra bytes for the "data" file + - di: extra bytes for the "index" file + + A healthy revlog will return (0, 0). + """ expected = 0 if len(self): expected = max(0, self.end(len(self) - 1)) @@ -2388,21 +2404,25 @@ if getattr(destrevlog, 'filteredrevs', None): raise ValueError(_('destination revlog has filtered revisions')) - # lazydeltabase controls whether to reuse a cached delta, if possible. 
+ # lazydelta and lazydeltabase controls whether to reuse a cached delta, + # if possible. + oldlazydelta = destrevlog._lazydelta oldlazydeltabase = destrevlog._lazydeltabase oldamd = destrevlog._deltabothparents try: if deltareuse == self.DELTAREUSEALWAYS: destrevlog._lazydeltabase = True + destrevlog._lazydelta = True elif deltareuse == self.DELTAREUSESAMEREVS: destrevlog._lazydeltabase = False + destrevlog._lazydelta = True + elif deltareuse == self.DELTAREUSENEVER: + destrevlog._lazydeltabase = False + destrevlog._lazydelta = False destrevlog._deltabothparents = forcedeltabothparents or oldamd - populatecachedelta = deltareuse in (self.DELTAREUSEALWAYS, - self.DELTAREUSESAMEREVS) - deltacomputer = deltautil.deltacomputer(destrevlog) index = self.index for rev in self: @@ -2420,7 +2440,7 @@ # the revlog chunk is a delta. cachedelta = None rawtext = None - if populatecachedelta: + if destrevlog._lazydelta: dp = self.deltaparent(rev) if dp != nullrev: cachedelta = (dp, bytes(self._chunk(rev))) @@ -2452,6 +2472,7 @@ if addrevisioncb: addrevisioncb(self, rev, node) finally: + destrevlog._lazydelta = oldlazydelta destrevlog._lazydeltabase = oldlazydeltabase destrevlog._deltabothparents = oldamd
--- a/mercurial/revlogutils/deltas.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/revlogutils/deltas.py Tue Mar 19 16:36:59 2019 +0300 @@ -916,7 +916,7 @@ and currentbase != base and self.revlog.length(currentbase) == 0): currentbase = self.revlog.deltaparent(currentbase) - if currentbase == base: + if self.revlog._lazydelta and currentbase == base: delta = revinfo.cachedelta[1] if delta is None: delta = self._builddeltadiff(base, revinfo, fh)
--- a/mercurial/revset.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/revset.py Tue Mar 19 16:36:59 2019 +0300 @@ -43,7 +43,7 @@ getinteger = revsetlang.getinteger getboolean = revsetlang.getboolean getlist = revsetlang.getlist -getrange = revsetlang.getrange +getintrange = revsetlang.getintrange getargs = revsetlang.getargs getargsdict = revsetlang.getargsdict @@ -225,24 +225,70 @@ def relationset(repo, subset, x, y, order): raise error.ParseError(_("can't use a relation in this context")) -def generationsrel(repo, subset, x, rel, n, order): - # TODO: support range, rewrite tests, and drop startdepth argument - # from ancestors() and descendants() predicates - if n <= 0: - n = -n - return _ancestors(repo, subset, x, startdepth=n, stopdepth=n + 1) - else: - return _descendants(repo, subset, x, startdepth=n, stopdepth=n + 1) +def _splitrange(a, b): + """Split range with bounds a and b into two ranges at 0 and return two + tuples of numbers for use as startdepth and stopdepth arguments of + revancestors and revdescendants. 
+ + >>> _splitrange(-10, -5) # [-10:-5] + ((5, 11), (None, None)) + >>> _splitrange(5, 10) # [5:10] + ((None, None), (5, 11)) + >>> _splitrange(-10, 10) # [-10:10] + ((0, 11), (0, 11)) + >>> _splitrange(-10, 0) # [-10:0] + ((0, 11), (None, None)) + >>> _splitrange(0, 10) # [0:10] + ((None, None), (0, 11)) + >>> _splitrange(0, 0) # [0:0] + ((0, 1), (None, None)) + >>> _splitrange(1, -1) # [1:-1] + ((None, None), (None, None)) + """ + ancdepths = (None, None) + descdepths = (None, None) + if a == b == 0: + ancdepths = (0, 1) + if a < 0: + ancdepths = (-min(b, 0), -a + 1) + if b > 0: + descdepths = (max(a, 0), b + 1) + return ancdepths, descdepths + +def generationsrel(repo, subset, x, rel, z, order): + # TODO: rewrite tests, and drop startdepth argument from ancestors() and + # descendants() predicates + a, b = getintrange(z, + _('relation subscript must be an integer or a range'), + _('relation subscript bounds must be integers'), + deffirst=-(dagop.maxlogdepth - 1), + deflast=+(dagop.maxlogdepth - 1)) + (ancstart, ancstop), (descstart, descstop) = _splitrange(a, b) + + if ancstart is None and descstart is None: + return baseset() + + revs = getset(repo, fullreposet(repo), x) + if not revs: + return baseset() + + if ancstart is not None and descstart is not None: + s = dagop.revancestors(repo, revs, False, ancstart, ancstop) + s += dagop.revdescendants(repo, revs, False, descstart, descstop) + elif ancstart is not None: + s = dagop.revancestors(repo, revs, False, ancstart, ancstop) + elif descstart is not None: + s = dagop.revdescendants(repo, revs, False, descstart, descstop) + + return subset & s def relsubscriptset(repo, subset, x, y, z, order): # this is pretty basic implementation of 'x#y[z]' operator, still # experimental so undocumented. see the wiki for further ideas. 
# https://www.mercurial-scm.org/wiki/RevsetOperatorPlan rel = getsymbol(y) - n = getinteger(z, _("relation subscript must be an integer")) - if rel in subscriptrelations: - return subscriptrelations[rel](repo, subset, x, rel, n, order) + return subscriptrelations[rel](repo, subset, x, rel, z, order) relnames = [r for r in subscriptrelations.keys() if len(r) > 1] raise error.UnknownIdentifier(rel, relnames) @@ -412,7 +458,7 @@ try: r = cl.parentrevs(r)[0] except error.WdirUnsupported: - r = repo[r].parents()[0].rev() + r = repo[r].p1().rev() ps.add(r) return subset & ps @@ -815,6 +861,43 @@ contentdivergent = obsmod.getrevs(repo, 'contentdivergent') return subset & contentdivergent +@predicate('expectsize(set[, size])', safe=True, takeorder=True) +def expectsize(repo, subset, x, order): + """Return the given revset if size matches the revset size. + Abort if the revset doesn't expect given size. + size can either be an integer range or an integer. + + For example, ``expectsize(0:1, 3:5)`` will abort as revset size is 2 and + 2 is not between 3 and 5 inclusive.""" + + args = getargsdict(x, 'expectsize', 'set size') + minsize = 0 + maxsize = len(repo) + 1 + err = '' + if 'size' not in args or 'set' not in args: + raise error.ParseError(_('invalid set of arguments')) + minsize, maxsize = getintrange(args['size'], + _('expectsize requires a size range' + ' or a positive integer'), + _('size range bounds must be integers'), + minsize, maxsize) + if minsize < 0 or maxsize < 0: + raise error.ParseError(_('negative size')) + rev = getset(repo, fullreposet(repo), args['set'], order=order) + if minsize != maxsize and (len(rev) < minsize or len(rev) > maxsize): + err = _('revset size mismatch.' + ' expected between %d and %d, got %d') % (minsize, maxsize, + len(rev)) + elif minsize == maxsize and len(rev) != minsize: + err = _('revset size mismatch.' 
+ ' expected %d, got %d') % (minsize, len(rev)) + if err: + raise error.RepoLookupError(err) + if order == followorder: + return subset & rev + else: + return rev & subset + @predicate('extdata(source)', safe=False, weight=100) def extdata(repo, subset, x): """Changesets in the specified extdata source. (EXPERIMENTAL)""" @@ -1008,11 +1091,11 @@ # i18n: "followlines" is a keyword msg = _("followlines expects exactly one file") fname = scmutil.parsefollowlinespattern(repo, rev, pat, msg) - # i18n: "followlines" is a keyword - lr = getrange(args['lines'][0], _("followlines expects a line range")) - fromline, toline = [getinteger(a, _("line range bounds must be integers")) - for a in lr] - fromline, toline = util.processlinerange(fromline, toline) + fromline, toline = util.processlinerange( + *getintrange(args['lines'][0], + # i18n: "followlines" is a keyword + _("followlines expects a line number or a range"), + _("line range bounds must be integers"))) fctx = repo[rev].filectx(fname) descend = False @@ -1513,7 +1596,7 @@ try: ps.add(cl.parentrevs(r)[0]) except error.WdirUnsupported: - ps.add(repo[r].parents()[0].rev()) + ps.add(repo[r].p1().rev()) ps -= {node.nullrev} # XXX we should turn this into a baseset instead of a set, smartset may do # some optimizations from the fact this is a baseset. @@ -1632,7 +1715,7 @@ try: ps.add(cl.parentrevs(r)[0]) except error.WdirUnsupported: - ps.add(repo[r].parents()[0].rev()) + ps.add(repo[r].p1().rev()) else: try: parents = cl.parentrevs(r) @@ -2027,7 +2110,7 @@ if len(args) != 0: pat = getstring(args[0], _("subrepo requires a pattern")) - m = matchmod.exact(repo.root, repo.root, ['.hgsubstate']) + m = matchmod.exact(['.hgsubstate']) def submatches(names): k, p, m = stringutil.stringmatcher(pat)
--- a/mercurial/revsetlang.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/revsetlang.py Tue Mar 19 16:36:59 2019 +0300 @@ -62,8 +62,8 @@ # default set of valid characters for the initial letter of symbols _syminitletters = set(pycompat.iterbytestr( - string.ascii_letters.encode('ascii') + - string.digits.encode('ascii') + + pycompat.sysbytes(string.ascii_letters) + + pycompat.sysbytes(string.digits) + '._@')) | set(map(pycompat.bytechr, pycompat.xrange(128, 256))) # default set of valid characters for non-initial letters of symbols @@ -240,6 +240,18 @@ return None, None raise error.ParseError(err) +def getintrange(x, err1, err2, deffirst=_notset, deflast=_notset): + """Get [first, last] integer range (both inclusive) from a parsed tree + + If any of the sides omitted, and if no default provided, ParseError will + be raised. + """ + if x and (x[0] == 'string' or x[0] == 'symbol'): + n = getinteger(x, err1) + return n, n + a, b = getrange(x, err1) + return getinteger(a, err2, deffirst), getinteger(b, err2, deflast) + def getargs(x, min, max, err): l = getlist(x) if len(l) < min or (max >= 0 and len(l) > max):
--- a/mercurial/scmutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/scmutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -11,6 +11,7 @@ import glob import hashlib import os +import posixpath import re import subprocess import weakref @@ -27,6 +28,7 @@ ) from . import ( + copies as copiesmod, encoding, error, match as matchmod, @@ -231,10 +233,10 @@ ui.error(_("(did you forget to compile extensions?)\n")) elif m in "zlib".split(): ui.error(_("(is your Python install correct?)\n")) - except IOError as inst: - if util.safehasattr(inst, "code"): + except (IOError, OSError) as inst: + if util.safehasattr(inst, "code"): # HTTPError ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst)) - elif util.safehasattr(inst, "reason"): + elif util.safehasattr(inst, "reason"): # URLError or SSLError try: # usually it is in the form (errno, strerror) reason = inst.reason.args[1] except (AttributeError, IndexError): @@ -247,22 +249,15 @@ elif (util.safehasattr(inst, "args") and inst.args and inst.args[0] == errno.EPIPE): pass - elif getattr(inst, "strerror", None): - if getattr(inst, "filename", None): - ui.error(_("abort: %s: %s\n") % ( + elif getattr(inst, "strerror", None): # common IOError or OSError + if getattr(inst, "filename", None) is not None: + ui.error(_("abort: %s: '%s'\n") % ( encoding.strtolocal(inst.strerror), stringutil.forcebytestr(inst.filename))) else: ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror)) - else: + else: # suspicious IOError raise - except OSError as inst: - if getattr(inst, "filename", None) is not None: - ui.error(_("abort: %s: '%s'\n") % ( - encoding.strtolocal(inst.strerror), - stringutil.forcebytestr(inst.filename))) - else: - ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror)) except MemoryError: ui.error(_("abort: out of memory\n")) except SystemExit as inst: @@ -673,19 +668,11 @@ l = revrange(repo, revs) if not l: - first = second = None - elif l.isascending(): - first = l.min() - second = l.max() - elif 
l.isdescending(): - first = l.max() - second = l.min() - else: - first = l.first() - second = l.last() + raise error.Abort(_('empty revision range')) - if first is None: - raise error.Abort(_('empty revision range')) + first = l.first() + second = l.last() + if (first == second and len(revs) >= 2 and not all(revrange(repo, [r]) for r in revs)): raise error.Abort(_('empty revision on one side of range')) @@ -740,6 +727,53 @@ return [] return parents +def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None): + """Return a function that produced paths for presenting to the user. + + The returned function takes a repo-relative path and produces a path + that can be presented in the UI. + + Depending on the value of ui.relative-paths, either a repo-relative or + cwd-relative path will be produced. + + legacyrelativevalue is the value to use if ui.relative-paths=legacy + + If forcerelativevalue is not None, then that value will be used regardless + of what ui.relative-paths is set to. + """ + if forcerelativevalue is not None: + relative = forcerelativevalue + else: + config = repo.ui.config('ui', 'relative-paths') + if config == 'legacy': + relative = legacyrelativevalue + else: + relative = stringutil.parsebool(config) + if relative is None: + raise error.ConfigError( + _("ui.relative-paths is not a boolean ('%s')") % config) + + if relative: + cwd = repo.getcwd() + pathto = repo.pathto + return lambda f: pathto(f, cwd) + elif repo.ui.configbool('ui', 'slash'): + return lambda f: f + else: + return util.localpath + +def subdiruipathfn(subpath, uipathfn): + '''Create a new uipathfn that treats the file as relative to subpath.''' + return lambda f: uipathfn(posixpath.join(subpath, f)) + +def anypats(pats, opts): + '''Checks if any patterns, including --include and --exclude were given. + + Some commands (e.g. addremove) use this condition for deciding whether to + print absolute or relative paths. 
+ ''' + return bool(pats or opts.get('include') or opts.get('exclude')) + def expandpats(pats): '''Expand bare globs when running on windows. On posix we assume it already has already been done by sh.''' @@ -764,15 +798,14 @@ '''Return a matcher and the patterns that were used. The matcher will warn about bad matches, unless an alternate badfn callback is provided.''' - if pats == ("",): - pats = [] if opts is None: opts = {} if not globbed and default == 'relpath': pats = expandpats(pats or []) + uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True) def bad(f, msg): - ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg)) + ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg)) if badfn is None: badfn = bad @@ -791,11 +824,11 @@ def matchall(repo): '''Return a matcher that will efficiently match everything.''' - return matchmod.always(repo.root, repo.getcwd()) + return matchmod.always() def matchfiles(repo, files, badfn=None): '''Return a matcher that will efficiently match exactly these files.''' - return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn) + return matchmod.exact(files, badfn=badfn) def parsefollowlinespattern(repo, rev, pat, msg): """Return a file name from `pat` pattern suitable for usage in followlines @@ -820,26 +853,26 @@ return None return vfs.vfs(repo.wvfs.join(origbackuppath)) -def origpath(ui, repo, filepath): - '''customize where .orig files are created +def backuppath(ui, repo, filepath): + '''customize where working copy backup files (.orig files) are created Fetch user defined path from config file: [ui] origbackuppath = <path> Fall back to default (filepath with .orig suffix) if not specified + + filepath is repo-relative + + Returns an absolute path ''' origvfs = getorigvfs(ui, repo) if origvfs is None: - return filepath + ".orig" + return repo.wjoin(filepath + ".orig") - # Convert filepath from an absolute path into a path inside the repo. 
- filepathfromroot = util.normpath(os.path.relpath(filepath, - start=repo.root)) - - origbackupdir = origvfs.dirname(filepathfromroot) + origbackupdir = origvfs.dirname(filepath) if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir): ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir)) # Remove any files that conflict with the backup file's path - for f in reversed(list(util.finddirs(filepathfromroot))): + for f in reversed(list(util.finddirs(filepath))): if origvfs.isfileorlink(f): ui.note(_('removing conflicting file: %s\n') % origvfs.join(f)) @@ -848,12 +881,12 @@ origvfs.makedirs(origbackupdir) - if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot): + if origvfs.isdir(filepath) and not origvfs.islink(filepath): ui.note(_('removing conflicting directory: %s\n') - % origvfs.join(filepathfromroot)) - origvfs.rmtree(filepathfromroot, forcibly=True) + % origvfs.join(filepath)) + origvfs.rmtree(filepath, forcibly=True) - return origvfs.join(filepathfromroot) + return origvfs.join(filepath) class _containsnode(object): """proxy __contains__(node) to container.__contains__ which accepts revs""" @@ -984,6 +1017,7 @@ for phase, nodes in toadvance.items(): phases.advanceboundary(repo, tr, phase, nodes) + mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived') # Obsolete or strip nodes if obsolete.isenabled(repo, obsolete.createmarkersopt): # If a node is already obsoleted, and we want to obsolete it @@ -1001,6 +1035,17 @@ if rels: obsolete.createmarkers(repo, rels, operation=operation, metadata=metadata) + elif phases.supportinternal(repo) and mayusearchived: + # this assume we do not have "unstable" nodes above the cleaned ones + allreplaced = set() + for ns in replacements.keys(): + allreplaced.update(ns) + if backup: + from . 
import repair # avoid import cycle + node = min(allreplaced, key=repo.changelog.rev) + repair.backupbundle(repo, allreplaced, allreplaced, node, + operation) + phases.retractboundary(repo, tr, phases.archived, allreplaced) else: from . import repair # avoid import cycle tostrip = list(n for ns in replacements for n in ns) @@ -1008,7 +1053,7 @@ repair.delayedstrip(repo.ui, repo, tostrip, operation, backup=backup) -def addremove(repo, matcher, prefix, opts=None): +def addremove(repo, matcher, prefix, uipathfn, opts=None): if opts is None: opts = {} m = matcher @@ -1022,19 +1067,20 @@ similarity /= 100.0 ret = 0 - join = lambda f: os.path.join(prefix, f) wctx = repo[None] for subpath in sorted(wctx.substate): submatch = matchmod.subdirmatcher(subpath, m) if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()): sub = wctx.sub(subpath) + subprefix = repo.wvfs.reljoin(prefix, subpath) + subuipathfn = subdiruipathfn(subpath, uipathfn) try: - if sub.addremove(submatch, prefix, opts): + if sub.addremove(submatch, subprefix, subuipathfn, opts): ret = 1 except error.LookupError: repo.ui.status(_("skipping missing subrepository: %s\n") - % join(subpath)) + % uipathfn(subpath)) rejected = [] def badfn(f, msg): @@ -1052,15 +1098,15 @@ for abs in sorted(toprint): if repo.ui.verbose or not m.exact(abs): if abs in unknownset: - status = _('adding %s\n') % m.uipath(abs) + status = _('adding %s\n') % uipathfn(abs) label = 'ui.addremove.added' else: - status = _('removing %s\n') % m.uipath(abs) + status = _('removing %s\n') % uipathfn(abs) label = 'ui.addremove.removed' repo.ui.status(status, label=label) renames = _findrenames(repo, m, added + unknown, removed + deleted, - similarity) + similarity, uipathfn) if not dry_run: _markchanges(repo, unknown + forgotten, deleted, renames) @@ -1089,8 +1135,12 @@ status = _('removing %s\n') % abs repo.ui.status(status) + # TODO: We should probably have the caller pass in uipathfn and apply it to + # the messages above too. 
legacyrelativevalue=True is consistent with how + # it used to work. + uipathfn = getuipathfn(repo, legacyrelativevalue=True) renames = _findrenames(repo, m, added + unknown, removed + deleted, - similarity) + similarity, uipathfn) _markchanges(repo, unknown + forgotten, deleted, renames) @@ -1129,7 +1179,7 @@ return added, unknown, deleted, removed, forgotten -def _findrenames(repo, matcher, added, removed, similarity): +def _findrenames(repo, matcher, added, removed, similarity, uipathfn): '''Find renames from removed files to added ones.''' renames = {} if similarity > 0: @@ -1139,7 +1189,7 @@ or not matcher.exact(new)): repo.ui.status(_('recording removal of %s as rename to %s ' '(%d%% similar)\n') % - (matcher.rel(old), matcher.rel(new), + (uipathfn(old), uipathfn(new), score * 100)) renames[new] = old return renames @@ -1154,6 +1204,37 @@ for new, old in renames.iteritems(): wctx.copy(old, new) +def getrenamedfn(repo, endrev=None): + rcache = {} + if endrev is None: + endrev = len(repo) + + def getrenamed(fn, rev): + '''looks up all renames for a file (up to endrev) the first + time the file is given. It indexes on the changerev and only + parses the manifest if linkrev != changerev. + Returns rename info for fn at changerev rev.''' + if fn not in rcache: + rcache[fn] = {} + fl = repo.file(fn) + for i in fl: + lr = fl.linkrev(i) + renamed = fl.renamed(fl.node(i)) + rcache[fn][lr] = renamed and renamed[0] + if lr >= endrev: + break + if rev in rcache[fn]: + return rcache[fn][rev] + + # If linkrev != rev (i.e. rev not found in rcache) fallback to + # filectx logic. + try: + return repo[rev][fn].copysource() + except error.LookupError: + return None + + return getrenamed + def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None): """Update the dirstate to reflect the intent of copying src to dst. For different reasons it might not end with dst being marked as copied from src. 
@@ -1173,6 +1254,49 @@ elif not dryrun: wctx.copy(origsrc, dst) +def movedirstate(repo, newctx, match=None): + """Move the dirstate to newctx and adjust it as necessary. + + A matcher can be provided as an optimization. It is probably a bug to pass + a matcher that doesn't match all the differences between the parent of the + working copy and newctx. + """ + oldctx = repo['.'] + ds = repo.dirstate + ds.setparents(newctx.node(), nullid) + copies = dict(ds.copies()) + s = newctx.status(oldctx, match=match) + for f in s.modified: + if ds[f] == 'r': + # modified + removed -> removed + continue + ds.normallookup(f) + + for f in s.added: + if ds[f] == 'r': + # added + removed -> unknown + ds.drop(f) + elif ds[f] != 'a': + ds.add(f) + + for f in s.removed: + if ds[f] == 'a': + # removed + added -> normal + ds.normallookup(f) + elif ds[f] != 'r': + ds.remove(f) + + # Merge old parent and old working dir copies + oldcopies = copiesmod.pathcopies(newctx, oldctx, match) + oldcopies.update(copies) + copies = dict((dst, oldcopies.get(src, src)) + for dst, src in oldcopies.iteritems()) + # Adjust the dirstate copies + for dst, src in copies.iteritems(): + if (src not in newctx or dst in newctx or ds[dst] != 'a'): + src = None + ds.copy(src, dst) + def writerequires(opener, requirements): with opener('requires', 'w', atomictemp=True) as fp: for r in sorted(requirements):
--- a/mercurial/setdiscovery.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/setdiscovery.py Tue Mar 19 16:36:59 2019 +0300 @@ -92,69 +92,6 @@ dist.setdefault(p, d + 1) visit.append(p) -def _takequicksample(repo, headrevs, revs, size): - """takes a quick sample of size <size> - - It is meant for initial sampling and focuses on querying heads and close - ancestors of heads. - - :dag: a dag object - :headrevs: set of head revisions in local DAG to consider - :revs: set of revs to discover - :size: the maximum size of the sample""" - if len(revs) <= size: - return list(revs) - sample = set(repo.revs('heads(%ld)', revs)) - - if len(sample) >= size: - return _limitsample(sample, size) - - _updatesample(None, headrevs, sample, repo.changelog.parentrevs, - quicksamplesize=size) - return sample - -def _takefullsample(repo, headrevs, revs, size): - if len(revs) <= size: - return list(revs) - sample = set(repo.revs('heads(%ld)', revs)) - - # update from heads - revsheads = set(repo.revs('heads(%ld)', revs)) - _updatesample(revs, revsheads, sample, repo.changelog.parentrevs) - - # update from roots - revsroots = set(repo.revs('roots(%ld)', revs)) - - # _updatesample() essentially does interaction over revisions to look up - # their children. This lookup is expensive and doing it in a loop is - # quadratic. We precompute the children for all relevant revisions and - # make the lookup in _updatesample() a simple dict lookup. - # - # Because this function can be called multiple times during discovery, we - # may still perform redundant work and there is room to optimize this by - # keeping a persistent cache of children across invocations. - children = {} - - parentrevs = repo.changelog.parentrevs - for rev in repo.changelog.revs(start=min(revsroots)): - # Always ensure revision has an entry so we don't need to worry about - # missing keys. 
- children.setdefault(rev, []) - - for prev in parentrevs(rev): - if prev == nullrev: - continue - - children.setdefault(prev, []).append(rev) - - _updatesample(revs, revsroots, sample, children.__getitem__) - assert sample - sample = _limitsample(sample, size) - if len(sample) < size: - more = size - len(sample) - sample.update(random.sample(list(revs - sample), more)) - return sample - def _limitsample(sample, desiredlen): """return a random subset of sample of at most desiredlen item""" if len(sample) > desiredlen: @@ -179,6 +116,7 @@ self._common = repo.changelog.incrementalmissingrevs() self._undecided = None self.missing = set() + self._childrenmap = None def addcommons(self, commons): """registrer nodes known as common""" @@ -228,6 +166,87 @@ # common.bases and all its ancestors return self._common.basesheads() + def _parentsgetter(self): + getrev = self._repo.changelog.index.__getitem__ + def getparents(r): + return getrev(r)[5:7] + return getparents + + def _childrengetter(self): + + if self._childrenmap is not None: + # During discovery, the `undecided` set keep shrinking. + # Therefore, the map computed for an iteration N will be + # valid for iteration N+1. Instead of computing the same + # data over and over we cached it the first time. + return self._childrenmap.__getitem__ + + # _updatesample() essentially does interaction over revisions to look + # up their children. This lookup is expensive and doing it in a loop is + # quadratic. We precompute the children for all relevant revisions and + # make the lookup in _updatesample() a simple dict lookup. + self._childrenmap = children = {} + + parentrevs = self._parentsgetter() + revs = self.undecided + + for rev in sorted(revs): + # Always ensure revision has an entry so we don't need to worry + # about missing keys. 
+ children[rev] = [] + for prev in parentrevs(rev): + if prev == nullrev: + continue + c = children.get(prev) + if c is not None: + c.append(rev) + return children.__getitem__ + + def takequicksample(self, headrevs, size): + """takes a quick sample of size <size> + + It is meant for initial sampling and focuses on querying heads and close + ancestors of heads. + + :headrevs: set of head revisions in local DAG to consider + :size: the maximum size of the sample""" + revs = self.undecided + if len(revs) <= size: + return list(revs) + sample = set(self._repo.revs('heads(%ld)', revs)) + + if len(sample) >= size: + return _limitsample(sample, size) + + _updatesample(None, headrevs, sample, self._parentsgetter(), + quicksamplesize=size) + return sample + + def takefullsample(self, headrevs, size): + revs = self.undecided + if len(revs) <= size: + return list(revs) + repo = self._repo + sample = set(repo.revs('heads(%ld)', revs)) + parentrevs = self._parentsgetter() + + # update from heads + revsheads = sample.copy() + _updatesample(revs, revsheads, sample, parentrevs) + + # update from roots + revsroots = set(repo.revs('roots(%ld)', revs)) + + childrenrevs = self._childrengetter() + + _updatesample(revs, revsroots, sample, childrenrevs) + assert sample + sample = _limitsample(sample, size) + if len(sample) < size: + more = size - len(sample) + sample.update(random.sample(list(revs - sample), more)) + return sample + def findcommonheads(ui, local, remote, initialsamplesize=100, fullsamplesize=200, @@ -272,18 +291,18 @@ # compatibility reasons) ui.status(_("searching for changes\n")) - srvheads = [] + knownsrvheads = [] # revnos of remote heads that are known locally for node in srvheadhashes: if node == nullid: continue try: - srvheads.append(clrev(node)) + knownsrvheads.append(clrev(node)) # Catches unknown and filtered nodes. 
except error.LookupError: continue - if len(srvheads) == len(srvheadhashes): + if len(knownsrvheads) == len(srvheadhashes): ui.debug("all remote heads known locally\n") return srvheadhashes, False, srvheadhashes @@ -297,7 +316,7 @@ disco = partialdiscovery(local, ownheads) # treat remote heads (and maybe own heads) as a first implicit sample # response - disco.addcommons(srvheads) + disco.addcommons(knownsrvheads) disco.addinfo(zip(sample, yesno)) full = False @@ -309,14 +328,14 @@ ui.note(_("sampling from both directions\n")) else: ui.debug("taking initial sample\n") - samplefunc = _takefullsample + samplefunc = disco.takefullsample targetsize = fullsamplesize else: # use even cheaper initial sample ui.debug("taking quick initial sample\n") - samplefunc = _takequicksample + samplefunc = disco.takequicksample targetsize = initialsamplesize - sample = samplefunc(local, ownheads, disco.undecided, targetsize) + sample = samplefunc(ownheads, targetsize) roundtrips += 1 progress.update(roundtrips) @@ -340,7 +359,7 @@ ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed)) msg = ('found %d common and %d unknown server heads,' ' %d roundtrips in %.4fs\n') - missing = set(result) - set(srvheads) + missing = set(result) - set(knownsrvheads) ui.log('discovery', msg, len(result), len(missing), roundtrips, elapsed)
--- a/mercurial/simplemerge.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/simplemerge.py Tue Mar 19 16:36:59 2019 +0300 @@ -289,15 +289,15 @@ # find matches at the front ii = 0 - while ii < alen and ii < blen and \ - self.a[a1 + ii] == self.b[b1 + ii]: + while (ii < alen and ii < blen and + self.a[a1 + ii] == self.b[b1 + ii]): ii += 1 startmatches = ii # find matches at the end ii = 0 - while ii < alen and ii < blen and \ - self.a[a2 - ii - 1] == self.b[b2 - ii - 1]: + while (ii < alen and ii < blen and + self.a[a2 - ii - 1] == self.b[b2 - ii - 1]): ii += 1 endmatches = ii @@ -350,8 +350,8 @@ aend = asub + intlen bend = bsub + intlen - assert self.base[intbase:intend] == self.a[asub:aend], \ - (self.base[intbase:intend], self.a[asub:aend]) + assert self.base[intbase:intend] == self.a[asub:aend], ( + (self.base[intbase:intend], self.a[asub:aend])) assert self.base[intbase:intend] == self.b[bsub:bend]
--- a/mercurial/sparse.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/sparse.py Tue Mar 19 16:36:59 2019 +0300 @@ -264,7 +264,7 @@ """Returns a matcher that returns true for any of the forced includes before testing against the actual matcher.""" kindpats = [('path', include, '') for include in includes] - includematcher = matchmod.includematcher('', '', kindpats) + includematcher = matchmod.includematcher('', kindpats) return matchmod.unionmatcher([includematcher, matcher]) def matcher(repo, revs=None, includetemp=True): @@ -277,7 +277,7 @@ """ # If sparse isn't enabled, sparse matcher matches everything. if not enabled: - return matchmod.always(repo.root, '') + return matchmod.always() if not revs or revs == [None]: revs = [repo.changelog.rev(node) @@ -305,7 +305,7 @@ pass if not matchers: - result = matchmod.always(repo.root, '') + result = matchmod.always() elif len(matchers) == 1: result = matchers[0] else: @@ -336,7 +336,7 @@ if branchmerge: # If we're merging, use the wctx filter, since we're merging into # the wctx. - sparsematch = matcher(repo, [wctx.parents()[0].rev()]) + sparsematch = matcher(repo, [wctx.p1().rev()]) else: # If we're updating, use the target context's filter, since we're # moving to the target context. @@ -643,8 +643,8 @@ for kindpat in pats: kind, pat = matchmod._patsplit(kindpat, None) if kind in matchmod.cwdrelativepatternkinds or kind is None: - ap = (kind + ':' if kind else '') +\ - pathutil.canonpath(root, cwd, pat) + ap = ((kind + ':' if kind else '') + + pathutil.canonpath(root, cwd, pat)) abspats.append(ap) else: abspats.append(kindpat)
--- a/mercurial/sslutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/sslutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -430,6 +430,7 @@ 'error)\n')) except ssl.SSLError: pass + # Try to print more helpful error messages for known failures. if util.safehasattr(e, 'reason'): # This error occurs when the client and server don't share a @@ -437,7 +438,7 @@ # outright. Hopefully the reason for this error is that we require # TLS 1.1+ and the server only supports TLS 1.0. Whatever the # reason, try to emit an actionable warning. - if e.reason == 'UNSUPPORTED_PROTOCOL': + if e.reason == r'UNSUPPORTED_PROTOCOL': # We attempted TLS 1.0+. if settings['protocolui'] == 'tls1.0': # We support more than just TLS 1.0+. If this happens, @@ -453,7 +454,7 @@ 'server; see ' 'https://mercurial-scm.org/wiki/SecureConnections ' 'for more info)\n') % ( - serverhostname, + pycompat.bytesurl(serverhostname), ', '.join(sorted(supportedprotocols)))) else: ui.warn(_( @@ -462,7 +463,8 @@ 'supports TLS 1.0 because it has known security ' 'vulnerabilities; see ' 'https://mercurial-scm.org/wiki/SecureConnections ' - 'for more info)\n') % serverhostname) + 'for more info)\n') % + pycompat.bytesurl(serverhostname)) else: # We attempted TLS 1.1+. We can only get here if the client # supports the configured protocol. 
So the likely reason is @@ -472,19 +474,20 @@ '(could not negotiate a common security protocol (%s+) ' 'with %s; the likely cause is Mercurial is configured ' 'to be more secure than the server can support)\n') % ( - settings['protocolui'], serverhostname)) + settings['protocolui'], + pycompat.bytesurl(serverhostname))) ui.warn(_('(consider contacting the operator of this ' 'server and ask them to support modern TLS ' 'protocol versions; or, set ' 'hostsecurity.%s:minimumprotocol=tls1.0 to allow ' 'use of legacy, less secure protocols when ' 'communicating with this server)\n') % - serverhostname) + pycompat.bytesurl(serverhostname)) ui.warn(_( '(see https://mercurial-scm.org/wiki/SecureConnections ' 'for more info)\n')) - elif (e.reason == 'CERTIFICATE_VERIFY_FAILED' and + elif (e.reason == r'CERTIFICATE_VERIFY_FAILED' and pycompat.iswindows): ui.warn(_('(the full certificate chain may not be available '
--- a/mercurial/statichttprepo.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/statichttprepo.py Tue Mar 19 16:36:59 2019 +0300 @@ -13,12 +13,14 @@ from .i18n import _ from . import ( + branchmap, changelog, error, localrepo, manifest, namespaces, pathutil, + pycompat, url, util, vfs as vfsmod, @@ -44,12 +46,12 @@ def seek(self, pos): self.pos = pos def read(self, bytes=None): - req = urlreq.request(self.url) + req = urlreq.request(pycompat.strurl(self.url)) end = '' if bytes: end = self.pos + bytes - 1 if self.pos or end: - req.add_header('Range', 'bytes=%d-%s' % (self.pos, end)) + req.add_header(r'Range', r'bytes=%d-%s' % (self.pos, end)) try: f = self.opener.open(req) @@ -59,7 +61,7 @@ num = inst.code == 404 and errno.ENOENT or None raise IOError(num, inst) except urlerr.urlerror as inst: - raise IOError(None, inst.reason[1]) + raise IOError(None, inst.reason) if code == 200: # HTTPRangeHandler does nothing if remote does not support @@ -192,7 +194,7 @@ self.changelog = changelog.changelog(self.svfs) self._tags = None self.nodetagscache = None - self._branchcaches = {} + self._branchcaches = branchmap.BranchMapCache() self._revbranchcache = None self.encodepats = None self.decodepats = None
--- a/mercurial/statprof.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/statprof.py Tue Mar 19 16:36:59 2019 +0300 @@ -203,7 +203,7 @@ class CodeSite(object): cache = {} - __slots__ = (u'path', u'lineno', u'function', u'source') + __slots__ = (r'path', r'lineno', r'function', r'source') def __init__(self, path, lineno, function): assert isinstance(path, bytes) @@ -263,7 +263,7 @@ return r'%s:%s' % (self.filename(), self.function) class Sample(object): - __slots__ = (u'stack', u'time') + __slots__ = (r'stack', r'time') def __init__(self, stack, time): self.stack = stack @@ -816,9 +816,6 @@ id2stack[-1].update(parent=parent) return myid - def endswith(a, b): - return list(a)[-len(b):] == list(b) - # The sampling profiler can sample multiple times without # advancing the clock, potentially causing the Chrome trace viewer # to render single-pixel columns that we cannot zoom in on. We @@ -858,9 +855,6 @@ # events given only stack snapshots. for sample in data.samples: - tos = sample.stack[0] - name = tos.function - path = simplifypath(tos.path) stack = tuple((('%s:%d' % (simplifypath(frame.path), frame.lineno), frame.function) for frame in sample.stack)) qstack = collections.deque(stack)
--- a/mercurial/store.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/store.py Tue Mar 19 16:36:59 2019 +0300 @@ -8,6 +8,7 @@ from __future__ import absolute_import import errno +import functools import hashlib import os import stat @@ -23,6 +24,9 @@ ) parsers = policy.importmod(r'parsers') +# how much bytes should be read from fncache in one read +# It is done to prevent loading large fncache files into memory +fncache_chunksize = 10 ** 6 def _matchtrackedpath(path, matcher): """parses a fncache entry and returns whether the entry is tracking a path @@ -463,14 +467,33 @@ # skip nonexistent file self.entries = set() return - self.entries = set(decodedir(fp.read()).splitlines()) + + self.entries = set() + chunk = b'' + for c in iter(functools.partial(fp.read, fncache_chunksize), b''): + chunk += c + try: + p = chunk.rindex(b'\n') + self.entries.update(decodedir(chunk[:p + 1]).splitlines()) + chunk = chunk[p + 1:] + except ValueError: + # substring '\n' not found, maybe the entry is bigger than the + # chunksize, so let's keep iterating + pass + + if chunk: + raise error.Abort(_("fncache does not ends with a newline")) + self._checkentries(fp) + fp.close() + + def _checkentries(self, fp): + """ make sure there is no empty string in entries """ if '' in self.entries: fp.seek(0) for n, line in enumerate(util.iterfile(fp)): if not line.rstrip('\n'): t = _('invalid entry in fncache, line %d') % (n + 1) raise error.Abort(t) - fp.close() def write(self, tr): if self._dirty:
--- a/mercurial/streamclone.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/streamclone.py Tue Mar 19 16:36:59 2019 +0300 @@ -13,7 +13,6 @@ from .i18n import _ from . import ( - branchmap, cacheutil, error, narrowspec, @@ -174,7 +173,7 @@ repo._writerequirements() if rbranchmap: - branchmap.replacecache(repo, rbranchmap) + repo._branchcaches.replace(repo, rbranchmap) repo.invalidate()
--- a/mercurial/subrepo.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/subrepo.py Tue Mar 19 16:36:59 2019 +0300 @@ -11,7 +11,6 @@ import errno import hashlib import os -import posixpath import re import stat import subprocess @@ -288,10 +287,10 @@ """ raise NotImplementedError - def add(self, ui, match, prefix, explicitonly, **opts): + def add(self, ui, match, prefix, uipathfn, explicitonly, **opts): return [] - def addremove(self, matcher, prefix, opts): + def addremove(self, matcher, prefix, uipathfn, opts): self.ui.warn("%s: %s" % (prefix, _("addremove is not supported"))) return 1 @@ -324,9 +323,9 @@ def matchfileset(self, expr, badfn=None): """Resolve the fileset expression for this repo""" - return matchmod.nevermatcher(self.wvfs.base, '', badfn=badfn) + return matchmod.never(badfn=badfn) - def printfiles(self, ui, m, fm, fmt, subrepos): + def printfiles(self, ui, m, uipathfn, fm, fmt, subrepos): """handle the files command for this subrepo""" return 1 @@ -344,8 +343,8 @@ flags = self.fileflags(name) mode = 'x' in flags and 0o755 or 0o644 symlink = 'l' in flags - archiver.addfile(prefix + self._path + '/' + name, - mode, symlink, self.filedata(name, decode)) + archiver.addfile(prefix + name, mode, symlink, + self.filedata(name, decode)) progress.increment() progress.complete() return total @@ -356,10 +355,10 @@ matched by the match function ''' - def forget(self, match, prefix, dryrun, interactive): + def forget(self, match, prefix, uipathfn, dryrun, interactive): return ([], []) - def removefiles(self, matcher, prefix, after, force, subrepos, + def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos, dryrun, warnings): """remove the matched files from the subrepository and the filesystem, possibly by force and/or after the file has been removed from the @@ -370,8 +369,8 @@ return 1 def revert(self, substate, *pats, **opts): - self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \ - % (substate[0], substate[2])) + 
self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') + % (substate[0], substate[2])) return [] def shortid(self, revid): @@ -517,20 +516,18 @@ self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines))) @annotatesubrepoerror - def add(self, ui, match, prefix, explicitonly, **opts): - return cmdutil.add(ui, self._repo, match, - self.wvfs.reljoin(prefix, self._path), + def add(self, ui, match, prefix, uipathfn, explicitonly, **opts): + return cmdutil.add(ui, self._repo, match, prefix, uipathfn, explicitonly, **opts) @annotatesubrepoerror - def addremove(self, m, prefix, opts): + def addremove(self, m, prefix, uipathfn, opts): # In the same way as sub directories are processed, once in a subrepo, # always entry any of its subrepos. Don't corrupt the options that will # be used to process sibling subrepos however. opts = copy.copy(opts) opts['subrepos'] = True - return scmutil.addremove(self._repo, m, - self.wvfs.reljoin(prefix, self._path), opts) + return scmutil.addremove(self._repo, m, prefix, uipathfn, opts) @annotatesubrepoerror def cat(self, match, fm, fntemplate, prefix, **opts): @@ -559,10 +556,9 @@ # in hex format if node2 is not None: node2 = node.bin(node2) - logcmdutil.diffordiffstat(ui, self._repo, diffopts, - node1, node2, match, - prefix=posixpath.join(prefix, self._path), - listsubrepos=True, **opts) + logcmdutil.diffordiffstat(ui, self._repo, diffopts, node1, node2, + match, prefix=prefix, listsubrepos=True, + **opts) except error.RepoLookupError as inst: self.ui.warn(_('warning: error "%s" in subrepository "%s"\n') % (inst, subrelpath(self))) @@ -581,7 +577,8 @@ for subpath in ctx.substate: s = subrepo(ctx, subpath, True) submatch = matchmod.subdirmatcher(subpath, match) - total += s.archive(archiver, prefix + self._path + '/', submatch, + subprefix = prefix + subpath + '/' + total += s.archive(archiver, subprefix, submatch, decode) return total @@ -700,7 +697,7 @@ ctx = urepo[revision] if ctx.hidden(): urepo.ui.warn( - _('revision %s in 
subrepository "%s" is hidden\n') \ + _('revision %s in subrepository "%s" is hidden\n') % (revision[0:12], self._path)) repo = urepo hg.updaterepo(repo, revision, overwrite) @@ -798,7 +795,7 @@ return ctx.flags(name) @annotatesubrepoerror - def printfiles(self, ui, m, fm, fmt, subrepos): + def printfiles(self, ui, m, uipathfn, fm, fmt, subrepos): # If the parent context is a workingctx, use the workingctx here for # consistency. if self._ctx.rev() is None: @@ -806,16 +803,15 @@ else: rev = self._state[1] ctx = self._repo[rev] - return cmdutil.files(ui, ctx, m, fm, fmt, subrepos) + return cmdutil.files(ui, ctx, m, uipathfn, fm, fmt, subrepos) @annotatesubrepoerror def matchfileset(self, expr, badfn=None): - repo = self._repo if self._ctx.rev() is None: - ctx = repo[None] + ctx = self._repo[None] else: rev = self._state[1] - ctx = repo[rev] + ctx = self._repo[rev] matchers = [ctx.matchfileset(expr, badfn=badfn)] @@ -824,8 +820,7 @@ try: sm = sub.matchfileset(expr, badfn=badfn) - pm = matchmod.prefixdirmatcher(repo.root, repo.getcwd(), - subpath, sm, badfn=badfn) + pm = matchmod.prefixdirmatcher(subpath, sm, badfn=badfn) matchers.append(pm) except error.LookupError: self.ui.status(_("skipping missing subrepository: %s\n") @@ -839,16 +834,14 @@ return ctx.walk(match) @annotatesubrepoerror - def forget(self, match, prefix, dryrun, interactive): - return cmdutil.forget(self.ui, self._repo, match, - self.wvfs.reljoin(prefix, self._path), + def forget(self, match, prefix, uipathfn, dryrun, interactive): + return cmdutil.forget(self.ui, self._repo, match, prefix, uipathfn, True, dryrun=dryrun, interactive=interactive) @annotatesubrepoerror - def removefiles(self, matcher, prefix, after, force, subrepos, + def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos, dryrun, warnings): - return cmdutil.remove(self.ui, self._repo, matcher, - self.wvfs.reljoin(prefix, self._path), + return cmdutil.remove(self.ui, self._repo, matcher, prefix, uipathfn, after, force, 
subrepos, dryrun) @annotatesubrepoerror @@ -971,9 +964,8 @@ p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd), bufsize=-1, close_fds=procutil.closefds, stdout=subprocess.PIPE, stderr=subprocess.PIPE, - universal_newlines=True, env=procutil.tonativeenv(env), **extrakw) - stdout, stderr = p.communicate() + stdout, stderr = map(util.fromnativeeol, p.communicate()) stderr = stderr.strip() if not failok: if p.returncode: @@ -1000,13 +992,14 @@ # both. We used to store the working directory one. output, err = self._svncommand(['info', '--xml']) doc = xml.dom.minidom.parseString(output) - entries = doc.getElementsByTagName('entry') + entries = doc.getElementsByTagName(r'entry') lastrev, rev = '0', '0' if entries: - rev = str(entries[0].getAttribute('revision')) or '0' - commits = entries[0].getElementsByTagName('commit') + rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0' + commits = entries[0].getElementsByTagName(r'commit') if commits: - lastrev = str(commits[0].getAttribute('revision')) or '0' + lastrev = pycompat.bytestr( + commits[0].getAttribute(r'revision')) or '0' return (lastrev, rev) def _wcrev(self): @@ -1021,19 +1014,19 @@ output, err = self._svncommand(['status', '--xml']) externals, changes, missing = [], [], [] doc = xml.dom.minidom.parseString(output) - for e in doc.getElementsByTagName('entry'): - s = e.getElementsByTagName('wc-status') + for e in doc.getElementsByTagName(r'entry'): + s = e.getElementsByTagName(r'wc-status') if not s: continue - item = s[0].getAttribute('item') - props = s[0].getAttribute('props') - path = e.getAttribute('path') - if item == 'external': + item = s[0].getAttribute(r'item') + props = s[0].getAttribute(r'props') + path = e.getAttribute(r'path').encode('utf8') + if item == r'external': externals.append(path) - elif item == 'missing': + elif item == r'missing': missing.append(path) - if (item not in ('', 'normal', 'unversioned', 'external') - or props not in ('', 'none', 'normal')): + if (item 
not in (r'', r'normal', r'unversioned', r'external') + or props not in (r'', r'none', r'normal')): changes.append(path) for path in changes: for ext in externals: @@ -1154,14 +1147,14 @@ output = self._svncommand(['list', '--recursive', '--xml'])[0] doc = xml.dom.minidom.parseString(output) paths = [] - for e in doc.getElementsByTagName('entry'): - kind = pycompat.bytestr(e.getAttribute('kind')) + for e in doc.getElementsByTagName(r'entry'): + kind = pycompat.bytestr(e.getAttribute(r'kind')) if kind != 'file': continue - name = ''.join(c.data for c - in e.getElementsByTagName('name')[0].childNodes - if c.nodeType == c.TEXT_NODE) - paths.append(name.encode('utf-8')) + name = r''.join(c.data for c + in e.getElementsByTagName(r'name')[0].childNodes + if c.nodeType == c.TEXT_NODE) + paths.append(name.encode('utf8')) return paths def filedata(self, name, decode): @@ -1596,7 +1589,7 @@ return False @annotatesubrepoerror - def add(self, ui, match, prefix, explicitonly, **opts): + def add(self, ui, match, prefix, uipathfn, explicitonly, **opts): if self._gitmissing(): return [] @@ -1620,7 +1613,7 @@ if exact: command.append("-f") #should be added, even if ignored if ui.verbose or not exact: - ui.status(_('adding %s\n') % match.rel(f)) + ui.status(_('adding %s\n') % uipathfn(f)) if f in tracked: # hg prints 'adding' even if already tracked if exact: @@ -1630,7 +1623,7 @@ self._gitcommand(command + [f]) for f in rejected: - ui.warn(_("%s already tracked!\n") % match.abs(f)) + ui.warn(_("%s already tracked!\n") % uipathfn(f)) return rejected @@ -1673,14 +1666,14 @@ for info in tar: if info.isdir(): continue - if match and not match(info.name): + bname = pycompat.fsencode(info.name) + if match and not match(bname): continue if info.issym(): data = info.linkname else: data = tar.extractfile(info).read() - archiver.addfile(prefix + self._path + '/' + info.name, - info.mode, info.issym(), data) + archiver.addfile(prefix + bname, info.mode, info.issym(), data) total += 1 
progress.increment() progress.complete() @@ -1783,21 +1776,19 @@ # for Git, this also implies '-p' cmd.append('-U%d' % diffopts.context) - gitprefix = self.wvfs.reljoin(prefix, self._path) - if diffopts.noprefix: - cmd.extend(['--src-prefix=%s/' % gitprefix, - '--dst-prefix=%s/' % gitprefix]) + cmd.extend(['--src-prefix=%s/' % prefix, + '--dst-prefix=%s/' % prefix]) else: - cmd.extend(['--src-prefix=a/%s/' % gitprefix, - '--dst-prefix=b/%s/' % gitprefix]) + cmd.extend(['--src-prefix=a/%s/' % prefix, + '--dst-prefix=b/%s/' % prefix]) if diffopts.ignorews: cmd.append('--ignore-all-space') if diffopts.ignorewsamount: cmd.append('--ignore-space-change') - if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \ - and diffopts.ignoreblanklines: + if (self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) + and diffopts.ignoreblanklines): cmd.append('--ignore-blank-lines') cmd.append(node1) @@ -1823,15 +1814,15 @@ if not opts.get(r'no_backup'): status = self.status(None) names = status.modified - origvfs = scmutil.getorigvfs(self.ui, self._subparent) - if origvfs is None: - origvfs = self.wvfs for name in names: - bakname = scmutil.origpath(self.ui, self._subparent, name) + # backuppath() expects a path relative to the parent repo (the + # repo that ui.origbackuppath is relative to) + parentname = os.path.join(self._path, name) + bakname = scmutil.backuppath(self.ui, self._subparent, + parentname) self.ui.note(_('saving current version of %s as %s\n') % - (name, bakname)) - name = self.wvfs.join(name) - origvfs.rename(name, bakname) + (name, os.path.relpath(bakname))) + util.rename(self.wvfs.join(name), bakname) if not opts.get(r'dry_run'): self.get(substate, overwrite=True)
--- a/mercurial/subrepoutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/subrepoutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -145,7 +145,6 @@ promptssrc = filemerge.partextras(labels) for s, l in sorted(s1.iteritems()): - prompts = None a = sa.get(s, nullstate) ld = l # local state with possible dirty flag for compares if wctx.sub(s).dirty(): @@ -218,7 +217,6 @@ wctx.sub(s).remove() for s, r in sorted(s2.items()): - prompts = None if s in s1: continue elif s not in sa:
--- a/mercurial/tags.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/tags.py Tue Mar 19 16:36:59 2019 +0300 @@ -188,8 +188,8 @@ return alltags for head in reversed(heads): # oldest to newest - assert head in repo.changelog.nodemap, \ - "tag cache returned bogus head %s" % short(head) + assert head in repo.changelog.nodemap, ( + "tag cache returned bogus head %s" % short(head)) fnodes = _filterfnodes(tagfnode, reversed(heads)) alltags = _tagsfromfnodes(ui, repo, fnodes) @@ -536,7 +536,7 @@ date: date tuple to use if committing''' if not local: - m = matchmod.exact(repo.root, '', ['.hgtags']) + m = matchmod.exact(['.hgtags']) if any(repo.status(match=m, unknown=True, ignored=True)): raise error.Abort(_('working copy of .hgtags is changed'), hint=_('please commit .hgtags manually')) @@ -548,7 +548,7 @@ def _tag(repo, names, node, message, local, user, date, extra=None, editor=False): - if isinstance(names, str): + if isinstance(names, bytes): names = (names,) branches = repo.branchmap() @@ -610,7 +610,7 @@ if '.hgtags' not in repo.dirstate: repo[None].add(['.hgtags']) - m = matchmod.exact(repo.root, '', ['.hgtags']) + m = matchmod.exact(['.hgtags']) tagnode = repo.commit(message, user, date, extra=extra, match=m, editor=editor)
--- a/mercurial/templatefuncs.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/templatefuncs.py Tue Mar 19 16:36:59 2019 +0300 @@ -295,6 +295,39 @@ hint = _("get() expects a dict as first argument") raise error.ParseError(bytes(err), hint=hint) +@templatefunc('config(section, name[, default])', requires={'ui'}) +def config(context, mapping, args): + """Returns the requested hgrc config option as a string.""" + fn = context.resource(mapping, 'ui').config + return _config(context, mapping, args, fn, evalstring) + +@templatefunc('configbool(section, name[, default])', requires={'ui'}) +def configbool(context, mapping, args): + """Returns the requested hgrc config option as a boolean.""" + fn = context.resource(mapping, 'ui').configbool + return _config(context, mapping, args, fn, evalboolean) + +@templatefunc('configint(section, name[, default])', requires={'ui'}) +def configint(context, mapping, args): + """Returns the requested hgrc config option as an integer.""" + fn = context.resource(mapping, 'ui').configint + return _config(context, mapping, args, fn, evalinteger) + +def _config(context, mapping, args, configfn, defaultfn): + if not (2 <= len(args) <= 3): + raise error.ParseError(_("config expects two or three arguments")) + + # The config option can come from any section, though we specifically + # reserve the [templateconfig] section for dynamically defining options + # for this function without also requiring an extension. + section = evalstringliteral(context, mapping, args[0]) + name = evalstringliteral(context, mapping, args[1]) + if len(args) == 3: + default = defaultfn(context, mapping, args[2]) + return configfn(section, name, default) + else: + return configfn(section, name) + @templatefunc('if(expr, then[, else])') def if_(context, mapping, args): """Conditionally execute based on the result of
--- a/mercurial/templatekw.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/templatekw.py Tue Mar 19 16:36:59 2019 +0300 @@ -104,38 +104,6 @@ latesttags[rev] = pdate, pdist + 1, ptag return latesttags[rev] -def getrenamedfn(repo, endrev=None): - rcache = {} - if endrev is None: - endrev = len(repo) - - def getrenamed(fn, rev): - '''looks up all renames for a file (up to endrev) the first - time the file is given. It indexes on the changerev and only - parses the manifest if linkrev != changerev. - Returns rename info for fn at changerev rev.''' - if fn not in rcache: - rcache[fn] = {} - fl = repo.file(fn) - for i in fl: - lr = fl.linkrev(i) - renamed = fl.renamed(fl.node(i)) - rcache[fn][lr] = renamed and renamed[0] - if lr >= endrev: - break - if rev in rcache[fn]: - return rcache[fn][rev] - - # If linkrev != rev (i.e. rev not found in rcache) fallback to - # filectx logic. - try: - renamed = repo[rev][fn].renamed() - return renamed and renamed[0] - except error.LookupError: - return None - - return getrenamed - def getlogcolumns(): """Return a dict of log column labels""" _ = pycompat.identity # temporarily disable gettext @@ -344,7 +312,7 @@ copies = context.resource(mapping, 'revcache').get('copies') if copies is None: if 'getrenamed' not in cache: - cache['getrenamed'] = getrenamedfn(repo) + cache['getrenamed'] = scmutil.getrenamedfn(repo) copies = [] getrenamed = cache['getrenamed'] for fn in ctx.files(): @@ -554,6 +522,17 @@ return _hybrid(f, namespaces, makemap, pycompat.identity) +@templatekeyword('negrev', requires={'repo', 'ctx'}) +def shownegrev(context, mapping): + """Integer. The repository-local changeset negative revision number, + which counts in the opposite direction.""" + ctx = context.resource(mapping, 'ctx') + rev = ctx.rev() + if rev is None or rev < 0: # wdir() or nullrev? + return None + repo = context.resource(mapping, 'repo') + return rev - len(repo) + @templatekeyword('node', requires={'ctx'}) def shownode(context, mapping): """String. 
The changeset identification hash, as a 40 hexadecimal @@ -796,7 +775,7 @@ substate = ctx.substate if not substate: return compatlist(context, mapping, 'subrepo', []) - psubstate = ctx.parents()[0].substate or {} + psubstate = ctx.p1().substate or {} subrepos = [] for sub in substate: if sub not in psubstate or substate[sub] != psubstate[sub]:
--- a/mercurial/thirdparty/attr/_make.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/thirdparty/attr/_make.py Tue Mar 19 16:36:59 2019 +0300 @@ -56,7 +56,7 @@ def attr(default=NOTHING, validator=None, repr=True, cmp=True, hash=None, init=True, convert=None, metadata={}): - """ + r""" Create a new attribute on a class. .. warning:: @@ -555,7 +555,10 @@ # We cache the generated init methods for the same kinds of attributes. sha1 = hashlib.sha1() - sha1.update(repr(attrs).encode("utf-8")) + r = repr(attrs) + if not isinstance(r, bytes): + r = r.encode('utf-8') + sha1.update(r) unique_filename = "<attrs generated init {0}>".format( sha1.hexdigest() )
--- a/mercurial/thirdparty/attr/filters.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/thirdparty/attr/filters.py Tue Mar 19 16:36:59 2019 +0300 @@ -19,7 +19,7 @@ def include(*what): - """ + r""" Whitelist *what*. :param what: What to whitelist. @@ -36,7 +36,7 @@ def exclude(*what): - """ + r""" Blacklist *what*. :param what: What to blacklist.
--- a/mercurial/transaction.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/transaction.py Tue Mar 19 16:36:59 2019 +0300 @@ -89,7 +89,7 @@ except (IOError, OSError) as inst: if inst.errno != errno.ENOENT: raise - except (IOError, OSError, error.Abort) as inst: + except (IOError, OSError, error.Abort): if not c: raise @@ -101,7 +101,7 @@ for f in backupfiles: if opener.exists(f): opener.unlink(f) - except (IOError, OSError, error.Abort) as inst: + except (IOError, OSError, error.Abort): # only pure backup file remains, it is sage to ignore any error pass
--- a/mercurial/ui.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/ui.py Tue Mar 19 16:36:59 2019 +0300 @@ -58,12 +58,12 @@ statuscopies = yes # Prefer curses UIs when available. Revert to plain-text with `text`. interface = curses +# Make compatible commands emit cwd-relative paths by default. +relative-paths = yes [commands] # Grep working directory by default. grep.all-files = True -# Make `hg status` emit cwd-relative paths by default. -status.relative = yes # Refuse to perform an `hg update` that would cause a file content merge update.check = noconflict # Show conflicts information in `hg status` @@ -344,8 +344,8 @@ try: yield finally: - self._blockedtimes[key + '_blocked'] += \ - (util.timer() - starttime) * 1000 + self._blockedtimes[key + '_blocked'] += ( + (util.timer() - starttime) * 1000) @contextlib.contextmanager def uninterruptible(self): @@ -566,8 +566,6 @@ candidate = self._data(untrusted).get(s, n, None) if candidate is not None: value = candidate - section = s - name = n break if self.debugflag and not untrusted and self._reportuntrusted: @@ -1029,8 +1027,8 @@ except IOError as err: raise error.StdioError(err) finally: - self._blockedtimes['stdio_blocked'] += \ - (util.timer() - starttime) * 1000 + self._blockedtimes['stdio_blocked'] += ( + (util.timer() - starttime) * 1000) def write_err(self, *args, **opts): self._write(self._ferr, *args, **opts) @@ -1080,8 +1078,8 @@ return raise error.StdioError(err) finally: - self._blockedtimes['stdio_blocked'] += \ - (util.timer() - starttime) * 1000 + self._blockedtimes['stdio_blocked'] += ( + (util.timer() - starttime) * 1000) def _writemsg(self, dest, *args, **opts): _writemsgwith(self._write, dest, *args, **opts) @@ -1105,8 +1103,8 @@ if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF): raise error.StdioError(err) finally: - self._blockedtimes['stdio_blocked'] += \ - (util.timer() - starttime) * 1000 + self._blockedtimes['stdio_blocked'] += ( + (util.timer() - starttime) * 1000) def 
_isatty(self, fh): if self.configbool('ui', 'nontty'): @@ -1555,7 +1553,7 @@ raise EOFError return l.rstrip('\n') else: - return getpass.getpass('') + return getpass.getpass(r'') except EOFError: raise error.ResponseExpected() @@ -2053,7 +2051,11 @@ This is its own function so that extensions can change the definition of 'valid' in this case (like when pulling from a git repo into a hg one).""" - return os.path.isdir(os.path.join(path, '.hg')) + try: + return os.path.isdir(os.path.join(path, '.hg')) + # Python 2 may return TypeError. Python 3, ValueError. + except (TypeError, ValueError): + return False @property def suboptions(self):
--- a/mercurial/url.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/url.py Tue Mar 19 16:36:59 2019 +0300 @@ -58,11 +58,14 @@ return self.passwddb.add_password(realm, uri, user, passwd) def find_user_password(self, realm, authuri): + assert isinstance(realm, (type(None), str)) + assert isinstance(authuri, str) authinfo = self.passwddb.find_user_password(realm, authuri) user, passwd = authinfo + user, passwd = pycompat.bytesurl(user), pycompat.bytesurl(passwd) if user and passwd: self._writedebug(user, passwd) - return (user, passwd) + return (pycompat.strurl(user), pycompat.strurl(passwd)) if not user or not passwd: res = httpconnectionmod.readauthforuri(self.ui, authuri, user) @@ -90,7 +93,7 @@ self.passwddb.add_password(realm, authuri, user, passwd) self._writedebug(user, passwd) - return (user, passwd) + return (pycompat.strurl(user), pycompat.strurl(passwd)) def _writedebug(self, user, passwd): msg = _('http auth: user %s, password %s\n') @@ -128,9 +131,11 @@ else: self.no_list = no_list - proxyurl = bytes(proxy) - proxies = {'http': proxyurl, 'https': proxyurl} - ui.debug('proxying through %s\n' % util.hidepassword(proxyurl)) + # Keys and values need to be str because the standard library + # expects them to be. + proxyurl = str(proxy) + proxies = {r'http': proxyurl, r'https': proxyurl} + ui.debug('proxying through %s\n' % util.hidepassword(bytes(proxy))) else: proxies = {} @@ -138,7 +143,7 @@ self.ui = ui def proxy_open(self, req, proxy, type_): - host = urllibcompat.gethost(req).split(':')[0] + host = pycompat.bytesurl(urllibcompat.gethost(req)).split(':')[0] for e in self.no_list: if host == e: return None @@ -176,20 +181,20 @@ return proxyres return keepalive.HTTPConnection.getresponse(self) -# general transaction handler to support different ways to handle -# HTTPS proxying before and after Python 2.6.3. +# Large parts of this function have their origin from before Python 2.6 +# and could potentially be removed. 
def _generic_start_transaction(handler, h, req): - tunnel_host = getattr(req, '_tunnel_host', None) + tunnel_host = req._tunnel_host if tunnel_host: - if tunnel_host[:7] not in ['http://', 'https:/']: - tunnel_host = 'https://' + tunnel_host + if tunnel_host[:7] not in [r'http://', r'https:/']: + tunnel_host = r'https://' + tunnel_host new_tunnel = True else: tunnel_host = urllibcompat.getselector(req) new_tunnel = False if new_tunnel or tunnel_host == urllibcompat.getfullurl(req): # has proxy - u = util.url(tunnel_host) + u = util.url(pycompat.bytesurl(tunnel_host)) if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS h.realhostport = ':'.join([u.host, (u.port or '443')]) h.headers = req.headers.copy() @@ -202,7 +207,7 @@ def _generic_proxytunnel(self): proxyheaders = dict( [(x, self.headers[x]) for x in self.headers - if x.lower().startswith('proxy-')]) + if x.lower().startswith(r'proxy-')]) self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport) for header in proxyheaders.iteritems(): self.send('%s: %s\r\n' % header) @@ -211,9 +216,14 @@ # majority of the following code is duplicated from # httplib.HTTPConnection as there are no adequate places to # override functions to provide the needed functionality + # strict was removed in Python 3.4. + kwargs = {} + if not pycompat.ispy3: + kwargs['strict'] = self.strict + res = self.response_class(self.sock, - strict=self.strict, - method=self._method) + method=self._method, + **kwargs) while True: version, status, reason = res._read_status()
--- a/mercurial/util.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/util.py Tue Mar 19 16:36:59 2019 +0300 @@ -789,6 +789,12 @@ res)) data = dest[0:res] if res is not None else b'' + + # _writedata() uses "in" operator and is confused by memoryview because + # characters are ints on Python 3. + if isinstance(data, memoryview): + data = data.tobytes() + self._writedata(data) def write(self, res, data): @@ -1210,7 +1216,7 @@ Holds a reference to nodes on either side as well as a key-value pair for the dictionary entry. """ - __slots__ = (u'next', u'prev', u'key', u'value', u'cost') + __slots__ = (r'next', r'prev', r'key', r'value', r'cost') def __init__(self): self.next = None @@ -3205,9 +3211,9 @@ SERVERROLE = 'server' CLIENTROLE = 'client' -compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport', - (u'name', u'serverpriority', - u'clientpriority')) +compewireprotosupport = collections.namedtuple(r'compenginewireprotosupport', + (r'name', r'serverpriority', + r'clientpriority')) class compressormanager(object): """Holds registrations of various compression engines.
--- a/mercurial/utils/procutil.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/utils/procutil.py Tue Mar 19 16:36:59 2019 +0300 @@ -221,7 +221,7 @@ """ return (pycompat.safehasattr(sys, "frozen") or # new py2exe pycompat.safehasattr(sys, "importers") or # old py2exe - imp.is_frozen(u"__main__")) # tools/freeze + imp.is_frozen(r"__main__")) # tools/freeze _hgexecutable = None
--- a/mercurial/verify.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/verify.py Tue Mar 19 16:36:59 2019 +0300 @@ -51,11 +51,13 @@ self.skipflags = repo.ui.configint('verify', 'skipflags') self.warnorphanstorefiles = True - def warn(self, msg): + def _warn(self, msg): + """record a "warning" level issue""" self.ui.warn(msg + "\n") self.warnings += 1 - def err(self, linkrev, msg, filename=None): + def _err(self, linkrev, msg, filename=None): + """record a "error" level issue""" if linkrev is not None: self.badrevs.add(linkrev) linkrev = "%d" % linkrev @@ -67,15 +69,23 @@ self.ui.warn(" " + msg + "\n") self.errors += 1 - def exc(self, linkrev, msg, inst, filename=None): + def _exc(self, linkrev, msg, inst, filename=None): + """record exception raised during the verify process""" fmsg = pycompat.bytestr(inst) if not fmsg: fmsg = pycompat.byterepr(inst) - self.err(linkrev, "%s: %s" % (msg, fmsg), filename) + self._err(linkrev, "%s: %s" % (msg, fmsg), filename) + + def _checkrevlog(self, obj, name, linkrev): + """verify high level property of a revlog - def checklog(self, obj, name, linkrev): + - revlog is present, + - revlog is non-empty, + - sizes (index and data) are correct, + - revlog's format version is correct. 
+ """ if not len(obj) and (self.havecl or self.havemf): - self.err(linkrev, _("empty or missing %s") % name) + self._err(linkrev, _("empty or missing %s") % name) return d = obj.checksize() @@ -86,18 +96,37 @@ if obj.version != revlog.REVLOGV0: if not self.revlogv1: - self.warn(_("warning: `%s' uses revlog format 1") % name) + self._warn(_("warning: `%s' uses revlog format 1") % name) elif self.revlogv1: - self.warn(_("warning: `%s' uses revlog format 0") % name) + self._warn(_("warning: `%s' uses revlog format 0") % name) + + def _checkentry(self, obj, i, node, seen, linkrevs, f): + """verify a single revlog entry - def checkentry(self, obj, i, node, seen, linkrevs, f): + arguments are: + - obj: the source revlog + - i: the revision number + - node: the revision node id + - seen: nodes previously seen for this revlog + - linkrevs: [changelog-revisions] introducing "node" + - f: string label ("changelog", "manifest", or filename) + + Performs the following checks: + - linkrev points to an existing changelog revision, + - linkrev points to a changelog revision that introduces this revision, + - linkrev points to the lowest of these changesets, + - both parents exist in the revlog, + - the revision is not duplicated. + + Return the linkrev of the revision (or None for changelog's revisions). 
+ """ lr = obj.linkrev(obj.rev(node)) if lr < 0 or (self.havecl and lr not in linkrevs): if lr < 0 or lr >= len(self.repo.changelog): msg = _("rev %d points to nonexistent changeset %d") else: msg = _("rev %d points to unexpected changeset %d") - self.err(None, msg % (i, lr), f) + self._err(None, msg % (i, lr), f) if linkrevs: if f and len(linkrevs) > 1: try: @@ -106,31 +135,35 @@ if self.lrugetctx(l)[f].filenode() == node] except Exception: pass - self.warn(_(" (expected %s)") % " ".join - (map(pycompat.bytestr, linkrevs))) + self._warn(_(" (expected %s)") % " ".join + (map(pycompat.bytestr, linkrevs))) lr = None # can't be trusted try: p1, p2 = obj.parents(node) if p1 not in seen and p1 != nullid: - self.err(lr, _("unknown parent 1 %s of %s") % + self._err(lr, _("unknown parent 1 %s of %s") % (short(p1), short(node)), f) if p2 not in seen and p2 != nullid: - self.err(lr, _("unknown parent 2 %s of %s") % + self._err(lr, _("unknown parent 2 %s of %s") % (short(p2), short(node)), f) except Exception as inst: - self.exc(lr, _("checking parents of %s") % short(node), inst, f) + self._exc(lr, _("checking parents of %s") % short(node), inst, f) if node in seen: - self.err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f) + self._err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f) seen[node] = i return lr def verify(self): - repo = self.repo + """verify the content of the Mercurial repository + + This method run all verifications, displaying issues as they are found. 
+ return 1 if any error have been encountered, 0 otherwise.""" + # initial validation and generic report + repo = self.repo ui = repo.ui - if not repo.url().startswith('file:'): raise error.Abort(_("cannot verify bundle or remote repos")) @@ -141,15 +174,14 @@ ui.status(_("repository uses revlog format %d\n") % (self.revlogv1 and 1 or 0)) + # data verification mflinkrevs, filelinkrevs = self._verifychangelog() - filenodes = self._verifymanifest(mflinkrevs) del mflinkrevs - self._crosscheckfiles(filelinkrevs, filenodes) - totalfiles, filerevisions = self._verifyfiles(filenodes, filelinkrevs) + # final report ui.status(_("checked %d changesets with %d changes to %d files\n") % (len(repo.changelog), filerevisions, totalfiles)) if self.warnings: @@ -163,8 +195,24 @@ ui.warn(_("(first damaged changeset appears to be %d)\n") % min(self.badrevs)) return 1 + return 0 def _verifychangelog(self): + """verify the changelog of a repository + + The following checks are performed: + - all of `_checkrevlog` checks, + - all of `_checkentry` checks (for each revisions), + - each revision can be read. + + The function returns some of the data observed in the changesets as a + (mflinkrevs, filelinkrevs) tuples: + - mflinkrevs: is a { manifest-node -> [changelog-rev] } mapping + - filelinkrevs: is a { file-path -> [changelog-rev] } mapping + + If a matcher was specified, filelinkrevs will only contains matched + files. 
+ """ ui = self.ui repo = self.repo match = self.match @@ -174,13 +222,13 @@ mflinkrevs = {} filelinkrevs = {} seen = {} - self.checklog(cl, "changelog", 0) + self._checkrevlog(cl, "changelog", 0) progress = ui.makeprogress(_('checking'), unit=_('changesets'), total=len(repo)) for i in repo: progress.update(i) n = cl.node(i) - self.checkentry(cl, i, n, seen, [i], "changelog") + self._checkentry(cl, i, n, seen, [i], "changelog") try: changes = cl.read(n) @@ -192,12 +240,39 @@ filelinkrevs.setdefault(_normpath(f), []).append(i) except Exception as inst: self.refersmf = True - self.exc(i, _("unpacking changeset %s") % short(n), inst) + self._exc(i, _("unpacking changeset %s") % short(n), inst) progress.complete() return mflinkrevs, filelinkrevs def _verifymanifest(self, mflinkrevs, dir="", storefiles=None, subdirprogress=None): + """verify the manifestlog content + + Inputs: + - mflinkrevs: a {manifest-node -> [changelog-revisions]} mapping + - dir: a subdirectory to check (for tree manifest repo) + - storefiles: set of currently "orphan" files. + - subdirprogress: a progress object + + This function checks: + * all of `_checkrevlog` checks (for all manifest related revlogs) + * all of `_checkentry` checks (for all manifest related revisions) + * nodes for subdirectory exists in the sub-directory manifest + * each manifest entries have a file path + * each manifest node refered in mflinkrevs exist in the manifest log + + If tree manifest is in use and a matchers is specified, only the + sub-directories matching it will be verified. + + return a two level mapping: + {"path" -> { filenode -> changelog-revision}} + + This mapping primarily contains entries for every files in the + repository. In addition, when tree-manifest is used, it also contains + sub-directory entries. + + If a matcher is provided, only matching paths will be included. 
+ """ repo = self.repo ui = self.ui match = self.match @@ -220,27 +295,27 @@ if self.refersmf: # Do not check manifest if there are only changelog entries with # null manifests. - self.checklog(mf, label, 0) + self._checkrevlog(mf, label, 0) progress = ui.makeprogress(_('checking'), unit=_('manifests'), total=len(mf)) for i in mf: if not dir: progress.update(i) n = mf.node(i) - lr = self.checkentry(mf, i, n, seen, mflinkrevs.get(n, []), label) + lr = self._checkentry(mf, i, n, seen, mflinkrevs.get(n, []), label) if n in mflinkrevs: del mflinkrevs[n] elif dir: - self.err(lr, _("%s not in parent-directory manifest") % + self._err(lr, _("%s not in parent-directory manifest") % short(n), label) else: - self.err(lr, _("%s not in changesets") % short(n), label) + self._err(lr, _("%s not in changesets") % short(n), label) try: mfdelta = mfl.get(dir, n).readdelta(shallow=True) for f, fn, fl in mfdelta.iterentries(): if not f: - self.err(lr, _("entry without name in manifest")) + self._err(lr, _("entry without name in manifest")) elif f == "/dev/null": # ignore this in very old repos continue fullpath = dir + _normpath(f) @@ -254,19 +329,21 @@ continue filenodes.setdefault(fullpath, {}).setdefault(fn, lr) except Exception as inst: - self.exc(lr, _("reading delta %s") % short(n), inst, label) + self._exc(lr, _("reading delta %s") % short(n), inst, label) if not dir: progress.complete() if self.havemf: - for c, m in sorted([(c, m) for m in mflinkrevs - for c in mflinkrevs[m]]): + # since we delete entry in `mflinkrevs` during iteration, any + # remaining entries are "missing". We need to issue errors for them. 
+ changesetpairs = [(c, m) for m in mflinkrevs for c in mflinkrevs[m]] + for c, m in sorted(changesetpairs): if dir: - self.err(c, _("parent-directory manifest refers to unknown " - "revision %s") % short(m), label) + self._err(c, _("parent-directory manifest refers to unknown" + " revision %s") % short(m), label) else: - self.err(c, _("changeset refers to unknown revision %s") % - short(m), label) + self._err(c, _("changeset refers to unknown revision %s") % + short(m), label) if not dir and subdirnodes: self.ui.status(_("checking directory manifests\n")) @@ -275,7 +352,7 @@ revlogv1 = self.revlogv1 for f, f2, size in repo.store.datafiles(): if not f: - self.err(None, _("cannot decode filename '%s'") % f2) + self._err(None, _("cannot decode filename '%s'") % f2) elif (size > 0 or not revlogv1) and f.startswith('meta/'): storefiles.add(_normpath(f)) subdirs.add(os.path.dirname(f)) @@ -292,7 +369,7 @@ subdirprogress.complete() if self.warnorphanstorefiles: for f in sorted(storefiles): - self.warn(_("warning: orphan data file '%s'") % f) + self._warn(_("warning: orphan data file '%s'") % f) return filenodes @@ -309,7 +386,7 @@ progress.increment() if f not in filenodes: lr = filelinkrevs[f][0] - self.err(lr, _("in changeset but not in manifest"), f) + self._err(lr, _("in changeset but not in manifest"), f) if self.havecl: for f in sorted(filenodes): @@ -320,7 +397,7 @@ lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]]) except Exception: lr = None - self.err(lr, _("in manifest but not in changeset"), f) + self._err(lr, _("in manifest but not in changeset"), f) progress.complete() @@ -335,7 +412,7 @@ storefiles = set() for f, f2, size in repo.store.datafiles(): if not f: - self.err(None, _("cannot decode filename '%s'") % f2) + self._err(None, _("cannot decode filename '%s'") % f2) elif (size > 0 or not revlogv1) and f.startswith('data/'): storefiles.add(_normpath(f)) @@ -367,7 +444,7 @@ try: fl = repo.file(f) except error.StorageError as e: - self.err(lr, 
_("broken revlog! (%s)") % e, f) + self._err(lr, _("broken revlog! (%s)") % e, f) continue for ff in fl.files(): @@ -375,12 +452,12 @@ storefiles.remove(ff) except KeyError: if self.warnorphanstorefiles: - self.warn(_(" warning: revlog '%s' not in fncache!") % + self._warn(_(" warning: revlog '%s' not in fncache!") % ff) self.fncachewarned = True if not len(fl) and (self.havecl or self.havemf): - self.err(lr, _("empty or missing %s") % f) + self._err(lr, _("empty or missing %s") % f) else: # Guard against implementations not setting this. state['skipread'] = set() @@ -391,10 +468,10 @@ linkrev = None if problem.warning: - self.warn(problem.warning) + self._warn(problem.warning) elif problem.error: - self.err(linkrev if linkrev is not None else lr, - problem.error, f) + self._err(linkrev if linkrev is not None else lr, + problem.error, f) else: raise error.ProgrammingError( 'problem instance does not set warning or error ' @@ -404,10 +481,10 @@ for i in fl: revisions += 1 n = fl.node(i) - lr = self.checkentry(fl, i, n, seen, linkrevs, f) + lr = self._checkentry(fl, i, n, seen, linkrevs, f) if f in filenodes: if havemf and n not in filenodes[f]: - self.err(lr, _("%s not in manifests") % (short(n)), f) + self._err(lr, _("%s not in manifests") % (short(n)), f) else: del filenodes[f][n] @@ -424,12 +501,15 @@ if lr is not None and ui.verbose: ctx = lrugetctx(lr) if not any(rp[0] in pctx for pctx in ctx.parents()): - self.warn(_("warning: copy source of '%s' not" + self._warn(_("warning: copy source of '%s' not" " in parents of %s") % (f, ctx)) fl2 = repo.file(rp[0]) if not len(fl2): - self.err(lr, _("empty or missing copy source " - "revlog %s:%s") % (rp[0], short(rp[1])), f) + self._err(lr, + _("empty or missing copy source revlog " + "%s:%s") % (rp[0], + short(rp[1])), + f) elif rp[1] == nullid: ui.note(_("warning: %s@%s: copy source" " revision is nullid %s:%s\n") @@ -437,18 +517,19 @@ else: fl2.rev(rp[1]) except Exception as inst: - self.exc(lr, _("checking rename of 
%s") % short(n), inst, f) + self._exc(lr, _("checking rename of %s") % short(n), + inst, f) # cross-check if f in filenodes: fns = [(v, k) for k, v in filenodes[f].iteritems()] for lr, node in sorted(fns): - self.err(lr, _("manifest refers to unknown revision %s") % - short(node), f) + self._err(lr, _("manifest refers to unknown revision %s") % + short(node), f) progress.complete() if self.warnorphanstorefiles: for f in sorted(storefiles): - self.warn(_("warning: orphan data file '%s'") % f) + self._warn(_("warning: orphan data file '%s'") % f) return len(files), revisions
--- a/mercurial/wireprotov1server.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/wireprotov1server.py Tue Mar 19 16:36:59 2019 +0300 @@ -7,6 +7,7 @@ from __future__ import absolute_import +import binascii import os from .i18n import _ @@ -63,7 +64,8 @@ extensions that need commands to operate on different repo views under specialized circumstances. """ - return repo.filtered('served') + viewconfig = repo.ui.config('server', 'view') + return repo.filtered(viewconfig) def dispatch(repo, proto, command): repo = getdispatchrepo(repo, proto, command) @@ -165,7 +167,6 @@ @wireprotocommand('batch', 'cmds *', permission='pull') def batch(repo, proto, cmds, others): unescapearg = wireprototypes.unescapebatcharg - repo = repo.filtered("served") res = [] for pair in cmds.split(';'): op, args = pair.split(' ', 1) @@ -344,7 +345,7 @@ one specific branch of many. """ def decodehexstring(s): - return set([h.decode('hex') for h in s.split(';')]) + return set([binascii.unhexlify(h) for h in s.split(';')]) manifest = repo.vfs.tryread('pullbundles.manifest') if not manifest: @@ -424,8 +425,6 @@ raise error.Abort(bundle2requiredmain, hint=bundle2requiredhint) - prefercompressed = True - try: clheads = set(repo.changelog.heads()) heads = set(opts.get('heads', set())) @@ -578,7 +577,6 @@ repo.ui.debug('redirecting incoming bundle to %s\n' % tempname) fp = os.fdopen(fd, pycompat.sysstr('wb+')) - r = 0 for p in payload: fp.write(p) fp.seek(0)
--- a/mercurial/wireprotov2peer.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/wireprotov2peer.py Tue Mar 19 16:36:59 2019 +0300 @@ -304,7 +304,7 @@ # TODO tell reactor? self._frameseof = True else: - self._ui.note(_('received %r\n') % frame) + self._ui.debug('received %r\n' % frame) self._processframe(frame) # Also try to read the first redirect. @@ -510,7 +510,7 @@ # Bytestring where each byte is a 0 or 1. raw = next(objs) - return [True if c == '1' else False for c in raw] + return [True if raw[i:i + 1] == b'1' else False for i in range(len(raw))] def decodelistkeys(objs): # Map with bytestring keys and values.
--- a/mercurial/wireprotov2server.py Tue Mar 19 09:23:35 2019 -0400 +++ b/mercurial/wireprotov2server.py Tue Mar 19 16:36:59 2019 +0300 @@ -23,6 +23,7 @@ narrowspec, pycompat, streamclone, + templatefilters, util, wireprotoframing, wireprototypes, @@ -148,8 +149,6 @@ tracker. We then dump the log of all that activity back out to the client. """ - import json - # Reflection APIs have a history of being abused, accidentally disclosing # sensitive data, etc. So we have a config knob. if not ui.configbool('experimental', 'web.api.debugreflect'): @@ -175,12 +174,11 @@ frame.payload)) action, meta = reactor.onframerecv(frame) - states.append(json.dumps((action, meta), sort_keys=True, - separators=(', ', ': '))) + states.append(templatefilters.json((action, meta))) action, meta = reactor.oninputeof() meta['action'] = action - states.append(json.dumps(meta, sort_keys=True, separators=(', ',': '))) + states.append(templatefilters.json(meta)) res.status = b'200 OK' res.headers[b'Content-Type'] = b'text/plain' @@ -344,7 +342,8 @@ action) def getdispatchrepo(repo, proto, command): - return repo.filtered('served') + viewconfig = repo.ui.config('server', 'view') + return repo.filtered(viewconfig) def dispatch(repo, proto, command, redirect): """Run a wire protocol command. @@ -390,7 +389,8 @@ return with cacher: - cachekey = entry.cachekeyfn(repo, proto, cacher, **args) + cachekey = entry.cachekeyfn(repo, proto, cacher, + **pycompat.strkwargs(args)) # No cache key or the cacher doesn't like it. Do default handling. if cachekey is None or not cacher.setcachekey(cachekey): @@ -744,7 +744,7 @@ # More granular cache key invalidation. b'localversion': localversion, # Cache keys are segmented by command. - b'command': pycompat.sysbytes(command), + b'command': command, # Throw in the media type and API version strings so changes # to exchange semantics invalid cache. b'mediatype': FRAMINGTYPE,
--- a/rust/Cargo.lock Tue Mar 19 09:23:35 2019 -0400 +++ b/rust/Cargo.lock Tue Mar 19 16:36:59 2019 +0300 @@ -7,11 +7,29 @@ ] [[package]] +name = "autocfg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "bitflags" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] name = "cfg-if" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "cpython" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -23,8 +41,17 @@ ] [[package]] +name = "fuchsia-cprng" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] name = "hg-core" version = "0.1.0" +dependencies = [ + "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] [[package]] name = "hg-cpython" @@ -89,6 +116,110 @@ ] [[package]] +name = "rand" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_jitter 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_os 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_pcg 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_isaac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_jitter" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_os" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-cprng 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.45 
(registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_pcg" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "regex" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -109,6 +240,27 @@ ] [[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] name = "thread_local" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -131,19 +283,59 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "winapi" +version = "0.3.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [metadata] "checksum aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9a933f4e58658d7b12defcf96dc5c720f20832deebe3e0a19efd3b6aaeeb9e" +"checksum autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6d640bee2da49f60a4068a7fae53acde8982514ab7bae8b8cea9e88cbcfd799" +"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" "checksum cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4" +"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum cpython 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b489034e723e7f5109fecd19b719e664f89ef925be785885252469e9822fa940" +"checksum fuchsia-cprng 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "81f7f8eb465745ea9b02e2704612a9946a59fa40572086c6fd49d6ddcf30bf31" "checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1" "checksum libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)" = "2d2857ec59fadc0773853c664d2d18e7198e83883e7060b63c924cb077bd5c74" "checksum memchr 2.1.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "db4c41318937f6e76648f42826b1d9ade5c09cafb5aef7e351240a70f39206e9" "checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" "checksum python27-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56114c37d4dca82526d74009df7782a28c871ac9d36b19d4cb9e67672258527e" "checksum python3-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "61e4aac43f833fd637e429506cb2ac9d7df672c4b68f2eaaa163649b7fdc0444" +"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +"checksum rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0e7a549d590831370895ab7ba4ea0c1b6b011d106b5ff2da6eee112615e6dc0" +"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +"checksum rand_jitter 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "080723c6145e37503a2224f801f252e14ac5531cb450f4502698542d188cb3c0" +"checksum rand_os 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b7c690732391ae0abafced5015ffb53656abfaec61b342290e5eb56b286a679d" +"checksum rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "086bd09a33c7044e56bb44d5bdde5a60e7f119a9e95b0775f545de759a32fe05" +"checksum rand_xorshift 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" "checksum regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "37e7cbbd370869ce2e8dff25c7018702d10b21a20ef7135316f8daecd6c25b7f" "checksum regex-syntax 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4e47a2ed29da7a9e1960e1639e7a982e6edc6d49be308a3b02daf511504a16d1" +"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" "checksum ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86" "checksum utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "796f7e48bef87609f7ade7e06495a87d5cd06c7866e6a5cbfceffc558a243737" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" +"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0" +"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +"checksum 
winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
--- a/rust/chg/src/sighandlers.c Tue Mar 19 09:23:35 2019 -0400 +++ b/rust/chg/src/sighandlers.c Tue Mar 19 16:36:59 2019 +0300 @@ -33,28 +33,36 @@ { sigset_t unblockset, oldset; struct sigaction sa, oldsa; - if (sigemptyset(&unblockset) < 0) + if (sigemptyset(&unblockset) < 0) { return; - if (sigaddset(&unblockset, sig) < 0) + } + if (sigaddset(&unblockset, sig) < 0) { return; + } memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { return; + } forwardsignal(sig); - if (raise(sig) < 0) /* resend to self */ + if (raise(sig) < 0) { /* resend to self */ return; - if (sigaction(sig, &sa, &oldsa) < 0) + } + if (sigaction(sig, &sa, &oldsa) < 0) { return; - if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) + } + if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) { return; + } /* resent signal will be handled before sigprocmask() returns */ - if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) + if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) { return; - if (sigaction(sig, &oldsa, NULL) < 0) + } + if (sigaction(sig, &oldsa, NULL) < 0) { return; + } } /* @@ -81,37 +89,46 @@ * - SIGINT: usually generated by the terminal */ sa.sa_handler = forwardsignaltogroup; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { + return -1; + } + if (sigaction(SIGHUP, &sa, NULL) < 0) { return -1; - if (sigaction(SIGHUP, &sa, NULL) < 0) + } + if (sigaction(SIGINT, &sa, NULL) < 0) { return -1; - if (sigaction(SIGINT, &sa, NULL) < 0) - return -1; + } /* terminate frontend by double SIGTERM in case of server freeze */ sa.sa_handler = forwardsignal; sa.sa_flags |= SA_RESETHAND; - if (sigaction(SIGTERM, &sa, NULL) < 0) + if (sigaction(SIGTERM, &sa, NULL) < 0) { return -1; + } /* notify the worker about window resize events */ sa.sa_flags = SA_RESTART; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + if (sigaction(SIGWINCH, &sa, NULL) < 0) { return 
-1; + } /* forward user-defined signals */ - if (sigaction(SIGUSR1, &sa, NULL) < 0) + if (sigaction(SIGUSR1, &sa, NULL) < 0) { return -1; - if (sigaction(SIGUSR2, &sa, NULL) < 0) + } + if (sigaction(SIGUSR2, &sa, NULL) < 0) { return -1; + } /* propagate job control requests to worker */ sa.sa_handler = forwardsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGCONT, &sa, NULL) < 0) + if (sigaction(SIGCONT, &sa, NULL) < 0) { return -1; + } sa.sa_handler = handlestopsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + if (sigaction(SIGTSTP, &sa, NULL) < 0) { return -1; + } return 0; } @@ -127,24 +144,31 @@ memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { return -1; + } - if (sigaction(SIGHUP, &sa, NULL) < 0) + if (sigaction(SIGHUP, &sa, NULL) < 0) { return -1; - if (sigaction(SIGTERM, &sa, NULL) < 0) + } + if (sigaction(SIGTERM, &sa, NULL) < 0) { return -1; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + } + if (sigaction(SIGWINCH, &sa, NULL) < 0) { return -1; - if (sigaction(SIGCONT, &sa, NULL) < 0) + } + if (sigaction(SIGCONT, &sa, NULL) < 0) { return -1; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + } + if (sigaction(SIGTSTP, &sa, NULL) < 0) { return -1; + } /* ignore Ctrl+C while shutting down to make pager exits cleanly */ sa.sa_handler = SIG_IGN; - if (sigaction(SIGINT, &sa, NULL) < 0) + if (sigaction(SIGINT, &sa, NULL) < 0) { return -1; + } peerpid = 0; return 0;
--- a/rust/hg-core/Cargo.toml Tue Mar 19 09:23:35 2019 -0400 +++ b/rust/hg-core/Cargo.toml Tue Mar 19 16:36:59 2019 +0300 @@ -6,3 +6,7 @@ [lib] name = "hg" + +[dev-dependencies] +rand = "*" +rand_pcg = "*"
--- a/rust/hg-core/src/ancestors.rs Tue Mar 19 09:23:35 2019 -0400 +++ b/rust/hg-core/src/ancestors.rs Tue Mar 19 16:36:59 2019 +0300 @@ -38,6 +38,7 @@ pub struct MissingAncestors<G: Graph> { graph: G, bases: HashSet<Revision>, + max_base: Revision, } impl<G: Graph> AncestorsIterator<G> { @@ -79,8 +80,7 @@ #[inline] fn conditionally_push_rev(&mut self, rev: Revision) { - if self.stoprev <= rev && !self.seen.contains(&rev) { - self.seen.insert(rev); + if self.stoprev <= rev && self.seen.insert(rev) { self.visit.push(rev); } } @@ -154,11 +154,10 @@ Ok(ps) => ps, Err(e) => return Some(Err(e)), }; - if p1 < self.stoprev || self.seen.contains(&p1) { + if p1 < self.stoprev || !self.seen.insert(p1) { self.visit.pop(); } else { *(self.visit.peek_mut().unwrap()) = p1; - self.seen.insert(p1); }; self.conditionally_push_rev(p2); @@ -211,15 +210,17 @@ impl<G: Graph> MissingAncestors<G> { pub fn new(graph: G, bases: impl IntoIterator<Item = Revision>) -> Self { - let mut bases: HashSet<Revision> = bases.into_iter().collect(); - if bases.is_empty() { - bases.insert(NULL_REVISION); - } - MissingAncestors { graph, bases } + let mut created = MissingAncestors { + graph: graph, + bases: HashSet::new(), + max_base: NULL_REVISION, + }; + created.add_bases(bases); + created } pub fn has_bases(&self) -> bool { - self.bases.iter().any(|&b| b != NULL_REVISION) + !self.bases.is_empty() } /// Return a reference to current bases. @@ -238,16 +239,33 @@ } /// Consumes the object and returns the relative heads of its bases. - pub fn into_bases_heads(mut self) -> Result<HashSet<Revision>, GraphError> { + pub fn into_bases_heads( + mut self, + ) -> Result<HashSet<Revision>, GraphError> { dagops::retain_heads(&self.graph, &mut self.bases)?; Ok(self.bases) } + /// Add some revisions to `self.bases` + /// + /// Takes care of keeping `self.max_base` up to date. 
pub fn add_bases( &mut self, new_bases: impl IntoIterator<Item = Revision>, ) { - self.bases.extend(new_bases); + let mut max_base = self.max_base; + self.bases.extend( + new_bases + .into_iter() + .filter(|&rev| rev != NULL_REVISION) + .map(|r| { + if r > max_base { + max_base = r; + } + r + }), + ); + self.max_base = max_base; } /// Remove all ancestors of self.bases from the revs set (in place) @@ -256,28 +274,26 @@ revs: &mut HashSet<Revision>, ) -> Result<(), GraphError> { revs.retain(|r| !self.bases.contains(r)); - // the null revision is always an ancestor + // the null revision is always an ancestor. Logically speaking + // it's debatable in case bases is empty, but the Python + // implementation always adds NULL_REVISION to bases, making it + // unconditionnally true. revs.remove(&NULL_REVISION); if revs.is_empty() { return Ok(()); } // anything in revs > start is definitely not an ancestor of bases // revs <= start need to be investigated - // TODO optim: if a missingancestors is to be used several times, - // we shouldn't need to iterate each time on bases - let start = match self.bases.iter().cloned().max() { - Some(m) => m, - None => { - // bases is empty (shouldn't happen, but let's be safe) - return Ok(()); - } - }; + if self.max_base == NULL_REVISION { + return Ok(()); + } + // whatever happens, we'll keep at least keepcount of them // knowing this gives us a earlier stop condition than // going all the way to the root - let keepcount = revs.iter().filter(|r| **r > start).count(); + let keepcount = revs.iter().filter(|r| **r > self.max_base).count(); - let mut curr = start; + let mut curr = self.max_base; while curr != NULL_REVISION && revs.len() > keepcount { if self.bases.contains(&curr) { revs.remove(&curr); @@ -288,12 +304,17 @@ Ok(()) } - /// Add rev's parents to self.bases + /// Add the parents of `rev` to `self.bases` + /// + /// This has no effect on `self.max_base` #[inline] fn add_parents(&mut self, rev: Revision) -> Result<(), GraphError> 
{ - // No need to bother the set with inserting NULL_REVISION over and - // over + if rev == NULL_REVISION { + return Ok(()); + } for p in self.graph.parents(rev)?.iter().cloned() { + // No need to bother the set with inserting NULL_REVISION over and + // over if p != NULL_REVISION { self.bases.insert(p); } @@ -323,12 +344,8 @@ if revs_visit.is_empty() { return Ok(Vec::new()); } - - let max_bases = - bases_visit.iter().cloned().max().unwrap_or(NULL_REVISION); - let max_revs = - revs_visit.iter().cloned().max().unwrap_or(NULL_REVISION); - let start = max(max_bases, max_revs); + let max_revs = revs_visit.iter().cloned().max().unwrap(); + let start = max(self.max_base, max_revs); // TODO heuristics for with_capacity()? let mut missing: Vec<Revision> = Vec::new(); @@ -336,12 +353,9 @@ if revs_visit.is_empty() { break; } - if both_visit.contains(&curr) { + if both_visit.remove(&curr) { // curr's parents might have made it into revs_visit through // another path - // TODO optim: Rust's HashSet.remove returns a boolean telling - // if it happened. This will spare us one set lookup - both_visit.remove(&curr); for p in self.graph.parents(curr)?.iter().cloned() { if p == NULL_REVISION { continue; @@ -356,13 +370,14 @@ if p == NULL_REVISION { continue; } - if bases_visit.contains(&p) || both_visit.contains(&p) { - // p is an ancestor of revs_visit, and is implicitly - // in bases_visit, which means p is ::revs & ::bases. 
- // TODO optim: hence if bothvisit, we look up twice + if bases_visit.contains(&p) { + // p is already known to be an ancestor of revs_visit + revs_visit.remove(&p); + both_visit.insert(p); + } else if both_visit.contains(&p) { + // p should have been in bases_visit revs_visit.remove(&p); bases_visit.insert(p); - both_visit.insert(p); } else { // visit later revs_visit.insert(p); @@ -373,11 +388,9 @@ if p == NULL_REVISION { continue; } - if revs_visit.contains(&p) || both_visit.contains(&p) { + if revs_visit.remove(&p) || both_visit.contains(&p) { // p is an ancestor of bases_visit, and is implicitly // in revs_visit, which means p is ::revs & ::bases. - // TODO optim: hence if bothvisit, we look up twice - revs_visit.remove(&p); bases_visit.insert(p); both_visit.insert(p); } else { @@ -578,11 +591,13 @@ missing_ancestors.get_bases().iter().cloned().collect(); as_vec.sort(); assert_eq!(as_vec, [1, 3, 5]); + assert_eq!(missing_ancestors.max_base, 5); missing_ancestors.add_bases([3, 7, 8].iter().cloned()); as_vec = missing_ancestors.get_bases().iter().cloned().collect(); as_vec.sort(); assert_eq!(as_vec, [1, 3, 5, 7, 8]); + assert_eq!(missing_ancestors.max_base, 8); as_vec = missing_ancestors.bases_heads()?.iter().cloned().collect(); as_vec.sort();
--- a/rust/hg-core/src/dagops.rs Tue Mar 19 09:23:35 2019 -0400 +++ b/rust/hg-core/src/dagops.rs Tue Mar 19 16:36:59 2019 +0300 @@ -46,7 +46,9 @@ let mut heads: HashSet<Revision> = iter_revs.clone().cloned().collect(); heads.remove(&NULL_REVISION); for rev in iter_revs { - remove_parents(graph, *rev, &mut heads)?; + if *rev != NULL_REVISION { + remove_parents(graph, *rev, &mut heads)?; + } } Ok(heads) } @@ -71,7 +73,9 @@ // mutating let as_vec: Vec<Revision> = revs.iter().cloned().collect(); for rev in as_vec { - remove_parents(graph, rev, revs)?; + if rev != NULL_REVISION { + remove_parents(graph, rev, revs)?; + } } Ok(()) }
--- a/rust/hg-core/src/lib.rs Tue Mar 19 09:23:35 2019 -0400 +++ b/rust/hg-core/src/lib.rs Tue Mar 19 16:36:59 2019 +0300 @@ -5,8 +5,7 @@ mod ancestors; pub mod dagops; pub use ancestors::{AncestorsIterator, LazyAncestors, MissingAncestors}; -#[cfg(test)] -pub mod testing; +pub mod testing; // unconditionally built, for use from integration tests /// Mercurial revision numbers /// @@ -14,6 +13,11 @@ /// 4 bytes, and are liberally converted to ints, whence the i32 pub type Revision = i32; + +/// Marker expressing the absence of a parent +/// +/// Independently of the actual representation, `NULL_REVISION` is guaranteed +/// to be smaller that all existing revisions. pub const NULL_REVISION: Revision = -1; /// Same as `mercurial.node.wdirrev`
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/rust/hg-core/tests/test_missing_ancestors.rs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,340 @@ +extern crate hg; +extern crate rand; +extern crate rand_pcg; + +use hg::testing::VecGraph; +use hg::Revision; +use hg::*; +use rand::distributions::{Distribution, LogNormal, Uniform}; +use rand::{thread_rng, Rng, RngCore, SeedableRng}; +use std::cmp::min; +use std::collections::HashSet; +use std::env; +use std::fmt::Debug; + +fn build_random_graph( + nodes_opt: Option<usize>, + rootprob_opt: Option<f64>, + mergeprob_opt: Option<f64>, + prevprob_opt: Option<f64>, +) -> VecGraph { + let nodes = nodes_opt.unwrap_or(100); + let rootprob = rootprob_opt.unwrap_or(0.05); + let mergeprob = mergeprob_opt.unwrap_or(0.2); + let prevprob = prevprob_opt.unwrap_or(0.7); + + let mut rng = thread_rng(); + let mut vg: VecGraph = Vec::with_capacity(nodes); + for i in 0..nodes { + if i == 0 || rng.gen_bool(rootprob) { + vg.push([NULL_REVISION, NULL_REVISION]) + } else if i == 1 { + vg.push([0, NULL_REVISION]) + } else if rng.gen_bool(mergeprob) { + let p1 = { + if i == 2 || rng.gen_bool(prevprob) { + (i - 1) as Revision + } else { + rng.gen_range(0, i - 1) as Revision + } + }; + // p2 is a random revision lower than i and different from p1 + let mut p2 = rng.gen_range(0, i - 1) as Revision; + if p2 >= p1 { + p2 = p2 + 1; + } + vg.push([p1, p2]); + } else if rng.gen_bool(prevprob) { + vg.push([(i - 1) as Revision, NULL_REVISION]) + } else { + vg.push([rng.gen_range(0, i - 1) as Revision, NULL_REVISION]) + } + } + vg +} + +/// Compute the ancestors set of all revisions of a VecGraph +fn ancestors_sets(vg: &VecGraph) -> Vec<HashSet<Revision>> { + let mut ancs: Vec<HashSet<Revision>> = Vec::new(); + for i in 0..vg.len() { + let mut ancs_i = HashSet::new(); + ancs_i.insert(i as Revision); + for p in vg[i].iter().cloned() { + if p != NULL_REVISION { + ancs_i.extend(&ancs[p as usize]); + } + } + ancs.push(ancs_i); + } + ancs +} + +#[derive(Clone, 
Debug)] +enum MissingAncestorsAction { + InitialBases(HashSet<Revision>), + AddBases(HashSet<Revision>), + RemoveAncestorsFrom(HashSet<Revision>), + MissingAncestors(HashSet<Revision>), +} + +/// An instrumented naive yet obviously correct implementation +/// +/// It also records all its actions for easy reproduction for replay +/// of problematic cases +struct NaiveMissingAncestors<'a> { + ancestors_sets: &'a Vec<HashSet<Revision>>, + graph: &'a VecGraph, // used for error reporting only + bases: HashSet<Revision>, + history: Vec<MissingAncestorsAction>, + // for error reporting, assuming we are in a random test + random_seed: String, +} + +impl<'a> NaiveMissingAncestors<'a> { + fn new( + graph: &'a VecGraph, + ancestors_sets: &'a Vec<HashSet<Revision>>, + bases: &HashSet<Revision>, + random_seed: &str, + ) -> Self { + Self { + ancestors_sets: ancestors_sets, + bases: bases.clone(), + graph: graph, + history: vec![MissingAncestorsAction::InitialBases(bases.clone())], + random_seed: random_seed.into(), + } + } + + fn add_bases(&mut self, new_bases: HashSet<Revision>) { + self.bases.extend(&new_bases); + self.history + .push(MissingAncestorsAction::AddBases(new_bases)) + } + + fn remove_ancestors_from(&mut self, revs: &mut HashSet<Revision>) { + revs.remove(&NULL_REVISION); + self.history + .push(MissingAncestorsAction::RemoveAncestorsFrom(revs.clone())); + for base in self.bases.iter().cloned() { + if base != NULL_REVISION { + for rev in &self.ancestors_sets[base as usize] { + revs.remove(&rev); + } + } + } + } + + fn missing_ancestors( + &mut self, + revs: impl IntoIterator<Item = Revision>, + ) -> Vec<Revision> { + let revs_as_set: HashSet<Revision> = revs.into_iter().collect(); + + let mut missing: HashSet<Revision> = HashSet::new(); + for rev in revs_as_set.iter().cloned() { + if rev != NULL_REVISION { + missing.extend(&self.ancestors_sets[rev as usize]) + } + } + self.history + .push(MissingAncestorsAction::MissingAncestors(revs_as_set)); + + for base in 
self.bases.iter().cloned() { + if base != NULL_REVISION { + for rev in &self.ancestors_sets[base as usize] { + missing.remove(&rev); + } + } + } + let mut res: Vec<Revision> = missing.iter().cloned().collect(); + res.sort(); + res + } + + fn assert_eq<T>(&self, left: T, right: T) + where + T: PartialEq + Debug, + { + if left == right { + return; + } + panic!(format!( + "Equality assertion failed (left != right) + left={:?} + right={:?} + graph={:?} + current bases={:?} + history={:?} + random seed={} + ", + left, + right, + self.graph, + self.bases, + self.history, + self.random_seed, + )); + } +} + +/// Choose a set of random revisions +/// +/// The size of the set is taken from a LogNormal distribution +/// with default mu=1.1 and default sigma=0.8. Quoting the Python +/// test this is taken from: +/// the default mu and sigma give us a nice distribution of mostly +/// single-digit counts (including 0) with some higher ones +/// The sample may include NULL_REVISION +fn sample_revs<R: RngCore>( + rng: &mut R, + maxrev: Revision, + mu_opt: Option<f64>, + sigma_opt: Option<f64>, +) -> HashSet<Revision> { + let mu = mu_opt.unwrap_or(1.1); + let sigma = sigma_opt.unwrap_or(0.8); + + let log_normal = LogNormal::new(mu, sigma); + let nb = min(maxrev as usize, log_normal.sample(rng).floor() as usize); + + let dist = Uniform::from(NULL_REVISION..maxrev); + return rng.sample_iter(&dist).take(nb).collect(); +} + +/// Produces the hexadecimal representation of a slice of bytes +fn hex_bytes(bytes: &[u8]) -> String { + let mut s = String::with_capacity(bytes.len() * 2); + for b in bytes { + s.push_str(&format!("{:x}", b)); + } + s +} + +/// Fill a random seed from its hexadecimal representation. 
+/// +/// This signature is meant to be consistent with `RngCore::fill_bytes` +fn seed_parse_in(hex: &str, seed: &mut [u8]) { + if hex.len() != 32 { + panic!("Seed {} is too short for 128 bits hex", hex); + } + for i in 0..8 { + seed[i] = u8::from_str_radix(&hex[2 * i..2 * (i + 1)], 16) + .unwrap_or_else(|_e| panic!("Seed {} is not 128 bits hex", hex)); + } +} + +/// Parse the parameters for `test_missing_ancestors()` +/// +/// Returns (graphs, instances, calls per instance) +fn parse_test_missing_ancestors_params(var: &str) -> (usize, usize, usize) { + let err_msg = "TEST_MISSING_ANCESTORS format: GRAPHS,INSTANCES,CALLS"; + let params: Vec<usize> = var + .split(',') + .map(|n| n.trim().parse().expect(err_msg)) + .collect(); + if params.len() != 3 { + panic!(err_msg); + } + (params[0], params[1], params[2]) +} + +#[test] +/// This test creates lots of random VecGraphs, +/// and compare a bunch of MissingAncestors for them with +/// NaiveMissingAncestors that rely on precomputed transitive closures of +/// these VecGraphs (ancestors_sets). +/// +/// For each generater graph, several instances of `MissingAncestors` are +/// created, whose methods are called and checked a given number of times. +/// +/// This test can be parametrized by two environment variables: +/// +/// - TEST_RANDOM_SEED: must be 128 bits in hexadecimal +/// - TEST_MISSING_ANCESTORS: "GRAPHS,INSTANCES,CALLS". The default is +/// "100,10,10" +/// +/// This is slow: it runs on my workstation in about 5 seconds with the +/// default parameters with a plain `cargo --test`. +/// +/// If you want to run it faster, especially if you're changing the +/// parameters, use `cargo test --release`. 
+/// For me, that gets it down to 0.15 seconds with the default parameters +fn test_missing_ancestors_compare_naive() { + let (graphcount, testcount, inccount) = + match env::var("TEST_MISSING_ANCESTORS") { + Err(env::VarError::NotPresent) => (100, 10, 10), + Ok(val) => parse_test_missing_ancestors_params(&val), + Err(env::VarError::NotUnicode(_)) => { + panic!("TEST_MISSING_ANCESTORS is invalid"); + } + }; + let mut seed: [u8; 16] = [0; 16]; + match env::var("TEST_RANDOM_SEED") { + Ok(val) => { + seed_parse_in(&val, &mut seed); + } + Err(env::VarError::NotPresent) => { + thread_rng().fill_bytes(&mut seed); + } + Err(env::VarError::NotUnicode(_)) => { + panic!("TEST_RANDOM_SEED must be 128 bits in hex"); + } + } + let hex_seed = hex_bytes(&seed); + eprintln!("Random seed: {}", hex_seed); + + let mut rng = rand_pcg::Pcg32::from_seed(seed); + + eprint!("Checking MissingAncestors against brute force implementation "); + eprint!("for {} random graphs, ", graphcount); + eprintln!( + "with {} instances for each and {} calls per instance", + testcount, inccount, + ); + for g in 0..graphcount { + if g != 0 && g % 100 == 0 { + eprintln!("Tested with {} graphs", g); + } + let graph = build_random_graph(None, None, None, None); + let graph_len = graph.len() as Revision; + let ancestors_sets = ancestors_sets(&graph); + for _testno in 0..testcount { + let bases: HashSet<Revision> = + sample_revs(&mut rng, graph_len, None, None); + let mut inc = MissingAncestors::<VecGraph>::new( + graph.clone(), + bases.clone(), + ); + let mut naive = NaiveMissingAncestors::new( + &graph, + &ancestors_sets, + &bases, + &hex_seed, + ); + for _m in 0..inccount { + if rng.gen_bool(0.2) { + let new_bases = + sample_revs(&mut rng, graph_len, None, None); + inc.add_bases(new_bases.iter().cloned()); + naive.add_bases(new_bases); + } + if rng.gen_bool(0.4) { + // larger set so that there are more revs to remove from + let mut hrevs = + sample_revs(&mut rng, graph_len, Some(1.5), None); + let mut rrevs 
= hrevs.clone(); + inc.remove_ancestors_from(&mut hrevs).unwrap(); + naive.remove_ancestors_from(&mut rrevs); + naive.assert_eq(hrevs, rrevs); + } else { + let revs = sample_revs(&mut rng, graph_len, None, None); + let hm = + inc.missing_ancestors(revs.iter().cloned()).unwrap(); + let rm = naive.missing_ancestors(revs.iter().cloned()); + naive.assert_eq(hm, rm); + } + } + } + } +}
--- a/rust/hg-cpython/src/ancestors.rs Tue Mar 19 09:23:35 2019 -0400 +++ b/rust/hg-cpython/src/ancestors.rs Tue Mar 19 16:36:59 2019 +0300 @@ -34,11 +34,11 @@ //! [`LazyAncestors`]: struct.LazyAncestors.html //! [`MissingAncestors`]: struct.MissingAncestors.html //! [`AncestorsIterator`]: struct.AncestorsIterator.html -use crate::conversion::rev_pyiter_collect; +use crate::conversion::{py_set, rev_pyiter_collect}; use cindex::Index; use cpython::{ ObjectProtocol, PyClone, PyDict, PyList, PyModule, PyObject, PyResult, - PyTuple, Python, PythonObject, ToPyObject, + Python, PythonObject, ToPyObject, }; use exceptions::GraphError; use hg::Revision; @@ -90,24 +90,6 @@ } } -/// Copy and convert an `HashSet<Revision>` in a Python set -/// -/// This will probably turn useless once `PySet` support lands in -/// `rust-cpython`. -/// -/// This builds a Python tuple, then calls Python's "set()" on it -fn py_set(py: Python, set: &HashSet<Revision>) -> PyResult<PyObject> { - let as_vec: Vec<PyObject> = set - .iter() - .map(|rev| rev.to_py_object(py).into_object()) - .collect(); - let as_pytuple = PyTuple::new(py, as_vec.as_slice()); - - let locals = PyDict::new(py); - locals.set_item(py, "obj", as_pytuple.to_py_object(py))?; - py.eval("set(obj)", None, Some(&locals)) -} - py_class!(pub class LazyAncestors |py| { data inner: RefCell<Box<CoreLazy<Index>>>;
--- a/rust/hg-cpython/src/conversion.rs Tue Mar 19 09:23:35 2019 -0400 +++ b/rust/hg-cpython/src/conversion.rs Tue Mar 19 16:36:59 2019 +0300 @@ -8,8 +8,12 @@ //! Bindings for the hg::ancestors module provided by the //! `hg-core` crate. From Python, this will be seen as `rustext.ancestor` -use cpython::{ObjectProtocol, PyObject, PyResult, Python}; +use cpython::{ + ObjectProtocol, PyDict, PyObject, PyResult, PyTuple, Python, PythonObject, + ToPyObject, +}; use hg::Revision; +use std::collections::HashSet; use std::iter::FromIterator; /// Utility function to convert a Python iterable into various collections @@ -26,3 +30,21 @@ .map(|r| r.and_then(|o| o.extract::<Revision>(py))) .collect() } + +/// Copy and convert an `HashSet<Revision>` in a Python set +/// +/// This will probably turn useless once `PySet` support lands in +/// `rust-cpython`. +/// +/// This builds a Python tuple, then calls Python's "set()" on it +pub fn py_set(py: Python, set: &HashSet<Revision>) -> PyResult<PyObject> { + let as_vec: Vec<PyObject> = set + .iter() + .map(|rev| rev.to_py_object(py).into_object()) + .collect(); + let as_pytuple = PyTuple::new(py, as_vec.as_slice()); + + let locals = PyDict::new(py); + locals.set_item(py, "obj", as_pytuple.to_py_object(py))?; + py.eval("set(obj)", None, Some(&locals)) +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/rust/hg-cpython/src/dagops.rs Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,53 @@ +// dagops.rs +// +// Copyright 2019 Georges Racinet <georges.racinet@octobus.net> +// +// This software may be used and distributed according to the terms of the +// GNU General Public License version 2 or any later version. + +//! Bindings for the `hg::dagops` module provided by the +//! `hg-core` package. +//! +//! From Python, this will be seen as `mercurial.rustext.dagop` +use cindex::Index; +use cpython::{PyDict, PyModule, PyObject, PyResult, Python}; +use crate::conversion::{py_set, rev_pyiter_collect}; +use exceptions::GraphError; +use hg::dagops; +use hg::Revision; +use std::collections::HashSet; + +/// Using the the `index`, return heads out of any Python iterable of Revisions +/// +/// This is the Rust counterpart for `mercurial.dagop.headrevs` +pub fn headrevs( + py: Python, + index: PyObject, + revs: PyObject, +) -> PyResult<PyObject> { + let mut as_set: HashSet<Revision> = rev_pyiter_collect(py, &revs)?; + dagops::retain_heads(&Index::new(py, index)?, &mut as_set) + .map_err(|e| GraphError::pynew(py, e))?; + py_set(py, &as_set) +} + +/// Create the module, with `__package__` given from parent +pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> { + let dotted_name = &format!("{}.dagop", package); + let m = PyModule::new(py, dotted_name)?; + m.add(py, "__package__", package)?; + m.add(py, "__doc__", "DAG operations - Rust implementation")?; + m.add( + py, + "headrevs", + py_fn!(py, headrevs(index: PyObject, revs: PyObject)), + )?; + + let sys = PyModule::import(py, "sys")?; + let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?; + sys_modules.set_item(py, dotted_name, &m)?; + // Example C code (see pyexpat.c and import.c) will "give away the + // reference", but we won't because it will be consumed once the + // Rust PyObject is dropped. + Ok(m) +}
--- a/rust/hg-cpython/src/lib.rs Tue Mar 19 09:23:35 2019 -0400 +++ b/rust/hg-cpython/src/lib.rs Tue Mar 19 16:36:59 2019 +0300 @@ -27,6 +27,7 @@ pub mod ancestors; mod cindex; mod conversion; +pub mod dagops; pub mod exceptions; py_module_initializer!(rustext, initrustext, PyInit_rustext, |py, m| { @@ -38,6 +39,7 @@ let dotted_name: String = m.get(py, "__name__")?.extract(py)?; m.add(py, "ancestor", ancestors::init_module(py, &dotted_name)?)?; + m.add(py, "dagop", dagops::init_module(py, &dotted_name)?)?; m.add(py, "GraphError", py.get_type::<exceptions::GraphError>())?; Ok(()) });
--- a/setup.py Tue Mar 19 09:23:35 2019 -0400 +++ b/setup.py Tue Mar 19 16:36:59 2019 +0300 @@ -240,9 +240,9 @@ except ImportError: py2exeloaded = False -def runcmd(cmd, env): +def runcmd(cmd, env, cwd=None): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, env=env) + stderr=subprocess.PIPE, env=env, cwd=cwd) out, err = p.communicate() return p.returncode, out, err @@ -437,10 +437,9 @@ pure = False cffi = ispypy - global_options = Distribution.global_options + \ - [('pure', None, "use pure (slow) Python " - "code instead of C extensions"), - ] + global_options = Distribution.global_options + [ + ('pure', None, "use pure (slow) Python code instead of C extensions"), + ] def has_ext_modules(self): # self.ext_modules is emptied in hgbuildpy.finalize_options which is @@ -666,7 +665,7 @@ self.addlongpathsmanifest() def addlongpathsmanifest(self): - """Add manifest pieces so that hg.exe understands long paths + r"""Add manifest pieces so that hg.exe understands long paths This is an EXPERIMENTAL feature, use with care. To enable long paths support, one needs to do two things: @@ -703,6 +702,117 @@ dir = os.path.dirname(self.get_ext_fullpath('dummy')) return os.path.join(self.build_temp, dir, 'hg.exe') +class hgbuilddoc(Command): + description = 'build documentation' + user_options = [ + ('man', None, 'generate man pages'), + ('html', None, 'generate html pages'), + ] + + def initialize_options(self): + self.man = None + self.html = None + + def finalize_options(self): + # If --man or --html are set, only generate what we're told to. + # Otherwise generate everything. 
+ have_subset = self.man is not None or self.html is not None + + if have_subset: + self.man = True if self.man else False + self.html = True if self.html else False + else: + self.man = True + self.html = True + + def run(self): + def normalizecrlf(p): + with open(p, 'rb') as fh: + orig = fh.read() + + if b'\r\n' not in orig: + return + + log.info('normalizing %s to LF line endings' % p) + with open(p, 'wb') as fh: + fh.write(orig.replace(b'\r\n', b'\n')) + + def gentxt(root): + txt = 'doc/%s.txt' % root + log.info('generating %s' % txt) + res, out, err = runcmd( + [sys.executable, 'gendoc.py', root], + os.environ, + cwd='doc') + if res: + raise SystemExit('error running gendoc.py: %s' % + '\n'.join([out, err])) + + with open(txt, 'wb') as fh: + fh.write(out) + + def gengendoc(root): + gendoc = 'doc/%s.gendoc.txt' % root + + log.info('generating %s' % gendoc) + res, out, err = runcmd( + [sys.executable, 'gendoc.py', '%s.gendoc' % root], + os.environ, + cwd='doc') + if res: + raise SystemExit('error running gendoc: %s' % + '\n'.join([out, err])) + + with open(gendoc, 'wb') as fh: + fh.write(out) + + def genman(root): + log.info('generating doc/%s' % root) + res, out, err = runcmd( + [sys.executable, 'runrst', 'hgmanpage', '--halt', 'warning', + '--strip-elements-with-class', 'htmlonly', + '%s.txt' % root, root], + os.environ, + cwd='doc') + if res: + raise SystemExit('error running runrst: %s' % + '\n'.join([out, err])) + + normalizecrlf('doc/%s' % root) + + def genhtml(root): + log.info('generating doc/%s.html' % root) + res, out, err = runcmd( + [sys.executable, 'runrst', 'html', '--halt', 'warning', + '--link-stylesheet', '--stylesheet-path', 'style.css', + '%s.txt' % root, '%s.html' % root], + os.environ, + cwd='doc') + if res: + raise SystemExit('error running runrst: %s' % + '\n'.join([out, err])) + + normalizecrlf('doc/%s.html' % root) + + # This logic is duplicated in doc/Makefile. 
+ sources = {f for f in os.listdir('mercurial/help') + if re.search('[0-9]\.txt$', f)} + + # common.txt is a one-off. + gentxt('common') + + for source in sorted(sources): + assert source[-4:] == '.txt' + root = source[:-4] + + gentxt(root) + gengendoc(root) + + if self.man: + genman(root) + if self.html: + genhtml(root) + class hginstall(install): user_options = install.user_options + [ @@ -828,6 +938,7 @@ fp.write(data) cmdclass = {'build': hgbuild, + 'build_doc': hgbuilddoc, 'build_mo': hgbuildmo, 'build_ext': hgbuildext, 'build_py': hgbuildpy, @@ -1129,18 +1240,51 @@ extra = {} +py2exepackages = [ + 'hgdemandimport', + 'hgext3rd', + 'hgext', + 'email', + # implicitly imported per module policy + # (cffi wouldn't be used as a frozen exe) + 'mercurial.cext', + #'mercurial.cffi', + 'mercurial.pure', +] + +py2exeexcludes = [] +py2exedllexcludes = ['crypt32.dll'] + if issetuptools: extra['python_requires'] = supportedpy + if py2exeloaded: extra['console'] = [ {'script':'hg', 'copyright':'Copyright (C) 2005-2019 Matt Mackall and others', 'product_version':version}] - # sub command of 'build' because 'py2exe' does not handle sub_commands - build.sub_commands.insert(0, ('build_hgextindex', None)) + # Sub command of 'build' because 'py2exe' does not handle sub_commands. + # Need to override hgbuild because it has a private copy of + # build.sub_commands. + hgbuild.sub_commands.insert(0, ('build_hgextindex', None)) # put dlls in sub directory so that they won't pollute PATH extra['zipfile'] = 'lib/library.zip' + # We allow some configuration to be supplemented via environment + # variables. This is better than setup.cfg files because it allows + # supplementing configs instead of replacing them. 
+ extrapackages = os.environ.get('HG_PY2EXE_EXTRA_PACKAGES') + if extrapackages: + py2exepackages.extend(extrapackages.split(' ')) + + excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES') + if excludes: + py2exeexcludes.extend(excludes.split(' ')) + + dllexcludes = os.environ.get('HG_PY2EXE_EXTRA_DLL_EXCLUDES') + if dllexcludes: + py2exedllexcludes.extend(dllexcludes.split(' ')) + if os.name == 'nt': # Windows binary file versions for exe/dll files must have the # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535 @@ -1220,16 +1364,10 @@ distclass=hgdist, options={ 'py2exe': { - 'packages': [ - 'hgdemandimport', - 'hgext', - 'email', - # implicitly imported per module policy - # (cffi wouldn't be used as a frozen exe) - 'mercurial.cext', - #'mercurial.cffi', - 'mercurial.pure', - ], + 'bundle_files': 3, + 'dll_excludes': py2exedllexcludes, + 'excludes': py2exeexcludes, + 'packages': py2exepackages, }, 'bdist_mpkg': { 'zipdist': False,
--- a/tests/artifacts/scripts/generate-churning-bundle.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/artifacts/scripts/generate-churning-bundle.py Tue Mar 19 16:36:59 2019 +0300 @@ -42,7 +42,6 @@ FILENAME='SPARSE-REVLOG-TEST-FILE' NB_LINES = 10500 ALWAYS_CHANGE_LINES = 500 -FILENAME = 'SPARSE-REVLOG-TEST-FILE' OTHER_CHANGES = 300 def nextcontent(previous_content):
--- a/tests/badserverext.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/badserverext.py Tue Mar 19 16:36:59 2019 +0300 @@ -34,6 +34,7 @@ import socket from mercurial import( + pycompat, registrar, ) @@ -48,10 +49,10 @@ default=False, ) configitem(b'badserver', b'closeafterrecvbytes', - default='0', + default=b'0', ) configitem(b'badserver', b'closeaftersendbytes', - default='0', + default=b'0', ) configitem(b'badserver', b'closebeforeaccept', default=False, @@ -74,7 +75,7 @@ object.__setattr__(self, '_closeaftersendbytes', closeaftersendbytes) def __getattribute__(self, name): - if name in ('makefile',): + if name in ('makefile', 'sendall', '_writelog'): return object.__getattribute__(self, name) return getattr(object.__getattribute__(self, '_orig'), name) @@ -85,6 +86,13 @@ def __setattr__(self, name, value): setattr(object.__getattribute__(self, '_orig'), name, value) + def _writelog(self, msg): + msg = msg.replace(b'\r', b'\\r').replace(b'\n', b'\\n') + + object.__getattribute__(self, '_logfp').write(msg) + object.__getattribute__(self, '_logfp').write(b'\n') + object.__getattribute__(self, '_logfp').flush() + def makefile(self, mode, bufsize): f = object.__getattribute__(self, '_orig').makefile(mode, bufsize) @@ -98,6 +106,38 @@ closeafterrecvbytes=closeafterrecvbytes, closeaftersendbytes=closeaftersendbytes) + def sendall(self, data, flags=0): + remaining = object.__getattribute__(self, '_closeaftersendbytes') + + # No read limit. Call original function. 
+ if not remaining: + result = object.__getattribute__(self, '_orig').sendall(data, flags) + self._writelog(b'sendall(%d) -> %s' % (len(data), data)) + return result + + if len(data) > remaining: + newdata = data[0:remaining] + else: + newdata = data + + remaining -= len(newdata) + + result = object.__getattribute__(self, '_orig').sendall(newdata, flags) + + self._writelog(b'sendall(%d from %d) -> (%d) %s' % ( + len(newdata), len(data), remaining, newdata)) + + object.__setattr__(self, '_closeaftersendbytes', remaining) + + if remaining <= 0: + self._writelog(b'write limit reached; closing socket') + object.__getattribute__(self, '_orig').shutdown(socket.SHUT_RDWR) + + raise Exception('connection closed after sending N bytes') + + return result + + # We can't adjust __class__ on socket._fileobject, so define a proxy. class fileobjectproxy(object): __slots__ = ( @@ -115,7 +155,7 @@ object.__setattr__(self, '_closeaftersendbytes', closeaftersendbytes) def __getattribute__(self, name): - if name in ('read', 'readline', 'write', '_writelog'): + if name in ('_close', 'read', 'readline', 'write', '_writelog'): return object.__getattribute__(self, name) return getattr(object.__getattribute__(self, '_orig'), name) @@ -127,21 +167,34 @@ setattr(object.__getattribute__(self, '_orig'), name, value) def _writelog(self, msg): - msg = msg.replace('\r', '\\r').replace('\n', '\\n') + msg = msg.replace(b'\r', b'\\r').replace(b'\n', b'\\n') object.__getattribute__(self, '_logfp').write(msg) - object.__getattribute__(self, '_logfp').write('\n') + object.__getattribute__(self, '_logfp').write(b'\n') object.__getattribute__(self, '_logfp').flush() + def _close(self): + # Python 3 uses an io.BufferedIO instance. Python 2 uses some file + # object wrapper. 
+ if pycompat.ispy3: + orig = object.__getattribute__(self, '_orig') + + if hasattr(orig, 'raw'): + orig.raw._sock.shutdown(socket.SHUT_RDWR) + else: + self.close() + else: + self._sock.shutdown(socket.SHUT_RDWR) + def read(self, size=-1): remaining = object.__getattribute__(self, '_closeafterrecvbytes') # No read limit. Call original function. if not remaining: result = object.__getattribute__(self, '_orig').read(size) - self._writelog('read(%d) -> (%d) (%s) %s' % (size, - len(result), - result)) + self._writelog(b'read(%d) -> (%d) (%s) %s' % (size, + len(result), + result)) return result origsize = size @@ -154,14 +207,15 @@ result = object.__getattribute__(self, '_orig').read(size) remaining -= len(result) - self._writelog('read(%d from %d) -> (%d) %s' % ( + self._writelog(b'read(%d from %d) -> (%d) %s' % ( size, origsize, len(result), result)) object.__setattr__(self, '_closeafterrecvbytes', remaining) if remaining <= 0: - self._writelog('read limit reached, closing socket') - self._sock.close() + self._writelog(b'read limit reached, closing socket') + self._close() + # This is the easiest way to abort the current request. raise Exception('connection closed after receiving N bytes') @@ -173,7 +227,7 @@ # No read limit. Call original function. 
if not remaining: result = object.__getattribute__(self, '_orig').readline(size) - self._writelog('readline(%d) -> (%d) %s' % ( + self._writelog(b'readline(%d) -> (%d) %s' % ( size, len(result), result)) return result @@ -187,14 +241,15 @@ result = object.__getattribute__(self, '_orig').readline(size) remaining -= len(result) - self._writelog('readline(%d from %d) -> (%d) %s' % ( + self._writelog(b'readline(%d from %d) -> (%d) %s' % ( size, origsize, len(result), result)) object.__setattr__(self, '_closeafterrecvbytes', remaining) if remaining <= 0: - self._writelog('read limit reached; closing socket') - self._sock.close() + self._writelog(b'read limit reached; closing socket') + self._close() + # This is the easiest way to abort the current request. raise Exception('connection closed after receiving N bytes') @@ -205,7 +260,7 @@ # No byte limit on this operation. Call original function. if not remaining: - self._writelog('write(%d) -> %s' % (len(data), data)) + self._writelog(b'write(%d) -> %s' % (len(data), data)) result = object.__getattribute__(self, '_orig').write(data) return result @@ -216,7 +271,7 @@ remaining -= len(newdata) - self._writelog('write(%d from %d) -> (%d) %s' % ( + self._writelog(b'write(%d from %d) -> (%d) %s' % ( len(newdata), len(data), remaining, newdata)) result = object.__getattribute__(self, '_orig').write(newdata) @@ -224,8 +279,9 @@ object.__setattr__(self, '_closeaftersendbytes', remaining) if remaining <= 0: - self._writelog('write limit reached; closing socket') - self._sock.close() + self._writelog(b'write limit reached; closing socket') + self._close() + raise Exception('connection closed after sending N bytes') return result @@ -239,10 +295,10 @@ super(badserver, self).__init__(ui, *args, **kwargs) recvbytes = self._ui.config(b'badserver', b'closeafterrecvbytes') - recvbytes = recvbytes.split(',') + recvbytes = recvbytes.split(b',') self.closeafterrecvbytes = [int(v) for v in recvbytes if v] sendbytes = 
self._ui.config(b'badserver', b'closeaftersendbytes') - sendbytes = sendbytes.split(',') + sendbytes = sendbytes.split(b',') self.closeaftersendbytes = [int(v) for v in sendbytes if v] # Need to inherit object so super() works. @@ -270,7 +326,7 @@ # Simulate failure to stop processing this request. raise socket.error('close before accept') - if self._ui.configbool('badserver', 'closeafteraccept'): + if self._ui.configbool(b'badserver', b'closeafteraccept'): request, client_address = super(badserver, self).get_request() request.close() raise socket.error('close after accept')
--- a/tests/check-perf-code.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/check-perf-code.py Tue Mar 19 16:36:59 2019 +0300 @@ -24,7 +24,7 @@ def modulewhitelist(names): replacement = [('.py', ''), ('.c', ''), # trim suffix - ('mercurial%s' % (os.sep), ''), # trim "mercurial/" path + ('mercurial%s' % ('/'), ''), # trim "mercurial/" path ] ignored = {'__init__'} modules = {}
--- a/tests/drawdag.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/drawdag.py Tue Mar 19 16:36:59 2019 +0300 @@ -275,7 +275,7 @@ def path(self): return self._path - def renamed(self): + def copysource(self): return None def flags(self): @@ -322,7 +322,7 @@ v.remove(leaf) def _getcomments(text): - """ + r""" >>> [pycompat.sysstr(s) for s in _getcomments(br''' ... G ... | @@ -341,7 +341,7 @@ @command(b'debugdrawdag', []) def debugdrawdag(ui, repo, **opts): - """read an ASCII graph from stdin and create changesets + r"""read an ASCII graph from stdin and create changesets The ASCII graph is like what :hg:`log -G` outputs, with each `o` replaced to the name of the node. The command will create dummy changesets and local
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/filtertraceback.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,29 @@ +#!/usr/bin/env python + +# Filters traceback lines from stdin. + +from __future__ import absolute_import, print_function + +import sys + +state = 'none' + +for line in sys.stdin: + if state == 'none': + if line.startswith('Traceback '): + state = 'tb' + + elif state == 'tb': + if line.startswith(' File '): + state = 'file' + continue + + elif not line.startswith(' '): + state = 'none' + + elif state == 'file': + # Ignore lines after " File " + state = 'tb' + continue + + print(line, end='')
--- a/tests/flagprocessorext.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/flagprocessorext.py Tue Mar 19 16:36:59 2019 +0300 @@ -107,7 +107,7 @@ # Teach exchange to use changegroup 3 for k in exchange._bundlespeccontentopts.keys(): - exchange._bundlespeccontentopts[k]["cg.version"] = "03" + exchange._bundlespeccontentopts[k][b"cg.version"] = b"03" # Register flag processors for each extension revlog.addflagprocessor(
--- a/tests/hghave.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/hghave.py Tue Mar 19 16:36:59 2019 +0300 @@ -1,6 +1,5 @@ from __future__ import absolute_import -import errno import os import re import socket @@ -118,13 +117,8 @@ is matched by the supplied regular expression. """ r = re.compile(regexp) - try: - p = subprocess.Popen( - cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - except OSError as e: - if e.errno != errno.ENOENT: - raise - ret = -1 + p = subprocess.Popen( + cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) s = p.communicate()[0] ret = p.returncode return (ignorestatus or not ret) and r.search(s) @@ -349,8 +343,8 @@ @check("svn", "subversion client and admin tools") def has_svn(): - return matchoutput('svn --version 2>&1', br'^svn, version') and \ - matchoutput('svnadmin --version 2>&1', br'^svnadmin, version') + return (matchoutput('svn --version 2>&1', br'^svn, version') and + matchoutput('svnadmin --version 2>&1', br'^svnadmin, version')) @check("svn-bindings", "subversion python bindings") def has_svn_bindings(): @@ -549,7 +543,7 @@ @check("tls1.2", "TLS 1.2 protocol support") def has_tls1_2(): from mercurial import sslutil - return 'tls1.2' in sslutil.supportedprotocols + return b'tls1.2' in sslutil.supportedprotocols @check("windows", "Windows") def has_windows(): @@ -652,6 +646,13 @@ # chg disables demandimport intentionally for performance wins. 
return ((not has_chg()) and os.environ.get('HGDEMANDIMPORT') != 'disable') +@checkvers("py", "Python >= %s", (2.7, 3.5, 3.6, 3.7, 3.8, 3.9)) +def has_python_range(v): + major, minor = v.split('.')[0:2] + py_major, py_minor = sys.version_info.major, sys.version_info.minor + + return (py_major, py_minor) >= (int(major), int(minor)) + @check("py3", "running with Python 3.x") def has_py3(): return 3 == sys.version_info[0] @@ -721,7 +722,7 @@ @check("clang-libfuzzer", "clang new enough to include libfuzzer") def has_clang_libfuzzer(): - mat = matchoutput('clang --version', b'clang version (\d)') + mat = matchoutput('clang --version', br'clang version (\d)') if mat: # libfuzzer is new in clang 6 return int(mat.group(1)) > 5 @@ -729,7 +730,7 @@ @check("clang-6.0", "clang 6.0 with version suffix (libfuzzer included)") def has_clang60(): - return matchoutput('clang-6.0 --version', b'clang version 6\.') + return matchoutput('clang-6.0 --version', br'clang version 6\.') @check("xdiff", "xdiff algorithm") def has_xdiff(): @@ -810,7 +811,7 @@ # WITH clause not supported return False - return matchoutput('sqlite3 -version', b'^3\.\d+') + return matchoutput('sqlite3 -version', br'^3\.\d+') @check('vcr', 'vcr http mocking library') def has_vcr(): @@ -821,3 +822,10 @@ except (ImportError, AttributeError): pass return False + +@check('emacs', 'GNU Emacs') +def has_emacs(): + # Our emacs lisp uses `with-eval-after-load` which is new in emacs + # 24.4, so we allow emacs 24.4, 24.5, and 25+ (24.5 was the last + # 24 release) + return matchoutput('emacs --version', b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/httpserverauth.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,113 @@ +from __future__ import absolute_import + +import base64 +import hashlib + +from mercurial.hgweb import common +from mercurial import ( + node, +) + +def parse_keqv_list(req, l): + """Parse list of key=value strings where keys are not duplicated.""" + parsed = {} + for elt in l: + k, v = elt.split(b'=', 1) + if v[0:1] == b'"' and v[-1:] == b'"': + v = v[1:-1] + parsed[k] = v + return parsed + +class digestauthserver(object): + def __init__(self): + self._user_hashes = {} + + def gethashers(self): + def _md5sum(x): + m = hashlib.md5() + m.update(x) + return node.hex(m.digest()) + + h = _md5sum + + kd = lambda s, d, h=h: h(b"%s:%s" % (s, d)) + return h, kd + + def adduser(self, user, password, realm): + h, kd = self.gethashers() + a1 = h(b'%s:%s:%s' % (user, realm, password)) + self._user_hashes[(user, realm)] = a1 + + def makechallenge(self, realm): + # We aren't testing the protocol here, just that the bytes make the + # proper round trip. So hardcoded seems fine. 
+ nonce = b'064af982c5b571cea6450d8eda91c20d' + return b'realm="%s", nonce="%s", algorithm=MD5, qop="auth"' % (realm, + nonce) + + def checkauth(self, req, header): + log = req.rawenv[b'wsgi.errors'] + + h, kd = self.gethashers() + resp = parse_keqv_list(req, header.split(b', ')) + + if resp.get(b'algorithm', b'MD5').upper() != b'MD5': + log.write(b'Unsupported algorithm: %s' % resp.get(b'algorithm')) + raise common.ErrorResponse(common.HTTP_FORBIDDEN, + b"unknown algorithm") + user = resp[b'username'] + realm = resp[b'realm'] + nonce = resp[b'nonce'] + + ha1 = self._user_hashes.get((user, realm)) + if not ha1: + log.write(b'No hash found for user/realm "%s/%s"' % (user, realm)) + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b"bad user") + + qop = resp.get(b'qop', b'auth') + if qop != b'auth': + log.write(b"Unsupported qop: %s" % qop) + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b"bad qop") + + cnonce, ncvalue = resp.get(b'cnonce'), resp.get(b'nc') + if not cnonce or not ncvalue: + log.write(b'No cnonce (%s) or ncvalue (%s)' % (cnonce, ncvalue)) + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b"no cnonce") + + a2 = b'%s:%s' % (req.method, resp[b'uri']) + noncebit = b"%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, h(a2)) + + respdig = kd(ha1, noncebit) + if respdig != resp[b'response']: + log.write(b'User/realm "%s/%s" gave %s, but expected %s' + % (user, realm, resp[b'response'], respdig)) + return False + + return True + +digest = digestauthserver() + +def perform_authentication(hgweb, req, op): + auth = req.headers.get(b'Authorization') + + if req.headers.get(b'X-HgTest-AuthType') == b'Digest': + if not auth: + challenge = digest.makechallenge(b'mercurial') + raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', + [(b'WWW-Authenticate', b'Digest %s' % challenge)]) + + if not digest.checkauth(req, auth[7:]): + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') + + return + + if not auth: + raise 
common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', + [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) + + if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', b'pass']: + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') + +def extsetup(ui): + common.permhooks.insert(0, perform_authentication) + digest.adduser(b'user', b'pass', b'mercurial')
--- a/tests/notcapable Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/notcapable Tue Mar 19 16:36:59 2019 +0300 @@ -11,7 +11,7 @@ extensions.wrapfunction(repository.peer, 'capable', wrapcapable) extensions.wrapfunction(localrepo.localrepository, 'peer', wrappeer) def wrapcapable(orig, self, name, *args, **kwargs): - if name in '$CAP'.split(' '): + if name in b'$CAP'.split(b' '): return False return orig(self, name, *args, **kwargs) def wrappeer(orig, self):
--- a/tests/phabricator/phabsend-create-alpha.json Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/phabricator/phabsend-create-alpha.json Tue Mar 19 16:36:59 2019 +0300 @@ -1,590 +1,617 @@ { - "version": 1, "interactions": [ { + "request": { + "method": "POST", + "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish", + "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "79" + ] + } + }, "response": { "status": { - "message": "OK", - "code": 200 - }, + "code": 200, + "message": "OK" + }, "body": { "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}" - }, + }, "headers": { - "x-xss-protection": [ - "1; mode=block" - ], "expires": [ "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2F4wycgjx3wajuukr7ggfpqedpe7czucr7mvmaems3; expires=Thu, 14-Sep-2023 04:47:40 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:23 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], 
"x-content-type-options": [ "nosniff" - ], + ], + "server": [ + "Apache/2.4.10 (Debian)" + ], + "set-cookie": [ + "phsid=A%2Fpywot5xerq4gs2tjxw3gnadzdg6vomqmfcnwqddp; expires=Fri, 01-Mar-2024 00:12:23 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], "strict-transport-security": [ "max-age=0; includeSubdomains; preload" - ], - "server": [ - "Apache/2.4.10 (Debian)" - ], - "date": [ - "Sat, 15 Sep 2018 04:47:40 GMT" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ] - } - }, - "request": { - "method": "POST", - "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search", - "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish", - "headers": { - "accept": [ - "application/mercurial-0.1" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "host": [ - "phab.mercurial-scm.org" - ], - "content-length": [ - "79" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+866-5f07496726a1+20180915)" ] } } - }, + }, { + "request": { + "method": "POST", + "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Balpha%0A", + "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "235" + ] + } + }, "response": { "status": { - "message": "OK", - "code": 200 - }, + "code": 200, + "message": "OK" + }, "body": { - "string": 
"{\"result\":{\"id\":11072,\"phid\":\"PHID-DIFF-xm6cw76uivc6g56xiuv2\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/11072\\/\"},\"error_code\":null,\"error_info\":null}" - }, + "string": "{\"result\":{\"id\":14303,\"phid\":\"PHID-DIFF-allzuauvigfjpv4z6dpi\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/14303\\/\"},\"error_code\":null,\"error_info\":null}" + }, "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], "x-xss-protection": [ "1; mode=block" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Fll65pt562b6d7ifhjva4jwqqzxh2oopj4tuc6lfa; expires=Thu, 14-Sep-2023 04:47:40 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:24 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + ], "server": [ "Apache/2.4.10 (Debian)" - ], - "date": [ - "Sat, 15 Sep 2018 04:47:40 GMT" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ] - } - }, - "request": { - "method": "POST", - "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff", - "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Balpha%0A&api.token=cli-hahayouwish", - "headers": { - "accept": [ - "application/mercurial-0.1" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "host": [ - "phab.mercurial-scm.org" - ], - "content-length": [ - "235" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+866-5f07496726a1+20180915)" + ], + "set-cookie": [ + 
"phsid=A%2F2n2dlkkwzljrpzfghpdsflbt4ftnrwcc446dzcy5; expires=Fri, 01-Mar-2024 00:12:24 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] } } - }, + }, { + "request": { + "method": "POST", + "body": "diff_id=14303&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22d386117f30e6b1282897bdbde75ac21e095163d4%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "264" + ] + } + }, "response": { "status": { - "message": "OK", - "code": 200 - }, + "code": 200, + "message": "OK" + }, "body": { "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" - }, + }, "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], "x-xss-protection": [ "1; mode=block" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2F5ivszbehkvbetlnks7omsqmbsu7r5by3p3yqw3ep; expires=Thu, 14-Sep-2023 04:47:41 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:25 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + ], "server": [ "Apache/2.4.10 (Debian)" - ], - "date": [ - "Sat, 15 Sep 2018 04:47:41 GMT" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ 
- "no-store" - ] - } - }, - "request": { - "method": "POST", - "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", - "body": "data=%7B%22date%22%3A+%220+0%22%2C+%22node%22%3A+%225206a4fa1e6cd7dbc027640267c109e05a9d2341%22%2C+%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%7D&name=hg%3Ameta&diff_id=11072&api.token=cli-hahayouwish", - "headers": { - "accept": [ - "application/mercurial-0.1" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "host": [ - "phab.mercurial-scm.org" - ], - "content-length": [ - "264" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+866-5f07496726a1+20180915)" + ], + "set-cookie": [ + "phsid=A%2F5mq3t25wu5igv7oufpwcoy32fveozo7wn5wni3gw; expires=Fri, 01-Mar-2024 00:12:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] } } - }, + }, { + "request": { + "method": "POST", + "body": "diff_id=14303&data=%7B%22d386117f30e6b1282897bdbde75ac21e095163d4%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "227" + ] + } + }, "response": { "status": { - "message": "OK", - "code": 200 - }, + "code": 200, + "message": "OK" + }, "body": { "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" - }, + }, "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], "x-xss-protection": [ "1; mode=block" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - 
"phsid=A%2Fxvwxxrmwpjntx6dlohrstyox7yjssdbzufiwygcg; expires=Thu, 14-Sep-2023 04:47:41 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:25 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + ], "server": [ "Apache/2.4.10 (Debian)" - ], - "date": [ - "Sat, 15 Sep 2018 04:47:41 GMT" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ] - } - }, - "request": { - "method": "POST", - "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", - "body": "data=%7B%225206a4fa1e6cd7dbc027640267c109e05a9d2341%22%3A+%7B%22time%22%3A+0.0%2C+%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%7D%7D&name=local%3Acommits&diff_id=11072&api.token=cli-hahayouwish", - "headers": { - "accept": [ - "application/mercurial-0.1" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "host": [ - "phab.mercurial-scm.org" - ], - "content-length": [ - "227" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+866-5f07496726a1+20180915)" + ], + "set-cookie": [ + "phsid=A%2F5nja6g4cnpt63ctjjwykxyceyb7kokfptrzbejoc; expires=Fri, 01-Mar-2024 00:12:25 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] } } - }, + }, { + "request": { + "method": "POST", + "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC", + "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 
(Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "93" + ] + } + }, "response": { "status": { - "message": "OK", - "code": 200 - }, + "code": 200, + "message": "OK" + }, "body": { - "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}" - }, + "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test \\u20ac\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}" + }, "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], "x-xss-protection": [ "1; mode=block" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Fy3s5iysh6h2javfdo2u7myspyjypv4mvojegqr6j; expires=Thu, 14-Sep-2023 04:47:42 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:26 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + ], "server": [ "Apache/2.4.10 (Debian)" - ], - "date": [ - "Sat, 15 Sep 2018 04:47:42 GMT" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ] - } - }, - "request": { - "method": "POST", - "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage", - "body": "corpus=create+alpha+for+phabricator+test&api.token=cli-hahayouwish", - "headers": { - "accept": [ - "application/mercurial-0.1" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "host": [ - "phab.mercurial-scm.org" - 
], - "content-length": [ - "83" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+866-5f07496726a1+20180915)" + ], + "set-cookie": [ + "phsid=A%2Fkrxawhyvcd4jhv77inuwdmzcci4f7kql6c7l3smz; expires=Fri, 01-Mar-2024 00:12:26 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] } } - }, + }, { + "request": { + "method": "POST", + "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-allzuauvigfjpv4z6dpi&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC&api.token=cli-hahayouwish", + "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "252" + ] + } + }, "response": { "status": { - "message": "OK", - "code": 200 - }, + "code": 200, + "message": "OK" + }, "body": { - "string": "{\"result\":{\"object\":{\"id\":4596,\"phid\":\"PHID-DREV-bntcdwe74cw3vwkzt6nq\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-mnqxquobbhdgttd\"},{\"phid\":\"PHID-XACT-DREV-nd34pqrjamxbhop\"},{\"phid\":\"PHID-XACT-DREV-4ka4rghn6b7xooc\"},{\"phid\":\"PHID-XACT-DREV-mfuvfyiijdqwpyg\"},{\"phid\":\"PHID-XACT-DREV-ckar54h6yenx24s\"}]},\"error_code\":null,\"error_info\":null}" - }, + "string": "{\"result\":{\"object\":{\"id\":6054,\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-efgl4j4fesixjog\"},{\"phid\":\"PHID-XACT-DREV-xj7ksjeyfadwf5m\"},{\"phid\":\"PHID-XACT-DREV-gecx5zw42kkuffc\"},{\"phid\":\"PHID-XACT-DREV-asda7zcwgzdadoi\"},{\"phid\":\"PHID-XACT-DREV-ku26t33y6iiugjw\"}]},\"error_code\":null,\"error_info\":null}" + }, "headers": { + "expires": [ 
+ "Sat, 01 Jan 2000 00:00:00 GMT" + ], "x-xss-protection": [ "1; mode=block" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Foe7kd7hhldo25tzbegntkyfxm6wnztgdfmsfubo2; expires=Thu, 14-Sep-2023 04:47:42 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:27 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + ], "server": [ "Apache/2.4.10 (Debian)" - ], - "date": [ - "Sat, 15 Sep 2018 04:47:42 GMT" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ] - } - }, - "request": { - "method": "POST", - "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit", - "body": "transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-xm6cw76uivc6g56xiuv2&transactions%5B0%5D%5Btype%5D=update&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test&transactions%5B1%5D%5Btype%5D=title&api.token=cli-hahayouwish", - "headers": { - "accept": [ - "application/mercurial-0.1" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "host": [ - "phab.mercurial-scm.org" - ], - "content-length": [ - "242" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+866-5f07496726a1+20180915)" + ], + "set-cookie": [ + "phsid=A%2Fjwgcqb5hvbltjq4jqbpauz7rmmhpuh2rb7phsdmf; expires=Fri, 01-Mar-2024 00:12:27 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] } } - }, + }, { + "request": { + "method": "POST", + "body": "api.token=cli-hahayouwish&ids%5B0%5D=6054", + "uri": "https://phab.mercurial-scm.org//api/differential.query", + "headers": { + "content-type": [ + 
"application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "58" + ] + } + }, "response": { "status": { - "message": "OK", - "code": 200 - }, + "code": 200, + "message": "OK" + }, "body": { - "string": "{\"result\":[{\"id\":\"4596\",\"phid\":\"PHID-DREV-bntcdwe74cw3vwkzt6nq\",\"title\":\"create alpha for phabricator test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D4596\",\"dateCreated\":\"1536986862\",\"dateModified\":\"1536986862\",\"authorPHID\":\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-xm6cw76uivc6g56xiuv2\",\"diffs\":[\"11072\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}" - }, + "string": "{\"result\":[{\"id\":\"6054\",\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6054\",\"dateCreated\":\"1551571947\",\"dateModified\":\"1551571947\",\"authorPHID\":\"PHID-USER-5iy6mkoveguhm2zthvww\",\"status\":\"0\",\"statusName\":\"Needs 
Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-allzuauvigfjpv4z6dpi\",\"diffs\":[\"14303\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}" + }, "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], "x-xss-protection": [ "1; mode=block" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2F5d2bgafhoqhg5thqxeu6y4fngq7lqezf5h6eo5pd; expires=Thu, 14-Sep-2023 04:47:43 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:28 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + ], "server": [ "Apache/2.4.10 (Debian)" - ], - "date": [ - "Sat, 15 Sep 2018 04:47:43 GMT" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ] - } - }, - "request": { - "method": "POST", - "uri": "https://phab.mercurial-scm.org//api/differential.query", - "body": "api.token=cli-hahayouwish&ids%5B0%5D=4596", - "headers": { - "accept": [ - "application/mercurial-0.1" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "host": [ - "phab.mercurial-scm.org" - ], - "content-length": [ - "58" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+866-5f07496726a1+20180915)" + ], + "set-cookie": [ + "phsid=A%2F3lgkbbyaa646ng5klghjyehsbjxtaqblipnvocuz; expires=Fri, 01-Mar-2024 00:12:28 GMT; 
Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] } } - }, + }, { + "request": { + "method": "POST", + "body": "diff_id=14303&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22cb03845d6dd98c72bec766c7ed08c693cc49817a%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "264" + ] + } + }, "response": { "status": { - "message": "OK", - "code": 200 - }, + "code": 200, + "message": "OK" + }, "body": { "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" - }, + }, "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], "x-xss-protection": [ "1; mode=block" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2F2cewrqifmvko6evm2sy2nvksvcvhk6hpsj36lcv2; expires=Thu, 14-Sep-2023 04:47:43 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:28 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + ], "server": [ "Apache/2.4.10 (Debian)" - ], - "date": [ - "Sat, 15 Sep 2018 04:47:43 GMT" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ] - } - }, - "request": { - "method": "POST", - "uri": 
"https://phab.mercurial-scm.org//api/differential.setdiffproperty", - "body": "data=%7B%22date%22%3A+%220+0%22%2C+%22node%22%3A+%22d8f232f7d799e1064d3da179df41a2b5d04334e9%22%2C+%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%7D&name=hg%3Ameta&diff_id=11072&api.token=cli-hahayouwish", - "headers": { - "accept": [ - "application/mercurial-0.1" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "host": [ - "phab.mercurial-scm.org" - ], - "content-length": [ - "264" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+866-5f07496726a1+20180915)" + ], + "set-cookie": [ + "phsid=A%2Fwjxvlsjqmqwvcljfv6oe2sbometi3gebps6vzrlw; expires=Fri, 01-Mar-2024 00:12:28 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] } } - }, + }, { + "request": { + "method": "POST", + "body": "diff_id=14303&data=%7B%22cb03845d6dd98c72bec766c7ed08c693cc49817a%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "227" + ] + } + }, "response": { "status": { - "message": "OK", - "code": 200 - }, + "code": 200, + "message": "OK" + }, "body": { "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" - }, + }, "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], "x-xss-protection": [ "1; mode=block" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Fped6v7jlldydnkfolkdmecyyjrkciqhkr7opvbt2; expires=Thu, 
14-Sep-2023 04:47:44 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:29 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + ], "server": [ "Apache/2.4.10 (Debian)" - ], - "date": [ - "Sat, 15 Sep 2018 04:47:44 GMT" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ] - } - }, - "request": { - "method": "POST", - "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", - "body": "data=%7B%22d8f232f7d799e1064d3da179df41a2b5d04334e9%22%3A+%7B%22time%22%3A+0.0%2C+%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%7D%7D&name=local%3Acommits&diff_id=11072&api.token=cli-hahayouwish", - "headers": { - "accept": [ - "application/mercurial-0.1" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "host": [ - "phab.mercurial-scm.org" - ], - "content-length": [ - "227" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+866-5f07496726a1+20180915)" + ], + "set-cookie": [ + "phsid=A%2Foeyncgzaanzmnhgfc7ecvmu5pq7qju7ewq6tvgrp; expires=Fri, 01-Mar-2024 00:12:29 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] } } } - ] + ], + "version": 1 }
--- a/tests/phabricator/phabsend-update-alpha-create-beta.json Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/phabricator/phabsend-update-alpha-create-beta.json Tue Mar 19 16:36:59 2019 +0300 @@ -1,915 +1,1025 @@ { - "version": 1, "interactions": [ { "request": { - "body": "api.token=cli-hahayouwish&revisionIDs%5B0%5D=4596", - "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs", + "method": "POST", + "body": "api.token=cli-hahayouwish&revisionIDs%5B0%5D=6054", + "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs", "headers": { - "content-length": [ - "66" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "66" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, - "headers": { - "server": [ - "Apache/2.4.10 (Debian)" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2F5bjqjyefdbiq65cc3qepzxq7ncczgfqo2xxsybaf; expires=Thu, 14-Sep-2023 04:53:46 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], - "x-xss-protection": [ - "1; mode=block" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:46 GMT" - ] - }, + }, "body": { - "string": 
"{\"result\":{\"11073\":{\"id\":\"11073\",\"revisionID\":\"4596\",\"dateCreated\":\"1536986866\",\"dateModified\":\"1536986868\",\"sourceControlBaseRevision\":null,\"sourceControlPath\":null,\"sourceControlSystem\":null,\"branch\":null,\"bookmark\":null,\"creationMethod\":\"web\",\"description\":null,\"unitStatus\":\"4\",\"lintStatus\":\"4\",\"changes\":[{\"id\":\"24417\",\"metadata\":{\"line:first\":1},\"oldPath\":null,\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"2\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"2\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+alpha\\n+more\\n\"}]}],\"properties\":{\"hg:meta\":{\"parent\":\"0000000000000000000000000000000000000000\",\"node\":\"f70265671c65ab4b5416e611a6bd61887c013122\",\"user\":\"test\",\"date\":\"0 
0\"},\"local:commits\":{\"f70265671c65ab4b5416e611a6bd61887c013122\":{\"time\":0,\"authorEmail\":\"test\",\"author\":\"test\"}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"11072\":{\"id\":\"11072\",\"revisionID\":\"4596\",\"dateCreated\":\"1536986860\",\"dateModified\":\"1536986862\",\"sourceControlBaseRevision\":null,\"sourceControlPath\":null,\"sourceControlSystem\":null,\"branch\":null,\"bookmark\":null,\"creationMethod\":\"web\",\"description\":null,\"unitStatus\":\"4\",\"lintStatus\":\"4\",\"changes\":[{\"id\":\"24416\",\"metadata\":{\"line:first\":1},\"oldPath\":null,\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+alpha\\n\"}]}],\"properties\":{\"hg:meta\":{\"date\":\"0 0\",\"node\":\"d8f232f7d799e1064d3da179df41a2b5d04334e9\",\"user\":\"test\",\"parent\":\"0000000000000000000000000000000000000000\"},\"local:commits\":{\"d8f232f7d799e1064d3da179df41a2b5d04334e9\":{\"time\":0,\"author\":\"test\",\"authorEmail\":\"test\"}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}" - } - } - }, - { - "request": { - "body": "diff_id=11073&api.token=cli-hahayouwish&data=%7B%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22f70265671c65ab4b5416e611a6bd61887c013122%22%2C+%22user%22%3A+%22test%22%2C+%22date%22%3A+%220+0%22%7D&name=hg%3Ameta", - "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "string": 
"{\"result\":{\"14303\":{\"id\":\"14303\",\"revisionID\":\"6054\",\"dateCreated\":\"1551571944\",\"dateModified\":\"1551571947\",\"sourceControlBaseRevision\":null,\"sourceControlPath\":null,\"sourceControlSystem\":null,\"branch\":null,\"bookmark\":null,\"creationMethod\":\"web\",\"description\":null,\"unitStatus\":\"4\",\"lintStatus\":\"4\",\"changes\":[{\"id\":\"32287\",\"metadata\":{\"line:first\":1},\"oldPath\":null,\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":{\"unix:filemode\":\"100644\"},\"type\":\"1\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"0\",\"hunks\":[{\"oldOffset\":\"0\",\"newOffset\":\"1\",\"oldLength\":\"0\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"+alpha\\n\"}]}],\"properties\":{\"hg:meta\":{\"user\":\"test\",\"parent\":\"0000000000000000000000000000000000000000\",\"node\":\"cb03845d6dd98c72bec766c7ed08c693cc49817a\",\"date\":\"0 0\"},\"local:commits\":{\"cb03845d6dd98c72bec766c7ed08c693cc49817a\":{\"author\":\"test\",\"authorEmail\":\"test\",\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}" + }, "headers": { - "content-length": [ - "264" - ], - "host": [ - "phab.mercurial-scm.org" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "accept": [ - "application/mercurial-0.1" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" - ] - }, - "method": "POST" - }, - "response": { - "status": { - "code": 200, - "message": "OK" - }, - "headers": { - "server": [ - "Apache/2.4.10 (Debian)" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], "expires": [ "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Ff6o4ingm2wmr3ma4aht2kytfrrxvrkitj6ipkf5k; expires=Thu, 
14-Sep-2023 04:53:46 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], "x-xss-protection": [ "1; mode=block" - ], - "content-type": [ - "application/json" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:30 GMT" + ], + "x-frame-options": [ + "Deny" + ], "cache-control": [ "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:46 GMT" - ] - }, - "body": { - "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" - } - } - }, - { - "request": { - "body": "diff_id=11073&api.token=cli-hahayouwish&data=%7B%22f70265671c65ab4b5416e611a6bd61887c013122%22%3A+%7B%22time%22%3A+0.0%2C+%22authorEmail%22%3A+%22test%22%2C+%22author%22%3A+%22test%22%7D%7D&name=local%3Acommits", - "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", - "headers": { - "content-length": [ - "227" - ], - "host": [ - "phab.mercurial-scm.org" - ], + ], "content-type": [ - "application/x-www-form-urlencoded" - ], - "accept": [ - "application/mercurial-0.1" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" - ] - }, - "method": "POST" - }, - "response": { - "status": { - "code": 200, - "message": "OK" - }, - "headers": { + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], "server": [ "Apache/2.4.10 (Debian)" - ], + ], + "set-cookie": [ + "phsid=A%2Fnf3xdxgvvgky277foc7s2p6xrgtsvn4bzmayrbmb; expires=Fri, 01-Mar-2024 00:12:30 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], "strict-transport-security": [ "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2F4fitvy4kno46zkca6hq7npvuxvnh4dxlbvscmodb; expires=Thu, 14-Sep-2023 04:53:47 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], - "x-xss-protection": [ - 
"1; mode=block" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:47 GMT" ] - }, - "body": { - "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test%0A%0ADifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD4596", - "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage", + "method": "POST", + "body": "constraints%5Bcallsigns%5D%5B0%5D=HG&api.token=cli-hahayouwish", + "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search", "headers": { - "content-length": [ - "158" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "79" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}" + }, + "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + 
"Sun, 03 Mar 2019 00:12:31 GMT" + ], + "x-frame-options": [ + "Deny" + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], + "server": [ + "Apache/2.4.10 (Debian)" + ], + "set-cookie": [ + "phsid=A%2Fmlq7cl6pakmia2uecfcevwhdl3hyqe6rdb2y7usm; expires=Fri, 01-Mar-2024 00:12:31 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" + ] + } + } + }, + { + "request": { + "method": "POST", + "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Falpha+b%2Falpha%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Falpha%0A%40%40+-0%2C0+%2B1%2C2+%40%40%0A%2Balpha%0A%2Bmore%0A", + "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff", "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "245" + ] + } + }, + "response": { + "status": { + "code": 200, + "message": "OK" + }, + "body": { + "string": "{\"result\":{\"id\":14304,\"phid\":\"PHID-DIFF-3wv2fwmzp27uamb66xxg\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/14304\\/\"},\"error_code\":null,\"error_info\":null}" + }, + "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:32 GMT" + ], + "x-frame-options": [ + "Deny" + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], "server": [ "Apache/2.4.10 (Debian)" - ], + ], + "set-cookie": [ + "phsid=A%2Fptjtujvqlcwhzs4yhneogb323aqessc5axlu4rif; expires=Fri, 
01-Mar-2024 00:12:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], "strict-transport-security": [ "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2F7u2j7nsrtq2dtxqws7pnsnjyaufsamwj44e45euz; expires=Thu, 14-Sep-2023 04:53:47 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], - "x-xss-protection": [ - "1; mode=block" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:47 GMT" ] - }, - "body": { - "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha for phabricator test\",\"revisionID\":4596},\"revisionIDFieldInfo\":{\"value\":4596,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": "api.token=cli-hahayouwish&objectIdentifier=4596&transactions%5B0%5D%5Btype%5D=title&transactions%5B0%5D%5Bvalue%5D=create+alpha+for+phabricator+test", - "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit", + "method": "POST", + "body": "diff_id=14304&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%220000000000000000000000000000000000000000%22%2C+%22node%22%3A+%22939d862f03181a366fea64a540baf0bb33f85d92%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", "headers": { - "content-length": [ - "165" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], 
+ "content-length": [ + "264" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" + }, "headers": { - "server": [ - "Apache/2.4.10 (Debian)" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], "expires": [ "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2F7ubtculubfazivfxjxbmnyt3wzjcgdxnfdn57t42; expires=Thu, 14-Sep-2023 04:53:48 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], "x-xss-protection": [ "1; mode=block" - ], - "content-type": [ - "application/json" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:32 GMT" + ], + "x-frame-options": [ + "Deny" + ], "cache-control": [ "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:47 GMT" + ], + "content-type": [ + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], + "server": [ + "Apache/2.4.10 (Debian)" + ], + "set-cookie": [ + "phsid=A%2Feho2462w6mulsjeoz3e4rwgf37aekqwgpqmarn2f; expires=Fri, 01-Mar-2024 00:12:32 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] - }, - "body": { - "string": "{\"result\":{\"object\":{\"id\":\"4596\",\"phid\":\"PHID-DREV-bntcdwe74cw3vwkzt6nq\"},\"transactions\":[]},\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": "api.token=cli-hahayouwish&constraints%5Bcallsigns%5D%5B0%5D=HG", - "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search", + "method": "POST", + "body": 
"diff_id=14304&data=%7B%22939d862f03181a366fea64a540baf0bb33f85d92%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", "headers": { - "content-length": [ - "79" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "227" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" + }, "headers": { - "server": [ - "Apache/2.4.10 (Debian)" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:33 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], + ], + "server": [ + "Apache/2.4.10 (Debian)" + ], "set-cookie": [ - "phsid=A%2Fdpvy3rwephm5krs7posuadvjmkh7o7wbytgdhisv; expires=Thu, 14-Sep-2023 04:53:48 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], - "x-xss-protection": [ - "1; mode=block" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:48 GMT" + "phsid=A%2F4ca3h5qhtwgn55t3zznczixyt2st4tm44t23aceg; expires=Fri, 01-Mar-2024 00:12:33 
GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] - }, - "body": { - "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": "api.token=cli-hahayouwish&diff=diff+--git+a%2Fbeta+b%2Fbeta%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Bbeta%0A&repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3", - "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff", + "method": "POST", + "body": "api.token=cli-hahayouwish&corpus=create+alpha+for+phabricator+test+%E2%82%AC%0A%0ADifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD6054", + "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage", "headers": { - "content-length": [ - "231" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "168" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create alpha 
for phabricator test \\u20ac\",\"revisionID\":6054},\"revisionIDFieldInfo\":{\"value\":6054,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}" + }, "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:34 GMT" + ], + "x-frame-options": [ + "Deny" + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], "server": [ "Apache/2.4.10 (Debian)" - ], + ], + "set-cookie": [ + "phsid=A%2F7pvtbpw2waiblbsbydew3vfpulqnccf4647ymipq; expires=Fri, 01-Mar-2024 00:12:34 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], "strict-transport-security": [ "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], + ] + } + } + }, + { + "request": { + "method": "POST", + "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-3wv2fwmzp27uamb66xxg&transactions%5B1%5D%5Btype%5D=title&transactions%5B1%5D%5Bvalue%5D=create+alpha+for+phabricator+test+%E2%82%AC&objectIdentifier=6054", + "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "274" + ] + } + }, + "response": { + "status": { + "code": 200, + "message": "OK" + }, + "body": { + "string": "{\"result\":{\"object\":{\"id\":\"6054\",\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-mc2gfyoyhkfz7dy\"}]},\"error_code\":null,\"error_info\":null}" + }, + "headers": { "expires": [ 
"Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Fafqgsnm7vbqi3vyfg5c7xgxyiv7fgi77vauw6wnv; expires=Thu, 14-Sep-2023 04:53:49 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], "x-xss-protection": [ "1; mode=block" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:34 GMT" + ], + "x-frame-options": [ + "Deny" + ], + "cache-control": [ + "no-store" + ], "content-type": [ "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:49 GMT" + ], + "x-content-type-options": [ + "nosniff" + ], + "server": [ + "Apache/2.4.10 (Debian)" + ], + "set-cookie": [ + "phsid=A%2Fhmyuw3lg6h4joaswqnfcmnzdkp6p2qxotsvahb7l; expires=Fri, 01-Mar-2024 00:12:34 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] - }, - "body": { - "string": "{\"result\":{\"id\":11074,\"phid\":\"PHID-DIFF-sitmath22fwgsfsbdmne\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/11074\\/\"},\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": "diff_id=11074&api.token=cli-hahayouwish&data=%7B%22parent%22%3A+%22f70265671c65ab4b5416e611a6bd61887c013122%22%2C+%22node%22%3A+%221a5640df7bbfc26fc4f6ef38e4d1581d5b2a3122%22%2C+%22user%22%3A+%22test%22%2C+%22date%22%3A+%220+0%22%7D&name=hg%3Ameta", - "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "method": "POST", + "body": "repositoryPHID=PHID-REPO-bvunnehri4u2isyr7bc3&api.token=cli-hahayouwish&diff=diff+--git+a%2Fbeta+b%2Fbeta%0Anew+file+mode+100644%0A---+%2Fdev%2Fnull%0A%2B%2B%2B+b%2Fbeta%0A%40%40+-0%2C0+%2B1%2C1+%40%40%0A%2Bbeta%0A", + "uri": "https://phab.mercurial-scm.org//api/differential.createrawdiff", "headers": { - "content-length": [ - "264" - ], + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + 
"application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], "host": [ "phab.mercurial-scm.org" - ], - "content-type": [ - "application/x-www-form-urlencoded" - ], - "accept": [ - "application/mercurial-0.1" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + ], + "content-length": [ + "231" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":{\"id\":14305,\"phid\":\"PHID-DIFF-pofynzhmmqm2czm33teg\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/14305\\/\"},\"error_code\":null,\"error_info\":null}" + }, "headers": { - "server": [ - "Apache/2.4.10 (Debian)" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:35 GMT" + ], "x-frame-options": [ "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Frvpld6nyjmtrq3qynmldbquhgwbrhcdhythbot6r; expires=Thu, 14-Sep-2023 04:53:49 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], - "x-xss-protection": [ - "1; mode=block" - ], + ], + "cache-control": [ + "no-store" + ], "content-type": [ "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:49 GMT" + ], + "x-content-type-options": [ + "nosniff" + ], + "server": [ + "Apache/2.4.10 (Debian)" + ], + "set-cookie": [ + "phsid=A%2F2xpzt6bryn7n3gug3ll7iu2gfqyy4zss5d7nolew; expires=Fri, 01-Mar-2024 00:12:35 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] - }, - 
"body": { - "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": "diff_id=11074&api.token=cli-hahayouwish&data=%7B%221a5640df7bbfc26fc4f6ef38e4d1581d5b2a3122%22%3A+%7B%22time%22%3A+0.0%2C+%22authorEmail%22%3A+%22test%22%2C+%22author%22%3A+%22test%22%7D%7D&name=local%3Acommits", - "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "method": "POST", + "body": "diff_id=14305&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%22939d862f03181a366fea64a540baf0bb33f85d92%22%2C+%22node%22%3A+%22f55f947ed0f8ad80a04b7e87a0bf9febda2070b1%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", "headers": { - "content-length": [ - "227" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "264" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" + }, "headers": { - "server": [ - "Apache/2.4.10 (Debian)" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], "expires": [ "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Flpkv333zitgztqx2clpg2uibjy633myliembguf2; expires=Thu, 14-Sep-2023 04:53:50 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], "x-xss-protection": [ "1; mode=block" - ], - "content-type": [ - "application/json" - ], + ], + 
"transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:36 GMT" + ], + "x-frame-options": [ + "Deny" + ], "cache-control": [ "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:49 GMT" - ] - }, - "body": { - "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" - } - } - }, - { - "request": { - "body": "api.token=cli-hahayouwish&corpus=create+beta+for+phabricator+test", - "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage", - "headers": { - "content-length": [ - "82" - ], - "host": [ - "phab.mercurial-scm.org" - ], + ], "content-type": [ - "application/x-www-form-urlencoded" - ], - "accept": [ - "application/mercurial-0.1" - ], - "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" - ] - }, - "method": "POST" - }, - "response": { - "status": { - "code": 200, - "message": "OK" - }, - "headers": { + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], "server": [ "Apache/2.4.10 (Debian)" - ], + ], + "set-cookie": [ + "phsid=A%2Fygzbpe74xh6shrejkd3tj32t4gaqnvumy63iudrd; expires=Fri, 01-Mar-2024 00:12:36 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], "strict-transport-security": [ "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Fav6ovbqxoy3dijysouoabcz7jqescejugeedwspi; expires=Thu, 14-Sep-2023 04:53:50 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], - "x-xss-protection": [ - "1; mode=block" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:50 GMT" ] - }, - "body": { - "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create beta for phabricator 
test\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": "api.token=cli-hahayouwish&transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-sitmath22fwgsfsbdmne&transactions%5B1%5D%5Btype%5D=summary&transactions%5B1%5D%5Bvalue%5D=Depends+on+D4596&transactions%5B2%5D%5Btype%5D=summary&transactions%5B2%5D%5Bvalue%5D=+&transactions%5B3%5D%5Btype%5D=title&transactions%5B3%5D%5Bvalue%5D=create+beta+for+phabricator+test", - "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit", + "method": "POST", + "body": "diff_id=14305&data=%7B%22f55f947ed0f8ad80a04b7e87a0bf9febda2070b1%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", "headers": { - "content-length": [ - "398" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "227" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" + }, + "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:37 GMT" + ], + "x-frame-options": [ + "Deny" + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], + 
"server": [ + "Apache/2.4.10 (Debian)" + ], + "set-cookie": [ + "phsid=A%2Fgw67yfcsx7vvxkymeac52ca5is4jkxjwqqkhayco; expires=Fri, 01-Mar-2024 00:12:37 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" + ] + } + } + }, + { + "request": { + "method": "POST", + "body": "api.token=cli-hahayouwish&corpus=create+beta+for+phabricator+test", + "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage", "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "82" + ] + } + }, + "response": { + "status": { + "code": 200, + "message": "OK" + }, + "body": { + "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"create beta for phabricator test\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"}},\"error_code\":null,\"error_info\":null}" + }, + "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:37 GMT" + ], + "x-frame-options": [ + "Deny" + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], "server": [ "Apache/2.4.10 (Debian)" - ], + ], + "set-cookie": [ + "phsid=A%2Fyt5ejs6pgvjdxzms7geaxup63jpqkisngu3cprk6; expires=Fri, 01-Mar-2024 00:12:37 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], "strict-transport-security": [ "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": 
[ - "phsid=A%2Fywrdtdafcn5p267qiqfgfh7h4buaqxmnrgan6fh2; expires=Thu, 14-Sep-2023 04:53:50 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], - "x-xss-protection": [ - "1; mode=block" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:50 GMT" ] - }, - "body": { - "string": "{\"result\":{\"object\":{\"id\":4597,\"phid\":\"PHID-DREV-as7flhipq636gqvnyrsf\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-bwzosyyqmzlhe6g\"},{\"phid\":\"PHID-XACT-DREV-ina5ktuwp6eiwv6\"},{\"phid\":\"PHID-XACT-DREV-22bjztn3szeyicy\"},{\"phid\":\"PHID-XACT-DREV-kcv6zk2yboepbmo\"},{\"phid\":\"PHID-XACT-DREV-mnbp6f6sq54hzs2\"},{\"phid\":\"PHID-XACT-DREV-qlakltzsdzclpha\"},{\"phid\":\"PHID-XACT-DREV-a5347cobhvqnc22\"},{\"phid\":\"PHID-XACT-DREV-sciqq5cqfuqfh67\"}]},\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": "api.token=cli-hahayouwish&ids%5B0%5D=4596&ids%5B1%5D=4597", - "uri": "https://phab.mercurial-scm.org//api/differential.query", + "method": "POST", + "body": "transactions%5B0%5D%5Btype%5D=update&transactions%5B0%5D%5Bvalue%5D=PHID-DIFF-pofynzhmmqm2czm33teg&transactions%5B1%5D%5Btype%5D=summary&transactions%5B1%5D%5Bvalue%5D=Depends+on+D6054&transactions%5B2%5D%5Btype%5D=summary&transactions%5B2%5D%5Bvalue%5D=+&transactions%5B3%5D%5Btype%5D=title&transactions%5B3%5D%5Bvalue%5D=create+beta+for+phabricator+test&api.token=cli-hahayouwish", + "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit", "headers": { - "content-length": [ - "74" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "398" ] - }, - 
"method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":{\"object\":{\"id\":6055,\"phid\":\"PHID-DREV-k2hin2iytzuvu3j5icm3\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-3xjvwemev7dqsj3\"},{\"phid\":\"PHID-XACT-DREV-giypqlavgemr56i\"},{\"phid\":\"PHID-XACT-DREV-tcfqd4aj6rxtxzz\"},{\"phid\":\"PHID-XACT-DREV-2timgnudaxeln7a\"},{\"phid\":\"PHID-XACT-DREV-vb6564lrsxpsw4l\"},{\"phid\":\"PHID-XACT-DREV-maym4xi2tdhysvo\"},{\"phid\":\"PHID-XACT-DREV-bna5heyckxkk5ke\"},{\"phid\":\"PHID-XACT-DREV-b2eig3stbdic7k7\"}]},\"error_code\":null,\"error_info\":null}" + }, "headers": { - "server": [ - "Apache/2.4.10 (Debian)" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], - "x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], "expires": [ "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2F2iio6iugurtd7ml2tnwfwv24hkrfhs62yshvmouv; expires=Thu, 14-Sep-2023 04:53:51 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], "x-xss-protection": [ "1; mode=block" - ], - "content-type": [ - "application/json" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:38 GMT" + ], + "x-frame-options": [ + "Deny" + ], "cache-control": [ "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:51 GMT" + ], + "content-type": [ + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], + "server": [ + "Apache/2.4.10 (Debian)" + ], + "set-cookie": [ + "phsid=A%2Fgqyrj3op7rar26t6crqlt6rpdsxcefnrofqkw5rt; expires=Fri, 01-Mar-2024 00:12:38 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] - }, - "body": { - "string": "{\"result\":[{\"id\":\"4597\",\"phid\":\"PHID-DREV-as7flhipq636gqvnyrsf\",\"title\":\"create beta for phabricator 
test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D4597\",\"dateCreated\":\"1536987231\",\"dateModified\":\"1536987231\",\"authorPHID\":\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-sitmath22fwgsfsbdmne\",\"diffs\":[\"11074\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-bntcdwe74cw3vwkzt6nq\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"4596\",\"phid\":\"PHID-DREV-bntcdwe74cw3vwkzt6nq\",\"title\":\"create alpha for phabricator test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D4596\",\"dateCreated\":\"1536986862\",\"dateModified\":\"1536987231\",\"authorPHID\":\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-vwre7kpjdq52wbt56ftl\",\"diffs\":[\"11073\",\"11072\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": "diff_id=11074&api.token=cli-hahayouwish&data=%7B%22parent%22%3A+%22f70265671c65ab4b5416e611a6bd61887c013122%22%2C+%22node%22%3A+%22c2b605ada280b38c38031b5d31622869c72b0d8d%22%2C+%22user%22%3A+%22test%22%2C+%22date%22%3A+%220+0%22%7D&name=hg%3Ameta", - "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "method": "POST", + "body": 
"api.token=cli-hahayouwish&ids%5B0%5D=6054&ids%5B1%5D=6055", + "uri": "https://phab.mercurial-scm.org//api/differential.query", "headers": { - "content-length": [ - "264" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "74" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":[{\"id\":\"6055\",\"phid\":\"PHID-DREV-k2hin2iytzuvu3j5icm3\",\"title\":\"create beta for phabricator test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6055\",\"dateCreated\":\"1551571958\",\"dateModified\":\"1551571958\",\"authorPHID\":\"PHID-USER-5iy6mkoveguhm2zthvww\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":[],\"branch\":null,\"summary\":\" \",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-pofynzhmmqm2czm33teg\",\"diffs\":[\"14305\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-6pczsbtdpqjc2nskmxwy\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null},{\"id\":\"6054\",\"phid\":\"PHID-DREV-6pczsbtdpqjc2nskmxwy\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6054\",\"dateCreated\":\"1551571947\",\"dateModified\":\"1551571958\",\"authorPHID\":\"PHID-USER-5iy6mkoveguhm2zthvww\",\"status\":\"0\",\"statusName\":\"Needs 
Review\",\"properties\":[],\"branch\":null,\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-3wv2fwmzp27uamb66xxg\",\"diffs\":[\"14304\",\"14303\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}" + }, "headers": { - "server": [ - "Apache/2.4.10 (Debian)" - ], - "strict-transport-security": [ - "max-age=0; includeSubdomains; preload" - ], + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:39 GMT" + ], "x-frame-options": [ "Deny" - ], + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], "x-content-type-options": [ "nosniff" - ], - "expires": [ - "Sat, 01 Jan 2000 00:00:00 GMT" - ], + ], + "server": [ + "Apache/2.4.10 (Debian)" + ], "set-cookie": [ - "phsid=A%2Fvwsd2gtkeg64gticvthsxnpufne42t4eqityra25; expires=Thu, 14-Sep-2023 04:53:52 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], - "x-xss-protection": [ - "1; mode=block" - ], - "content-type": [ - "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:52 GMT" + "phsid=A%2F5wxg6sdf2mby5iljd5e5qpgoex6uefo5pgltav7k; expires=Fri, 01-Mar-2024 00:12:39 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] - }, - "body": { - "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" } } - }, + }, { "request": { - "body": 
"diff_id=11074&api.token=cli-hahayouwish&data=%7B%22c2b605ada280b38c38031b5d31622869c72b0d8d%22%3A+%7B%22time%22%3A+0.0%2C+%22authorEmail%22%3A+%22test%22%2C+%22author%22%3A+%22test%22%7D%7D&name=local%3Acommits", - "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "method": "POST", + "body": "diff_id=14305&data=%7B%22user%22%3A+%22test%22%2C+%22parent%22%3A+%22939d862f03181a366fea64a540baf0bb33f85d92%22%2C+%22node%22%3A+%229c64e1fc33e1b9a70eb60643fe96a4d5badad9dc%22%2C+%22date%22%3A+%220+0%22%7D&api.token=cli-hahayouwish&name=hg%3Ameta", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", "headers": { - "content-length": [ - "227" - ], - "host": [ - "phab.mercurial-scm.org" - ], "content-type": [ "application/x-www-form-urlencoded" - ], + ], "accept": [ "application/mercurial-0.1" - ], + ], "user-agent": [ - "mercurial/proto-1.0 (Mercurial 4.7.1+867-34bcd3af7109+20180915)" + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "264" ] - }, - "method": "POST" - }, + } + }, "response": { "status": { - "code": 200, + "code": 200, "message": "OK" - }, + }, + "body": { + "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" + }, "headers": { + "expires": [ + "Sat, 01 Jan 2000 00:00:00 GMT" + ], + "x-xss-protection": [ + "1; mode=block" + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:40 GMT" + ], + "x-frame-options": [ + "Deny" + ], + "cache-control": [ + "no-store" + ], + "content-type": [ + "application/json" + ], + "x-content-type-options": [ + "nosniff" + ], "server": [ "Apache/2.4.10 (Debian)" - ], + ], + "set-cookie": [ + "phsid=A%2F4c7iamnsn57y6qpccmbesf4ooflmkqvt4m6udawl; expires=Fri, 01-Mar-2024 00:12:40 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], "strict-transport-security": [ "max-age=0; includeSubdomains; preload" - ], - 
"x-frame-options": [ - "Deny" - ], - "x-content-type-options": [ - "nosniff" - ], + ] + } + } + }, + { + "request": { + "method": "POST", + "body": "diff_id=14305&data=%7B%229c64e1fc33e1b9a70eb60643fe96a4d5badad9dc%22%3A+%7B%22author%22%3A+%22test%22%2C+%22authorEmail%22%3A+%22test%22%2C+%22time%22%3A+0.0%7D%7D&api.token=cli-hahayouwish&name=local%3Acommits", + "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty", + "headers": { + "content-type": [ + "application/x-www-form-urlencoded" + ], + "accept": [ + "application/mercurial-0.1" + ], + "user-agent": [ + "mercurial/proto-1.0 (Mercurial 4.9+477-7c86ec0ca5c5+20190303)" + ], + "host": [ + "phab.mercurial-scm.org" + ], + "content-length": [ + "227" + ] + } + }, + "response": { + "status": { + "code": 200, + "message": "OK" + }, + "body": { + "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" + }, + "headers": { "expires": [ "Sat, 01 Jan 2000 00:00:00 GMT" - ], - "set-cookie": [ - "phsid=A%2Fflxjbmx24qcq7qhggolo6b7iue7utwp7kyoazduk; expires=Thu, 14-Sep-2023 04:53:52 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" - ], + ], "x-xss-protection": [ "1; mode=block" - ], + ], + "transfer-encoding": [ + "chunked" + ], + "date": [ + "Sun, 03 Mar 2019 00:12:40 GMT" + ], + "x-frame-options": [ + "Deny" + ], + "cache-control": [ + "no-store" + ], "content-type": [ "application/json" - ], - "cache-control": [ - "no-store" - ], - "date": [ - "Sat, 15 Sep 2018 04:53:52 GMT" + ], + "x-content-type-options": [ + "nosniff" + ], + "server": [ + "Apache/2.4.10 (Debian)" + ], + "set-cookie": [ + "phsid=A%2Ftdudqohojcq4hyc7gl4kthzkhuq3nmcxgnunpbjm; expires=Fri, 01-Mar-2024 00:12:40 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly" + ], + "strict-transport-security": [ + "max-age=0; includeSubdomains; preload" ] - }, - "body": { - "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}" } } } - ] + ], + "version": 1 }
--- a/tests/run-tests.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/run-tests.py Tue Mar 19 16:36:59 2019 +0300 @@ -290,7 +290,7 @@ defaults = { 'jobs': ('HGTEST_JOBS', multiprocessing.cpu_count()), 'timeout': ('HGTEST_TIMEOUT', 180), - 'slowtimeout': ('HGTEST_SLOWTIMEOUT', 500), + 'slowtimeout': ('HGTEST_SLOWTIMEOUT', 1500), 'port': ('HGTEST_PORT', 20059), 'shell': ('HGTEST_SHELL', 'sh'), } @@ -634,7 +634,7 @@ # list in group 2, and the preceeding line output in group 1: # # output..output (feature !)\n -optline = re.compile(b'(.*) \((.+?) !\)\n$') +optline = re.compile(br'(.*) \((.+?) !\)\n$') def cdatasafe(data): """Make a string safe to include in a CDATA block. @@ -929,8 +929,8 @@ self.fail('no result code from test') elif out != self._refout: # Diff generation may rely on written .err file. - if (ret != 0 or out != self._refout) and not self._skipped \ - and not self._debug: + if ((ret != 0 or out != self._refout) and not self._skipped + and not self._debug): with open(self.errpath, 'wb') as f: for line in out: f.write(line) @@ -978,8 +978,8 @@ # files are deleted shutil.rmtree(self._chgsockdir, True) - if (self._ret != 0 or self._out != self._refout) and not self._skipped \ - and not self._debug and self._out: + if ((self._ret != 0 or self._out != self._refout) and not self._skipped + and not self._debug and self._out): with open(self.errpath, 'wb') as f: for line in self._out: f.write(line) @@ -1105,8 +1105,8 @@ if 'HGTESTCATAPULTSERVERPIPE' not in env: # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the # non-test one in as a default, otherwise set to devnull - env['HGTESTCATAPULTSERVERPIPE'] = \ - env.get('HGCATAPULTSERVERPIPE', os.devnull) + env['HGTESTCATAPULTSERVERPIPE'] = env.get( + 'HGCATAPULTSERVERPIPE', os.devnull) extraextensions = [] for opt in self._extraconfigopts: @@ -1225,7 +1225,6 @@ killdaemons(env['DAEMON_PIDS']) return ret - output = b'' proc.tochild.close() try: @@ -1354,6 +1353,9 @@ def _hghave(self, reqs): allreqs = b' 
'.join(reqs) + + self._detectslow(reqs) + if allreqs in self._have: return self._have.get(allreqs) @@ -1375,12 +1377,14 @@ self._have[allreqs] = (False, stdout) return False, stdout + self._have[allreqs] = (True, None) + return True, None + + def _detectslow(self, reqs): + """update the timeout of slow test when appropriate""" if b'slow' in reqs: self._timeout = self._slowtimeout - self._have[allreqs] = (True, None) - return True, None - def _iftest(self, args): # implements "#if" reqs = [] @@ -1393,6 +1397,7 @@ return False else: reqs.append(arg) + self._detectslow(reqs) return self._hghave(reqs)[0] def _parsetest(self, lines): @@ -1409,8 +1414,8 @@ session = str(uuid.uuid4()) if PYTHON3: session = session.encode('ascii') - hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or \ - os.getenv('HGCATAPULTSERVERPIPE') + hgcatapult = (os.getenv('HGTESTCATAPULTSERVERPIPE') or + os.getenv('HGCATAPULTSERVERPIPE')) def toggletrace(cmd=None): if not hgcatapult or hgcatapult == os.devnull: return @@ -1903,8 +1908,9 @@ pass elif self._options.view: v = self._options.view - os.system(r"%s %s %s" % - (v, _strpath(test.refpath), _strpath(test.errpath))) + subprocess.call(r'"%s" "%s" "%s"' % + (v, _strpath(test.refpath), + _strpath(test.errpath)), shell=True) else: servefail, lines = getdiff(expected, got, test.refpath, test.errpath) @@ -2259,14 +2265,17 @@ self.stream.writeln('') if not self._runner.options.noskips: - for test, msg in self._result.skipped: + for test, msg in sorted(self._result.skipped, + key=lambda s: s[0].name): formatted = 'Skipped %s: %s\n' % (test.name, msg) msg = highlightmsg(formatted, self._result.color) self.stream.write(msg) - for test, msg in self._result.failures: + for test, msg in sorted(self._result.failures, + key=lambda f: f[0].name): formatted = 'Failed %s: %s\n' % (test.name, msg) self.stream.write(highlightmsg(formatted, self._result.color)) - for test, msg in self._result.errors: + for test, msg in sorted(self._result.errors, + key=lambda e: 
e[0].name): self.stream.writeln('Errored %s: %s' % (test.name, msg)) if self._runner.options.xunit: @@ -2376,12 +2385,12 @@ timesd = dict((t[0], t[3]) for t in result.times) doc = minidom.Document() s = doc.createElement('testsuite') - s.setAttribute('name', 'run-tests') - s.setAttribute('tests', str(result.testsRun)) s.setAttribute('errors', "0") # TODO s.setAttribute('failures', str(len(result.failures))) + s.setAttribute('name', 'run-tests') s.setAttribute('skipped', str(len(result.skipped) + len(result.ignored))) + s.setAttribute('tests', str(result.testsRun)) doc.appendChild(s) for tc in result.successes: t = doc.createElement('testcase') @@ -2770,8 +2779,8 @@ """ if not args: if self.options.changed: - proc = Popen4('hg st --rev "%s" -man0 .' % - self.options.changed, None, 0) + proc = Popen4(b'hg st --rev "%s" -man0 .' % + _bytespath(self.options.changed), None, 0) stdout, stderr = proc.communicate() args = stdout.strip(b'\0').split(b'\0') else: @@ -3110,8 +3119,8 @@ # installation layout put it in bin/ directly. Fix it with open(hgbat, 'rb') as f: data = f.read() - if b'"%~dp0..\python" "%~dp0hg" %*' in data: - data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*', + if br'"%~dp0..\python" "%~dp0hg" %*' in data: + data = data.replace(br'"%~dp0..\python" "%~dp0hg" %*', b'"%~dp0python" "%~dp0hg" %*') with open(hgbat, 'wb') as f: f.write(data)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/svnurlof.py Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,18 @@ +from __future__ import absolute_import, print_function +import sys + +from mercurial import ( + pycompat, + util, +) + +def main(argv): + enc = util.urlreq.quote(pycompat.sysbytes(argv[1])) + if pycompat.iswindows: + fmt = 'file:///%s' + else: + fmt = 'file://%s' + print(fmt % pycompat.sysstr(enc)) + +if __name__ == '__main__': + main(sys.argv)
--- a/tests/svnxml.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/svnxml.py Tue Mar 19 16:36:59 2019 +0300 @@ -20,10 +20,10 @@ if paths: paths = paths[0] for p in paths.getElementsByTagName('path'): - action = p.getAttribute('action') - path = xmltext(p) - frompath = p.getAttribute('copyfrom-path') - fromrev = p.getAttribute('copyfrom-rev') + action = p.getAttribute('action').encode('utf-8') + path = xmltext(p).encode('utf-8') + frompath = p.getAttribute('copyfrom-path').encode('utf-8') + fromrev = p.getAttribute('copyfrom-rev').encode('utf-8') e['paths'].append((path, action, frompath, fromrev)) return e @@ -43,11 +43,11 @@ for k in ('revision', 'author', 'msg'): fp.write(('%s: %s\n' % (k, e[k])).encode('utf-8')) for path, action, fpath, frev in sorted(e['paths']): - frominfo = '' + frominfo = b'' if frev: - frominfo = ' (from %s@%s)' % (fpath, frev) - p = ' %s %s%s\n' % (action, path, frominfo) - fp.write(p.encode('utf-8')) + frominfo = b' (from %s@%s)' % (fpath, frev) + p = b' %s %s%s\n' % (action, path, frominfo) + fp.write(p) if __name__ == '__main__': data = sys.stdin.read()
--- a/tests/test-absorb-strip.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-absorb-strip.t Tue Mar 19 16:36:59 2019 +0300 @@ -23,6 +23,7 @@ $ echo 1 >> B $ echo 2 >> D $ hg absorb -a + warning: orphaned descendants detected, not stripping 112478962961, 26805aba1e60 saved backup bundle to * (glob) 2 of 2 chunk(s) applied
--- a/tests/test-acl.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-acl.t Tue Mar 19 16:36:59 2019 +0300 @@ -38,8 +38,8 @@ > def fakegetusers(ui, group): > try: > return acl._getusersorig(ui, group) - > except: - > return ["fred", "betty"] + > except BaseException: + > return [b"fred", b"betty"] > acl._getusersorig = acl._getusers > acl._getusers = fakegetusers > EOF @@ -1125,7 +1125,7 @@ bundle2-input-bundle: 4 parts total transaction abort! rollback completed - abort: $ENOENT$: ../acl.config + abort: $ENOENT$: '../acl.config' no rollback information available 0:6675d58eff77
--- a/tests/test-ancestor.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-ancestor.py Tue Mar 19 16:36:59 2019 +0300 @@ -123,7 +123,6 @@ # reference slow algorithm naiveinc = naiveincrementalmissingancestors(ancs, bases) seq = [] - revs = [] for _ in xrange(inccount): if rng.random() < 0.2: newbases = samplerevs(graphnodes)
--- a/tests/test-annotate.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-annotate.t Tue Mar 19 16:36:59 2019 +0300 @@ -445,8 +445,8 @@ > fparent1 = manifest1.get(fname, node.nullid) > fparent2 = manifest2.get(fname, node.nullid) > meta = {} - > copy = fctx.renamed() - > if copy and copy[0] != fname: + > copy = fctx.copysource() + > if copy and copy != fname: > raise error.Abort('copying is not supported') > if fparent2 != node.nullid: > changelist.append(fname) @@ -589,7 +589,7 @@ $ hg annotate -ncr "wdir()" baz abort: $TESTTMP\repo\baz: $ENOENT$ (windows !) - abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !) + abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !) [255] annotate removed file @@ -598,7 +598,7 @@ $ hg annotate -ncr "wdir()" baz abort: $TESTTMP\repo\baz: $ENOENT$ (windows !) - abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !) + abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !) [255] $ hg revert --all --no-backup --quiet @@ -809,6 +809,15 @@ |\ ~ ~ +An integer as a line range, which is parsed as '1:1' + + $ hg log -r 'followlines(baz, 1)' + changeset: 22:2174d0bf352a + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: added two lines with 0 + + check error cases $ hg up 24 --quiet $ hg log -r 'followlines()' @@ -817,8 +826,8 @@ $ hg log -r 'followlines(baz)' hg: parse error: followlines requires a line range [255] - $ hg log -r 'followlines(baz, 1)' - hg: parse error: followlines expects a line range + $ hg log -r 'followlines(baz, x)' + hg: parse error: followlines expects a line number or a range [255] $ hg log -r 'followlines(baz, 1:2, startrev=desc("b"))' hg: parse error: followlines expects exactly one revision
--- a/tests/test-arbitraryfilectx.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-arbitraryfilectx.t Tue Mar 19 16:36:59 2019 +0300 @@ -72,30 +72,30 @@ These files are different and should return True (different): (Note that filecmp.cmp's return semantics are inverted from ours, so we invert for simplicity): - $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['real_A'])" + $ hg eval "context.arbitraryfilectx(b'A', repo).cmp(repo[None][b'real_A'])" True (no-eol) - $ hg eval "not filecmp.cmp('A', 'real_A')" + $ hg eval "not filecmp.cmp(b'A', b'real_A')" True (no-eol) These files are identical and should return False (same): - $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['A'])" + $ hg eval "context.arbitraryfilectx(b'A', repo).cmp(repo[None][b'A'])" False (no-eol) - $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['B'])" + $ hg eval "context.arbitraryfilectx(b'A', repo).cmp(repo[None][b'B'])" False (no-eol) - $ hg eval "not filecmp.cmp('A', 'B')" + $ hg eval "not filecmp.cmp(b'A', b'B')" False (no-eol) This comparison should also return False, since A and sym_A are substantially the same in the eyes of ``filectx.cmp``, which looks at data only. - $ hg eval "context.arbitraryfilectx('real_A', repo).cmp(repo[None]['sym_A'])" + $ hg eval "context.arbitraryfilectx(b'real_A', repo).cmp(repo[None][b'sym_A'])" False (no-eol) A naive use of filecmp on those two would wrongly return True, since it follows the symlink to "A", which has different contents. #if symlink - $ hg eval "not filecmp.cmp('real_A', 'sym_A')" + $ hg eval "not filecmp.cmp(b'real_A', b'sym_A')" True (no-eol) #else - $ hg eval "not filecmp.cmp('real_A', 'sym_A')" + $ hg eval "not filecmp.cmp(b'real_A', b'sym_A')" False (no-eol) #endif
--- a/tests/test-archive.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-archive.t Tue Mar 19 16:36:59 2019 +0300 @@ -187,7 +187,7 @@ server: testing stub value transfer-encoding: chunked - body: size=(1377|1461), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537) (re) + body: size=(1377|1461|1489), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537|1897e496871aa89ad685a92b936f5fa0d008b9e8) (re) % tar.gz and tar.bz2 disallowed should both give 403 403 Archive type not allowed: gz content-type: text/html; charset=ascii @@ -274,7 +274,7 @@ server: testing stub value transfer-encoding: chunked - body: size=(1377|1461), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537) (re) + body: size=(1377|1461|1489), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537|1897e496871aa89ad685a92b936f5fa0d008b9e8) (re) % tar.gz and tar.bz2 disallowed should both give 403 403 Archive type not allowed: gz content-type: text/html; charset=ascii
--- a/tests/test-batching.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-batching.py Tue Mar 19 16:36:59 2019 +0300 @@ -11,25 +11,28 @@ from mercurial import ( localrepo, + pycompat, wireprotov1peer, +) -) +def bprint(*bs): + print(*[pycompat.sysstr(b) for b in bs]) # equivalent of repo.repository class thing(object): def hello(self): - return "Ready." + return b"Ready." # equivalent of localrepo.localrepository class localthing(thing): def foo(self, one, two=None): if one: - return "%s and %s" % (one, two,) - return "Nope" + return b"%s and %s" % (one, two,) + return b"Nope" def bar(self, b, a): - return "%s und %s" % (b, a,) + return b"%s und %s" % (b, a,) def greet(self, name=None): - return "Hello, %s" % name + return b"Hello, %s" % name @contextlib.contextmanager def commandexecutor(self): @@ -43,27 +46,27 @@ def use(it): # Direct call to base method shared between client and server. - print(it.hello()) + bprint(it.hello()) # Direct calls to proxied methods. They cause individual roundtrips. - print(it.foo("Un", two="Deux")) - print(it.bar("Eins", "Zwei")) + bprint(it.foo(b"Un", two=b"Deux")) + bprint(it.bar(b"Eins", b"Zwei")) # Batched call to a couple of proxied methods. 
with it.commandexecutor() as e: - ffoo = e.callcommand('foo', {'one': 'One', 'two': 'Two'}) - fbar = e.callcommand('bar', {'b': 'Eins', 'a': 'Zwei'}) - fbar2 = e.callcommand('bar', {'b': 'Uno', 'a': 'Due'}) + ffoo = e.callcommand(b'foo', {b'one': b'One', b'two': b'Two'}) + fbar = e.callcommand(b'bar', {b'b': b'Eins', b'a': b'Zwei'}) + fbar2 = e.callcommand(b'bar', {b'b': b'Uno', b'a': b'Due'}) - print(ffoo.result()) - print(fbar.result()) - print(fbar2.result()) + bprint(ffoo.result()) + bprint(fbar.result()) + bprint(fbar2.result()) # local usage mylocal = localthing() print() -print("== Local") +bprint(b"== Local") use(mylocal) # demo remoting; mimicks what wireproto and HTTP/SSH do @@ -72,16 +75,16 @@ def escapearg(plain): return (plain - .replace(':', '::') - .replace(',', ':,') - .replace(';', ':;') - .replace('=', ':=')) + .replace(b':', b'::') + .replace(b',', b':,') + .replace(b';', b':;') + .replace(b'=', b':=')) def unescapearg(escaped): return (escaped - .replace(':=', '=') - .replace(':;', ';') - .replace(':,', ',') - .replace('::', ':')) + .replace(b':=', b'=') + .replace(b':;', b';') + .replace(b':,', b',') + .replace(b'::', b':')) # server side @@ -90,27 +93,28 @@ def __init__(self, local): self.local = local def _call(self, name, args): - args = dict(arg.split('=', 1) for arg in args) + args = dict(arg.split(b'=', 1) for arg in args) return getattr(self, name)(**args) def perform(self, req): - print("REQ:", req) - name, args = req.split('?', 1) - args = args.split('&') - vals = dict(arg.split('=', 1) for arg in args) - res = getattr(self, name)(**vals) - print(" ->", res) + bprint(b"REQ:", req) + name, args = req.split(b'?', 1) + args = args.split(b'&') + vals = dict(arg.split(b'=', 1) for arg in args) + res = getattr(self, pycompat.sysstr(name))(**pycompat.strkwargs(vals)) + bprint(b" ->", res) return res def batch(self, cmds): res = [] - for pair in cmds.split(';'): - name, args = pair.split(':', 1) + for pair in cmds.split(b';'): + name, args = 
pair.split(b':', 1) vals = {} - for a in args.split(','): + for a in args.split(b','): if a: - n, v = a.split('=') + n, v = a.split(b'=') vals[n] = unescapearg(v) - res.append(escapearg(getattr(self, name)(**vals))) - return ';'.join(res) + res.append(escapearg(getattr(self, pycompat.sysstr(name))( + **pycompat.strkwargs(vals)))) + return b';'.join(res) def foo(self, one, two): return mangle(self.local.foo(unmangle(one), unmangle(two))) def bar(self, b, a): @@ -124,25 +128,25 @@ # equivalent of wireproto.encode/decodelist, that is, type-specific marshalling # here we just transform the strings a bit to check we're properly en-/decoding def mangle(s): - return ''.join(chr(ord(c) + 1) for c in s) + return b''.join(pycompat.bytechr(ord(c) + 1) for c in pycompat.bytestr(s)) def unmangle(s): - return ''.join(chr(ord(c) - 1) for c in s) + return b''.join(pycompat.bytechr(ord(c) - 1) for c in pycompat.bytestr(s)) # equivalent of wireproto.wirerepository and something like http's wire format class remotething(thing): def __init__(self, server): self.server = server def _submitone(self, name, args): - req = name + '?' + '&'.join(['%s=%s' % (n, v) for n, v in args]) + req = name + b'?' 
+ b'&'.join([b'%s=%s' % (n, v) for n, v in args]) return self.server.perform(req) def _submitbatch(self, cmds): req = [] for name, args in cmds: - args = ','.join(n + '=' + escapearg(v) for n, v in args) - req.append(name + ':' + args) - req = ';'.join(req) - res = self._submitone('batch', [('cmds', req,)]) - for r in res.split(';'): + args = b','.join(n + b'=' + escapearg(v) for n, v in args) + req.append(name + b':' + args) + req = b';'.join(req) + res = self._submitone(b'batch', [(b'cmds', req,)]) + for r in res.split(b';'): yield r @contextlib.contextmanager @@ -155,7 +159,7 @@ @wireprotov1peer.batchable def foo(self, one, two=None): - encargs = [('one', mangle(one),), ('two', mangle(two),)] + encargs = [(b'one', mangle(one),), (b'two', mangle(two),)] encresref = wireprotov1peer.future() yield encargs, encresref yield unmangle(encresref.value) @@ -163,18 +167,18 @@ @wireprotov1peer.batchable def bar(self, b, a): encresref = wireprotov1peer.future() - yield [('b', mangle(b),), ('a', mangle(a),)], encresref + yield [(b'b', mangle(b),), (b'a', mangle(a),)], encresref yield unmangle(encresref.value) # greet is coded directly. It therefore does not support batching. If it # does appear in a batch, the batch is split around greet, and the call to # greet is done in its own roundtrip. def greet(self, name=None): - return unmangle(self._submitone('greet', [('name', mangle(name),)])) + return unmangle(self._submitone(b'greet', [(b'name', mangle(name),)])) # demo remote usage myproxy = remotething(myserver) print() -print("== Remote") +bprint(b"== Remote") use(myproxy)
--- a/tests/test-blackbox.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-blackbox.t Tue Mar 19 16:36:59 2019 +0300 @@ -354,6 +354,35 @@ warning: cannot write to blackbox.log: $TESTTMP/gone/.hg/blackbox.log: $ENOTDIR$ (windows !) $ cd .. +blackbox should disable itself if track is empty + + $ hg --config blackbox.track= init nothing_tracked + $ cd nothing_tracked + $ cat >> .hg/hgrc << EOF + > [blackbox] + > track = + > EOF + $ hg blackbox + $ cd $TESTTMP + +a '*' entry in blackbox.track is interpreted as log everything + + $ hg --config blackbox.track='*' \ + > --config blackbox.logsource=True \ + > init track_star + $ cd track_star + $ cat >> .hg/hgrc << EOF + > [blackbox] + > logsource = True + > track = * + > EOF +(only look for entries with specific logged sources, otherwise this test is +pretty brittle) + $ hg blackbox | egrep '\[command(finish)?\]' + 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000) [commandfinish]> --config *blackbox.track=* --config *blackbox.logsource=True* init track_star exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000) [command]> blackbox + $ cd $TESTTMP + #if chg when using chg, blackbox.log should get rotated correctly
--- a/tests/test-bugzilla.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-bugzilla.t Tue Mar 19 16:36:59 2019 +0300 @@ -3,7 +3,9 @@ $ cat <<EOF > bzmock.py > from __future__ import absolute_import > from mercurial import extensions + > from mercurial import pycompat > from mercurial import registrar + > from mercurial.utils import stringutil > > configtable = {} > configitem = registrar.configitem(configtable) @@ -18,14 +20,17 @@ > super(bzmock, self).__init__(ui) > self._logfile = ui.config(b'bugzilla', b'mocklog') > def updatebug(self, bugid, newstate, text, committer): - > with open(self._logfile, 'a') as f: - > f.write('update bugid=%r, newstate=%r, committer=%r\n' - > % (bugid, newstate, committer)) - > f.write('----\n' + text + '\n----\n') + > with open(pycompat.fsdecode(self._logfile), 'ab') as f: + > f.write(b'update bugid=%s, newstate=%s, committer=%s\n' + > % (stringutil.pprint(bugid), + > stringutil.pprint(newstate), + > stringutil.pprint(committer))) + > f.write(b'----\n' + text + b'\n----\n') > def notify(self, bugs, committer): - > with open(self._logfile, 'a') as f: - > f.write('notify bugs=%r, committer=%r\n' - > % (bugs, committer)) + > with open(pycompat.fsdecode(self._logfile), 'ab') as f: + > f.write(b'notify bugs=%s, committer=%s\n' + > % (stringutil.pprint(bugs), + > stringutil.pprint(committer))) > bugzilla.bugzilla._versions[b'mock'] = bzmock > EOF
--- a/tests/test-bundle.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-bundle.t Tue Mar 19 16:36:59 2019 +0300 @@ -218,10 +218,11 @@ $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup" + > changegroup = sh -c "printenv.py --line changegroup" > EOF doesn't work (yet ?) +NOTE: msys is mangling the URL below hg -R bundle://../full.hg verify @@ -233,7 +234,18 @@ adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) new changesets f9ee2f85a263:aa35859c02ea (9 drafts) - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=bundle*../full.hg (glob) + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 + HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + bundle:../full.hg (no-msys !) + bundle;../full.hg (msys !) + HG_URL=bundle:../full.hg (no-msys !) + HG_URL=bundle;../full.hg (msys !) + (run 'hg heads' to see heads, 'hg merge' to merge) Rollback empty @@ -257,7 +269,16 @@ adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) new changesets f9ee2f85a263:aa35859c02ea (9 drafts) - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=bundle:empty+full.hg + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 + HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + bundle:empty+full.hg + HG_URL=bundle:empty+full.hg + (run 'hg heads' to see heads, 'hg merge' to merge) #endif
--- a/tests/test-bundle2-format.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-bundle2-format.t Tue Mar 19 16:36:59 2019 +0300 @@ -82,7 +82,8 @@ > (b'', b'genraise', False, b'includes a part that raise an exception during generation'), > (b'', b'timeout', False, b'emulate a timeout during bundle generation'), > (b'r', b'rev', [], b'includes those changeset in the bundle'), - > (b'', b'compress', b'', b'compress the stream'),], + > (b'', b'compress', b'', b'compress the stream'), + > ], > b'[OUTPUTFILE]') > def cmdbundle2(ui, repo, path=None, **opts): > """write a bundle2 container on standard output"""
--- a/tests/test-bundle2-multiple-changegroups.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-bundle2-multiple-changegroups.t Tue Mar 19 16:36:59 2019 +0300 @@ -66,9 +66,9 @@ $ cd ../clone $ cat >> .hg/hgrc <<EOF > [hooks] - > pretxnchangegroup = sh -c "printenv.py pretxnchangegroup" - > changegroup = sh -c "printenv.py changegroup" - > incoming = sh -c "printenv.py incoming" + > pretxnchangegroup = sh -c "printenv.py --line pretxnchangegroup" + > changegroup = sh -c "printenv.py --line changegroup" + > incoming = sh -c "printenv.py --line incoming" > EOF Pull the new commits in the clone @@ -81,18 +81,75 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_PENDING=$TESTTMP/clone + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + remote: changegroup2 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_PENDING=$TESTTMP/clone + HG_PHASES_MOVED=1 + HG_SOURCE=pull + 
HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + new changesets 27547f69f254:f838bfaca5c7 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + 
file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + pullop.cgresult is 1 (run 'hg update' to get a working copy) $ hg update @@ -152,21 +209,104 @@ adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e + HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 + HG_PENDING=$TESTTMP/clone + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + remote: changegroup2 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+1 heads) - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 + HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 + HG_PENDING=$TESTTMP/clone + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + new changesets b3325c91a4d9:5cd59d311f65 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming 
HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e + HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 + 
HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + pullop.cgresult is 3 (run 'hg heads' to see heads, 'hg merge' to merge) $ hg log -G @@ -226,18 +366,75 @@ adding manifests adding file changes added 1 changesets with 0 changes to 0 files (-1 heads) - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_PENDING=$TESTTMP/clone + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + remote: changegroup2 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup 
HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_PENDING=$TESTTMP/clone + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + new changesets 71bd7b46de72:9d18e5bd9ab0 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + changegroup hook: 
HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/repo (glob) + HG_URL=file:$TESTTMP/repo + pullop.cgresult is -2 (run 'hg update' to get a working copy) $ hg log -G
--- a/tests/test-bundle2-pushback.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-bundle2-pushback.t Tue Mar 19 16:36:59 2019 +0300 @@ -25,7 +25,8 @@ > b'key': b'new-server-mark', > b'old': b'', > b'new': b'tip'} - > encodedparams = [(k, pushkey.encode(v)) for (k,v) in params.items()] + > encodedparams = [(k, pushkey.encode(v)) + > for (k, v) in params.items()] > op.reply.newpart(b'pushkey', mandatoryparams=encodedparams) > else: > op.reply.newpart(b'output', data=b'pushback not enabled')
--- a/tests/test-cbor.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-cbor.py Tue Mar 19 16:36:59 2019 +0300 @@ -926,7 +926,7 @@ (False, None, -1, cborutil.SPECIAL_NONE)) with self.assertRaisesRegex(cborutil.CBORDecodeError, - 'semantic tag \d+ not allowed'): + r'semantic tag \d+ not allowed'): cborutil.decodeitem(encoded) class SpecialTypesTests(TestCase): @@ -942,7 +942,7 @@ encoded = cborutil.encodelength(cborutil.MAJOR_TYPE_SPECIAL, i) with self.assertRaisesRegex(cborutil.CBORDecodeError, - 'special type \d+ not allowed'): + r'special type \d+ not allowed'): cborutil.decodeitem(encoded) class SansIODecoderTests(TestCase):
--- a/tests/test-check-code.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-check-code.t Tue Mar 19 16:36:59 2019 +0300 @@ -12,6 +12,13 @@ > -X hgext/fsmonitor/pywatchman \ > -X mercurial/thirdparty \ > | sed 's-\\-/-g' | "$check_code" --warnings --per-file=0 - || false + Skipping contrib/packaging/hgpackaging/downloads.py it has no-che?k-code (glob) + Skipping contrib/packaging/hgpackaging/inno.py it has no-che?k-code (glob) + Skipping contrib/packaging/hgpackaging/py2exe.py it has no-che?k-code (glob) + Skipping contrib/packaging/hgpackaging/util.py it has no-che?k-code (glob) + Skipping contrib/packaging/hgpackaging/wix.py it has no-che?k-code (glob) + Skipping contrib/packaging/inno/build.py it has no-che?k-code (glob) + Skipping contrib/packaging/wix/build.py it has no-che?k-code (glob) Skipping i18n/polib.py it has no-che?k-code (glob) Skipping mercurial/statprof.py it has no-che?k-code (glob) Skipping tests/badserverext.py it has no-che?k-code (glob) @@ -22,7 +29,7 @@ >>> commands = [] >>> with open('mercurial/debugcommands.py', 'rb') as fh: ... for line in fh: - ... m = re.match(b"^@command\('([a-z]+)", line) + ... m = re.match(br"^@command\('([a-z]+)", line) ... if m: ... commands.append(m.group(1)) >>> scommands = list(sorted(commands))
--- a/tests/test-check-module-imports.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-check-module-imports.t Tue Mar 19 16:36:59 2019 +0300 @@ -21,6 +21,8 @@ > -X contrib/debugshell.py \ > -X contrib/hgweb.fcgi \ > -X contrib/packaging/hg-docker \ + > -X contrib/packaging/hgpackaging/ \ + > -X contrib/packaging/inno/ \ > -X contrib/python-zstandard/ \ > -X contrib/win32/hgwebdir_wsgi.py \ > -X contrib/perf-utils/perf-revlog-write-plot.py \
--- a/tests/test-check-py3-compat.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-check-py3-compat.t Tue Mar 19 16:36:59 2019 +0300 @@ -5,6 +5,9 @@ #if no-py3 $ testrepohg files 'set:(**.py)' \ + > -X contrib/packaging/hgpackaging/ \ + > -X contrib/packaging/inno/ \ + > -X contrib/packaging/wix/ \ > -X hgdemandimport/demandimportpy2.py \ > -X mercurial/thirdparty/cbor \ > | sed 's|\\|/|g' | xargs "$PYTHON" contrib/check-py3-compat.py @@ -32,6 +35,14 @@ > -X mercurial/thirdparty \ > | sed 's|\\|/|g' | xargs "$PYTHON" contrib/check-py3-compat.py \ > | sed 's/[0-9][0-9]*)$/*)/' + contrib/python-zstandard/tests/test_compressor.py:324: SyntaxWarning: invalid escape sequence \( (py38 !) + with self.assertRaisesRegexp(zstd.ZstdError, 'cannot call compress\(\) after compressor'): (py38 !) + contrib/python-zstandard/tests/test_compressor.py:1329: SyntaxWarning: invalid escape sequence \( (py38 !) + 'cannot call compress\(\) after compression finished'): (py38 !) + contrib/python-zstandard/tests/test_compressor.py:1341: SyntaxWarning: invalid escape sequence \( (py38 !) + 'cannot call flush\(\) after compression finished'): (py38 !) + contrib/python-zstandard/tests/test_compressor.py:1353: SyntaxWarning: invalid escape sequence \( (py38 !) + 'cannot call finish\(\) after compression finished'): (py38 !) hgext/convert/transport.py: error importing: <*Error> No module named 'svn.client' (error at transport.py:*) (glob) (?) hgext/infinitepush/sqlindexapi.py: error importing: <*Error> No module named 'mysql' (error at sqlindexapi.py:*) (glob) (?) mercurial/scmwindows.py: error importing: <ValueError> _type_ 'v' not supported (error at win32.py:*) (no-windows !)
--- a/tests/test-clone.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-clone.t Tue Mar 19 16:36:59 2019 +0300 @@ -43,7 +43,6 @@ default 10:a7949464abda $ ls .hg/cache branch2-served - manifestfulltextcache (reporevlogstore !) rbc-names-v1 rbc-revs-v1 @@ -569,7 +568,7 @@ > extensions.loadall(myui) > extensions.populateui(myui) > repo = hg.repository(myui, b'a') - > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable",]) + > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable"]) > EOF $ "$PYTHON" branchclone.py
--- a/tests/test-commit-interactive-curses.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-commit-interactive-curses.t Tue Mar 19 16:36:59 2019 +0300 @@ -333,9 +333,9 @@ $ cp $HGRCPATH.pretest $HGRCPATH $ chunkselectorinterface() { > "$PYTHON" <<EOF - > from mercurial import hg, ui;\ - > repo = hg.repository(ui.ui.load(), ".");\ - > print(repo.ui.interface("chunkselector")) + > from mercurial import hg, pycompat, ui;\ + > repo = hg.repository(ui.ui.load(), b".");\ + > print(pycompat.sysstr(repo.ui.interface(b"chunkselector"))) > EOF > } $ chunkselectorinterface
--- a/tests/test-commit-interactive.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-commit-interactive.t Tue Mar 19 16:36:59 2019 +0300 @@ -1807,3 +1807,82 @@ n 0 -1 unset subdir/f1 $ hg status -A subdir/f1 M subdir/f1 + +Test commands.commit.interactive.unified=0 + + $ hg init $TESTTMP/b + $ cd $TESTTMP/b + $ cat > foo <<EOF + > 1 + > 2 + > 3 + > 4 + > 5 + > EOF + $ hg ci -qAm initial + $ cat > foo <<EOF + > 1 + > change1 + > 2 + > 3 + > change2 + > 4 + > 5 + > EOF + $ printf 'y\ny\ny\n' | hg ci -im initial --config commands.commit.interactive.unified=0 + diff --git a/foo b/foo + 2 hunks, 2 lines changed + examine changes to 'foo'? [Ynesfdaq?] y + + @@ -1,0 +2,1 @@ 1 + +change1 + record change 1/2 to 'foo'? [Ynesfdaq?] y + + @@ -3,0 +5,1 @@ 3 + +change2 + record change 2/2 to 'foo'? [Ynesfdaq?] y + + $ cd $TESTTMP + +Test diff.ignoreblanklines=1 + + $ hg init c + $ cd c + $ cat > foo <<EOF + > 1 + > 2 + > 3 + > 4 + > 5 + > EOF + $ hg ci -qAm initial + $ cat > foo <<EOF + > 1 + > + > 2 + > 3 + > change2 + > 4 + > 5 + > EOF + $ printf 'y\ny\ny\n' | hg ci -im initial --config diff.ignoreblanklines=1 + diff --git a/foo b/foo + 2 hunks, 2 lines changed + examine changes to 'foo'? [Ynesfdaq?] y + + @@ -1,3 +1,4 @@ + 1 + + + 2 + 3 + record change 1/2 to 'foo'? [Ynesfdaq?] y + + @@ -2,4 +3,5 @@ + 2 + 3 + +change2 + 4 + 5 + record change 2/2 to 'foo'? [Ynesfdaq?] y + +
--- a/tests/test-commit-multiple.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-commit-multiple.t Tue Mar 19 16:36:59 2019 +0300 @@ -95,8 +95,7 @@ > for f in repo[rev].files()))) > > repo = hg.repository(uimod.ui.load(), b'.') - > assert len(repo) == 6, \ - > "initial: len(repo): %d, expected: 6" % len(repo) + > assert len(repo) == 6, "initial: len(repo): %d, expected: 6" % len(repo) > > replacebyte(b"bugfix", b"u") > time.sleep(2)
--- a/tests/test-commit.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-commit.t Tue Mar 19 16:36:59 2019 +0300 @@ -512,6 +512,7 @@ HG: dels= HG: files=changed HG: + HG: diff -r d2313f97106f changed HG: --- a/changed Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/changed Thu Jan 01 00:00:00 1970 +0000 HG: @@ -1,1 +1,2 @@ @@ -573,6 +574,7 @@ HG: dels=removed HG: files=added removed HG: + HG: diff -r d2313f97106f added HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/added Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ @@ -583,6 +585,7 @@ HG: dels=removed HG: files=added removed HG: + HG: diff -r d2313f97106f removed HG: --- a/removed Thu Jan 01 00:00:00 1970 +0000 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: @@ -1,1 +0,0 @@
--- a/tests/test-completion.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-completion.t Tue Mar 19 16:36:59 2019 +0300 @@ -103,7 +103,10 @@ debugmergestate debugnamecomplete debugobsolete + debugp1copies + debugp2copies debugpathcomplete + debugpathcopies debugpeer debugpickmergetool debugpushkey @@ -279,7 +282,10 @@ debugmergestate: debugnamecomplete: debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template + debugp1copies: rev + debugp2copies: rev debugpathcomplete: full, normal, added, removed + debugpathcopies: include, exclude debugpeer: debugpickmergetool: rev, changedelete, include, exclude, tool debugpushkey:
--- a/tests/test-context.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-context.py Tue Mar 19 16:36:59 2019 +0300 @@ -63,7 +63,7 @@ # test performing a status def getfilectx(repo, memctx, f): - fctx = memctx.parents()[0][f] + fctx = memctx.p1()[f] data, flags = fctx.data(), fctx.flags() if f == b'foo': data += b'bar\n'
--- a/tests/test-contrib-check-code.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-contrib-check-code.t Tue Mar 19 16:36:59 2019 +0300 @@ -7,6 +7,9 @@ > def toto( arg1, arg2): > del(arg2) > return ( 5+6, 9) + > def badwrap(): + > return 1 + \\ + > 2 > NO_CHECK_EOF $ cat > quote.py <<NO_CHECK_EOF > # let's use quote in comments @@ -42,6 +45,9 @@ > return ( 5+6, 9) gratuitous whitespace in () or [] missing whitespace in expression + ./wrong.py:5: + > return 1 + \ + Use () to wrap long lines in Python, not \ ./quote.py:5: > '"""', 42+1, """and missing whitespace in expression @@ -373,3 +379,51 @@ > class empty(object): omit superfluous pass [1] + +Check code fragments embedded in test script + + $ cat > embedded-code.t <<NO_CHECK_EOF + > code fragment in doctest style + > >>> x = (1,2) + > ... + > ... x = (1,2) + > + > code fragment in heredoc style + > $ python <<EOF + > > x = (1,2) + > > EOF + > + > code fragment in file heredoc style + > $ python > file.py <<EOF + > > x = (1,2) + > > EOF + > NO_CHECK_EOF + $ "$check_code" embedded-code.t + embedded-code.t:2: + > x = (1,2) + missing whitespace after , + embedded-code.t:4: + > x = (1,2) + missing whitespace after , + embedded-code.t:8: + > x = (1,2) + missing whitespace after , + embedded-code.t:13: + > x = (1,2) + missing whitespace after , + [1] + +"max warnings per file" is shared by all embedded code fragments + + $ "$check_code" --per-file=3 embedded-code.t + embedded-code.t:2: + > x = (1,2) + missing whitespace after , + embedded-code.t:4: + > x = (1,2) + missing whitespace after , + embedded-code.t:8: + > x = (1,2) + missing whitespace after , + (too many errors, giving up) + [1]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-contrib-emacs.t Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,8 @@ +#require emacs + $ emacs -q -no-site-file -batch -l $TESTDIR/../contrib/hg-test-mode.el \ + > -f ert-run-tests-batch-and-exit + Running 1 tests (*) (glob) + passed 1/1 hg-test-mode--compilation-mode-support + + Ran 1 tests, 1 results as expected (*) (glob) +
--- a/tests/test-contrib-perf.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-contrib-perf.t Tue Mar 19 16:36:59 2019 +0300 @@ -88,12 +88,12 @@ (no help text available) perffncachewrite (no help text available) - perfheads (no help text available) + perfheads benchmark the computation of a changelog heads perfhelper-pathcopies find statistic about potential parameters for the 'perftracecopies' perfignore benchmark operation related to computing ignore - perfindex (no help text available) + perfindex benchmark index creation time followed by a lookup perflinelogedits (no help text available) perfloadmarkers @@ -109,6 +109,8 @@ perfmoonwalk benchmark walking the changelog backwards perfnodelookup (no help text available) + perfnodemap benchmark the time necessary to look up revision from a cold + nodemap perfparents (no help text available) perfpathcopies benchmark the copy tracing logic
--- a/tests/test-contrib-relnotes.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-contrib-relnotes.t Tue Mar 19 16:36:59 2019 +0300 @@ -266,7 +266,6 @@ * diff: disable diff.noprefix option for diffstat (Bts:issue5759) * evolution: make reporting of new unstable changesets optional * extdata: abort if external command exits with non-zero status (BC) - * fancyopts: add early-options parser compatible with getopt() * graphlog: add another graph node type, unstable, using character "*" (BC) * hgdemandimport: use correct hyperlink to python-bug in comments (Bts:issue5765) * httppeer: add support for tracing all http request made by the peer @@ -277,17 +276,18 @@ * morestatus: don't crash with different drive letters for repo.root and CWD * outgoing: respect ":pushurl" paths (Bts:issue5365) * remove: print message for each file in verbose mode only while using '-A' (BC) - * rewriteutil: use precheck() in uncommit and amend commands * scmutil: don't try to delete origbackup symlinks to directories (Bts:issue5731) * sshpeer: add support for request tracing * subrepo: add config option to reject any subrepo operations (SEC) * subrepo: disable git and svn subrepos by default (BC) (SEC) + * subrepo: disallow symlink traversal across subrepo mount point (SEC) * subrepo: extend config option to disable subrepos by type (SEC) * subrepo: handle 'C:' style paths on the command line (Bts:issue5770) * subrepo: use per-type config options to enable subrepos * svnsubrepo: check if subrepo is missing when checking dirty state (Bts:issue5657) * test-bookmarks-pushpull: stabilize for Windows * test-run-tests: stabilize the test (Bts:issue5735) + * tests: show symlink traversal across subrepo mount point (SEC) * tr-summary: keep a weakref to the unfiltered repository * unamend: fix command summary line * uncommit: unify functions _uncommitdirstate and _unamenddirstate to one
--- a/tests/test-convert-cvs.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-convert-cvs.t Tue Mar 19 16:36:59 2019 +0300 @@ -11,11 +11,11 @@ $ echo "[extensions]" >> $HGRCPATH $ echo "convert = " >> $HGRCPATH $ cat > cvshooks.py <<EOF - > def cvslog(ui,repo,hooktype,log): - > ui.write(b'%s hook: %d entries\n' % (hooktype,len(log))) + > def cvslog(ui, repo, hooktype, log): + > ui.write(b'%s hook: %d entries\n' % (hooktype, len(log))) > - > def cvschangesets(ui,repo,hooktype,changesets): - > ui.write(b'%s hook: %d changesets\n' % (hooktype,len(changesets))) + > def cvschangesets(ui, repo, hooktype, changesets): + > ui.write(b'%s hook: %d changesets\n' % (hooktype, len(changesets))) > EOF $ hookpath=`pwd` $ cat <<EOF >> $HGRCPATH
--- a/tests/test-convert-hg-svn.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-convert-hg-svn.t Tue Mar 19 16:36:59 2019 +0300 @@ -11,11 +11,7 @@ > EOF $ SVNREPOPATH=`pwd`/svn-repo -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" $ svnadmin create "$SVNREPOPATH" $ cat > "$SVNREPOPATH"/hooks/pre-revprop-change <<EOF
--- a/tests/test-convert-svn-move.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-convert-svn-move.t Tue Mar 19 16:36:59 2019 +0300 @@ -8,11 +8,7 @@ $ svnadmin create svn-repo $ svnadmin load -q svn-repo < "$TESTDIR/svn/move.svndump" $ SVNREPOPATH=`pwd`/svn-repo -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" Convert trunk and branches
--- a/tests/test-convert-svn-sink.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-convert-svn-sink.t Tue Mar 19 16:36:59 2019 +0300 @@ -466,3 +466,85 @@ msg: Add file a A /a $ rm -rf a a-hg a-hg-wc + +#if execbit + +Executable bit removal + + $ hg init a + + $ echo a > a/exec + $ chmod +x a/exec + $ hg --cwd a ci -d '1 0' -A -m 'create executable' + adding exec + $ chmod -x a/exec + $ hg --cwd a ci -d '2 0' -A -m 'remove executable bit' + + $ hg convert -d svn a + assuming destination a-hg + initializing svn repository 'a-hg' + initializing svn working copy 'a-hg-wc' + scanning source... + sorting... + converting... + 1 create executable + 0 remove executable bit + $ svnupanddisplay a-hg-wc 0 + 2 2 test . + 2 2 test exec + revision: 2 + author: test + msg: remove executable bit + M /exec + revision: 1 + author: test + msg: create executable + A /exec + $ test ! -x a-hg-wc/exec + + $ rm -rf a a-hg a-hg-wc + +#endif + +Skipping empty commits + + $ hg init a + + $ hg --cwd a --config ui.allowemptycommit=True ci -d '1 0' -m 'Initial empty commit' + + $ echo a > a/a + $ hg --cwd a ci -d '0 0' -A -m 'Some change' + adding a + $ hg --cwd a --config ui.allowemptycommit=True ci -d '2 0' -m 'Empty commit 1' + $ hg --cwd a --config ui.allowemptycommit=True ci -d '3 0' -m 'Empty commit 2' + $ echo b > a/b + $ hg --cwd a ci -d '0 0' -A -m 'Another change' + adding b + + $ hg convert -d svn a + assuming destination a-hg + initializing svn repository 'a-hg' + initializing svn working copy 'a-hg-wc' + scanning source... + sorting... + converting... + 4 Initial empty commit + 3 Some change + 2 Empty commit 1 + 1 Empty commit 2 + 0 Another change + + $ svnupanddisplay a-hg-wc 0 + 2 1 test a + 2 2 test . + 2 2 test b + revision: 2 + author: test + msg: Another change + A /b + revision: 1 + author: test + msg: Some change + A /a + + $ rm -rf a a-hg a-hg-wc
--- a/tests/test-convert-svn-source.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-convert-svn-source.t Tue Mar 19 16:36:59 2019 +0300 @@ -13,11 +13,7 @@ $ svnadmin create svn-repo $ SVNREPOPATH=`pwd`/svn-repo -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" $ INVALIDREVISIONID=svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1 $ VALIDREVISIONID=svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk/mytrunk@1
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-copies.t Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,633 @@ +#testcases filelog compatibility + + $ cat >> $HGRCPATH << EOF + > [extensions] + > rebase= + > [alias] + > l = log -G -T '{rev} {desc}\n{files}\n' + > EOF + +#if compatibility + $ cat >> $HGRCPATH << EOF + > [experimental] + > copies.read-from = compatibility + > EOF +#endif + + $ REPONUM=0 + $ newrepo() { + > cd $TESTTMP + > REPONUM=`expr $REPONUM + 1` + > hg init repo-$REPONUM + > cd repo-$REPONUM + > } + +Simple rename case + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg mv x y + $ hg debugp1copies + x -> y + $ hg debugp2copies + $ hg ci -m 'rename x to y' + $ hg l + @ 1 rename x to y + | x y + o 0 add x + x + $ hg debugp1copies -r 1 + x -> y + $ hg debugpathcopies 0 1 + x -> y + $ hg debugpathcopies 1 0 + y -> x +Test filtering copies by path. We do filtering by destination. + $ hg debugpathcopies 0 1 x + $ hg debugpathcopies 1 0 x + y -> x + $ hg debugpathcopies 0 1 y + x -> y + $ hg debugpathcopies 1 0 y + +Copy a file onto another file + $ newrepo + $ echo x > x + $ echo y > y + $ hg ci -Aqm 'add x and y' + $ hg cp -f x y + $ hg debugp1copies + x -> y + $ hg debugp2copies + $ hg ci -m 'copy x onto y' + $ hg l + @ 1 copy x onto y + | y + o 0 add x and y + x y + $ hg debugp1copies -r 1 + x -> y +Incorrectly doesn't show the rename + $ hg debugpathcopies 0 1 + +Copy a file onto another file with same content. If metadata is stored in changeset, this does not +produce a new filelog entry. The changeset's "files" entry should still list the file. + $ newrepo + $ echo x > x + $ echo x > x2 + $ hg ci -Aqm 'add x and x2 with same content' + $ hg cp -f x x2 + $ hg ci -m 'copy x onto x2' + $ hg l + @ 1 copy x onto x2 + | x2 + o 0 add x and x2 with same content + x x2 + $ hg debugp1copies -r 1 + x -> x2 +Incorrectly doesn't show the rename + $ hg debugpathcopies 0 1 + +Copy a file, then delete destination, then copy again. 
This does not create a new filelog entry. + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg cp x y + $ hg ci -m 'copy x to y' + $ hg rm y + $ hg ci -m 'remove y' + $ hg cp -f x y + $ hg ci -m 'copy x onto y (again)' + $ hg l + @ 3 copy x onto y (again) + | y + o 2 remove y + | y + o 1 copy x to y + | y + o 0 add x + x + $ hg debugp1copies -r 3 + x -> y + $ hg debugpathcopies 0 3 + x -> y + +Rename file in a loop: x->y->z->x + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg mv x y + $ hg debugp1copies + x -> y + $ hg debugp2copies + $ hg ci -m 'rename x to y' + $ hg mv y z + $ hg ci -m 'rename y to z' + $ hg mv z x + $ hg ci -m 'rename z to x' + $ hg l + @ 3 rename z to x + | x z + o 2 rename y to z + | y z + o 1 rename x to y + | x y + o 0 add x + x + $ hg debugpathcopies 0 3 + +Copy x to y, then remove y, then add back y. With copy metadata in the changeset, this could easily +end up reporting y as copied from x (if we don't unmark it as a copy when it's removed). + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg mv x y + $ hg ci -m 'rename x to y' + $ hg rm y + $ hg ci -qm 'remove y' + $ echo x > y + $ hg ci -Aqm 'add back y' + $ hg l + @ 3 add back y + | y + o 2 remove y + | y + o 1 rename x to y + | x y + o 0 add x + x + $ hg debugp1copies -r 3 + $ hg debugpathcopies 0 3 + +Copy x to z, then remove z, then copy x2 (same content as x) to z. With copy metadata in the +changeset, the two copies here will have the same filelog entry, so ctx['z'].introrev() might point +to the first commit that added the file. We should still report the copy as being from x2. 
+ $ newrepo + $ echo x > x + $ echo x > x2 + $ hg ci -Aqm 'add x and x2 with same content' + $ hg cp x z + $ hg ci -qm 'copy x to z' + $ hg rm z + $ hg ci -m 'remove z' + $ hg cp x2 z + $ hg ci -m 'copy x2 to z' + $ hg l + @ 3 copy x2 to z + | z + o 2 remove z + | z + o 1 copy x to z + | z + o 0 add x and x2 with same content + x x2 + $ hg debugp1copies -r 3 + x2 -> z + $ hg debugpathcopies 0 3 + x2 -> z + +Create x and y, then rename them both to the same name, but on different sides of a fork + $ newrepo + $ echo x > x + $ echo y > y + $ hg ci -Aqm 'add x and y' + $ hg mv x z + $ hg ci -qm 'rename x to z' + $ hg co -q 0 + $ hg mv y z + $ hg ci -qm 'rename y to z' + $ hg l + @ 2 rename y to z + | y z + | o 1 rename x to z + |/ x z + o 0 add x and y + x y + $ hg debugpathcopies 1 2 + z -> x + y -> z + +Fork renames x to y on one side and removes x on the other + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg mv x y + $ hg ci -m 'rename x to y' + $ hg co -q 0 + $ hg rm x + $ hg ci -m 'remove x' + created new head + $ hg l + @ 2 remove x + | x + | o 1 rename x to y + |/ x y + o 0 add x + x + $ hg debugpathcopies 1 2 + +Copies via null revision (there shouldn't be any) + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg cp x y + $ hg ci -m 'copy x to y' + $ hg co -q null + $ echo x > x + $ hg ci -Aqm 'add x (again)' + $ hg l + @ 2 add x (again) + x + o 1 copy x to y + | y + o 0 add x + x + $ hg debugpathcopies 1 2 + $ hg debugpathcopies 2 1 + +Merge rename from other branch + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg mv x y + $ hg ci -m 'rename x to y' + $ hg co -q 0 + $ echo z > z + $ hg ci -Aqm 'add z' + $ hg merge -q 1 + $ hg debugp1copies + $ hg debugp2copies + $ hg ci -m 'merge rename from p2' + $ hg l + @ 3 merge rename from p2 + |\ x + | o 2 add z + | | z + o | 1 rename x to y + |/ x y + o 0 add x + x +Perhaps we should indicate the rename here, but `hg status` is documented to be weird during +merges, so... 
+ $ hg debugp1copies -r 3 + $ hg debugp2copies -r 3 + $ hg debugpathcopies 0 3 + x -> y + $ hg debugpathcopies 1 2 + y -> x + $ hg debugpathcopies 1 3 + $ hg debugpathcopies 2 3 + x -> y + +Copy file from either side in a merge + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg co -q null + $ echo y > y + $ hg ci -Aqm 'add y' + $ hg merge -q 0 + $ hg cp y z + $ hg debugp1copies + y -> z + $ hg debugp2copies + $ hg ci -m 'copy file from p1 in merge' + $ hg co -q 1 + $ hg merge -q 0 + $ hg cp x z + $ hg debugp1copies + $ hg debugp2copies + x -> z + $ hg ci -qm 'copy file from p2 in merge' + $ hg l + @ 3 copy file from p2 in merge + |\ z + +---o 2 copy file from p1 in merge + | |/ z + | o 1 add y + | y + o 0 add x + x + $ hg debugp1copies -r 2 + y -> z + $ hg debugp2copies -r 2 + $ hg debugpathcopies 1 2 + y -> z + $ hg debugpathcopies 0 2 + $ hg debugp1copies -r 3 + $ hg debugp2copies -r 3 + x -> z + $ hg debugpathcopies 1 3 + $ hg debugpathcopies 0 3 + x -> z + +Copy file that exists on both sides of the merge, same content on both sides + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x on branch 1' + $ hg co -q null + $ echo x > x + $ hg ci -Aqm 'add x on branch 2' + $ hg merge -q 0 + $ hg cp x z + $ hg debugp1copies + x -> z + $ hg debugp2copies + $ hg ci -qm 'merge' + $ hg l + @ 2 merge + |\ z + | o 1 add x on branch 2 + | x + o 0 add x on branch 1 + x + $ hg debugp1copies -r 2 + x -> z + $ hg debugp2copies -r 2 +It's a little weird that it shows up on both sides + $ hg debugpathcopies 1 2 + x -> z + $ hg debugpathcopies 0 2 + x -> z (filelog !) + +Copy file that exists on both sides of the merge, different content + $ newrepo + $ echo branch1 > x + $ hg ci -Aqm 'add x on branch 1' + $ hg co -q null + $ echo branch2 > x + $ hg ci -Aqm 'add x on branch 2' + $ hg merge -q 0 + warning: conflicts while merging x! 
(edit, then use 'hg resolve --mark') + [1] + $ echo resolved > x + $ hg resolve -m x + (no more unresolved files) + $ hg cp x z + $ hg debugp1copies + x -> z + $ hg debugp2copies + $ hg ci -qm 'merge' + $ hg l + @ 2 merge + |\ x z + | o 1 add x on branch 2 + | x + o 0 add x on branch 1 + x + $ hg debugp1copies -r 2 + $ hg debugp2copies -r 2 + x -> z + $ hg debugpathcopies 1 2 + $ hg debugpathcopies 0 2 + x -> z + +Copy x->y on one side of merge and copy x->z on the other side. Pathcopies from one parent +of the merge to the merge should include the copy from the other side. + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg cp x y + $ hg ci -qm 'copy x to y' + $ hg co -q 0 + $ hg cp x z + $ hg ci -qm 'copy x to z' + $ hg merge -q 1 + $ hg ci -m 'merge copy x->y and copy x->z' + $ hg l + @ 3 merge copy x->y and copy x->z + |\ + | o 2 copy x to z + | | z + o | 1 copy x to y + |/ y + o 0 add x + x + $ hg debugp1copies -r 3 + $ hg debugp2copies -r 3 + $ hg debugpathcopies 2 3 + x -> y + $ hg debugpathcopies 1 3 + x -> z + +Copy x to y on one side of merge, create y and rename to z on the other side. Pathcopies from the +first side should not include the y->z rename since y didn't exist in the merge base. + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg cp x y + $ hg ci -qm 'copy x to y' + $ hg co -q 0 + $ echo y > y + $ hg ci -Aqm 'add y' + $ hg mv y z + $ hg ci -m 'rename y to z' + $ hg merge -q 1 + $ hg ci -m 'merge' + $ hg l + @ 4 merge + |\ + | o 3 rename y to z + | | y z + | o 2 add y + | | y + o | 1 copy x to y + |/ y + o 0 add x + x + $ hg debugp1copies -r 3 + y -> z + $ hg debugp2copies -r 3 + $ hg debugpathcopies 2 3 + y -> z + $ hg debugpathcopies 1 3 + +Create x and y, then rename x to z on one side of merge, and rename y to z and modify z on the +other side. 
+ $ newrepo + $ echo x > x + $ echo y > y + $ hg ci -Aqm 'add x and y' + $ hg mv x z + $ hg ci -qm 'rename x to z' + $ hg co -q 0 + $ hg mv y z + $ hg ci -qm 'rename y to z' + $ echo z >> z + $ hg ci -m 'modify z' + $ hg merge -q 1 + warning: conflicts while merging z! (edit, then use 'hg resolve --mark') + [1] + $ echo z > z + $ hg resolve -qm z + $ hg ci -m 'merge 1 into 3' +Try merging the other direction too + $ hg co -q 1 + $ hg merge -q 3 + warning: conflicts while merging z! (edit, then use 'hg resolve --mark') + [1] + $ echo z > z + $ hg resolve -qm z + $ hg ci -m 'merge 3 into 1' + created new head + $ hg l + @ 5 merge 3 into 1 + |\ y z + +---o 4 merge 1 into 3 + | |/ x z + | o 3 modify z + | | z + | o 2 rename y to z + | | y z + o | 1 rename x to z + |/ x z + o 0 add x and y + x y + $ hg debugpathcopies 1 4 + $ hg debugpathcopies 2 4 + $ hg debugpathcopies 0 4 + x -> z (filelog !) + y -> z (compatibility !) + $ hg debugpathcopies 1 5 + $ hg debugpathcopies 2 5 + $ hg debugpathcopies 0 5 + x -> z + + +Test for a case in fullcopytracing algorithm where both the merging csets are +"dirty"; where a dirty cset means that cset is descendant of merge base. 
This +test reflect that for this particular case this algorithm correctly find the copies: + + $ cat >> $HGRCPATH << EOF + > [experimental] + > evolution.createmarkers=True + > evolution.allowunstable=True + > EOF + + $ newrepo + $ echo a > a + $ hg add a + $ hg ci -m "added a" + $ echo b > b + $ hg add b + $ hg ci -m "added b" + + $ hg mv b b1 + $ hg ci -m "rename b to b1" + + $ hg up ".^" + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo d > d + $ hg add d + $ hg ci -m "added d" + created new head + + $ echo baba >> b + $ hg ci --amend -m "added d, modified b" + + $ hg l --hidden + @ 4 added d, modified b + | b d + | x 3 added d + |/ d + | o 2 rename b to b1 + |/ b b1 + o 1 added b + | b + o 0 added a + a + +Grafting revision 4 on top of revision 2, showing that it respect the rename: + + $ hg up 2 -q + $ hg graft -r 4 --base 3 --hidden + grafting 4:af28412ec03c "added d, modified b" (tip) + merging b1 and b to b1 + + $ hg l -l1 -p + @ 5 added d, modified b + | b1 + ~ diff -r 5a4825cc2926 -r 94a2f1a0e8e2 b1 + --- a/b1 Thu Jan 01 00:00:00 1970 +0000 + +++ b/b1 Thu Jan 01 00:00:00 1970 +0000 + @@ -1,1 +1,2 @@ + b + +baba + +Test to make sure that fullcopytracing algorithm don't fail when both the merging csets are dirty +(a dirty cset is one who is not the descendant of merge base) +------------------------------------------------------------------------------------------------- + + $ newrepo + $ echo a > a + $ hg add a + $ hg ci -m "added a" + $ echo b > b + $ hg add b + $ hg ci -m "added b" + + $ echo foobar > willconflict + $ hg add willconflict + $ hg ci -m "added willconflict" + $ echo c > c + $ hg add c + $ hg ci -m "added c" + + $ hg l + @ 3 added c + | c + o 2 added willconflict + | willconflict + o 1 added b + | b + o 0 added a + a + + $ hg up ".^^" + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ echo d > d + $ hg add d + $ hg ci -m "added d" + created new head + + $ echo barfoo > willconflict + $ hg 
add willconflict + $ hg ci --amend -m "added willconflict and d" + + $ hg l + @ 5 added willconflict and d + | d willconflict + | o 3 added c + | | c + | o 2 added willconflict + |/ willconflict + o 1 added b + | b + o 0 added a + a + + $ hg rebase -r . -d 2 -t :other + rebasing 5:5018b1509e94 "added willconflict and d" (tip) + + $ hg up 3 -q + $ hg l --hidden + o 6 added willconflict and d + | d willconflict + | x 5 added willconflict and d + | | d willconflict + | | x 4 added d + | |/ d + +---@ 3 added c + | | c + o | 2 added willconflict + |/ willconflict + o 1 added b + | b + o 0 added a + a + +Now if we trigger a merge between cset revision 3 and 6 using base revision 4, in this case +both the merging csets will be dirty as no one is descendent of base revision: + + $ hg graft -r 6 --base 4 --hidden -t :other + grafting 6:99802e4f1e46 "added willconflict and d" (tip)
--- a/tests/test-copy.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-copy.t Tue Mar 19 16:36:59 2019 +0300 @@ -118,6 +118,23 @@ [255] $ hg st -A ? foo +respects ui.relative-paths + $ mkdir dir + $ cd dir + $ hg mv ../foo ../bar + ../foo: not copying - file is not managed + abort: no files to copy + [255] + $ hg mv ../foo ../bar --config ui.relative-paths=yes + ../foo: not copying - file is not managed + abort: no files to copy + [255] + $ hg mv ../foo ../bar --config ui.relative-paths=no + foo: not copying - file is not managed + abort: no files to copy + [255] + $ cd .. + $ rmdir dir $ hg add foo dry-run; print a warning that this is not a real copy; foo is added $ hg mv --dry-run foo bar
--- a/tests/test-debugcommands.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-debugcommands.t Tue Mar 19 16:36:59 2019 +0300 @@ -541,9 +541,10 @@ $ hg debugupdatecaches --debug updating the branch cache $ ls -r .hg/cache/* + .hg/cache/tags2-served + .hg/cache/tags2 .hg/cache/rbc-revs-v1 .hg/cache/rbc-names-v1 - .hg/cache/manifestfulltextcache (reporevlogstore !) .hg/cache/branch2-served Test debugcolor
--- a/tests/test-demandimport.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-demandimport.py Tue Mar 19 16:36:59 2019 +0300 @@ -6,12 +6,30 @@ import os import subprocess import sys +import types + +# Don't import pycompat because it has too many side-effects. +ispy3 = sys.version_info[0] >= 3 # Only run if demandimport is allowed if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'demandimport']): sys.exit(80) +# We rely on assert, which gets optimized out. +if sys.flags.optimize: + sys.exit(80) + +if ispy3: + from importlib.util import _LazyModule + + try: + from importlib.util import _Module as moduletype + except ImportError: + moduletype = types.ModuleType +else: + moduletype = types.ModuleType + if os.name != 'nt': try: import distutils.msvc9compiler @@ -36,76 +54,173 @@ # this enable call should not actually enable demandimport! demandimport.enable() from mercurial import node -print("node =", f(node)) + +# We use assert instead of a unittest test case because having imports inside +# functions changes behavior of the demand importer. +if ispy3: + assert not isinstance(node, _LazyModule) +else: + assert f(node) == "<module 'mercurial.node' from '?'>", f(node) + # now enable it for real del os.environ['HGDEMANDIMPORT'] demandimport.enable() # Test access to special attributes through demandmod proxy +assert 'mercurial.error' not in sys.modules from mercurial import error as errorproxy -print("errorproxy =", f(errorproxy)) -print("errorproxy.__doc__ = %r" - % (' '.join(errorproxy.__doc__.split()[:3]) + ' ...')) -print("errorproxy.__name__ = %r" % errorproxy.__name__) + +if ispy3: + # unsure why this isn't lazy. + assert not isinstance(f, _LazyModule) + assert f(errorproxy) == "<module 'mercurial.error' from '?'>", f(errorproxy) +else: + assert f(errorproxy) == "<unloaded module 'error'>", f(errorproxy) + +doc = ' '.join(errorproxy.__doc__.split()[:3]) +assert doc == 'Mercurial exceptions. 
This', doc +assert errorproxy.__name__ == 'mercurial.error', errorproxy.__name__ + # __name__ must be accessible via __dict__ so the relative imports can be # resolved -print("errorproxy.__dict__['__name__'] = %r" % errorproxy.__dict__['__name__']) -print("errorproxy =", f(errorproxy)) +name = errorproxy.__dict__['__name__'] +assert name == 'mercurial.error', name + +if ispy3: + assert not isinstance(errorproxy, _LazyModule) + assert f(errorproxy) == "<module 'mercurial.error' from '?'>", f(errorproxy) +else: + assert f(errorproxy) == "<proxied module 'error'>", f(errorproxy) import os -print("os =", f(os)) -print("os.system =", f(os.system)) -print("os =", f(os)) +if ispy3: + assert not isinstance(os, _LazyModule) + assert f(os) == "<module 'os' from '?'>", f(os) +else: + assert f(os) == "<unloaded module 'os'>", f(os) +assert f(os.system) == '<built-in function system>', f(os.system) +assert f(os) == "<module 'os' from '?'>", f(os) + +assert 'mercurial.utils.procutil' not in sys.modules from mercurial.utils import procutil -print("procutil =", f(procutil)) -print("procutil.system =", f(procutil.system)) -print("procutil =", f(procutil)) -print("procutil.system =", f(procutil.system)) +if ispy3: + assert isinstance(procutil, _LazyModule) + assert f(procutil) == "<module 'mercurial.utils.procutil' from '?'>", f( + procutil + ) +else: + assert f(procutil) == "<unloaded module 'procutil'>", f(procutil) + +assert f(procutil.system) == '<function system at 0x?>', f(procutil.system) +assert procutil.__class__ == moduletype, procutil.__class__ +assert f(procutil) == "<module 'mercurial.utils.procutil' from '?'>", f( + procutil +) +assert f(procutil.system) == '<function system at 0x?>', f(procutil.system) +assert 'mercurial.hgweb' not in sys.modules from mercurial import hgweb -print("hgweb =", f(hgweb)) -print("hgweb_mod =", f(hgweb.hgweb_mod)) -print("hgweb =", f(hgweb)) + +if ispy3: + assert not isinstance(hgweb, _LazyModule) + assert f(hgweb) == "<module 
'mercurial.hgweb' from '?'>", f(hgweb) + assert isinstance(hgweb.hgweb_mod, _LazyModule) + assert ( + f(hgweb.hgweb_mod) == "<module 'mercurial.hgweb.hgweb_mod' from '?'>" + ), f(hgweb.hgweb_mod) +else: + assert f(hgweb) == "<unloaded module 'hgweb'>", f(hgweb) + assert f(hgweb.hgweb_mod) == "<unloaded module 'hgweb_mod'>", f( + hgweb.hgweb_mod + ) + +assert f(hgweb) == "<module 'mercurial.hgweb' from '?'>", f(hgweb) import re as fred -print("fred =", f(fred)) + +if ispy3: + assert not isinstance(fred, _LazyModule) + assert f(fred) == "<module 're' from '?'>" +else: + assert f(fred) == "<unloaded module 're'>", f(fred) import re as remod -print("remod =", f(remod)) + +if ispy3: + assert not isinstance(remod, _LazyModule) + assert f(remod) == "<module 're' from '?'>" +else: + assert f(remod) == "<unloaded module 're'>", f(remod) import sys as re -print("re =", f(re)) + +if ispy3: + assert not isinstance(re, _LazyModule) + assert f(re) == "<module 'sys' (built-in)>" +else: + assert f(re) == "<unloaded module 'sys'>", f(re) -print("fred =", f(fred)) -print("fred.sub =", f(fred.sub)) -print("fred =", f(fred)) +if ispy3: + assert not isinstance(fred, _LazyModule) + assert f(fred) == "<module 're' from '?'>", f(fred) +else: + assert f(fred) == "<unloaded module 're'>", f(fred) + +assert f(fred.sub) == '<function sub at 0x?>', f(fred.sub) + +if ispy3: + assert not isinstance(fred, _LazyModule) + assert f(fred) == "<module 're' from '?'>", f(fred) +else: + assert f(fred) == "<proxied module 're'>", f(fred) remod.escape # use remod -print("remod =", f(remod)) +assert f(remod) == "<module 're' from '?'>", f(remod) -print("re =", f(re)) -print("re.stderr =", f(re.stderr)) -print("re =", f(re)) +if ispy3: + assert not isinstance(re, _LazyModule) + assert f(re) == "<module 'sys' (built-in)>" + assert f(type(re.stderr)) == "<class '_io.TextIOWrapper'>", f( + type(re.stderr) + ) + assert f(re) == "<module 'sys' (built-in)>" +else: + assert f(re) == "<unloaded module 'sys'>", 
f(re) + assert f(re.stderr) == "<open file '<whatever>', mode 'w' at 0x?>", f( + re.stderr + ) + assert f(re) == "<proxied module 'sys'>", f(re) -import contextlib -print("contextlib =", f(contextlib)) +assert 'telnetlib' not in sys.modules +import telnetlib + +if ispy3: + assert not isinstance(telnetlib, _LazyModule) + assert f(telnetlib) == "<module 'telnetlib' from '?'>" +else: + assert f(telnetlib) == "<unloaded module 'telnetlib'>", f(telnetlib) + try: - from contextlib import unknownattr - print('no demandmod should be created for attribute of non-package ' - 'module:\ncontextlib.unknownattr =', f(unknownattr)) + from telnetlib import unknownattr + + assert False, ( + 'no demandmod should be created for attribute of non-package ' + 'module:\ntelnetlib.unknownattr = %s' % f(unknownattr) + ) except ImportError as inst: - print('contextlib.unknownattr = ImportError: %s' - % rsub(r"'", '', str(inst))) + assert rsub(r"'", '', str(inst)).startswith( + 'cannot import name unknownattr' + ) from mercurial import util # Unlike the import statement, __import__() function should not raise # ImportError even if fromlist has an unknown item # (see Python/import.c:import_module_level() and ensure_fromlist()) -contextlibimp = __import__('contextlib', globals(), locals(), ['unknownattr']) -print("__import__('contextlib', ..., ['unknownattr']) =", f(contextlibimp)) -print("hasattr(contextlibimp, 'unknownattr') =", - util.safehasattr(contextlibimp, 'unknownattr')) +assert 'zipfile' not in sys.modules +zipfileimp = __import__('zipfile', globals(), locals(), ['unknownattr']) +assert f(zipfileimp) == "<module 'zipfile' from '?'>", f(zipfileimp) +assert not util.safehasattr(zipfileimp, 'unknownattr')
--- a/tests/test-demandimport.py.out Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,30 +0,0 @@ -node = <module 'mercurial.node' from '?'> -errorproxy = <unloaded module 'error'> -errorproxy.__doc__ = 'Mercurial exceptions. This ...' -errorproxy.__name__ = 'mercurial.error' -errorproxy.__dict__['__name__'] = 'mercurial.error' -errorproxy = <proxied module 'error'> -os = <unloaded module 'os'> -os.system = <built-in function system> -os = <module 'os' from '?'> -procutil = <unloaded module 'procutil'> -procutil.system = <function system at 0x?> -procutil = <module 'mercurial.utils.procutil' from '?'> -procutil.system = <function system at 0x?> -hgweb = <unloaded module 'hgweb'> -hgweb_mod = <unloaded module 'hgweb_mod'> -hgweb = <module 'mercurial.hgweb' from '?'> -fred = <unloaded module 're'> -remod = <unloaded module 're'> -re = <unloaded module 'sys'> -fred = <unloaded module 're'> -fred.sub = <function sub at 0x?> -fred = <proxied module 're'> -remod = <module 're' from '?'> -re = <unloaded module 'sys'> -re.stderr = <open file '<whatever>', mode 'w' at 0x?> -re = <proxied module 'sys'> -contextlib = <unloaded module 'contextlib'> -contextlib.unknownattr = ImportError: cannot import name unknownattr -__import__('contextlib', ..., ['unknownattr']) = <module 'contextlib' from '?'> -hasattr(contextlibimp, 'unknownattr') = False
--- a/tests/test-diff-hashes.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-diff-hashes.t Tue Mar 19 16:36:59 2019 +0300 @@ -13,6 +13,7 @@ $ hg ci -m 'change foo' $ hg --quiet diff -r 0 -r 1 + diff -r a99fb63adac3 -r 9b8568d3af2f foo --- a/foo Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@
--- a/tests/test-diffstat.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-diffstat.t Tue Mar 19 16:36:59 2019 +0300 @@ -146,10 +146,21 @@ $ hg diff --stat . dir1/new | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) + $ hg diff --stat . --config ui.relative-paths=yes + new | 1 + + 1 files changed, 1 insertions(+), 0 deletions(-) $ hg diff --stat --root . new | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) + $ hg diff --stat --root . --config ui.relative-paths=yes + new | 1 + + 1 files changed, 1 insertions(+), 0 deletions(-) +--root trumps ui.relative-paths + $ hg diff --stat --root .. --config ui.relative-paths=yes + new | 1 + + ../dir2/new | 1 + + 2 files changed, 2 insertions(+), 0 deletions(-) $ hg diff --stat --root ../dir1 ../dir2 warning: ../dir2 not inside relative root . @@ -236,3 +247,48 @@ $ hg diff --root . --stat file | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) + +When a file is renamed, --git shouldn't loss the info about old file + $ hg init issue6025 + $ cd issue6025 + $ echo > a + $ hg ci -Am 'add a' + adding a + $ hg mv a b + $ hg diff --git + diff --git a/a b/b + rename from a + rename to b + $ hg diff --stat + a | 1 - + b | 1 + + 2 files changed, 1 insertions(+), 1 deletions(-) + $ hg diff --stat --git + a => b | 0 + 1 files changed, 0 insertions(+), 0 deletions(-) +-- filename may contain whitespaces + $ echo > c + $ hg ci -Am 'add c' + adding c + $ hg mv c 'new c' + $ hg diff --git + diff --git a/c b/new c + rename from c + rename to new c + $ hg diff --stat + c | 1 - + new c | 1 + + 2 files changed, 1 insertions(+), 1 deletions(-) + $ hg diff --stat --git + c => new c | 0 + 1 files changed, 0 insertions(+), 0 deletions(-) + +Make sure `diff --stat -q --config diff.git-0` shows stat (issue4037) + + $ hg status + A new c + R c + $ hg diff --stat -q + c | 1 - + new c | 1 + + 2 files changed, 1 insertions(+), 1 deletions(-)
--- a/tests/test-dispatch.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-dispatch.t Tue Mar 19 16:36:59 2019 +0300 @@ -188,7 +188,8 @@ specified" should include filename even when it is empty $ hg -R a archive '' - abort: *: '' (glob) + abort: $ENOENT$: '' (no-windows !) + abort: $ENOTDIR$: '' (windows !) [255] #if no-outer-repo
--- a/tests/test-doctest.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-doctest.py Tue Mar 19 16:36:59 2019 +0300 @@ -62,6 +62,7 @@ testmod('mercurial.pycompat') testmod('mercurial.revlog') testmod('mercurial.revlogutils.deltas') +testmod('mercurial.revset') testmod('mercurial.revsetlang') testmod('mercurial.smartset') testmod('mercurial.store')
--- a/tests/test-duplicateoptions.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-duplicateoptions.py Tue Mar 19 16:36:59 2019 +0300 @@ -41,8 +41,8 @@ seenshort = globalshort.copy() seenlong = globallong.copy() for option in entry[1]: - if (option[0] and option[0] in seenshort) or \ - (option[1] and option[1] in seenlong): + if ((option[0] and option[0] in seenshort) or + (option[1] and option[1] in seenlong)): print("command '" + cmd + "' has duplicate option " + str(option)) seenshort.add(option[0]) seenlong.add(option[1])
--- a/tests/test-encoding-align.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-encoding-align.t Tue Mar 19 16:36:59 2019 +0300 @@ -5,6 +5,7 @@ $ hg init t $ cd t $ "$PYTHON" << EOF + > from mercurial import pycompat > # (byte, width) = (6, 4) > s = b"\xe7\x9f\xad\xe5\x90\x8d" > # (byte, width) = (7, 7): odd width is good for alignment test @@ -21,14 +22,17 @@ > command = registrar.command(cmdtable) > > @command(b'showoptlist', - > [('s', 'opt1', '', 'short width' + ' %(s)s' * 8, '%(s)s'), - > ('m', 'opt2', '', 'middle width' + ' %(m)s' * 8, '%(m)s'), - > ('l', 'opt3', '', 'long width' + ' %(l)s' * 8, '%(l)s')], - > '') + > [(b's', b'opt1', b'', b'short width' + (b' ' +%(s)s) * 8, %(s)s), + > (b'm', b'opt2', b'', b'middle width' + (b' ' + %(m)s) * 8, %(m)s), + > (b'l', b'opt3', b'', b'long width' + (b' ' + %(l)s) * 8, %(l)s)], + > b'') > def showoptlist(ui, repo, *pats, **opts): > '''dummy command to show option descriptions''' > return 0 - > """ % globals()) + > """ % {b's': pycompat.byterepr(s), + > b'm': pycompat.byterepr(m), + > b'l': pycompat.byterepr(l), + > }) > f.close() > EOF $ S=`cat s`
--- a/tests/test-extdiff.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-extdiff.t Tue Mar 19 16:36:59 2019 +0300 @@ -22,6 +22,10 @@ > opts.falabala = diffing > cmd.edspace = echo > opts.edspace = "name <user@example.com>" + > alabalaf = + > [merge-tools] + > alabalaf.executable = echo + > alabalaf.diffargs = diffing > EOF $ hg falabala @@ -48,6 +52,8 @@ -o --option OPT [+] pass option to comparison program -r --rev REV [+] revision -c --change REV change made by revision + --per-file compare each file instead of revision snapshots + --confirm prompt user before each external program invocation --patch compare patches for two revisions -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns @@ -128,6 +134,72 @@ diffing a.398e36faf9c6 a.5ab95fb166c4 [1] +Test --per-file option: + + $ hg up -q -C 3 + $ echo a2 > a + $ echo b2 > b + $ hg ci -d '3 0' -mtestmode1 + created new head + $ hg falabala -c 6 --per-file + diffing "*\\extdiff.*\\a.46c0e4daeb72\\a" "a.81906f2b98ac\\a" (glob) (windows !) + diffing */extdiff.*/a.46c0e4daeb72/a a.81906f2b98ac/a (glob) (no-windows !) + diffing "*\\extdiff.*\\a.46c0e4daeb72\\b" "a.81906f2b98ac\\b" (glob) (windows !) + diffing */extdiff.*/a.46c0e4daeb72/b a.81906f2b98ac/b (glob) (no-windows !) 
+ [1] + +Test --per-file option for gui tool: + + $ hg --config extdiff.gui.alabalaf=True alabalaf -c 6 --per-file --debug + diffing */extdiff.*/a.46c0e4daeb72/* a.81906f2b98ac/* (glob) + diffing */extdiff.*/a.46c0e4daeb72/* a.81906f2b98ac/* (glob) + making snapshot of 2 files from rev 46c0e4daeb72 + a + b + making snapshot of 2 files from rev 81906f2b98ac + a + b + running '* diffing * *' in * (backgrounded) (glob) + running '* diffing * *' in * (backgrounded) (glob) + cleaning up temp directory + [1] + +Test --per-file option for gui tool again: + + $ hg --config merge-tools.alabalaf.gui=True alabalaf -c 6 --per-file --debug + diffing */extdiff.*/a.46c0e4daeb72/* a.81906f2b98ac/* (glob) + diffing */extdiff.*/a.46c0e4daeb72/* a.81906f2b98ac/* (glob) + making snapshot of 2 files from rev 46c0e4daeb72 + a + b + making snapshot of 2 files from rev 81906f2b98ac + a + b + running '* diffing * *' in * (backgrounded) (glob) + running '* diffing * *' in * (backgrounded) (glob) + cleaning up temp directory + [1] + +Test --per-file and --confirm options: + + $ hg --config ui.interactive=True falabala -c 6 --per-file --confirm <<EOF + > n + > y + > EOF + diff a (1 of 2) [Yns?] n + diff b (2 of 2) [Yns?] y + diffing "*\\extdiff.*\\a.46c0e4daeb72\\b" "a.81906f2b98ac\\b" (glob) (windows !) + diffing */extdiff.*/a.46c0e4daeb72/b a.81906f2b98ac/b (glob) (no-windows !) + [1] + +Test --per-file and --confirm options with skipping: + + $ hg --config ui.interactive=True falabala -c 6 --per-file --confirm <<EOF + > s + > EOF + diff a (1 of 2) [Yns?] s + [1] + issue4463: usage of command line configuration without additional quoting $ cat <<EOF >> $HGRCPATH
--- a/tests/test-extension.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-extension.t Tue Mar 19 16:36:59 2019 +0300 @@ -610,7 +610,8 @@ > cmdtable = {} > command = registrar.command(cmdtable) > - > # demand import avoids failure of importing notexist here + > # demand import avoids failure of importing notexist here, but only on + > # Python 2. > import extlibroot.lsub1.lsub2.notexist > > @command(b'checkrelativity', [], norepo=True) @@ -622,7 +623,13 @@ > pass # intentional failure > NO_CHECK_EOF - $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.checkrelativity=$TESTTMP/checkrelativity.py checkrelativity) +Python 3's lazy importer verifies modules exist before returning the lazy +module stub. Our custom lazy importer for Python 2 always returns a stub. + + $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.checkrelativity=$TESTTMP/checkrelativity.py checkrelativity) || true + *** failed to import extension checkrelativity from $TESTTMP/checkrelativity.py: No module named 'extlibroot.lsub1.lsub2.notexist' (py3 !) + hg: unknown command 'checkrelativity' (py3 !) + (use 'hg help' for a list of commands) (py3 !) 
#endif @@ -633,7 +640,7 @@ Make sure a broken uisetup doesn't globally break hg: $ cat > $TESTTMP/baduisetup.py <<EOF > def uisetup(ui): - > 1/0 + > 1 / 0 > EOF Even though the extension fails during uisetup, hg is still basically usable: @@ -642,7 +649,7 @@ File "*/mercurial/extensions.py", line *, in _runuisetup (glob) uisetup(ui) File "$TESTTMP/baduisetup.py", line 2, in uisetup - 1/0 + 1 / 0 ZeroDivisionError: * by zero (glob) *** failed to set up extension baduisetup: * by zero (glob) Mercurial Distributed SCM (version *) (glob) @@ -681,13 +688,11 @@ > @command(b'debugfoobar', [], b'hg debugfoobar') > def debugfoobar(ui, repo, *args, **opts): > "yet another debug command" - > pass > @command(b'foo', [], b'hg foo') > def foo(ui, repo, *args, **opts): > """yet another foo command > This command has been DEPRECATED since forever. > """ - > pass > EOF $ debugpath=`pwd`/debugextension.py $ echo "debugextension = $debugpath" >> $HGRCPATH @@ -805,15 +810,28 @@ "-Npru". To select a different program, use the -p/--program option. The program - will be passed the names of two directories to compare. To pass additional - options to the program, use -o/--option. These will be passed before the - names of the directories to compare. + will be passed the names of two directories to compare, unless the --per- + file option is specified (see below). To pass additional options to the + program, use -o/--option. These will be passed before the names of the + directories or files to compare. When two revision arguments are given, then changes are shown between those revisions. If only one revision is specified then that revision is compared to the working directory, and, when no revisions are specified, the working directory files are compared to its parent. + The --per-file option runs the external program repeatedly on each file to + diff, instead of once on two directories. 
By default, this happens one by + one, where the next file diff is open in the external program only once + the previous external program (for the previous file diff) has exited. If + the external program has a graphical interface, it can open all the file + diffs at once instead of one by one. See 'hg help -e extdiff' for + information about how to tell Mercurial that a given program has a + graphical interface. + + The --confirm option will prompt the user before each invocation of the + external program. It is ignored if --per-file isn't specified. + (use 'hg help -e extdiff' to show help for the extdiff extension) options ([+] can be repeated): @@ -822,6 +840,8 @@ -o --option OPT [+] pass option to comparison program -r --rev REV [+] revision -c --change REV change made by revision + --per-file compare each file instead of revision snapshots + --confirm prompt user before each external program invocation --patch compare patches for two revisions -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns @@ -889,6 +909,20 @@ [diff-tools] kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child + If a program has a graphical interface, it might be interesting to tell + Mercurial about it. It will prevent the program from being mistakenly used in + a terminal-only environment (such as an SSH terminal session), and will make + 'hg extdiff --per-file' open multiple file diffs at once instead of one by one + (if you still want to open file diffs one by one, you can use the --confirm + option). + + Declaring that a tool has a graphical interface can be done with the "gui" + flag next to where "diffargs" are specified: + + [diff-tools] + kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child + kdiff3.gui = true + You can use -I/-X and list of file or directory names like normal 'hg diff' command. 
The extdiff extension makes snapshots of only needed files, so running the external diff program will actually be pretty fast (at least @@ -928,7 +962,6 @@ > @command(b'multirevs', [], b'ARG', norepo=True) > def multirevs(ui, repo, arg, *args, **opts): > """multirevs command""" - > pass > EOF $ echo "multirevs = multirevs.py" >> $HGRCPATH
--- a/tests/test-fastannotate-hg.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-fastannotate-hg.t Tue Mar 19 16:36:59 2019 +0300 @@ -593,7 +593,7 @@ $ rm baz $ hg annotate -ncr "wdir()" baz abort: $TESTTMP/repo/baz: $ENOENT$ (windows !) - abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !) + abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !) [255] annotate removed file @@ -601,7 +601,7 @@ $ hg rm baz $ hg annotate -ncr "wdir()" baz abort: $TESTTMP/repo/baz: $ENOENT$ (windows !) - abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !) + abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !) [255] Test annotate with whitespace options
--- a/tests/test-flagprocessor.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-flagprocessor.t Tue Mar 19 16:36:59 2019 +0300 @@ -209,11 +209,13 @@ _insertflagprocessor(flag, processor, _flagprocessors) File "*/mercurial/revlog.py", line *, in _insertflagprocessor (glob) raise error.Abort(msg) - Abort: cannot register multiple processors on flag '0x8'. + mercurial.error.Abort: b"cannot register multiple processors on flag '0x8'." (py3 !) + Abort: cannot register multiple processors on flag '0x8'. (no-py3 !) *** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'. $ hg st 2>&1 | egrep 'cannot register multiple processors|flagprocessorext' File "*/tests/flagprocessorext.py", line *, in extsetup (glob) - Abort: cannot register multiple processors on flag '0x8'. + mercurial.error.Abort: b"cannot register multiple processors on flag '0x8'." (py3 !) + Abort: cannot register multiple processors on flag '0x8'. (no-py3 !) *** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'. File "*/tests/flagprocessorext.py", line *, in b64decode (glob)
--- a/tests/test-fncache.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-fncache.t Tue Mar 19 16:36:59 2019 +0300 @@ -1,5 +1,19 @@ #require repofncache +An extension which will set fncache chunksize to 1 byte to make sure that logic +does not break + + $ cat > chunksize.py <<EOF + > from __future__ import absolute_import + > from mercurial import store + > store.fncache_chunksize = 1 + > EOF + + $ cat >> $HGRCPATH <<EOF + > [extensions] + > chunksize = $TESTTMP/chunksize.py + > EOF + Init repo1: $ hg init repo1 @@ -88,7 +102,6 @@ .hg/00manifest.i .hg/cache .hg/cache/branch2-served - .hg/cache/manifestfulltextcache (reporevlogstore !) .hg/cache/rbc-names-v1 .hg/cache/rbc-revs-v1 .hg/data @@ -111,6 +124,7 @@ .hg/wcache/checkisexec (execbit !) .hg/wcache/checklink (symlink !) .hg/wcache/checklink-target (symlink !) + .hg/wcache/manifestfulltextcache (reporevlogstore !) $ cd .. Non fncache repo: @@ -126,7 +140,6 @@ .hg/00changelog.i .hg/cache .hg/cache/branch2-served - .hg/cache/manifestfulltextcache (reporevlogstore !) .hg/cache/rbc-names-v1 .hg/cache/rbc-revs-v1 .hg/dirstate @@ -152,6 +165,7 @@ .hg/wcache/checkisexec (execbit !) .hg/wcache/checklink (symlink !) .hg/wcache/checklink-target (symlink !) + .hg/wcache/manifestfulltextcache (reporevlogstore !) $ cd .. Encoding of reserved / long paths in the store
--- a/tests/test-generaldelta.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-generaldelta.t Tue Mar 19 16:36:59 2019 +0300 @@ -339,7 +339,7 @@ 52 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 53 6 1 -1 base 0 0 0 0.00000 0 0 0.00000 54 7 1 -1 base 369 640 369 0.57656 369 0 0.00000 - $ hg clone --pull source-repo --config experimental.maxdeltachainspan=0 noconst-chain --config format.generaldelta=yes + $ hg clone --pull source-repo --config experimental.maxdeltachainspan=0 noconst-chain --config format.usegeneraldelta=yes --config storage.revlog.reuse-external-delta-parent=no requesting all changes adding changesets adding manifests
--- a/tests/test-grep.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-grep.t Tue Mar 19 16:36:59 2019 +0300 @@ -32,13 +32,27 @@ port:4:vaportight port:4:import/export +simple from subdirectory + + $ mkdir dir + $ cd dir + $ hg grep -r tip:0 port + port:4:export + port:4:vaportight + port:4:import/export + $ hg grep -r tip:0 port --config ui.relative-paths=yes + ../port:4:export + ../port:4:vaportight + ../port:4:import/export + $ cd .. + simple with color $ hg --config extensions.color= grep --config color.mode=ansi \ > --color=always port port -r tip:0 - \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mex\x1b[0;31;1mport\x1b[0m (esc) - \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mva\x1b[0;31;1mport\x1b[0might (esc) - \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mim\x1b[0;31;1mport\x1b[0m/ex\x1b[0;31;1mport\x1b[0m (esc) + \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m4\x1b[0m\x1b[0;36m:\x1b[0mex\x1b[0;31;1mport\x1b[0m (esc) + \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m4\x1b[0m\x1b[0;36m:\x1b[0mva\x1b[0;31;1mport\x1b[0might (esc) + \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m4\x1b[0m\x1b[0;36m:\x1b[0mim\x1b[0;31;1mport\x1b[0m/ex\x1b[0;31;1mport\x1b[0m (esc) simple templated @@ -285,6 +299,15 @@ color:3:+:orange color:2:-:orange color:1:+:orange + $ hg grep --diff orange --color=debug + [grep.filename|color][grep.sep|:][grep.rev|3][grep.sep|:][grep.inserted grep.change|+][grep.sep|:][grep.match|orange] + [grep.filename|color][grep.sep|:][grep.rev|2][grep.sep|:][grep.deleted grep.change|-][grep.sep|:][grep.match|orange] + [grep.filename|color][grep.sep|:][grep.rev|1][grep.sep|:][grep.inserted grep.change|+][grep.sep|:][grep.match|orange] + + $ hg grep --diff orange --color=yes + \x1b[0;35mcolor\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m3\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32;1m+\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;31;1morange\x1b[0m (esc) + 
\x1b[0;35mcolor\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m2\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;31;1m-\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;31;1morange\x1b[0m (esc) + \x1b[0;35mcolor\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m1\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32;1m+\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;31;1morange\x1b[0m (esc) $ hg grep --diff orange color:3:+:orange @@ -503,5 +526,8 @@ $ hg grep -r "0:2" "unmod" --all-files um um:0:unmod um:1:unmod + $ hg grep -r "0:2" "unmod" --all-files "glob:**/um" # Check that patterns also work + um:0:unmod + um:1:unmod $ cd ..
--- a/tests/test-hardlinks.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-hardlinks.t Tue Mar 19 16:36:59 2019 +0300 @@ -239,7 +239,6 @@ 2 r4/.hg/branch 2 r4/.hg/cache/branch2-base 2 r4/.hg/cache/branch2-served - 2 r4/.hg/cache/manifestfulltextcache (reporevlogstore !) 2 r4/.hg/cache/rbc-names-v1 2 r4/.hg/cache/rbc-revs-v1 2 r4/.hg/dirstate @@ -268,6 +267,7 @@ 2 r4/.hg/wcache/checkisexec (execbit !) 2 r4/.hg/wcache/checklink-target (symlink !) 2 r4/.hg/wcache/checknoexec (execbit !) + 2 r4/.hg/wcache/manifestfulltextcache (reporevlogstore !) 2 r4/d1/data1 2 r4/d1/f2 2 r4/f1 @@ -290,7 +290,6 @@ 1 r4/.hg/branch 2 r4/.hg/cache/branch2-base 2 r4/.hg/cache/branch2-served - 2 r4/.hg/cache/manifestfulltextcache (reporevlogstore !) 2 r4/.hg/cache/rbc-names-v1 2 r4/.hg/cache/rbc-revs-v1 1 r4/.hg/dirstate @@ -319,6 +318,7 @@ 2 r4/.hg/wcache/checkisexec (execbit !) 2 r4/.hg/wcache/checklink-target (symlink !) 2 r4/.hg/wcache/checknoexec (execbit !) + 1 r4/.hg/wcache/manifestfulltextcache (reporevlogstore !) 2 r4/d1/data1 2 r4/d1/f2 1 r4/f1
--- a/tests/test-help.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-help.t Tue Mar 19 16:36:59 2019 +0300 @@ -825,7 +825,6 @@ > @command(b'hashelp', [], b'hg hashelp', norepo=True) > def hashelp(ui, *args, **kwargs): > """Extension command's help""" - > pass > > def uisetup(ui): > ui.setconfig(b'alias', b'shellalias', b'!echo hi', b'helpext') @@ -1012,8 +1011,14 @@ debugoptADV (no help text available) debugoptDEP (no help text available) debugoptEXP (no help text available) + debugp1copies + dump copy information compared to p1 + debugp2copies + dump copy information compared to p2 debugpathcomplete complete part or all of a tracked path + debugpathcopies + show copies between two revisions debugpeer establish a connection to a peer repository debugpickmergetool examine which merge tool is chosen for specified file @@ -1672,7 +1677,7 @@ Test omit indicating for help $ cat > addverboseitems.py <<EOF - > '''extension to test omit indicating. + > r'''extension to test omit indicating. > > This paragraph is never omitted (for extension) > @@ -1685,7 +1690,7 @@ > ''' > from __future__ import absolute_import > from mercurial import commands, help - > testtopic = b"""This paragraph is never omitted (for topic). + > testtopic = br"""This paragraph is never omitted (for topic). > > .. container:: verbose >
--- a/tests/test-hgignore.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-hgignore.t Tue Mar 19 16:36:59 2019 +0300 @@ -356,7 +356,7 @@ $ rm dir1/.hgignore $ echo "dir1/file*" >> .hgignore $ hg debugignore "dir1\file2" - dir1\file2 is ignored + dir1/file2 is ignored (ignore rule in $TESTTMP\ignorerepo\.hgignore, line 4: 'dir1/file*') $ hg up -qC .
--- a/tests/test-hgweb-auth.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-hgweb-auth.py Tue Mar 19 16:36:59 2019 +0300 @@ -24,16 +24,26 @@ def writeauth(items): ui = origui.copy() for name, value in items.items(): - ui.setconfig('auth', name, value) + ui.setconfig(b'auth', name, value) return ui +def _stringifyauthinfo(ai): + if ai is None: + return ai + realm, authuris, user, passwd = ai + return (pycompat.strurl(realm), + [pycompat.strurl(u) for u in authuris], + pycompat.strurl(user), + pycompat.strurl(passwd), + ) + def test(auth, urls=None): print('CFG:', pycompat.sysstr(stringutil.pprint(auth, bprefix=True))) prefixes = set() for k in auth: - prefixes.add(k.split('.', 1)[0]) + prefixes.add(k.split(b'.', 1)[0]) for p in prefixes: - for name in ('.username', '.password'): + for name in (b'.username', b'.password'): if (p + name) not in auth: auth[p + name] = p auth = dict((k, v) for k, v in auth.items() if v is not None) @@ -41,106 +51,109 @@ ui = writeauth(auth) def _test(uri): - print('URI:', uri) + print('URI:', pycompat.strurl(uri)) try: pm = url.passwordmgr(ui, urlreq.httppasswordmgrwithdefaultrealm()) u, authinfo = util.url(uri).authinfo() if authinfo is not None: - pm.add_password(*authinfo) - print(' ', pm.find_user_password('test', u)) + pm.add_password(*_stringifyauthinfo(authinfo)) + print(' ', tuple(pycompat.strurl(a) for a in + pm.find_user_password('test', + pycompat.strurl(u)))) except error.Abort: print(' ','abort') if not urls: urls = [ - 'http://example.org/foo', - 'http://example.org/foo/bar', - 'http://example.org/bar', - 'https://example.org/foo', - 'https://example.org/foo/bar', - 'https://example.org/bar', - 'https://x@example.org/bar', - 'https://y@example.org/bar', + b'http://example.org/foo', + b'http://example.org/foo/bar', + b'http://example.org/bar', + b'https://example.org/foo', + b'https://example.org/foo/bar', + b'https://example.org/bar', + b'https://x@example.org/bar', + b'https://y@example.org/bar', ] for u in urls: 
_test(u) print('\n*** Test in-uri schemes\n') -test({'x.prefix': 'http://example.org'}) -test({'x.prefix': 'https://example.org'}) -test({'x.prefix': 'http://example.org', 'x.schemes': 'https'}) -test({'x.prefix': 'https://example.org', 'x.schemes': 'http'}) +test({b'x.prefix': b'http://example.org'}) +test({b'x.prefix': b'https://example.org'}) +test({b'x.prefix': b'http://example.org', b'x.schemes': b'https'}) +test({b'x.prefix': b'https://example.org', b'x.schemes': b'http'}) print('\n*** Test separately configured schemes\n') -test({'x.prefix': 'example.org', 'x.schemes': 'http'}) -test({'x.prefix': 'example.org', 'x.schemes': 'https'}) -test({'x.prefix': 'example.org', 'x.schemes': 'http https'}) +test({b'x.prefix': b'example.org', b'x.schemes': b'http'}) +test({b'x.prefix': b'example.org', b'x.schemes': b'https'}) +test({b'x.prefix': b'example.org', b'x.schemes': b'http https'}) print('\n*** Test prefix matching\n') -test({'x.prefix': 'http://example.org/foo', - 'y.prefix': 'http://example.org/bar'}) -test({'x.prefix': 'http://example.org/foo', - 'y.prefix': 'http://example.org/foo/bar'}) -test({'x.prefix': '*', 'y.prefix': 'https://example.org/bar'}) +test({b'x.prefix': b'http://example.org/foo', + b'y.prefix': b'http://example.org/bar'}) +test({b'x.prefix': b'http://example.org/foo', + b'y.prefix': b'http://example.org/foo/bar'}) +test({b'x.prefix': b'*', b'y.prefix': b'https://example.org/bar'}) print('\n*** Test user matching\n') -test({'x.prefix': 'http://example.org/foo', - 'x.username': None, - 'x.password': 'xpassword'}, - urls=['http://y@example.org/foo']) -test({'x.prefix': 'http://example.org/foo', - 'x.username': None, - 'x.password': 'xpassword', - 'y.prefix': 'http://example.org/foo', - 'y.username': 'y', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) -test({'x.prefix': 'http://example.org/foo/bar', - 'x.username': None, - 'x.password': 'xpassword', - 'y.prefix': 'http://example.org/foo', - 'y.username': 'y', - 'y.password': 
'ypassword'}, - urls=['http://y@example.org/foo/bar']) +test({b'x.prefix': b'http://example.org/foo', + b'x.username': None, + b'x.password': b'xpassword'}, + urls=[b'http://y@example.org/foo']) +test({b'x.prefix': b'http://example.org/foo', + b'x.username': None, + b'x.password': b'xpassword', + b'y.prefix': b'http://example.org/foo', + b'y.username': b'y', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) +test({b'x.prefix': b'http://example.org/foo/bar', + b'x.username': None, + b'x.password': b'xpassword', + b'y.prefix': b'http://example.org/foo', + b'y.username': b'y', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo/bar']) print('\n*** Test user matching with name in prefix\n') # prefix, username and URL have the same user -test({'x.prefix': 'https://example.org/foo', - 'x.username': None, - 'x.password': 'xpassword', - 'y.prefix': 'http://y@example.org/foo', - 'y.username': 'y', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'x.prefix': b'https://example.org/foo', + b'x.username': None, + b'x.password': b'xpassword', + b'y.prefix': b'http://y@example.org/foo', + b'y.username': b'y', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix has a different user from username and URL -test({'y.prefix': 'http://z@example.org/foo', - 'y.username': 'y', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'y.prefix': b'http://z@example.org/foo', + b'y.username': b'y', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix has a different user from URL; no username -test({'y.prefix': 'http://z@example.org/foo', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'y.prefix': b'http://z@example.org/foo', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix and URL have same user, but doesn't match username -test({'y.prefix': 'http://y@example.org/foo', - 'y.username': 'z', - 
'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'y.prefix': b'http://y@example.org/foo', + b'y.username': b'z', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix and URL have the same user; no username -test({'y.prefix': 'http://y@example.org/foo', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'y.prefix': b'http://y@example.org/foo', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix user, but no URL user or username -test({'y.prefix': 'http://y@example.org/foo', - 'y.password': 'ypassword'}, - urls=['http://example.org/foo']) +test({b'y.prefix': b'http://y@example.org/foo', + b'y.password': b'ypassword'}, + urls=[b'http://example.org/foo']) def testauthinfo(fullurl, authurl): print('URIs:', fullurl, authurl) pm = urlreq.httppasswordmgrwithdefaultrealm() - pm.add_password(*util.url(fullurl).authinfo()[1]) + ai = _stringifyauthinfo(util.url(pycompat.bytesurl(fullurl)).authinfo()[1]) + pm.add_password(*ai) print(pm.find_user_password('test', authurl)) print('\n*** Test urllib2 and util.url\n')
--- a/tests/test-hgweb-json.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-hgweb-json.t Tue Mar 19 16:36:59 2019 +0300 @@ -2196,7 +2196,8 @@ Commit message with Japanese Kanji 'Noh', which ends with '\x5c' $ echo foo >> da/foo - $ HGENCODING=cp932 hg ci -m `"$PYTHON" -c 'print("\x94\x5c")'` + >>> open('msg', 'wb').write(b'\x94\x5c\x0a') and None + $ HGENCODING=cp932 hg ci -l msg Commit message with null character
--- a/tests/test-hgweb-no-request-uri.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-hgweb-no-request-uri.t Tue Mar 19 16:36:59 2019 +0300 @@ -62,12 +62,12 @@ > output = stringio() > env['PATH_INFO'] = '/' > env['QUERY_STRING'] = 'style=atom' - > process(hgweb.hgweb(b'.', name = b'repo')) + > process(hgweb.hgweb(b'.', name=b'repo')) > > output = stringio() > env['PATH_INFO'] = '/file/tip/' > env['QUERY_STRING'] = 'style=raw' - > process(hgweb.hgweb(b'.', name = b'repo')) + > process(hgweb.hgweb(b'.', name=b'repo')) > > output = stringio() > env['PATH_INFO'] = '/'
--- a/tests/test-hgweb.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-hgweb.t Tue Mar 19 16:36:59 2019 +0300 @@ -910,7 +910,8 @@ errors - $ cat errors.log + $ cat errors.log | "$PYTHON" $TESTDIR/filtertraceback.py + $ rm -f errors.log Uncaught exceptions result in a logged error and canned HTTP response @@ -925,8 +926,11 @@ [1] $ killdaemons.py - $ head -1 errors.log + $ cat errors.log | "$PYTHON" $TESTDIR/filtertraceback.py .* Exception happened during processing request '/raiseerror': (re) + Traceback (most recent call last): + AttributeError: I am an uncaught error! + Uncaught exception after partial content sent
--- a/tests/test-highlight.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-highlight.t Tue Mar 19 16:36:59 2019 +0300 @@ -19,7 +19,7 @@ create random Python file to exercise Pygments - $ cat <<EOF > primes.py + $ cat <<NO_CHECK_EOF > primes.py > """Fun with generators. Corresponding Haskell implementation: > > primes = 2 : sieve [3, 5..] @@ -51,7 +51,7 @@ > n = 10 > p = primes() > print("The first %d primes: %s" % (n, list(itertools.islice(p, n)))) - > EOF + > NO_CHECK_EOF $ echo >> primes.py # to test html markup with an empty line just before EOF $ hg ci -Ama adding primes.py
--- a/tests/test-histedit-arguments.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-histedit-arguments.t Tue Mar 19 16:36:59 2019 +0300 @@ -362,7 +362,7 @@ $ hg histedit --abort warning: encountered an exception during histedit --abort; the repository may not have been completely cleaned up abort: $TESTTMP/foo/.hg/strip-backup/*-histedit.hg: $ENOENT$ (glob) (windows !) - abort: $ENOENT$: $TESTTMP/foo/.hg/strip-backup/*-histedit.hg (glob) (no-windows !) + abort: $ENOENT$: '$TESTTMP/foo/.hg/strip-backup/*-histedit.hg' (glob) (no-windows !) [255] Histedit state has been exited $ hg summary -q
--- a/tests/test-histedit-commute.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-histedit-commute.t Tue Mar 19 16:36:59 2019 +0300 @@ -52,6 +52,7 @@ summary: a + show the edit commands offered $ HGEDITOR=cat hg histedit 177f92b77385 pick 177f92b77385 2 c @@ -76,6 +77,33 @@ # r, roll = like fold, but discard this commit's description and date # + +test customization of revision summary + $ HGEDITOR=cat hg histedit 177f92b77385 \ + > --config histedit.summary-template='I am rev {rev} desc {desc} tags {tags}' + pick 177f92b77385 I am rev 2 desc c tags + pick 055a42cdd887 I am rev 3 desc d tags + pick e860deea161a I am rev 4 desc e tags + pick 652413bf663e I am rev 5 desc f tags tip + + # Edit history between 177f92b77385 and 652413bf663e + # + # Commits are listed from least to most recent + # + # You can reorder changesets by reordering the lines + # + # Commands: + # + # e, edit = use commit, but stop for amending + # m, mess = edit commit message without changing commit content + # p, pick = use commit + # b, base = checkout changeset and apply further changesets from there + # d, drop = remove commit from history + # f, fold = use commit, but combine it with the one above + # r, roll = like fold, but discard this commit's description and date + # + + edit the history (use a hacky editor to check histedit-last-edit.txt backup) @@ -142,6 +170,7 @@ summary: a + put things back $ hg histedit 177f92b77385 --commands - 2>&1 << EOF | fixbundle @@ -184,6 +213,7 @@ summary: a + slightly different this time $ hg histedit 177f92b77385 --commands - << EOF 2>&1 | fixbundle @@ -225,6 +255,7 @@ summary: a + keep prevents stripping dead revs $ hg histedit 799205341b6b --keep --commands - 2>&1 << EOF | fixbundle > pick 799205341b6b d @@ -276,6 +307,7 @@ summary: a + try with --rev $ hg histedit --commands - --rev -2 2>&1 <<EOF | fixbundle > pick de71b079d9ce e @@ -326,6 +358,7 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: a + Verify that revsetalias entries work with 
histedit: $ cat >> $HGRCPATH <<EOF > [revsetalias] @@ -355,6 +388,7 @@ # r, roll = like fold, but discard this commit's description and date # + should also work if a commit message is missing $ BUNDLE="$TESTDIR/missing-comment.hg" $ hg init missing @@ -384,6 +418,7 @@ date: Mon Nov 28 16:35:28 2011 +0000 summary: Checked in text file + $ hg histedit 0 $ cd .. @@ -440,6 +475,7 @@ @@ -0,0 +1,1 @@ +changed + $ hg --config diff.git=yes export 1 # HG changeset patch # User test @@ -453,6 +489,7 @@ rename from another-dir/initial-file rename to another-dir/renamed-file + $ cd .. Test that branches are preserved and stays active
--- a/tests/test-histedit-edit.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-histedit-edit.t Tue Mar 19 16:36:59 2019 +0300 @@ -370,9 +370,9 @@ HG: branch 'default' HG: added f ==== - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt @@ -394,9 +394,9 @@ HG: user: test HG: branch 'default' HG: added f - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit.unexpectedabort hook exited with status 1 [255]
--- a/tests/test-hook.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-hook.t Tue Mar 19 16:36:59 2019 +0300 @@ -14,32 +14,63 @@ $ cd a $ cat > .hg/hgrc <<EOF > [hooks] - > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit" - > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b" - > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit" - > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit" + > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py --line commit" + > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py --line commit.b" + > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py --line precommit" + > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxncommit" > pretxncommit.tip = hg -q tip - > pre-identify = sh -c "printenv.py pre-identify 1" - > pre-cat = sh -c "printenv.py pre-cat" - > post-cat = sh -c "printenv.py post-cat" - > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen" - > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose" - > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose" + > pre-identify = sh -c "printenv.py --line pre-identify 1" + > pre-cat = sh -c "printenv.py --line pre-cat" + > post-cat = sh -c "printenv.py --line post-cat" + > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxnopen" + > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxnclose" + > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py --line txnclose" > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs - > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort" + > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py --line txnabort" > txnclose.checklock = sh -c "hg debuglock > /dev/null" > EOF $ echo a > a $ hg add a $ hg commit -m a - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=0000000000000000000000000000000000000000 - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit 
hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=0000000000000000000000000000000000000000 + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PARENT1=0000000000000000000000000000000000000000 + HG_PENDING=$TESTTMP/a + 0:cb9a9f314b8b - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - txnclose hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 + pretxnclose hook: HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_PHASES_MOVED=1 + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_PHASES_MOVED=1 + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PARENT1=0000000000000000000000000000000000000000 + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PARENT1=0000000000000000000000000000000000000000 + $ hg clone . 
../b updating to branch default @@ -50,9 +81,9 @@ $ cat > .hg/hgrc <<EOF > [hooks] - > prechangegroup = sh -c "printenv.py prechangegroup" - > changegroup = sh -c "printenv.py changegroup" - > incoming = sh -c "printenv.py incoming" + > prechangegroup = sh -c "printenv.py --line prechangegroup" + > changegroup = sh -c "printenv.py --line changegroup" + > incoming = sh -c "printenv.py --line incoming" > EOF pretxncommit and commit hooks can see both parents of merge @@ -60,103 +91,319 @@ $ cd ../a $ echo b >> a $ hg commit -m a1 -d "1 0" - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PENDING=$TESTTMP/a + 1:ab228980c14d - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - txnclose hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + pretxnclose hook: HG_HOOKNAME=pretxnclose + 
HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b > b $ hg add b $ hg commit -m b -d '1 0' - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PENDING=$TESTTMP/a + 2:ee9deb46ab31 - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=commit + pretxnclose hook: HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + created new head - txnclose hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 
HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m merge -d '2 0' - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + HG_PENDING=$TESTTMP/a + 3:07f3376c1e65 - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - txnclose 
hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + pretxnclose hook: HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + test generic hooks $ hg id - pre-identify hook: HG_ARGS=id HG_HOOKNAME=pre-identify HG_HOOKTYPE=pre-identify HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None, 'template': ''} HG_PATS=[] + pre-identify hook: HG_ARGS=id + HG_HOOKNAME=pre-identify + HG_HOOKTYPE=pre-identify + HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None, 'template': ''} + HG_PATS=[] + abort: pre-identify hook exited with status 1 [255] $ hg cat b - pre-cat hook: HG_ARGS=cat b HG_HOOKNAME=pre-cat HG_HOOKTYPE=pre-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} HG_PATS=['b'] + pre-cat hook: HG_ARGS=cat b + 
HG_HOOKNAME=pre-cat + HG_HOOKTYPE=pre-cat + HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} + HG_PATS=['b'] + b - post-cat hook: HG_ARGS=cat b HG_HOOKNAME=post-cat HG_HOOKTYPE=post-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} HG_PATS=['b'] HG_RESULT=0 + post-cat hook: HG_ARGS=cat b + HG_HOOKNAME=post-cat + HG_HOOKTYPE=post-cat + HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} + HG_PATS=['b'] + HG_RESULT=0 + $ cd ../b $ hg pull ../a pulling from ../a searching for changes - prechangegroup hook: HG_HOOKNAME=prechangegroup HG_HOOKTYPE=prechangegroup HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + prechangegroup hook: HG_HOOKNAME=prechangegroup + HG_HOOKTYPE=prechangegroup + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/a (glob) + HG_URL=file:$TESTTMP/a + adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files new changesets ab228980c14d:07f3376c1e65 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 + 
HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/a (glob) + HG_URL=file:$TESTTMP/a + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/a (glob) + HG_URL=file:$TESTTMP/a + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/a (glob) + HG_URL=file:$TESTTMP/a + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/a (glob) + HG_URL=file:$TESTTMP/a + (run 'hg update' to get a working copy) tag hooks can see env vars $ cd ../a $ cat >> .hg/hgrc <<EOF - > pretag = sh -c "printenv.py pretag" - > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag" + > pretag = sh -c "printenv.py --line pretag" + > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py --line tag" > EOF $ hg tag -d '3 0' a - pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a + pretag hook: HG_HOOKNAME=pretag + HG_HOOKTYPE=pretag + HG_LOCAL=0 + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_TAG=a + + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + 
HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_PENDING=$TESTTMP/a + 4:539e4b31b6dc - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - tag hook: HG_HOOKNAME=tag HG_HOOKTYPE=tag HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a - txnclose hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + pretxnclose hook: HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + tag hook: HG_HOOKNAME=tag + HG_HOOKTYPE=tag + HG_LOCAL=0 + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_TAG=a + + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + $ hg tag -l la - pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la - tag hook: HG_HOOKNAME=tag HG_HOOKTYPE=tag HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la + pretag hook: HG_HOOKNAME=pretag + HG_HOOKTYPE=pretag + HG_LOCAL=1 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=la + + tag hook: HG_HOOKNAME=tag + 
HG_HOOKTYPE=tag + HG_LOCAL=1 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=la + pretag hook can forbid tagging $ cat >> .hg/hgrc <<EOF - > pretag.forbid = sh -c "printenv.py pretag.forbid 1" + > pretag.forbid = sh -c "printenv.py --line pretag.forbid 1" > EOF $ hg tag -d '4 0' fa - pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa - pretag.forbid hook: HG_HOOKNAME=pretag.forbid HG_HOOKTYPE=pretag HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa + pretag hook: HG_HOOKNAME=pretag + HG_HOOKTYPE=pretag + HG_LOCAL=0 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=fa + + pretag.forbid hook: HG_HOOKNAME=pretag.forbid + HG_HOOKTYPE=pretag + HG_LOCAL=0 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=fa + abort: pretag.forbid hook exited with status 1 [255] $ hg tag -l fla - pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla - pretag.forbid hook: HG_HOOKNAME=pretag.forbid HG_HOOKTYPE=pretag HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla + pretag hook: HG_HOOKNAME=pretag + HG_HOOKTYPE=pretag + HG_LOCAL=1 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=fla + + pretag.forbid hook: HG_HOOKNAME=pretag.forbid + HG_HOOKTYPE=pretag + HG_LOCAL=1 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=fla + abort: pretag.forbid hook exited with status 1 [255] @@ -165,22 +412,43 @@ $ cat >> .hg/hgrc <<EOF > pretxncommit.forbid0 = sh -c "hg tip -q" - > pretxncommit.forbid1 = sh -c "printenv.py pretxncommit.forbid 1" + > pretxncommit.forbid1 = sh -c "printenv.py --line pretxncommit.forbid 1" > EOF $ echo z > z $ hg add z $ hg -q tip 4:539e4b31b6dc $ hg commit -m 'fail' -d '4 0' - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 - pretxnopen hook: HG_HOOKNAME=pretxnopen 
HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PENDING=$TESTTMP/a + 5:6f611f8018c1 5:6f611f8018c1 - pretxncommit.forbid hook: HG_HOOKNAME=pretxncommit.forbid1 HG_HOOKTYPE=pretxncommit HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a + pretxncommit.forbid hook: HG_HOOKNAME=pretxncommit.forbid1 + HG_HOOKTYPE=pretxncommit + HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PENDING=$TESTTMP/a + transaction abort! 
txnabort Python hook: txnid,txnname - txnabort hook: HG_HOOKNAME=txnabort.1 HG_HOOKTYPE=txnabort HG_TXNID=TXN:$ID$ HG_TXNNAME=commit + txnabort hook: HG_HOOKNAME=txnabort.1 + HG_HOOKTYPE=txnabort + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + rollback completed abort: pretxncommit.forbid1 hook exited with status 1 [255] @@ -205,11 +473,17 @@ precommit hook can prevent commit $ cat >> .hg/hgrc <<EOF - > precommit.forbid = sh -c "printenv.py precommit.forbid 1" + > precommit.forbid = sh -c "printenv.py --line precommit.forbid 1" > EOF $ hg commit -m 'fail' -d '4 0' - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 - precommit.forbid hook: HG_HOOKNAME=precommit.forbid HG_HOOKTYPE=precommit HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + + precommit.forbid hook: HG_HOOKNAME=precommit.forbid + HG_HOOKTYPE=precommit + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + abort: precommit.forbid hook exited with status 1 [255] $ hg -q tip @@ -218,26 +492,36 @@ preupdate hook can prevent update $ cat >> .hg/hgrc <<EOF - > preupdate = sh -c "printenv.py preupdate" + > preupdate = sh -c "printenv.py --line preupdate" > EOF $ hg update 1 - preupdate hook: HG_HOOKNAME=preupdate HG_HOOKTYPE=preupdate HG_PARENT1=ab228980c14d + preupdate hook: HG_HOOKNAME=preupdate + HG_HOOKTYPE=preupdate + HG_PARENT1=ab228980c14d + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved update hook $ cat >> .hg/hgrc <<EOF - > update = sh -c "printenv.py update" + > update = sh -c "printenv.py --line update" > EOF $ hg update - preupdate hook: HG_HOOKNAME=preupdate HG_HOOKTYPE=preupdate HG_PARENT1=539e4b31b6dc - update hook: HG_ERROR=0 HG_HOOKNAME=update HG_HOOKTYPE=update HG_PARENT1=539e4b31b6dc + preupdate hook: HG_HOOKNAME=preupdate + HG_HOOKTYPE=preupdate + HG_PARENT1=539e4b31b6dc + + update hook: 
HG_ERROR=0 + HG_HOOKNAME=update + HG_HOOKTYPE=update + HG_PARENT1=539e4b31b6dc + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved pushkey hook $ cat >> .hg/hgrc <<EOF - > pushkey = sh -c "printenv.py pushkey" + > pushkey = sh -c "printenv.py --line pushkey" > EOF $ cd ../b $ hg bookmark -r null foo @@ -245,10 +529,42 @@ pushing to ../a searching for changes no changes found - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=push - pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_TXNNAME=push HG_URL=file:$TESTTMP/a - pushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=pushkey HG_HOOKTYPE=pushkey HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_PUSHKEYCOMPAT=1 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a - txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_TXNNAME=push HG_URL=file:$TESTTMP/a + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + + pretxnclose hook: HG_BOOKMARK_MOVED=1 + HG_BUNDLE2=1 + HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_SOURCE=push + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + HG_URL=file:$TESTTMP/a + + pushkey hook: HG_BUNDLE2=1 + HG_HOOKNAME=pushkey + HG_HOOKTYPE=pushkey + HG_KEY=foo + HG_NAMESPACE=bookmarks + HG_NEW=0000000000000000000000000000000000000000 + HG_PUSHKEYCOMPAT=1 + HG_SOURCE=push + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + HG_URL=file:$TESTTMP/a + + txnclose hook: HG_BOOKMARK_MOVED=1 + HG_BUNDLE2=1 + HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_SOURCE=push + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + HG_URL=file:$TESTTMP/a + exporting bookmark foo [1] $ cd ../a @@ -256,16 +572,35 @@ listkeys hook $ cat >> .hg/hgrc <<EOF - > listkeys = sh -c "printenv.py listkeys" + > listkeys 
= sh -c "printenv.py --line listkeys" > EOF $ hg bookmark -r null bar - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark - pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark - txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + + pretxnclose hook: HG_BOOKMARK_MOVED=1 + HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + + txnclose hook: HG_BOOKMARK_MOVED=1 + HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + $ cd ../b $ hg pull -B bar ../a pulling from ../a - listkeys hook: HG_HOOKNAME=listkeys HG_HOOKTYPE=listkeys HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} + listkeys hook: HG_HOOKNAME=listkeys + HG_HOOKTYPE=listkeys + HG_NAMESPACE=bookmarks + HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} + no changes found adding remote bookmark bar $ cd ../a @@ -273,18 +608,41 @@ test that prepushkey can prevent incoming keys $ cat >> .hg/hgrc <<EOF - > prepushkey = sh -c "printenv.py prepushkey.forbid 1" + > prepushkey = sh -c "printenv.py --line prepushkey.forbid 1" > EOF $ cd ../b $ hg bookmark -r null baz $ hg push -B baz ../a pushing to ../a searching for changes - listkeys hook: HG_HOOKNAME=listkeys HG_HOOKTYPE=listkeys HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'} - listkeys hook: HG_HOOKNAME=listkeys HG_HOOKTYPE=listkeys HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': 
'0000000000000000000000000000000000000000'} + listkeys hook: HG_HOOKNAME=listkeys + HG_HOOKTYPE=listkeys + HG_NAMESPACE=phases + HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'} + + listkeys hook: HG_HOOKNAME=listkeys + HG_HOOKTYPE=listkeys + HG_NAMESPACE=bookmarks + HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} + no changes found - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=push - prepushkey.forbid hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_PUSHKEYCOMPAT=1 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + + prepushkey.forbid hook: HG_BUNDLE2=1 + HG_HOOKNAME=prepushkey + HG_HOOKTYPE=prepushkey + HG_KEY=baz + HG_NAMESPACE=bookmarks + HG_NEW=0000000000000000000000000000000000000000 + HG_PUSHKEYCOMPAT=1 + HG_SOURCE=push + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + HG_URL=file:$TESTTMP/a + abort: prepushkey hook exited with status 1 [255] $ cd ../a @@ -292,16 +650,34 @@ test that prelistkeys can prevent listing keys $ cat >> .hg/hgrc <<EOF - > prelistkeys = sh -c "printenv.py prelistkeys.forbid 1" + > prelistkeys = sh -c "printenv.py --line prelistkeys.forbid 1" > EOF $ hg bookmark -r null quux - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark - pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark - txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + + pretxnclose hook: HG_BOOKMARK_MOVED=1 + 
HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + + txnclose hook: HG_BOOKMARK_MOVED=1 + HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + $ cd ../b $ hg pull -B quux ../a pulling from ../a - prelistkeys.forbid hook: HG_HOOKNAME=prelistkeys HG_HOOKTYPE=prelistkeys HG_NAMESPACE=bookmarks + prelistkeys.forbid hook: HG_HOOKNAME=prelistkeys + HG_HOOKTYPE=prelistkeys + HG_NAMESPACE=bookmarks + abort: prelistkeys hook exited with status 1 [255] $ cd ../a @@ -314,12 +690,19 @@ 3:07f3376c1e65 $ cat > .hg/hgrc <<EOF > [hooks] - > prechangegroup.forbid = sh -c "printenv.py prechangegroup.forbid 1" + > prechangegroup.forbid = sh -c "printenv.py --line prechangegroup.forbid 1" > EOF $ hg pull ../a pulling from ../a searching for changes - prechangegroup.forbid hook: HG_HOOKNAME=prechangegroup.forbid HG_HOOKTYPE=prechangegroup HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + prechangegroup.forbid hook: HG_HOOKNAME=prechangegroup.forbid + HG_HOOKTYPE=prechangegroup + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/a (glob) + HG_URL=file:$TESTTMP/a + abort: prechangegroup.forbid hook exited with status 1 [255] @@ -329,7 +712,7 @@ $ cat > .hg/hgrc <<EOF > [hooks] > pretxnchangegroup.forbid0 = hg tip -q - > pretxnchangegroup.forbid1 = sh -c "printenv.py pretxnchangegroup.forbid 1" + > pretxnchangegroup.forbid1 = sh -c "printenv.py --line pretxnchangegroup.forbid 1" > EOF $ hg pull ../a pulling from ../a @@ -339,7 +722,17 @@ adding file changes added 1 changesets with 1 changes to 1 files 4:539e4b31b6dc - pretxnchangegroup.forbid hook: HG_HOOKNAME=pretxnchangegroup.forbid1 HG_HOOKTYPE=pretxnchangegroup HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + pretxnchangegroup.forbid hook: 
HG_HOOKNAME=pretxnchangegroup.forbid1 + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PENDING=$TESTTMP/b + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + file:/*/$TESTTMP/a (glob) + HG_URL=file:$TESTTMP/a + transaction abort! rollback completed abort: pretxnchangegroup.forbid1 hook exited with status 1 @@ -352,14 +745,21 @@ $ rm .hg/hgrc $ cat > ../a/.hg/hgrc <<EOF > [hooks] - > preoutgoing = sh -c "printenv.py preoutgoing" - > outgoing = sh -c "printenv.py outgoing" + > preoutgoing = sh -c "printenv.py --line preoutgoing" + > outgoing = sh -c "printenv.py --line outgoing" > EOF $ hg pull ../a pulling from ../a searching for changes - preoutgoing hook: HG_HOOKNAME=preoutgoing HG_HOOKTYPE=preoutgoing HG_SOURCE=pull - outgoing hook: HG_HOOKNAME=outgoing HG_HOOKTYPE=outgoing HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull + preoutgoing hook: HG_HOOKNAME=preoutgoing + HG_HOOKTYPE=preoutgoing + HG_SOURCE=pull + + outgoing hook: HG_HOOKNAME=outgoing + HG_HOOKTYPE=outgoing + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_SOURCE=pull + adding changesets adding manifests adding file changes @@ -373,13 +773,19 @@ preoutgoing hook can prevent outgoing changes $ cat >> ../a/.hg/hgrc <<EOF - > preoutgoing.forbid = sh -c "printenv.py preoutgoing.forbid 1" + > preoutgoing.forbid = sh -c "printenv.py --line preoutgoing.forbid 1" > EOF $ hg pull ../a pulling from ../a searching for changes - preoutgoing hook: HG_HOOKNAME=preoutgoing HG_HOOKTYPE=preoutgoing HG_SOURCE=pull - preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid HG_HOOKTYPE=preoutgoing HG_SOURCE=pull + preoutgoing hook: HG_HOOKNAME=preoutgoing + HG_HOOKTYPE=preoutgoing + HG_SOURCE=pull + + preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid + HG_HOOKTYPE=preoutgoing + HG_SOURCE=pull + abort: preoutgoing.forbid hook exited with status 1 [255] @@ -388,12 +794,19 @@ $ cd .. 
$ cat > a/.hg/hgrc <<EOF > [hooks] - > preoutgoing = sh -c "printenv.py preoutgoing" - > outgoing = sh -c "printenv.py outgoing" + > preoutgoing = sh -c "printenv.py --line preoutgoing" + > outgoing = sh -c "printenv.py --line outgoing" > EOF $ hg clone a c - preoutgoing hook: HG_HOOKNAME=preoutgoing HG_HOOKTYPE=preoutgoing HG_SOURCE=clone - outgoing hook: HG_HOOKNAME=outgoing HG_HOOKTYPE=outgoing HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone + preoutgoing hook: HG_HOOKNAME=preoutgoing + HG_HOOKTYPE=preoutgoing + HG_SOURCE=clone + + outgoing hook: HG_HOOKNAME=outgoing + HG_HOOKTYPE=outgoing + HG_NODE=0000000000000000000000000000000000000000 + HG_SOURCE=clone + updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf c @@ -401,11 +814,17 @@ preoutgoing hook can prevent outgoing changes for local clones $ cat >> a/.hg/hgrc <<EOF - > preoutgoing.forbid = sh -c "printenv.py preoutgoing.forbid 1" + > preoutgoing.forbid = sh -c "printenv.py --line preoutgoing.forbid 1" > EOF $ hg clone a zzz - preoutgoing hook: HG_HOOKNAME=preoutgoing HG_HOOKTYPE=preoutgoing HG_SOURCE=clone - preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid HG_HOOKTYPE=preoutgoing HG_SOURCE=clone + preoutgoing hook: HG_HOOKNAME=preoutgoing + HG_HOOKTYPE=preoutgoing + HG_SOURCE=clone + + preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid + HG_HOOKTYPE=preoutgoing + HG_SOURCE=clone + abort: preoutgoing.forbid hook exited with status 1 [255] @@ -452,7 +871,7 @@ > def printtags(ui, repo, **args): > ui.write(b'[%s]\n' % b', '.join(sorted(repo.tags()))) > - > class container: + > class container(object): > unreachable = 1 > EOF @@ -690,7 +1109,7 @@ $ hg up null loading update.ne hook failed: - abort: $ENOENT$: $TESTTMP/d/repo/nonexistent.py + abort: $ENOENT$: '$TESTTMP/d/repo/nonexistent.py' [255] $ hg id @@ -780,10 +1199,16 @@ $ cd .. 
$ cat << EOF >> hgrc-with-post-init-hook > [hooks] - > post-init = sh -c "printenv.py post-init" + > post-init = sh -c "printenv.py --line post-init" > EOF $ HGRCPATH=hgrc-with-post-init-hook hg init to - post-init hook: HG_ARGS=init to HG_HOOKNAME=post-init HG_HOOKTYPE=post-init HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} HG_PATS=['to'] HG_RESULT=0 + post-init hook: HG_ARGS=init to + HG_HOOKNAME=post-init + HG_HOOKTYPE=post-init + HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} + HG_PATS=['to'] + HG_RESULT=0 + new commits must be visible in pretxnchangegroup (issue3428)
--- a/tests/test-http-api-httpv2.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-http-api-httpv2.t Tue Mar 19 16:36:59 2019 +0300 @@ -18,6 +18,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003 HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -46,6 +47,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/badcommand HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -67,6 +69,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -88,6 +91,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -110,6 +114,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: invalid\r\n @@ -134,6 +139,7 @@ > content-type: badmedia > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -160,6 +166,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'customreadonly'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> *\r\n (glob) @@ -196,6 +203,7 @@ > EOF creating http peer for wire protocol version 2 sending customreadonly command + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -216,23 +224,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 27\r\n s> \x1f\x00\x00\x01\x00\x02\x041 s> X\x1dcustomreadonly bytes response s> \r\n - received frame(size=31; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ b'customreadonly bytes response' ] @@ -247,6 +251,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003/rw/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -268,6 +273,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003/rw/badcommand HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -289,6 +295,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/rw/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -327,6 +334,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'customreadonly'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/rw/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -366,6 +374,7 @@ > accept: $MEDIATYPE > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/rw/badcommand HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -388,6 +397,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/debugreflect HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -428,6 +438,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'command1', b'args': {b'foo': b'val1', b'bar1': b'val'}} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/debugreflect HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -459,6 +470,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'customreadonly'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -501,6 +513,7 @@ > frame 3 1 0 command-request new cbor:{b'name': b'customreadonly'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/multirequest HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> *\r\n (glob) @@ -554,6 +567,7 @@ > frame 1 1 0 command-request continuation IbookmarksDnameHlistkeys > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/ro/multirequest HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -619,6 +633,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'pushkey'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/multirequest HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -645,6 +660,7 @@ creating http peer for wire protocol version 2 sending heads command wire protocol version 2 encoder referenced in config (badencoder) is not known; ignoring + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -665,23 +681,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 1e\r\n s> \x16\x00\x00\x01\x00\x02\x041 s> \x81T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 s> \r\n - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: [ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' ] @@ -694,6 +706,7 @@ > EOF creating http peer for wire protocol version 2 sending heads command + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -714,12 +727,10 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hzstd-8mb s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 25\r\n s> \x1d\x00\x00\x01\x00\x02\x042 s> (\xb5/\xfd\x00P\xa4\x00\x00p\xa1FstatusBok\x81T\x00\x01\x00\tP\x02 s> \r\n - received frame(size=29; request=1; stream=2; streamflags=encoded; type=command-response; flags=eos) s> 0\r\n s> \r\n response: [
--- a/tests/test-http-api.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-http-api.t Tue Mar 19 16:36:59 2019 +0300 @@ -156,6 +156,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -177,6 +178,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/ HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -200,6 +202,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/unknown HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -222,6 +225,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003 HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -255,6 +259,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -276,6 +281,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/ HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n
--- a/tests/test-http-bad-server.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-http-bad-server.t Tue Mar 19 16:36:59 2019 +0300 @@ -94,7 +94,7 @@ $ cat error.log readline(40 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(7 from -1) -> (7) Accept- + readline(7 from *) -> (7) Accept- (glob) read limit reached; closing socket $ rm -f error.log @@ -111,28 +111,32 @@ $ cat error.log readline(210 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(177 from -1) -> (27) Accept-Encoding: identity\r\n - readline(150 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(115 from -1) -> (*) host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) - write(36) -> HTTP/1.1 200 Script output follows\r\n - write(23) -> Server: badhttpserver\r\n - write(37) -> Date: $HTTP_DATE$\r\n - write(41) -> Content-Type: application/mercurial-0.1\r\n - write(21) -> Content-Length: 450\r\n - write(2) -> \r\n - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(177 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(150 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(115 from *) -> (*) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) 
+ sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23) -> Server: badhttpserver\r\n (no-py3 !) + write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) + write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21) -> Content-Length: 450\r\n (no-py3 !) + write(2) -> \r\n (no-py3 !) + write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(4? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob) - readline(1? from -1) -> (1?) Accept-Encoding* (glob) + readline(1? from *) -> (1?) 
Accept-Encoding* (glob) read limit reached; closing socket readline(223 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(197 from -1) -> (27) Accept-Encoding: identity\r\n - readline(170 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(141 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(100 from -1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(39 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(4 from -1) -> (4) host + readline(197 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(170 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(141 from *) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(100 from *) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(39 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(4 from *) -> (4) host (glob) read limit reached; closing socket $ rm -f error.log @@ -152,46 +156,54 @@ readline(1 from -1) -> (1) x (?) readline(1 from -1) -> (1) x (?) 
readline(308 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(275 from -1) -> (27) Accept-Encoding: identity\r\n - readline(248 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(213 from -1) -> (*) host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) - write(36) -> HTTP/1.1 200 Script output follows\r\n - write(23) -> Server: badhttpserver\r\n - write(37) -> Date: $HTTP_DATE$\r\n - write(41) -> Content-Type: application/mercurial-0.1\r\n - write(21) -> Content-Length: 450\r\n - write(2) -> \r\n - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(275 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(248 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(213 from *) -> (*) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) 
+ write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23) -> Server: badhttpserver\r\n (no-py3 !) + write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) + write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21) -> Content-Length: 450\r\n (no-py3 !) + write(2) -> \r\n (no-py3 !) + write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(13? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob) - readline(1?? from -1) -> (27) Accept-Encoding: identity\r\n (glob) - readline(8? from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) - readline(5? from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) - readline(1? from -1) -> (1?) x-hgproto-1:* (glob) + readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(8? from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(5? from *) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(1? from *) -> (1?) 
x-hgproto-1:* (glob) read limit reached; closing socket readline(317 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(291 from -1) -> (27) Accept-Encoding: identity\r\n - readline(264 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(235 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(194 from -1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(133 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(98 from -1) -> (*) host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) - write(36) -> HTTP/1.1 200 Script output follows\r\n - write(23) -> Server: badhttpserver\r\n - write(37) -> Date: $HTTP_DATE$\r\n - write(41) -> Content-Type: application/mercurial-0.1\r\n - write(20) -> Content-Length: 42\r\n - write(2) -> \r\n - write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; + readline(291 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(264 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(235 from *) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(194 from *) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(133 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(98 from *) -> (*) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) + sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !) 
+ write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) + write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !) + write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23) -> Server: badhttpserver\r\n (no-py3 !) + write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) + write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20) -> Content-Length: 42\r\n (no-py3 !) + write(2) -> \r\n (no-py3 !) + write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !) readline(* from 65537) -> (*) GET /?cmd=getbundle HTTP* (glob) read limit reached; closing socket readline(304 from 65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n - readline(274 from -1) -> (27) Accept-Encoding: identity\r\n - readline(247 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(218 from -1) -> (218) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtag + readline(274 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(247 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(218 from *) -> (218) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtag (glob) read limit reached; closing socket $ rm -f error.log @@ -207,41 +219,50 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(329 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(296 from -1) -> (27) Accept-Encoding: identity\r\n - readline(269 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(234 from -1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) - write(36) -> HTTP/1.1 200 Script output follows\r\n - write(23) -> Server: badhttpserver\r\n - write(37) -> Date: $HTTP_DATE$\r\n - write(41) -> Content-Type: application/mercurial-0.1\r\n - write(21) -> Content-Length: 463\r\n - write(2) -> \r\n - write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(296 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(269 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(234 from *) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py36 !) + sendall(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py3 no-py36 !) + write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) 
+ write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23) -> Server: badhttpserver\r\n (no-py3 !) + write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) + write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21) -> Content-Length: 463\r\n (no-py3 !) + write(2) -> \r\n (no-py3 !) + write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(1?? from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n (glob) - readline(1?? from -1) -> (27) Accept-Encoding: identity\r\n (glob) - readline(1?? from -1) -> (41) content-type: application/mercurial-0.1\r\n (glob) - readline(6? from -1) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n (glob) - readline(3? from -1) -> (19) x-hgargs-post: 28\r\n (glob) - readline(1? from -1) -> (1?) x-hgproto-1: * (glob) + readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(1?? from *) -> (41) content-type: application/mercurial-0.1\r\n (glob) + readline(6? from *) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n (glob) + readline(3? from *) -> (19) x-hgargs-post: 28\r\n (glob) + readline(1? from *) -> (1?) 
x-hgproto-1: * (glob) read limit reached; closing socket readline(344 from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n - readline(317 from -1) -> (27) Accept-Encoding: identity\r\n - readline(290 from -1) -> (41) content-type: application/mercurial-0.1\r\n - readline(249 from -1) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n - readline(216 from -1) -> (19) x-hgargs-post: 28\r\n - readline(197 from -1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(136 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(101 from -1) -> (20) content-length: 28\r\n - readline(81 from -1) -> (*) host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) + readline(317 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(290 from *) -> (41) content-type: application/mercurial-0.1\r\n (glob) + readline(249 from *) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n (glob) + readline(216 from *) -> (19) x-hgargs-post: 28\r\n (glob) + readline(197 from *) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(136 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(101 from *) -> (20) content-length: 28\r\n (glob) + readline(81 from *) -> (*) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) read(* from 28) -> (*) cmds=* (glob) read limit reached, closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob) + Traceback (most recent call last): + Exception: connection closed after receiving N bytes + + write(126) -> HTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) 
+ write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -258,16 +279,23 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(1 from 36) -> (0) H + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(1 from 160) -> (0) H (py36 !) + write(1 from 160) -> (0) H (py3 no-py36 !) + write(1 from 36) -> (0) H (no-py3 !) write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(286) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -283,21 +311,29 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (144) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (121) Server: badhttpserver\r\n - write(37 from 37) -> (84) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (43) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (22) Content-Length: 450\r\n - write(2 from 2) -> (20) \r\n - write(20 from 450) -> (0) batch branchmap bund + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(20 from 450) -> (0) batch branchmap bund (py36 !) + write(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(20 from 450) -> (0) batch branchmap bund (py3 no-py36 !) + write(36 from 36) -> (144) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (121) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (84) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (43) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (22) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (20) \r\n (no-py3 !) + write(20 from 450) -> (0) batch branchmap bund (no-py3 !) 
write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + $ rm -f error.log @@ -318,35 +354,46 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (692) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (669) Server: badhttpserver\r\n - write(37 from 37) -> (632) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (591) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (570) Content-Length: 450\r\n - write(2 from 2) -> (568) \r\n - write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) 
+ sendall(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (692) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (669) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (632) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (591) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (570) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (568) \r\n (no-py3 !) + write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (82) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (59) Server: badhttpserver\r\n - write(37 from 37) -> (22) Date: $HTTP_DATE$\r\n - write(22 from 41) -> (0) Content-Type: applicat + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (py36 !) + write(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (py3 no-py36 !) + write(36 from 36) -> (82) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (59) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (22) Date: $HTTP_DATE$\r\n (no-py3 !) + write(22 from 41) -> (0) Content-Type: applicat (no-py3 !) 
write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(285) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -366,37 +413,49 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (757) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (734) Server: badhttpserver\r\n - write(37 from 37) -> (697) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (656) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (635) Content-Length: 450\r\n - write(2 from 2) -> (633) \r\n - write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (757) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (734) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (697) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (656) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (635) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (633) \r\n (no-py3 !) + write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) 
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (147) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (124) Server: badhttpserver\r\n - write(37 from 37) -> (87) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (46) Content-Type: application/mercurial-0.1\r\n - write(20 from 20) -> (26) Content-Length: 42\r\n - write(2 from 2) -> (24) \r\n - write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(159 from 159) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (py36 !) + write(159 from 159) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) + write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (py3 no-py36 !) 
+ write(36 from 36) -> (147) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (124) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (87) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (46) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20 from 20) -> (26) Content-Length: 42\r\n (no-py3 !) + write(2 from 2) -> (24) \r\n (no-py3 !) + write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (no-py3 !) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + $ rm -f error.log @@ -418,51 +477,66 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (904) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (881) Server: badhttpserver\r\n - write(37 from 37) -> (844) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (803) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (782) Content-Length: 450\r\n - write(2 from 2) -> (780) \r\n - write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (904) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (881) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (844) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (803) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (782) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (780) \r\n (no-py3 !) + write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) 
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (294) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (271) Server: badhttpserver\r\n - write(37 from 37) -> (234) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (193) Content-Type: application/mercurial-0.1\r\n - write(20 from 20) -> (173) Content-Length: 42\r\n - write(2 from 2) -> (171) \r\n - write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(159 from 159) -> (171) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !) + write(159 from 159) -> (171) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) 
+ write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !) + write(36 from 36) -> (294) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (271) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (234) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (193) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20 from 20) -> (173) Content-Length: 42\r\n (no-py3 !) + write(2 from 2) -> (171) \r\n (no-py3 !) + write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !) readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (93) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (70) Server: badhttpserver\r\n - write(37 from 37) -> (33) Date: $HTTP_DATE$\r\n - write(33 from 41) -> (0) Content-Type: application/mercuri + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (py36 !) + write(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (py3 no-py36 !) + write(36 from 36) -> (93) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (70) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (33) Date: $HTTP_DATE$\r\n (no-py3 !) + write(33 from 41) -> (0) Content-Type: application/mercuri (no-py3 !) 
write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -478,11 +552,20 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -4 error.log - write(41 from 41) -> (25) Content-Type: application/mercurial-0.2\r\n - write(25 from 28) -> (0) Transfer-Encoding: chunke - write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -3 + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -4 + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) 
+#endif $ rm -f error.log @@ -499,53 +582,68 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (942) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (919) Server: badhttpserver\r\n - write(37 from 37) -> (882) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (841) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (820) Content-Length: 450\r\n - write(2 from 2) -> (818) \r\n - write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) 
+ write(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (942) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (919) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (882) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (841) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (820) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (818) \r\n (no-py3 !) + write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (332) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (309) Server: badhttpserver\r\n - write(37 from 37) -> (272) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (231) Content-Type: application/mercurial-0.1\r\n - write(20 from 20) -> (211) Content-Length: 42\r\n - write(2 from 2) -> (209) \r\n - write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(159 from 159) -> (209) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !) + write(159 from 159) -> (209) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) + write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !) + write(36 from 36) -> (332) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (309) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (272) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (231) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20 from 20) -> (211) Content-Length: 42\r\n (no-py3 !) 
+ write(2 from 2) -> (209) \r\n (no-py3 !) + write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !) readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (131) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (108) Server: badhttpserver\r\n - write(37 from 37) -> (71) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (30) Content-Type: application/mercurial-0.2\r\n - write(28 from 28) -> (2) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (0) \r\n + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (461) x-hgarg-1: 
bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py36 !) + write(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36 from 36) -> (131) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (108) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (71) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (30) Content-Type: application/mercurial-0.2\r\n (no-py3 !) + write(28 from 28) -> (2) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (0) \r\n (no-py3 !) 
write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -562,56 +660,72 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (966) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (943) Server: badhttpserver\r\n - write(37 from 37) -> (906) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (865) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (844) Content-Length: 450\r\n - write(2 from 2) -> (842) \r\n - write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (966) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (943) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (906) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (865) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (844) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (842) \r\n (no-py3 !) + write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) 
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (356) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (333) Server: badhttpserver\r\n - write(37 from 37) -> (296) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (255) Content-Type: application/mercurial-0.1\r\n - write(20 from 20) -> (235) Content-Length: 42\r\n - write(2 from 2) -> (233) \r\n - write(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(159 from 159) -> (233) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !) + write(159 from 159) -> (233) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) 
+ write(36 from 36) -> (356) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (333) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (296) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (255) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20 from 20) -> (235) Content-Length: 42\r\n (no-py3 !) + write(2 from 2) -> (233) \r\n (no-py3 !) + write(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !) readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (155) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (132) Server: badhttpserver\r\n - write(37 from 37) -> (95) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (54) Content-Type: application/mercurial-0.2\r\n - write(28 from 28) -> (26) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (24) \r\n - write(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc) - write(9 from 9) -> (9) 4\r\nnone\r\n - write(9 from 9) -> (0) 4\r\nHG20\r\n + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(167 from 167) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py36 !) + sendall(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc) (py36 !) + sendall(9 from 9) -> (9) 4\r\nnone\r\n (py36 !) + sendall(9 from 9) -> (0) 4\r\nHG20\r\n (py36 !) 
+ write(167 from 167) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36 from 36) -> (155) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (132) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (95) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (54) Content-Type: application/mercurial-0.2\r\n (no-py3 !) + write(28 from 28) -> (26) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (24) \r\n (no-py3 !) + write(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc) (no-py3 !) + write(9 from 9) -> (9) 4\r\nnone\r\n (no-py3 !) + write(9 from 9) -> (0) 4\r\nHG20\r\n (no-py3 !) write limit reached; closing socket - write(27) -> 15\r\nInternal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(27) -> 15\r\nInternal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -622,20 +736,41 @@ $ hg clone http://localhost:$HGPORT/ clone requesting all changes - abort: HTTP request error (incomplete response; expected 4 bytes got 3) + abort: HTTP request error (incomplete response) (py3 !) + abort: HTTP request error (incomplete response; expected 4 bytes got 3) (no-py3 !) 
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] $ killdaemons.py $DAEMON_PIDS - $ tail -7 error.log - write(28 from 28) -> (23) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (21) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -9 + sendall(167 from 167) -> (21) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (15) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (6) 4\r\nnone\r\n + sendall(6 from 9) -> (0) 4\r\nHG2 + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -11 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (21) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (23) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (21) \r\n (no-py3 !) write(6 from 6) -> (15) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (6) 4\r\nnone\r\n write(6 from 9) -> (0) 4\r\nHG2 write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -646,21 +781,43 @@ $ hg clone http://localhost:$HGPORT/ clone requesting all changes - abort: HTTP request error (incomplete response; expected 4 bytes got 3) + abort: HTTP request error (incomplete response) (py3 !) 
+ abort: HTTP request error (incomplete response; expected 4 bytes got 3) (no-py3 !) (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] $ killdaemons.py $DAEMON_PIDS - $ tail -8 error.log - write(28 from 28) -> (32) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (30) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -10 + sendall(167 from 167) -> (30) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (24) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (15) 4\r\nnone\r\n + sendall(9 from 9) -> (6) 4\r\nHG20\r\n + sendall(6 from 9) -> (0) 4\\r\\n\x00\x00\x00 (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (30) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (32) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (30) \r\n (no-py3 !) 
write(6 from 6) -> (24) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (15) 4\r\nnone\r\n write(9 from 9) -> (6) 4\r\nHG20\r\n write(6 from 9) -> (0) 4\\r\\n\x00\x00\x00 (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -677,15 +834,36 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -8 error.log - write(28 from 28) -> (35) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (33) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -10 + sendall(167 from 167) -> (33) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (27) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (18) 4\r\nnone\r\n + sendall(9 from 9) -> (9) 4\r\nHG20\r\n + sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (33) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (35) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (33) \r\n (no-py3 !) 
write(6 from 6) -> (27) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (18) 4\r\nnone\r\n write(9 from 9) -> (9) 4\r\nHG20\r\n write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -702,16 +880,39 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -9 error.log - write(28 from 28) -> (44) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (42) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -11 + sendall(167 from 167) -> (42) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (36) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (27) 4\r\nnone\r\n + sendall(9 from 9) -> (18) 4\r\nHG20\r\n + sendall(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -13 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (42) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (44) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (42) \r\n (no-py3 !) 
write(6 from 6) -> (36) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (27) 4\r\nnone\r\n write(9 from 9) -> (18) 4\r\nHG20\r\n write(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00)\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -731,9 +932,27 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -10 error.log - write(28 from 28) -> (91) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (89) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12 + sendall(167 from 167) -> (89) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (83) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (74) 4\r\nnone\r\n + sendall(9 from 9) -> (65) 4\r\nHG20\r\n + sendall(9 from 9) -> (56) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (47) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (0) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (89) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (91) Transfer-Encoding: chunked\r\n (no-py3 !) 
+ write(2 from 2) -> (89) \r\n (no-py3 !) write(6 from 6) -> (83) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (74) 4\r\nnone\r\n write(9 from 9) -> (65) 4\r\nHG20\r\n @@ -741,7 +960,12 @@ write(9 from 9) -> (47) 4\\r\\n\x00\x00\x00)\\r\\n (esc) write(47 from 47) -> (0) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -755,14 +979,34 @@ adding changesets transaction abort! rollback completed - abort: HTTP request error (incomplete response; expected 466 bytes got 7) + abort: HTTP request error (incomplete response) (py3 !) + abort: HTTP request error (incomplete response; expected 466 bytes got 7) (no-py3 !) (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] $ killdaemons.py $DAEMON_PIDS - $ tail -11 error.log - write(2 from 2) -> (110) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14 + sendall(167 from 167) -> (110) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (104) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (95) 4\r\nnone\r\n + sendall(9 from 9) -> (86) 4\r\nHG20\r\n + sendall(9 from 9) -> (77) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (68) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (21) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (12) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(12 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1d (esc) + write limit reached; closing socket 
+ $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -15 + write(167 from 167) -> (110) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(2 from 2) -> (110) \r\n (no-py3 !) write(6 from 6) -> (104) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (95) 4\r\nnone\r\n write(9 from 9) -> (86) 4\r\nHG20\r\n @@ -772,7 +1016,12 @@ write(9 from 9) -> (12) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) write(12 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1d (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -792,9 +1041,29 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -12 error.log - write(28 from 28) -> (573) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (571) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14 + sendall(167 from 167) -> (571) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (565) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (556) 4\r\nnone\r\n + sendall(9 from 9) -> (547) 4\r\nHG20\r\n + sendall(9 from 9) -> (538) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (529) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (482) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (473) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> 
(0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -16 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (571) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (573) Transfer-Encoding: chunked\r\n (no-py3 !) 
+ write(2 from 2) -> (571) \r\n (no-py3 !) write(6 from 6) -> (565) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (556) 4\r\nnone\r\n write(9 from 9) -> (547) 4\r\nHG20\r\n @@ -804,7 +1073,12 @@ write(9 from 9) -> (473) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) write(473 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -821,13 +1095,34 @@ added 1 changesets 
with 1 changes to 1 files transaction abort! rollback completed - abort: HTTP request error (incomplete response; expected 32 bytes got 9) + abort: HTTP request error (incomplete response) (py3 !) + abort: HTTP request error (incomplete response; expected 32 bytes got 9) (no-py3 !) (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] $ killdaemons.py $DAEMON_PIDS - $ tail -13 error.log +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -16 + sendall(6 from 6) -> (596) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (587) 4\r\nnone\r\n + sendall(9 from 9) -> (578) 4\r\nHG20\r\n + sendall(9 from 9) -> (569) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (560) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (513) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (504) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (31) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (22) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (13) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(13 from 38) -> (0) 20\\r\\n\x08LISTKEYS (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -17 write(6 from 6) -> (596) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (587) 4\r\nnone\r\n write(9 from 9) -> (578) 4\r\nHG20\r\n @@ -840,7 +1135,12 @@ write(9 from 9) -> (13) 4\\r\\n\x00\x00\x00 \\r\\n (esc) write(13 from 38) -> (0) 20\\r\\n\x08LISTKEYS (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -863,7 +1163,36 @@ $ 
killdaemons.py $DAEMON_PIDS - $ tail -22 error.log +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -25 + sendall(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (795) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (786) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (313) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (304) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (295) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(38 from 
38) -> (257) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + sendall(9 from 9) -> (248) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + sendall(64 from 64) -> (184) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + sendall(9 from 9) -> (175) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (166) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + sendall(41 from 41) -> (125) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) + sendall(9 from 9) -> (116) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (107) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) + sendall(35 from 35) -> (72) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (63) 4\\r\\n\x00\x00\x00'\\r\\n (esc) + sendall(45 from 45) -> (18) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + sendall(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26 write(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc) write(47 from 47) -> (795) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) @@ -885,7 +1214,12 @@ write(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server 
Error\r\n +#endif $ rm -f error.log $ rm -rf clone @@ -907,7 +1241,37 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -23 error.log +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26 + sendall(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (798) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (789) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (316) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (307) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + 
sendall(9 from 9) -> (298) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(38 from 38) -> (260) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + sendall(9 from 9) -> (251) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + sendall(64 from 64) -> (187) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + sendall(9 from 9) -> (178) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (169) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + sendall(41 from 41) -> (128) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) + sendall(9 from 9) -> (119) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (110) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) + sendall(35 from 35) -> (75) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (66) 4\\r\\n\x00\x00\x00'\\r\\n (esc) + sendall(45 from 45) -> (21) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + sendall(9 from 9) -> (12) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(3 from 5) -> (0) 0\r\n + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -27 write(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc) write(47 from 47) -> (798) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) @@ -930,7 +1294,12 @@ write(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(3 from 5) -> (0) 0\r\n write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + 
Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log $ rm -rf clone
--- a/tests/test-http-bundle1.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-http-bundle1.t Tue Mar 19 16:36:59 2019 +0300 @@ -151,7 +151,7 @@ $ cd copy-pull $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup" + > changegroup = sh -c "printenv.py --line changegroup" > EOF $ hg pull pulling from http://localhost:$HGPORT1/ @@ -161,7 +161,16 @@ adding file changes added 1 changesets with 1 changes to 1 files new changesets 5fed3813f7f5 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=http://localhost:$HGPORT1/ + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d + HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + http://localhost:$HGPORT1/ + HG_URL=http://localhost:$HGPORT1/ + (run 'hg update' to get a working copy) $ cd .. 
@@ -175,22 +184,9 @@ + use the same server to test server side streaming preference $ cd test - $ cat << EOT > userpass.py - > import base64 - > from mercurial.hgweb import common - > def perform_authentication(hgweb, req, op): - > auth = req.headers.get(b'Authorization') - > if not auth: - > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', - > [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) - > if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', - > b'pass']: - > raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') - > def extsetup(ui): - > common.permhooks.insert(0, perform_authentication) - > EOT - $ hg serve --config extensions.x=userpass.py -p $HGPORT2 -d --pid-file=pid \ - > --config server.preferuncompressed=True \ + + $ hg serve --config extensions.x=$TESTDIR/httpserverauth.py -p $HGPORT2 -d \ + > --pid-file=pid --config server.preferuncompressed=True \ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log $ cat pid >> $DAEMON_PIDS
--- a/tests/test-http-protocol.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-http-protocol.t Tue Mar 19 16:36:59 2019 +0300 @@ -179,6 +179,7 @@ > command listkeys > namespace namespaces > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -194,6 +195,7 @@ s> \r\n s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending listkeys command + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=listkeys HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgArg-1,X-HgProto-1\r\n @@ -228,6 +230,7 @@ > x-hgarg-1: namespace=namespaces > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=listkeys HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -250,6 +253,7 @@ $ hg --config experimental.httppeer.advertise-v2=true --verbose debugwireproto http://$LOCALIP:$HGPORT << EOF > command heads > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -268,6 +272,7 @@ s> \r\n s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command + s> setsockopt(6, 1, 1) -> None (?) 
s> GET /?cmd=heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1\r\n @@ -299,6 +304,7 @@ $ hg --config experimental.httppeer.advertise-v2=true --config experimental.httppeer.v2-encoder-order=identity --verbose debugwireproto http://$LOCALIP:$HGPORT << EOF > command heads > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -317,6 +323,7 @@ s> \r\n s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired
\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -337,23 +344,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 1e\r\n s> \x16\x00\x00\x01\x00\x02\x041 s> \x81T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 s> \r\n - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: [ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' ] @@ -386,7 +389,7 @@ > relpath = path[len(b'/redirector'):] > res.status = b'301 Redirect' 
> newurl = b'%s/redirected%s' % (req.baseurl, relpath) - > if not repo.ui.configbool('testing', 'redirectqs', True) and b'?' in newurl: + > if not repo.ui.configbool(b'testing', b'redirectqs', True) and b'?' in newurl: > newurl = newurl[0:newurl.index(b'?')] > res.headers[b'Location'] = newurl > res.headers[b'Content-Type'] = b'text/plain' @@ -408,6 +411,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirector?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -422,6 +426,7 @@ s> Content-Length: 10\r\n s> \r\n s> redirected + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -441,6 +446,7 @@ $ hg --verbose debugwireproto http://$LOCALIP:$HGPORT/redirector << EOF > command heads > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirector?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -456,6 +462,7 @@ s> Content-Length: 10\r\n s> \r\n s> redirected + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -472,6 +479,7 @@ real URL is http://$LOCALIP:$HGPORT/redirected (glob) s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1\r\n @@ -509,6 +517,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) 
s> GET /redirector?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -523,6 +532,7 @@ s> Content-Length: 10\r\n s> \r\n s> redirected + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -664,6 +674,7 @@ $ hg --verbose debugwireproto http://$LOCALIP:$HGPORT/redirector << EOF > command heads > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirector?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -679,6 +690,7 @@ s> Content-Length: 10\r\n s> \r\n s> redirected + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -721,6 +733,7 @@ s> <li class="active">log</li>\n s> <li><a href="/redirected/graph/tip">graph</a></li>\n s> <li><a href="/redirected/tags">tags</a + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -737,6 +750,7 @@ real URL is http://$LOCALIP:$HGPORT/redirected (glob) s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1\r\n
--- a/tests/test-http.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-http.t Tue Mar 19 16:36:59 2019 +0300 @@ -156,6 +156,8 @@ HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + http://localhost:$HGPORT1/ HG_URL=http://localhost:$HGPORT1/ (run 'hg update' to get a working copy) @@ -171,21 +173,9 @@ + use the same server to test server side streaming preference $ cd test - $ cat << EOT > userpass.py - > import base64 - > from mercurial.hgweb import common - > def perform_authentication(hgweb, req, op): - > auth = req.headers.get(b'Authorization') - > if not auth: - > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', - > [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) - > if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', b'pass']: - > raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') - > def extsetup(ui): - > common.permhooks.insert(0, perform_authentication) - > EOT - $ hg serve --config extensions.x=userpass.py -p $HGPORT2 -d --pid-file=pid \ - > --config server.preferuncompressed=True \ + + $ hg serve --config extensions.x=$TESTDIR/httpserverauth.py -p $HGPORT2 -d \ + > --pid-file=pid --config server.preferuncompressed=True -E ../errors2.log \ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log $ cat pid >> $DAEMON_PIDS @@ -221,6 +211,25 @@ $ hg id http://user@localhost:$HGPORT2/ 5fed3813f7f5 + $ cat > use_digests.py << EOF + > from mercurial import ( + > exthelper, + > url, + > ) + > + > eh = exthelper.exthelper() + > uisetup = eh.finaluisetup + > + > @eh.wrapfunction(url, 'opener') + > def urlopener(orig, *args, **kwargs): + > opener = orig(*args, **kwargs) + > opener.addheaders.append((r'X-HgTest-AuthType', r'Digest')) + > return opener + > EOF + + $ hg id http://localhost:$HGPORT2/ --config extensions.x=use_digests.py + 5fed3813f7f5 + #if no-reposimplestore $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1 streaming all changes @@ 
-374,6 +383,14 @@ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull + "GET /?cmd=capabilities HTTP/1.1" 401 - x-hgtest-authtype:Digest + "GET /?cmd=capabilities HTTP/1.1" 200 - x-hgtest-authtype:Digest + "GET /?cmd=lookup HTTP/1.1" 401 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest "GET /?cmd=capabilities HTTP/1.1" 401 - (no-reposimplestore !) "GET /?cmd=capabilities HTTP/1.1" 200 - (no-reposimplestore !) "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (no-reposimplestore !) @@ -443,6 +460,8 @@ $ cat error.log + $ cat errors2.log + check abort error reporting while pulling/cloning $ $RUNTESTDIR/killdaemons.py
--- a/tests/test-https.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-https.t Tue Mar 19 16:36:59 2019 +0300 @@ -213,7 +213,7 @@ $ cd copy-pull $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup" + > changegroup = sh -c "printenv.py --line changegroup" > EOF $ hg pull $DISABLECACERTS pulling from https://localhost:$HGPORT/ @@ -232,7 +232,16 @@ adding file changes added 1 changesets with 1 changes to 1 files new changesets 5fed3813f7f5 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=https://localhost:$HGPORT/ + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d + HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + https://localhost:$HGPORT/ + HG_URL=https://localhost:$HGPORT/ + (run 'hg update' to get a working copy) $ cd ..
--- a/tests/test-impexp-branch.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-impexp-branch.t Tue Mar 19 16:36:59 2019 +0300 @@ -6,7 +6,7 @@ > import re > import sys > - > head_re = re.compile('^#(?:(?:\\s+([A-Za-z][A-Za-z0-9_]*)(?:\\s.*)?)|(?:\\s*))$') + > head_re = re.compile(r'^#(?:(?:\\s+([A-Za-z][A-Za-z0-9_]*)(?:\\s.*)?)|(?:\\s*))$') > > for line in sys.stdin: > hmatch = head_re.match(line)
--- a/tests/test-import-context.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-import-context.t Tue Mar 19 16:36:59 2019 +0300 @@ -12,9 +12,9 @@ > count = int(pattern[0:-1]) > char = pattern[-1].encode('utf8') + b'\n' > if not lasteol and i == len(patterns) - 1: - > fp.write((char*count)[:-1]) + > fp.write((char * count)[:-1]) > else: - > fp.write(char*count) + > fp.write(char * count) > fp.close() > EOF $ cat > cat.py <<EOF
--- a/tests/test-import-eol.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-import-eol.t Tue Mar 19 16:36:59 2019 +0300 @@ -17,9 +17,9 @@ > 'empty:stripped-crlf': b'\r\n'}[sys.argv[1]]) > w(b' d\n') > w(b'-e\n') - > w(b'\ No newline at end of file\n') + > w(b'\\\\ No newline at end of file\n') > w(b'+z\r\n') - > w(b'\ No newline at end of file\r\n') + > w(b'\\\\ No newline at end of file\r\n') > EOF $ hg init repo
--- a/tests/test-import-git.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-import-git.t Tue Mar 19 16:36:59 2019 +0300 @@ -826,7 +826,7 @@ $ hg revert -qa $ hg --encoding utf-8 import - <<EOF - > From: =?UTF-8?q?Rapha=C3=ABl=20Hertzog?= <hertzog@debian.org> + > From: =?utf-8?q?Rapha=C3=ABl_Hertzog_=3Chertzog=40debian=2Eorg=3E?= > Subject: [PATCH] =?UTF-8?q?=C5=A7=E2=82=AC=C3=9F=E1=B9=AA?= > > diff --git a/a b/a
--- a/tests/test-inherit-mode.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-inherit-mode.t Tue Mar 19 16:36:59 2019 +0300 @@ -71,7 +71,6 @@ 00600 ./.hg/00changelog.i 00770 ./.hg/cache/ 00660 ./.hg/cache/branch2-served - 00660 ./.hg/cache/manifestfulltextcache (reporevlogstore !) 00660 ./.hg/cache/rbc-names-v1 00660 ./.hg/cache/rbc-revs-v1 00660 ./.hg/dirstate @@ -105,6 +104,7 @@ 00711 ./.hg/wcache/checkisexec 007.. ./.hg/wcache/checklink (re) 00600 ./.hg/wcache/checklink-target + 00660 ./.hg/wcache/manifestfulltextcache (reporevlogstore !) 00700 ./dir/ 00600 ./dir/bar 00600 ./foo
--- a/tests/test-install.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-install.t Tue Mar 19 16:36:59 2019 +0300 @@ -161,6 +161,7 @@ > import subprocess > import sys > import xml.etree.ElementTree as ET + > from mercurial import pycompat > > # MSYS mangles the path if it expands $TESTDIR > testdir = os.environ['TESTDIR'] @@ -177,7 +178,7 @@ > files = node.findall('./{%(wix)s}Component/{%(wix)s}File' % ns) > > for f in files: - > yield relpath + f.attrib['Name'] + > yield pycompat.sysbytes(relpath + f.attrib['Name']) > > def hgdirectory(relpath): > '''generator of tracked files, rooted at relpath''' @@ -187,16 +188,15 @@ > stderr=subprocess.PIPE) > output = proc.communicate()[0] > - > slash = '/' > for line in output.splitlines(): > if os.name == 'nt': - > yield line.replace(os.sep, slash) + > yield line.replace(pycompat.sysbytes(os.sep), b'/') > else: > yield line > > tracked = [f for f in hgdirectory(sys.argv[1])] > - > xml = ET.parse("%s/../contrib/wix/%s.wxs" % (testdir, sys.argv[1])) + > xml = ET.parse("%s/../contrib/packaging/wix/%s.wxs" % (testdir, sys.argv[1])) > root = xml.getroot() > dir = root.find('.//{%(wix)s}DirectoryRef' % ns) > @@ -204,11 +204,11 @@ > > print('Not installed:') > for f in sorted(set(tracked) - set(installed)): - > print(' %s' % f) + > print(' %s' % pycompat.sysstr(f)) > > print('Not tracked:') > for f in sorted(set(installed) - set(tracked)): - > print(' %s' % f) + > print(' %s' % pycompat.sysstr(f)) > EOF $ ( testrepohgenv; "$PYTHON" wixxml.py help ) @@ -238,6 +238,7 @@ the default for them. $ unset PYTHONPATH $ "$PYTHON" -m virtualenv --no-site-packages --never-download installenv >> pip.log + DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?) 
Note: we use this weird path to run pip and hg to avoid platform differences, since it's bin on most platforms but Scripts on Windows. $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
--- a/tests/test-journal-exists.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-journal-exists.t Tue Mar 19 16:36:59 2019 +0300 @@ -29,7 +29,7 @@ $ hg -R foo unbundle repo.hg adding changesets - abort: Permission denied: $TESTTMP/foo/.hg/store/.00changelog.i-* (glob) + abort: Permission denied: '$TESTTMP/foo/.hg/store/.00changelog.i-*' (glob) [255] $ if test -f foo/.hg/store/journal; then echo 'journal exists :-('; fi
--- a/tests/test-largefiles-misc.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-largefiles-misc.t Tue Mar 19 16:36:59 2019 +0300 @@ -578,7 +578,7 @@ $ echo moremore >> anotherlarge $ hg revert anotherlarge -v --config 'ui.origbackuppath=.hg/origbackups' creating directory: $TESTTMP/addrm2/.hg/origbackups/.hglf/sub - saving current version of ../.hglf/sub/anotherlarge as $TESTTMP/addrm2/.hg/origbackups/.hglf/sub/anotherlarge + saving current version of ../.hglf/sub/anotherlarge as ../.hg/origbackups/.hglf/sub/anotherlarge reverting ../.hglf/sub/anotherlarge creating directory: $TESTTMP/addrm2/.hg/origbackups/sub found 90c622cf65cebe75c5842f9136c459333faf392e in store
--- a/tests/test-largefiles-small-disk.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-largefiles-small-disk.t Tue Mar 19 16:36:59 2019 +0300 @@ -9,7 +9,7 @@ > # > # this makes the original largefiles code abort: > _origcopyfileobj = shutil.copyfileobj - > def copyfileobj(fsrc, fdst, length=16*1024): + > def copyfileobj(fsrc, fdst, length=16 * 1024): > # allow journal files (used by transaction) to be written > if b'journal.' in fdst.name: > return _origcopyfileobj(fsrc, fdst, length)
--- a/tests/test-largefiles-wireproto.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-largefiles-wireproto.t Tue Mar 19 16:36:59 2019 +0300 @@ -420,20 +420,8 @@ $ rm "${USERCACHE}"/* $ cd .. - $ cat << EOT > userpass.py - > import base64 - > from mercurial.hgweb import common - > def perform_authentication(hgweb, req, op): - > auth = req.headers.get(b'Authorization') - > if not auth: - > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', - > [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) - > if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', b'pass']: - > raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') - > def extsetup(ui): - > common.permhooks.insert(0, perform_authentication) - > EOT - $ hg serve --config extensions.x=userpass.py -R credentialmain \ + + $ hg serve --config extensions.x=$TESTDIR/httpserverauth.py -R credentialmain \ > -d -p $HGPORT --pid-file hg.pid -A access.log $ cat hg.pid >> $DAEMON_PIDS $ cat << EOF > get_pass.py
--- a/tests/test-lfs-serve-access.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-lfs-serve-access.t Tue Mar 19 16:36:59 2019 +0300 @@ -227,9 +227,9 @@ > # One time simulation of a read error > if _readerr: > _readerr = False - > raise IOError(errno.EIO, '%s: I/O error' % oid) + > raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) > # Simulate corrupt content on client download - > blobstore._verify(oid, 'dummy content') + > blobstore._verify(oid, b'dummy content') > > def verify(self, oid): > '''Called in the server to populate the Batch API response, @@ -240,7 +240,7 @@ > global _numverifies > _numverifies += 1 > if _numverifies <= 2: - > raise IOError(errno.EIO, '%s: I/O error' % oid) + > raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) > return super(badstore, self).verify(oid) > > store.__class__ = badstore @@ -340,14 +340,14 @@ $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob) $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob) $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob) - $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, '%s: I/O error' % oid) (glob) - $LOCALIP - - [$ERRDATE$] HG error: IOError: [Errno 5] f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e: I/O error (glob) + $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob) + $LOCALIP - - [$ERRDATE$] HG error: *Error: [Errno 5] f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e: I/O error (glob) $LOCALIP - - [$ERRDATE$] HG error: (glob) $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob) $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob) $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob) - $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, '%s: 
I/O error' % oid) (glob) - $LOCALIP - - [$ERRDATE$] HG error: IOError: [Errno 5] b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c: I/O error (glob) + $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob) + $LOCALIP - - [$ERRDATE$] HG error: *Error: [Errno 5] b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c: I/O error (glob) $LOCALIP - - [$ERRDATE$] HG error: (glob) $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c': (glob) $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob) @@ -363,19 +363,19 @@ for chunk in self.server.application(env, self._start_response): for r in self._runwsgi(req, res, repo): rctx, req, res, self.check_perm) - return func(*(args + a), **kw) + return func(*(args + a), **kw) (no-py3 !) lambda perm: res.setbodybytes(localstore.read(oid)) blob = self._read(self.vfs, oid, verify) - raise IOError(errno.EIO, '%s: I/O error' % oid) - IOError: [Errno 5] 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d: I/O error + raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) + *Error: [Errno 5] 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d: I/O error (glob) $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob) $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob) $LOCALIP - - [$ERRDATE$] HG error: res.setbodybytes(localstore.read(oid)) (glob) $LOCALIP - - [$ERRDATE$] HG error: blob = self._read(self.vfs, oid, verify) (glob) - $LOCALIP - - [$ERRDATE$] HG error: blobstore._verify(oid, 'dummy content') (glob) - $LOCALIP - - [$ERRDATE$] HG error: hint=_('run hg verify')) (glob) + $LOCALIP - - [$ERRDATE$] HG error: blobstore._verify(oid, b'dummy content') (glob) + 
$LOCALIP - - [$ERRDATE$] HG error: hint=_(b'run hg verify')) (glob) $LOCALIP - - [$ERRDATE$] HG error: LfsCorruptionError: detected corrupt lfs object: 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d (glob) $LOCALIP - - [$ERRDATE$] HG error: (glob) @@ -394,22 +394,7 @@ > l.password=pass > EOF - $ cat << EOF > userpass.py - > import base64 - > from mercurial.hgweb import common - > def perform_authentication(hgweb, req, op): - > auth = req.headers.get(b'Authorization') - > if not auth: - > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', - > [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) - > if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', - > b'pass']: - > raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') - > def extsetup(ui): - > common.permhooks.insert(0, perform_authentication) - > EOF - - $ hg --config extensions.x=$TESTTMP/userpass.py \ + $ hg --config extensions.x=$TESTDIR/httpserverauth.py \ > -R server serve -d -p $HGPORT1 --pid-file=hg.pid \ > -A $TESTTMP/access.log -E $TESTTMP/errors.log $ mv hg.pid $DAEMON_PIDS @@ -437,6 +422,32 @@ $ echo 'another blob' > auth_clone/lfs.blob $ hg -R auth_clone ci -Aqm 'add blob' + + $ cat > use_digests.py << EOF + > from mercurial import ( + > exthelper, + > url, + > ) + > + > eh = exthelper.exthelper() + > uisetup = eh.finaluisetup + > + > @eh.wrapfunction(url, 'opener') + > def urlopener(orig, *args, **kwargs): + > opener = orig(*args, **kwargs) + > opener.addheaders.append((r'X-HgTest-AuthType', r'Digest')) + > return opener + > EOF + +Test that Digest Auth fails gracefully before testing the successful Basic Auth + + $ hg -R auth_clone push --config extensions.x=use_digests.py + pushing to http://localhost:$HGPORT1/ + searching for changes + abort: LFS HTTP error: HTTP Error 401: the server must support Basic Authentication! 
+ (api=http://localhost:$HGPORT1/.git/info/lfs/objects/batch, action=upload) + [255] + $ hg -R auth_clone --debug push | egrep '^[{}]| ' { "objects": [ @@ -468,6 +479,19 @@ $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 200 - (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 401 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap 
HTTP/1.1" 401 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - x-hgtest-authtype:Digest (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
--- a/tests/test-lfs-serve.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-lfs-serve.t Tue Mar 19 16:36:59 2019 +0300 @@ -51,16 +51,15 @@ > opts[b'manifest'] = False > opts[b'dir'] = False > rl = cmdutil.openrevlog(repo, b'debugprocessors', file_, opts) - > for flag, proc in rl._flagprocessors.iteritems(): + > for flag, proc in rl._flagprocessors.items(): > ui.status(b"registered processor '%#x'\n" % (flag)) > EOF Skip the experimental.changegroup3=True config. Failure to agree on this comes -first, and causes a "ValueError: no common changegroup version" or "abort: -HTTP Error 500: Internal Server Error", if the extension is only loaded on one -side. If that *is* enabled, the subsequent failure is "abort: missing processor -for flag '0x2000'!" if the extension is only loaded on one side (possibly also -masked by the Internal Server Error message). +first, and causes an "abort: no common changegroup version" if the extension is +only loaded on one side. If that *is* enabled, the subsequent failure is "abort: +missing processor for flag '0x2000'!" if the extension is only loaded on one side +(possibly also masked by the Internal Server Error message). $ cat >> $HGRCPATH <<EOF > [extensions] > debugprocessors = $TESTTMP/debugprocessors.py @@ -110,14 +109,14 @@ ... def diff(server): ... readchannel(server) ... # run an arbitrary command in the repo with the extension loaded - ... runcommand(server, ['id', '-R', '../cmdservelfs']) + ... runcommand(server, [b'id', b'-R', b'../cmdservelfs']) ... # now run a command in a repo without the extension to ensure that ... # files are added safely.. - ... runcommand(server, ['ci', '-Aqm', 'non-lfs']) + ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs']) ... # .. and that scmutil.prefetchfiles() safely no-ops.. - ... runcommand(server, ['diff', '-r', '.~1']) + ... runcommand(server, [b'diff', b'-r', b'.~1']) ... # .. and that debugupgraderepo safely no-ops. - ... runcommand(server, ['debugupgraderepo', '-q', '--run']) + ... 
runcommand(server, [b'debugupgraderepo', b'-q', b'--run']) *** runcommand id -R ../cmdservelfs 000000000000 tip *** runcommand ci -Aqm non-lfs @@ -257,12 +256,12 @@ ... def addrequirement(server): ... readchannel(server) ... # change the repo in a way that adds the lfs requirement - ... runcommand(server, ['pull', '-qu']) + ... runcommand(server, [b'pull', b'-qu']) ... # Now cause the requirement adding hook to fire again, without going ... # through reposetup() again. ... with open('file.txt', 'wb') as fp: - ... fp.write('data') - ... runcommand(server, ['ci', '-Aqm', 'non-lfs']) + ... fp.write(b'data') + ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs']) *** runcommand pull -qu *** runcommand ci -Aqm non-lfs @@ -317,8 +316,11 @@ TODO: fail more gracefully. $ hg init $TESTTMP/client4_pull - $ hg -R $TESTTMP/client4_pull pull -q http://localhost:$HGPORT - abort: HTTP Error 500: Internal Server Error + $ hg -R $TESTTMP/client4_pull pull http://localhost:$HGPORT + pulling from http://localhost:$HGPORT/ + requesting all changes + remote: abort: no common changegroup version + abort: pull failed on remote [255] $ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES $TESTTMP/server/.hg/requires:lfs @@ -359,22 +361,24 @@ $ cp $HGRCPATH.orig $HGRCPATH >>> from __future__ import absolute_import - >>> from hgclient import check, readchannel, runcommand + >>> from hgclient import bprint, check, readchannel, runcommand, stdout >>> @check ... def checkflags(server): ... readchannel(server) - ... print('') - ... print('# LFS required- both lfs and non-lfs revlogs have 0x2000 flag') - ... runcommand(server, ['debugprocessors', 'lfs.bin', '-R', - ... '../server']) - ... runcommand(server, ['debugprocessors', 'nonlfs2.txt', '-R', - ... '../server']) - ... runcommand(server, ['config', 'extensions', '--cwd', - ... '../server']) + ... bprint(b'') + ... bprint(b'# LFS required- both lfs and non-lfs revlogs have 0x2000 flag') + ... stdout.flush() + ... 
runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R', + ... b'../server']) + ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R', + ... b'../server']) + ... runcommand(server, [b'config', b'extensions', b'--cwd', + ... b'../server']) ... - ... print("\n# LFS not enabled- revlogs don't have 0x2000 flag") - ... runcommand(server, ['debugprocessors', 'nonlfs3.txt']) - ... runcommand(server, ['config', 'extensions']) + ... bprint(b"\n# LFS not enabled- revlogs don't have 0x2000 flag") + ... stdout.flush() + ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt']) + ... runcommand(server, [b'config', b'extensions']) # LFS required- both lfs and non-lfs revlogs have 0x2000 flag *** runcommand debugprocessors lfs.bin -R ../server @@ -403,28 +407,31 @@ > EOF >>> from __future__ import absolute_import, print_function - >>> from hgclient import check, readchannel, runcommand + >>> from hgclient import bprint, check, readchannel, runcommand, stdout >>> @check ... def checkflags2(server): ... readchannel(server) - ... print('') - ... print('# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag') - ... runcommand(server, ['debugprocessors', 'lfs.bin', '-R', - ... '../server']) - ... runcommand(server, ['debugprocessors', 'nonlfs2.txt', '-R', - ... '../server']) - ... runcommand(server, ['config', 'extensions', '--cwd', - ... '../server']) + ... bprint(b'') + ... bprint(b'# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag') + ... stdout.flush() + ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R', + ... b'../server']) + ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R', + ... b'../server']) + ... runcommand(server, [b'config', b'extensions', b'--cwd', + ... b'../server']) ... - ... print('\n# LFS enabled without requirement- revlogs have 0x2000 flag') - ... runcommand(server, ['debugprocessors', 'nonlfs3.txt']) - ... runcommand(server, ['config', 'extensions']) + ... 
bprint(b'\n# LFS enabled without requirement- revlogs have 0x2000 flag') + ... stdout.flush() + ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt']) + ... runcommand(server, [b'config', b'extensions']) ... - ... print("\n# LFS disabled locally- revlogs don't have 0x2000 flag") - ... runcommand(server, ['debugprocessors', 'nonlfs.txt', '-R', - ... '../nonlfs']) - ... runcommand(server, ['config', 'extensions', '--cwd', - ... '../nonlfs']) + ... bprint(b"\n# LFS disabled locally- revlogs don't have 0x2000 flag") + ... stdout.flush() + ... runcommand(server, [b'debugprocessors', b'nonlfs.txt', b'-R', + ... b'../nonlfs']) + ... runcommand(server, [b'config', b'extensions', b'--cwd', + ... b'../nonlfs']) # LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag *** runcommand debugprocessors lfs.bin -R ../server @@ -657,10 +664,4 @@ $ "$PYTHON" $TESTDIR/killdaemons.py $DAEMON_PIDS -#if lfsremote-on - $ cat $TESTTMP/errors.log | grep '^[A-Z]' - Traceback (most recent call last): - ValueError: no common changegroup version -#else $ cat $TESTTMP/errors.log -#endif
--- a/tests/test-linelog.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-linelog.py Tue Mar 19 16:36:59 2019 +0300 @@ -15,7 +15,6 @@ def _genedits(seed, endrev): lines = [] random.seed(seed) - rev = 0 for rev in range(0, endrev): n = len(lines) a1 = random.randint(0, n)
--- a/tests/test-locate.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-locate.t Tue Mar 19 16:36:59 2019 +0300 @@ -123,6 +123,24 @@ ../t.h ../t/e.h ../t/x + $ hg files --config ui.relative-paths=yes + ../b + ../dir.h/foo + ../t.h + ../t/e.h + ../t/x + $ hg files --config ui.relative-paths=no + b + dir.h/foo + t.h + t/e.h + t/x + $ hg files --config ui.relative-paths=legacy + ../b + ../dir.h/foo + ../t.h + ../t/e.h + ../t/x $ hg locate b ../b
--- a/tests/test-lock.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-lock.py Tue Mar 19 16:36:59 2019 +0300 @@ -141,7 +141,7 @@ state.assertacquirecalled(True) # fake a fork - forklock = copy.deepcopy(lock) + forklock = copy.copy(lock) forklock._pidoffset = 1 forklock.release() state.assertreleasecalled(False) @@ -238,7 +238,7 @@ childstate.assertacquirecalled(True) # fork the child lock - forkchildlock = copy.deepcopy(childlock) + forkchildlock = copy.copy(childlock) forkchildlock._pidoffset += 1 forkchildlock.release() childstate.assertreleasecalled(False) @@ -290,7 +290,7 @@ self.fail("unexpected lock acquisition") except error.LockHeld as why: self.assertTrue(why.errno == errno.ETIMEDOUT) - self.assertTrue(why.locker == "") + self.assertTrue(why.locker == b"") state.assertlockexists(False) if __name__ == '__main__':
--- a/tests/test-manifest.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-manifest.py Tue Mar 19 16:36:59 2019 +0300 @@ -289,8 +289,7 @@ the resulting manifest.''' m = self.parsemanifest(A_HUGE_MANIFEST) - match = matchmod.match(b'/', b'', - [b'file1', b'file200', b'file300'], exact=True) + match = matchmod.exact([b'file1', b'file200', b'file300']) m2 = m.matches(match) w = (b'file1\0%sx\n' @@ -304,10 +303,8 @@ ''' m = self.parsemanifest(A_DEEPER_MANIFEST) - match = matchmod.match(b'/', b'', - [b'a/b/c/bar.txt', b'a/b/d/qux.py', - b'readme.txt', b'nonexistent'], - exact=True) + match = matchmod.exact([b'a/b/c/bar.txt', b'a/b/d/qux.py', + b'readme.txt', b'nonexistent']) m2 = m.matches(match) self.assertEqual( @@ -330,7 +327,7 @@ m = self.parsemanifest(A_HUGE_MANIFEST) flist = m.keys()[80:300] - match = matchmod.match(b'/', b'', flist, exact=True) + match = matchmod.exact(flist) m2 = m.matches(match) self.assertEqual(flist, m2.keys()) @@ -364,7 +361,7 @@ against a directory.''' m = self.parsemanifest(A_DEEPER_MANIFEST) - match = matchmod.match(b'/', b'', [b'a/b'], exact=True) + match = matchmod.exact([b'a/b']) m2 = m.matches(match) self.assertEqual([], m2.keys())
--- a/tests/test-manifest.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-manifest.t Tue Mar 19 16:36:59 2019 +0300 @@ -93,3 +93,111 @@ $ hg manifest -r tip tip abort: please specify just one revision [255] + +Testing the manifest full text cache utility +-------------------------------------------- + +Reminder of the manifest log content + + $ hg log --debug | grep 'manifest:' + manifest: 1:1e01206b1d2f72bd55f2a33fa8ccad74144825b7 + manifest: 0:fce2a30dedad1eef4da95ca1dc0004157aa527cf + +Showing the content of the caches after the above operations + + $ hg debugmanifestfulltextcache + cache contains 1 manifest entries, in order of most to least recent: + id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes + total cache data size 157 bytes, on-disk 157 bytes + +(Clearing the cache in case of any content) + + $ hg debugmanifestfulltextcache --clear + +Adding a new persistent entry in the cache + + $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7 + + $ hg debugmanifestfulltextcache + cache contains 1 manifest entries, in order of most to least recent: + id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes + total cache data size 157 bytes, on-disk 157 bytes + +Check we don't duplicated entry (added from the debug command) + + $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7 + $ hg debugmanifestfulltextcache + cache contains 1 manifest entries, in order of most to least recent: + id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes + total cache data size 157 bytes, on-disk 157 bytes + +Adding a second entry + + $ hg debugmanifestfulltextcache --add fce2a30dedad1eef4da95ca1dc0004157aa527cf + $ hg debugmanifestfulltextcache + cache contains 2 manifest entries, in order of most to least recent: + id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes + id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes + total cache data size 268 bytes, on-disk 268 bytes + +Accessing 
the initial entry again, refresh their order + + $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7 + $ hg debugmanifestfulltextcache + cache contains 2 manifest entries, in order of most to least recent: + id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes + id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes + total cache data size 268 bytes, on-disk 268 bytes + +Check cache clearing + + $ hg debugmanifestfulltextcache --clear + $ hg debugmanifestfulltextcache + cache empty + +Check adding multiple entry in one go: + + $ hg debugmanifestfulltextcache --add fce2a30dedad1eef4da95ca1dc0004157aa527cf --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7 + $ hg debugmanifestfulltextcache + cache contains 2 manifest entries, in order of most to least recent: + id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes + id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes + total cache data size 268 bytes, on-disk 268 bytes + $ hg debugmanifestfulltextcache --clear + +Test caching behavior on actual operation +----------------------------------------- + +Make sure we start empty + + $ hg debugmanifestfulltextcache + cache empty + +Commit should have the new node cached: + + $ echo a >> b/a + $ hg commit -m 'foo' + $ hg debugmanifestfulltextcache + cache contains 2 manifest entries, in order of most to least recent: + id: 26b8653b67af8c1a0a0317c4ee8dac50a41fdb65, size 133 bytes + id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes + total cache data size 314 bytes, on-disk 314 bytes + $ hg log -r 'ancestors(., 1)' --debug | grep 'manifest:' + manifest: 1:1e01206b1d2f72bd55f2a33fa8ccad74144825b7 + manifest: 2:26b8653b67af8c1a0a0317c4ee8dac50a41fdb65 + +hg update should warm the cache too + +(force dirstate check to avoid flackiness in manifest order) + $ hg debugrebuilddirstate + + $ hg update 0 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg debugmanifestfulltextcache + cache contains 
3 manifest entries, in order of most to least recent: + id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes + id: 26b8653b67af8c1a0a0317c4ee8dac50a41fdb65, size 133 bytes + id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes + total cache data size 425 bytes, on-disk 425 bytes + $ hg log -r '0' --debug | grep 'manifest:' + manifest: 0:fce2a30dedad1eef4da95ca1dc0004157aa527cf
--- a/tests/test-match.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-match.py Tue Mar 19 16:36:59 2019 +0300 @@ -12,36 +12,36 @@ class BaseMatcherTests(unittest.TestCase): def testVisitdir(self): - m = matchmod.basematcher(b'', b'') + m = matchmod.basematcher() self.assertTrue(m.visitdir(b'.')) self.assertTrue(m.visitdir(b'dir')) def testVisitchildrenset(self): - m = matchmod.basematcher(b'', b'') + m = matchmod.basematcher() self.assertEqual(m.visitchildrenset(b'.'), b'this') self.assertEqual(m.visitchildrenset(b'dir'), b'this') class AlwaysMatcherTests(unittest.TestCase): def testVisitdir(self): - m = matchmod.alwaysmatcher(b'', b'') + m = matchmod.alwaysmatcher() self.assertEqual(m.visitdir(b'.'), b'all') self.assertEqual(m.visitdir(b'dir'), b'all') def testVisitchildrenset(self): - m = matchmod.alwaysmatcher(b'', b'') + m = matchmod.alwaysmatcher() self.assertEqual(m.visitchildrenset(b'.'), b'all') self.assertEqual(m.visitchildrenset(b'dir'), b'all') class NeverMatcherTests(unittest.TestCase): def testVisitdir(self): - m = matchmod.nevermatcher(b'', b'') + m = matchmod.nevermatcher() self.assertFalse(m.visitdir(b'.')) self.assertFalse(m.visitdir(b'dir')) def testVisitchildrenset(self): - m = matchmod.nevermatcher(b'', b'') + m = matchmod.nevermatcher() self.assertEqual(m.visitchildrenset(b'.'), set()) self.assertEqual(m.visitchildrenset(b'dir'), set()) @@ -50,12 +50,12 @@ # this is equivalent to BaseMatcherTests. 
def testVisitdir(self): - m = matchmod.predicatematcher(b'', b'', lambda *a: False) + m = matchmod.predicatematcher(lambda *a: False) self.assertTrue(m.visitdir(b'.')) self.assertTrue(m.visitdir(b'dir')) def testVisitchildrenset(self): - m = matchmod.predicatematcher(b'', b'', lambda *a: False) + m = matchmod.predicatematcher(lambda *a: False) self.assertEqual(m.visitchildrenset(b'.'), b'this') self.assertEqual(m.visitchildrenset(b'dir'), b'this') @@ -185,8 +185,7 @@ class ExactMatcherTests(unittest.TestCase): def testVisitdir(self): - m = matchmod.match(b'x', b'', patterns=[b'dir/subdir/foo.txt'], - exact=True) + m = matchmod.exact(files=[b'dir/subdir/foo.txt']) assert isinstance(m, matchmod.exactmatcher) self.assertTrue(m.visitdir(b'.')) self.assertTrue(m.visitdir(b'dir')) @@ -197,8 +196,7 @@ self.assertFalse(m.visitdir(b'folder')) def testVisitchildrenset(self): - m = matchmod.match(b'x', b'', patterns=[b'dir/subdir/foo.txt'], - exact=True) + m = matchmod.exact(files=[b'dir/subdir/foo.txt']) assert isinstance(m, matchmod.exactmatcher) self.assertEqual(m.visitchildrenset(b'.'), {b'dir'}) self.assertEqual(m.visitchildrenset(b'dir'), {b'subdir'}) @@ -208,12 +206,11 @@ self.assertEqual(m.visitchildrenset(b'folder'), set()) def testVisitchildrensetFilesAndDirs(self): - m = matchmod.match(b'x', b'', patterns=[b'rootfile.txt', - b'a/file1.txt', - b'a/b/file2.txt', - # no file in a/b/c - b'a/b/c/d/file4.txt'], - exact=True) + m = matchmod.exact(files=[b'rootfile.txt', + b'a/file1.txt', + b'a/b/file2.txt', + # no file in a/b/c + b'a/b/c/d/file4.txt']) assert isinstance(m, matchmod.exactmatcher) self.assertEqual(m.visitchildrenset(b'.'), {b'a', b'rootfile.txt'}) self.assertEqual(m.visitchildrenset(b'a'), {b'b', b'file1.txt'}) @@ -226,8 +223,8 @@ class DifferenceMatcherTests(unittest.TestCase): def testVisitdirM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() dm = 
matchmod.differencematcher(m1, m2) # dm should be equivalent to a nevermatcher. self.assertFalse(dm.visitdir(b'.')) @@ -239,8 +236,8 @@ self.assertFalse(dm.visitdir(b'folder')) def testVisitchildrensetM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() dm = matchmod.differencematcher(m1, m2) # dm should be equivalent to a nevermatcher. self.assertEqual(dm.visitchildrenset(b'.'), set()) @@ -252,27 +249,26 @@ self.assertEqual(dm.visitchildrenset(b'folder'), set()) def testVisitdirM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() dm = matchmod.differencematcher(m1, m2) - # dm should be equivalent to a alwaysmatcher. OPT: if m2 is a - # nevermatcher, we could return 'all' for these. + # dm should be equivalent to a alwaysmatcher. # # We're testing Equal-to-True instead of just 'assertTrue' since # assertTrue does NOT verify that it's a bool, just that it's truthy. # While we may want to eventually make these return 'all', they should # not currently do so. 
- self.assertEqual(dm.visitdir(b'.'), True) - self.assertEqual(dm.visitdir(b'dir'), True) - self.assertEqual(dm.visitdir(b'dir/subdir'), True) - self.assertEqual(dm.visitdir(b'dir/subdir/z'), True) - self.assertEqual(dm.visitdir(b'dir/foo'), True) - self.assertEqual(dm.visitdir(b'dir/subdir/x'), True) - self.assertEqual(dm.visitdir(b'folder'), True) + self.assertEqual(dm.visitdir(b'.'), b'all') + self.assertEqual(dm.visitdir(b'dir'), b'all') + self.assertEqual(dm.visitdir(b'dir/subdir'), b'all') + self.assertEqual(dm.visitdir(b'dir/subdir/z'), b'all') + self.assertEqual(dm.visitdir(b'dir/foo'), b'all') + self.assertEqual(dm.visitdir(b'dir/subdir/x'), b'all') + self.assertEqual(dm.visitdir(b'folder'), b'all') def testVisitchildrensetM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() dm = matchmod.differencematcher(m1, m2) # dm should be equivalent to a alwaysmatcher. self.assertEqual(dm.visitchildrenset(b'.'), b'all') @@ -284,7 +280,7 @@ self.assertEqual(dm.visitchildrenset(b'folder'), b'all') def testVisitdirM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir']) dm = matchmod.differencematcher(m1, m2) self.assertEqual(dm.visitdir(b'.'), True) @@ -295,12 +291,11 @@ # an 'all' pattern, just True. self.assertEqual(dm.visitdir(b'dir/subdir/z'), True) self.assertEqual(dm.visitdir(b'dir/subdir/x'), True) - # OPT: We could return 'all' for these. 
- self.assertEqual(dm.visitdir(b'dir/foo'), True) - self.assertEqual(dm.visitdir(b'folder'), True) + self.assertEqual(dm.visitdir(b'dir/foo'), b'all') + self.assertEqual(dm.visitdir(b'folder'), b'all') def testVisitchildrensetM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir']) dm = matchmod.differencematcher(m1, m2) self.assertEqual(dm.visitchildrenset(b'.'), b'this') @@ -322,7 +317,7 @@ dm = matchmod.differencematcher(m1, m2) self.assertEqual(dm.visitdir(b'.'), True) self.assertEqual(dm.visitdir(b'dir'), True) - self.assertEqual(dm.visitdir(b'dir/subdir'), True) + self.assertEqual(dm.visitdir(b'dir/subdir'), b'all') self.assertFalse(dm.visitdir(b'dir/foo')) self.assertFalse(dm.visitdir(b'folder')) # OPT: We should probably return False for these; we don't because @@ -349,8 +344,8 @@ class IntersectionMatcherTests(unittest.TestCase): def testVisitdirM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() im = matchmod.intersectmatchers(m1, m2) # im should be equivalent to a alwaysmatcher. self.assertEqual(im.visitdir(b'.'), b'all') @@ -362,8 +357,8 @@ self.assertEqual(im.visitdir(b'folder'), b'all') def testVisitchildrensetM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() im = matchmod.intersectmatchers(m1, m2) # im should be equivalent to a alwaysmatcher. self.assertEqual(im.visitchildrenset(b'.'), b'all') @@ -375,8 +370,8 @@ self.assertEqual(im.visitchildrenset(b'folder'), b'all') def testVisitdirM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() im = matchmod.intersectmatchers(m1, m2) # im should be equivalent to a nevermatcher. 
self.assertFalse(im.visitdir(b'.')) @@ -388,8 +383,8 @@ self.assertFalse(im.visitdir(b'folder')) def testVisitchildrensetM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() im = matchmod.intersectmatchers(m1, m2) # im should be equivalent to a nevermqtcher. self.assertEqual(im.visitchildrenset(b'.'), set()) @@ -401,7 +396,7 @@ self.assertEqual(im.visitchildrenset(b'folder'), set()) def testVisitdirM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir']) im = matchmod.intersectmatchers(m1, m2) self.assertEqual(im.visitdir(b'.'), True) @@ -416,7 +411,7 @@ self.assertEqual(im.visitdir(b'dir/subdir/x'), True) def testVisitchildrensetM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir']) im = matchmod.intersectmatchers(m1, m2) self.assertEqual(im.visitchildrenset(b'.'), {b'dir'}) @@ -541,8 +536,8 @@ class UnionMatcherTests(unittest.TestCase): def testVisitdirM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. self.assertEqual(um.visitdir(b'.'), b'all') @@ -554,8 +549,8 @@ self.assertEqual(um.visitdir(b'folder'), b'all') def testVisitchildrensetM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. 
self.assertEqual(um.visitchildrenset(b'.'), b'all') @@ -567,8 +562,8 @@ self.assertEqual(um.visitchildrenset(b'folder'), b'all') def testVisitdirM1never(self): - m1 = matchmod.nevermatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.nevermatcher() + m2 = matchmod.alwaysmatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. self.assertEqual(um.visitdir(b'.'), b'all') @@ -580,8 +575,8 @@ self.assertEqual(um.visitdir(b'folder'), b'all') def testVisitchildrensetM1never(self): - m1 = matchmod.nevermatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.nevermatcher() + m2 = matchmod.alwaysmatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. self.assertEqual(um.visitchildrenset(b'.'), b'all') @@ -593,8 +588,8 @@ self.assertEqual(um.visitchildrenset(b'folder'), b'all') def testVisitdirM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. self.assertEqual(um.visitdir(b'.'), b'all') @@ -606,8 +601,8 @@ self.assertEqual(um.visitdir(b'folder'), b'all') def testVisitchildrensetM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. 
self.assertEqual(um.visitchildrenset(b'.'), b'all') @@ -619,7 +614,7 @@ self.assertEqual(um.visitchildrenset(b'folder'), b'all') def testVisitdirM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir']) um = matchmod.unionmatcher([m1, m2]) self.assertEqual(um.visitdir(b'.'), b'all') @@ -631,7 +626,7 @@ self.assertEqual(um.visitdir(b'dir/subdir/x'), b'all') def testVisitchildrensetM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir']) um = matchmod.unionmatcher([m1, m2]) self.assertEqual(um.visitchildrenset(b'.'), b'all') @@ -782,7 +777,7 @@ def testVisitdir(self): m = matchmod.match(util.localpath(b'root/d'), b'e/f', [b'../a.txt', b'b.txt']) - pm = matchmod.prefixdirmatcher(b'root', b'd/e/f', b'd', m) + pm = matchmod.prefixdirmatcher(b'd', m) # `m` elides 'd' because it's part of the root, and the rest of the # patterns are relative. @@ -814,7 +809,7 @@ def testVisitchildrenset(self): m = matchmod.match(util.localpath(b'root/d'), b'e/f', [b'../a.txt', b'b.txt']) - pm = matchmod.prefixdirmatcher(b'root', b'd/e/f', b'd', m) + pm = matchmod.prefixdirmatcher(b'd', m) # OPT: visitchildrenset could possibly return {'e'} and {'f'} for these # next two, respectively; patternmatcher does not have this
--- a/tests/test-merge10.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-merge10.t Tue Mar 19 16:36:59 2019 +0300 @@ -37,8 +37,9 @@ (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up -C 2 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge - merging testdir/subdir/a and testdir/a to testdir/subdir/a +Abuse this test for also testing that merge respects ui.relative-paths + $ hg --cwd testdir merge --config ui.relative-paths=yes + merging subdir/a and a to subdir/a 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg stat
--- a/tests/test-missing-capability.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-missing-capability.t Tue Mar 19 16:36:59 2019 +0300 @@ -15,7 +15,7 @@ > from mercurial import extensions, wireprotov1server > def wcapabilities(orig, *args, **kwargs): > cap = orig(*args, **kwargs) - > cap.remove('$1') + > cap.remove(b'$1') > return cap > extensions.wrapfunction(wireprotov1server, '_capabilities', wcapabilities) > EOF
--- a/tests/test-mq-eol.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-mq-eol.t Tue Mar 19 16:36:59 2019 +0300 @@ -23,17 +23,21 @@ > w(b' c\r\n') > w(b' d\n') > w(b'-e\n') - > w(b'\ No newline at end of file\n') + > w(b'\\\\ No newline at end of file\n') > w(b'+z\r\n') - > w(b'\ No newline at end of file\r\n') + > w(b'\\\\ No newline at end of file\r\n') > EOF $ cat > cateol.py <<EOF > import sys + > try: + > stdout = sys.stdout.buffer + > except AttributeError: + > stdout = sys.stdout > for line in open(sys.argv[1], 'rb'): > line = line.replace(b'\r', b'<CR>') > line = line.replace(b'\n', b'<LF>') - > print(line) + > stdout.write(line + b'\n') > EOF $ hg init repo
--- a/tests/test-mq-missingfiles.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-mq-missingfiles.t Tue Mar 19 16:36:59 2019 +0300 @@ -5,16 +5,20 @@ $ cat > writelines.py <<EOF > import sys + > if sys.version_info[0] >= 3: + > encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8') + > else: + > encode = lambda x: x.decode('string_escape') > path = sys.argv[1] > args = sys.argv[2:] > assert (len(args) % 2) == 0 > > f = open(path, 'wb') > for i in range(len(args) // 2): - > count, s = args[2*i:2*i+2] + > count, s = args[2 * i:2 * i + 2] > count = int(count) - > s = s.decode('string_escape') - > f.write(s*count) + > s = encode(s) + > f.write(s * count) > f.close() > EOF
--- a/tests/test-mq-qimport.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-mq-qimport.t Tue Mar 19 16:36:59 2019 +0300 @@ -1,15 +1,19 @@ $ cat > writelines.py <<EOF > import sys + > if sys.version_info[0] >= 3: + > encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8') + > else: + > encode = lambda x: x.decode('string_escape') > path = sys.argv[1] > args = sys.argv[2:] > assert (len(args) % 2) == 0 > > f = open(path, 'wb') - > for i in range(len(args)//2): - > count, s = args[2*i:2*i+2] + > for i in range(len(args) // 2): + > count, s = args[2 * i:2 * i + 2] > count = int(count) - > s = s.decode('string_escape') - > f.write(s*count) + > s = encode(s) + > f.write(s * count) > f.close() > > EOF
--- a/tests/test-mq-qnew.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-mq-qnew.t Tue Mar 19 16:36:59 2019 +0300 @@ -305,9 +305,9 @@ HG: branch 'default' HG: no files changed ==== - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt
--- a/tests/test-mq-subrepo-svn.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-mq-subrepo-svn.t Tue Mar 19 16:36:59 2019 +0300 @@ -23,11 +23,7 @@ $ svnadmin create svn-repo-2499 $ SVNREPOPATH=`pwd`/svn-repo-2499/project -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" $ mkdir -p svn-project-2499/trunk $ svn import -qm 'init project' svn-project-2499 "$SVNREPOURL"
--- a/tests/test-mq.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-mq.t Tue Mar 19 16:36:59 2019 +0300 @@ -305,6 +305,7 @@ working dir diff: $ hg diff --nodates -q + diff -r dde259bd5934 a --- a/a +++ b/a @@ -1,1 +1,2 @@ @@ -1406,7 +1407,7 @@ $ hg qpush -f --verbose --config 'ui.origbackuppath=.hg/origbackups' applying empty creating directory: $TESTTMP/forcepush/.hg/origbackups - saving current version of hello.txt as $TESTTMP/forcepush/.hg/origbackups/hello.txt + saving current version of hello.txt as .hg/origbackups/hello.txt patching file hello.txt committing files: hello.txt
--- a/tests/test-narrow-trackedcmd.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-narrow-trackedcmd.t Tue Mar 19 16:36:59 2019 +0300 @@ -218,3 +218,13 @@ adding file changes added 3 changesets with 0 changes to 0 files new changesets *:* (glob) + + $ cd .. + +Testing tracked command on a non-narrow repo + + $ hg init non-narrow + $ cd non-narrow + $ hg tracked --addinclude foobar + abort: the tracked command is only supported on respositories cloned with --narrow + [255]
--- a/tests/test-narrow-widen-no-ellipsis.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-narrow-widen-no-ellipsis.t Tue Mar 19 16:36:59 2019 +0300 @@ -406,7 +406,7 @@ * bookmark 11:* (glob) $ hg unbundle .hg/strip-backup/*-widen.hg abort: .hg/strip-backup/*-widen.hg: $ENOTDIR$ (windows !) - abort: $ENOENT$: .hg/strip-backup/*-widen.hg (no-windows !) + abort: $ENOENT$: '.hg/strip-backup/*-widen.hg' (no-windows !) [255] $ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n" 11: local
--- a/tests/test-newcgi.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-newcgi.t Tue Mar 19 16:36:59 2019 +0300 @@ -18,7 +18,7 @@ > from mercurial.hgweb.request import wsgiapplication > > def make_web_app(): - > return hgweb("test", "Empty test repository") + > return hgweb(b"test", b"Empty test repository") > > wsgicgi.launch(wsgiapplication(make_web_app)) > HGWEB @@ -44,7 +44,7 @@ > from mercurial.hgweb.request import wsgiapplication > > def make_web_app(): - > return hgwebdir("hgweb.config") + > return hgwebdir(b"hgweb.config") > > wsgicgi.launch(wsgiapplication(make_web_app)) > HGWEBDIR
--- a/tests/test-notify.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-notify.t Tue Mar 19 16:36:59 2019 +0300 @@ -455,7 +455,7 @@ > test = False > mbox = mbox > EOF - $ "$PYTHON" -c 'open("a/a", "ab").write("no" * 500 + "\xd1\x84" + "\n")' + $ "$PYTHON" -c 'open("a/a", "ab").write(b"no" * 500 + b"\xd1\x84" + b"\n")' $ hg --cwd a commit -A -m "long line" $ hg --traceback --cwd b pull ../a pulling from ../a
--- a/tests/test-obsmarker-template.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-obsmarker-template.t Tue Mar 19 16:36:59 2019 +0300 @@ -2429,6 +2429,23 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: ROOT +Check that {negrev} shows usable negative revisions despite hidden commits + + $ hg log -G -T "{negrev}\n" + @ -3 + | + o -4 + + + $ hg log -G -T "{negrev}\n" --hidden + x -1 + | + | x -2 + |/ + | @ -3 + |/ + o -4 + Test templates with splitted and pruned commit ============================================== @@ -2639,3 +2656,10 @@ |/ Obsfate: rewritten using amend as 2:718c0d00cee1 by test (at 1970-01-01 00:00 +0000); o ea207398892e + $ hg log -G -T "{negrev}\n" + @ -1 + | + o -2 + | + o -5 +
--- a/tests/test-oldcgi.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-oldcgi.t Tue Mar 19 16:36:59 2019 +0300 @@ -55,7 +55,7 @@ > # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples > # or use a dictionary with entries like 'virtual/path': '/real/path' > - > h = hgweb.hgwebdir("hgweb.config") + > h = hgweb.hgwebdir(b"hgweb.config") > h.run() > HGWEBDIR
--- a/tests/test-parseindex.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-parseindex.t Tue Mar 19 16:36:59 2019 +0300 @@ -27,7 +27,7 @@ $ cat >> test.py << EOF > from __future__ import print_function - > from mercurial import changelog, node, vfs + > from mercurial import changelog, node, pycompat, vfs > > class singlebyteread(object): > def __init__(self, real): @@ -55,10 +55,10 @@ > return singlebyteread(f) > return wrapper > - > cl = changelog.changelog(opener('.hg/store')) + > cl = changelog.changelog(opener(b'.hg/store')) > print(len(cl), 'revisions:') > for r in cl: - > print(node.short(cl.node(r))) + > print(pycompat.sysstr(node.short(cl.node(r)))) > EOF $ "$PYTHON" test.py 2 revisions: @@ -76,7 +76,7 @@ $ "$PYTHON" <<EOF > from __future__ import print_function > from mercurial import changelog, vfs - > cl = changelog.changelog(vfs.vfs('.hg/store')) + > cl = changelog.changelog(vfs.vfs(b'.hg/store')) > print('good heads:') > for head in [0, len(cl) - 1, -1]: > print('%s: %r' % (head, cl.reachableroots(0, [head], [0]))) @@ -112,7 +112,7 @@ 10000: head out of range -2: head out of range -10000: head out of range - None: an integer is required + None: an integer is required( .got type NoneType.)? (re) good roots: 0: [0] 1: [1] @@ -123,7 +123,7 @@ -2: [] -10000: [] bad roots: - None: an integer is required + None: an integer is required( .got type NoneType.)? (re) $ cd .. @@ -178,8 +178,8 @@ $ cat <<EOF > test.py > from __future__ import print_function > import sys - > from mercurial import changelog, vfs - > cl = changelog.changelog(vfs.vfs(sys.argv[1])) + > from mercurial import changelog, pycompat, vfs + > cl = changelog.changelog(vfs.vfs(pycompat.fsencode(sys.argv[1]))) > n0, n1 = cl.node(0), cl.node(1) > ops = [ > ('reachableroots',
--- a/tests/test-patch-offset.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-patch-offset.t Tue Mar 19 16:36:59 2019 +0300 @@ -9,7 +9,7 @@ > for pattern in patterns: > count = int(pattern[0:-1]) > char = pattern[-1].encode('utf8') + b'\n' - > fp.write(char*count) + > fp.write(char * count) > fp.close() > EOF
--- a/tests/test-permissions.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-permissions.t Tue Mar 19 16:36:59 2019 +0300 @@ -22,7 +22,7 @@ checking manifests crosschecking files in changesets and manifests checking files - abort: Permission denied: $TESTTMP/t/.hg/store/data/a.i + abort: Permission denied: '$TESTTMP/t/.hg/store/data/a.i' [255] $ chmod +r .hg/store/data/a.i @@ -39,7 +39,7 @@ $ echo barber > a $ hg commit -m "2" trouble committing a! - abort: Permission denied: $TESTTMP/t/.hg/store/data/a.i + abort: Permission denied: '$TESTTMP/t/.hg/store/data/a.i' [255] $ chmod -w .
--- a/tests/test-phabricator.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-phabricator.t Tue Mar 19 16:36:59 2019 +0300 @@ -48,22 +48,24 @@ > --test-vcr "$VCR/accept-4564.json" Create a differential diff: + $ HGENCODING=utf-8; export HGENCODING $ echo alpha > alpha - $ hg ci --addremove -m 'create alpha for phabricator test' + $ hg ci --addremove -m 'create alpha for phabricator test €' adding alpha $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json" - D4596 - created - 5206a4fa1e6c: create alpha for phabricator test - saved backup bundle to $TESTTMP/repo/.hg/strip-backup/5206a4fa1e6c-dec9e777-phabsend.hg + D6054 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc) + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg $ echo more >> alpha $ HGEDITOR=true hg ci --amend - saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d8f232f7d799-c573510a-amend.hg + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/cb03845d6dd9-870f61a6-amend.hg $ echo beta > beta $ hg ci --addremove -m 'create beta for phabricator test' adding beta $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json" - D4596 - updated - f70265671c65: create alpha for phabricator test - D4597 - created - 1a5640df7bbf: create beta for phabricator test - saved backup bundle to $TESTTMP/repo/.hg/strip-backup/1a5640df7bbf-6daf3e6e-phabsend.hg + D6054 - updated - 939d862f0318: create alpha for phabricator test \xe2\x82\xac (esc) + D6055 - created - f55f947ed0f8: create beta for phabricator test + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f55f947ed0f8-0d1e502e-phabsend.hg + $ unset HGENCODING The amend won't explode after posting a public commit. The local tag is left behind to identify it. 
@@ -74,13 +76,13 @@ $ echo 'draft change' > alpha $ hg ci -m 'create draft change for phabricator testing' $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json" - D5544 - created - 540a21d3fbeb: create public change for phabricator testing - D5545 - created - 6bca752686cd: create draft change for phabricator testing - warning: not updating public commit 2:540a21d3fbeb - saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6bca752686cd-41faefb4-phabsend.hg + D5544 - created - a56e5ebd77e6: create public change for phabricator testing + D5545 - created - 6a0ade3e3ec2: create draft change for phabricator testing + warning: not updating public commit 2:a56e5ebd77e6 + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6a0ade3e3ec2-aca7d23c-phabsend.hg $ hg tags -v - tip 3:620a50fd6ed9 - D5544 2:540a21d3fbeb local + tip 3:90532860b5e1 + D5544 2:a56e5ebd77e6 local $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF > { @@ -107,13 +109,13 @@ $ hg log -T'{rev} {phabreview|json}\n' 3 {"id": "D5545", "url": "https://phab.mercurial-scm.org/D5545"} 2 {"id": "D5544", "url": "https://phab.mercurial-scm.org/D5544"} - 1 {"id": "D4597", "url": "https://phab.mercurial-scm.org/D4597"} - 0 {"id": "D4596", "url": "https://phab.mercurial-scm.org/D4596"} + 1 {"id": "D6055", "url": "https://phab.mercurial-scm.org/D6055"} + 0 {"id": "D6054", "url": "https://phab.mercurial-scm.org/D6054"} $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n' 3 https://phab.mercurial-scm.org/D5545 D5545 2 https://phab.mercurial-scm.org/D5544 D5544 - 1 https://phab.mercurial-scm.org/D4597 D4597 - 0 https://phab.mercurial-scm.org/D4596 D4596 + 1 https://phab.mercurial-scm.org/D6055 D6055 + 0 https://phab.mercurial-scm.org/D6054 D6054 $ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-phase-archived.t Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,143 @@ +========================================================= +Test features and behaviors related to the archived phase +========================================================= + + $ cat << EOF >> $HGRCPATH + > [format] + > internal-phase=yes + > [extensions] + > strip= + > [experimental] + > EOF + + $ hg init repo + $ cd repo + $ echo root > a + $ hg add a + $ hg ci -m 'root' + +Test that bundle can unarchive a changeset +------------------------------------------ + + $ echo foo >> a + $ hg st + M a + $ hg ci -m 'unbundletesting' + $ hg log -G + @ changeset: 1:883aadbbf309 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: unbundletesting + | + o changeset: 0:c1863a3840c6 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: root + + $ hg strip --soft --rev '.' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + saved backup bundle to $TESTTMP/repo/.hg/strip-backup/883aadbbf309-efc55adc-backup.hg + $ hg log -G + @ changeset: 0:c1863a3840c6 + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: root + + $ hg log -G --hidden + o changeset: 1:883aadbbf309 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: unbundletesting + | + @ changeset: 0:c1863a3840c6 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: root + + $ hg unbundle .hg/strip-backup/883aadbbf309-efc55adc-backup.hg + adding changesets + adding manifests + adding file changes + added 0 changesets with 0 changes to 1 files + (run 'hg update' to get a working copy) + $ hg log -G + o changeset: 1:883aadbbf309 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: unbundletesting + | + @ changeset: 0:c1863a3840c6 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: root + + +Test that history rewriting command can use the 
archived phase when allowed to +------------------------------------------------------------------------------ + + $ hg up 'desc(unbundletesting)' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo bar >> a + $ hg commit --amend --config experimental.cleanup-as-archived=yes + $ hg log -G + @ changeset: 2:d1e73e428f29 + | tag: tip + | parent: 0:c1863a3840c6 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: unbundletesting + | + o changeset: 0:c1863a3840c6 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: root + + $ hg log -G --hidden + @ changeset: 2:d1e73e428f29 + | tag: tip + | parent: 0:c1863a3840c6 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: unbundletesting + | + | o changeset: 1:883aadbbf309 + |/ user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: unbundletesting + | + o changeset: 0:c1863a3840c6 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: root + + $ ls -1 .hg/strip-backup/ + 883aadbbf309-efc55adc-amend.hg + 883aadbbf309-efc55adc-backup.hg + $ hg unbundle .hg/strip-backup/883aadbbf309*amend.hg + adding changesets + adding manifests + adding file changes + added 0 changesets with 0 changes to 1 files + (run 'hg update' to get a working copy) + $ hg log -G + @ changeset: 2:d1e73e428f29 + | tag: tip + | parent: 0:c1863a3840c6 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: unbundletesting + | + | o changeset: 1:883aadbbf309 + |/ user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: unbundletesting + | + o changeset: 0:c1863a3840c6 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: root +
--- a/tests/test-purge.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-purge.t Tue Mar 19 16:36:59 2019 +0300 @@ -52,7 +52,7 @@ $ "$PYTHON" <<EOF > import os > import stat - > f= 'untracked_file_readonly' + > f = 'untracked_file_readonly' > os.chmod(f, stat.S_IMODE(os.stat(f).st_mode) & ~stat.S_IWRITE) > EOF $ hg purge -p
--- a/tests/test-push-http.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-push-http.t Tue Mar 19 16:36:59 2019 +0300 @@ -74,8 +74,8 @@ $ cat >> .hg/hgrc <<EOF > allow_push = * > [hooks] - > changegroup = sh -c "printenv.py changegroup 0" - > pushkey = sh -c "printenv.py pushkey 0" + > changegroup = sh -c "printenv.py --line changegroup 0" + > pushkey = sh -c "printenv.py --line pushkey 0" > txnclose-phase.test = sh $TESTTMP/hook.sh > EOF $ req "--debug --config extensions.blackbox=" @@ -94,8 +94,17 @@ remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: running hook txnclose-phase.test: sh $TESTTMP/hook.sh remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: running hook changegroup: sh -c "printenv.py changegroup 0" - remote: changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: running hook changegroup: sh -c "printenv.py --line changegroup 0" + remote: changegroup hook: HG_HOOKNAME=changegroup + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: remote:http:$LOCALIP: (glob) + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -114,8 +123,17 @@ remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: running hook txnclose-phase.test: sh $TESTTMP/hook.sh remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: running hook changegroup: sh -c "printenv.py changegroup 0" - remote: changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup 
HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: running hook changegroup: sh -c "printenv.py --line changegroup 0" + remote: changegroup hook: HG_HOOKNAME=changegroup + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: remote:http:$LOCALIP: (glob) + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -125,8 +143,8 @@ $ cat >> .hg/hgrc <<EOF > allow_push = * > [hooks] - > changegroup = sh -c "printenv.py changegroup 0" - > pushkey = sh -c "printenv.py pushkey 0" + > changegroup = sh -c "printenv.py --line changegroup 0" + > pushkey = sh -c "printenv.py --line pushkey 0" > txnclose-phase.test = sh $TESTTMP/hook.sh > EOF $ req @@ -138,7 +156,16 @@ remote: added 1 changesets with 1 changes to 1 files remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: changegroup hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: changegroup hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=changegroup + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -157,8 
+184,18 @@ remote: added 1 changesets with 1 changes to 1 files remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) (bundle1 !) - remote: changegroup hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) (bundle2 !) + remote: changegroup hook: HG_HOOKNAME=changegroup (no-bundle2 !) + remote: changegroup hook: HG_BUNDLE2=1 (bundle2 !) + remote: HG_HOOKNAME=changegroup (bundle2 !) + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: remote:http:$LOCALIP: (glob) (no-bundle2 !) + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -176,8 +213,18 @@ remote: added 1 changesets with 1 changes to 1 files remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) (bundle1 !) 
- remote: changegroup hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) (bundle2 !) + remote: changegroup hook: HG_HOOKNAME=changegroup (no-bundle2 !) + remote: changegroup hook: HG_BUNDLE2=1 (bundle2 !) + remote: HG_HOOKNAME=changegroup (bundle2 !) + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: remote:http:$LOCALIP: (glob) (no-bundle2 !) + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -209,6 +256,16 @@ remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public remote: changegroup hook: * (glob) + remote: HG_HOOKNAME=changegroup (bundle2 !) + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: remote:http:$LOCALIP: (glob) (no-bundle2 !) 
+ remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -221,7 +278,7 @@ > push_ssl = false > allow_push = * > [hooks] - > prepushkey = sh -c "printenv.py prepushkey 1" + > prepushkey = sh -c "printenv.py --line prepushkey 1" > [devel] > legacy.exchange=phases > EOF @@ -253,7 +310,22 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - remote: prepushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: prepushkey hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_OLD=1 + remote: HG_PENDING=$TESTTMP/test + remote: HG_PHASES_MOVED=1 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: remote: pushkey-abort: prepushkey hook exited with status 1 remote: transaction abort! remote: rollback completed @@ -267,7 +339,7 @@ $ cat >> .hg/hgrc <<EOF > [hooks] - > prepushkey = sh -c "printenv.py prepushkey 0" + > prepushkey = sh -c "printenv.py --line prepushkey 0" > EOF We don't need to test bundle1 because it succeeded above. 
@@ -280,7 +352,22 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - remote: prepushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: prepushkey hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_OLD=1 + remote: HG_PENDING=$TESTTMP/test + remote: HG_PHASES_MOVED=1 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors #endif @@ -293,7 +380,7 @@ > [phases] > publish = false > [hooks] - > prepushkey = sh -c "printenv.py prepushkey 1" + > prepushkey = sh -c "printenv.py --line prepushkey 1" > EOF #if bundle1 @@ -304,7 +391,13 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - remote: prepushkey hook: HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 + remote: prepushkey hook: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_OLD=1 + remote: remote: pushkey-abort: prepushkey hook exited with status 1 updating ba677d0156c1 to public failed! 
% serve errors @@ -318,7 +411,22 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - remote: prepushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: prepushkey hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_OLD=1 + remote: HG_PENDING=$TESTTMP/test + remote: HG_PHASES_MOVED=1 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: remote: pushkey-abort: prepushkey hook exited with status 1 remote: transaction abort! 
remote: rollback completed @@ -331,7 +439,7 @@ $ cat >> .hg/hgrc <<EOF > [hooks] - > prepushkey = sh -c "printenv.py prepushkey 0" + > prepushkey = sh -c "printenv.py --line prepushkey 0" > EOF #if bundle1 @@ -339,7 +447,13 @@ pushing to http://localhost:$HGPORT/ searching for changes no changes found - remote: prepushkey hook: HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 + remote: prepushkey hook: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_OLD=1 + remote: % serve errors [1] #endif @@ -352,7 +466,22 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - remote: prepushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: prepushkey hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_OLD=1 + remote: HG_PENDING=$TESTTMP/test + remote: HG_PHASES_MOVED=1 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_TXNNAME=serve + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors #endif
--- a/tests/test-qrecord.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-qrecord.t Tue Mar 19 16:36:59 2019 +0300 @@ -422,3 +422,43 @@ $ hg diff --nodates $ cd .. + +qrecord should throw an error when histedit in process + + $ hg init issue5981 + $ cd issue5981 + $ cat >> $HGRCPATH <<EOF + > [extensions] + > histedit= + > mq= + > EOF + $ echo > a + $ hg ci -Am 'foo bar' + adding a + $ hg log + changeset: 0:ea55e2ae468f + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: foo bar + + $ hg histedit tip --commands - 2>&1 <<EOF + > edit ea55e2ae468f foo bar + > EOF + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + adding a + Editing (ea55e2ae468f), you may commit or record as needed now. + (hg histedit --continue to resume) + [1] + $ echo 'foo bar' > a + $ hg qrecord -d '0 0' -m aaa a.patch <<EOF + > y + > y + > n + > y + > y + > n + > EOF + abort: histedit in progress + (use 'hg histedit --continue' or 'hg histedit --abort') + [255]
--- a/tests/test-rebase-inmemory.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-rebase-inmemory.t Tue Mar 19 16:36:59 2019 +0300 @@ -240,19 +240,19 @@ |/ o 0: b173517d0057 'a' - $ mkdir c - $ echo c > c/c - $ hg add c/c - $ hg ci -m 'c/c' + $ mkdir -p c/subdir + $ echo c > c/subdir/file.txt + $ hg add c/subdir/file.txt + $ hg ci -m 'c/subdir/file.txt' $ hg rebase -r . -d 3 -n starting dry-run rebase; repository will not be changed - rebasing 8:755f0104af9b "c/c" (tip) - abort: error: 'c/c' conflicts with file 'c' in 3. + rebasing 8:e147e6e3c490 "c/subdir/file.txt" (tip) + abort: error: 'c/subdir/file.txt' conflicts with file 'c' in 3. [255] $ hg rebase -r 3 -d . -n starting dry-run rebase; repository will not be changed rebasing 3:844a7de3e617 "c" - abort: error: file 'c' cannot be written because 'c/' is a folder in 755f0104af9b (containing 1 entries: c/c) + abort: error: file 'c' cannot be written because 'c/' is a folder in e147e6e3c490 (containing 1 entries: c/subdir/file.txt) [255] $ cd .. @@ -718,3 +718,45 @@ diff --git a/foo.txt b/foo.txt old mode 100644 new mode 100755 + +Test rebasing a commit with copy information, but no content changes + + $ cd .. 
+ $ hg clone -q repo1 merge-and-rename + $ cd merge-and-rename + $ cat << EOF >> .hg/hgrc + > [experimental] + > evolution.createmarkers=True + > evolution.allowunstable=True + > EOF + $ hg co -q 1 + $ hg mv d e + $ hg ci -qm 'rename d to e' + $ hg co -q 3 + $ hg merge -q 4 + $ hg ci -m 'merge' + $ hg co -q 2 + $ mv d e + $ hg addremove -qs 0 + $ hg ci -qm 'untracked rename of d to e' + $ hg debugobsolete -q `hg log -T '{node}' -r 4` `hg log -T '{node}' -r .` + 1 new orphan changesets + $ hg tglog + @ 6: 676538af172d 'untracked rename of d to e' + | + | * 5: 71cb43376053 'merge' + | |\ + | | x 4: 2c8b5dad7956 'rename d to e' + | | | + | o | 3: ca58782ad1e4 'b' + |/ / + o / 2: 814f6bd05178 'c' + |/ + o 1: 02952614a83d 'd' + | + o 0: b173517d0057 'a' + + $ hg rebase -b 5 -d tip + rebasing 3:ca58782ad1e4 "b" + rebasing 5:71cb43376053 "merge" + note: not rebasing 5:71cb43376053 "merge", its destination already has all its changes
--- a/tests/test-remotefilelog-bgprefetch.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-remotefilelog-bgprefetch.t Tue Mar 19 16:36:59 2019 +0300 @@ -105,6 +105,7 @@ $ hg debugwaitonprefetch >/dev/null 2>%1 $ sleep 0.5 $ hg debugwaitonrepack >/dev/null 2>%1 + $ sleep 0.5 $ find $CACHEDIR -type f | sort $TESTTMP/hgcache/master/packs/6e8633deba6e544e5f8edbd7b996d6e31a2c42ae.histidx $TESTTMP/hgcache/master/packs/6e8633deba6e544e5f8edbd7b996d6e31a2c42ae.histpack @@ -141,6 +142,7 @@ $ hg debugwaitonprefetch >/dev/null 2>%1 $ sleep 1 $ hg debugwaitonrepack >/dev/null 2>%1 + $ sleep 1 $ find $CACHEDIR -type f | sort $TESTTMP/hgcache/master/packs/8f1443d44e57fec96f72fb2412e01d2818767ef2.histidx $TESTTMP/hgcache/master/packs/8f1443d44e57fec96f72fb2412e01d2818767ef2.histpack @@ -193,6 +195,7 @@ $ hg debugwaitonprefetch >/dev/null 2>%1 $ sleep 1 $ hg debugwaitonrepack >/dev/null 2>%1 + $ sleep 1 $ find $CACHEDIR -type f | sort $TESTTMP/hgcache/master/packs/8f1443d44e57fec96f72fb2412e01d2818767ef2.histidx $TESTTMP/hgcache/master/packs/8f1443d44e57fec96f72fb2412e01d2818767ef2.histpack @@ -243,6 +246,7 @@ $ hg debugwaitonprefetch >/dev/null 2>%1 $ sleep 1 $ hg debugwaitonrepack >/dev/null 2>%1 + $ sleep 1 # Ensure that file 'y' was prefetched - it was not part of the rebase operation and therefore # could only be downloaded by the background prefetch @@ -284,6 +288,7 @@ $ sleep 0.5 $ hg debugwaitonrepack >/dev/null 2>%1 + $ sleep 0.5 $ find $CACHEDIR -type f | sort $TESTTMP/hgcache/master/packs/8f1443d44e57fec96f72fb2412e01d2818767ef2.histidx @@ -328,6 +333,7 @@ * files fetched over 1 fetches - (* misses, 0.00% hit ratio) over *s (glob) (?) $ sleep 0.5 $ hg debugwaitonrepack >/dev/null 2>%1 + $ sleep 0.5 $ find $CACHEDIR -type f | sort $TESTTMP/hgcache/master/packs/8f1443d44e57fec96f72fb2412e01d2818767ef2.histidx
--- a/tests/test-remotefilelog-cacheprocess.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-remotefilelog-cacheprocess.t Tue Mar 19 16:36:59 2019 +0300 @@ -56,11 +56,11 @@ > log('requested %r\n' % key) > sys.stdout.flush() > elif cmd == 'set': - > assert False, 'todo writing' + > raise Exception('todo writing') > else: - > assert False, 'unknown command! %r' % cmd + > raise Exception('unknown command! %r' % cmd) > except Exception as e: - > log('Exception! %r\n' % e) + > log('Exception! %s\n' % e) > raise > EOF @@ -79,7 +79,7 @@ requested 'master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a' requested 'master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca' got command 'set' - Exception! AssertionError('todo writing',) + Exception! todo writing Test cache hits. $ mv hgcache oldhgcache @@ -110,7 +110,7 @@ requested 'y\x00master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca' requested 'z\x00master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a' got command 'set' - Exception! AssertionError('todo writing',) + Exception! todo writing Test cache hits with includepath. $ mv hgcache oldhgcache
--- a/tests/test-remotefilelog-datapack.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-remotefilelog-datapack.py Tue Mar 19 16:36:59 2019 +0300 @@ -40,7 +40,7 @@ shutil.rmtree(d) def makeTempDir(self): - tempdir = tempfile.mkdtemp() + tempdir = pycompat.bytestr(tempfile.mkdtemp()) self.tempdirs.append(tempdir) return tempdir @@ -48,11 +48,12 @@ return hashlib.sha1(content).digest() def getFakeHash(self): - return ''.join(chr(random.randint(0, 255)) for _ in range(20)) + return b''.join(pycompat.bytechr(random.randint(0, 255)) + for _ in range(20)) def createPack(self, revisions=None, packdir=None): if revisions is None: - revisions = [("filename", self.getFakeHash(), nullid, "content")] + revisions = [(b"filename", self.getFakeHash(), nullid, b"content")] if packdir is None: packdir = self.makeTempDir() @@ -73,23 +74,23 @@ def _testAddSingle(self, content): """Test putting a simple blob into a pack and reading it out. """ - filename = "foo" + filename = b"foo" node = self.getHash(content) revisions = [(filename, node, nullid, content)] pack = self.createPack(revisions) if self.paramsavailable: - self.assertEquals(pack.params.fanoutprefix, - basepack.SMALLFANOUTPREFIX) + self.assertEqual(pack.params.fanoutprefix, + basepack.SMALLFANOUTPREFIX) chain = pack.getdeltachain(filename, node) - self.assertEquals(content, chain[0][4]) + self.assertEqual(content, chain[0][4]) def testAddSingle(self): - self._testAddSingle('') + self._testAddSingle(b'') def testAddSingleEmpty(self): - self._testAddSingle('abcdef') + self._testAddSingle(b'abcdef') def testAddMultiple(self): """Test putting multiple unrelated blobs into a pack and reading them @@ -97,8 +98,8 @@ """ revisions = [] for i in range(10): - filename = "foo%s" % i - content = "abcdef%s" % i + filename = b"foo%d" % i + content = b"abcdef%d" % i node = self.getHash(content) revisions.append((filename, node, self.getFakeHash(), content)) @@ -106,19 +107,19 @@ for filename, node, base, content in revisions: entry = 
pack.getdelta(filename, node) - self.assertEquals((content, filename, base, {}), entry) + self.assertEqual((content, filename, base, {}), entry) chain = pack.getdeltachain(filename, node) - self.assertEquals(content, chain[0][4]) + self.assertEqual(content, chain[0][4]) def testAddDeltas(self): """Test putting multiple delta blobs into a pack and read the chain. """ revisions = [] - filename = "foo" + filename = b"foo" lastnode = nullid for i in range(10): - content = "abcdef%s" % i + content = b"abcdef%d" % i node = self.getHash(content) revisions.append((filename, node, lastnode, content)) lastnode = node @@ -127,13 +128,13 @@ entry = pack.getdelta(filename, revisions[0][1]) realvalue = (revisions[0][3], filename, revisions[0][2], {}) - self.assertEquals(entry, realvalue) + self.assertEqual(entry, realvalue) # Test that the chain for the final entry has all the others chain = pack.getdeltachain(filename, node) for i in range(10): - content = "abcdef%s" % i - self.assertEquals(content, chain[-i - 1][4]) + content = b"abcdef%d" % i + self.assertEqual(content, chain[-i - 1][4]) def testPackMany(self): """Pack many related and unrelated objects. 
@@ -143,10 +144,10 @@ blobs = {} random.seed(0) for i in range(100): - filename = "filename-%s" % i + filename = b"filename-%d" % i filerevs = [] for j in range(random.randint(1, 100)): - content = "content-%s" % j + content = b"content-%d" % j node = self.getHash(content) lastnode = nullid if len(filerevs) > 0: @@ -158,22 +159,22 @@ pack = self.createPack(revisions) # Verify the pack contents - for (filename, node, lastnode), content in sorted(blobs.iteritems()): + for (filename, node, lastnode), content in sorted(blobs.items()): chain = pack.getdeltachain(filename, node) for entry in chain: expectedcontent = blobs[(entry[0], entry[1], entry[3])] - self.assertEquals(entry[4], expectedcontent) + self.assertEqual(entry[4], expectedcontent) def testPackMetadata(self): revisions = [] for i in range(100): - filename = '%s.txt' % i - content = 'put-something-here \n' * i + filename = b'%d.txt' % i + content = b'put-something-here \n' * i node = self.getHash(content) meta = {constants.METAKEYFLAG: i ** 4, constants.METAKEYSIZE: len(content), - 'Z': 'random_string', - '_': '\0' * i} + b'Z': b'random_string', + b'_': b'\0' * i} revisions.append((filename, node, nullid, content, meta)) pack = self.createPack(revisions) for name, node, x, content, origmeta in revisions: @@ -181,50 +182,51 @@ # flag == 0 should be optimized out if origmeta[constants.METAKEYFLAG] == 0: del origmeta[constants.METAKEYFLAG] - self.assertEquals(parsedmeta, origmeta) + self.assertEqual(parsedmeta, origmeta) def testGetMissing(self): """Test the getmissing() api. 
""" revisions = [] - filename = "foo" + filename = b"foo" lastnode = nullid for i in range(10): - content = "abcdef%s" % i + content = b"abcdef%d" % i node = self.getHash(content) revisions.append((filename, node, lastnode, content)) lastnode = node pack = self.createPack(revisions) - missing = pack.getmissing([("foo", revisions[0][1])]) + missing = pack.getmissing([(b"foo", revisions[0][1])]) self.assertFalse(missing) - missing = pack.getmissing([("foo", revisions[0][1]), - ("foo", revisions[1][1])]) + missing = pack.getmissing([(b"foo", revisions[0][1]), + (b"foo", revisions[1][1])]) self.assertFalse(missing) fakenode = self.getFakeHash() - missing = pack.getmissing([("foo", revisions[0][1]), ("foo", fakenode)]) - self.assertEquals(missing, [("foo", fakenode)]) + missing = pack.getmissing([(b"foo", revisions[0][1]), + (b"foo", fakenode)]) + self.assertEqual(missing, [(b"foo", fakenode)]) def testAddThrows(self): pack = self.createPack() try: - pack.add('filename', nullid, 'contents') + pack.add(b'filename', nullid, b'contents') self.assertTrue(False, "datapack.add should throw") except RuntimeError: pass def testBadVersionThrows(self): pack = self.createPack() - path = pack.path + '.datapack' - with open(path) as f: + path = pack.path + b'.datapack' + with open(path, 'rb') as f: raw = f.read() raw = struct.pack('!B', 255) + raw[1:] os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE) - with open(path, 'w+') as f: + with open(path, 'wb+') as f: f.write(raw) try: @@ -235,10 +237,10 @@ def testMissingDeltabase(self): fakenode = self.getFakeHash() - revisions = [("filename", fakenode, self.getFakeHash(), "content")] + revisions = [(b"filename", fakenode, self.getFakeHash(), b"content")] pack = self.createPack(revisions) - chain = pack.getdeltachain("filename", fakenode) - self.assertEquals(len(chain), 1) + chain = pack.getdeltachain(b"filename", fakenode) + self.assertEqual(len(chain), 1) def testLargePack(self): """Test creating and reading from a large pack with 
over X entries. @@ -247,7 +249,7 @@ blobs = {} total = basepack.SMALLFANOUTCUTOFF + 1 for i in pycompat.xrange(total): - filename = "filename-%s" % i + filename = b"filename-%d" % i content = filename node = self.getHash(content) blobs[(filename, node)] = content @@ -255,12 +257,12 @@ pack = self.createPack(revisions) if self.paramsavailable: - self.assertEquals(pack.params.fanoutprefix, - basepack.LARGEFANOUTPREFIX) + self.assertEqual(pack.params.fanoutprefix, + basepack.LARGEFANOUTPREFIX) - for (filename, node), content in blobs.iteritems(): + for (filename, node), content in blobs.items(): actualcontent = pack.getdeltachain(filename, node)[0][4] - self.assertEquals(actualcontent, content) + self.assertEqual(actualcontent, content) def testPacksCache(self): """Test that we remember the most recent packs while fetching the delta @@ -274,12 +276,12 @@ for i in range(numpacks): chain = [] - revision = (str(i), self.getFakeHash(), nullid, "content") + revision = (b'%d' % i, self.getFakeHash(), nullid, b"content") for _ in range(revisionsperpack): chain.append(revision) revision = ( - str(i), + b'%d' % i, self.getFakeHash(), revision[1], self.getFakeHash() @@ -290,7 +292,7 @@ class testdatapackstore(datapack.datapackstore): # Ensures that we are not keeping everything in the cache. 
- DEFAULTCACHESIZE = numpacks / 2 + DEFAULTCACHESIZE = numpacks // 2 store = testdatapackstore(uimod.ui(), packdir) @@ -300,12 +302,12 @@ chain = store.getdeltachain(revision[0], revision[1]) mostrecentpack = next(iter(store.packs), None) - self.assertEquals( + self.assertEqual( mostrecentpack.getdeltachain(revision[0], revision[1]), chain ) - self.assertEquals(randomchain.index(revision) + 1, len(chain)) + self.assertEqual(randomchain.index(revision) + 1, len(chain)) # perf test off by default since it's slow def _testIndexPerf(self): @@ -330,8 +332,8 @@ for packsize in packsizes: revisions = [] for i in pycompat.xrange(packsize): - filename = "filename-%s" % i - content = "content-%s" % i + filename = b"filename-%d" % i + content = b"content-%d" % i node = self.getHash(content) revisions.append((filename, node, nullid, content)) @@ -350,9 +352,9 @@ start = time.time() pack.getmissing(findnodes[:lookupsize]) elapsed = time.time() - start - print ("%s pack %s lookups = %0.04f" % - (('%s' % packsize).rjust(7), - ('%s' % lookupsize).rjust(7), + print ("%s pack %d lookups = %0.04f" % + (('%d' % packsize).rjust(7), + ('%d' % lookupsize).rjust(7), elapsed)) print("")
--- a/tests/test-remotefilelog-gc.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-remotefilelog-gc.t Tue Mar 19 16:36:59 2019 +0300 @@ -107,6 +107,6 @@ # Test that warning is displayed when the repo path is malformed $ printf "asdas\0das" >> $CACHEDIR/repos - $ hg gc 2>&1 | head -n2 - warning: malformed path: * (glob) - Traceback (most recent call last): + $ hg gc + abort: invalid path asdas\x00da: .*(null|NULL).* (re) + [255]
--- a/tests/test-remotefilelog-histpack.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-remotefilelog-histpack.py Tue Mar 19 16:36:59 2019 +0300 @@ -52,7 +52,7 @@ node, p1node, p2node, and linknode. """ if revisions is None: - revisions = [("filename", self.getFakeHash(), nullid, nullid, + revisions = [(b"filename", self.getFakeHash(), nullid, nullid, self.getFakeHash(), None)] packdir = pycompat.fsencode(self.makeTempDir()) @@ -68,7 +68,7 @@ def testAddSingle(self): """Test putting a single entry into a pack and reading it out. """ - filename = "foo" + filename = b"foo" node = self.getFakeHash() p1 = self.getFakeHash() p2 = self.getFakeHash() @@ -78,9 +78,9 @@ pack = self.createPack(revisions) actual = pack.getancestors(filename, node)[node] - self.assertEquals(p1, actual[0]) - self.assertEquals(p2, actual[1]) - self.assertEquals(linknode, actual[2]) + self.assertEqual(p1, actual[0]) + self.assertEqual(p2, actual[1]) + self.assertEqual(linknode, actual[2]) def testAddMultiple(self): """Test putting multiple unrelated revisions into a pack and reading @@ -88,7 +88,7 @@ """ revisions = [] for i in range(10): - filename = "foo-%s" % i + filename = b"foo-%d" % i node = self.getFakeHash() p1 = self.getFakeHash() p2 = self.getFakeHash() @@ -99,10 +99,10 @@ for filename, node, p1, p2, linknode, copyfrom in revisions: actual = pack.getancestors(filename, node)[node] - self.assertEquals(p1, actual[0]) - self.assertEquals(p2, actual[1]) - self.assertEquals(linknode, actual[2]) - self.assertEquals(copyfrom, actual[3]) + self.assertEqual(p1, actual[0]) + self.assertEqual(p2, actual[1]) + self.assertEqual(linknode, actual[2]) + self.assertEqual(copyfrom, actual[3]) def testAddAncestorChain(self): """Test putting multiple revisions in into a pack and read the ancestor @@ -124,10 +124,10 @@ ancestors = pack.getancestors(revisions[0][0], revisions[0][1]) for filename, node, p1, p2, linknode, copyfrom in revisions: ap1, ap2, alinknode, acopyfrom = ancestors[node] - 
self.assertEquals(ap1, p1) - self.assertEquals(ap2, p2) - self.assertEquals(alinknode, linknode) - self.assertEquals(acopyfrom, copyfrom) + self.assertEqual(ap1, p1) + self.assertEqual(ap2, p2) + self.assertEqual(alinknode, linknode) + self.assertEqual(acopyfrom, copyfrom) def testPackMany(self): """Pack many related and unrelated ancestors. @@ -161,16 +161,16 @@ pack = self.createPack(revisions) # Verify the pack contents - for (filename, node), (p1, p2, lastnode) in allentries.items(): + for (filename, node) in allentries: ancestors = pack.getancestors(filename, node) - self.assertEquals(ancestorcounts[(filename, node)], - len(ancestors)) + self.assertEqual(ancestorcounts[(filename, node)], + len(ancestors)) for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items(): ep1, ep2, elinknode = allentries[(filename, anode)] - self.assertEquals(ap1, ep1) - self.assertEquals(ap2, ep2) - self.assertEquals(alinknode, elinknode) - self.assertEquals(copyfrom, None) + self.assertEqual(ap1, ep1) + self.assertEqual(ap2, ep2) + self.assertEqual(alinknode, elinknode) + self.assertEqual(copyfrom, None) def testGetNodeInfo(self): revisions = [] @@ -186,10 +186,10 @@ # Test that getnodeinfo returns the expected results for filename, node, p1, p2, linknode, copyfrom in revisions: ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node) - self.assertEquals(ap1, p1) - self.assertEquals(ap2, p2) - self.assertEquals(alinknode, linknode) - self.assertEquals(acopyfrom, copyfrom) + self.assertEqual(ap1, p1) + self.assertEqual(ap2, p2) + self.assertEqual(alinknode, linknode) + self.assertEqual(acopyfrom, copyfrom) def testGetMissing(self): """Test the getmissing() api. 
@@ -215,11 +215,11 @@ fakenode = self.getFakeHash() missing = pack.getmissing([(filename, revisions[0][1]), (filename, fakenode)]) - self.assertEquals(missing, [(filename, fakenode)]) + self.assertEqual(missing, [(filename, fakenode)]) # Test getmissing on a non-existant filename - missing = pack.getmissing([("bar", fakenode)]) - self.assertEquals(missing, [("bar", fakenode)]) + missing = pack.getmissing([(b"bar", fakenode)]) + self.assertEqual(missing, [(b"bar", fakenode)]) def testAddThrows(self): pack = self.createPack() @@ -232,12 +232,12 @@ def testBadVersionThrows(self): pack = self.createPack() - path = pack.path + '.histpack' - with open(path) as f: + path = pack.path + b'.histpack' + with open(path, 'rb') as f: raw = f.read() raw = struct.pack('!B', 255) + raw[1:] os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE) - with open(path, 'w+') as f: + with open(path, 'wb+') as f: f.write(raw) try: @@ -260,14 +260,14 @@ revisions.append((filename, node, p1, p2, linknode, None)) pack = self.createPack(revisions) - self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX) + self.assertEqual(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX) for filename, node, p1, p2, linknode, copyfrom in revisions: actual = pack.getancestors(filename, node)[node] - self.assertEquals(p1, actual[0]) - self.assertEquals(p2, actual[1]) - self.assertEquals(linknode, actual[2]) - self.assertEquals(copyfrom, actual[3]) + self.assertEqual(p1, actual[0]) + self.assertEqual(p2, actual[1]) + self.assertEqual(linknode, actual[2]) + self.assertEqual(copyfrom, actual[3]) # TODO: # histpack store: # - repack two packs into one
--- a/tests/test-removeemptydirs.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-removeemptydirs.t Tue Mar 19 16:36:59 2019 +0300 @@ -265,91 +265,3 @@ 0:d17db4b0303a add bar $ cd $TESTTMP - -Testing `hg split` being run from inside of a directory that was created in the -commit being split: - - $ hg init hgsplit - $ cd hgsplit - $ cat >> .hg/hgrc << EOF - > [ui] - > interactive = 1 - > [extensions] - > split = - > EOF - $ echo anchor > anchor.txt - $ hg ci -qAm anchor - -Create a changeset with '/otherfile_in_root' and 'somedir/foo', then try to -split it. - $ echo otherfile > otherfile_in_root - $ mkdir somedir - $ cd somedir - $ echo hi > foo - $ hg ci -qAm split_me -(Note: need to make this file not in this directory, or else the bug doesn't -reproduce; we're using a separate file due to concerns of portability on -`echo -e`) - $ cat > ../split_commands << EOF - > n - > y - > y - > a - > EOF - -The split succeeds on no-rmcwd platforms, which alters the rest of the tests -#if rmcwd - $ cat ../split_commands | hg split - current directory was removed - (consider changing to repo root: $TESTTMP/hgsplit) - diff --git a/otherfile_in_root b/otherfile_in_root - new file mode 100644 - examine changes to 'otherfile_in_root'? [Ynesfdaq?] n - - diff --git a/somedir/foo b/somedir/foo - new file mode 100644 - examine changes to 'somedir/foo'? [Ynesfdaq?] y - - @@ -0,0 +1,1 @@ - +hi - record change 2/2 to 'somedir/foo'? [Ynesfdaq?] y - - abort: $ENOENT$ - [255] -#endif - -Let's try that again without the rmdir - $ cd $TESTTMP/hgsplit/somedir -Show that the previous split didn't do anything - $ hg log -T '{rev}:{node|short} {desc}\n' - 1:e26b22a4f0b7 split_me - 0:7e53273730c0 anchor - $ hg status - ? split_commands -Try again - $ cat ../split_commands | hg $NO_RM split - diff --git a/otherfile_in_root b/otherfile_in_root - new file mode 100644 - examine changes to 'otherfile_in_root'? [Ynesfdaq?] 
n - - diff --git a/somedir/foo b/somedir/foo - new file mode 100644 - examine changes to 'somedir/foo'? [Ynesfdaq?] y - - @@ -0,0 +1,1 @@ - +hi - record change 2/2 to 'somedir/foo'? [Ynesfdaq?] y - - created new head - diff --git a/otherfile_in_root b/otherfile_in_root - new file mode 100644 - examine changes to 'otherfile_in_root'? [Ynesfdaq?] a - - saved backup bundle to $TESTTMP/hgsplit/.hg/strip-backup/*-split.hg (glob) -Show that this split did something - $ hg log -T '{rev}:{node|short} {desc}\n' - 2:a440f24fca4f split_me - 1:c994f20276ab split_me - 0:7e53273730c0 anchor - $ hg status - ? split_commands
--- a/tests/test-repair-strip.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-repair-strip.t Tue Mar 19 16:36:59 2019 +0300 @@ -53,7 +53,7 @@ rollback failed - please run hg recover (failure reason: [Errno 13] Permission denied .hg/store/data/b.i') strip failed, backup bundle - abort: Permission denied .hg/store/data/b.i + abort: Permission denied .hg/store/data/b.i' % after update 0, strip 2 abandoned transaction found - run hg recover checking changesets @@ -85,7 +85,7 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: a - abort: Permission denied .hg/store/data/b.i + abort: Permission denied .hg/store/data/b.i' % after update 0, strip 2 checking changesets checking manifests @@ -107,7 +107,7 @@ rollback failed - please run hg recover (failure reason: [Errno 13] Permission denied .hg/store/00manifest.i') strip failed, backup bundle - abort: Permission denied .hg/store/00manifest.i + abort: Permission denied .hg/store/00manifest.i' % after update 0, strip 2 abandoned transaction found - run hg recover checking changesets
--- a/tests/test-resolve.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-resolve.t Tue Mar 19 16:36:59 2019 +0300 @@ -67,6 +67,9 @@ $ hg resolve -l R file1 U file2 + $ hg resolve -l --config ui.relative-paths=yes + R ../file1 + U ../file2 $ hg resolve --re-merge filez file2 arguments do not match paths that need resolving (try: hg resolve --re-merge path:filez path:file2)
--- a/tests/test-revert-interactive.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-revert-interactive.t Tue Mar 19 16:36:59 2019 +0300 @@ -424,3 +424,24 @@ b: no such file in rev b40d1912accf $ cd .. + +Prompt before undeleting file(issue6008) + $ hg init repo + $ cd repo + $ echo a > a + $ hg ci -qAm a + $ hg rm a + $ hg revert -i<<EOF + > y + > EOF + add back removed file a (Yn)? y + undeleting a + $ ls + a + $ hg rm a + $ hg revert -i<<EOF + > n + > EOF + add back removed file a (Yn)? n + $ ls + $ cd ..
--- a/tests/test-revert.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-revert.t Tue Mar 19 16:36:59 2019 +0300 @@ -92,7 +92,7 @@ $ echo z > e $ hg revert --all -v --config 'ui.origbackuppath=.hg/origbackups' creating directory: $TESTTMP/repo/.hg/origbackups - saving current version of e as $TESTTMP/repo/.hg/origbackups/e + saving current version of e as .hg/origbackups/e reverting e $ rm -rf .hg/origbackups @@ -289,6 +289,23 @@ $ hg revert . reverting b/b +respects ui.relative-paths +-------------------------- + + $ echo foo > newdir/newfile + $ hg add newdir/newfile + $ hg revert --all --cwd newdir + forgetting newfile + + $ echo foo > newdir/newfile + $ hg add newdir/newfile + $ hg revert --all --cwd newdir --config ui.relative-paths=True + forgetting newfile + + $ echo foo > newdir/newfile + $ hg add newdir/newfile + $ hg revert --all --cwd newdir --config ui.relative-paths=False + forgetting newdir/newfile reverting a rename target should revert the source --------------------------------------------------
--- a/tests/test-revlog-raw.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-revlog-raw.py Tue Mar 19 16:36:59 2019 +0300 @@ -417,7 +417,6 @@ print(' got: %s' % result15) def maintest(): - expected = rl = None with newtransaction() as tr: rl = newrevlog(recreate=True) expected = writecases(rl, tr)
--- a/tests/test-revset.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-revset.t Tue Mar 19 16:36:59 2019 +0300 @@ -12,9 +12,9 @@ > """ > if 3 not in subset: > if 2 in subset: - > return baseset([2,2]) + > return baseset([2, 2]) > return baseset() - > return baseset([3,3,2,2]) + > return baseset([3, 3, 2, 2]) > > mercurial.revset.symbols[b'r3232'] = r3232 > EOF @@ -643,10 +643,13 @@ [255] $ hg debugrevspec '.#generations[a]' - hg: parse error: relation subscript must be an integer + hg: parse error: relation subscript must be an integer or a range [255] $ hg debugrevspec '.#generations[1-2]' - hg: parse error: relation subscript must be an integer + hg: parse error: relation subscript must be an integer or a range + [255] + $ hg debugrevspec '.#generations[foo:bar]' + hg: parse error: relation subscript bounds must be integers [255] suggested relations @@ -1274,6 +1277,31 @@ $ log '.#g[(-1)]' 8 + $ log '6#generations[0:1]' + 6 + 7 + $ log '6#generations[-1:1]' + 4 + 5 + 6 + 7 + $ log '6#generations[0:]' + 6 + 7 + $ log '5#generations[:0]' + 0 + 1 + 3 + 5 + $ log '3#generations[:]' + 0 + 1 + 3 + 5 + 6 + 7 + $ log 'tip#generations[1:-1]' + $ hg debugrevspec -p parsed 'roots(:)#g[2]' * parsed: (relsubscript @@ -2950,3 +2978,63 @@ * set: <baseset+ [0]> 0 + +abort if the revset doesn't expect given size + $ log 'expectsize()' + hg: parse error: invalid set of arguments + [255] + $ log 'expectsize(0:2, a)' + hg: parse error: expectsize requires a size range or a positive integer + [255] + $ log 'expectsize(0:2, 3)' + 0 + 1 + 2 + + $ log 'expectsize(2:0, 3)' + 2 + 1 + 0 + $ log 'expectsize(0:1, 1)' + abort: revset size mismatch. expected 1, got 2! + [255] + $ log 'expectsize(0:4, -1)' + hg: parse error: negative size + [255] + $ log 'expectsize(0:2, 2:4)' + 0 + 1 + 2 + $ log 'expectsize(0:1, 3:5)' + abort: revset size mismatch. expected between 3 and 5, got 2! 
+ [255] + $ log 'expectsize(0:1, -1:2)' + hg: parse error: negative size + [255] + $ log 'expectsize(0:1, 1:-2)' + hg: parse error: negative size + [255] + $ log 'expectsize(0:2, a:4)' + hg: parse error: size range bounds must be integers + [255] + $ log 'expectsize(0:2, 2:b)' + hg: parse error: size range bounds must be integers + [255] + $ log 'expectsize(0:2, 2:)' + 0 + 1 + 2 + $ log 'expectsize(0:2, :5)' + 0 + 1 + 2 + $ log 'expectsize(0:2, :)' + 0 + 1 + 2 + $ log 'expectsize(0:2, 4:)' + abort: revset size mismatch. expected between 4 and 11, got 3! + [255] + $ log 'expectsize(0:2, :2)' + abort: revset size mismatch. expected between 0 and 2, got 3! + [255]
--- a/tests/test-revset2.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-revset2.t Tue Mar 19 16:36:59 2019 +0300 @@ -1525,8 +1525,8 @@ $ hg init problematicencoding $ cd problematicencoding - $ "$PYTHON" > setup.sh <<EOF - > print(u''' + $ "$PYTHON" <<EOF + > open('setup.sh', 'wb').write(u''' > echo a > text > hg add text > hg --encoding utf-8 commit -u '\u30A2' -m none @@ -1541,8 +1541,8 @@ $ sh < setup.sh test in problematic encoding - $ "$PYTHON" > test.sh <<EOF - > print(u''' + $ "$PYTHON" <<EOF + > open('test.sh', 'wb').write(u''' > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30A2)' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30C2)'
--- a/tests/test-rollback.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-rollback.t Tue Mar 19 16:36:59 2019 +0300 @@ -113,9 +113,9 @@ > echo "another precious commit message" > "$1" > __EOF__ $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg --config hooks.pretxncommit=false commit 2>&1 - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit hook exited with status * (glob) [255] $ cat .hg/last-message.txt
--- a/tests/test-run-tests.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-run-tests.py Tue Mar 19 16:36:59 2019 +0300 @@ -37,8 +37,8 @@ """ assert (expected.endswith(b'\n') and output.endswith(b'\n')), 'missing newline' - assert not re.search(br'[^ \w\\/\r\n()*?]', expected + output), \ - b'single backslash or unknown char' + assert not re.search(br'[^ \w\\/\r\n()*?]', expected + output), ( + b'single backslash or unknown char') test = run_tests.TTest(b'test-run-test.t', b'.', b'.') match, exact = test.linematch(expected, output) if isinstance(match, str):
--- a/tests/test-run-tests.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-run-tests.t Tue Mar 19 16:36:59 2019 +0300 @@ -324,8 +324,8 @@ ERROR: test-failure-unicode.t output changed ! + Failed test-failure-unicode.t: output changed Failed test-failure.t: output changed - Failed test-failure-unicode.t: output changed # Ran 3 tests, 0 skipped, 2 failed. python hash seed: * (glob) [1] @@ -356,8 +356,8 @@ ERROR: test-failure-unicode.t output changed ! + Failed test-failure-unicode.t: output changed Failed test-failure.t: output changed - Failed test-failure-unicode.t: output changed # Ran 3 tests, 0 skipped, 2 failed. python hash seed: * (glob) [1] @@ -393,8 +393,8 @@ ERROR: test-failure-unicode.t output changed ! + Failed test-failure-unicode.t: output changed Failed test-failure.t: output changed - Failed test-failure-unicode.t: output changed # Ran 3 tests, 0 skipped, 2 failed. python hash seed: * (glob) [1] @@ -1174,31 +1174,31 @@ $ cat report.json testreport ={ "test-failure.t": [\{] (re) - "csys": "\s*[\d\.]{4,5}", ? (re) - "cuser": "\s*[\d\.]{4,5}", ? (re) + "csys": "\s*\d+\.\d{3,4}", ? (re) + "cuser": "\s*\d+\.\d{3,4}", ? (re) "diff": "---.+\+\+\+.+", ? (re) - "end": "\s*[\d\.]{4,5}", ? (re) + "end": "\s*\d+\.\d{3,4}", ? (re) "result": "failure", ? (re) - "start": "\s*[\d\.]{4,5}", ? (re) - "time": "\s*[\d\.]{4,5}" (re) + "start": "\s*\d+\.\d{3,4}", ? (re) + "time": "\s*\d+\.\d{3,4}" (re) }, ? (re) "test-skip.t": { - "csys": "\s*[\d\.]{4,5}", ? (re) - "cuser": "\s*[\d\.]{4,5}", ? (re) + "csys": "\s*\d+\.\d{3,4}", ? (re) + "cuser": "\s*\d+\.\d{3,4}", ? (re) "diff": "", ? (re) - "end": "\s*[\d\.]{4,5}", ? (re) + "end": "\s*\d+\.\d{3,4}", ? (re) "result": "skip", ? (re) - "start": "\s*[\d\.]{4,5}", ? (re) - "time": "\s*[\d\.]{4,5}" (re) + "start": "\s*\d+\.\d{3,4}", ? (re) + "time": "\s*\d+\.\d{3,4}" (re) }, ? (re) "test-success.t": [\{] (re) - "csys": "\s*[\d\.]{4,5}", ? (re) - "cuser": "\s*[\d\.]{4,5}", ? (re) + "csys": "\s*\d+\.\d{3,4}", ? 
(re) + "cuser": "\s*\d+\.\d{3,4}", ? (re) "diff": "", ? (re) - "end": "\s*[\d\.]{4,5}", ? (re) + "end": "\s*\d+\.\d{3,4}", ? (re) "result": "success", ? (re) - "start": "\s*[\d\.]{4,5}", ? (re) - "time": "\s*[\d\.]{4,5}" (re) + "start": "\s*\d+\.\d{3,4}", ? (re) + "time": "\s*\d+\.\d{3,4}" (re) } } (no-eol) --json with --outputdir @@ -1231,31 +1231,31 @@ $ cat output/report.json testreport ={ "test-failure.t": [\{] (re) - "csys": "\s*[\d\.]{4,5}", ? (re) - "cuser": "\s*[\d\.]{4,5}", ? (re) + "csys": "\s*\d+\.\d{3,4}", ? (re) + "cuser": "\s*\d+\.\d{3,4}", ? (re) "diff": "---.+\+\+\+.+", ? (re) - "end": "\s*[\d\.]{4,5}", ? (re) + "end": "\s*\d+\.\d{3,4}", ? (re) "result": "failure", ? (re) - "start": "\s*[\d\.]{4,5}", ? (re) - "time": "\s*[\d\.]{4,5}" (re) + "start": "\s*\d+\.\d{3,4}", ? (re) + "time": "\s*\d+\.\d{3,4}" (re) }, ? (re) "test-skip.t": { - "csys": "\s*[\d\.]{4,5}", ? (re) - "cuser": "\s*[\d\.]{4,5}", ? (re) + "csys": "\s*\d+\.\d{3,4}", ? (re) + "cuser": "\s*\d+\.\d{3,4}", ? (re) "diff": "", ? (re) - "end": "\s*[\d\.]{4,5}", ? (re) + "end": "\s*\d+\.\d{3,4}", ? (re) "result": "skip", ? (re) - "start": "\s*[\d\.]{4,5}", ? (re) - "time": "\s*[\d\.]{4,5}" (re) + "start": "\s*\d+\.\d{3,4}", ? (re) + "time": "\s*\d+\.\d{3,4}" (re) }, ? (re) "test-success.t": [\{] (re) - "csys": "\s*[\d\.]{4,5}", ? (re) - "cuser": "\s*[\d\.]{4,5}", ? (re) + "csys": "\s*\d+\.\d{3,4}", ? (re) + "cuser": "\s*\d+\.\d{3,4}", ? (re) "diff": "", ? (re) - "end": "\s*[\d\.]{4,5}", ? (re) + "end": "\s*\d+\.\d{3,4}", ? (re) "result": "success", ? (re) - "start": "\s*[\d\.]{4,5}", ? (re) - "time": "\s*[\d\.]{4,5}" (re) + "start": "\s*\d+\.\d{3,4}", ? (re) + "time": "\s*\d+\.\d{3,4}" (re) } } (no-eol) $ ls -a output @@ -1287,31 +1287,31 @@ $ cat report.json testreport ={ "test-failure.t": [\{] (re) - "csys": "\s*[\d\.]{4,5}", ? (re) - "cuser": "\s*[\d\.]{4,5}", ? (re) + "csys": "\s*\d+\.\d{3,4}", ? (re) + "cuser": "\s*\d+\.\d{3,4}", ? (re) "diff": "", ? (re) - "end": "\s*[\d\.]{4,5}", ? 
(re) + "end": "\s*\d+\.\d{3,4}", ? (re) "result": "success", ? (re) - "start": "\s*[\d\.]{4,5}", ? (re) - "time": "\s*[\d\.]{4,5}" (re) + "start": "\s*\d+\.\d{3,4}", ? (re) + "time": "\s*\d+\.\d{3,4}" (re) }, ? (re) "test-skip.t": { - "csys": "\s*[\d\.]{4,5}", ? (re) - "cuser": "\s*[\d\.]{4,5}", ? (re) + "csys": "\s*\d+\.\d{3,4}", ? (re) + "cuser": "\s*\d+\.\d{3,4}", ? (re) "diff": "", ? (re) - "end": "\s*[\d\.]{4,5}", ? (re) + "end": "\s*\d+\.\d{3,4}", ? (re) "result": "skip", ? (re) - "start": "\s*[\d\.]{4,5}", ? (re) - "time": "\s*[\d\.]{4,5}" (re) + "start": "\s*\d+\.\d{3,4}", ? (re) + "time": "\s*\d+\.\d{3,4}" (re) }, ? (re) "test-success.t": [\{] (re) - "csys": "\s*[\d\.]{4,5}", ? (re) - "cuser": "\s*[\d\.]{4,5}", ? (re) + "csys": "\s*\d+\.\d{3,4}", ? (re) + "cuser": "\s*\d+\.\d{3,4}", ? (re) "diff": "", ? (re) - "end": "\s*[\d\.]{4,5}", ? (re) + "end": "\s*\d+\.\d{3,4}", ? (re) "result": "success", ? (re) - "start": "\s*[\d\.]{4,5}", ? (re) - "time": "\s*[\d\.]{4,5}" (re) + "start": "\s*\d+\.\d{3,4}", ? (re) + "time": "\s*\d+\.\d{3,4}" (re) } } (no-eol) $ mv backup test-failure.t
--- a/tests/test-rust-ancestor.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-rust-ancestor.py Tue Mar 19 16:36:59 2019 +0300 @@ -19,6 +19,7 @@ LazyAncestors, MissingAncestors, ) + from mercurial.rustext import dagop try: from mercurial.cext import parsers as cparsers @@ -165,6 +166,10 @@ with self.assertRaises(error.WdirUnsupported): list(AncestorsIterator(idx, [node.wdirrev], -1, False)) + def testheadrevs(self): + idx = self.parseindex() + self.assertEqual(dagop.headrevs(idx, [1, 2, 3]), {3}) + if __name__ == '__main__': import silenttestrunner silenttestrunner.main(__name__)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test-server-view.t Tue Mar 19 16:36:59 2019 +0300 @@ -0,0 +1,38 @@ + $ hg init test + $ cd test + $ hg debugbuilddag '+2' + $ hg phase --public 0 + + $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log + $ cat hg.pid >> $DAEMON_PIDS + $ cd .. + $ hg init test2 + $ cd test2 + $ hg incoming http://foo:xyzzy@localhost:$HGPORT/ + comparing with http://foo:***@localhost:$HGPORT/ + changeset: 0:1ea73414a91b + user: debugbuilddag + date: Thu Jan 01 00:00:00 1970 +0000 + summary: r0 + + changeset: 1:66f7d451a68b + tag: tip + user: debugbuilddag + date: Thu Jan 01 00:00:01 1970 +0000 + summary: r1 + + $ killdaemons.py + + $ cd ../test + $ hg --config server.view=immutable serve -p $HGPORT -d --pid-file=hg.pid -E errors.log + $ cat hg.pid >> $DAEMON_PIDS + $ cd ../test2 + $ hg incoming http://foo:xyzzy@localhost:$HGPORT/ + comparing with http://foo:***@localhost:$HGPORT/ + changeset: 0:1ea73414a91b + tag: tip + user: debugbuilddag + date: Thu Jan 01 00:00:00 1970 +0000 + summary: r0 + + $ killdaemons.py
--- a/tests/test-share.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-share.t Tue Mar 19 16:36:59 2019 +0300 @@ -34,9 +34,9 @@ checkisexec (execbit !) checklink (symlink !) checklink-target (symlink !) + manifestfulltextcache (reporevlogstore !) $ ls -1 ../repo1/.hg/cache branch2-served - manifestfulltextcache (reporevlogstore !) rbc-names-v1 rbc-revs-v1 tags2-visible @@ -124,6 +124,15 @@ -rw-r--r-- 2 b +Cloning a shared repo via bundle2 results in a non-shared clone + + $ cd .. + $ hg clone -q --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/repo2 cloned-via-bundle2 + $ cat ./cloned-via-bundle2/.hg/requires | grep "shared" + [1] + $ hg id --cwd cloned-via-bundle2 -r tip + c2e0ac586386 tip + $ cd repo2 test unshare command
--- a/tests/test-shelve2.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-shelve2.t Tue Mar 19 16:36:59 2019 +0300 @@ -130,13 +130,28 @@ e $ cat e.orig z + $ rm e.orig +restores backup of unknown file to right directory + + $ hg shelve + shelved as default + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ echo z > e + $ mkdir dir + $ hg unshelve --cwd dir + unshelving change 'default' + $ rmdir dir + $ cat e + e + $ cat e.orig + z unshelve and conflicts with tracked and untracked files preparing: - $ rm *.orig + $ rm -f *.orig $ hg ci -qm 'commit stuff' $ hg phase -p null:
--- a/tests/test-simplekeyvaluefile.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-simplekeyvaluefile.py Tue Mar 19 16:36:59 2019 +0300 @@ -82,8 +82,8 @@ dw = {b'key1': b'value1'} scmutil.simplekeyvaluefile(self.vfs, b'fl').write(dw, firstline=b'1.0') self.assertEqual(self.vfs.read(b'fl'), b'1.0\nkey1=value1\n') - dr = scmutil.simplekeyvaluefile(self.vfs, b'fl')\ - .read(firstlinenonkeyval=True) + dr = scmutil.simplekeyvaluefile( + self.vfs, b'fl').read(firstlinenonkeyval=True) self.assertEqual(dr, {b'__firstline': b'1.0', b'key1': b'value1'}) if __name__ == "__main__":
--- a/tests/test-sparse-revlog.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-sparse-revlog.t Tue Mar 19 16:36:59 2019 +0300 @@ -12,10 +12,22 @@ $ bundlepath="$TESTDIR/artifacts/cache/big-file-churn.hg" $ expectedhash=`cat "$bundlepath".md5` + +#if slow + + $ if [ ! -f "$bundlepath" ]; then + > "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py > /dev/null + > fi + +#else + $ if [ ! -f "$bundlepath" ]; then > echo 'skipped: missing artifact, run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"' > exit 80 > fi + +#endif + $ currenthash=`f -M "$bundlepath" | cut -d = -f 2` $ if [ "$currenthash" != "$expectedhash" ]; then > echo 'skipped: outdated artifact, md5 "'"$currenthash"'" expected "'"$expectedhash"'" run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"' @@ -28,8 +40,7 @@ > maxchainlen = 15 > [storage] > revlog.optimize-delta-parent-choice = yes - > [format] - > generaldelta = yes + > revlog.reuse-external-delta = no > EOF $ hg init sparse-repo $ cd sparse-repo
--- a/tests/test-split.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-split.t Tue Mar 19 16:36:59 2019 +0300 @@ -26,6 +26,8 @@ > [diff] > git=1 > unified=0 + > [commands] + > commit.interactive.unified=0 > [alias] > glog=log -G -T '{rev}:{node|short} {desc} {bookmarks}\n' > EOF @@ -103,6 +105,12 @@ abort: cannot split multiple revisions [255] +This function splits a bit strangely primarily to avoid changing the behavior of +the test after a bug was fixed with how split/commit --interactive handled +`commands.commit.interactive.unified=0`: when there were no context lines, +it kept only the last diff hunk. When running split, this meant that runsplit +was always recording three commits, one for each diff hunk, in reverse order +(the base commit was the last diff hunk in the file). $ runsplit() { > cat > $TESTTMP/messages <<EOF > split 1 @@ -113,8 +121,11 @@ > EOF > cat <<EOF | hg split "$@" > y + > n + > n > y > y + > n > y > y > y @@ -123,13 +134,23 @@ $ HGEDITOR=false runsplit diff --git a/a b/a - 1 hunks, 1 lines changed + 3 hunks, 3 lines changed examine changes to 'a'? [Ynesfdaq?] y + @@ -1,1 +1,1 @@ + -1 + +11 + record change 1/3 to 'a'? [Ynesfdaq?] n + + @@ -3,1 +3,1 @@ 2 + -3 + +33 + record change 2/3 to 'a'? [Ynesfdaq?] n + @@ -5,1 +5,1 @@ 4 -5 +55 - record this change to 'a'? [Ynesfdaq?] y + record change 3/3 to 'a'? [Ynesfdaq?] y transaction abort! rollback completed @@ -140,13 +161,23 @@ $ HGEDITOR="\"$PYTHON\" $TESTTMP/editor.py" $ runsplit diff --git a/a b/a - 1 hunks, 1 lines changed + 3 hunks, 3 lines changed examine changes to 'a'? [Ynesfdaq?] y + @@ -1,1 +1,1 @@ + -1 + +11 + record change 1/3 to 'a'? [Ynesfdaq?] n + + @@ -3,1 +3,1 @@ 2 + -3 + +33 + record change 2/3 to 'a'? [Ynesfdaq?] n + @@ -5,1 +5,1 @@ 4 -5 +55 - record this change to 'a'? [Ynesfdaq?] y + record change 3/3 to 'a'? [Ynesfdaq?] y EDITOR: HG: Splitting 1df0d5c5a3ab. Write commit message for the first split changeset. 
EDITOR: a2 @@ -160,13 +191,18 @@ EDITOR: HG: changed a created new head diff --git a/a b/a - 1 hunks, 1 lines changed + 2 hunks, 2 lines changed examine changes to 'a'? [Ynesfdaq?] y + @@ -1,1 +1,1 @@ + -1 + +11 + record change 1/2 to 'a'? [Ynesfdaq?] n + @@ -3,1 +3,1 @@ 2 -3 +33 - record this change to 'a'? [Ynesfdaq?] y + record change 2/2 to 'a'? [Ynesfdaq?] y EDITOR: HG: Splitting 1df0d5c5a3ab. So far it has been split into: EDITOR: HG: - e704349bd21b: split 1 @@ -565,3 +601,169 @@ a09ad58faae3 draft e704349bd21b draft a61bcde8c529 draft + +`hg split` with ignoreblanklines=1 does not infinite loop + + $ mkdir $TESTTMP/f + $ hg init $TESTTMP/f/a + $ cd $TESTTMP/f/a + $ printf '1\n2\n3\n4\n5\n' > foo + $ cp foo bar + $ hg ci -qAm initial + $ printf '1\n\n2\n3\ntest\n4\n5\n' > bar + $ printf '1\n2\n3\ntest\n4\n5\n' > foo + $ hg ci -qm splitme + $ cat > $TESTTMP/messages <<EOF + > split 1 + > -- + > split 2 + > EOF + $ printf 'f\nn\nf\n' | hg --config extensions.split= --config diff.ignoreblanklines=1 split + diff --git a/bar b/bar + 2 hunks, 2 lines changed + examine changes to 'bar'? [Ynesfdaq?] f + + diff --git a/foo b/foo + 1 hunks, 1 lines changed + examine changes to 'foo'? [Ynesfdaq?] n + + EDITOR: HG: Splitting dd3c45017cbf. Write commit message for the first split changeset. + EDITOR: splitme + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. + EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: changed bar + created new head + diff --git a/foo b/foo + 1 hunks, 1 lines changed + examine changes to 'foo'? [Ynesfdaq?] f + + EDITOR: HG: Splitting dd3c45017cbf. So far it has been split into: + EDITOR: HG: - f205aea1c624: split 1 + EDITOR: HG: Write commit message for the next split changeset. + EDITOR: splitme + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. 
+ EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: changed foo + saved backup bundle to $TESTTMP/f/a/.hg/strip-backup/dd3c45017cbf-463441b5-split.hg (obsstore-off !) + +Let's try that again, with a slightly different set of patches, to ensure that +the ignoreblanklines thing isn't somehow position dependent. + + $ hg init $TESTTMP/f/b + $ cd $TESTTMP/f/b + $ printf '1\n2\n3\n4\n5\n' > foo + $ cp foo bar + $ hg ci -qAm initial + $ printf '1\n2\n3\ntest\n4\n5\n' > bar + $ printf '1\n2\n3\ntest\n4\n\n5\n' > foo + $ hg ci -qm splitme + $ cat > $TESTTMP/messages <<EOF + > split 1 + > -- + > split 2 + > EOF + $ printf 'f\nn\nf\n' | hg --config extensions.split= --config diff.ignoreblanklines=1 split + diff --git a/bar b/bar + 1 hunks, 1 lines changed + examine changes to 'bar'? [Ynesfdaq?] f + + diff --git a/foo b/foo + 2 hunks, 2 lines changed + examine changes to 'foo'? [Ynesfdaq?] n + + EDITOR: HG: Splitting 904c80b40a4a. Write commit message for the first split changeset. + EDITOR: splitme + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. + EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: changed bar + created new head + diff --git a/foo b/foo + 2 hunks, 2 lines changed + examine changes to 'foo'? [Ynesfdaq?] f + + EDITOR: HG: Splitting 904c80b40a4a. So far it has been split into: + EDITOR: HG: - ffecf40fa954: split 1 + EDITOR: HG: Write commit message for the next split changeset. + EDITOR: splitme + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. + EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: changed foo + saved backup bundle to $TESTTMP/f/b/.hg/strip-backup/904c80b40a4a-47fb907f-split.hg (obsstore-off !) 
+ + +Testing the case in split when commiting flag-only file changes (issue5864) +--------------------------------------------------------------------------- + $ hg init $TESTTMP/issue5864 + $ cd $TESTTMP/issue5864 + $ echo foo > foo + $ hg add foo + $ hg ci -m "initial" + $ hg import -q --bypass -m "make executable" - <<EOF + > diff --git a/foo b/foo + > old mode 100644 + > new mode 100755 + > EOF + $ hg up -q + + $ hg glog + @ 1:3a2125f0f4cb make executable + | + o 0:51f273a58d82 initial + + +#if no-windows + $ cat > $TESTTMP/messages <<EOF + > split 1 + > EOF + $ printf 'y\n' | hg split + diff --git a/foo b/foo + old mode 100644 + new mode 100755 + examine changes to 'foo'? [Ynesfdaq?] y + + EDITOR: HG: Splitting 3a2125f0f4cb. Write commit message for the first split changeset. + EDITOR: make executable + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. + EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: changed foo + created new head + saved backup bundle to $TESTTMP/issue5864/.hg/strip-backup/3a2125f0f4cb-629e4432-split.hg (obsstore-off !) + + $ hg log -G -T "{node|short} {desc}\n" + @ b154670c87da split 1 + | + o 51f273a58d82 initial + +#else + +TODO: Fix this on Windows. See issue 2020 and 5883 + + $ printf 'y\ny\ny\n' | hg split + abort: cannot split an empty revision + [255] +#endif
--- a/tests/test-sqlitestore.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-sqlitestore.t Tue Mar 19 16:36:59 2019 +0300 @@ -71,17 +71,17 @@ That results in a row being inserted into various tables - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM filepath; > EOF 1|foo - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM fileindex; > EOF 1|1|0|-1|-1|0|0|1||6/\xef(L\xe2\xca\x02\xae\xcc\x8d\xe6\xd5\xe8\xa1\xc3\xaf\x05V\xfe (esc) - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM delta; > EOF 1|1| \xd2\xaf\x8d\xd2"\x01\xdd\x8dH\xe5\xdc\xfc\xae\xd2\x81\xff\x94"\xc7|0 (esc) @@ -93,7 +93,7 @@ $ hg commit -A -m 'add bar' adding bar - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM filedata ORDER BY id ASC; > EOF 1|1|foo|0|6/\xef(L\xe2\xca\x02\xae\xcc\x8d\xe6\xd5\xe8\xa1\xc3\xaf\x05V\xfe|-1|-1|0|0|1| (esc) @@ -104,7 +104,7 @@ $ echo a >> foo $ hg commit -m 'modify foo' - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM filedata ORDER BY id ASC; > EOF 1|1|foo|0|6/\xef(L\xe2\xca\x02\xae\xcc\x8d\xe6\xd5\xe8\xa1\xc3\xaf\x05V\xfe|-1|-1|0|0|1| (esc)
--- a/tests/test-ssh-bundle1.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-ssh-bundle1.t Tue Mar 19 16:36:59 2019 +0300 @@ -46,7 +46,7 @@ > uncompressed = True > > [hooks] - > changegroup = sh -c "printenv.py changegroup-in-remote 0 ../dummylog" + > changegroup = sh -c "printenv.py --line changegroup-in-remote 0 ../dummylog" > EOF $ cd $TESTTMP @@ -131,7 +131,7 @@ checked 3 changesets with 2 changes to 2 files $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup-in-local 0 ../dummylog" + > changegroup = sh -c "printenv.py --line changegroup-in-local 0 ../dummylog" > EOF empty default pull @@ -514,7 +514,16 @@ Got arguments 1:user@dummy 2:hg -R local serve --stdio Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 + HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_TXNNAME=serve + remote:ssh:$LOCALIP + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio @@ -524,7 +533,16 @@ Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: 
HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 + HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_TXNNAME=serve + remote:ssh:$LOCALIP + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg init 'a repo' Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio @@ -532,7 +550,16 @@ Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 + HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_TXNNAME=serve + remote:ssh:$LOCALIP + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio remote hook failure is attributed to remote
--- a/tests/test-ssh-repoerror.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-ssh-repoerror.t Tue Mar 19 16:36:59 2019 +0300 @@ -34,7 +34,7 @@ > done $ hg id ssh://user@dummy/other - remote: abort: Permission denied: $TESTTMP/other/.hg/requires + remote: abort: Permission denied: '$TESTTMP/other/.hg/requires' abort: no suitable response from remote hg! [255]
--- a/tests/test-ssh.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-ssh.t Tue Mar 19 16:36:59 2019 +0300 @@ -36,7 +36,7 @@ > uncompressed = True > > [hooks] - > changegroup = sh -c "printenv.py changegroup-in-remote 0 ../dummylog" + > changegroup = sh -c "printenv.py --line changegroup-in-remote 0 ../dummylog" > EOF $ cd $TESTTMP @@ -563,7 +563,16 @@ Got arguments 1:user@dummy 2:hg -R local serve --stdio Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_BUNDLE2=1 + HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 + HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_TXNNAME=serve + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio @@ -573,9 +582,27 @@ Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_BUNDLE2=1 + HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 + HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_TXNNAME=serve + HG_URL=remote:ssh:$LOCALIP + Got arguments 
1:user@dummy 2:chg -R remote serve --stdio (chg !) - changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP (chg !) + changegroup-in-remote hook: HG_BUNDLE2=1 (chg !) + HG_HOOKNAME=changegroup (chg !) + HG_HOOKTYPE=changegroup (chg !) + HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 (chg !) + HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 (chg !) + HG_SOURCE=serve (chg !) + HG_TXNID=TXN:$ID$ (chg !) + HG_TXNNAME=serve (chg !) + HG_URL=remote:ssh:$LOCALIP (chg !) + (chg !) Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg init 'a repo' Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio @@ -583,9 +610,19 @@ Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_BUNDLE2=1 + HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 + HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_TXNNAME=serve + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio + remote hook failure is attributed to remote $ cat > $TESTTMP/failhook << EOF
--- a/tests/test-static-http.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-static-http.t Tue Mar 19 16:36:59 2019 +0300 @@ -57,7 +57,7 @@ $ cd ../local $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup" + > changegroup = sh -c "printenv.py --line changegroup" > EOF $ hg pull pulling from static-http://localhost:$HGPORT/remote @@ -67,7 +67,16 @@ adding file changes added 1 changesets with 1 changes to 1 files new changesets 4ac2e3648604 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_NODE_LAST=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=http://localhost:$HGPORT/remote + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=4ac2e3648604439c580c69b09ec9d93a88d93432 + HG_NODE_LAST=4ac2e3648604439c580c69b09ec9d93a88d93432 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_TXNNAME=pull + http://localhost:$HGPORT/remote + HG_URL=http://localhost:$HGPORT/remote + (run 'hg update' to get a working copy) trying to push @@ -227,9 +236,11 @@ /.hg/requires /.hg/store/00changelog.i /.hg/store/00manifest.i - /.hg/store/data/%7E2ehgsub.i - /.hg/store/data/%7E2ehgsubstate.i + /.hg/store/data/%7E2ehgsub.i (no-py37 !) + /.hg/store/data/%7E2ehgsubstate.i (no-py37 !) /.hg/store/data/a.i + /.hg/store/data/~2ehgsub.i (py37 !) + /.hg/store/data/~2ehgsubstate.i (py37 !) /notarepo/.hg/00changelog.i /notarepo/.hg/requires /remote-with-names/.hg/bookmarks @@ -243,8 +254,9 @@ /remote-with-names/.hg/requires /remote-with-names/.hg/store/00changelog.i /remote-with-names/.hg/store/00manifest.i - /remote-with-names/.hg/store/data/%7E2ehgtags.i + /remote-with-names/.hg/store/data/%7E2ehgtags.i (no-py37 !) /remote-with-names/.hg/store/data/foo.i + /remote-with-names/.hg/store/data/~2ehgtags.i (py37 !) 
/remote/.hg/bookmarks /remote/.hg/bookmarks.current /remote/.hg/cache/branch2-base @@ -258,10 +270,12 @@ /remote/.hg/requires /remote/.hg/store/00changelog.i /remote/.hg/store/00manifest.i - /remote/.hg/store/data/%7E2edotfile%20with%20spaces.i - /remote/.hg/store/data/%7E2ehgtags.i + /remote/.hg/store/data/%7E2edotfile%20with%20spaces.i (no-py37 !) + /remote/.hg/store/data/%7E2ehgtags.i (no-py37 !) /remote/.hg/store/data/bar.i /remote/.hg/store/data/quux.i + /remote/.hg/store/data/~2edotfile%20with%20spaces.i (py37 !) + /remote/.hg/store/data/~2ehgtags.i (py37 !) /remotempty/.hg/bookmarks /remotempty/.hg/bookmarks.current /remotempty/.hg/requires @@ -275,5 +289,6 @@ /sub/.hg/requires /sub/.hg/store/00changelog.i /sub/.hg/store/00manifest.i - /sub/.hg/store/data/%7E2ehgtags.i + /sub/.hg/store/data/%7E2ehgtags.i (no-py37 !) /sub/.hg/store/data/test.i + /sub/.hg/store/data/~2ehgtags.i (py37 !)
--- a/tests/test-status.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-status.t Tue Mar 19 16:36:59 2019 +0300 @@ -132,7 +132,26 @@ relative paths can be requested + $ hg status --cwd a --config ui.relative-paths=yes + ? 1/in_a_1 + ? in_a + ? ../b/1/in_b_1 + ? ../b/2/in_b_2 + ? ../b/in_b + ? ../in_root + + $ hg status --cwd a . --config ui.relative-paths=legacy + ? 1/in_a_1 + ? in_a + $ hg status --cwd a . --config ui.relative-paths=no + ? a/1/in_a_1 + ? a/in_a + +commands.status.relative overrides ui.relative-paths + $ cat >> $HGRCPATH <<EOF + > [ui] + > relative-paths = False > [commands] > status.relative = True > EOF @@ -271,7 +290,8 @@ $ hg status -A -Tpickle > pickle >>> from __future__ import print_function - >>> import pickle + >>> from mercurial import util + >>> pickle = util.pickle >>> data = sorted((x[b'status'].decode(), x[b'path'].decode()) for x in pickle.load(open("pickle", r"rb"))) >>> for s, p in data: print("%s %s" % (s, p)) ! deleted
--- a/tests/test-subrepo-git.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-subrepo-git.t Tue Mar 19 16:36:59 2019 +0300 @@ -924,9 +924,9 @@ $ echo 'bloop' > s/foobar $ hg revert --all --verbose --config 'ui.origbackuppath=.hg/origbackups' reverting subrepo ../gitroot - creating directory: $TESTTMP/tc/.hg/origbackups - saving current version of foobar as $TESTTMP/tc/.hg/origbackups/foobar - $ ls .hg/origbackups + creating directory: $TESTTMP/tc/.hg/origbackups/s + saving current version of foobar as .hg/origbackups/s/foobar + $ ls .hg/origbackups/s foobar $ rm -rf .hg/origbackups
--- a/tests/test-subrepo-svn.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-subrepo-svn.t Tue Mar 19 16:36:59 2019 +0300 @@ -1,11 +1,7 @@ #require svn15 $ SVNREPOPATH=`pwd`/svn-repo -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" $ filter_svn_output () { > egrep -v 'Committing|Transmitting|Updating|(^$)' || true
--- a/tests/test-subrepo.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-subrepo.t Tue Mar 19 16:36:59 2019 +0300 @@ -31,6 +31,13 @@ a s/a +`hg files` respects ui.relative-paths +BROKEN: shows subrepo paths relative to the subrepo + $ hg files -S --config ui.relative-paths=no + .hgsub + a + s/a + $ hg -R s ci -Ams0 $ hg sum parent: 0:f7b1eb17ad24 tip @@ -1257,6 +1264,7 @@ ../shared/subrepo-2/.hg/wcache/checkisexec (execbit !) ../shared/subrepo-2/.hg/wcache/checklink (symlink !) ../shared/subrepo-2/.hg/wcache/checklink-target (symlink !) + ../shared/subrepo-2/.hg/wcache/manifestfulltextcache (reporevlogstore !) ../shared/subrepo-2/file $ hg -R ../shared in abort: repository default not found!
--- a/tests/test-tag.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-tag.t Tue Mar 19 16:36:59 2019 +0300 @@ -320,9 +320,9 @@ HG: branch 'tag-and-branch-same-name' HG: changed .hgtags ==== - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt
--- a/tests/test-tags.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-tags.t Tue Mar 19 16:36:59 2019 +0300 @@ -759,3 +759,69 @@ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ (cd tags-local-clone/.hg/cache/; ls -1 tag*) tags2-visible + +Avoid writing logs on trying to delete an already deleted tag + $ hg init issue5752 + $ cd issue5752 + $ echo > a + $ hg commit -Am 'add a' + adding a + $ hg tag a + $ hg tags + tip 1:bd7ee4f3939b + a 0:a8a82d372bb3 + $ hg log + changeset: 1:bd7ee4f3939b + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Added tag a for changeset a8a82d372bb3 + + changeset: 0:a8a82d372bb3 + tag: a + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add a + + $ hg tag --remove a + $ hg log + changeset: 2:e7feacc7ec9e + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Removed tag a + + changeset: 1:bd7ee4f3939b + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Added tag a for changeset a8a82d372bb3 + + changeset: 0:a8a82d372bb3 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add a + + $ hg tag --remove a + abort: tag 'a' is already removed + [255] + $ hg log + changeset: 2:e7feacc7ec9e + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Removed tag a + + changeset: 1:bd7ee4f3939b + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Added tag a for changeset a8a82d372bb3 + + changeset: 0:a8a82d372bb3 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add a + + $ cat .hgtags + a8a82d372bb35b42ff736e74f07c23bcd99c371f a + a8a82d372bb35b42ff736e74f07c23bcd99c371f a + 0000000000000000000000000000000000000000 a
--- a/tests/test-template-functions.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-template-functions.t Tue Mar 19 16:36:59 2019 +0300 @@ -1549,4 +1549,31 @@ $ HGENCODING=utf-8 hg debugtemplate "{pad('`cat utf-8`', 2, '-')}\n" \xc3\xa9- (esc) +read config options: + + $ hg log -T "{config('templateconfig', 'knob', 'foo')}\n" + foo + $ hg log -T "{config('templateconfig', 'knob', 'foo')}\n" \ + > --config templateconfig.knob=bar + bar + $ hg log -T "{configbool('templateconfig', 'knob', True)}\n" + True + $ hg log -T "{configbool('templateconfig', 'knob', True)}\n" \ + > --config templateconfig.knob=0 + False + $ hg log -T "{configint('templateconfig', 'knob', 123)}\n" + 123 + $ hg log -T "{configint('templateconfig', 'knob', 123)}\n" \ + > --config templateconfig.knob=456 + 456 + $ hg log -T "{config('templateconfig', 'knob')}\n" + devel-warn: config item requires an explicit default value: 'templateconfig.knob' at: * (glob) + + $ hg log -T "{configbool('ui', 'interactive')}\n" + False + $ hg log -T "{configbool('ui', 'interactive')}\n" --config ui.interactive=1 + True + $ hg log -T "{config('templateconfig', 'knob', if(true, 'foo', 'bar'))}\n" + foo + $ cd ..
--- a/tests/test-template-keywords.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-template-keywords.t Tue Mar 19 16:36:59 2019 +0300 @@ -76,6 +76,12 @@ $ hg log -r 'wdir()' -T '{manifest}\n' 2147483647:ffffffffffff +However, for negrev, we refuse to output anything (as well as for null) + + $ hg log -r 'wdir() + null' -T 'bla{negrev}nk\n' + blank + blank + Changectx-derived keywords are disabled within {manifest} as {node} changes: $ hg log -r0 -T 'outer:{p1node} {manifest % "inner:{p1node}"}\n'
--- a/tests/test-template-map.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-template-map.t Tue Mar 19 16:36:59 2019 +0300 @@ -1039,7 +1039,7 @@ $ touch q $ chmod 0 q $ hg log --style ./q - abort: Permission denied: ./q + abort: Permission denied: './q' [255] #endif
--- a/tests/test-transplant.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-transplant.t Tue Mar 19 16:36:59 2019 +0300 @@ -39,12 +39,12 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg transplant 1 - abort: outstanding uncommitted merges + abort: outstanding uncommitted merge [255] $ hg up -qC tip $ echo b0 > b1 $ hg transplant 1 - abort: outstanding local changes + abort: uncommitted changes [255] $ hg up -qC tip $ echo b2 > b2 @@ -599,6 +599,7 @@ > EOF 0:17ab29e464c6 apply changeset? [ynmpcq?]: p + diff -r 000000000000 -r 17ab29e464c6 r1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/r1 Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@
--- a/tests/test-trusted.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-trusted.py Tue Mar 19 16:36:59 2019 +0300 @@ -5,19 +5,34 @@ from __future__ import absolute_import, print_function import os +import sys + from mercurial import ( error, + pycompat, ui as uimod, util, ) +from mercurial.utils import stringutil hgrc = os.environ['HGRCPATH'] -f = open(hgrc) +f = open(hgrc, 'rb') basehgrc = f.read() f.close() -def testui(user='foo', group='bar', tusers=(), tgroups=(), - cuser='foo', cgroup='bar', debug=False, silent=False, +def _maybesysstr(v): + if isinstance(v, bytes): + return pycompat.sysstr(v) + return pycompat.sysstr(stringutil.pprint(v)) + +def bprint(*args, **kwargs): + print(*[_maybesysstr(a) for a in args], + **{k: _maybesysstr(v) for k, v in kwargs.items()}) + # avoid awkward interleaving with ui object's output + sys.stdout.flush() + +def testui(user=b'foo', group=b'bar', tusers=(), tgroups=(), + cuser=b'foo', cgroup=b'bar', debug=False, silent=False, report=True): # user, group => owners of the file # tusers, tgroups => trusted users/groups @@ -25,17 +40,17 @@ # write a global hgrc with the list of trusted users/groups and # some setting so that we can be sure it was read - f = open(hgrc, 'w') + f = open(hgrc, 'wb') f.write(basehgrc) - f.write('\n[paths]\n') - f.write('global = /some/path\n\n') + f.write(b'\n[paths]\n') + f.write(b'global = /some/path\n\n') if tusers or tgroups: - f.write('[trusted]\n') + f.write(b'[trusted]\n') if tusers: - f.write('users = %s\n' % ', '.join(tusers)) + f.write(b'users = %s\n' % b', '.join(tusers)) if tgroups: - f.write('groups = %s\n' % ', '.join(tgroups)) + f.write(b'groups = %s\n' % b', '.join(tgroups)) f.close() # override the functions that give names to uids and gids @@ -47,7 +62,7 @@ def groupname(gid=None): if gid is None: - return 'bar' + return b'bar' return group util.groupname = groupname @@ -58,13 +73,14 @@ # try to read everything #print '# File belongs to user %s, group %s' % (user, group) #print '# 
trusted users = %s; trusted groups = %s' % (tusers, tgroups) - kind = ('different', 'same') - who = ('', 'user', 'group', 'user and the group') + kind = (b'different', b'same') + who = (b'', b'user', b'group', b'user and the group') trusted = who[(user in tusers) + 2*(group in tgroups)] if trusted: - trusted = ', but we trust the ' + trusted - print('# %s user, %s group%s' % (kind[user == cuser], kind[group == cgroup], - trusted)) + trusted = b', but we trust the ' + trusted + bprint(b'# %s user, %s group%s' % (kind[user == cuser], + kind[group == cgroup], + trusted)) u = uimod.ui.load() # disable the configuration registration warning @@ -72,33 +88,33 @@ # the purpose of this test is to check the old behavior, not to validate the # behavior from registered item. so we silent warning related to unregisted # config. - u.setconfig('devel', 'warn-config-unknown', False, 'test') - u.setconfig('devel', 'all-warnings', False, 'test') - u.setconfig('ui', 'debug', str(bool(debug))) - u.setconfig('ui', 'report_untrusted', str(bool(report))) - u.readconfig('.hg/hgrc') + u.setconfig(b'devel', b'warn-config-unknown', False, b'test') + u.setconfig(b'devel', b'all-warnings', False, b'test') + u.setconfig(b'ui', b'debug', pycompat.bytestr(bool(debug))) + u.setconfig(b'ui', b'report_untrusted', pycompat.bytestr(bool(report))) + u.readconfig(b'.hg/hgrc') if silent: return u - print('trusted') - for name, path in u.configitems('paths'): - print(' ', name, '=', util.pconvert(path)) - print('untrusted') - for name, path in u.configitems('paths', untrusted=True): - print('.', end=' ') - u.config('paths', name) # warning with debug=True - print('.', end=' ') - u.config('paths', name, untrusted=True) # no warnings - print(name, '=', util.pconvert(path)) + bprint(b'trusted') + for name, path in u.configitems(b'paths'): + bprint(b' ', name, b'=', util.pconvert(path)) + bprint(b'untrusted') + for name, path in u.configitems(b'paths', untrusted=True): + bprint(b'.', end=b' ') + 
u.config(b'paths', name) # warning with debug=True + bprint(b'.', end=b' ') + u.config(b'paths', name, untrusted=True) # no warnings + bprint(name, b'=', util.pconvert(path)) print() return u -os.mkdir('repo') -os.chdir('repo') -os.mkdir('.hg') -f = open('.hg/hgrc', 'w') -f.write('[paths]\n') -f.write('local = /another/path\n\n') +os.mkdir(b'repo') +os.chdir(b'repo') +os.mkdir(b'.hg') +f = open(b'.hg/hgrc', 'wb') +f.write(b'[paths]\n') +f.write(b'local = /another/path\n\n') f.close() #print '# Everything is run by user foo, group bar\n' @@ -106,120 +122,130 @@ # same user, same group testui() # same user, different group -testui(group='def') +testui(group=b'def') # different user, same group -testui(user='abc') +testui(user=b'abc') # ... but we trust the group -testui(user='abc', tgroups=['bar']) +testui(user=b'abc', tgroups=[b'bar']) # different user, different group -testui(user='abc', group='def') +testui(user=b'abc', group=b'def') # ... but we trust the user -testui(user='abc', group='def', tusers=['abc']) +testui(user=b'abc', group=b'def', tusers=[b'abc']) # ... but we trust the group -testui(user='abc', group='def', tgroups=['def']) +testui(user=b'abc', group=b'def', tgroups=[b'def']) # ... but we trust the user and the group -testui(user='abc', group='def', tusers=['abc'], tgroups=['def']) +testui(user=b'abc', group=b'def', tusers=[b'abc'], tgroups=[b'def']) # ... but we trust all users -print('# we trust all users') -testui(user='abc', group='def', tusers=['*']) +bprint(b'# we trust all users') +testui(user=b'abc', group=b'def', tusers=[b'*']) # ... but we trust all groups -print('# we trust all groups') -testui(user='abc', group='def', tgroups=['*']) +bprint(b'# we trust all groups') +testui(user=b'abc', group=b'def', tgroups=[b'*']) # ... 
but we trust the whole universe -print('# we trust all users and groups') -testui(user='abc', group='def', tusers=['*'], tgroups=['*']) +bprint(b'# we trust all users and groups') +testui(user=b'abc', group=b'def', tusers=[b'*'], tgroups=[b'*']) # ... check that users and groups are in different namespaces -print("# we don't get confused by users and groups with the same name") -testui(user='abc', group='def', tusers=['def'], tgroups=['abc']) +bprint(b"# we don't get confused by users and groups with the same name") +testui(user=b'abc', group=b'def', tusers=[b'def'], tgroups=[b'abc']) # ... lists of user names work -print("# list of user names") -testui(user='abc', group='def', tusers=['foo', 'xyz', 'abc', 'bleh'], - tgroups=['bar', 'baz', 'qux']) +bprint(b"# list of user names") +testui(user=b'abc', group=b'def', tusers=[b'foo', b'xyz', b'abc', b'bleh'], + tgroups=[b'bar', b'baz', b'qux']) # ... lists of group names work -print("# list of group names") -testui(user='abc', group='def', tusers=['foo', 'xyz', 'bleh'], - tgroups=['bar', 'def', 'baz', 'qux']) +bprint(b"# list of group names") +testui(user=b'abc', group=b'def', tusers=[b'foo', b'xyz', b'bleh'], + tgroups=[b'bar', b'def', b'baz', b'qux']) -print("# Can't figure out the name of the user running this process") -testui(user='abc', group='def', cuser=None) +bprint(b"# Can't figure out the name of the user running this process") +testui(user=b'abc', group=b'def', cuser=None) -print("# prints debug warnings") -u = testui(user='abc', group='def', cuser='foo', debug=True) +bprint(b"# prints debug warnings") +u = testui(user=b'abc', group=b'def', cuser=b'foo', debug=True) -print("# report_untrusted enabled without debug hides warnings") -u = testui(user='abc', group='def', cuser='foo', report=False) +bprint(b"# report_untrusted enabled without debug hides warnings") +u = testui(user=b'abc', group=b'def', cuser=b'foo', report=False) -print("# report_untrusted enabled with debug shows warnings") -u = 
testui(user='abc', group='def', cuser='foo', debug=True, report=False) +bprint(b"# report_untrusted enabled with debug shows warnings") +u = testui(user=b'abc', group=b'def', cuser=b'foo', debug=True, report=False) -print("# ui.readconfig sections") -filename = 'foobar' -f = open(filename, 'w') -f.write('[foobar]\n') -f.write('baz = quux\n') +bprint(b"# ui.readconfig sections") +filename = b'foobar' +f = open(filename, 'wb') +f.write(b'[foobar]\n') +f.write(b'baz = quux\n') f.close() -u.readconfig(filename, sections=['foobar']) -print(u.config('foobar', 'baz')) +u.readconfig(filename, sections=[b'foobar']) +bprint(u.config(b'foobar', b'baz')) print() -print("# read trusted, untrusted, new ui, trusted") +bprint(b"# read trusted, untrusted, new ui, trusted") u = uimod.ui.load() # disable the configuration registration warning # # the purpose of this test is to check the old behavior, not to validate the # behavior from registered item. so we silent warning related to unregisted # config. -u.setconfig('devel', 'warn-config-unknown', False, 'test') -u.setconfig('devel', 'all-warnings', False, 'test') -u.setconfig('ui', 'debug', 'on') +u.setconfig(b'devel', b'warn-config-unknown', False, b'test') +u.setconfig(b'devel', b'all-warnings', False, b'test') +u.setconfig(b'ui', b'debug', b'on') u.readconfig(filename) u2 = u.copy() def username(uid=None): - return 'foo' + return b'foo' util.username = username -u2.readconfig('.hg/hgrc') -print('trusted:') -print(u2.config('foobar', 'baz')) -print('untrusted:') -print(u2.config('foobar', 'baz', untrusted=True)) +u2.readconfig(b'.hg/hgrc') +bprint(b'trusted:') +bprint(u2.config(b'foobar', b'baz')) +bprint(b'untrusted:') +bprint(u2.config(b'foobar', b'baz', untrusted=True)) print() -print("# error handling") +bprint(b"# error handling") def assertraises(f, exc=error.Abort): try: f() except exc as inst: - print('raised', inst.__class__.__name__) + bprint(b'raised', inst.__class__.__name__) else: - print('no exception?!') + 
bprint(b'no exception?!') -print("# file doesn't exist") -os.unlink('.hg/hgrc') -assert not os.path.exists('.hg/hgrc') +bprint(b"# file doesn't exist") +os.unlink(b'.hg/hgrc') +assert not os.path.exists(b'.hg/hgrc') testui(debug=True, silent=True) -testui(user='abc', group='def', debug=True, silent=True) +testui(user=b'abc', group=b'def', debug=True, silent=True) print() -print("# parse error") -f = open('.hg/hgrc', 'w') -f.write('foo') +bprint(b"# parse error") +f = open(b'.hg/hgrc', 'wb') +f.write(b'foo') f.close() +# This is a hack to remove b'' prefixes from ParseError.__bytes__ on +# Python 3. +def normalizeparseerror(e): + if pycompat.ispy3: + args = [a.decode('utf-8') for a in e.args] + else: + args = e.args + + return error.ParseError(*args) + try: - testui(user='abc', group='def', silent=True) + testui(user=b'abc', group=b'def', silent=True) except error.ParseError as inst: - print(inst) + bprint(normalizeparseerror(inst)) try: testui(debug=True, silent=True) except error.ParseError as inst: - print(inst) + bprint(normalizeparseerror(inst)) print() -print('# access typed information') -with open('.hg/hgrc', 'w') as f: - f.write('''\ +bprint(b'# access typed information') +with open(b'.hg/hgrc', 'wb') as f: + f.write(b'''\ [foo] sub=main sub:one=one @@ -230,32 +256,33 @@ bytes=81mb list=spam,ham,eggs ''') -u = testui(user='abc', group='def', cuser='foo', silent=True) +u = testui(user=b'abc', group=b'def', cuser=b'foo', silent=True) def configpath(section, name, default=None, untrusted=False): path = u.configpath(section, name, default, untrusted) if path is None: return None return util.pconvert(path) -print('# suboptions, trusted and untrusted') -trusted = u.configsuboptions('foo', 'sub') -untrusted = u.configsuboptions('foo', 'sub', untrusted=True) -print( +bprint(b'# suboptions, trusted and untrusted') +trusted = u.configsuboptions(b'foo', b'sub') +untrusted = u.configsuboptions(b'foo', b'sub', untrusted=True) +bprint( (trusted[0], 
sorted(trusted[1].items())), (untrusted[0], sorted(untrusted[1].items()))) -print('# path, trusted and untrusted') -print(configpath('foo', 'path'), configpath('foo', 'path', untrusted=True)) -print('# bool, trusted and untrusted') -print(u.configbool('foo', 'bool'), u.configbool('foo', 'bool', untrusted=True)) -print('# int, trusted and untrusted') -print( - u.configint('foo', 'int', 0), - u.configint('foo', 'int', 0, untrusted=True)) -print('# bytes, trusted and untrusted') -print( - u.configbytes('foo', 'bytes', 0), - u.configbytes('foo', 'bytes', 0, untrusted=True)) -print('# list, trusted and untrusted') -print( - u.configlist('foo', 'list', []), - u.configlist('foo', 'list', [], untrusted=True)) +bprint(b'# path, trusted and untrusted') +bprint(configpath(b'foo', b'path'), configpath(b'foo', b'path', untrusted=True)) +bprint(b'# bool, trusted and untrusted') +bprint(u.configbool(b'foo', b'bool'), + u.configbool(b'foo', b'bool', untrusted=True)) +bprint(b'# int, trusted and untrusted') +bprint( + u.configint(b'foo', b'int', 0), + u.configint(b'foo', b'int', 0, untrusted=True)) +bprint(b'# bytes, trusted and untrusted') +bprint( + u.configbytes(b'foo', b'bytes', 0), + u.configbytes(b'foo', b'bytes', 0, untrusted=True)) +bprint(b'# list, trusted and untrusted') +bprint( + u.configlist(b'foo', b'list', []), + u.configlist(b'foo', b'list', [], untrusted=True))
--- a/tests/test-trusted.py.out Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-trusted.py.out Tue Mar 19 16:36:59 2019 +0300 @@ -174,9 +174,9 @@ # parse error # different user, different group not trusting file .hg/hgrc from untrusted user abc, group def -('foo', '.hg/hgrc:1') +ParseError('foo', '.hg/hgrc:1') # same user, same group -('foo', '.hg/hgrc:1') +ParseError('foo', '.hg/hgrc:1') # access typed information # different user, different group
--- a/tests/test-unamend.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-unamend.t Tue Mar 19 16:36:59 2019 +0300 @@ -232,6 +232,7 @@ $ hg revert --all forgetting bar + $ rm bar Unamending in middle of a stack @@ -302,7 +303,6 @@ Testing whether unamend retains copies or not $ hg status - ? bar $ hg mv a foo @@ -370,3 +370,42 @@ diff --git a/c b/wat rename from c rename to wat + $ hg revert -qa + $ rm foobar wat + +Rename a->b, then amend b->c. After unamend, should look like b->c. + + $ hg co -q 0 + $ hg mv a b + $ hg ci -qm 'move to a b' + $ hg mv b c + $ hg amend + $ hg unamend + $ hg st --copies --change . + A b + a + R a + $ hg st --copies + A c + b + R b + $ hg revert -qa + $ rm c + +Rename a->b, then amend b->c, and working copy change c->d. After unamend, should look like b->d + + $ hg co -q 0 + $ hg mv a b + $ hg ci -qm 'move to a b' + $ hg mv b c + $ hg amend + $ hg mv c d + $ hg unamend + $ hg st --copies --change . + A b + a + R a + $ hg st --copies + A d + b + R b
--- a/tests/test-uncommit.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-uncommit.t Tue Mar 19 16:36:59 2019 +0300 @@ -34,9 +34,10 @@ options ([+] can be repeated): - --keep allow an empty commit after uncommiting - -I --include PATTERN [+] include names matching the given patterns - -X --exclude PATTERN [+] exclude names matching the given patterns + --keep allow an empty commit after uncommiting + --allow-dirty-working-copy allow uncommit with outstanding changes + -I --include PATTERN [+] include names matching the given patterns + -X --exclude PATTERN [+] exclude names matching the given patterns (some details hidden, use --verbose to show complete help) @@ -156,8 +157,12 @@ M files $ hg uncommit abort: uncommitted changes + (requires --allow-dirty-working-copy to uncommit) [255] $ hg uncommit files + abort: uncommitted changes + (requires --allow-dirty-working-copy to uncommit) + [255] $ cat files abcde foo @@ -168,6 +173,7 @@ $ echo "bar" >> files $ hg uncommit abort: uncommitted changes + (requires --allow-dirty-working-copy to uncommit) [255] $ hg uncommit --config experimental.uncommitondirtywdir=True $ hg commit -m "files abcde + foo" @@ -191,16 +197,16 @@ +abc $ hg bookmark - foo 10:48e5bd7cd583 + foo 9:48e5bd7cd583 $ hg uncommit 3 new orphan changesets $ hg status M files A file-abc $ hg heads -T '{rev}:{node} {desc}' - 10:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo (no-eol) + 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo (no-eol) $ hg bookmark - foo 10:48e5bd7cd583 + foo 9:48e5bd7cd583 $ hg commit -m 'new abc' created new head @@ -222,38 +228,36 @@ +ab $ hg bookmark - foo 10:48e5bd7cd583 + foo 9:48e5bd7cd583 $ hg uncommit file-ab 1 new orphan changesets $ hg status A file-ab $ hg heads -T '{rev}:{node} {desc}\n' - 12:8eb87968f2edb7f27f27fe676316e179de65fff6 added file-ab - 11:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc - 10:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo + 
11:8eb87968f2edb7f27f27fe676316e179de65fff6 added file-ab + 10:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc + 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo $ hg bookmark - foo 10:48e5bd7cd583 + foo 9:48e5bd7cd583 $ hg commit -m 'update ab' $ hg status $ hg heads -T '{rev}:{node} {desc}\n' - 13:f21039c59242b085491bb58f591afc4ed1c04c09 update ab - 11:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc - 10:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo + 12:f21039c59242b085491bb58f591afc4ed1c04c09 update ab + 10:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc + 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo $ hg log -G -T '{rev}:{node} {desc}' --hidden - @ 13:f21039c59242b085491bb58f591afc4ed1c04c09 update ab + @ 12:f21039c59242b085491bb58f591afc4ed1c04c09 update ab | - o 12:8eb87968f2edb7f27f27fe676316e179de65fff6 added file-ab + o 11:8eb87968f2edb7f27f27fe676316e179de65fff6 added file-ab | - | * 11:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc + | * 10:5dc89ca4486f8a88716c5797fa9f498d13d7c2e1 new abc | | - | | * 10:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo + | | * 9:48e5bd7cd583eb24164ef8b89185819c84c96ed7 files abcde + foo | | | - | | | x 9:8a6b58c173ca6a2e3745d8bd86698718d664bc6c files abcde + foo - | | |/ - | | | x 8:39ad452c7f684a55d161c574340c5766c4569278 update files for abcde + | | | x 8:84beeba0ac30e19521c036e4d2dd3a5fa02586ff files abcde + foo | | |/ | | | x 7:0977fa602c2fd7d8427ed4e7ee15ea13b84c9173 update files for abcde | | |/ @@ -275,14 +279,15 @@ $ hg uncommit $ hg phase -r . - 12: draft + 11: draft $ hg commit -m 'update ab again' Phase is preserved $ hg uncommit --keep --config phases.new-commit=secret + note: keeping empty commit $ hg phase -r . - 15: draft + 14: draft $ hg commit --amend -m 'update ab again' Uncommit with public parent @@ -290,7 +295,7 @@ $ hg phase -p "::.^" $ hg uncommit $ hg phase -r . 
- 12: public + 11: public Partial uncommit with public parent @@ -301,11 +306,11 @@ $ hg status A xyz $ hg phase -r . - 18: draft + 17: draft $ hg phase -r ".^" - 12: public + 11: public -Uncommit leaving an empty changeset +Uncommit with --keep or experimental.uncommit.keep leaves an empty changeset $ cd $TESTTMP $ hg init repo1 @@ -317,6 +322,21 @@ > EOS $ hg up Q -q $ hg uncommit --keep + note: keeping empty commit + $ hg log -G -T '{desc} FILES: {files}' + @ Q FILES: + | + | x Q FILES: Q + |/ + o P FILES: P + + $ cat >> .hg/hgrc <<EOF + > [experimental] + > uncommit.keep=True + > EOF + $ hg ci --amend + $ hg uncommit + note: keeping empty commit $ hg log -G -T '{desc} FILES: {files}' @ Q FILES: | @@ -326,7 +346,15 @@ $ hg status A Q - + $ hg ci --amend + $ hg uncommit --no-keep + $ hg log -G -T '{desc} FILES: {files}' + x Q FILES: Q + | + @ P FILES: P + + $ hg status + A Q $ cd .. $ rm -rf repo1 @@ -368,6 +396,7 @@ $ hg uncommit abort: outstanding uncommitted merge + (requires --allow-dirty-working-copy to uncommit) [255] $ hg uncommit --config experimental.uncommitondirtywdir=True @@ -398,3 +427,89 @@ |/ o 0:ea4e33293d4d274a2ba73150733c2612231f398c a 1 + +Rename a->b, then remove b in working copy. Result should remove a. + + $ hg co -q 0 + $ hg mv a b + $ hg ci -qm 'move a to b' + $ hg rm b + $ hg uncommit --config experimental.uncommitondirtywdir=True + $ hg st --copies + R a + $ hg revert a + +Rename a->b, then rename b->c in working copy. Result should rename a->c. + + $ hg co -q 0 + $ hg mv a b + $ hg ci -qm 'move a to b' + $ hg mv b c + $ hg uncommit --config experimental.uncommitondirtywdir=True + $ hg st --copies + A c + a + R a + $ hg revert a + $ hg forget c + $ rm c + +Copy a->b1 and a->b2, then rename b1->c in working copy. Result should copy a->b2 and a->c. 
+ + $ hg co -q 0 + $ hg cp a b1 + $ hg cp a b2 + $ hg ci -qm 'move a to b1 and b2' + $ hg mv b1 c + $ hg uncommit --config experimental.uncommitondirtywdir=True + $ hg st --copies + A b2 + a + A c + a + $ cd .. + +--allow-dirty-working-copy should also work on a dirty PATH + + $ hg init issue5977 + $ cd issue5977 + $ echo 'super critical info!' > a + $ hg ci -Am 'add a' + adding a + $ echo 'foo' > b + $ hg add b + $ hg status + A b + $ hg unc a + note: keeping empty commit + $ cat a + super critical info! + $ hg log + changeset: 1:656ba143d384 + tag: tip + parent: -1:000000000000 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add a + + $ hg ci -Am 'add b' + $ echo 'foo bar' > b + $ hg unc b + abort: uncommitted changes + (requires --allow-dirty-working-copy to uncommit) + [255] + $ hg unc --allow-dirty-working-copy b + $ hg log + changeset: 3:30fa958635b2 + tag: tip + parent: 1:656ba143d384 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add b + + changeset: 1:656ba143d384 + parent: -1:000000000000 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add a +
--- a/tests/test-update-atomic.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-update-atomic.t Tue Mar 19 16:36:59 2019 +0300 @@ -4,13 +4,14 @@ $ cat > $TESTTMP/show_mode.py <<EOF > from __future__ import print_function + > import os + > import stat > import sys - > import os - > from stat import ST_MODE + > ST_MODE = stat.ST_MODE > > for file_path in sys.argv[1:]: > file_stat = os.stat(file_path) - > octal_mode = oct(file_stat[ST_MODE] & 0o777) + > octal_mode = oct(file_stat[ST_MODE] & 0o777).replace('o', '') > print("%s:%s" % (file_path, octal_mode)) > > EOF @@ -19,11 +20,15 @@ $ cd repo $ cat > .hg/showwrites.py <<EOF + > from __future__ import print_function + > from mercurial import pycompat + > from mercurial.utils import stringutil > def uisetup(ui): > from mercurial import vfs > class newvfs(vfs.vfs): > def __call__(self, *args, **kwargs): - > print('vfs open', args, sorted(list(kwargs.items()))) + > print(pycompat.sysstr(stringutil.pprint( + > ('vfs open', args, sorted(list(kwargs.items())))))) > return super(newvfs, self).__call__(*args, **kwargs) > vfs.vfs = newvfs > EOF
--- a/tests/test-wireproto-command-capabilities.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-wireproto-command-capabilities.t Tue Mar 19 16:36:59 2019 +0300 @@ -22,6 +22,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -45,6 +46,7 @@ > x-hgproto-1: cbor > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -82,6 +84,7 @@ > x-hgupgrade-1: foo bar > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -106,6 +109,7 @@ > x-hgproto-1: some value > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -131,6 +135,7 @@ > x-hgproto-1: cbor > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -170,6 +175,7 @@ > x-hgproto-1: cbor > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -202,6 +208,7 @@ > x-hgproto-1: cbor > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -460,6 +467,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) 
s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -478,6 +486,7 @@ s> \r\n s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefa
ult\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -498,23 +507,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 65e\r\n s> V\x06\x00\x01\x00\x02\x041 s> 
\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x8
3LgeneraldeltaHrevlogv1Lsparserevlog s> \r\n - received frame(size=1622; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': {
--- a/tests/test-wireproto-content-redirects.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-wireproto-content-redirects.t Tue Mar 19 16:36:59 2019 +0300 @@ -51,6 +51,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -71,6 +72,7 @@ (remote redirect target target-a is compatible) (tls1.2 !) (remote redirect target target-a requires unsupported TLS versions: 1.2, 1.3) (no-tls1.2 !) sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -93,23 +95,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 6de\r\n s> \xd6\x06\x00\x01\x00\x02\x041 s> 
\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x8
3LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/ s> \r\n - received frame(size=1750; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': { @@ -383,6 +381,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -403,6 +402,7 @@ (remote redirect target target-a is compatible) (remote redirect target target-b uses unsupported protocol: unknown) sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -423,23 +423,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 6f9\r\n s> \xf1\x06\x00\x01\x00\x02\x041 s> 
\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x8
3LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/ s> \r\n - received frame(size=1777; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': { @@ -720,6 +716,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -739,6 +736,7 @@ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermiss
ions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (redirect target target-bad-tls requires SNI, which is unsupported) sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -759,23 +757,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 6d1\r\n s> \xc9\x06\x00\x01\x00\x02\x041 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequir
ed\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/ s> \r\n - received frame(size=1737; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': { @@ -1046,6 +1040,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) 
s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -1065,6 +1060,7 @@ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf
6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (remote redirect target target-bad-tls requires unsupported TLS versions: 39, 42) sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -1085,23 +1081,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 6d7\r\n s> \xcf\x06\x00\x01\x00\x02\x041 s> 
\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x8
3LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/ s> \r\n - received frame(size=1743; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': { @@ -1372,6 +1364,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/simplecache/missingkey HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -1416,6 +1409,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/simplecache/47abb8efa5f01b8964d74917793ad2464db0fa2c HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n
--- a/tests/test-wireproto-exchangev2.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-wireproto-exchangev2.t Tue Mar 19 16:36:59 2019 +0300 @@ -36,7 +36,10 @@ Test basic clone - $ hg --debug clone -U http://localhost:$HGPORT client-simple +Output is flaky, save it in a file and check part independently + $ hg --debug clone -U http://localhost:$HGPORT client-simple > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -45,13 +48,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -71,10 +67,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=941; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset 4432d83626e8 add changeset cd2534766bec @@ -97,10 +89,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; 
streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=992; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -121,13 +109,32 @@ } ] } + updating the branch cache + new changesets 3390ef850073:caa2a465451d (3 drafts) + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=941; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; 
streamflags=encoded; type=command-response; flags=continuation) + received frame(size=992; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=901; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:caa2a465451d (3 drafts) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output All changesets should have been transferred @@ -163,30 +170,22 @@ Cloning only a specific revision works - $ hg --debug clone -U -r 4432d83626e8 http://localhost:$HGPORT client-singlehead +Output is flaky, save it in a file and check part independently + $ hg --debug clone -U -r 4432d83626e8 http://localhost:$HGPORT client-singlehead > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command sending 1 commands sending command lookup: { 'key': '4432d83626e8' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=21; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) query 1; heads sending 2 commands sending command heads: {} sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; 
type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -205,10 +204,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=381; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset 4432d83626e8 checking for updated bookmarks @@ -225,10 +220,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=404; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -246,13 +237,36 @@ } ] } + updating the branch cache + new changesets 3390ef850073:4432d83626e8 + (sent 6 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep 
"received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=21; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=381; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=404; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; 
type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=439; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:4432d83626e8 - (sent 6 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output $ cd client-singlehead @@ -269,7 +283,10 @@ Incremental pull works - $ hg --debug pull +Output is flaky, save it in a file and check part independently + $ hg --debug pull > pull-output + + $ cat pull-output | grep -v "received frame" pulling from http://localhost:$HGPORT/ using http://localhost:$HGPORT/ sending capabilities command @@ -281,13 +298,6 @@ 'D2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) searching for changes all local heads known remotely sending 1 commands @@ -311,10 +321,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received 
frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=573; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset cd2534766bec add changeset e96ae20f4188 add changeset caa2a465451d @@ -333,10 +339,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=601; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -355,14 +357,33 @@ } ] } + updating the branch cache + new changesets cd2534766bec:caa2a465451d (3 drafts) + (run 'hg update' to get a working copy) + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat pull-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; 
type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=573; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=601; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=527; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets cd2534766bec:caa2a465451d (3 drafts) - (run 'hg update' to get a working copy) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm pull-output $ hg log -G -T '{rev} {node} {phase}\n' o 4 caa2a465451dd1facda0f5b12312c355584188a1 draft @@ -459,7 +480,10 @@ $ hg -R server-simple bookmark -r 3390ef850073fbc2f0dfff2244342c8e9229013a book-1 $ hg -R server-simple bookmark -r cd2534766bece138c7c1afdc6825302f0f62d81f book-2 - $ hg --debug clone -U http://localhost:$HGPORT/ client-bookmarks +Output is flaky, save it in a file and check part independently + $ hg --debug clone -U http://localhost:$HGPORT/ client-bookmarks > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities 
command query 1; heads @@ -468,13 +492,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -494,10 +511,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=979; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset 4432d83626e8 add changeset cd2534766bec @@ -522,10 +535,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=992; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -546,13 +555,32 @@ } ] } + updating the branch cache + 
new changesets 3390ef850073:caa2a465451d (1 drafts) + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=979; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=992; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=901; 
request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:caa2a465451d (1 drafts) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output $ hg -R client-bookmarks bookmarks book-1 0:3390ef850073 @@ -563,7 +591,10 @@ $ hg -R server-simple bookmark -r cd2534766bece138c7c1afdc6825302f0f62d81f book-1 moving bookmark 'book-1' forward from 3390ef850073 - $ hg -R client-bookmarks --debug pull +Output is flaky, save it in a file and check part independently + $ hg -R client-bookmarks --debug pull > pull-output + + $ cat pull-output | grep -v "received frame" pulling from http://localhost:$HGPORT/ using http://localhost:$HGPORT/ sending capabilities command @@ -576,13 +607,6 @@ '\xca\xa2\xa4eE\x1d\xd1\xfa\xcd\xa0\xf5\xb1#\x12\xc3UXA\x88\xa1' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=3; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) searching for changes all remote heads known locally sending 1 commands @@ -607,14 +631,25 @@ } ] } + checking for updated bookmarks + updating bookmark book-1 + (run 'hg update' to get a working copy) + (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat pull-output 
| grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=3; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=65; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - checking for updated bookmarks - updating bookmark book-1 - (run 'hg update' to get a working copy) - (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm pull-output $ hg -R client-bookmarks bookmarks book-1 2:cd2534766bec @@ -647,7 +682,10 @@ Narrow clone only fetches some files - $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --include dir0/ http://localhost:$HGPORT/ client-narrow-0 +Output is flaky, save it in a file and check part independently + $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --include dir0/ http://localhost:$HGPORT/ client-narrow-0 > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -656,13 +694,6 @@ sending command known: { 
'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -681,10 +712,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset b709380892b1 add changeset 47fe012ab237 @@ -705,10 +732,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -733,13 +756,32 @@ } ] } + updating the branch cache + new changesets 3390ef850073:97765fc3cd62 + (sent 5 HTTP requests and * 
bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=449; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) 
received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:97765fc3cd62 - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output #if reporevlogstore $ find client-narrow-0/.hg/store -type f -name '*.i' | sort @@ -751,7 +793,10 @@ --exclude by itself works - $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --exclude dir0/ http://localhost:$HGPORT/ client-narrow-1 +Output is flaky, save it in a file and check part independently + $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --exclude dir0/ http://localhost:$HGPORT/ client-narrow-1 > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -760,13 +805,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -785,10 +823,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=783; request=1; 
stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset b709380892b1 add changeset 47fe012ab237 @@ -809,10 +843,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -840,13 +870,32 @@ } ] } + updating the branch cache + new changesets 3390ef850073:97765fc3cd62 + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=783; request=1; stream=2; 
streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=709; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:97765fc3cd62 - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output #if reporevlogstore $ find client-narrow-1/.hg/store -type f -name '*.i' | sort @@ -860,7 +909,10 @@ Mixing --include and --exclude works - $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --include dir0/ --exclude dir0/c http://localhost:$HGPORT/ client-narrow-2 +Output is flaky, save it in a file and check part independently + $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --include dir0/ --exclude dir0/c http://localhost:$HGPORT/ client-narrow-2 > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -869,13 +921,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; 
request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -894,10 +939,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset b709380892b1 add changeset 47fe012ab237 @@ -918,10 +959,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -949,13 +986,32 @@ } ] } + updating the branch cache + new changesets 3390ef850073:97765fc3cd62 + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; 
streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=160; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 
3390ef850073:97765fc3cd62 - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output #if reporevlogstore $ find client-narrow-2/.hg/store -type f -name '*.i' | sort @@ -967,7 +1023,10 @@ --stream will use rawfiledata to transfer changelog and manifestlog, then fall through to get files data - $ hg --debug clone --stream -U http://localhost:$HGPORT client-stream-0 +Output is flaky, save it in a file and check part independently + $ hg --debug clone --stream -U http://localhost:$HGPORT client-stream-0 > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command sending 1 commands @@ -977,10 +1036,6 @@ 'manifestlog' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) updating the branch cache query 1; heads sending 2 commands @@ -990,13 +1045,6 @@ '\x97v_\xc3\xcdbO\xd1\xfa\x01v\x93,!\xff\xd1j\xdfC.' 
] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) searching for changes all remote heads known locally sending 1 commands @@ -1019,10 +1067,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=13; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) checking for updated bookmarks sending 1 commands sending command filesdata: { @@ -1043,15 +1087,37 @@ } ] } + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; 
type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=13; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=1133; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output --stream + --include/--exclude will only obtain some files - $ hg --debug --config extensions.pullext=$TESTDIR/pullext.py clone --stream --include dir0/ -U http://localhost:$HGPORT client-stream-2 +Output is flaky, save it in a file and check part independently + $ hg --debug --config extensions.pullext=$TESTDIR/pullext.py clone --stream --include dir0/ -U http://localhost:$HGPORT client-stream-2 > 
clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command sending 1 commands @@ -1061,10 +1127,6 @@ 'manifestlog' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) updating the branch cache query 1; heads sending 2 commands @@ -1074,13 +1136,6 @@ '\x97v_\xc3\xcdbO\xd1\xfa\x01v\x93,!\xff\xd1j\xdfC.' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) searching for changes all remote heads known locally sending 1 commands @@ -1103,10 +1158,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=13; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; 
flags=eos) checking for updated bookmarks sending 1 commands sending command filesdata: { @@ -1132,11 +1183,30 @@ } ] } + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=13; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; 
type=command-response; flags=continuation) received frame(size=449; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output #if reporevlogstore $ find client-stream-2/.hg/store -type f -name '*.i' | sort @@ -1148,7 +1218,14 @@ Shallow clone doesn't work with revlogs - $ hg --debug --config extensions.pullext=$TESTDIR/pullext.py clone --depth 1 -U http://localhost:$HGPORT client-shallow-revlogs +Output is flaky, save it in a file and check part independently + $ hg --debug --config extensions.pullext=$TESTDIR/pullext.py clone --depth 1 -U http://localhost:$HGPORT client-shallow-revlogs > clone-output + transaction abort! + rollback completed + abort: revlog storage does not support missing parents write mode + [255] + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -1157,13 +1234,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -1182,10 +1252,6 @@ } ] } - received frame(size=9; request=1; 
stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset b709380892b1 add changeset 47fe012ab237 @@ -1206,10 +1272,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -1227,15 +1289,30 @@ } ] } + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received 
frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=1005; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - transaction abort! - rollback completed - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) - abort: revlog storage does not support missing parents write mode - [255] + + $ rm clone-output $ killdaemons.py
--- a/tests/test-wireproto.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-wireproto.py Tue Mar 19 16:36:59 2019 +0300 @@ -78,6 +78,9 @@ yield unmangle(f.value) class serverrepo(object): + def __init__(self, ui): + self.ui = ui + def greet(self, name): return b"Hello, " + name @@ -94,7 +97,7 @@ wireprotov1server.commands[b'greet'] = (greet, b'name') -srv = serverrepo() +srv = serverrepo(uimod.ui()) clt = clientpeer(srv, uimod.ui()) def printb(data, end=b'\n'):
--- a/tests/test-worker.t Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/test-worker.t Tue Mar 19 16:36:59 2019 +0300 @@ -83,8 +83,10 @@ [255] $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \ - > test 100000.0 abort --traceback 2>&1 | egrep '^(SystemExit|Abort)' - Abort: known exception + > test 100000.0 abort --traceback 2>&1 | egrep '(SystemExit|Abort)' + raise error.Abort(b'known exception') + mercurial.error.Abort: b'known exception' (py3 !) + Abort: known exception (no-py3 !) SystemExit: 255 Traceback must be printed for unknown exceptions
--- a/tests/tinyproxy.py Tue Mar 19 09:23:35 2019 -0400 +++ b/tests/tinyproxy.py Tue Mar 19 16:36:59 2019 +0300 @@ -20,7 +20,10 @@ import socket import sys -from mercurial import util +from mercurial import ( + pycompat, + util, +) httpserver = util.httpserver socketserver = util.socketserver @@ -77,10 +80,11 @@ try: if self._connect_to(self.path, soc): self.log_request(200) - self.wfile.write(self.protocol_version + - " 200 Connection established\r\n") - self.wfile.write("Proxy-agent: %s\r\n" % self.version_string()) - self.wfile.write("\r\n") + self.wfile.write(pycompat.bytestr(self.protocol_version) + + b" 200 Connection established\r\n") + self.wfile.write(b"Proxy-agent: %s\r\n" % + pycompat.bytestr(self.version_string())) + self.wfile.write(b"\r\n") self._read_write(soc, 300) finally: print("\t" "bye") @@ -97,15 +101,17 @@ try: if self._connect_to(netloc, soc): self.log_request() - soc.send("%s %s %s\r\n" % ( - self.command, - urlreq.urlunparse(('', '', path, params, query, '')), - self.request_version)) + url = urlreq.urlunparse(('', '', path, params, query, '')) + soc.send(b"%s %s %s\r\n" % ( + pycompat.bytestr(self.command), + pycompat.bytestr(url), + pycompat.bytestr(self.request_version))) self.headers['Connection'] = 'close' del self.headers['Proxy-Connection'] - for key_val in self.headers.items(): - soc.send("%s: %s\r\n" % key_val) - soc.send("\r\n") + for key, val in self.headers.items(): + soc.send(b"%s: %s\r\n" % (pycompat.bytestr(key), + pycompat.bytestr(val))) + soc.send(b"\r\n") self._read_write(soc) finally: print("\t" "bye")