Mercurial > hg-stable
changeset 41751:3d5a73c8a417
merge with stable
author | Martin von Zweigbergk <martinvonz@google.com> |
---|---|
date | Tue, 19 Feb 2019 21:55:05 -0800 |
parents | a91615b71679 (diff) ffbf742bfe1f (current diff) |
children | 1c4d6ab2ecb8 |
files | |
diffstat | 308 files changed, 7633 insertions(+), 3544 deletions(-) [+] |
line wrap: on
line diff
--- a/contrib/bdiff-torture.py Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/bdiff-torture.py Tue Feb 19 21:55:05 2019 -0800 @@ -25,7 +25,7 @@ try: test1(a, b) - except Exception as inst: + except Exception: reductions += 1 tries = 0 a = a2
--- a/contrib/check-code.py Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/check-code.py Tue Feb 19 21:55:05 2019 -0800 @@ -414,7 +414,7 @@ txtpats = [ [ - ('\s$', 'trailing whitespace'), + (r'\s$', 'trailing whitespace'), ('.. note::[ \n][^\n]', 'add two newlines after note::') ], [] @@ -610,7 +610,7 @@ try: with opentext(f) as fp: try: - pre = post = fp.read() + pre = fp.read() except UnicodeDecodeError as e: print("%s while reading %s" % (e, f)) return result
--- a/contrib/check-commit Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/check-commit Tue Feb 19 21:55:05 2019 -0800 @@ -47,7 +47,7 @@ "adds a function with foo_bar naming"), ] -word = re.compile('\S') +word = re.compile(r'\S') def nonempty(first, second): if word.search(first): return first
--- a/contrib/check-config.py Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/check-config.py Tue Feb 19 21:55:05 2019 -0800 @@ -25,7 +25,7 @@ (?:default=)?(?P<default>\S+?))? \)''', re.VERBOSE | re.MULTILINE) -configwithre = re.compile(b''' +configwithre = re.compile(br''' ui\.config(?P<ctype>with)\( # First argument is callback function. This doesn't parse robustly # if it is e.g. a function call. @@ -61,10 +61,10 @@ linenum += 1 # check topic-like bits - m = re.match(b'\s*``(\S+)``', l) + m = re.match(br'\s*``(\S+)``', l) if m: prevname = m.group(1) - if re.match(b'^\s*-+$', l): + if re.match(br'^\s*-+$', l): sect = prevname prevname = b''
--- a/contrib/check-py3-compat.py Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/check-py3-compat.py Tue Feb 19 21:55:05 2019 -0800 @@ -14,6 +14,7 @@ import os import sys import traceback +import warnings def check_compat_py2(f): """Check Python 3 compatibility for a file with Python 2""" @@ -45,7 +46,7 @@ content = fh.read() try: - ast.parse(content) + ast.parse(content, filename=f) except SyntaxError as e: print('%s: invalid syntax: %s' % (f, e)) return @@ -91,6 +92,11 @@ fn = check_compat_py3 for f in sys.argv[1:]: - fn(f) + with warnings.catch_warnings(record=True) as warns: + fn(f) + + for w in warns: + print(warnings.formatwarning(w.message, w.category, + w.filename, w.lineno).rstrip()) sys.exit(0)
--- a/contrib/chg/hgclient.c Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/chg/hgclient.c Tue Feb 19 21:55:05 2019 -0800 @@ -84,8 +84,9 @@ static void enlargecontext(context_t *ctx, size_t newsize) { - if (newsize <= ctx->maxdatasize) + if (newsize <= ctx->maxdatasize) { return; + } newsize = defaultdatasize * ((newsize + defaultdatasize - 1) / defaultdatasize); @@ -117,22 +118,25 @@ uint32_t datasize_n; rsize = recv(hgc->sockfd, &datasize_n, sizeof(datasize_n), 0); - if (rsize != sizeof(datasize_n)) + if (rsize != sizeof(datasize_n)) { abortmsg("failed to read data size"); + } /* datasize denotes the maximum size to write if input request */ hgc->ctx.datasize = ntohl(datasize_n); enlargecontext(&hgc->ctx, hgc->ctx.datasize); - if (isupper(hgc->ctx.ch) && hgc->ctx.ch != 'S') + if (isupper(hgc->ctx.ch) && hgc->ctx.ch != 'S') { return; /* assumes input request */ + } size_t cursize = 0; while (cursize < hgc->ctx.datasize) { rsize = recv(hgc->sockfd, hgc->ctx.data + cursize, hgc->ctx.datasize - cursize, 0); - if (rsize < 1) + if (rsize < 1) { abortmsg("failed to read data block"); + } cursize += rsize; } } @@ -143,8 +147,9 @@ const char *const endp = p + datasize; while (p < endp) { ssize_t r = send(sockfd, p, endp - p, 0); - if (r < 0) + if (r < 0) { abortmsgerrno("cannot communicate"); + } p += r; } } @@ -186,8 +191,9 @@ ctx->datasize += n; } - if (ctx->datasize > 0) + if (ctx->datasize > 0) { --ctx->datasize; /* strip last '\0' */ + } } /* Extract '\0'-separated list of args to new buffer, terminated by NULL */ @@ -205,8 +211,9 @@ args[nargs] = s; nargs++; s = memchr(s, '\0', e - s); - if (!s) + if (!s) { break; + } s++; } args[nargs] = NULL; @@ -225,8 +232,9 @@ static void handlereadlinerequest(hgclient_t *hgc) { context_t *ctx = &hgc->ctx; - if (!fgets(ctx->data, ctx->datasize, stdin)) + if (!fgets(ctx->data, ctx->datasize, stdin)) { ctx->data[0] = '\0'; + } ctx->datasize = strlen(ctx->data); writeblock(hgc); } @@ -239,8 +247,9 @@ ctx->data[ctx->datasize] = '\0'; 
/* terminate last string */ const char **args = unpackcmdargsnul(ctx); - if (!args[0] || !args[1] || !args[2]) + if (!args[0] || !args[1] || !args[2]) { abortmsg("missing type or command or cwd in system request"); + } if (strcmp(args[0], "system") == 0) { debugmsg("run '%s' at '%s'", args[1], args[2]); int32_t r = runshellcmd(args[1], args + 3, args[2]); @@ -252,8 +261,9 @@ writeblock(hgc); } else if (strcmp(args[0], "pager") == 0) { setuppager(args[1], args + 3); - if (hgc->capflags & CAP_ATTACHIO) + if (hgc->capflags & CAP_ATTACHIO) { attachio(hgc); + } /* unblock the server */ static const char emptycmd[] = "\n"; sendall(hgc->sockfd, emptycmd, sizeof(emptycmd) - 1); @@ -296,9 +306,10 @@ handlesystemrequest(hgc); break; default: - if (isupper(ctx->ch)) + if (isupper(ctx->ch)) { abortmsg("cannot handle response (ch = %c)", ctx->ch); + } } } } @@ -308,8 +319,9 @@ unsigned int flags = 0; while (s < e) { const char *t = strchr(s, ' '); - if (!t || t > e) + if (!t || t > e) { t = e; + } const cappair_t *cap; for (cap = captable; cap->flag; ++cap) { size_t n = t - s; @@ -346,11 +358,13 @@ const char *const dataend = ctx->data + ctx->datasize; while (s < dataend) { const char *t = strchr(s, ':'); - if (!t || t[1] != ' ') + if (!t || t[1] != ' ') { break; + } const char *u = strchr(t + 2, '\n'); - if (!u) + if (!u) { u = dataend; + } if (strncmp(s, "capabilities:", t - s + 1) == 0) { hgc->capflags = parsecapabilities(t + 2, u); } else if (strncmp(s, "pgid:", t - s + 1) == 0) { @@ -367,8 +381,9 @@ { int r = snprintf(hgc->ctx.data, hgc->ctx.maxdatasize, "chg[worker/%d]", (int)getpid()); - if (r < 0 || (size_t)r >= hgc->ctx.maxdatasize) + if (r < 0 || (size_t)r >= hgc->ctx.maxdatasize) { abortmsg("insufficient buffer to write procname (r = %d)", r); + } hgc->ctx.datasize = (size_t)r; writeblockrequest(hgc, "setprocname"); } @@ -380,8 +395,9 @@ sendall(hgc->sockfd, chcmd, sizeof(chcmd) - 1); readchannel(hgc); context_t *ctx = &hgc->ctx; - if (ctx->ch != 'I') + if (ctx->ch 
!= 'I') { abortmsg("unexpected response for attachio (ch = %c)", ctx->ch); + } static const int fds[3] = {STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO}; struct msghdr msgh; @@ -399,23 +415,27 @@ memcpy(CMSG_DATA(cmsg), fds, sizeof(fds)); msgh.msg_controllen = cmsg->cmsg_len; ssize_t r = sendmsg(hgc->sockfd, &msgh, 0); - if (r < 0) + if (r < 0) { abortmsgerrno("sendmsg failed"); + } handleresponse(hgc); int32_t n; - if (ctx->datasize != sizeof(n)) + if (ctx->datasize != sizeof(n)) { abortmsg("unexpected size of attachio result"); + } memcpy(&n, ctx->data, sizeof(n)); n = ntohl(n); - if (n != sizeof(fds) / sizeof(fds[0])) + if (n != sizeof(fds) / sizeof(fds[0])) { abortmsg("failed to send fds (n = %d)", n); + } } static void chdirtocwd(hgclient_t *hgc) { - if (!getcwd(hgc->ctx.data, hgc->ctx.maxdatasize)) + if (!getcwd(hgc->ctx.data, hgc->ctx.maxdatasize)) { abortmsgerrno("failed to getcwd"); + } hgc->ctx.datasize = strlen(hgc->ctx.data); writeblockrequest(hgc, "chdir"); } @@ -440,8 +460,9 @@ hgclient_t *hgc_open(const char *sockname) { int fd = socket(AF_UNIX, SOCK_STREAM, 0); - if (fd < 0) + if (fd < 0) { abortmsgerrno("cannot create socket"); + } /* don't keep fd on fork(), so that it can be closed when the parent * process get terminated. 
*/ @@ -456,34 +477,39 @@ { const char *split = strrchr(sockname, '/'); if (split && split != sockname) { - if (split[1] == '\0') + if (split[1] == '\0') { abortmsg("sockname cannot end with a slash"); + } size_t len = split - sockname; char sockdir[len + 1]; memcpy(sockdir, sockname, len); sockdir[len] = '\0'; bakfd = open(".", O_DIRECTORY); - if (bakfd == -1) + if (bakfd == -1) { abortmsgerrno("cannot open cwd"); + } int r = chdir(sockdir); - if (r != 0) + if (r != 0) { abortmsgerrno("cannot chdir %s", sockdir); + } basename = split + 1; } } - if (strlen(basename) >= sizeof(addr.sun_path)) + if (strlen(basename) >= sizeof(addr.sun_path)) { abortmsg("sockname is too long: %s", basename); + } strncpy(addr.sun_path, basename, sizeof(addr.sun_path)); addr.sun_path[sizeof(addr.sun_path) - 1] = '\0'; /* real connect */ int r = connect(fd, (struct sockaddr *)&addr, sizeof(addr)); if (r < 0) { - if (errno != ENOENT && errno != ECONNREFUSED) + if (errno != ENOENT && errno != ECONNREFUSED) { abortmsgerrno("cannot connect to %s", sockname); + } } if (bakfd != -1) { fchdirx(bakfd); @@ -501,16 +527,21 @@ initcontext(&hgc->ctx); readhello(hgc); - if (!(hgc->capflags & CAP_RUNCOMMAND)) + if (!(hgc->capflags & CAP_RUNCOMMAND)) { abortmsg("insufficient capability: runcommand"); - if (hgc->capflags & CAP_SETPROCNAME) + } + if (hgc->capflags & CAP_SETPROCNAME) { updateprocname(hgc); - if (hgc->capflags & CAP_ATTACHIO) + } + if (hgc->capflags & CAP_ATTACHIO) { attachio(hgc); - if (hgc->capflags & CAP_CHDIR) + } + if (hgc->capflags & CAP_CHDIR) { chdirtocwd(hgc); - if (hgc->capflags & CAP_SETUMASK2) + } + if (hgc->capflags & CAP_SETUMASK2) { forwardumask(hgc); + } return hgc; } @@ -555,16 +586,18 @@ size_t argsize) { assert(hgc); - if (!(hgc->capflags & CAP_VALIDATE)) + if (!(hgc->capflags & CAP_VALIDATE)) { return NULL; + } packcmdargs(&hgc->ctx, args, argsize); writeblockrequest(hgc, "validate"); handleresponse(hgc); /* the server returns '\0' if it can handle our request */ - if 
(hgc->ctx.datasize <= 1) + if (hgc->ctx.datasize <= 1) { return NULL; + } /* make sure the buffer is '\0' terminated */ enlargecontext(&hgc->ctx, hgc->ctx.datasize + 1); @@ -599,8 +632,9 @@ void hgc_attachio(hgclient_t *hgc) { assert(hgc); - if (!(hgc->capflags & CAP_ATTACHIO)) + if (!(hgc->capflags & CAP_ATTACHIO)) { return; + } attachio(hgc); } @@ -613,8 +647,9 @@ void hgc_setenv(hgclient_t *hgc, const char *const envp[]) { assert(hgc && envp); - if (!(hgc->capflags & CAP_SETENV)) + if (!(hgc->capflags & CAP_SETENV)) { return; + } packcmdargs(&hgc->ctx, envp, /*argsize*/ -1); writeblockrequest(hgc, "setenv"); }
--- a/contrib/chg/procutil.c Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/chg/procutil.c Tue Feb 19 21:55:05 2019 -0800 @@ -25,8 +25,9 @@ static void forwardsignal(int sig) { assert(peerpid > 0); - if (kill(peerpid, sig) < 0) + if (kill(peerpid, sig) < 0) { abortmsgerrno("cannot kill %d", peerpid); + } debugmsg("forward signal %d", sig); } @@ -34,8 +35,9 @@ { /* prefer kill(-pgid, sig), fallback to pid if pgid is invalid */ pid_t killpid = peerpgid > 1 ? -peerpgid : peerpid; - if (kill(killpid, sig) < 0) + if (kill(killpid, sig) < 0) { abortmsgerrno("cannot kill %d", killpid); + } debugmsg("forward signal %d to %d", sig, killpid); } @@ -43,28 +45,36 @@ { sigset_t unblockset, oldset; struct sigaction sa, oldsa; - if (sigemptyset(&unblockset) < 0) + if (sigemptyset(&unblockset) < 0) { goto error; - if (sigaddset(&unblockset, sig) < 0) + } + if (sigaddset(&unblockset, sig) < 0) { goto error; + } memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { goto error; + } forwardsignal(sig); - if (raise(sig) < 0) /* resend to self */ + if (raise(sig) < 0) { /* resend to self */ goto error; - if (sigaction(sig, &sa, &oldsa) < 0) + } + if (sigaction(sig, &sa, &oldsa) < 0) { goto error; - if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) + } + if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) { goto error; + } /* resent signal will be handled before sigprocmask() returns */ - if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) + if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) { goto error; - if (sigaction(sig, &oldsa, NULL) < 0) + } + if (sigaction(sig, &oldsa, NULL) < 0) { goto error; + } return; error: @@ -73,19 +83,22 @@ static void handlechildsignal(int sig UNUSED_) { - if (peerpid == 0 || pagerpid == 0) + if (peerpid == 0 || pagerpid == 0) { return; + } /* if pager exits, notify the server with SIGPIPE immediately. 
* otherwise the server won't get SIGPIPE if it does not write * anything. (issue5278) */ - if (waitpid(pagerpid, NULL, WNOHANG) == pagerpid) + if (waitpid(pagerpid, NULL, WNOHANG) == pagerpid) { kill(peerpid, SIGPIPE); + } } void setupsignalhandler(pid_t pid, pid_t pgid) { - if (pid <= 0) + if (pid <= 0) { return; + } peerpid = pid; peerpgid = (pgid <= 1 ? 0 : pgid); @@ -98,42 +111,52 @@ * - SIGINT: usually generated by the terminal */ sa.sa_handler = forwardsignaltogroup; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { + goto error; + } + if (sigaction(SIGHUP, &sa, NULL) < 0) { goto error; - if (sigaction(SIGHUP, &sa, NULL) < 0) + } + if (sigaction(SIGINT, &sa, NULL) < 0) { goto error; - if (sigaction(SIGINT, &sa, NULL) < 0) - goto error; + } /* terminate frontend by double SIGTERM in case of server freeze */ sa.sa_handler = forwardsignal; sa.sa_flags |= SA_RESETHAND; - if (sigaction(SIGTERM, &sa, NULL) < 0) + if (sigaction(SIGTERM, &sa, NULL) < 0) { goto error; + } /* notify the worker about window resize events */ sa.sa_flags = SA_RESTART; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + if (sigaction(SIGWINCH, &sa, NULL) < 0) { goto error; + } /* forward user-defined signals */ - if (sigaction(SIGUSR1, &sa, NULL) < 0) + if (sigaction(SIGUSR1, &sa, NULL) < 0) { goto error; - if (sigaction(SIGUSR2, &sa, NULL) < 0) + } + if (sigaction(SIGUSR2, &sa, NULL) < 0) { goto error; + } /* propagate job control requests to worker */ sa.sa_handler = forwardsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGCONT, &sa, NULL) < 0) + if (sigaction(SIGCONT, &sa, NULL) < 0) { goto error; + } sa.sa_handler = handlestopsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + if (sigaction(SIGTSTP, &sa, NULL) < 0) { goto error; + } /* get notified when pager exits */ sa.sa_handler = handlechildsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGCHLD, &sa, NULL) < 0) + if (sigaction(SIGCHLD, &sa, NULL) < 0) { goto 
error; + } return; @@ -147,26 +170,34 @@ memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { goto error; + } - if (sigaction(SIGHUP, &sa, NULL) < 0) + if (sigaction(SIGHUP, &sa, NULL) < 0) { goto error; - if (sigaction(SIGTERM, &sa, NULL) < 0) + } + if (sigaction(SIGTERM, &sa, NULL) < 0) { goto error; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + } + if (sigaction(SIGWINCH, &sa, NULL) < 0) { goto error; - if (sigaction(SIGCONT, &sa, NULL) < 0) + } + if (sigaction(SIGCONT, &sa, NULL) < 0) { goto error; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + } + if (sigaction(SIGTSTP, &sa, NULL) < 0) { goto error; - if (sigaction(SIGCHLD, &sa, NULL) < 0) + } + if (sigaction(SIGCHLD, &sa, NULL) < 0) { goto error; + } /* ignore Ctrl+C while shutting down to make pager exits cleanly */ sa.sa_handler = SIG_IGN; - if (sigaction(SIGINT, &sa, NULL) < 0) + if (sigaction(SIGINT, &sa, NULL) < 0) { goto error; + } peerpid = 0; return; @@ -180,22 +211,27 @@ pid_t setuppager(const char *pagercmd, const char *envp[]) { assert(pagerpid == 0); - if (!pagercmd) + if (!pagercmd) { return 0; + } int pipefds[2]; - if (pipe(pipefds) < 0) + if (pipe(pipefds) < 0) { return 0; + } pid_t pid = fork(); - if (pid < 0) + if (pid < 0) { goto error; + } if (pid > 0) { close(pipefds[0]); - if (dup2(pipefds[1], fileno(stdout)) < 0) + if (dup2(pipefds[1], fileno(stdout)) < 0) { goto error; + } if (isatty(fileno(stderr))) { - if (dup2(pipefds[1], fileno(stderr)) < 0) + if (dup2(pipefds[1], fileno(stderr)) < 0) { goto error; + } } close(pipefds[1]); pagerpid = pid; @@ -222,16 +258,18 @@ void waitpager(void) { - if (pagerpid == 0) + if (pagerpid == 0) { return; + } /* close output streams to notify the pager its input ends */ fclose(stdout); fclose(stderr); while (1) { pid_t ret = waitpid(pagerpid, NULL, 0); - if (ret == -1 && errno == EINTR) + if (ret == -1 && errno == EINTR) { continue; + } break; } }
--- a/contrib/chg/util.c Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/chg/util.c Tue Feb 19 21:55:05 2019 -0800 @@ -25,8 +25,9 @@ static inline void fsetcolor(FILE *fp, const char *code) { - if (!colorenabled) + if (!colorenabled) { return; + } fprintf(fp, "\033[%sm", code); } @@ -35,8 +36,9 @@ fsetcolor(stderr, "1;31"); fputs("chg: abort: ", stderr); vfprintf(stderr, fmt, args); - if (no != 0) + if (no != 0) { fprintf(stderr, " (errno = %d, %s)", no, strerror(no)); + } fsetcolor(stderr, ""); fputc('\n', stderr); exit(255); @@ -82,8 +84,9 @@ void debugmsg(const char *fmt, ...) { - if (!debugmsgenabled) + if (!debugmsgenabled) { return; + } va_list args; va_start(args, fmt); @@ -98,32 +101,37 @@ void fchdirx(int dirfd) { int r = fchdir(dirfd); - if (r == -1) + if (r == -1) { abortmsgerrno("failed to fchdir"); + } } void fsetcloexec(int fd) { int flags = fcntl(fd, F_GETFD); - if (flags < 0) + if (flags < 0) { abortmsgerrno("cannot get flags of fd %d", fd); - if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0) + } + if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0) { abortmsgerrno("cannot set flags of fd %d", fd); + } } void *mallocx(size_t size) { void *result = malloc(size); - if (!result) + if (!result) { abortmsg("failed to malloc"); + } return result; } void *reallocx(void *ptr, size_t size) { void *result = realloc(ptr, size); - if (!result) + if (!result) { abortmsg("failed to realloc"); + } return result; } @@ -144,30 +152,37 @@ memset(&newsa, 0, sizeof(newsa)); newsa.sa_handler = SIG_IGN; newsa.sa_flags = 0; - if (sigemptyset(&newsa.sa_mask) < 0) + if (sigemptyset(&newsa.sa_mask) < 0) { goto done; - if (sigaction(SIGINT, &newsa, &oldsaint) < 0) + } + if (sigaction(SIGINT, &newsa, &oldsaint) < 0) { goto done; + } doneflags |= F_SIGINT; - if (sigaction(SIGQUIT, &newsa, &oldsaquit) < 0) + if (sigaction(SIGQUIT, &newsa, &oldsaquit) < 0) { goto done; + } doneflags |= F_SIGQUIT; - if (sigaddset(&newsa.sa_mask, SIGCHLD) < 0) + if (sigaddset(&newsa.sa_mask, SIGCHLD) < 0) { 
goto done; - if (sigprocmask(SIG_BLOCK, &newsa.sa_mask, &oldmask) < 0) + } + if (sigprocmask(SIG_BLOCK, &newsa.sa_mask, &oldmask) < 0) { goto done; + } doneflags |= F_SIGMASK; pid_t pid = fork(); - if (pid < 0) + if (pid < 0) { goto done; + } if (pid == 0) { sigaction(SIGINT, &oldsaint, NULL); sigaction(SIGQUIT, &oldsaquit, NULL); sigprocmask(SIG_SETMASK, &oldmask, NULL); - if (cwd && chdir(cwd) < 0) + if (cwd && chdir(cwd) < 0) { _exit(127); + } const char *argv[] = {"sh", "-c", cmd, NULL}; if (envp) { execve("/bin/sh", (char **)argv, (char **)envp); @@ -176,25 +191,32 @@ } _exit(127); } else { - if (waitpid(pid, &status, 0) < 0) + if (waitpid(pid, &status, 0) < 0) { goto done; + } doneflags |= F_WAITPID; } done: - if (doneflags & F_SIGINT) + if (doneflags & F_SIGINT) { sigaction(SIGINT, &oldsaint, NULL); - if (doneflags & F_SIGQUIT) + } + if (doneflags & F_SIGQUIT) { sigaction(SIGQUIT, &oldsaquit, NULL); - if (doneflags & F_SIGMASK) + } + if (doneflags & F_SIGMASK) { sigprocmask(SIG_SETMASK, &oldmask, NULL); + } /* no way to report other errors, use 127 (= shell termination) */ - if (!(doneflags & F_WAITPID)) + if (!(doneflags & F_WAITPID)) { return 127; - if (WIFEXITED(status)) + } + if (WIFEXITED(status)) { return WEXITSTATUS(status); - if (WIFSIGNALED(status)) + } + if (WIFSIGNALED(status)) { return -WTERMSIG(status); + } return 127; }
--- a/contrib/fuzz/manifest.cc Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/fuzz/manifest.cc Tue Feb 19 21:55:05 2019 -0800 @@ -20,11 +20,19 @@ lm = lazymanifest(mdata) # iterate the whole thing, which causes the code to fully parse # every line in the manifest - list(lm.iterentries()) + for e, _, _ in lm.iterentries(): + # also exercise __getitem__ et al + lm[e] + e in lm + (e + 'nope') in lm lm[b'xyzzy'] = (b'\0' * 20, 'x') # do an insert, text should change assert lm.text() != mdata, "insert should change text and didn't: %r %r" % (lm.text(), mdata) + cloned = lm.filtercopy(lambda x: x != 'xyzzy') + assert cloned.text() == mdata, 'cloned text should equal mdata' + cloned.diff(lm) del lm[b'xyzzy'] + cloned.diff(lm) # should be back to the same assert lm.text() == mdata, "delete should have restored text but didn't: %r %r" % (lm.text(), mdata) except Exception as e: @@ -39,6 +47,11 @@ int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) { + // Don't allow fuzzer inputs larger than 100k, since we'll just bog + // down and not accomplish much. + if (Size > 100000) { + return 0; + } PyObject *mtext = PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size); PyObject *locals = PyDict_New();
--- a/contrib/fuzz/revlog.cc Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/fuzz/revlog.cc Tue Feb 19 21:55:05 2019 -0800 @@ -19,6 +19,11 @@ for inline in (True, False): try: index, cache = parse_index2(data, inline) + index.slicechunktodensity(list(range(len(index))), 0.5, 262144) + for rev in range(len(index)): + node = index[rev][7] + partial = index.shortest(node) + index.partialmatch(node[:partial]) except Exception as e: pass # uncomment this print if you're editing this Python code @@ -31,6 +36,11 @@ int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) { + // Don't allow fuzzer inputs larger than 60k, since we'll just bog + // down and not accomplish much. + if (Size > 60000) { + return 0; + } PyObject *text = PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size); PyObject *locals = PyDict_New();
--- a/contrib/perf.py Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/perf.py Tue Feb 19 21:55:05 2019 -0800 @@ -519,7 +519,11 @@ repo.ui.quiet = True matcher = scmutil.match(repo[None]) opts[b'dry_run'] = True - timer(lambda: scmutil.addremove(repo, matcher, b"", opts)) + if b'uipathfn' in getargspec(scmutil.addremove).args: + uipathfn = scmutil.getuipathfn(repo) + timer(lambda: scmutil.addremove(repo, matcher, b"", uipathfn, opts)) + else: + timer(lambda: scmutil.addremove(repo, matcher, b"", opts)) finally: repo.ui.quiet = oldquiet fm.end() @@ -535,13 +539,15 @@ @command(b'perfheads', formatteropts) def perfheads(ui, repo, **opts): + """benchmark the computation of a changelog heads""" opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) cl = repo.changelog + def s(): + clearcaches(cl) def d(): len(cl.headrevs()) - clearcaches(cl) - timer(d) + timer(d, setup=s) fm.end() @command(b'perftags', formatteropts+ @@ -911,9 +917,7 @@ raise error.Abort((b'default repository not configured!'), hint=(b"see 'hg help config.paths'")) dest = path.pushloc or path.loc - branches = (path.branch, opts.get(b'branch') or []) ui.status((b'analysing phase of %s\n') % util.hidepassword(dest)) - revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev')) other = hg.peer(repo, opts, dest) # easier to perform discovery through the operation @@ -1014,18 +1018,44 @@ fm.end() @command(b'perfindex', [ - (b'', b'rev', b'', b'revision to be looked up (default tip)'), + (b'', b'rev', [], b'revision to be looked up (default tip)'), + (b'', b'no-lookup', None, b'do not revision lookup post creation'), ] + formatteropts) def perfindex(ui, repo, **opts): + """benchmark index creation time followed by a lookup + + The default is to look `tip` up. Depending on the index implementation, + the revision looked up can matters. For example, an implementation + scanning the index will have a faster lookup time for `--rev tip` than for + `--rev 0`. 
The number of looked up revisions and their order can also + matters. + + Example of useful set to test: + * tip + * 0 + * -10: + * :10 + * -10: + :10 + * :10: + -10: + * -10000: + * -10000: + 0 + + It is not currently possible to check for lookup of a missing node. For + deeper lookup benchmarking, checkout the `perfnodemap` command.""" import mercurial.revlog opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg - if opts[b'rev'] is None: - n = repo[b"tip"].node() + if opts[b'no_lookup']: + if opts['rev']: + raise error.Abort('--no-lookup and --rev are mutually exclusive') + nodes = [] + elif not opts[b'rev']: + nodes = [repo[b"tip"].node()] else: - rev = scmutil.revsingle(repo, opts[b'rev']) - n = repo[rev].node() + revs = scmutil.revrange(repo, opts[b'rev']) + cl = repo.changelog + nodes = [cl.node(r) for r in revs] unfi = repo.unfiltered() # find the filecache func directly @@ -1036,7 +1066,67 @@ clearchangelog(unfi) def d(): cl = makecl(unfi) - cl.rev(n) + for n in nodes: + cl.rev(n) + timer(d, setup=setup) + fm.end() + +@command(b'perfnodemap', [ + (b'', b'rev', [], b'revision to be looked up (default tip)'), + (b'', b'clear-caches', True, b'clear revlog cache between calls'), + ] + formatteropts) +def perfnodemap(ui, repo, **opts): + """benchmark the time necessary to look up revision from a cold nodemap + + Depending on the implementation, the amount and order of revision we look + up can varies. Example of useful set to test: + * tip + * 0 + * -10: + * :10 + * -10: + :10 + * :10: + -10: + * -10000: + * -10000: + 0 + + The command currently focus on valid binary lookup. Benchmarking for + hexlookup, prefix lookup and missing lookup would also be valuable. 
+ """ + import mercurial.revlog + opts = _byteskwargs(opts) + timer, fm = gettimer(ui, opts) + mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg + + unfi = repo.unfiltered() + clearcaches = opts['clear_caches'] + # find the filecache func directly + # This avoid polluting the benchmark with the filecache logic + makecl = unfi.__class__.changelog.func + if not opts[b'rev']: + raise error.Abort('use --rev to specify revisions to look up') + revs = scmutil.revrange(repo, opts[b'rev']) + cl = repo.changelog + nodes = [cl.node(r) for r in revs] + + # use a list to pass reference to a nodemap from one closure to the next + nodeget = [None] + def setnodeget(): + # probably not necessary, but for good measure + clearchangelog(unfi) + nodeget[0] = makecl(unfi).nodemap.get + + def d(): + get = nodeget[0] + for n in nodes: + get(n) + + setup = None + if clearcaches: + def setup(): + setnodeget() + else: + setnodeget() + d() # prewarm the data structure timer(d, setup=setup) fm.end() @@ -2290,13 +2380,18 @@ view = repo else: view = repo.filtered(filtername) + if util.safehasattr(view._branchcaches, '_per_filter'): + filtered = view._branchcaches._per_filter + else: + # older versions + filtered = view._branchcaches def d(): if clear_revbranch: repo.revbranchcache()._clear() if full: view._branchcaches.clear() else: - view._branchcaches.pop(filtername, None) + filtered.pop(filtername, None) view.branchmap() return d # add filter in smaller subset to bigger subset @@ -2323,10 +2418,15 @@ # add unfiltered allfilters.append(None) - branchcacheread = safeattrsetter(branchmap, b'read') + if util.safehasattr(branchmap.branchcache, 'fromfile'): + branchcacheread = safeattrsetter(branchmap.branchcache, b'fromfile') + branchcacheread.set(classmethod(lambda *args: None)) + else: + # older versions + branchcacheread = safeattrsetter(branchmap, b'read') + branchcacheread.set(lambda *args: None) branchcachewrite = safeattrsetter(branchmap.branchcache, b'write') - 
branchcacheread.set(lambda repo: None) - branchcachewrite.set(lambda bc, repo: None) + branchcachewrite.set(lambda *args: None) try: for name in allfilters: printname = name @@ -2470,9 +2570,15 @@ repo.branchmap() # make sure we have a relevant, up to date branchmap + try: + fromfile = branchmap.branchcache.fromfile + except AttributeError: + # older versions + fromfile = branchmap.read + currentfilter = filter # try once without timer, the filter may not be cached - while branchmap.read(repo) is None: + while fromfile(repo) is None: currentfilter = subsettable.get(currentfilter) if currentfilter is None: raise error.Abort(b'No branchmap cached for %s repo' @@ -2483,7 +2589,7 @@ if clearrevlogs: clearchangelog(repo) def bench(): - branchmap.read(repo) + fromfile(repo) timer(bench, setup=setup) fm.end()
--- a/contrib/python3-whitelist Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/python3-whitelist Tue Feb 19 21:55:05 2019 -0800 @@ -5,6 +5,7 @@ test-absorb-rename.t test-absorb-strip.t test-absorb.t +test-acl.t test-add.t test-addremove-similar.t test-addremove.t @@ -14,6 +15,7 @@ test-ancestor.py test-annotate.py test-annotate.t +test-arbitraryfilectx.t test-archive-symlinks.t test-archive.t test-atomictempfile.py @@ -25,6 +27,7 @@ test-bad-extension.t test-bad-pull.t test-basic.t +test-batching.py test-bdiff.py test-bheads.t test-bisect.t @@ -42,6 +45,7 @@ test-branch-option.t test-branch-tag-confict.t test-branches.t +test-bugzilla.t test-bundle-phases.t test-bundle-r.t test-bundle-type.t @@ -54,7 +58,9 @@ test-bundle2-remote-changegroup.t test-cache-abuse.t test-cappedreader.py +test-casecollision-merge.t test-casecollision.t +test-casefolding.t test-cat.t test-cbor.py test-censor.t @@ -83,6 +89,7 @@ test-close-head.t test-commandserver.t test-commit-amend.t +test-commit-interactive-curses.t test-commit-interactive.t test-commit-multiple.t test-commit-unresolved.t @@ -111,11 +118,15 @@ test-convert-cvsnt-mergepoints.t test-convert-datesort.t test-convert-filemap.t +test-convert-git.t test-convert-hg-sink.t test-convert-hg-source.t test-convert-hg-startrev.t +test-convert-mtn.t test-convert-splicemap.t +test-convert-svn-sink.t test-convert-tagsbranch-topology.t +test-convert.t test-copy-move-merge.t test-copy.t test-copytrace-heuristics.t @@ -127,6 +138,7 @@ test-debugindexdot.t test-debugrename.t test-default-push.t +test-demandimport.py test-diff-antipatience.t test-diff-binary-file.t test-diff-change.t @@ -149,6 +161,7 @@ test-dirstate-race.t test-dirstate.t test-dispatch.py +test-dispatch.t test-doctest.py test-double-merge.t test-drawdag.t @@ -159,6 +172,7 @@ test-empty-group.t test-empty.t test-encode.t +test-encoding-align.t test-encoding-func.py test-encoding-textwrap.t test-encoding.t @@ -198,6 +212,7 @@ test-extdata.t test-extdiff.t 
test-extension-timing.t +test-extension.t test-extensions-afterloaded.t test-extensions-wrapfunction.py test-extra-filelog-entry.t @@ -217,6 +232,7 @@ test-fileset.t test-fix-topology.t test-fix.t +test-flagprocessor.t test-flags.t test-fncache.t test-gendoc-da.t @@ -235,6 +251,7 @@ test-generaldelta.t test-getbundle.t test-git-export.t +test-githelp.t test-globalopts.t test-glog-beautifygraph.t test-glog-topological.t @@ -251,17 +268,24 @@ test-hgk.t test-hgrc.t test-hgweb-annotate-whitespace.t +test-hgweb-auth.py test-hgweb-bundle.t +test-hgweb-commands.t test-hgweb-csp.t test-hgweb-descend-empties.t test-hgweb-diffs.t test-hgweb-empty.t test-hgweb-filelog.t +test-hgweb-json.t +test-hgweb-no-path-info.t +test-hgweb-no-request-uri.t test-hgweb-non-interactive.t test-hgweb-raw.t test-hgweb-removed.t +test-hgweb-symrev.t test-hgweb.t test-hgwebdir-paths.py +test-hgwebdir.t test-hgwebdirsym.t test-histedit-arguments.t test-histedit-base.t @@ -278,11 +302,16 @@ test-histedit-obsolete.t test-histedit-outgoing.t test-histedit-templates.t +test-http-api-httpv2.t +test-http-api.t +test-http-bad-server.t test-http-branchmap.t test-http-bundle1.t test-http-clone-r.t test-http-permissions.t +test-http-protocol.t test-http.t +test-https.t test-hybridencode.py test-i18n.t test-identify.t @@ -290,6 +319,7 @@ test-import-bypass.t test-import-context.t test-import-eol.t +test-import-git.t test-import-merge.t test-import-unknown.t test-import.t @@ -300,6 +330,7 @@ test-infinitepush.t test-inherit-mode.t test-init.t +test-install.t test-issue1089.t test-issue1102.t test-issue1175.t @@ -335,11 +366,13 @@ test-lfs-bundle.t test-lfs-largefiles.t test-lfs-pointer.py +test-lfs-test-server.t test-lfs.t test-linelog.py test-linerange.py test-locate.t test-lock-badness.t +test-lock.py test-log-exthook.t test-log-linerange.t test-log.t @@ -381,11 +414,14 @@ test-merge9.t test-minifileset.py test-minirst.py +test-missing-capability.t +test-mq-eol.t test-mq-git.t test-mq-guards.t 
test-mq-header-date.t test-mq-header-from.t test-mq-merge.t +test-mq-missingfiles.t test-mq-pull-from-bundle.t test-mq-qclone-http.t test-mq-qdelete.t @@ -393,6 +429,7 @@ test-mq-qfold.t test-mq-qgoto.t test-mq-qimport-fail-cleanup.t +test-mq-qimport.t test-mq-qnew.t test-mq-qpush-exact.t test-mq-qpush-fail.t @@ -403,6 +440,7 @@ test-mq-qrename.t test-mq-qsave.t test-mq-safety.t +test-mq-subrepo-svn.t test-mq-subrepo.t test-mq-symlinks.t test-mq.t @@ -438,8 +476,10 @@ test-narrow.t test-nested-repo.t test-newbranch.t +test-newcgi.t test-newercgi.t test-nointerrupt.t +test-notify-changegroup.t test-obshistory.t test-obsmarker-template.t test-obsmarkers-effectflag.t @@ -451,11 +491,13 @@ test-obsolete-divergent.t test-obsolete-tag-cache.t test-obsolete.t +test-oldcgi.t test-origbackup-conflict.t test-pager-legacy.t test-pager.t test-parents.t test-parse-date.t +test-parseindex.t test-parseindex2.py test-patch-offset.t test-patch.t @@ -474,6 +516,7 @@ test-progress.t test-propertycache.py test-pull-branch.t +test-pull-bundle.t test-pull-http.t test-pull-permission.t test-pull-pull-corruption.t @@ -557,7 +600,11 @@ test-remotefilelog-cacheprocess.t test-remotefilelog-clone-tree.t test-remotefilelog-clone.t +test-remotefilelog-corrupt-cache.t +test-remotefilelog-datapack.py +test-remotefilelog-gc.t test-remotefilelog-gcrepack.t +test-remotefilelog-histpack.py test-remotefilelog-http.t test-remotefilelog-keepset.t test-remotefilelog-local.t @@ -567,6 +614,8 @@ test-remotefilelog-permisssions.t test-remotefilelog-prefetch.t test-remotefilelog-pull-noshallow.t +test-remotefilelog-repack-fast.t +test-remotefilelog-repack.t test-remotefilelog-share.t test-remotefilelog-sparse.t test-remotefilelog-tags.t @@ -597,6 +646,7 @@ test-revset-dirstate-parents.t test-revset-legacy-lookup.t test-revset-outgoing.t +test-revset2.t test-rollback.t test-run-tests.py test-run-tests.t @@ -631,6 +681,7 @@ test-ssh.t test-sshserver.py test-stack.t +test-static-http.t test-status-color.t 
test-status-inprocess.py test-status-rev.t @@ -642,10 +693,12 @@ test-strip-cross.t test-strip.t test-subrepo-deep-nested-change.t +test-subrepo-git.t test-subrepo-missing.t test-subrepo-paths.t test-subrepo-recursion.t test-subrepo-relative-path.t +test-subrepo-svn.t test-subrepo.t test-symlink-os-yes-fs-no.py test-symlink-placeholder.t @@ -658,7 +711,10 @@ test-template-map.t test-tools.t test-transplant.t +test-treediscovery-legacy.t +test-treediscovery.t test-treemanifest.t +test-trusted.py test-ui-color.py test-ui-config.py test-ui-verbosity.py @@ -669,6 +725,7 @@ test-unionrepo.t test-unrelated-pull.t test-up-local-change.t +test-update-atomic.t test-update-branches.t test-update-dest.t test-update-issue1456.t @@ -685,19 +742,26 @@ test-walkrepo.py test-websub.t test-win32text.t +test-wireproto-caching.t test-wireproto-clientreactor.py test-wireproto-command-branchmap.t +test-wireproto-command-capabilities.t test-wireproto-command-changesetdata.t test-wireproto-command-filedata.t test-wireproto-command-filesdata.t test-wireproto-command-heads.t +test-wireproto-command-known.t test-wireproto-command-listkeys.t test-wireproto-command-lookup.t test-wireproto-command-manifestdata.t test-wireproto-command-pushkey.t test-wireproto-command-rawstorefiledata.t +test-wireproto-content-redirects.t +test-wireproto-exchangev2.t test-wireproto-framing.py test-wireproto-serverreactor.py test-wireproto.py +test-wireproto.t +test-worker.t test-wsgirequest.py test-xdg.t
--- a/contrib/relnotes Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/relnotes Tue Feb 19 21:55:05 2019 -0800 @@ -14,6 +14,7 @@ r"\(issue": 100, r"\(BC\)": 100, r"\(API\)": 100, + r"\(SEC\)": 100, # core commands, bump up r"(commit|files|log|pull|push|patch|status|tag|summary)(|s|es):": 20, r"(annotate|alias|branch|bookmark|clone|graft|import|verify).*:": 20, @@ -21,6 +22,7 @@ r"(mq|shelve|rebase):": 20, # newsy r": deprecate": 20, + r": new.*(extension|flag|module)": 10, r"( ability|command|feature|option|support)": 10, # experimental r"hg-experimental": 20, @@ -29,22 +31,23 @@ # bug-like? r"(fix|don't break|improve)": 7, r"(not|n't|avoid|fix|prevent).*crash": 10, + r"vulnerab": 10, # boring stuff, bump down r"^contrib": -5, r"debug": -5, r"help": -5, + r"minor": -5, r"(doc|metavar|bundle2|obsolete|obsmarker|rpm|setup|debug\S+:)": -15, r"(check-code|check-commit|check-config|import-checker)": -20, r"(flake8|lintian|pyflakes|pylint)": -20, # cleanups and refactoring - r"(cleanup|white ?space|spelling|quoting)": -20, + r"(clean ?up|white ?space|spelling|quoting)": -20, r"(flatten|dedent|indent|nesting|unnest)": -20, r"(typo|hint|note|comment|TODO|FIXME)": -20, r"(style:|convention|one-?liner)": -20, - r"_": -10, r"(argument|absolute_import|attribute|assignment|mutable)": -15, r"(scope|True|False)": -10, - r"(unused|useless|unnecessary|superfluous|duplicate|deprecated)": -10, + r"(unused|useless|unnecessar|superfluous|duplicate|deprecated)": -10, r"(redundant|pointless|confusing|uninitialized|meaningless|dead)": -10, r": (drop|remove|delete|rip out)": -10, r": (inherit|rename|simplify|naming|inline)": -10, @@ -54,9 +57,12 @@ r": (move|extract) .* (to|into|from|out of)": -20, r": implement ": -5, r": use .* implementation": -20, + r": use .* instead of": -20, + # code + r"_": -10, + r"__": -5, + r"\(\)": -5, r"\S\S\S+\.\S\S\S\S+": -5, - r": use .* instead of": -20, - r"__": -5, # dumb keywords r"\S+/\S+:": -10, r"\S+\.\S+:": -10, @@ -92,6 +98,15 @@ (r"shelve|unshelve", 
"extensions"), ] +def wikify(desc): + desc = desc.replace("(issue", "(Bts:issue") + desc = re.sub(r"\b([0-9a-f]{12})\b", r"Cset:\1", desc) + # stop ParseError from being recognized as a (nonexistent) wiki page + desc = re.sub(r" ([A-Z][a-z]+[A-Z][a-z]+)\b", r" !\1", desc) + # prevent wiki markup of magic methods + desc = re.sub(r"\b(\S*__\S*)\b", r"`\1`", desc) + return desc + def main(): desc = "example: %(prog)s 4.7.2 --stoprev 4.8rc0" ap = argparse.ArgumentParser(description=desc) @@ -148,10 +163,8 @@ if re.search(rule, desc): score += val - desc = desc.replace("(issue", "(Bts:issue") - if score >= cutoff: - commits.append(desc) + commits.append(wikify(desc)) # Group unflagged notes. groups = {} bcs = []
--- a/contrib/revsetbenchmarks.py Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/revsetbenchmarks.py Tue Feb 19 21:55:05 2019 -0800 @@ -71,8 +71,8 @@ print(exc.output, file=sys.stderr) return None -outputre = re.compile(r'! wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) ' - 'sys (\d+.\d+) \(best of (\d+)\)') +outputre = re.compile(br'! wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) ' + br'sys (\d+.\d+) \(best of (\d+)\)') def parseoutput(output): """parse a textual output into a dict
--- a/contrib/showstack.py Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/showstack.py Tue Feb 19 21:55:05 2019 -0800 @@ -1,7 +1,7 @@ # showstack.py - extension to dump a Python stack trace on signal # # binds to both SIGQUIT (Ctrl-\) and SIGINFO (Ctrl-T on BSDs) -"""dump stack trace when receiving SIGQUIT (Ctrl-\) and SIGINFO (Ctrl-T on BSDs) +r"""dump stack trace when receiving SIGQUIT (Ctrl-\) or SIGINFO (Ctrl-T on BSDs) """ from __future__ import absolute_import, print_function
--- a/contrib/synthrepo.py Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/synthrepo.py Tue Feb 19 21:55:05 2019 -0800 @@ -349,7 +349,7 @@ # to the modeled directory structure. initcount = int(opts['initfiles']) if initcount and initdirs: - pctx = repo[None].parents()[0] + pctx = repo['.'] dirs = set(pctx.dirs()) files = {} @@ -450,7 +450,6 @@ path = fctx.path() changes[path] = '\n'.join(lines) + '\n' for __ in xrange(pick(filesremoved)): - path = random.choice(mfk) for __ in xrange(10): path = random.choice(mfk) if path not in changes:
--- a/contrib/testparseutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/testparseutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -265,7 +265,7 @@ class fileheredocmatcher(embeddedmatcher): """Detect "cat > FILE << LIMIT" style embedded code - >>> matcher = fileheredocmatcher(b'heredoc .py file', br'[^<]+\.py') + >>> matcher = fileheredocmatcher(b'heredoc .py file', br'[^<]+\\.py') >>> b2s(matcher.startsat(b' $ cat > file.py << EOF\\n')) ('file.py', ' > EOF\\n') >>> b2s(matcher.startsat(b' $ cat >>file.py <<EOF\\n'))
--- a/contrib/zsh_completion Thu Feb 07 20:50:41 2019 +0900 +++ b/contrib/zsh_completion Tue Feb 19 21:55:05 2019 -0800 @@ -248,7 +248,7 @@ [[ -d $PREFIX ]] || PREFIX=$PREFIX:h - _hg_cmd resolve -l ./$PREFIX | while read rstate rpath + _hg_cmd resolve -l ./$PREFIX -T '{mergestatus}\ {relpath\(path\)}\\n' | while read rstate rpath do [[ $rstate == 'R' ]] && resolved_files+=($rpath) [[ $rstate == 'U' ]] && unresolved_files+=($rpath)
--- a/doc/check-seclevel.py Thu Feb 07 20:50:41 2019 +0900 +++ b/doc/check-seclevel.py Tue Feb 19 21:55:05 2019 -0800 @@ -163,8 +163,8 @@ (options, args) = optparser.parse_args() ui = uimod.ui.load() - ui.setconfig('ui', 'verbose', options.verbose, '--verbose') - ui.setconfig('ui', 'debug', options.debug, '--debug') + ui.setconfig(b'ui', b'verbose', options.verbose, b'--verbose') + ui.setconfig(b'ui', b'debug', options.debug, b'--debug') if options.file: if checkfile(ui, options.file, options.initlevel):
--- a/doc/hgmanpage.py Thu Feb 07 20:50:41 2019 +0900 +++ b/doc/hgmanpage.py Tue Feb 19 21:55:05 2019 -0800 @@ -376,7 +376,7 @@ tmpl = (".TH %(title_upper)s %(manual_section)s" " \"%(date)s\" \"%(version)s\" \"%(manual_group)s\"\n" ".SH NAME\n" - "%(title)s \- %(subtitle)s\n") + "%(title)s \\- %(subtitle)s\n") return tmpl % self._docinfo def append_header(self):
--- a/hgext/absorb.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/absorb.py Tue Feb 19 21:55:05 2019 -0800 @@ -726,7 +726,6 @@ # nothing changed, nothing commited nextp1 = ctx continue - msg = '' if self._willbecomenoop(memworkingcopy, ctx, nextp1): # changeset is no longer necessary self.replacemap[ctx.node()] = None
--- a/hgext/automv.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/automv.py Tue Feb 19 21:55:05 2019 -0800 @@ -64,7 +64,8 @@ if threshold > 0: match = scmutil.match(repo[None], pats, opts) added, removed = _interestingfiles(repo, match) - renames = _findrenames(repo, match, added, removed, + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + renames = _findrenames(repo, uipathfn, added, removed, threshold / 100.0) with repo.wlock(): @@ -89,7 +90,7 @@ return added, removed -def _findrenames(repo, matcher, added, removed, similarity): +def _findrenames(repo, uipathfn, added, removed, similarity): """Find what files in added are really moved files. Any file named in removed that is at least similarity% similar to a file @@ -103,7 +104,7 @@ if repo.ui.verbose: repo.ui.status( _('detected move of %s as %s (%d%% similar)\n') % ( - matcher.rel(src), matcher.rel(dst), score * 100)) + uipathfn(src), uipathfn(dst), score * 100)) renames[dst] = src if renames: repo.ui.status(_('detected move of %d files\n') % len(renames))
--- a/hgext/blackbox.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/blackbox.py Tue Feb 19 21:55:05 2019 -0800 @@ -118,7 +118,6 @@ date = dateutil.datestr(default, ui.config('blackbox', 'date-format')) user = procutil.getuser() pid = '%d' % procutil.getpid() - rev = '(unknown)' changed = '' ctx = self._repo[None] parents = ctx.parents() @@ -191,7 +190,7 @@ break # count the commands by matching lines like: 2013/01/23 19:13:36 root> - if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line): + if re.match(br'^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line): count += 1 output.append(line)
--- a/hgext/bugzilla.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/bugzilla.py Tue Feb 19 21:55:05 2019 -0800 @@ -303,6 +303,7 @@ error, logcmdutil, mail, + pycompat, registrar, url, util, @@ -342,10 +343,10 @@ default='bugs', ) configitem('bugzilla', 'fixregexp', - default=(r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*' - r'(?:nos?\.?|num(?:ber)?s?)?\s*' - r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)' - r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?') + default=(br'fix(?:es)?\s*(?:bugs?\s*)?,?\s*' + br'(?:nos?\.?|num(?:ber)?s?)?\s*' + br'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)' + br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?') ) configitem('bugzilla', 'fixresolution', default='FIXED', @@ -363,9 +364,9 @@ default=None, ) configitem('bugzilla', 'regexp', - default=(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*' - r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)' - r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?') + default=(br'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*' + br'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)' + br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?') ) configitem('bugzilla', 'strip', default=0, @@ -733,7 +734,7 @@ c = self.bzproxy.Bug.comments({'ids': [id], 'include_fields': ['text'], 'token': self.bztoken}) - return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']]) + return ''.join([t['text'] for t in c['bugs']['%d' % id]['comments']]) def filter_real_bug_ids(self, bugs): probe = self.bzproxy.Bug.get({'ids': sorted(bugs.keys()), @@ -804,11 +805,11 @@ def makecommandline(self, fieldname, value): if self.bzvermajor >= 4: - return "@%s %s" % (fieldname, str(value)) + return "@%s %s" % (fieldname, pycompat.bytestr(value)) else: if fieldname == "id": fieldname = "bug_id" - return "@%s = %s" % (fieldname, str(value)) + return "@%s = %s" % (fieldname, pycompat.bytestr(value)) def send_bug_modify_email(self, bugid, commands, comment, committer): '''send modification message to Bugzilla bug via email. 
@@ -873,7 +874,7 @@ self.fixresolution = self.ui.config('bugzilla', 'fixresolution') def apiurl(self, targets, include_fields=None): - url = '/'.join([self.bzroot] + [str(t) for t in targets]) + url = '/'.join([self.bzroot] + [pycompat.bytestr(t) for t in targets]) qv = {} if self.apikey: qv['api_key'] = self.apikey @@ -938,7 +939,7 @@ for bugid in bugs.keys(): burl = self.apiurl(('bug', bugid, 'comment'), include_fields='text') result = self._fetch(burl) - comments = result['bugs'][str(bugid)]['comments'] + comments = result['bugs'][pycompat.bytestr(bugid)]['comments'] if any(sn in c['text'] for c in comments): self.ui.status(_('bug %d already knows about changeset %s\n') % (bugid, sn)) @@ -1011,7 +1012,7 @@ self.ui.config('bugzilla', 'regexp'), re.IGNORECASE) self.fix_re = re.compile( self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE) - self.split_re = re.compile(r'\D+') + self.split_re = re.compile(br'\D+') def find_bugs(self, ctx): '''return bugs dictionary created from commit comment. @@ -1098,7 +1099,7 @@ t = logcmdutil.changesettemplater(self.ui, self.repo, spec) self.ui.pushbuffer() t.show(ctx, changes=ctx.changeset(), - bug=str(bugid), + bug=pycompat.bytestr(bugid), hgweb=self.ui.config('web', 'baseurl'), root=self.repo.root, webroot=webroot(self.repo.root))
--- a/hgext/commitextras.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/commitextras.py Tue Feb 19 21:55:05 2019 -0800 @@ -58,7 +58,7 @@ if not k: msg = _("unable to parse '%s', keys can't be empty") raise error.Abort(msg % raw) - if re.search('[^\w-]', k): + if re.search(br'[^\w-]', k): msg = _("keys can only contain ascii letters, digits," " '_' and '-'") raise error.Abort(msg)
--- a/hgext/convert/convcmd.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/convert/convcmd.py Tue Feb 19 21:55:05 2019 -0800 @@ -123,7 +123,7 @@ exceptions.append(inst) if not ui.quiet: for inst in exceptions: - ui.write("%s\n" % pycompat.bytestr(inst)) + ui.write("%s\n" % pycompat.bytestr(inst.args[0])) raise error.Abort(_('%s: missing or unsupported repository') % path) def convertsink(ui, path, type):
--- a/hgext/convert/cvs.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/convert/cvs.py Tue Feb 19 21:55:05 2019 -0800 @@ -76,7 +76,6 @@ d = encoding.getcwd() try: os.chdir(self.path) - id = None cache = 'update' if not self.ui.configbool('convert', 'cvsps.cache'): @@ -219,7 +218,7 @@ if "UseUnchanged" in r: self.writep.write("UseUnchanged\n") self.writep.flush() - r = self.readp.readline() + self.readp.readline() def getheads(self): self._parse()
--- a/hgext/convert/cvsps.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/convert/cvsps.py Tue Feb 19 21:55:05 2019 -0800 @@ -122,7 +122,7 @@ re_31 = re.compile(b'----------------------------$') re_32 = re.compile(b'=======================================' b'======================================$') - re_50 = re.compile(b'revision ([\\d.]+)(\s+locked by:\s+.+;)?$') + re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$') re_60 = re.compile(br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);' br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?' br'(\s+commitid:\s+([^;]+);)?'
--- a/hgext/convert/git.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/convert/git.py Tue Feb 19 21:55:05 2019 -0800 @@ -13,6 +13,7 @@ config, error, node as nodemod, + pycompat, ) from . import ( @@ -175,7 +176,8 @@ self.catfilepipe[0].flush() info = self.catfilepipe[1].readline().split() if info[1] != ftype: - raise error.Abort(_('cannot read %r object at %s') % (ftype, rev)) + raise error.Abort(_('cannot read %r object at %s') % ( + pycompat.bytestr(ftype), rev)) size = int(info[2]) data = self.catfilepipe[1].read(size) if len(data) < size: @@ -294,7 +296,7 @@ if not entry: if not l.startswith(':'): continue - entry = l.split() + entry = tuple(pycompat.bytestr(p) for p in l.split()) continue f = l if entry[4][0] == 'C':
--- a/hgext/convert/hg.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/convert/hg.py Tue Feb 19 21:55:05 2019 -0800 @@ -105,10 +105,6 @@ if not branch: branch = 'default' pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches] - if pbranches: - pbranch = pbranches[0][1] - else: - pbranch = 'default' branchpath = os.path.join(self.path, branch) if setbranch:
--- a/hgext/convert/monotone.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/convert/monotone.py Tue Feb 19 21:55:05 2019 -0800 @@ -93,16 +93,16 @@ kwargs = pycompat.byteskwargs(kwargs) command = [] for k, v in kwargs.iteritems(): - command.append("%s:%s" % (len(k), k)) + command.append("%d:%s" % (len(k), k)) if v: - command.append("%s:%s" % (len(v), v)) + command.append("%d:%s" % (len(v), v)) if command: command.insert(0, 'o') command.append('e') command.append('l') for arg in args: - command += "%d:%s" % (len(arg), arg) + command.append("%d:%s" % (len(arg), arg)) command.append('e') command = ''.join(command) @@ -138,7 +138,7 @@ raise error.Abort(_('bad mtn packet - no end of packet size')) lengthstr += read try: - length = long(lengthstr[:-1]) + length = pycompat.long(lengthstr[:-1]) except TypeError: raise error.Abort(_('bad mtn packet - bad packet size %s') % lengthstr) @@ -154,7 +154,7 @@ retval = [] while True: commandnbr, stream, length, output = self.mtnstdioreadpacket() - self.ui.debug('mtn: read packet %s:%s:%s\n' % + self.ui.debug('mtn: read packet %s:%s:%d\n' % (commandnbr, stream, length)) if stream == 'l': @@ -214,13 +214,13 @@ # key "test@selenic.com" # mtn >= 0.45: # key [ff58a7ffb771907c4ff68995eada1c4da068d328] - certlist = re.split('\n\n key ["\[]', certlist) + certlist = re.split(br'\n\n key ["\[]', certlist) for e in certlist: m = self.cert_re.match(e) if m: name, value = m.groups() - value = value.replace(r'\"', '"') - value = value.replace(r'\\', '\\') + value = value.replace(br'\"', '"') + value = value.replace(br'\\', '\\') certs[name] = value # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306 # and all times are stored in UTC @@ -335,7 +335,6 @@ def before(self): # Check if we have a new enough version to use automate stdio - version = 0.0 try: versionstr = self.mtnrunsingle("interface_version") version = float(versionstr)
--- a/hgext/convert/p4.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/convert/p4.py Tue Feb 19 21:55:05 2019 -0800 @@ -64,12 +64,12 @@ self.encoding = self.ui.config('convert', 'p4.encoding', convcmd.orig_encoding) self.re_type = re.compile( - "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)" - "(\+\w+)?$") + br"([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)" + br"(\+\w+)?$") self.re_keywords = re.compile( - r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)" - r":[^$\n]*\$") - self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$") + br"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)" + br":[^$\n]*\$") + self.re_keywords_old = re.compile(br"\$(Id|Header):[^$\n]*\$") if revs and len(revs) > 1: raise error.Abort(_("p4 source does not support specifying "
--- a/hgext/convert/subversion.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/convert/subversion.py Tue Feb 19 21:55:05 2019 -0800 @@ -984,7 +984,6 @@ # TODO: ra.get_file transmits the whole file instead of diffs. if file in self.removed: return None, None - mode = '' try: new_module, revnum = revsplit(rev)[1:] if self.module != new_module: @@ -1183,12 +1182,12 @@ m = set() output = self.run0('ls', recursive=True, xml=True) doc = xml.dom.minidom.parseString(output) - for e in doc.getElementsByTagName('entry'): + for e in doc.getElementsByTagName(r'entry'): for n in e.childNodes: - if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name': + if n.nodeType != n.ELEMENT_NODE or n.tagName != r'name': continue - name = ''.join(c.data for c in n.childNodes - if c.nodeType == c.TEXT_NODE) + name = r''.join(c.data for c in n.childNodes + if c.nodeType == c.TEXT_NODE) # Entries are compared with names coming from # mercurial, so bytes with undefined encoding. Our # best bet is to assume they are in local @@ -1207,10 +1206,18 @@ os.unlink(filename) except OSError: pass + + if self.is_exec: + # We need to check executability of the file before the change, + # because `vfs.write` is able to reset exec bit. + wasexec = False + if os.path.exists(self.wjoin(filename)): + wasexec = self.is_exec(self.wjoin(filename)) + self.wopener.write(filename, data) if self.is_exec: - if self.is_exec(self.wjoin(filename)): + if wasexec: if 'x' not in flags: self.delexec.append(filename) else: @@ -1325,8 +1332,8 @@ try: rev = self.commit_re.search(output).group(1) except AttributeError: - if parents and not files: - return parents[0] + if not files: + return parents[0] if parents else None self.ui.warn(_('unexpected svn output:\n')) self.ui.warn(output) raise error.Abort(_('unable to cope with svn output'))
--- a/hgext/extdiff.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/extdiff.py Tue Feb 19 21:55:05 2019 -0800 @@ -59,6 +59,22 @@ [diff-tools] kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child +If a program has a graphical interface, it might be interesting to tell +Mercurial about it. It will prevent the program from being mistakenly +used in a terminal-only environment (such as an SSH terminal session), +and will make :hg:`extdiff --per-file` open multiple file diffs at once +instead of one by one (if you still want to open file diffs one by one, +you can use the --confirm option). + +Declaring that a tool has a graphical interface can be done with the +``gui`` flag next to where ``diffargs`` are specified: + +:: + + [diff-tools] + kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child + kdiff3.gui = true + You can use -I/-X and list of file or directory names like normal :hg:`diff` command. The extdiff extension makes snapshots of only needed files, so running the external diff program will actually be @@ -71,6 +87,7 @@ import re import shutil import stat +import subprocess from mercurial.i18n import _ from mercurial.node import ( @@ -80,6 +97,7 @@ from mercurial import ( archival, cmdutil, + encoding, error, filemerge, formatter, @@ -104,11 +122,19 @@ generic=True, ) +configitem('extdiff', br'gui\..*', + generic=True, +) + configitem('diff-tools', br'.*\.diffargs$', default=None, generic=True, ) +configitem('diff-tools', br'.*\.gui$', + generic=True, +) + # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for # extensions which SHIP WITH MERCURIAL. 
Non-mainline extensions should # be specifying the version(s) of Mercurial they are tested with, or @@ -175,7 +201,97 @@ cmdline += ' $parent1 $child' return re.sub(regex, quote, cmdline) -def dodiff(ui, repo, cmdline, pats, opts): +def _systembackground(cmd, environ=None, cwd=None): + ''' like 'procutil.system', but returns the Popen object directly + so we don't have to wait on it. + ''' + cmd = procutil.quotecommand(cmd) + env = procutil.shellenviron(environ) + proc = subprocess.Popen(procutil.tonativestr(cmd), + shell=True, close_fds=procutil.closefds, + env=procutil.tonativeenv(env), + cwd=pycompat.rapply(procutil.tonativestr, cwd)) + return proc + +def _runperfilediff(cmdline, repo_root, ui, guitool, do3way, confirm, + commonfiles, tmproot, dir1a, dir1b, + dir2root, dir2, + rev1a, rev1b, rev2): + # Note that we need to sort the list of files because it was + # built in an "unstable" way and it's annoying to get files in a + # random order, especially when "confirm" mode is enabled. + waitprocs = [] + totalfiles = len(commonfiles) + for idx, commonfile in enumerate(sorted(commonfiles)): + path1a = os.path.join(tmproot, dir1a, commonfile) + label1a = commonfile + rev1a + if not os.path.isfile(path1a): + path1a = os.devnull + + path1b = '' + label1b = '' + if do3way: + path1b = os.path.join(tmproot, dir1b, commonfile) + label1b = commonfile + rev1b + if not os.path.isfile(path1b): + path1b = os.devnull + + path2 = os.path.join(dir2root, dir2, commonfile) + label2 = commonfile + rev2 + + if confirm: + # Prompt before showing this diff + difffiles = _('diff %s (%d of %d)') % (commonfile, idx + 1, + totalfiles) + responses = _('[Yns?]' + '$$ &Yes, show diff' + '$$ &No, skip this diff' + '$$ &Skip remaining diffs' + '$$ &? (display help)') + r = ui.promptchoice('%s %s' % (difffiles, responses)) + if r == 3: # ? 
+ while r == 3: + for c, t in ui.extractchoices(responses)[1]: + ui.write('%s - %s\n' % (c, encoding.lower(t))) + r = ui.promptchoice('%s %s' % (difffiles, responses)) + if r == 0: # yes + pass + elif r == 1: # no + continue + elif r == 2: # skip + break + + curcmdline = formatcmdline( + cmdline, repo_root, do3way=do3way, + parent1=path1a, plabel1=label1a, + parent2=path1b, plabel2=label1b, + child=path2, clabel=label2) + + if confirm or not guitool: + # Run the comparison program and wait for it to exit + # before we show the next file. + # This is because either we need to wait for confirmation + # from the user between each invocation, or because, as far + # as we know, the tool doesn't have a GUI, in which case + # we can't run multiple CLI programs at the same time. + ui.debug('running %r in %s\n' % + (pycompat.bytestr(curcmdline), tmproot)) + ui.system(curcmdline, cwd=tmproot, blockedtag='extdiff') + else: + # Run the comparison program but don't wait, as we're + # going to rapid-fire each file diff and then wait on + # the whole group. 
+ ui.debug('running %r in %s (backgrounded)\n' % + (pycompat.bytestr(curcmdline), tmproot)) + proc = _systembackground(curcmdline, cwd=tmproot) + waitprocs.append(proc) + + if waitprocs: + with ui.timeblockedsection('extdiff'): + for proc in waitprocs: + proc.wait() + +def dodiff(ui, repo, cmdline, pats, opts, guitool=False): '''Do the actual diff: - copy to a temp structure if diffing 2 internal revisions @@ -201,6 +317,9 @@ else: ctx1b = repo[nullid] + perfile = opts.get('per_file') + confirm = opts.get('confirm') + node1a = ctx1a.node() node1b = ctx1b.node() node2 = ctx2.node() @@ -217,6 +336,8 @@ if opts.get('patch'): if subrepos: raise error.Abort(_('--patch cannot be used with --subrepos')) + if perfile: + raise error.Abort(_('--patch cannot be used with --per-file')) if node2 is None: raise error.Abort(_('--patch requires two revisions')) else: @@ -304,15 +425,24 @@ label1b = None fnsandstat = [] - # Run the external tool on the 2 temp directories or the patches - cmdline = formatcmdline( - cmdline, repo.root, do3way=do3way, - parent1=dir1a, plabel1=label1a, - parent2=dir1b, plabel2=label1b, - child=dir2, clabel=label2) - ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline), - tmproot)) - ui.system(cmdline, cwd=tmproot, blockedtag='extdiff') + if not perfile: + # Run the external tool on the 2 temp directories or the patches + cmdline = formatcmdline( + cmdline, repo.root, do3way=do3way, + parent1=dir1a, plabel1=label1a, + parent2=dir1b, plabel2=label1b, + child=dir2, clabel=label2) + ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline), + tmproot)) + ui.system(cmdline, cwd=tmproot, blockedtag='extdiff') + else: + # Run the external tool once for each pair of files + _runperfilediff( + cmdline, repo.root, ui, guitool=guitool, + do3way=do3way, confirm=confirm, + commonfiles=common, tmproot=tmproot, dir1a=dir1a, dir1b=dir1b, + dir2root=dir2root, dir2=dir2, + rev1a=rev1a, rev1b=rev1b, rev2=rev2) for copy_fn, working_fn, st in fnsandstat: cpstat = 
os.lstat(copy_fn) @@ -340,6 +470,10 @@ _('pass option to comparison program'), _('OPT')), ('r', 'rev', [], _('revision'), _('REV')), ('c', 'change', '', _('change made by revision'), _('REV')), + ('', 'per-file', False, + _('compare each file instead of revision snapshots')), + ('', 'confirm', False, + _('prompt user before each external program invocation')), ('', 'patch', None, _('compare patches for two revisions')) ] + cmdutil.walkopts + cmdutil.subrepoopts @@ -357,15 +491,29 @@ default options "-Npru". To select a different program, use the -p/--program option. The - program will be passed the names of two directories to compare. To - pass additional options to the program, use -o/--option. These - will be passed before the names of the directories to compare. + program will be passed the names of two directories to compare, + unless the --per-file option is specified (see below). To pass + additional options to the program, use -o/--option. These will be + passed before the names of the directories or files to compare. When two revision arguments are given, then changes are shown between those revisions. If only one revision is specified then that revision is compared to the working directory, and, when no revisions are specified, the working directory files are compared - to its parent.''' + to its parent. + + The --per-file option runs the external program repeatedly on each + file to diff, instead of once on two directories. By default, + this happens one by one, where the next file diff is open in the + external program only once the previous external program (for the + previous file diff) has exited. If the external program has a + graphical interface, it can open all the file diffs at once instead + of one by one. See :hg:`help -e extdiff` for information about how + to tell Mercurial that a given program has a graphical interface. + + The --confirm option will prompt the user before each invocation of + the external program. 
It is ignored if --per-file isn't specified. + ''' opts = pycompat.byteskwargs(opts) program = opts.get('program') option = opts.get('option') @@ -390,20 +538,22 @@ to its parent. """ - def __init__(self, path, cmdline): + def __init__(self, path, cmdline, isgui): # We can't pass non-ASCII through docstrings (and path is # in an unknown encoding anyway), but avoid double separators on # Windows docpath = stringutil.escapestr(path).replace(b'\\\\', b'\\') self.__doc__ %= {r'path': pycompat.sysstr(stringutil.uirepr(docpath))} self._cmdline = cmdline + self._isgui = isgui def __call__(self, ui, repo, *pats, **opts): opts = pycompat.byteskwargs(opts) options = ' '.join(map(procutil.shellquote, opts['option'])) if options: options = ' ' + options - return dodiff(ui, repo, self._cmdline + options, pats, opts) + return dodiff(ui, repo, self._cmdline + options, pats, opts, + guitool=self._isgui) def uisetup(ui): for cmd, path in ui.configitems('extdiff'): @@ -418,7 +568,8 @@ cmdline = procutil.shellquote(path) if diffopts: cmdline += ' ' + diffopts - elif cmd.startswith('opts.'): + isgui = ui.configbool('extdiff', 'gui.' + cmd) + elif cmd.startswith('opts.') or cmd.startswith('gui.'): continue else: if path: @@ -432,15 +583,20 @@ path = filemerge.findexternaltool(ui, cmd) or cmd cmdline = procutil.shellquote(path) diffopts = False + isgui = ui.configbool('extdiff', 'gui.' + cmd) # look for diff arguments in [diff-tools] then [merge-tools] if not diffopts: - args = ui.config('diff-tools', cmd+'.diffargs') or \ - ui.config('merge-tools', cmd+'.diffargs') - if args: - cmdline += ' ' + args + key = cmd + '.diffargs' + for section in ('diff-tools', 'merge-tools'): + args = ui.config(section, key) + if args: + cmdline += ' ' + args + if isgui is None: + isgui = ui.configbool(section, cmd + '.gui') or False + break command(cmd, extdiffopts[:], _('hg %s [OPTION]... 
[FILE]...') % cmd, helpcategory=command.CATEGORY_FILE_CONTENTS, - inferrepo=True)(savedcmd(path, cmdline)) + inferrepo=True)(savedcmd(path, cmdline, isgui)) # tell hggettext to extract docstrings from these functions: i18nfunctions = [savedcmd]
--- a/hgext/fastannotate/formatter.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/fastannotate/formatter.py Tue Feb 19 21:55:05 2019 -0800 @@ -38,8 +38,8 @@ if self.opts.get('rev') == 'wdir()': orig = hexfunc hexfunc = lambda x: None if x is None else orig(x) - wnode = hexfunc(repo[None].p1().node()) + '+' - wrev = '%d' % repo[None].p1().rev() + wnode = hexfunc(repo['.'].node()) + '+' + wrev = '%d' % repo['.'].rev() wrevpad = '' if not opts.get('changeset'): # only show + if changeset is hidden wrev += '+'
--- a/hgext/fastannotate/protocol.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/fastannotate/protocol.py Tue Feb 19 21:55:05 2019 -0800 @@ -71,7 +71,6 @@ for p in [actx.revmappath, actx.linelogpath]: if not os.path.exists(p): continue - content = '' with open(p, 'rb') as f: content = f.read() vfsbaselen = len(repo.vfs.base + '/')
--- a/hgext/fastannotate/support.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/fastannotate/support.py Tue Feb 19 21:55:05 2019 -0800 @@ -109,7 +109,6 @@ def _remotefctxannotate(orig, self, follow=False, skiprevs=None, diffopts=None): # skipset: a set-like used to test if a fctx needs to be downloaded - skipset = None with context.fctxannotatecontext(self, follow, diffopts) as ac: skipset = revmap.revmap(ac.revmappath) return orig(self, follow, skiprevs=skiprevs, diffopts=diffopts,
--- a/hgext/fetch.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/fetch.py Tue Feb 19 21:55:05 2019 -0800 @@ -68,7 +68,7 @@ if date: opts['date'] = dateutil.parsedate(date) - parent, _p2 = repo.dirstate.parents() + parent = repo.dirstate.p1() branch = repo.dirstate.branch() try: branchnode = repo.branchtip(branch)
--- a/hgext/fsmonitor/__init__.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/fsmonitor/__init__.py Tue Feb 19 21:55:05 2019 -0800 @@ -161,6 +161,9 @@ configitem('fsmonitor', 'blacklistusers', default=list, ) +configitem('fsmonitor', 'verbose', + default=True, +) configitem('experimental', 'fsmonitor.transaction_notify', default=False, ) @@ -172,11 +175,14 @@ def _handleunavailable(ui, state, ex): """Exception handler for Watchman interaction exceptions""" if isinstance(ex, watchmanclient.Unavailable): - if ex.warn: + # experimental config: fsmonitor.verbose + if ex.warn and ui.configbool('fsmonitor', 'verbose'): ui.warn(str(ex) + '\n') if ex.invalidate: state.invalidate() - ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg) + # experimental config: fsmonitor.verbose + if ui.configbool('fsmonitor', 'verbose'): + ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg) else: ui.log('fsmonitor', 'Watchman exception: %s\n', ex) @@ -240,24 +246,6 @@ clock = 'c:0:0' notefiles = [] - def fwarn(f, msg): - self._ui.warn('%s: %s\n' % (self.pathto(f), msg)) - return False - - def badtype(mode): - kind = _('unknown') - if stat.S_ISCHR(mode): - kind = _('character device') - elif stat.S_ISBLK(mode): - kind = _('block device') - elif stat.S_ISFIFO(mode): - kind = _('fifo') - elif stat.S_ISSOCK(mode): - kind = _('socket') - elif stat.S_ISDIR(mode): - kind = _('directory') - return _('unsupported file type (type is %s)') % kind - ignore = self._ignore dirignore = self._dirignore if unknown: @@ -379,6 +367,9 @@ fexists = entry['exists'] kind = getkind(fmode) + if '/.hg/' in fname or fname.endswith('/.hg'): + return bail('nested-repo-detected') + if not fexists: # if marked as deleted and we don't already have a change # record, mark it as deleted. 
If we already have an entry @@ -485,7 +476,7 @@ working = ctx2.rev() is None parentworking = working and ctx1 == self['.'] - match = match or matchmod.always(self.root, self.getcwd()) + match = match or matchmod.always() # Maybe we can use this opportunity to update Watchman's state. # Mercurial uses workingcommitctx and/or memctx to represent the part of @@ -752,6 +743,14 @@ repo, node, branchmerge, force, ancestor, mergeancestor, labels, matcher, **kwargs) +def repo_has_depth_one_nested_repo(repo): + for f in repo.wvfs.listdir(): + if os.path.isdir(os.path.join(repo.root, f, '.hg')): + msg = 'fsmonitor: sub-repository %r detected, fsmonitor disabled\n' + repo.ui.debug(msg % f) + return True + return False + def reposetup(ui, repo): # We don't work with largefiles or inotify exts = extensions.enabled() @@ -769,6 +768,9 @@ if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'): return + if repo_has_depth_one_nested_repo(repo): + return + fsmonitorstate = state.state(repo) if fsmonitorstate.mode == 'off': return
--- a/hgext/fsmonitor/pywatchman/capabilities.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/fsmonitor/pywatchman/capabilities.py Tue Feb 19 21:55:05 2019 -0800 @@ -62,7 +62,6 @@ vers['capabilities'] = {} for name in opts['optional']: vers['capabilities'][name] = check(parsed_version, name) - failed = False for name in opts['required']: have = check(parsed_version, name) vers['capabilities'][name] = have
--- a/hgext/fsmonitor/pywatchman/pybser.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/fsmonitor/pywatchman/pybser.py Tue Feb 19 21:55:05 2019 -0800 @@ -267,7 +267,7 @@ key = key[3:] try: return self._values[self._keys.index(key)] - except ValueError as ex: + except ValueError: raise KeyError('_BunserDict has no key %s' % key) def __len__(self): @@ -420,7 +420,6 @@ def _pdu_info_helper(buf): - bser_version = -1 if buf[0:2] == EMPTY_HEADER[0:2]: bser_version = 1 bser_capabilities = 0
--- a/hgext/githelp.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/githelp.py Tue Feb 19 21:55:05 2019 -0800 @@ -25,6 +25,7 @@ encoding, error, fancyopts, + pycompat, registrar, scmutil, ) @@ -83,21 +84,22 @@ args = fancyopts.fancyopts(list(args), cmdoptions, opts, True) break except getopt.GetoptError as ex: - flag = None - if "requires argument" in ex.msg: + if r"requires argument" in ex.msg: raise - if ('--' + ex.opt) in ex.msg: - flag = '--' + ex.opt - elif ('-' + ex.opt) in ex.msg: - flag = '-' + ex.opt + if (r'--' + ex.opt) in ex.msg: + flag = '--' + pycompat.bytestr(ex.opt) + elif (r'-' + ex.opt) in ex.msg: + flag = '-' + pycompat.bytestr(ex.opt) else: - raise error.Abort(_("unknown option %s") % ex.opt) + raise error.Abort(_("unknown option %s") % + pycompat.bytestr(ex.opt)) try: args.remove(flag) except Exception: msg = _("unknown option '%s' packed with other options") hint = _("please try passing the option as its own flag: -%s") - raise error.Abort(msg % ex.opt, hint=hint % ex.opt) + raise error.Abort(msg % pycompat.bytestr(ex.opt), + hint=hint % pycompat.bytestr(ex.opt)) ui.warn(_("ignoring unknown option %s\n") % flag) @@ -119,7 +121,12 @@ for k, values in sorted(self.opts.iteritems()): for v in values: if v: - cmd += " %s %s" % (k, v) + if isinstance(v, int): + fmt = ' %s %d' + else: + fmt = ' %s %s' + + cmd += fmt % (k, v) else: cmd += " %s" % (k,) if self.args:
--- a/hgext/gpg.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/gpg.py Tue Feb 19 21:55:05 2019 -0800 @@ -297,7 +297,7 @@ return if not opts["force"]: - msigs = match.exact(repo.root, '', ['.hgsigs']) + msigs = match.exact(['.hgsigs']) if any(repo.status(match=msigs, unknown=True, ignored=True)): raise error.Abort(_("working copy of .hgsigs is changed "), hint=_("please commit .hgsigs manually"))
--- a/hgext/histedit.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/histedit.py Tue Feb 19 21:55:05 2019 -0800 @@ -156,6 +156,15 @@ [histedit] linelen = 120 # truncate rule lines at 120 characters +The summary of a change can be customized as well:: + + [histedit] + summary-template = '{rev} {bookmarks} {desc|firstline}' + +The customized summary should be kept short enough that rule lines +will fit in the configured line length. See above if that requires +customization. + ``hg histedit`` attempts to automatically choose an appropriate base revision to use. To change which base revision is used, define a revset in your configuration file:: @@ -248,6 +257,8 @@ configitem('ui', 'interface.histedit', default=None, ) +configitem('histedit', 'summary-template', + default='{rev} {desc|firstline}') # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should @@ -480,8 +491,11 @@ <hash> <rev> <summary> """ ctx = self.repo[self.node] - summary = _getsummary(ctx) - line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary) + ui = self.repo.ui + summary = cmdutil.rendertemplate( + ctx, ui.config('histedit', 'summary-template')) or '' + summary = summary.splitlines()[0] + line = '%s %s %s' % (self.verb, ctx, summary) # trim to 75 columns by default so it's not stupidly wide in my editor # (the 5 more are left for verb) maxlen = self.repo.ui.configint('histedit', 'linelen') @@ -575,7 +589,7 @@ def applychanges(ui, repo, ctx, opts): """Merge changeset from ctx (only) in the current working directory""" - wcpar = repo.dirstate.parents()[0] + wcpar = repo.dirstate.p1() if ctx.p1().node() == wcpar: # edits are "in place" we do not need to make any merge, # just applies changes on parent for editing @@ -608,7 +622,7 @@ if not c.mutable(): raise error.ParseError( _("cannot fold into public change %s") % node.short(c.node())) - base = firstctx.parents()[0] + base = firstctx.p1() # commit a new 
version of the old changeset, including the update # collect all files which might be affected @@ -693,7 +707,7 @@ class pick(histeditaction): def run(self): rulectx = self.repo[self.node] - if rulectx.parents()[0].node() == self.state.parentctxnode: + if rulectx.p1().node() == self.state.parentctxnode: self.repo.ui.debug('node %s unchanged\n' % node.short(self.node)) return rulectx, [] @@ -724,7 +738,7 @@ super(fold, self).verify(prev, expected, seen) repo = self.repo if not prev: - c = repo[self.node].parents()[0] + c = repo[self.node].p1() elif not prev.verb in ('pick', 'base'): return else: @@ -795,7 +809,7 @@ return False def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges): - parent = ctx.parents()[0].node() + parent = ctx.p1().node() hg.updaterepo(repo, parent, overwrite=False) ### prepare new commit data commitopts = {} @@ -934,6 +948,12 @@ # Curses Support try: import curses + + # Curses requires setting the locale or it will default to the C + # locale. This sets the locale to the user's default system + # locale. 
+ import locale + locale.setlocale(locale.LC_ALL, u'') except ImportError: curses = None @@ -943,7 +963,7 @@ 'roll': '^roll', } -COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN = 1, 2, 3, 4 +COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5 E_QUIT, E_HISTEDIT = 1, 2 E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7 @@ -1223,6 +1243,7 @@ curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE) curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW) curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN) + curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA) # don't display the cursor try: @@ -1246,7 +1267,7 @@ line = "changeset: {0}:{1:<12}".format(ctx.rev(), ctx) win.addstr(1, 1, line[:length]) - line = "user: {0}".format(stringutil.shortuser(ctx.user())) + line = "user: {0}".format(ctx.user()) win.addstr(2, 1, line[:length]) bms = repo.nodebookmarks(ctx.node()) @@ -1313,7 +1334,8 @@ if y + start == selected: addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED)) elif y + start == pos: - addln(rulesscr, y, 2, rule, curses.A_BOLD) + addln(rulesscr, y, 2, rule, + curses.color_pair(COLOR_CURRENT) | curses.A_BOLD) else: addln(rulesscr, y, 2, rule) rulesscr.noutrefresh() @@ -1459,7 +1481,7 @@ 'exactly one common root')) root = rr[0].node() - topmost, empty = repo.dirstate.parents() + topmost = repo.dirstate.p1() revs = between(repo, root, topmost, keep) if not revs: raise error.Abort(_('%s is not an ancestor of working directory') % @@ -1472,7 +1494,7 @@ curses.echo() curses.endwin() if rc is False: - ui.write(_("chistedit aborted\n")) + ui.write(_("histedit aborted\n")) return 0 if type(rc) is list: ui.status(_("running histedit\n")) @@ -1873,7 +1895,7 @@ cmdutil.checkunfinished(repo) cmdutil.bailifchanged(repo) - topmost, empty = repo.dirstate.parents() + topmost = repo.dirstate.p1() if outg: if freeargs: remote = freeargs[0] @@ -1902,7 +1924,7 @@ 
actions = parserules(rules, state) warnverifyactions(ui, repo, actions, state, ctxs) - parentctxnode = repo[root].parents()[0].node() + parentctxnode = repo[root].p1().node() state.parentctxnode = parentctxnode state.actions = actions
--- a/hgext/journal.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/journal.py Tue Feb 19 21:55:05 2019 -0800 @@ -348,7 +348,6 @@ def _write(self, vfs, entry): with self.jlock(vfs): - version = None # open file in amend mode to ensure it is created if missing with vfs('namejournal', mode='a+b') as f: f.seek(0, os.SEEK_SET)
--- a/hgext/largefiles/lfcommands.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/largefiles/lfcommands.py Tue Feb 19 21:55:05 2019 -0800 @@ -288,12 +288,9 @@ files = set(ctx.files()) if node.nullid not in parents: mc = ctx.manifest() - mp1 = ctx.parents()[0].manifest() - mp2 = ctx.parents()[1].manifest() - files |= (set(mp1) | set(mp2)) - set(mc) - for f in mc: - if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None): - files.add(f) + for pctx in ctx.parents(): + for fn in pctx.manifest().diff(mc): + files.add(fn) return files # Convert src parents to dst parents @@ -467,27 +464,26 @@ wvfs = repo.wvfs wctx = repo[None] for lfile in lfiles: - rellfile = lfile - rellfileorig = os.path.relpath( - scmutil.origpath(ui, repo, wvfs.join(rellfile)), + lfileorig = os.path.relpath( + scmutil.backuppath(ui, repo, lfile), start=repo.root) - relstandin = lfutil.standin(lfile) - relstandinorig = os.path.relpath( - scmutil.origpath(ui, repo, wvfs.join(relstandin)), + standin = lfutil.standin(lfile) + standinorig = os.path.relpath( + scmutil.backuppath(ui, repo, standin), start=repo.root) - if wvfs.exists(relstandin): - if (wvfs.exists(relstandinorig) and - wvfs.exists(rellfile)): - shutil.copyfile(wvfs.join(rellfile), - wvfs.join(rellfileorig)) - wvfs.unlinkpath(relstandinorig) - expecthash = lfutil.readasstandin(wctx[relstandin]) + if wvfs.exists(standin): + if (wvfs.exists(standinorig) and + wvfs.exists(lfile)): + shutil.copyfile(wvfs.join(lfile), + wvfs.join(lfileorig)) + wvfs.unlinkpath(standinorig) + expecthash = lfutil.readasstandin(wctx[standin]) if expecthash != '': if lfile not in wctx: # not switched to normal file - if repo.dirstate[relstandin] != '?': - wvfs.unlinkpath(rellfile, ignoremissing=True) + if repo.dirstate[standin] != '?': + wvfs.unlinkpath(lfile, ignoremissing=True) else: - dropped.add(rellfile) + dropped.add(lfile) # use normallookup() to allocate an entry in largefiles # dirstate to prevent lfilesrepo.status() from reporting @@ -499,9 +495,9 @@ # lfile 
is added to the repository again. This happens when a # largefile is converted back to a normal file: the standin # disappears, but a new (normal) file appears as the lfile. - if (wvfs.exists(rellfile) and + if (wvfs.exists(lfile) and repo.dirstate.normalize(lfile) not in wctx): - wvfs.unlinkpath(rellfile) + wvfs.unlinkpath(lfile) removed += 1 # largefile processing might be slow and be interrupted - be prepared @@ -535,19 +531,18 @@ # copy the exec mode of largefile standin from the repository's # dirstate to its state in the lfdirstate. - rellfile = lfile - relstandin = lfutil.standin(lfile) - if wvfs.exists(relstandin): + standin = lfutil.standin(lfile) + if wvfs.exists(standin): # exec is decided by the users permissions using mask 0o100 - standinexec = wvfs.stat(relstandin).st_mode & 0o100 - st = wvfs.stat(rellfile) + standinexec = wvfs.stat(standin).st_mode & 0o100 + st = wvfs.stat(lfile) mode = st.st_mode if standinexec != mode & 0o100: # first remove all X bits, then shift all R bits to X mode &= ~0o111 if standinexec: mode |= (mode >> 2) & 0o111 & ~util.umask - wvfs.chmod(rellfile, mode) + wvfs.chmod(lfile, mode) update1 = 1 updated += update1
--- a/hgext/largefiles/lfutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/largefiles/lfutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -168,7 +168,7 @@ def lfdirstatestatus(lfdirstate, repo): pctx = repo['.'] - match = matchmod.always(repo.root, repo.getcwd()) + match = matchmod.always() unsure, s = lfdirstate.status(match, subrepos=[], ignored=False, clean=False, unknown=False) modified, clean = s.modified, s.clean @@ -518,8 +518,8 @@ files = set(ctx.files()) if len(parents) == 2: mc = ctx.manifest() - mp1 = ctx.parents()[0].manifest() - mp2 = ctx.parents()[1].manifest() + mp1 = ctx.p1().manifest() + mp2 = ctx.p2().manifest() for f in mp1: if f not in mc: files.add(f) @@ -552,7 +552,7 @@ # otherwise to update all standins if the largefiles are # large. lfdirstate = openlfdirstate(ui, repo) - dirtymatch = matchmod.always(repo.root, repo.getcwd()) + dirtymatch = matchmod.always() unsure, s = lfdirstate.status(dirtymatch, subrepos=[], ignored=False, clean=False, unknown=False) modifiedfiles = unsure + s.modified + s.added + s.removed
--- a/hgext/largefiles/overrides.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/largefiles/overrides.py Tue Feb 19 21:55:05 2019 -0800 @@ -24,6 +24,7 @@ copies as copiesmod, error, exchange, + extensions, exthelper, filemerge, hg, @@ -77,49 +78,7 @@ m.matchfn = lambda f: notlfile(f) and origmatchfn(f) return m -def installnormalfilesmatchfn(manifest): - '''installmatchfn with a matchfn that ignores all largefiles''' - def overridematch(ctx, pats=(), opts=None, globbed=False, - default='relpath', badfn=None): - if opts is None: - opts = {} - match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn) - return composenormalfilematcher(match, manifest) - oldmatch = installmatchfn(overridematch) - -def installmatchfn(f): - '''monkey patch the scmutil module with a custom match function. - Warning: it is monkey patching the _module_ on runtime! Not thread safe!''' - oldmatch = scmutil.match - setattr(f, 'oldmatch', oldmatch) - scmutil.match = f - return oldmatch - -def restorematchfn(): - '''restores scmutil.match to what it was before installmatchfn - was called. no-op if scmutil.match is its original function. - - Note that n calls to installmatchfn will require n calls to - restore the original matchfn.''' - scmutil.match = getattr(scmutil.match, 'oldmatch') - -def installmatchandpatsfn(f): - oldmatchandpats = scmutil.matchandpats - setattr(f, 'oldmatchandpats', oldmatchandpats) - scmutil.matchandpats = f - return oldmatchandpats - -def restorematchandpatsfn(): - '''restores scmutil.matchandpats to what it was before - installmatchandpatsfn was called. No-op if scmutil.matchandpats - is its original function. 
- - Note that n calls to installmatchandpatsfn will require n calls - to restore the original matchfn.''' - scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats', - scmutil.matchandpats) - -def addlargefiles(ui, repo, isaddremove, matcher, **opts): +def addlargefiles(ui, repo, isaddremove, matcher, uipathfn, **opts): large = opts.get(r'large') lfsize = lfutil.getminsize( ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize')) @@ -140,17 +99,11 @@ nfile = f in wctx exists = lfile or nfile - # addremove in core gets fancy with the name, add doesn't - if isaddremove: - name = m.uipath(f) - else: - name = m.rel(f) - # Don't warn the user when they attempt to add a normal tracked file. # The normal add code will do that for us. if exact and exists: if lfile: - ui.warn(_('%s already a largefile\n') % name) + ui.warn(_('%s already a largefile\n') % uipathfn(f)) continue if (exact or not exists) and not lfutil.isstandin(f): @@ -164,7 +117,7 @@ if large or abovemin or (lfmatcher and lfmatcher(f)): lfnames.append(f) if ui.verbose or not exact: - ui.status(_('adding %s as a largefile\n') % name) + ui.status(_('adding %s as a largefile\n') % uipathfn(f)) bad = [] @@ -191,7 +144,7 @@ added = [f for f in lfnames if f not in bad] return added, bad -def removelargefiles(ui, repo, isaddremove, matcher, dryrun, **opts): +def removelargefiles(ui, repo, isaddremove, matcher, uipathfn, dryrun, **opts): after = opts.get(r'after') m = composelargefilematcher(matcher, repo[None].manifest()) try: @@ -207,11 +160,9 @@ def warn(files, msg): for f in files: - ui.warn(msg % m.rel(f)) + ui.warn(msg % uipathfn(f)) return int(len(files) > 0) - result = 0 - if after: remove = deleted result = warn(modified + added + clean, @@ -229,12 +180,7 @@ lfdirstate = lfutil.openlfdirstate(ui, repo) for f in sorted(remove): if ui.verbose or not m.exact(f): - # addremove in core gets fancy with the name, remove doesn't - if isaddremove: - name = m.uipath(f) - else: - name = m.rel(f) - 
ui.status(_('removing %s\n') % name) + ui.status(_('removing %s\n') % uipathfn(f)) if not dryrun: if not after: @@ -278,27 +224,27 @@ return orig(ui, repo, *pats, **opts) @eh.wrapfunction(cmdutil, 'add') -def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts): +def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts): # The --normal flag short circuits this override if opts.get(r'normal'): - return orig(ui, repo, matcher, prefix, explicitonly, **opts) + return orig(ui, repo, matcher, prefix, uipathfn, explicitonly, **opts) - ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts) + ladded, lbad = addlargefiles(ui, repo, False, matcher, uipathfn, **opts) normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(), ladded) - bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts) + bad = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, **opts) bad.extend(f for f in lbad) return bad @eh.wrapfunction(cmdutil, 'remove') -def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos, - dryrun): +def cmdutilremove(orig, ui, repo, matcher, prefix, uipathfn, after, force, + subrepos, dryrun): normalmatcher = composenormalfilematcher(matcher, repo[None].manifest()) - result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos, - dryrun) - return removelargefiles(ui, repo, False, matcher, dryrun, after=after, - force=force) or result + result = orig(ui, repo, normalmatcher, prefix, uipathfn, after, force, + subrepos, dryrun) + return removelargefiles(ui, repo, False, matcher, uipathfn, dryrun, + after=after, force=force) or result @eh.wrapfunction(subrepo.hgsubrepo, 'status') def overridestatusfn(orig, repo, rev2, **opts): @@ -326,7 +272,7 @@ @eh.wrapcommand('log') def overridelog(orig, ui, repo, *pats, **opts): - def overridematchandpats(ctx, pats=(), opts=None, globbed=False, + def overridematchandpats(orig, ctx, pats=(), opts=None, globbed=False, default='relpath', 
badfn=None): """Matcher that merges root directory with .hglf, suitable for log. It is still possible to match .hglf directly. @@ -335,8 +281,7 @@ """ if opts is None: opts = {} - matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default, - badfn=badfn) + matchandpats = orig(ctx, pats, opts, globbed, default, badfn=badfn) m, p = copy.copy(matchandpats) if m.always(): @@ -356,9 +301,10 @@ return kindpat[0] + ':' + tostandin(kindpat[1]) return tostandin(kindpat[1]) - if m._cwd: + cwd = repo.getcwd() + if cwd: hglf = lfutil.shortname - back = util.pconvert(m.rel(hglf)[:-len(hglf)]) + back = util.pconvert(repo.pathto(hglf)[:-len(hglf)]) def tostandin(f): # The file may already be a standin, so truncate the back @@ -371,10 +317,10 @@ # path to the root before building the standin. Otherwise cwd # is somewhere in the repo, relative to root, and needs to be # prepended before building the standin. - if os.path.isabs(m._cwd): + if os.path.isabs(cwd): f = f[len(back):] else: - f = m._cwd + '/' + f + f = cwd + '/' + f return back + lfutil.standin(f) else: def tostandin(f): @@ -416,20 +362,18 @@ # (2) to determine what files to print out diffs for. # The magic matchandpats override should be used for case (1) but not for # case (2). 
- def overridemakefilematcher(repo, pats, opts, badfn=None): + oldmatchandpats = scmutil.matchandpats + def overridemakefilematcher(orig, repo, pats, opts, badfn=None): wctx = repo[None] match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn) return lambda ctx: match - oldmatchandpats = installmatchandpatsfn(overridematchandpats) - oldmakefilematcher = logcmdutil._makenofollowfilematcher - setattr(logcmdutil, '_makenofollowfilematcher', overridemakefilematcher) - - try: + wrappedmatchandpats = extensions.wrappedfunction(scmutil, 'matchandpats', + overridematchandpats) + wrappedmakefilematcher = extensions.wrappedfunction( + logcmdutil, '_makenofollowfilematcher', overridemakefilematcher) + with wrappedmatchandpats, wrappedmakefilematcher: return orig(ui, repo, *pats, **opts) - finally: - restorematchandpatsfn() - setattr(logcmdutil, '_makenofollowfilematcher', oldmakefilematcher) @eh.wrapcommand('verify', opts=[('', 'large', None, @@ -636,17 +580,22 @@ # match largefiles and run it again. nonormalfiles = False nolfiles = False - installnormalfilesmatchfn(repo[None].manifest()) - try: - result = orig(ui, repo, pats, opts, rename) - except error.Abort as e: - if pycompat.bytestr(e) != _('no files to copy'): - raise e - else: - nonormalfiles = True - result = 0 - finally: - restorematchfn() + manifest = repo[None].manifest() + def normalfilesmatchfn(orig, ctx, pats=(), opts=None, globbed=False, + default='relpath', badfn=None): + if opts is None: + opts = {} + match = orig(ctx, pats, opts, globbed, default, badfn=badfn) + return composenormalfilematcher(match, manifest) + with extensions.wrappedfunction(scmutil, 'match', normalfilesmatchfn): + try: + result = orig(ui, repo, pats, opts, rename) + except error.Abort as e: + if pycompat.bytestr(e) != _('no files to copy'): + raise e + else: + nonormalfiles = True + result = 0 # The first rename can cause our current working directory to be removed. # In that case there is nothing left to copy/rename so just quit. 
@@ -672,7 +621,7 @@ wlock = repo.wlock() manifest = repo[None].manifest() - def overridematch(ctx, pats=(), opts=None, globbed=False, + def overridematch(orig, ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None): if opts is None: opts = {} @@ -684,7 +633,7 @@ newpats.append(pat.replace(lfutil.shortname, '')) else: newpats.append(pat) - match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn) + match = orig(ctx, newpats, opts, globbed, default, badfn=badfn) m = copy.copy(match) lfile = lambda f: lfutil.standin(f) in manifest m._files = [lfutil.standin(f) for f in m._files if lfile(f)] @@ -698,7 +647,6 @@ None) m.matchfn = matchfn return m - oldmatch = installmatchfn(overridematch) listpats = [] for pat in pats: if matchmod.patkind(pat) is not None: @@ -706,23 +654,19 @@ else: listpats.append(makestandin(pat)) - try: - origcopyfile = util.copyfile - copiedfiles = [] - def overridecopyfile(src, dest, *args, **kwargs): - if (lfutil.shortname in src and - dest.startswith(repo.wjoin(lfutil.shortname))): - destlfile = dest.replace(lfutil.shortname, '') - if not opts['force'] and os.path.exists(destlfile): - raise IOError('', - _('destination largefile already exists')) - copiedfiles.append((src, dest)) - origcopyfile(src, dest, *args, **kwargs) - - util.copyfile = overridecopyfile + copiedfiles = [] + def overridecopyfile(orig, src, dest, *args, **kwargs): + if (lfutil.shortname in src and + dest.startswith(repo.wjoin(lfutil.shortname))): + destlfile = dest.replace(lfutil.shortname, '') + if not opts['force'] and os.path.exists(destlfile): + raise IOError('', + _('destination largefile already exists')) + copiedfiles.append((src, dest)) + orig(src, dest, *args, **kwargs) + with extensions.wrappedfunction(util, 'copyfile', overridecopyfile), \ + extensions.wrappedfunction(scmutil, 'match', overridematch): result += orig(ui, repo, listpats, opts, rename) - finally: - util.copyfile = origcopyfile lfdirstate = lfutil.openlfdirstate(ui, repo) for 
(src, dest) in copiedfiles: @@ -752,7 +696,6 @@ else: nolfiles = True finally: - restorematchfn() wlock.release() if nolfiles and nonormalfiles: @@ -787,11 +730,11 @@ oldstandins = lfutil.getstandinsstate(repo) - def overridematch(mctx, pats=(), opts=None, globbed=False, + def overridematch(orig, mctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None): if opts is None: opts = {} - match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn) + match = orig(mctx, pats, opts, globbed, default, badfn=badfn) m = copy.copy(match) # revert supports recursing into subrepos, and though largefiles @@ -822,11 +765,8 @@ return origmatchfn(f) m.matchfn = matchfn return m - oldmatch = installmatchfn(overridematch) - try: + with extensions.wrappedfunction(scmutil, 'match', overridematch): orig(ui, repo, ctx, parents, *pats, **opts) - finally: - restorematchfn() newstandins = lfutil.getstandinsstate(repo) filelist = lfutil.getlfilestoupdate(oldstandins, newstandins) @@ -1048,8 +988,9 @@ for subpath in sorted(ctx.substate): sub = ctx.workingsub(subpath) submatch = matchmod.subdirmatcher(subpath, match) + subprefix = prefix + subpath + '/' sub._repo.lfstatus = True - sub.archive(archiver, prefix, submatch) + sub.archive(archiver, subprefix, submatch) archiver.done() @@ -1075,7 +1016,7 @@ if decode: data = repo._repo.wwritedata(name, data) - archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data) + archiver.addfile(prefix + name, mode, islink, data) for f in ctx: ff = ctx.flags(f) @@ -1101,8 +1042,9 @@ for subpath in sorted(ctx.substate): sub = ctx.workingsub(subpath) submatch = matchmod.subdirmatcher(subpath, match) + subprefix = prefix + subpath + '/' sub._repo.lfstatus = True - sub.archive(archiver, prefix + repo._path + '/', submatch, decode) + sub.archive(archiver, subprefix, submatch, decode) # If a largefile is modified, the change is not reflected in its # standin until a commit. 
cmdutil.bailifchanged() raises an exception @@ -1126,11 +1068,11 @@ repo.lfstatus = False @eh.wrapfunction(cmdutil, 'forget') -def cmdutilforget(orig, ui, repo, match, prefix, explicitonly, dryrun, +def cmdutilforget(orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive): normalmatcher = composenormalfilematcher(match, repo[None].manifest()) - bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly, dryrun, - interactive) + bad, forgot = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, + dryrun, interactive) m = composelargefilematcher(match, repo[None].manifest()) try: @@ -1146,12 +1088,12 @@ fstandin = lfutil.standin(f) if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin): ui.warn(_('not removing %s: file is already untracked\n') - % m.rel(f)) + % uipathfn(f)) bad.append(f) for f in forget: if ui.verbose or not m.exact(f): - ui.status(_('removing %s\n') % m.rel(f)) + ui.status(_('removing %s\n') % uipathfn(f)) # Need to lock because standin files are deleted then removed from the # repository and we could race in-between. @@ -1273,16 +1215,15 @@ repo.lfstatus = False @eh.wrapfunction(scmutil, 'addremove') -def scmutiladdremove(orig, repo, matcher, prefix, opts=None): +def scmutiladdremove(orig, repo, matcher, prefix, uipathfn, opts=None): if opts is None: opts = {} if not lfutil.islfilesrepo(repo): - return orig(repo, matcher, prefix, opts) + return orig(repo, matcher, prefix, uipathfn, opts) # Get the list of missing largefiles so we can remove them lfdirstate = lfutil.openlfdirstate(repo.ui, repo) - unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()), - subrepos=[], ignored=False, clean=False, - unknown=False) + unsure, s = lfdirstate.status(matchmod.always(), subrepos=[], + ignored=False, clean=False, unknown=False) # Call into the normal remove code, but the removing of the standin, we want # to have handled by original addremove. 
Monkey patching here makes sure @@ -1298,17 +1239,17 @@ matchfn = m.matchfn m.matchfn = lambda f: f in s.deleted and matchfn(f) - removelargefiles(repo.ui, repo, True, m, opts.get('dry_run'), + removelargefiles(repo.ui, repo, True, m, uipathfn, opts.get('dry_run'), **pycompat.strkwargs(opts)) # Call into the normal add code, and any files that *should* be added as # largefiles will be - added, bad = addlargefiles(repo.ui, repo, True, matcher, + added, bad = addlargefiles(repo.ui, repo, True, matcher, uipathfn, **pycompat.strkwargs(opts)) # Now that we've handled largefiles, hand off to the original addremove # function to take care of the rest. Make sure it doesn't do anything with # largefiles by passing a matcher that will ignore them. matcher = composenormalfilematcher(matcher, repo[None].manifest(), added) - return orig(repo, matcher, prefix, opts) + return orig(repo, matcher, prefix, uipathfn, opts) # Calling purge with --all will cause the largefiles to be deleted. # Override repo.status to prevent this from happening. @@ -1472,10 +1413,8 @@ # (*1) deprecated, but used internally (e.g: "rebase --collapse") lfdirstate = lfutil.openlfdirstate(repo.ui, repo) - unsure, s = lfdirstate.status(matchmod.always(repo.root, - repo.getcwd()), - subrepos=[], ignored=False, - clean=True, unknown=False) + unsure, s = lfdirstate.status(matchmod.always(), subrepos=[], + ignored=False, clean=True, unknown=False) oldclean = set(s.clean) pctx = repo['.'] dctx = repo[node]
--- a/hgext/largefiles/reposetup.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/largefiles/reposetup.py Tue Feb 19 21:55:05 2019 -0800 @@ -103,7 +103,7 @@ parentworking = working and ctx1 == self['.'] if match is None: - match = matchmod.always(self.root, self.getcwd()) + match = matchmod.always() wlock = None try:
--- a/hgext/largefiles/storefactory.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/largefiles/storefactory.py Tue Feb 19 21:55:05 2019 -0800 @@ -43,7 +43,6 @@ path, _branches = hg.parseurl(path) remote = hg.peer(repo or ui, {}, path) elif path == 'default-push' or path == 'default': - path = '' remote = repo else: path, _branches = hg.parseurl(path)
--- a/hgext/lfs/blobstore.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/lfs/blobstore.py Tue Feb 19 21:55:05 2019 -0800 @@ -42,7 +42,7 @@ def join(self, path): """split the path at first two characters, like: XX/XXXXX...""" if not _lfsre.match(path): - raise error.ProgrammingError('unexpected lfs path: %s' % path) + raise error.ProgrammingError(b'unexpected lfs path: %s' % path) return super(lfsvfs, self).join(path[0:2], path[2:]) def walk(self, path=None, onerror=None): @@ -56,7 +56,8 @@ prefixlen = len(pathutil.normasprefix(root)) oids = [] - for dirpath, dirs, files in os.walk(self.reljoin(self.base, path or ''), + for dirpath, dirs, files in os.walk(self.reljoin(self.base, path + or b''), onerror=onerror): dirpath = dirpath[prefixlen:] @@ -79,10 +80,11 @@ # self.vfs. Raise the same error as a normal vfs when asked to read a # file that doesn't exist. The only difference is the full file path # isn't available in the error. - raise IOError(errno.ENOENT, '%s: No such file or directory' % oid) + raise IOError(errno.ENOENT, + pycompat.sysstr(b'%s: No such file or directory' % oid)) def walk(self, path=None, onerror=None): - return ('', [], []) + return (b'', [], []) def write(self, oid, data): pass @@ -123,13 +125,13 @@ """ def __init__(self, repo): - fullpath = repo.svfs.join('lfs/objects') + fullpath = repo.svfs.join(b'lfs/objects') self.vfs = lfsvfs(fullpath) - if repo.ui.configbool('experimental', 'lfs.disableusercache'): + if repo.ui.configbool(b'experimental', b'lfs.disableusercache'): self.cachevfs = nullvfs() else: - usercache = lfutil._usercachedir(repo.ui, 'lfs') + usercache = lfutil._usercachedir(repo.ui, b'lfs') self.cachevfs = lfsvfs(usercache) self.ui = repo.ui @@ -143,23 +145,23 @@ # the usercache is the only place it _could_ be. If not present, the # missing file msg here will indicate the local repo, not the usercache. 
if self.cachevfs.exists(oid): - return self.cachevfs(oid, 'rb') + return self.cachevfs(oid, b'rb') - return self.vfs(oid, 'rb') + return self.vfs(oid, b'rb') def download(self, oid, src): """Read the blob from the remote source in chunks, verify the content, and write to this local blobstore.""" sha256 = hashlib.sha256() - with self.vfs(oid, 'wb', atomictemp=True) as fp: + with self.vfs(oid, b'wb', atomictemp=True) as fp: for chunk in util.filechunkiter(src, size=1048576): fp.write(chunk) sha256.update(chunk) realoid = node.hex(sha256.digest()) if realoid != oid: - raise LfsCorruptionError(_('corrupt remote lfs object: %s') + raise LfsCorruptionError(_(b'corrupt remote lfs object: %s') % oid) self._linktousercache(oid) @@ -170,7 +172,7 @@ This should only be called from the filelog during a commit or similar. As such, there is no need to verify the data. Imports from a remote store must use ``download()`` instead.""" - with self.vfs(oid, 'wb', atomictemp=True) as fp: + with self.vfs(oid, b'wb', atomictemp=True) as fp: fp.write(data) self._linktousercache(oid) @@ -186,7 +188,7 @@ """ if (not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid)): - self.ui.note(_('lfs: found %s in the usercache\n') % oid) + self.ui.note(_(b'lfs: found %s in the usercache\n') % oid) lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid)) def _linktousercache(self, oid): @@ -194,7 +196,7 @@ # the local store on success, but truncate, write and link on failure? if (not self.cachevfs.exists(oid) and not isinstance(self.cachevfs, nullvfs)): - self.ui.note(_('lfs: adding %s to the usercache\n') % oid) + self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid) lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid)) def read(self, oid, verify=True): @@ -208,10 +210,10 @@ # give more useful info about the corruption- simply don't add the # hardlink. 
if verify or node.hex(hashlib.sha256(blob).digest()) == oid: - self.ui.note(_('lfs: found %s in the usercache\n') % oid) + self.ui.note(_(b'lfs: found %s in the usercache\n') % oid) lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid)) else: - self.ui.note(_('lfs: found %s in the local lfs store\n') % oid) + self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid) blob = self._read(self.vfs, oid, verify) return blob @@ -262,26 +264,45 @@ else: return stringutil.forcebytestr(urlerror) +class lfsauthhandler(util.urlreq.basehandler): + handler_order = 480 # Before HTTPDigestAuthHandler (== 490) + + def http_error_401(self, req, fp, code, msg, headers): + """Enforces that any authentication performed is HTTP Basic + Authentication. No authentication is also acceptable. + """ + authreq = headers.get(r'www-authenticate', None) + if authreq: + scheme = authreq.split()[0] + + if scheme.lower() != r'basic': + msg = _(b'the server must support Basic Authentication') + raise util.urlerr.httperror(req.get_full_url(), code, + encoding.strfromlocal(msg), headers, + fp) + return None + class _gitlfsremote(object): def __init__(self, repo, url): ui = repo.ui self.ui = ui baseurl, authinfo = url.authinfo() - self.baseurl = baseurl.rstrip('/') - useragent = repo.ui.config('experimental', 'lfs.user-agent') + self.baseurl = baseurl.rstrip(b'/') + useragent = repo.ui.config(b'experimental', b'lfs.user-agent') if not useragent: - useragent = 'git-lfs/2.3.4 (Mercurial %s)' % util.version() + useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version() self.urlopener = urlmod.opener(ui, authinfo, useragent) - self.retry = ui.configint('lfs', 'retry') + self.urlopener.add_handler(lfsauthhandler()) + self.retry = ui.configint(b'lfs', b'retry') def writebatch(self, pointers, fromstore): """Batch upload from local to remote blobstore.""" - self._batch(_deduplicate(pointers), fromstore, 'upload') + self._batch(_deduplicate(pointers), fromstore, b'upload') def readbatch(self, 
pointers, tostore): """Batch download from remote to local blostore.""" - self._batch(_deduplicate(pointers), tostore, 'download') + self._batch(_deduplicate(pointers), tostore, b'download') def _batchrequest(self, pointers, action): """Get metadata about objects pointed by pointers for given action @@ -289,52 +310,63 @@ Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]} See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md """ - objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers] - requestdata = json.dumps({ - 'objects': objects, - 'operation': action, - }) - url = '%s/objects/batch' % self.baseurl - batchreq = util.urlreq.request(url, data=requestdata) - batchreq.add_header('Accept', 'application/vnd.git-lfs+json') - batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json') + objects = [{r'oid': pycompat.strurl(p.oid()), + r'size': p.size()} for p in pointers] + requestdata = pycompat.bytesurl(json.dumps({ + r'objects': objects, + r'operation': pycompat.strurl(action), + })) + url = b'%s/objects/batch' % self.baseurl + batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata) + batchreq.add_header(r'Accept', r'application/vnd.git-lfs+json') + batchreq.add_header(r'Content-Type', r'application/vnd.git-lfs+json') try: with contextlib.closing(self.urlopener.open(batchreq)) as rsp: rawjson = rsp.read() except util.urlerr.httperror as ex: hints = { - 400: _('check that lfs serving is enabled on %s and "%s" is ' - 'supported') % (self.baseurl, action), - 404: _('the "lfs.url" config may be used to override %s') + 400: _(b'check that lfs serving is enabled on %s and "%s" is ' + b'supported') % (self.baseurl, action), + 404: _(b'the "lfs.url" config may be used to override %s') % self.baseurl, } - hint = hints.get(ex.code, _('api=%s, action=%s') % (url, action)) - raise LfsRemoteError(_('LFS HTTP error: %s') % ex, hint=hint) + hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action)) + raise 
LfsRemoteError( + _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex), + hint=hint) except util.urlerr.urlerror as ex: - hint = (_('the "lfs.url" config may be used to override %s') + hint = (_(b'the "lfs.url" config may be used to override %s') % self.baseurl) - raise LfsRemoteError(_('LFS error: %s') % _urlerrorreason(ex), + raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex), hint=hint) try: response = json.loads(rawjson) except ValueError: - raise LfsRemoteError(_('LFS server returns invalid JSON: %s') - % rawjson) + raise LfsRemoteError(_(b'LFS server returns invalid JSON: %s') + % rawjson.encode("utf-8")) if self.ui.debugflag: - self.ui.debug('Status: %d\n' % rsp.status) + self.ui.debug(b'Status: %d\n' % rsp.status) # lfs-test-server and hg serve return headers in different order - self.ui.debug('%s\n' - % '\n'.join(sorted(str(rsp.info()).splitlines()))) + headers = pycompat.bytestr(rsp.info()).strip() + self.ui.debug(b'%s\n' + % b'\n'.join(sorted(headers.splitlines()))) - if 'objects' in response: - response['objects'] = sorted(response['objects'], - key=lambda p: p['oid']) - self.ui.debug('%s\n' - % json.dumps(response, indent=2, - separators=('', ': '), sort_keys=True)) + if r'objects' in response: + response[r'objects'] = sorted(response[r'objects'], + key=lambda p: p[r'oid']) + self.ui.debug(b'%s\n' + % pycompat.bytesurl( + json.dumps(response, indent=2, + separators=(r'', r': '), + sort_keys=True))) - return response + def encodestr(x): + if isinstance(x, pycompat.unicode): + return x.encode(u'utf-8') + return x + + return pycompat.rapply(encodestr, response) def _checkforservererror(self, pointers, responses, action): """Scans errors from objects @@ -345,34 +377,34 @@ # server implementation (ex. lfs-test-server) does not set "error" # but just removes "download" from "actions". Treat that case # as the same as 404 error. 
- if 'error' not in response: - if (action == 'download' - and action not in response.get('actions', [])): + if b'error' not in response: + if (action == b'download' + and action not in response.get(b'actions', [])): code = 404 else: continue else: # An error dict without a code doesn't make much sense, so # treat as a server error. - code = response.get('error').get('code', 500) + code = response.get(b'error').get(b'code', 500) ptrmap = {p.oid(): p for p in pointers} - p = ptrmap.get(response['oid'], None) + p = ptrmap.get(response[b'oid'], None) if p: - filename = getattr(p, 'filename', 'unknown') + filename = getattr(p, 'filename', b'unknown') errors = { - 404: 'The object does not exist', - 410: 'The object was removed by the owner', - 422: 'Validation error', - 500: 'Internal server error', + 404: b'The object does not exist', + 410: b'The object was removed by the owner', + 422: b'Validation error', + 500: b'Internal server error', } - msg = errors.get(code, 'status code %d' % code) - raise LfsRemoteError(_('LFS server error for "%s": %s') + msg = errors.get(code, b'status code %d' % code) + raise LfsRemoteError(_(b'LFS server error for "%s": %s') % (filename, msg)) else: raise LfsRemoteError( - _('LFS server error. Unsolicited response for oid %s') - % response['oid']) + _(b'LFS server error. Unsolicited response for oid %s') + % response[b'oid']) def _extractobjects(self, response, pointers, action): """extract objects from response of the batch API @@ -382,12 +414,13 @@ raise if any object has an error """ # Scan errors from objects - fail early - objects = response.get('objects', []) + objects = response.get(b'objects', []) self._checkforservererror(pointers, objects, action) # Filter objects with given action. Practically, this skips uploading # objects which exist in the server. 
- filteredobjects = [o for o in objects if action in o.get('actions', [])] + filteredobjects = [o for o in objects + if action in o.get(b'actions', [])] return filteredobjects @@ -401,36 +434,37 @@ See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\ basic-transfers.md """ - oid = pycompat.bytestr(obj['oid']) + oid = obj[b'oid'] + href = obj[b'actions'][action].get(b'href') + headers = obj[b'actions'][action].get(b'header', {}).items() - href = pycompat.bytestr(obj['actions'][action].get('href')) - headers = obj['actions'][action].get('header', {}).items() - - request = util.urlreq.request(href) - if action == 'upload': + request = util.urlreq.request(pycompat.strurl(href)) + if action == b'upload': # If uploading blobs, read data from local blobstore. if not localstore.verify(oid): - raise error.Abort(_('detected corrupt lfs object: %s') % oid, - hint=_('run hg verify')) + raise error.Abort(_(b'detected corrupt lfs object: %s') % oid, + hint=_(b'run hg verify')) request.data = filewithprogress(localstore.open(oid), None) - request.get_method = lambda: 'PUT' - request.add_header('Content-Type', 'application/octet-stream') + request.get_method = lambda: r'PUT' + request.add_header(r'Content-Type', r'application/octet-stream') + request.add_header(r'Content-Length', len(request.data)) for k, v in headers: - request.add_header(k, v) + request.add_header(pycompat.strurl(k), pycompat.strurl(v)) response = b'' try: with contextlib.closing(self.urlopener.open(request)) as req: ui = self.ui # Shorten debug lines if self.ui.debugflag: - ui.debug('Status: %d\n' % req.status) + ui.debug(b'Status: %d\n' % req.status) # lfs-test-server and hg serve return headers in different # order - ui.debug('%s\n' - % '\n'.join(sorted(str(req.info()).splitlines()))) + headers = pycompat.bytestr(req.info()).strip() + ui.debug(b'%s\n' + % b'\n'.join(sorted(headers.splitlines()))) - if action == 'download': + if action == b'download': # If downloading blobs, store downloaded data to 
local # blobstore localstore.download(oid, req) @@ -441,65 +475,65 @@ break response += data if response: - ui.debug('lfs %s response: %s' % (action, response)) + ui.debug(b'lfs %s response: %s' % (action, response)) except util.urlerr.httperror as ex: if self.ui.debugflag: - self.ui.debug('%s: %s\n' % (oid, ex.read())) - raise LfsRemoteError(_('LFS HTTP error: %s (oid=%s, action=%s)') - % (ex, oid, action)) + self.ui.debug(b'%s: %s\n' % (oid, ex.read())) # XXX: also bytes? + raise LfsRemoteError(_(b'LFS HTTP error: %s (oid=%s, action=%s)') + % (stringutil.forcebytestr(ex), oid, action)) except util.urlerr.urlerror as ex: - hint = (_('attempted connection to %s') - % util.urllibcompat.getfullurl(request)) - raise LfsRemoteError(_('LFS error: %s') % _urlerrorreason(ex), + hint = (_(b'attempted connection to %s') + % pycompat.bytesurl(util.urllibcompat.getfullurl(request))) + raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex), hint=hint) def _batch(self, pointers, localstore, action): - if action not in ['upload', 'download']: - raise error.ProgrammingError('invalid Git-LFS action: %s' % action) + if action not in [b'upload', b'download']: + raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action) response = self._batchrequest(pointers, action) objects = self._extractobjects(response, pointers, action) - total = sum(x.get('size', 0) for x in objects) + total = sum(x.get(b'size', 0) for x in objects) sizes = {} for obj in objects: - sizes[obj.get('oid')] = obj.get('size', 0) - topic = {'upload': _('lfs uploading'), - 'download': _('lfs downloading')}[action] + sizes[obj.get(b'oid')] = obj.get(b'size', 0) + topic = {b'upload': _(b'lfs uploading'), + b'download': _(b'lfs downloading')}[action] if len(objects) > 1: - self.ui.note(_('lfs: need to transfer %d objects (%s)\n') + self.ui.note(_(b'lfs: need to transfer %d objects (%s)\n') % (len(objects), util.bytecount(total))) def transfer(chunk): for obj in chunk: - objsize = obj.get('size', 0) + 
objsize = obj.get(b'size', 0) if self.ui.verbose: - if action == 'download': - msg = _('lfs: downloading %s (%s)\n') - elif action == 'upload': - msg = _('lfs: uploading %s (%s)\n') - self.ui.note(msg % (obj.get('oid'), + if action == b'download': + msg = _(b'lfs: downloading %s (%s)\n') + elif action == b'upload': + msg = _(b'lfs: uploading %s (%s)\n') + self.ui.note(msg % (obj.get(b'oid'), util.bytecount(objsize))) retry = self.retry while True: try: self._basictransfer(obj, action, localstore) - yield 1, obj.get('oid') + yield 1, obj.get(b'oid') break except socket.error as ex: if retry > 0: self.ui.note( - _('lfs: failed: %r (remaining retry %d)\n') - % (ex, retry)) + _(b'lfs: failed: %r (remaining retry %d)\n') + % (stringutil.forcebytestr(ex), retry)) retry -= 1 continue raise # Until https multiplexing gets sorted out - if self.ui.configbool('experimental', 'lfs.worker-enable'): + if self.ui.configbool(b'experimental', b'lfs.worker-enable'): oids = worker.worker(self.ui, 0.1, transfer, (), - sorted(objects, key=lambda o: o.get('oid'))) + sorted(objects, key=lambda o: o.get(b'oid'))) else: - oids = transfer(sorted(objects, key=lambda o: o.get('oid'))) + oids = transfer(sorted(objects, key=lambda o: o.get(b'oid'))) with self.ui.makeprogress(topic, total=total) as progress: progress.update(0) @@ -509,14 +543,14 @@ processed += sizes[oid] blobs += 1 progress.update(processed) - self.ui.note(_('lfs: processed: %s\n') % oid) + self.ui.note(_(b'lfs: processed: %s\n') % oid) if blobs > 0: - if action == 'upload': - self.ui.status(_('lfs: uploaded %d files (%s)\n') + if action == b'upload': + self.ui.status(_(b'lfs: uploaded %d files (%s)\n') % (blobs, util.bytecount(processed))) - elif action == 'download': - self.ui.status(_('lfs: downloaded %d files (%s)\n') + elif action == b'download': + self.ui.status(_(b'lfs: downloaded %d files (%s)\n') % (blobs, util.bytecount(processed))) def __del__(self): @@ -531,18 +565,18 @@ """Dummy store storing blobs to temp 
directory.""" def __init__(self, repo, url): - fullpath = repo.vfs.join('lfs', url.path) + fullpath = repo.vfs.join(b'lfs', url.path) self.vfs = lfsvfs(fullpath) def writebatch(self, pointers, fromstore): for p in _deduplicate(pointers): content = fromstore.read(p.oid(), verify=True) - with self.vfs(p.oid(), 'wb', atomictemp=True) as fp: + with self.vfs(p.oid(), b'wb', atomictemp=True) as fp: fp.write(content) def readbatch(self, pointers, tostore): for p in _deduplicate(pointers): - with self.vfs(p.oid(), 'rb') as fp: + with self.vfs(p.oid(), b'rb') as fp: tostore.download(p.oid(), fp) class _nullremote(object): @@ -570,13 +604,13 @@ self._prompt() def _prompt(self): - raise error.Abort(_('lfs.url needs to be configured')) + raise error.Abort(_(b'lfs.url needs to be configured')) _storemap = { - 'https': _gitlfsremote, - 'http': _gitlfsremote, - 'file': _dummyremote, - 'null': _nullremote, + b'https': _gitlfsremote, + b'http': _gitlfsremote, + b'file': _dummyremote, + b'null': _nullremote, None: _promptremote, } @@ -590,8 +624,8 @@ def _verify(oid, content): realoid = node.hex(hashlib.sha256(content).digest()) if realoid != oid: - raise LfsCorruptionError(_('detected corrupt lfs object: %s') % oid, - hint=_('run hg verify')) + raise LfsCorruptionError(_(b'detected corrupt lfs object: %s') % oid, + hint=_(b'run hg verify')) def remote(repo, remote=None): """remotestore factory. return a store in _storemap depending on config @@ -603,7 +637,7 @@ https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md """ - lfsurl = repo.ui.config('lfs', 'url') + lfsurl = repo.ui.config(b'lfs', b'url') url = util.url(lfsurl or '') if lfsurl is None: if remote: @@ -616,7 +650,7 @@ else: # TODO: investigate 'paths.remote:lfsurl' style path customization, # and fall back to inferring from 'paths.remote' if unspecified. 
- path = repo.ui.config('paths', 'default') or '' + path = repo.ui.config(b'paths', b'default') or b'' defaulturl = util.url(path) @@ -628,11 +662,11 @@ defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs' url = util.url(bytes(defaulturl)) - repo.ui.note(_('lfs: assuming remote store: %s\n') % url) + repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url) scheme = url.scheme if scheme not in _storemap: - raise error.Abort(_('lfs: unknown url scheme: %s') % scheme) + raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme) return _storemap[scheme](repo, url) class LfsRemoteError(error.StorageError):
--- a/hgext/lfs/wireprotolfsserver.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/lfs/wireprotolfsserver.py Tue Feb 19 21:55:05 2019 -0800 @@ -43,7 +43,7 @@ if orig(rctx, req, res, checkperm): return True - if not rctx.repo.ui.configbool('experimental', 'lfs.serve'): + if not rctx.repo.ui.configbool(b'experimental', b'lfs.serve'): return False if not util.safehasattr(rctx.repo.svfs, 'lfslocalblobstore'): @@ -54,7 +54,7 @@ try: if req.dispatchpath == b'.git/info/lfs/objects/batch': - checkperm(rctx, req, 'pull') + checkperm(rctx, req, b'pull') return _processbatchrequest(rctx.repo, req, res) # TODO: reserve and use a path in the proposed http wireprotocol /api/ # namespace? @@ -81,7 +81,7 @@ def _logexception(req): """Write information about the current exception to wsgi.errors.""" tb = pycompat.sysbytes(traceback.format_exc()) - errorlog = req.rawenv[r'wsgi.errors'] + errorlog = req.rawenv[b'wsgi.errors'] uri = b'' if req.apppath: @@ -133,25 +133,27 @@ lfsreq = json.loads(req.bodyfh.read()) # If no transfer handlers are explicitly requested, 'basic' is assumed. 
- if 'basic' not in lfsreq.get('transfers', ['basic']): + if r'basic' not in lfsreq.get(r'transfers', [r'basic']): _sethttperror(res, HTTP_BAD_REQUEST, b'Only the basic LFS transfer handler is supported') return True - operation = lfsreq.get('operation') - if operation not in ('upload', 'download'): + operation = lfsreq.get(r'operation') + operation = pycompat.bytestr(operation) + + if operation not in (b'upload', b'download'): _sethttperror(res, HTTP_BAD_REQUEST, b'Unsupported LFS transfer operation: %s' % operation) return True localstore = repo.svfs.lfslocalblobstore - objects = [p for p in _batchresponseobjects(req, lfsreq.get('objects', []), + objects = [p for p in _batchresponseobjects(req, lfsreq.get(r'objects', []), operation, localstore)] rsp = { - 'transfer': 'basic', - 'objects': objects, + r'transfer': r'basic', + r'objects': objects, } res.status = hgwebcommon.statusmessage(HTTP_OK) @@ -190,11 +192,12 @@ for obj in objects: # Convert unicode to ASCII to create a filesystem path - oid = obj.get('oid').encode('ascii') + soid = obj.get(r'oid') + oid = soid.encode(r'ascii') rsp = { - 'oid': oid, - 'size': obj.get('size'), # XXX: should this check the local size? - #'authenticated': True, + r'oid': soid, + r'size': obj.get(r'size'), # XXX: should this check the local size? + #r'authenticated': True, } exists = True @@ -209,7 +212,7 @@ # verified as the file is streamed to the caller. try: verifies = store.verify(oid) - if verifies and action == 'upload': + if verifies and action == b'upload': # The client will skip this upload, but make sure it remains # available locally. store.linkfromusercache(oid) @@ -217,9 +220,9 @@ if inst.errno != errno.ENOENT: _logexception(req) - rsp['error'] = { - 'code': 500, - 'message': inst.strerror or 'Internal Server Server' + rsp[r'error'] = { + r'code': 500, + r'message': inst.strerror or r'Internal Server Server' } yield rsp continue @@ -228,19 +231,19 @@ # Items are always listed for downloads. 
They are dropped for uploads # IFF they already exist locally. - if action == 'download': + if action == b'download': if not exists: - rsp['error'] = { - 'code': 404, - 'message': "The object does not exist" + rsp[r'error'] = { + r'code': 404, + r'message': r"The object does not exist" } yield rsp continue elif not verifies: - rsp['error'] = { - 'code': 422, # XXX: is this the right code? - 'message': "The object is corrupt" + rsp[r'error'] = { + r'code': 422, # XXX: is this the right code? + r'message': r"The object is corrupt" } yield rsp continue @@ -256,22 +259,22 @@ # a gratuitous deviation from lfs-test-server in the test # output. hdr = { - 'Accept': 'application/vnd.git-lfs' + r'Accept': r'application/vnd.git-lfs' } - auth = req.headers.get('Authorization', '') - if auth.startswith('Basic '): - hdr['Authorization'] = auth + auth = req.headers.get(b'Authorization', b'') + if auth.startswith(b'Basic '): + hdr[r'Authorization'] = pycompat.strurl(auth) return hdr - rsp['actions'] = { - '%s' % action: { - 'href': '%s%s/.hg/lfs/objects/%s' - % (req.baseurl, req.apppath, oid), + rsp[r'actions'] = { + r'%s' % pycompat.strurl(action): { + r'href': pycompat.strurl(b'%s%s/.hg/lfs/objects/%s' + % (req.baseurl, req.apppath, oid)), # datetime.isoformat() doesn't include the 'Z' suffix - "expires_at": expiresat.strftime('%Y-%m-%dT%H:%M:%SZ'), - 'header': _buildheader(), + r"expires_at": expiresat.strftime(r'%Y-%m-%dT%H:%M:%SZ'), + r'header': _buildheader(), } } @@ -297,7 +300,7 @@ return True if method == b'PUT': - checkperm('upload') + checkperm(b'upload') # TODO: verify Content-Type? @@ -324,7 +327,7 @@ return True elif method == b'GET': - checkperm('pull') + checkperm(b'pull') res.status = hgwebcommon.statusmessage(HTTP_OK) res.headers[b'Content-Type'] = b'application/octet-stream'
--- a/hgext/mq.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/mq.py Tue Feb 19 21:55:05 2019 -0800 @@ -738,10 +738,10 @@ for f in sorted(files): absf = repo.wjoin(f) if os.path.lexists(absf): + absorig = scmutil.backuppath(self.ui, repo, f) self.ui.note(_('saving current version of %s as %s\n') % - (f, scmutil.origpath(self.ui, repo, f))) - - absorig = scmutil.origpath(self.ui, repo, absf) + (f, os.path.relpath(absorig))) + if copy: util.copyfile(absf, absorig) else: @@ -970,7 +970,7 @@ repo.dirstate.remove(f) for f in merged: repo.dirstate.merge(f) - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() repo.setparents(p1, merge) if all_files and '.hgsubstate' in all_files: @@ -1181,7 +1181,7 @@ def makepatchname(self, title, fallbackname): """Return a suitable filename for title, adding a suffix to make it unique in the existing list""" - namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_') + namebase = re.sub(br'[\s\W_]+', b'_', title.lower()).strip(b'_') namebase = namebase[:75] # avoid too long name (issue5117) if namebase: try: @@ -3521,7 +3521,7 @@ if self.mq.applied and self.mq.checkapplied and not force: parents = self.dirstate.parents() patches = [s.node for s in self.mq.applied] - if parents[0] in patches or parents[1] in patches: + if any(p in patches for p in parents): raise error.Abort(errmsg) def commit(self, text="", user=None, date=None, match=None,
--- a/hgext/narrow/narrowcommands.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/narrow/narrowcommands.py Tue Feb 19 21:55:05 2019 -0800 @@ -348,7 +348,7 @@ """ opts = pycompat.byteskwargs(opts) if repository.NARROW_REQUIREMENT not in repo.requirements: - raise error.Abort(_('the narrow command is only supported on ' + raise error.Abort(_('the tracked command is only supported on ' 'respositories cloned with --narrow')) # Before supporting, decide whether it "hg tracked --clear" should mean
--- a/hgext/notify.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/notify.py Tue Feb 19 21:55:05 2019 -0800 @@ -367,8 +367,12 @@ raise error.Abort(inst) # store sender and subject - sender = encoding.strtolocal(msg[r'From']) - subject = encoding.strtolocal(msg[r'Subject']) + sender = msg[r'From'] + subject = msg[r'Subject'] + if sender is not None: + sender = encoding.strtolocal(sender) + if subject is not None: + subject = encoding.strtolocal(subject) del msg[r'From'], msg[r'Subject'] if not msg.is_multipart():
--- a/hgext/phabricator.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/phabricator.py Tue Feb 19 21:55:05 2019 -0800 @@ -255,9 +255,9 @@ repo.ui.setconfig(b'phabricator', b'repophid', repophid) return repophid -_differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z') +_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z') _differentialrevisiondescre = re.compile( - b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M) + br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M) def getoldnodedrevmap(repo, nodelist): """find previous nodes that has been sent to Phabricator @@ -277,7 +277,6 @@ The ``old node``, if not None, is guaranteed to be the last diff of corresponding Differential Revision, and exist in the repo. """ - url, token = readurltoken(repo) unfi = repo.unfiltered() nodemap = unfi.changelog.nodemap @@ -451,12 +450,13 @@ def userphids(repo, names): """convert user names to PHIDs""" + names = [name.lower() for name in names] query = {b'constraints': {b'usernames': names}} result = callconduit(repo, b'user.search', query) # username not found is not an error of the API. So check if we have missed # some names here. data = result[r'data'] - resolved = set(entry[r'fields'][r'username'] for entry in data) + resolved = set(entry[r'fields'][r'username'].lower() for entry in data) unresolved = set(names) - resolved if unresolved: raise error.Abort(_(b'unknown username: %s')
--- a/hgext/rebase.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/rebase.py Tue Feb 19 21:55:05 2019 -0800 @@ -1278,7 +1278,7 @@ return stats def adjustdest(repo, rev, destmap, state, skipped): - """adjust rebase destination given the current rebase state + r"""adjust rebase destination given the current rebase state rev is what is being rebased. Return a list of two revs, which are the adjusted destinations for rev's p1 and p2, respectively. If a parent is @@ -1804,7 +1804,6 @@ def pullrebase(orig, ui, repo, *args, **opts): 'Call rebase after pull if the latter has been invoked with --rebase' - ret = None if opts.get(r'rebase'): if ui.configbool('commands', 'rebase.requiredest'): msg = _('rebase destination required by configuration')
--- a/hgext/releasenotes.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/releasenotes.py Tue Feb 19 21:55:05 2019 -0800 @@ -55,7 +55,7 @@ ('api', _('API Changes')), ] -RE_DIRECTIVE = re.compile('^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$') +RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$') RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b' BULLET_SECTION = _('Other Changes')
--- a/hgext/remotefilelog/__init__.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/__init__.py Tue Feb 19 21:55:05 2019 -0800 @@ -902,8 +902,7 @@ # If this is a non-follow log without any revs specified, recommend that # the user add -f to speed it up. if not follow and not revs: - match, pats = scmutil.matchandpats(repo['.'], pats, - pycompat.byteskwargs(opts)) + match = scmutil.match(repo['.'], pats, pycompat.byteskwargs(opts)) isfile = not match.anypats() if isfile: for file in match.files():
--- a/hgext/remotefilelog/basepack.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/basepack.py Tue Feb 19 21:55:05 2019 -0800 @@ -270,9 +270,9 @@ # only affect this instance self.VERSION = version elif self.VERSION != version: - raise RuntimeError('inconsistent version: %s' % version) + raise RuntimeError('inconsistent version: %d' % version) else: - raise RuntimeError('unsupported version: %s' % version) + raise RuntimeError('unsupported version: %d' % version) class basepack(versionmixin): # The maximum amount we should read via mmap before remmaping so the old @@ -457,8 +457,6 @@ pass def writeindex(self): - rawindex = '' - largefanout = len(self.entries) > SMALLFANOUTCUTOFF if largefanout: params = indexparams(LARGEFANOUTPREFIX, self.VERSION)
--- a/hgext/remotefilelog/basestore.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/basestore.py Tue Feb 19 21:55:05 2019 -0800 @@ -410,16 +410,18 @@ def wrapped(self, *args, **kwargs): retrylog = self.retrylog or noop funcname = fn.__name__ - for i in pycompat.xrange(self.numattempts): + i = 0 + while i < self.numattempts: if i > 0: retrylog('re-attempting (n=%d) %s\n' % (i, funcname)) self.markforrefresh() + i += 1 try: return fn(self, *args, **kwargs) except KeyError: - pass - # retries exhausted - retrylog('retries exhausted in %s, raising KeyError\n' % - pycompat.sysbytes(funcname)) - raise + if i == self.numattempts: + # retries exhausted + retrylog('retries exhausted in %s, raising KeyError\n' % + pycompat.sysbytes(funcname)) + raise return wrapped
--- a/hgext/remotefilelog/datapack.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/datapack.py Tue Feb 19 21:55:05 2019 -0800 @@ -242,7 +242,7 @@ entry = index[end:end + entrylen] else: while start < end - entrylen: - mid = start + (end - start) / 2 + mid = start + (end - start) // 2 mid = mid - ((mid - params.indexstart) % entrylen) midnode = index[mid:mid + NODELENGTH] if midnode == node: @@ -250,10 +250,8 @@ break if node > midnode: start = mid - startnode = midnode elif node < midnode: end = mid - endnode = midnode else: return None
--- a/hgext/remotefilelog/debugcommands.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/debugcommands.py Tue Feb 19 21:55:05 2019 -0800 @@ -175,7 +175,6 @@ return zlib.decompress(raw) def parsefileblob(path, decompress): - raw = None f = open(path, "rb") try: raw = f.read()
--- a/hgext/remotefilelog/historypack.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/historypack.py Tue Feb 19 21:55:05 2019 -0800 @@ -259,10 +259,8 @@ return self._index[mid:mid + entrylen] if node > midnode: start = mid - startnode = midnode elif node < midnode: end = mid - endnode = midnode return None def markledger(self, ledger, options=None): @@ -514,7 +512,6 @@ fileindexentries.append(rawentry) - nodecountraw = '' nodecountraw = struct.pack('!Q', nodecount) return (''.join(fileindexentries) + nodecountraw + ''.join(nodeindexentries))
--- a/hgext/remotefilelog/remotefilectx.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/remotefilectx.py Tue Feb 19 21:55:05 2019 -0800 @@ -15,7 +15,6 @@ context, error, phases, - pycompat, util, ) from . import shallowutil @@ -39,11 +38,11 @@ @propertycache def _changeid(self): - if '_changeid' in self.__dict__: + if r'_changeid' in self.__dict__: return self._changeid - elif '_changectx' in self.__dict__: + elif r'_changectx' in self.__dict__: return self._changectx.rev() - elif '_descendantrev' in self.__dict__: + elif r'_descendantrev' in self.__dict__: # this file context was created from a revision with a known # descendant, we can (lazily) correct for linkrev aliases linknode = self._adjustlinknode(self._path, self._filelog, @@ -102,7 +101,7 @@ """ lkr = self.linkrev() attrs = vars(self) - noctx = not ('_changeid' in attrs or '_changectx' in attrs) + noctx = not (r'_changeid' in attrs or r'_changectx' in attrs) if noctx or self.rev() == lkr: return lkr linknode = self._adjustlinknode(self._path, self._filelog, @@ -316,7 +315,7 @@ finally: elapsed = time.time() - start repo.ui.log('linkrevfixup', logmsg + '\n', elapsed=elapsed * 1000, - **pycompat.strkwargs(commonlogkwargs)) + **commonlogkwargs) def _verifylinknode(self, revs, linknode): """ @@ -452,8 +451,8 @@ class remoteworkingfilectx(context.workingfilectx, remotefilectx): def __init__(self, repo, path, filelog=None, workingctx=None): self._ancestormap = None - return super(remoteworkingfilectx, self).__init__(repo, path, - filelog, workingctx) + super(remoteworkingfilectx, self).__init__(repo, path, filelog, + workingctx) def parents(self): return remotefilectx.parents(self)
--- a/hgext/remotefilelog/remotefilelog.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/remotefilelog.py Tue Feb 19 21:55:05 2019 -0800 @@ -61,8 +61,6 @@ return t[s + 2:] def add(self, text, meta, transaction, linknode, p1=None, p2=None): - hashtext = text - # hash with the metadata, like in vanilla filelogs hashtext = shallowutil.createrevlogtext(text, meta.get('copy'), meta.get('copyrev'))
--- a/hgext/remotefilelog/remotefilelogserver.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/remotefilelogserver.py Tue Feb 19 21:55:05 2019 -0800 @@ -54,7 +54,7 @@ elif cap.startswith("excludepattern="): excludepattern = cap[len("excludepattern="):].split('\0') - m = match.always(repo.root, '') + m = match.always() if includepattern or excludepattern: m = match.match(repo.root, '', None, includepattern, excludepattern) @@ -104,7 +104,7 @@ oldnoflatmf = state.noflatmf try: state.shallowremote = True - state.match = match.always(repo.root, '') + state.match = match.always() state.noflatmf = other.get('noflatmanifest') == 'True' if includepattern or excludepattern: state.match = match.match(repo.root, '', None,
--- a/hgext/remotefilelog/repack.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/repack.py Tue Feb 19 21:55:05 2019 -0800 @@ -601,7 +601,6 @@ # TODO: Optimize the deltachain fetching. Since we're # iterating over the different version of the file, we may # be fetching the same deltachain over and over again. - meta = None if deltabase != nullid: deltaentry = self.data.getdelta(filename, node) delta, deltabasename, origdeltabase, meta = deltaentry
--- a/hgext/remotefilelog/shallowbundle.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/shallowbundle.py Tue Feb 19 21:55:05 2019 -0800 @@ -162,7 +162,7 @@ repo.shallowmatch = match.match(repo.root, '', None, includepattern, excludepattern) else: - repo.shallowmatch = match.always(repo.root, '') + repo.shallowmatch = match.always() return orig(repo, outgoing, version, source, *args, **kwargs) finally: repo.shallowmatch = original
--- a/hgext/remotefilelog/shallowrepo.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/shallowrepo.py Tue Feb 19 21:55:05 2019 -0800 @@ -289,7 +289,7 @@ repo.__class__ = shallowrepository - repo.shallowmatch = match.always(repo.root, '') + repo.shallowmatch = match.always() makeunionstores(repo)
--- a/hgext/remotefilelog/shallowutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/remotefilelog/shallowutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -237,9 +237,9 @@ # v0, str(int(size)) is the header size = int(header) except ValueError: - raise RuntimeError("unexpected remotefilelog header: illegal format") + raise RuntimeError(r"unexpected remotefilelog header: illegal format") if size is None: - raise RuntimeError("unexpected remotefilelog header: no size found") + raise RuntimeError(r"unexpected remotefilelog header: no size found") return index + 1, size, flags def buildfileblobheader(size, flags, version=None):
--- a/hgext/shelve.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/shelve.py Tue Feb 19 21:55:05 2019 -0800 @@ -674,12 +674,13 @@ hg.update(repo, wctx.node()) files = [] files.extend(shelvectx.files()) - files.extend(shelvectx.parents()[0].files()) + files.extend(shelvectx.p1().files()) # revert will overwrite unknown files, so move them out of the way for file in repo.status(unknown=True).unknown: if file in files: - util.rename(file, scmutil.origpath(ui, repo, file)) + util.rename(repo.wjoin(file), + scmutil.backuppath(ui, repo, file)) ui.pushbuffer(True) cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(), *pathtofiles(repo, files), @@ -809,7 +810,7 @@ """Rebase restored commit from its original location to a destination""" # If the shelve is not immediately on top of the commit # we'll be merging with, rebase it to be on top. - if tmpwctx.node() == shelvectx.parents()[0].node(): + if tmpwctx.node() == shelvectx.p1().node(): return shelvectx overrides = {
--- a/hgext/sparse.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/sparse.py Tue Feb 19 21:55:05 2019 -0800 @@ -199,7 +199,7 @@ def walk(orig, self, match, subrepos, unknown, ignored, full=True): # hack to not exclude explicitly-specified paths so that they can # be warned later on e.g. dirstate.add() - em = matchmod.exact(match._root, match._cwd, match.files()) + em = matchmod.exact(match.files()) sm = matchmod.unionmatcher([self._sparsematcher, em]) match = matchmod.intersectmatchers(match, sm) return orig(self, match, subrepos, unknown, ignored, full)
--- a/hgext/strip.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/strip.py Tue Feb 19 21:55:05 2019 -0800 @@ -39,7 +39,7 @@ if baserev: bctx = repo[baserev] else: - bctx = wctx.parents()[0] + bctx = wctx.p1() for s in sorted(wctx.substate): wctx.sub(s).bailifchanged(True) if s not in bctx.substate or bctx.sub(s).dirty():
--- a/hgext/transplant.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/transplant.py Tue Feb 19 21:55:05 2019 -0800 @@ -155,7 +155,7 @@ if opts is None: opts = {} revs = sorted(revmap) - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() pulls = [] diffopts = patch.difffeatureopts(self.ui, opts) diffopts.git = True @@ -186,7 +186,7 @@ exchange.pull(repo, source.peer(), heads=pulls) merge.update(repo, pulls[-1], branchmerge=False, force=False) - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() pulls = [] domerge = False @@ -323,11 +323,11 @@ else: files = None if merge: - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() repo.setparents(p1, node) - m = match.always(repo.root, '') + m = match.always() else: - m = match.exact(repo.root, '', files) + m = match.exact(files) n = repo.commit(message, user, date, extra=extra, match=m, editor=self.getcommiteditor()) @@ -387,7 +387,7 @@ extra = {'transplant_source': node} try: - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() if p1 != parent: raise error.Abort(_('working directory not at transplant ' 'parent %s') % nodemod.hex(parent)) @@ -668,7 +668,7 @@ tp = transplanter(ui, repo, opts) - p1, p2 = repo.dirstate.parents() + p1 = repo.dirstate.p1() if len(repo) > 0 and p1 == revlog.nullid: raise error.Abort(_('no revision checked out')) if opts.get('continue'): @@ -676,11 +676,7 @@ raise error.Abort(_('no transplant to continue')) else: cmdutil.checkunfinished(repo) - if p2 != revlog.nullid: - raise error.Abort(_('outstanding uncommitted merges')) - m, a, r, d = repo.status()[:4] - if m or a or r or d: - raise error.Abort(_('outstanding local changes')) + cmdutil.bailifchanged(repo) sourcerepo = opts.get('source') if sourcerepo:
--- a/hgext/uncommit.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/uncommit.py Tue Feb 19 21:55:05 2019 -0800 @@ -25,7 +25,7 @@ cmdutil, commands, context, - copies, + copies as copiesmod, error, node, obsutil, @@ -67,10 +67,10 @@ files = (initialfiles - exclude) # return the p1 so that we don't create an obsmarker later if not keepcommit: - return ctx.parents()[0].node() + return ctx.p1().node() # Filter copies - copied = copies.pathcopies(base, ctx) + copied = copiesmod.pathcopies(base, ctx) copied = dict((dst, src) for dst, src in copied.iteritems() if dst in files) def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()): @@ -93,13 +93,14 @@ extra=ctx.extra()) return repo.commitctx(new) -def _fixdirstate(repo, oldctx, newctx, status): +def _fixdirstate(repo, oldctx, newctx, match=None): """ fix the dirstate after switching the working directory from oldctx to newctx which can be result of either unamend or uncommit. """ ds = repo.dirstate + ds.setparents(newctx.node(), node.nullid) copies = dict(ds.copies()) - s = status + s = newctx.status(oldctx, match=match) for f in s.modified: if ds[f] == 'r': # modified + removed -> removed @@ -121,11 +122,7 @@ ds.remove(f) # Merge old parent and old working dir copies - oldcopies = {} - for f in (s.modified + s.added): - src = oldctx[f].renamed() - if src: - oldcopies[f] = src[0] + oldcopies = copiesmod.pathcopies(newctx, oldctx, match) oldcopies.update(copies) copies = dict((dst, oldcopies.get(src, src)) for dst, src in oldcopies.iteritems()) @@ -179,12 +176,10 @@ # Fully removed the old commit mapping[old.node()] = () - scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True) + with repo.dirstate.parentchange(): + _fixdirstate(repo, old, repo[newid], match) - with repo.dirstate.parentchange(): - repo.dirstate.setparents(newid, node.nullid) - s = old.p1().status(old, match=match) - _fixdirstate(repo, old, repo[newid], s) + scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True) def 
predecessormarkers(ctx): """yields the obsolete markers marking the given changeset as a successor""" @@ -244,9 +239,7 @@ dirstate = repo.dirstate with dirstate.parentchange(): - dirstate.setparents(newprednode, node.nullid) - s = repo.status(predctx, curctx) - _fixdirstate(repo, curctx, newpredctx, s) + _fixdirstate(repo, curctx, newpredctx) mapping = {curctx.node(): (newprednode,)} scmutil.cleanupnodes(repo, mapping, 'unamend', fixphase=True)
--- a/hgext/zeroconf/Zeroconf.py Thu Feb 07 20:50:41 2019 +0900 +++ b/hgext/zeroconf/Zeroconf.py Tue Feb 19 21:55:05 2019 -0800 @@ -84,7 +84,6 @@ import itertools import select import socket -import string import struct import threading import time @@ -106,7 +105,7 @@ # Some DNS constants -_MDNS_ADDR = '224.0.0.251' +_MDNS_ADDR = r'224.0.0.251' _MDNS_PORT = 5353 _DNS_PORT = 53 _DNS_TTL = 60 * 60 # one hour default TTL @@ -221,7 +220,7 @@ """A DNS entry""" def __init__(self, name, type, clazz): - self.key = string.lower(name) + self.key = name.lower() self.name = name self.type = type self.clazz = clazz & _CLASS_MASK @@ -620,7 +619,7 @@ first = off while True: - len = ord(self.data[off]) + len = ord(self.data[off:off + 1]) off += 1 if len == 0: break @@ -631,7 +630,7 @@ elif t == 0xC0: if next < 0: next = off + 1 - off = ((len & 0x3F) << 8) | ord(self.data[off]) + off = ((len & 0x3F) << 8) | ord(self.data[off:off + 1]) if off >= first: raise BadDomainNameCircular(off) first = off @@ -938,7 +937,6 @@ self.zeroconf.engine.addReader(self, self.zeroconf.socket) def handle_read(self): - data = addr = port = None sock = self.zeroconf.socket try: data, (addr, port) = sock.recvfrom(_MAX_MSG_ABSOLUTE) @@ -1230,7 +1228,6 @@ delay = _LISTENER_TIME next = now + delay last = now + timeout - result = 0 try: zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN)) @@ -1335,7 +1332,7 @@ # SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it pass self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, - socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) + socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0')) self.listeners = [] self.browsers = [] @@ -1659,7 +1656,7 @@ self.engine.notify() self.unregisterAllServices() self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP, - socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) + socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0')) self.socket.close() # Test a few 
module features, including service registration, service
--- a/mercurial/archival.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/archival.py Tue Feb 19 21:55:05 2019 -0800 @@ -340,7 +340,8 @@ for subpath in sorted(ctx.substate): sub = ctx.workingsub(subpath) submatch = matchmod.subdirmatcher(subpath, match) - total += sub.archive(archiver, prefix, submatch, decode) + subprefix = prefix + subpath + '/' + total += sub.archive(archiver, subprefix, submatch, decode) if total == 0: raise error.Abort(_('no files match the archive pattern'))
--- a/mercurial/bdiff.c Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/bdiff.c Tue Feb 19 21:55:05 2019 -0800 @@ -35,15 +35,19 @@ /* count the lines */ i = 1; /* extra line for sentinel */ - for (p = a; p < plast; p++) - if (*p == '\n') + for (p = a; p < plast; p++) { + if (*p == '\n') { i++; - if (p == plast) + } + } + if (p == plast) { i++; + } *lr = l = (struct bdiff_line *)calloc(i, sizeof(struct bdiff_line)); - if (!l) + if (!l) { return -1; + } /* build the line array and calculate hashes */ hash = 0; @@ -90,18 +94,21 @@ struct pos *h = NULL; /* build a hash table of the next highest power of 2 */ - while (buckets < bn + 1) + while (buckets < bn + 1) { buckets *= 2; + } /* try to allocate a large hash table to avoid collisions */ for (scale = 4; scale; scale /= 2) { h = (struct pos *)calloc(buckets, scale * sizeof(struct pos)); - if (h) + if (h) { break; + } } - if (!h) + if (!h) { return 0; + } buckets = buckets * scale - 1; @@ -115,9 +122,11 @@ for (i = 0; i < bn; i++) { /* find the equivalence class */ for (j = b[i].hash & buckets; h[j].pos != -1; - j = (j + 1) & buckets) - if (!cmp(b + i, b + h[j].pos)) + j = (j + 1) & buckets) { + if (!cmp(b + i, b + h[j].pos)) { break; + } + } /* add to the head of the equivalence class */ b[i].n = h[j].pos; @@ -133,15 +142,18 @@ for (i = 0; i < an; i++) { /* find the equivalence class */ for (j = a[i].hash & buckets; h[j].pos != -1; - j = (j + 1) & buckets) - if (!cmp(a + i, b + h[j].pos)) + j = (j + 1) & buckets) { + if (!cmp(a + i, b + h[j].pos)) { break; + } + } a[i].e = j; /* use equivalence class for quick compare */ - if (h[j].len <= t) + if (h[j].len <= t) { a[i].n = h[j].pos; /* point to head of match list */ - else + } else { a[i].n = -1; /* too popular */ + } } /* discard hash tables */ @@ -158,16 +170,18 @@ /* window our search on large regions to better bound worst-case performance. by choosing a window at the end, we reduce skipping overhead on the b chains. 
*/ - if (a2 - a1 > 30000) + if (a2 - a1 > 30000) { a1 = a2 - 30000; + } half = (a1 + a2 - 1) / 2; bhalf = (b1 + b2 - 1) / 2; for (i = a1; i < a2; i++) { /* skip all lines in b after the current block */ - for (j = a[i].n; j >= b2; j = b[j].n) + for (j = a[i].n; j >= b2; j = b[j].n) { ; + } /* loop through all lines match a[i] in b */ for (; j >= b1; j = b[j].n) { @@ -179,8 +193,9 @@ break; } /* previous line mismatch? */ - if (a[i - k].e != b[j - k].e) + if (a[i - k].e != b[j - k].e) { break; + } } pos[j].pos = i; @@ -212,8 +227,9 @@ } /* expand match to include subsequent popular lines */ - while (mi + mk < a2 && mj + mk < b2 && a[mi + mk].e == b[mj + mk].e) + while (mi + mk < a2 && mj + mk < b2 && a[mi + mk].e == b[mj + mk].e) { mk++; + } *omi = mi; *omj = mj; @@ -230,18 +246,21 @@ while (1) { /* find the longest match in this chunk */ k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j); - if (!k) + if (!k) { return l; + } /* and recurse on the remaining chunks on either side */ l = recurse(a, b, pos, a1, i, b1, j, l); - if (!l) + if (!l) { return NULL; + } l->next = (struct bdiff_hunk *)malloc(sizeof(struct bdiff_hunk)); - if (!l->next) + if (!l->next) { return NULL; + } l = l->next; l->a1 = i; @@ -271,14 +290,16 @@ /* generate the matching block list */ curr = recurse(a, b, pos, 0, an, 0, bn, base); - if (!curr) + if (!curr) { return -1; + } /* sentinel end hunk */ curr->next = (struct bdiff_hunk *)malloc(sizeof(struct bdiff_hunk)); - if (!curr->next) + if (!curr->next) { return -1; + } curr = curr->next; curr->a1 = curr->a2 = an; curr->b1 = curr->b2 = bn; @@ -291,10 +312,11 @@ for (curr = base->next; curr; curr = curr->next) { struct bdiff_hunk *next = curr->next; - if (!next) + if (!next) { break; + } - if (curr->a2 == next->a1 || curr->b2 == next->b1) + if (curr->a2 == next->a1 || curr->b2 == next->b1) { while (curr->a2 < an && curr->b2 < bn && next->a1 < next->a2 && next->b1 < next->b2 && !cmp(a + curr->a2, b + curr->b2)) { @@ -303,10 +325,12 @@ 
curr->b2++; next->b1++; } + } } - for (curr = base->next; curr; curr = curr->next) + for (curr = base->next; curr; curr = curr->next) { count++; + } return count; }
--- a/mercurial/bookmarks.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/bookmarks.py Tue Feb 19 21:55:05 2019 -0800 @@ -44,7 +44,7 @@ return fp class bmstore(object): - """Storage for bookmarks. + r"""Storage for bookmarks. This object should do all bookmark-related reads and writes, so that it's fairly simple to replace the storage underlying @@ -306,7 +306,6 @@ itself as we commit. This function returns the name of that bookmark. It is stored in .hg/bookmarks.current """ - mark = None try: file = repo.vfs('bookmarks.current') except IOError as inst:
--- a/mercurial/branchmap.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/branchmap.py Tue Feb 19 21:55:05 2019 -0800 @@ -30,63 +30,6 @@ pack_into = struct.pack_into unpack_from = struct.unpack_from -def _filename(repo): - """name of a branchcache file for a given repo or repoview""" - filename = "branch2" - if repo.filtername: - filename = '%s-%s' % (filename, repo.filtername) - return filename - -def read(repo): - f = None - try: - f = repo.cachevfs(_filename(repo)) - lineiter = iter(f) - cachekey = next(lineiter).rstrip('\n').split(" ", 2) - last, lrev = cachekey[:2] - last, lrev = bin(last), int(lrev) - filteredhash = None - if len(cachekey) > 2: - filteredhash = bin(cachekey[2]) - partial = branchcache(tipnode=last, tiprev=lrev, - filteredhash=filteredhash) - if not partial.validfor(repo): - # invalidate the cache - raise ValueError(r'tip differs') - cl = repo.changelog - for l in lineiter: - l = l.rstrip('\n') - if not l: - continue - node, state, label = l.split(" ", 2) - if state not in 'oc': - raise ValueError(r'invalid branch state') - label = encoding.tolocal(label.strip()) - node = bin(node) - if not cl.hasnode(node): - raise ValueError( - r'node %s does not exist' % pycompat.sysstr(hex(node))) - partial.setdefault(label, []).append(node) - if state == 'c': - partial._closednodes.add(node) - - except (IOError, OSError): - return None - - except Exception as inst: - if repo.ui.debugflag: - msg = 'invalid branchheads cache' - if repo.filtername is not None: - msg += ' (%s)' % repo.filtername - msg += ': %s\n' - repo.ui.debug(msg % pycompat.bytestr(inst)) - partial = None - - finally: - if f: - f.close() - - return partial ### Nearest subset relation # Nearest subset of filter X is a filter Y so that: @@ -100,65 +43,89 @@ 'served': 'immutable', 'immutable': 'base'} -def updatecache(repo): - cl = repo.changelog - filtername = repo.filtername - partial = repo._branchcaches.get(filtername) + +class BranchMapCache(object): + """mapping of filtered views of 
repo with their branchcache""" + def __init__(self): + self._per_filter = {} + + def __getitem__(self, repo): + self.updatecache(repo) + return self._per_filter[repo.filtername] - revs = [] - if partial is None or not partial.validfor(repo): - partial = read(repo) - if partial is None: + def updatecache(self, repo): + """Update the cache for the given filtered view on a repository""" + # This can trigger updates for the caches for subsets of the filtered + # view, e.g. when there is no cache for this filtered view or the cache + # is stale. + + cl = repo.changelog + filtername = repo.filtername + bcache = self._per_filter.get(filtername) + if bcache is None or not bcache.validfor(repo): + # cache object missing or cache object stale? Read from disk + bcache = branchcache.fromfile(repo) + + revs = [] + if bcache is None: + # no (fresh) cache available anymore, perhaps we can re-use + # the cache for a subset, then extend that to add info on missing + # revisions. subsetname = subsettable.get(filtername) - if subsetname is None: - partial = branchcache() - else: + if subsetname is not None: subset = repo.filtered(subsetname) - partial = subset.branchmap().copy() + bcache = self[subset].copy() extrarevs = subset.changelog.filteredrevs - cl.filteredrevs - revs.extend(r for r in extrarevs if r <= partial.tiprev) - revs.extend(cl.revs(start=partial.tiprev + 1)) - if revs: - partial.update(repo, revs) - partial.write(repo) + revs.extend(r for r in extrarevs if r <= bcache.tiprev) + else: + # nothing to fall back on, start empty. + bcache = branchcache() - assert partial.validfor(repo), filtername - repo._branchcaches[repo.filtername] = partial - -def replacecache(repo, bm): - """Replace the branchmap cache for a repo with a branch mapping. + revs.extend(cl.revs(start=bcache.tiprev + 1)) + if revs: + bcache.update(repo, revs) - This is likely only called during clone with a branch map from a remote. 
- """ - cl = repo.changelog - clrev = cl.rev - clbranchinfo = cl.branchinfo - rbheads = [] - closed = [] - for bheads in bm.itervalues(): - rbheads.extend(bheads) - for h in bheads: - r = clrev(h) - b, c = clbranchinfo(r) - if c: - closed.append(h) + assert bcache.validfor(repo), filtername + self._per_filter[repo.filtername] = bcache + + def replace(self, repo, remotebranchmap): + """Replace the branchmap cache for a repo with a branch mapping. + + This is likely only called during clone with a branch map from a + remote. - if rbheads: - rtiprev = max((int(clrev(node)) - for node in rbheads)) - cache = branchcache(bm, - repo[rtiprev].node(), - rtiprev, - closednodes=closed) + """ + cl = repo.changelog + clrev = cl.rev + clbranchinfo = cl.branchinfo + rbheads = [] + closed = [] + for bheads in remotebranchmap.itervalues(): + rbheads += bheads + for h in bheads: + r = clrev(h) + b, c = clbranchinfo(r) + if c: + closed.append(h) - # Try to stick it as low as possible - # filter above served are unlikely to be fetch from a clone - for candidate in ('base', 'immutable', 'served'): - rview = repo.filtered(candidate) - if cache.validfor(rview): - repo._branchcaches[candidate] = cache - cache.write(rview) - break + if rbheads: + rtiprev = max((int(clrev(node)) for node in rbheads)) + cache = branchcache( + remotebranchmap, repo[rtiprev].node(), rtiprev, + closednodes=closed) + + # Try to stick it as low as possible + # filter above served are unlikely to be fetch from a clone + for candidate in ('base', 'immutable', 'served'): + rview = repo.filtered(candidate) + if cache.validfor(rview): + self._per_filter[candidate] = cache + cache.write(rview) + return + + def clear(self): + self._per_filter.clear() + class branchcache(dict): """A dict like object that hold branches heads cache. 
@@ -196,6 +163,65 @@ else: self._closednodes = closednodes + @classmethod + def fromfile(cls, repo): + f = None + try: + f = repo.cachevfs(cls._filename(repo)) + lineiter = iter(f) + cachekey = next(lineiter).rstrip('\n').split(" ", 2) + last, lrev = cachekey[:2] + last, lrev = bin(last), int(lrev) + filteredhash = None + if len(cachekey) > 2: + filteredhash = bin(cachekey[2]) + bcache = cls(tipnode=last, tiprev=lrev, filteredhash=filteredhash) + if not bcache.validfor(repo): + # invalidate the cache + raise ValueError(r'tip differs') + cl = repo.changelog + for line in lineiter: + line = line.rstrip('\n') + if not line: + continue + node, state, label = line.split(" ", 2) + if state not in 'oc': + raise ValueError(r'invalid branch state') + label = encoding.tolocal(label.strip()) + node = bin(node) + if not cl.hasnode(node): + raise ValueError( + r'node %s does not exist' % pycompat.sysstr(hex(node))) + bcache.setdefault(label, []).append(node) + if state == 'c': + bcache._closednodes.add(node) + + except (IOError, OSError): + return None + + except Exception as inst: + if repo.ui.debugflag: + msg = 'invalid branchheads cache' + if repo.filtername is not None: + msg += ' (%s)' % repo.filtername + msg += ': %s\n' + repo.ui.debug(msg % pycompat.bytestr(inst)) + bcache = None + + finally: + if f: + f.close() + + return bcache + + @staticmethod + def _filename(repo): + """name of a branchcache file for a given repo or repoview""" + filename = "branch2" + if repo.filtername: + filename = '%s-%s' % (filename, repo.filtername) + return filename + def validfor(self, repo): """Is the cache content valid regarding a repo @@ -241,26 +267,27 @@ def copy(self): """return an deep copy of the branchcache object""" - return branchcache(self, self.tipnode, self.tiprev, self.filteredhash, - self._closednodes) + return type(self)( + self, self.tipnode, self.tiprev, self.filteredhash, + self._closednodes) def write(self, repo): try: - f = repo.cachevfs(_filename(repo), "w", 
atomictemp=True) + f = repo.cachevfs(self._filename(repo), "w", atomictemp=True) cachekey = [hex(self.tipnode), '%d' % self.tiprev] if self.filteredhash is not None: cachekey.append(hex(self.filteredhash)) f.write(" ".join(cachekey) + '\n') nodecount = 0 for label, nodes in sorted(self.iteritems()): + label = encoding.fromlocal(label) for node in nodes: nodecount += 1 if node in self._closednodes: state = 'c' else: state = 'o' - f.write("%s %s %s\n" % (hex(node), state, - encoding.fromlocal(label))) + f.write("%s %s %s\n" % (hex(node), state, label)) f.close() repo.ui.log('branchcache', 'wrote %s branch cache with %d labels and %d nodes\n', @@ -331,6 +358,15 @@ repo.ui.log('branchcache', 'updated %s branch cache in %.4f seconds\n', repo.filtername, duration) + self.write(repo) + + +class remotebranchcache(branchcache): + """Branchmap info for a remote connection, should not write locally""" + def write(self, repo): + pass + + # Revision branch info cache _rbcversion = '-v1'
--- a/mercurial/cext/base85.c Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/cext/base85.c Tue Feb 19 21:55:05 2019 -0800 @@ -24,8 +24,9 @@ unsigned i; memset(b85dec, 0, sizeof(b85dec)); - for (i = 0; i < sizeof(b85chars); i++) + for (i = 0; i < sizeof(b85chars); i++) { b85dec[(int)(b85chars[i])] = i + 1; + } } static PyObject *b85encode(PyObject *self, PyObject *args) @@ -37,19 +38,22 @@ unsigned int acc, val, ch; int pad = 0; - if (!PyArg_ParseTuple(args, PY23("s#|i", "y#|i"), &text, &len, &pad)) + if (!PyArg_ParseTuple(args, PY23("s#|i", "y#|i"), &text, &len, &pad)) { return NULL; + } - if (pad) + if (pad) { olen = ((len + 3) / 4 * 5) - 3; - else { + } else { olen = len % 4; - if (olen) + if (olen) { olen++; + } olen += len / 4 * 5; } - if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3))) + if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3))) { return NULL; + } dst = PyBytes_AsString(out); @@ -58,8 +62,9 @@ for (i = 24; i >= 0; i -= 8) { ch = *text++; acc |= ch << i; - if (--len == 0) + if (--len == 0) { break; + } } for (i = 4; i >= 0; i--) { val = acc % 85; @@ -69,8 +74,9 @@ dst += 5; } - if (!pad) + if (!pad) { _PyBytes_Resize(&out, olen); + } return out; } @@ -84,15 +90,18 @@ int c; unsigned int acc; - if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &len)) + if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &len)) { return NULL; + } olen = len / 5 * 4; i = len % 5; - if (i) + if (i) { olen += i - 1; - if (!(out = PyBytes_FromStringAndSize(NULL, olen))) + } + if (!(out = PyBytes_FromStringAndSize(NULL, olen))) { return NULL; + } dst = PyBytes_AsString(out); @@ -100,8 +109,9 @@ while (i < len) { acc = 0; cap = len - i - 1; - if (cap > 4) + if (cap > 4) { cap = 4; + } for (j = 0; j < cap; i++, j++) { c = b85dec[(int)*text++] - 1; if (c < 0) { @@ -136,10 +146,12 @@ cap = olen < 4 ? 
olen : 4; olen -= cap; - for (j = 0; j < 4 - cap; j++) + for (j = 0; j < 4 - cap; j++) { acc *= 85; - if (cap && cap < 4) + } + if (cap && cap < 4) { acc += 0xffffff >> (cap - 1) * 8; + } for (j = 0; j < cap; j++) { acc = (acc << 8) | (acc >> 24); *dst++ = acc;
--- a/mercurial/cext/bdiff.c Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/cext/bdiff.c Tue Feb 19 21:55:05 2019 -0800 @@ -29,22 +29,26 @@ l.next = NULL; - if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb)) + if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb)) { return NULL; + } an = bdiff_splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a); bn = bdiff_splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b); - if (!a || !b) + if (!a || !b) { goto nomem; + } count = bdiff_diff(a, an, b, bn, &l); - if (count < 0) + if (count < 0) { goto nomem; + } rl = PyList_New(count); - if (!rl) + if (!rl) { goto nomem; + } for (h = l.next; h; h = h->next) { m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2); @@ -72,8 +76,10 @@ l.next = NULL; - if (!PyArg_ParseTuple(args, PY23("s*s*:bdiff", "y*y*:bdiff"), &ba, &bb)) + if (!PyArg_ParseTuple(args, PY23("s*s*:bdiff", "y*y*:bdiff"), &ba, + &bb)) { return NULL; + } if (!PyBuffer_IsContiguous(&ba, 'C') || ba.ndim > 1) { PyErr_SetString(PyExc_ValueError, "bdiff input not contiguous"); @@ -98,8 +104,9 @@ lmax = la > lb ? 
lb : la; for (ia = ba.buf, ib = bb.buf; li < lmax && *ia == *ib; ++li, ++ia, ++ib) { - if (*ia == '\n') + if (*ia == '\n') { lcommon = li + 1; + } } /* we can almost add: if (li == lmax) lcommon = li; */ @@ -119,8 +126,9 @@ /* calculate length of output */ la = lb = 0; for (h = l.next; h; h = h->next) { - if (h->a1 != la || h->b1 != lb) + if (h->a1 != la || h->b1 != lb) { len += 12 + bl[h->b1].l - bl[lb].l; + } la = h->a2; lb = h->b2; } @@ -129,8 +137,9 @@ result = PyBytes_FromStringAndSize(NULL, len); - if (!result) + if (!result) { goto cleanup; + } /* build binary patch */ rb = PyBytes_AsString(result); @@ -151,8 +160,9 @@ } cleanup: - if (_save) + if (_save) { PyEval_RestoreThread(_save); + } PyBuffer_Release(&ba); PyBuffer_Release(&bb); free(al); @@ -174,20 +184,23 @@ Py_ssize_t i, rlen, wlen = 0; char *w; - if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws)) + if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws)) { return NULL; + } r = PyBytes_AsString(s); rlen = PyBytes_Size(s); w = (char *)PyMem_Malloc(rlen ? 
rlen : 1); - if (!w) + if (!w) { goto nomem; + } for (i = 0; i != rlen; i++) { c = r[i]; if (c == ' ' || c == '\t' || c == '\r') { - if (!allws && (wlen == 0 || w[wlen - 1] != ' ')) + if (!allws && (wlen == 0 || w[wlen - 1] != ' ')) { w[wlen++] = ' '; + } } else if (c == '\n' && !allws && wlen > 0 && w[wlen - 1] == ' ') { w[wlen - 1] = '\n'; @@ -207,8 +220,9 @@ const char *source, Py_ssize_t len) { PyObject *sliced = PyBytes_FromStringAndSize(source, len); - if (sliced == NULL) + if (sliced == NULL) { return false; + } PyList_SET_ITEM(list, destidx, sliced); return true; } @@ -232,19 +246,22 @@ ++nelts; } } - if ((result = PyList_New(nelts + 1)) == NULL) + if ((result = PyList_New(nelts + 1)) == NULL) { goto abort; + } nelts = 0; for (i = 0; i < size - 1; ++i) { if (text[i] == '\n') { if (!sliceintolist(result, nelts++, text + start, - i - start + 1)) + i - start + 1)) { goto abort; + } start = i + 1; } } - if (!sliceintolist(result, nelts++, text + start, size - start)) + if (!sliceintolist(result, nelts++, text + start, size - start)) { goto abort; + } return result; abort: Py_XDECREF(result); @@ -257,8 +274,9 @@ PyObject *rl = (PyObject *)priv; PyObject *m = Py_BuildValue("LLLL", a1, a2, b1, b2); int r; - if (!m) + if (!m) { return -1; + } r = PyList_Append(rl, m); Py_DECREF(m); return r; @@ -282,15 +300,17 @@ }; if (!PyArg_ParseTuple(args, PY23("s#s#", "y#y#"), &a.ptr, &la, &b.ptr, - &lb)) + &lb)) { return NULL; + } a.size = la; b.size = lb; rl = PyList_New(0); - if (!rl) + if (!rl) { return PyErr_NoMemory(); + } ecb.priv = rl;
--- a/mercurial/cext/charencode.c Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/cext/charencode.c Tue Feb 19 21:55:05 2019 -0800 @@ -114,8 +114,9 @@ ret = PyBytes_FromStringAndSize(NULL, len / 2); - if (!ret) + if (!ret) { return NULL; + } d = PyBytes_AsString(ret); @@ -133,21 +134,24 @@ const char *buf; Py_ssize_t i, len; if (!PyArg_ParseTuple(args, PY23("s#:isasciistr", "y#:isasciistr"), - &buf, &len)) + &buf, &len)) { return NULL; + } i = 0; /* char array in PyStringObject should be at least 4-byte aligned */ if (((uintptr_t)buf & 3) == 0) { const uint32_t *p = (const uint32_t *)buf; for (; i < len / 4; i++) { - if (p[i] & 0x80808080U) + if (p[i] & 0x80808080U) { Py_RETURN_FALSE; + } } i *= 4; } for (; i < len; i++) { - if (buf[i] & 0x80) + if (buf[i] & 0x80) { Py_RETURN_FALSE; + } } Py_RETURN_TRUE; } @@ -164,8 +168,9 @@ len = PyBytes_GET_SIZE(str_obj); newobj = PyBytes_FromStringAndSize(NULL, len); - if (!newobj) + if (!newobj) { goto quit; + } newstr = PyBytes_AS_STRING(newobj); @@ -197,16 +202,18 @@ PyObject *asciilower(PyObject *self, PyObject *args) { PyObject *str_obj; - if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj)) + if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj)) { return NULL; + } return _asciitransform(str_obj, lowertable, NULL); } PyObject *asciiupper(PyObject *self, PyObject *args) { PyObject *str_obj; - if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj)) + if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj)) { return NULL; + } return _asciitransform(str_obj, uppertable, NULL); } @@ -222,8 +229,9 @@ if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap", &PyDict_Type, &dmap, &PyInt_Type, &spec_obj, &PyFunction_Type, - &normcase_fallback)) + &normcase_fallback)) { goto quit; + } spec = (int)PyInt_AS_LONG(spec_obj); switch (spec) { @@ -244,8 +252,9 @@ /* Add some more entries to deal with additions outside this function. 
*/ file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11); - if (file_foldmap == NULL) + if (file_foldmap == NULL) { goto quit; + } while (PyDict_Next(dmap, &pos, &k, &v)) { if (!dirstate_tuple_check(v)) { @@ -265,8 +274,9 @@ normcase_fallback, k, NULL); } - if (normed == NULL) + if (normed == NULL) { goto quit; + } if (PyDict_SetItem(file_foldmap, normed, k) == -1) { Py_DECREF(normed); goto quit; } @@ -377,22 +387,25 @@ Py_ssize_t origlen, esclen; int paranoid; if (!PyArg_ParseTuple(args, "O!i:jsonescapeu8fast", &PyBytes_Type, - &origstr, &paranoid)) + &origstr, &paranoid)) { return NULL; + } origbuf = PyBytes_AS_STRING(origstr); origlen = PyBytes_GET_SIZE(origstr); esclen = jsonescapelen(origbuf, origlen, paranoid); - if (esclen < 0) + if (esclen < 0) { return NULL; /* unsupported char found or overflow */ + } if (origlen == esclen) { Py_INCREF(origstr); return origstr; } escstr = PyBytes_FromStringAndSize(NULL, esclen); - if (!escstr) + if (!escstr) { return NULL; + } encodejsonescape(PyBytes_AS_STRING(escstr), esclen, origbuf, origlen, paranoid);
--- a/mercurial/cext/mpatch.c Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/cext/mpatch.c Tue Feb 19 21:55:05 2019 -0800 @@ -55,13 +55,16 @@ int r; PyObject *tmp = PyList_GetItem((PyObject *)bins, pos); - if (!tmp) + if (!tmp) { return NULL; - if (PyObject_GetBuffer(tmp, &buffer, PyBUF_CONTIG_RO)) + } + if (PyObject_GetBuffer(tmp, &buffer, PyBUF_CONTIG_RO)) { return NULL; + } if ((r = mpatch_decode(buffer.buf, buffer.len, &res)) < 0) { - if (!PyErr_Occurred()) + if (!PyErr_Occurred()) { setpyerr(r); + } res = NULL; } @@ -78,8 +81,9 @@ char *out; Py_ssize_t len, outlen; - if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins)) + if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins)) { return NULL; + } len = PyList_Size(bins); if (!len) { @@ -94,8 +98,9 @@ patch = mpatch_fold(bins, cpygetitem, 0, len); if (!patch) { /* error already set or memory error */ - if (!PyErr_Occurred()) + if (!PyErr_Occurred()) { PyErr_NoMemory(); + } result = NULL; goto cleanup; } @@ -126,8 +131,9 @@ cleanup: mpatch_lfree(patch); PyBuffer_Release(&buffer); - if (!result && !PyErr_Occurred()) + if (!result && !PyErr_Occurred()) { setpyerr(r); + } return result; } @@ -138,15 +144,18 @@ Py_ssize_t patchlen; char *bin; - if (!PyArg_ParseTuple(args, PY23("ls#", "ly#"), &orig, &bin, &patchlen)) + if (!PyArg_ParseTuple(args, PY23("ls#", "ly#"), &orig, &bin, + &patchlen)) { return NULL; + } while (pos >= 0 && pos < patchlen) { start = getbe32(bin + pos); end = getbe32(bin + pos + 4); len = getbe32(bin + pos + 8); - if (start > end) + if (start > end) { break; /* sanity check */ + } pos += 12 + len; outlen += start - last; last = end; @@ -154,9 +163,10 @@ } if (pos != patchlen) { - if (!PyErr_Occurred()) + if (!PyErr_Occurred()) { PyErr_SetString(mpatch_Error, "patch cannot be decoded"); + } return NULL; }
--- a/mercurial/cext/parsers.c Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/cext/parsers.c Tue Feb 19 21:55:05 2019 -0800 @@ -32,8 +32,9 @@ { Py_ssize_t expected_size; - if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) + if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) { return NULL; + } return _dict_new_presized(expected_size); } @@ -43,8 +44,9 @@ { dirstateTupleObject *t = PyObject_New(dirstateTupleObject, &dirstateTupleType); - if (!t) + if (!t) { return NULL; + } t->state = state; t->mode = mode; t->size = size; @@ -60,12 +62,14 @@ dirstateTupleObject *t; char state; int size, mode, mtime; - if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) + if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) { return NULL; + } t = (dirstateTupleObject *)subtype->tp_alloc(subtype, 1); - if (!t) + if (!t) { return NULL; + } t->state = state; t->mode = mode; t->size = size; @@ -165,8 +169,9 @@ if (!PyArg_ParseTuple( args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"), - &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) + &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) { goto quit; + } len = readlen; @@ -178,8 +183,9 @@ } parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, 20, str + 20, 20); - if (!parents) + if (!parents) { goto quit; + } /* read filenames */ while (pos >= 40 && pos < len) { @@ -212,13 +218,16 @@ cpos + 1, flen - (cpos - cur) - 1); if (!fname || !cname || PyDict_SetItem(cmap, fname, cname) == -1 || - PyDict_SetItem(dmap, fname, entry) == -1) + PyDict_SetItem(dmap, fname, entry) == -1) { goto quit; + } Py_DECREF(cname); } else { fname = PyBytes_FromStringAndSize(cur, flen); - if (!fname || PyDict_SetItem(dmap, fname, entry) == -1) + if (!fname || + PyDict_SetItem(dmap, fname, entry) == -1) { goto quit; + } } Py_DECREF(fname); Py_DECREF(entry); @@ -245,16 +254,20 @@ PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL; Py_ssize_t pos; - if 
(!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type, &dmap)) + if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type, + &dmap)) { goto bail; + } nonnset = PySet_New(NULL); - if (nonnset == NULL) + if (nonnset == NULL) { goto bail; + } otherpset = PySet_New(NULL); - if (otherpset == NULL) + if (otherpset == NULL) { goto bail; + } pos = 0; while (PyDict_Next(dmap, &pos, &fname, &v)) { @@ -272,15 +285,18 @@ } } - if (t->state == 'n' && t->mtime != -1) + if (t->state == 'n' && t->mtime != -1) { continue; - if (PySet_Add(nonnset, fname) == -1) + } + if (PySet_Add(nonnset, fname) == -1) { goto bail; + } } result = Py_BuildValue("(OO)", nonnset, otherpset); - if (result == NULL) + if (result == NULL) { goto bail; + } Py_DECREF(nonnset); Py_DECREF(otherpset); return result; @@ -304,8 +320,10 @@ int now; if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map, - &PyDict_Type, &copymap, &PyTuple_Type, &pl, &now)) + &PyDict_Type, &copymap, &PyTuple_Type, &pl, + &now)) { return NULL; + } if (PyTuple_Size(pl) != 2) { PyErr_SetString(PyExc_TypeError, "expected 2-element tuple"); @@ -332,8 +350,9 @@ } packobj = PyBytes_FromStringAndSize(NULL, nbytes); - if (packobj == NULL) + if (packobj == NULL) { goto bail; + } p = PyBytes_AS_STRING(packobj); @@ -377,10 +396,12 @@ mtime = -1; mtime_unset = (PyObject *)make_dirstate_tuple( state, mode, size, mtime); - if (!mtime_unset) + if (!mtime_unset) { goto bail; - if (PyDict_SetItem(map, k, mtime_unset) == -1) + } + if (PyDict_SetItem(map, k, mtime_unset) == -1) { goto bail; + } Py_DECREF(mtime_unset); mtime_unset = NULL; } @@ -664,8 +685,9 @@ manifest_module_init(mod); revlog_module_init(mod); - if (PyType_Ready(&dirstateTupleType) < 0) + if (PyType_Ready(&dirstateTupleType) < 0) { return; + } Py_INCREF(&dirstateTupleType); PyModule_AddObject(mod, "dirstatetuple", (PyObject *)&dirstateTupleType); @@ -675,12 +697,14 @@ { PyObject *sys = PyImport_ImportModule("sys"), *ver; long hexversion; - if (!sys) 
{ return -1; + } ver = PyObject_GetAttrString(sys, "hexversion"); Py_DECREF(sys); - if (!ver) + if (!ver) { return -1; + } hexversion = PyInt_AsLong(ver); Py_DECREF(ver); /* sys.hexversion is a 32-bit number by default, so the -1 case @@ -720,8 +744,9 @@ { PyObject *mod; - if (check_python_version() == -1) + if (check_python_version() == -1) { return; + } mod = Py_InitModule3("parsers", methods, parsers_doc); module_init(mod); }
--- a/mercurial/cext/pathencode.c Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/cext/pathencode.c Tue Feb 19 21:55:05 2019 -0800 @@ -126,8 +126,9 @@ if (src[i] == 'g') { state = DHGDI; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DDEFAULT; + } break; case DHGDI: if (src[i] == '/') { @@ -137,8 +138,9 @@ state = DDEFAULT; break; case DDEFAULT: - if (src[i] == '.') + if (src[i] == '.') { state = DDOT; + } charcopy(dest, &destlen, destsize, src[i++]); break; } @@ -153,8 +155,9 @@ PyObject *pathobj, *newobj; char *path; - if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj)) + if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj)) { return NULL; + } if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) { PyErr_SetString(PyExc_TypeError, "expected a string"); @@ -235,15 +238,17 @@ if (src[i] == 'u') { state = AU; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case AU: if (src[i] == 'x') { state = THIRD; i++; - } else + } else { state = DEFAULT; + } break; case THIRD: state = DEFAULT; @@ -262,8 +267,9 @@ if (src[i] == 'o') { state = CO; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case CO: if (src[i] == 'm') { @@ -272,8 +278,9 @@ } else if (src[i] == 'n') { state = THIRD; i++; - } else + } else { state = DEFAULT; + } break; case COMLPT: switch (src[i]) { @@ -314,43 +321,49 @@ if (src[i] == 'p') { state = LP; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case LP: if (src[i] == 't') { state = COMLPT; i++; - } else + } else { state = DEFAULT; + } break; case N: if (src[i] == 'u') { state = NU; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case NU: if (src[i] == 'l') { state = THIRD; i++; - } else + } else { state = DEFAULT; + } break; case P: if (src[i] == 'r') { state = PR; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = 
DEFAULT; + } break; case PR: if (src[i] == 'n') { state = THIRD; i++; - } else + } else { state = DEFAULT; + } break; case LDOT: switch (src[i]) { @@ -397,18 +410,21 @@ if (src[i] == 'g') { state = HGDI; charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case HGDI: if (src[i] == '/') { state = START; - if (encodedir) + if (encodedir) { memcopy(dest, &destlen, destsize, ".hg", 3); + } charcopy(dest, &destlen, destsize, src[i++]); - } else + } else { state = DEFAULT; + } break; case SPACE: switch (src[i]) { @@ -427,8 +443,9 @@ case DEFAULT: while (inset(onebyte, src[i])) { charcopy(dest, &destlen, destsize, src[i++]); - if (i == len) + if (i == len) { goto done; + } } switch (src[i]) { case '.': @@ -456,9 +473,10 @@ charcopy(dest, &destlen, destsize, '_'); charcopy(dest, &destlen, destsize, c == '_' ? '_' : c + 32); - } else + } else { escape3(dest, &destlen, destsize, src[i++]); + } break; } break; @@ -498,12 +516,13 @@ Py_ssize_t i, destlen = 0; for (i = 0; i < len; i++) { - if (inset(onebyte, src[i])) + if (inset(onebyte, src[i])) { charcopy(dest, &destlen, destsize, src[i]); - else if (inset(lower, src[i])) + } else if (inset(lower, src[i])) { charcopy(dest, &destlen, destsize, src[i] + 32); - else + } else { escape3(dest, &destlen, destsize, src[i]); + } } return destlen; @@ -516,13 +535,15 @@ PyObject *ret; if (!PyArg_ParseTuple(args, PY23("s#:lowerencode", "y#:lowerencode"), - &path, &len)) + &path, &len)) { return NULL; + } newlen = _lowerencode(NULL, 0, path, len); ret = PyBytes_FromStringAndSize(NULL, newlen); - if (ret) + if (ret) { _lowerencode(PyBytes_AS_STRING(ret), newlen, path, len); + } return ret; } @@ -551,8 +572,9 @@ Py_ssize_t destsize, destlen = 0, slop, used; while (lastslash >= 0 && src[lastslash] != '/') { - if (src[lastslash] == '.' && lastdot == -1) + if (src[lastslash] == '.' 
&& lastdot == -1) { lastdot = lastslash; + } lastslash--; } @@ -570,12 +592,14 @@ /* If src contains a suffix, we will append it to the end of the new string, so make room. */ destsize = 120; - if (lastdot >= 0) + if (lastdot >= 0) { destsize += len - lastdot - 1; + } ret = PyBytes_FromStringAndSize(NULL, destsize); - if (ret == NULL) + if (ret == NULL) { return NULL; + } dest = PyBytes_AS_STRING(ret); memcopy(dest, &destlen, destsize, "dh/", 3); @@ -587,30 +611,36 @@ char d = dest[destlen - 1]; /* After truncation, a directory name may end in a space or dot, which are unportable. */ - if (d == '.' || d == ' ') + if (d == '.' || d == ' ') { dest[destlen - 1] = '_'; - /* The + 3 is to account for "dh/" in the beginning */ - if (destlen > maxshortdirslen + 3) + /* The + 3 is to account for "dh/" in the + * beginning */ + } + if (destlen > maxshortdirslen + 3) { break; + } charcopy(dest, &destlen, destsize, src[i]); p = -1; - } else if (p < dirprefixlen) + } else if (p < dirprefixlen) { charcopy(dest, &destlen, destsize, src[i]); + } } /* Rewind to just before the last slash copied. */ - if (destlen > maxshortdirslen + 3) + if (destlen > maxshortdirslen + 3) { do { destlen--; } while (destlen > 0 && dest[destlen] != '/'); + } if (destlen > 3) { if (lastslash > 0) { char d = dest[destlen - 1]; /* The last directory component may be truncated, so make it safe. */ - if (d == '.' || d == ' ') + if (d == '.' || d == ' ') { dest[destlen - 1] = '_'; + } } charcopy(dest, &destlen, destsize, '/'); @@ -620,27 +650,32 @@ depends on the number of bytes left after accounting for hash and suffix. */ used = destlen + 40; - if (lastdot >= 0) + if (lastdot >= 0) { used += len - lastdot - 1; + } slop = maxstorepathlen - used; if (slop > 0) { Py_ssize_t basenamelen = lastslash >= 0 ? 
len - lastslash - 2 : len - 1; - if (basenamelen > slop) + if (basenamelen > slop) { basenamelen = slop; - if (basenamelen > 0) + } + if (basenamelen > 0) { memcopy(dest, &destlen, destsize, &src[lastslash + 1], basenamelen); + } } /* Add hash and suffix. */ - for (i = 0; i < 20; i++) + for (i = 0; i < 20; i++) { hexencode(dest, &destlen, destsize, sha[i]); + } - if (lastdot >= 0) + if (lastdot >= 0) { memcopy(dest, &destlen, destsize, &src[lastdot], len - lastdot - 1); + } assert(PyBytes_Check(ret)); Py_SIZE(ret) = destlen; @@ -677,13 +712,15 @@ shaobj = PyObject_CallFunction(shafunc, PY23("s#", "y#"), str, len); - if (shaobj == NULL) + if (shaobj == NULL) { return -1; + } hashobj = PyObject_CallMethod(shaobj, "digest", ""); Py_DECREF(shaobj); - if (hashobj == NULL) + if (hashobj == NULL) { return -1; + } if (!PyBytes_Check(hashobj) || PyBytes_GET_SIZE(hashobj) != 20) { PyErr_SetString(PyExc_TypeError, @@ -714,8 +751,9 @@ } dirlen = _encodedir(dired, baselen, src, len); - if (sha1hash(sha, dired, dirlen - 1) == -1) + if (sha1hash(sha, dired, dirlen - 1) == -1) { return NULL; + } lowerlen = _lowerencode(lowered, baselen, dired + 5, dirlen - 5); auxlen = auxencode(auxed, baselen, lowered, lowerlen); return hashmangle(auxed, auxlen, sha); @@ -727,18 +765,20 @@ PyObject *pathobj, *newobj; char *path; - if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj)) + if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj)) { return NULL; + } if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) { PyErr_SetString(PyExc_TypeError, "expected a string"); return NULL; } - if (len > maxstorepathlen) + if (len > maxstorepathlen) { newlen = maxstorepathlen + 2; - else + } else { newlen = len ? basicencode(NULL, 0, path, len + 1) : 1; + } if (newlen <= maxstorepathlen + 1) { if (newlen == len + 1) { @@ -754,8 +794,9 @@ basicencode(PyBytes_AS_STRING(newobj), newlen, path, len + 1); } - } else + } else { newobj = hashencode(path, len + 1); + } return newobj; }
--- a/mercurial/changegroup.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/changegroup.py Tue Feb 19 21:55:05 2019 -0800 @@ -275,7 +275,7 @@ # because we need to use the top level value (if they exist) # in this function. srctype = tr.hookargs.setdefault('source', srctype) - url = tr.hookargs.setdefault('url', url) + tr.hookargs.setdefault('url', url) repo.hook('prechangegroup', throw=True, **pycompat.strkwargs(tr.hookargs)) @@ -817,13 +817,13 @@ self._verbosenote(_('uncompressed size of bundle content:\n')) size = 0 - clstate, deltas = self._generatechangelog(cl, clnodes) + clstate, deltas = self._generatechangelog(cl, clnodes, + generate=changelog) for delta in deltas: - if changelog: - for chunk in _revisiondeltatochunks(delta, - self._builddeltaheader): - size += len(chunk) - yield chunk + for chunk in _revisiondeltatochunks(delta, + self._builddeltaheader): + size += len(chunk) + yield chunk close = closechunk() size += len(close) @@ -917,12 +917,15 @@ if clnodes: repo.hook('outgoing', node=hex(clnodes[0]), source=source) - def _generatechangelog(self, cl, nodes): + def _generatechangelog(self, cl, nodes, generate=True): """Generate data for changelog chunks. Returns a 2-tuple of a dict containing state and an iterable of byte chunks. The state will not be fully populated until the chunk stream has been fully consumed. 
+ + if generate is False, the state will be fully populated and no chunk + stream will be yielded """ clrevorder = {} manifests = {} @@ -930,6 +933,27 @@ changedfiles = set() clrevtomanifestrev = {} + state = { + 'clrevorder': clrevorder, + 'manifests': manifests, + 'changedfiles': changedfiles, + 'clrevtomanifestrev': clrevtomanifestrev, + } + + if not (generate or self._ellipses): + # sort the nodes in storage order + nodes = sorted(nodes, key=cl.rev) + for node in nodes: + c = cl.changelogrevision(node) + clrevorder[node] = len(clrevorder) + # record the first changeset introducing this manifest version + manifests.setdefault(c.manifest, node) + # Record a complete list of potentially-changed files in + # this manifest. + changedfiles.update(c.files) + + return state, () + # Callback for the changelog, used to collect changed files and # manifest nodes. # Returns the linkrev node (identity in the changelog case). @@ -970,13 +994,6 @@ return x - state = { - 'clrevorder': clrevorder, - 'manifests': manifests, - 'changedfiles': changedfiles, - 'clrevtomanifestrev': clrevtomanifestrev, - } - gen = deltagroup( self._repo, cl, nodes, True, lookupcl, self._forcedeltaparentprev, @@ -1056,6 +1073,11 @@ # because of narrow clones). Do this even for the root # directory (tree=='') prunednodes = [] + elif not self._ellipses: + # In non-ellipses case and large repositories, it is better to + # prevent calling of store.rev and store.linkrev on a lot of + # nodes as compared to sending some extra data + prunednodes = nodes.copy() else: # Avoid sending any manifest nodes we can prove the # client already has by checking linkrevs. 
See the @@ -1296,9 +1318,9 @@ assert version in supportedoutgoingversions(repo) if matcher is None: - matcher = matchmod.alwaysmatcher(repo.root, '') + matcher = matchmod.always() if oldmatcher is None: - oldmatcher = matchmod.nevermatcher(repo.root, '') + oldmatcher = matchmod.never() if version == '01' and not matcher.always(): raise error.ProgrammingError('version 01 changegroups do not support '
--- a/mercurial/changelog.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/changelog.py Tue Feb 19 21:55:05 2019 -0800 @@ -70,7 +70,10 @@ def encodeextra(d): # keys must be sorted to produce a deterministic changelog entry - items = [_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)] + items = [ + _string_escape('%s:%s' % (k, pycompat.bytestr(d[k]))) + for k in sorted(d) + ] return "\0".join(items) def stripdesc(desc):
--- a/mercurial/cmdutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/cmdutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -282,7 +282,9 @@ status = repo.status(match=match) if not force: repo.checkcommitpatterns(wctx, vdirs, match, status, fail) - diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True) + diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True, + section='commands', + configprefix='commit.interactive.') diffopts.nodates = True diffopts.git = True diffopts.showfunc = True @@ -607,11 +609,9 @@ return _helpmessage('hg unshelve --continue', 'hg unshelve --abort') def _graftmsg(): - # tweakdefaults requires `update` to have a rev hence the `.` return _helpmessage('hg graft --continue', 'hg graft --abort') def _mergemsg(): - # tweakdefaults requires `update` to have a rev hence the `.` return _helpmessage('hg commit', 'hg merge --abort') def _bisectmsg(): @@ -1135,6 +1135,7 @@ dryrun = opts.get("dry_run") wctx = repo[None] + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) def walkpat(pat): srcs = [] if after: @@ -1144,7 +1145,7 @@ m = scmutil.match(wctx, [pat], opts, globbed=True) for abs in wctx.walk(m): state = repo.dirstate[abs] - rel = m.rel(abs) + rel = uipathfn(abs) exact = m.exact(abs) if state in badstates: if exact and state == '?': @@ -1251,10 +1252,6 @@ else: ui.warn(_('%s: cannot copy - %s\n') % (relsrc, encoding.strtolocal(inst.strerror))) - if rename: - hint = _("('hg rename --after' to record the rename)\n") - else: - hint = _("('hg copy --after' to record the copy)\n") return True # report a failure if ui.verbose or not exact: @@ -1964,7 +1961,10 @@ else: self.revs.discard(value) ctx = change(value) - matches = [f for f in ctx.files() if match(f)] + if allfiles: + matches = list(ctx.manifest().walk(match)) + else: + matches = [f for f in ctx.files() if match(f)] if matches: fncache[value] = matches self.set.add(value) @@ -2031,8 +2031,7 @@ return iterate() -def add(ui, repo, match, prefix, explicitonly, 
**opts): - join = lambda f: os.path.join(prefix, f) +def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts): bad = [] badfn = lambda x, y: bad.append(x) or match.bad(x, y) @@ -2056,20 +2055,24 @@ cca(f) names.append(f) if ui.verbose or not exact: - ui.status(_('adding %s\n') % match.rel(f), + ui.status(_('adding %s\n') % uipathfn(f), label='ui.addremove.added') for subpath in sorted(wctx.substate): sub = wctx.sub(subpath) try: submatch = matchmod.subdirmatcher(subpath, match) + subprefix = repo.wvfs.reljoin(prefix, subpath) + subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn) if opts.get(r'subrepos'): - bad.extend(sub.add(ui, submatch, prefix, False, **opts)) + bad.extend(sub.add(ui, submatch, subprefix, subuipathfn, False, + **opts)) else: - bad.extend(sub.add(ui, submatch, prefix, True, **opts)) + bad.extend(sub.add(ui, submatch, subprefix, subuipathfn, True, + **opts)) except error.LookupError: ui.status(_("skipping missing subrepository: %s\n") - % join(subpath)) + % uipathfn(subpath)) if not opts.get(r'dry_run'): rejected = wctx.add(names, prefix) @@ -2085,10 +2088,10 @@ for subpath in ctx.substate: ctx.sub(subpath).addwebdirpath(serverpath, webconf) -def forget(ui, repo, match, prefix, explicitonly, dryrun, interactive): +def forget(ui, repo, match, prefix, uipathfn, explicitonly, dryrun, + interactive): if dryrun and interactive: raise error.Abort(_("cannot specify both --dry-run and --interactive")) - join = lambda f: os.path.join(prefix, f) bad = [] badfn = lambda x, y: bad.append(x) or match.bad(x, y) wctx = repo[None] @@ -2101,15 +2104,18 @@ for subpath in sorted(wctx.substate): sub = wctx.sub(subpath) + submatch = matchmod.subdirmatcher(subpath, match) + subprefix = repo.wvfs.reljoin(prefix, subpath) + subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn) try: - submatch = matchmod.subdirmatcher(subpath, match) - subbad, subforgot = sub.forget(submatch, prefix, dryrun=dryrun, + subbad, subforgot = sub.forget(submatch, subprefix, 
subuipathfn, + dryrun=dryrun, interactive=interactive) bad.extend([subpath + '/' + f for f in subbad]) forgot.extend([subpath + '/' + f for f in subforgot]) except error.LookupError: ui.status(_("skipping missing subrepository: %s\n") - % join(subpath)) + % uipathfn(subpath)) if not explicitonly: for f in match.files(): @@ -2124,7 +2130,7 @@ continue ui.warn(_('not removing %s: ' 'file is already untracked\n') - % match.rel(f)) + % uipathfn(f)) bad.append(f) if interactive: @@ -2135,13 +2141,14 @@ '$$ Include &all remaining files' '$$ &? (display help)') for filename in forget[:]: - r = ui.promptchoice(_('forget %s %s') % (filename, responses)) + r = ui.promptchoice(_('forget %s %s') % + (uipathfn(filename), responses)) if r == 4: # ? while r == 4: for c, t in ui.extractchoices(responses)[1]: ui.write('%s - %s\n' % (c, encoding.lower(t))) - r = ui.promptchoice(_('forget %s %s') % (filename, - responses)) + r = ui.promptchoice(_('forget %s %s') % + (uipathfn(filename), responses)) if r == 0: # yes continue elif r == 1: # no @@ -2155,7 +2162,7 @@ for f in forget: if ui.verbose or not match.exact(f) or interactive: - ui.status(_('removing %s\n') % match.rel(f), + ui.status(_('removing %s\n') % uipathfn(f), label='ui.addremove.removed') if not dryrun: @@ -2168,6 +2175,7 @@ ret = 1 needsfctx = ui.verbose or {'size', 'flags'} & fm.datahint() + uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True) for f in ctx.matches(m): fm.startitem() fm.context(ctx=ctx) @@ -2175,7 +2183,7 @@ fc = ctx[f] fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags()) fm.data(path=f) - fm.plain(fmt % m.rel(f)) + fm.plain(fmt % uipathfn(f)) ret = 0 for subpath in sorted(ctx.substate): @@ -2188,12 +2196,12 @@ ret = 0 except error.LookupError: ui.status(_("skipping missing subrepository: %s\n") - % m.abs(subpath)) + % uipathfn(subpath)) return ret -def remove(ui, repo, m, prefix, after, force, subrepos, dryrun, warnings=None): - join = lambda f: os.path.join(prefix, f) +def 
remove(ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, + warnings=None): ret = 0 s = repo.status(match=m, clean=True) modified, added, deleted, clean = s[0], s[1], s[3], s[6] @@ -2211,16 +2219,18 @@ unit=_('subrepos')) for subpath in subs: submatch = matchmod.subdirmatcher(subpath, m) + subprefix = repo.wvfs.reljoin(prefix, subpath) + subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn) if subrepos or m.exact(subpath) or any(submatch.files()): progress.increment() sub = wctx.sub(subpath) try: - if sub.removefiles(submatch, prefix, after, force, subrepos, - dryrun, warnings): + if sub.removefiles(submatch, subprefix, subuipathfn, after, + force, subrepos, dryrun, warnings): ret = 1 except error.LookupError: warnings.append(_("skipping missing subrepository: %s\n") - % join(subpath)) + % uipathfn(subpath)) progress.complete() # warn about failure to delete explicit files/dirs @@ -2244,10 +2254,10 @@ if repo.wvfs.exists(f): if repo.wvfs.isdir(f): warnings.append(_('not removing %s: no tracked files\n') - % m.rel(f)) + % uipathfn(f)) else: warnings.append(_('not removing %s: file is untracked\n') - % m.rel(f)) + % uipathfn(f)) # missing files will generate a warning elsewhere ret = 1 progress.complete() @@ -2263,7 +2273,7 @@ progress.increment() if ui.verbose or (f in files): warnings.append(_('not removing %s: file still exists\n') - % m.rel(f)) + % uipathfn(f)) ret = 1 progress.complete() else: @@ -2274,12 +2284,12 @@ for f in modified: progress.increment() warnings.append(_('not removing %s: file is modified (use -f' - ' to force removal)\n') % m.rel(f)) + ' to force removal)\n') % uipathfn(f)) ret = 1 for f in added: progress.increment() warnings.append(_("not removing %s: file has been marked for add" - " (use 'hg forget' to undo add)\n") % m.rel(f)) + " (use 'hg forget' to undo add)\n") % uipathfn(f)) ret = 1 progress.complete() @@ -2289,7 +2299,7 @@ for f in list: if ui.verbose or not m.exact(f): progress.increment() - ui.status(_('removing 
%s\n') % m.rel(f), + ui.status(_('removing %s\n') % uipathfn(f), label='ui.addremove.removed') progress.complete() @@ -2360,18 +2370,18 @@ write(abs) err = 0 + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) for subpath in sorted(ctx.substate): sub = ctx.sub(subpath) try: submatch = matchmod.subdirmatcher(subpath, matcher) - - if not sub.cat(submatch, basefm, fntemplate, - os.path.join(prefix, sub._path), + subprefix = os.path.join(prefix, subpath) + if not sub.cat(submatch, basefm, fntemplate, subprefix, **pycompat.strkwargs(opts)): err = 0 except error.RepoLookupError: - ui.status(_("skipping missing subrepository: %s\n") - % os.path.join(prefix, subpath)) + ui.status(_("skipping missing subrepository: %s\n") % + uipathfn(subpath)) return err @@ -2390,7 +2400,9 @@ dsguard = dirstateguard.dirstateguard(repo, 'commit') with dsguard or util.nullcontextmanager(): if dsguard: - if scmutil.addremove(repo, matcher, "", opts) != 0: + relative = scmutil.anypats(pats, opts) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative) + if scmutil.addremove(repo, matcher, "", uipathfn, opts) != 0: raise error.Abort( _("failed to mark all new/missing files as added/removed")) @@ -2468,8 +2480,10 @@ # add/remove the files to the working copy if the "addremove" option # was specified. 
matcher = scmutil.match(wctx, pats, opts) + relative = scmutil.anypats(pats, opts) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative) if (opts.get('addremove') - and scmutil.addremove(repo, matcher, "", opts)): + and scmutil.addremove(repo, matcher, "", uipathfn, opts)): raise error.Abort( _("failed to mark all new/missing files as added/removed")) @@ -2785,6 +2799,7 @@ # The mapping is in the form: # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>) names = {} + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) with repo.wlock(): ## filling of the `names` mapping @@ -2800,7 +2815,7 @@ if not m.always(): matcher = matchmod.badmatch(m, lambda x, y: False) for abs in wctx.walk(matcher): - names[abs] = m.rel(abs), m.exact(abs) + names[abs] = m.exact(abs) # walk target manifest to fill `names` @@ -2813,11 +2828,11 @@ for f in names: if f.startswith(path_): return - ui.warn("%s: %s\n" % (m.rel(path), msg)) + ui.warn("%s: %s\n" % (uipathfn(path), msg)) for abs in ctx.walk(matchmod.badmatch(m, badfn)): if abs not in names: - names[abs] = m.rel(abs), m.exact(abs) + names[abs] = m.exact(abs) # Find status of all file in `names`. m = scmutil.matchfiles(repo, names) @@ -2828,7 +2843,7 @@ changes = repo.status(node1=node, match=m) for kind in changes: for abs in kind: - names[abs] = m.rel(abs), m.exact(abs) + names[abs] = m.exact(abs) m = scmutil.matchfiles(repo, names) @@ -2890,13 +2905,12 @@ dsmodified -= mergeadd # if f is a rename, update `names` to also revert the source - cwd = repo.getcwd() for f in localchanges: src = repo.dirstate.copied(f) # XXX should we check for rename down to target node? 
if src and src not in names and repo.dirstate[src] == 'r': dsremoved.add(src) - names[src] = (repo.pathto(src, cwd), True) + names[src] = True # determine the exact nature of the deleted changesets deladded = set(_deleted) @@ -3003,7 +3017,7 @@ (unknown, actions['unknown'], discard), ) - for abs, (rel, exact) in sorted(names.items()): + for abs, exact in sorted(names.items()): # target file to be touch on disk (relative to cwd) target = repo.wjoin(abs) # search the entry in the dispatch table. @@ -3020,19 +3034,21 @@ if dobackup == backupinteractive: tobackup.add(abs) elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])): - bakname = scmutil.origpath(ui, repo, rel) + absbakname = scmutil.backuppath(ui, repo, abs) + bakname = os.path.relpath(absbakname, + start=repo.root) ui.note(_('saving current version of %s as %s\n') % - (rel, bakname)) + (uipathfn(abs), uipathfn(bakname))) if not opts.get('dry_run'): if interactive: - util.copyfile(target, bakname) + util.copyfile(target, absbakname) else: - util.rename(target, bakname) + util.rename(target, absbakname) if opts.get('dry_run'): if ui.verbose or not exact: - ui.status(msg % rel) + ui.status(msg % uipathfn(abs)) elif exact: - ui.warn(msg % rel) + ui.warn(msg % uipathfn(abs)) break if not opts.get('dry_run'): @@ -3043,8 +3059,8 @@ prefetch(repo, [ctx.rev()], matchfiles(repo, [f for sublist in oplist for f in sublist])) - _performrevert(repo, parents, ctx, names, actions, interactive, - tobackup) + _performrevert(repo, parents, ctx, names, uipathfn, actions, + interactive, tobackup) if targetsubs: # Revert the subrepos on the revert list @@ -3056,8 +3072,8 @@ raise error.Abort("subrepository '%s' does not exist in %s!" 
% (sub, short(ctx.node()))) -def _performrevert(repo, parents, ctx, names, actions, interactive=False, - tobackup=None): +def _performrevert(repo, parents, ctx, names, uipathfn, actions, + interactive=False, tobackup=None): """function that actually perform all the actions computed for revert This is an independent function to let extension to plug in and react to @@ -3082,15 +3098,15 @@ repo.dirstate.remove(f) def prntstatusmsg(action, f): - rel, exact = names[f] + exact = names[f] if repo.ui.verbose or not exact: - repo.ui.status(actions[action][1] % rel) + repo.ui.status(actions[action][1] % uipathfn(f)) audit_path = pathutil.pathauditor(repo.root, cached=True) for f in actions['forget'][0]: if interactive: choice = repo.ui.promptchoice( - _("forget added file %s (Yn)?$$ &Yes $$ &No") % f) + _("forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)) if choice == 0: prntstatusmsg('forget', f) repo.dirstate.drop(f) @@ -3103,7 +3119,7 @@ audit_path(f) if interactive: choice = repo.ui.promptchoice( - _("remove added file %s (Yn)?$$ &Yes $$ &No") % f) + _("remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)) if choice == 0: prntstatusmsg('remove', f) doremove(f) @@ -3132,7 +3148,9 @@ # Prompt the user for changes to revert torevert = [f for f in actions['revert'][0] if f not in excluded_files] m = scmutil.matchfiles(repo, torevert) - diffopts = patch.difffeatureopts(repo.ui, whitespace=True) + diffopts = patch.difffeatureopts(repo.ui, whitespace=True, + section='commands', + configprefix='revert.interactive.') diffopts.nodates = True diffopts.git = True operation = 'discard' @@ -3170,7 +3188,7 @@ # Create a backup file only if this hunk should be backed up if c.header.filename() in tobackup: target = repo.wjoin(abs) - bakname = scmutil.origpath(repo.ui, repo, m.rel(abs)) + bakname = scmutil.backuppath(repo.ui, repo, abs) util.copyfile(target, bakname) tobackup.remove(abs) c.write(fp) @@ -3200,9 +3218,19 @@ if node == parent and p2 == nullid: normal = 
repo.dirstate.normal for f in actions['undelete'][0]: - prntstatusmsg('undelete', f) - checkout(f) - normal(f) + if interactive: + choice = repo.ui.promptchoice( + _("add back removed file %s (Yn)?$$ &Yes $$ &No") % f) + if choice == 0: + prntstatusmsg('undelete', f) + checkout(f) + normal(f) + else: + excluded_files.append(f) + else: + prntstatusmsg('undelete', f) + checkout(f) + normal(f) copied = copies.pathcopies(repo[parent], ctx)
--- a/mercurial/color.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/color.py Tue Feb 19 21:55:05 2019 -0800 @@ -77,12 +77,13 @@ _defaultstyles = { 'grep.match': 'red bold', 'grep.linenumber': 'green', - 'grep.rev': 'green', - 'grep.change': 'green', + 'grep.rev': 'blue', 'grep.sep': 'cyan', 'grep.filename': 'magenta', 'grep.user': 'magenta', 'grep.date': 'magenta', + 'grep.inserted': 'green bold', + 'grep.deleted': 'red bold', 'bookmarks.active': 'green', 'branches.active': 'none', 'branches.closed': 'black bold', @@ -169,7 +170,7 @@ ui._terminfoparams[key[9:]] = newval try: curses.setupterm() - except curses.error as e: + except curses.error: ui._terminfoparams.clear() return @@ -484,7 +485,7 @@ w32effects = None else: origattr = csbi.wAttributes - ansire = re.compile(b'\033\[([^m]*)m([^\033]*)(.*)', + ansire = re.compile(br'\033\[([^m]*)m([^\033]*)(.*)', re.MULTILINE | re.DOTALL) def win32print(ui, writefunc, text, **opts):
--- a/mercurial/commands.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/commands.py Tue Feb 19 21:55:05 2019 -0800 @@ -180,7 +180,8 @@ """ m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts)) - rejected = cmdutil.add(ui, repo, m, "", False, **opts) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + rejected = cmdutil.add(ui, repo, m, "", uipathfn, False, **opts) return rejected and 1 or 0 @command('addremove', @@ -254,7 +255,9 @@ if not opts.get('similarity'): opts['similarity'] = '100' matcher = scmutil.match(repo[None], pats, opts) - return scmutil.addremove(repo, matcher, "", opts) + relative = scmutil.anypats(pats, opts) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative) + return scmutil.addremove(repo, matcher, "", uipathfn, opts) @command('annotate|blame', [('r', 'rev', '', _('annotate the specified revision'), _('REV')), @@ -407,12 +410,13 @@ if skiprevs: skiprevs = scmutil.revrange(repo, skiprevs) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) for abs in ctx.walk(m): fctx = ctx[abs] rootfm.startitem() rootfm.data(path=abs) if not opts.get('text') and fctx.isbinary(): - rootfm.plain(_("%s: binary file\n") % m.rel(abs)) + rootfm.plain(_("%s: binary file\n") % uipathfn(abs)) continue fm = rootfm.nested('lines', tmpl='{rev}: {line}') @@ -1102,7 +1106,7 @@ with repo.wlock(): if opts.get('clean'): - label = repo[None].p1().branch() + label = repo['.'].branch() repo.dirstate.setbranch(label) ui.status(_('reset working directory to branch %s\n') % label) elif label: @@ -1672,8 +1676,8 @@ if not bheads: raise error.Abort(_('can only close branch heads')) elif opts.get('amend'): - if repo[None].parents()[0].p1().branch() != branch and \ - repo[None].parents()[0].p2().branch() != branch: + if repo['.'].p1().branch() != branch and \ + repo['.'].p2().branch() != branch: raise error.Abort(_('can only close branch heads')) if opts.get('amend'): @@ -2254,7 +2258,8 @@ m = scmutil.match(repo[None], pats, opts) 
dryrun, interactive = opts.get('dry_run'), opts.get('interactive') - rejected = cmdutil.forget(ui, repo, m, prefix="", + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + rejected = cmdutil.forget(ui, repo, m, prefix="", uipathfn=uipathfn, explicitonly=False, dryrun=dryrun, interactive=interactive)[0] return rejected and 1 or 0 @@ -2633,7 +2638,6 @@ raise error.Abort(_("cannot abort using an old graftstate")) # changeset from which graft operation was started - startctx = None if len(newnodes) > 0: startctx = repo[newnodes[0]].p1() else: @@ -2849,6 +2853,7 @@ for i in pycompat.xrange(blo, bhi): yield ('+', b[i]) + uipathfn = scmutil.getuipathfn(repo) def display(fm, fn, ctx, pstates, states): rev = scmutil.intrev(ctx) if fm.isplain(): @@ -2868,7 +2873,7 @@ except error.WdirUnsupported: return ctx[fn].isbinary() - fieldnamemap = {'filename': 'path', 'linenumber': 'lineno'} + fieldnamemap = {'linenumber': 'lineno'} if diff: iter = difflinestates(pstates, states) else: @@ -2876,27 +2881,29 @@ for change, l in iter: fm.startitem() fm.context(ctx=ctx) - fm.data(node=fm.hexfunc(scmutil.binnode(ctx))) + fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn) + fm.plain(uipathfn(fn), label='grep.filename') cols = [ - ('filename', '%s', fn, True), - ('rev', '%d', rev, not plaingrep), - ('linenumber', '%d', l.linenum, opts.get('line_number')), + ('rev', '%d', rev, not plaingrep, ''), + ('linenumber', '%d', l.linenum, opts.get('line_number'), ''), ] if diff: - cols.append(('change', '%s', change, True)) + cols.append( + ('change', '%s', change, True, + 'grep.inserted ' if change == '+' else 'grep.deleted ') + ) cols.extend([ - ('user', '%s', formatuser(ctx.user()), opts.get('user')), + ('user', '%s', formatuser(ctx.user()), opts.get('user'), ''), ('date', '%s', fm.formatdate(ctx.date(), datefmt), - opts.get('date')), + opts.get('date'), ''), ]) - lastcol = next( - name for name, fmt, data, cond in reversed(cols) if cond) - for name, fmt, data, cond in cols: + 
for name, fmt, data, cond, extra_label in cols: + if cond: + fm.plain(sep, label='grep.sep') field = fieldnamemap.get(name, name) - fm.condwrite(cond, field, fmt, data, label='grep.%s' % name) - if cond and name != lastcol: - fm.plain(sep, label='grep.sep') + label = extra_label + ('grep.%s' % name) + fm.condwrite(cond, field, fmt, data, label=label) if not opts.get('files_with_matches'): fm.plain(sep, label='grep.sep') if not opts.get('text') and binary(): @@ -2926,7 +2933,7 @@ fm.data(matched=False) fm.end() - skip = {} + skip = set() revfiles = {} match = scmutil.match(repo[None], pats, opts) found = False @@ -2945,16 +2952,18 @@ fnode = ctx.filenode(fn) except error.LookupError: continue - try: - copied = flog.renamed(fnode) - except error.WdirUnsupported: - copied = ctx[fn].renamed() - copy = follow and copied and copied[0] - if copy: - copies.setdefault(rev, {})[fn] = copy + copy = None + if follow: + try: + copied = flog.renamed(fnode) + except error.WdirUnsupported: + copied = ctx[fn].renamed() + copy = copied and copied[0] + if copy: + copies.setdefault(rev, {})[fn] = copy + if fn in skip: + skip.add(copy) if fn in skip: - if copy: - skip[copy] = True continue files.append(fn) @@ -2983,16 +2992,16 @@ copy = copies.get(rev, {}).get(fn) if fn in skip: if copy: - skip[copy] = True + skip.add(copy) continue pstates = matches.get(parent, {}).get(copy or fn, []) if pstates or states: r = display(fm, fn, ctx, pstates, states) found = found or r if r and not diff and not all_files: - skip[fn] = True + skip.add(fn) if copy: - skip[copy] = True + skip.add(copy) del revfiles[rev] # We will keep the matches dict for the duration of the window # clear the matches dict once the window is over @@ -3683,11 +3692,12 @@ filesgen = sorted(repo.dirstate.matches(m)) else: filesgen = ctx.matches(m) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats)) for abs in filesgen: if opts.get('fullpath'): ui.write(repo.wjoin(abs), end) else: - ui.write(((pats and 
m.rel(abs)) or abs), end) + ui.write(uipathfn(abs), end) ret = 0 return ret @@ -4361,7 +4371,7 @@ msg = _("not updating: %s") % stringutil.forcebytestr(inst) hint = inst.hint raise error.UpdateAbort(msg, hint=hint) - if modheads > 1: + if modheads is not None and modheads > 1: currentbranchheads = len(repo.branchheads()) if currentbranchheads == modheads: ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) @@ -4714,7 +4724,8 @@ m = scmutil.match(repo[None], pats, opts) subrepos = opts.get('subrepos') - return cmdutil.remove(ui, repo, m, "", after, force, subrepos, + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + return cmdutil.remove(ui, repo, m, "", uipathfn, after, force, subrepos, dryrun=dryrun) @command('rename|move|mv', @@ -4839,6 +4850,8 @@ b'$$ &Yes $$ &No')): raise error.Abort(_('user quit')) + uipathfn = scmutil.getuipathfn(repo) + if show: ui.pager('resolve') fm = ui.formatter('resolve', opts) @@ -4866,7 +4879,8 @@ fm.startitem() fm.context(ctx=wctx) fm.condwrite(not nostatus, 'mergestatus', '%s ', key, label=label) - fm.write('path', '%s\n', f, label=label) + fm.data(path=f) + fm.plain('%s\n' % uipathfn(f), label=label) fm.end() return 0 @@ -4912,11 +4926,11 @@ if mark: if exact: ui.warn(_('not marking %s as it is driver-resolved\n') - % f) + % uipathfn(f)) elif unmark: if exact: ui.warn(_('not unmarking %s as it is driver-resolved\n') - % f) + % uipathfn(f)) else: runconclude = True continue @@ -4930,7 +4944,7 @@ ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED_PATH) elif ms[f] == mergemod.MERGE_RECORD_UNRESOLVED_PATH: ui.warn(_('%s: path conflict must be resolved manually\n') - % f) + % uipathfn(f)) continue if mark: @@ -4968,14 +4982,15 @@ if complete: try: util.rename(a + ".resolve", - scmutil.origpath(ui, repo, a)) + scmutil.backuppath(ui, repo, f)) except OSError as inst: if inst.errno != errno.ENOENT: raise if hasconflictmarkers: ui.warn(_('warning: the following files still have conflict ' - 'markers:\n ') + '\n 
'.join(hasconflictmarkers) + '\n') + 'markers:\n') + ''.join(' ' + uipathfn(f) + '\n' + for f in hasconflictmarkers)) if markcheck == 'abort' and not all and not pats: raise error.Abort(_('conflict markers detected'), hint=_('use --all to mark anyway')) @@ -4994,7 +5009,7 @@ # replace filemerge's .orig file with our resolve file a = repo.wjoin(f) try: - util.rename(a + ".resolve", scmutil.origpath(ui, repo, a)) + util.rename(a + ".resolve", scmutil.backuppath(ui, repo, f)) except OSError as inst: if inst.errno != errno.ENOENT: raise @@ -5413,10 +5428,11 @@ repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn') ctx1, ctx2 = scmutil.revpair(repo, revs) - if pats or ui.configbool('commands', 'status.relative'): - cwd = repo.getcwd() - else: - cwd = '' + forcerelativevalue = None + if ui.hasconfig('commands', 'status.relative'): + forcerelativevalue = ui.configbool('commands', 'status.relative') + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats), + forcerelativevalue=forcerelativevalue) if opts.get('print0'): end = '\0' @@ -5467,10 +5483,10 @@ fm.context(ctx=ctx2) fm.data(path=f) fm.condwrite(showchar, 'status', '%s ', char, label=label) - fm.plain(fmt % repo.pathto(f, cwd), label=label) + fm.plain(fmt % uipathfn(f), label=label) if f in copy: fm.data(source=copy[f]) - fm.plain((' %s' + end) % repo.pathto(copy[f], cwd), + fm.plain((' %s' + end) % uipathfn(copy[f]), label='status.copied') if ((ui.verbose or ui.configbool('commands', 'status.verbose')) @@ -5503,7 +5519,6 @@ pnode = parents[0].node() marks = [] - ms = None try: ms = mergemod.mergestate.read(repo) except error.UnsupportedMergeRecords as e: @@ -5830,6 +5845,10 @@ expectedtype = 'global' for n in names: + if repo.tagtype(n) == 'global': + alltags = tagsmod.findglobaltags(ui, repo) + if alltags[n][0] == nullid: + raise error.Abort(_("tag '%s' is already removed") % n) if not repo.tagtype(n): raise error.Abort(_("tag '%s' does not exist") % n) if repo.tagtype(n) != expectedtype: @@ -5908,7 
+5927,6 @@ ui.pager('tags') fm = ui.formatter('tags', opts) hexfunc = fm.hexfunc - tagtype = "" for t, n in reversed(repo.tagslist()): hn = hexfunc(n)
--- a/mercurial/config.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/config.py Tue Feb 19 21:55:05 2019 -0800 @@ -78,6 +78,10 @@ return list(self._data.get(section, {}).iteritems()) def set(self, section, item, value, source=""): if pycompat.ispy3: + assert not isinstance(section, str), ( + 'config section may not be unicode strings on Python 3') + assert not isinstance(item, str), ( + 'config item may not be unicode strings on Python 3') assert not isinstance(value, str), ( 'config values may not be unicode strings on Python 3') if section not in self:
--- a/mercurial/configitems.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/configitems.py Tue Feb 19 21:55:05 2019 -0800 @@ -113,46 +113,49 @@ coreconfigitem = getitemregister(coreitems) +def _registerdiffopts(section, configprefix=''): + coreconfigitem(section, configprefix + 'nodates', + default=False, + ) + coreconfigitem(section, configprefix + 'showfunc', + default=False, + ) + coreconfigitem(section, configprefix + 'unified', + default=None, + ) + coreconfigitem(section, configprefix + 'git', + default=False, + ) + coreconfigitem(section, configprefix + 'ignorews', + default=False, + ) + coreconfigitem(section, configprefix + 'ignorewsamount', + default=False, + ) + coreconfigitem(section, configprefix + 'ignoreblanklines', + default=False, + ) + coreconfigitem(section, configprefix + 'ignorewseol', + default=False, + ) + coreconfigitem(section, configprefix + 'nobinary', + default=False, + ) + coreconfigitem(section, configprefix + 'noprefix', + default=False, + ) + coreconfigitem(section, configprefix + 'word-diff', + default=False, + ) + coreconfigitem('alias', '.*', default=dynamicdefault, generic=True, ) -coreconfigitem('annotate', 'nodates', - default=False, -) -coreconfigitem('annotate', 'showfunc', - default=False, -) -coreconfigitem('annotate', 'unified', - default=None, -) -coreconfigitem('annotate', 'git', - default=False, -) -coreconfigitem('annotate', 'ignorews', - default=False, -) -coreconfigitem('annotate', 'ignorewsamount', - default=False, -) -coreconfigitem('annotate', 'ignoreblanklines', - default=False, -) -coreconfigitem('annotate', 'ignorewseol', - default=False, -) -coreconfigitem('annotate', 'nobinary', - default=False, -) -coreconfigitem('annotate', 'noprefix', - default=False, -) -coreconfigitem('annotate', 'word-diff', - default=False, -) coreconfigitem('auth', 'cookiefile', default=None, ) +_registerdiffopts(section='annotate') # bookmarks.pushing: internal hack for discovery coreconfigitem('bookmarks', 'pushing', default=list, 
@@ -198,6 +201,7 @@ coreconfigitem('color', 'pagermode', default=dynamicdefault, ) +_registerdiffopts(section='commands', configprefix='commit.interactive.') coreconfigitem('commands', 'grep.all-files', default=False, ) @@ -210,6 +214,7 @@ coreconfigitem('commands', 'resolve.mark-check', default='none', ) +_registerdiffopts(section='commands', configprefix='revert.interactive.') coreconfigitem('commands', 'show.aliasprefix', default=list, ) @@ -404,39 +409,7 @@ coreconfigitem('devel', 'debug.peer-request', default=False, ) -coreconfigitem('diff', 'nodates', - default=False, -) -coreconfigitem('diff', 'showfunc', - default=False, -) -coreconfigitem('diff', 'unified', - default=None, -) -coreconfigitem('diff', 'git', - default=False, -) -coreconfigitem('diff', 'ignorews', - default=False, -) -coreconfigitem('diff', 'ignorewsamount', - default=False, -) -coreconfigitem('diff', 'ignoreblanklines', - default=False, -) -coreconfigitem('diff', 'ignorewseol', - default=False, -) -coreconfigitem('diff', 'nobinary', - default=False, -) -coreconfigitem('diff', 'noprefix', - default=False, -) -coreconfigitem('diff', 'word-diff', - default=False, -) +_registerdiffopts(section='diff') coreconfigitem('email', 'bcc', default=None, ) @@ -720,11 +693,11 @@ coreconfigitem('fsmonitor', 'warn_update_file_count', default=50000, ) -coreconfigitem('help', 'hidden-command\..*', +coreconfigitem('help', br'hidden-command\..*', default=False, generic=True, ) -coreconfigitem('help', 'hidden-topic\..*', +coreconfigitem('help', br'hidden-topic\..*', default=False, generic=True, ) @@ -1108,6 +1081,10 @@ default=None, generic=True, ) +coreconfigitem('templateconfig', '.*', + default=dynamicdefault, + generic=True, +) coreconfigitem('trusted', 'groups', default=list, ) @@ -1233,6 +1210,9 @@ coreconfigitem('ui', 'quietbookmarkmove', default=False, ) +coreconfigitem('ui', 'relative-paths', + default='legacy', +) coreconfigitem('ui', 'remotecmd', default='hg', )
--- a/mercurial/context.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/context.py Tue Feb 19 21:55:05 2019 -0800 @@ -294,16 +294,16 @@ listsubrepos=listsubrepos, badfn=badfn) def diff(self, ctx2=None, match=None, changes=None, opts=None, - losedatafn=None, prefix='', relroot='', copy=None, - hunksfilterfn=None): + losedatafn=None, pathfn=None, copy=None, + copysourcematch=None, hunksfilterfn=None): """Returns a diff generator for the given contexts and matcher""" if ctx2 is None: ctx2 = self.p1() if ctx2 is not None: ctx2 = self._repo[ctx2] return patch.diff(self._repo, ctx2, self, match=match, changes=changes, - opts=opts, losedatafn=losedatafn, prefix=prefix, - relroot=relroot, copy=copy, + opts=opts, losedatafn=losedatafn, pathfn=pathfn, + copy=copy, copysourcematch=copysourcematch, hunksfilterfn=hunksfilterfn) def dirs(self): @@ -1355,28 +1355,15 @@ uipath = lambda f: ds.pathto(pathutil.join(prefix, f)) rejected = [] for f in files: - if f not in self._repo.dirstate: + if f not in ds: self._repo.ui.warn(_("%s not tracked!\n") % uipath(f)) rejected.append(f) - elif self._repo.dirstate[f] != 'a': - self._repo.dirstate.remove(f) + elif ds[f] != 'a': + ds.remove(f) else: - self._repo.dirstate.drop(f) + ds.drop(f) return rejected - def undelete(self, list): - pctxs = self.parents() - with self._repo.wlock(): - ds = self._repo.dirstate - for f in list: - if self._repo.dirstate[f] != 'r': - self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f)) - else: - fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f] - t = fctx.data() - self._repo.wwrite(f, t, fctx.flags()) - self._repo.dirstate.normal(f) - def copy(self, source, dest): try: st = self._repo.wvfs.lstat(dest) @@ -1392,11 +1379,12 @@ % self._repo.dirstate.pathto(dest)) else: with self._repo.wlock(): - if self._repo.dirstate[dest] in '?': - self._repo.dirstate.add(dest) - elif self._repo.dirstate[dest] in 'r': - self._repo.dirstate.normallookup(dest) - self._repo.dirstate.copy(source, dest) + ds = 
self._repo.dirstate + if ds[dest] in '?': + ds.add(dest) + elif ds[dest] in 'r': + ds.normallookup(dest) + ds.copy(source, dest) def match(self, pats=None, include=None, exclude=None, default='glob', listsubrepos=False, badfn=None): @@ -2178,8 +2166,6 @@ """ def getfilectx(repo, memctx, path): fctx = ctx[path] - # this is weird but apparently we only keep track of one parent - # (why not only store that instead of a tuple?) copied = fctx.renamed() if copied: copied = copied[0]
--- a/mercurial/copies.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/copies.py Tue Feb 19 21:55:05 2019 -0800 @@ -24,14 +24,13 @@ stringutil, ) -def _findlimit(repo, a, b): +def _findlimit(repo, ctxa, ctxb): """ Find the last revision that needs to be checked to ensure that a full transitive closure for file copies can be properly calculated. Generally, this means finding the earliest revision number that's an ancestor of a or b but not both, except when a or b is a direct descendent of the other, in which case we can return the minimum revnum of a and b. - None if no such revision exists. """ # basic idea: @@ -46,27 +45,32 @@ # - quit when interesting revs is zero cl = repo.changelog + wdirparents = None + a = ctxa.rev() + b = ctxb.rev() if a is None: + wdirparents = (ctxa.p1(), ctxa.p2()) a = node.wdirrev if b is None: + assert not wdirparents + wdirparents = (ctxb.p1(), ctxb.p2()) b = node.wdirrev side = {a: -1, b: 1} visit = [-a, -b] heapq.heapify(visit) interesting = len(visit) - hascommonancestor = False limit = node.wdirrev while interesting: r = -heapq.heappop(visit) if r == node.wdirrev: - parents = [cl.rev(p) for p in repo.dirstate.parents()] + parents = [pctx.rev() for pctx in wdirparents] else: parents = cl.parentrevs(r) + if parents[1] == node.nullrev: + parents = parents[:1] for p in parents: - if p < 0: - continue if p not in side: # first time we see p; add it to visit side[p] = side[r] @@ -77,14 +81,10 @@ # p was interesting but now we know better side[p] = 0 interesting -= 1 - hascommonancestor = True if side[r]: limit = r # lowest rev visited interesting -= 1 - if not hascommonancestor: - return None - # Consider the following flow (see test-commit-amend.t under issue4405): # 1/ File 'a0' committed # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1') @@ -168,9 +168,7 @@ if debug: dbg('debug.copies: looking into rename from %s to %s\n' % (a, b)) - limit = _findlimit(repo, a.rev(), b.rev()) - if limit is None: - limit = 
node.nullrev + limit = _findlimit(repo, a, b) if debug: dbg('debug.copies: search limit: %d\n' % limit) am = a.manifest() @@ -464,10 +462,7 @@ if graft: tca = _c1.ancestor(_c2) - limit = _findlimit(repo, c1.rev(), c2.rev()) - if limit is None: - # no common ancestor, no copies - return {}, {}, {}, {}, {} + limit = _findlimit(repo, c1, c2) repo.ui.debug(" searching for copies back to rev %d\n" % limit) m1 = c1.manifest() @@ -781,7 +776,7 @@ """ if f1 == f2: - return f1 # a match + return True # a match g1, g2 = f1.ancestors(), f2.ancestors() try:
--- a/mercurial/crecord.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/crecord.py Tue Feb 19 21:55:05 2019 -0800 @@ -377,9 +377,9 @@ def countchanges(self): """changedlines -> (n+,n-)""" add = len([l for l in self.changedlines if l.applied - and l.prettystr()[0] == '+']) + and l.prettystr().startswith('+')]) rem = len([l for l in self.changedlines if l.applied - and l.prettystr()[0] == '-']) + and l.prettystr().startswith('-')]) return add, rem def getfromtoline(self): @@ -423,7 +423,7 @@ changedlinestr = changedline.prettystr() if changedline.applied: hunklinelist.append(changedlinestr) - elif changedlinestr[0] == "-": + elif changedlinestr.startswith("-"): hunklinelist.append(" " + changedlinestr[1:]) fp.write(''.join(self.before + hunklinelist + self.after)) @@ -471,11 +471,11 @@ for line in self.changedlines: text = line.linetext if line.applied: - if text[0] == '+': + if text.startswith('+'): dels.append(text[1:]) - elif text[0] == '-': + elif text.startswith('-'): adds.append(text[1:]) - elif text[0] == '+': + elif text.startswith('+'): dels.append(text[1:]) adds.append(text[1:]) hunk = ['-%s' % l for l in dels] + ['+%s' % l for l in adds] @@ -487,7 +487,7 @@ return getattr(self._hunk, name) def __repr__(self): - return '<hunk %r@%d>' % (self.filename(), self.fromline) + return r'<hunk %r@%d>' % (self.filename(), self.fromline) def filterpatch(ui, chunks, chunkselector, operation=None): """interactively filter patch chunks into applied-only chunks""" @@ -1546,14 +1546,7 @@ new changeset will be created (the normal commit behavior). 
""" - try: - ver = float(util.version()[:3]) - except ValueError: - ver = 1 - if ver < 2.19: - msg = _("The amend option is unavailable with hg versions < 2.2\n\n" - "Press any key to continue.") - elif opts.get('amend') is None: + if opts.get('amend') is None: opts['amend'] = True msg = _("Amend option is turned on -- committing the currently " "selected changes will not create a new changeset, but " @@ -1799,6 +1792,7 @@ break if self.commenttext != "": - whitespaceremoved = re.sub("(?m)^\s.*(\n|$)", "", self.commenttext) + whitespaceremoved = re.sub(br"(?m)^\s.*(\n|$)", b"", + self.commenttext) if whitespaceremoved != "": self.opts['message'] = self.commenttext
--- a/mercurial/dagop.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/dagop.py Tue Feb 19 21:55:05 2019 -0800 @@ -28,7 +28,7 @@ generatorset = smartset.generatorset # possible maximum depth between null and wdir() -_maxlogdepth = 0x80000000 +maxlogdepth = 0x80000000 def _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse): """Walk DAG using 'pfunc' from the given 'revs' nodes @@ -42,7 +42,7 @@ if startdepth is None: startdepth = 0 if stopdepth is None: - stopdepth = _maxlogdepth + stopdepth = maxlogdepth if stopdepth == 0: return if stopdepth < 0: @@ -142,7 +142,7 @@ def revancestors(repo, revs, followfirst=False, startdepth=None, stopdepth=None, cutfunc=None): - """Like revlog.ancestors(), but supports additional options, includes + r"""Like revlog.ancestors(), but supports additional options, includes the given revs themselves, and returns a smartset Scan ends at the stopdepth (exlusive) if specified. Revisions found @@ -221,7 +221,7 @@ Scan ends at the stopdepth (exlusive) if specified. Revisions found earlier than the startdepth are omitted. """ - if startdepth is None and stopdepth is None: + if startdepth is None and (stopdepth is None or stopdepth >= maxlogdepth): gen = _genrevdescendants(repo, revs, followfirst) else: gen = _genrevdescendantsofdepth(repo, revs, followfirst,
--- a/mercurial/debugcommands.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/debugcommands.py Tue Feb 19 21:55:05 2019 -0800 @@ -38,6 +38,7 @@ cmdutil, color, context, + copies, dagparser, encoding, error, @@ -745,7 +746,6 @@ nodates = True datesort = opts.get(r'datesort') - timestr = "" if datesort: keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename else: @@ -1086,6 +1086,7 @@ ui.write("%s\n" % pycompat.byterepr(ignore)) else: m = scmutil.match(repo[None], pats=files) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) for f in m.files(): nf = util.normpath(f) ignored = None @@ -1102,16 +1103,16 @@ break if ignored: if ignored == nf: - ui.write(_("%s is ignored\n") % m.uipath(f)) + ui.write(_("%s is ignored\n") % uipathfn(f)) else: ui.write(_("%s is ignored because of " "containing folder %s\n") - % (m.uipath(f), ignored)) + % (uipathfn(f), ignored)) ignorefile, lineno, line = ignoredata ui.write(_("(ignore rule in %s, line %d: '%s')\n") % (ignorefile, lineno, line)) else: - ui.write(_("%s is not ignored\n") % m.uipath(f)) + ui.write(_("%s is not ignored\n") % uipathfn(f)) @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts, _('-c|-m|FILE')) @@ -1182,13 +1183,6 @@ ''' opts = pycompat.byteskwargs(opts) - def writetemp(contents): - (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-") - f = os.fdopen(fd, r"wb") - f.write(contents) - f.close() - return name - problems = 0 fm = ui.formatter('debuginstall', opts) @@ -1812,6 +1806,18 @@ ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files))) ui.write('\n') +@command('debugpathcopies', + cmdutil.walkopts, + 'hg debugpathcopies REV1 REV2 [FILE]', + inferrepo=True) +def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts): + """show copies between two revisions""" + ctx1 = scmutil.revsingle(repo, rev1) + ctx2 = scmutil.revsingle(repo, rev2) + m = scmutil.match(ctx1, pats, opts) + for dst, src in copies.pathcopies(ctx1, ctx2, m).items(): + ui.write('%s -> 
%s\n' % (src, dst)) + @command('debugpeer', [], _('PATH'), norepo=True) def debugpeer(ui, path): """establish a connection to a peer repository""" @@ -2004,17 +2010,17 @@ @command('debugrename', [('r', 'rev', '', _('revision to debug'), _('REV'))], - _('[-r REV] FILE')) -def debugrename(ui, repo, file1, *pats, **opts): + _('[-r REV] [FILE]...')) +def debugrename(ui, repo, *pats, **opts): """dump rename information""" opts = pycompat.byteskwargs(opts) ctx = scmutil.revsingle(repo, opts.get('rev')) - m = scmutil.match(ctx, (file1,) + pats, opts) + m = scmutil.match(ctx, pats, opts) for abs in ctx.walk(m): fctx = ctx[abs] o = fctx.filelog().renamed(fctx.filenode()) - rel = m.rel(abs) + rel = repo.pathto(abs) if o: ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) else: @@ -2468,15 +2474,15 @@ ui.write(('+++ optimized\n'), label='diff.file_b') sm = difflib.SequenceMatcher(None, arevs, brevs) for tag, alo, ahi, blo, bhi in sm.get_opcodes(): - if tag in ('delete', 'replace'): + if tag in (r'delete', r'replace'): for c in arevs[alo:ahi]: - ui.write('-%s\n' % c, label='diff.deleted') - if tag in ('insert', 'replace'): + ui.write('-%d\n' % c, label='diff.deleted') + if tag in (r'insert', r'replace'): for c in brevs[blo:bhi]: - ui.write('+%s\n' % c, label='diff.inserted') - if tag == 'equal': + ui.write('+%d\n' % c, label='diff.inserted') + if tag == r'equal': for c in arevs[alo:ahi]: - ui.write(' %s\n' % c) + ui.write(' %d\n' % c) return 1 func = revset.makematcher(tree) @@ -2569,7 +2575,6 @@ source, branches = hg.parseurl(ui.expandpath(source)) url = util.url(source) - addr = None defaultport = {'https': 443, 'ssh': 22} if url.scheme in defaultport: @@ -2791,9 +2796,9 @@ f = lambda fn: util.normpath(fn) fmt = 'f %%-%ds %%-%ds %%s' % ( max([len(abs) for abs in items]), - max([len(m.rel(abs)) for abs in items])) + max([len(repo.pathto(abs)) for abs in items])) for abs in items: - line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '') + line = fmt % 
(abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '') ui.write("%s\n" % line.rstrip()) @command('debugwhyunstable', [], _('REV'))
--- a/mercurial/diffutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/diffutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -16,13 +16,15 @@ pycompat, ) -def diffallopts(ui, opts=None, untrusted=False, section='diff'): +def diffallopts(ui, opts=None, untrusted=False, section='diff', + configprefix=''): '''return diffopts with all features supported and parsed''' return difffeatureopts(ui, opts=opts, untrusted=untrusted, section=section, - git=True, whitespace=True, formatchanging=True) + git=True, whitespace=True, formatchanging=True, + configprefix=configprefix) def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False, - whitespace=False, formatchanging=False): + whitespace=False, formatchanging=False, configprefix=''): '''return diffopts with only opted-in features parsed Features: @@ -45,7 +47,8 @@ return v if forceplain is not None and ui.plain(): return forceplain - return getter(section, name or key, untrusted=untrusted) + return getter(section, configprefix + (name or key), + untrusted=untrusted) # core options, expected to be understood by every diff parser buildopts = {
--- a/mercurial/dirstate.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/dirstate.py Tue Feb 19 21:55:05 2019 -0800 @@ -81,6 +81,10 @@ self._origpl = None self._updatedfiles = set() self._mapcls = dirstatemap + # Access and cache cwd early, so we don't access it for the first time + # after a working-copy update caused it to not exist (accessing it then + # raises an exception). + self._cwd @contextlib.contextmanager def parentchange(self): @@ -144,7 +148,7 @@ def _ignore(self): files = self._ignorefiles() if not files: - return matchmod.never(self._root, '') + return matchmod.never() pats = ['include:%s' % f for f in files] return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
--- a/mercurial/discovery.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/discovery.py Tue Feb 19 21:55:05 2019 -0800 @@ -238,7 +238,7 @@ # D. Update newmap with outgoing changes. # This will possibly add new heads and remove existing ones. - newmap = branchmap.branchcache((branch, heads[1]) + newmap = branchmap.remotebranchcache((branch, heads[1]) for branch, heads in headssum.iteritems() if heads[0] is not None) newmap.update(repo, (ctx.rev() for ctx in missingctx))
--- a/mercurial/exchange.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/exchange.py Tue Feb 19 21:55:05 2019 -0800 @@ -297,7 +297,6 @@ 'client')) elif part.type == 'stream2' and version is None: # A stream2 part requires to be part of a v2 bundle - version = "v2" requirements = urlreq.unquote(part.params['requirements']) splitted = requirements.split() params = bundle2._formatrequirementsparams(splitted) @@ -921,7 +920,7 @@ if v in changegroup.supportedoutgoingversions( pushop.repo)] if not cgversions: - raise ValueError(_('no common changegroup version')) + raise error.Abort(_('no common changegroup version')) version = max(cgversions) cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version, 'push') @@ -2185,7 +2184,7 @@ cgversions = [v for v in cgversions if v in changegroup.supportedoutgoingversions(repo)] if not cgversions: - raise ValueError(_('no common changegroup version')) + raise error.Abort(_('no common changegroup version')) version = max(cgversions) outgoing = _computeoutgoing(repo, heads, common) @@ -2229,7 +2228,7 @@ if not kwargs.get(r'bookmarks', False): return if 'bookmarks' not in b2caps: - raise ValueError(_('no common bookmarks exchange method')) + raise error.Abort(_('no common bookmarks exchange method')) books = bookmod.listbinbookmarks(repo) data = bookmod.binaryencode(books) if data: @@ -2264,7 +2263,7 @@ """add phase heads part to the requested bundle""" if kwargs.get(r'phases', False): if not 'heads' in b2caps.get('phases'): - raise ValueError(_('no common phases exchange method')) + raise error.Abort(_('no common phases exchange method')) if heads is None: heads = repo.heads()
--- a/mercurial/filemerge.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/filemerge.py Tue Feb 19 21:55:05 2019 -0800 @@ -279,6 +279,7 @@ keep as the merged version.""" ui = repo.ui fd = fcd.path() + uipathfn = scmutil.getuipathfn(repo) # Avoid prompting during an in-memory merge since it doesn't support merge # conflicts. @@ -287,7 +288,7 @@ 'support file conflicts') prompts = partextras(labels) - prompts['fd'] = fd + prompts['fd'] = uipathfn(fd) try: if fco.isabsent(): index = ui.promptchoice( @@ -394,13 +395,14 @@ def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf): tool, toolpath, binary, symlink, scriptfn = toolconf + uipathfn = scmutil.getuipathfn(repo) if symlink: repo.ui.warn(_('warning: internal %s cannot merge symlinks ' - 'for %s\n') % (tool, fcd.path())) + 'for %s\n') % (tool, uipathfn(fcd.path()))) return False if fcd.isabsent() or fco.isabsent(): repo.ui.warn(_('warning: internal %s cannot merge change/delete ' - 'conflict for %s\n') % (tool, fcd.path())) + 'conflict for %s\n') % (tool, uipathfn(fcd.path()))) return False return True @@ -462,7 +464,6 @@ Generic driver for _imergelocal and _imergeother """ assert localorother is not None - tool, toolpath, binary, symlink, scriptfn = toolconf r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels, localorother=localorother) return True, r @@ -581,9 +582,10 @@ def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): tool, toolpath, binary, symlink, scriptfn = toolconf + uipathfn = scmutil.getuipathfn(repo) if fcd.isabsent() or fco.isabsent(): repo.ui.warn(_('warning: %s cannot merge change/delete conflict ' - 'for %s\n') % (tool, fcd.path())) + 'for %s\n') % (tool, uipathfn(fcd.path()))) return False, 1, None unused, unused, unused, back = files localpath = _workingpath(repo, fcd) @@ -623,7 +625,7 @@ lambda s: procutil.shellquote(util.localpath(s))) if _toolbool(ui, tool, "gui"): repo.ui.status(_('running merge tool %s for file %s\n') % - (tool, fcd.path())) + 
(tool, uipathfn(fcd.path()))) if scriptfn is None: cmd = toolpath + ' ' + args repo.ui.debug('launching merge tool: %s\n' % cmd) @@ -741,8 +743,7 @@ # TODO: Break this import cycle somehow. (filectx -> ctx -> fileset -> # merge -> filemerge). (I suspect the fileset import is the weakest link) from . import context - a = _workingpath(repo, fcd) - back = scmutil.origpath(ui, repo, a) + back = scmutil.backuppath(ui, repo, fcd.path()) inworkingdir = (back.startswith(repo.wvfs.base) and not back.startswith(repo.vfs.base)) if isinstance(fcd, context.overlayworkingfilectx) and inworkingdir: @@ -762,6 +763,7 @@ if isinstance(fcd, context.overlayworkingfilectx): util.writefile(back, fcd.data()) else: + a = _workingpath(repo, fcd) util.copyfile(a, back) # A arbitraryfilectx is returned, so we can run the same functions on # the backup context regardless of where it lives. @@ -842,6 +844,8 @@ ui = repo.ui fd = fcd.path() + uipathfn = scmutil.getuipathfn(repo) + fduipath = uipathfn(fd) binary = fcd.isbinary() or fco.isbinary() or fca.isbinary() symlink = 'l' in fcd.flags() + fco.flags() changedelete = fcd.isabsent() or fco.isabsent() @@ -865,8 +869,8 @@ raise error.Abort(_("invalid 'python:' syntax: %s") % toolpath) toolpath = script ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n" - % (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink), - pycompat.bytestr(changedelete))) + % (tool, fduipath, pycompat.bytestr(binary), + pycompat.bytestr(symlink), pycompat.bytestr(changedelete))) if tool in internals: func = internals[tool] @@ -892,9 +896,10 @@ if premerge: if orig != fco.path(): - ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd)) + ui.status(_("merging %s and %s to %s\n") % + (uipathfn(orig), uipathfn(fco.path()), fduipath)) else: - ui.status(_("merging %s\n") % fd) + ui.status(_("merging %s\n") % fduipath) ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca)) @@ -905,7 +910,7 @@ raise 
error.InMemoryMergeConflictsError('in-memory merge does ' 'not support merge ' 'conflicts') - ui.warn(onfailure % fd) + ui.warn(onfailure % fduipath) return True, 1, False back = _makebackup(repo, ui, wctx, fcd, premerge) @@ -958,7 +963,7 @@ raise error.InMemoryMergeConflictsError('in-memory merge ' 'does not support ' 'merge conflicts') - ui.warn(onfailure % fd) + ui.warn(onfailure % fduipath) _onfilemergefailure(ui) return True, r, deleted @@ -986,6 +991,7 @@ def _check(repo, r, ui, tool, fcd, files): fd = fcd.path() + uipathfn = scmutil.getuipathfn(repo) unused, unused, unused, back = files if not r and (_toolbool(ui, tool, "checkconflicts") or @@ -997,7 +1003,7 @@ if 'prompt' in _toollist(ui, tool, "check"): checked = True if ui.promptchoice(_("was merge of '%s' successful (yn)?" - "$$ &Yes $$ &No") % fd, 1): + "$$ &Yes $$ &No") % uipathfn(fd), 1): r = 1 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or @@ -1006,7 +1012,7 @@ if back is not None and not fcd.cmp(back): if ui.promptchoice(_(" output file %s appears unchanged\n" "was merge successful (yn)?" - "$$ &Yes $$ &No") % fd, 1): + "$$ &Yes $$ &No") % uipathfn(fd), 1): r = 1 if back is not None and _toolbool(ui, tool, "fixeol"):
--- a/mercurial/fileset.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/fileset.py Tue Feb 19 21:55:05 2019 -0800 @@ -499,9 +499,8 @@ """Create a matcher to select files by predfn(filename)""" if cache: predfn = util.cachefunc(predfn) - repo = self.ctx.repo() - return matchmod.predicatematcher(repo.root, repo.getcwd(), predfn, - predrepr=predrepr, badfn=self._badfn) + return matchmod.predicatematcher(predfn, predrepr=predrepr, + badfn=self._badfn) def fpredicate(self, predfn, predrepr=None, cache=False): """Create a matcher to select files by predfn(fctx) at the current @@ -539,9 +538,7 @@ def never(self): """Create a matcher to select nothing""" - repo = self.ctx.repo() - return matchmod.nevermatcher(repo.root, repo.getcwd(), - badfn=self._badfn) + return matchmod.never(badfn=self._badfn) def match(ctx, expr, badfn=None): """Create a matcher for a single fileset expression"""
--- a/mercurial/graphmod.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/graphmod.py Tue Feb 19 21:55:05 2019 -0800 @@ -451,7 +451,7 @@ # If 'graphshorten' config, only draw shift_interline # when there is any non vertical flow in graph. if state['graphshorten']: - if any(c in '\/' for c in shift_interline if c): + if any(c in br'\/' for c in shift_interline if c): lines.append(shift_interline) # Else, no 'graphshorten' config so draw shift_interline. else:
--- a/mercurial/help/config.txt Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/help/config.txt Tue Feb 19 21:55:05 2019 -0800 @@ -2341,6 +2341,9 @@ Reduce the amount of output printed. (default: False) +``relative-paths`` + Prefer relative paths in the UI. + ``remotecmd`` Remote command to use for clone/push/pull operations. (default: ``hg``)
--- a/mercurial/hg.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/hg.py Tue Feb 19 21:55:05 2019 -0800 @@ -38,6 +38,7 @@ narrowspec, node, phases, + pycompat, repository as repositorymod, scmutil, sshpeer, @@ -57,7 +58,15 @@ def _local(path): path = util.expandpath(util.urllocalpath(path)) - return (os.path.isfile(path) and bundlerepo or localrepo) + + try: + isfile = os.path.isfile(path) + # Python 2 raises TypeError, Python 3 ValueError. + except (TypeError, ValueError) as e: + raise error.Abort(_('invalid path %s: %s') % ( + path, pycompat.bytestr(e))) + + return isfile and bundlerepo or localrepo def addbranchrevs(lrepo, other, branches, revs): peer = other.peer() # a courtesy to callers using a localrepo for other @@ -282,25 +291,20 @@ called. """ - destlock = lock = None - lock = repo.lock() - try: + with repo.lock(): # we use locks here because if we race with commit, we # can end up with extra data in the cloned revlogs that's # not pointed to by changesets, thus causing verify to # fail - destlock = copystore(ui, repo, repo.path) - - sharefile = repo.vfs.join('sharedpath') - util.rename(sharefile, sharefile + '.old') + with destlock or util.nullcontextmanager(): - repo.requirements.discard('shared') - repo.requirements.discard('relshared') - repo._writerequirements() - finally: - destlock and destlock.release() - lock and lock.release() + sharefile = repo.vfs.join('sharedpath') + util.rename(sharefile, sharefile + '.old') + + repo.requirements.discard('shared') + repo.requirements.discard('relshared') + repo._writerequirements() # Removing share changes some fundamental properties of the repo instance. # So we instantiate a new repo object and operate on it rather than
--- a/mercurial/hgweb/hgwebdir_mod.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/hgweb/hgwebdir_mod.py Tue Feb 19 21:55:05 2019 -0800 @@ -143,7 +143,7 @@ path = path[:-len(discarded) - 1] try: - r = hg.repository(ui, path) + hg.repository(ui, path) directory = False except (IOError, error.RepoError): pass @@ -510,7 +510,7 @@ if style == styles[0]: vars['style'] = style - sessionvars = webutil.sessionvars(vars, r'?') + sessionvars = webutil.sessionvars(vars, '?') logourl = config('web', 'logourl') logoimg = config('web', 'logoimg') staticurl = (config('web', 'staticurl')
--- a/mercurial/hgweb/server.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/hgweb/server.py Tue Feb 19 21:55:05 2019 -0800 @@ -54,7 +54,7 @@ self.writelines(str.split('\n')) def writelines(self, seq): for msg in seq: - self.handler.log_error("HG error: %s", msg) + self.handler.log_error(r"HG error: %s", encoding.strfromlocal(msg)) class _httprequesthandler(httpservermod.basehttprequesthandler): @@ -100,17 +100,22 @@ def do_POST(self): try: self.do_write() - except Exception: + except Exception as e: + # I/O below could raise another exception. So log the original + # exception first to ensure it is recorded. + if not (isinstance(e, (OSError, socket.error)) + and e.errno == errno.ECONNRESET): + tb = r"".join(traceback.format_exception(*sys.exc_info())) + # We need a native-string newline to poke in the log + # message, because we won't get a newline when using an + # r-string. This is the easy way out. + newline = chr(10) + self.log_error(r"Exception happened during processing " + r"request '%s':%s%s", self.path, newline, tb) + self._start_response(r"500 Internal Server Error", []) self._write(b"Internal Server Error") self._done() - tb = r"".join(traceback.format_exception(*sys.exc_info())) - # We need a native-string newline to poke in the log - # message, because we won't get a newline when using an - # r-string. This is the easy way out. - newline = chr(10) - self.log_error(r"Exception happened during processing " - r"request '%s':%s%s", self.path, newline, tb) def do_PUT(self): self.do_POST() @@ -165,7 +170,7 @@ if length: env[r'CONTENT_LENGTH'] = length for header in [h for h in self.headers.keys() - if h not in (r'content-type', r'content-length')]: + if h.lower() not in (r'content-type', r'content-length')]: hkey = r'HTTP_' + header.replace(r'-', r'_').upper() hval = self.headers.get(header) hval = hval.replace(r'\n', r'').strip()
--- a/mercurial/hgweb/webcommands.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/hgweb/webcommands.py Tue Feb 19 21:55:05 2019 -0800 @@ -884,7 +884,7 @@ leftlines = filelines(pfctx) else: rightlines = () - pfctx = ctx.parents()[0][path] + pfctx = ctx.p1()[path] leftlines = filelines(pfctx) comparison = webutil.compare(context, leftlines, rightlines)
--- a/mercurial/hgweb/webutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/hgweb/webutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -456,13 +456,13 @@ files = listfilediffs(ctx.files(), n, web.maxfiles) entry = commonentry(repo, ctx) - entry.update( - allparents=_kwfunc(lambda context, mapping: parents(ctx)), - parent=_kwfunc(lambda context, mapping: parents(ctx, rev - 1)), - child=_kwfunc(lambda context, mapping: children(ctx, rev + 1)), - changelogtag=showtags, - files=files, - ) + entry.update({ + 'allparents': _kwfunc(lambda context, mapping: parents(ctx)), + 'parent': _kwfunc(lambda context, mapping: parents(ctx, rev - 1)), + 'child': _kwfunc(lambda context, mapping: children(ctx, rev + 1)), + 'changelogtag': showtags, + 'files': files, + }) return entry def changelistentries(web, revs, maxcount, parityfn): @@ -565,16 +565,14 @@ def _diffsgen(context, repo, ctx, basectx, files, style, stripecount, linerange, lineidprefix): if files: - m = match.exact(repo.root, repo.getcwd(), files) + m = match.exact(files) else: - m = match.always(repo.root, repo.getcwd()) + m = match.always() diffopts = patch.diffopts(repo.ui, untrusted=True) - node1 = basectx.node() - node2 = ctx.node() parity = paritygen(stripecount) - diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts) + diffhunks = patch.diffhunks(repo, basectx, ctx, m, opts=diffopts) for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1): if style != 'raw': header = header[1:]
--- a/mercurial/hgweb/wsgiheaders.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/hgweb/wsgiheaders.py Tue Feb 19 21:55:05 2019 -0800 @@ -127,7 +127,7 @@ return self._headers[:] def __repr__(self): - return "%s(%r)" % (self.__class__.__name__, self._headers) + return r"%s(%r)" % (self.__class__.__name__, self._headers) def __str__(self): """str() returns the formatted headers, complete with end line,
--- a/mercurial/httppeer.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/httppeer.py Tue Feb 19 21:55:05 2019 -0800 @@ -816,8 +816,8 @@ return raise error.CapabilityError( - _('cannot %s; client or remote repository does not support the %r ' - 'capability') % (purpose, name)) + _('cannot %s; client or remote repository does not support the ' + '\'%s\' capability') % (purpose, name)) # End of ipeercapabilities.
--- a/mercurial/keepalive.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/keepalive.py Tue Feb 19 21:55:05 2019 -0800 @@ -84,6 +84,7 @@ from __future__ import absolute_import, print_function +import collections import errno import hashlib import socket @@ -114,15 +115,13 @@ """ def __init__(self): self._lock = threading.Lock() - self._hostmap = {} # map hosts to a list of connections + self._hostmap = collections.defaultdict(list) # host -> [connection] self._connmap = {} # map connections to host self._readymap = {} # map connection to ready state def add(self, host, connection, ready): self._lock.acquire() try: - if host not in self._hostmap: - self._hostmap[host] = [] self._hostmap[host].append(connection) self._connmap[connection] = host self._readymap[connection] = ready @@ -155,19 +154,18 @@ conn = None self._lock.acquire() try: - if host in self._hostmap: - for c in self._hostmap[host]: - if self._readymap[c]: - self._readymap[c] = 0 - conn = c - break + for c in self._hostmap[host]: + if self._readymap[c]: + self._readymap[c] = False + conn = c + break finally: self._lock.release() return conn def get_all(self, host=None): if host: - return list(self._hostmap.get(host, [])) + return list(self._hostmap[host]) else: return dict(self._hostmap) @@ -202,7 +200,7 @@ def _request_closed(self, request, host, connection): """tells us that this request is now closed and that the connection is ready for another request""" - self._cm.set_ready(connection, 1) + self._cm.set_ready(connection, True) def _remove_connection(self, host, connection, close=0): if close: @@ -239,7 +237,7 @@ if DEBUG: DEBUG.info("creating new connection to %s (%d)", host, id(h)) - self._cm.add(host, h, 0) + self._cm.add(host, h, False) self._start_transaction(h, req) r = h.getresponse() # The string form of BadStatusLine is the status line. 
Add some context @@ -405,6 +403,11 @@ _raw_read = httplib.HTTPResponse.read _raw_readinto = getattr(httplib.HTTPResponse, 'readinto', None) + # Python 2.7 has a single close() which closes the socket handle. + # This method was effectively renamed to _close_conn() in Python 3. But + # there is also a close(). _close_conn() is called by methods like + # read(). + def close(self): if self.fp: self.fp.close() @@ -413,6 +416,9 @@ self._handler._request_closed(self, self._host, self._connection) + def _close_conn(self): + self.close() + def close_connection(self): self._handler._remove_connection(self._host, self._connection, close=1) self.close()
--- a/mercurial/localrepo.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/localrepo.py Tue Feb 19 21:55:05 2019 -0800 @@ -992,7 +992,7 @@ self._dirstatevalidatewarned = False - self._branchcaches = {} + self._branchcaches = branchmap.BranchMapCache() self._revbranchcache = None self._filterpats = {} self._datafilters = {} @@ -1227,14 +1227,14 @@ @storecache(narrowspec.FILENAME) def _storenarrowmatch(self): if repository.NARROW_REQUIREMENT not in self.requirements: - return matchmod.always(self.root, '') + return matchmod.always() include, exclude = self.narrowpats return narrowspec.match(self.root, include=include, exclude=exclude) @storecache(narrowspec.FILENAME) def _narrowmatch(self): if repository.NARROW_REQUIREMENT not in self.requirements: - return matchmod.always(self.root, '') + return matchmod.always() narrowspec.checkworkingcopynarrowspec(self) include, exclude = self.narrowpats return narrowspec.match(self.root, include=include, exclude=exclude) @@ -1252,7 +1252,7 @@ if includeexact and not self._narrowmatch.always(): # do not exclude explicitly-specified paths so that they can # be warned later on - em = matchmod.exact(match._root, match._cwd, match.files()) + em = matchmod.exact(match.files()) nm = matchmod.unionmatcher([self._narrowmatch, em]) return matchmod.intersectmatchers(match, nm) return matchmod.intersectmatchers(match, self._narrowmatch) @@ -1520,8 +1520,7 @@ def branchmap(self): '''returns a dictionary {branch: [branchheads]} with branchheads ordered by increasing revision number''' - branchmap.updatecache(self) - return self._branchcaches[self.filtername] + return self._branchcaches[self] @unfilteredmethod def revbranchcache(self): @@ -2011,8 +2010,7 @@ self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True) self.invalidate() - parentgone = (parents[0] not in self.changelog.nodemap or - parents[1] not in self.changelog.nodemap) + parentgone = any(p not in self.changelog.nodemap for p in parents) if parentgone: # prevent 
dirstateguard from overwriting already restored one dsguard.close() @@ -2074,9 +2072,9 @@ return if tr is None or tr.changes['origrepolen'] < len(self): - # updating the unfiltered branchmap should refresh all the others, + # accessing the 'served' branchmap should refresh all the others, self.ui.debug('updating the branch cache\n') - branchmap.updatecache(self.filtered('served')) + self.filtered('served').branchmap() if full: rbc = self.revbranchcache() @@ -2094,7 +2092,7 @@ # can't use delattr on proxy del self.__dict__[r'_tagscache'] - self.unfiltered()._branchcaches.clear() + self._branchcaches.clear() self.invalidatevolatilesets() self._sparsesignaturecache.clear() @@ -2402,18 +2400,15 @@ raise error.Abort('%s: %s' % (f, msg)) if not match: - match = matchmod.always(self.root, '') + match = matchmod.always() if not force: vdirs = [] match.explicitdir = vdirs.append match.bad = fail - wlock = lock = tr = None - try: - wlock = self.wlock() - lock = self.lock() # for recent changelog (see issue4368) - + # lock() for recent changelog (see issue4368) + with self.wlock(), self.lock(): wctx = self[None] merge = len(wctx.parents()) > 1 @@ -2460,10 +2455,11 @@ # commit subs and write new state if subs: + uipathfn = scmutil.getuipathfn(self) for s in sorted(commitsubs): sub = wctx.sub(s) self.ui.status(_('committing subrepository %s\n') % - subrepoutil.subrelpath(sub)) + uipathfn(subrepoutil.subrelpath(sub))) sr = sub.commit(cctx._text, user, date) newstate[s] = (newstate[s][0], sr) subrepoutil.writestate(self, newstate) @@ -2473,21 +2469,17 @@ try: self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2) - tr = self.transaction('commit') - ret = self.commitctx(cctx, True) + with self.transaction('commit'): + ret = self.commitctx(cctx, True) + # update bookmarks, dirstate and mergestate + bookmarks.update(self, [p1, p2], ret) + cctx.markcommitted(ret) + ms.reset() except: # re-raises if edited: self.ui.write( _('note: commit message saved in %s\n') % msgfn) 
raise - # update bookmarks, dirstate and mergestate - bookmarks.update(self, [p1, p2], ret) - cctx.markcommitted(ret) - ms.reset() - tr.close() - - finally: - lockmod.release(tr, lock, wlock) def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2): # hack for command that use a temporary commit (eg: histedit) @@ -2509,13 +2501,10 @@ from p1 or p2 are excluded from the committed ctx.files(). """ - tr = None p1, p2 = ctx.p1(), ctx.p2() user = ctx.user() - lock = self.lock() - try: - tr = self.transaction("commit") + with self.lock(), self.transaction("commit") as tr: trp = weakref.proxy(tr) if ctx.manifestnode(): @@ -2538,8 +2527,9 @@ removed = list(ctx.removed()) linkrev = len(self) self.ui.note(_("committing files:\n")) + uipathfn = scmutil.getuipathfn(self) for f in sorted(ctx.modified() + ctx.added()): - self.ui.note(f + "\n") + self.ui.note(uipathfn(f) + "\n") try: fctx = ctx[f] if fctx is None: @@ -2549,13 +2539,15 @@ m[f] = self._filecommit(fctx, m1, m2, linkrev, trp, changed) m.setflag(f, fctx.flags()) - except OSError as inst: - self.ui.warn(_("trouble committing %s!\n") % f) + except OSError: + self.ui.warn(_("trouble committing %s!\n") % + uipathfn(f)) raise except IOError as inst: errcode = getattr(inst, 'errno', errno.ENOENT) if error or errcode and errcode != errno.ENOENT: - self.ui.warn(_("trouble committing %s!\n") % f) + self.ui.warn(_("trouble committing %s!\n") % + uipathfn(f)) raise # update manifest @@ -2612,12 +2604,7 @@ # # if minimal phase was 0 we don't need to retract anything phases.registernew(self, tr, targetphase, [n]) - tr.close() return n - finally: - if tr: - tr.release() - lock.release() @unfilteredmethod def destroying(self):
--- a/mercurial/logcmdutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/logcmdutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -9,6 +9,7 @@ import itertools import os +import posixpath from .i18n import _ from .node import ( @@ -58,29 +59,53 @@ changes=None, stat=False, fp=None, graphwidth=0, prefix='', root='', listsubrepos=False, hunksfilterfn=None): '''show diff or diffstat.''' + ctx1 = repo[node1] + ctx2 = repo[node2] if root: relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) else: relroot = '' + copysourcematch = None + def compose(f, g): + return lambda x: f(g(x)) + def pathfn(f): + return posixpath.join(prefix, f) if relroot != '': # XXX relative roots currently don't work if the root is within a # subrepo - uirelroot = match.uipath(relroot) + uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) + uirelroot = uipathfn(pathfn(relroot)) relroot += '/' for matchroot in match.files(): if not matchroot.startswith(relroot): - ui.warn(_('warning: %s not inside relative root %s\n') % ( - match.uipath(matchroot), uirelroot)) + ui.warn(_('warning: %s not inside relative root %s\n') % + (uipathfn(pathfn(matchroot)), uirelroot)) + + relrootmatch = scmutil.match(ctx2, pats=[relroot], default='path') + match = matchmod.intersectmatchers(match, relrootmatch) + copysourcematch = relrootmatch + + checkroot = (repo.ui.configbool('devel', 'all-warnings') or + repo.ui.configbool('devel', 'check-relroot')) + def relrootpathfn(f): + if checkroot and not f.startswith(relroot): + raise AssertionError( + "file %s doesn't start with relroot %s" % (f, relroot)) + return f[len(relroot):] + pathfn = compose(relrootpathfn, pathfn) if stat: diffopts = diffopts.copy(context=0, noprefix=False) width = 80 if not ui.plain(): width = ui.termwidth() - graphwidth + # If an explicit --root was given, don't respect ui.relative-paths + if not relroot: + pathfn = compose(scmutil.getuipathfn(repo), pathfn) - chunks = repo[node2].diff(repo[node1], match, changes, opts=diffopts, - 
prefix=prefix, relroot=relroot, - hunksfilterfn=hunksfilterfn) + chunks = ctx2.diff(ctx1, match, changes, opts=diffopts, pathfn=pathfn, + copysourcematch=copysourcematch, + hunksfilterfn=hunksfilterfn) if fp is not None or ui.canwritewithoutlabels(): out = fp or ui @@ -105,8 +130,6 @@ ui.write(chunk, label=label) if listsubrepos: - ctx1 = repo[node1] - ctx2 = repo[node2] for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): tempnode2 = node2 try: @@ -118,8 +141,9 @@ # subpath. The best we can do is to ignore it. tempnode2 = None submatch = matchmod.subdirmatcher(subpath, match) + subprefix = repo.wvfs.reljoin(prefix, subpath) sub.diff(ui, diffopts, tempnode2, submatch, changes=changes, - stat=stat, fp=fp, prefix=prefix) + stat=stat, fp=fp, prefix=subprefix) class changesetdiffer(object): """Generate diff of changeset with pre-configured filtering functions"""
--- a/mercurial/logexchange.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/logexchange.py Tue Feb 19 21:55:05 2019 -0800 @@ -97,7 +97,6 @@ def activepath(repo, remote): """returns remote path""" - local = None # is the remote a local peer local = remote.local()
--- a/mercurial/mail.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/mail.py Tue Feb 19 21:55:05 2019 -0800 @@ -243,6 +243,13 @@ cs.body_encoding = email.charset.QP break + # On Python 2, this simply assigns a value. Python 3 inspects + # body and does different things depending on whether it has + # encode() or decode() attributes. We can get the old behavior + # if we pass a str and charset is None and we call set_charset(). + # But we may get into trouble later due to Python attempting to + # encode/decode using the registered charset (or attempting to + # use ascii in the absence of a charset). msg.set_payload(body, cs) return msg
--- a/mercurial/manifest.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/manifest.py Tue Feb 19 21:55:05 2019 -0800 @@ -283,7 +283,6 @@ if len(self.extradata) == 0: return l = [] - last_cut = 0 i = 0 offset = 0 self.extrainfo = [0] * len(self.positions)
--- a/mercurial/match.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/match.py Tue Feb 19 21:55:05 2019 -0800 @@ -42,7 +42,7 @@ except AttributeError: return m.match -def _expandsets(root, cwd, kindpats, ctx, listsubrepos, badfn): +def _expandsets(kindpats, ctx, listsubrepos, badfn): '''Returns the kindpats list with the 'set' patterns expanded to matchers''' matchers = [] other = [] @@ -57,7 +57,7 @@ if listsubrepos: for subpath in ctx.substate: sm = ctx.sub(subpath).matchfileset(pat, badfn=badfn) - pm = prefixdirmatcher(root, cwd, subpath, sm, badfn=badfn) + pm = prefixdirmatcher(subpath, sm, badfn=badfn) matchers.append(pm) continue @@ -97,25 +97,24 @@ return False return True -def _buildkindpatsmatcher(matchercls, root, cwd, kindpats, ctx=None, +def _buildkindpatsmatcher(matchercls, root, kindpats, ctx=None, listsubrepos=False, badfn=None): matchers = [] - fms, kindpats = _expandsets(root, cwd, kindpats, ctx=ctx, + fms, kindpats = _expandsets(kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn) if kindpats: - m = matchercls(root, cwd, kindpats, listsubrepos=listsubrepos, - badfn=badfn) + m = matchercls(root, kindpats, badfn=badfn) matchers.append(m) if fms: matchers.extend(fms) if not matchers: - return nevermatcher(root, cwd, badfn=badfn) + return nevermatcher(badfn=badfn) if len(matchers) == 1: return matchers[0] return unionmatcher(matchers) def match(root, cwd, patterns=None, include=None, exclude=None, default='glob', - exact=False, auditor=None, ctx=None, listsubrepos=False, warn=None, + auditor=None, ctx=None, listsubrepos=False, warn=None, badfn=None, icasefs=False): """build an object to match a set of file patterns @@ -126,7 +125,6 @@ include - patterns to include (unless they are excluded) exclude - patterns to exclude (even if they are included) default - if a pattern in patterns has no explicit type, assume this one - exact - patterns are actually filenames (include/exclude still apply) warn - optional function used for printing warnings 
badfn - optional bad() callback for this matcher instead of the default icasefs - make a matcher for wdir on case insensitive filesystems, which @@ -150,9 +148,6 @@ """ normalize = _donormalize if icasefs: - if exact: - raise error.ProgrammingError("a case-insensitive exact matcher " - "doesn't make sense") dirstate = ctx.repo().dirstate dsnormalize = dirstate.normalize @@ -171,41 +166,38 @@ kindpats.append((kind, pats, source)) return kindpats - if exact: - m = exactmatcher(root, cwd, patterns, badfn) - elif patterns: + if patterns: kindpats = normalize(patterns, default, root, cwd, auditor, warn) if _kindpatsalwaysmatch(kindpats): - m = alwaysmatcher(root, cwd, badfn, relativeuipath=True) + m = alwaysmatcher(badfn) else: - m = _buildkindpatsmatcher(patternmatcher, root, cwd, kindpats, - ctx=ctx, listsubrepos=listsubrepos, - badfn=badfn) + m = _buildkindpatsmatcher(patternmatcher, root, kindpats, ctx=ctx, + listsubrepos=listsubrepos, badfn=badfn) else: # It's a little strange that no patterns means to match everything. # Consider changing this to match nothing (probably using nevermatcher). 
- m = alwaysmatcher(root, cwd, badfn) + m = alwaysmatcher(badfn) if include: kindpats = normalize(include, 'glob', root, cwd, auditor, warn) - im = _buildkindpatsmatcher(includematcher, root, cwd, kindpats, ctx=ctx, + im = _buildkindpatsmatcher(includematcher, root, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=None) m = intersectmatchers(m, im) if exclude: kindpats = normalize(exclude, 'glob', root, cwd, auditor, warn) - em = _buildkindpatsmatcher(includematcher, root, cwd, kindpats, ctx=ctx, + em = _buildkindpatsmatcher(includematcher, root, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=None) m = differencematcher(m, em) return m -def exact(root, cwd, files, badfn=None): - return exactmatcher(root, cwd, files, badfn=badfn) +def exact(files, badfn=None): + return exactmatcher(files, badfn=badfn) -def always(root, cwd): - return alwaysmatcher(root, cwd) +def always(badfn=None): + return alwaysmatcher(badfn) -def never(root, cwd): - return nevermatcher(root, cwd) +def never(badfn=None): + return nevermatcher(badfn) def badmatch(match, badfn): """Make a copy of the given matcher, replacing its bad method with the given @@ -258,12 +250,9 @@ class basematcher(object): - def __init__(self, root, cwd, badfn=None, relativeuipath=True): - self._root = root - self._cwd = cwd + def __init__(self, badfn=None): if badfn is not None: self.bad = badfn - self._relativeuipath = relativeuipath def __call__(self, fn): return self.matchfn(fn) @@ -284,21 +273,6 @@ # by recursive traversal is visited. traversedir = None - def abs(self, f): - '''Convert a repo path back to path that is relative to the root of the - matcher.''' - return f - - def rel(self, f): - '''Convert repo path back to path that is relative to cwd of matcher.''' - return util.pathto(self._root, self._cwd, f) - - def uipath(self, f): - '''Convert repo path to a display path. If patterns or -I/-X were used - to create this matcher, the display path will be relative to cwd. 
- Otherwise it is relative to the root of the repo.''' - return (self._relativeuipath and self.rel(f)) or self.abs(f) - @propertycache def _files(self): return [] @@ -399,9 +373,8 @@ class alwaysmatcher(basematcher): '''Matches everything.''' - def __init__(self, root, cwd, badfn=None, relativeuipath=False): - super(alwaysmatcher, self).__init__(root, cwd, badfn, - relativeuipath=relativeuipath) + def __init__(self, badfn=None): + super(alwaysmatcher, self).__init__(badfn) def always(self): return True @@ -421,8 +394,8 @@ class nevermatcher(basematcher): '''Matches nothing.''' - def __init__(self, root, cwd, badfn=None): - super(nevermatcher, self).__init__(root, cwd, badfn) + def __init__(self, badfn=None): + super(nevermatcher, self).__init__(badfn) # It's a little weird to say that the nevermatcher is an exact matcher # or a prefix matcher, but it seems to make sense to let callers take @@ -447,8 +420,8 @@ class predicatematcher(basematcher): """A matcher adapter for a simple boolean function""" - def __init__(self, root, cwd, predfn, predrepr=None, badfn=None): - super(predicatematcher, self).__init__(root, cwd, badfn) + def __init__(self, predfn, predrepr=None, badfn=None): + super(predicatematcher, self).__init__(badfn) self.matchfn = predfn self._predrepr = predrepr @@ -460,13 +433,12 @@ class patternmatcher(basematcher): - def __init__(self, root, cwd, kindpats, listsubrepos=False, badfn=None): - super(patternmatcher, self).__init__(root, cwd, badfn) + def __init__(self, root, kindpats, badfn=None): + super(patternmatcher, self).__init__(badfn) self._files = _explicitfiles(kindpats) self._prefix = _prefix(kindpats) - self._pats, self.matchfn = _buildmatch(kindpats, '$', listsubrepos, - root) + self._pats, self.matchfn = _buildmatch(kindpats, '$', root) @propertycache def _dirs(self): @@ -539,11 +511,10 @@ class includematcher(basematcher): - def __init__(self, root, cwd, kindpats, listsubrepos=False, badfn=None): - super(includematcher, self).__init__(root, 
cwd, badfn) + def __init__(self, root, kindpats, badfn=None): + super(includematcher, self).__init__(badfn) - self._pats, self.matchfn = _buildmatch(kindpats, '(?:/|$)', - listsubrepos, root) + self._pats, self.matchfn = _buildmatch(kindpats, '(?:/|$)', root) self._prefix = _prefix(kindpats) roots, dirs, parents = _rootsdirsandparents(kindpats) # roots are directories which are recursively included. @@ -601,8 +572,8 @@ patterns (so no kind-prefixes). ''' - def __init__(self, root, cwd, files, badfn=None): - super(exactmatcher, self).__init__(root, cwd, badfn) + def __init__(self, files, badfn=None): + super(exactmatcher, self).__init__(badfn) if isinstance(files, list): self._files = files @@ -649,11 +620,11 @@ '''Composes two matchers by matching if the first matches and the second does not. - The second matcher's non-matching-attributes (root, cwd, bad, explicitdir, + The second matcher's non-matching-attributes (bad, explicitdir, traversedir) are ignored. ''' def __init__(self, m1, m2): - super(differencematcher, self).__init__(m1._root, m1._cwd) + super(differencematcher, self).__init__() self._m1 = m1 self._m2 = m2 self.bad = m1.bad @@ -677,6 +648,9 @@ def visitdir(self, dir): if self._m2.visitdir(dir) == 'all': return False + elif not self._m2.visitdir(dir): + # m2 does not match dir, we can return 'all' here if possible + return self._m1.visitdir(dir) return bool(self._m1.visitdir(dir)) def visitchildrenset(self, dir): @@ -714,7 +688,7 @@ def intersectmatchers(m1, m2): '''Composes two matchers by matching if both of them match. - The second matcher's non-matching-attributes (root, cwd, bad, explicitdir, + The second matcher's non-matching-attributes (bad, explicitdir, traversedir) are ignored. 
''' if m1 is None or m2 is None: @@ -726,19 +700,15 @@ m.bad = m1.bad m.explicitdir = m1.explicitdir m.traversedir = m1.traversedir - m.abs = m1.abs - m.rel = m1.rel - m._relativeuipath |= m1._relativeuipath return m if m2.always(): m = copy.copy(m1) - m._relativeuipath |= m2._relativeuipath return m return intersectionmatcher(m1, m2) class intersectionmatcher(basematcher): def __init__(self, m1, m2): - super(intersectionmatcher, self).__init__(m1._root, m1._cwd) + super(intersectionmatcher, self).__init__() self._m1 = m1 self._m2 = m2 self.bad = m1.bad @@ -817,19 +787,15 @@ ['b.txt'] >>> m2.exact(b'b.txt') True - >>> util.pconvert(m2.rel(b'b.txt')) - 'sub/b.txt' >>> def bad(f, msg): ... print(pycompat.sysstr(b"%s: %s" % (f, msg))) >>> m1.bad = bad >>> m2.bad(b'x.txt', b'No such file') sub/x.txt: No such file - >>> m2.abs(b'c.txt') - 'sub/c.txt' """ def __init__(self, path, matcher): - super(subdirmatcher, self).__init__(matcher._root, matcher._cwd) + super(subdirmatcher, self).__init__() self._path = path self._matcher = matcher self._always = matcher.always() @@ -845,15 +811,6 @@ def bad(self, f, msg): self._matcher.bad(self._path + "/" + f, msg) - def abs(self, f): - return self._matcher.abs(self._path + "/" + f) - - def rel(self, f): - return self._matcher.rel(self._path + "/" + f) - - def uipath(self, f): - return self._matcher.uipath(self._path + "/" + f) - def matchfn(self, f): # Some information is lost in the superclass's constructor, so we # can not accurately create the matching function for the subdirectory @@ -889,14 +846,14 @@ class prefixdirmatcher(basematcher): """Adapt a matcher to work on a parent directory. - The matcher's non-matching-attributes (root, cwd, bad, explicitdir, - traversedir) are ignored. + The matcher's non-matching-attributes (bad, explicitdir, traversedir) are + ignored. The prefix path should usually be the relative path from the root of this matcher to the root of the wrapped matcher. 
>>> m1 = match(util.localpath(b'root/d/e'), b'f', [b'../a.txt', b'b.txt']) - >>> m2 = prefixdirmatcher(b'root', b'd/e/f', b'd/e', m1) + >>> m2 = prefixdirmatcher(b'd/e', m1) >>> bool(m2(b'a.txt'),) False >>> bool(m2(b'd/e/a.txt')) @@ -919,8 +876,8 @@ False """ - def __init__(self, root, cwd, path, matcher, badfn=None): - super(prefixdirmatcher, self).__init__(root, cwd, badfn) + def __init__(self, path, matcher, badfn=None): + super(prefixdirmatcher, self).__init__(badfn) if not path: raise error.ProgrammingError('prefix path must not be empty') self._path = path @@ -970,13 +927,13 @@ class unionmatcher(basematcher): """A matcher that is the union of several matchers. - The non-matching-attributes (root, cwd, bad, explicitdir, traversedir) are - taken from the first matcher. + The non-matching-attributes (bad, explicitdir, traversedir) are taken from + the first matcher. """ def __init__(self, matchers): m1 = matchers[0] - super(unionmatcher, self).__init__(m1._root, m1._cwd) + super(unionmatcher, self).__init__() self.explicitdir = m1.explicitdir self.traversedir = m1.traversedir self._matchers = matchers @@ -1142,7 +1099,7 @@ return _globre(pat) + globsuffix raise error.ProgrammingError('not a regex pattern: %s:%s' % (kind, pat)) -def _buildmatch(kindpats, globsuffix, listsubrepos, root): +def _buildmatch(kindpats, globsuffix, root): '''Return regexp string and a matcher function for kindpats. globsuffix is appended to the regexp of globs.''' matchfuncs = []
--- a/mercurial/merge.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/merge.py Tue Feb 19 21:55:05 2019 -0800 @@ -1186,9 +1186,6 @@ diff = m1.diff(m2, match=matcher) - if matcher is None: - matcher = matchmod.always('', '') - actions = {} for f, ((n1, fl1), (n2, fl2)) in diff.iteritems(): if n1 and n2: # file exists on both local and remote side @@ -1502,15 +1499,15 @@ # If a file or directory exists with the same name, back that # up. Otherwise, look to see if there is a file that conflicts # with a directory this file is in, and if so, back that up. - absf = repo.wjoin(f) + conflicting = f if not repo.wvfs.lexists(f): for p in util.finddirs(f): if repo.wvfs.isfileorlink(p): - absf = repo.wjoin(p) + conflicting = p break - orig = scmutil.origpath(ui, repo, absf) - if repo.wvfs.lexists(absf): - util.rename(absf, orig) + if repo.wvfs.lexists(conflicting): + orig = scmutil.backuppath(ui, repo, conflicting) + util.rename(repo.wjoin(conflicting), orig) wctx[f].clearunknown() atomictemp = ui.configbool("experimental", "update.atomic-file") wctx[f].write(fctx(f).data(), flags, backgroundclose=True,
--- a/mercurial/minirst.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/minirst.py Tue Feb 19 21:55:05 2019 -0800 @@ -641,7 +641,6 @@ def parse(text, indent=0, keep=None, admonitions=None): """Parse text into a list of blocks""" - pruned = [] blocks = findblocks(text) for b in blocks: b['indent'] += indent @@ -736,7 +735,6 @@ '''return a list of (section path, nesting level, blocks) tuples''' nest = "" names = () - level = 0 secs = [] def getname(b):
--- a/mercurial/mpatch.c Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/mpatch.c Tue Feb 19 21:55:05 2019 -0800 @@ -41,8 +41,9 @@ { struct mpatch_flist *a = NULL; - if (size < 1) + if (size < 1) { size = 1; + } a = (struct mpatch_flist *)malloc(sizeof(struct mpatch_flist)); if (a) { @@ -110,10 +111,12 @@ while (s != src->tail) { int soffset = s->start; - if (!safeadd(offset, &soffset)) + if (!safeadd(offset, &soffset)) { break; /* add would overflow, oh well */ - if (soffset >= cut) + } + if (soffset >= cut) { break; /* we've gone far enough */ + } postend = offset; if (!safeadd(s->start, &postend) || @@ -139,11 +142,13 @@ if (!safesub(offset, &c)) { break; } - if (s->end < c) + if (s->end < c) { c = s->end; + } l = cut - offset - s->start; - if (s->len < l) + if (s->len < l) { l = s->len; + } offset += s->start + l - c; @@ -176,8 +181,9 @@ if (!safeadd(offset, &cmpcut)) { break; } - if (cmpcut >= cut) + if (cmpcut >= cut) { break; + } postend = offset; if (!safeadd(s->start, &postend)) { @@ -205,11 +211,13 @@ if (!safesub(offset, &c)) { break; } - if (s->end < c) + if (s->end < c) { c = s->end; + } l = cut - offset - s->start; - if (s->len < l) + if (s->len < l) { l = s->len; + } offset += s->start + l - c; s->start = c; @@ -233,8 +241,9 @@ struct mpatch_frag *bh, *ct; int offset = 0, post; - if (a && b) + if (a && b) { c = lalloc((lsize(a) + lsize(b)) * 2); + } if (c) { @@ -284,8 +293,9 @@ /* assume worst case size, we won't have many of these lists */ l = lalloc(len / 12 + 1); - if (!l) + if (!l) { return MPATCH_ERR_NO_MEM; + } lt = l->tail; @@ -295,8 +305,9 @@ lt->start = getbe32(bin + pos); lt->end = getbe32(bin + pos + 4); lt->len = getbe32(bin + pos + 8); - if (lt->start < 0 || lt->start > lt->end || lt->len < 0) + if (lt->start < 0 || lt->start > lt->end || lt->len < 0) { break; /* sanity check */ + } if (!safeadd(12, &pos)) { break; }
--- a/mercurial/narrowspec.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/narrowspec.py Tue Feb 19 21:55:05 2019 -0800 @@ -127,7 +127,7 @@ # Passing empty include and empty exclude to matchmod.match() # gives a matcher that matches everything, so explicitly use # the nevermatcher. - return matchmod.never(root, '') + return matchmod.never() return matchmod.match(root, '', [], include=include or [], exclude=exclude or [])
--- a/mercurial/obsutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/obsutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -397,14 +397,17 @@ This is a first and basic implementation, with many shortcoming. """ - # lefctx.repo() and rightctx.repo() are the same here - repo = leftctx.repo() - diffopts = diffutil.diffallopts(repo.ui, {'git': True}) + diffopts = diffutil.diffallopts(leftctx.repo().ui, {'git': True}) + # Leftctx or right ctx might be filtered, so we need to use the contexts # with an unfiltered repository to safely compute the diff - leftunfi = repo.unfiltered()[leftctx.rev()] + + # leftctx and rightctx can be from different repository views in case of + # hgsubversion, do don't try to access them from same repository + # rightctx.repo() and leftctx.repo() are not always the same + leftunfi = leftctx._repo.unfiltered()[leftctx.rev()] leftdiff = leftunfi.diff(opts=diffopts) - rightunfi = repo.unfiltered()[rightctx.rev()] + rightunfi = rightctx._repo.unfiltered()[rightctx.rev()] rightdiff = rightunfi.diff(opts=diffopts) left, right = (0, 0)
--- a/mercurial/patch.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/patch.py Tue Feb 19 21:55:05 2019 -0800 @@ -15,7 +15,6 @@ import errno import hashlib import os -import posixpath import re import shutil import zlib @@ -363,7 +362,7 @@ return self._ispatchinga(afile) and self._ispatchingb(bfile) def __repr__(self): - return "<patchmeta %s %r>" % (self.op, self.path) + return r"<patchmeta %s %r>" % (self.op, self.path) def readgitpatch(lr): """extract git-style metadata about patches from <patchname>""" @@ -637,8 +636,8 @@ return self.changed | self.removed # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1 -unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@') -contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)') +unidesc = re.compile(br'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@') +contextdesc = re.compile(br'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)') eolmodes = ['strict', 'crlf', 'lf', 'auto'] class patchfile(object): @@ -752,7 +751,7 @@ for l in x.hunk: lines.append(l) if l[-1:] != '\n': - lines.append("\n\ No newline at end of file\n") + lines.append("\n\\ No newline at end of file\n") self.backend.writerej(self.fname, len(self.rej), self.hunks, lines) def apply(self, h): @@ -1304,7 +1303,7 @@ self.hunk.append(u) l = lr.readline() - if l.startswith('\ '): + if l.startswith(br'\ '): s = self.a[-1][:-1] self.a[-1] = s self.hunk[-1] = s @@ -1322,7 +1321,7 @@ hunki = 1 for x in pycompat.xrange(self.lenb): l = lr.readline() - if l.startswith('\ '): + if l.startswith(br'\ '): # XXX: the only way to hit this is with an invalid line range. # The no-eol marker is not counted in the line range, but I # guess there are diff(1) out there which behave differently. 
@@ -1379,7 +1378,7 @@ def _fixnewline(self, lr): l = lr.readline() - if l.startswith('\ '): + if l.startswith(br'\ '): diffhelper.fixnewline(self.hunk, self.a, self.b) else: lr.push(l) @@ -1448,7 +1447,6 @@ hunk.append(l) return l.rstrip('\r\n') - size = 0 while True: line = getline(lr, self.hunk) if not line: @@ -1610,6 +1608,7 @@ self.headers = [] def addrange(self, limits): + self.addcontext([]) fromstart, fromend, tostart, toend, proc = limits self.fromline = int(fromstart) self.toline = int(tostart) @@ -1630,6 +1629,8 @@ if self.context: self.before = self.context self.context = [] + if self.hunk: + self.addcontext([]) self.hunk = hunk def newfile(self, hdr): @@ -1903,7 +1904,6 @@ if not gitpatches: raise PatchError(_('failed to synchronize metadata for "%s"') % afile[2:]) - gp = gitpatches[-1] newfile = True elif x.startswith('---'): # check for a unified diff @@ -2238,8 +2238,8 @@ difffeatureopts = diffutil.difffeatureopts def diff(repo, node1=None, node2=None, match=None, changes=None, - opts=None, losedatafn=None, prefix='', relroot='', copy=None, - hunksfilterfn=None): + opts=None, losedatafn=None, pathfn=None, copy=None, + copysourcematch=None, hunksfilterfn=None): '''yields diff of changes to files between two nodes, or node and working directory. @@ -2263,14 +2263,22 @@ copy, if not empty, should contain mappings {dst@y: src@x} of copy information. + if copysourcematch is not None, then copy sources will be filtered by this + matcher + hunksfilterfn, if not None, should be a function taking a filectx and hunks generator that may yield filtered hunks. 
''' + if not node1 and not node2: + node1 = repo.dirstate.p1() + + ctx1 = repo[node1] + ctx2 = repo[node2] + for fctx1, fctx2, hdr, hunks in diffhunks( - repo, node1=node1, node2=node2, - match=match, changes=changes, opts=opts, - losedatafn=losedatafn, prefix=prefix, relroot=relroot, copy=copy, - ): + repo, ctx1=ctx1, ctx2=ctx2, match=match, changes=changes, opts=opts, + losedatafn=losedatafn, pathfn=pathfn, copy=copy, + copysourcematch=copysourcematch): if hunksfilterfn is not None: # If the file has been removed, fctx2 is None; but this should # not occur here since we catch removed files early in @@ -2284,8 +2292,8 @@ if text: yield text -def diffhunks(repo, node1=None, node2=None, match=None, changes=None, - opts=None, losedatafn=None, prefix='', relroot='', copy=None): +def diffhunks(repo, ctx1, ctx2, match=None, changes=None, opts=None, + losedatafn=None, pathfn=None, copy=None, copysourcematch=None): """Yield diff of changes to files in the form of (`header`, `hunks`) tuples where `header` is a list of diff headers and `hunks` is an iterable of (`hunkrange`, `hunklines`) tuples. @@ -2296,9 +2304,6 @@ if opts is None: opts = mdiff.defaultopts - if not node1 and not node2: - node1 = repo.dirstate.p1() - def lrugetfilectx(): cache = {} order = collections.deque() @@ -2315,16 +2320,6 @@ return getfilectx getfilectx = lrugetfilectx() - ctx1 = repo[node1] - ctx2 = repo[node2] - - relfiltered = False - if relroot != '' and match.always(): - # as a special case, create a new matcher with just the relroot - pats = [relroot] - match = scmutil.match(ctx2, pats, default='path') - relfiltered = True - if not changes: changes = ctx1.status(ctx2, match=match) modified, added, removed = changes[:3] @@ -2343,21 +2338,11 @@ if opts.git or opts.upgrade: copy = copies.pathcopies(ctx1, ctx2, match=match) - if relroot is not None: - if not relfiltered: - # XXX this would ideally be done in the matcher, but that is - # generally meant to 'or' patterns, not 'and' them. 
In this case we - # need to 'and' all the patterns from the matcher with relroot. - def filterrel(l): - return [f for f in l if f.startswith(relroot)] - modified = filterrel(modified) - added = filterrel(added) - removed = filterrel(removed) - relfiltered = True - # filter out copies where either side isn't inside the relative root - copy = dict(((dst, src) for (dst, src) in copy.iteritems() - if dst.startswith(relroot) - and src.startswith(relroot))) + if copysourcematch: + # filter out copies where source side isn't inside the matcher + # (copies.pathcopies() already filtered out the destination) + copy = {dst: src for dst, src in copy.iteritems() + if copysourcematch(src)} modifiedset = set(modified) addedset = set(added) @@ -2388,7 +2373,7 @@ def difffn(opts, losedata): return trydiff(repo, revs, ctx1, ctx2, modified, added, removed, - copy, getfilectx, opts, losedata, prefix, relroot) + copy, getfilectx, opts, losedata, pathfn) if opts.upgrade and not opts.git: try: def losedata(fn): @@ -2603,16 +2588,14 @@ yield f1, f2, copyop def trydiff(repo, revs, ctx1, ctx2, modified, added, removed, - copy, getfilectx, opts, losedatafn, prefix, relroot): + copy, getfilectx, opts, losedatafn, pathfn): '''given input data, generate a diff and yield it in blocks If generating a diff would lose data like flags or binary data and losedatafn is not None, it will be called. - relroot is removed and prefix is added to every path in the diff output. - - If relroot is not empty, this function expects every path in modified, - added, removed and copy to start with it.''' + pathfn is applied to every path in the diff output. 
+ ''' def gitindex(text): if not text: @@ -2640,12 +2623,8 @@ gitmode = {'l': '120000', 'x': '100755', '': '100644'} - if relroot != '' and (repo.ui.configbool('devel', 'all-warnings') - or repo.ui.configbool('devel', 'check-relroot')): - for f in modified + added + removed + list(copy) + list(copy.values()): - if f is not None and not f.startswith(relroot): - raise AssertionError( - "file %s doesn't start with relroot %s" % (f, relroot)) + if not pathfn: + pathfn = lambda f: f for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts): content1 = None @@ -2682,10 +2661,8 @@ (f1 and f2 and flag1 != flag2)): losedatafn(f2 or f1) - path1 = f1 or f2 - path2 = f2 or f1 - path1 = posixpath.join(prefix, path1[len(relroot):]) - path2 = posixpath.join(prefix, path2[len(relroot):]) + path1 = pathfn(f1 or f2) + path2 = pathfn(f2 or f1) header = [] if opts.git: header.append('diff --git %s%s %s%s' % @@ -2773,7 +2750,7 @@ return maxfile, maxtotal, addtotal, removetotal, binary def diffstatdata(lines): - diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$') + diffre = re.compile(br'^diff .*-r [a-z0-9]+\s(.*)$') results = [] filename, adds, removes, isbinary = None, 0, 0, False @@ -2808,6 +2785,10 @@ elif (line.startswith('GIT binary patch') or line.startswith('Binary file')): isbinary = True + elif line.startswith('rename from'): + filename = line[12:] + elif line.startswith('rename to'): + filename += ' => %s' % line[10:] addresult() return results
--- a/mercurial/posix.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/posix.py Tue Feb 19 21:55:05 2019 -0800 @@ -583,7 +583,8 @@ """Return the list of members of the group with the given name, KeyError if the group does not exist. """ - return list(grp.getgrnam(name).gr_mem) + name = pycompat.fsdecode(name) + return pycompat.rapply(pycompat.fsencode, list(grp.getgrnam(name).gr_mem)) def spawndetached(args): return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
--- a/mercurial/repository.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/repository.py Tue Feb 19 21:55:05 2019 -0800 @@ -346,8 +346,8 @@ return raise error.CapabilityError( - _('cannot %s; remote repository does not support the %r ' - 'capability') % (purpose, name)) + _('cannot %s; remote repository does not support the ' + '\'%s\' capability') % (purpose, name)) class iverifyproblem(interfaceutil.Interface): """Represents a problem with the integrity of the repository.
--- a/mercurial/revlog.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/revlog.py Tue Feb 19 21:55:05 2019 -0800 @@ -610,6 +610,9 @@ self._pcache = {} try: + # If we are using the native C version, you are in a fun case + # where self.index, self.nodemap and self._nodecaches is the same + # object. self._nodecache.clearcaches() except AttributeError: self._nodecache = {nullid: nullrev} @@ -1337,7 +1340,7 @@ return True def maybewdir(prefix): - return all(c == 'f' for c in prefix) + return all(c == 'f' for c in pycompat.iterbytestr(prefix)) hexnode = hex(node)
--- a/mercurial/revset.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/revset.py Tue Feb 19 21:55:05 2019 -0800 @@ -43,7 +43,7 @@ getinteger = revsetlang.getinteger getboolean = revsetlang.getboolean getlist = revsetlang.getlist -getrange = revsetlang.getrange +getintrange = revsetlang.getintrange getargs = revsetlang.getargs getargsdict = revsetlang.getargsdict @@ -225,24 +225,70 @@ def relationset(repo, subset, x, y, order): raise error.ParseError(_("can't use a relation in this context")) -def generationsrel(repo, subset, x, rel, n, order): - # TODO: support range, rewrite tests, and drop startdepth argument - # from ancestors() and descendants() predicates - if n <= 0: - n = -n - return _ancestors(repo, subset, x, startdepth=n, stopdepth=n + 1) - else: - return _descendants(repo, subset, x, startdepth=n, stopdepth=n + 1) +def _splitrange(a, b): + """Split range with bounds a and b into two ranges at 0 and return two + tuples of numbers for use as startdepth and stopdepth arguments of + revancestors and revdescendants. 
+ + >>> _splitrange(-10, -5) # [-10:-5] + ((5, 11), (None, None)) + >>> _splitrange(5, 10) # [5:10] + ((None, None), (5, 11)) + >>> _splitrange(-10, 10) # [-10:10] + ((0, 11), (0, 11)) + >>> _splitrange(-10, 0) # [-10:0] + ((0, 11), (None, None)) + >>> _splitrange(0, 10) # [0:10] + ((None, None), (0, 11)) + >>> _splitrange(0, 0) # [0:0] + ((0, 1), (None, None)) + >>> _splitrange(1, -1) # [1:-1] + ((None, None), (None, None)) + """ + ancdepths = (None, None) + descdepths = (None, None) + if a == b == 0: + ancdepths = (0, 1) + if a < 0: + ancdepths = (-min(b, 0), -a + 1) + if b > 0: + descdepths = (max(a, 0), b + 1) + return ancdepths, descdepths + +def generationsrel(repo, subset, x, rel, z, order): + # TODO: rewrite tests, and drop startdepth argument from ancestors() and + # descendants() predicates + a, b = getintrange(z, + _('relation subscript must be an integer or a range'), + _('relation subscript bounds must be integers'), + deffirst=-(dagop.maxlogdepth - 1), + deflast=+(dagop.maxlogdepth - 1)) + (ancstart, ancstop), (descstart, descstop) = _splitrange(a, b) + + if ancstart is None and descstart is None: + return baseset() + + revs = getset(repo, fullreposet(repo), x) + if not revs: + return baseset() + + if ancstart is not None and descstart is not None: + s = dagop.revancestors(repo, revs, False, ancstart, ancstop) + s += dagop.revdescendants(repo, revs, False, descstart, descstop) + elif ancstart is not None: + s = dagop.revancestors(repo, revs, False, ancstart, ancstop) + elif descstart is not None: + s = dagop.revdescendants(repo, revs, False, descstart, descstop) + + return subset & s def relsubscriptset(repo, subset, x, y, z, order): # this is pretty basic implementation of 'x#y[z]' operator, still # experimental so undocumented. see the wiki for further ideas. 
# https://www.mercurial-scm.org/wiki/RevsetOperatorPlan rel = getsymbol(y) - n = getinteger(z, _("relation subscript must be an integer")) - if rel in subscriptrelations: - return subscriptrelations[rel](repo, subset, x, rel, n, order) + return subscriptrelations[rel](repo, subset, x, rel, z, order) relnames = [r for r in subscriptrelations.keys() if len(r) > 1] raise error.UnknownIdentifier(rel, relnames) @@ -412,7 +458,7 @@ try: r = cl.parentrevs(r)[0] except error.WdirUnsupported: - r = repo[r].parents()[0].rev() + r = repo[r].p1().rev() ps.add(r) return subset & ps @@ -815,6 +861,43 @@ contentdivergent = obsmod.getrevs(repo, 'contentdivergent') return subset & contentdivergent +@predicate('expectsize(set[, size])', safe=True, takeorder=True) +def expectsize(repo, subset, x, order): + """Return the given revset if size matches the revset size. + Abort if the revset doesn't expect given size. + size can either be an integer range or an integer. + + For example, ``expectsize(0:1, 3:5)`` will abort as revset size is 2 and + 2 is not between 3 and 5 inclusive.""" + + args = getargsdict(x, 'expectsize', 'set size') + minsize = 0 + maxsize = len(repo) + 1 + err = '' + if 'size' not in args or 'set' not in args: + raise error.ParseError(_('invalid set of arguments')) + minsize, maxsize = getintrange(args['size'], + _('expectsize requires a size range' + ' or a positive integer'), + _('size range bounds must be integers'), + minsize, maxsize) + if minsize < 0 or maxsize < 0: + raise error.ParseError(_('negative size')) + rev = getset(repo, fullreposet(repo), args['set'], order=order) + if minsize != maxsize and (len(rev) < minsize or len(rev) > maxsize): + err = _('revset size mismatch.' + ' expected between %d and %d, got %d') % (minsize, maxsize, + len(rev)) + elif minsize == maxsize and len(rev) != minsize: + err = _('revset size mismatch.' 
+ ' expected %d, got %d') % (minsize, len(rev)) + if err: + raise error.RepoLookupError(err) + if order == followorder: + return subset & rev + else: + return rev & subset + @predicate('extdata(source)', safe=False, weight=100) def extdata(repo, subset, x): """Changesets in the specified extdata source. (EXPERIMENTAL)""" @@ -1008,11 +1091,11 @@ # i18n: "followlines" is a keyword msg = _("followlines expects exactly one file") fname = scmutil.parsefollowlinespattern(repo, rev, pat, msg) - # i18n: "followlines" is a keyword - lr = getrange(args['lines'][0], _("followlines expects a line range")) - fromline, toline = [getinteger(a, _("line range bounds must be integers")) - for a in lr] - fromline, toline = util.processlinerange(fromline, toline) + fromline, toline = util.processlinerange( + *getintrange(args['lines'][0], + # i18n: "followlines" is a keyword + _("followlines expects a line number or a range"), + _("line range bounds must be integers"))) fctx = repo[rev].filectx(fname) descend = False @@ -1513,7 +1596,7 @@ try: ps.add(cl.parentrevs(r)[0]) except error.WdirUnsupported: - ps.add(repo[r].parents()[0].rev()) + ps.add(repo[r].p1().rev()) ps -= {node.nullrev} # XXX we should turn this into a baseset instead of a set, smartset may do # some optimizations from the fact this is a baseset. @@ -1632,7 +1715,7 @@ try: ps.add(cl.parentrevs(r)[0]) except error.WdirUnsupported: - ps.add(repo[r].parents()[0].rev()) + ps.add(repo[r].p1().rev()) else: try: parents = cl.parentrevs(r) @@ -2027,7 +2110,7 @@ if len(args) != 0: pat = getstring(args[0], _("subrepo requires a pattern")) - m = matchmod.exact(repo.root, repo.root, ['.hgsubstate']) + m = matchmod.exact(['.hgsubstate']) def submatches(names): k, p, m = stringutil.stringmatcher(pat)
--- a/mercurial/revsetlang.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/revsetlang.py Tue Feb 19 21:55:05 2019 -0800 @@ -240,6 +240,18 @@ return None, None raise error.ParseError(err) +def getintrange(x, err1, err2, deffirst=_notset, deflast=_notset): + """Get [first, last] integer range (both inclusive) from a parsed tree + + If any of the sides omitted, and if no default provided, ParseError will + be raised. + """ + if x and (x[0] == 'string' or x[0] == 'symbol'): + n = getinteger(x, err1) + return n, n + a, b = getrange(x, err1) + return getinteger(a, err2, deffirst), getinteger(b, err2, deflast) + def getargs(x, min, max, err): l = getlist(x) if len(l) < min or (max >= 0 and len(l) > max):
--- a/mercurial/scmutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/scmutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -11,6 +11,7 @@ import glob import hashlib import os +import posixpath import re import subprocess import weakref @@ -231,10 +232,10 @@ ui.error(_("(did you forget to compile extensions?)\n")) elif m in "zlib".split(): ui.error(_("(is your Python install correct?)\n")) - except IOError as inst: - if util.safehasattr(inst, "code"): + except (IOError, OSError) as inst: + if util.safehasattr(inst, "code"): # HTTPError ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst)) - elif util.safehasattr(inst, "reason"): + elif util.safehasattr(inst, "reason"): # URLError or SSLError try: # usually it is in the form (errno, strerror) reason = inst.reason.args[1] except (AttributeError, IndexError): @@ -247,22 +248,15 @@ elif (util.safehasattr(inst, "args") and inst.args and inst.args[0] == errno.EPIPE): pass - elif getattr(inst, "strerror", None): - if getattr(inst, "filename", None): - ui.error(_("abort: %s: %s\n") % ( + elif getattr(inst, "strerror", None): # common IOError or OSError + if getattr(inst, "filename", None) is not None: + ui.error(_("abort: %s: '%s'\n") % ( encoding.strtolocal(inst.strerror), stringutil.forcebytestr(inst.filename))) else: ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror)) - else: + else: # suspicious IOError raise - except OSError as inst: - if getattr(inst, "filename", None) is not None: - ui.error(_("abort: %s: '%s'\n") % ( - encoding.strtolocal(inst.strerror), - stringutil.forcebytestr(inst.filename))) - else: - ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror)) except MemoryError: ui.error(_("abort: out of memory\n")) except SystemExit as inst: @@ -673,19 +667,11 @@ l = revrange(repo, revs) if not l: - first = second = None - elif l.isascending(): - first = l.min() - second = l.max() - elif l.isdescending(): - first = l.max() - second = l.min() - else: - first = l.first() - second = l.last() + 
raise error.Abort(_('empty revision range')) - if first is None: - raise error.Abort(_('empty revision range')) + first = l.first() + second = l.last() + if (first == second and len(revs) >= 2 and not all(revrange(repo, [r]) for r in revs)): raise error.Abort(_('empty revision on one side of range')) @@ -740,6 +726,53 @@ return [] return parents +def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None): + """Return a function that produced paths for presenting to the user. + + The returned function takes a repo-relative path and produces a path + that can be presented in the UI. + + Depending on the value of ui.relative-paths, either a repo-relative or + cwd-relative path will be produced. + + legacyrelativevalue is the value to use if ui.relative-paths=legacy + + If forcerelativevalue is not None, then that value will be used regardless + of what ui.relative-paths is set to. + """ + if forcerelativevalue is not None: + relative = forcerelativevalue + else: + config = repo.ui.config('ui', 'relative-paths') + if config == 'legacy': + relative = legacyrelativevalue + else: + relative = stringutil.parsebool(config) + if relative is None: + raise error.ConfigError( + _("ui.relative-paths is not a boolean ('%s')") % config) + + if relative: + cwd = repo.getcwd() + pathto = repo.pathto + return lambda f: pathto(f, cwd) + elif repo.ui.configbool('ui', 'slash'): + return lambda f: f + else: + return util.localpath + +def subdiruipathfn(subpath, uipathfn): + '''Create a new uipathfn that treats the file as relative to subpath.''' + return lambda f: uipathfn(posixpath.join(subpath, f)) + +def anypats(pats, opts): + '''Checks if any patterns, including --include and --exclude were given. + + Some commands (e.g. addremove) use this condition for deciding whether to + print absolute or relative paths. + ''' + return bool(pats or opts.get('include') or opts.get('exclude')) + def expandpats(pats): '''Expand bare globs when running on windows. 
On posix we assume it already has already been done by sh.''' @@ -764,15 +797,14 @@ '''Return a matcher and the patterns that were used. The matcher will warn about bad matches, unless an alternate badfn callback is provided.''' - if pats == ("",): - pats = [] if opts is None: opts = {} if not globbed and default == 'relpath': pats = expandpats(pats or []) + uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True) def bad(f, msg): - ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg)) + ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg)) if badfn is None: badfn = bad @@ -791,11 +823,11 @@ def matchall(repo): '''Return a matcher that will efficiently match everything.''' - return matchmod.always(repo.root, repo.getcwd()) + return matchmod.always() def matchfiles(repo, files, badfn=None): '''Return a matcher that will efficiently match exactly these files.''' - return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn) + return matchmod.exact(files, badfn=badfn) def parsefollowlinespattern(repo, rev, pat, msg): """Return a file name from `pat` pattern suitable for usage in followlines @@ -820,26 +852,26 @@ return None return vfs.vfs(repo.wvfs.join(origbackuppath)) -def origpath(ui, repo, filepath): - '''customize where .orig files are created +def backuppath(ui, repo, filepath): + '''customize where working copy backup files (.orig files) are created Fetch user defined path from config file: [ui] origbackuppath = <path> Fall back to default (filepath with .orig suffix) if not specified + + filepath is repo-relative + + Returns an absolute path ''' origvfs = getorigvfs(ui, repo) if origvfs is None: - return filepath + ".orig" + return repo.wjoin(filepath + ".orig") - # Convert filepath from an absolute path into a path inside the repo. 
- filepathfromroot = util.normpath(os.path.relpath(filepath, - start=repo.root)) - - origbackupdir = origvfs.dirname(filepathfromroot) + origbackupdir = origvfs.dirname(filepath) if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir): ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir)) # Remove any files that conflict with the backup file's path - for f in reversed(list(util.finddirs(filepathfromroot))): + for f in reversed(list(util.finddirs(filepath))): if origvfs.isfileorlink(f): ui.note(_('removing conflicting file: %s\n') % origvfs.join(f)) @@ -848,12 +880,12 @@ origvfs.makedirs(origbackupdir) - if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot): + if origvfs.isdir(filepath) and not origvfs.islink(filepath): ui.note(_('removing conflicting directory: %s\n') - % origvfs.join(filepathfromroot)) - origvfs.rmtree(filepathfromroot, forcibly=True) + % origvfs.join(filepath)) + origvfs.rmtree(filepath, forcibly=True) - return origvfs.join(filepathfromroot) + return origvfs.join(filepath) class _containsnode(object): """proxy __contains__(node) to container.__contains__ which accepts revs""" @@ -1008,7 +1040,7 @@ repair.delayedstrip(repo.ui, repo, tostrip, operation, backup=backup) -def addremove(repo, matcher, prefix, opts=None): +def addremove(repo, matcher, prefix, uipathfn, opts=None): if opts is None: opts = {} m = matcher @@ -1022,19 +1054,20 @@ similarity /= 100.0 ret = 0 - join = lambda f: os.path.join(prefix, f) wctx = repo[None] for subpath in sorted(wctx.substate): submatch = matchmod.subdirmatcher(subpath, m) if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()): sub = wctx.sub(subpath) + subprefix = repo.wvfs.reljoin(prefix, subpath) + subuipathfn = subdiruipathfn(subpath, uipathfn) try: - if sub.addremove(submatch, prefix, opts): + if sub.addremove(submatch, subprefix, subuipathfn, opts): ret = 1 except error.LookupError: repo.ui.status(_("skipping missing subrepository: %s\n") - % 
join(subpath)) + % uipathfn(subpath)) rejected = [] def badfn(f, msg): @@ -1052,15 +1085,15 @@ for abs in sorted(toprint): if repo.ui.verbose or not m.exact(abs): if abs in unknownset: - status = _('adding %s\n') % m.uipath(abs) + status = _('adding %s\n') % uipathfn(abs) label = 'ui.addremove.added' else: - status = _('removing %s\n') % m.uipath(abs) + status = _('removing %s\n') % uipathfn(abs) label = 'ui.addremove.removed' repo.ui.status(status, label=label) renames = _findrenames(repo, m, added + unknown, removed + deleted, - similarity) + similarity, uipathfn) if not dry_run: _markchanges(repo, unknown + forgotten, deleted, renames) @@ -1089,8 +1122,12 @@ status = _('removing %s\n') % abs repo.ui.status(status) + # TODO: We should probably have the caller pass in uipathfn and apply it to + # the messages above too. legacyrelativevalue=True is consistent with how + # it used to work. + uipathfn = getuipathfn(repo, legacyrelativevalue=True) renames = _findrenames(repo, m, added + unknown, removed + deleted, - similarity) + similarity, uipathfn) _markchanges(repo, unknown + forgotten, deleted, renames) @@ -1129,7 +1166,7 @@ return added, unknown, deleted, removed, forgotten -def _findrenames(repo, matcher, added, removed, similarity): +def _findrenames(repo, matcher, added, removed, similarity, uipathfn): '''Find renames from removed files to added ones.''' renames = {} if similarity > 0: @@ -1139,7 +1176,7 @@ or not matcher.exact(new)): repo.ui.status(_('recording removal of %s as rename to %s ' '(%d%% similar)\n') % - (matcher.rel(old), matcher.rel(new), + (uipathfn(old), uipathfn(new), score * 100)) renames[new] = old return renames
--- a/mercurial/sparse.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/sparse.py Tue Feb 19 21:55:05 2019 -0800 @@ -264,7 +264,7 @@ """Returns a matcher that returns true for any of the forced includes before testing against the actual matcher.""" kindpats = [('path', include, '') for include in includes] - includematcher = matchmod.includematcher('', '', kindpats) + includematcher = matchmod.includematcher('', kindpats) return matchmod.unionmatcher([includematcher, matcher]) def matcher(repo, revs=None, includetemp=True): @@ -277,7 +277,7 @@ """ # If sparse isn't enabled, sparse matcher matches everything. if not enabled: - return matchmod.always(repo.root, '') + return matchmod.always() if not revs or revs == [None]: revs = [repo.changelog.rev(node) @@ -305,7 +305,7 @@ pass if not matchers: - result = matchmod.always(repo.root, '') + result = matchmod.always() elif len(matchers) == 1: result = matchers[0] else: @@ -336,7 +336,7 @@ if branchmerge: # If we're merging, use the wctx filter, since we're merging into # the wctx. - sparsematch = matcher(repo, [wctx.parents()[0].rev()]) + sparsematch = matcher(repo, [wctx.p1().rev()]) else: # If we're updating, use the target context's filter, since we're # moving to the target context.
--- a/mercurial/sslutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/sslutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -430,6 +430,7 @@ 'error)\n')) except ssl.SSLError: pass + # Try to print more helpful error messages for known failures. if util.safehasattr(e, 'reason'): # This error occurs when the client and server don't share a @@ -437,7 +438,7 @@ # outright. Hopefully the reason for this error is that we require # TLS 1.1+ and the server only supports TLS 1.0. Whatever the # reason, try to emit an actionable warning. - if e.reason == 'UNSUPPORTED_PROTOCOL': + if e.reason == r'UNSUPPORTED_PROTOCOL': # We attempted TLS 1.0+. if settings['protocolui'] == 'tls1.0': # We support more than just TLS 1.0+. If this happens, @@ -453,7 +454,7 @@ 'server; see ' 'https://mercurial-scm.org/wiki/SecureConnections ' 'for more info)\n') % ( - serverhostname, + pycompat.bytesurl(serverhostname), ', '.join(sorted(supportedprotocols)))) else: ui.warn(_( @@ -462,7 +463,8 @@ 'supports TLS 1.0 because it has known security ' 'vulnerabilities; see ' 'https://mercurial-scm.org/wiki/SecureConnections ' - 'for more info)\n') % serverhostname) + 'for more info)\n') % + pycompat.bytesurl(serverhostname)) else: # We attempted TLS 1.1+. We can only get here if the client # supports the configured protocol. 
So the likely reason is @@ -472,19 +474,20 @@ '(could not negotiate a common security protocol (%s+) ' 'with %s; the likely cause is Mercurial is configured ' 'to be more secure than the server can support)\n') % ( - settings['protocolui'], serverhostname)) + settings['protocolui'], + pycompat.bytesurl(serverhostname))) ui.warn(_('(consider contacting the operator of this ' 'server and ask them to support modern TLS ' 'protocol versions; or, set ' 'hostsecurity.%s:minimumprotocol=tls1.0 to allow ' 'use of legacy, less secure protocols when ' 'communicating with this server)\n') % - serverhostname) + pycompat.bytesurl(serverhostname)) ui.warn(_( '(see https://mercurial-scm.org/wiki/SecureConnections ' 'for more info)\n')) - elif (e.reason == 'CERTIFICATE_VERIFY_FAILED' and + elif (e.reason == r'CERTIFICATE_VERIFY_FAILED' and pycompat.iswindows): ui.warn(_('(the full certificate chain may not be available '
--- a/mercurial/statichttprepo.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/statichttprepo.py Tue Feb 19 21:55:05 2019 -0800 @@ -13,12 +13,14 @@ from .i18n import _ from . import ( + branchmap, changelog, error, localrepo, manifest, namespaces, pathutil, + pycompat, url, util, vfs as vfsmod, @@ -44,12 +46,12 @@ def seek(self, pos): self.pos = pos def read(self, bytes=None): - req = urlreq.request(self.url) + req = urlreq.request(pycompat.strurl(self.url)) end = '' if bytes: end = self.pos + bytes - 1 if self.pos or end: - req.add_header('Range', 'bytes=%d-%s' % (self.pos, end)) + req.add_header(r'Range', r'bytes=%d-%s' % (self.pos, end)) try: f = self.opener.open(req) @@ -59,7 +61,7 @@ num = inst.code == 404 and errno.ENOENT or None raise IOError(num, inst) except urlerr.urlerror as inst: - raise IOError(None, inst.reason[1]) + raise IOError(None, inst.reason) if code == 200: # HTTPRangeHandler does nothing if remote does not support @@ -192,7 +194,7 @@ self.changelog = changelog.changelog(self.svfs) self._tags = None self.nodetagscache = None - self._branchcaches = {} + self._branchcaches = branchmap.BranchMapCache() self._revbranchcache = None self.encodepats = None self.decodepats = None
--- a/mercurial/statprof.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/statprof.py Tue Feb 19 21:55:05 2019 -0800 @@ -816,9 +816,6 @@ id2stack[-1].update(parent=parent) return myid - def endswith(a, b): - return list(a)[-len(b):] == list(b) - # The sampling profiler can sample multiple times without # advancing the clock, potentially causing the Chrome trace viewer # to render single-pixel columns that we cannot zoom in on. We @@ -858,9 +855,6 @@ # events given only stack snapshots. for sample in data.samples: - tos = sample.stack[0] - name = tos.function - path = simplifypath(tos.path) stack = tuple((('%s:%d' % (simplifypath(frame.path), frame.lineno), frame.function) for frame in sample.stack)) qstack = collections.deque(stack)
--- a/mercurial/streamclone.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/streamclone.py Tue Feb 19 21:55:05 2019 -0800 @@ -13,7 +13,6 @@ from .i18n import _ from . import ( - branchmap, cacheutil, error, narrowspec, @@ -174,7 +173,7 @@ repo._writerequirements() if rbranchmap: - branchmap.replacecache(repo, rbranchmap) + repo._branchcaches.replace(repo, rbranchmap) repo.invalidate()
--- a/mercurial/subrepo.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/subrepo.py Tue Feb 19 21:55:05 2019 -0800 @@ -11,7 +11,6 @@ import errno import hashlib import os -import posixpath import re import stat import subprocess @@ -288,10 +287,10 @@ """ raise NotImplementedError - def add(self, ui, match, prefix, explicitonly, **opts): + def add(self, ui, match, prefix, uipathfn, explicitonly, **opts): return [] - def addremove(self, matcher, prefix, opts): + def addremove(self, matcher, prefix, uipathfn, opts): self.ui.warn("%s: %s" % (prefix, _("addremove is not supported"))) return 1 @@ -324,7 +323,7 @@ def matchfileset(self, expr, badfn=None): """Resolve the fileset expression for this repo""" - return matchmod.nevermatcher(self.wvfs.base, '', badfn=badfn) + return matchmod.never(badfn=badfn) def printfiles(self, ui, m, fm, fmt, subrepos): """handle the files command for this subrepo""" @@ -344,8 +343,8 @@ flags = self.fileflags(name) mode = 'x' in flags and 0o755 or 0o644 symlink = 'l' in flags - archiver.addfile(prefix + self._path + '/' + name, - mode, symlink, self.filedata(name, decode)) + archiver.addfile(prefix + name, mode, symlink, + self.filedata(name, decode)) progress.increment() progress.complete() return total @@ -356,10 +355,10 @@ matched by the match function ''' - def forget(self, match, prefix, dryrun, interactive): + def forget(self, match, prefix, uipathfn, dryrun, interactive): return ([], []) - def removefiles(self, matcher, prefix, after, force, subrepos, + def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos, dryrun, warnings): """remove the matched files from the subrepository and the filesystem, possibly by force and/or after the file has been removed from the @@ -517,20 +516,18 @@ self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines))) @annotatesubrepoerror - def add(self, ui, match, prefix, explicitonly, **opts): - return cmdutil.add(ui, self._repo, match, - self.wvfs.reljoin(prefix, self._path), + def 
add(self, ui, match, prefix, uipathfn, explicitonly, **opts): + return cmdutil.add(ui, self._repo, match, prefix, uipathfn, explicitonly, **opts) @annotatesubrepoerror - def addremove(self, m, prefix, opts): + def addremove(self, m, prefix, uipathfn, opts): # In the same way as sub directories are processed, once in a subrepo, # always entry any of its subrepos. Don't corrupt the options that will # be used to process sibling subrepos however. opts = copy.copy(opts) opts['subrepos'] = True - return scmutil.addremove(self._repo, m, - self.wvfs.reljoin(prefix, self._path), opts) + return scmutil.addremove(self._repo, m, prefix, uipathfn, opts) @annotatesubrepoerror def cat(self, match, fm, fntemplate, prefix, **opts): @@ -559,10 +556,9 @@ # in hex format if node2 is not None: node2 = node.bin(node2) - logcmdutil.diffordiffstat(ui, self._repo, diffopts, - node1, node2, match, - prefix=posixpath.join(prefix, self._path), - listsubrepos=True, **opts) + logcmdutil.diffordiffstat(ui, self._repo, diffopts, node1, node2, + match, prefix=prefix, listsubrepos=True, + **opts) except error.RepoLookupError as inst: self.ui.warn(_('warning: error "%s" in subrepository "%s"\n') % (inst, subrelpath(self))) @@ -581,7 +577,8 @@ for subpath in ctx.substate: s = subrepo(ctx, subpath, True) submatch = matchmod.subdirmatcher(subpath, match) - total += s.archive(archiver, prefix + self._path + '/', submatch, + subprefix = prefix + subpath + '/' + total += s.archive(archiver, subprefix, submatch, decode) return total @@ -810,12 +807,11 @@ @annotatesubrepoerror def matchfileset(self, expr, badfn=None): - repo = self._repo if self._ctx.rev() is None: - ctx = repo[None] + ctx = self._repo[None] else: rev = self._state[1] - ctx = repo[rev] + ctx = self._repo[rev] matchers = [ctx.matchfileset(expr, badfn=badfn)] @@ -824,8 +820,7 @@ try: sm = sub.matchfileset(expr, badfn=badfn) - pm = matchmod.prefixdirmatcher(repo.root, repo.getcwd(), - subpath, sm, badfn=badfn) + pm = 
matchmod.prefixdirmatcher(subpath, sm, badfn=badfn) matchers.append(pm) except error.LookupError: self.ui.status(_("skipping missing subrepository: %s\n") @@ -839,16 +834,14 @@ return ctx.walk(match) @annotatesubrepoerror - def forget(self, match, prefix, dryrun, interactive): - return cmdutil.forget(self.ui, self._repo, match, - self.wvfs.reljoin(prefix, self._path), + def forget(self, match, prefix, uipathfn, dryrun, interactive): + return cmdutil.forget(self.ui, self._repo, match, prefix, uipathfn, True, dryrun=dryrun, interactive=interactive) @annotatesubrepoerror - def removefiles(self, matcher, prefix, after, force, subrepos, + def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos, dryrun, warnings): - return cmdutil.remove(self.ui, self._repo, matcher, - self.wvfs.reljoin(prefix, self._path), + return cmdutil.remove(self.ui, self._repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun) @annotatesubrepoerror @@ -971,9 +964,8 @@ p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd), bufsize=-1, close_fds=procutil.closefds, stdout=subprocess.PIPE, stderr=subprocess.PIPE, - universal_newlines=True, env=procutil.tonativeenv(env), **extrakw) - stdout, stderr = p.communicate() + stdout, stderr = map(util.fromnativeeol, p.communicate()) stderr = stderr.strip() if not failok: if p.returncode: @@ -1000,13 +992,14 @@ # both. We used to store the working directory one. 
output, err = self._svncommand(['info', '--xml']) doc = xml.dom.minidom.parseString(output) - entries = doc.getElementsByTagName('entry') + entries = doc.getElementsByTagName(r'entry') lastrev, rev = '0', '0' if entries: - rev = str(entries[0].getAttribute('revision')) or '0' - commits = entries[0].getElementsByTagName('commit') + rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0' + commits = entries[0].getElementsByTagName(r'commit') if commits: - lastrev = str(commits[0].getAttribute('revision')) or '0' + lastrev = pycompat.bytestr( + commits[0].getAttribute(r'revision')) or '0' return (lastrev, rev) def _wcrev(self): @@ -1021,19 +1014,19 @@ output, err = self._svncommand(['status', '--xml']) externals, changes, missing = [], [], [] doc = xml.dom.minidom.parseString(output) - for e in doc.getElementsByTagName('entry'): - s = e.getElementsByTagName('wc-status') + for e in doc.getElementsByTagName(r'entry'): + s = e.getElementsByTagName(r'wc-status') if not s: continue - item = s[0].getAttribute('item') - props = s[0].getAttribute('props') - path = e.getAttribute('path') - if item == 'external': + item = s[0].getAttribute(r'item') + props = s[0].getAttribute(r'props') + path = e.getAttribute(r'path').encode('utf8') + if item == r'external': externals.append(path) - elif item == 'missing': + elif item == r'missing': missing.append(path) - if (item not in ('', 'normal', 'unversioned', 'external') - or props not in ('', 'none', 'normal')): + if (item not in (r'', r'normal', r'unversioned', r'external') + or props not in (r'', r'none', r'normal')): changes.append(path) for path in changes: for ext in externals: @@ -1154,14 +1147,14 @@ output = self._svncommand(['list', '--recursive', '--xml'])[0] doc = xml.dom.minidom.parseString(output) paths = [] - for e in doc.getElementsByTagName('entry'): - kind = pycompat.bytestr(e.getAttribute('kind')) + for e in doc.getElementsByTagName(r'entry'): + kind = pycompat.bytestr(e.getAttribute(r'kind')) if kind != 
'file': continue - name = ''.join(c.data for c - in e.getElementsByTagName('name')[0].childNodes - if c.nodeType == c.TEXT_NODE) - paths.append(name.encode('utf-8')) + name = r''.join(c.data for c + in e.getElementsByTagName(r'name')[0].childNodes + if c.nodeType == c.TEXT_NODE) + paths.append(name.encode('utf8')) return paths def filedata(self, name, decode): @@ -1596,7 +1589,7 @@ return False @annotatesubrepoerror - def add(self, ui, match, prefix, explicitonly, **opts): + def add(self, ui, match, prefix, uipathfn, explicitonly, **opts): if self._gitmissing(): return [] @@ -1620,7 +1613,7 @@ if exact: command.append("-f") #should be added, even if ignored if ui.verbose or not exact: - ui.status(_('adding %s\n') % match.rel(f)) + ui.status(_('adding %s\n') % uipathfn(f)) if f in tracked: # hg prints 'adding' even if already tracked if exact: @@ -1630,7 +1623,7 @@ self._gitcommand(command + [f]) for f in rejected: - ui.warn(_("%s already tracked!\n") % match.abs(f)) + ui.warn(_("%s already tracked!\n") % uipathfn(f)) return rejected @@ -1673,14 +1666,14 @@ for info in tar: if info.isdir(): continue - if match and not match(info.name): + bname = pycompat.fsencode(info.name) + if match and not match(bname): continue if info.issym(): data = info.linkname else: data = tar.extractfile(info).read() - archiver.addfile(prefix + self._path + '/' + info.name, - info.mode, info.issym(), data) + archiver.addfile(prefix + bname, info.mode, info.issym(), data) total += 1 progress.increment() progress.complete() @@ -1783,14 +1776,12 @@ # for Git, this also implies '-p' cmd.append('-U%d' % diffopts.context) - gitprefix = self.wvfs.reljoin(prefix, self._path) - if diffopts.noprefix: - cmd.extend(['--src-prefix=%s/' % gitprefix, - '--dst-prefix=%s/' % gitprefix]) + cmd.extend(['--src-prefix=%s/' % prefix, + '--dst-prefix=%s/' % prefix]) else: - cmd.extend(['--src-prefix=a/%s/' % gitprefix, - '--dst-prefix=b/%s/' % gitprefix]) + cmd.extend(['--src-prefix=a/%s/' % prefix, + 
'--dst-prefix=b/%s/' % prefix]) if diffopts.ignorews: cmd.append('--ignore-all-space') @@ -1823,15 +1814,15 @@ if not opts.get(r'no_backup'): status = self.status(None) names = status.modified - origvfs = scmutil.getorigvfs(self.ui, self._subparent) - if origvfs is None: - origvfs = self.wvfs for name in names: - bakname = scmutil.origpath(self.ui, self._subparent, name) + # backuppath() expects a path relative to the parent repo (the + # repo that ui.origbackuppath is relative to) + parentname = os.path.join(self._path, name) + bakname = scmutil.backuppath(self.ui, self._subparent, + parentname) self.ui.note(_('saving current version of %s as %s\n') % - (name, bakname)) - name = self.wvfs.join(name) - origvfs.rename(name, bakname) + (name, os.path.relpath(bakname))) + util.rename(self.wvfs.join(name), bakname) if not opts.get(r'dry_run'): self.get(substate, overwrite=True)
--- a/mercurial/subrepoutil.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/subrepoutil.py Tue Feb 19 21:55:05 2019 -0800 @@ -145,7 +145,6 @@ promptssrc = filemerge.partextras(labels) for s, l in sorted(s1.iteritems()): - prompts = None a = sa.get(s, nullstate) ld = l # local state with possible dirty flag for compares if wctx.sub(s).dirty(): @@ -218,7 +217,6 @@ wctx.sub(s).remove() for s, r in sorted(s2.items()): - prompts = None if s in s1: continue elif s not in sa:
--- a/mercurial/tags.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/tags.py Tue Feb 19 21:55:05 2019 -0800 @@ -536,7 +536,7 @@ date: date tuple to use if committing''' if not local: - m = matchmod.exact(repo.root, '', ['.hgtags']) + m = matchmod.exact(['.hgtags']) if any(repo.status(match=m, unknown=True, ignored=True)): raise error.Abort(_('working copy of .hgtags is changed'), hint=_('please commit .hgtags manually')) @@ -610,7 +610,7 @@ if '.hgtags' not in repo.dirstate: repo[None].add(['.hgtags']) - m = matchmod.exact(repo.root, '', ['.hgtags']) + m = matchmod.exact(['.hgtags']) tagnode = repo.commit(message, user, date, extra=extra, match=m, editor=editor)
--- a/mercurial/templatefuncs.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/templatefuncs.py Tue Feb 19 21:55:05 2019 -0800 @@ -295,6 +295,39 @@ hint = _("get() expects a dict as first argument") raise error.ParseError(bytes(err), hint=hint) +@templatefunc('config(section, name[, default])', requires={'ui'}) +def config(context, mapping, args): + """Returns the requested hgrc config option as a string.""" + fn = context.resource(mapping, 'ui').config + return _config(context, mapping, args, fn, evalstring) + +@templatefunc('configbool(section, name[, default])', requires={'ui'}) +def configbool(context, mapping, args): + """Returns the requested hgrc config option as a boolean.""" + fn = context.resource(mapping, 'ui').configbool + return _config(context, mapping, args, fn, evalboolean) + +@templatefunc('configint(section, name[, default])', requires={'ui'}) +def configint(context, mapping, args): + """Returns the requested hgrc config option as an integer.""" + fn = context.resource(mapping, 'ui').configint + return _config(context, mapping, args, fn, evalinteger) + +def _config(context, mapping, args, configfn, defaultfn): + if not (2 <= len(args) <= 3): + raise error.ParseError(_("config expects two or three arguments")) + + # The config option can come from any section, though we specifically + # reserve the [templateconfig] section for dynamically defining options + # for this function without also requiring an extension. + section = evalstringliteral(context, mapping, args[0]) + name = evalstringliteral(context, mapping, args[1]) + if len(args) == 3: + default = defaultfn(context, mapping, args[2]) + return configfn(section, name, default) + else: + return configfn(section, name) + @templatefunc('if(expr, then[, else])') def if_(context, mapping, args): """Conditionally execute based on the result of
--- a/mercurial/templatekw.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/templatekw.py Tue Feb 19 21:55:05 2019 -0800 @@ -554,6 +554,17 @@ return _hybrid(f, namespaces, makemap, pycompat.identity) +@templatekeyword('negrev', requires={'repo', 'ctx'}) +def shownegrev(context, mapping): + """Integer. The repository-local changeset negative revision number, + which counts in the opposite direction.""" + ctx = context.resource(mapping, 'ctx') + rev = ctx.rev() + if rev is None or rev < 0: # wdir() or nullrev? + return None + repo = context.resource(mapping, 'repo') + return rev - len(repo) + @templatekeyword('node', requires={'ctx'}) def shownode(context, mapping): """String. The changeset identification hash, as a 40 hexadecimal @@ -796,7 +807,7 @@ substate = ctx.substate if not substate: return compatlist(context, mapping, 'subrepo', []) - psubstate = ctx.parents()[0].substate or {} + psubstate = ctx.p1().substate or {} subrepos = [] for sub in substate: if sub not in psubstate or substate[sub] != psubstate[sub]:
--- a/mercurial/thirdparty/attr/_make.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/thirdparty/attr/_make.py Tue Feb 19 21:55:05 2019 -0800 @@ -56,7 +56,7 @@ def attr(default=NOTHING, validator=None, repr=True, cmp=True, hash=None, init=True, convert=None, metadata={}): - """ + r""" Create a new attribute on a class. .. warning::
--- a/mercurial/thirdparty/attr/filters.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/thirdparty/attr/filters.py Tue Feb 19 21:55:05 2019 -0800 @@ -19,7 +19,7 @@ def include(*what): - """ + r""" Whitelist *what*. :param what: What to whitelist. @@ -36,7 +36,7 @@ def exclude(*what): - """ + r""" Blacklist *what*. :param what: What to blacklist.
--- a/mercurial/transaction.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/transaction.py Tue Feb 19 21:55:05 2019 -0800 @@ -89,7 +89,7 @@ except (IOError, OSError) as inst: if inst.errno != errno.ENOENT: raise - except (IOError, OSError, error.Abort) as inst: + except (IOError, OSError, error.Abort): if not c: raise @@ -101,7 +101,7 @@ for f in backupfiles: if opener.exists(f): opener.unlink(f) - except (IOError, OSError, error.Abort) as inst: + except (IOError, OSError, error.Abort): # only pure backup file remains, it is sage to ignore any error pass
--- a/mercurial/ui.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/ui.py Tue Feb 19 21:55:05 2019 -0800 @@ -58,12 +58,12 @@ statuscopies = yes # Prefer curses UIs when available. Revert to plain-text with `text`. interface = curses +# Make compatible commands emit cwd-relative paths by default. +relative-paths = yes [commands] # Grep working directory by default. grep.all-files = True -# Make `hg status` emit cwd-relative paths by default. -status.relative = yes # Refuse to perform an `hg update` that would cause a file content merge update.check = noconflict # Show conflicts information in `hg status` @@ -566,8 +566,6 @@ candidate = self._data(untrusted).get(s, n, None) if candidate is not None: value = candidate - section = s - name = n break if self.debugflag and not untrusted and self._reportuntrusted: @@ -2053,7 +2051,11 @@ This is its own function so that extensions can change the definition of 'valid' in this case (like when pulling from a git repo into a hg one).""" - return os.path.isdir(os.path.join(path, '.hg')) + try: + return os.path.isdir(os.path.join(path, '.hg')) + # Python 2 may return TypeError. Python 3, ValueError. + except (TypeError, ValueError): + return False @property def suboptions(self):
--- a/mercurial/url.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/url.py Tue Feb 19 21:55:05 2019 -0800 @@ -58,11 +58,14 @@ return self.passwddb.add_password(realm, uri, user, passwd) def find_user_password(self, realm, authuri): + assert isinstance(realm, (type(None), str)) + assert isinstance(authuri, str) authinfo = self.passwddb.find_user_password(realm, authuri) user, passwd = authinfo + user, passwd = pycompat.bytesurl(user), pycompat.bytesurl(passwd) if user and passwd: self._writedebug(user, passwd) - return (user, passwd) + return (pycompat.strurl(user), pycompat.strurl(passwd)) if not user or not passwd: res = httpconnectionmod.readauthforuri(self.ui, authuri, user) @@ -90,7 +93,7 @@ self.passwddb.add_password(realm, authuri, user, passwd) self._writedebug(user, passwd) - return (user, passwd) + return (pycompat.strurl(user), pycompat.strurl(passwd)) def _writedebug(self, user, passwd): msg = _('http auth: user %s, password %s\n') @@ -128,9 +131,11 @@ else: self.no_list = no_list - proxyurl = bytes(proxy) - proxies = {'http': proxyurl, 'https': proxyurl} - ui.debug('proxying through %s\n' % util.hidepassword(proxyurl)) + # Keys and values need to be str because the standard library + # expects them to be. + proxyurl = str(proxy) + proxies = {r'http': proxyurl, r'https': proxyurl} + ui.debug('proxying through %s\n' % util.hidepassword(bytes(proxy))) else: proxies = {} @@ -138,7 +143,7 @@ self.ui = ui def proxy_open(self, req, proxy, type_): - host = urllibcompat.gethost(req).split(':')[0] + host = pycompat.bytesurl(urllibcompat.gethost(req)).split(':')[0] for e in self.no_list: if host == e: return None @@ -176,20 +181,20 @@ return proxyres return keepalive.HTTPConnection.getresponse(self) -# general transaction handler to support different ways to handle -# HTTPS proxying before and after Python 2.6.3. +# Large parts of this function have their origin from before Python 2.6 +# and could potentially be removed. 
def _generic_start_transaction(handler, h, req): - tunnel_host = getattr(req, '_tunnel_host', None) + tunnel_host = req._tunnel_host if tunnel_host: - if tunnel_host[:7] not in ['http://', 'https:/']: - tunnel_host = 'https://' + tunnel_host + if tunnel_host[:7] not in [r'http://', r'https:/']: + tunnel_host = r'https://' + tunnel_host new_tunnel = True else: tunnel_host = urllibcompat.getselector(req) new_tunnel = False if new_tunnel or tunnel_host == urllibcompat.getfullurl(req): # has proxy - u = util.url(tunnel_host) + u = util.url(pycompat.bytesurl(tunnel_host)) if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS h.realhostport = ':'.join([u.host, (u.port or '443')]) h.headers = req.headers.copy() @@ -202,7 +207,7 @@ def _generic_proxytunnel(self): proxyheaders = dict( [(x, self.headers[x]) for x in self.headers - if x.lower().startswith('proxy-')]) + if x.lower().startswith(r'proxy-')]) self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport) for header in proxyheaders.iteritems(): self.send('%s: %s\r\n' % header) @@ -211,9 +216,14 @@ # majority of the following code is duplicated from # httplib.HTTPConnection as there are no adequate places to # override functions to provide the needed functionality + # strict was removed in Python 3.4. + kwargs = {} + if not pycompat.ispy3: + kwargs['strict'] = self.strict + res = self.response_class(self.sock, - strict=self.strict, - method=self._method) + method=self._method, + **kwargs) while True: version, status, reason = res._read_status()
--- a/mercurial/util.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/util.py Tue Feb 19 21:55:05 2019 -0800 @@ -789,6 +789,12 @@ res)) data = dest[0:res] if res is not None else b'' + + # _writedata() uses "in" operator and is confused by memoryview because + # characters are ints on Python 3. + if isinstance(data, memoryview): + data = data.tobytes() + self._writedata(data) def write(self, res, data):
--- a/mercurial/wireprotov1server.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/wireprotov1server.py Tue Feb 19 21:55:05 2019 -0800 @@ -7,6 +7,7 @@ from __future__ import absolute_import +import binascii import os from .i18n import _ @@ -344,7 +345,7 @@ one specific branch of many. """ def decodehexstring(s): - return set([h.decode('hex') for h in s.split(';')]) + return set([binascii.unhexlify(h) for h in s.split(';')]) manifest = repo.vfs.tryread('pullbundles.manifest') if not manifest: @@ -424,8 +425,6 @@ raise error.Abort(bundle2requiredmain, hint=bundle2requiredhint) - prefercompressed = True - try: clheads = set(repo.changelog.heads()) heads = set(opts.get('heads', set())) @@ -578,7 +577,6 @@ repo.ui.debug('redirecting incoming bundle to %s\n' % tempname) fp = os.fdopen(fd, pycompat.sysstr('wb+')) - r = 0 for p in payload: fp.write(p) fp.seek(0)
--- a/mercurial/wireprotov2peer.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/wireprotov2peer.py Tue Feb 19 21:55:05 2019 -0800 @@ -304,7 +304,7 @@ # TODO tell reactor? self._frameseof = True else: - self._ui.note(_('received %r\n') % frame) + self._ui.debug('received %r\n' % frame) self._processframe(frame) # Also try to read the first redirect. @@ -510,7 +510,7 @@ # Bytestring where each byte is a 0 or 1. raw = next(objs) - return [True if c == '1' else False for c in raw] + return [True if raw[i:i + 1] == b'1' else False for i in range(len(raw))] def decodelistkeys(objs): # Map with bytestring keys and values.
--- a/mercurial/wireprotov2server.py Thu Feb 07 20:50:41 2019 +0900 +++ b/mercurial/wireprotov2server.py Tue Feb 19 21:55:05 2019 -0800 @@ -23,6 +23,7 @@ narrowspec, pycompat, streamclone, + templatefilters, util, wireprotoframing, wireprototypes, @@ -148,8 +149,6 @@ tracker. We then dump the log of all that activity back out to the client. """ - import json - # Reflection APIs have a history of being abused, accidentally disclosing # sensitive data, etc. So we have a config knob. if not ui.configbool('experimental', 'web.api.debugreflect'): @@ -175,12 +174,11 @@ frame.payload)) action, meta = reactor.onframerecv(frame) - states.append(json.dumps((action, meta), sort_keys=True, - separators=(', ', ': '))) + states.append(templatefilters.json((action, meta))) action, meta = reactor.oninputeof() meta['action'] = action - states.append(json.dumps(meta, sort_keys=True, separators=(', ',': '))) + states.append(templatefilters.json(meta)) res.status = b'200 OK' res.headers[b'Content-Type'] = b'text/plain' @@ -390,7 +388,8 @@ return with cacher: - cachekey = entry.cachekeyfn(repo, proto, cacher, **args) + cachekey = entry.cachekeyfn(repo, proto, cacher, + **pycompat.strkwargs(args)) # No cache key or the cacher doesn't like it. Do default handling. if cachekey is None or not cacher.setcachekey(cachekey): @@ -744,7 +743,7 @@ # More granular cache key invalidation. b'localversion': localversion, # Cache keys are segmented by command. - b'command': pycompat.sysbytes(command), + b'command': command, # Throw in the media type and API version strings so changes # to exchange semantics invalid cache. b'mediatype': FRAMINGTYPE,
--- a/rust/Cargo.lock Thu Feb 07 20:50:41 2019 +0900 +++ b/rust/Cargo.lock Tue Feb 19 21:55:05 2019 -0800 @@ -7,11 +7,29 @@ ] [[package]] +name = "autocfg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "bitflags" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] name = "cfg-if" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "cpython" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -23,8 +41,17 @@ ] [[package]] +name = "fuchsia-cprng" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] name = "hg-core" version = "0.1.0" +dependencies = [ + "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] [[package]] name = "hg-cpython" @@ -89,6 +116,110 @@ ] [[package]] +name = "rand" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_jitter 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_os 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_pcg 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_isaac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_jitter" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_os" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-cprng 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.45 
(registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_pcg" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "regex" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -109,6 +240,27 @@ ] [[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] name = "thread_local" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -131,19 +283,59 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "winapi" +version = "0.3.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [metadata] "checksum aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9a933f4e58658d7b12defcf96dc5c720f20832deebe3e0a19efd3b6aaeeb9e" +"checksum autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6d640bee2da49f60a4068a7fae53acde8982514ab7bae8b8cea9e88cbcfd799" +"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" "checksum cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4" +"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum cpython 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b489034e723e7f5109fecd19b719e664f89ef925be785885252469e9822fa940" +"checksum fuchsia-cprng 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "81f7f8eb465745ea9b02e2704612a9946a59fa40572086c6fd49d6ddcf30bf31" "checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1" "checksum libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)" = "2d2857ec59fadc0773853c664d2d18e7198e83883e7060b63c924cb077bd5c74" "checksum memchr 2.1.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "db4c41318937f6e76648f42826b1d9ade5c09cafb5aef7e351240a70f39206e9" "checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" "checksum python27-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56114c37d4dca82526d74009df7782a28c871ac9d36b19d4cb9e67672258527e" "checksum python3-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "61e4aac43f833fd637e429506cb2ac9d7df672c4b68f2eaaa163649b7fdc0444" +"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +"checksum rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0e7a549d590831370895ab7ba4ea0c1b6b011d106b5ff2da6eee112615e6dc0" +"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +"checksum rand_jitter 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "080723c6145e37503a2224f801f252e14ac5531cb450f4502698542d188cb3c0" +"checksum rand_os 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b7c690732391ae0abafced5015ffb53656abfaec61b342290e5eb56b286a679d" +"checksum rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "086bd09a33c7044e56bb44d5bdde5a60e7f119a9e95b0775f545de759a32fe05" +"checksum rand_xorshift 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" "checksum regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "37e7cbbd370869ce2e8dff25c7018702d10b21a20ef7135316f8daecd6c25b7f" "checksum regex-syntax 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4e47a2ed29da7a9e1960e1639e7a982e6edc6d49be308a3b02daf511504a16d1" +"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" "checksum ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86" "checksum utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "796f7e48bef87609f7ade7e06495a87d5cd06c7866e6a5cbfceffc558a243737" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" +"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0" +"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +"checksum 
winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
--- a/rust/chg/src/sighandlers.c Thu Feb 07 20:50:41 2019 +0900 +++ b/rust/chg/src/sighandlers.c Tue Feb 19 21:55:05 2019 -0800 @@ -33,28 +33,36 @@ { sigset_t unblockset, oldset; struct sigaction sa, oldsa; - if (sigemptyset(&unblockset) < 0) + if (sigemptyset(&unblockset) < 0) { return; - if (sigaddset(&unblockset, sig) < 0) + } + if (sigaddset(&unblockset, sig) < 0) { return; + } memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { return; + } forwardsignal(sig); - if (raise(sig) < 0) /* resend to self */ + if (raise(sig) < 0) { /* resend to self */ return; - if (sigaction(sig, &sa, &oldsa) < 0) + } + if (sigaction(sig, &sa, &oldsa) < 0) { return; - if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) + } + if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) { return; + } /* resent signal will be handled before sigprocmask() returns */ - if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) + if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) { return; - if (sigaction(sig, &oldsa, NULL) < 0) + } + if (sigaction(sig, &oldsa, NULL) < 0) { return; + } } /* @@ -81,37 +89,46 @@ * - SIGINT: usually generated by the terminal */ sa.sa_handler = forwardsignaltogroup; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { + return -1; + } + if (sigaction(SIGHUP, &sa, NULL) < 0) { return -1; - if (sigaction(SIGHUP, &sa, NULL) < 0) + } + if (sigaction(SIGINT, &sa, NULL) < 0) { return -1; - if (sigaction(SIGINT, &sa, NULL) < 0) - return -1; + } /* terminate frontend by double SIGTERM in case of server freeze */ sa.sa_handler = forwardsignal; sa.sa_flags |= SA_RESETHAND; - if (sigaction(SIGTERM, &sa, NULL) < 0) + if (sigaction(SIGTERM, &sa, NULL) < 0) { return -1; + } /* notify the worker about window resize events */ sa.sa_flags = SA_RESTART; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + if (sigaction(SIGWINCH, &sa, NULL) < 0) { return 
-1; + } /* forward user-defined signals */ - if (sigaction(SIGUSR1, &sa, NULL) < 0) + if (sigaction(SIGUSR1, &sa, NULL) < 0) { return -1; - if (sigaction(SIGUSR2, &sa, NULL) < 0) + } + if (sigaction(SIGUSR2, &sa, NULL) < 0) { return -1; + } /* propagate job control requests to worker */ sa.sa_handler = forwardsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGCONT, &sa, NULL) < 0) + if (sigaction(SIGCONT, &sa, NULL) < 0) { return -1; + } sa.sa_handler = handlestopsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + if (sigaction(SIGTSTP, &sa, NULL) < 0) { return -1; + } return 0; } @@ -127,24 +144,31 @@ memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { return -1; + } - if (sigaction(SIGHUP, &sa, NULL) < 0) + if (sigaction(SIGHUP, &sa, NULL) < 0) { return -1; - if (sigaction(SIGTERM, &sa, NULL) < 0) + } + if (sigaction(SIGTERM, &sa, NULL) < 0) { return -1; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + } + if (sigaction(SIGWINCH, &sa, NULL) < 0) { return -1; - if (sigaction(SIGCONT, &sa, NULL) < 0) + } + if (sigaction(SIGCONT, &sa, NULL) < 0) { return -1; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + } + if (sigaction(SIGTSTP, &sa, NULL) < 0) { return -1; + } /* ignore Ctrl+C while shutting down to make pager exits cleanly */ sa.sa_handler = SIG_IGN; - if (sigaction(SIGINT, &sa, NULL) < 0) + if (sigaction(SIGINT, &sa, NULL) < 0) { return -1; + } peerpid = 0; return 0;
--- a/rust/hg-core/Cargo.toml Thu Feb 07 20:50:41 2019 +0900 +++ b/rust/hg-core/Cargo.toml Tue Feb 19 21:55:05 2019 -0800 @@ -6,3 +6,7 @@ [lib] name = "hg" + +[dev-dependencies] +rand = "*" +rand_pcg = "*"
--- a/rust/hg-core/src/ancestors.rs Thu Feb 07 20:50:41 2019 +0900 +++ b/rust/hg-core/src/ancestors.rs Tue Feb 19 21:55:05 2019 -0800 @@ -38,6 +38,7 @@ pub struct MissingAncestors<G: Graph> { graph: G, bases: HashSet<Revision>, + max_base: Revision, } impl<G: Graph> AncestorsIterator<G> { @@ -79,8 +80,7 @@ #[inline] fn conditionally_push_rev(&mut self, rev: Revision) { - if self.stoprev <= rev && !self.seen.contains(&rev) { - self.seen.insert(rev); + if self.stoprev <= rev && self.seen.insert(rev) { self.visit.push(rev); } } @@ -154,11 +154,10 @@ Ok(ps) => ps, Err(e) => return Some(Err(e)), }; - if p1 < self.stoprev || self.seen.contains(&p1) { + if p1 < self.stoprev || !self.seen.insert(p1) { self.visit.pop(); } else { *(self.visit.peek_mut().unwrap()) = p1; - self.seen.insert(p1); }; self.conditionally_push_rev(p2); @@ -211,15 +210,17 @@ impl<G: Graph> MissingAncestors<G> { pub fn new(graph: G, bases: impl IntoIterator<Item = Revision>) -> Self { - let mut bases: HashSet<Revision> = bases.into_iter().collect(); - if bases.is_empty() { - bases.insert(NULL_REVISION); - } - MissingAncestors { graph, bases } + let mut created = MissingAncestors { + graph: graph, + bases: HashSet::new(), + max_base: NULL_REVISION, + }; + created.add_bases(bases); + created } pub fn has_bases(&self) -> bool { - self.bases.iter().any(|&b| b != NULL_REVISION) + !self.bases.is_empty() } /// Return a reference to current bases. @@ -238,16 +239,33 @@ } /// Consumes the object and returns the relative heads of its bases. - pub fn into_bases_heads(mut self) -> Result<HashSet<Revision>, GraphError> { + pub fn into_bases_heads( + mut self, + ) -> Result<HashSet<Revision>, GraphError> { dagops::retain_heads(&self.graph, &mut self.bases)?; Ok(self.bases) } + /// Add some revisions to `self.bases` + /// + /// Takes care of keeping `self.max_base` up to date. 
pub fn add_bases( &mut self, new_bases: impl IntoIterator<Item = Revision>, ) { - self.bases.extend(new_bases); + let mut max_base = self.max_base; + self.bases.extend( + new_bases + .into_iter() + .filter(|&rev| rev != NULL_REVISION) + .map(|r| { + if r > max_base { + max_base = r; + } + r + }), + ); + self.max_base = max_base; } /// Remove all ancestors of self.bases from the revs set (in place) @@ -256,28 +274,26 @@ revs: &mut HashSet<Revision>, ) -> Result<(), GraphError> { revs.retain(|r| !self.bases.contains(r)); - // the null revision is always an ancestor + // the null revision is always an ancestor. Logically speaking + // it's debatable in case bases is empty, but the Python + // implementation always adds NULL_REVISION to bases, making it + // unconditionnally true. revs.remove(&NULL_REVISION); if revs.is_empty() { return Ok(()); } // anything in revs > start is definitely not an ancestor of bases // revs <= start need to be investigated - // TODO optim: if a missingancestors is to be used several times, - // we shouldn't need to iterate each time on bases - let start = match self.bases.iter().cloned().max() { - Some(m) => m, - None => { - // bases is empty (shouldn't happen, but let's be safe) - return Ok(()); - } - }; + if self.max_base == NULL_REVISION { + return Ok(()); + } + // whatever happens, we'll keep at least keepcount of them // knowing this gives us a earlier stop condition than // going all the way to the root - let keepcount = revs.iter().filter(|r| **r > start).count(); + let keepcount = revs.iter().filter(|r| **r > self.max_base).count(); - let mut curr = start; + let mut curr = self.max_base; while curr != NULL_REVISION && revs.len() > keepcount { if self.bases.contains(&curr) { revs.remove(&curr); @@ -288,12 +304,17 @@ Ok(()) } - /// Add rev's parents to self.bases + /// Add the parents of `rev` to `self.bases` + /// + /// This has no effect on `self.max_base` #[inline] fn add_parents(&mut self, rev: Revision) -> Result<(), GraphError> 
{ - // No need to bother the set with inserting NULL_REVISION over and - // over + if rev == NULL_REVISION { + return Ok(()); + } for p in self.graph.parents(rev)?.iter().cloned() { + // No need to bother the set with inserting NULL_REVISION over and + // over if p != NULL_REVISION { self.bases.insert(p); } @@ -323,12 +344,8 @@ if revs_visit.is_empty() { return Ok(Vec::new()); } - - let max_bases = - bases_visit.iter().cloned().max().unwrap_or(NULL_REVISION); - let max_revs = - revs_visit.iter().cloned().max().unwrap_or(NULL_REVISION); - let start = max(max_bases, max_revs); + let max_revs = revs_visit.iter().cloned().max().unwrap(); + let start = max(self.max_base, max_revs); // TODO heuristics for with_capacity()? let mut missing: Vec<Revision> = Vec::new(); @@ -336,12 +353,9 @@ if revs_visit.is_empty() { break; } - if both_visit.contains(&curr) { + if both_visit.remove(&curr) { // curr's parents might have made it into revs_visit through // another path - // TODO optim: Rust's HashSet.remove returns a boolean telling - // if it happened. This will spare us one set lookup - both_visit.remove(&curr); for p in self.graph.parents(curr)?.iter().cloned() { if p == NULL_REVISION { continue; @@ -356,13 +370,14 @@ if p == NULL_REVISION { continue; } - if bases_visit.contains(&p) || both_visit.contains(&p) { - // p is an ancestor of revs_visit, and is implicitly - // in bases_visit, which means p is ::revs & ::bases. 
- // TODO optim: hence if bothvisit, we look up twice + if bases_visit.contains(&p) { + // p is already known to be an ancestor of revs_visit + revs_visit.remove(&p); + both_visit.insert(p); + } else if both_visit.contains(&p) { + // p should have been in bases_visit revs_visit.remove(&p); bases_visit.insert(p); - both_visit.insert(p); } else { // visit later revs_visit.insert(p); @@ -373,11 +388,9 @@ if p == NULL_REVISION { continue; } - if revs_visit.contains(&p) || both_visit.contains(&p) { + if revs_visit.remove(&p) || both_visit.contains(&p) { // p is an ancestor of bases_visit, and is implicitly // in revs_visit, which means p is ::revs & ::bases. - // TODO optim: hence if bothvisit, we look up twice - revs_visit.remove(&p); bases_visit.insert(p); both_visit.insert(p); } else { @@ -578,11 +591,13 @@ missing_ancestors.get_bases().iter().cloned().collect(); as_vec.sort(); assert_eq!(as_vec, [1, 3, 5]); + assert_eq!(missing_ancestors.max_base, 5); missing_ancestors.add_bases([3, 7, 8].iter().cloned()); as_vec = missing_ancestors.get_bases().iter().cloned().collect(); as_vec.sort(); assert_eq!(as_vec, [1, 3, 5, 7, 8]); + assert_eq!(missing_ancestors.max_base, 8); as_vec = missing_ancestors.bases_heads()?.iter().cloned().collect(); as_vec.sort();
--- a/rust/hg-core/src/dagops.rs Thu Feb 07 20:50:41 2019 +0900 +++ b/rust/hg-core/src/dagops.rs Tue Feb 19 21:55:05 2019 -0800 @@ -46,7 +46,9 @@ let mut heads: HashSet<Revision> = iter_revs.clone().cloned().collect(); heads.remove(&NULL_REVISION); for rev in iter_revs { - remove_parents(graph, *rev, &mut heads)?; + if *rev != NULL_REVISION { + remove_parents(graph, *rev, &mut heads)?; + } } Ok(heads) } @@ -71,7 +73,9 @@ // mutating let as_vec: Vec<Revision> = revs.iter().cloned().collect(); for rev in as_vec { - remove_parents(graph, rev, revs)?; + if rev != NULL_REVISION { + remove_parents(graph, rev, revs)?; + } } Ok(()) }
--- a/rust/hg-core/src/lib.rs Thu Feb 07 20:50:41 2019 +0900 +++ b/rust/hg-core/src/lib.rs Tue Feb 19 21:55:05 2019 -0800 @@ -5,8 +5,7 @@ mod ancestors; pub mod dagops; pub use ancestors::{AncestorsIterator, LazyAncestors, MissingAncestors}; -#[cfg(test)] -pub mod testing; +pub mod testing; // unconditionally built, for use from integration tests /// Mercurial revision numbers /// @@ -14,6 +13,11 @@ /// 4 bytes, and are liberally converted to ints, whence the i32 pub type Revision = i32; + +/// Marker expressing the absence of a parent +/// +/// Independently of the actual representation, `NULL_REVISION` is guaranteed +/// to be smaller that all existing revisions. pub const NULL_REVISION: Revision = -1; /// Same as `mercurial.node.wdirrev`
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/rust/hg-core/tests/test_missing_ancestors.rs Tue Feb 19 21:55:05 2019 -0800 @@ -0,0 +1,340 @@ +extern crate hg; +extern crate rand; +extern crate rand_pcg; + +use hg::testing::VecGraph; +use hg::Revision; +use hg::*; +use rand::distributions::{Distribution, LogNormal, Uniform}; +use rand::{thread_rng, Rng, RngCore, SeedableRng}; +use std::cmp::min; +use std::collections::HashSet; +use std::env; +use std::fmt::Debug; + +fn build_random_graph( + nodes_opt: Option<usize>, + rootprob_opt: Option<f64>, + mergeprob_opt: Option<f64>, + prevprob_opt: Option<f64>, +) -> VecGraph { + let nodes = nodes_opt.unwrap_or(100); + let rootprob = rootprob_opt.unwrap_or(0.05); + let mergeprob = mergeprob_opt.unwrap_or(0.2); + let prevprob = prevprob_opt.unwrap_or(0.7); + + let mut rng = thread_rng(); + let mut vg: VecGraph = Vec::with_capacity(nodes); + for i in 0..nodes { + if i == 0 || rng.gen_bool(rootprob) { + vg.push([NULL_REVISION, NULL_REVISION]) + } else if i == 1 { + vg.push([0, NULL_REVISION]) + } else if rng.gen_bool(mergeprob) { + let p1 = { + if i == 2 || rng.gen_bool(prevprob) { + (i - 1) as Revision + } else { + rng.gen_range(0, i - 1) as Revision + } + }; + // p2 is a random revision lower than i and different from p1 + let mut p2 = rng.gen_range(0, i - 1) as Revision; + if p2 >= p1 { + p2 = p2 + 1; + } + vg.push([p1, p2]); + } else if rng.gen_bool(prevprob) { + vg.push([(i - 1) as Revision, NULL_REVISION]) + } else { + vg.push([rng.gen_range(0, i - 1) as Revision, NULL_REVISION]) + } + } + vg +} + +/// Compute the ancestors set of all revisions of a VecGraph +fn ancestors_sets(vg: &VecGraph) -> Vec<HashSet<Revision>> { + let mut ancs: Vec<HashSet<Revision>> = Vec::new(); + for i in 0..vg.len() { + let mut ancs_i = HashSet::new(); + ancs_i.insert(i as Revision); + for p in vg[i].iter().cloned() { + if p != NULL_REVISION { + ancs_i.extend(&ancs[p as usize]); + } + } + ancs.push(ancs_i); + } + ancs +} + +#[derive(Clone, 
Debug)] +enum MissingAncestorsAction { + InitialBases(HashSet<Revision>), + AddBases(HashSet<Revision>), + RemoveAncestorsFrom(HashSet<Revision>), + MissingAncestors(HashSet<Revision>), +} + +/// An instrumented naive yet obviously correct implementation +/// +/// It also records all its actions for easy reproduction for replay +/// of problematic cases +struct NaiveMissingAncestors<'a> { + ancestors_sets: &'a Vec<HashSet<Revision>>, + graph: &'a VecGraph, // used for error reporting only + bases: HashSet<Revision>, + history: Vec<MissingAncestorsAction>, + // for error reporting, assuming we are in a random test + random_seed: String, +} + +impl<'a> NaiveMissingAncestors<'a> { + fn new( + graph: &'a VecGraph, + ancestors_sets: &'a Vec<HashSet<Revision>>, + bases: &HashSet<Revision>, + random_seed: &str, + ) -> Self { + Self { + ancestors_sets: ancestors_sets, + bases: bases.clone(), + graph: graph, + history: vec![MissingAncestorsAction::InitialBases(bases.clone())], + random_seed: random_seed.into(), + } + } + + fn add_bases(&mut self, new_bases: HashSet<Revision>) { + self.bases.extend(&new_bases); + self.history + .push(MissingAncestorsAction::AddBases(new_bases)) + } + + fn remove_ancestors_from(&mut self, revs: &mut HashSet<Revision>) { + revs.remove(&NULL_REVISION); + self.history + .push(MissingAncestorsAction::RemoveAncestorsFrom(revs.clone())); + for base in self.bases.iter().cloned() { + if base != NULL_REVISION { + for rev in &self.ancestors_sets[base as usize] { + revs.remove(&rev); + } + } + } + } + + fn missing_ancestors( + &mut self, + revs: impl IntoIterator<Item = Revision>, + ) -> Vec<Revision> { + let revs_as_set: HashSet<Revision> = revs.into_iter().collect(); + + let mut missing: HashSet<Revision> = HashSet::new(); + for rev in revs_as_set.iter().cloned() { + if rev != NULL_REVISION { + missing.extend(&self.ancestors_sets[rev as usize]) + } + } + self.history + .push(MissingAncestorsAction::MissingAncestors(revs_as_set)); + + for base in 
self.bases.iter().cloned() { + if base != NULL_REVISION { + for rev in &self.ancestors_sets[base as usize] { + missing.remove(&rev); + } + } + } + let mut res: Vec<Revision> = missing.iter().cloned().collect(); + res.sort(); + res + } + + fn assert_eq<T>(&self, left: T, right: T) + where + T: PartialEq + Debug, + { + if left == right { + return; + } + panic!(format!( + "Equality assertion failed (left != right) + left={:?} + right={:?} + graph={:?} + current bases={:?} + history={:?} + random seed={} + ", + left, + right, + self.graph, + self.bases, + self.history, + self.random_seed, + )); + } +} + +/// Choose a set of random revisions +/// +/// The size of the set is taken from a LogNormal distribution +/// with default mu=1.1 and default sigma=0.8. Quoting the Python +/// test this is taken from: +/// the default mu and sigma give us a nice distribution of mostly +/// single-digit counts (including 0) with some higher ones +/// The sample may include NULL_REVISION +fn sample_revs<R: RngCore>( + rng: &mut R, + maxrev: Revision, + mu_opt: Option<f64>, + sigma_opt: Option<f64>, +) -> HashSet<Revision> { + let mu = mu_opt.unwrap_or(1.1); + let sigma = sigma_opt.unwrap_or(0.8); + + let log_normal = LogNormal::new(mu, sigma); + let nb = min(maxrev as usize, log_normal.sample(rng).floor() as usize); + + let dist = Uniform::from(NULL_REVISION..maxrev); + return rng.sample_iter(&dist).take(nb).collect(); +} + +/// Produces the hexadecimal representation of a slice of bytes +fn hex_bytes(bytes: &[u8]) -> String { + let mut s = String::with_capacity(bytes.len() * 2); + for b in bytes { + s.push_str(&format!("{:x}", b)); + } + s +} + +/// Fill a random seed from its hexadecimal representation. 
+/// +/// This signature is meant to be consistent with `RngCore::fill_bytes` +fn seed_parse_in(hex: &str, seed: &mut [u8]) { + if hex.len() != 32 { + panic!("Seed {} is too short for 128 bits hex", hex); + } + for i in 0..8 { + seed[i] = u8::from_str_radix(&hex[2 * i..2 * (i + 1)], 16) + .unwrap_or_else(|_e| panic!("Seed {} is not 128 bits hex", hex)); + } +} + +/// Parse the parameters for `test_missing_ancestors()` +/// +/// Returns (graphs, instances, calls per instance) +fn parse_test_missing_ancestors_params(var: &str) -> (usize, usize, usize) { + let err_msg = "TEST_MISSING_ANCESTORS format: GRAPHS,INSTANCES,CALLS"; + let params: Vec<usize> = var + .split(',') + .map(|n| n.trim().parse().expect(err_msg)) + .collect(); + if params.len() != 3 { + panic!(err_msg); + } + (params[0], params[1], params[2]) +} + +#[test] +/// This test creates lots of random VecGraphs, +/// and compare a bunch of MissingAncestors for them with +/// NaiveMissingAncestors that rely on precomputed transitive closures of +/// these VecGraphs (ancestors_sets). +/// +/// For each generater graph, several instances of `MissingAncestors` are +/// created, whose methods are called and checked a given number of times. +/// +/// This test can be parametrized by two environment variables: +/// +/// - TEST_RANDOM_SEED: must be 128 bits in hexadecimal +/// - TEST_MISSING_ANCESTORS: "GRAPHS,INSTANCES,CALLS". The default is +/// "100,10,10" +/// +/// This is slow: it runs on my workstation in about 5 seconds with the +/// default parameters with a plain `cargo --test`. +/// +/// If you want to run it faster, especially if you're changing the +/// parameters, use `cargo test --release`. 
+/// For me, that gets it down to 0.15 seconds with the default parameters +fn test_missing_ancestors_compare_naive() { + let (graphcount, testcount, inccount) = + match env::var("TEST_MISSING_ANCESTORS") { + Err(env::VarError::NotPresent) => (100, 10, 10), + Ok(val) => parse_test_missing_ancestors_params(&val), + Err(env::VarError::NotUnicode(_)) => { + panic!("TEST_MISSING_ANCESTORS is invalid"); + } + }; + let mut seed: [u8; 16] = [0; 16]; + match env::var("TEST_RANDOM_SEED") { + Ok(val) => { + seed_parse_in(&val, &mut seed); + } + Err(env::VarError::NotPresent) => { + thread_rng().fill_bytes(&mut seed); + } + Err(env::VarError::NotUnicode(_)) => { + panic!("TEST_RANDOM_SEED must be 128 bits in hex"); + } + } + let hex_seed = hex_bytes(&seed); + eprintln!("Random seed: {}", hex_seed); + + let mut rng = rand_pcg::Pcg32::from_seed(seed); + + eprint!("Checking MissingAncestors against brute force implementation "); + eprint!("for {} random graphs, ", graphcount); + eprintln!( + "with {} instances for each and {} calls per instance", + testcount, inccount, + ); + for g in 0..graphcount { + if g != 0 && g % 100 == 0 { + eprintln!("Tested with {} graphs", g); + } + let graph = build_random_graph(None, None, None, None); + let graph_len = graph.len() as Revision; + let ancestors_sets = ancestors_sets(&graph); + for _testno in 0..testcount { + let bases: HashSet<Revision> = + sample_revs(&mut rng, graph_len, None, None); + let mut inc = MissingAncestors::<VecGraph>::new( + graph.clone(), + bases.clone(), + ); + let mut naive = NaiveMissingAncestors::new( + &graph, + &ancestors_sets, + &bases, + &hex_seed, + ); + for _m in 0..inccount { + if rng.gen_bool(0.2) { + let new_bases = + sample_revs(&mut rng, graph_len, None, None); + inc.add_bases(new_bases.iter().cloned()); + naive.add_bases(new_bases); + } + if rng.gen_bool(0.4) { + // larger set so that there are more revs to remove from + let mut hrevs = + sample_revs(&mut rng, graph_len, Some(1.5), None); + let mut rrevs 
= hrevs.clone(); + inc.remove_ancestors_from(&mut hrevs).unwrap(); + naive.remove_ancestors_from(&mut rrevs); + naive.assert_eq(hrevs, rrevs); + } else { + let revs = sample_revs(&mut rng, graph_len, None, None); + let hm = + inc.missing_ancestors(revs.iter().cloned()).unwrap(); + let rm = naive.missing_ancestors(revs.iter().cloned()); + naive.assert_eq(hm, rm); + } + } + } + } +}
--- a/rust/hg-cpython/src/ancestors.rs Thu Feb 07 20:50:41 2019 +0900 +++ b/rust/hg-cpython/src/ancestors.rs Tue Feb 19 21:55:05 2019 -0800 @@ -34,11 +34,11 @@ //! [`LazyAncestors`]: struct.LazyAncestors.html //! [`MissingAncestors`]: struct.MissingAncestors.html //! [`AncestorsIterator`]: struct.AncestorsIterator.html -use crate::conversion::rev_pyiter_collect; +use crate::conversion::{py_set, rev_pyiter_collect}; use cindex::Index; use cpython::{ ObjectProtocol, PyClone, PyDict, PyList, PyModule, PyObject, PyResult, - PyTuple, Python, PythonObject, ToPyObject, + Python, PythonObject, ToPyObject, }; use exceptions::GraphError; use hg::Revision; @@ -90,24 +90,6 @@ } } -/// Copy and convert an `HashSet<Revision>` in a Python set -/// -/// This will probably turn useless once `PySet` support lands in -/// `rust-cpython`. -/// -/// This builds a Python tuple, then calls Python's "set()" on it -fn py_set(py: Python, set: &HashSet<Revision>) -> PyResult<PyObject> { - let as_vec: Vec<PyObject> = set - .iter() - .map(|rev| rev.to_py_object(py).into_object()) - .collect(); - let as_pytuple = PyTuple::new(py, as_vec.as_slice()); - - let locals = PyDict::new(py); - locals.set_item(py, "obj", as_pytuple.to_py_object(py))?; - py.eval("set(obj)", None, Some(&locals)) -} - py_class!(pub class LazyAncestors |py| { data inner: RefCell<Box<CoreLazy<Index>>>;
--- a/rust/hg-cpython/src/conversion.rs Thu Feb 07 20:50:41 2019 +0900 +++ b/rust/hg-cpython/src/conversion.rs Tue Feb 19 21:55:05 2019 -0800 @@ -8,8 +8,12 @@ //! Bindings for the hg::ancestors module provided by the //! `hg-core` crate. From Python, this will be seen as `rustext.ancestor` -use cpython::{ObjectProtocol, PyObject, PyResult, Python}; +use cpython::{ + ObjectProtocol, PyDict, PyObject, PyResult, PyTuple, Python, PythonObject, + ToPyObject, +}; use hg::Revision; +use std::collections::HashSet; use std::iter::FromIterator; /// Utility function to convert a Python iterable into various collections @@ -26,3 +30,21 @@ .map(|r| r.and_then(|o| o.extract::<Revision>(py))) .collect() } + +/// Copy and convert an `HashSet<Revision>` in a Python set +/// +/// This will probably turn useless once `PySet` support lands in +/// `rust-cpython`. +/// +/// This builds a Python tuple, then calls Python's "set()" on it +pub fn py_set(py: Python, set: &HashSet<Revision>) -> PyResult<PyObject> { + let as_vec: Vec<PyObject> = set + .iter() + .map(|rev| rev.to_py_object(py).into_object()) + .collect(); + let as_pytuple = PyTuple::new(py, as_vec.as_slice()); + + let locals = PyDict::new(py); + locals.set_item(py, "obj", as_pytuple.to_py_object(py))?; + py.eval("set(obj)", None, Some(&locals)) +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/rust/hg-cpython/src/dagops.rs Tue Feb 19 21:55:05 2019 -0800 @@ -0,0 +1,53 @@ +// dagops.rs +// +// Copyright 2019 Georges Racinet <georges.racinet@octobus.net> +// +// This software may be used and distributed according to the terms of the +// GNU General Public License version 2 or any later version. + +//! Bindings for the `hg::dagops` module provided by the +//! `hg-core` package. +//! +//! From Python, this will be seen as `mercurial.rustext.dagop` +use cindex::Index; +use cpython::{PyDict, PyModule, PyObject, PyResult, Python}; +use crate::conversion::{py_set, rev_pyiter_collect}; +use exceptions::GraphError; +use hg::dagops; +use hg::Revision; +use std::collections::HashSet; + +/// Using the the `index`, return heads out of any Python iterable of Revisions +/// +/// This is the Rust counterpart for `mercurial.dagop.headrevs` +pub fn headrevs( + py: Python, + index: PyObject, + revs: PyObject, +) -> PyResult<PyObject> { + let mut as_set: HashSet<Revision> = rev_pyiter_collect(py, &revs)?; + dagops::retain_heads(&Index::new(py, index)?, &mut as_set) + .map_err(|e| GraphError::pynew(py, e))?; + py_set(py, &as_set) +} + +/// Create the module, with `__package__` given from parent +pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> { + let dotted_name = &format!("{}.dagop", package); + let m = PyModule::new(py, dotted_name)?; + m.add(py, "__package__", package)?; + m.add(py, "__doc__", "DAG operations - Rust implementation")?; + m.add( + py, + "headrevs", + py_fn!(py, headrevs(index: PyObject, revs: PyObject)), + )?; + + let sys = PyModule::import(py, "sys")?; + let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?; + sys_modules.set_item(py, dotted_name, &m)?; + // Example C code (see pyexpat.c and import.c) will "give away the + // reference", but we won't because it will be consumed once the + // Rust PyObject is dropped. + Ok(m) +}
--- a/rust/hg-cpython/src/lib.rs Thu Feb 07 20:50:41 2019 +0900 +++ b/rust/hg-cpython/src/lib.rs Tue Feb 19 21:55:05 2019 -0800 @@ -27,6 +27,7 @@ pub mod ancestors; mod cindex; mod conversion; +pub mod dagops; pub mod exceptions; py_module_initializer!(rustext, initrustext, PyInit_rustext, |py, m| { @@ -38,6 +39,7 @@ let dotted_name: String = m.get(py, "__name__")?.extract(py)?; m.add(py, "ancestor", ancestors::init_module(py, &dotted_name)?)?; + m.add(py, "dagop", dagops::init_module(py, &dotted_name)?)?; m.add(py, "GraphError", py.get_type::<exceptions::GraphError>())?; Ok(()) });
--- a/setup.py Thu Feb 07 20:50:41 2019 +0900 +++ b/setup.py Tue Feb 19 21:55:05 2019 -0800 @@ -666,7 +666,7 @@ self.addlongpathsmanifest() def addlongpathsmanifest(self): - """Add manifest pieces so that hg.exe understands long paths + r"""Add manifest pieces so that hg.exe understands long paths This is an EXPERIMENTAL feature, use with care. To enable long paths support, one needs to do two things:
--- a/tests/artifacts/scripts/generate-churning-bundle.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/artifacts/scripts/generate-churning-bundle.py Tue Feb 19 21:55:05 2019 -0800 @@ -42,7 +42,6 @@ FILENAME='SPARSE-REVLOG-TEST-FILE' NB_LINES = 10500 ALWAYS_CHANGE_LINES = 500 -FILENAME = 'SPARSE-REVLOG-TEST-FILE' OTHER_CHANGES = 300 def nextcontent(previous_content):
--- a/tests/badserverext.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/badserverext.py Tue Feb 19 21:55:05 2019 -0800 @@ -34,6 +34,7 @@ import socket from mercurial import( + pycompat, registrar, ) @@ -48,10 +49,10 @@ default=False, ) configitem(b'badserver', b'closeafterrecvbytes', - default='0', + default=b'0', ) configitem(b'badserver', b'closeaftersendbytes', - default='0', + default=b'0', ) configitem(b'badserver', b'closebeforeaccept', default=False, @@ -74,7 +75,7 @@ object.__setattr__(self, '_closeaftersendbytes', closeaftersendbytes) def __getattribute__(self, name): - if name in ('makefile',): + if name in ('makefile', 'sendall', '_writelog'): return object.__getattribute__(self, name) return getattr(object.__getattribute__(self, '_orig'), name) @@ -85,6 +86,13 @@ def __setattr__(self, name, value): setattr(object.__getattribute__(self, '_orig'), name, value) + def _writelog(self, msg): + msg = msg.replace(b'\r', b'\\r').replace(b'\n', b'\\n') + + object.__getattribute__(self, '_logfp').write(msg) + object.__getattribute__(self, '_logfp').write(b'\n') + object.__getattribute__(self, '_logfp').flush() + def makefile(self, mode, bufsize): f = object.__getattribute__(self, '_orig').makefile(mode, bufsize) @@ -98,6 +106,38 @@ closeafterrecvbytes=closeafterrecvbytes, closeaftersendbytes=closeaftersendbytes) + def sendall(self, data, flags=0): + remaining = object.__getattribute__(self, '_closeaftersendbytes') + + # No read limit. Call original function. 
+ if not remaining: + result = object.__getattribute__(self, '_orig').sendall(data, flags) + self._writelog(b'sendall(%d) -> %s' % (len(data), data)) + return result + + if len(data) > remaining: + newdata = data[0:remaining] + else: + newdata = data + + remaining -= len(newdata) + + result = object.__getattribute__(self, '_orig').sendall(newdata, flags) + + self._writelog(b'sendall(%d from %d) -> (%d) %s' % ( + len(newdata), len(data), remaining, newdata)) + + object.__setattr__(self, '_closeaftersendbytes', remaining) + + if remaining <= 0: + self._writelog(b'write limit reached; closing socket') + object.__getattribute__(self, '_orig').shutdown(socket.SHUT_RDWR) + + raise Exception('connection closed after sending N bytes') + + return result + + # We can't adjust __class__ on socket._fileobject, so define a proxy. class fileobjectproxy(object): __slots__ = ( @@ -115,7 +155,7 @@ object.__setattr__(self, '_closeaftersendbytes', closeaftersendbytes) def __getattribute__(self, name): - if name in ('read', 'readline', 'write', '_writelog'): + if name in ('_close', 'read', 'readline', 'write', '_writelog'): return object.__getattribute__(self, name) return getattr(object.__getattribute__(self, '_orig'), name) @@ -127,21 +167,34 @@ setattr(object.__getattribute__(self, '_orig'), name, value) def _writelog(self, msg): - msg = msg.replace('\r', '\\r').replace('\n', '\\n') + msg = msg.replace(b'\r', b'\\r').replace(b'\n', b'\\n') object.__getattribute__(self, '_logfp').write(msg) - object.__getattribute__(self, '_logfp').write('\n') + object.__getattribute__(self, '_logfp').write(b'\n') object.__getattribute__(self, '_logfp').flush() + def _close(self): + # Python 3 uses an io.BufferedIO instance. Python 2 uses some file + # object wrapper. 
+ if pycompat.ispy3: + orig = object.__getattribute__(self, '_orig') + + if hasattr(orig, 'raw'): + orig.raw._sock.shutdown(socket.SHUT_RDWR) + else: + self.close() + else: + self._sock.shutdown(socket.SHUT_RDWR) + def read(self, size=-1): remaining = object.__getattribute__(self, '_closeafterrecvbytes') # No read limit. Call original function. if not remaining: result = object.__getattribute__(self, '_orig').read(size) - self._writelog('read(%d) -> (%d) (%s) %s' % (size, - len(result), - result)) + self._writelog(b'read(%d) -> (%d) (%s) %s' % (size, + len(result), + result)) return result origsize = size @@ -154,14 +207,15 @@ result = object.__getattribute__(self, '_orig').read(size) remaining -= len(result) - self._writelog('read(%d from %d) -> (%d) %s' % ( + self._writelog(b'read(%d from %d) -> (%d) %s' % ( size, origsize, len(result), result)) object.__setattr__(self, '_closeafterrecvbytes', remaining) if remaining <= 0: - self._writelog('read limit reached, closing socket') - self._sock.close() + self._writelog(b'read limit reached, closing socket') + self._close() + # This is the easiest way to abort the current request. raise Exception('connection closed after receiving N bytes') @@ -173,7 +227,7 @@ # No read limit. Call original function. 
if not remaining: result = object.__getattribute__(self, '_orig').readline(size) - self._writelog('readline(%d) -> (%d) %s' % ( + self._writelog(b'readline(%d) -> (%d) %s' % ( size, len(result), result)) return result @@ -187,14 +241,15 @@ result = object.__getattribute__(self, '_orig').readline(size) remaining -= len(result) - self._writelog('readline(%d from %d) -> (%d) %s' % ( + self._writelog(b'readline(%d from %d) -> (%d) %s' % ( size, origsize, len(result), result)) object.__setattr__(self, '_closeafterrecvbytes', remaining) if remaining <= 0: - self._writelog('read limit reached; closing socket') - self._sock.close() + self._writelog(b'read limit reached; closing socket') + self._close() + # This is the easiest way to abort the current request. raise Exception('connection closed after receiving N bytes') @@ -205,7 +260,7 @@ # No byte limit on this operation. Call original function. if not remaining: - self._writelog('write(%d) -> %s' % (len(data), data)) + self._writelog(b'write(%d) -> %s' % (len(data), data)) result = object.__getattribute__(self, '_orig').write(data) return result @@ -216,7 +271,7 @@ remaining -= len(newdata) - self._writelog('write(%d from %d) -> (%d) %s' % ( + self._writelog(b'write(%d from %d) -> (%d) %s' % ( len(newdata), len(data), remaining, newdata)) result = object.__getattribute__(self, '_orig').write(newdata) @@ -224,8 +279,9 @@ object.__setattr__(self, '_closeaftersendbytes', remaining) if remaining <= 0: - self._writelog('write limit reached; closing socket') - self._sock.close() + self._writelog(b'write limit reached; closing socket') + self._close() + raise Exception('connection closed after sending N bytes') return result @@ -239,10 +295,10 @@ super(badserver, self).__init__(ui, *args, **kwargs) recvbytes = self._ui.config(b'badserver', b'closeafterrecvbytes') - recvbytes = recvbytes.split(',') + recvbytes = recvbytes.split(b',') self.closeafterrecvbytes = [int(v) for v in recvbytes if v] sendbytes = 
self._ui.config(b'badserver', b'closeaftersendbytes') - sendbytes = sendbytes.split(',') + sendbytes = sendbytes.split(b',') self.closeaftersendbytes = [int(v) for v in sendbytes if v] # Need to inherit object so super() works. @@ -270,7 +326,7 @@ # Simulate failure to stop processing this request. raise socket.error('close before accept') - if self._ui.configbool('badserver', 'closeafteraccept'): + if self._ui.configbool(b'badserver', b'closeafteraccept'): request, client_address = super(badserver, self).get_request() request.close() raise socket.error('close after accept')
--- a/tests/check-perf-code.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/check-perf-code.py Tue Feb 19 21:55:05 2019 -0800 @@ -24,7 +24,7 @@ def modulewhitelist(names): replacement = [('.py', ''), ('.c', ''), # trim suffix - ('mercurial%s' % (os.sep), ''), # trim "mercurial/" path + ('mercurial%s' % ('/'), ''), # trim "mercurial/" path ] ignored = {'__init__'} modules = {}
--- a/tests/drawdag.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/drawdag.py Tue Feb 19 21:55:05 2019 -0800 @@ -322,7 +322,7 @@ v.remove(leaf) def _getcomments(text): - """ + r""" >>> [pycompat.sysstr(s) for s in _getcomments(br''' ... G ... | @@ -341,7 +341,7 @@ @command(b'debugdrawdag', []) def debugdrawdag(ui, repo, **opts): - """read an ASCII graph from stdin and create changesets + r"""read an ASCII graph from stdin and create changesets The ASCII graph is like what :hg:`log -G` outputs, with each `o` replaced to the name of the node. The command will create dummy changesets and local
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/filtertraceback.py Tue Feb 19 21:55:05 2019 -0800 @@ -0,0 +1,29 @@ +#!/usr/bin/env python + +# Filters traceback lines from stdin. + +from __future__ import absolute_import, print_function + +import sys + +state = 'none' + +for line in sys.stdin: + if state == 'none': + if line.startswith('Traceback '): + state = 'tb' + + elif state == 'tb': + if line.startswith(' File '): + state = 'file' + continue + + elif not line.startswith(' '): + state = 'none' + + elif state == 'file': + # Ignore lines after " File " + state = 'tb' + continue + + print(line, end='')
--- a/tests/flagprocessorext.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/flagprocessorext.py Tue Feb 19 21:55:05 2019 -0800 @@ -107,7 +107,7 @@ # Teach exchange to use changegroup 3 for k in exchange._bundlespeccontentopts.keys(): - exchange._bundlespeccontentopts[k]["cg.version"] = "03" + exchange._bundlespeccontentopts[k][b"cg.version"] = b"03" # Register flag processors for each extension revlog.addflagprocessor(
--- a/tests/hghave.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/hghave.py Tue Feb 19 21:55:05 2019 -0800 @@ -1,6 +1,5 @@ from __future__ import absolute_import -import errno import os import re import socket @@ -118,13 +117,8 @@ is matched by the supplied regular expression. """ r = re.compile(regexp) - try: - p = subprocess.Popen( - cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - except OSError as e: - if e.errno != errno.ENOENT: - raise - ret = -1 + p = subprocess.Popen( + cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) s = p.communicate()[0] ret = p.returncode return (ignorestatus or not ret) and r.search(s) @@ -549,7 +543,7 @@ @check("tls1.2", "TLS 1.2 protocol support") def has_tls1_2(): from mercurial import sslutil - return 'tls1.2' in sslutil.supportedprotocols + return b'tls1.2' in sslutil.supportedprotocols @check("windows", "Windows") def has_windows(): @@ -652,6 +646,13 @@ # chg disables demandimport intentionally for performance wins. 
return ((not has_chg()) and os.environ.get('HGDEMANDIMPORT') != 'disable') +@checkvers("py", "Python >= %s", (2.7, 3.5, 3.6, 3.7, 3.8, 3.9)) +def has_python_range(v): + major, minor = v.split('.')[0:2] + py_major, py_minor = sys.version_info.major, sys.version_info.minor + + return (py_major, py_minor) >= (int(major), int(minor)) + @check("py3", "running with Python 3.x") def has_py3(): return 3 == sys.version_info[0] @@ -721,7 +722,7 @@ @check("clang-libfuzzer", "clang new enough to include libfuzzer") def has_clang_libfuzzer(): - mat = matchoutput('clang --version', b'clang version (\d)') + mat = matchoutput('clang --version', br'clang version (\d)') if mat: # libfuzzer is new in clang 6 return int(mat.group(1)) > 5 @@ -729,7 +730,7 @@ @check("clang-6.0", "clang 6.0 with version suffix (libfuzzer included)") def has_clang60(): - return matchoutput('clang-6.0 --version', b'clang version 6\.') + return matchoutput('clang-6.0 --version', br'clang version 6\.') @check("xdiff", "xdiff algorithm") def has_xdiff(): @@ -810,7 +811,7 @@ # WITH clause not supported return False - return matchoutput('sqlite3 -version', b'^3\.\d+') + return matchoutput('sqlite3 -version', br'^3\.\d+') @check('vcr', 'vcr http mocking library') def has_vcr():
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/httpserverauth.py Tue Feb 19 21:55:05 2019 -0800 @@ -0,0 +1,113 @@ +from __future__ import absolute_import + +import base64 +import hashlib + +from mercurial.hgweb import common +from mercurial import ( + node, +) + +def parse_keqv_list(req, l): + """Parse list of key=value strings where keys are not duplicated.""" + parsed = {} + for elt in l: + k, v = elt.split(b'=', 1) + if v[0:1] == b'"' and v[-1:] == b'"': + v = v[1:-1] + parsed[k] = v + return parsed + +class digestauthserver(object): + def __init__(self): + self._user_hashes = {} + + def gethashers(self): + def _md5sum(x): + m = hashlib.md5() + m.update(x) + return node.hex(m.digest()) + + h = _md5sum + + kd = lambda s, d, h=h: h(b"%s:%s" % (s, d)) + return h, kd + + def adduser(self, user, password, realm): + h, kd = self.gethashers() + a1 = h(b'%s:%s:%s' % (user, realm, password)) + self._user_hashes[(user, realm)] = a1 + + def makechallenge(self, realm): + # We aren't testing the protocol here, just that the bytes make the + # proper round trip. So hardcoded seems fine. 
+ nonce = b'064af982c5b571cea6450d8eda91c20d' + return b'realm="%s", nonce="%s", algorithm=MD5, qop="auth"' % (realm, + nonce) + + def checkauth(self, req, header): + log = req.rawenv[b'wsgi.errors'] + + h, kd = self.gethashers() + resp = parse_keqv_list(req, header.split(b', ')) + + if resp.get(b'algorithm', b'MD5').upper() != b'MD5': + log.write(b'Unsupported algorithm: %s' % resp.get(b'algorithm')) + raise common.ErrorResponse(common.HTTP_FORBIDDEN, + b"unknown algorithm") + user = resp[b'username'] + realm = resp[b'realm'] + nonce = resp[b'nonce'] + + ha1 = self._user_hashes.get((user, realm)) + if not ha1: + log.write(b'No hash found for user/realm "%s/%s"' % (user, realm)) + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b"bad user") + + qop = resp.get(b'qop', b'auth') + if qop != b'auth': + log.write(b"Unsupported qop: %s" % qop) + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b"bad qop") + + cnonce, ncvalue = resp.get(b'cnonce'), resp.get(b'nc') + if not cnonce or not ncvalue: + log.write(b'No cnonce (%s) or ncvalue (%s)' % (cnonce, ncvalue)) + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b"no cnonce") + + a2 = b'%s:%s' % (req.method, resp[b'uri']) + noncebit = b"%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, h(a2)) + + respdig = kd(ha1, noncebit) + if respdig != resp[b'response']: + log.write(b'User/realm "%s/%s" gave %s, but expected %s' + % (user, realm, resp[b'response'], respdig)) + return False + + return True + +digest = digestauthserver() + +def perform_authentication(hgweb, req, op): + auth = req.headers.get(b'Authorization') + + if req.headers.get(b'X-HgTest-AuthType') == b'Digest': + if not auth: + challenge = digest.makechallenge(b'mercurial') + raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', + [(b'WWW-Authenticate', b'Digest %s' % challenge)]) + + if not digest.checkauth(req, auth[7:]): + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') + + return + + if not auth: + raise 
common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', + [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) + + if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', b'pass']: + raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') + +def extsetup(ui): + common.permhooks.insert(0, perform_authentication) + digest.adduser(b'user', b'pass', b'mercurial')
--- a/tests/notcapable Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/notcapable Tue Feb 19 21:55:05 2019 -0800 @@ -11,7 +11,7 @@ extensions.wrapfunction(repository.peer, 'capable', wrapcapable) extensions.wrapfunction(localrepo.localrepository, 'peer', wrappeer) def wrapcapable(orig, self, name, *args, **kwargs): - if name in '$CAP'.split(' '): + if name in b'$CAP'.split(b' '): return False return orig(self, name, *args, **kwargs) def wrappeer(orig, self):
--- a/tests/run-tests.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/run-tests.py Tue Feb 19 21:55:05 2019 -0800 @@ -634,7 +634,7 @@ # list in group 2, and the preceeding line output in group 1: # # output..output (feature !)\n -optline = re.compile(b'(.*) \((.+?) !\)\n$') +optline = re.compile(br'(.*) \((.+?) !\)\n$') def cdatasafe(data): """Make a string safe to include in a CDATA block. @@ -1225,7 +1225,6 @@ killdaemons(env['DAEMON_PIDS']) return ret - output = b'' proc.tochild.close() try: @@ -1903,8 +1902,9 @@ pass elif self._options.view: v = self._options.view - os.system(r"%s %s %s" % - (v, _strpath(test.refpath), _strpath(test.errpath))) + subprocess.call(r'"%s" "%s" "%s"' % + (v, _strpath(test.refpath), + _strpath(test.errpath)), shell=True) else: servefail, lines = getdiff(expected, got, test.refpath, test.errpath) @@ -2259,14 +2259,17 @@ self.stream.writeln('') if not self._runner.options.noskips: - for test, msg in self._result.skipped: + for test, msg in sorted(self._result.skipped, + key=lambda s: s[0].name): formatted = 'Skipped %s: %s\n' % (test.name, msg) msg = highlightmsg(formatted, self._result.color) self.stream.write(msg) - for test, msg in self._result.failures: + for test, msg in sorted(self._result.failures, + key=lambda f: f[0].name): formatted = 'Failed %s: %s\n' % (test.name, msg) self.stream.write(highlightmsg(formatted, self._result.color)) - for test, msg in self._result.errors: + for test, msg in sorted(self._result.errors, + key=lambda e: e[0].name): self.stream.writeln('Errored %s: %s' % (test.name, msg)) if self._runner.options.xunit: @@ -2376,12 +2379,12 @@ timesd = dict((t[0], t[3]) for t in result.times) doc = minidom.Document() s = doc.createElement('testsuite') - s.setAttribute('name', 'run-tests') - s.setAttribute('tests', str(result.testsRun)) s.setAttribute('errors', "0") # TODO s.setAttribute('failures', str(len(result.failures))) + s.setAttribute('name', 'run-tests') s.setAttribute('skipped', str(len(result.skipped) + 
len(result.ignored))) + s.setAttribute('tests', str(result.testsRun)) doc.appendChild(s) for tc in result.successes: t = doc.createElement('testcase') @@ -2770,8 +2773,8 @@ """ if not args: if self.options.changed: - proc = Popen4('hg st --rev "%s" -man0 .' % - self.options.changed, None, 0) + proc = Popen4(b'hg st --rev "%s" -man0 .' % + _bytespath(self.options.changed), None, 0) stdout, stderr = proc.communicate() args = stdout.strip(b'\0').split(b'\0') else: @@ -3110,8 +3113,8 @@ # installation layout put it in bin/ directly. Fix it with open(hgbat, 'rb') as f: data = f.read() - if b'"%~dp0..\python" "%~dp0hg" %*' in data: - data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*', + if br'"%~dp0..\python" "%~dp0hg" %*' in data: + data = data.replace(br'"%~dp0..\python" "%~dp0hg" %*', b'"%~dp0python" "%~dp0hg" %*') with open(hgbat, 'wb') as f: f.write(data)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/svnurlof.py Tue Feb 19 21:55:05 2019 -0800 @@ -0,0 +1,18 @@ +from __future__ import absolute_import, print_function +import sys + +from mercurial import ( + pycompat, + util, +) + +def main(argv): + enc = util.urlreq.quote(pycompat.sysbytes(argv[1])) + if pycompat.iswindows: + fmt = 'file:///%s' + else: + fmt = 'file://%s' + print(fmt % pycompat.sysstr(enc)) + +if __name__ == '__main__': + main(sys.argv)
--- a/tests/svnxml.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/svnxml.py Tue Feb 19 21:55:05 2019 -0800 @@ -20,10 +20,10 @@ if paths: paths = paths[0] for p in paths.getElementsByTagName('path'): - action = p.getAttribute('action') - path = xmltext(p) - frompath = p.getAttribute('copyfrom-path') - fromrev = p.getAttribute('copyfrom-rev') + action = p.getAttribute('action').encode('utf-8') + path = xmltext(p).encode('utf-8') + frompath = p.getAttribute('copyfrom-path').encode('utf-8') + fromrev = p.getAttribute('copyfrom-rev').encode('utf-8') e['paths'].append((path, action, frompath, fromrev)) return e @@ -43,11 +43,11 @@ for k in ('revision', 'author', 'msg'): fp.write(('%s: %s\n' % (k, e[k])).encode('utf-8')) for path, action, fpath, frev in sorted(e['paths']): - frominfo = '' + frominfo = b'' if frev: - frominfo = ' (from %s@%s)' % (fpath, frev) - p = ' %s %s%s\n' % (action, path, frominfo) - fp.write(p.encode('utf-8')) + frominfo = b' (from %s@%s)' % (fpath, frev) + p = b' %s %s%s\n' % (action, path, frominfo) + fp.write(p) if __name__ == '__main__': data = sys.stdin.read()
--- a/tests/test-acl.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-acl.t Tue Feb 19 21:55:05 2019 -0800 @@ -38,8 +38,8 @@ > def fakegetusers(ui, group): > try: > return acl._getusersorig(ui, group) - > except: - > return ["fred", "betty"] + > except BaseException: + > return [b"fred", b"betty"] > acl._getusersorig = acl._getusers > acl._getusers = fakegetusers > EOF @@ -1125,7 +1125,7 @@ bundle2-input-bundle: 4 parts total transaction abort! rollback completed - abort: $ENOENT$: ../acl.config + abort: $ENOENT$: '../acl.config' no rollback information available 0:6675d58eff77
--- a/tests/test-ancestor.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-ancestor.py Tue Feb 19 21:55:05 2019 -0800 @@ -123,7 +123,6 @@ # reference slow algorithm naiveinc = naiveincrementalmissingancestors(ancs, bases) seq = [] - revs = [] for _ in xrange(inccount): if rng.random() < 0.2: newbases = samplerevs(graphnodes)
--- a/tests/test-annotate.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-annotate.t Tue Feb 19 21:55:05 2019 -0800 @@ -589,7 +589,7 @@ $ hg annotate -ncr "wdir()" baz abort: $TESTTMP\repo\baz: $ENOENT$ (windows !) - abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !) + abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !) [255] annotate removed file @@ -598,7 +598,7 @@ $ hg annotate -ncr "wdir()" baz abort: $TESTTMP\repo\baz: $ENOENT$ (windows !) - abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !) + abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !) [255] $ hg revert --all --no-backup --quiet @@ -809,6 +809,15 @@ |\ ~ ~ +An integer as a line range, which is parsed as '1:1' + + $ hg log -r 'followlines(baz, 1)' + changeset: 22:2174d0bf352a + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: added two lines with 0 + + check error cases $ hg up 24 --quiet $ hg log -r 'followlines()' @@ -817,8 +826,8 @@ $ hg log -r 'followlines(baz)' hg: parse error: followlines requires a line range [255] - $ hg log -r 'followlines(baz, 1)' - hg: parse error: followlines expects a line range + $ hg log -r 'followlines(baz, x)' + hg: parse error: followlines expects a line number or a range [255] $ hg log -r 'followlines(baz, 1:2, startrev=desc("b"))' hg: parse error: followlines expects exactly one revision
--- a/tests/test-arbitraryfilectx.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-arbitraryfilectx.t Tue Feb 19 21:55:05 2019 -0800 @@ -72,30 +72,30 @@ These files are different and should return True (different): (Note that filecmp.cmp's return semantics are inverted from ours, so we invert for simplicity): - $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['real_A'])" + $ hg eval "context.arbitraryfilectx(b'A', repo).cmp(repo[None][b'real_A'])" True (no-eol) - $ hg eval "not filecmp.cmp('A', 'real_A')" + $ hg eval "not filecmp.cmp(b'A', b'real_A')" True (no-eol) These files are identical and should return False (same): - $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['A'])" + $ hg eval "context.arbitraryfilectx(b'A', repo).cmp(repo[None][b'A'])" False (no-eol) - $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['B'])" + $ hg eval "context.arbitraryfilectx(b'A', repo).cmp(repo[None][b'B'])" False (no-eol) - $ hg eval "not filecmp.cmp('A', 'B')" + $ hg eval "not filecmp.cmp(b'A', b'B')" False (no-eol) This comparison should also return False, since A and sym_A are substantially the same in the eyes of ``filectx.cmp``, which looks at data only. - $ hg eval "context.arbitraryfilectx('real_A', repo).cmp(repo[None]['sym_A'])" + $ hg eval "context.arbitraryfilectx(b'real_A', repo).cmp(repo[None][b'sym_A'])" False (no-eol) A naive use of filecmp on those two would wrongly return True, since it follows the symlink to "A", which has different contents. #if symlink - $ hg eval "not filecmp.cmp('real_A', 'sym_A')" + $ hg eval "not filecmp.cmp(b'real_A', b'sym_A')" True (no-eol) #else - $ hg eval "not filecmp.cmp('real_A', 'sym_A')" + $ hg eval "not filecmp.cmp(b'real_A', b'sym_A')" False (no-eol) #endif
--- a/tests/test-archive.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-archive.t Tue Feb 19 21:55:05 2019 -0800 @@ -187,7 +187,7 @@ server: testing stub value transfer-encoding: chunked - body: size=(1377|1461), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537) (re) + body: size=(1377|1461|1489), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537|1897e496871aa89ad685a92b936f5fa0d008b9e8) (re) % tar.gz and tar.bz2 disallowed should both give 403 403 Archive type not allowed: gz content-type: text/html; charset=ascii @@ -274,7 +274,7 @@ server: testing stub value transfer-encoding: chunked - body: size=(1377|1461), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537) (re) + body: size=(1377|1461|1489), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537|1897e496871aa89ad685a92b936f5fa0d008b9e8) (re) % tar.gz and tar.bz2 disallowed should both give 403 403 Archive type not allowed: gz content-type: text/html; charset=ascii
--- a/tests/test-batching.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-batching.py Tue Feb 19 21:55:05 2019 -0800 @@ -11,25 +11,28 @@ from mercurial import ( localrepo, + pycompat, wireprotov1peer, +) -) +def bprint(*bs): + print(*[pycompat.sysstr(b) for b in bs]) # equivalent of repo.repository class thing(object): def hello(self): - return "Ready." + return b"Ready." # equivalent of localrepo.localrepository class localthing(thing): def foo(self, one, two=None): if one: - return "%s and %s" % (one, two,) - return "Nope" + return b"%s and %s" % (one, two,) + return b"Nope" def bar(self, b, a): - return "%s und %s" % (b, a,) + return b"%s und %s" % (b, a,) def greet(self, name=None): - return "Hello, %s" % name + return b"Hello, %s" % name @contextlib.contextmanager def commandexecutor(self): @@ -43,27 +46,27 @@ def use(it): # Direct call to base method shared between client and server. - print(it.hello()) + bprint(it.hello()) # Direct calls to proxied methods. They cause individual roundtrips. - print(it.foo("Un", two="Deux")) - print(it.bar("Eins", "Zwei")) + bprint(it.foo(b"Un", two=b"Deux")) + bprint(it.bar(b"Eins", b"Zwei")) # Batched call to a couple of proxied methods. 
with it.commandexecutor() as e: - ffoo = e.callcommand('foo', {'one': 'One', 'two': 'Two'}) - fbar = e.callcommand('bar', {'b': 'Eins', 'a': 'Zwei'}) - fbar2 = e.callcommand('bar', {'b': 'Uno', 'a': 'Due'}) + ffoo = e.callcommand(b'foo', {b'one': b'One', b'two': b'Two'}) + fbar = e.callcommand(b'bar', {b'b': b'Eins', b'a': b'Zwei'}) + fbar2 = e.callcommand(b'bar', {b'b': b'Uno', b'a': b'Due'}) - print(ffoo.result()) - print(fbar.result()) - print(fbar2.result()) + bprint(ffoo.result()) + bprint(fbar.result()) + bprint(fbar2.result()) # local usage mylocal = localthing() print() -print("== Local") +bprint(b"== Local") use(mylocal) # demo remoting; mimicks what wireproto and HTTP/SSH do @@ -72,16 +75,16 @@ def escapearg(plain): return (plain - .replace(':', '::') - .replace(',', ':,') - .replace(';', ':;') - .replace('=', ':=')) + .replace(b':', b'::') + .replace(b',', b':,') + .replace(b';', b':;') + .replace(b'=', b':=')) def unescapearg(escaped): return (escaped - .replace(':=', '=') - .replace(':;', ';') - .replace(':,', ',') - .replace('::', ':')) + .replace(b':=', b'=') + .replace(b':;', b';') + .replace(b':,', b',') + .replace(b'::', b':')) # server side @@ -90,27 +93,28 @@ def __init__(self, local): self.local = local def _call(self, name, args): - args = dict(arg.split('=', 1) for arg in args) + args = dict(arg.split(b'=', 1) for arg in args) return getattr(self, name)(**args) def perform(self, req): - print("REQ:", req) - name, args = req.split('?', 1) - args = args.split('&') - vals = dict(arg.split('=', 1) for arg in args) - res = getattr(self, name)(**vals) - print(" ->", res) + bprint(b"REQ:", req) + name, args = req.split(b'?', 1) + args = args.split(b'&') + vals = dict(arg.split(b'=', 1) for arg in args) + res = getattr(self, pycompat.sysstr(name))(**pycompat.strkwargs(vals)) + bprint(b" ->", res) return res def batch(self, cmds): res = [] - for pair in cmds.split(';'): - name, args = pair.split(':', 1) + for pair in cmds.split(b';'): + name, args = 
pair.split(b':', 1) vals = {} - for a in args.split(','): + for a in args.split(b','): if a: - n, v = a.split('=') + n, v = a.split(b'=') vals[n] = unescapearg(v) - res.append(escapearg(getattr(self, name)(**vals))) - return ';'.join(res) + res.append(escapearg(getattr(self, pycompat.sysstr(name))( + **pycompat.strkwargs(vals)))) + return b';'.join(res) def foo(self, one, two): return mangle(self.local.foo(unmangle(one), unmangle(two))) def bar(self, b, a): @@ -124,25 +128,25 @@ # equivalent of wireproto.encode/decodelist, that is, type-specific marshalling # here we just transform the strings a bit to check we're properly en-/decoding def mangle(s): - return ''.join(chr(ord(c) + 1) for c in s) + return b''.join(pycompat.bytechr(ord(c) + 1) for c in pycompat.bytestr(s)) def unmangle(s): - return ''.join(chr(ord(c) - 1) for c in s) + return b''.join(pycompat.bytechr(ord(c) - 1) for c in pycompat.bytestr(s)) # equivalent of wireproto.wirerepository and something like http's wire format class remotething(thing): def __init__(self, server): self.server = server def _submitone(self, name, args): - req = name + '?' + '&'.join(['%s=%s' % (n, v) for n, v in args]) + req = name + b'?' 
+ b'&'.join([b'%s=%s' % (n, v) for n, v in args]) return self.server.perform(req) def _submitbatch(self, cmds): req = [] for name, args in cmds: - args = ','.join(n + '=' + escapearg(v) for n, v in args) - req.append(name + ':' + args) - req = ';'.join(req) - res = self._submitone('batch', [('cmds', req,)]) - for r in res.split(';'): + args = b','.join(n + b'=' + escapearg(v) for n, v in args) + req.append(name + b':' + args) + req = b';'.join(req) + res = self._submitone(b'batch', [(b'cmds', req,)]) + for r in res.split(b';'): yield r @contextlib.contextmanager @@ -155,7 +159,7 @@ @wireprotov1peer.batchable def foo(self, one, two=None): - encargs = [('one', mangle(one),), ('two', mangle(two),)] + encargs = [(b'one', mangle(one),), (b'two', mangle(two),)] encresref = wireprotov1peer.future() yield encargs, encresref yield unmangle(encresref.value) @@ -163,18 +167,18 @@ @wireprotov1peer.batchable def bar(self, b, a): encresref = wireprotov1peer.future() - yield [('b', mangle(b),), ('a', mangle(a),)], encresref + yield [(b'b', mangle(b),), (b'a', mangle(a),)], encresref yield unmangle(encresref.value) # greet is coded directly. It therefore does not support batching. If it # does appear in a batch, the batch is split around greet, and the call to # greet is done in its own roundtrip. def greet(self, name=None): - return unmangle(self._submitone('greet', [('name', mangle(name),)])) + return unmangle(self._submitone(b'greet', [(b'name', mangle(name),)])) # demo remote usage myproxy = remotething(myserver) print() -print("== Remote") +bprint(b"== Remote") use(myproxy)
--- a/tests/test-blackbox.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-blackbox.t Tue Feb 19 21:55:05 2019 -0800 @@ -354,6 +354,35 @@ warning: cannot write to blackbox.log: $TESTTMP/gone/.hg/blackbox.log: $ENOTDIR$ (windows !) $ cd .. +blackbox should disable itself if track is empty + + $ hg --config blackbox.track= init nothing_tracked + $ cd nothing_tracked + $ cat >> .hg/hgrc << EOF + > [blackbox] + > track = + > EOF + $ hg blackbox + $ cd $TESTTMP + +a '*' entry in blackbox.track is interpreted as log everything + + $ hg --config blackbox.track='*' \ + > --config blackbox.logsource=True \ + > init track_star + $ cd track_star + $ cat >> .hg/hgrc << EOF + > [blackbox] + > logsource = True + > track = * + > EOF +(only look for entries with specific logged sources, otherwise this test is +pretty brittle) + $ hg blackbox | egrep '\[command(finish)?\]' + 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000) [commandfinish]> --config *blackbox.track=* --config *blackbox.logsource=True* init track_star exited 0 after * seconds (glob) + 1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000) [command]> blackbox + $ cd $TESTTMP + #if chg when using chg, blackbox.log should get rotated correctly
--- a/tests/test-bugzilla.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-bugzilla.t Tue Feb 19 21:55:05 2019 -0800 @@ -3,7 +3,9 @@ $ cat <<EOF > bzmock.py > from __future__ import absolute_import > from mercurial import extensions + > from mercurial import pycompat > from mercurial import registrar + > from mercurial.utils import stringutil > > configtable = {} > configitem = registrar.configitem(configtable) @@ -18,14 +20,17 @@ > super(bzmock, self).__init__(ui) > self._logfile = ui.config(b'bugzilla', b'mocklog') > def updatebug(self, bugid, newstate, text, committer): - > with open(self._logfile, 'a') as f: - > f.write('update bugid=%r, newstate=%r, committer=%r\n' - > % (bugid, newstate, committer)) - > f.write('----\n' + text + '\n----\n') + > with open(pycompat.fsdecode(self._logfile), 'ab') as f: + > f.write(b'update bugid=%s, newstate=%s, committer=%s\n' + > % (stringutil.pprint(bugid), + > stringutil.pprint(newstate), + > stringutil.pprint(committer))) + > f.write(b'----\n' + text + b'\n----\n') > def notify(self, bugs, committer): - > with open(self._logfile, 'a') as f: - > f.write('notify bugs=%r, committer=%r\n' - > % (bugs, committer)) + > with open(pycompat.fsdecode(self._logfile), 'ab') as f: + > f.write(b'notify bugs=%s, committer=%s\n' + > % (stringutil.pprint(bugs), + > stringutil.pprint(committer))) > bugzilla.bugzilla._versions[b'mock'] = bzmock > EOF
--- a/tests/test-bundle.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-bundle.t Tue Feb 19 21:55:05 2019 -0800 @@ -218,10 +218,11 @@ $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup" + > changegroup = sh -c "printenv.py --line changegroup" > EOF doesn't work (yet ?) +NOTE: msys is mangling the URL below hg -R bundle://../full.hg verify @@ -233,7 +234,15 @@ adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) new changesets f9ee2f85a263:aa35859c02ea (9 drafts) - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=bundle*../full.hg (glob) + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 + HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=bundle:../full.hg (no-msys !) + HG_URL=bundle;../full.hg (msys !) + (run 'hg heads' to see heads, 'hg merge' to merge) Rollback empty @@ -257,7 +266,14 @@ adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) new changesets f9ee2f85a263:aa35859c02ea (9 drafts) - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=bundle:empty+full.hg + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 + HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=bundle:empty+full.hg + (run 'hg heads' to see heads, 'hg merge' to merge) #endif
--- a/tests/test-bundle2-format.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-bundle2-format.t Tue Feb 19 21:55:05 2019 -0800 @@ -82,7 +82,8 @@ > (b'', b'genraise', False, b'includes a part that raise an exception during generation'), > (b'', b'timeout', False, b'emulate a timeout during bundle generation'), > (b'r', b'rev', [], b'includes those changeset in the bundle'), - > (b'', b'compress', b'', b'compress the stream'),], + > (b'', b'compress', b'', b'compress the stream'), + > ], > b'[OUTPUTFILE]') > def cmdbundle2(ui, repo, path=None, **opts): > """write a bundle2 container on standard output"""
--- a/tests/test-bundle2-multiple-changegroups.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-bundle2-multiple-changegroups.t Tue Feb 19 21:55:05 2019 -0800 @@ -66,9 +66,9 @@ $ cd ../clone $ cat >> .hg/hgrc <<EOF > [hooks] - > pretxnchangegroup = sh -c "printenv.py pretxnchangegroup" - > changegroup = sh -c "printenv.py changegroup" - > incoming = sh -c "printenv.py incoming" + > pretxnchangegroup = sh -c "printenv.py --line pretxnchangegroup" + > changegroup = sh -c "printenv.py --line changegroup" + > incoming = sh -c "printenv.py --line incoming" > EOF Pull the new commits in the clone @@ -81,18 +81,63 @@ adding manifests adding file changes added 1 changesets with 1 changes to 1 files - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_PENDING=$TESTTMP/clone + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + remote: changegroup2 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_PENDING=$TESTTMP/clone + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + new 
changesets 27547f69f254:f838bfaca5c7 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + pullop.cgresult is 1 (run 'hg update' to get a working copy) $ hg update @@ -152,21 +197,86 @@ adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup 
HG_HOOKTYPE=pretxnchangegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e + HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 + HG_PENDING=$TESTTMP/clone + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + remote: changegroup2 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+1 heads) - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 + HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 + HG_PENDING=$TESTTMP/clone + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + new changesets b3325c91a4d9:5cd59d311f65 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 
HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e + HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 + HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + 
HG_HOOKTYPE=incoming + HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + pullop.cgresult is 3 (run 'hg heads' to see heads, 'hg merge' to merge) $ hg log -G @@ -226,18 +336,63 @@ adding manifests adding file changes added 1 changesets with 0 changes to 0 files (-1 heads) - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_PENDING=$TESTTMP/clone + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + remote: changegroup2 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files - pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_PENDING=$TESTTMP/clone + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + new changesets 71bd7b46de72:9d18e5bd9ab0 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming 
HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 + HG_PHASES_MOVED=1 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/repo + pullop.cgresult is -2 (run 'hg update' to get a working copy) $ hg log -G
--- a/tests/test-bundle2-pushback.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-bundle2-pushback.t Tue Feb 19 21:55:05 2019 -0800 @@ -25,7 +25,8 @@ > b'key': b'new-server-mark', > b'old': b'', > b'new': b'tip'} - > encodedparams = [(k, pushkey.encode(v)) for (k,v) in params.items()] + > encodedparams = [(k, pushkey.encode(v)) + > for (k, v) in params.items()] > op.reply.newpart(b'pushkey', mandatoryparams=encodedparams) > else: > op.reply.newpart(b'output', data=b'pushback not enabled')
--- a/tests/test-cbor.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-cbor.py Tue Feb 19 21:55:05 2019 -0800 @@ -926,7 +926,7 @@ (False, None, -1, cborutil.SPECIAL_NONE)) with self.assertRaisesRegex(cborutil.CBORDecodeError, - 'semantic tag \d+ not allowed'): + r'semantic tag \d+ not allowed'): cborutil.decodeitem(encoded) class SpecialTypesTests(TestCase): @@ -942,7 +942,7 @@ encoded = cborutil.encodelength(cborutil.MAJOR_TYPE_SPECIAL, i) with self.assertRaisesRegex(cborutil.CBORDecodeError, - 'special type \d+ not allowed'): + r'special type \d+ not allowed'): cborutil.decodeitem(encoded) class SansIODecoderTests(TestCase):
--- a/tests/test-check-code.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-check-code.t Tue Feb 19 21:55:05 2019 -0800 @@ -22,7 +22,7 @@ >>> commands = [] >>> with open('mercurial/debugcommands.py', 'rb') as fh: ... for line in fh: - ... m = re.match(b"^@command\('([a-z]+)", line) + ... m = re.match(br"^@command\('([a-z]+)", line) ... if m: ... commands.append(m.group(1)) >>> scommands = list(sorted(commands))
--- a/tests/test-check-py3-compat.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-check-py3-compat.t Tue Feb 19 21:55:05 2019 -0800 @@ -32,6 +32,14 @@ > -X mercurial/thirdparty \ > | sed 's|\\|/|g' | xargs "$PYTHON" contrib/check-py3-compat.py \ > | sed 's/[0-9][0-9]*)$/*)/' + contrib/python-zstandard/tests/test_compressor.py:324: SyntaxWarning: invalid escape sequence \( (py38 !) + with self.assertRaisesRegexp(zstd.ZstdError, 'cannot call compress\(\) after compressor'): (py38 !) + contrib/python-zstandard/tests/test_compressor.py:1329: SyntaxWarning: invalid escape sequence \( (py38 !) + 'cannot call compress\(\) after compression finished'): (py38 !) + contrib/python-zstandard/tests/test_compressor.py:1341: SyntaxWarning: invalid escape sequence \( (py38 !) + 'cannot call flush\(\) after compression finished'): (py38 !) + contrib/python-zstandard/tests/test_compressor.py:1353: SyntaxWarning: invalid escape sequence \( (py38 !) + 'cannot call finish\(\) after compression finished'): (py38 !) hgext/convert/transport.py: error importing: <*Error> No module named 'svn.client' (error at transport.py:*) (glob) (?) hgext/infinitepush/sqlindexapi.py: error importing: <*Error> No module named 'mysql' (error at sqlindexapi.py:*) (glob) (?) mercurial/scmwindows.py: error importing: <ValueError> _type_ 'v' not supported (error at win32.py:*) (no-windows !)
--- a/tests/test-clone.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-clone.t Tue Feb 19 21:55:05 2019 -0800 @@ -569,7 +569,7 @@ > extensions.loadall(myui) > extensions.populateui(myui) > repo = hg.repository(myui, b'a') - > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable",]) + > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable"]) > EOF $ "$PYTHON" branchclone.py
--- a/tests/test-commit-interactive-curses.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-commit-interactive-curses.t Tue Feb 19 21:55:05 2019 -0800 @@ -333,9 +333,9 @@ $ cp $HGRCPATH.pretest $HGRCPATH $ chunkselectorinterface() { > "$PYTHON" <<EOF - > from mercurial import hg, ui;\ - > repo = hg.repository(ui.ui.load(), ".");\ - > print(repo.ui.interface("chunkselector")) + > from mercurial import hg, pycompat, ui;\ + > repo = hg.repository(ui.ui.load(), b".");\ + > print(pycompat.sysstr(repo.ui.interface(b"chunkselector"))) > EOF > } $ chunkselectorinterface
--- a/tests/test-commit-interactive.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-commit-interactive.t Tue Feb 19 21:55:05 2019 -0800 @@ -1807,3 +1807,82 @@ n 0 -1 unset subdir/f1 $ hg status -A subdir/f1 M subdir/f1 + +Test commands.commit.interactive.unified=0 + + $ hg init $TESTTMP/b + $ cd $TESTTMP/b + $ cat > foo <<EOF + > 1 + > 2 + > 3 + > 4 + > 5 + > EOF + $ hg ci -qAm initial + $ cat > foo <<EOF + > 1 + > change1 + > 2 + > 3 + > change2 + > 4 + > 5 + > EOF + $ printf 'y\ny\ny\n' | hg ci -im initial --config commands.commit.interactive.unified=0 + diff --git a/foo b/foo + 2 hunks, 2 lines changed + examine changes to 'foo'? [Ynesfdaq?] y + + @@ -1,0 +2,1 @@ 1 + +change1 + record change 1/2 to 'foo'? [Ynesfdaq?] y + + @@ -3,0 +5,1 @@ 3 + +change2 + record change 2/2 to 'foo'? [Ynesfdaq?] y + + $ cd $TESTTMP + +Test diff.ignoreblanklines=1 + + $ hg init c + $ cd c + $ cat > foo <<EOF + > 1 + > 2 + > 3 + > 4 + > 5 + > EOF + $ hg ci -qAm initial + $ cat > foo <<EOF + > 1 + > + > 2 + > 3 + > change2 + > 4 + > 5 + > EOF + $ printf 'y\ny\ny\n' | hg ci -im initial --config diff.ignoreblanklines=1 + diff --git a/foo b/foo + 2 hunks, 2 lines changed + examine changes to 'foo'? [Ynesfdaq?] y + + @@ -1,3 +1,4 @@ + 1 + + + 2 + 3 + record change 1/2 to 'foo'? [Ynesfdaq?] y + + @@ -2,4 +3,5 @@ + 2 + 3 + +change2 + 4 + 5 + record change 2/2 to 'foo'? [Ynesfdaq?] y + +
--- a/tests/test-completion.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-completion.t Tue Feb 19 21:55:05 2019 -0800 @@ -104,6 +104,7 @@ debugnamecomplete debugobsolete debugpathcomplete + debugpathcopies debugpeer debugpickmergetool debugpushkey @@ -280,6 +281,7 @@ debugnamecomplete: debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template debugpathcomplete: full, normal, added, removed + debugpathcopies: include, exclude debugpeer: debugpickmergetool: rev, changedelete, include, exclude, tool debugpushkey:
--- a/tests/test-context.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-context.py Tue Feb 19 21:55:05 2019 -0800 @@ -63,7 +63,7 @@ # test performing a status def getfilectx(repo, memctx, f): - fctx = memctx.parents()[0][f] + fctx = memctx.p1()[f] data, flags = fctx.data(), fctx.flags() if f == b'foo': data += b'bar\n'
--- a/tests/test-contrib-perf.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-contrib-perf.t Tue Feb 19 21:55:05 2019 -0800 @@ -88,12 +88,12 @@ (no help text available) perffncachewrite (no help text available) - perfheads (no help text available) + perfheads benchmark the computation of a changelog heads perfhelper-pathcopies find statistic about potential parameters for the 'perftracecopies' perfignore benchmark operation related to computing ignore - perfindex (no help text available) + perfindex benchmark index creation time followed by a lookup perflinelogedits (no help text available) perfloadmarkers @@ -109,6 +109,8 @@ perfmoonwalk benchmark walking the changelog backwards perfnodelookup (no help text available) + perfnodemap benchmark the time necessary to look up revision from a cold + nodemap perfparents (no help text available) perfpathcopies benchmark the copy tracing logic
--- a/tests/test-contrib-relnotes.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-contrib-relnotes.t Tue Feb 19 21:55:05 2019 -0800 @@ -266,7 +266,6 @@ * diff: disable diff.noprefix option for diffstat (Bts:issue5759) * evolution: make reporting of new unstable changesets optional * extdata: abort if external command exits with non-zero status (BC) - * fancyopts: add early-options parser compatible with getopt() * graphlog: add another graph node type, unstable, using character "*" (BC) * hgdemandimport: use correct hyperlink to python-bug in comments (Bts:issue5765) * httppeer: add support for tracing all http request made by the peer @@ -277,17 +276,18 @@ * morestatus: don't crash with different drive letters for repo.root and CWD * outgoing: respect ":pushurl" paths (Bts:issue5365) * remove: print message for each file in verbose mode only while using '-A' (BC) - * rewriteutil: use precheck() in uncommit and amend commands * scmutil: don't try to delete origbackup symlinks to directories (Bts:issue5731) * sshpeer: add support for request tracing * subrepo: add config option to reject any subrepo operations (SEC) * subrepo: disable git and svn subrepos by default (BC) (SEC) + * subrepo: disallow symlink traversal across subrepo mount point (SEC) * subrepo: extend config option to disable subrepos by type (SEC) * subrepo: handle 'C:' style paths on the command line (Bts:issue5770) * subrepo: use per-type config options to enable subrepos * svnsubrepo: check if subrepo is missing when checking dirty state (Bts:issue5657) * test-bookmarks-pushpull: stabilize for Windows * test-run-tests: stabilize the test (Bts:issue5735) + * tests: show symlink traversal across subrepo mount point (SEC) * tr-summary: keep a weakref to the unfiltered repository * unamend: fix command summary line * uncommit: unify functions _uncommitdirstate and _unamenddirstate to one
--- a/tests/test-convert-cvs.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-convert-cvs.t Tue Feb 19 21:55:05 2019 -0800 @@ -11,11 +11,11 @@ $ echo "[extensions]" >> $HGRCPATH $ echo "convert = " >> $HGRCPATH $ cat > cvshooks.py <<EOF - > def cvslog(ui,repo,hooktype,log): - > ui.write(b'%s hook: %d entries\n' % (hooktype,len(log))) + > def cvslog(ui, repo, hooktype, log): + > ui.write(b'%s hook: %d entries\n' % (hooktype, len(log))) > - > def cvschangesets(ui,repo,hooktype,changesets): - > ui.write(b'%s hook: %d changesets\n' % (hooktype,len(changesets))) + > def cvschangesets(ui, repo, hooktype, changesets): + > ui.write(b'%s hook: %d changesets\n' % (hooktype, len(changesets))) > EOF $ hookpath=`pwd` $ cat <<EOF >> $HGRCPATH
--- a/tests/test-convert-hg-svn.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-convert-hg-svn.t Tue Feb 19 21:55:05 2019 -0800 @@ -11,11 +11,7 @@ > EOF $ SVNREPOPATH=`pwd`/svn-repo -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" $ svnadmin create "$SVNREPOPATH" $ cat > "$SVNREPOPATH"/hooks/pre-revprop-change <<EOF
--- a/tests/test-convert-svn-move.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-convert-svn-move.t Tue Feb 19 21:55:05 2019 -0800 @@ -8,11 +8,7 @@ $ svnadmin create svn-repo $ svnadmin load -q svn-repo < "$TESTDIR/svn/move.svndump" $ SVNREPOPATH=`pwd`/svn-repo -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" Convert trunk and branches
--- a/tests/test-convert-svn-sink.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-convert-svn-sink.t Tue Feb 19 21:55:05 2019 -0800 @@ -466,3 +466,85 @@ msg: Add file a A /a $ rm -rf a a-hg a-hg-wc + +#if execbit + +Executable bit removal + + $ hg init a + + $ echo a > a/exec + $ chmod +x a/exec + $ hg --cwd a ci -d '1 0' -A -m 'create executable' + adding exec + $ chmod -x a/exec + $ hg --cwd a ci -d '2 0' -A -m 'remove executable bit' + + $ hg convert -d svn a + assuming destination a-hg + initializing svn repository 'a-hg' + initializing svn working copy 'a-hg-wc' + scanning source... + sorting... + converting... + 1 create executable + 0 remove executable bit + $ svnupanddisplay a-hg-wc 0 + 2 2 test . + 2 2 test exec + revision: 2 + author: test + msg: remove executable bit + M /exec + revision: 1 + author: test + msg: create executable + A /exec + $ test ! -x a-hg-wc/exec + + $ rm -rf a a-hg a-hg-wc + +#endif + +Skipping empty commits + + $ hg init a + + $ hg --cwd a --config ui.allowemptycommit=True ci -d '1 0' -m 'Initial empty commit' + + $ echo a > a/a + $ hg --cwd a ci -d '0 0' -A -m 'Some change' + adding a + $ hg --cwd a --config ui.allowemptycommit=True ci -d '2 0' -m 'Empty commit 1' + $ hg --cwd a --config ui.allowemptycommit=True ci -d '3 0' -m 'Empty commit 2' + $ echo b > a/b + $ hg --cwd a ci -d '0 0' -A -m 'Another change' + adding b + + $ hg convert -d svn a + assuming destination a-hg + initializing svn repository 'a-hg' + initializing svn working copy 'a-hg-wc' + scanning source... + sorting... + converting... + 4 Initial empty commit + 3 Some change + 2 Empty commit 1 + 1 Empty commit 2 + 0 Another change + + $ svnupanddisplay a-hg-wc 0 + 2 1 test a + 2 2 test . + 2 2 test b + revision: 2 + author: test + msg: Another change + A /b + revision: 1 + author: test + msg: Some change + A /a + + $ rm -rf a a-hg a-hg-wc
--- a/tests/test-convert-svn-source.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-convert-svn-source.t Tue Feb 19 21:55:05 2019 -0800 @@ -13,11 +13,7 @@ $ svnadmin create svn-repo $ SVNREPOPATH=`pwd`/svn-repo -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" $ INVALIDREVISIONID=svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1 $ VALIDREVISIONID=svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk/mytrunk@1
--- a/tests/test-copy.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-copy.t Tue Feb 19 21:55:05 2019 -0800 @@ -118,6 +118,23 @@ [255] $ hg st -A ? foo +respects ui.relative-paths + $ mkdir dir + $ cd dir + $ hg mv ../foo ../bar + ../foo: not copying - file is not managed + abort: no files to copy + [255] + $ hg mv ../foo ../bar --config ui.relative-paths=yes + ../foo: not copying - file is not managed + abort: no files to copy + [255] + $ hg mv ../foo ../bar --config ui.relative-paths=no + foo: not copying - file is not managed + abort: no files to copy + [255] + $ cd .. + $ rmdir dir $ hg add foo dry-run; print a warning that this is not a real copy; foo is added $ hg mv --dry-run foo bar
--- a/tests/test-demandimport.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-demandimport.py Tue Feb 19 21:55:05 2019 -0800 @@ -6,12 +6,30 @@ import os import subprocess import sys +import types + +# Don't import pycompat because it has too many side-effects. +ispy3 = sys.version_info[0] >= 3 # Only run if demandimport is allowed if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'demandimport']): sys.exit(80) +# We rely on assert, which gets optimized out. +if sys.flags.optimize: + sys.exit(80) + +if ispy3: + from importlib.util import _LazyModule + + try: + from importlib.util import _Module as moduletype + except ImportError: + moduletype = types.ModuleType +else: + moduletype = types.ModuleType + if os.name != 'nt': try: import distutils.msvc9compiler @@ -36,76 +54,173 @@ # this enable call should not actually enable demandimport! demandimport.enable() from mercurial import node -print("node =", f(node)) + +# We use assert instead of a unittest test case because having imports inside +# functions changes behavior of the demand importer. +if ispy3: + assert not isinstance(node, _LazyModule) +else: + assert f(node) == "<module 'mercurial.node' from '?'>", f(node) + # now enable it for real del os.environ['HGDEMANDIMPORT'] demandimport.enable() # Test access to special attributes through demandmod proxy +assert 'mercurial.error' not in sys.modules from mercurial import error as errorproxy -print("errorproxy =", f(errorproxy)) -print("errorproxy.__doc__ = %r" - % (' '.join(errorproxy.__doc__.split()[:3]) + ' ...')) -print("errorproxy.__name__ = %r" % errorproxy.__name__) + +if ispy3: + # unsure why this isn't lazy. + assert not isinstance(f, _LazyModule) + assert f(errorproxy) == "<module 'mercurial.error' from '?'>", f(errorproxy) +else: + assert f(errorproxy) == "<unloaded module 'error'>", f(errorproxy) + +doc = ' '.join(errorproxy.__doc__.split()[:3]) +assert doc == 'Mercurial exceptions. 
This', doc +assert errorproxy.__name__ == 'mercurial.error', errorproxy.__name__ + # __name__ must be accessible via __dict__ so the relative imports can be # resolved -print("errorproxy.__dict__['__name__'] = %r" % errorproxy.__dict__['__name__']) -print("errorproxy =", f(errorproxy)) +name = errorproxy.__dict__['__name__'] +assert name == 'mercurial.error', name + +if ispy3: + assert not isinstance(errorproxy, _LazyModule) + assert f(errorproxy) == "<module 'mercurial.error' from '?'>", f(errorproxy) +else: + assert f(errorproxy) == "<proxied module 'error'>", f(errorproxy) import os -print("os =", f(os)) -print("os.system =", f(os.system)) -print("os =", f(os)) +if ispy3: + assert not isinstance(os, _LazyModule) + assert f(os) == "<module 'os' from '?'>", f(os) +else: + assert f(os) == "<unloaded module 'os'>", f(os) +assert f(os.system) == '<built-in function system>', f(os.system) +assert f(os) == "<module 'os' from '?'>", f(os) + +assert 'mercurial.utils.procutil' not in sys.modules from mercurial.utils import procutil -print("procutil =", f(procutil)) -print("procutil.system =", f(procutil.system)) -print("procutil =", f(procutil)) -print("procutil.system =", f(procutil.system)) +if ispy3: + assert isinstance(procutil, _LazyModule) + assert f(procutil) == "<module 'mercurial.utils.procutil' from '?'>", f( + procutil + ) +else: + assert f(procutil) == "<unloaded module 'procutil'>", f(procutil) + +assert f(procutil.system) == '<function system at 0x?>', f(procutil.system) +assert procutil.__class__ == moduletype, procutil.__class__ +assert f(procutil) == "<module 'mercurial.utils.procutil' from '?'>", f( + procutil +) +assert f(procutil.system) == '<function system at 0x?>', f(procutil.system) +assert 'mercurial.hgweb' not in sys.modules from mercurial import hgweb -print("hgweb =", f(hgweb)) -print("hgweb_mod =", f(hgweb.hgweb_mod)) -print("hgweb =", f(hgweb)) + +if ispy3: + assert not isinstance(hgweb, _LazyModule) + assert f(hgweb) == "<module 
'mercurial.hgweb' from '?'>", f(hgweb) + assert isinstance(hgweb.hgweb_mod, _LazyModule) + assert ( + f(hgweb.hgweb_mod) == "<module 'mercurial.hgweb.hgweb_mod' from '?'>" + ), f(hgweb.hgweb_mod) +else: + assert f(hgweb) == "<unloaded module 'hgweb'>", f(hgweb) + assert f(hgweb.hgweb_mod) == "<unloaded module 'hgweb_mod'>", f( + hgweb.hgweb_mod + ) + +assert f(hgweb) == "<module 'mercurial.hgweb' from '?'>", f(hgweb) import re as fred -print("fred =", f(fred)) + +if ispy3: + assert not isinstance(fred, _LazyModule) + assert f(fred) == "<module 're' from '?'>" +else: + assert f(fred) == "<unloaded module 're'>", f(fred) import re as remod -print("remod =", f(remod)) + +if ispy3: + assert not isinstance(remod, _LazyModule) + assert f(remod) == "<module 're' from '?'>" +else: + assert f(remod) == "<unloaded module 're'>", f(remod) import sys as re -print("re =", f(re)) + +if ispy3: + assert not isinstance(re, _LazyModule) + assert f(re) == "<module 'sys' (built-in)>" +else: + assert f(re) == "<unloaded module 'sys'>", f(re) -print("fred =", f(fred)) -print("fred.sub =", f(fred.sub)) -print("fred =", f(fred)) +if ispy3: + assert not isinstance(fred, _LazyModule) + assert f(fred) == "<module 're' from '?'>", f(fred) +else: + assert f(fred) == "<unloaded module 're'>", f(fred) + +assert f(fred.sub) == '<function sub at 0x?>', f(fred.sub) + +if ispy3: + assert not isinstance(fred, _LazyModule) + assert f(fred) == "<module 're' from '?'>", f(fred) +else: + assert f(fred) == "<proxied module 're'>", f(fred) remod.escape # use remod -print("remod =", f(remod)) +assert f(remod) == "<module 're' from '?'>", f(remod) -print("re =", f(re)) -print("re.stderr =", f(re.stderr)) -print("re =", f(re)) +if ispy3: + assert not isinstance(re, _LazyModule) + assert f(re) == "<module 'sys' (built-in)>" + assert f(type(re.stderr)) == "<class '_io.TextIOWrapper'>", f( + type(re.stderr) + ) + assert f(re) == "<module 'sys' (built-in)>" +else: + assert f(re) == "<unloaded module 'sys'>", 
f(re) + assert f(re.stderr) == "<open file '<whatever>', mode 'w' at 0x?>", f( + re.stderr + ) + assert f(re) == "<proxied module 'sys'>", f(re) -import contextlib -print("contextlib =", f(contextlib)) +assert 'telnetlib' not in sys.modules +import telnetlib + +if ispy3: + assert not isinstance(telnetlib, _LazyModule) + assert f(telnetlib) == "<module 'telnetlib' from '?'>" +else: + assert f(telnetlib) == "<unloaded module 'telnetlib'>", f(telnetlib) + try: - from contextlib import unknownattr - print('no demandmod should be created for attribute of non-package ' - 'module:\ncontextlib.unknownattr =', f(unknownattr)) + from telnetlib import unknownattr + + assert False, ( + 'no demandmod should be created for attribute of non-package ' + 'module:\ntelnetlib.unknownattr = %s' % f(unknownattr) + ) except ImportError as inst: - print('contextlib.unknownattr = ImportError: %s' - % rsub(r"'", '', str(inst))) + assert rsub(r"'", '', str(inst)).startswith( + 'cannot import name unknownattr' + ) from mercurial import util # Unlike the import statement, __import__() function should not raise # ImportError even if fromlist has an unknown item # (see Python/import.c:import_module_level() and ensure_fromlist()) -contextlibimp = __import__('contextlib', globals(), locals(), ['unknownattr']) -print("__import__('contextlib', ..., ['unknownattr']) =", f(contextlibimp)) -print("hasattr(contextlibimp, 'unknownattr') =", - util.safehasattr(contextlibimp, 'unknownattr')) +assert 'zipfile' not in sys.modules +zipfileimp = __import__('zipfile', globals(), locals(), ['unknownattr']) +assert f(zipfileimp) == "<module 'zipfile' from '?'>", f(zipfileimp) +assert not util.safehasattr(zipfileimp, 'unknownattr')
--- a/tests/test-demandimport.py.out Thu Feb 07 20:50:41 2019 +0900 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,30 +0,0 @@ -node = <module 'mercurial.node' from '?'> -errorproxy = <unloaded module 'error'> -errorproxy.__doc__ = 'Mercurial exceptions. This ...' -errorproxy.__name__ = 'mercurial.error' -errorproxy.__dict__['__name__'] = 'mercurial.error' -errorproxy = <proxied module 'error'> -os = <unloaded module 'os'> -os.system = <built-in function system> -os = <module 'os' from '?'> -procutil = <unloaded module 'procutil'> -procutil.system = <function system at 0x?> -procutil = <module 'mercurial.utils.procutil' from '?'> -procutil.system = <function system at 0x?> -hgweb = <unloaded module 'hgweb'> -hgweb_mod = <unloaded module 'hgweb_mod'> -hgweb = <module 'mercurial.hgweb' from '?'> -fred = <unloaded module 're'> -remod = <unloaded module 're'> -re = <unloaded module 'sys'> -fred = <unloaded module 're'> -fred.sub = <function sub at 0x?> -fred = <proxied module 're'> -remod = <module 're' from '?'> -re = <unloaded module 'sys'> -re.stderr = <open file '<whatever>', mode 'w' at 0x?> -re = <proxied module 'sys'> -contextlib = <unloaded module 'contextlib'> -contextlib.unknownattr = ImportError: cannot import name unknownattr -__import__('contextlib', ..., ['unknownattr']) = <module 'contextlib' from '?'> -hasattr(contextlibimp, 'unknownattr') = False
--- a/tests/test-diffstat.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-diffstat.t Tue Feb 19 21:55:05 2019 -0800 @@ -146,10 +146,21 @@ $ hg diff --stat . dir1/new | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) + $ hg diff --stat . --config ui.relative-paths=yes + new | 1 + + 1 files changed, 1 insertions(+), 0 deletions(-) $ hg diff --stat --root . new | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) + $ hg diff --stat --root . --config ui.relative-paths=yes + new | 1 + + 1 files changed, 1 insertions(+), 0 deletions(-) +--root trumps ui.relative-paths + $ hg diff --stat --root .. --config ui.relative-paths=yes + new | 1 + + ../dir2/new | 1 + + 2 files changed, 2 insertions(+), 0 deletions(-) $ hg diff --stat --root ../dir1 ../dir2 warning: ../dir2 not inside relative root . @@ -236,3 +247,38 @@ $ hg diff --root . --stat file | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) + +When a file is renamed, --git shouldn't loss the info about old file + $ hg init issue6025 + $ cd issue6025 + $ echo > a + $ hg ci -Am 'add a' + adding a + $ hg mv a b + $ hg diff --git + diff --git a/a b/b + rename from a + rename to b + $ hg diff --stat + a | 1 - + b | 1 + + 2 files changed, 1 insertions(+), 1 deletions(-) + $ hg diff --stat --git + a => b | 0 + 1 files changed, 0 insertions(+), 0 deletions(-) +-- filename may contain whitespaces + $ echo > c + $ hg ci -Am 'add c' + adding c + $ hg mv c 'new c' + $ hg diff --git + diff --git a/c b/new c + rename from c + rename to new c + $ hg diff --stat + c | 1 - + new c | 1 + + 2 files changed, 1 insertions(+), 1 deletions(-) + $ hg diff --stat --git + c => new c | 0 + 1 files changed, 0 insertions(+), 0 deletions(-)
--- a/tests/test-dispatch.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-dispatch.t Tue Feb 19 21:55:05 2019 -0800 @@ -188,7 +188,8 @@ specified" should include filename even when it is empty $ hg -R a archive '' - abort: *: '' (glob) + abort: $ENOENT$: '' (no-windows !) + abort: $ENOTDIR$: '' (windows !) [255] #if no-outer-repo
--- a/tests/test-doctest.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-doctest.py Tue Feb 19 21:55:05 2019 -0800 @@ -62,6 +62,7 @@ testmod('mercurial.pycompat') testmod('mercurial.revlog') testmod('mercurial.revlogutils.deltas') +testmod('mercurial.revset') testmod('mercurial.revsetlang') testmod('mercurial.smartset') testmod('mercurial.store')
--- a/tests/test-encoding-align.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-encoding-align.t Tue Feb 19 21:55:05 2019 -0800 @@ -5,6 +5,7 @@ $ hg init t $ cd t $ "$PYTHON" << EOF + > from mercurial import pycompat > # (byte, width) = (6, 4) > s = b"\xe7\x9f\xad\xe5\x90\x8d" > # (byte, width) = (7, 7): odd width is good for alignment test @@ -21,14 +22,17 @@ > command = registrar.command(cmdtable) > > @command(b'showoptlist', - > [('s', 'opt1', '', 'short width' + ' %(s)s' * 8, '%(s)s'), - > ('m', 'opt2', '', 'middle width' + ' %(m)s' * 8, '%(m)s'), - > ('l', 'opt3', '', 'long width' + ' %(l)s' * 8, '%(l)s')], - > '') + > [(b's', b'opt1', b'', b'short width' + (b' ' +%(s)s) * 8, %(s)s), + > (b'm', b'opt2', b'', b'middle width' + (b' ' + %(m)s) * 8, %(m)s), + > (b'l', b'opt3', b'', b'long width' + (b' ' + %(l)s) * 8, %(l)s)], + > b'') > def showoptlist(ui, repo, *pats, **opts): > '''dummy command to show option descriptions''' > return 0 - > """ % globals()) + > """ % {b's': pycompat.byterepr(s), + > b'm': pycompat.byterepr(m), + > b'l': pycompat.byterepr(l), + > }) > f.close() > EOF $ S=`cat s`
--- a/tests/test-extdiff.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-extdiff.t Tue Feb 19 21:55:05 2019 -0800 @@ -22,6 +22,10 @@ > opts.falabala = diffing > cmd.edspace = echo > opts.edspace = "name <user@example.com>" + > alabalaf = + > [merge-tools] + > alabalaf.executable = echo + > alabalaf.diffargs = diffing > EOF $ hg falabala @@ -48,6 +52,8 @@ -o --option OPT [+] pass option to comparison program -r --rev REV [+] revision -c --change REV change made by revision + --per-file compare each file instead of revision snapshots + --confirm prompt user before each external program invocation --patch compare patches for two revisions -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns @@ -128,6 +134,72 @@ diffing a.398e36faf9c6 a.5ab95fb166c4 [1] +Test --per-file option: + + $ hg up -q -C 3 + $ echo a2 > a + $ echo b2 > b + $ hg ci -d '3 0' -mtestmode1 + created new head + $ hg falabala -c 6 --per-file + diffing "*\\extdiff.*\\a.46c0e4daeb72\\a" "a.81906f2b98ac\\a" (glob) (windows !) + diffing */extdiff.*/a.46c0e4daeb72/a a.81906f2b98ac/a (glob) (no-windows !) + diffing "*\\extdiff.*\\a.46c0e4daeb72\\b" "a.81906f2b98ac\\b" (glob) (windows !) + diffing */extdiff.*/a.46c0e4daeb72/b a.81906f2b98ac/b (glob) (no-windows !) 
+ [1] + +Test --per-file option for gui tool: + + $ hg --config extdiff.gui.alabalaf=True alabalaf -c 6 --per-file --debug + diffing */extdiff.*/a.46c0e4daeb72/a a.81906f2b98ac/a (glob) + diffing */extdiff.*/a.46c0e4daeb72/b a.81906f2b98ac/b (glob) + making snapshot of 2 files from rev 46c0e4daeb72 + a + b + making snapshot of 2 files from rev 81906f2b98ac + a + b + running '* diffing * *' in * (backgrounded) (glob) + running '* diffing * *' in * (backgrounded) (glob) + cleaning up temp directory + [1] + +Test --per-file option for gui tool again: + + $ hg --config merge-tools.alabalaf.gui=True alabalaf -c 6 --per-file --debug + diffing */extdiff.*/a.46c0e4daeb72/* a.81906f2b98ac/* (glob) + diffing */extdiff.*/a.46c0e4daeb72/* a.81906f2b98ac/* (glob) + making snapshot of 2 files from rev 46c0e4daeb72 + a + b + making snapshot of 2 files from rev 81906f2b98ac + a + b + running '* diffing * *' in * (backgrounded) (glob) + running '* diffing * *' in * (backgrounded) (glob) + cleaning up temp directory + [1] + +Test --per-file and --confirm options: + + $ hg --config ui.interactive=True falabala -c 6 --per-file --confirm <<EOF + > n + > y + > EOF + diff a (1 of 2) [Yns?] n + diff b (2 of 2) [Yns?] y + diffing "*\\extdiff.*\\a.46c0e4daeb72\\b" "a.81906f2b98ac\\b" (glob) (windows !) + diffing */extdiff.*/a.46c0e4daeb72/b a.81906f2b98ac/b (glob) (no-windows !) + [1] + +Test --per-file and --confirm options with skipping: + + $ hg --config ui.interactive=True falabala -c 6 --per-file --confirm <<EOF + > s + > EOF + diff a (1 of 2) [Yns?] s + [1] + issue4463: usage of command line configuration without additional quoting $ cat <<EOF >> $HGRCPATH
--- a/tests/test-extension.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-extension.t Tue Feb 19 21:55:05 2019 -0800 @@ -610,7 +610,8 @@ > cmdtable = {} > command = registrar.command(cmdtable) > - > # demand import avoids failure of importing notexist here + > # demand import avoids failure of importing notexist here, but only on + > # Python 2. > import extlibroot.lsub1.lsub2.notexist > > @command(b'checkrelativity', [], norepo=True) @@ -622,7 +623,13 @@ > pass # intentional failure > NO_CHECK_EOF - $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.checkrelativity=$TESTTMP/checkrelativity.py checkrelativity) +Python 3's lazy importer verifies modules exist before returning the lazy +module stub. Our custom lazy importer for Python 2 always returns a stub. + + $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.checkrelativity=$TESTTMP/checkrelativity.py checkrelativity) || true + *** failed to import extension checkrelativity from $TESTTMP/checkrelativity.py: No module named 'extlibroot.lsub1.lsub2.notexist' (py3 !) + hg: unknown command 'checkrelativity' (py3 !) + (use 'hg help' for a list of commands) (py3 !) 
#endif @@ -633,7 +640,7 @@ Make sure a broken uisetup doesn't globally break hg: $ cat > $TESTTMP/baduisetup.py <<EOF > def uisetup(ui): - > 1/0 + > 1 / 0 > EOF Even though the extension fails during uisetup, hg is still basically usable: @@ -642,7 +649,7 @@ File "*/mercurial/extensions.py", line *, in _runuisetup (glob) uisetup(ui) File "$TESTTMP/baduisetup.py", line 2, in uisetup - 1/0 + 1 / 0 ZeroDivisionError: * by zero (glob) *** failed to set up extension baduisetup: * by zero (glob) Mercurial Distributed SCM (version *) (glob) @@ -681,13 +688,11 @@ > @command(b'debugfoobar', [], b'hg debugfoobar') > def debugfoobar(ui, repo, *args, **opts): > "yet another debug command" - > pass > @command(b'foo', [], b'hg foo') > def foo(ui, repo, *args, **opts): > """yet another foo command > This command has been DEPRECATED since forever. > """ - > pass > EOF $ debugpath=`pwd`/debugextension.py $ echo "debugextension = $debugpath" >> $HGRCPATH @@ -805,15 +810,28 @@ "-Npru". To select a different program, use the -p/--program option. The program - will be passed the names of two directories to compare. To pass additional - options to the program, use -o/--option. These will be passed before the - names of the directories to compare. + will be passed the names of two directories to compare, unless the --per- + file option is specified (see below). To pass additional options to the + program, use -o/--option. These will be passed before the names of the + directories or files to compare. When two revision arguments are given, then changes are shown between those revisions. If only one revision is specified then that revision is compared to the working directory, and, when no revisions are specified, the working directory files are compared to its parent. + The --per-file option runs the external program repeatedly on each file to + diff, instead of once on two directories. 
By default, this happens one by + one, where the next file diff is open in the external program only once + the previous external program (for the previous file diff) has exited. If + the external program has a graphical interface, it can open all the file + diffs at once instead of one by one. See 'hg help -e extdiff' for + information about how to tell Mercurial that a given program has a + graphical interface. + + The --confirm option will prompt the user before each invocation of the + external program. It is ignored if --per-file isn't specified. + (use 'hg help -e extdiff' to show help for the extdiff extension) options ([+] can be repeated): @@ -822,6 +840,8 @@ -o --option OPT [+] pass option to comparison program -r --rev REV [+] revision -c --change REV change made by revision + --per-file compare each file instead of revision snapshots + --confirm prompt user before each external program invocation --patch compare patches for two revisions -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns @@ -889,6 +909,20 @@ [diff-tools] kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child + If a program has a graphical interface, it might be interesting to tell + Mercurial about it. It will prevent the program from being mistakenly used in + a terminal-only environment (such as an SSH terminal session), and will make + 'hg extdiff --per-file' open multiple file diffs at once instead of one by one + (if you still want to open file diffs one by one, you can use the --confirm + option). + + Declaring that a tool has a graphical interface can be done with the "gui" + flag next to where "diffargs" are specified: + + [diff-tools] + kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child + kdiff3.gui = true + You can use -I/-X and list of file or directory names like normal 'hg diff' command. 
The extdiff extension makes snapshots of only needed files, so running the external diff program will actually be pretty fast (at least @@ -928,7 +962,6 @@ > @command(b'multirevs', [], b'ARG', norepo=True) > def multirevs(ui, repo, arg, *args, **opts): > """multirevs command""" - > pass > EOF $ echo "multirevs = multirevs.py" >> $HGRCPATH
--- a/tests/test-fastannotate-hg.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-fastannotate-hg.t Tue Feb 19 21:55:05 2019 -0800 @@ -593,7 +593,7 @@ $ rm baz $ hg annotate -ncr "wdir()" baz abort: $TESTTMP/repo/baz: $ENOENT$ (windows !) - abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !) + abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !) [255] annotate removed file @@ -601,7 +601,7 @@ $ hg rm baz $ hg annotate -ncr "wdir()" baz abort: $TESTTMP/repo/baz: $ENOENT$ (windows !) - abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !) + abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !) [255] Test annotate with whitespace options
--- a/tests/test-flagprocessor.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-flagprocessor.t Tue Feb 19 21:55:05 2019 -0800 @@ -209,11 +209,13 @@ _insertflagprocessor(flag, processor, _flagprocessors) File "*/mercurial/revlog.py", line *, in _insertflagprocessor (glob) raise error.Abort(msg) - Abort: cannot register multiple processors on flag '0x8'. + mercurial.error.Abort: b"cannot register multiple processors on flag '0x8'." (py3 !) + Abort: cannot register multiple processors on flag '0x8'. (no-py3 !) *** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'. $ hg st 2>&1 | egrep 'cannot register multiple processors|flagprocessorext' File "*/tests/flagprocessorext.py", line *, in extsetup (glob) - Abort: cannot register multiple processors on flag '0x8'. + mercurial.error.Abort: b"cannot register multiple processors on flag '0x8'." (py3 !) + Abort: cannot register multiple processors on flag '0x8'. (no-py3 !) *** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'. File "*/tests/flagprocessorext.py", line *, in b64decode (glob)
--- a/tests/test-grep.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-grep.t Tue Feb 19 21:55:05 2019 -0800 @@ -32,13 +32,27 @@ port:4:vaportight port:4:import/export +simple from subdirectory + + $ mkdir dir + $ cd dir + $ hg grep -r tip:0 port + port:4:export + port:4:vaportight + port:4:import/export + $ hg grep -r tip:0 port --config ui.relative-paths=yes + ../port:4:export + ../port:4:vaportight + ../port:4:import/export + $ cd .. + simple with color $ hg --config extensions.color= grep --config color.mode=ansi \ > --color=always port port -r tip:0 - \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mex\x1b[0;31;1mport\x1b[0m (esc) - \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mva\x1b[0;31;1mport\x1b[0might (esc) - \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mim\x1b[0;31;1mport\x1b[0m/ex\x1b[0;31;1mport\x1b[0m (esc) + \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m4\x1b[0m\x1b[0;36m:\x1b[0mex\x1b[0;31;1mport\x1b[0m (esc) + \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m4\x1b[0m\x1b[0;36m:\x1b[0mva\x1b[0;31;1mport\x1b[0might (esc) + \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m4\x1b[0m\x1b[0;36m:\x1b[0mim\x1b[0;31;1mport\x1b[0m/ex\x1b[0;31;1mport\x1b[0m (esc) simple templated @@ -285,6 +299,15 @@ color:3:+:orange color:2:-:orange color:1:+:orange + $ hg grep --diff orange --color=debug + [grep.filename|color][grep.sep|:][grep.rev|3][grep.sep|:][grep.inserted grep.change|+][grep.sep|:][grep.match|orange] + [grep.filename|color][grep.sep|:][grep.rev|2][grep.sep|:][grep.deleted grep.change|-][grep.sep|:][grep.match|orange] + [grep.filename|color][grep.sep|:][grep.rev|1][grep.sep|:][grep.inserted grep.change|+][grep.sep|:][grep.match|orange] + + $ hg grep --diff orange --color=yes + \x1b[0;35mcolor\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m3\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32;1m+\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;31;1morange\x1b[0m (esc) + 
\x1b[0;35mcolor\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m2\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;31;1m-\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;31;1morange\x1b[0m (esc) + \x1b[0;35mcolor\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;34m1\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32;1m+\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;31;1morange\x1b[0m (esc) $ hg grep --diff orange color:3:+:orange @@ -503,5 +526,8 @@ $ hg grep -r "0:2" "unmod" --all-files um um:0:unmod um:1:unmod + $ hg grep -r "0:2" "unmod" --all-files "glob:**/um" # Check that patterns also work + um:0:unmod + um:1:unmod $ cd ..
--- a/tests/test-help.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-help.t Tue Feb 19 21:55:05 2019 -0800 @@ -825,7 +825,6 @@ > @command(b'hashelp', [], b'hg hashelp', norepo=True) > def hashelp(ui, *args, **kwargs): > """Extension command's help""" - > pass > > def uisetup(ui): > ui.setconfig(b'alias', b'shellalias', b'!echo hi', b'helpext') @@ -1014,6 +1013,8 @@ debugoptEXP (no help text available) debugpathcomplete complete part or all of a tracked path + debugpathcopies + show copies between two revisions debugpeer establish a connection to a peer repository debugpickmergetool examine which merge tool is chosen for specified file @@ -1672,7 +1673,7 @@ Test omit indicating for help $ cat > addverboseitems.py <<EOF - > '''extension to test omit indicating. + > r'''extension to test omit indicating. > > This paragraph is never omitted (for extension) > @@ -1685,7 +1686,7 @@ > ''' > from __future__ import absolute_import > from mercurial import commands, help - > testtopic = b"""This paragraph is never omitted (for topic). + > testtopic = br"""This paragraph is never omitted (for topic). > > .. container:: verbose >
--- a/tests/test-hgignore.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-hgignore.t Tue Feb 19 21:55:05 2019 -0800 @@ -356,7 +356,7 @@ $ rm dir1/.hgignore $ echo "dir1/file*" >> .hgignore $ hg debugignore "dir1\file2" - dir1\file2 is ignored + dir1/file2 is ignored (ignore rule in $TESTTMP\ignorerepo\.hgignore, line 4: 'dir1/file*') $ hg up -qC .
--- a/tests/test-hgweb-auth.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-hgweb-auth.py Tue Feb 19 21:55:05 2019 -0800 @@ -24,16 +24,26 @@ def writeauth(items): ui = origui.copy() for name, value in items.items(): - ui.setconfig('auth', name, value) + ui.setconfig(b'auth', name, value) return ui +def _stringifyauthinfo(ai): + if ai is None: + return ai + realm, authuris, user, passwd = ai + return (pycompat.strurl(realm), + [pycompat.strurl(u) for u in authuris], + pycompat.strurl(user), + pycompat.strurl(passwd), + ) + def test(auth, urls=None): print('CFG:', pycompat.sysstr(stringutil.pprint(auth, bprefix=True))) prefixes = set() for k in auth: - prefixes.add(k.split('.', 1)[0]) + prefixes.add(k.split(b'.', 1)[0]) for p in prefixes: - for name in ('.username', '.password'): + for name in (b'.username', b'.password'): if (p + name) not in auth: auth[p + name] = p auth = dict((k, v) for k, v in auth.items() if v is not None) @@ -41,106 +51,109 @@ ui = writeauth(auth) def _test(uri): - print('URI:', uri) + print('URI:', pycompat.strurl(uri)) try: pm = url.passwordmgr(ui, urlreq.httppasswordmgrwithdefaultrealm()) u, authinfo = util.url(uri).authinfo() if authinfo is not None: - pm.add_password(*authinfo) - print(' ', pm.find_user_password('test', u)) + pm.add_password(*_stringifyauthinfo(authinfo)) + print(' ', tuple(pycompat.strurl(a) for a in + pm.find_user_password('test', + pycompat.strurl(u)))) except error.Abort: print(' ','abort') if not urls: urls = [ - 'http://example.org/foo', - 'http://example.org/foo/bar', - 'http://example.org/bar', - 'https://example.org/foo', - 'https://example.org/foo/bar', - 'https://example.org/bar', - 'https://x@example.org/bar', - 'https://y@example.org/bar', + b'http://example.org/foo', + b'http://example.org/foo/bar', + b'http://example.org/bar', + b'https://example.org/foo', + b'https://example.org/foo/bar', + b'https://example.org/bar', + b'https://x@example.org/bar', + b'https://y@example.org/bar', ] for u in urls: 
_test(u) print('\n*** Test in-uri schemes\n') -test({'x.prefix': 'http://example.org'}) -test({'x.prefix': 'https://example.org'}) -test({'x.prefix': 'http://example.org', 'x.schemes': 'https'}) -test({'x.prefix': 'https://example.org', 'x.schemes': 'http'}) +test({b'x.prefix': b'http://example.org'}) +test({b'x.prefix': b'https://example.org'}) +test({b'x.prefix': b'http://example.org', b'x.schemes': b'https'}) +test({b'x.prefix': b'https://example.org', b'x.schemes': b'http'}) print('\n*** Test separately configured schemes\n') -test({'x.prefix': 'example.org', 'x.schemes': 'http'}) -test({'x.prefix': 'example.org', 'x.schemes': 'https'}) -test({'x.prefix': 'example.org', 'x.schemes': 'http https'}) +test({b'x.prefix': b'example.org', b'x.schemes': b'http'}) +test({b'x.prefix': b'example.org', b'x.schemes': b'https'}) +test({b'x.prefix': b'example.org', b'x.schemes': b'http https'}) print('\n*** Test prefix matching\n') -test({'x.prefix': 'http://example.org/foo', - 'y.prefix': 'http://example.org/bar'}) -test({'x.prefix': 'http://example.org/foo', - 'y.prefix': 'http://example.org/foo/bar'}) -test({'x.prefix': '*', 'y.prefix': 'https://example.org/bar'}) +test({b'x.prefix': b'http://example.org/foo', + b'y.prefix': b'http://example.org/bar'}) +test({b'x.prefix': b'http://example.org/foo', + b'y.prefix': b'http://example.org/foo/bar'}) +test({b'x.prefix': b'*', b'y.prefix': b'https://example.org/bar'}) print('\n*** Test user matching\n') -test({'x.prefix': 'http://example.org/foo', - 'x.username': None, - 'x.password': 'xpassword'}, - urls=['http://y@example.org/foo']) -test({'x.prefix': 'http://example.org/foo', - 'x.username': None, - 'x.password': 'xpassword', - 'y.prefix': 'http://example.org/foo', - 'y.username': 'y', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) -test({'x.prefix': 'http://example.org/foo/bar', - 'x.username': None, - 'x.password': 'xpassword', - 'y.prefix': 'http://example.org/foo', - 'y.username': 'y', - 'y.password': 
'ypassword'}, - urls=['http://y@example.org/foo/bar']) +test({b'x.prefix': b'http://example.org/foo', + b'x.username': None, + b'x.password': b'xpassword'}, + urls=[b'http://y@example.org/foo']) +test({b'x.prefix': b'http://example.org/foo', + b'x.username': None, + b'x.password': b'xpassword', + b'y.prefix': b'http://example.org/foo', + b'y.username': b'y', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) +test({b'x.prefix': b'http://example.org/foo/bar', + b'x.username': None, + b'x.password': b'xpassword', + b'y.prefix': b'http://example.org/foo', + b'y.username': b'y', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo/bar']) print('\n*** Test user matching with name in prefix\n') # prefix, username and URL have the same user -test({'x.prefix': 'https://example.org/foo', - 'x.username': None, - 'x.password': 'xpassword', - 'y.prefix': 'http://y@example.org/foo', - 'y.username': 'y', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'x.prefix': b'https://example.org/foo', + b'x.username': None, + b'x.password': b'xpassword', + b'y.prefix': b'http://y@example.org/foo', + b'y.username': b'y', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix has a different user from username and URL -test({'y.prefix': 'http://z@example.org/foo', - 'y.username': 'y', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'y.prefix': b'http://z@example.org/foo', + b'y.username': b'y', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix has a different user from URL; no username -test({'y.prefix': 'http://z@example.org/foo', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'y.prefix': b'http://z@example.org/foo', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix and URL have same user, but doesn't match username -test({'y.prefix': 'http://y@example.org/foo', - 'y.username': 'z', - 
'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'y.prefix': b'http://y@example.org/foo', + b'y.username': b'z', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix and URL have the same user; no username -test({'y.prefix': 'http://y@example.org/foo', - 'y.password': 'ypassword'}, - urls=['http://y@example.org/foo']) +test({b'y.prefix': b'http://y@example.org/foo', + b'y.password': b'ypassword'}, + urls=[b'http://y@example.org/foo']) # Prefix user, but no URL user or username -test({'y.prefix': 'http://y@example.org/foo', - 'y.password': 'ypassword'}, - urls=['http://example.org/foo']) +test({b'y.prefix': b'http://y@example.org/foo', + b'y.password': b'ypassword'}, + urls=[b'http://example.org/foo']) def testauthinfo(fullurl, authurl): print('URIs:', fullurl, authurl) pm = urlreq.httppasswordmgrwithdefaultrealm() - pm.add_password(*util.url(fullurl).authinfo()[1]) + ai = _stringifyauthinfo(util.url(pycompat.bytesurl(fullurl)).authinfo()[1]) + pm.add_password(*ai) print(pm.find_user_password('test', authurl)) print('\n*** Test urllib2 and util.url\n')
--- a/tests/test-hgweb-json.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-hgweb-json.t Tue Feb 19 21:55:05 2019 -0800 @@ -2196,7 +2196,8 @@ Commit message with Japanese Kanji 'Noh', which ends with '\x5c' $ echo foo >> da/foo - $ HGENCODING=cp932 hg ci -m `"$PYTHON" -c 'print("\x94\x5c")'` + >>> open('msg', 'wb').write(b'\x94\x5c\x0a') and None + $ HGENCODING=cp932 hg ci -l msg Commit message with null character
--- a/tests/test-hgweb-no-request-uri.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-hgweb-no-request-uri.t Tue Feb 19 21:55:05 2019 -0800 @@ -62,12 +62,12 @@ > output = stringio() > env['PATH_INFO'] = '/' > env['QUERY_STRING'] = 'style=atom' - > process(hgweb.hgweb(b'.', name = b'repo')) + > process(hgweb.hgweb(b'.', name=b'repo')) > > output = stringio() > env['PATH_INFO'] = '/file/tip/' > env['QUERY_STRING'] = 'style=raw' - > process(hgweb.hgweb(b'.', name = b'repo')) + > process(hgweb.hgweb(b'.', name=b'repo')) > > output = stringio() > env['PATH_INFO'] = '/'
--- a/tests/test-hgweb.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-hgweb.t Tue Feb 19 21:55:05 2019 -0800 @@ -910,7 +910,8 @@ errors - $ cat errors.log + $ cat errors.log | "$PYTHON" $TESTDIR/filtertraceback.py + $ rm -f errors.log Uncaught exceptions result in a logged error and canned HTTP response @@ -925,8 +926,11 @@ [1] $ killdaemons.py - $ head -1 errors.log + $ cat errors.log | "$PYTHON" $TESTDIR/filtertraceback.py .* Exception happened during processing request '/raiseerror': (re) + Traceback (most recent call last): + AttributeError: I am an uncaught error! + Uncaught exception after partial content sent
--- a/tests/test-highlight.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-highlight.t Tue Feb 19 21:55:05 2019 -0800 @@ -19,7 +19,7 @@ create random Python file to exercise Pygments - $ cat <<EOF > primes.py + $ cat <<NO_CHECK_EOF > primes.py > """Fun with generators. Corresponding Haskell implementation: > > primes = 2 : sieve [3, 5..] @@ -51,7 +51,7 @@ > n = 10 > p = primes() > print("The first %d primes: %s" % (n, list(itertools.islice(p, n)))) - > EOF + > NO_CHECK_EOF $ echo >> primes.py # to test html markup with an empty line just before EOF $ hg ci -Ama adding primes.py
--- a/tests/test-histedit-arguments.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-histedit-arguments.t Tue Feb 19 21:55:05 2019 -0800 @@ -362,7 +362,7 @@ $ hg histedit --abort warning: encountered an exception during histedit --abort; the repository may not have been completely cleaned up abort: $TESTTMP/foo/.hg/strip-backup/*-histedit.hg: $ENOENT$ (glob) (windows !) - abort: $ENOENT$: $TESTTMP/foo/.hg/strip-backup/*-histedit.hg (glob) (no-windows !) + abort: $ENOENT$: '$TESTTMP/foo/.hg/strip-backup/*-histedit.hg' (glob) (no-windows !) [255] Histedit state has been exited $ hg summary -q
--- a/tests/test-histedit-commute.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-histedit-commute.t Tue Feb 19 21:55:05 2019 -0800 @@ -52,6 +52,7 @@ summary: a + show the edit commands offered $ HGEDITOR=cat hg histedit 177f92b77385 pick 177f92b77385 2 c @@ -76,6 +77,33 @@ # r, roll = like fold, but discard this commit's description and date # + +test customization of revision summary + $ HGEDITOR=cat hg histedit 177f92b77385 \ + > --config histedit.summary-template='I am rev {rev} desc {desc} tags {tags}' + pick 177f92b77385 I am rev 2 desc c tags + pick 055a42cdd887 I am rev 3 desc d tags + pick e860deea161a I am rev 4 desc e tags + pick 652413bf663e I am rev 5 desc f tags tip + + # Edit history between 177f92b77385 and 652413bf663e + # + # Commits are listed from least to most recent + # + # You can reorder changesets by reordering the lines + # + # Commands: + # + # e, edit = use commit, but stop for amending + # m, mess = edit commit message without changing commit content + # p, pick = use commit + # b, base = checkout changeset and apply further changesets from there + # d, drop = remove commit from history + # f, fold = use commit, but combine it with the one above + # r, roll = like fold, but discard this commit's description and date + # + + edit the history (use a hacky editor to check histedit-last-edit.txt backup) @@ -142,6 +170,7 @@ summary: a + put things back $ hg histedit 177f92b77385 --commands - 2>&1 << EOF | fixbundle @@ -184,6 +213,7 @@ summary: a + slightly different this time $ hg histedit 177f92b77385 --commands - << EOF 2>&1 | fixbundle @@ -225,6 +255,7 @@ summary: a + keep prevents stripping dead revs $ hg histedit 799205341b6b --keep --commands - 2>&1 << EOF | fixbundle > pick 799205341b6b d @@ -276,6 +307,7 @@ summary: a + try with --rev $ hg histedit --commands - --rev -2 2>&1 <<EOF | fixbundle > pick de71b079d9ce e @@ -326,6 +358,7 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: a + Verify that revsetalias entries work with 
histedit: $ cat >> $HGRCPATH <<EOF > [revsetalias] @@ -355,6 +388,7 @@ # r, roll = like fold, but discard this commit's description and date # + should also work if a commit message is missing $ BUNDLE="$TESTDIR/missing-comment.hg" $ hg init missing @@ -384,6 +418,7 @@ date: Mon Nov 28 16:35:28 2011 +0000 summary: Checked in text file + $ hg histedit 0 $ cd .. @@ -440,6 +475,7 @@ @@ -0,0 +1,1 @@ +changed + $ hg --config diff.git=yes export 1 # HG changeset patch # User test @@ -453,6 +489,7 @@ rename from another-dir/initial-file rename to another-dir/renamed-file + $ cd .. Test that branches are preserved and stays active
--- a/tests/test-histedit-edit.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-histedit-edit.t Tue Feb 19 21:55:05 2019 -0800 @@ -370,9 +370,9 @@ HG: branch 'default' HG: added f ==== - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt @@ -394,9 +394,9 @@ HG: user: test HG: branch 'default' HG: added f - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit.unexpectedabort hook exited with status 1 [255]
--- a/tests/test-hook.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-hook.t Tue Feb 19 21:55:05 2019 -0800 @@ -14,32 +14,63 @@ $ cd a $ cat > .hg/hgrc <<EOF > [hooks] - > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit" - > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b" - > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit" - > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit" + > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py --line commit" + > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py --line commit.b" + > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py --line precommit" + > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxncommit" > pretxncommit.tip = hg -q tip - > pre-identify = sh -c "printenv.py pre-identify 1" - > pre-cat = sh -c "printenv.py pre-cat" - > post-cat = sh -c "printenv.py post-cat" - > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen" - > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose" - > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose" + > pre-identify = sh -c "printenv.py --line pre-identify 1" + > pre-cat = sh -c "printenv.py --line pre-cat" + > post-cat = sh -c "printenv.py --line post-cat" + > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxnopen" + > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxnclose" + > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py --line txnclose" > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs - > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort" + > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py --line txnabort" > txnclose.checklock = sh -c "hg debuglock > /dev/null" > EOF $ echo a > a $ hg add a $ hg commit -m a - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=0000000000000000000000000000000000000000 - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit 
hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=0000000000000000000000000000000000000000 + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PARENT1=0000000000000000000000000000000000000000 + HG_PENDING=$TESTTMP/a + 0:cb9a9f314b8b - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - txnclose hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 + pretxnclose hook: HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_PHASES_MOVED=1 + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_PHASES_MOVED=1 + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PARENT1=0000000000000000000000000000000000000000 + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PARENT1=0000000000000000000000000000000000000000 + $ hg clone . 
../b updating to branch default @@ -50,9 +81,9 @@ $ cat > .hg/hgrc <<EOF > [hooks] - > prechangegroup = sh -c "printenv.py prechangegroup" - > changegroup = sh -c "printenv.py changegroup" - > incoming = sh -c "printenv.py incoming" + > prechangegroup = sh -c "printenv.py --line prechangegroup" + > changegroup = sh -c "printenv.py --line changegroup" + > incoming = sh -c "printenv.py --line incoming" > EOF pretxncommit and commit hooks can see both parents of merge @@ -60,103 +91,309 @@ $ cd ../a $ echo b >> a $ hg commit -m a1 -d "1 0" - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PENDING=$TESTTMP/a + 1:ab228980c14d - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - txnclose hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + pretxnclose hook: HG_HOOKNAME=pretxnclose + 
HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b > b $ hg add b $ hg commit -m b -d '1 0' - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + HG_PENDING=$TESTTMP/a + 2:ee9deb46ab31 - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=commit + pretxnclose hook: HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + created new head - txnclose hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 
HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b + $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m merge -d '2 0' - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + HG_PENDING=$TESTTMP/a + 3:07f3376c1e65 - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - txnclose 
hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + pretxnclose hook: HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd + test generic hooks $ hg id - pre-identify hook: HG_ARGS=id HG_HOOKNAME=pre-identify HG_HOOKTYPE=pre-identify HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None, 'template': ''} HG_PATS=[] + pre-identify hook: HG_ARGS=id + HG_HOOKNAME=pre-identify + HG_HOOKTYPE=pre-identify + HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None, 'template': ''} + HG_PATS=[] + abort: pre-identify hook exited with status 1 [255] $ hg cat b - pre-cat hook: HG_ARGS=cat b HG_HOOKNAME=pre-cat HG_HOOKTYPE=pre-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} HG_PATS=['b'] + pre-cat hook: HG_ARGS=cat b + 
HG_HOOKNAME=pre-cat + HG_HOOKTYPE=pre-cat + HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} + HG_PATS=['b'] + b - post-cat hook: HG_ARGS=cat b HG_HOOKNAME=post-cat HG_HOOKTYPE=post-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} HG_PATS=['b'] HG_RESULT=0 + post-cat hook: HG_ARGS=cat b + HG_HOOKNAME=post-cat + HG_HOOKTYPE=post-cat + HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} + HG_PATS=['b'] + HG_RESULT=0 + $ cd ../b $ hg pull ../a pulling from ../a searching for changes - prechangegroup hook: HG_HOOKNAME=prechangegroup HG_HOOKTYPE=prechangegroup HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + prechangegroup hook: HG_HOOKNAME=prechangegroup + HG_HOOKTYPE=prechangegroup + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/a + adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files new changesets ab228980c14d:07f3376c1e65 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a - incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + 
HG_URL=file:$TESTTMP/a + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/a + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/a + + incoming hook: HG_HOOKNAME=incoming + HG_HOOKTYPE=incoming + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/a + (run 'hg update' to get a working copy) tag hooks can see env vars $ cd ../a $ cat >> .hg/hgrc <<EOF - > pretag = sh -c "printenv.py pretag" - > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag" + > pretag = sh -c "printenv.py --line pretag" + > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py --line tag" > EOF $ hg tag -d '3 0' a - pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a + pretag hook: HG_HOOKNAME=pretag + HG_HOOKTYPE=pretag + HG_LOCAL=0 + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_TAG=a + + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_PENDING=$TESTTMP/a + 
4:539e4b31b6dc - pretxnclose hook: HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - tag hook: HG_HOOKNAME=tag HG_HOOKTYPE=tag HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a - txnclose hook: HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - commit hook: HG_HOOKNAME=commit HG_HOOKTYPE=commit HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 - commit.b hook: HG_HOOKNAME=commit.b HG_HOOKTYPE=commit HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + pretxnclose hook: HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + tag hook: HG_HOOKNAME=tag + HG_HOOKTYPE=tag + HG_LOCAL=0 + HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 + HG_TAG=a + + txnclose hook: HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + commit hook: HG_HOOKNAME=commit + HG_HOOKTYPE=commit + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + + commit.b hook: HG_HOOKNAME=commit.b + HG_HOOKTYPE=commit + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 + $ hg tag -l la - pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la - tag hook: HG_HOOKNAME=tag HG_HOOKTYPE=tag HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la + pretag hook: HG_HOOKNAME=pretag + HG_HOOKTYPE=pretag + HG_LOCAL=1 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=la + + tag hook: HG_HOOKNAME=tag + HG_HOOKTYPE=tag + HG_LOCAL=1 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=la + pretag hook can forbid tagging $ cat >> .hg/hgrc <<EOF - > pretag.forbid = sh -c "printenv.py pretag.forbid 1" + > pretag.forbid = sh -c 
"printenv.py --line pretag.forbid 1" > EOF $ hg tag -d '4 0' fa - pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa - pretag.forbid hook: HG_HOOKNAME=pretag.forbid HG_HOOKTYPE=pretag HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa + pretag hook: HG_HOOKNAME=pretag + HG_HOOKTYPE=pretag + HG_LOCAL=0 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=fa + + pretag.forbid hook: HG_HOOKNAME=pretag.forbid + HG_HOOKTYPE=pretag + HG_LOCAL=0 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=fa + abort: pretag.forbid hook exited with status 1 [255] $ hg tag -l fla - pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla - pretag.forbid hook: HG_HOOKNAME=pretag.forbid HG_HOOKTYPE=pretag HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla + pretag hook: HG_HOOKNAME=pretag + HG_HOOKTYPE=pretag + HG_LOCAL=1 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=fla + + pretag.forbid hook: HG_HOOKNAME=pretag.forbid + HG_HOOKTYPE=pretag + HG_LOCAL=1 + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_TAG=fla + abort: pretag.forbid hook exited with status 1 [255] @@ -165,22 +402,43 @@ $ cat >> .hg/hgrc <<EOF > pretxncommit.forbid0 = sh -c "hg tip -q" - > pretxncommit.forbid1 = sh -c "printenv.py pretxncommit.forbid 1" + > pretxncommit.forbid1 = sh -c "printenv.py --line pretxncommit.forbid 1" > EOF $ echo z > z $ hg add z $ hg -q tip 4:539e4b31b6dc $ hg commit -m 'fail' -d '4 0' - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=commit - pretxncommit hook: HG_HOOKNAME=pretxncommit HG_HOOKTYPE=pretxncommit HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 
HG_PENDING=$TESTTMP/a + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + + pretxncommit hook: HG_HOOKNAME=pretxncommit + HG_HOOKTYPE=pretxncommit + HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PENDING=$TESTTMP/a + 5:6f611f8018c1 5:6f611f8018c1 - pretxncommit.forbid hook: HG_HOOKNAME=pretxncommit.forbid1 HG_HOOKTYPE=pretxncommit HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a + pretxncommit.forbid hook: HG_HOOKNAME=pretxncommit.forbid1 + HG_HOOKTYPE=pretxncommit + HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PENDING=$TESTTMP/a + transaction abort! txnabort Python hook: txnid,txnname - txnabort hook: HG_HOOKNAME=txnabort.1 HG_HOOKTYPE=txnabort HG_TXNID=TXN:$ID$ HG_TXNNAME=commit + txnabort hook: HG_HOOKNAME=txnabort.1 + HG_HOOKTYPE=txnabort + HG_TXNID=TXN:$ID$ + HG_TXNNAME=commit + rollback completed abort: pretxncommit.forbid1 hook exited with status 1 [255] @@ -205,11 +463,17 @@ precommit hook can prevent commit $ cat >> .hg/hgrc <<EOF - > precommit.forbid = sh -c "printenv.py precommit.forbid 1" + > precommit.forbid = sh -c "printenv.py --line precommit.forbid 1" > EOF $ hg commit -m 'fail' -d '4 0' - precommit hook: HG_HOOKNAME=precommit HG_HOOKTYPE=precommit HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 - precommit.forbid hook: HG_HOOKNAME=precommit.forbid HG_HOOKTYPE=precommit HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + precommit hook: HG_HOOKNAME=precommit + HG_HOOKTYPE=precommit + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + + precommit.forbid hook: HG_HOOKNAME=precommit.forbid + HG_HOOKTYPE=precommit + HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + abort: 
precommit.forbid hook exited with status 1 [255] $ hg -q tip @@ -218,26 +482,36 @@ preupdate hook can prevent update $ cat >> .hg/hgrc <<EOF - > preupdate = sh -c "printenv.py preupdate" + > preupdate = sh -c "printenv.py --line preupdate" > EOF $ hg update 1 - preupdate hook: HG_HOOKNAME=preupdate HG_HOOKTYPE=preupdate HG_PARENT1=ab228980c14d + preupdate hook: HG_HOOKNAME=preupdate + HG_HOOKTYPE=preupdate + HG_PARENT1=ab228980c14d + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved update hook $ cat >> .hg/hgrc <<EOF - > update = sh -c "printenv.py update" + > update = sh -c "printenv.py --line update" > EOF $ hg update - preupdate hook: HG_HOOKNAME=preupdate HG_HOOKTYPE=preupdate HG_PARENT1=539e4b31b6dc - update hook: HG_ERROR=0 HG_HOOKNAME=update HG_HOOKTYPE=update HG_PARENT1=539e4b31b6dc + preupdate hook: HG_HOOKNAME=preupdate + HG_HOOKTYPE=preupdate + HG_PARENT1=539e4b31b6dc + + update hook: HG_ERROR=0 + HG_HOOKNAME=update + HG_HOOKTYPE=update + HG_PARENT1=539e4b31b6dc + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved pushkey hook $ cat >> .hg/hgrc <<EOF - > pushkey = sh -c "printenv.py pushkey" + > pushkey = sh -c "printenv.py --line pushkey" > EOF $ cd ../b $ hg bookmark -r null foo @@ -245,10 +519,41 @@ pushing to ../a searching for changes no changes found - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=push - pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_TXNNAME=push HG_URL=file:$TESTTMP/a - pushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=pushkey HG_HOOKTYPE=pushkey HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_PUSHKEYCOMPAT=1 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a - txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_TXNNAME=push 
HG_URL=file:$TESTTMP/a + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + + pretxnclose hook: HG_BOOKMARK_MOVED=1 + HG_BUNDLE2=1 + HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_SOURCE=push + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + HG_URL=file:$TESTTMP/a + + pushkey hook: HG_BUNDLE2=1 + HG_HOOKNAME=pushkey + HG_HOOKTYPE=pushkey + HG_KEY=foo + HG_NAMESPACE=bookmarks + HG_NEW=0000000000000000000000000000000000000000 + HG_PUSHKEYCOMPAT=1 + HG_SOURCE=push + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/a + + txnclose hook: HG_BOOKMARK_MOVED=1 + HG_BUNDLE2=1 + HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_SOURCE=push + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + HG_URL=file:$TESTTMP/a + exporting bookmark foo [1] $ cd ../a @@ -256,16 +561,35 @@ listkeys hook $ cat >> .hg/hgrc <<EOF - > listkeys = sh -c "printenv.py listkeys" + > listkeys = sh -c "printenv.py --line listkeys" > EOF $ hg bookmark -r null bar - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark - pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark - txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + + pretxnclose hook: HG_BOOKMARK_MOVED=1 + HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + + txnclose hook: HG_BOOKMARK_MOVED=1 + HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + $ cd ../b $ hg pull -B bar ../a pulling from ../a - listkeys hook: HG_HOOKNAME=listkeys HG_HOOKTYPE=listkeys HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': 
'0000000000000000000000000000000000000000'} + listkeys hook: HG_HOOKNAME=listkeys + HG_HOOKTYPE=listkeys + HG_NAMESPACE=bookmarks + HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} + no changes found adding remote bookmark bar $ cd ../a @@ -273,18 +597,40 @@ test that prepushkey can prevent incoming keys $ cat >> .hg/hgrc <<EOF - > prepushkey = sh -c "printenv.py prepushkey.forbid 1" + > prepushkey = sh -c "printenv.py --line prepushkey.forbid 1" > EOF $ cd ../b $ hg bookmark -r null baz $ hg push -B baz ../a pushing to ../a searching for changes - listkeys hook: HG_HOOKNAME=listkeys HG_HOOKTYPE=listkeys HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'} - listkeys hook: HG_HOOKNAME=listkeys HG_HOOKTYPE=listkeys HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} + listkeys hook: HG_HOOKNAME=listkeys + HG_HOOKTYPE=listkeys + HG_NAMESPACE=phases + HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'} + + listkeys hook: HG_HOOKNAME=listkeys + HG_HOOKTYPE=listkeys + HG_NAMESPACE=bookmarks + HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} + no changes found - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=push - prepushkey.forbid hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_PUSHKEYCOMPAT=1 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=push + + prepushkey.forbid hook: HG_BUNDLE2=1 + HG_HOOKNAME=prepushkey + HG_HOOKTYPE=prepushkey + HG_KEY=baz + HG_NAMESPACE=bookmarks + HG_NEW=0000000000000000000000000000000000000000 + 
HG_PUSHKEYCOMPAT=1 + HG_SOURCE=push + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/a + abort: prepushkey hook exited with status 1 [255] $ cd ../a @@ -292,16 +638,34 @@ test that prelistkeys can prevent listing keys $ cat >> .hg/hgrc <<EOF - > prelistkeys = sh -c "printenv.py prelistkeys.forbid 1" + > prelistkeys = sh -c "printenv.py --line prelistkeys.forbid 1" > EOF $ hg bookmark -r null quux - pretxnopen hook: HG_HOOKNAME=pretxnopen HG_HOOKTYPE=pretxnopen HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark - pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=pretxnclose HG_HOOKTYPE=pretxnclose HG_PENDING=$TESTTMP/a HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark - txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark + pretxnopen hook: HG_HOOKNAME=pretxnopen + HG_HOOKTYPE=pretxnopen + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + + pretxnclose hook: HG_BOOKMARK_MOVED=1 + HG_HOOKNAME=pretxnclose + HG_HOOKTYPE=pretxnclose + HG_PENDING=$TESTTMP/a + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + + txnclose hook: HG_BOOKMARK_MOVED=1 + HG_HOOKNAME=txnclose + HG_HOOKTYPE=txnclose + HG_TXNID=TXN:$ID$ + HG_TXNNAME=bookmark + $ cd ../b $ hg pull -B quux ../a pulling from ../a - prelistkeys.forbid hook: HG_HOOKNAME=prelistkeys HG_HOOKTYPE=prelistkeys HG_NAMESPACE=bookmarks + prelistkeys.forbid hook: HG_HOOKNAME=prelistkeys + HG_HOOKTYPE=prelistkeys + HG_NAMESPACE=bookmarks + abort: prelistkeys hook exited with status 1 [255] $ cd ../a @@ -314,12 +678,17 @@ 3:07f3376c1e65 $ cat > .hg/hgrc <<EOF > [hooks] - > prechangegroup.forbid = sh -c "printenv.py prechangegroup.forbid 1" + > prechangegroup.forbid = sh -c "printenv.py --line prechangegroup.forbid 1" > EOF $ hg pull ../a pulling from ../a searching for changes - prechangegroup.forbid hook: HG_HOOKNAME=prechangegroup.forbid HG_HOOKTYPE=prechangegroup HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + prechangegroup.forbid hook: HG_HOOKNAME=prechangegroup.forbid + HG_HOOKTYPE=prechangegroup + 
HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/a + abort: prechangegroup.forbid hook exited with status 1 [255] @@ -329,7 +698,7 @@ $ cat > .hg/hgrc <<EOF > [hooks] > pretxnchangegroup.forbid0 = hg tip -q - > pretxnchangegroup.forbid1 = sh -c "printenv.py pretxnchangegroup.forbid 1" + > pretxnchangegroup.forbid1 = sh -c "printenv.py --line pretxnchangegroup.forbid 1" > EOF $ hg pull ../a pulling from ../a @@ -339,7 +708,15 @@ adding file changes added 1 changesets with 1 changes to 1 files 4:539e4b31b6dc - pretxnchangegroup.forbid hook: HG_HOOKNAME=pretxnchangegroup.forbid1 HG_HOOKTYPE=pretxnchangegroup HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/a + pretxnchangegroup.forbid hook: HG_HOOKNAME=pretxnchangegroup.forbid1 + HG_HOOKTYPE=pretxnchangegroup + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_PENDING=$TESTTMP/b + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=file:$TESTTMP/a + transaction abort! 
rollback completed abort: pretxnchangegroup.forbid1 hook exited with status 1 @@ -352,14 +729,21 @@ $ rm .hg/hgrc $ cat > ../a/.hg/hgrc <<EOF > [hooks] - > preoutgoing = sh -c "printenv.py preoutgoing" - > outgoing = sh -c "printenv.py outgoing" + > preoutgoing = sh -c "printenv.py --line preoutgoing" + > outgoing = sh -c "printenv.py --line outgoing" > EOF $ hg pull ../a pulling from ../a searching for changes - preoutgoing hook: HG_HOOKNAME=preoutgoing HG_HOOKTYPE=preoutgoing HG_SOURCE=pull - outgoing hook: HG_HOOKNAME=outgoing HG_HOOKTYPE=outgoing HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull + preoutgoing hook: HG_HOOKNAME=preoutgoing + HG_HOOKTYPE=preoutgoing + HG_SOURCE=pull + + outgoing hook: HG_HOOKNAME=outgoing + HG_HOOKTYPE=outgoing + HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 + HG_SOURCE=pull + adding changesets adding manifests adding file changes @@ -373,13 +757,19 @@ preoutgoing hook can prevent outgoing changes $ cat >> ../a/.hg/hgrc <<EOF - > preoutgoing.forbid = sh -c "printenv.py preoutgoing.forbid 1" + > preoutgoing.forbid = sh -c "printenv.py --line preoutgoing.forbid 1" > EOF $ hg pull ../a pulling from ../a searching for changes - preoutgoing hook: HG_HOOKNAME=preoutgoing HG_HOOKTYPE=preoutgoing HG_SOURCE=pull - preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid HG_HOOKTYPE=preoutgoing HG_SOURCE=pull + preoutgoing hook: HG_HOOKNAME=preoutgoing + HG_HOOKTYPE=preoutgoing + HG_SOURCE=pull + + preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid + HG_HOOKTYPE=preoutgoing + HG_SOURCE=pull + abort: preoutgoing.forbid hook exited with status 1 [255] @@ -388,12 +778,19 @@ $ cd .. 
$ cat > a/.hg/hgrc <<EOF > [hooks] - > preoutgoing = sh -c "printenv.py preoutgoing" - > outgoing = sh -c "printenv.py outgoing" + > preoutgoing = sh -c "printenv.py --line preoutgoing" + > outgoing = sh -c "printenv.py --line outgoing" > EOF $ hg clone a c - preoutgoing hook: HG_HOOKNAME=preoutgoing HG_HOOKTYPE=preoutgoing HG_SOURCE=clone - outgoing hook: HG_HOOKNAME=outgoing HG_HOOKTYPE=outgoing HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone + preoutgoing hook: HG_HOOKNAME=preoutgoing + HG_HOOKTYPE=preoutgoing + HG_SOURCE=clone + + outgoing hook: HG_HOOKNAME=outgoing + HG_HOOKTYPE=outgoing + HG_NODE=0000000000000000000000000000000000000000 + HG_SOURCE=clone + updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf c @@ -401,11 +798,17 @@ preoutgoing hook can prevent outgoing changes for local clones $ cat >> a/.hg/hgrc <<EOF - > preoutgoing.forbid = sh -c "printenv.py preoutgoing.forbid 1" + > preoutgoing.forbid = sh -c "printenv.py --line preoutgoing.forbid 1" > EOF $ hg clone a zzz - preoutgoing hook: HG_HOOKNAME=preoutgoing HG_HOOKTYPE=preoutgoing HG_SOURCE=clone - preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid HG_HOOKTYPE=preoutgoing HG_SOURCE=clone + preoutgoing hook: HG_HOOKNAME=preoutgoing + HG_HOOKTYPE=preoutgoing + HG_SOURCE=clone + + preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid + HG_HOOKTYPE=preoutgoing + HG_SOURCE=clone + abort: preoutgoing.forbid hook exited with status 1 [255] @@ -452,7 +855,7 @@ > def printtags(ui, repo, **args): > ui.write(b'[%s]\n' % b', '.join(sorted(repo.tags()))) > - > class container: + > class container(object): > unreachable = 1 > EOF @@ -690,7 +1093,7 @@ $ hg up null loading update.ne hook failed: - abort: $ENOENT$: $TESTTMP/d/repo/nonexistent.py + abort: $ENOENT$: '$TESTTMP/d/repo/nonexistent.py' [255] $ hg id @@ -780,10 +1183,16 @@ $ cd .. 
$ cat << EOF >> hgrc-with-post-init-hook > [hooks] - > post-init = sh -c "printenv.py post-init" + > post-init = sh -c "printenv.py --line post-init" > EOF $ HGRCPATH=hgrc-with-post-init-hook hg init to - post-init hook: HG_ARGS=init to HG_HOOKNAME=post-init HG_HOOKTYPE=post-init HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} HG_PATS=['to'] HG_RESULT=0 + post-init hook: HG_ARGS=init to + HG_HOOKNAME=post-init + HG_HOOKTYPE=post-init + HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} + HG_PATS=['to'] + HG_RESULT=0 + new commits must be visible in pretxnchangegroup (issue3428)
--- a/tests/test-http-api-httpv2.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-http-api-httpv2.t Tue Feb 19 21:55:05 2019 -0800 @@ -18,6 +18,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003 HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -46,6 +47,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/badcommand HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -67,6 +69,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -88,6 +91,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -110,6 +114,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: invalid\r\n @@ -134,6 +139,7 @@ > content-type: badmedia > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -160,6 +166,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'customreadonly'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> *\r\n (glob) @@ -196,6 +203,7 @@ > EOF creating http peer for wire protocol version 2 sending customreadonly command + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -216,23 +224,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 27\r\n s> \x1f\x00\x00\x01\x00\x02\x041 s> X\x1dcustomreadonly bytes response s> \r\n - received frame(size=31; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ b'customreadonly bytes response' ] @@ -247,6 +251,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003/rw/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -268,6 +273,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003/rw/badcommand HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -289,6 +295,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/rw/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -327,6 +334,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'customreadonly'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/rw/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -366,6 +374,7 @@ > accept: $MEDIATYPE > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/rw/badcommand HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -388,6 +397,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/debugreflect HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -428,6 +438,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'command1', b'args': {b'foo': b'val1', b'bar1': b'val'}} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/debugreflect HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -459,6 +470,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'customreadonly'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -501,6 +513,7 @@ > frame 3 1 0 command-request new cbor:{b'name': b'customreadonly'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/multirequest HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> *\r\n (glob) @@ -554,6 +567,7 @@ > frame 1 1 0 command-request continuation IbookmarksDnameHlistkeys > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/ro/multirequest HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -619,6 +633,7 @@ > frame 1 1 stream-begin command-request new cbor:{b'name': b'pushkey'} > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/multirequest HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -645,6 +660,7 @@ creating http peer for wire protocol version 2 sending heads command wire protocol version 2 encoder referenced in config (badencoder) is not known; ignoring + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -665,23 +681,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 1e\r\n s> \x16\x00\x00\x01\x00\x02\x041 s> \x81T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 s> \r\n - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: [ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' ] @@ -694,6 +706,7 @@ > EOF creating http peer for wire protocol version 2 sending heads command + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -714,12 +727,10 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hzstd-8mb s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 25\r\n s> \x1d\x00\x00\x01\x00\x02\x042 s> (\xb5/\xfd\x00P\xa4\x00\x00p\xa1FstatusBok\x81T\x00\x01\x00\tP\x02 s> \r\n - received frame(size=29; request=1; stream=2; streamflags=encoded; type=command-response; flags=eos) s> 0\r\n s> \r\n response: [
--- a/tests/test-http-api.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-http-api.t Tue Feb 19 21:55:05 2019 -0800 @@ -156,6 +156,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -177,6 +178,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/ HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -200,6 +202,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/unknown HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -222,6 +225,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/exp-http-v2-0003 HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -255,6 +259,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -276,6 +281,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/ HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n
--- a/tests/test-http-bad-server.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-http-bad-server.t Tue Feb 19 21:55:05 2019 -0800 @@ -94,7 +94,7 @@ $ cat error.log readline(40 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(7 from -1) -> (7) Accept- + readline(7 from *) -> (7) Accept- (glob) read limit reached; closing socket $ rm -f error.log @@ -111,28 +111,32 @@ $ cat error.log readline(210 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(177 from -1) -> (27) Accept-Encoding: identity\r\n - readline(150 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(115 from -1) -> (*) host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) - write(36) -> HTTP/1.1 200 Script output follows\r\n - write(23) -> Server: badhttpserver\r\n - write(37) -> Date: $HTTP_DATE$\r\n - write(41) -> Content-Type: application/mercurial-0.1\r\n - write(21) -> Content-Length: 450\r\n - write(2) -> \r\n - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(177 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(150 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(115 from *) -> (*) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) 
+ sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23) -> Server: badhttpserver\r\n (no-py3 !) + write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) + write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21) -> Content-Length: 450\r\n (no-py3 !) + write(2) -> \r\n (no-py3 !) + write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(4? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob) - readline(1? from -1) -> (1?) Accept-Encoding* (glob) + readline(1? from *) -> (1?) 
Accept-Encoding* (glob) read limit reached; closing socket readline(223 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(197 from -1) -> (27) Accept-Encoding: identity\r\n - readline(170 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(141 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(100 from -1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(39 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(4 from -1) -> (4) host + readline(197 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(170 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(141 from *) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(100 from *) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(39 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(4 from *) -> (4) host (glob) read limit reached; closing socket $ rm -f error.log @@ -152,46 +156,54 @@ readline(1 from -1) -> (1) x (?) readline(1 from -1) -> (1) x (?) 
readline(308 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(275 from -1) -> (27) Accept-Encoding: identity\r\n - readline(248 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(213 from -1) -> (*) host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) - write(36) -> HTTP/1.1 200 Script output follows\r\n - write(23) -> Server: badhttpserver\r\n - write(37) -> Date: $HTTP_DATE$\r\n - write(41) -> Content-Type: application/mercurial-0.1\r\n - write(21) -> Content-Length: 450\r\n - write(2) -> \r\n - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(275 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(248 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(213 from *) -> (*) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) 
+ write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23) -> Server: badhttpserver\r\n (no-py3 !) + write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) + write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21) -> Content-Length: 450\r\n (no-py3 !) + write(2) -> \r\n (no-py3 !) + write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(13? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob) - readline(1?? from -1) -> (27) Accept-Encoding: identity\r\n (glob) - readline(8? from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) - readline(5? from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) - readline(1? from -1) -> (1?) x-hgproto-1:* (glob) + readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(8? from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(5? from *) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(1? from *) -> (1?) 
x-hgproto-1:* (glob) read limit reached; closing socket readline(317 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(291 from -1) -> (27) Accept-Encoding: identity\r\n - readline(264 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(235 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(194 from -1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(133 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(98 from -1) -> (*) host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) - write(36) -> HTTP/1.1 200 Script output follows\r\n - write(23) -> Server: badhttpserver\r\n - write(37) -> Date: $HTTP_DATE$\r\n - write(41) -> Content-Type: application/mercurial-0.1\r\n - write(20) -> Content-Length: 42\r\n - write(2) -> \r\n - write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; + readline(291 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(264 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(235 from *) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(194 from *) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(133 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(98 from *) -> (*) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) + sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !) 
+ write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) + write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !) + write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23) -> Server: badhttpserver\r\n (no-py3 !) + write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) + write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20) -> Content-Length: 42\r\n (no-py3 !) + write(2) -> \r\n (no-py3 !) + write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !) readline(* from 65537) -> (*) GET /?cmd=getbundle HTTP* (glob) read limit reached; closing socket readline(304 from 65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n - readline(274 from -1) -> (27) Accept-Encoding: identity\r\n - readline(247 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(218 from -1) -> (218) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtag + readline(274 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(247 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(218 from *) -> (218) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtag (glob) read limit reached; closing socket $ rm -f error.log @@ -207,41 +219,50 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(329 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(296 from -1) -> (27) Accept-Encoding: identity\r\n - readline(269 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(234 from -1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) - write(36) -> HTTP/1.1 200 Script output follows\r\n - write(23) -> Server: badhttpserver\r\n - write(37) -> Date: $HTTP_DATE$\r\n - write(41) -> Content-Type: application/mercurial-0.1\r\n - write(21) -> Content-Length: 463\r\n - write(2) -> \r\n - write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(296 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(269 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(234 from *) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py36 !) + sendall(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py3 no-py36 !) + write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) 
+ write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23) -> Server: badhttpserver\r\n (no-py3 !) + write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) + write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21) -> Content-Length: 463\r\n (no-py3 !) + write(2) -> \r\n (no-py3 !) + write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(1?? from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n (glob) - readline(1?? from -1) -> (27) Accept-Encoding: identity\r\n (glob) - readline(1?? from -1) -> (41) content-type: application/mercurial-0.1\r\n (glob) - readline(6? from -1) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n (glob) - readline(3? from -1) -> (19) x-hgargs-post: 28\r\n (glob) - readline(1? from -1) -> (1?) x-hgproto-1: * (glob) + readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(1?? from *) -> (41) content-type: application/mercurial-0.1\r\n (glob) + readline(6? from *) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n (glob) + readline(3? from *) -> (19) x-hgargs-post: 28\r\n (glob) + readline(1? from *) -> (1?) 
x-hgproto-1: * (glob) read limit reached; closing socket readline(344 from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n - readline(317 from -1) -> (27) Accept-Encoding: identity\r\n - readline(290 from -1) -> (41) content-type: application/mercurial-0.1\r\n - readline(249 from -1) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n - readline(216 from -1) -> (19) x-hgargs-post: 28\r\n - readline(197 from -1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(136 from -1) -> (35) accept: application/mercurial-0.1\r\n - readline(101 from -1) -> (20) content-length: 28\r\n - readline(81 from -1) -> (*) host: localhost:$HGPORT\r\n (glob) - readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) - readline(* from -1) -> (2) \r\n (glob) + readline(317 from *) -> (27) Accept-Encoding: identity\r\n (glob) + readline(290 from *) -> (41) content-type: application/mercurial-0.1\r\n (glob) + readline(249 from *) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n (glob) + readline(216 from *) -> (19) x-hgargs-post: 28\r\n (glob) + readline(197 from *) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(136 from *) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(101 from *) -> (20) content-length: 28\r\n (glob) + readline(81 from *) -> (*) host: localhost:$HGPORT\r\n (glob) + readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(* from *) -> (2) \r\n (glob) read(* from 28) -> (*) cmds=* (glob) read limit reached, closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob) + Traceback (most recent call last): + Exception: connection closed after receiving N bytes + + write(126) -> HTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) 
+ write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -258,16 +279,23 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(1 from 36) -> (0) H + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(1 from 160) -> (0) H (py36 !) + write(1 from 160) -> (0) H (py3 no-py36 !) + write(1 from 36) -> (0) H (no-py3 !) write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(286) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -283,21 +311,29 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (144) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (121) Server: badhttpserver\r\n - write(37 from 37) -> (84) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (43) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (22) Content-Length: 450\r\n - write(2 from 2) -> (20) \r\n - write(20 from 450) -> (0) batch branchmap bund + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(20 from 450) -> (0) batch branchmap bund (py36 !) + write(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(20 from 450) -> (0) batch branchmap bund (py3 no-py36 !) + write(36 from 36) -> (144) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (121) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (84) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (43) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (22) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (20) \r\n (no-py3 !) + write(20 from 450) -> (0) batch branchmap bund (no-py3 !) 
write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + $ rm -f error.log @@ -318,35 +354,46 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (692) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (669) Server: badhttpserver\r\n - write(37 from 37) -> (632) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (591) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (570) Content-Length: 450\r\n - write(2 from 2) -> (568) \r\n - write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) 
+ sendall(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (692) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (669) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (632) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (591) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (570) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (568) \r\n (no-py3 !) + write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (82) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (59) Server: badhttpserver\r\n - write(37 from 37) -> (22) Date: $HTTP_DATE$\r\n - write(22 from 41) -> (0) Content-Type: applicat + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (py36 !) + write(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (py3 no-py36 !) + write(36 from 36) -> (82) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (59) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (22) Date: $HTTP_DATE$\r\n (no-py3 !) + write(22 from 41) -> (0) Content-Type: applicat (no-py3 !) 
write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(285) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -366,37 +413,49 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (757) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (734) Server: badhttpserver\r\n - write(37 from 37) -> (697) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (656) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (635) Content-Length: 450\r\n - write(2 from 2) -> (633) \r\n - write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (757) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (734) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (697) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (656) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (635) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (633) \r\n (no-py3 !) + write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) 
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (147) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (124) Server: badhttpserver\r\n - write(37 from 37) -> (87) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (46) Content-Type: application/mercurial-0.1\r\n - write(20 from 20) -> (26) Content-Length: 42\r\n - write(2 from 2) -> (24) \r\n - write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(159 from 159) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (py36 !) + write(159 from 159) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) + write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (py3 no-py36 !) 
+ write(36 from 36) -> (147) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (124) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (87) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (46) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20 from 20) -> (26) Content-Length: 42\r\n (no-py3 !) + write(2 from 2) -> (24) \r\n (no-py3 !) + write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (no-py3 !) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + $ rm -f error.log @@ -418,51 +477,66 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (904) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (881) Server: badhttpserver\r\n - write(37 from 37) -> (844) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (803) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (782) Content-Length: 450\r\n - write(2 from 2) -> (780) \r\n - write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (904) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (881) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (844) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (803) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (782) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (780) \r\n (no-py3 !) + write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) 
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (294) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (271) Server: badhttpserver\r\n - write(37 from 37) -> (234) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (193) Content-Type: application/mercurial-0.1\r\n - write(20 from 20) -> (173) Content-Length: 42\r\n - write(2 from 2) -> (171) \r\n - write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(159 from 159) -> (171) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !) + write(159 from 159) -> (171) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) 
+ write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !) + write(36 from 36) -> (294) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (271) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (234) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (193) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20 from 20) -> (173) Content-Length: 42\r\n (no-py3 !) + write(2 from 2) -> (171) \r\n (no-py3 !) + write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !) readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (93) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (70) Server: badhttpserver\r\n - write(37 from 37) -> (33) Date: $HTTP_DATE$\r\n - write(33 from 41) -> (0) Content-Type: application/mercuri + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (py36 !) + write(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (py3 no-py36 !) + write(36 from 36) -> (93) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (70) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (33) Date: $HTTP_DATE$\r\n (no-py3 !) + write(33 from 41) -> (0) Content-Type: application/mercuri (no-py3 !) 
write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -478,11 +552,20 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -4 error.log - write(41 from 41) -> (25) Content-Type: application/mercurial-0.2\r\n - write(25 from 28) -> (0) Transfer-Encoding: chunke - write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -3 + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -4 + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) 
+#endif $ rm -f error.log @@ -499,53 +582,68 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (942) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (919) Server: badhttpserver\r\n - write(37 from 37) -> (882) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (841) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (820) Content-Length: 450\r\n - write(2 from 2) -> (818) \r\n - write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) 
+ write(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (942) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (919) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (882) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (841) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (820) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (818) \r\n (no-py3 !) + write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (332) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (309) Server: badhttpserver\r\n - write(37 from 37) -> (272) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (231) Content-Type: application/mercurial-0.1\r\n - write(20 from 20) -> (211) Content-Length: 42\r\n - write(2 from 2) -> (209) \r\n - write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(159 from 159) -> (209) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !) + write(159 from 159) -> (209) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) + write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !) + write(36 from 36) -> (332) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (309) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (272) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (231) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20 from 20) -> (211) Content-Length: 42\r\n (no-py3 !) 
+ write(2 from 2) -> (209) \r\n (no-py3 !) + write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !) readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (131) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (108) Server: badhttpserver\r\n - write(37 from 37) -> (71) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (30) Content-Type: application/mercurial-0.2\r\n - write(28 from 28) -> (2) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (0) \r\n + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (461) x-hgarg-1: 
bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py36 !) + write(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36 from 36) -> (131) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (108) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (71) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (30) Content-Type: application/mercurial-0.2\r\n (no-py3 !) + write(28 from 28) -> (2) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (0) \r\n (no-py3 !) 
write limit reached; closing socket - write(36) -> HTTP/1.1 500 Internal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -562,56 +660,72 @@ $ killdaemons.py $DAEMON_PIDS - $ cat error.log + $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (966) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (943) Server: badhttpserver\r\n - write(37 from 37) -> (906) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (865) Content-Type: application/mercurial-0.1\r\n - write(21 from 21) -> (844) Content-Length: 450\r\n - write(2 from 2) -> (842) \r\n - write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) + sendall(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) + write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (966) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (943) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (906) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (865) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (844) Content-Length: 450\r\n (no-py3 !) + write(2 from 2) -> (842) \r\n (no-py3 !) + write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) 
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (356) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (333) Server: badhttpserver\r\n - write(37 from 37) -> (296) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (255) Content-Type: application/mercurial-0.1\r\n - write(20 from 20) -> (235) Content-Length: 42\r\n - write(2 from 2) -> (233) \r\n - write(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(159 from 159) -> (233) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !) + sendall(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !) + write(159 from 159) -> (233) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !) 
+ write(36 from 36) -> (356) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (333) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (296) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (255) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(20 from 20) -> (235) Content-Length: 42\r\n (no-py3 !) + write(2 from 2) -> (233) \r\n (no-py3 !) + write(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !) readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n - readline(-1) -> (27) Accept-Encoding: identity\r\n - readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n - readline(-1) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n - readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n - readline(-1) -> (35) accept: application/mercurial-0.1\r\n - readline(-1) -> (2?) 
host: localhost:$HGPORT\r\n (glob) - readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n - readline(-1) -> (2) \r\n - write(36 from 36) -> (155) HTTP/1.1 200 Script output follows\r\n - write(23 from 23) -> (132) Server: badhttpserver\r\n - write(37 from 37) -> (95) Date: $HTTP_DATE$\r\n - write(41 from 41) -> (54) Content-Type: application/mercurial-0.2\r\n - write(28 from 28) -> (26) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (24) \r\n - write(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc) - write(9 from 9) -> (9) 4\r\nnone\r\n - write(9 from 9) -> (0) 4\r\nHG20\r\n + readline(*) -> (27) Accept-Encoding: identity\r\n (glob) + readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) + readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) + readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) + readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) + readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) + readline(*) -> (2) \r\n (glob) + sendall(167 from 167) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py36 !) + sendall(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc) (py36 !) + sendall(9 from 9) -> (9) 4\r\nnone\r\n (py36 !) + sendall(9 from 9) -> (0) 4\r\nHG20\r\n (py36 !) 
+ write(167 from 167) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(36 from 36) -> (155) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (132) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (95) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (54) Content-Type: application/mercurial-0.2\r\n (no-py3 !) + write(28 from 28) -> (26) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (24) \r\n (no-py3 !) + write(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc) (no-py3 !) + write(9 from 9) -> (9) 4\r\nnone\r\n (no-py3 !) + write(9 from 9) -> (0) 4\r\nHG20\r\n (no-py3 !) write limit reached; closing socket - write(27) -> 15\r\nInternal Server Error\r\n + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + write(27) -> 15\r\nInternal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -622,20 +736,41 @@ $ hg clone http://localhost:$HGPORT/ clone requesting all changes - abort: HTTP request error (incomplete response; expected 4 bytes got 3) + abort: HTTP request error (incomplete response) (py3 !) + abort: HTTP request error (incomplete response; expected 4 bytes got 3) (no-py3 !) 
(this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] $ killdaemons.py $DAEMON_PIDS - $ tail -7 error.log - write(28 from 28) -> (23) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (21) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -9 + sendall(167 from 167) -> (21) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (15) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (6) 4\r\nnone\r\n + sendall(6 from 9) -> (0) 4\r\nHG2 + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -11 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (21) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (23) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (21) \r\n (no-py3 !) write(6 from 6) -> (15) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (6) 4\r\nnone\r\n write(6 from 9) -> (0) 4\r\nHG2 write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -646,21 +781,43 @@ $ hg clone http://localhost:$HGPORT/ clone requesting all changes - abort: HTTP request error (incomplete response; expected 4 bytes got 3) + abort: HTTP request error (incomplete response) (py3 !) 
+ abort: HTTP request error (incomplete response; expected 4 bytes got 3) (no-py3 !) (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] $ killdaemons.py $DAEMON_PIDS - $ tail -8 error.log - write(28 from 28) -> (32) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (30) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -10 + sendall(167 from 167) -> (30) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (24) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (15) 4\r\nnone\r\n + sendall(9 from 9) -> (6) 4\r\nHG20\r\n + sendall(6 from 9) -> (0) 4\\r\\n\x00\x00\x00 (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (30) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (32) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (30) \r\n (no-py3 !) 
write(6 from 6) -> (24) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (15) 4\r\nnone\r\n write(9 from 9) -> (6) 4\r\nHG20\r\n write(6 from 9) -> (0) 4\\r\\n\x00\x00\x00 (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -677,15 +834,36 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -8 error.log - write(28 from 28) -> (35) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (33) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -10 + sendall(167 from 167) -> (33) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (27) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (18) 4\r\nnone\r\n + sendall(9 from 9) -> (9) 4\r\nHG20\r\n + sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (33) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (35) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (33) \r\n (no-py3 !) 
write(6 from 6) -> (27) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (18) 4\r\nnone\r\n write(9 from 9) -> (9) 4\r\nHG20\r\n write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -702,16 +880,39 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -9 error.log - write(28 from 28) -> (44) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (42) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -11 + sendall(167 from 167) -> (42) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (36) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (27) 4\r\nnone\r\n + sendall(9 from 9) -> (18) 4\r\nHG20\r\n + sendall(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -13 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (42) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (44) Transfer-Encoding: chunked\r\n (no-py3 !) + write(2 from 2) -> (42) \r\n (no-py3 !) 
write(6 from 6) -> (36) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (27) 4\r\nnone\r\n write(9 from 9) -> (18) 4\r\nHG20\r\n write(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00)\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -731,9 +932,27 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -10 error.log - write(28 from 28) -> (91) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (89) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12 + sendall(167 from 167) -> (89) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (83) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (74) 4\r\nnone\r\n + sendall(9 from 9) -> (65) 4\r\nHG20\r\n + sendall(9 from 9) -> (56) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (47) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (0) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (89) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (91) Transfer-Encoding: chunked\r\n (no-py3 !) 
+ write(2 from 2) -> (89) \r\n (no-py3 !) write(6 from 6) -> (83) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (74) 4\r\nnone\r\n write(9 from 9) -> (65) 4\r\nHG20\r\n @@ -741,7 +960,12 @@ write(9 from 9) -> (47) 4\\r\\n\x00\x00\x00)\\r\\n (esc) write(47 from 47) -> (0) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -755,14 +979,34 @@ adding changesets transaction abort! rollback completed - abort: HTTP request error (incomplete response; expected 466 bytes got 7) + abort: HTTP request error (incomplete response) (py3 !) + abort: HTTP request error (incomplete response; expected 466 bytes got 7) (no-py3 !) (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] $ killdaemons.py $DAEMON_PIDS - $ tail -11 error.log - write(2 from 2) -> (110) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14 + sendall(167 from 167) -> (110) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (104) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (95) 4\r\nnone\r\n + sendall(9 from 9) -> (86) 4\r\nHG20\r\n + sendall(9 from 9) -> (77) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (68) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (21) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (12) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(12 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1d (esc) + write limit reached; closing socket 
+ $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -15 + write(167 from 167) -> (110) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(2 from 2) -> (110) \r\n (no-py3 !) write(6 from 6) -> (104) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (95) 4\r\nnone\r\n write(9 from 9) -> (86) 4\r\nHG20\r\n @@ -772,7 +1016,12 @@ write(9 from 9) -> (12) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) write(12 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1d (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -792,9 +1041,29 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -12 error.log - write(28 from 28) -> (573) Transfer-Encoding: chunked\r\n - write(2 from 2) -> (571) \r\n +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14 + sendall(167 from 167) -> (571) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n + sendall(6 from 6) -> (565) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (556) 4\r\nnone\r\n + sendall(9 from 9) -> (547) 4\r\nHG20\r\n + sendall(9 from 9) -> (538) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (529) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (482) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (473) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> 
(0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -16 + readline(65537) -> (2) \r\n (py3 !) + write(167 from 167) -> (571) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !) + write(28 from 28) -> (573) Transfer-Encoding: chunked\r\n (no-py3 !) 
+ write(2 from 2) -> (571) \r\n (no-py3 !) write(6 from 6) -> (565) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (556) 4\r\nnone\r\n write(9 from 9) -> (547) 4\r\nHG20\r\n @@ -804,7 +1073,12 @@ write(9 from 9) -> (473) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) write(473 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -821,13 +1095,34 @@ added 1 changesets 
with 1 changes to 1 files transaction abort! rollback completed - abort: HTTP request error (incomplete response; expected 32 bytes got 9) + abort: HTTP request error (incomplete response) (py3 !) + abort: HTTP request error (incomplete response; expected 32 bytes got 9) (no-py3 !) (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] $ killdaemons.py $DAEMON_PIDS - $ tail -13 error.log +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -16 + sendall(6 from 6) -> (596) 1\\r\\n\x04\\r\\n (esc) + sendall(9 from 9) -> (587) 4\r\nnone\r\n + sendall(9 from 9) -> (578) 4\r\nHG20\r\n + sendall(9 from 9) -> (569) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (560) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (513) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (504) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (31) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (22) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (13) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(13 from 38) -> (0) 20\\r\\n\x08LISTKEYS (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -17 write(6 from 6) -> (596) 1\\r\\n\x04\\r\\n (esc) write(9 from 9) -> (587) 4\r\nnone\r\n write(9 from 9) -> (578) 4\r\nHG20\r\n @@ -840,7 +1135,12 @@ write(9 from 9) -> (13) 4\\r\\n\x00\x00\x00 \\r\\n (esc) write(13 from 38) -> (0) 20\\r\\n\x08LISTKEYS (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log @@ -863,7 +1163,36 @@ $ 
killdaemons.py $DAEMON_PIDS - $ tail -22 error.log +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -25 + sendall(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (795) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (786) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (313) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (304) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (295) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(38 from 
38) -> (257) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + sendall(9 from 9) -> (248) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + sendall(64 from 64) -> (184) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + sendall(9 from 9) -> (175) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (166) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + sendall(41 from 41) -> (125) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) + sendall(9 from 9) -> (116) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (107) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) + sendall(35 from 35) -> (72) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (63) 4\\r\\n\x00\x00\x00'\\r\\n (esc) + sendall(45 from 45) -> (18) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + sendall(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26 write(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc) write(47 from 47) -> (795) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) @@ -885,7 +1214,12 @@ write(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server 
Error\r\n +#endif $ rm -f error.log $ rm -rf clone @@ -907,7 +1241,37 @@ $ killdaemons.py $DAEMON_PIDS - $ tail -23 error.log +#if py36 + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26 + sendall(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (798) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (789) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (316) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (307) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + 
sendall(9 from 9) -> (298) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(38 from 38) -> (260) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + sendall(9 from 9) -> (251) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + sendall(64 from 64) -> (187) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + sendall(9 from 9) -> (178) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (169) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + sendall(41 from 41) -> (128) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) + sendall(9 from 9) -> (119) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (110) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) + sendall(35 from 35) -> (75) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (66) 4\\r\\n\x00\x00\x00'\\r\\n (esc) + sendall(45 from 45) -> (21) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + sendall(9 from 9) -> (12) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(3 from 5) -> (0) 0\r\n + write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + Exception: connection closed after sending N bytes + + +#else + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -27 write(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc) write(47 from 47) -> (798) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) @@ -930,7 +1294,12 @@ write(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(3 from 5) -> (0) 0\r\n write limit reached; closing socket + $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob) + Traceback (most recent call last): + 
Exception: connection closed after sending N bytes + write(27) -> 15\r\nInternal Server Error\r\n +#endif $ rm -f error.log $ rm -rf clone
--- a/tests/test-http-bundle1.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-http-bundle1.t Tue Feb 19 21:55:05 2019 -0800 @@ -151,7 +151,7 @@ $ cd copy-pull $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup" + > changegroup = sh -c "printenv.py --line changegroup" > EOF $ hg pull pulling from http://localhost:$HGPORT1/ @@ -161,7 +161,14 @@ adding file changes added 1 changesets with 1 changes to 1 files new changesets 5fed3813f7f5 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=http://localhost:$HGPORT1/ + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d + HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=http://localhost:$HGPORT1/ + (run 'hg update' to get a working copy) $ cd .. 
@@ -175,22 +182,9 @@ + use the same server to test server side streaming preference $ cd test - $ cat << EOT > userpass.py - > import base64 - > from mercurial.hgweb import common - > def perform_authentication(hgweb, req, op): - > auth = req.headers.get(b'Authorization') - > if not auth: - > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', - > [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) - > if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', - > b'pass']: - > raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') - > def extsetup(ui): - > common.permhooks.insert(0, perform_authentication) - > EOT - $ hg serve --config extensions.x=userpass.py -p $HGPORT2 -d --pid-file=pid \ - > --config server.preferuncompressed=True \ + + $ hg serve --config extensions.x=$TESTDIR/httpserverauth.py -p $HGPORT2 -d \ + > --pid-file=pid --config server.preferuncompressed=True \ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log $ cat pid >> $DAEMON_PIDS
--- a/tests/test-http-protocol.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-http-protocol.t Tue Feb 19 21:55:05 2019 -0800 @@ -179,6 +179,7 @@ > command listkeys > namespace namespaces > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -194,6 +195,7 @@ s> \r\n s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending listkeys command + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=listkeys HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgArg-1,X-HgProto-1\r\n @@ -228,6 +230,7 @@ > x-hgarg-1: namespace=namespaces > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=listkeys HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -250,6 +253,7 @@ $ hg --config experimental.httppeer.advertise-v2=true --verbose debugwireproto http://$LOCALIP:$HGPORT << EOF > command heads > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -268,6 +272,7 @@ s> \r\n s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command + s> setsockopt(6, 1, 1) -> None (?) 
s> GET /?cmd=heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1\r\n @@ -299,6 +304,7 @@ $ hg --config experimental.httppeer.advertise-v2=true --config experimental.httppeer.v2-encoder-order=identity --verbose debugwireproto http://$LOCALIP:$HGPORT << EOF > command heads > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -317,6 +323,7 @@ s> \r\n s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired
\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -337,23 +344,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 1e\r\n s> \x16\x00\x00\x01\x00\x02\x041 s> \x81T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 s> \r\n - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: [ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' ] @@ -386,7 +389,7 @@ > relpath = path[len(b'/redirector'):] > res.status = b'301 Redirect' 
> newurl = b'%s/redirected%s' % (req.baseurl, relpath) - > if not repo.ui.configbool('testing', 'redirectqs', True) and b'?' in newurl: + > if not repo.ui.configbool(b'testing', b'redirectqs', True) and b'?' in newurl: > newurl = newurl[0:newurl.index(b'?')] > res.headers[b'Location'] = newurl > res.headers[b'Content-Type'] = b'text/plain' @@ -408,6 +411,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirector?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -422,6 +426,7 @@ s> Content-Length: 10\r\n s> \r\n s> redirected + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -441,6 +446,7 @@ $ hg --verbose debugwireproto http://$LOCALIP:$HGPORT/redirector << EOF > command heads > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirector?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -456,6 +462,7 @@ s> Content-Length: 10\r\n s> \r\n s> redirected + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -472,6 +479,7 @@ real URL is http://$LOCALIP:$HGPORT/redirected (glob) s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1\r\n @@ -509,6 +517,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) 
s> GET /redirector?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -523,6 +532,7 @@ s> Content-Length: 10\r\n s> \r\n s> redirected + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -664,6 +674,7 @@ $ hg --verbose debugwireproto http://$LOCALIP:$HGPORT/redirector << EOF > command heads > EOF + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirector?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -679,6 +690,7 @@ s> Content-Length: 10\r\n s> \r\n s> redirected + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -721,6 +733,7 @@ s> <li class="active">log</li>\n s> <li><a href="/redirected/graph/tip">graph</a></li>\n s> <li><a href="/redirected/tags">tags</a + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-0.1\r\n @@ -737,6 +750,7 @@ real URL is http://$LOCALIP:$HGPORT/redirected (glob) s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command + s> setsockopt(6, 1, 1) -> None (?) s> GET /redirected?cmd=heads HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1\r\n
--- a/tests/test-http.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-http.t Tue Feb 19 21:55:05 2019 -0800 @@ -171,21 +171,9 @@ + use the same server to test server side streaming preference $ cd test - $ cat << EOT > userpass.py - > import base64 - > from mercurial.hgweb import common - > def perform_authentication(hgweb, req, op): - > auth = req.headers.get(b'Authorization') - > if not auth: - > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', - > [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) - > if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', b'pass']: - > raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') - > def extsetup(ui): - > common.permhooks.insert(0, perform_authentication) - > EOT - $ hg serve --config extensions.x=userpass.py -p $HGPORT2 -d --pid-file=pid \ - > --config server.preferuncompressed=True \ + + $ hg serve --config extensions.x=$TESTDIR/httpserverauth.py -p $HGPORT2 -d \ + > --pid-file=pid --config server.preferuncompressed=True -E ../errors2.log \ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log $ cat pid >> $DAEMON_PIDS @@ -221,6 +209,25 @@ $ hg id http://user@localhost:$HGPORT2/ 5fed3813f7f5 + $ cat > use_digests.py << EOF + > from mercurial import ( + > exthelper, + > url, + > ) + > + > eh = exthelper.exthelper() + > uisetup = eh.finaluisetup + > + > @eh.wrapfunction(url, 'opener') + > def urlopener(orig, *args, **kwargs): + > opener = orig(*args, **kwargs) + > opener.addheaders.append((r'X-HgTest-AuthType', r'Digest')) + > return opener + > EOF + + $ hg id http://localhost:$HGPORT2/ --config extensions.x=use_digests.py + 5fed3813f7f5 + #if no-reposimplestore $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1 streaming all changes @@ -374,6 +381,14 @@ "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 
comp=$USUAL_COMPRESSIONS$ partial-pull "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull + "GET /?cmd=capabilities HTTP/1.1" 401 - x-hgtest-authtype:Digest + "GET /?cmd=capabilities HTTP/1.1" 200 - x-hgtest-authtype:Digest + "GET /?cmd=lookup HTTP/1.1" 401 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest + "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest "GET /?cmd=capabilities HTTP/1.1" 401 - (no-reposimplestore !) "GET /?cmd=capabilities HTTP/1.1" 200 - (no-reposimplestore !) "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (no-reposimplestore !) @@ -443,6 +458,8 @@ $ cat error.log + $ cat errors2.log + check abort error reporting while pulling/cloning $ $RUNTESTDIR/killdaemons.py
--- a/tests/test-https.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-https.t Tue Feb 19 21:55:05 2019 -0800 @@ -207,7 +207,7 @@ $ cd copy-pull $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup" + > changegroup = sh -c "printenv.py --line changegroup" > EOF $ hg pull $DISABLECACERTS pulling from https://localhost:$HGPORT/ @@ -226,7 +226,14 @@ adding file changes added 1 changesets with 1 changes to 1 files new changesets 5fed3813f7f5 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=https://localhost:$HGPORT/ + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d + HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=https://localhost:$HGPORT/ + (run 'hg update' to get a working copy) $ cd ..
--- a/tests/test-impexp-branch.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-impexp-branch.t Tue Feb 19 21:55:05 2019 -0800 @@ -6,7 +6,7 @@ > import re > import sys > - > head_re = re.compile('^#(?:(?:\\s+([A-Za-z][A-Za-z0-9_]*)(?:\\s.*)?)|(?:\\s*))$') + > head_re = re.compile(r'^#(?:(?:\\s+([A-Za-z][A-Za-z0-9_]*)(?:\\s.*)?)|(?:\\s*))$') > > for line in sys.stdin: > hmatch = head_re.match(line)
--- a/tests/test-import-context.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-import-context.t Tue Feb 19 21:55:05 2019 -0800 @@ -12,9 +12,9 @@ > count = int(pattern[0:-1]) > char = pattern[-1].encode('utf8') + b'\n' > if not lasteol and i == len(patterns) - 1: - > fp.write((char*count)[:-1]) + > fp.write((char * count)[:-1]) > else: - > fp.write(char*count) + > fp.write(char * count) > fp.close() > EOF $ cat > cat.py <<EOF
--- a/tests/test-import-eol.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-import-eol.t Tue Feb 19 21:55:05 2019 -0800 @@ -17,9 +17,9 @@ > 'empty:stripped-crlf': b'\r\n'}[sys.argv[1]]) > w(b' d\n') > w(b'-e\n') - > w(b'\ No newline at end of file\n') + > w(b'\\\\ No newline at end of file\n') > w(b'+z\r\n') - > w(b'\ No newline at end of file\r\n') + > w(b'\\\\ No newline at end of file\r\n') > EOF $ hg init repo
--- a/tests/test-import-git.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-import-git.t Tue Feb 19 21:55:05 2019 -0800 @@ -826,7 +826,7 @@ $ hg revert -qa $ hg --encoding utf-8 import - <<EOF - > From: =?UTF-8?q?Rapha=C3=ABl=20Hertzog?= <hertzog@debian.org> + > From: =?utf-8?q?Rapha=C3=ABl_Hertzog_=3Chertzog=40debian=2Eorg=3E?= > Subject: [PATCH] =?UTF-8?q?=C5=A7=E2=82=AC=C3=9F=E1=B9=AA?= > > diff --git a/a b/a
--- a/tests/test-install.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-install.t Tue Feb 19 21:55:05 2019 -0800 @@ -161,6 +161,7 @@ > import subprocess > import sys > import xml.etree.ElementTree as ET + > from mercurial import pycompat > > # MSYS mangles the path if it expands $TESTDIR > testdir = os.environ['TESTDIR'] @@ -177,7 +178,7 @@ > files = node.findall('./{%(wix)s}Component/{%(wix)s}File' % ns) > > for f in files: - > yield relpath + f.attrib['Name'] + > yield pycompat.sysbytes(relpath + f.attrib['Name']) > > def hgdirectory(relpath): > '''generator of tracked files, rooted at relpath''' @@ -187,10 +188,9 @@ > stderr=subprocess.PIPE) > output = proc.communicate()[0] > - > slash = '/' > for line in output.splitlines(): > if os.name == 'nt': - > yield line.replace(os.sep, slash) + > yield line.replace(pycompat.sysbytes(os.sep), b'/') > else: > yield line > @@ -204,11 +204,11 @@ > > print('Not installed:') > for f in sorted(set(tracked) - set(installed)): - > print(' %s' % f) + > print(' %s' % pycompat.sysstr(f)) > > print('Not tracked:') > for f in sorted(set(installed) - set(tracked)): - > print(' %s' % f) + > print(' %s' % pycompat.sysstr(f)) > EOF $ ( testrepohgenv; "$PYTHON" wixxml.py help ) @@ -238,6 +238,7 @@ the default for them. $ unset PYTHONPATH $ "$PYTHON" -m virtualenv --no-site-packages --never-download installenv >> pip.log + DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?) Note: we use this weird path to run pip and hg to avoid platform differences, since it's bin on most platforms but Scripts on Windows. $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
--- a/tests/test-journal-exists.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-journal-exists.t Tue Feb 19 21:55:05 2019 -0800 @@ -29,7 +29,7 @@ $ hg -R foo unbundle repo.hg adding changesets - abort: Permission denied: $TESTTMP/foo/.hg/store/.00changelog.i-* (glob) + abort: Permission denied: '$TESTTMP/foo/.hg/store/.00changelog.i-*' (glob) [255] $ if test -f foo/.hg/store/journal; then echo 'journal exists :-('; fi
--- a/tests/test-largefiles-misc.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-largefiles-misc.t Tue Feb 19 21:55:05 2019 -0800 @@ -578,7 +578,7 @@ $ echo moremore >> anotherlarge $ hg revert anotherlarge -v --config 'ui.origbackuppath=.hg/origbackups' creating directory: $TESTTMP/addrm2/.hg/origbackups/.hglf/sub - saving current version of ../.hglf/sub/anotherlarge as $TESTTMP/addrm2/.hg/origbackups/.hglf/sub/anotherlarge + saving current version of ../.hglf/sub/anotherlarge as ../.hg/origbackups/.hglf/sub/anotherlarge reverting ../.hglf/sub/anotherlarge creating directory: $TESTTMP/addrm2/.hg/origbackups/sub found 90c622cf65cebe75c5842f9136c459333faf392e in store
--- a/tests/test-largefiles-small-disk.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-largefiles-small-disk.t Tue Feb 19 21:55:05 2019 -0800 @@ -9,7 +9,7 @@ > # > # this makes the original largefiles code abort: > _origcopyfileobj = shutil.copyfileobj - > def copyfileobj(fsrc, fdst, length=16*1024): + > def copyfileobj(fsrc, fdst, length=16 * 1024): > # allow journal files (used by transaction) to be written > if b'journal.' in fdst.name: > return _origcopyfileobj(fsrc, fdst, length)
--- a/tests/test-largefiles-wireproto.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-largefiles-wireproto.t Tue Feb 19 21:55:05 2019 -0800 @@ -420,20 +420,8 @@ $ rm "${USERCACHE}"/* $ cd .. - $ cat << EOT > userpass.py - > import base64 - > from mercurial.hgweb import common - > def perform_authentication(hgweb, req, op): - > auth = req.headers.get(b'Authorization') - > if not auth: - > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', - > [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) - > if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', b'pass']: - > raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') - > def extsetup(ui): - > common.permhooks.insert(0, perform_authentication) - > EOT - $ hg serve --config extensions.x=userpass.py -R credentialmain \ + + $ hg serve --config extensions.x=$TESTDIR/httpserverauth.py -R credentialmain \ > -d -p $HGPORT --pid-file hg.pid -A access.log $ cat hg.pid >> $DAEMON_PIDS $ cat << EOF > get_pass.py
--- a/tests/test-lfs-serve-access.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-lfs-serve-access.t Tue Feb 19 21:55:05 2019 -0800 @@ -227,9 +227,9 @@ > # One time simulation of a read error > if _readerr: > _readerr = False - > raise IOError(errno.EIO, '%s: I/O error' % oid) + > raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) > # Simulate corrupt content on client download - > blobstore._verify(oid, 'dummy content') + > blobstore._verify(oid, b'dummy content') > > def verify(self, oid): > '''Called in the server to populate the Batch API response, @@ -240,7 +240,7 @@ > global _numverifies > _numverifies += 1 > if _numverifies <= 2: - > raise IOError(errno.EIO, '%s: I/O error' % oid) + > raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) > return super(badstore, self).verify(oid) > > store.__class__ = badstore @@ -340,14 +340,14 @@ $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob) $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob) $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob) - $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, '%s: I/O error' % oid) (glob) - $LOCALIP - - [$ERRDATE$] HG error: IOError: [Errno 5] f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e: I/O error (glob) + $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob) + $LOCALIP - - [$ERRDATE$] HG error: *Error: [Errno 5] f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e: I/O error (glob) $LOCALIP - - [$ERRDATE$] HG error: (glob) $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob) $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob) $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob) - $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, '%s: 
I/O error' % oid) (glob) - $LOCALIP - - [$ERRDATE$] HG error: IOError: [Errno 5] b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c: I/O error (glob) + $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob) + $LOCALIP - - [$ERRDATE$] HG error: *Error: [Errno 5] b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c: I/O error (glob) $LOCALIP - - [$ERRDATE$] HG error: (glob) $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c': (glob) $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob) @@ -363,19 +363,19 @@ for chunk in self.server.application(env, self._start_response): for r in self._runwsgi(req, res, repo): rctx, req, res, self.check_perm) - return func(*(args + a), **kw) + return func(*(args + a), **kw) (no-py3 !) lambda perm: res.setbodybytes(localstore.read(oid)) blob = self._read(self.vfs, oid, verify) - raise IOError(errno.EIO, '%s: I/O error' % oid) - IOError: [Errno 5] 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d: I/O error + raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) + *Error: [Errno 5] 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d: I/O error (glob) $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob) $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob) $LOCALIP - - [$ERRDATE$] HG error: res.setbodybytes(localstore.read(oid)) (glob) $LOCALIP - - [$ERRDATE$] HG error: blob = self._read(self.vfs, oid, verify) (glob) - $LOCALIP - - [$ERRDATE$] HG error: blobstore._verify(oid, 'dummy content') (glob) - $LOCALIP - - [$ERRDATE$] HG error: hint=_('run hg verify')) (glob) + $LOCALIP - - [$ERRDATE$] HG error: blobstore._verify(oid, b'dummy content') (glob) + 
$LOCALIP - - [$ERRDATE$] HG error: hint=_(b'run hg verify')) (glob) $LOCALIP - - [$ERRDATE$] HG error: LfsCorruptionError: detected corrupt lfs object: 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d (glob) $LOCALIP - - [$ERRDATE$] HG error: (glob) @@ -394,22 +394,7 @@ > l.password=pass > EOF - $ cat << EOF > userpass.py - > import base64 - > from mercurial.hgweb import common - > def perform_authentication(hgweb, req, op): - > auth = req.headers.get(b'Authorization') - > if not auth: - > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who', - > [(b'WWW-Authenticate', b'Basic Realm="mercurial"')]) - > if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', - > b'pass']: - > raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no') - > def extsetup(ui): - > common.permhooks.insert(0, perform_authentication) - > EOF - - $ hg --config extensions.x=$TESTTMP/userpass.py \ + $ hg --config extensions.x=$TESTDIR/httpserverauth.py \ > -R server serve -d -p $HGPORT1 --pid-file=hg.pid \ > -A $TESTTMP/access.log -E $TESTTMP/errors.log $ mv hg.pid $DAEMON_PIDS @@ -437,6 +422,32 @@ $ echo 'another blob' > auth_clone/lfs.blob $ hg -R auth_clone ci -Aqm 'add blob' + + $ cat > use_digests.py << EOF + > from mercurial import ( + > exthelper, + > url, + > ) + > + > eh = exthelper.exthelper() + > uisetup = eh.finaluisetup + > + > @eh.wrapfunction(url, 'opener') + > def urlopener(orig, *args, **kwargs): + > opener = orig(*args, **kwargs) + > opener.addheaders.append((r'X-HgTest-AuthType', r'Digest')) + > return opener + > EOF + +Test that Digest Auth fails gracefully before testing the successful Basic Auth + + $ hg -R auth_clone push --config extensions.x=use_digests.py + pushing to http://localhost:$HGPORT1/ + searching for changes + abort: LFS HTTP error: HTTP Error 401: the server must support Basic Authentication! 
+ (api=http://localhost:$HGPORT1/.git/info/lfs/objects/batch, action=upload) + [255] + $ hg -R auth_clone --debug push | egrep '^[{}]| ' { "objects": [ @@ -468,6 +479,19 @@ $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 200 - (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 401 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap 
HTTP/1.1" 401 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest (glob) + $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - x-hgtest-authtype:Digest (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e+4d9397055dc0c205f3132f331f36353ab1a525a3 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
--- a/tests/test-lfs-serve.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-lfs-serve.t Tue Feb 19 21:55:05 2019 -0800 @@ -51,16 +51,15 @@ > opts[b'manifest'] = False > opts[b'dir'] = False > rl = cmdutil.openrevlog(repo, b'debugprocessors', file_, opts) - > for flag, proc in rl._flagprocessors.iteritems(): + > for flag, proc in rl._flagprocessors.items(): > ui.status(b"registered processor '%#x'\n" % (flag)) > EOF Skip the experimental.changegroup3=True config. Failure to agree on this comes -first, and causes a "ValueError: no common changegroup version" or "abort: -HTTP Error 500: Internal Server Error", if the extension is only loaded on one -side. If that *is* enabled, the subsequent failure is "abort: missing processor -for flag '0x2000'!" if the extension is only loaded on one side (possibly also -masked by the Internal Server Error message). +first, and causes an "abort: no common changegroup version" if the extension is +only loaded on one side. If that *is* enabled, the subsequent failure is "abort: +missing processor for flag '0x2000'!" if the extension is only loaded on one side +(possibly also masked by the Internal Server Error message). $ cat >> $HGRCPATH <<EOF > [extensions] > debugprocessors = $TESTTMP/debugprocessors.py @@ -110,14 +109,14 @@ ... def diff(server): ... readchannel(server) ... # run an arbitrary command in the repo with the extension loaded - ... runcommand(server, ['id', '-R', '../cmdservelfs']) + ... runcommand(server, [b'id', b'-R', b'../cmdservelfs']) ... # now run a command in a repo without the extension to ensure that ... # files are added safely.. - ... runcommand(server, ['ci', '-Aqm', 'non-lfs']) + ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs']) ... # .. and that scmutil.prefetchfiles() safely no-ops.. - ... runcommand(server, ['diff', '-r', '.~1']) + ... runcommand(server, [b'diff', b'-r', b'.~1']) ... # .. and that debugupgraderepo safely no-ops. - ... runcommand(server, ['debugupgraderepo', '-q', '--run']) + ... 
runcommand(server, [b'debugupgraderepo', b'-q', b'--run']) *** runcommand id -R ../cmdservelfs 000000000000 tip *** runcommand ci -Aqm non-lfs @@ -257,12 +256,12 @@ ... def addrequirement(server): ... readchannel(server) ... # change the repo in a way that adds the lfs requirement - ... runcommand(server, ['pull', '-qu']) + ... runcommand(server, [b'pull', b'-qu']) ... # Now cause the requirement adding hook to fire again, without going ... # through reposetup() again. ... with open('file.txt', 'wb') as fp: - ... fp.write('data') - ... runcommand(server, ['ci', '-Aqm', 'non-lfs']) + ... fp.write(b'data') + ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs']) *** runcommand pull -qu *** runcommand ci -Aqm non-lfs @@ -317,8 +316,11 @@ TODO: fail more gracefully. $ hg init $TESTTMP/client4_pull - $ hg -R $TESTTMP/client4_pull pull -q http://localhost:$HGPORT - abort: HTTP Error 500: Internal Server Error + $ hg -R $TESTTMP/client4_pull pull http://localhost:$HGPORT + pulling from http://localhost:$HGPORT/ + requesting all changes + remote: abort: no common changegroup version + abort: pull failed on remote [255] $ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES $TESTTMP/server/.hg/requires:lfs @@ -359,22 +361,24 @@ $ cp $HGRCPATH.orig $HGRCPATH >>> from __future__ import absolute_import - >>> from hgclient import check, readchannel, runcommand + >>> from hgclient import bprint, check, readchannel, runcommand, stdout >>> @check ... def checkflags(server): ... readchannel(server) - ... print('') - ... print('# LFS required- both lfs and non-lfs revlogs have 0x2000 flag') - ... runcommand(server, ['debugprocessors', 'lfs.bin', '-R', - ... '../server']) - ... runcommand(server, ['debugprocessors', 'nonlfs2.txt', '-R', - ... '../server']) - ... runcommand(server, ['config', 'extensions', '--cwd', - ... '../server']) + ... bprint(b'') + ... bprint(b'# LFS required- both lfs and non-lfs revlogs have 0x2000 flag') + ... stdout.flush() + ... 
runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R', + ... b'../server']) + ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R', + ... b'../server']) + ... runcommand(server, [b'config', b'extensions', b'--cwd', + ... b'../server']) ... - ... print("\n# LFS not enabled- revlogs don't have 0x2000 flag") - ... runcommand(server, ['debugprocessors', 'nonlfs3.txt']) - ... runcommand(server, ['config', 'extensions']) + ... bprint(b"\n# LFS not enabled- revlogs don't have 0x2000 flag") + ... stdout.flush() + ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt']) + ... runcommand(server, [b'config', b'extensions']) # LFS required- both lfs and non-lfs revlogs have 0x2000 flag *** runcommand debugprocessors lfs.bin -R ../server @@ -403,28 +407,31 @@ > EOF >>> from __future__ import absolute_import, print_function - >>> from hgclient import check, readchannel, runcommand + >>> from hgclient import bprint, check, readchannel, runcommand, stdout >>> @check ... def checkflags2(server): ... readchannel(server) - ... print('') - ... print('# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag') - ... runcommand(server, ['debugprocessors', 'lfs.bin', '-R', - ... '../server']) - ... runcommand(server, ['debugprocessors', 'nonlfs2.txt', '-R', - ... '../server']) - ... runcommand(server, ['config', 'extensions', '--cwd', - ... '../server']) + ... bprint(b'') + ... bprint(b'# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag') + ... stdout.flush() + ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R', + ... b'../server']) + ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R', + ... b'../server']) + ... runcommand(server, [b'config', b'extensions', b'--cwd', + ... b'../server']) ... - ... print('\n# LFS enabled without requirement- revlogs have 0x2000 flag') - ... runcommand(server, ['debugprocessors', 'nonlfs3.txt']) - ... runcommand(server, ['config', 'extensions']) + ... 
bprint(b'\n# LFS enabled without requirement- revlogs have 0x2000 flag') + ... stdout.flush() + ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt']) + ... runcommand(server, [b'config', b'extensions']) ... - ... print("\n# LFS disabled locally- revlogs don't have 0x2000 flag") - ... runcommand(server, ['debugprocessors', 'nonlfs.txt', '-R', - ... '../nonlfs']) - ... runcommand(server, ['config', 'extensions', '--cwd', - ... '../nonlfs']) + ... bprint(b"\n# LFS disabled locally- revlogs don't have 0x2000 flag") + ... stdout.flush() + ... runcommand(server, [b'debugprocessors', b'nonlfs.txt', b'-R', + ... b'../nonlfs']) + ... runcommand(server, [b'config', b'extensions', b'--cwd', + ... b'../nonlfs']) # LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag *** runcommand debugprocessors lfs.bin -R ../server @@ -657,10 +664,4 @@ $ "$PYTHON" $TESTDIR/killdaemons.py $DAEMON_PIDS -#if lfsremote-on - $ cat $TESTTMP/errors.log | grep '^[A-Z]' - Traceback (most recent call last): - ValueError: no common changegroup version -#else $ cat $TESTTMP/errors.log -#endif
--- a/tests/test-linelog.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-linelog.py Tue Feb 19 21:55:05 2019 -0800 @@ -15,7 +15,6 @@ def _genedits(seed, endrev): lines = [] random.seed(seed) - rev = 0 for rev in range(0, endrev): n = len(lines) a1 = random.randint(0, n)
--- a/tests/test-locate.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-locate.t Tue Feb 19 21:55:05 2019 -0800 @@ -123,6 +123,24 @@ ../t.h ../t/e.h ../t/x + $ hg files --config ui.relative-paths=yes + ../b + ../dir.h/foo + ../t.h + ../t/e.h + ../t/x + $ hg files --config ui.relative-paths=no + b + dir.h/foo + t.h + t/e.h + t/x + $ hg files --config ui.relative-paths=legacy + ../b + ../dir.h/foo + ../t.h + ../t/e.h + ../t/x $ hg locate b ../b
--- a/tests/test-lock.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-lock.py Tue Feb 19 21:55:05 2019 -0800 @@ -141,7 +141,7 @@ state.assertacquirecalled(True) # fake a fork - forklock = copy.deepcopy(lock) + forklock = copy.copy(lock) forklock._pidoffset = 1 forklock.release() state.assertreleasecalled(False) @@ -238,7 +238,7 @@ childstate.assertacquirecalled(True) # fork the child lock - forkchildlock = copy.deepcopy(childlock) + forkchildlock = copy.copy(childlock) forkchildlock._pidoffset += 1 forkchildlock.release() childstate.assertreleasecalled(False) @@ -290,7 +290,7 @@ self.fail("unexpected lock acquisition") except error.LockHeld as why: self.assertTrue(why.errno == errno.ETIMEDOUT) - self.assertTrue(why.locker == "") + self.assertTrue(why.locker == b"") state.assertlockexists(False) if __name__ == '__main__':
--- a/tests/test-manifest.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-manifest.py Tue Feb 19 21:55:05 2019 -0800 @@ -289,8 +289,7 @@ the resulting manifest.''' m = self.parsemanifest(A_HUGE_MANIFEST) - match = matchmod.match(b'/', b'', - [b'file1', b'file200', b'file300'], exact=True) + match = matchmod.exact([b'file1', b'file200', b'file300']) m2 = m.matches(match) w = (b'file1\0%sx\n' @@ -304,10 +303,8 @@ ''' m = self.parsemanifest(A_DEEPER_MANIFEST) - match = matchmod.match(b'/', b'', - [b'a/b/c/bar.txt', b'a/b/d/qux.py', - b'readme.txt', b'nonexistent'], - exact=True) + match = matchmod.exact([b'a/b/c/bar.txt', b'a/b/d/qux.py', + b'readme.txt', b'nonexistent']) m2 = m.matches(match) self.assertEqual( @@ -330,7 +327,7 @@ m = self.parsemanifest(A_HUGE_MANIFEST) flist = m.keys()[80:300] - match = matchmod.match(b'/', b'', flist, exact=True) + match = matchmod.exact(flist) m2 = m.matches(match) self.assertEqual(flist, m2.keys()) @@ -364,7 +361,7 @@ against a directory.''' m = self.parsemanifest(A_DEEPER_MANIFEST) - match = matchmod.match(b'/', b'', [b'a/b'], exact=True) + match = matchmod.exact([b'a/b']) m2 = m.matches(match) self.assertEqual([], m2.keys())
--- a/tests/test-match.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-match.py Tue Feb 19 21:55:05 2019 -0800 @@ -12,36 +12,36 @@ class BaseMatcherTests(unittest.TestCase): def testVisitdir(self): - m = matchmod.basematcher(b'', b'') + m = matchmod.basematcher() self.assertTrue(m.visitdir(b'.')) self.assertTrue(m.visitdir(b'dir')) def testVisitchildrenset(self): - m = matchmod.basematcher(b'', b'') + m = matchmod.basematcher() self.assertEqual(m.visitchildrenset(b'.'), b'this') self.assertEqual(m.visitchildrenset(b'dir'), b'this') class AlwaysMatcherTests(unittest.TestCase): def testVisitdir(self): - m = matchmod.alwaysmatcher(b'', b'') + m = matchmod.alwaysmatcher() self.assertEqual(m.visitdir(b'.'), b'all') self.assertEqual(m.visitdir(b'dir'), b'all') def testVisitchildrenset(self): - m = matchmod.alwaysmatcher(b'', b'') + m = matchmod.alwaysmatcher() self.assertEqual(m.visitchildrenset(b'.'), b'all') self.assertEqual(m.visitchildrenset(b'dir'), b'all') class NeverMatcherTests(unittest.TestCase): def testVisitdir(self): - m = matchmod.nevermatcher(b'', b'') + m = matchmod.nevermatcher() self.assertFalse(m.visitdir(b'.')) self.assertFalse(m.visitdir(b'dir')) def testVisitchildrenset(self): - m = matchmod.nevermatcher(b'', b'') + m = matchmod.nevermatcher() self.assertEqual(m.visitchildrenset(b'.'), set()) self.assertEqual(m.visitchildrenset(b'dir'), set()) @@ -50,12 +50,12 @@ # this is equivalent to BaseMatcherTests. 
def testVisitdir(self): - m = matchmod.predicatematcher(b'', b'', lambda *a: False) + m = matchmod.predicatematcher(lambda *a: False) self.assertTrue(m.visitdir(b'.')) self.assertTrue(m.visitdir(b'dir')) def testVisitchildrenset(self): - m = matchmod.predicatematcher(b'', b'', lambda *a: False) + m = matchmod.predicatematcher(lambda *a: False) self.assertEqual(m.visitchildrenset(b'.'), b'this') self.assertEqual(m.visitchildrenset(b'dir'), b'this') @@ -185,8 +185,7 @@ class ExactMatcherTests(unittest.TestCase): def testVisitdir(self): - m = matchmod.match(b'x', b'', patterns=[b'dir/subdir/foo.txt'], - exact=True) + m = matchmod.exact(files=[b'dir/subdir/foo.txt']) assert isinstance(m, matchmod.exactmatcher) self.assertTrue(m.visitdir(b'.')) self.assertTrue(m.visitdir(b'dir')) @@ -197,8 +196,7 @@ self.assertFalse(m.visitdir(b'folder')) def testVisitchildrenset(self): - m = matchmod.match(b'x', b'', patterns=[b'dir/subdir/foo.txt'], - exact=True) + m = matchmod.exact(files=[b'dir/subdir/foo.txt']) assert isinstance(m, matchmod.exactmatcher) self.assertEqual(m.visitchildrenset(b'.'), {b'dir'}) self.assertEqual(m.visitchildrenset(b'dir'), {b'subdir'}) @@ -208,12 +206,11 @@ self.assertEqual(m.visitchildrenset(b'folder'), set()) def testVisitchildrensetFilesAndDirs(self): - m = matchmod.match(b'x', b'', patterns=[b'rootfile.txt', - b'a/file1.txt', - b'a/b/file2.txt', - # no file in a/b/c - b'a/b/c/d/file4.txt'], - exact=True) + m = matchmod.exact(files=[b'rootfile.txt', + b'a/file1.txt', + b'a/b/file2.txt', + # no file in a/b/c + b'a/b/c/d/file4.txt']) assert isinstance(m, matchmod.exactmatcher) self.assertEqual(m.visitchildrenset(b'.'), {b'a', b'rootfile.txt'}) self.assertEqual(m.visitchildrenset(b'a'), {b'b', b'file1.txt'}) @@ -226,8 +223,8 @@ class DifferenceMatcherTests(unittest.TestCase): def testVisitdirM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() dm = 
matchmod.differencematcher(m1, m2) # dm should be equivalent to a nevermatcher. self.assertFalse(dm.visitdir(b'.')) @@ -239,8 +236,8 @@ self.assertFalse(dm.visitdir(b'folder')) def testVisitchildrensetM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() dm = matchmod.differencematcher(m1, m2) # dm should be equivalent to a nevermatcher. self.assertEqual(dm.visitchildrenset(b'.'), set()) @@ -252,27 +249,26 @@ self.assertEqual(dm.visitchildrenset(b'folder'), set()) def testVisitdirM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() dm = matchmod.differencematcher(m1, m2) - # dm should be equivalent to a alwaysmatcher. OPT: if m2 is a - # nevermatcher, we could return 'all' for these. + # dm should be equivalent to a alwaysmatcher. # # We're testing Equal-to-True instead of just 'assertTrue' since # assertTrue does NOT verify that it's a bool, just that it's truthy. # While we may want to eventually make these return 'all', they should # not currently do so. 
- self.assertEqual(dm.visitdir(b'.'), True) - self.assertEqual(dm.visitdir(b'dir'), True) - self.assertEqual(dm.visitdir(b'dir/subdir'), True) - self.assertEqual(dm.visitdir(b'dir/subdir/z'), True) - self.assertEqual(dm.visitdir(b'dir/foo'), True) - self.assertEqual(dm.visitdir(b'dir/subdir/x'), True) - self.assertEqual(dm.visitdir(b'folder'), True) + self.assertEqual(dm.visitdir(b'.'), b'all') + self.assertEqual(dm.visitdir(b'dir'), b'all') + self.assertEqual(dm.visitdir(b'dir/subdir'), b'all') + self.assertEqual(dm.visitdir(b'dir/subdir/z'), b'all') + self.assertEqual(dm.visitdir(b'dir/foo'), b'all') + self.assertEqual(dm.visitdir(b'dir/subdir/x'), b'all') + self.assertEqual(dm.visitdir(b'folder'), b'all') def testVisitchildrensetM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() dm = matchmod.differencematcher(m1, m2) # dm should be equivalent to a alwaysmatcher. self.assertEqual(dm.visitchildrenset(b'.'), b'all') @@ -284,7 +280,7 @@ self.assertEqual(dm.visitchildrenset(b'folder'), b'all') def testVisitdirM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir']) dm = matchmod.differencematcher(m1, m2) self.assertEqual(dm.visitdir(b'.'), True) @@ -295,12 +291,11 @@ # an 'all' pattern, just True. self.assertEqual(dm.visitdir(b'dir/subdir/z'), True) self.assertEqual(dm.visitdir(b'dir/subdir/x'), True) - # OPT: We could return 'all' for these. 
- self.assertEqual(dm.visitdir(b'dir/foo'), True) - self.assertEqual(dm.visitdir(b'folder'), True) + self.assertEqual(dm.visitdir(b'dir/foo'), b'all') + self.assertEqual(dm.visitdir(b'folder'), b'all') def testVisitchildrensetM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir']) dm = matchmod.differencematcher(m1, m2) self.assertEqual(dm.visitchildrenset(b'.'), b'this') @@ -322,7 +317,7 @@ dm = matchmod.differencematcher(m1, m2) self.assertEqual(dm.visitdir(b'.'), True) self.assertEqual(dm.visitdir(b'dir'), True) - self.assertEqual(dm.visitdir(b'dir/subdir'), True) + self.assertEqual(dm.visitdir(b'dir/subdir'), b'all') self.assertFalse(dm.visitdir(b'dir/foo')) self.assertFalse(dm.visitdir(b'folder')) # OPT: We should probably return False for these; we don't because @@ -349,8 +344,8 @@ class IntersectionMatcherTests(unittest.TestCase): def testVisitdirM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() im = matchmod.intersectmatchers(m1, m2) # im should be equivalent to a alwaysmatcher. self.assertEqual(im.visitdir(b'.'), b'all') @@ -362,8 +357,8 @@ self.assertEqual(im.visitdir(b'folder'), b'all') def testVisitchildrensetM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() im = matchmod.intersectmatchers(m1, m2) # im should be equivalent to a alwaysmatcher. self.assertEqual(im.visitchildrenset(b'.'), b'all') @@ -375,8 +370,8 @@ self.assertEqual(im.visitchildrenset(b'folder'), b'all') def testVisitdirM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() im = matchmod.intersectmatchers(m1, m2) # im should be equivalent to a nevermatcher. 
self.assertFalse(im.visitdir(b'.')) @@ -388,8 +383,8 @@ self.assertFalse(im.visitdir(b'folder')) def testVisitchildrensetM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() im = matchmod.intersectmatchers(m1, m2) # im should be equivalent to a nevermqtcher. self.assertEqual(im.visitchildrenset(b'.'), set()) @@ -401,7 +396,7 @@ self.assertEqual(im.visitchildrenset(b'folder'), set()) def testVisitdirM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir']) im = matchmod.intersectmatchers(m1, m2) self.assertEqual(im.visitdir(b'.'), True) @@ -416,7 +411,7 @@ self.assertEqual(im.visitdir(b'dir/subdir/x'), True) def testVisitchildrensetM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir']) im = matchmod.intersectmatchers(m1, m2) self.assertEqual(im.visitchildrenset(b'.'), {b'dir'}) @@ -541,8 +536,8 @@ class UnionMatcherTests(unittest.TestCase): def testVisitdirM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. self.assertEqual(um.visitdir(b'.'), b'all') @@ -554,8 +549,8 @@ self.assertEqual(um.visitdir(b'folder'), b'all') def testVisitchildrensetM2always(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.alwaysmatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. 
self.assertEqual(um.visitchildrenset(b'.'), b'all') @@ -567,8 +562,8 @@ self.assertEqual(um.visitchildrenset(b'folder'), b'all') def testVisitdirM1never(self): - m1 = matchmod.nevermatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.nevermatcher() + m2 = matchmod.alwaysmatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. self.assertEqual(um.visitdir(b'.'), b'all') @@ -580,8 +575,8 @@ self.assertEqual(um.visitdir(b'folder'), b'all') def testVisitchildrensetM1never(self): - m1 = matchmod.nevermatcher(b'', b'') - m2 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.nevermatcher() + m2 = matchmod.alwaysmatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. self.assertEqual(um.visitchildrenset(b'.'), b'all') @@ -593,8 +588,8 @@ self.assertEqual(um.visitchildrenset(b'folder'), b'all') def testVisitdirM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. self.assertEqual(um.visitdir(b'.'), b'all') @@ -606,8 +601,8 @@ self.assertEqual(um.visitdir(b'folder'), b'all') def testVisitchildrensetM2never(self): - m1 = matchmod.alwaysmatcher(b'', b'') - m2 = matchmod.nevermatcher(b'', b'') + m1 = matchmod.alwaysmatcher() + m2 = matchmod.nevermatcher() um = matchmod.unionmatcher([m1, m2]) # um should be equivalent to a alwaysmatcher. 
self.assertEqual(um.visitchildrenset(b'.'), b'all') @@ -619,7 +614,7 @@ self.assertEqual(um.visitchildrenset(b'folder'), b'all') def testVisitdirM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', patterns=[b'path:dir/subdir']) um = matchmod.unionmatcher([m1, m2]) self.assertEqual(um.visitdir(b'.'), b'all') @@ -631,7 +626,7 @@ self.assertEqual(um.visitdir(b'dir/subdir/x'), b'all') def testVisitchildrensetM2SubdirPrefix(self): - m1 = matchmod.alwaysmatcher(b'', b'') + m1 = matchmod.alwaysmatcher() m2 = matchmod.match(b'', b'', include=[b'path:dir/subdir']) um = matchmod.unionmatcher([m1, m2]) self.assertEqual(um.visitchildrenset(b'.'), b'all') @@ -782,7 +777,7 @@ def testVisitdir(self): m = matchmod.match(util.localpath(b'root/d'), b'e/f', [b'../a.txt', b'b.txt']) - pm = matchmod.prefixdirmatcher(b'root', b'd/e/f', b'd', m) + pm = matchmod.prefixdirmatcher(b'd', m) # `m` elides 'd' because it's part of the root, and the rest of the # patterns are relative. @@ -814,7 +809,7 @@ def testVisitchildrenset(self): m = matchmod.match(util.localpath(b'root/d'), b'e/f', [b'../a.txt', b'b.txt']) - pm = matchmod.prefixdirmatcher(b'root', b'd/e/f', b'd', m) + pm = matchmod.prefixdirmatcher(b'd', m) # OPT: visitchildrenset could possibly return {'e'} and {'f'} for these # next two, respectively; patternmatcher does not have this
--- a/tests/test-merge10.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-merge10.t Tue Feb 19 21:55:05 2019 -0800 @@ -37,8 +37,9 @@ (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up -C 2 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge - merging testdir/subdir/a and testdir/a to testdir/subdir/a +Abuse this test for also testing that merge respects ui.relative-paths + $ hg --cwd testdir merge --config ui.relative-paths=yes + merging subdir/a and a to subdir/a 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg stat
--- a/tests/test-missing-capability.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-missing-capability.t Tue Feb 19 21:55:05 2019 -0800 @@ -15,7 +15,7 @@ > from mercurial import extensions, wireprotov1server > def wcapabilities(orig, *args, **kwargs): > cap = orig(*args, **kwargs) - > cap.remove('$1') + > cap.remove(b'$1') > return cap > extensions.wrapfunction(wireprotov1server, '_capabilities', wcapabilities) > EOF
--- a/tests/test-mq-eol.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-mq-eol.t Tue Feb 19 21:55:05 2019 -0800 @@ -23,17 +23,21 @@ > w(b' c\r\n') > w(b' d\n') > w(b'-e\n') - > w(b'\ No newline at end of file\n') + > w(b'\\\\ No newline at end of file\n') > w(b'+z\r\n') - > w(b'\ No newline at end of file\r\n') + > w(b'\\\\ No newline at end of file\r\n') > EOF $ cat > cateol.py <<EOF > import sys + > try: + > stdout = sys.stdout.buffer + > except AttributeError: + > stdout = sys.stdout > for line in open(sys.argv[1], 'rb'): > line = line.replace(b'\r', b'<CR>') > line = line.replace(b'\n', b'<LF>') - > print(line) + > stdout.write(line + b'\n') > EOF $ hg init repo
--- a/tests/test-mq-missingfiles.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-mq-missingfiles.t Tue Feb 19 21:55:05 2019 -0800 @@ -5,16 +5,20 @@ $ cat > writelines.py <<EOF > import sys + > if sys.version_info[0] >= 3: + > encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8') + > else: + > encode = lambda x: x.decode('string_escape') > path = sys.argv[1] > args = sys.argv[2:] > assert (len(args) % 2) == 0 > > f = open(path, 'wb') > for i in range(len(args) // 2): - > count, s = args[2*i:2*i+2] + > count, s = args[2 * i:2 * i + 2] > count = int(count) - > s = s.decode('string_escape') - > f.write(s*count) + > s = encode(s) + > f.write(s * count) > f.close() > EOF
--- a/tests/test-mq-qimport.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-mq-qimport.t Tue Feb 19 21:55:05 2019 -0800 @@ -1,15 +1,19 @@ $ cat > writelines.py <<EOF > import sys + > if sys.version_info[0] >= 3: + > encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8') + > else: + > encode = lambda x: x.decode('string_escape') > path = sys.argv[1] > args = sys.argv[2:] > assert (len(args) % 2) == 0 > > f = open(path, 'wb') - > for i in range(len(args)//2): - > count, s = args[2*i:2*i+2] + > for i in range(len(args) // 2): + > count, s = args[2 * i:2 * i + 2] > count = int(count) - > s = s.decode('string_escape') - > f.write(s*count) + > s = encode(s) + > f.write(s * count) > f.close() > > EOF
--- a/tests/test-mq-qnew.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-mq-qnew.t Tue Feb 19 21:55:05 2019 -0800 @@ -305,9 +305,9 @@ HG: branch 'default' HG: no files changed ==== - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt
--- a/tests/test-mq-subrepo-svn.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-mq-subrepo-svn.t Tue Feb 19 21:55:05 2019 -0800 @@ -23,11 +23,7 @@ $ svnadmin create svn-repo-2499 $ SVNREPOPATH=`pwd`/svn-repo-2499/project -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" $ mkdir -p svn-project-2499/trunk $ svn import -qm 'init project' svn-project-2499 "$SVNREPOURL"
--- a/tests/test-mq.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-mq.t Tue Feb 19 21:55:05 2019 -0800 @@ -1406,7 +1406,7 @@ $ hg qpush -f --verbose --config 'ui.origbackuppath=.hg/origbackups' applying empty creating directory: $TESTTMP/forcepush/.hg/origbackups - saving current version of hello.txt as $TESTTMP/forcepush/.hg/origbackups/hello.txt + saving current version of hello.txt as .hg/origbackups/hello.txt patching file hello.txt committing files: hello.txt
--- a/tests/test-narrow-trackedcmd.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-narrow-trackedcmd.t Tue Feb 19 21:55:05 2019 -0800 @@ -218,3 +218,13 @@ adding file changes added 3 changesets with 0 changes to 0 files new changesets *:* (glob) + + $ cd .. + +Testing tracked command on a non-narrow repo + + $ hg init non-narrow + $ cd non-narrow + $ hg tracked --addinclude foobar + abort: the tracked command is only supported on respositories cloned with --narrow + [255]
--- a/tests/test-narrow-widen-no-ellipsis.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-narrow-widen-no-ellipsis.t Tue Feb 19 21:55:05 2019 -0800 @@ -406,7 +406,7 @@ * bookmark 11:* (glob) $ hg unbundle .hg/strip-backup/*-widen.hg abort: .hg/strip-backup/*-widen.hg: $ENOTDIR$ (windows !) - abort: $ENOENT$: .hg/strip-backup/*-widen.hg (no-windows !) + abort: $ENOENT$: '.hg/strip-backup/*-widen.hg' (no-windows !) [255] $ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n" 11: local
--- a/tests/test-newcgi.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-newcgi.t Tue Feb 19 21:55:05 2019 -0800 @@ -18,7 +18,7 @@ > from mercurial.hgweb.request import wsgiapplication > > def make_web_app(): - > return hgweb("test", "Empty test repository") + > return hgweb(b"test", b"Empty test repository") > > wsgicgi.launch(wsgiapplication(make_web_app)) > HGWEB @@ -44,7 +44,7 @@ > from mercurial.hgweb.request import wsgiapplication > > def make_web_app(): - > return hgwebdir("hgweb.config") + > return hgwebdir(b"hgweb.config") > > wsgicgi.launch(wsgiapplication(make_web_app)) > HGWEBDIR
--- a/tests/test-notify.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-notify.t Tue Feb 19 21:55:05 2019 -0800 @@ -455,7 +455,7 @@ > test = False > mbox = mbox > EOF - $ "$PYTHON" -c 'open("a/a", "ab").write("no" * 500 + "\xd1\x84" + "\n")' + $ "$PYTHON" -c 'open("a/a", "ab").write(b"no" * 500 + b"\xd1\x84" + b"\n")' $ hg --cwd a commit -A -m "long line" $ hg --traceback --cwd b pull ../a pulling from ../a
--- a/tests/test-obsmarker-template.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-obsmarker-template.t Tue Feb 19 21:55:05 2019 -0800 @@ -2429,6 +2429,23 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: ROOT +Check that {negrev} shows usable negative revisions despite hidden commits + + $ hg log -G -T "{negrev}\n" + @ -3 + | + o -4 + + + $ hg log -G -T "{negrev}\n" --hidden + x -1 + | + | x -2 + |/ + | @ -3 + |/ + o -4 + Test templates with splitted and pruned commit ============================================== @@ -2639,3 +2656,10 @@ |/ Obsfate: rewritten using amend as 2:718c0d00cee1 by test (at 1970-01-01 00:00 +0000); o ea207398892e + $ hg log -G -T "{negrev}\n" + @ -1 + | + o -2 + | + o -5 +
--- a/tests/test-oldcgi.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-oldcgi.t Tue Feb 19 21:55:05 2019 -0800 @@ -55,7 +55,7 @@ > # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples > # or use a dictionary with entries like 'virtual/path': '/real/path' > - > h = hgweb.hgwebdir("hgweb.config") + > h = hgweb.hgwebdir(b"hgweb.config") > h.run() > HGWEBDIR
--- a/tests/test-parseindex.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-parseindex.t Tue Feb 19 21:55:05 2019 -0800 @@ -27,7 +27,7 @@ $ cat >> test.py << EOF > from __future__ import print_function - > from mercurial import changelog, node, vfs + > from mercurial import changelog, node, pycompat, vfs > > class singlebyteread(object): > def __init__(self, real): @@ -55,10 +55,10 @@ > return singlebyteread(f) > return wrapper > - > cl = changelog.changelog(opener('.hg/store')) + > cl = changelog.changelog(opener(b'.hg/store')) > print(len(cl), 'revisions:') > for r in cl: - > print(node.short(cl.node(r))) + > print(pycompat.sysstr(node.short(cl.node(r)))) > EOF $ "$PYTHON" test.py 2 revisions: @@ -76,7 +76,7 @@ $ "$PYTHON" <<EOF > from __future__ import print_function > from mercurial import changelog, vfs - > cl = changelog.changelog(vfs.vfs('.hg/store')) + > cl = changelog.changelog(vfs.vfs(b'.hg/store')) > print('good heads:') > for head in [0, len(cl) - 1, -1]: > print('%s: %r' % (head, cl.reachableroots(0, [head], [0]))) @@ -112,7 +112,7 @@ 10000: head out of range -2: head out of range -10000: head out of range - None: an integer is required + None: an integer is required( .got type NoneType.)? (re) good roots: 0: [0] 1: [1] @@ -123,7 +123,7 @@ -2: [] -10000: [] bad roots: - None: an integer is required + None: an integer is required( .got type NoneType.)? (re) $ cd .. @@ -178,8 +178,8 @@ $ cat <<EOF > test.py > from __future__ import print_function > import sys - > from mercurial import changelog, vfs - > cl = changelog.changelog(vfs.vfs(sys.argv[1])) + > from mercurial import changelog, pycompat, vfs + > cl = changelog.changelog(vfs.vfs(pycompat.fsencode(sys.argv[1]))) > n0, n1 = cl.node(0), cl.node(1) > ops = [ > ('reachableroots',
--- a/tests/test-patch-offset.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-patch-offset.t Tue Feb 19 21:55:05 2019 -0800 @@ -9,7 +9,7 @@ > for pattern in patterns: > count = int(pattern[0:-1]) > char = pattern[-1].encode('utf8') + b'\n' - > fp.write(char*count) + > fp.write(char * count) > fp.close() > EOF
--- a/tests/test-permissions.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-permissions.t Tue Feb 19 21:55:05 2019 -0800 @@ -22,7 +22,7 @@ checking manifests crosschecking files in changesets and manifests checking files - abort: Permission denied: $TESTTMP/t/.hg/store/data/a.i + abort: Permission denied: '$TESTTMP/t/.hg/store/data/a.i' [255] $ chmod +r .hg/store/data/a.i @@ -39,7 +39,7 @@ $ echo barber > a $ hg commit -m "2" trouble committing a! - abort: Permission denied: $TESTTMP/t/.hg/store/data/a.i + abort: Permission denied: '$TESTTMP/t/.hg/store/data/a.i' [255] $ chmod -w .
--- a/tests/test-purge.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-purge.t Tue Feb 19 21:55:05 2019 -0800 @@ -52,7 +52,7 @@ $ "$PYTHON" <<EOF > import os > import stat - > f= 'untracked_file_readonly' + > f = 'untracked_file_readonly' > os.chmod(f, stat.S_IMODE(os.stat(f).st_mode) & ~stat.S_IWRITE) > EOF $ hg purge -p
--- a/tests/test-push-http.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-push-http.t Tue Feb 19 21:55:05 2019 -0800 @@ -74,8 +74,8 @@ $ cat >> .hg/hgrc <<EOF > allow_push = * > [hooks] - > changegroup = sh -c "printenv.py changegroup 0" - > pushkey = sh -c "printenv.py pushkey 0" + > changegroup = sh -c "printenv.py --line changegroup 0" + > pushkey = sh -c "printenv.py --line pushkey 0" > txnclose-phase.test = sh $TESTTMP/hook.sh > EOF $ req "--debug --config extensions.blackbox=" @@ -94,8 +94,15 @@ remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: running hook txnclose-phase.test: sh $TESTTMP/hook.sh remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: running hook changegroup: sh -c "printenv.py changegroup 0" - remote: changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: running hook changegroup: sh -c "printenv.py --line changegroup 0" + remote: changegroup hook: HG_HOOKNAME=changegroup + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -114,8 +121,15 @@ remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: running hook txnclose-phase.test: sh $TESTTMP/hook.sh remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: running hook changegroup: sh -c "printenv.py changegroup 0" - remote: changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 
HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: running hook changegroup: sh -c "printenv.py --line changegroup 0" + remote: changegroup hook: HG_HOOKNAME=changegroup + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -125,8 +139,8 @@ $ cat >> .hg/hgrc <<EOF > allow_push = * > [hooks] - > changegroup = sh -c "printenv.py changegroup 0" - > pushkey = sh -c "printenv.py pushkey 0" + > changegroup = sh -c "printenv.py --line changegroup 0" + > pushkey = sh -c "printenv.py --line pushkey 0" > txnclose-phase.test = sh $TESTTMP/hook.sh > EOF $ req @@ -138,7 +152,15 @@ remote: added 1 changesets with 1 changes to 1 files remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: changegroup hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: changegroup hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=changegroup + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -157,8 +179,16 @@ remote: added 1 changesets with 1 changes to 1 files remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public 
remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) (bundle1 !) - remote: changegroup hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) (bundle2 !) + remote: changegroup hook: HG_HOOKNAME=changegroup (no-bundle2 !) + remote: changegroup hook: HG_BUNDLE2=1 (bundle2 !) + remote: HG_HOOKNAME=changegroup (bundle2 !) + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -176,8 +206,16 @@ remote: added 1 changesets with 1 changes to 1 files remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public - remote: changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) (bundle1 !) - remote: changegroup hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) (bundle2 !) + remote: changegroup hook: HG_HOOKNAME=changegroup (no-bundle2 !) 
+ remote: changegroup hook: HG_BUNDLE2=1 (bundle2 !) + remote: HG_HOOKNAME=changegroup (bundle2 !) + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -209,6 +247,14 @@ remote: phase-move: cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b: draft -> public remote: phase-move: ba677d0156c1196c1a699fa53f390dcfc3ce3872: -> public remote: changegroup hook: * (glob) + remote: HG_HOOKNAME=changegroup (bundle2 !) + remote: HG_HOOKTYPE=changegroup + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) @@ -221,7 +267,7 @@ > push_ssl = false > allow_push = * > [hooks] - > prepushkey = sh -c "printenv.py prepushkey 1" + > prepushkey = sh -c "printenv.py --line prepushkey 1" > [devel] > legacy.exchange=phases > EOF @@ -253,7 +299,21 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - remote: prepushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: prepushkey hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + 
remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_OLD=1 + remote: HG_PENDING=$TESTTMP/test + remote: HG_PHASES_MOVED=1 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: remote: pushkey-abort: prepushkey hook exited with status 1 remote: transaction abort! remote: rollback completed @@ -267,7 +327,7 @@ $ cat >> .hg/hgrc <<EOF > [hooks] - > prepushkey = sh -c "printenv.py prepushkey 0" + > prepushkey = sh -c "printenv.py --line prepushkey 0" > EOF We don't need to test bundle1 because it succeeded above. @@ -280,7 +340,21 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - remote: prepushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: prepushkey hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_OLD=1 + remote: HG_PENDING=$TESTTMP/test + remote: HG_PHASES_MOVED=1 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors #endif @@ -293,7 +367,7 @@ > [phases] > publish = false > [hooks] - > prepushkey = sh -c "printenv.py prepushkey 1" + > prepushkey = sh -c "printenv.py --line prepushkey 1" > EOF #if bundle1 @@ -304,7 +378,13 @@ remote: adding manifests remote: adding file changes remote: added 1 
changesets with 1 changes to 1 files - remote: prepushkey hook: HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 + remote: prepushkey hook: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_OLD=1 + remote: remote: pushkey-abort: prepushkey hook exited with status 1 updating ba677d0156c1 to public failed! % serve errors @@ -318,7 +398,21 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - remote: prepushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: prepushkey hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_OLD=1 + remote: HG_PENDING=$TESTTMP/test + remote: HG_PHASES_MOVED=1 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: remote: pushkey-abort: prepushkey hook exited with status 1 remote: transaction abort! 
remote: rollback completed @@ -331,7 +425,7 @@ $ cat >> .hg/hgrc <<EOF > [hooks] - > prepushkey = sh -c "printenv.py prepushkey 0" + > prepushkey = sh -c "printenv.py --line prepushkey 0" > EOF #if bundle1 @@ -339,7 +433,13 @@ pushing to http://localhost:$HGPORT/ searching for changes no changes found - remote: prepushkey hook: HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 + remote: prepushkey hook: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_OLD=1 + remote: % serve errors [1] #endif @@ -352,7 +452,21 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - remote: prepushkey hook: HG_BUNDLE2=1 HG_HOOKNAME=prepushkey HG_HOOKTYPE=prepushkey HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:http:$LOCALIP: (glob) + remote: prepushkey hook: HG_BUNDLE2=1 + remote: HG_HOOKNAME=prepushkey + remote: HG_HOOKTYPE=prepushkey + remote: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NAMESPACE=phases + remote: HG_NEW=0 + remote: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 + remote: HG_OLD=1 + remote: HG_PENDING=$TESTTMP/test + remote: HG_PHASES_MOVED=1 + remote: HG_SOURCE=serve + remote: HG_TXNID=TXN:$ID$ + remote: HG_URL=remote:http:$LOCALIP: (glob) + remote: % serve errors #endif
--- a/tests/test-remotefilelog-cacheprocess.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-remotefilelog-cacheprocess.t Tue Feb 19 21:55:05 2019 -0800 @@ -56,11 +56,11 @@ > log('requested %r\n' % key) > sys.stdout.flush() > elif cmd == 'set': - > assert False, 'todo writing' + > raise Exception('todo writing') > else: - > assert False, 'unknown command! %r' % cmd + > raise Exception('unknown command! %r' % cmd) > except Exception as e: - > log('Exception! %r\n' % e) + > log('Exception! %s\n' % e) > raise > EOF @@ -79,7 +79,7 @@ requested 'master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a' requested 'master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca' got command 'set' - Exception! AssertionError('todo writing',) + Exception! todo writing Test cache hits. $ mv hgcache oldhgcache @@ -110,7 +110,7 @@ requested 'y\x00master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca' requested 'z\x00master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a' got command 'set' - Exception! AssertionError('todo writing',) + Exception! todo writing Test cache hits with includepath. $ mv hgcache oldhgcache
--- a/tests/test-remotefilelog-datapack.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-remotefilelog-datapack.py Tue Feb 19 21:55:05 2019 -0800 @@ -40,7 +40,7 @@ shutil.rmtree(d) def makeTempDir(self): - tempdir = tempfile.mkdtemp() + tempdir = pycompat.bytestr(tempfile.mkdtemp()) self.tempdirs.append(tempdir) return tempdir @@ -48,11 +48,12 @@ return hashlib.sha1(content).digest() def getFakeHash(self): - return ''.join(chr(random.randint(0, 255)) for _ in range(20)) + return b''.join(pycompat.bytechr(random.randint(0, 255)) + for _ in range(20)) def createPack(self, revisions=None, packdir=None): if revisions is None: - revisions = [("filename", self.getFakeHash(), nullid, "content")] + revisions = [(b"filename", self.getFakeHash(), nullid, b"content")] if packdir is None: packdir = self.makeTempDir() @@ -73,23 +74,23 @@ def _testAddSingle(self, content): """Test putting a simple blob into a pack and reading it out. """ - filename = "foo" + filename = b"foo" node = self.getHash(content) revisions = [(filename, node, nullid, content)] pack = self.createPack(revisions) if self.paramsavailable: - self.assertEquals(pack.params.fanoutprefix, - basepack.SMALLFANOUTPREFIX) + self.assertEqual(pack.params.fanoutprefix, + basepack.SMALLFANOUTPREFIX) chain = pack.getdeltachain(filename, node) - self.assertEquals(content, chain[0][4]) + self.assertEqual(content, chain[0][4]) def testAddSingle(self): - self._testAddSingle('') + self._testAddSingle(b'') def testAddSingleEmpty(self): - self._testAddSingle('abcdef') + self._testAddSingle(b'abcdef') def testAddMultiple(self): """Test putting multiple unrelated blobs into a pack and reading them @@ -97,8 +98,8 @@ """ revisions = [] for i in range(10): - filename = "foo%s" % i - content = "abcdef%s" % i + filename = b"foo%d" % i + content = b"abcdef%d" % i node = self.getHash(content) revisions.append((filename, node, self.getFakeHash(), content)) @@ -106,19 +107,19 @@ for filename, node, base, content in revisions: entry = 
pack.getdelta(filename, node) - self.assertEquals((content, filename, base, {}), entry) + self.assertEqual((content, filename, base, {}), entry) chain = pack.getdeltachain(filename, node) - self.assertEquals(content, chain[0][4]) + self.assertEqual(content, chain[0][4]) def testAddDeltas(self): """Test putting multiple delta blobs into a pack and read the chain. """ revisions = [] - filename = "foo" + filename = b"foo" lastnode = nullid for i in range(10): - content = "abcdef%s" % i + content = b"abcdef%d" % i node = self.getHash(content) revisions.append((filename, node, lastnode, content)) lastnode = node @@ -127,13 +128,13 @@ entry = pack.getdelta(filename, revisions[0][1]) realvalue = (revisions[0][3], filename, revisions[0][2], {}) - self.assertEquals(entry, realvalue) + self.assertEqual(entry, realvalue) # Test that the chain for the final entry has all the others chain = pack.getdeltachain(filename, node) for i in range(10): - content = "abcdef%s" % i - self.assertEquals(content, chain[-i - 1][4]) + content = b"abcdef%d" % i + self.assertEqual(content, chain[-i - 1][4]) def testPackMany(self): """Pack many related and unrelated objects. 
@@ -143,10 +144,10 @@ blobs = {} random.seed(0) for i in range(100): - filename = "filename-%s" % i + filename = b"filename-%d" % i filerevs = [] for j in range(random.randint(1, 100)): - content = "content-%s" % j + content = b"content-%d" % j node = self.getHash(content) lastnode = nullid if len(filerevs) > 0: @@ -158,22 +159,22 @@ pack = self.createPack(revisions) # Verify the pack contents - for (filename, node, lastnode), content in sorted(blobs.iteritems()): + for (filename, node, lastnode), content in sorted(blobs.items()): chain = pack.getdeltachain(filename, node) for entry in chain: expectedcontent = blobs[(entry[0], entry[1], entry[3])] - self.assertEquals(entry[4], expectedcontent) + self.assertEqual(entry[4], expectedcontent) def testPackMetadata(self): revisions = [] for i in range(100): - filename = '%s.txt' % i - content = 'put-something-here \n' * i + filename = b'%d.txt' % i + content = b'put-something-here \n' * i node = self.getHash(content) meta = {constants.METAKEYFLAG: i ** 4, constants.METAKEYSIZE: len(content), - 'Z': 'random_string', - '_': '\0' * i} + b'Z': b'random_string', + b'_': b'\0' * i} revisions.append((filename, node, nullid, content, meta)) pack = self.createPack(revisions) for name, node, x, content, origmeta in revisions: @@ -181,50 +182,51 @@ # flag == 0 should be optimized out if origmeta[constants.METAKEYFLAG] == 0: del origmeta[constants.METAKEYFLAG] - self.assertEquals(parsedmeta, origmeta) + self.assertEqual(parsedmeta, origmeta) def testGetMissing(self): """Test the getmissing() api. 
""" revisions = [] - filename = "foo" + filename = b"foo" lastnode = nullid for i in range(10): - content = "abcdef%s" % i + content = b"abcdef%d" % i node = self.getHash(content) revisions.append((filename, node, lastnode, content)) lastnode = node pack = self.createPack(revisions) - missing = pack.getmissing([("foo", revisions[0][1])]) + missing = pack.getmissing([(b"foo", revisions[0][1])]) self.assertFalse(missing) - missing = pack.getmissing([("foo", revisions[0][1]), - ("foo", revisions[1][1])]) + missing = pack.getmissing([(b"foo", revisions[0][1]), + (b"foo", revisions[1][1])]) self.assertFalse(missing) fakenode = self.getFakeHash() - missing = pack.getmissing([("foo", revisions[0][1]), ("foo", fakenode)]) - self.assertEquals(missing, [("foo", fakenode)]) + missing = pack.getmissing([(b"foo", revisions[0][1]), + (b"foo", fakenode)]) + self.assertEqual(missing, [(b"foo", fakenode)]) def testAddThrows(self): pack = self.createPack() try: - pack.add('filename', nullid, 'contents') + pack.add(b'filename', nullid, b'contents') self.assertTrue(False, "datapack.add should throw") except RuntimeError: pass def testBadVersionThrows(self): pack = self.createPack() - path = pack.path + '.datapack' - with open(path) as f: + path = pack.path + b'.datapack' + with open(path, 'rb') as f: raw = f.read() raw = struct.pack('!B', 255) + raw[1:] os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE) - with open(path, 'w+') as f: + with open(path, 'wb+') as f: f.write(raw) try: @@ -235,10 +237,10 @@ def testMissingDeltabase(self): fakenode = self.getFakeHash() - revisions = [("filename", fakenode, self.getFakeHash(), "content")] + revisions = [(b"filename", fakenode, self.getFakeHash(), b"content")] pack = self.createPack(revisions) - chain = pack.getdeltachain("filename", fakenode) - self.assertEquals(len(chain), 1) + chain = pack.getdeltachain(b"filename", fakenode) + self.assertEqual(len(chain), 1) def testLargePack(self): """Test creating and reading from a large pack with 
over X entries. @@ -247,7 +249,7 @@ blobs = {} total = basepack.SMALLFANOUTCUTOFF + 1 for i in pycompat.xrange(total): - filename = "filename-%s" % i + filename = b"filename-%d" % i content = filename node = self.getHash(content) blobs[(filename, node)] = content @@ -255,12 +257,12 @@ pack = self.createPack(revisions) if self.paramsavailable: - self.assertEquals(pack.params.fanoutprefix, - basepack.LARGEFANOUTPREFIX) + self.assertEqual(pack.params.fanoutprefix, + basepack.LARGEFANOUTPREFIX) - for (filename, node), content in blobs.iteritems(): + for (filename, node), content in blobs.items(): actualcontent = pack.getdeltachain(filename, node)[0][4] - self.assertEquals(actualcontent, content) + self.assertEqual(actualcontent, content) def testPacksCache(self): """Test that we remember the most recent packs while fetching the delta @@ -274,12 +276,12 @@ for i in range(numpacks): chain = [] - revision = (str(i), self.getFakeHash(), nullid, "content") + revision = (b'%d' % i, self.getFakeHash(), nullid, b"content") for _ in range(revisionsperpack): chain.append(revision) revision = ( - str(i), + b'%d' % i, self.getFakeHash(), revision[1], self.getFakeHash() @@ -290,7 +292,7 @@ class testdatapackstore(datapack.datapackstore): # Ensures that we are not keeping everything in the cache. 
- DEFAULTCACHESIZE = numpacks / 2 + DEFAULTCACHESIZE = numpacks // 2 store = testdatapackstore(uimod.ui(), packdir) @@ -300,12 +302,12 @@ chain = store.getdeltachain(revision[0], revision[1]) mostrecentpack = next(iter(store.packs), None) - self.assertEquals( + self.assertEqual( mostrecentpack.getdeltachain(revision[0], revision[1]), chain ) - self.assertEquals(randomchain.index(revision) + 1, len(chain)) + self.assertEqual(randomchain.index(revision) + 1, len(chain)) # perf test off by default since it's slow def _testIndexPerf(self): @@ -330,8 +332,8 @@ for packsize in packsizes: revisions = [] for i in pycompat.xrange(packsize): - filename = "filename-%s" % i - content = "content-%s" % i + filename = b"filename-%d" % i + content = b"content-%d" % i node = self.getHash(content) revisions.append((filename, node, nullid, content)) @@ -350,9 +352,9 @@ start = time.time() pack.getmissing(findnodes[:lookupsize]) elapsed = time.time() - start - print ("%s pack %s lookups = %0.04f" % - (('%s' % packsize).rjust(7), - ('%s' % lookupsize).rjust(7), + print ("%s pack %d lookups = %0.04f" % + (('%d' % packsize).rjust(7), + ('%d' % lookupsize).rjust(7), elapsed)) print("")
--- a/tests/test-remotefilelog-gc.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-remotefilelog-gc.t Tue Feb 19 21:55:05 2019 -0800 @@ -107,6 +107,7 @@ # Test that warning is displayed when the repo path is malformed $ printf "asdas\0das" >> $CACHEDIR/repos - $ hg gc 2>&1 | head -n2 - warning: malformed path: * (glob) - Traceback (most recent call last): + $ hg gc + abort: invalid path asdas\x00da: stat: embedded null character in path (esc) (py3 !) + abort: invalid path asdas\x00da: stat() argument 1 must be encoded string without null bytes, not str (esc) (no-py3 !) + [255]
--- a/tests/test-remotefilelog-histpack.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-remotefilelog-histpack.py Tue Feb 19 21:55:05 2019 -0800 @@ -52,7 +52,7 @@ node, p1node, p2node, and linknode. """ if revisions is None: - revisions = [("filename", self.getFakeHash(), nullid, nullid, + revisions = [(b"filename", self.getFakeHash(), nullid, nullid, self.getFakeHash(), None)] packdir = pycompat.fsencode(self.makeTempDir()) @@ -68,7 +68,7 @@ def testAddSingle(self): """Test putting a single entry into a pack and reading it out. """ - filename = "foo" + filename = b"foo" node = self.getFakeHash() p1 = self.getFakeHash() p2 = self.getFakeHash() @@ -78,9 +78,9 @@ pack = self.createPack(revisions) actual = pack.getancestors(filename, node)[node] - self.assertEquals(p1, actual[0]) - self.assertEquals(p2, actual[1]) - self.assertEquals(linknode, actual[2]) + self.assertEqual(p1, actual[0]) + self.assertEqual(p2, actual[1]) + self.assertEqual(linknode, actual[2]) def testAddMultiple(self): """Test putting multiple unrelated revisions into a pack and reading @@ -88,7 +88,7 @@ """ revisions = [] for i in range(10): - filename = "foo-%s" % i + filename = b"foo-%d" % i node = self.getFakeHash() p1 = self.getFakeHash() p2 = self.getFakeHash() @@ -99,10 +99,10 @@ for filename, node, p1, p2, linknode, copyfrom in revisions: actual = pack.getancestors(filename, node)[node] - self.assertEquals(p1, actual[0]) - self.assertEquals(p2, actual[1]) - self.assertEquals(linknode, actual[2]) - self.assertEquals(copyfrom, actual[3]) + self.assertEqual(p1, actual[0]) + self.assertEqual(p2, actual[1]) + self.assertEqual(linknode, actual[2]) + self.assertEqual(copyfrom, actual[3]) def testAddAncestorChain(self): """Test putting multiple revisions in into a pack and read the ancestor @@ -124,10 +124,10 @@ ancestors = pack.getancestors(revisions[0][0], revisions[0][1]) for filename, node, p1, p2, linknode, copyfrom in revisions: ap1, ap2, alinknode, acopyfrom = ancestors[node] - 
self.assertEquals(ap1, p1) - self.assertEquals(ap2, p2) - self.assertEquals(alinknode, linknode) - self.assertEquals(acopyfrom, copyfrom) + self.assertEqual(ap1, p1) + self.assertEqual(ap2, p2) + self.assertEqual(alinknode, linknode) + self.assertEqual(acopyfrom, copyfrom) def testPackMany(self): """Pack many related and unrelated ancestors. @@ -161,16 +161,16 @@ pack = self.createPack(revisions) # Verify the pack contents - for (filename, node), (p1, p2, lastnode) in allentries.items(): + for (filename, node) in allentries: ancestors = pack.getancestors(filename, node) - self.assertEquals(ancestorcounts[(filename, node)], - len(ancestors)) + self.assertEqual(ancestorcounts[(filename, node)], + len(ancestors)) for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items(): ep1, ep2, elinknode = allentries[(filename, anode)] - self.assertEquals(ap1, ep1) - self.assertEquals(ap2, ep2) - self.assertEquals(alinknode, elinknode) - self.assertEquals(copyfrom, None) + self.assertEqual(ap1, ep1) + self.assertEqual(ap2, ep2) + self.assertEqual(alinknode, elinknode) + self.assertEqual(copyfrom, None) def testGetNodeInfo(self): revisions = [] @@ -186,10 +186,10 @@ # Test that getnodeinfo returns the expected results for filename, node, p1, p2, linknode, copyfrom in revisions: ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node) - self.assertEquals(ap1, p1) - self.assertEquals(ap2, p2) - self.assertEquals(alinknode, linknode) - self.assertEquals(acopyfrom, copyfrom) + self.assertEqual(ap1, p1) + self.assertEqual(ap2, p2) + self.assertEqual(alinknode, linknode) + self.assertEqual(acopyfrom, copyfrom) def testGetMissing(self): """Test the getmissing() api. 
@@ -215,11 +215,11 @@ fakenode = self.getFakeHash() missing = pack.getmissing([(filename, revisions[0][1]), (filename, fakenode)]) - self.assertEquals(missing, [(filename, fakenode)]) + self.assertEqual(missing, [(filename, fakenode)]) # Test getmissing on a non-existant filename - missing = pack.getmissing([("bar", fakenode)]) - self.assertEquals(missing, [("bar", fakenode)]) + missing = pack.getmissing([(b"bar", fakenode)]) + self.assertEqual(missing, [(b"bar", fakenode)]) def testAddThrows(self): pack = self.createPack() @@ -232,12 +232,12 @@ def testBadVersionThrows(self): pack = self.createPack() - path = pack.path + '.histpack' - with open(path) as f: + path = pack.path + b'.histpack' + with open(path, 'rb') as f: raw = f.read() raw = struct.pack('!B', 255) + raw[1:] os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE) - with open(path, 'w+') as f: + with open(path, 'wb+') as f: f.write(raw) try: @@ -260,14 +260,14 @@ revisions.append((filename, node, p1, p2, linknode, None)) pack = self.createPack(revisions) - self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX) + self.assertEqual(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX) for filename, node, p1, p2, linknode, copyfrom in revisions: actual = pack.getancestors(filename, node)[node] - self.assertEquals(p1, actual[0]) - self.assertEquals(p2, actual[1]) - self.assertEquals(linknode, actual[2]) - self.assertEquals(copyfrom, actual[3]) + self.assertEqual(p1, actual[0]) + self.assertEqual(p2, actual[1]) + self.assertEqual(linknode, actual[2]) + self.assertEqual(copyfrom, actual[3]) # TODO: # histpack store: # - repack two packs into one
--- a/tests/test-repair-strip.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-repair-strip.t Tue Feb 19 21:55:05 2019 -0800 @@ -53,7 +53,7 @@ rollback failed - please run hg recover (failure reason: [Errno 13] Permission denied .hg/store/data/b.i') strip failed, backup bundle - abort: Permission denied .hg/store/data/b.i + abort: Permission denied .hg/store/data/b.i' % after update 0, strip 2 abandoned transaction found - run hg recover checking changesets @@ -85,7 +85,7 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: a - abort: Permission denied .hg/store/data/b.i + abort: Permission denied .hg/store/data/b.i' % after update 0, strip 2 checking changesets checking manifests @@ -107,7 +107,7 @@ rollback failed - please run hg recover (failure reason: [Errno 13] Permission denied .hg/store/00manifest.i') strip failed, backup bundle - abort: Permission denied .hg/store/00manifest.i + abort: Permission denied .hg/store/00manifest.i' % after update 0, strip 2 abandoned transaction found - run hg recover checking changesets
--- a/tests/test-resolve.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-resolve.t Tue Feb 19 21:55:05 2019 -0800 @@ -67,6 +67,9 @@ $ hg resolve -l R file1 U file2 + $ hg resolve -l --config ui.relative-paths=yes + R ../file1 + U ../file2 $ hg resolve --re-merge filez file2 arguments do not match paths that need resolving (try: hg resolve --re-merge path:filez path:file2)
--- a/tests/test-revert-interactive.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-revert-interactive.t Tue Feb 19 21:55:05 2019 -0800 @@ -424,3 +424,24 @@ b: no such file in rev b40d1912accf $ cd .. + +Prompt before undeleting file(issue6008) + $ hg init repo + $ cd repo + $ echo a > a + $ hg ci -qAm a + $ hg rm a + $ hg revert -i<<EOF + > y + > EOF + add back removed file a (Yn)? y + undeleting a + $ ls + a + $ hg rm a + $ hg revert -i<<EOF + > n + > EOF + add back removed file a (Yn)? n + $ ls + $ cd ..
--- a/tests/test-revert.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-revert.t Tue Feb 19 21:55:05 2019 -0800 @@ -92,7 +92,7 @@ $ echo z > e $ hg revert --all -v --config 'ui.origbackuppath=.hg/origbackups' creating directory: $TESTTMP/repo/.hg/origbackups - saving current version of e as $TESTTMP/repo/.hg/origbackups/e + saving current version of e as .hg/origbackups/e reverting e $ rm -rf .hg/origbackups @@ -289,6 +289,23 @@ $ hg revert . reverting b/b +respects ui.relative-paths +-------------------------- + + $ echo foo > newdir/newfile + $ hg add newdir/newfile + $ hg revert --all --cwd newdir + forgetting newfile + + $ echo foo > newdir/newfile + $ hg add newdir/newfile + $ hg revert --all --cwd newdir --config ui.relative-paths=True + forgetting newfile + + $ echo foo > newdir/newfile + $ hg add newdir/newfile + $ hg revert --all --cwd newdir --config ui.relative-paths=False + forgetting newdir/newfile reverting a rename target should revert the source --------------------------------------------------
--- a/tests/test-revlog-raw.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-revlog-raw.py Tue Feb 19 21:55:05 2019 -0800 @@ -417,7 +417,6 @@ print(' got: %s' % result15) def maintest(): - expected = rl = None with newtransaction() as tr: rl = newrevlog(recreate=True) expected = writecases(rl, tr)
--- a/tests/test-revset.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-revset.t Tue Feb 19 21:55:05 2019 -0800 @@ -12,9 +12,9 @@ > """ > if 3 not in subset: > if 2 in subset: - > return baseset([2,2]) + > return baseset([2, 2]) > return baseset() - > return baseset([3,3,2,2]) + > return baseset([3, 3, 2, 2]) > > mercurial.revset.symbols[b'r3232'] = r3232 > EOF @@ -643,10 +643,13 @@ [255] $ hg debugrevspec '.#generations[a]' - hg: parse error: relation subscript must be an integer + hg: parse error: relation subscript must be an integer or a range [255] $ hg debugrevspec '.#generations[1-2]' - hg: parse error: relation subscript must be an integer + hg: parse error: relation subscript must be an integer or a range + [255] + $ hg debugrevspec '.#generations[foo:bar]' + hg: parse error: relation subscript bounds must be integers [255] suggested relations @@ -1274,6 +1277,31 @@ $ log '.#g[(-1)]' 8 + $ log '6#generations[0:1]' + 6 + 7 + $ log '6#generations[-1:1]' + 4 + 5 + 6 + 7 + $ log '6#generations[0:]' + 6 + 7 + $ log '5#generations[:0]' + 0 + 1 + 3 + 5 + $ log '3#generations[:]' + 0 + 1 + 3 + 5 + 6 + 7 + $ log 'tip#generations[1:-1]' + $ hg debugrevspec -p parsed 'roots(:)#g[2]' * parsed: (relsubscript @@ -2950,3 +2978,63 @@ * set: <baseset+ [0]> 0 + +abort if the revset doesn't expect given size + $ log 'expectsize()' + hg: parse error: invalid set of arguments + [255] + $ log 'expectsize(0:2, a)' + hg: parse error: expectsize requires a size range or a positive integer + [255] + $ log 'expectsize(0:2, 3)' + 0 + 1 + 2 + + $ log 'expectsize(2:0, 3)' + 2 + 1 + 0 + $ log 'expectsize(0:1, 1)' + abort: revset size mismatch. expected 1, got 2! + [255] + $ log 'expectsize(0:4, -1)' + hg: parse error: negative size + [255] + $ log 'expectsize(0:2, 2:4)' + 0 + 1 + 2 + $ log 'expectsize(0:1, 3:5)' + abort: revset size mismatch. expected between 3 and 5, got 2! 
+ [255] + $ log 'expectsize(0:1, -1:2)' + hg: parse error: negative size + [255] + $ log 'expectsize(0:1, 1:-2)' + hg: parse error: negative size + [255] + $ log 'expectsize(0:2, a:4)' + hg: parse error: size range bounds must be integers + [255] + $ log 'expectsize(0:2, 2:b)' + hg: parse error: size range bounds must be integers + [255] + $ log 'expectsize(0:2, 2:)' + 0 + 1 + 2 + $ log 'expectsize(0:2, :5)' + 0 + 1 + 2 + $ log 'expectsize(0:2, :)' + 0 + 1 + 2 + $ log 'expectsize(0:2, 4:)' + abort: revset size mismatch. expected between 4 and 11, got 3! + [255] + $ log 'expectsize(0:2, :2)' + abort: revset size mismatch. expected between 0 and 2, got 3! + [255]
--- a/tests/test-revset2.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-revset2.t Tue Feb 19 21:55:05 2019 -0800 @@ -1525,8 +1525,8 @@ $ hg init problematicencoding $ cd problematicencoding - $ "$PYTHON" > setup.sh <<EOF - > print(u''' + $ "$PYTHON" <<EOF + > open('setup.sh', 'wb').write(u''' > echo a > text > hg add text > hg --encoding utf-8 commit -u '\u30A2' -m none @@ -1541,8 +1541,8 @@ $ sh < setup.sh test in problematic encoding - $ "$PYTHON" > test.sh <<EOF - > print(u''' + $ "$PYTHON" <<EOF + > open('test.sh', 'wb').write(u''' > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30A2)' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30C2)'
--- a/tests/test-rollback.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-rollback.t Tue Feb 19 21:55:05 2019 -0800 @@ -113,9 +113,9 @@ > echo "another precious commit message" > "$1" > __EOF__ $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg --config hooks.pretxncommit=false commit 2>&1 - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit hook exited with status * (glob) [255] $ cat .hg/last-message.txt
--- a/tests/test-run-tests.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-run-tests.t Tue Feb 19 21:55:05 2019 -0800 @@ -324,8 +324,8 @@ ERROR: test-failure-unicode.t output changed ! + Failed test-failure-unicode.t: output changed Failed test-failure.t: output changed - Failed test-failure-unicode.t: output changed # Ran 3 tests, 0 skipped, 2 failed. python hash seed: * (glob) [1] @@ -356,8 +356,8 @@ ERROR: test-failure-unicode.t output changed ! + Failed test-failure-unicode.t: output changed Failed test-failure.t: output changed - Failed test-failure-unicode.t: output changed # Ran 3 tests, 0 skipped, 2 failed. python hash seed: * (glob) [1] @@ -393,8 +393,8 @@ ERROR: test-failure-unicode.t output changed ! + Failed test-failure-unicode.t: output changed Failed test-failure.t: output changed - Failed test-failure-unicode.t: output changed # Ran 3 tests, 0 skipped, 2 failed. python hash seed: * (glob) [1]
--- a/tests/test-rust-ancestor.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-rust-ancestor.py Tue Feb 19 21:55:05 2019 -0800 @@ -19,6 +19,7 @@ LazyAncestors, MissingAncestors, ) + from mercurial.rustext import dagop try: from mercurial.cext import parsers as cparsers @@ -165,6 +166,10 @@ with self.assertRaises(error.WdirUnsupported): list(AncestorsIterator(idx, [node.wdirrev], -1, False)) + def testheadrevs(self): + idx = self.parseindex() + self.assertEqual(dagop.headrevs(idx, [1, 2, 3]), {3}) + if __name__ == '__main__': import silenttestrunner silenttestrunner.main(__name__)
--- a/tests/test-shelve2.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-shelve2.t Tue Feb 19 21:55:05 2019 -0800 @@ -130,13 +130,28 @@ e $ cat e.orig z + $ rm e.orig +restores backup of unknown file to right directory + + $ hg shelve + shelved as default + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ echo z > e + $ mkdir dir + $ hg unshelve --cwd dir + unshelving change 'default' + $ rmdir dir + $ cat e + e + $ cat e.orig + z unshelve and conflicts with tracked and untracked files preparing: - $ rm *.orig + $ rm -f *.orig $ hg ci -qm 'commit stuff' $ hg phase -p null:
--- a/tests/test-split.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-split.t Tue Feb 19 21:55:05 2019 -0800 @@ -26,6 +26,8 @@ > [diff] > git=1 > unified=0 + > [commands] + > commit.interactive.unified=0 > [alias] > glog=log -G -T '{rev}:{node|short} {desc} {bookmarks}\n' > EOF @@ -103,6 +105,12 @@ abort: cannot split multiple revisions [255] +This function splits a bit strangely primarily to avoid changing the behavior of +the test after a bug was fixed with how split/commit --interactive handled +`commands.commit.interactive.unified=0`: when there were no context lines, +it kept only the last diff hunk. When running split, this meant that runsplit +was always recording three commits, one for each diff hunk, in reverse order +(the base commit was the last diff hunk in the file). $ runsplit() { > cat > $TESTTMP/messages <<EOF > split 1 @@ -113,8 +121,11 @@ > EOF > cat <<EOF | hg split "$@" > y + > n + > n > y > y + > n > y > y > y @@ -123,13 +134,23 @@ $ HGEDITOR=false runsplit diff --git a/a b/a - 1 hunks, 1 lines changed + 3 hunks, 3 lines changed examine changes to 'a'? [Ynesfdaq?] y + @@ -1,1 +1,1 @@ + -1 + +11 + record change 1/3 to 'a'? [Ynesfdaq?] n + + @@ -3,1 +3,1 @@ 2 + -3 + +33 + record change 2/3 to 'a'? [Ynesfdaq?] n + @@ -5,1 +5,1 @@ 4 -5 +55 - record this change to 'a'? [Ynesfdaq?] y + record change 3/3 to 'a'? [Ynesfdaq?] y transaction abort! rollback completed @@ -140,13 +161,23 @@ $ HGEDITOR="\"$PYTHON\" $TESTTMP/editor.py" $ runsplit diff --git a/a b/a - 1 hunks, 1 lines changed + 3 hunks, 3 lines changed examine changes to 'a'? [Ynesfdaq?] y + @@ -1,1 +1,1 @@ + -1 + +11 + record change 1/3 to 'a'? [Ynesfdaq?] n + + @@ -3,1 +3,1 @@ 2 + -3 + +33 + record change 2/3 to 'a'? [Ynesfdaq?] n + @@ -5,1 +5,1 @@ 4 -5 +55 - record this change to 'a'? [Ynesfdaq?] y + record change 3/3 to 'a'? [Ynesfdaq?] y EDITOR: HG: Splitting 1df0d5c5a3ab. Write commit message for the first split changeset. 
EDITOR: a2 @@ -160,13 +191,18 @@ EDITOR: HG: changed a created new head diff --git a/a b/a - 1 hunks, 1 lines changed + 2 hunks, 2 lines changed examine changes to 'a'? [Ynesfdaq?] y + @@ -1,1 +1,1 @@ + -1 + +11 + record change 1/2 to 'a'? [Ynesfdaq?] n + @@ -3,1 +3,1 @@ 2 -3 +33 - record this change to 'a'? [Ynesfdaq?] y + record change 2/2 to 'a'? [Ynesfdaq?] y EDITOR: HG: Splitting 1df0d5c5a3ab. So far it has been split into: EDITOR: HG: - e704349bd21b: split 1 @@ -565,3 +601,111 @@ a09ad58faae3 draft e704349bd21b draft a61bcde8c529 draft + +`hg split` with ignoreblanklines=1 does not infinite loop + + $ mkdir $TESTTMP/f + $ hg init $TESTTMP/f/a + $ cd $TESTTMP/f/a + $ printf '1\n2\n3\n4\n5\n' > foo + $ cp foo bar + $ hg ci -qAm initial + $ printf '1\n\n2\n3\ntest\n4\n5\n' > bar + $ printf '1\n2\n3\ntest\n4\n5\n' > foo + $ hg ci -qm splitme + $ cat > $TESTTMP/messages <<EOF + > split 1 + > -- + > split 2 + > EOF + $ printf 'f\nn\nf\n' | hg --config extensions.split= --config diff.ignoreblanklines=1 split + diff --git a/bar b/bar + 2 hunks, 2 lines changed + examine changes to 'bar'? [Ynesfdaq?] f + + diff --git a/foo b/foo + 1 hunks, 1 lines changed + examine changes to 'foo'? [Ynesfdaq?] n + + EDITOR: HG: Splitting dd3c45017cbf. Write commit message for the first split changeset. + EDITOR: splitme + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. + EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: changed bar + created new head + diff --git a/foo b/foo + 1 hunks, 1 lines changed + examine changes to 'foo'? [Ynesfdaq?] f + + EDITOR: HG: Splitting dd3c45017cbf. So far it has been split into: + EDITOR: HG: - f205aea1c624: split 1 + EDITOR: HG: Write commit message for the next split changeset. + EDITOR: splitme + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. 
+ EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: changed foo + saved backup bundle to $TESTTMP/f/a/.hg/strip-backup/dd3c45017cbf-463441b5-split.hg (obsstore-off !) + +Let's try that again, with a slightly different set of patches, to ensure that +the ignoreblanklines thing isn't somehow position dependent. + + $ hg init $TESTTMP/f/b + $ cd $TESTTMP/f/b + $ printf '1\n2\n3\n4\n5\n' > foo + $ cp foo bar + $ hg ci -qAm initial + $ printf '1\n2\n3\ntest\n4\n5\n' > bar + $ printf '1\n2\n3\ntest\n4\n\n5\n' > foo + $ hg ci -qm splitme + $ cat > $TESTTMP/messages <<EOF + > split 1 + > -- + > split 2 + > EOF + $ printf 'f\nn\nf\n' | hg --config extensions.split= --config diff.ignoreblanklines=1 split + diff --git a/bar b/bar + 1 hunks, 1 lines changed + examine changes to 'bar'? [Ynesfdaq?] f + + diff --git a/foo b/foo + 2 hunks, 2 lines changed + examine changes to 'foo'? [Ynesfdaq?] n + + EDITOR: HG: Splitting 904c80b40a4a. Write commit message for the first split changeset. + EDITOR: splitme + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. + EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: changed bar + created new head + diff --git a/foo b/foo + 2 hunks, 2 lines changed + examine changes to 'foo'? [Ynesfdaq?] f + + EDITOR: HG: Splitting 904c80b40a4a. So far it has been split into: + EDITOR: HG: - ffecf40fa954: split 1 + EDITOR: HG: Write commit message for the next split changeset. + EDITOR: splitme + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. + EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: changed foo + saved backup bundle to $TESTTMP/f/b/.hg/strip-backup/904c80b40a4a-47fb907f-split.hg (obsstore-off !)
--- a/tests/test-sqlitestore.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-sqlitestore.t Tue Feb 19 21:55:05 2019 -0800 @@ -71,17 +71,17 @@ That results in a row being inserted into various tables - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM filepath; > EOF 1|foo - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM fileindex; > EOF 1|1|0|-1|-1|0|0|1||6/\xef(L\xe2\xca\x02\xae\xcc\x8d\xe6\xd5\xe8\xa1\xc3\xaf\x05V\xfe (esc) - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM delta; > EOF 1|1| \xd2\xaf\x8d\xd2"\x01\xdd\x8dH\xe5\xdc\xfc\xae\xd2\x81\xff\x94"\xc7|0 (esc) @@ -93,7 +93,7 @@ $ hg commit -A -m 'add bar' adding bar - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM filedata ORDER BY id ASC; > EOF 1|1|foo|0|6/\xef(L\xe2\xca\x02\xae\xcc\x8d\xe6\xd5\xe8\xa1\xc3\xaf\x05V\xfe|-1|-1|0|0|1| (esc) @@ -104,7 +104,7 @@ $ echo a >> foo $ hg commit -m 'modify foo' - $ sqlite3 .hg/store/db.sqlite << EOF + $ sqlite3 .hg/store/db.sqlite -init /dev/null << EOF > SELECT * FROM filedata ORDER BY id ASC; > EOF 1|1|foo|0|6/\xef(L\xe2\xca\x02\xae\xcc\x8d\xe6\xd5\xe8\xa1\xc3\xaf\x05V\xfe|-1|-1|0|0|1| (esc)
--- a/tests/test-ssh-bundle1.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-ssh-bundle1.t Tue Feb 19 21:55:05 2019 -0800 @@ -46,7 +46,7 @@ > uncompressed = True > > [hooks] - > changegroup = sh -c "printenv.py changegroup-in-remote 0 ../dummylog" + > changegroup = sh -c "printenv.py --line changegroup-in-remote 0 ../dummylog" > EOF $ cd $TESTTMP @@ -131,7 +131,7 @@ checked 3 changesets with 2 changes to 2 files $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup-in-local 0 ../dummylog" + > changegroup = sh -c "printenv.py --line changegroup-in-local 0 ../dummylog" > EOF empty default pull @@ -514,7 +514,14 @@ Got arguments 1:user@dummy 2:hg -R local serve --stdio Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 + HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio @@ -524,7 +531,14 @@ Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_HOOKNAME=changegroup + 
HG_HOOKTYPE=changegroup + HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 + HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg init 'a repo' Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio @@ -532,7 +546,14 @@ Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 + HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio remote hook failure is attributed to remote
--- a/tests/test-ssh-repoerror.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-ssh-repoerror.t Tue Feb 19 21:55:05 2019 -0800 @@ -34,7 +34,7 @@ > done $ hg id ssh://user@dummy/other - remote: abort: Permission denied: $TESTTMP/other/.hg/requires + remote: abort: Permission denied: '$TESTTMP/other/.hg/requires' abort: no suitable response from remote hg! [255]
--- a/tests/test-ssh.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-ssh.t Tue Feb 19 21:55:05 2019 -0800 @@ -36,7 +36,7 @@ > uncompressed = True > > [hooks] - > changegroup = sh -c "printenv.py changegroup-in-remote 0 ../dummylog" + > changegroup = sh -c "printenv.py --line changegroup-in-remote 0 ../dummylog" > EOF $ cd $TESTTMP @@ -563,7 +563,15 @@ Got arguments 1:user@dummy 2:hg -R local serve --stdio Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_BUNDLE2=1 + HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 + HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio @@ -573,9 +581,25 @@ Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_BUNDLE2=1 + HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 + HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:chg -R remote serve --stdio 
(chg !) - changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP (chg !) + changegroup-in-remote hook: HG_BUNDLE2=1 (chg !) + HG_HOOKNAME=changegroup (chg !) + HG_HOOKTYPE=changegroup (chg !) + HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 (chg !) + HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 (chg !) + HG_SOURCE=serve (chg !) + HG_TXNID=TXN:$ID$ (chg !) + HG_URL=remote:ssh:$LOCALIP (chg !) + (chg !) Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg init 'a repo' Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio @@ -583,9 +607,18 @@ Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio - changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP + changegroup-in-remote hook: HG_BUNDLE2=1 + HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 + HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 + HG_SOURCE=serve + HG_TXNID=TXN:$ID$ + HG_URL=remote:ssh:$LOCALIP + Got arguments 1:user@dummy 2:hg -R remote serve --stdio + remote hook failure is attributed to remote $ cat > $TESTTMP/failhook << EOF
--- a/tests/test-static-http.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-static-http.t Tue Feb 19 21:55:05 2019 -0800 @@ -57,7 +57,7 @@ $ cd ../local $ cat >> .hg/hgrc <<EOF > [hooks] - > changegroup = sh -c "printenv.py changegroup" + > changegroup = sh -c "printenv.py --line changegroup" > EOF $ hg pull pulling from static-http://localhost:$HGPORT/remote @@ -67,7 +67,14 @@ adding file changes added 1 changesets with 1 changes to 1 files new changesets 4ac2e3648604 - changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_NODE_LAST=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=http://localhost:$HGPORT/remote + changegroup hook: HG_HOOKNAME=changegroup + HG_HOOKTYPE=changegroup + HG_NODE=4ac2e3648604439c580c69b09ec9d93a88d93432 + HG_NODE_LAST=4ac2e3648604439c580c69b09ec9d93a88d93432 + HG_SOURCE=pull + HG_TXNID=TXN:$ID$ + HG_URL=http://localhost:$HGPORT/remote + (run 'hg update' to get a working copy) trying to push @@ -227,9 +234,11 @@ /.hg/requires /.hg/store/00changelog.i /.hg/store/00manifest.i - /.hg/store/data/%7E2ehgsub.i - /.hg/store/data/%7E2ehgsubstate.i + /.hg/store/data/%7E2ehgsub.i (no-py37 !) + /.hg/store/data/%7E2ehgsubstate.i (no-py37 !) /.hg/store/data/a.i + /.hg/store/data/~2ehgsub.i (py37 !) + /.hg/store/data/~2ehgsubstate.i (py37 !) /notarepo/.hg/00changelog.i /notarepo/.hg/requires /remote-with-names/.hg/bookmarks @@ -243,8 +252,9 @@ /remote-with-names/.hg/requires /remote-with-names/.hg/store/00changelog.i /remote-with-names/.hg/store/00manifest.i - /remote-with-names/.hg/store/data/%7E2ehgtags.i + /remote-with-names/.hg/store/data/%7E2ehgtags.i (no-py37 !) /remote-with-names/.hg/store/data/foo.i + /remote-with-names/.hg/store/data/~2ehgtags.i (py37 !) 
/remote/.hg/bookmarks /remote/.hg/bookmarks.current /remote/.hg/cache/branch2-base @@ -258,10 +268,12 @@ /remote/.hg/requires /remote/.hg/store/00changelog.i /remote/.hg/store/00manifest.i - /remote/.hg/store/data/%7E2edotfile%20with%20spaces.i - /remote/.hg/store/data/%7E2ehgtags.i + /remote/.hg/store/data/%7E2edotfile%20with%20spaces.i (no-py37 !) + /remote/.hg/store/data/%7E2ehgtags.i (no-py37 !) /remote/.hg/store/data/bar.i /remote/.hg/store/data/quux.i + /remote/.hg/store/data/~2edotfile%20with%20spaces.i (py37 !) + /remote/.hg/store/data/~2ehgtags.i (py37 !) /remotempty/.hg/bookmarks /remotempty/.hg/bookmarks.current /remotempty/.hg/requires @@ -275,5 +287,6 @@ /sub/.hg/requires /sub/.hg/store/00changelog.i /sub/.hg/store/00manifest.i - /sub/.hg/store/data/%7E2ehgtags.i + /sub/.hg/store/data/%7E2ehgtags.i (no-py37 !) /sub/.hg/store/data/test.i + /sub/.hg/store/data/~2ehgtags.i (py37 !)
--- a/tests/test-status.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-status.t Tue Feb 19 21:55:05 2019 -0800 @@ -132,7 +132,26 @@ relative paths can be requested + $ hg status --cwd a --config ui.relative-paths=yes + ? 1/in_a_1 + ? in_a + ? ../b/1/in_b_1 + ? ../b/2/in_b_2 + ? ../b/in_b + ? ../in_root + + $ hg status --cwd a . --config ui.relative-paths=legacy + ? 1/in_a_1 + ? in_a + $ hg status --cwd a . --config ui.relative-paths=no + ? a/1/in_a_1 + ? a/in_a + +commands.status.relative overrides ui.relative-paths + $ cat >> $HGRCPATH <<EOF + > [ui] + > relative-paths = False > [commands] > status.relative = True > EOF @@ -271,7 +290,7 @@ $ hg status -A -Tpickle > pickle >>> from __future__ import print_function - >>> import pickle + >>> from mercurial.util import pickle >>> data = sorted((x[b'status'].decode(), x[b'path'].decode()) for x in pickle.load(open("pickle", r"rb"))) >>> for s, p in data: print("%s %s" % (s, p)) ! deleted
--- a/tests/test-subrepo-git.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-subrepo-git.t Tue Feb 19 21:55:05 2019 -0800 @@ -924,9 +924,9 @@ $ echo 'bloop' > s/foobar $ hg revert --all --verbose --config 'ui.origbackuppath=.hg/origbackups' reverting subrepo ../gitroot - creating directory: $TESTTMP/tc/.hg/origbackups - saving current version of foobar as $TESTTMP/tc/.hg/origbackups/foobar - $ ls .hg/origbackups + creating directory: $TESTTMP/tc/.hg/origbackups/s + saving current version of foobar as .hg/origbackups/s/foobar + $ ls .hg/origbackups/s foobar $ rm -rf .hg/origbackups
--- a/tests/test-subrepo-svn.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-subrepo-svn.t Tue Feb 19 21:55:05 2019 -0800 @@ -1,11 +1,7 @@ #require svn15 $ SVNREPOPATH=`pwd`/svn-repo -#if windows - $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#else - $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` -#endif + $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`" $ filter_svn_output () { > egrep -v 'Committing|Transmitting|Updating|(^$)' || true
--- a/tests/test-tag.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-tag.t Tue Feb 19 21:55:05 2019 -0800 @@ -320,9 +320,9 @@ HG: branch 'tag-and-branch-same-name' HG: changed .hgtags ==== - note: commit message saved in .hg/last-message.txt transaction abort! rollback completed + note: commit message saved in .hg/last-message.txt abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt
--- a/tests/test-tags.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-tags.t Tue Feb 19 21:55:05 2019 -0800 @@ -759,3 +759,69 @@ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ (cd tags-local-clone/.hg/cache/; ls -1 tag*) tags2-visible + +Avoid writing logs on trying to delete an already deleted tag + $ hg init issue5752 + $ cd issue5752 + $ echo > a + $ hg commit -Am 'add a' + adding a + $ hg tag a + $ hg tags + tip 1:bd7ee4f3939b + a 0:a8a82d372bb3 + $ hg log + changeset: 1:bd7ee4f3939b + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Added tag a for changeset a8a82d372bb3 + + changeset: 0:a8a82d372bb3 + tag: a + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add a + + $ hg tag --remove a + $ hg log + changeset: 2:e7feacc7ec9e + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Removed tag a + + changeset: 1:bd7ee4f3939b + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Added tag a for changeset a8a82d372bb3 + + changeset: 0:a8a82d372bb3 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add a + + $ hg tag --remove a + abort: tag 'a' is already removed + [255] + $ hg log + changeset: 2:e7feacc7ec9e + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Removed tag a + + changeset: 1:bd7ee4f3939b + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: Added tag a for changeset a8a82d372bb3 + + changeset: 0:a8a82d372bb3 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: add a + + $ cat .hgtags + a8a82d372bb35b42ff736e74f07c23bcd99c371f a + a8a82d372bb35b42ff736e74f07c23bcd99c371f a + 0000000000000000000000000000000000000000 a
--- a/tests/test-template-functions.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-template-functions.t Tue Feb 19 21:55:05 2019 -0800 @@ -1549,4 +1549,31 @@ $ HGENCODING=utf-8 hg debugtemplate "{pad('`cat utf-8`', 2, '-')}\n" \xc3\xa9- (esc) +read config options: + + $ hg log -T "{config('templateconfig', 'knob', 'foo')}\n" + foo + $ hg log -T "{config('templateconfig', 'knob', 'foo')}\n" \ + > --config templateconfig.knob=bar + bar + $ hg log -T "{configbool('templateconfig', 'knob', True)}\n" + True + $ hg log -T "{configbool('templateconfig', 'knob', True)}\n" \ + > --config templateconfig.knob=0 + False + $ hg log -T "{configint('templateconfig', 'knob', 123)}\n" + 123 + $ hg log -T "{configint('templateconfig', 'knob', 123)}\n" \ + > --config templateconfig.knob=456 + 456 + $ hg log -T "{config('templateconfig', 'knob')}\n" + devel-warn: config item requires an explicit default value: 'templateconfig.knob' at: * (glob) + + $ hg log -T "{configbool('ui', 'interactive')}\n" + False + $ hg log -T "{configbool('ui', 'interactive')}\n" --config ui.interactive=1 + True + $ hg log -T "{config('templateconfig', 'knob', if(true, 'foo', 'bar'))}\n" + foo + $ cd ..
--- a/tests/test-template-keywords.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-template-keywords.t Tue Feb 19 21:55:05 2019 -0800 @@ -76,6 +76,12 @@ $ hg log -r 'wdir()' -T '{manifest}\n' 2147483647:ffffffffffff +However, for negrev, we refuse to output anything (as well as for null) + + $ hg log -r 'wdir() + null' -T 'bla{negrev}nk\n' + blank + blank + Changectx-derived keywords are disabled within {manifest} as {node} changes: $ hg log -r0 -T 'outer:{p1node} {manifest % "inner:{p1node}"}\n'
--- a/tests/test-template-map.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-template-map.t Tue Feb 19 21:55:05 2019 -0800 @@ -1039,7 +1039,7 @@ $ touch q $ chmod 0 q $ hg log --style ./q - abort: Permission denied: ./q + abort: Permission denied: './q' [255] #endif
--- a/tests/test-transplant.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-transplant.t Tue Feb 19 21:55:05 2019 -0800 @@ -39,12 +39,12 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg transplant 1 - abort: outstanding uncommitted merges + abort: outstanding uncommitted merge [255] $ hg up -qC tip $ echo b0 > b1 $ hg transplant 1 - abort: outstanding local changes + abort: uncommitted changes [255] $ hg up -qC tip $ echo b2 > b2
--- a/tests/test-trusted.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-trusted.py Tue Feb 19 21:55:05 2019 -0800 @@ -5,19 +5,34 @@ from __future__ import absolute_import, print_function import os +import sys + from mercurial import ( error, + pycompat, ui as uimod, util, ) +from mercurial.utils import stringutil hgrc = os.environ['HGRCPATH'] -f = open(hgrc) +f = open(hgrc, 'rb') basehgrc = f.read() f.close() -def testui(user='foo', group='bar', tusers=(), tgroups=(), - cuser='foo', cgroup='bar', debug=False, silent=False, +def _maybesysstr(v): + if isinstance(v, bytes): + return pycompat.sysstr(v) + return pycompat.sysstr(stringutil.pprint(v)) + +def bprint(*args, **kwargs): + print(*[_maybesysstr(a) for a in args], + **{k: _maybesysstr(v) for k, v in kwargs.items()}) + # avoid awkward interleaving with ui object's output + sys.stdout.flush() + +def testui(user=b'foo', group=b'bar', tusers=(), tgroups=(), + cuser=b'foo', cgroup=b'bar', debug=False, silent=False, report=True): # user, group => owners of the file # tusers, tgroups => trusted users/groups @@ -25,17 +40,17 @@ # write a global hgrc with the list of trusted users/groups and # some setting so that we can be sure it was read - f = open(hgrc, 'w') + f = open(hgrc, 'wb') f.write(basehgrc) - f.write('\n[paths]\n') - f.write('global = /some/path\n\n') + f.write(b'\n[paths]\n') + f.write(b'global = /some/path\n\n') if tusers or tgroups: - f.write('[trusted]\n') + f.write(b'[trusted]\n') if tusers: - f.write('users = %s\n' % ', '.join(tusers)) + f.write(b'users = %s\n' % b', '.join(tusers)) if tgroups: - f.write('groups = %s\n' % ', '.join(tgroups)) + f.write(b'groups = %s\n' % b', '.join(tgroups)) f.close() # override the functions that give names to uids and gids @@ -47,7 +62,7 @@ def groupname(gid=None): if gid is None: - return 'bar' + return b'bar' return group util.groupname = groupname @@ -58,13 +73,14 @@ # try to read everything #print '# File belongs to user %s, group %s' % (user, group) #print '# 
trusted users = %s; trusted groups = %s' % (tusers, tgroups) - kind = ('different', 'same') - who = ('', 'user', 'group', 'user and the group') + kind = (b'different', b'same') + who = (b'', b'user', b'group', b'user and the group') trusted = who[(user in tusers) + 2*(group in tgroups)] if trusted: - trusted = ', but we trust the ' + trusted - print('# %s user, %s group%s' % (kind[user == cuser], kind[group == cgroup], - trusted)) + trusted = b', but we trust the ' + trusted + bprint(b'# %s user, %s group%s' % (kind[user == cuser], + kind[group == cgroup], + trusted)) u = uimod.ui.load() # disable the configuration registration warning @@ -72,33 +88,33 @@ # the purpose of this test is to check the old behavior, not to validate the # behavior from registered item. so we silent warning related to unregisted # config. - u.setconfig('devel', 'warn-config-unknown', False, 'test') - u.setconfig('devel', 'all-warnings', False, 'test') - u.setconfig('ui', 'debug', str(bool(debug))) - u.setconfig('ui', 'report_untrusted', str(bool(report))) - u.readconfig('.hg/hgrc') + u.setconfig(b'devel', b'warn-config-unknown', False, b'test') + u.setconfig(b'devel', b'all-warnings', False, b'test') + u.setconfig(b'ui', b'debug', pycompat.bytestr(bool(debug))) + u.setconfig(b'ui', b'report_untrusted', pycompat.bytestr(bool(report))) + u.readconfig(b'.hg/hgrc') if silent: return u - print('trusted') - for name, path in u.configitems('paths'): - print(' ', name, '=', util.pconvert(path)) - print('untrusted') - for name, path in u.configitems('paths', untrusted=True): - print('.', end=' ') - u.config('paths', name) # warning with debug=True - print('.', end=' ') - u.config('paths', name, untrusted=True) # no warnings - print(name, '=', util.pconvert(path)) + bprint(b'trusted') + for name, path in u.configitems(b'paths'): + bprint(b' ', name, b'=', util.pconvert(path)) + bprint(b'untrusted') + for name, path in u.configitems(b'paths', untrusted=True): + bprint(b'.', end=b' ') + 
u.config(b'paths', name) # warning with debug=True + bprint(b'.', end=b' ') + u.config(b'paths', name, untrusted=True) # no warnings + bprint(name, b'=', util.pconvert(path)) print() return u -os.mkdir('repo') -os.chdir('repo') -os.mkdir('.hg') -f = open('.hg/hgrc', 'w') -f.write('[paths]\n') -f.write('local = /another/path\n\n') +os.mkdir(b'repo') +os.chdir(b'repo') +os.mkdir(b'.hg') +f = open(b'.hg/hgrc', 'wb') +f.write(b'[paths]\n') +f.write(b'local = /another/path\n\n') f.close() #print '# Everything is run by user foo, group bar\n' @@ -106,120 +122,130 @@ # same user, same group testui() # same user, different group -testui(group='def') +testui(group=b'def') # different user, same group -testui(user='abc') +testui(user=b'abc') # ... but we trust the group -testui(user='abc', tgroups=['bar']) +testui(user=b'abc', tgroups=[b'bar']) # different user, different group -testui(user='abc', group='def') +testui(user=b'abc', group=b'def') # ... but we trust the user -testui(user='abc', group='def', tusers=['abc']) +testui(user=b'abc', group=b'def', tusers=[b'abc']) # ... but we trust the group -testui(user='abc', group='def', tgroups=['def']) +testui(user=b'abc', group=b'def', tgroups=[b'def']) # ... but we trust the user and the group -testui(user='abc', group='def', tusers=['abc'], tgroups=['def']) +testui(user=b'abc', group=b'def', tusers=[b'abc'], tgroups=[b'def']) # ... but we trust all users -print('# we trust all users') -testui(user='abc', group='def', tusers=['*']) +bprint(b'# we trust all users') +testui(user=b'abc', group=b'def', tusers=[b'*']) # ... but we trust all groups -print('# we trust all groups') -testui(user='abc', group='def', tgroups=['*']) +bprint(b'# we trust all groups') +testui(user=b'abc', group=b'def', tgroups=[b'*']) # ... 
but we trust the whole universe -print('# we trust all users and groups') -testui(user='abc', group='def', tusers=['*'], tgroups=['*']) +bprint(b'# we trust all users and groups') +testui(user=b'abc', group=b'def', tusers=[b'*'], tgroups=[b'*']) # ... check that users and groups are in different namespaces -print("# we don't get confused by users and groups with the same name") -testui(user='abc', group='def', tusers=['def'], tgroups=['abc']) +bprint(b"# we don't get confused by users and groups with the same name") +testui(user=b'abc', group=b'def', tusers=[b'def'], tgroups=[b'abc']) # ... lists of user names work -print("# list of user names") -testui(user='abc', group='def', tusers=['foo', 'xyz', 'abc', 'bleh'], - tgroups=['bar', 'baz', 'qux']) +bprint(b"# list of user names") +testui(user=b'abc', group=b'def', tusers=[b'foo', b'xyz', b'abc', b'bleh'], + tgroups=[b'bar', b'baz', b'qux']) # ... lists of group names work -print("# list of group names") -testui(user='abc', group='def', tusers=['foo', 'xyz', 'bleh'], - tgroups=['bar', 'def', 'baz', 'qux']) +bprint(b"# list of group names") +testui(user=b'abc', group=b'def', tusers=[b'foo', b'xyz', b'bleh'], + tgroups=[b'bar', b'def', b'baz', b'qux']) -print("# Can't figure out the name of the user running this process") -testui(user='abc', group='def', cuser=None) +bprint(b"# Can't figure out the name of the user running this process") +testui(user=b'abc', group=b'def', cuser=None) -print("# prints debug warnings") -u = testui(user='abc', group='def', cuser='foo', debug=True) +bprint(b"# prints debug warnings") +u = testui(user=b'abc', group=b'def', cuser=b'foo', debug=True) -print("# report_untrusted enabled without debug hides warnings") -u = testui(user='abc', group='def', cuser='foo', report=False) +bprint(b"# report_untrusted enabled without debug hides warnings") +u = testui(user=b'abc', group=b'def', cuser=b'foo', report=False) -print("# report_untrusted enabled with debug shows warnings") -u = 
testui(user='abc', group='def', cuser='foo', debug=True, report=False) +bprint(b"# report_untrusted enabled with debug shows warnings") +u = testui(user=b'abc', group=b'def', cuser=b'foo', debug=True, report=False) -print("# ui.readconfig sections") -filename = 'foobar' -f = open(filename, 'w') -f.write('[foobar]\n') -f.write('baz = quux\n') +bprint(b"# ui.readconfig sections") +filename = b'foobar' +f = open(filename, 'wb') +f.write(b'[foobar]\n') +f.write(b'baz = quux\n') f.close() -u.readconfig(filename, sections=['foobar']) -print(u.config('foobar', 'baz')) +u.readconfig(filename, sections=[b'foobar']) +bprint(u.config(b'foobar', b'baz')) print() -print("# read trusted, untrusted, new ui, trusted") +bprint(b"# read trusted, untrusted, new ui, trusted") u = uimod.ui.load() # disable the configuration registration warning # # the purpose of this test is to check the old behavior, not to validate the # behavior from registered item. so we silent warning related to unregisted # config. -u.setconfig('devel', 'warn-config-unknown', False, 'test') -u.setconfig('devel', 'all-warnings', False, 'test') -u.setconfig('ui', 'debug', 'on') +u.setconfig(b'devel', b'warn-config-unknown', False, b'test') +u.setconfig(b'devel', b'all-warnings', False, b'test') +u.setconfig(b'ui', b'debug', b'on') u.readconfig(filename) u2 = u.copy() def username(uid=None): - return 'foo' + return b'foo' util.username = username -u2.readconfig('.hg/hgrc') -print('trusted:') -print(u2.config('foobar', 'baz')) -print('untrusted:') -print(u2.config('foobar', 'baz', untrusted=True)) +u2.readconfig(b'.hg/hgrc') +bprint(b'trusted:') +bprint(u2.config(b'foobar', b'baz')) +bprint(b'untrusted:') +bprint(u2.config(b'foobar', b'baz', untrusted=True)) print() -print("# error handling") +bprint(b"# error handling") def assertraises(f, exc=error.Abort): try: f() except exc as inst: - print('raised', inst.__class__.__name__) + bprint(b'raised', inst.__class__.__name__) else: - print('no exception?!') + 
bprint(b'no exception?!') -print("# file doesn't exist") -os.unlink('.hg/hgrc') -assert not os.path.exists('.hg/hgrc') +bprint(b"# file doesn't exist") +os.unlink(b'.hg/hgrc') +assert not os.path.exists(b'.hg/hgrc') testui(debug=True, silent=True) -testui(user='abc', group='def', debug=True, silent=True) +testui(user=b'abc', group=b'def', debug=True, silent=True) print() -print("# parse error") -f = open('.hg/hgrc', 'w') -f.write('foo') +bprint(b"# parse error") +f = open(b'.hg/hgrc', 'wb') +f.write(b'foo') f.close() +# This is a hack to remove b'' prefixes from ParseError.__bytes__ on +# Python 3. +def normalizeparseerror(e): + if pycompat.ispy3: + args = [a.decode('utf-8') for a in e.args] + else: + args = e.args + + return error.ParseError(*args) + try: - testui(user='abc', group='def', silent=True) + testui(user=b'abc', group=b'def', silent=True) except error.ParseError as inst: - print(inst) + bprint(normalizeparseerror(inst)) try: testui(debug=True, silent=True) except error.ParseError as inst: - print(inst) + bprint(normalizeparseerror(inst)) print() -print('# access typed information') -with open('.hg/hgrc', 'w') as f: - f.write('''\ +bprint(b'# access typed information') +with open(b'.hg/hgrc', 'wb') as f: + f.write(b'''\ [foo] sub=main sub:one=one @@ -230,32 +256,33 @@ bytes=81mb list=spam,ham,eggs ''') -u = testui(user='abc', group='def', cuser='foo', silent=True) +u = testui(user=b'abc', group=b'def', cuser=b'foo', silent=True) def configpath(section, name, default=None, untrusted=False): path = u.configpath(section, name, default, untrusted) if path is None: return None return util.pconvert(path) -print('# suboptions, trusted and untrusted') -trusted = u.configsuboptions('foo', 'sub') -untrusted = u.configsuboptions('foo', 'sub', untrusted=True) -print( +bprint(b'# suboptions, trusted and untrusted') +trusted = u.configsuboptions(b'foo', b'sub') +untrusted = u.configsuboptions(b'foo', b'sub', untrusted=True) +bprint( (trusted[0], 
sorted(trusted[1].items())), (untrusted[0], sorted(untrusted[1].items()))) -print('# path, trusted and untrusted') -print(configpath('foo', 'path'), configpath('foo', 'path', untrusted=True)) -print('# bool, trusted and untrusted') -print(u.configbool('foo', 'bool'), u.configbool('foo', 'bool', untrusted=True)) -print('# int, trusted and untrusted') -print( - u.configint('foo', 'int', 0), - u.configint('foo', 'int', 0, untrusted=True)) -print('# bytes, trusted and untrusted') -print( - u.configbytes('foo', 'bytes', 0), - u.configbytes('foo', 'bytes', 0, untrusted=True)) -print('# list, trusted and untrusted') -print( - u.configlist('foo', 'list', []), - u.configlist('foo', 'list', [], untrusted=True)) +bprint(b'# path, trusted and untrusted') +bprint(configpath(b'foo', b'path'), configpath(b'foo', b'path', untrusted=True)) +bprint(b'# bool, trusted and untrusted') +bprint(u.configbool(b'foo', b'bool'), + u.configbool(b'foo', b'bool', untrusted=True)) +bprint(b'# int, trusted and untrusted') +bprint( + u.configint(b'foo', b'int', 0), + u.configint(b'foo', b'int', 0, untrusted=True)) +bprint(b'# bytes, trusted and untrusted') +bprint( + u.configbytes(b'foo', b'bytes', 0), + u.configbytes(b'foo', b'bytes', 0, untrusted=True)) +bprint(b'# list, trusted and untrusted') +bprint( + u.configlist(b'foo', b'list', []), + u.configlist(b'foo', b'list', [], untrusted=True))
--- a/tests/test-trusted.py.out Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-trusted.py.out Tue Feb 19 21:55:05 2019 -0800 @@ -174,9 +174,9 @@ # parse error # different user, different group not trusting file .hg/hgrc from untrusted user abc, group def -('foo', '.hg/hgrc:1') +ParseError('foo', '.hg/hgrc:1') # same user, same group -('foo', '.hg/hgrc:1') +ParseError('foo', '.hg/hgrc:1') # access typed information # different user, different group
--- a/tests/test-unamend.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-unamend.t Tue Feb 19 21:55:05 2019 -0800 @@ -232,6 +232,7 @@ $ hg revert --all forgetting bar + $ rm bar Unamending in middle of a stack @@ -302,7 +303,6 @@ Testing whether unamend retains copies or not $ hg status - ? bar $ hg mv a foo @@ -370,3 +370,42 @@ diff --git a/c b/wat rename from c rename to wat + $ hg revert -qa + $ rm foobar wat + +Rename a->b, then amend b->c. After unamend, should look like b->c. + + $ hg co -q 0 + $ hg mv a b + $ hg ci -qm 'move to a b' + $ hg mv b c + $ hg amend + $ hg unamend + $ hg st --copies --change . + A b + a + R a + $ hg st --copies + A c + b + R b + $ hg revert -qa + $ rm c + +Rename a->b, then amend b->c, and working copy change c->d. After unamend, should look like b->d + + $ hg co -q 0 + $ hg mv a b + $ hg ci -qm 'move to a b' + $ hg mv b c + $ hg amend + $ hg mv c d + $ hg unamend + $ hg st --copies --change . + A b + a + R a + $ hg st --copies + A d + b + R b
--- a/tests/test-uncommit.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-uncommit.t Tue Feb 19 21:55:05 2019 -0800 @@ -398,3 +398,43 @@ |/ o 0:ea4e33293d4d274a2ba73150733c2612231f398c a 1 + +Rename a->b, then remove b in working copy. Result should remove a. + + $ hg co -q 0 + $ hg mv a b + $ hg ci -qm 'move a to b' + $ hg rm b + $ hg uncommit --config experimental.uncommitondirtywdir=True + $ hg st --copies + R a + $ hg revert a + +Rename a->b, then rename b->c in working copy. Result should rename a->c. + + $ hg co -q 0 + $ hg mv a b + $ hg ci -qm 'move a to b' + $ hg mv b c + $ hg uncommit --config experimental.uncommitondirtywdir=True + $ hg st --copies + A c + a + R a + $ hg revert a + $ hg forget c + $ rm c + +Copy a->b1 and a->b2, then rename b1->c in working copy. Result should copy a->b2 and a->c. + + $ hg co -q 0 + $ hg cp a b1 + $ hg cp a b2 + $ hg ci -qm 'move a to b1 and b2' + $ hg mv b1 c + $ hg uncommit --config experimental.uncommitondirtywdir=True + $ hg st --copies + A b2 + a + A c + a
--- a/tests/test-update-atomic.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-update-atomic.t Tue Feb 19 21:55:05 2019 -0800 @@ -4,13 +4,14 @@ $ cat > $TESTTMP/show_mode.py <<EOF > from __future__ import print_function + > import os + > import stat > import sys - > import os - > from stat import ST_MODE + > ST_MODE = stat.ST_MODE > > for file_path in sys.argv[1:]: > file_stat = os.stat(file_path) - > octal_mode = oct(file_stat[ST_MODE] & 0o777) + > octal_mode = oct(file_stat[ST_MODE] & 0o777).replace('o', '') > print("%s:%s" % (file_path, octal_mode)) > > EOF @@ -19,11 +20,15 @@ $ cd repo $ cat > .hg/showwrites.py <<EOF + > from __future__ import print_function + > from mercurial import pycompat + > from mercurial.utils import stringutil > def uisetup(ui): > from mercurial import vfs > class newvfs(vfs.vfs): > def __call__(self, *args, **kwargs): - > print('vfs open', args, sorted(list(kwargs.items()))) + > print(pycompat.sysstr(stringutil.pprint( + > ('vfs open', args, sorted(list(kwargs.items())))))) > return super(newvfs, self).__call__(*args, **kwargs) > vfs.vfs = newvfs > EOF
--- a/tests/test-wireproto-command-capabilities.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-wireproto-command-capabilities.t Tue Feb 19 21:55:05 2019 -0800 @@ -22,6 +22,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -45,6 +46,7 @@ > x-hgproto-1: cbor > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -82,6 +84,7 @@ > x-hgupgrade-1: foo bar > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -106,6 +109,7 @@ > x-hgproto-1: some value > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -131,6 +135,7 @@ > x-hgproto-1: cbor > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -170,6 +175,7 @@ > x-hgproto-1: cbor > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -202,6 +208,7 @@ > x-hgproto-1: cbor > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -460,6 +467,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) 
s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -478,6 +486,7 @@ s> \r\n s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefa
ult\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -498,23 +507,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 65e\r\n s> V\x06\x00\x01\x00\x02\x041 s> 
\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x8
3LgeneraldeltaHrevlogv1Lsparserevlog s> \r\n - received frame(size=1622; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': {
--- a/tests/test-wireproto-content-redirects.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-wireproto-content-redirects.t Tue Feb 19 21:55:05 2019 -0800 @@ -51,6 +51,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -71,6 +72,7 @@ (remote redirect target target-a is compatible) (tls1.2 !) (remote redirect target target-a requires unsupported TLS versions: 1.2, 1.3) (no-tls1.2 !) sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -93,23 +95,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 6de\r\n s> \xd6\x06\x00\x01\x00\x02\x041 s> 
\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x8
3LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/ s> \r\n - received frame(size=1750; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': { @@ -383,6 +381,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -403,6 +402,7 @@ (remote redirect target target-a is compatible) (remote redirect target target-b uses unsupported protocol: unknown) sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -423,23 +423,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 6f9\r\n s> \xf1\x06\x00\x01\x00\x02\x041 s> 
\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x8
3LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/ s> \r\n - received frame(size=1777; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': { @@ -720,6 +716,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -739,6 +736,7 @@ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermiss
ions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (redirect target target-bad-tls requires SNI, which is unsupported) sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) 
s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -759,23 +757,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 6d1\r\n s> \xc9\x06\x00\x01\x00\x02\x041 s> \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequir
ed\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/ s> \r\n - received frame(size=1737; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': { @@ -1046,6 +1040,7 @@ > command capabilities > EOF creating http peer for wire protocol version 2 + s> setsockopt(6, 1, 1) -> None (?) 
s> GET /?cmd=capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> vary: X-HgProto-1,X-HgUpgrade-1\r\n @@ -1065,6 +1060,7 @@ s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf
6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (remote redirect target target-bad-tls requires unsupported TLS versions: 39, 42) sending capabilities command + s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> accept: application/mercurial-exp-framing-0006\r\n @@ -1085,23 +1081,19 @@ s> \t\x00\x00\x01\x00\x02\x01\x92 s> Hidentity s> \r\n - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) s> 13\r\n s> \x0b\x00\x00\x01\x00\x02\x041 s> \xa1FstatusBok s> \r\n - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 6d7\r\n s> \xcf\x06\x00\x01\x00\x02\x041 s> 
\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x8
3LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/ s> \r\n - received frame(size=1743; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) s> 8\r\n s> \x00\x00\x00\x01\x00\x02\x002 s> \r\n s> 0\r\n s> \r\n - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) response: gen[ { b'commands': { @@ -1372,6 +1364,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/simplecache/missingkey HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n @@ -1416,6 +1409,7 @@ > user-agent: test > EOF using raw connection to peer + s> setsockopt(6, 1, 1) -> None (?) s> GET /api/simplecache/47abb8efa5f01b8964d74917793ad2464db0fa2c HTTP/1.1\r\n s> Accept-Encoding: identity\r\n s> user-agent: test\r\n
--- a/tests/test-wireproto-exchangev2.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-wireproto-exchangev2.t Tue Feb 19 21:55:05 2019 -0800 @@ -36,7 +36,10 @@ Test basic clone - $ hg --debug clone -U http://localhost:$HGPORT client-simple +Output is flaky, save it in a file and check part independently + $ hg --debug clone -U http://localhost:$HGPORT client-simple > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -45,13 +48,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -71,10 +67,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=941; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset 4432d83626e8 add changeset cd2534766bec @@ -97,10 +89,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; 
streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=992; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -121,13 +109,32 @@ } ] } + updating the branch cache + new changesets 3390ef850073:caa2a465451d (3 drafts) + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=941; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; 
streamflags=encoded; type=command-response; flags=continuation) + received frame(size=992; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=901; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:caa2a465451d (3 drafts) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output All changesets should have been transferred @@ -163,30 +170,22 @@ Cloning only a specific revision works - $ hg --debug clone -U -r 4432d83626e8 http://localhost:$HGPORT client-singlehead +Output is flaky, save it in a file and check part independently + $ hg --debug clone -U -r 4432d83626e8 http://localhost:$HGPORT client-singlehead > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command sending 1 commands sending command lookup: { 'key': '4432d83626e8' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=21; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) query 1; heads sending 2 commands sending command heads: {} sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; 
type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -205,10 +204,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=381; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset 4432d83626e8 checking for updated bookmarks @@ -225,10 +220,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=404; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -246,13 +237,36 @@ } ] } + updating the branch cache + new changesets 3390ef850073:4432d83626e8 + (sent 6 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep 
"received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=21; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=381; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=404; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; 
type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=439; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:4432d83626e8 - (sent 6 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output $ cd client-singlehead @@ -269,7 +283,10 @@ Incremental pull works - $ hg --debug pull +Output is flaky, save it in a file and check part independently + $ hg --debug pull > pull-output + + $ cat pull-output | grep -v "received frame" pulling from http://localhost:$HGPORT/ using http://localhost:$HGPORT/ sending capabilities command @@ -281,13 +298,6 @@ 'D2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) searching for changes all local heads known remotely sending 1 commands @@ -311,10 +321,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received 
frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=573; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset cd2534766bec add changeset e96ae20f4188 add changeset caa2a465451d @@ -333,10 +339,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=601; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -355,14 +357,33 @@ } ] } + updating the branch cache + new changesets cd2534766bec:caa2a465451d (3 drafts) + (run 'hg update' to get a working copy) + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat pull-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; 
type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=573; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=601; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=527; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets cd2534766bec:caa2a465451d (3 drafts) - (run 'hg update' to get a working copy) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm pull-output $ hg log -G -T '{rev} {node} {phase}\n' o 4 caa2a465451dd1facda0f5b12312c355584188a1 draft @@ -459,7 +480,10 @@ $ hg -R server-simple bookmark -r 3390ef850073fbc2f0dfff2244342c8e9229013a book-1 $ hg -R server-simple bookmark -r cd2534766bece138c7c1afdc6825302f0f62d81f book-2 - $ hg --debug clone -U http://localhost:$HGPORT/ client-bookmarks +Output is flaky, save it in a file and check part independently + $ hg --debug clone -U http://localhost:$HGPORT/ client-bookmarks > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities 
command query 1; heads @@ -468,13 +492,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -494,10 +511,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=979; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset 4432d83626e8 add changeset cd2534766bec @@ -522,10 +535,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=992; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -546,13 +555,32 @@ } ] } + updating the branch cache + 
new changesets 3390ef850073:caa2a465451d (1 drafts) + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=979; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=992; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=901; 
request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:caa2a465451d (1 drafts) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output $ hg -R client-bookmarks bookmarks book-1 0:3390ef850073 @@ -563,7 +591,10 @@ $ hg -R server-simple bookmark -r cd2534766bece138c7c1afdc6825302f0f62d81f book-1 moving bookmark 'book-1' forward from 3390ef850073 - $ hg -R client-bookmarks --debug pull +Output is flaky, save it in a file and check part independently + $ hg -R client-bookmarks --debug pull > pull-output + + $ cat pull-output | grep -v "received frame" pulling from http://localhost:$HGPORT/ using http://localhost:$HGPORT/ sending capabilities command @@ -576,13 +607,6 @@ '\xca\xa2\xa4eE\x1d\xd1\xfa\xcd\xa0\xf5\xb1#\x12\xc3UXA\x88\xa1' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=3; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) searching for changes all remote heads known locally sending 1 commands @@ -607,14 +631,25 @@ } ] } + checking for updated bookmarks + updating bookmark book-1 + (run 'hg update' to get a working copy) + (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat pull-output 
| grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=43; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=3; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=65; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - checking for updated bookmarks - updating bookmark book-1 - (run 'hg update' to get a working copy) - (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm pull-output $ hg -R client-bookmarks bookmarks book-1 2:cd2534766bec @@ -647,7 +682,10 @@ Narrow clone only fetches some files - $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --include dir0/ http://localhost:$HGPORT/ client-narrow-0 +Output is flaky, save it in a file and check part independently + $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --include dir0/ http://localhost:$HGPORT/ client-narrow-0 > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -656,13 +694,6 @@ sending command known: { 
'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -681,10 +712,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset b709380892b1 add changeset 47fe012ab237 @@ -705,10 +732,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -733,13 +756,32 @@ } ] } + updating the branch cache + new changesets 3390ef850073:97765fc3cd62 + (sent 5 HTTP requests and * 
bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=449; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) 
received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:97765fc3cd62 - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output #if reporevlogstore $ find client-narrow-0/.hg/store -type f -name '*.i' | sort @@ -751,7 +793,10 @@ --exclude by itself works - $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --exclude dir0/ http://localhost:$HGPORT/ client-narrow-1 +Output is flaky, save it in a file and check part independently + $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --exclude dir0/ http://localhost:$HGPORT/ client-narrow-1 > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -760,13 +805,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -785,10 +823,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=783; request=1; 
stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset b709380892b1 add changeset 47fe012ab237 @@ -809,10 +843,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -840,13 +870,32 @@ } ] } + updating the branch cache + new changesets 3390ef850073:97765fc3cd62 + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=783; request=1; stream=2; 
streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=709; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 3390ef850073:97765fc3cd62 - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output #if reporevlogstore $ find client-narrow-1/.hg/store -type f -name '*.i' | sort @@ -860,7 +909,10 @@ Mixing --include and --exclude works - $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --include dir0/ --exclude dir0/c http://localhost:$HGPORT/ client-narrow-2 +Output is flaky, save it in a file and check part independently + $ hg --config extensions.pullext=$TESTDIR/pullext.py --debug clone -U --include dir0/ --exclude dir0/c http://localhost:$HGPORT/ client-narrow-2 > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -869,13 +921,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; 
request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -894,10 +939,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset b709380892b1 add changeset 47fe012ab237 @@ -918,10 +959,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -949,13 +986,32 @@ } ] } + updating the branch cache + new changesets 3390ef850073:97765fc3cd62 + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; 
streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=160; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - updating the branch cache - new changesets 
3390ef850073:97765fc3cd62 - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output #if reporevlogstore $ find client-narrow-2/.hg/store -type f -name '*.i' | sort @@ -967,7 +1023,10 @@ --stream will use rawfiledata to transfer changelog and manifestlog, then fall through to get files data - $ hg --debug clone --stream -U http://localhost:$HGPORT client-stream-0 +Output is flaky, save it in a file and check part independently + $ hg --debug clone --stream -U http://localhost:$HGPORT client-stream-0 > clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command sending 1 commands @@ -977,10 +1036,6 @@ 'manifestlog' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) updating the branch cache query 1; heads sending 2 commands @@ -990,13 +1045,6 @@ '\x97v_\xc3\xcdbO\xd1\xfa\x01v\x93,!\xff\xd1j\xdfC.' 
] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) searching for changes all remote heads known locally sending 1 commands @@ -1019,10 +1067,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=13; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) checking for updated bookmarks sending 1 commands sending command filesdata: { @@ -1043,15 +1087,37 @@ } ] } + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; 
type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=13; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=1133; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output --stream + --include/--exclude will only obtain some files - $ hg --debug --config extensions.pullext=$TESTDIR/pullext.py clone --stream --include dir0/ -U http://localhost:$HGPORT client-stream-2 +Output is flaky, save it in a file and check part independently + $ hg --debug --config extensions.pullext=$TESTDIR/pullext.py clone --stream --include dir0/ -U http://localhost:$HGPORT client-stream-2 > 
clone-output + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command sending 1 commands @@ -1061,10 +1127,6 @@ 'manifestlog' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) updating the branch cache query 1; heads sending 2 commands @@ -1074,13 +1136,6 @@ '\x97v_\xc3\xcdbO\xd1\xfa\x01v\x93,!\xff\xd1j\xdfC.' ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) searching for changes all remote heads known locally sending 1 commands @@ -1103,10 +1158,6 @@ } ] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=13; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; 
flags=eos) checking for updated bookmarks sending 1 commands sending command filesdata: { @@ -1132,11 +1183,30 @@ } ] } + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=2; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=13; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; 
type=command-response; flags=continuation) received frame(size=449; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ rm clone-output #if reporevlogstore $ find client-stream-2/.hg/store -type f -name '*.i' | sort @@ -1148,7 +1218,14 @@ Shallow clone doesn't work with revlogs - $ hg --debug --config extensions.pullext=$TESTDIR/pullext.py clone --depth 1 -U http://localhost:$HGPORT client-shallow-revlogs +Output is flaky, save it in a file and check part independently + $ hg --debug --config extensions.pullext=$TESTDIR/pullext.py clone --depth 1 -U http://localhost:$HGPORT client-shallow-revlogs > clone-output + transaction abort! + rollback completed + abort: revlog storage does not support missing parents write mode + [255] + + $ cat clone-output | grep -v "received frame" using http://localhost:$HGPORT/ sending capabilities command query 1; heads @@ -1157,13 +1234,6 @@ sending command known: { 'nodes': [] } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command changesetdata: { 'fields': set([ @@ -1182,10 +1252,6 @@ } ] } - received frame(size=9; request=1; 
stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) add changeset 3390ef850073 add changeset b709380892b1 add changeset 47fe012ab237 @@ -1206,10 +1272,6 @@ ], 'tree': '' } - received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) - received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) sending 1 commands sending command filesdata: { 'fields': set([ @@ -1227,15 +1289,30 @@ } ] } + (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) + + $ cat clone-output | grep "received frame" + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=11; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1; request=3; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=3; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received 
frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) + received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) + received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=967; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=1005; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) - transaction abort! - rollback completed - (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) - abort: revlog storage does not support missing parents write mode - [255] + + $ rm clone-output $ killdaemons.py
--- a/tests/test-worker.t Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/test-worker.t Tue Feb 19 21:55:05 2019 -0800 @@ -83,8 +83,10 @@ [255] $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \ - > test 100000.0 abort --traceback 2>&1 | egrep '^(SystemExit|Abort)' - Abort: known exception + > test 100000.0 abort --traceback 2>&1 | egrep '(SystemExit|Abort)' + raise error.Abort(b'known exception') + mercurial.error.Abort: b'known exception' (py3 !) + Abort: known exception (no-py3 !) SystemExit: 255 Traceback must be printed for unknown exceptions
--- a/tests/tinyproxy.py Thu Feb 07 20:50:41 2019 +0900 +++ b/tests/tinyproxy.py Tue Feb 19 21:55:05 2019 -0800 @@ -20,7 +20,10 @@ import socket import sys -from mercurial import util +from mercurial import ( + pycompat, + util, +) httpserver = util.httpserver socketserver = util.socketserver @@ -77,10 +80,11 @@ try: if self._connect_to(self.path, soc): self.log_request(200) - self.wfile.write(self.protocol_version + - " 200 Connection established\r\n") - self.wfile.write("Proxy-agent: %s\r\n" % self.version_string()) - self.wfile.write("\r\n") + self.wfile.write(pycompat.bytestr(self.protocol_version) + + b" 200 Connection established\r\n") + self.wfile.write(b"Proxy-agent: %s\r\n" % + pycompat.bytestr(self.version_string())) + self.wfile.write(b"\r\n") self._read_write(soc, 300) finally: print("\t" "bye") @@ -97,15 +101,17 @@ try: if self._connect_to(netloc, soc): self.log_request() - soc.send("%s %s %s\r\n" % ( - self.command, - urlreq.urlunparse(('', '', path, params, query, '')), - self.request_version)) + url = urlreq.urlunparse(('', '', path, params, query, '')) + soc.send(b"%s %s %s\r\n" % ( + pycompat.bytestr(self.command), + pycompat.bytestr(url), + pycompat.bytestr(self.request_version))) self.headers['Connection'] = 'close' del self.headers['Proxy-Connection'] - for key_val in self.headers.items(): - soc.send("%s: %s\r\n" % key_val) - soc.send("\r\n") + for key, val in self.headers.items(): + soc.send(b"%s: %s\r\n" % (pycompat.bytestr(key), + pycompat.bytestr(val))) + soc.send(b"\r\n") self._read_write(soc) finally: print("\t" "bye")