changeset 41529:13a6dd952ffe

merge with stable
author Pulkit Goyal <pulkit@yandex-team.ru>
date Mon, 04 Feb 2019 20:35:21 +0300
parents b7a0efb3c370 (diff) 8b2892d5a9f2 (current diff)
children 4dd07bf84608
files mercurial/commands.py mercurial/help/config.txt mercurial/localrepo.py mercurial/revlog.py mercurial/subrepo.py tests/test-help.t
diffstat 213 files changed, 3723 insertions(+), 1995 deletions(-)
--- a/contrib/bdiff-torture.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/bdiff-torture.py	Mon Feb 04 20:35:21 2019 +0300
@@ -25,7 +25,7 @@
 
         try:
             test1(a, b)
-        except Exception as inst:
+        except Exception:
             reductions += 1
             tries = 0
             a = a2
--- a/contrib/check-code.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/check-code.py	Mon Feb 04 20:35:21 2019 +0300
@@ -610,7 +610,7 @@
     try:
         with opentext(f) as fp:
             try:
-                pre = post = fp.read()
+                pre = fp.read()
             except UnicodeDecodeError as e:
                 print("%s while reading %s" % (e, f))
                 return result
--- a/contrib/chg/hgclient.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/chg/hgclient.c	Mon Feb 04 20:35:21 2019 +0300
@@ -84,8 +84,9 @@
 
 static void enlargecontext(context_t *ctx, size_t newsize)
 {
-	if (newsize <= ctx->maxdatasize)
+	if (newsize <= ctx->maxdatasize) {
 		return;
+	}
 
 	newsize = defaultdatasize *
 	          ((newsize + defaultdatasize - 1) / defaultdatasize);
@@ -117,22 +118,25 @@
 
 	uint32_t datasize_n;
 	rsize = recv(hgc->sockfd, &datasize_n, sizeof(datasize_n), 0);
-	if (rsize != sizeof(datasize_n))
+	if (rsize != sizeof(datasize_n)) {
 		abortmsg("failed to read data size");
+	}
 
 	/* datasize denotes the maximum size to write if input request */
 	hgc->ctx.datasize = ntohl(datasize_n);
 	enlargecontext(&hgc->ctx, hgc->ctx.datasize);
 
-	if (isupper(hgc->ctx.ch) && hgc->ctx.ch != 'S')
+	if (isupper(hgc->ctx.ch) && hgc->ctx.ch != 'S') {
 		return; /* assumes input request */
+	}
 
 	size_t cursize = 0;
 	while (cursize < hgc->ctx.datasize) {
 		rsize = recv(hgc->sockfd, hgc->ctx.data + cursize,
 		             hgc->ctx.datasize - cursize, 0);
-		if (rsize < 1)
+		if (rsize < 1) {
 			abortmsg("failed to read data block");
+		}
 		cursize += rsize;
 	}
 }
@@ -143,8 +147,9 @@
 	const char *const endp = p + datasize;
 	while (p < endp) {
 		ssize_t r = send(sockfd, p, endp - p, 0);
-		if (r < 0)
+		if (r < 0) {
 			abortmsgerrno("cannot communicate");
+		}
 		p += r;
 	}
 }
@@ -186,8 +191,9 @@
 		ctx->datasize += n;
 	}
 
-	if (ctx->datasize > 0)
+	if (ctx->datasize > 0) {
 		--ctx->datasize; /* strip last '\0' */
+	}
 }
 
 /* Extract '\0'-separated list of args to new buffer, terminated by NULL */
@@ -205,8 +211,9 @@
 		args[nargs] = s;
 		nargs++;
 		s = memchr(s, '\0', e - s);
-		if (!s)
+		if (!s) {
 			break;
+		}
 		s++;
 	}
 	args[nargs] = NULL;
@@ -225,8 +232,9 @@
 static void handlereadlinerequest(hgclient_t *hgc)
 {
 	context_t *ctx = &hgc->ctx;
-	if (!fgets(ctx->data, ctx->datasize, stdin))
+	if (!fgets(ctx->data, ctx->datasize, stdin)) {
 		ctx->data[0] = '\0';
+	}
 	ctx->datasize = strlen(ctx->data);
 	writeblock(hgc);
 }
@@ -239,8 +247,9 @@
 	ctx->data[ctx->datasize] = '\0'; /* terminate last string */
 
 	const char **args = unpackcmdargsnul(ctx);
-	if (!args[0] || !args[1] || !args[2])
+	if (!args[0] || !args[1] || !args[2]) {
 		abortmsg("missing type or command or cwd in system request");
+	}
 	if (strcmp(args[0], "system") == 0) {
 		debugmsg("run '%s' at '%s'", args[1], args[2]);
 		int32_t r = runshellcmd(args[1], args + 3, args[2]);
@@ -252,8 +261,9 @@
 		writeblock(hgc);
 	} else if (strcmp(args[0], "pager") == 0) {
 		setuppager(args[1], args + 3);
-		if (hgc->capflags & CAP_ATTACHIO)
+		if (hgc->capflags & CAP_ATTACHIO) {
 			attachio(hgc);
+		}
 		/* unblock the server */
 		static const char emptycmd[] = "\n";
 		sendall(hgc->sockfd, emptycmd, sizeof(emptycmd) - 1);
@@ -296,9 +306,10 @@
 			handlesystemrequest(hgc);
 			break;
 		default:
-			if (isupper(ctx->ch))
+			if (isupper(ctx->ch)) {
 				abortmsg("cannot handle response (ch = %c)",
 				         ctx->ch);
+			}
 		}
 	}
 }
@@ -308,8 +319,9 @@
 	unsigned int flags = 0;
 	while (s < e) {
 		const char *t = strchr(s, ' ');
-		if (!t || t > e)
+		if (!t || t > e) {
 			t = e;
+		}
 		const cappair_t *cap;
 		for (cap = captable; cap->flag; ++cap) {
 			size_t n = t - s;
@@ -346,11 +358,13 @@
 	const char *const dataend = ctx->data + ctx->datasize;
 	while (s < dataend) {
 		const char *t = strchr(s, ':');
-		if (!t || t[1] != ' ')
+		if (!t || t[1] != ' ') {
 			break;
+		}
 		const char *u = strchr(t + 2, '\n');
-		if (!u)
+		if (!u) {
 			u = dataend;
+		}
 		if (strncmp(s, "capabilities:", t - s + 1) == 0) {
 			hgc->capflags = parsecapabilities(t + 2, u);
 		} else if (strncmp(s, "pgid:", t - s + 1) == 0) {
@@ -367,8 +381,9 @@
 {
 	int r = snprintf(hgc->ctx.data, hgc->ctx.maxdatasize, "chg[worker/%d]",
 	                 (int)getpid());
-	if (r < 0 || (size_t)r >= hgc->ctx.maxdatasize)
+	if (r < 0 || (size_t)r >= hgc->ctx.maxdatasize) {
 		abortmsg("insufficient buffer to write procname (r = %d)", r);
+	}
 	hgc->ctx.datasize = (size_t)r;
 	writeblockrequest(hgc, "setprocname");
 }
@@ -380,8 +395,9 @@
 	sendall(hgc->sockfd, chcmd, sizeof(chcmd) - 1);
 	readchannel(hgc);
 	context_t *ctx = &hgc->ctx;
-	if (ctx->ch != 'I')
+	if (ctx->ch != 'I') {
 		abortmsg("unexpected response for attachio (ch = %c)", ctx->ch);
+	}
 
 	static const int fds[3] = {STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO};
 	struct msghdr msgh;
@@ -399,23 +415,27 @@
 	memcpy(CMSG_DATA(cmsg), fds, sizeof(fds));
 	msgh.msg_controllen = cmsg->cmsg_len;
 	ssize_t r = sendmsg(hgc->sockfd, &msgh, 0);
-	if (r < 0)
+	if (r < 0) {
 		abortmsgerrno("sendmsg failed");
+	}
 
 	handleresponse(hgc);
 	int32_t n;
-	if (ctx->datasize != sizeof(n))
+	if (ctx->datasize != sizeof(n)) {
 		abortmsg("unexpected size of attachio result");
+	}
 	memcpy(&n, ctx->data, sizeof(n));
 	n = ntohl(n);
-	if (n != sizeof(fds) / sizeof(fds[0]))
+	if (n != sizeof(fds) / sizeof(fds[0])) {
 		abortmsg("failed to send fds (n = %d)", n);
+	}
 }
 
 static void chdirtocwd(hgclient_t *hgc)
 {
-	if (!getcwd(hgc->ctx.data, hgc->ctx.maxdatasize))
+	if (!getcwd(hgc->ctx.data, hgc->ctx.maxdatasize)) {
 		abortmsgerrno("failed to getcwd");
+	}
 	hgc->ctx.datasize = strlen(hgc->ctx.data);
 	writeblockrequest(hgc, "chdir");
 }
@@ -440,8 +460,9 @@
 hgclient_t *hgc_open(const char *sockname)
 {
 	int fd = socket(AF_UNIX, SOCK_STREAM, 0);
-	if (fd < 0)
+	if (fd < 0) {
 		abortmsgerrno("cannot create socket");
+	}
 
 	/* don't keep fd on fork(), so that it can be closed when the parent
 	 * process get terminated. */
@@ -456,34 +477,39 @@
 	{
 		const char *split = strrchr(sockname, '/');
 		if (split && split != sockname) {
-			if (split[1] == '\0')
+			if (split[1] == '\0') {
 				abortmsg("sockname cannot end with a slash");
+			}
 			size_t len = split - sockname;
 			char sockdir[len + 1];
 			memcpy(sockdir, sockname, len);
 			sockdir[len] = '\0';
 
 			bakfd = open(".", O_DIRECTORY);
-			if (bakfd == -1)
+			if (bakfd == -1) {
 				abortmsgerrno("cannot open cwd");
+			}
 
 			int r = chdir(sockdir);
-			if (r != 0)
+			if (r != 0) {
 				abortmsgerrno("cannot chdir %s", sockdir);
+			}
 
 			basename = split + 1;
 		}
 	}
-	if (strlen(basename) >= sizeof(addr.sun_path))
+	if (strlen(basename) >= sizeof(addr.sun_path)) {
 		abortmsg("sockname is too long: %s", basename);
+	}
 	strncpy(addr.sun_path, basename, sizeof(addr.sun_path));
 	addr.sun_path[sizeof(addr.sun_path) - 1] = '\0';
 
 	/* real connect */
 	int r = connect(fd, (struct sockaddr *)&addr, sizeof(addr));
 	if (r < 0) {
-		if (errno != ENOENT && errno != ECONNREFUSED)
+		if (errno != ENOENT && errno != ECONNREFUSED) {
 			abortmsgerrno("cannot connect to %s", sockname);
+		}
 	}
 	if (bakfd != -1) {
 		fchdirx(bakfd);
@@ -501,16 +527,21 @@
 	initcontext(&hgc->ctx);
 
 	readhello(hgc);
-	if (!(hgc->capflags & CAP_RUNCOMMAND))
+	if (!(hgc->capflags & CAP_RUNCOMMAND)) {
 		abortmsg("insufficient capability: runcommand");
-	if (hgc->capflags & CAP_SETPROCNAME)
+	}
+	if (hgc->capflags & CAP_SETPROCNAME) {
 		updateprocname(hgc);
-	if (hgc->capflags & CAP_ATTACHIO)
+	}
+	if (hgc->capflags & CAP_ATTACHIO) {
 		attachio(hgc);
-	if (hgc->capflags & CAP_CHDIR)
+	}
+	if (hgc->capflags & CAP_CHDIR) {
 		chdirtocwd(hgc);
-	if (hgc->capflags & CAP_SETUMASK2)
+	}
+	if (hgc->capflags & CAP_SETUMASK2) {
 		forwardumask(hgc);
+	}
 
 	return hgc;
 }
@@ -555,16 +586,18 @@
                           size_t argsize)
 {
 	assert(hgc);
-	if (!(hgc->capflags & CAP_VALIDATE))
+	if (!(hgc->capflags & CAP_VALIDATE)) {
 		return NULL;
+	}
 
 	packcmdargs(&hgc->ctx, args, argsize);
 	writeblockrequest(hgc, "validate");
 	handleresponse(hgc);
 
 	/* the server returns '\0' if it can handle our request */
-	if (hgc->ctx.datasize <= 1)
+	if (hgc->ctx.datasize <= 1) {
 		return NULL;
+	}
 
 	/* make sure the buffer is '\0' terminated */
 	enlargecontext(&hgc->ctx, hgc->ctx.datasize + 1);
@@ -599,8 +632,9 @@
 void hgc_attachio(hgclient_t *hgc)
 {
 	assert(hgc);
-	if (!(hgc->capflags & CAP_ATTACHIO))
+	if (!(hgc->capflags & CAP_ATTACHIO)) {
 		return;
+	}
 	attachio(hgc);
 }
 
@@ -613,8 +647,9 @@
 void hgc_setenv(hgclient_t *hgc, const char *const envp[])
 {
 	assert(hgc && envp);
-	if (!(hgc->capflags & CAP_SETENV))
+	if (!(hgc->capflags & CAP_SETENV)) {
 		return;
+	}
 	packcmdargs(&hgc->ctx, envp, /*argsize*/ -1);
 	writeblockrequest(hgc, "setenv");
 }
--- a/contrib/chg/procutil.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/chg/procutil.c	Mon Feb 04 20:35:21 2019 +0300
@@ -25,8 +25,9 @@
 static void forwardsignal(int sig)
 {
 	assert(peerpid > 0);
-	if (kill(peerpid, sig) < 0)
+	if (kill(peerpid, sig) < 0) {
 		abortmsgerrno("cannot kill %d", peerpid);
+	}
 	debugmsg("forward signal %d", sig);
 }
 
@@ -34,8 +35,9 @@
 {
 	/* prefer kill(-pgid, sig), fallback to pid if pgid is invalid */
 	pid_t killpid = peerpgid > 1 ? -peerpgid : peerpid;
-	if (kill(killpid, sig) < 0)
+	if (kill(killpid, sig) < 0) {
 		abortmsgerrno("cannot kill %d", killpid);
+	}
 	debugmsg("forward signal %d to %d", sig, killpid);
 }
 
@@ -43,28 +45,36 @@
 {
 	sigset_t unblockset, oldset;
 	struct sigaction sa, oldsa;
-	if (sigemptyset(&unblockset) < 0)
+	if (sigemptyset(&unblockset) < 0) {
 		goto error;
-	if (sigaddset(&unblockset, sig) < 0)
+	}
+	if (sigaddset(&unblockset, sig) < 0) {
 		goto error;
+	}
 	memset(&sa, 0, sizeof(sa));
 	sa.sa_handler = SIG_DFL;
 	sa.sa_flags = SA_RESTART;
-	if (sigemptyset(&sa.sa_mask) < 0)
+	if (sigemptyset(&sa.sa_mask) < 0) {
 		goto error;
+	}
 
 	forwardsignal(sig);
-	if (raise(sig) < 0) /* resend to self */
+	if (raise(sig) < 0) { /* resend to self */
 		goto error;
-	if (sigaction(sig, &sa, &oldsa) < 0)
+	}
+	if (sigaction(sig, &sa, &oldsa) < 0) {
 		goto error;
-	if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0)
+	}
+	if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) {
 		goto error;
+	}
 	/* resent signal will be handled before sigprocmask() returns */
-	if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0)
+	if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) {
 		goto error;
-	if (sigaction(sig, &oldsa, NULL) < 0)
+	}
+	if (sigaction(sig, &oldsa, NULL) < 0) {
 		goto error;
+	}
 	return;
 
 error:
@@ -73,19 +83,22 @@
 
 static void handlechildsignal(int sig UNUSED_)
 {
-	if (peerpid == 0 || pagerpid == 0)
+	if (peerpid == 0 || pagerpid == 0) {
 		return;
+	}
 	/* if pager exits, notify the server with SIGPIPE immediately.
 	 * otherwise the server won't get SIGPIPE if it does not write
 	 * anything. (issue5278) */
-	if (waitpid(pagerpid, NULL, WNOHANG) == pagerpid)
+	if (waitpid(pagerpid, NULL, WNOHANG) == pagerpid) {
 		kill(peerpid, SIGPIPE);
+	}
 }
 
 void setupsignalhandler(pid_t pid, pid_t pgid)
 {
-	if (pid <= 0)
+	if (pid <= 0) {
 		return;
+	}
 	peerpid = pid;
 	peerpgid = (pgid <= 1 ? 0 : pgid);
 
@@ -98,42 +111,52 @@
 	 * - SIGINT: usually generated by the terminal */
 	sa.sa_handler = forwardsignaltogroup;
 	sa.sa_flags = SA_RESTART;
-	if (sigemptyset(&sa.sa_mask) < 0)
+	if (sigemptyset(&sa.sa_mask) < 0) {
+		goto error;
+	}
+	if (sigaction(SIGHUP, &sa, NULL) < 0) {
 		goto error;
-	if (sigaction(SIGHUP, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGINT, &sa, NULL) < 0) {
 		goto error;
-	if (sigaction(SIGINT, &sa, NULL) < 0)
-		goto error;
+	}
 
 	/* terminate frontend by double SIGTERM in case of server freeze */
 	sa.sa_handler = forwardsignal;
 	sa.sa_flags |= SA_RESETHAND;
-	if (sigaction(SIGTERM, &sa, NULL) < 0)
+	if (sigaction(SIGTERM, &sa, NULL) < 0) {
 		goto error;
+	}
 
 	/* notify the worker about window resize events */
 	sa.sa_flags = SA_RESTART;
-	if (sigaction(SIGWINCH, &sa, NULL) < 0)
+	if (sigaction(SIGWINCH, &sa, NULL) < 0) {
 		goto error;
+	}
 	/* forward user-defined signals */
-	if (sigaction(SIGUSR1, &sa, NULL) < 0)
+	if (sigaction(SIGUSR1, &sa, NULL) < 0) {
 		goto error;
-	if (sigaction(SIGUSR2, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGUSR2, &sa, NULL) < 0) {
 		goto error;
+	}
 	/* propagate job control requests to worker */
 	sa.sa_handler = forwardsignal;
 	sa.sa_flags = SA_RESTART;
-	if (sigaction(SIGCONT, &sa, NULL) < 0)
+	if (sigaction(SIGCONT, &sa, NULL) < 0) {
 		goto error;
+	}
 	sa.sa_handler = handlestopsignal;
 	sa.sa_flags = SA_RESTART;
-	if (sigaction(SIGTSTP, &sa, NULL) < 0)
+	if (sigaction(SIGTSTP, &sa, NULL) < 0) {
 		goto error;
+	}
 	/* get notified when pager exits */
 	sa.sa_handler = handlechildsignal;
 	sa.sa_flags = SA_RESTART;
-	if (sigaction(SIGCHLD, &sa, NULL) < 0)
+	if (sigaction(SIGCHLD, &sa, NULL) < 0) {
 		goto error;
+	}
 
 	return;
 
@@ -147,26 +170,34 @@
 	memset(&sa, 0, sizeof(sa));
 	sa.sa_handler = SIG_DFL;
 	sa.sa_flags = SA_RESTART;
-	if (sigemptyset(&sa.sa_mask) < 0)
+	if (sigemptyset(&sa.sa_mask) < 0) {
 		goto error;
+	}
 
-	if (sigaction(SIGHUP, &sa, NULL) < 0)
+	if (sigaction(SIGHUP, &sa, NULL) < 0) {
 		goto error;
-	if (sigaction(SIGTERM, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGTERM, &sa, NULL) < 0) {
 		goto error;
-	if (sigaction(SIGWINCH, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGWINCH, &sa, NULL) < 0) {
 		goto error;
-	if (sigaction(SIGCONT, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGCONT, &sa, NULL) < 0) {
 		goto error;
-	if (sigaction(SIGTSTP, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGTSTP, &sa, NULL) < 0) {
 		goto error;
-	if (sigaction(SIGCHLD, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGCHLD, &sa, NULL) < 0) {
 		goto error;
+	}
 
 	/* ignore Ctrl+C while shutting down to make pager exits cleanly */
 	sa.sa_handler = SIG_IGN;
-	if (sigaction(SIGINT, &sa, NULL) < 0)
+	if (sigaction(SIGINT, &sa, NULL) < 0) {
 		goto error;
+	}
 
 	peerpid = 0;
 	return;
@@ -180,22 +211,27 @@
 pid_t setuppager(const char *pagercmd, const char *envp[])
 {
 	assert(pagerpid == 0);
-	if (!pagercmd)
+	if (!pagercmd) {
 		return 0;
+	}
 
 	int pipefds[2];
-	if (pipe(pipefds) < 0)
+	if (pipe(pipefds) < 0) {
 		return 0;
+	}
 	pid_t pid = fork();
-	if (pid < 0)
+	if (pid < 0) {
 		goto error;
+	}
 	if (pid > 0) {
 		close(pipefds[0]);
-		if (dup2(pipefds[1], fileno(stdout)) < 0)
+		if (dup2(pipefds[1], fileno(stdout)) < 0) {
 			goto error;
+		}
 		if (isatty(fileno(stderr))) {
-			if (dup2(pipefds[1], fileno(stderr)) < 0)
+			if (dup2(pipefds[1], fileno(stderr)) < 0) {
 				goto error;
+			}
 		}
 		close(pipefds[1]);
 		pagerpid = pid;
@@ -222,16 +258,18 @@
 
 void waitpager(void)
 {
-	if (pagerpid == 0)
+	if (pagerpid == 0) {
 		return;
+	}
 
 	/* close output streams to notify the pager its input ends */
 	fclose(stdout);
 	fclose(stderr);
 	while (1) {
 		pid_t ret = waitpid(pagerpid, NULL, 0);
-		if (ret == -1 && errno == EINTR)
+		if (ret == -1 && errno == EINTR) {
 			continue;
+		}
 		break;
 	}
 }
--- a/contrib/chg/util.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/chg/util.c	Mon Feb 04 20:35:21 2019 +0300
@@ -25,8 +25,9 @@
 
 static inline void fsetcolor(FILE *fp, const char *code)
 {
-	if (!colorenabled)
+	if (!colorenabled) {
 		return;
+	}
 	fprintf(fp, "\033[%sm", code);
 }
 
@@ -35,8 +36,9 @@
 	fsetcolor(stderr, "1;31");
 	fputs("chg: abort: ", stderr);
 	vfprintf(stderr, fmt, args);
-	if (no != 0)
+	if (no != 0) {
 		fprintf(stderr, " (errno = %d, %s)", no, strerror(no));
+	}
 	fsetcolor(stderr, "");
 	fputc('\n', stderr);
 	exit(255);
@@ -82,8 +84,9 @@
 
 void debugmsg(const char *fmt, ...)
 {
-	if (!debugmsgenabled)
+	if (!debugmsgenabled) {
 		return;
+	}
 
 	va_list args;
 	va_start(args, fmt);
@@ -98,32 +101,37 @@
 void fchdirx(int dirfd)
 {
 	int r = fchdir(dirfd);
-	if (r == -1)
+	if (r == -1) {
 		abortmsgerrno("failed to fchdir");
+	}
 }
 
 void fsetcloexec(int fd)
 {
 	int flags = fcntl(fd, F_GETFD);
-	if (flags < 0)
+	if (flags < 0) {
 		abortmsgerrno("cannot get flags of fd %d", fd);
-	if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0)
+	}
+	if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0) {
 		abortmsgerrno("cannot set flags of fd %d", fd);
+	}
 }
 
 void *mallocx(size_t size)
 {
 	void *result = malloc(size);
-	if (!result)
+	if (!result) {
 		abortmsg("failed to malloc");
+	}
 	return result;
 }
 
 void *reallocx(void *ptr, size_t size)
 {
 	void *result = realloc(ptr, size);
-	if (!result)
+	if (!result) {
 		abortmsg("failed to realloc");
+	}
 	return result;
 }
 
@@ -144,30 +152,37 @@
 	memset(&newsa, 0, sizeof(newsa));
 	newsa.sa_handler = SIG_IGN;
 	newsa.sa_flags = 0;
-	if (sigemptyset(&newsa.sa_mask) < 0)
+	if (sigemptyset(&newsa.sa_mask) < 0) {
 		goto done;
-	if (sigaction(SIGINT, &newsa, &oldsaint) < 0)
+	}
+	if (sigaction(SIGINT, &newsa, &oldsaint) < 0) {
 		goto done;
+	}
 	doneflags |= F_SIGINT;
-	if (sigaction(SIGQUIT, &newsa, &oldsaquit) < 0)
+	if (sigaction(SIGQUIT, &newsa, &oldsaquit) < 0) {
 		goto done;
+	}
 	doneflags |= F_SIGQUIT;
 
-	if (sigaddset(&newsa.sa_mask, SIGCHLD) < 0)
+	if (sigaddset(&newsa.sa_mask, SIGCHLD) < 0) {
 		goto done;
-	if (sigprocmask(SIG_BLOCK, &newsa.sa_mask, &oldmask) < 0)
+	}
+	if (sigprocmask(SIG_BLOCK, &newsa.sa_mask, &oldmask) < 0) {
 		goto done;
+	}
 	doneflags |= F_SIGMASK;
 
 	pid_t pid = fork();
-	if (pid < 0)
+	if (pid < 0) {
 		goto done;
+	}
 	if (pid == 0) {
 		sigaction(SIGINT, &oldsaint, NULL);
 		sigaction(SIGQUIT, &oldsaquit, NULL);
 		sigprocmask(SIG_SETMASK, &oldmask, NULL);
-		if (cwd && chdir(cwd) < 0)
+		if (cwd && chdir(cwd) < 0) {
 			_exit(127);
+		}
 		const char *argv[] = {"sh", "-c", cmd, NULL};
 		if (envp) {
 			execve("/bin/sh", (char **)argv, (char **)envp);
@@ -176,25 +191,32 @@
 		}
 		_exit(127);
 	} else {
-		if (waitpid(pid, &status, 0) < 0)
+		if (waitpid(pid, &status, 0) < 0) {
 			goto done;
+		}
 		doneflags |= F_WAITPID;
 	}
 
 done:
-	if (doneflags & F_SIGINT)
+	if (doneflags & F_SIGINT) {
 		sigaction(SIGINT, &oldsaint, NULL);
-	if (doneflags & F_SIGQUIT)
+	}
+	if (doneflags & F_SIGQUIT) {
 		sigaction(SIGQUIT, &oldsaquit, NULL);
-	if (doneflags & F_SIGMASK)
+	}
+	if (doneflags & F_SIGMASK) {
 		sigprocmask(SIG_SETMASK, &oldmask, NULL);
+	}
 
 	/* no way to report other errors, use 127 (= shell termination) */
-	if (!(doneflags & F_WAITPID))
+	if (!(doneflags & F_WAITPID)) {
 		return 127;
-	if (WIFEXITED(status))
+	}
+	if (WIFEXITED(status)) {
 		return WEXITSTATUS(status);
-	if (WIFSIGNALED(status))
+	}
+	if (WIFSIGNALED(status)) {
 		return -WTERMSIG(status);
+	}
 	return 127;
 }
--- a/contrib/fuzz/manifest.cc	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/fuzz/manifest.cc	Mon Feb 04 20:35:21 2019 +0300
@@ -20,11 +20,19 @@
   lm = lazymanifest(mdata)
   # iterate the whole thing, which causes the code to fully parse
   # every line in the manifest
-  list(lm.iterentries())
+  for e, _, _ in lm.iterentries():
+      # also exercise __getitem__ et al
+      lm[e]
+      e in lm
+      (e + 'nope') in lm
   lm[b'xyzzy'] = (b'\0' * 20, 'x')
   # do an insert, text should change
   assert lm.text() != mdata, "insert should change text and didn't: %r %r" % (lm.text(), mdata)
+  cloned = lm.filtercopy(lambda x: x != 'xyzzy')
+  assert cloned.text() == mdata, 'cloned text should equal mdata'
+  cloned.diff(lm)
   del lm[b'xyzzy']
+  cloned.diff(lm)
   # should be back to the same
   assert lm.text() == mdata, "delete should have restored text but didn't: %r %r" % (lm.text(), mdata)
 except Exception as e:
@@ -39,6 +47,11 @@
 
 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
 {
+	// Don't allow fuzzer inputs larger than 100k, since we'll just bog
+	// down and not accomplish much.
+	if (Size > 100000) {
+		return 0;
+	}
 	PyObject *mtext =
 	    PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
 	PyObject *locals = PyDict_New();
--- a/contrib/fuzz/revlog.cc	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/fuzz/revlog.cc	Mon Feb 04 20:35:21 2019 +0300
@@ -19,6 +19,11 @@
 for inline in (True, False):
     try:
         index, cache = parse_index2(data, inline)
+        index.slicechunktodensity(list(range(len(index))), 0.5, 262144)
+        for rev in range(len(index)):
+            node = index[rev][7]
+            partial = index.shortest(node)
+            index.partialmatch(node[:partial])
     except Exception as e:
         pass
         # uncomment this print if you're editing this Python code
@@ -31,6 +36,11 @@
 
 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
 {
+	// Don't allow fuzzer inputs larger than 60k, since we'll just bog
+	// down and not accomplish much.
+	if (Size > 60000) {
+		return 0;
+	}
 	PyObject *text =
 	    PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
 	PyObject *locals = PyDict_New();
--- a/contrib/perf.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/perf.py	Mon Feb 04 20:35:21 2019 +0300
@@ -535,13 +535,15 @@
 
 @command(b'perfheads', formatteropts)
 def perfheads(ui, repo, **opts):
+    """benchmark the computation of a changelog heads"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     cl = repo.changelog
+    def s():
+        clearcaches(cl)
     def d():
         len(cl.headrevs())
-        clearcaches(cl)
-    timer(d)
+    timer(d, setup=s)
     fm.end()
 
 @command(b'perftags', formatteropts+
@@ -911,9 +913,7 @@
         raise error.Abort((b'default repository not configured!'),
                           hint=(b"see 'hg help config.paths'"))
     dest = path.pushloc or path.loc
-    branches = (path.branch, opts.get(b'branch') or [])
     ui.status((b'analysing phase of %s\n') % util.hidepassword(dest))
-    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
     other = hg.peer(repo, opts, dest)
 
     # easier to perform discovery through the operation
@@ -1014,18 +1014,44 @@
     fm.end()
 
 @command(b'perfindex', [
-            (b'', b'rev', b'', b'revision to be looked up (default tip)'),
+            (b'', b'rev', [], b'revision to be looked up (default tip)'),
+            (b'', b'no-lookup', None, b'do not look up revisions after creation'),
          ] + formatteropts)
 def perfindex(ui, repo, **opts):
+    """benchmark index creation time followed by a lookup
+
+    The default is to look `tip` up. Depending on the index implementation,
+    the revision looked up can matters. For example, an implementation
+    scanning the index will have a faster lookup time for `--rev tip` than for
+    `--rev 0`. The number of looked up revisions and their order can also
+    matters.
+
+    Example of useful set to test:
+    * tip
+    * 0
+    * -10:
+    * :10
+    * -10: + :10
+    * :10: + -10:
+    * -10000:
+    * -10000: + 0
+
+    It is not currently possible to check for lookup of a missing node. For
+    deeper lookup benchmarking, checkout the `perfnodemap` command."""
     import mercurial.revlog
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
-    if opts[b'rev'] is None:
-        n = repo[b"tip"].node()
+    if opts[b'no_lookup']:
+        if opts['rev']:
+            raise error.Abort('--no-lookup and --rev are mutually exclusive')
+        nodes = []
+    elif not opts[b'rev']:
+        nodes = [repo[b"tip"].node()]
     else:
-        rev = scmutil.revsingle(repo, opts[b'rev'])
-        n = repo[rev].node()
+        revs = scmutil.revrange(repo, opts[b'rev'])
+        cl = repo.changelog
+        nodes = [cl.node(r) for r in revs]
 
     unfi = repo.unfiltered()
     # find the filecache func directly
@@ -1036,7 +1062,67 @@
         clearchangelog(unfi)
     def d():
         cl = makecl(unfi)
-        cl.rev(n)
+        for n in nodes:
+            cl.rev(n)
+    timer(d, setup=setup)
+    fm.end()
+
+@command(b'perfnodemap', [
+          (b'', b'rev', [], b'revision to be looked up (default tip)'),
+          (b'', b'clear-caches', True, b'clear revlog cache between calls'),
+    ] + formatteropts)
+def perfnodemap(ui, repo, **opts):
+    """benchmark the time necessary to look up revision from a cold nodemap
+
+    Depending on the implementation, the amount and order of revision we look
+    up can varies. Example of useful set to test:
+    * tip
+    * 0
+    * -10:
+    * :10
+    * -10: + :10
+    * :10: + -10:
+    * -10000:
+    * -10000: + 0
+
+    The command currently focuses on valid binary lookups. Benchmarking
+    hex lookup, prefix lookup and missing lookup would also be valuable.
+    """
+    import mercurial.revlog
+    opts = _byteskwargs(opts)
+    timer, fm = gettimer(ui, opts)
+    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
+
+    unfi = repo.unfiltered()
+    clearcaches = opts['clear_caches']
+    # find the filecache func directly
+    # This avoid polluting the benchmark with the filecache logic
+    makecl = unfi.__class__.changelog.func
+    if not opts[b'rev']:
+        raise error.Abort('use --rev to specify revisions to look up')
+    revs = scmutil.revrange(repo, opts[b'rev'])
+    cl = repo.changelog
+    nodes = [cl.node(r) for r in revs]
+
+    # use a list to pass reference to a nodemap from one closure to the next
+    nodeget = [None]
+    def setnodeget():
+        # probably not necessary, but for good measure
+        clearchangelog(unfi)
+        nodeget[0] = makecl(unfi).nodemap.get
+
+    def d():
+        get = nodeget[0]
+        for n in nodes:
+            get(n)
+
+    setup = None
+    if clearcaches:
+        def setup():
+            setnodeget()
+    else:
+        setnodeget()
+        d() # prewarm the data structure
     timer(d, setup=setup)
     fm.end()
 
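Editor's note: the perfheads, perfindex and perfnodemap changes above all hinge on passing a separate setup callback to the timer so that cache clearing is excluded from the measured time. The following standalone Python sketch illustrates that split; timer(), FakeChangelog, s() and d() are simplified stand-ins written for this note, not contrib/perf.py's actual gettimer machinery.

import time

def timer(func, setup=None, runs=5):
    """Time func over several runs; setup runs outside the measured window."""
    results = []
    for _ in range(runs):
        if setup is not None:
            setup()                  # e.g. clear caches; not measured
        start = time.perf_counter()
        func()                       # only this call is measured
        results.append(time.perf_counter() - start)
    return min(results)

class FakeChangelog:
    """Hypothetical stand-in for a changelog with a heads cache."""
    def __init__(self, nrevs):
        self._nrevs = nrevs
        self._heads = None
    def clearcaches(self):
        self._heads = None
    def headrevs(self):
        if self._heads is None:
            self._heads = [self._nrevs - 1]   # pretend this is expensive
        return self._heads

cl = FakeChangelog(100000)

def s():
    cl.clearcaches()

def d():
    len(cl.headrevs())

print('best of 5 runs: %.6fs' % timer(d, setup=s))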
--- a/contrib/python3-whitelist	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/python3-whitelist	Mon Feb 04 20:35:21 2019 +0300
@@ -5,6 +5,7 @@
 test-absorb-rename.t
 test-absorb-strip.t
 test-absorb.t
+test-acl.t
 test-add.t
 test-addremove-similar.t
 test-addremove.t
@@ -14,6 +15,7 @@
 test-ancestor.py
 test-annotate.py
 test-annotate.t
+test-arbitraryfilectx.t
 test-archive-symlinks.t
 test-archive.t
 test-atomictempfile.py
@@ -25,6 +27,7 @@
 test-bad-extension.t
 test-bad-pull.t
 test-basic.t
+test-batching.py
 test-bdiff.py
 test-bheads.t
 test-bisect.t
@@ -42,6 +45,7 @@
 test-branch-option.t
 test-branch-tag-confict.t
 test-branches.t
+test-bugzilla.t
 test-bundle-phases.t
 test-bundle-r.t
 test-bundle-type.t
@@ -54,7 +58,9 @@
 test-bundle2-remote-changegroup.t
 test-cache-abuse.t
 test-cappedreader.py
+test-casecollision-merge.t
 test-casecollision.t
+test-casefolding.t
 test-cat.t
 test-cbor.py
 test-censor.t
@@ -111,11 +117,14 @@
 test-convert-cvsnt-mergepoints.t
 test-convert-datesort.t
 test-convert-filemap.t
+test-convert-git.t
 test-convert-hg-sink.t
 test-convert-hg-source.t
 test-convert-hg-startrev.t
 test-convert-splicemap.t
+test-convert-svn-sink.t
 test-convert-tagsbranch-topology.t
+test-convert.t
 test-copy-move-merge.t
 test-copy.t
 test-copytrace-heuristics.t
@@ -127,6 +136,7 @@
 test-debugindexdot.t
 test-debugrename.t
 test-default-push.t
+test-demandimport.py
 test-diff-antipatience.t
 test-diff-binary-file.t
 test-diff-change.t
@@ -149,6 +159,7 @@
 test-dirstate-race.t
 test-dirstate.t
 test-dispatch.py
+test-dispatch.t
 test-doctest.py
 test-double-merge.t
 test-drawdag.t
@@ -159,6 +170,7 @@
 test-empty-group.t
 test-empty.t
 test-encode.t
+test-encoding-align.t
 test-encoding-func.py
 test-encoding-textwrap.t
 test-encoding.t
@@ -198,6 +210,7 @@
 test-extdata.t
 test-extdiff.t
 test-extension-timing.t
+test-extension.t
 test-extensions-afterloaded.t
 test-extensions-wrapfunction.py
 test-extra-filelog-entry.t
@@ -217,6 +230,7 @@
 test-fileset.t
 test-fix-topology.t
 test-fix.t
+test-flagprocessor.t
 test-flags.t
 test-fncache.t
 test-gendoc-da.t
@@ -235,6 +249,7 @@
 test-generaldelta.t
 test-getbundle.t
 test-git-export.t
+test-githelp.t
 test-globalopts.t
 test-glog-beautifygraph.t
 test-glog-topological.t
@@ -251,17 +266,24 @@
 test-hgk.t
 test-hgrc.t
 test-hgweb-annotate-whitespace.t
+test-hgweb-auth.py
 test-hgweb-bundle.t
+test-hgweb-commands.t
 test-hgweb-csp.t
 test-hgweb-descend-empties.t
 test-hgweb-diffs.t
 test-hgweb-empty.t
 test-hgweb-filelog.t
+test-hgweb-json.t
+test-hgweb-no-path-info.t
+test-hgweb-no-request-uri.t
 test-hgweb-non-interactive.t
 test-hgweb-raw.t
 test-hgweb-removed.t
+test-hgweb-symrev.t
 test-hgweb.t
 test-hgwebdir-paths.py
+test-hgwebdir.t
 test-hgwebdirsym.t
 test-histedit-arguments.t
 test-histedit-base.t
@@ -278,11 +300,16 @@
 test-histedit-obsolete.t
 test-histedit-outgoing.t
 test-histedit-templates.t
+test-http-api-httpv2.t
+test-http-api.t
+test-http-bad-server.t
 test-http-branchmap.t
 test-http-bundle1.t
 test-http-clone-r.t
 test-http-permissions.t
+test-http-protocol.t
 test-http.t
+test-https.t
 test-hybridencode.py
 test-i18n.t
 test-identify.t
@@ -290,6 +317,7 @@
 test-import-bypass.t
 test-import-context.t
 test-import-eol.t
+test-import-git.t
 test-import-merge.t
 test-import-unknown.t
 test-import.t
@@ -300,6 +328,7 @@
 test-infinitepush.t
 test-inherit-mode.t
 test-init.t
+test-install.t
 test-issue1089.t
 test-issue1102.t
 test-issue1175.t
@@ -335,11 +364,13 @@
 test-lfs-bundle.t
 test-lfs-largefiles.t
 test-lfs-pointer.py
+test-lfs-test-server.t
 test-lfs.t
 test-linelog.py
 test-linerange.py
 test-locate.t
 test-lock-badness.t
+test-lock.py
 test-log-exthook.t
 test-log-linerange.t
 test-log.t
@@ -381,11 +412,14 @@
 test-merge9.t
 test-minifileset.py
 test-minirst.py
+test-missing-capability.t
+test-mq-eol.t
 test-mq-git.t
 test-mq-guards.t
 test-mq-header-date.t
 test-mq-header-from.t
 test-mq-merge.t
+test-mq-missingfiles.t
 test-mq-pull-from-bundle.t
 test-mq-qclone-http.t
 test-mq-qdelete.t
@@ -393,6 +427,7 @@
 test-mq-qfold.t
 test-mq-qgoto.t
 test-mq-qimport-fail-cleanup.t
+test-mq-qimport.t
 test-mq-qnew.t
 test-mq-qpush-exact.t
 test-mq-qpush-fail.t
@@ -403,6 +438,7 @@
 test-mq-qrename.t
 test-mq-qsave.t
 test-mq-safety.t
+test-mq-subrepo-svn.t
 test-mq-subrepo.t
 test-mq-symlinks.t
 test-mq.t
@@ -438,8 +474,10 @@
 test-narrow.t
 test-nested-repo.t
 test-newbranch.t
+test-newcgi.t
 test-newercgi.t
 test-nointerrupt.t
+test-notify-changegroup.t
 test-obshistory.t
 test-obsmarker-template.t
 test-obsmarkers-effectflag.t
@@ -451,11 +489,13 @@
 test-obsolete-divergent.t
 test-obsolete-tag-cache.t
 test-obsolete.t
+test-oldcgi.t
 test-origbackup-conflict.t
 test-pager-legacy.t
 test-pager.t
 test-parents.t
 test-parse-date.t
+test-parseindex.t
 test-parseindex2.py
 test-patch-offset.t
 test-patch.t
@@ -474,6 +514,7 @@
 test-progress.t
 test-propertycache.py
 test-pull-branch.t
+test-pull-bundle.t
 test-pull-http.t
 test-pull-permission.t
 test-pull-pull-corruption.t
@@ -557,7 +598,11 @@
 test-remotefilelog-cacheprocess.t
 test-remotefilelog-clone-tree.t
 test-remotefilelog-clone.t
+test-remotefilelog-corrupt-cache.t
+test-remotefilelog-datapack.py
+test-remotefilelog-gc.t
 test-remotefilelog-gcrepack.t
+test-remotefilelog-histpack.py
 test-remotefilelog-http.t
 test-remotefilelog-keepset.t
 test-remotefilelog-local.t
@@ -567,6 +612,8 @@
 test-remotefilelog-permisssions.t
 test-remotefilelog-prefetch.t
 test-remotefilelog-pull-noshallow.t
+test-remotefilelog-repack-fast.t
+test-remotefilelog-repack.t
 test-remotefilelog-share.t
 test-remotefilelog-sparse.t
 test-remotefilelog-tags.t
@@ -597,6 +644,7 @@
 test-revset-dirstate-parents.t
 test-revset-legacy-lookup.t
 test-revset-outgoing.t
+test-revset2.t
 test-rollback.t
 test-run-tests.py
 test-run-tests.t
@@ -631,6 +679,7 @@
 test-ssh.t
 test-sshserver.py
 test-stack.t
+test-static-http.t
 test-status-color.t
 test-status-inprocess.py
 test-status-rev.t
@@ -642,10 +691,12 @@
 test-strip-cross.t
 test-strip.t
 test-subrepo-deep-nested-change.t
+test-subrepo-git.t
 test-subrepo-missing.t
 test-subrepo-paths.t
 test-subrepo-recursion.t
 test-subrepo-relative-path.t
+test-subrepo-svn.t
 test-subrepo.t
 test-symlink-os-yes-fs-no.py
 test-symlink-placeholder.t
@@ -658,7 +709,10 @@
 test-template-map.t
 test-tools.t
 test-transplant.t
+test-treediscovery-legacy.t
+test-treediscovery.t
 test-treemanifest.t
+test-trusted.py
 test-ui-color.py
 test-ui-config.py
 test-ui-verbosity.py
@@ -669,6 +723,7 @@
 test-unionrepo.t
 test-unrelated-pull.t
 test-up-local-change.t
+test-update-atomic.t
 test-update-branches.t
 test-update-dest.t
 test-update-issue1456.t
@@ -685,19 +740,25 @@
 test-walkrepo.py
 test-websub.t
 test-win32text.t
+test-wireproto-caching.t
 test-wireproto-clientreactor.py
 test-wireproto-command-branchmap.t
+test-wireproto-command-capabilities.t
 test-wireproto-command-changesetdata.t
 test-wireproto-command-filedata.t
 test-wireproto-command-filesdata.t
 test-wireproto-command-heads.t
+test-wireproto-command-known.t
 test-wireproto-command-listkeys.t
 test-wireproto-command-lookup.t
 test-wireproto-command-manifestdata.t
 test-wireproto-command-pushkey.t
 test-wireproto-command-rawstorefiledata.t
+test-wireproto-content-redirects.t
+test-wireproto-exchangev2.t
 test-wireproto-framing.py
 test-wireproto-serverreactor.py
 test-wireproto.py
+test-wireproto.t
 test-wsgirequest.py
 test-xdg.t
--- a/contrib/relnotes	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/relnotes	Mon Feb 04 20:35:21 2019 +0300
@@ -14,6 +14,7 @@
     r"\(issue": 100,
     r"\(BC\)": 100,
     r"\(API\)": 100,
+    r"\(SEC\)": 100,
     # core commands, bump up
     r"(commit|files|log|pull|push|patch|status|tag|summary)(|s|es):": 20,
     r"(annotate|alias|branch|bookmark|clone|graft|import|verify).*:": 20,
@@ -21,6 +22,7 @@
     r"(mq|shelve|rebase):": 20,
     # newsy
     r": deprecate": 20,
+    r": new.*(extension|flag|module)": 10,
     r"( ability|command|feature|option|support)": 10,
     # experimental
     r"hg-experimental": 20,
@@ -29,22 +31,23 @@
     # bug-like?
     r"(fix|don't break|improve)": 7,
     r"(not|n't|avoid|fix|prevent).*crash": 10,
+    r"vulnerab": 10,
     # boring stuff, bump down
     r"^contrib": -5,
     r"debug": -5,
     r"help": -5,
+    r"minor": -5,
     r"(doc|metavar|bundle2|obsolete|obsmarker|rpm|setup|debug\S+:)": -15,
     r"(check-code|check-commit|check-config|import-checker)": -20,
     r"(flake8|lintian|pyflakes|pylint)": -20,
     # cleanups and refactoring
-    r"(cleanup|white ?space|spelling|quoting)": -20,
+    r"(clean ?up|white ?space|spelling|quoting)": -20,
     r"(flatten|dedent|indent|nesting|unnest)": -20,
     r"(typo|hint|note|comment|TODO|FIXME)": -20,
     r"(style:|convention|one-?liner)": -20,
-    r"_": -10,
     r"(argument|absolute_import|attribute|assignment|mutable)": -15,
     r"(scope|True|False)": -10,
-    r"(unused|useless|unnecessary|superfluous|duplicate|deprecated)": -10,
+    r"(unused|useless|unnecessar|superfluous|duplicate|deprecated)": -10,
     r"(redundant|pointless|confusing|uninitialized|meaningless|dead)": -10,
     r": (drop|remove|delete|rip out)": -10,
     r": (inherit|rename|simplify|naming|inline)": -10,
@@ -54,9 +57,12 @@
     r": (move|extract) .* (to|into|from|out of)": -20,
     r": implement ": -5,
     r": use .* implementation": -20,
+    r": use .* instead of": -20,
+    # code
+    r"_": -10,
+    r"__": -5,
+    r"\(\)": -5,
     r"\S\S\S+\.\S\S\S\S+": -5,
-    r": use .* instead of": -20,
-    r"__": -5,
     # dumb keywords
     r"\S+/\S+:": -10,
     r"\S+\.\S+:": -10,
@@ -92,6 +98,15 @@
     (r"shelve|unshelve", "extensions"),
 ]
 
+def wikify(desc):
+    desc = desc.replace("(issue", "(Bts:issue")
+    desc = re.sub(r"\b([0-9a-f]{12})\b", r"Cset:\1", desc)
+    # stop ParseError from being recognized as a (nonexistent) wiki page
+    desc = re.sub(r" ([A-Z][a-z]+[A-Z][a-z]+)\b", r" !\1", desc)
+    # prevent wiki markup of magic methods
+    desc = re.sub(r"\b(\S*__\S*)\b", r"`\1`", desc)
+    return desc
+
 def main():
     desc = "example: %(prog)s 4.7.2 --stoprev 4.8rc0"
     ap = argparse.ArgumentParser(description=desc)
@@ -148,10 +163,8 @@
             if re.search(rule, desc):
                 score += val
 
-        desc = desc.replace("(issue", "(Bts:issue")
-
         if score >= cutoff:
-            commits.append(desc)
+            commits.append(wikify(desc))
     # Group unflagged notes.
     groups = {}
     bcs = []
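Editor's note: the wikify() helper added to contrib/relnotes above rewrites commit descriptions for the wiki. A standalone illustration of its effect is shown below; the sample description is made up for demonstration, and the function body is copied from the patch.

import re

def wikify(desc):
    desc = desc.replace("(issue", "(Bts:issue")
    desc = re.sub(r"\b([0-9a-f]{12})\b", r"Cset:\1", desc)
    # stop ParseError from being recognized as a (nonexistent) wiki page
    desc = re.sub(r" ([A-Z][a-z]+[A-Z][a-z]+)\b", r" !\1", desc)
    # prevent wiki markup of magic methods
    desc = re.sub(r"\b(\S*__\S*)\b", r"`\1`", desc)
    return desc

print(wikify("revlog: fix ParseError in __len__ for abcdef012345 (issue6789)"))
# prints: revlog: fix !ParseError in `__len__` for Cset:abcdef012345 (Bts:issue6789)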
--- a/contrib/synthrepo.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/contrib/synthrepo.py	Mon Feb 04 20:35:21 2019 +0300
@@ -349,7 +349,7 @@
     # to the modeled directory structure.
     initcount = int(opts['initfiles'])
     if initcount and initdirs:
-        pctx = repo[None].parents()[0]
+        pctx = repo['.']
         dirs = set(pctx.dirs())
         files = {}
 
@@ -450,7 +450,6 @@
                 path = fctx.path()
                 changes[path] = '\n'.join(lines) + '\n'
             for __ in xrange(pick(filesremoved)):
-                path = random.choice(mfk)
                 for __ in xrange(10):
                     path = random.choice(mfk)
                     if path not in changes:
--- a/doc/check-seclevel.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/doc/check-seclevel.py	Mon Feb 04 20:35:21 2019 +0300
@@ -163,8 +163,8 @@
     (options, args) = optparser.parse_args()
 
     ui = uimod.ui.load()
-    ui.setconfig('ui', 'verbose', options.verbose, '--verbose')
-    ui.setconfig('ui', 'debug', options.debug, '--debug')
+    ui.setconfig(b'ui', b'verbose', options.verbose, b'--verbose')
+    ui.setconfig(b'ui', b'debug', options.debug, b'--debug')
 
     if options.file:
         if checkfile(ui, options.file, options.initlevel):
--- a/hgext/absorb.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/absorb.py	Mon Feb 04 20:35:21 2019 +0300
@@ -726,7 +726,6 @@
                 # nothing changed, nothing commited
                 nextp1 = ctx
                 continue
-            msg = ''
             if self._willbecomenoop(memworkingcopy, ctx, nextp1):
                 # changeset is no longer necessary
                 self.replacemap[ctx.node()] = None
--- a/hgext/blackbox.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/blackbox.py	Mon Feb 04 20:35:21 2019 +0300
@@ -118,7 +118,6 @@
         date = dateutil.datestr(default, ui.config('blackbox', 'date-format'))
         user = procutil.getuser()
         pid = '%d' % procutil.getpid()
-        rev = '(unknown)'
         changed = ''
         ctx = self._repo[None]
         parents = ctx.parents()
--- a/hgext/bugzilla.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/bugzilla.py	Mon Feb 04 20:35:21 2019 +0300
@@ -303,6 +303,7 @@
     error,
     logcmdutil,
     mail,
+    pycompat,
     registrar,
     url,
     util,
@@ -342,10 +343,10 @@
     default='bugs',
 )
 configitem('bugzilla', 'fixregexp',
-    default=(r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
-             r'(?:nos?\.?|num(?:ber)?s?)?\s*'
-             r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
-             r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
+    default=(br'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
+             br'(?:nos?\.?|num(?:ber)?s?)?\s*'
+             br'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
+             br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
 )
 configitem('bugzilla', 'fixresolution',
     default='FIXED',
@@ -363,9 +364,9 @@
     default=None,
 )
 configitem('bugzilla', 'regexp',
-    default=(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
-             r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
-             r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
+    default=(br'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
+             br'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
+             br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
 )
 configitem('bugzilla', 'strip',
     default=0,
@@ -733,7 +734,7 @@
         c = self.bzproxy.Bug.comments({'ids': [id],
                                        'include_fields': ['text'],
                                        'token': self.bztoken})
-        return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])
+        return ''.join([t['text'] for t in c['bugs']['%d' % id]['comments']])
 
     def filter_real_bug_ids(self, bugs):
         probe = self.bzproxy.Bug.get({'ids': sorted(bugs.keys()),
@@ -804,11 +805,11 @@
 
     def makecommandline(self, fieldname, value):
         if self.bzvermajor >= 4:
-            return "@%s %s" % (fieldname, str(value))
+            return "@%s %s" % (fieldname, pycompat.bytestr(value))
         else:
             if fieldname == "id":
                 fieldname = "bug_id"
-            return "@%s = %s" % (fieldname, str(value))
+            return "@%s = %s" % (fieldname, pycompat.bytestr(value))
 
     def send_bug_modify_email(self, bugid, commands, comment, committer):
         '''send modification message to Bugzilla bug via email.
@@ -873,7 +874,7 @@
         self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
 
     def apiurl(self, targets, include_fields=None):
-        url = '/'.join([self.bzroot] + [str(t) for t in targets])
+        url = '/'.join([self.bzroot] + [pycompat.bytestr(t) for t in targets])
         qv = {}
         if self.apikey:
             qv['api_key'] = self.apikey
@@ -938,7 +939,7 @@
         for bugid in bugs.keys():
             burl = self.apiurl(('bug', bugid, 'comment'), include_fields='text')
             result = self._fetch(burl)
-            comments = result['bugs'][str(bugid)]['comments']
+            comments = result['bugs'][pycompat.bytestr(bugid)]['comments']
             if any(sn in c['text'] for c in comments):
                 self.ui.status(_('bug %d already knows about changeset %s\n') %
                                (bugid, sn))
@@ -1011,7 +1012,7 @@
             self.ui.config('bugzilla', 'regexp'), re.IGNORECASE)
         self.fix_re = re.compile(
             self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE)
-        self.split_re = re.compile(r'\D+')
+        self.split_re = re.compile(br'\D+')
 
     def find_bugs(self, ctx):
         '''return bugs dictionary created from commit comment.
@@ -1098,7 +1099,7 @@
         t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
         self.ui.pushbuffer()
         t.show(ctx, changes=ctx.changeset(),
-               bug=str(bugid),
+               bug=pycompat.bytestr(bugid),
                hgweb=self.ui.config('web', 'baseurl'),
                root=self.repo.root,
                webroot=webroot(self.repo.root))
--- a/hgext/convert/convcmd.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/convert/convcmd.py	Mon Feb 04 20:35:21 2019 +0300
@@ -123,7 +123,7 @@
             exceptions.append(inst)
     if not ui.quiet:
         for inst in exceptions:
-            ui.write("%s\n" % pycompat.bytestr(inst))
+            ui.write("%s\n" % pycompat.bytestr(inst.args[0]))
     raise error.Abort(_('%s: missing or unsupported repository') % path)
 
 def convertsink(ui, path, type):
--- a/hgext/convert/cvs.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/convert/cvs.py	Mon Feb 04 20:35:21 2019 +0300
@@ -76,7 +76,6 @@
         d = encoding.getcwd()
         try:
             os.chdir(self.path)
-            id = None
 
             cache = 'update'
             if not self.ui.configbool('convert', 'cvsps.cache'):
@@ -219,7 +218,7 @@
         if "UseUnchanged" in r:
             self.writep.write("UseUnchanged\n")
             self.writep.flush()
-            r = self.readp.readline()
+            self.readp.readline()
 
     def getheads(self):
         self._parse()
--- a/hgext/convert/git.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/convert/git.py	Mon Feb 04 20:35:21 2019 +0300
@@ -13,6 +13,7 @@
     config,
     error,
     node as nodemod,
+    pycompat,
 )
 
 from . import (
@@ -175,7 +176,8 @@
         self.catfilepipe[0].flush()
         info = self.catfilepipe[1].readline().split()
         if info[1] != ftype:
-            raise error.Abort(_('cannot read %r object at %s') % (ftype, rev))
+            raise error.Abort(_('cannot read %r object at %s') % (
+                pycompat.bytestr(ftype), rev))
         size = int(info[2])
         data = self.catfilepipe[1].read(size)
         if len(data) < size:
@@ -294,7 +296,7 @@
             if not entry:
                 if not l.startswith(':'):
                     continue
-                entry = l.split()
+                entry = tuple(pycompat.bytestr(p) for p in l.split())
                 continue
             f = l
             if entry[4][0] == 'C':
--- a/hgext/convert/hg.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/convert/hg.py	Mon Feb 04 20:35:21 2019 +0300
@@ -105,10 +105,6 @@
         if not branch:
             branch = 'default'
         pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
-        if pbranches:
-            pbranch = pbranches[0][1]
-        else:
-            pbranch = 'default'
 
         branchpath = os.path.join(self.path, branch)
         if setbranch:
--- a/hgext/convert/monotone.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/convert/monotone.py	Mon Feb 04 20:35:21 2019 +0300
@@ -93,16 +93,16 @@
         kwargs = pycompat.byteskwargs(kwargs)
         command = []
         for k, v in kwargs.iteritems():
-            command.append("%s:%s" % (len(k), k))
+            command.append("%d:%s" % (len(k), k))
             if v:
-                command.append("%s:%s" % (len(v), v))
+                command.append("%d:%s" % (len(v), v))
         if command:
             command.insert(0, 'o')
             command.append('e')
 
         command.append('l')
         for arg in args:
-            command += "%d:%s" % (len(arg), arg)
+            command.append("%d:%s" % (len(arg), arg))
         command.append('e')
         command = ''.join(command)
 
@@ -138,7 +138,7 @@
                 raise error.Abort(_('bad mtn packet - no end of packet size'))
             lengthstr += read
         try:
-            length = long(lengthstr[:-1])
+            length = pycompat.long(lengthstr[:-1])
         except TypeError:
             raise error.Abort(_('bad mtn packet - bad packet size %s')
                 % lengthstr)
@@ -335,7 +335,6 @@
 
     def before(self):
         # Check if we have a new enough version to use automate stdio
-        version = 0.0
         try:
             versionstr = self.mtnrunsingle("interface_version")
             version = float(versionstr)
--- a/hgext/convert/subversion.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/convert/subversion.py	Mon Feb 04 20:35:21 2019 +0300
@@ -984,7 +984,6 @@
         # TODO: ra.get_file transmits the whole file instead of diffs.
         if file in self.removed:
             return None, None
-        mode = ''
         try:
             new_module, revnum = revsplit(rev)[1:]
             if self.module != new_module:
@@ -1183,12 +1182,12 @@
         m = set()
         output = self.run0('ls', recursive=True, xml=True)
         doc = xml.dom.minidom.parseString(output)
-        for e in doc.getElementsByTagName('entry'):
+        for e in doc.getElementsByTagName(r'entry'):
             for n in e.childNodes:
-                if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
+                if n.nodeType != n.ELEMENT_NODE or n.tagName != r'name':
                     continue
-                name = ''.join(c.data for c in n.childNodes
-                               if c.nodeType == c.TEXT_NODE)
+                name = r''.join(c.data for c in n.childNodes
+                                if c.nodeType == c.TEXT_NODE)
                 # Entries are compared with names coming from
                 # mercurial, so bytes with undefined encoding. Our
                 # best bet is to assume they are in local
--- a/hgext/extdiff.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/extdiff.py	Mon Feb 04 20:35:21 2019 +0300
@@ -80,6 +80,7 @@
 from mercurial import (
     archival,
     cmdutil,
+    encoding,
     error,
     filemerge,
     formatter,
@@ -175,6 +176,65 @@
         cmdline += ' $parent1 $child'
     return re.sub(regex, quote, cmdline)
 
+def _runperfilediff(cmdline, repo_root, ui, do3way, confirm,
+                    commonfiles, tmproot, dir1a, dir1b,
+                    dir2root, dir2,
+                    rev1a, rev1b, rev2):
+    # Note that we need to sort the list of files because it was
+    # built in an "unstable" way and it's annoying to get files in a
+    # random order, especially when "confirm" mode is enabled.
+    totalfiles = len(commonfiles)
+    for idx, commonfile in enumerate(sorted(commonfiles)):
+        path1a = os.path.join(tmproot, dir1a, commonfile)
+        label1a = commonfile + rev1a
+        if not os.path.isfile(path1a):
+            path1a = os.devnull
+
+        path1b = ''
+        label1b = ''
+        if do3way:
+            path1b = os.path.join(tmproot, dir1b, commonfile)
+            label1b = commonfile + rev1b
+            if not os.path.isfile(path1b):
+                path1b = os.devnull
+
+        path2 = os.path.join(dir2root, dir2, commonfile)
+        label2 = commonfile + rev2
+
+        if confirm:
+            # Prompt before showing this diff
+            difffiles = _('diff %s (%d of %d)') % (commonfile, idx + 1,
+                                                   totalfiles)
+            responses = _('[Yns?]'
+                          '$$ &Yes, show diff'
+                          '$$ &No, skip this diff'
+                          '$$ &Skip remaining diffs'
+                          '$$ &? (display help)')
+            r = ui.promptchoice('%s %s' % (difffiles, responses))
+            if r == 3: # ?
+                while r == 3:
+                    for c, t in ui.extractchoices(responses)[1]:
+                        ui.write('%s - %s\n' % (c, encoding.lower(t)))
+                    r = ui.promptchoice('%s %s' % (difffiles, responses))
+            if r == 0: # yes
+                pass
+            elif r == 1: # no
+                continue
+            elif r == 2: # skip
+                break
+
+        curcmdline = formatcmdline(
+            cmdline, repo_root, do3way=do3way,
+            parent1=path1a, plabel1=label1a,
+            parent2=path1b, plabel2=label1b,
+            child=path2, clabel=label2)
+        ui.debug('running %r in %s\n' % (pycompat.bytestr(curcmdline),
+                                         tmproot))
+
+        # Run the comparison program and wait for it to exit
+        # before we show the next file.
+        ui.system(curcmdline, cwd=tmproot, blockedtag='extdiff')
+
 def dodiff(ui, repo, cmdline, pats, opts):
     '''Do the actual diff:
 
@@ -201,6 +261,9 @@
         else:
             ctx1b = repo[nullid]
 
+    perfile = opts.get('per_file')
+    confirm = opts.get('confirm')
+
     node1a = ctx1a.node()
     node1b = ctx1b.node()
     node2 = ctx2.node()
@@ -217,6 +280,8 @@
     if opts.get('patch'):
         if subrepos:
             raise error.Abort(_('--patch cannot be used with --subrepos'))
+        if perfile:
+            raise error.Abort(_('--patch cannot be used with --per-file'))
         if node2 is None:
             raise error.Abort(_('--patch requires two revisions'))
     else:
@@ -304,15 +369,23 @@
             label1b = None
             fnsandstat = []
 
-        # Run the external tool on the 2 temp directories or the patches
-        cmdline = formatcmdline(
-            cmdline, repo.root, do3way=do3way,
-            parent1=dir1a, plabel1=label1a,
-            parent2=dir1b, plabel2=label1b,
-            child=dir2, clabel=label2)
-        ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline),
-                                         tmproot))
-        ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
+        if not perfile:
+            # Run the external tool on the 2 temp directories or the patches
+            cmdline = formatcmdline(
+                cmdline, repo.root, do3way=do3way,
+                parent1=dir1a, plabel1=label1a,
+                parent2=dir1b, plabel2=label1b,
+                child=dir2, clabel=label2)
+            ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline),
+                                             tmproot))
+            ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
+        else:
+            # Run the external tool once for each pair of files
+            _runperfilediff(
+                cmdline, repo.root, ui, do3way=do3way, confirm=confirm,
+                commonfiles=common, tmproot=tmproot, dir1a=dir1a, dir1b=dir1b,
+                dir2root=dir2root, dir2=dir2,
+                rev1a=rev1a, rev1b=rev1b, rev2=rev2)
 
         for copy_fn, working_fn, st in fnsandstat:
             cpstat = os.lstat(copy_fn)
@@ -340,6 +413,10 @@
      _('pass option to comparison program'), _('OPT')),
     ('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV')),
+    ('', 'per-file', False,
+     _('compare each file instead of revision snapshots')),
+    ('', 'confirm', False,
+     _('prompt user before each external program invocation')),
     ('', 'patch', None, _('compare patches for two revisions'))
     ] + cmdutil.walkopts + cmdutil.subrepoopts
 
@@ -357,15 +434,23 @@
     default options "-Npru".
 
     To select a different program, use the -p/--program option. The
-    program will be passed the names of two directories to compare. To
-    pass additional options to the program, use -o/--option. These
-    will be passed before the names of the directories to compare.
+    program will be passed the names of two directories to compare,
+    unless the --per-file option is specified (see below). To pass
+    additional options to the program, use -o/--option. These will be
+    passed before the names of the directories or files to compare.
 
     When two revision arguments are given, then changes are shown
     between those revisions. If only one revision is specified then
     that revision is compared to the working directory, and, when no
     revisions are specified, the working directory files are compared
-    to its parent.'''
+    to its parent.
+
+    The --per-file option runs the external program repeatedly on each
+    file to diff, instead of once on two directories.
+
+    The --confirm option will prompt the user before each invocation of
+    the external program. It is ignored if --per-file isn't specified.
+    '''
     opts = pycompat.byteskwargs(opts)
     program = opts.get('program')
     option = opts.get('option')
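Editor's note: the --confirm loop above uses a '$$'-separated prompt string, where '&' marks each choice's shortcut key and promptchoice returns the chosen index (0 = show the diff, 1 = skip this file, 2 = skip the rest, 3 = display help). The sketch below uses a hypothetical extractchoices() written for this note, not Mercurial's ui implementation, to show how that string decomposes.

responses = ('[Yns?]'
             '$$ &Yes, show diff'
             '$$ &No, skip this diff'
             '$$ &Skip remaining diffs'
             '$$ &? (display help)')

def extractchoices(prompt):
    """Split a Mercurial-style prompt into (message, [(shortcut, label), ...])."""
    msg, *rest = prompt.split('$$')
    pairs = []
    for choice in rest:
        choice = choice.strip()
        idx = choice.index('&')          # '&' marks the shortcut character
        pairs.append((choice[idx + 1], choice[:idx] + choice[idx + 1:]))
    return msg, pairs

msg, pairs = extractchoices(responses)
print(msg)                               # [Yns?]
for key, label in pairs:
    print('%s - %s' % (key, label.lower()))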
--- a/hgext/fastannotate/formatter.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/fastannotate/formatter.py	Mon Feb 04 20:35:21 2019 +0300
@@ -38,8 +38,8 @@
         if self.opts.get('rev') == 'wdir()':
             orig = hexfunc
             hexfunc = lambda x: None if x is None else orig(x)
-            wnode = hexfunc(repo[None].p1().node()) + '+'
-            wrev = '%d' % repo[None].p1().rev()
+            wnode = hexfunc(repo['.'].node()) + '+'
+            wrev = '%d' % repo['.'].rev()
             wrevpad = ''
             if not opts.get('changeset'): # only show + if changeset is hidden
                 wrev += '+'
--- a/hgext/fastannotate/protocol.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/fastannotate/protocol.py	Mon Feb 04 20:35:21 2019 +0300
@@ -71,7 +71,6 @@
             for p in [actx.revmappath, actx.linelogpath]:
                 if not os.path.exists(p):
                     continue
-                content = ''
                 with open(p, 'rb') as f:
                     content = f.read()
                 vfsbaselen = len(repo.vfs.base + '/')
--- a/hgext/fastannotate/support.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/fastannotate/support.py	Mon Feb 04 20:35:21 2019 +0300
@@ -109,7 +109,6 @@
 
 def _remotefctxannotate(orig, self, follow=False, skiprevs=None, diffopts=None):
     # skipset: a set-like used to test if a fctx needs to be downloaded
-    skipset = None
     with context.fctxannotatecontext(self, follow, diffopts) as ac:
         skipset = revmap.revmap(ac.revmappath)
     return orig(self, follow, skiprevs=skiprevs, diffopts=diffopts,
--- a/hgext/fetch.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/fetch.py	Mon Feb 04 20:35:21 2019 +0300
@@ -68,7 +68,7 @@
     if date:
         opts['date'] = dateutil.parsedate(date)
 
-    parent, _p2 = repo.dirstate.parents()
+    parent = repo.dirstate.p1()
     branch = repo.dirstate.branch()
     try:
         branchnode = repo.branchtip(branch)
--- a/hgext/fsmonitor/__init__.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/fsmonitor/__init__.py	Mon Feb 04 20:35:21 2019 +0300
@@ -161,6 +161,9 @@
 configitem('fsmonitor', 'blacklistusers',
     default=list,
 )
+configitem('hgwatchman', 'verbose',
+    default=True,
+)
 configitem('experimental', 'fsmonitor.transaction_notify',
     default=False,
 )
@@ -172,11 +175,14 @@
 def _handleunavailable(ui, state, ex):
     """Exception handler for Watchman interaction exceptions"""
     if isinstance(ex, watchmanclient.Unavailable):
-        if ex.warn:
+        # experimental config: hgwatchman.verbose
+        if ex.warn and ui.configbool('hgwatchman', 'verbose'):
             ui.warn(str(ex) + '\n')
         if ex.invalidate:
             state.invalidate()
-        ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg)
+        # experimental config: hgwatchman.verbose
+        if ui.configbool('hgwatchman', 'verbose'):
+            ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg)
     else:
         ui.log('fsmonitor', 'Watchman exception: %s\n', ex)
 
@@ -240,24 +246,6 @@
         clock = 'c:0:0'
         notefiles = []
 
-    def fwarn(f, msg):
-        self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
-        return False
-
-    def badtype(mode):
-        kind = _('unknown')
-        if stat.S_ISCHR(mode):
-            kind = _('character device')
-        elif stat.S_ISBLK(mode):
-            kind = _('block device')
-        elif stat.S_ISFIFO(mode):
-            kind = _('fifo')
-        elif stat.S_ISSOCK(mode):
-            kind = _('socket')
-        elif stat.S_ISDIR(mode):
-            kind = _('directory')
-        return _('unsupported file type (type is %s)') % kind
-
     ignore = self._ignore
     dirignore = self._dirignore
     if unknown:
@@ -379,6 +367,9 @@
         fexists = entry['exists']
         kind = getkind(fmode)
 
+        if '/.hg/' in fname or fname.endswith('/.hg'):
+            return bail('nested-repo-detected')
+
         if not fexists:
             # if marked as deleted and we don't already have a change
             # record, mark it as deleted.  If we already have an entry
@@ -752,6 +743,14 @@
             repo, node, branchmerge, force, ancestor, mergeancestor,
             labels, matcher, **kwargs)
 
+def repo_has_depth_one_nested_repo(repo):
+    for f in repo.wvfs.listdir():
+        if os.path.isdir(os.path.join(repo.root, f, '.hg')):
+            msg = 'fsmonitor: sub-repository %r detected, fsmonitor disabled\n'
+            repo.ui.debug(msg % f)
+            return True
+    return False
+
 def reposetup(ui, repo):
     # We don't work with largefiles or inotify
     exts = extensions.enabled()
@@ -769,6 +768,9 @@
         if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'):
             return
 
+        if repo_has_depth_one_nested_repo(repo):
+            return
+
         fsmonitorstate = state.state(repo)
         if fsmonitorstate.mode == 'off':
             return
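
The nested-repository check added above amounts to looking one directory
level below the working copy root for another .hg; a standalone equivalent
(has_depth_one_nested_repo here is a hypothetical helper using plain os
calls instead of repo.wvfs)::

    import os

    def has_depth_one_nested_repo(root):
        # True if any immediate subdirectory of root is itself a
        # Mercurial repository (contains its own .hg directory)
        for entry in os.listdir(root):
            if os.path.isdir(os.path.join(root, entry, '.hg')):
                return True
        return False
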
--- a/hgext/fsmonitor/pywatchman/capabilities.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/fsmonitor/pywatchman/capabilities.py	Mon Feb 04 20:35:21 2019 +0300
@@ -62,7 +62,6 @@
     vers['capabilities'] = {}
     for name in opts['optional']:
         vers['capabilities'][name] = check(parsed_version, name)
-    failed = False
     for name in opts['required']:
         have = check(parsed_version, name)
         vers['capabilities'][name] = have
--- a/hgext/fsmonitor/pywatchman/pybser.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/fsmonitor/pywatchman/pybser.py	Mon Feb 04 20:35:21 2019 +0300
@@ -267,7 +267,7 @@
             key = key[3:]
         try:
             return self._values[self._keys.index(key)]
-        except ValueError as ex:
+        except ValueError:
             raise KeyError('_BunserDict has no key %s' % key)
 
     def __len__(self):
@@ -420,7 +420,6 @@
 
 
 def _pdu_info_helper(buf):
-    bser_version = -1
     if buf[0:2] == EMPTY_HEADER[0:2]:
         bser_version = 1
         bser_capabilities = 0
--- a/hgext/githelp.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/githelp.py	Mon Feb 04 20:35:21 2019 +0300
@@ -25,6 +25,7 @@
     encoding,
     error,
     fancyopts,
+    pycompat,
     registrar,
     scmutil,
 )
@@ -83,21 +84,22 @@
             args = fancyopts.fancyopts(list(args), cmdoptions, opts, True)
             break
         except getopt.GetoptError as ex:
-            flag = None
-            if "requires argument" in ex.msg:
+            if r"requires argument" in ex.msg:
                 raise
-            if ('--' + ex.opt) in ex.msg:
-                flag = '--' + ex.opt
-            elif ('-' + ex.opt) in ex.msg:
-                flag = '-' + ex.opt
+            if (r'--' + ex.opt) in ex.msg:
+                flag = '--' + pycompat.bytestr(ex.opt)
+            elif (r'-' + ex.opt) in ex.msg:
+                flag = '-' + pycompat.bytestr(ex.opt)
             else:
-                raise error.Abort(_("unknown option %s") % ex.opt)
+                raise error.Abort(_("unknown option %s") %
+                                  pycompat.bytestr(ex.opt))
             try:
                 args.remove(flag)
             except Exception:
                 msg = _("unknown option '%s' packed with other options")
                 hint = _("please try passing the option as its own flag: -%s")
-                raise error.Abort(msg % ex.opt, hint=hint % ex.opt)
+                raise error.Abort(msg % pycompat.bytestr(ex.opt),
+                                  hint=hint % pycompat.bytestr(ex.opt))
 
             ui.warn(_("ignoring unknown option %s\n") % flag)
 
@@ -119,7 +121,12 @@
             for k, values in sorted(self.opts.iteritems()):
                 for v in values:
                     if v:
-                        cmd += " %s %s" % (k, v)
+                        if isinstance(v, int):
+                            fmt = ' %s %d'
+                        else:
+                            fmt = ' %s %s'
+
+                        cmd += fmt % (k, v)
                     else:
                         cmd += " %s" % (k,)
         if self.args:
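
The githelp hunk above leans on the .opt and .msg attributes of
getopt.GetoptError to rebuild the offending flag; a quick standalone
demonstration (the exact message wording may differ between Python
versions)::

    import getopt

    try:
        getopt.getopt(['--frobnicate'], '', ['force'])
    except getopt.GetoptError as ex:
        # ex.opt is the bare option name, ex.msg the readable reason,
        # e.g. 'frobnicate' / 'option --frobnicate not recognized'
        print(ex.opt, '/', ex.msg)
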
--- a/hgext/histedit.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/histedit.py	Mon Feb 04 20:35:21 2019 +0300
@@ -156,6 +156,15 @@
   [histedit]
   linelen = 120      # truncate rule lines at 120 characters
 
+The summary of a change can be customized as well::
+
+  [histedit]
+  summary-template = '{rev} {bookmarks} {desc|firstline}'
+
+The customized summary should be kept short enough that rule lines
+still fit within the configured line length. See the linelen setting
+above if that needs adjusting.
+
 ``hg histedit`` attempts to automatically choose an appropriate base
 revision to use. To change which base revision is used, define a
 revset in your configuration file::
@@ -248,6 +257,8 @@
 configitem('ui', 'interface.histedit',
     default=None,
 )
+configitem('histedit', 'summary-template',
+           default='{rev} {desc|firstline}')
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -480,8 +491,11 @@
         <hash> <rev> <summary>
         """
         ctx = self.repo[self.node]
-        summary = _getsummary(ctx)
-        line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary)
+        ui = self.repo.ui
+        summary = cmdutil.rendertemplate(
+            ctx, ui.config('histedit', 'summary-template')) or ''
+        summary = summary.splitlines()[0]
+        line = '%s %s %s' % (self.verb, ctx, summary)
         # trim to 75 columns by default so it's not stupidly wide in my editor
         # (the 5 more are left for verb)
         maxlen = self.repo.ui.configint('histedit', 'linelen')
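
To make the effect of histedit.summary-template concrete, here is a
minimal stand-in for the rule-line rendering shown above (renderrule and
its arguments are invented for illustration; the real code renders the
template through Mercurial's templater via cmdutil.rendertemplate)::

    def renderrule(verb, shorthash, summary, linelen=80):
        # '<verb> <hash> <first line of rendered summary>', trimmed so
        # the rule line fits the configured histedit.linelen
        firstline = summary.splitlines()[0] if summary else ''
        line = '%s %s %s' % (verb, shorthash, firstline)
        return line[:linelen]

    # renderrule('pick', '29b62aeb769f', '8 [mybook] widen templates')
    # -> 'pick 29b62aeb769f 8 [mybook] widen templates'
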
@@ -575,7 +589,7 @@
 
 def applychanges(ui, repo, ctx, opts):
     """Merge changeset from ctx (only) in the current working directory"""
-    wcpar = repo.dirstate.parents()[0]
+    wcpar = repo.dirstate.p1()
     if ctx.p1().node() == wcpar:
         # edits are "in place" we do not need to make any merge,
         # just applies changes on parent for editing
@@ -608,7 +622,7 @@
         if not c.mutable():
             raise error.ParseError(
                 _("cannot fold into public change %s") % node.short(c.node()))
-    base = firstctx.parents()[0]
+    base = firstctx.p1()
 
     # commit a new version of the old changeset, including the update
     # collect all files which might be affected
@@ -693,7 +707,7 @@
 class pick(histeditaction):
     def run(self):
         rulectx = self.repo[self.node]
-        if rulectx.parents()[0].node() == self.state.parentctxnode:
+        if rulectx.p1().node() == self.state.parentctxnode:
             self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
             return rulectx, []
 
@@ -724,7 +738,7 @@
         super(fold, self).verify(prev, expected, seen)
         repo = self.repo
         if not prev:
-            c = repo[self.node].parents()[0]
+            c = repo[self.node].p1()
         elif not prev.verb in ('pick', 'base'):
             return
         else:
@@ -795,7 +809,7 @@
         return False
 
     def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
-        parent = ctx.parents()[0].node()
+        parent = ctx.p1().node()
         hg.updaterepo(repo, parent, overwrite=False)
         ### prepare new commit data
         commitopts = {}
@@ -1459,7 +1473,7 @@
                 'exactly one common root'))
         root = rr[0].node()
 
-        topmost, empty = repo.dirstate.parents()
+        topmost = repo.dirstate.p1()
         revs = between(repo, root, topmost, keep)
         if not revs:
             raise error.Abort(_('%s is not an ancestor of working directory') %
@@ -1873,7 +1887,7 @@
     cmdutil.checkunfinished(repo)
     cmdutil.bailifchanged(repo)
 
-    topmost, empty = repo.dirstate.parents()
+    topmost = repo.dirstate.p1()
     if outg:
         if freeargs:
             remote = freeargs[0]
@@ -1902,7 +1916,7 @@
     actions = parserules(rules, state)
     warnverifyactions(ui, repo, actions, state, ctxs)
 
-    parentctxnode = repo[root].parents()[0].node()
+    parentctxnode = repo[root].p1().node()
 
     state.parentctxnode = parentctxnode
     state.actions = actions
--- a/hgext/journal.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/journal.py	Mon Feb 04 20:35:21 2019 +0300
@@ -348,7 +348,6 @@
 
     def _write(self, vfs, entry):
         with self.jlock(vfs):
-            version = None
             # open file in amend mode to ensure it is created if missing
             with vfs('namejournal', mode='a+b') as f:
                 f.seek(0, os.SEEK_SET)
--- a/hgext/largefiles/lfcommands.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/largefiles/lfcommands.py	Mon Feb 04 20:35:21 2019 +0300
@@ -288,12 +288,9 @@
     files = set(ctx.files())
     if node.nullid not in parents:
         mc = ctx.manifest()
-        mp1 = ctx.parents()[0].manifest()
-        mp2 = ctx.parents()[1].manifest()
-        files |= (set(mp1) | set(mp2)) - set(mc)
-        for f in mc:
-            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
-                files.add(f)
+        for pctx in ctx.parents():
+            for fn in pctx.manifest().diff(mc):
+                files.add(fn)
     return files
 
 # Convert src parents to dst parents
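
The rewritten loop above relies on a manifest diff() yielding every path
whose entry differs between the changeset and a parent. The same idea with
plain dictionaries standing in for manifests (changedfiles is a made-up
helper, not the extension's code)::

    def changedfiles(ctxfiles, childmanifest, parentmanifests):
        # start from the files the changeset itself touches, then add any
        # path that is added, removed or modified relative to a parent
        files = set(ctxfiles)
        for pm in parentmanifests:
            for f in set(childmanifest) | set(pm):
                if childmanifest.get(f) != pm.get(f):
                    files.add(f)
        return files
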
--- a/hgext/largefiles/lfutil.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/largefiles/lfutil.py	Mon Feb 04 20:35:21 2019 +0300
@@ -518,8 +518,8 @@
             files = set(ctx.files())
             if len(parents) == 2:
                 mc = ctx.manifest()
-                mp1 = ctx.parents()[0].manifest()
-                mp2 = ctx.parents()[1].manifest()
+                mp1 = ctx.p1().manifest()
+                mp2 = ctx.p2().manifest()
                 for f in mp1:
                     if f not in mc:
                         files.add(f)
--- a/hgext/largefiles/overrides.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/largefiles/overrides.py	Mon Feb 04 20:35:21 2019 +0300
@@ -210,8 +210,6 @@
             ui.warn(msg % m.rel(f))
         return int(len(files) > 0)
 
-    result = 0
-
     if after:
         remove = deleted
         result = warn(modified + added + clean,
--- a/hgext/largefiles/storefactory.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/largefiles/storefactory.py	Mon Feb 04 20:35:21 2019 +0300
@@ -43,7 +43,6 @@
             path, _branches = hg.parseurl(path)
             remote = hg.peer(repo or ui, {}, path)
         elif path == 'default-push' or path == 'default':
-            path = ''
             remote = repo
         else:
             path, _branches = hg.parseurl(path)
--- a/hgext/lfs/blobstore.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/lfs/blobstore.py	Mon Feb 04 20:35:21 2019 +0300
@@ -42,7 +42,7 @@
     def join(self, path):
         """split the path at first two characters, like: XX/XXXXX..."""
         if not _lfsre.match(path):
-            raise error.ProgrammingError('unexpected lfs path: %s' % path)
+            raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
         return super(lfsvfs, self).join(path[0:2], path[2:])
 
     def walk(self, path=None, onerror=None):
@@ -56,7 +56,8 @@
         prefixlen = len(pathutil.normasprefix(root))
         oids = []
 
-        for dirpath, dirs, files in os.walk(self.reljoin(self.base, path or ''),
+        for dirpath, dirs, files in os.walk(self.reljoin(self.base, path
+                                                         or b''),
                                             onerror=onerror):
             dirpath = dirpath[prefixlen:]
 
@@ -79,10 +80,11 @@
         # self.vfs.  Raise the same error as a normal vfs when asked to read a
         # file that doesn't exist.  The only difference is the full file path
         # isn't available in the error.
-        raise IOError(errno.ENOENT, '%s: No such file or directory' % oid)
+        raise IOError(errno.ENOENT,
+                      pycompat.sysstr(b'%s: No such file or directory' % oid))
 
     def walk(self, path=None, onerror=None):
-        return ('', [], [])
+        return (b'', [], [])
 
     def write(self, oid, data):
         pass
@@ -123,13 +125,13 @@
     """
 
     def __init__(self, repo):
-        fullpath = repo.svfs.join('lfs/objects')
+        fullpath = repo.svfs.join(b'lfs/objects')
         self.vfs = lfsvfs(fullpath)
 
-        if repo.ui.configbool('experimental', 'lfs.disableusercache'):
+        if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
             self.cachevfs = nullvfs()
         else:
-            usercache = lfutil._usercachedir(repo.ui, 'lfs')
+            usercache = lfutil._usercachedir(repo.ui, b'lfs')
             self.cachevfs = lfsvfs(usercache)
         self.ui = repo.ui
 
@@ -143,23 +145,23 @@
         # the usercache is the only place it _could_ be.  If not present, the
         # missing file msg here will indicate the local repo, not the usercache.
         if self.cachevfs.exists(oid):
-            return self.cachevfs(oid, 'rb')
+            return self.cachevfs(oid, b'rb')
 
-        return self.vfs(oid, 'rb')
+        return self.vfs(oid, b'rb')
 
     def download(self, oid, src):
         """Read the blob from the remote source in chunks, verify the content,
         and write to this local blobstore."""
         sha256 = hashlib.sha256()
 
-        with self.vfs(oid, 'wb', atomictemp=True) as fp:
+        with self.vfs(oid, b'wb', atomictemp=True) as fp:
             for chunk in util.filechunkiter(src, size=1048576):
                 fp.write(chunk)
                 sha256.update(chunk)
 
             realoid = node.hex(sha256.digest())
             if realoid != oid:
-                raise LfsCorruptionError(_('corrupt remote lfs object: %s')
+                raise LfsCorruptionError(_(b'corrupt remote lfs object: %s')
                                          % oid)
 
         self._linktousercache(oid)
@@ -170,7 +172,7 @@
         This should only be called from the filelog during a commit or similar.
         As such, there is no need to verify the data.  Imports from a remote
         store must use ``download()`` instead."""
-        with self.vfs(oid, 'wb', atomictemp=True) as fp:
+        with self.vfs(oid, b'wb', atomictemp=True) as fp:
             fp.write(data)
 
         self._linktousercache(oid)
@@ -186,7 +188,7 @@
         """
         if (not isinstance(self.cachevfs, nullvfs)
             and not self.vfs.exists(oid)):
-            self.ui.note(_('lfs: found %s in the usercache\n') % oid)
+            self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
             lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
 
     def _linktousercache(self, oid):
@@ -194,7 +196,7 @@
         # the local store on success, but truncate, write and link on failure?
         if (not self.cachevfs.exists(oid)
             and not isinstance(self.cachevfs, nullvfs)):
-            self.ui.note(_('lfs: adding %s to the usercache\n') % oid)
+            self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
             lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
 
     def read(self, oid, verify=True):
@@ -208,10 +210,10 @@
             # give more useful info about the corruption- simply don't add the
             # hardlink.
             if verify or node.hex(hashlib.sha256(blob).digest()) == oid:
-                self.ui.note(_('lfs: found %s in the usercache\n') % oid)
+                self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
                 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
         else:
-            self.ui.note(_('lfs: found %s in the local lfs store\n') % oid)
+            self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
             blob = self._read(self.vfs, oid, verify)
         return blob
 
@@ -268,20 +270,20 @@
         ui = repo.ui
         self.ui = ui
         baseurl, authinfo = url.authinfo()
-        self.baseurl = baseurl.rstrip('/')
-        useragent = repo.ui.config('experimental', 'lfs.user-agent')
+        self.baseurl = baseurl.rstrip(b'/')
+        useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
         if not useragent:
-            useragent = 'git-lfs/2.3.4 (Mercurial %s)' % util.version()
+            useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
         self.urlopener = urlmod.opener(ui, authinfo, useragent)
-        self.retry = ui.configint('lfs', 'retry')
+        self.retry = ui.configint(b'lfs', b'retry')
 
     def writebatch(self, pointers, fromstore):
         """Batch upload from local to remote blobstore."""
-        self._batch(_deduplicate(pointers), fromstore, 'upload')
+        self._batch(_deduplicate(pointers), fromstore, b'upload')
 
     def readbatch(self, pointers, tostore):
         """Batch download from remote to local blostore."""
-        self._batch(_deduplicate(pointers), tostore, 'download')
+        self._batch(_deduplicate(pointers), tostore, b'download')
 
     def _batchrequest(self, pointers, action):
         """Get metadata about objects pointed by pointers for given action
@@ -289,52 +291,63 @@
         Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
         See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
         """
-        objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers]
-        requestdata = json.dumps({
-            'objects': objects,
-            'operation': action,
-        })
-        url = '%s/objects/batch' % self.baseurl
-        batchreq = util.urlreq.request(url, data=requestdata)
-        batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
-        batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
+        objects = [{r'oid': pycompat.strurl(p.oid()),
+                    r'size': p.size()} for p in pointers]
+        requestdata = pycompat.bytesurl(json.dumps({
+            r'objects': objects,
+            r'operation': pycompat.strurl(action),
+        }))
+        url = b'%s/objects/batch' % self.baseurl
+        batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
+        batchreq.add_header(r'Accept', r'application/vnd.git-lfs+json')
+        batchreq.add_header(r'Content-Type', r'application/vnd.git-lfs+json')
         try:
             with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
                 rawjson = rsp.read()
         except util.urlerr.httperror as ex:
             hints = {
-                400: _('check that lfs serving is enabled on %s and "%s" is '
-                       'supported') % (self.baseurl, action),
-                404: _('the "lfs.url" config may be used to override %s')
+                400: _(b'check that lfs serving is enabled on %s and "%s" is '
+                       b'supported') % (self.baseurl, action),
+                404: _(b'the "lfs.url" config may be used to override %s')
                        % self.baseurl,
             }
-            hint = hints.get(ex.code, _('api=%s, action=%s') % (url, action))
-            raise LfsRemoteError(_('LFS HTTP error: %s') % ex, hint=hint)
+            hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
+            raise LfsRemoteError(
+                _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
+                hint=hint)
         except util.urlerr.urlerror as ex:
-            hint = (_('the "lfs.url" config may be used to override %s')
+            hint = (_(b'the "lfs.url" config may be used to override %s')
                     % self.baseurl)
-            raise LfsRemoteError(_('LFS error: %s') % _urlerrorreason(ex),
+            raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex),
                                  hint=hint)
         try:
             response = json.loads(rawjson)
         except ValueError:
-            raise LfsRemoteError(_('LFS server returns invalid JSON: %s')
-                                 % rawjson)
+            raise LfsRemoteError(_(b'LFS server returns invalid JSON: %s')
+                                 % rawjson.encode("utf-8"))
 
         if self.ui.debugflag:
-            self.ui.debug('Status: %d\n' % rsp.status)
+            self.ui.debug(b'Status: %d\n' % rsp.status)
             # lfs-test-server and hg serve return headers in different order
-            self.ui.debug('%s\n'
-                          % '\n'.join(sorted(str(rsp.info()).splitlines())))
+            headers = pycompat.bytestr(rsp.info()).strip()
+            self.ui.debug(b'%s\n'
+                          % b'\n'.join(sorted(headers.splitlines())))
 
-            if 'objects' in response:
-                response['objects'] = sorted(response['objects'],
-                                             key=lambda p: p['oid'])
-            self.ui.debug('%s\n'
-                          % json.dumps(response, indent=2,
-                                       separators=('', ': '), sort_keys=True))
+            if r'objects' in response:
+                response[r'objects'] = sorted(response[r'objects'],
+                                              key=lambda p: p[r'oid'])
+            self.ui.debug(b'%s\n'
+                          % pycompat.bytesurl(
+                              json.dumps(response, indent=2,
+                                         separators=(r'', r': '),
+                                         sort_keys=True)))
 
-        return response
+        def encodestr(x):
+            if isinstance(x, pycompat.unicode):
+                return x.encode(u'utf-8')
+            return x
+
+        return pycompat.rapply(encodestr, response)
 
     def _checkforservererror(self, pointers, responses, action):
         """Scans errors from objects
@@ -345,34 +358,34 @@
             # server implementation (ex. lfs-test-server)  does not set "error"
             # but just removes "download" from "actions". Treat that case
             # as the same as 404 error.
-            if 'error' not in response:
-                if (action == 'download'
-                    and action not in response.get('actions', [])):
+            if b'error' not in response:
+                if (action == b'download'
+                    and action not in response.get(b'actions', [])):
                     code = 404
                 else:
                     continue
             else:
                 # An error dict without a code doesn't make much sense, so
                 # treat as a server error.
-                code = response.get('error').get('code', 500)
+                code = response.get(b'error').get(b'code', 500)
 
             ptrmap = {p.oid(): p for p in pointers}
-            p = ptrmap.get(response['oid'], None)
+            p = ptrmap.get(response[b'oid'], None)
             if p:
-                filename = getattr(p, 'filename', 'unknown')
+                filename = getattr(p, 'filename', b'unknown')
                 errors = {
-                    404: 'The object does not exist',
-                    410: 'The object was removed by the owner',
-                    422: 'Validation error',
-                    500: 'Internal server error',
+                    404: b'The object does not exist',
+                    410: b'The object was removed by the owner',
+                    422: b'Validation error',
+                    500: b'Internal server error',
                 }
-                msg = errors.get(code, 'status code %d' % code)
-                raise LfsRemoteError(_('LFS server error for "%s": %s')
+                msg = errors.get(code, b'status code %d' % code)
+                raise LfsRemoteError(_(b'LFS server error for "%s": %s')
                                      % (filename, msg))
             else:
                 raise LfsRemoteError(
-                    _('LFS server error. Unsolicited response for oid %s')
-                    % response['oid'])
+                    _(b'LFS server error. Unsolicited response for oid %s')
+                    % response[b'oid'])
 
     def _extractobjects(self, response, pointers, action):
         """extract objects from response of the batch API
@@ -382,12 +395,13 @@
         raise if any object has an error
         """
         # Scan errors from objects - fail early
-        objects = response.get('objects', [])
+        objects = response.get(b'objects', [])
         self._checkforservererror(pointers, objects, action)
 
         # Filter objects with given action. Practically, this skips uploading
         # objects which exist in the server.
-        filteredobjects = [o for o in objects if action in o.get('actions', [])]
+        filteredobjects = [o for o in objects
+                           if action in o.get(b'actions', [])]
 
         return filteredobjects
 
@@ -401,36 +415,37 @@
         See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
         basic-transfers.md
         """
-        oid = pycompat.bytestr(obj['oid'])
+        oid = obj[b'oid']
+        href = obj[b'actions'][action].get(b'href')
+        headers = obj[b'actions'][action].get(b'header', {}).items()
 
-        href = pycompat.bytestr(obj['actions'][action].get('href'))
-        headers = obj['actions'][action].get('header', {}).items()
-
-        request = util.urlreq.request(href)
-        if action == 'upload':
+        request = util.urlreq.request(pycompat.strurl(href))
+        if action == b'upload':
             # If uploading blobs, read data from local blobstore.
             if not localstore.verify(oid):
-                raise error.Abort(_('detected corrupt lfs object: %s') % oid,
-                                  hint=_('run hg verify'))
+                raise error.Abort(_(b'detected corrupt lfs object: %s') % oid,
+                                  hint=_(b'run hg verify'))
             request.data = filewithprogress(localstore.open(oid), None)
-            request.get_method = lambda: 'PUT'
-            request.add_header('Content-Type', 'application/octet-stream')
+            request.get_method = lambda: r'PUT'
+            request.add_header(r'Content-Type', r'application/octet-stream')
+            request.add_header(r'Content-Length', len(request.data))
 
         for k, v in headers:
-            request.add_header(k, v)
+            request.add_header(pycompat.strurl(k), pycompat.strurl(v))
 
         response = b''
         try:
             with contextlib.closing(self.urlopener.open(request)) as req:
                 ui = self.ui  # Shorten debug lines
                 if self.ui.debugflag:
-                    ui.debug('Status: %d\n' % req.status)
+                    ui.debug(b'Status: %d\n' % req.status)
                     # lfs-test-server and hg serve return headers in different
                     # order
-                    ui.debug('%s\n'
-                             % '\n'.join(sorted(str(req.info()).splitlines())))
+                    headers = pycompat.bytestr(req.info()).strip()
+                    ui.debug(b'%s\n'
+                             % b'\n'.join(sorted(headers.splitlines())))
 
-                if action == 'download':
+                if action == b'download':
                     # If downloading blobs, store downloaded data to local
                     # blobstore
                     localstore.download(oid, req)
@@ -441,65 +456,65 @@
                             break
                         response += data
                     if response:
-                        ui.debug('lfs %s response: %s' % (action, response))
+                        ui.debug(b'lfs %s response: %s' % (action, response))
         except util.urlerr.httperror as ex:
             if self.ui.debugflag:
-                self.ui.debug('%s: %s\n' % (oid, ex.read()))
-            raise LfsRemoteError(_('LFS HTTP error: %s (oid=%s, action=%s)')
-                                 % (ex, oid, action))
+                self.ui.debug(b'%s: %s\n' % (oid, ex.read())) # XXX: also bytes?
+            raise LfsRemoteError(_(b'LFS HTTP error: %s (oid=%s, action=%s)')
+                                 % (stringutil.forcebytestr(ex), oid, action))
         except util.urlerr.urlerror as ex:
-            hint = (_('attempted connection to %s')
-                    % util.urllibcompat.getfullurl(request))
-            raise LfsRemoteError(_('LFS error: %s') % _urlerrorreason(ex),
+            hint = (_(b'attempted connection to %s')
+                    % pycompat.bytesurl(util.urllibcompat.getfullurl(request)))
+            raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex),
                                  hint=hint)
 
     def _batch(self, pointers, localstore, action):
-        if action not in ['upload', 'download']:
-            raise error.ProgrammingError('invalid Git-LFS action: %s' % action)
+        if action not in [b'upload', b'download']:
+            raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)
 
         response = self._batchrequest(pointers, action)
         objects = self._extractobjects(response, pointers, action)
-        total = sum(x.get('size', 0) for x in objects)
+        total = sum(x.get(b'size', 0) for x in objects)
         sizes = {}
         for obj in objects:
-            sizes[obj.get('oid')] = obj.get('size', 0)
-        topic = {'upload': _('lfs uploading'),
-                 'download': _('lfs downloading')}[action]
+            sizes[obj.get(b'oid')] = obj.get(b'size', 0)
+        topic = {b'upload': _(b'lfs uploading'),
+                 b'download': _(b'lfs downloading')}[action]
         if len(objects) > 1:
-            self.ui.note(_('lfs: need to transfer %d objects (%s)\n')
+            self.ui.note(_(b'lfs: need to transfer %d objects (%s)\n')
                          % (len(objects), util.bytecount(total)))
 
         def transfer(chunk):
             for obj in chunk:
-                objsize = obj.get('size', 0)
+                objsize = obj.get(b'size', 0)
                 if self.ui.verbose:
-                    if action == 'download':
-                        msg = _('lfs: downloading %s (%s)\n')
-                    elif action == 'upload':
-                        msg = _('lfs: uploading %s (%s)\n')
-                    self.ui.note(msg % (obj.get('oid'),
+                    if action == b'download':
+                        msg = _(b'lfs: downloading %s (%s)\n')
+                    elif action == b'upload':
+                        msg = _(b'lfs: uploading %s (%s)\n')
+                    self.ui.note(msg % (obj.get(b'oid'),
                                  util.bytecount(objsize)))
                 retry = self.retry
                 while True:
                     try:
                         self._basictransfer(obj, action, localstore)
-                        yield 1, obj.get('oid')
+                        yield 1, obj.get(b'oid')
                         break
                     except socket.error as ex:
                         if retry > 0:
                             self.ui.note(
-                                _('lfs: failed: %r (remaining retry %d)\n')
-                                % (ex, retry))
+                                _(b'lfs: failed: %r (remaining retry %d)\n')
+                                % (stringutil.forcebytestr(ex), retry))
                             retry -= 1
                             continue
                         raise
 
         # Until https multiplexing gets sorted out
-        if self.ui.configbool('experimental', 'lfs.worker-enable'):
+        if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
             oids = worker.worker(self.ui, 0.1, transfer, (),
-                                 sorted(objects, key=lambda o: o.get('oid')))
+                                 sorted(objects, key=lambda o: o.get(b'oid')))
         else:
-            oids = transfer(sorted(objects, key=lambda o: o.get('oid')))
+            oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))
 
         with self.ui.makeprogress(topic, total=total) as progress:
             progress.update(0)
@@ -509,14 +524,14 @@
                 processed += sizes[oid]
                 blobs += 1
                 progress.update(processed)
-                self.ui.note(_('lfs: processed: %s\n') % oid)
+                self.ui.note(_(b'lfs: processed: %s\n') % oid)
 
         if blobs > 0:
-            if action == 'upload':
-                self.ui.status(_('lfs: uploaded %d files (%s)\n')
+            if action == b'upload':
+                self.ui.status(_(b'lfs: uploaded %d files (%s)\n')
                                % (blobs, util.bytecount(processed)))
-            elif action == 'download':
-                self.ui.status(_('lfs: downloaded %d files (%s)\n')
+            elif action == b'download':
+                self.ui.status(_(b'lfs: downloaded %d files (%s)\n')
                                % (blobs, util.bytecount(processed)))
 
     def __del__(self):
@@ -531,18 +546,18 @@
     """Dummy store storing blobs to temp directory."""
 
     def __init__(self, repo, url):
-        fullpath = repo.vfs.join('lfs', url.path)
+        fullpath = repo.vfs.join(b'lfs', url.path)
         self.vfs = lfsvfs(fullpath)
 
     def writebatch(self, pointers, fromstore):
         for p in _deduplicate(pointers):
             content = fromstore.read(p.oid(), verify=True)
-            with self.vfs(p.oid(), 'wb', atomictemp=True) as fp:
+            with self.vfs(p.oid(), b'wb', atomictemp=True) as fp:
                 fp.write(content)
 
     def readbatch(self, pointers, tostore):
         for p in _deduplicate(pointers):
-            with self.vfs(p.oid(), 'rb') as fp:
+            with self.vfs(p.oid(), b'rb') as fp:
                 tostore.download(p.oid(), fp)
 
 class _nullremote(object):
@@ -570,13 +585,13 @@
         self._prompt()
 
     def _prompt(self):
-        raise error.Abort(_('lfs.url needs to be configured'))
+        raise error.Abort(_(b'lfs.url needs to be configured'))
 
 _storemap = {
-    'https': _gitlfsremote,
-    'http': _gitlfsremote,
-    'file': _dummyremote,
-    'null': _nullremote,
+    b'https': _gitlfsremote,
+    b'http': _gitlfsremote,
+    b'file': _dummyremote,
+    b'null': _nullremote,
     None: _promptremote,
 }
 
@@ -590,8 +605,8 @@
 def _verify(oid, content):
     realoid = node.hex(hashlib.sha256(content).digest())
     if realoid != oid:
-        raise LfsCorruptionError(_('detected corrupt lfs object: %s') % oid,
-                                 hint=_('run hg verify'))
+        raise LfsCorruptionError(_(b'detected corrupt lfs object: %s') % oid,
+                                 hint=_(b'run hg verify'))
 
 def remote(repo, remote=None):
     """remotestore factory. return a store in _storemap depending on config
@@ -603,7 +618,7 @@
 
     https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
     """
-    lfsurl = repo.ui.config('lfs', 'url')
+    lfsurl = repo.ui.config(b'lfs', b'url')
     url = util.url(lfsurl or '')
     if lfsurl is None:
         if remote:
@@ -616,7 +631,7 @@
         else:
             # TODO: investigate 'paths.remote:lfsurl' style path customization,
             # and fall back to inferring from 'paths.remote' if unspecified.
-            path = repo.ui.config('paths', 'default') or ''
+            path = repo.ui.config(b'paths', b'default') or b''
 
         defaulturl = util.url(path)
 
@@ -628,11 +643,11 @@
             defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'
 
             url = util.url(bytes(defaulturl))
-            repo.ui.note(_('lfs: assuming remote store: %s\n') % url)
+            repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)
 
     scheme = url.scheme
     if scheme not in _storemap:
-        raise error.Abort(_('lfs: unknown url scheme: %s') % scheme)
+        raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
     return _storemap[scheme](repo, url)
 
 class LfsRemoteError(error.StorageError):
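
For reference, the JSON body that _batchrequest() above posts to
<lfs.url>/objects/batch has roughly this shape (batchpayload is a
hypothetical helper, not part of the extension; see the git-lfs batch API
document linked in the docstring for the authoritative format)::

    import json

    def batchpayload(oids_and_sizes, operation='download'):
        # operation is 'upload' or 'download'; request and response are
        # exchanged with 'application/vnd.git-lfs+json' Accept/Content-Type
        return json.dumps({
            'operation': operation,
            'objects': [{'oid': oid, 'size': size}
                        for oid, size in oids_and_sizes],
        })
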
--- a/hgext/lfs/wireprotolfsserver.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/lfs/wireprotolfsserver.py	Mon Feb 04 20:35:21 2019 +0300
@@ -43,7 +43,7 @@
     if orig(rctx, req, res, checkperm):
         return True
 
-    if not rctx.repo.ui.configbool('experimental', 'lfs.serve'):
+    if not rctx.repo.ui.configbool(b'experimental', b'lfs.serve'):
         return False
 
     if not util.safehasattr(rctx.repo.svfs, 'lfslocalblobstore'):
@@ -54,7 +54,7 @@
 
     try:
         if req.dispatchpath == b'.git/info/lfs/objects/batch':
-            checkperm(rctx, req, 'pull')
+            checkperm(rctx, req, b'pull')
             return _processbatchrequest(rctx.repo, req, res)
         # TODO: reserve and use a path in the proposed http wireprotocol /api/
         #       namespace?
@@ -81,7 +81,7 @@
 def _logexception(req):
     """Write information about the current exception to wsgi.errors."""
     tb = pycompat.sysbytes(traceback.format_exc())
-    errorlog = req.rawenv[r'wsgi.errors']
+    errorlog = req.rawenv[b'wsgi.errors']
 
     uri = b''
     if req.apppath:
@@ -133,25 +133,27 @@
     lfsreq = json.loads(req.bodyfh.read())
 
     # If no transfer handlers are explicitly requested, 'basic' is assumed.
-    if 'basic' not in lfsreq.get('transfers', ['basic']):
+    if r'basic' not in lfsreq.get(r'transfers', [r'basic']):
         _sethttperror(res, HTTP_BAD_REQUEST,
                       b'Only the basic LFS transfer handler is supported')
         return True
 
-    operation = lfsreq.get('operation')
-    if operation not in ('upload', 'download'):
+    operation = lfsreq.get(r'operation')
+    operation = pycompat.bytestr(operation)
+
+    if operation not in (b'upload', b'download'):
         _sethttperror(res, HTTP_BAD_REQUEST,
                       b'Unsupported LFS transfer operation: %s' % operation)
         return True
 
     localstore = repo.svfs.lfslocalblobstore
 
-    objects = [p for p in _batchresponseobjects(req, lfsreq.get('objects', []),
+    objects = [p for p in _batchresponseobjects(req, lfsreq.get(r'objects', []),
                                                 operation, localstore)]
 
     rsp = {
-        'transfer': 'basic',
-        'objects': objects,
+        r'transfer': r'basic',
+        r'objects': objects,
     }
 
     res.status = hgwebcommon.statusmessage(HTTP_OK)
@@ -190,11 +192,12 @@
 
     for obj in objects:
         # Convert unicode to ASCII to create a filesystem path
-        oid = obj.get('oid').encode('ascii')
+        soid = obj.get(r'oid')
+        oid = soid.encode(r'ascii')
         rsp = {
-            'oid': oid,
-            'size': obj.get('size'),  # XXX: should this check the local size?
-            #'authenticated': True,
+            r'oid': soid,
+            r'size': obj.get(r'size'),  # XXX: should this check the local size?
+            #r'authenticated': True,
         }
 
         exists = True
@@ -209,7 +212,7 @@
         # verified as the file is streamed to the caller.
         try:
             verifies = store.verify(oid)
-            if verifies and action == 'upload':
+            if verifies and action == b'upload':
                 # The client will skip this upload, but make sure it remains
                 # available locally.
                 store.linkfromusercache(oid)
@@ -217,9 +220,9 @@
             if inst.errno != errno.ENOENT:
                 _logexception(req)
 
-                rsp['error'] = {
-                    'code': 500,
-                    'message': inst.strerror or 'Internal Server Error'
+                rsp[r'error'] = {
+                    r'code': 500,
+                    r'message': inst.strerror or r'Internal Server Error'
                 }
                 yield rsp
                 continue
@@ -228,19 +231,19 @@
 
         # Items are always listed for downloads.  They are dropped for uploads
         # IFF they already exist locally.
-        if action == 'download':
+        if action == b'download':
             if not exists:
-                rsp['error'] = {
-                    'code': 404,
-                    'message': "The object does not exist"
+                rsp[r'error'] = {
+                    r'code': 404,
+                    r'message': r"The object does not exist"
                 }
                 yield rsp
                 continue
 
             elif not verifies:
-                rsp['error'] = {
-                    'code': 422,   # XXX: is this the right code?
-                    'message': "The object is corrupt"
+                rsp[r'error'] = {
+                    r'code': 422,   # XXX: is this the right code?
+                    r'message': r"The object is corrupt"
                 }
                 yield rsp
                 continue
@@ -256,22 +259,22 @@
             # a gratuitous deviation from lfs-test-server in the test
             # output.
             hdr = {
-                'Accept': 'application/vnd.git-lfs'
+                r'Accept': r'application/vnd.git-lfs'
             }
 
-            auth = req.headers.get('Authorization', '')
-            if auth.startswith('Basic '):
-                hdr['Authorization'] = auth
+            auth = req.headers.get(b'Authorization', b'')
+            if auth.startswith(b'Basic '):
+                hdr[r'Authorization'] = pycompat.strurl(auth)
 
             return hdr
 
-        rsp['actions'] = {
-            '%s' % action: {
-                'href': '%s%s/.hg/lfs/objects/%s'
-                    % (req.baseurl, req.apppath, oid),
+        rsp[r'actions'] = {
+            r'%s' % pycompat.strurl(action): {
+                r'href': pycompat.strurl(b'%s%s/.hg/lfs/objects/%s'
+                    % (req.baseurl, req.apppath, oid)),
                 # datetime.isoformat() doesn't include the 'Z' suffix
-                "expires_at": expiresat.strftime('%Y-%m-%dT%H:%M:%SZ'),
-                'header': _buildheader(),
+                r"expires_at": expiresat.strftime(r'%Y-%m-%dT%H:%M:%SZ'),
+                r'header': _buildheader(),
             }
         }
 
@@ -297,7 +300,7 @@
         return True
 
     if method == b'PUT':
-        checkperm('upload')
+        checkperm(b'upload')
 
         # TODO: verify Content-Type?
 
@@ -324,7 +327,7 @@
 
         return True
     elif method == b'GET':
-        checkperm('pull')
+        checkperm(b'pull')
 
         res.status = hgwebcommon.statusmessage(HTTP_OK)
         res.headers[b'Content-Type'] = b'application/octet-stream'
--- a/hgext/mq.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/mq.py	Mon Feb 04 20:35:21 2019 +0300
@@ -970,7 +970,7 @@
                         repo.dirstate.remove(f)
                     for f in merged:
                         repo.dirstate.merge(f)
-                    p1, p2 = repo.dirstate.parents()
+                    p1 = repo.dirstate.p1()
                     repo.setparents(p1, merge)
 
             if all_files and '.hgsubstate' in all_files:
@@ -3521,7 +3521,7 @@
             if self.mq.applied and self.mq.checkapplied and not force:
                 parents = self.dirstate.parents()
                 patches = [s.node for s in self.mq.applied]
-                if parents[0] in patches or parents[1] in patches:
+                if any(p in patches for p in parents):
                     raise error.Abort(errmsg)
 
         def commit(self, text="", user=None, date=None, match=None,
--- a/hgext/notify.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/notify.py	Mon Feb 04 20:35:21 2019 +0300
@@ -367,8 +367,12 @@
             raise error.Abort(inst)
 
         # store sender and subject
-        sender = encoding.strtolocal(msg[r'From'])
-        subject = encoding.strtolocal(msg[r'Subject'])
+        sender = msg[r'From']
+        subject = msg[r'Subject']
+        if sender is not None:
+            sender = encoding.strtolocal(sender)
+        if subject is not None:
+            subject = encoding.strtolocal(subject)
         del msg[r'From'], msg[r'Subject']
 
         if not msg.is_multipart():
--- a/hgext/phabricator.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/phabricator.py	Mon Feb 04 20:35:21 2019 +0300
@@ -277,7 +277,6 @@
     The ``old node``, if not None, is guaranteed to be the last diff of
     corresponding Differential Revision, and exist in the repo.
     """
-    url, token = readurltoken(repo)
     unfi = repo.unfiltered()
     nodemap = unfi.changelog.nodemap
 
--- a/hgext/rebase.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/rebase.py	Mon Feb 04 20:35:21 2019 +0300
@@ -1804,7 +1804,6 @@
 
 def pullrebase(orig, ui, repo, *args, **opts):
     'Call rebase after pull if the latter has been invoked with --rebase'
-    ret = None
     if opts.get(r'rebase'):
         if ui.configbool('commands', 'rebase.requiredest'):
             msg = _('rebase destination required by configuration')
--- a/hgext/remotefilelog/basepack.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/remotefilelog/basepack.py	Mon Feb 04 20:35:21 2019 +0300
@@ -270,9 +270,9 @@
                 # only affect this instance
                 self.VERSION = version
             elif self.VERSION != version:
-                raise RuntimeError('inconsistent version: %s' % version)
+                raise RuntimeError('inconsistent version: %d' % version)
         else:
-            raise RuntimeError('unsupported version: %s' % version)
+            raise RuntimeError('unsupported version: %d' % version)
 
 class basepack(versionmixin):
     # The maximum amount we should read via mmap before remmaping so the old
@@ -457,8 +457,6 @@
             pass
 
     def writeindex(self):
-        rawindex = ''
-
         largefanout = len(self.entries) > SMALLFANOUTCUTOFF
         if largefanout:
             params = indexparams(LARGEFANOUTPREFIX, self.VERSION)
--- a/hgext/remotefilelog/basestore.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/remotefilelog/basestore.py	Mon Feb 04 20:35:21 2019 +0300
@@ -410,16 +410,18 @@
         def wrapped(self, *args, **kwargs):
             retrylog = self.retrylog or noop
             funcname = fn.__name__
-            for i in pycompat.xrange(self.numattempts):
+            i = 0
+            while i < self.numattempts:
                 if i > 0:
                     retrylog('re-attempting (n=%d) %s\n' % (i, funcname))
                     self.markforrefresh()
+                i += 1
                 try:
                     return fn(self, *args, **kwargs)
                 except KeyError:
-                    pass
-            # retries exhausted
-            retrylog('retries exhausted in %s, raising KeyError\n' %
-                     pycompat.sysbytes(funcname))
-            raise
+                    if i == self.numattempts:
+                        # retries exhausted
+                        retrylog('retries exhausted in %s, raising KeyError\n' %
+                                 pycompat.sysbytes(funcname))
+                        raise
         return wrapped
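
The point of the basestore rewrite above is that the bare raise has to run
while the KeyError is still being handled; re-raising after the loop, as
the old code did, is unreliable (on Python 3 it fails with "No active
exception to re-raise"). A generic standalone version of the corrected
pattern (retryonkeyerror is an invented name)::

    import functools

    def retryonkeyerror(attempts):
        def decorator(fn):
            @functools.wraps(fn)
            def wrapped(*args, **kwargs):
                i = 0
                while i < attempts:
                    i += 1
                    try:
                        return fn(*args, **kwargs)
                    except KeyError:
                        if i == attempts:
                            # out of retries: re-raise while the KeyError
                            # is still the active exception
                            raise
            return wrapped
        return decorator
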
--- a/hgext/remotefilelog/datapack.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/remotefilelog/datapack.py	Mon Feb 04 20:35:21 2019 +0300
@@ -242,7 +242,7 @@
             entry = index[end:end + entrylen]
         else:
             while start < end - entrylen:
-                mid = start  + (end - start) / 2
+                mid = start + (end - start) // 2
                 mid = mid - ((mid - params.indexstart) % entrylen)
                 midnode = index[mid:mid + NODELENGTH]
                 if midnode == node:
@@ -250,10 +250,8 @@
                     break
                 if node > midnode:
                     start = mid
-                    startnode = midnode
                 elif node < midnode:
                     end = mid
-                    endnode = midnode
             else:
                 return None
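
The / to // change above is a Python 3 porting fix: true division returns
a float, which cannot be used as a slice index. A two-line illustration::

    start, end = 0, 40
    mid = start + (end - start) / 2    # 20.0 on Python 3: unusable as an index
    mid = start + (end - start) // 2   # 20: fine for index[mid:mid + NODELENGTH]
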
 
--- a/hgext/remotefilelog/debugcommands.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/remotefilelog/debugcommands.py	Mon Feb 04 20:35:21 2019 +0300
@@ -175,7 +175,6 @@
     return zlib.decompress(raw)
 
 def parsefileblob(path, decompress):
-    raw = None
     f = open(path, "rb")
     try:
         raw = f.read()
--- a/hgext/remotefilelog/historypack.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/remotefilelog/historypack.py	Mon Feb 04 20:35:21 2019 +0300
@@ -259,10 +259,8 @@
                     return self._index[mid:mid + entrylen]
                 if node > midnode:
                     start = mid
-                    startnode = midnode
                 elif node < midnode:
                     end = mid
-                    endnode = midnode
         return None
 
     def markledger(self, ledger, options=None):
@@ -514,7 +512,6 @@
 
             fileindexentries.append(rawentry)
 
-        nodecountraw = ''
         nodecountraw = struct.pack('!Q', nodecount)
         return (''.join(fileindexentries) + nodecountraw +
                 ''.join(nodeindexentries))
--- a/hgext/remotefilelog/remotefilectx.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/remotefilelog/remotefilectx.py	Mon Feb 04 20:35:21 2019 +0300
@@ -452,8 +452,8 @@
 class remoteworkingfilectx(context.workingfilectx, remotefilectx):
     def __init__(self, repo, path, filelog=None, workingctx=None):
         self._ancestormap = None
-        return super(remoteworkingfilectx, self).__init__(repo, path,
-            filelog, workingctx)
+        super(remoteworkingfilectx, self).__init__(repo, path, filelog,
+                                                   workingctx)
 
     def parents(self):
         return remotefilectx.parents(self)
--- a/hgext/remotefilelog/remotefilelog.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/remotefilelog/remotefilelog.py	Mon Feb 04 20:35:21 2019 +0300
@@ -61,8 +61,6 @@
         return t[s + 2:]
 
     def add(self, text, meta, transaction, linknode, p1=None, p2=None):
-        hashtext = text
-
         # hash with the metadata, like in vanilla filelogs
         hashtext = shallowutil.createrevlogtext(text, meta.get('copy'),
                                                 meta.get('copyrev'))
--- a/hgext/remotefilelog/repack.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/remotefilelog/repack.py	Mon Feb 04 20:35:21 2019 +0300
@@ -601,7 +601,6 @@
                 # TODO: Optimize the deltachain fetching. Since we're
                 # iterating over the different version of the file, we may
                 # be fetching the same deltachain over and over again.
-                meta = None
                 if deltabase != nullid:
                     deltaentry = self.data.getdelta(filename, node)
                     delta, deltabasename, origdeltabase, meta = deltaentry
--- a/hgext/remotefilelog/shallowutil.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/remotefilelog/shallowutil.py	Mon Feb 04 20:35:21 2019 +0300
@@ -237,9 +237,9 @@
             # v0, str(int(size)) is the header
             size = int(header)
     except ValueError:
-        raise RuntimeError("unexpected remotefilelog header: illegal format")
+        raise RuntimeError(r"unexpected remotefilelog header: illegal format")
     if size is None:
-        raise RuntimeError("unexpected remotefilelog header: no size found")
+        raise RuntimeError(r"unexpected remotefilelog header: no size found")
     return index + 1, size, flags
 
 def buildfileblobheader(size, flags, version=None):
--- a/hgext/shelve.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/shelve.py	Mon Feb 04 20:35:21 2019 +0300
@@ -674,7 +674,7 @@
         hg.update(repo, wctx.node())
         files = []
         files.extend(shelvectx.files())
-        files.extend(shelvectx.parents()[0].files())
+        files.extend(shelvectx.p1().files())
 
         # revert will overwrite unknown files, so move them out of the way
         for file in repo.status(unknown=True).unknown:
@@ -809,7 +809,7 @@
     """Rebase restored commit from its original location to a destination"""
     # If the shelve is not immediately on top of the commit
     # we'll be merging with, rebase it to be on top.
-    if tmpwctx.node() == shelvectx.parents()[0].node():
+    if tmpwctx.node() == shelvectx.p1().node():
         return shelvectx
 
     overrides = {
--- a/hgext/strip.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/strip.py	Mon Feb 04 20:35:21 2019 +0300
@@ -39,7 +39,7 @@
     if baserev:
         bctx = repo[baserev]
     else:
-        bctx = wctx.parents()[0]
+        bctx = wctx.p1()
     for s in sorted(wctx.substate):
         wctx.sub(s).bailifchanged(True)
         if s not in bctx.substate or bctx.sub(s).dirty():
--- a/hgext/transplant.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/transplant.py	Mon Feb 04 20:35:21 2019 +0300
@@ -155,7 +155,7 @@
         if opts is None:
             opts = {}
         revs = sorted(revmap)
-        p1, p2 = repo.dirstate.parents()
+        p1 = repo.dirstate.p1()
         pulls = []
         diffopts = patch.difffeatureopts(self.ui, opts)
         diffopts.git = True
@@ -186,7 +186,7 @@
                             exchange.pull(repo, source.peer(), heads=pulls)
                         merge.update(repo, pulls[-1], branchmerge=False,
                                      force=False)
-                        p1, p2 = repo.dirstate.parents()
+                        p1 = repo.dirstate.p1()
                         pulls = []
 
                 domerge = False
@@ -323,7 +323,7 @@
         else:
             files = None
         if merge:
-            p1, p2 = repo.dirstate.parents()
+            p1 = repo.dirstate.p1()
             repo.setparents(p1, node)
             m = match.always(repo.root, '')
         else:
@@ -387,7 +387,7 @@
 
         extra = {'transplant_source': node}
         try:
-            p1, p2 = repo.dirstate.parents()
+            p1 = repo.dirstate.p1()
             if p1 != parent:
                 raise error.Abort(_('working directory not at transplant '
                                    'parent %s') % nodemod.hex(parent))
@@ -668,7 +668,7 @@
 
     tp = transplanter(ui, repo, opts)
 
-    p1, p2 = repo.dirstate.parents()
+    p1 = repo.dirstate.p1()
     if len(repo) > 0 and p1 == revlog.nullid:
         raise error.Abort(_('no revision checked out'))
     if opts.get('continue'):
@@ -676,11 +676,7 @@
             raise error.Abort(_('no transplant to continue'))
     else:
         cmdutil.checkunfinished(repo)
-        if p2 != revlog.nullid:
-            raise error.Abort(_('outstanding uncommitted merges'))
-        m, a, r, d = repo.status()[:4]
-        if m or a or r or d:
-            raise error.Abort(_('outstanding local changes'))
+        cmdutil.bailifchanged(repo)
 
     sourcerepo = opts.get('source')
     if sourcerepo:
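
The transplant hunk above folds the hand-rolled checks into cmdutil.bailifchanged(repo). A simplified sketch of what that call guards against, reconstructed from the removed lines (the real helper also covers dirty subrepositories and accepts a hint):

    def bailifchanged_sketch(repo):
        if repo.dirstate.p2() != revlog.nullid:
            raise error.Abort(_('outstanding uncommitted merges'))
        st = repo.status()
        if st.modified or st.added or st.removed or st.deleted:
            raise error.Abort(_('outstanding local changes'))
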
--- a/hgext/uncommit.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/uncommit.py	Mon Feb 04 20:35:21 2019 +0300
@@ -25,7 +25,7 @@
     cmdutil,
     commands,
     context,
-    copies,
+    copies as copiesmod,
     error,
     node,
     obsutil,
@@ -67,10 +67,10 @@
     files = (initialfiles - exclude)
     # return the p1 so that we don't create an obsmarker later
     if not keepcommit:
-        return ctx.parents()[0].node()
+        return ctx.p1().node()
 
     # Filter copies
-    copied = copies.pathcopies(base, ctx)
+    copied = copiesmod.pathcopies(base, ctx)
     copied = dict((dst, src) for dst, src in copied.iteritems()
                   if dst in files)
     def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
@@ -93,13 +93,14 @@
                          extra=ctx.extra())
     return repo.commitctx(new)
 
-def _fixdirstate(repo, oldctx, newctx, status):
+def _fixdirstate(repo, oldctx, newctx, match=None):
     """ fix the dirstate after switching the working directory from oldctx to
     newctx, which can be the result of either unamend or uncommit.
     """
     ds = repo.dirstate
+    ds.setparents(newctx.node(), node.nullid)
     copies = dict(ds.copies())
-    s = status
+    s = newctx.status(oldctx, match=match)
     for f in s.modified:
         if ds[f] == 'r':
             # modified + removed -> removed
@@ -121,11 +122,7 @@
             ds.remove(f)
 
     # Merge old parent and old working dir copies
-    oldcopies = {}
-    for f in (s.modified + s.added):
-        src = oldctx[f].renamed()
-        if src:
-            oldcopies[f] = src[0]
+    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
     oldcopies.update(copies)
     copies = dict((dst, oldcopies.get(src, src))
                   for dst, src in oldcopies.iteritems())
@@ -179,12 +176,10 @@
                 # Fully removed the old commit
                 mapping[old.node()] = ()
 
-            scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True)
+            with repo.dirstate.parentchange():
+                _fixdirstate(repo, old, repo[newid], match)
 
-            with repo.dirstate.parentchange():
-                repo.dirstate.setparents(newid, node.nullid)
-                s = old.p1().status(old, match=match)
-                _fixdirstate(repo, old, repo[newid], s)
+            scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True)
 
 def predecessormarkers(ctx):
     """yields the obsolete markers marking the given changeset as a successor"""
@@ -244,9 +239,7 @@
         dirstate = repo.dirstate
 
         with dirstate.parentchange():
-            dirstate.setparents(newprednode, node.nullid)
-            s = repo.status(predctx, curctx)
-            _fixdirstate(repo, curctx, newpredctx, s)
+            _fixdirstate(repo, curctx, newpredctx)
 
         mapping = {curctx.node(): (newprednode,)}
         scmutil.cleanupnodes(repo, mapping, 'unamend', fixphase=True)
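
_fixdirstate() now merges the newctx-to-oldctx path copies with the copies already recorded in the dirstate, then chains the two maps so every destination points at its oldest source. A standalone illustration of that composition step (file names are made up):

    oldcopies = {'b.txt': 'a.txt'}   # a.txt -> b.txt between newctx and oldctx
    dscopies = {'c.txt': 'b.txt'}    # b.txt -> c.txt recorded in the dirstate
    oldcopies.update(dscopies)
    composed = dict((dst, oldcopies.get(src, src))
                    for dst, src in oldcopies.items())
    # composed == {'b.txt': 'a.txt', 'c.txt': 'a.txt'}

A rename chain a.txt -> b.txt -> c.txt therefore collapses into a single copy record pointing back at a.txt.
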
--- a/hgext/zeroconf/Zeroconf.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/hgext/zeroconf/Zeroconf.py	Mon Feb 04 20:35:21 2019 +0300
@@ -84,7 +84,6 @@
 import itertools
 import select
 import socket
-import string
 import struct
 import threading
 import time
@@ -106,7 +105,7 @@
 
 # Some DNS constants
 
-_MDNS_ADDR = '224.0.0.251'
+_MDNS_ADDR = r'224.0.0.251'
 _MDNS_PORT = 5353
 _DNS_PORT = 53
 _DNS_TTL = 60 * 60 # one hour default TTL
@@ -221,7 +220,7 @@
     """A DNS entry"""
 
     def __init__(self, name, type, clazz):
-        self.key = string.lower(name)
+        self.key = name.lower()
         self.name = name
         self.type = type
         self.clazz = clazz & _CLASS_MASK
@@ -620,7 +619,7 @@
         first = off
 
         while True:
-            len = ord(self.data[off])
+            len = ord(self.data[off:off + 1])
             off += 1
             if len == 0:
                 break
@@ -631,7 +630,7 @@
             elif t == 0xC0:
                 if next < 0:
                     next = off + 1
-                off = ((len & 0x3F) << 8) | ord(self.data[off])
+                off = ((len & 0x3F) << 8) | ord(self.data[off:off + 1])
                 if off >= first:
                     raise BadDomainNameCircular(off)
                 first = off
@@ -938,7 +937,6 @@
         self.zeroconf.engine.addReader(self, self.zeroconf.socket)
 
     def handle_read(self):
-        data = addr = port = None
         sock = self.zeroconf.socket
         try:
             data, (addr, port) = sock.recvfrom(_MAX_MSG_ABSOLUTE)
@@ -1230,7 +1228,6 @@
         delay = _LISTENER_TIME
         next = now + delay
         last = now + timeout
-        result = 0
         try:
             zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY,
                                                    _CLASS_IN))
@@ -1335,7 +1332,7 @@
             # SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it
             pass
         self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
-            socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
+            socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0'))
 
         self.listeners = []
         self.browsers = []
@@ -1659,7 +1656,7 @@
             self.engine.notify()
             self.unregisterAllServices()
             self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
-                socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
+                socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0'))
             self.socket.close()
 
 # Test a few module features, including service registration, service
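
The ord(self.data[off:off + 1]) changes above are Python 3 porting fixes: indexing a bytes object yields an int on Python 3 but a one-character string on Python 2, while a one-byte slice yields bytes on both, and ord() accepts either. A tiny demonstration:

    data = b'\x05abc'
    # Python 2: data[0] == '\x05', so ord(data[0]) == 5
    # Python 3: data[0] == 5 already, and ord(data[0]) raises TypeError
    assert ord(data[0:1]) == 5       # works identically on both
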
--- a/mercurial/bdiff.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/bdiff.c	Mon Feb 04 20:35:21 2019 +0300
@@ -35,15 +35,19 @@
 
 	/* count the lines */
 	i = 1; /* extra line for sentinel */
-	for (p = a; p < plast; p++)
-		if (*p == '\n')
+	for (p = a; p < plast; p++) {
+		if (*p == '\n') {
 			i++;
-	if (p == plast)
+		}
+	}
+	if (p == plast) {
 		i++;
+	}
 
 	*lr = l = (struct bdiff_line *)calloc(i, sizeof(struct bdiff_line));
-	if (!l)
+	if (!l) {
 		return -1;
+	}
 
 	/* build the line array and calculate hashes */
 	hash = 0;
@@ -90,18 +94,21 @@
 	struct pos *h = NULL;
 
 	/* build a hash table of the next highest power of 2 */
-	while (buckets < bn + 1)
+	while (buckets < bn + 1) {
 		buckets *= 2;
+	}
 
 	/* try to allocate a large hash table to avoid collisions */
 	for (scale = 4; scale; scale /= 2) {
 		h = (struct pos *)calloc(buckets, scale * sizeof(struct pos));
-		if (h)
+		if (h) {
 			break;
+		}
 	}
 
-	if (!h)
+	if (!h) {
 		return 0;
+	}
 
 	buckets = buckets * scale - 1;
 
@@ -115,9 +122,11 @@
 	for (i = 0; i < bn; i++) {
 		/* find the equivalence class */
 		for (j = b[i].hash & buckets; h[j].pos != -1;
-		     j = (j + 1) & buckets)
-			if (!cmp(b + i, b + h[j].pos))
+		     j = (j + 1) & buckets) {
+			if (!cmp(b + i, b + h[j].pos)) {
 				break;
+			}
+		}
 
 		/* add to the head of the equivalence class */
 		b[i].n = h[j].pos;
@@ -133,15 +142,18 @@
 	for (i = 0; i < an; i++) {
 		/* find the equivalence class */
 		for (j = a[i].hash & buckets; h[j].pos != -1;
-		     j = (j + 1) & buckets)
-			if (!cmp(a + i, b + h[j].pos))
+		     j = (j + 1) & buckets) {
+			if (!cmp(a + i, b + h[j].pos)) {
 				break;
+			}
+		}
 
 		a[i].e = j; /* use equivalence class for quick compare */
-		if (h[j].len <= t)
+		if (h[j].len <= t) {
 			a[i].n = h[j].pos; /* point to head of match list */
-		else
+		} else {
 			a[i].n = -1; /* too popular */
+		}
 	}
 
 	/* discard hash tables */
@@ -158,16 +170,18 @@
 	/* window our search on large regions to better bound
 	   worst-case performance. by choosing a window at the end, we
 	   reduce skipping overhead on the b chains. */
-	if (a2 - a1 > 30000)
+	if (a2 - a1 > 30000) {
 		a1 = a2 - 30000;
+	}
 
 	half = (a1 + a2 - 1) / 2;
 	bhalf = (b1 + b2 - 1) / 2;
 
 	for (i = a1; i < a2; i++) {
 		/* skip all lines in b after the current block */
-		for (j = a[i].n; j >= b2; j = b[j].n)
+		for (j = a[i].n; j >= b2; j = b[j].n) {
 			;
+		}
 
 		/* loop through all lines match a[i] in b */
 		for (; j >= b1; j = b[j].n) {
@@ -179,8 +193,9 @@
 					break;
 				}
 				/* previous line mismatch? */
-				if (a[i - k].e != b[j - k].e)
+				if (a[i - k].e != b[j - k].e) {
 					break;
+				}
 			}
 
 			pos[j].pos = i;
@@ -212,8 +227,9 @@
 	}
 
 	/* expand match to include subsequent popular lines */
-	while (mi + mk < a2 && mj + mk < b2 && a[mi + mk].e == b[mj + mk].e)
+	while (mi + mk < a2 && mj + mk < b2 && a[mi + mk].e == b[mj + mk].e) {
 		mk++;
+	}
 
 	*omi = mi;
 	*omj = mj;
@@ -230,18 +246,21 @@
 	while (1) {
 		/* find the longest match in this chunk */
 		k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j);
-		if (!k)
+		if (!k) {
 			return l;
+		}
 
 		/* and recurse on the remaining chunks on either side */
 		l = recurse(a, b, pos, a1, i, b1, j, l);
-		if (!l)
+		if (!l) {
 			return NULL;
+		}
 
 		l->next =
 		    (struct bdiff_hunk *)malloc(sizeof(struct bdiff_hunk));
-		if (!l->next)
+		if (!l->next) {
 			return NULL;
+		}
 
 		l = l->next;
 		l->a1 = i;
@@ -271,14 +290,16 @@
 		/* generate the matching block list */
 
 		curr = recurse(a, b, pos, 0, an, 0, bn, base);
-		if (!curr)
+		if (!curr) {
 			return -1;
+		}
 
 		/* sentinel end hunk */
 		curr->next =
 		    (struct bdiff_hunk *)malloc(sizeof(struct bdiff_hunk));
-		if (!curr->next)
+		if (!curr->next) {
 			return -1;
+		}
 		curr = curr->next;
 		curr->a1 = curr->a2 = an;
 		curr->b1 = curr->b2 = bn;
@@ -291,10 +312,11 @@
 	for (curr = base->next; curr; curr = curr->next) {
 		struct bdiff_hunk *next = curr->next;
 
-		if (!next)
+		if (!next) {
 			break;
+		}
 
-		if (curr->a2 == next->a1 || curr->b2 == next->b1)
+		if (curr->a2 == next->a1 || curr->b2 == next->b1) {
 			while (curr->a2 < an && curr->b2 < bn &&
 			       next->a1 < next->a2 && next->b1 < next->b2 &&
 			       !cmp(a + curr->a2, b + curr->b2)) {
@@ -303,10 +325,12 @@
 				curr->b2++;
 				next->b1++;
 			}
+		}
 	}
 
-	for (curr = base->next; curr; curr = curr->next)
+	for (curr = base->next; curr; curr = curr->next) {
 		count++;
+	}
 	return count;
 }
 
--- a/mercurial/bookmarks.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/bookmarks.py	Mon Feb 04 20:35:21 2019 +0300
@@ -306,7 +306,6 @@
     itself as we commit. This function returns the name of that bookmark.
     It is stored in .hg/bookmarks.current
     """
-    mark = None
     try:
         file = repo.vfs('bookmarks.current')
     except IOError as inst:
--- a/mercurial/branchmap.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/branchmap.py	Mon Feb 04 20:35:21 2019 +0300
@@ -48,9 +48,9 @@
         filteredhash = None
         if len(cachekey) > 2:
             filteredhash = bin(cachekey[2])
-        partial = branchcache(tipnode=last, tiprev=lrev,
+        bcache = branchcache(tipnode=last, tiprev=lrev,
                               filteredhash=filteredhash)
-        if not partial.validfor(repo):
+        if not bcache.validfor(repo):
             # invalidate the cache
             raise ValueError(r'tip differs')
         cl = repo.changelog
@@ -66,9 +66,9 @@
             if not cl.hasnode(node):
                 raise ValueError(
                     r'node %s does not exist' % pycompat.sysstr(hex(node)))
-            partial.setdefault(label, []).append(node)
+            bcache.setdefault(label, []).append(node)
             if state == 'c':
-                partial._closednodes.add(node)
+                bcache._closednodes.add(node)
 
     except (IOError, OSError):
         return None
@@ -80,13 +80,13 @@
                 msg += ' (%s)' % repo.filtername
             msg += ': %s\n'
             repo.ui.debug(msg % pycompat.bytestr(inst))
-        partial = None
+        bcache = None
 
     finally:
         if f:
             f.close()
 
-    return partial
+    return bcache
 
 ### Nearest subset relation
 # Nearest subset of filter X is a filter Y so that:
@@ -103,27 +103,27 @@
 def updatecache(repo):
     cl = repo.changelog
     filtername = repo.filtername
-    partial = repo._branchcaches.get(filtername)
+    bcache = repo._branchcaches.get(filtername)
 
     revs = []
-    if partial is None or not partial.validfor(repo):
-        partial = read(repo)
-        if partial is None:
+    if bcache is None or not bcache.validfor(repo):
+        bcache = read(repo)
+        if bcache is None:
             subsetname = subsettable.get(filtername)
             if subsetname is None:
-                partial = branchcache()
+                bcache = branchcache()
             else:
                 subset = repo.filtered(subsetname)
-                partial = subset.branchmap().copy()
+                bcache = subset.branchmap().copy()
                 extrarevs = subset.changelog.filteredrevs - cl.filteredrevs
-                revs.extend(r for  r in extrarevs if r <= partial.tiprev)
-    revs.extend(cl.revs(start=partial.tiprev + 1))
+                revs.extend(r for  r in extrarevs if r <= bcache.tiprev)
+    revs.extend(cl.revs(start=bcache.tiprev + 1))
     if revs:
-        partial.update(repo, revs)
-        partial.write(repo)
+        bcache.update(repo, revs)
+        bcache.write(repo)
 
-    assert partial.validfor(repo), filtername
-    repo._branchcaches[repo.filtername] = partial
+    assert bcache.validfor(repo), filtername
+    repo._branchcaches[repo.filtername] = bcache
 
 def replacecache(repo, bm):
     """Replace the branchmap cache for a repo with a branch mapping.
--- a/mercurial/cext/base85.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/cext/base85.c	Mon Feb 04 20:35:21 2019 +0300
@@ -24,8 +24,9 @@
 	unsigned i;
 
 	memset(b85dec, 0, sizeof(b85dec));
-	for (i = 0; i < sizeof(b85chars); i++)
+	for (i = 0; i < sizeof(b85chars); i++) {
 		b85dec[(int)(b85chars[i])] = i + 1;
+	}
 }
 
 static PyObject *b85encode(PyObject *self, PyObject *args)
@@ -37,19 +38,22 @@
 	unsigned int acc, val, ch;
 	int pad = 0;
 
-	if (!PyArg_ParseTuple(args, PY23("s#|i", "y#|i"), &text, &len, &pad))
+	if (!PyArg_ParseTuple(args, PY23("s#|i", "y#|i"), &text, &len, &pad)) {
 		return NULL;
+	}
 
-	if (pad)
+	if (pad) {
 		olen = ((len + 3) / 4 * 5) - 3;
-	else {
+	} else {
 		olen = len % 4;
-		if (olen)
+		if (olen) {
 			olen++;
+		}
 		olen += len / 4 * 5;
 	}
-	if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3)))
+	if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3))) {
 		return NULL;
+	}
 
 	dst = PyBytes_AsString(out);
 
@@ -58,8 +62,9 @@
 		for (i = 24; i >= 0; i -= 8) {
 			ch = *text++;
 			acc |= ch << i;
-			if (--len == 0)
+			if (--len == 0) {
 				break;
+			}
 		}
 		for (i = 4; i >= 0; i--) {
 			val = acc % 85;
@@ -69,8 +74,9 @@
 		dst += 5;
 	}
 
-	if (!pad)
+	if (!pad) {
 		_PyBytes_Resize(&out, olen);
+	}
 
 	return out;
 }
@@ -84,15 +90,18 @@
 	int c;
 	unsigned int acc;
 
-	if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &len))
+	if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &len)) {
 		return NULL;
+	}
 
 	olen = len / 5 * 4;
 	i = len % 5;
-	if (i)
+	if (i) {
 		olen += i - 1;
-	if (!(out = PyBytes_FromStringAndSize(NULL, olen)))
+	}
+	if (!(out = PyBytes_FromStringAndSize(NULL, olen))) {
 		return NULL;
+	}
 
 	dst = PyBytes_AsString(out);
 
@@ -100,8 +109,9 @@
 	while (i < len) {
 		acc = 0;
 		cap = len - i - 1;
-		if (cap > 4)
+		if (cap > 4) {
 			cap = 4;
+		}
 		for (j = 0; j < cap; i++, j++) {
 			c = b85dec[(int)*text++] - 1;
 			if (c < 0) {
@@ -136,10 +146,12 @@
 
 		cap = olen < 4 ? olen : 4;
 		olen -= cap;
-		for (j = 0; j < 4 - cap; j++)
+		for (j = 0; j < 4 - cap; j++) {
 			acc *= 85;
-		if (cap && cap < 4)
+		}
+		if (cap && cap < 4) {
 			acc += 0xffffff >> (cap - 1) * 8;
+		}
 		for (j = 0; j < cap; j++) {
 			acc = (acc << 8) | (acc >> 24);
 			*dst++ = acc;
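
b85encode() above turns every 4 input bytes into 5 output characters, with a trailing group of n bytes becoming n + 1 characters when padding is not requested. The standard library's base64.b85encode uses the same git-style 85-character alphabet (its pad keyword behaves slightly differently from this module's), so the size arithmetic is easy to check from Python:

    import base64

    assert len(base64.b85encode(b'\x00\x01\x02\x03\x04\x05\x06\x07')) == 10
    assert len(base64.b85encode(b'\x00\x01\x02')) == 4   # 3 bytes -> 3 + 1 chars
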
--- a/mercurial/cext/bdiff.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/cext/bdiff.c	Mon Feb 04 20:35:21 2019 +0300
@@ -29,22 +29,26 @@
 
 	l.next = NULL;
 
-	if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
+	if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb)) {
 		return NULL;
+	}
 
 	an = bdiff_splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a);
 	bn = bdiff_splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b);
 
-	if (!a || !b)
+	if (!a || !b) {
 		goto nomem;
+	}
 
 	count = bdiff_diff(a, an, b, bn, &l);
-	if (count < 0)
+	if (count < 0) {
 		goto nomem;
+	}
 
 	rl = PyList_New(count);
-	if (!rl)
+	if (!rl) {
 		goto nomem;
+	}
 
 	for (h = l.next; h; h = h->next) {
 		m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
@@ -72,8 +76,10 @@
 
 	l.next = NULL;
 
-	if (!PyArg_ParseTuple(args, PY23("s*s*:bdiff", "y*y*:bdiff"), &ba, &bb))
+	if (!PyArg_ParseTuple(args, PY23("s*s*:bdiff", "y*y*:bdiff"), &ba,
+	                      &bb)) {
 		return NULL;
+	}
 
 	if (!PyBuffer_IsContiguous(&ba, 'C') || ba.ndim > 1) {
 		PyErr_SetString(PyExc_ValueError, "bdiff input not contiguous");
@@ -98,8 +104,9 @@
 	lmax = la > lb ? lb : la;
 	for (ia = ba.buf, ib = bb.buf; li < lmax && *ia == *ib;
 	     ++li, ++ia, ++ib) {
-		if (*ia == '\n')
+		if (*ia == '\n') {
 			lcommon = li + 1;
+		}
 	}
 	/* we can almost add: if (li == lmax) lcommon = li; */
 
@@ -119,8 +126,9 @@
 	/* calculate length of output */
 	la = lb = 0;
 	for (h = l.next; h; h = h->next) {
-		if (h->a1 != la || h->b1 != lb)
+		if (h->a1 != la || h->b1 != lb) {
 			len += 12 + bl[h->b1].l - bl[lb].l;
+		}
 		la = h->a2;
 		lb = h->b2;
 	}
@@ -129,8 +137,9 @@
 
 	result = PyBytes_FromStringAndSize(NULL, len);
 
-	if (!result)
+	if (!result) {
 		goto cleanup;
+	}
 
 	/* build binary patch */
 	rb = PyBytes_AsString(result);
@@ -151,8 +160,9 @@
 	}
 
 cleanup:
-	if (_save)
+	if (_save) {
 		PyEval_RestoreThread(_save);
+	}
 	PyBuffer_Release(&ba);
 	PyBuffer_Release(&bb);
 	free(al);
@@ -174,20 +184,23 @@
 	Py_ssize_t i, rlen, wlen = 0;
 	char *w;
 
-	if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
+	if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws)) {
 		return NULL;
+	}
 	r = PyBytes_AsString(s);
 	rlen = PyBytes_Size(s);
 
 	w = (char *)PyMem_Malloc(rlen ? rlen : 1);
-	if (!w)
+	if (!w) {
 		goto nomem;
+	}
 
 	for (i = 0; i != rlen; i++) {
 		c = r[i];
 		if (c == ' ' || c == '\t' || c == '\r') {
-			if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
+			if (!allws && (wlen == 0 || w[wlen - 1] != ' ')) {
 				w[wlen++] = ' ';
+			}
 		} else if (c == '\n' && !allws && wlen > 0 &&
 		           w[wlen - 1] == ' ') {
 			w[wlen - 1] = '\n';
@@ -207,8 +220,9 @@
                           const char *source, Py_ssize_t len)
 {
 	PyObject *sliced = PyBytes_FromStringAndSize(source, len);
-	if (sliced == NULL)
+	if (sliced == NULL) {
 		return false;
+	}
 	PyList_SET_ITEM(list, destidx, sliced);
 	return true;
 }
@@ -232,19 +246,22 @@
 			++nelts;
 		}
 	}
-	if ((result = PyList_New(nelts + 1)) == NULL)
+	if ((result = PyList_New(nelts + 1)) == NULL) {
 		goto abort;
+	}
 	nelts = 0;
 	for (i = 0; i < size - 1; ++i) {
 		if (text[i] == '\n') {
 			if (!sliceintolist(result, nelts++, text + start,
-			                   i - start + 1))
+			                   i - start + 1)) {
 				goto abort;
+			}
 			start = i + 1;
 		}
 	}
-	if (!sliceintolist(result, nelts++, text + start, size - start))
+	if (!sliceintolist(result, nelts++, text + start, size - start)) {
 		goto abort;
+	}
 	return result;
 abort:
 	Py_XDECREF(result);
@@ -257,8 +274,9 @@
 	PyObject *rl = (PyObject *)priv;
 	PyObject *m = Py_BuildValue("LLLL", a1, a2, b1, b2);
 	int r;
-	if (!m)
+	if (!m) {
 		return -1;
+	}
 	r = PyList_Append(rl, m);
 	Py_DECREF(m);
 	return r;
@@ -282,15 +300,17 @@
 	};
 
 	if (!PyArg_ParseTuple(args, PY23("s#s#", "y#y#"), &a.ptr, &la, &b.ptr,
-	                      &lb))
+	                      &lb)) {
 		return NULL;
+	}
 
 	a.size = la;
 	b.size = lb;
 
 	rl = PyList_New(0);
-	if (!rl)
+	if (!rl) {
 		return PyErr_NoMemory();
+	}
 
 	ecb.priv = rl;
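
The blocks() entry point above (the function building the list of "iiii" tuples) returns matching regions as (a1, a2, b1, b2) line ranges, terminated by a sentinel block. A sketch of how that result is typically consumed; the import path is an assumption for illustration, since in-tree callers normally reach this module through mercurial.mdiff or the policy importer:

    from mercurial.cext import bdiff   # assumed path to this C module

    old = b'a\nb\nc\n'
    new = b'a\nx\nc\n'
    for a1, a2, b1, b2 in bdiff.blocks(old, new):
        # lines old[a1:a2] are identical to lines new[b1:b2]
        print(a1, a2, b1, b2)
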
 
--- a/mercurial/cext/charencode.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/cext/charencode.c	Mon Feb 04 20:35:21 2019 +0300
@@ -114,8 +114,9 @@
 
 	ret = PyBytes_FromStringAndSize(NULL, len / 2);
 
-	if (!ret)
+	if (!ret) {
 		return NULL;
+	}
 
 	d = PyBytes_AsString(ret);
 
@@ -133,21 +134,24 @@
 	const char *buf;
 	Py_ssize_t i, len;
 	if (!PyArg_ParseTuple(args, PY23("s#:isasciistr", "y#:isasciistr"),
-	                      &buf, &len))
+	                      &buf, &len)) {
 		return NULL;
+	}
 	i = 0;
 	/* char array in PyStringObject should be at least 4-byte aligned */
 	if (((uintptr_t)buf & 3) == 0) {
 		const uint32_t *p = (const uint32_t *)buf;
 		for (; i < len / 4; i++) {
-			if (p[i] & 0x80808080U)
+			if (p[i] & 0x80808080U) {
 				Py_RETURN_FALSE;
+			}
 		}
 		i *= 4;
 	}
 	for (; i < len; i++) {
-		if (buf[i] & 0x80)
+		if (buf[i] & 0x80) {
 			Py_RETURN_FALSE;
+		}
 	}
 	Py_RETURN_TRUE;
 }
@@ -164,8 +168,9 @@
 	len = PyBytes_GET_SIZE(str_obj);
 
 	newobj = PyBytes_FromStringAndSize(NULL, len);
-	if (!newobj)
+	if (!newobj) {
 		goto quit;
+	}
 
 	newstr = PyBytes_AS_STRING(newobj);
 
@@ -197,16 +202,18 @@
 PyObject *asciilower(PyObject *self, PyObject *args)
 {
 	PyObject *str_obj;
-	if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj))
+	if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj)) {
 		return NULL;
+	}
 	return _asciitransform(str_obj, lowertable, NULL);
 }
 
 PyObject *asciiupper(PyObject *self, PyObject *args)
 {
 	PyObject *str_obj;
-	if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj))
+	if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj)) {
 		return NULL;
+	}
 	return _asciitransform(str_obj, uppertable, NULL);
 }
 
@@ -222,8 +229,9 @@
 
 	if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap", &PyDict_Type,
 	                      &dmap, &PyInt_Type, &spec_obj, &PyFunction_Type,
-	                      &normcase_fallback))
+	                      &normcase_fallback)) {
 		goto quit;
+	}
 
 	spec = (int)PyInt_AS_LONG(spec_obj);
 	switch (spec) {
@@ -244,8 +252,9 @@
 	/* Add some more entries to deal with additions outside this
 	   function. */
 	file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11);
-	if (file_foldmap == NULL)
+	if (file_foldmap == NULL) {
 		goto quit;
+	}
 
 	while (PyDict_Next(dmap, &pos, &k, &v)) {
 		if (!dirstate_tuple_check(v)) {
@@ -265,8 +274,9 @@
 				    normcase_fallback, k, NULL);
 			}
 
-			if (normed == NULL)
+			if (normed == NULL) {
 				goto quit;
+			}
 			if (PyDict_SetItem(file_foldmap, normed, k) == -1) {
 				Py_DECREF(normed);
 				goto quit;
@@ -377,22 +387,25 @@
 	Py_ssize_t origlen, esclen;
 	int paranoid;
 	if (!PyArg_ParseTuple(args, "O!i:jsonescapeu8fast", &PyBytes_Type,
-	                      &origstr, &paranoid))
+	                      &origstr, &paranoid)) {
 		return NULL;
+	}
 
 	origbuf = PyBytes_AS_STRING(origstr);
 	origlen = PyBytes_GET_SIZE(origstr);
 	esclen = jsonescapelen(origbuf, origlen, paranoid);
-	if (esclen < 0)
+	if (esclen < 0) {
 		return NULL; /* unsupported char found or overflow */
+	}
 	if (origlen == esclen) {
 		Py_INCREF(origstr);
 		return origstr;
 	}
 
 	escstr = PyBytes_FromStringAndSize(NULL, esclen);
-	if (!escstr)
+	if (!escstr) {
 		return NULL;
+	}
 	encodejsonescape(PyBytes_AS_STRING(escstr), esclen, origbuf, origlen,
 	                 paranoid);
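
asciilower()/asciiupper() above are table-driven byte transforms that reject any input with the high bit set. A rough pure-Python counterpart of the lowercasing half, written only to illustrate the behaviour (it is not the fallback Mercurial actually ships, and the C code raises a decode error rather than ValueError):

    _lowertable = bytes(bytearray(
        c + 32 if 0x41 <= c <= 0x5a else c for c in range(256)))

    def asciilower(s):
        if any(c & 0x80 for c in bytearray(s)):   # same test as isasciistr()
            raise ValueError('expected an ASCII byte string')
        return bytes(s).translate(_lowertable)
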
 
--- a/mercurial/cext/mpatch.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/cext/mpatch.c	Mon Feb 04 20:35:21 2019 +0300
@@ -55,13 +55,16 @@
 	int r;
 
 	PyObject *tmp = PyList_GetItem((PyObject *)bins, pos);
-	if (!tmp)
+	if (!tmp) {
 		return NULL;
-	if (PyObject_GetBuffer(tmp, &buffer, PyBUF_CONTIG_RO))
+	}
+	if (PyObject_GetBuffer(tmp, &buffer, PyBUF_CONTIG_RO)) {
 		return NULL;
+	}
 	if ((r = mpatch_decode(buffer.buf, buffer.len, &res)) < 0) {
-		if (!PyErr_Occurred())
+		if (!PyErr_Occurred()) {
 			setpyerr(r);
+		}
 		res = NULL;
 	}
 
@@ -78,8 +81,9 @@
 	char *out;
 	Py_ssize_t len, outlen;
 
-	if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins))
+	if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins)) {
 		return NULL;
+	}
 
 	len = PyList_Size(bins);
 	if (!len) {
@@ -94,8 +98,9 @@
 
 	patch = mpatch_fold(bins, cpygetitem, 0, len);
 	if (!patch) { /* error already set or memory error */
-		if (!PyErr_Occurred())
+		if (!PyErr_Occurred()) {
 			PyErr_NoMemory();
+		}
 		result = NULL;
 		goto cleanup;
 	}
@@ -126,8 +131,9 @@
 cleanup:
 	mpatch_lfree(patch);
 	PyBuffer_Release(&buffer);
-	if (!result && !PyErr_Occurred())
+	if (!result && !PyErr_Occurred()) {
 		setpyerr(r);
+	}
 	return result;
 }
 
@@ -138,15 +144,18 @@
 	Py_ssize_t patchlen;
 	char *bin;
 
-	if (!PyArg_ParseTuple(args, PY23("ls#", "ly#"), &orig, &bin, &patchlen))
+	if (!PyArg_ParseTuple(args, PY23("ls#", "ly#"), &orig, &bin,
+	                      &patchlen)) {
 		return NULL;
+	}
 
 	while (pos >= 0 && pos < patchlen) {
 		start = getbe32(bin + pos);
 		end = getbe32(bin + pos + 4);
 		len = getbe32(bin + pos + 8);
-		if (start > end)
+		if (start > end) {
 			break; /* sanity check */
+		}
 		pos += 12 + len;
 		outlen += start - last;
 		last = end;
@@ -154,9 +163,10 @@
 	}
 
 	if (pos != patchlen) {
-		if (!PyErr_Occurred())
+		if (!PyErr_Occurred()) {
 			PyErr_SetString(mpatch_Error,
 			                "patch cannot be decoded");
+		}
 		return NULL;
 	}
 
--- a/mercurial/cext/parsers.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/cext/parsers.c	Mon Feb 04 20:35:21 2019 +0300
@@ -32,8 +32,9 @@
 {
 	Py_ssize_t expected_size;
 
-	if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size))
+	if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
 		return NULL;
+	}
 
 	return _dict_new_presized(expected_size);
 }
@@ -43,8 +44,9 @@
 {
 	dirstateTupleObject *t =
 	    PyObject_New(dirstateTupleObject, &dirstateTupleType);
-	if (!t)
+	if (!t) {
 		return NULL;
+	}
 	t->state = state;
 	t->mode = mode;
 	t->size = size;
@@ -60,12 +62,14 @@
 	dirstateTupleObject *t;
 	char state;
 	int size, mode, mtime;
-	if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime))
+	if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
 		return NULL;
+	}
 
 	t = (dirstateTupleObject *)subtype->tp_alloc(subtype, 1);
-	if (!t)
+	if (!t) {
 		return NULL;
+	}
 	t->state = state;
 	t->mode = mode;
 	t->size = size;
@@ -165,8 +169,9 @@
 
 	if (!PyArg_ParseTuple(
 	        args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
-	        &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen))
+	        &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
 		goto quit;
+	}
 
 	len = readlen;
 
@@ -178,8 +183,9 @@
 	}
 
 	parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, 20, str + 20, 20);
-	if (!parents)
+	if (!parents) {
 		goto quit;
+	}
 
 	/* read filenames */
 	while (pos >= 40 && pos < len) {
@@ -212,13 +218,16 @@
 			    cpos + 1, flen - (cpos - cur) - 1);
 			if (!fname || !cname ||
 			    PyDict_SetItem(cmap, fname, cname) == -1 ||
-			    PyDict_SetItem(dmap, fname, entry) == -1)
+			    PyDict_SetItem(dmap, fname, entry) == -1) {
 				goto quit;
+			}
 			Py_DECREF(cname);
 		} else {
 			fname = PyBytes_FromStringAndSize(cur, flen);
-			if (!fname || PyDict_SetItem(dmap, fname, entry) == -1)
+			if (!fname ||
+			    PyDict_SetItem(dmap, fname, entry) == -1) {
 				goto quit;
+			}
 		}
 		Py_DECREF(fname);
 		Py_DECREF(entry);
@@ -245,16 +254,20 @@
 	PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
 	Py_ssize_t pos;
 
-	if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type, &dmap))
+	if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
+	                      &dmap)) {
 		goto bail;
+	}
 
 	nonnset = PySet_New(NULL);
-	if (nonnset == NULL)
+	if (nonnset == NULL) {
 		goto bail;
+	}
 
 	otherpset = PySet_New(NULL);
-	if (otherpset == NULL)
+	if (otherpset == NULL) {
 		goto bail;
+	}
 
 	pos = 0;
 	while (PyDict_Next(dmap, &pos, &fname, &v)) {
@@ -272,15 +285,18 @@
 			}
 		}
 
-		if (t->state == 'n' && t->mtime != -1)
+		if (t->state == 'n' && t->mtime != -1) {
 			continue;
-		if (PySet_Add(nonnset, fname) == -1)
+		}
+		if (PySet_Add(nonnset, fname) == -1) {
 			goto bail;
+		}
 	}
 
 	result = Py_BuildValue("(OO)", nonnset, otherpset);
-	if (result == NULL)
+	if (result == NULL) {
 		goto bail;
+	}
 	Py_DECREF(nonnset);
 	Py_DECREF(otherpset);
 	return result;
@@ -304,8 +320,10 @@
 	int now;
 
 	if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
-	                      &PyDict_Type, &copymap, &PyTuple_Type, &pl, &now))
+	                      &PyDict_Type, &copymap, &PyTuple_Type, &pl,
+	                      &now)) {
 		return NULL;
+	}
 
 	if (PyTuple_Size(pl) != 2) {
 		PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
@@ -332,8 +350,9 @@
 	}
 
 	packobj = PyBytes_FromStringAndSize(NULL, nbytes);
-	if (packobj == NULL)
+	if (packobj == NULL) {
 		goto bail;
+	}
 
 	p = PyBytes_AS_STRING(packobj);
 
@@ -377,10 +396,12 @@
 			mtime = -1;
 			mtime_unset = (PyObject *)make_dirstate_tuple(
 			    state, mode, size, mtime);
-			if (!mtime_unset)
+			if (!mtime_unset) {
 				goto bail;
-			if (PyDict_SetItem(map, k, mtime_unset) == -1)
+			}
+			if (PyDict_SetItem(map, k, mtime_unset) == -1) {
 				goto bail;
+			}
 			Py_DECREF(mtime_unset);
 			mtime_unset = NULL;
 		}
@@ -664,8 +685,9 @@
 	manifest_module_init(mod);
 	revlog_module_init(mod);
 
-	if (PyType_Ready(&dirstateTupleType) < 0)
+	if (PyType_Ready(&dirstateTupleType) < 0) {
 		return;
+	}
 	Py_INCREF(&dirstateTupleType);
 	PyModule_AddObject(mod, "dirstatetuple",
 	                   (PyObject *)&dirstateTupleType);
@@ -675,12 +697,14 @@
 {
 	PyObject *sys = PyImport_ImportModule("sys"), *ver;
 	long hexversion;
-	if (!sys)
+	if (!sys) {
 		return -1;
+	}
 	ver = PyObject_GetAttrString(sys, "hexversion");
 	Py_DECREF(sys);
-	if (!ver)
+	if (!ver) {
 		return -1;
+	}
 	hexversion = PyInt_AsLong(ver);
 	Py_DECREF(ver);
 	/* sys.hexversion is a 32-bit number by default, so the -1 case
@@ -720,8 +744,9 @@
 {
 	PyObject *mod;
 
-	if (check_python_version() == -1)
+	if (check_python_version() == -1) {
 		return;
+	}
 	mod = Py_InitModule3("parsers", methods, parsers_doc);
 	module_init(mod);
 }
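
parse_dirstate() above splits an entry's name on an embedded NUL: the bytes before it are the tracked filename, the bytes after it are the copy source. A minimal illustration of that convention (the names and the state/mode/size/mtime values are placeholders):

    name = b'renamed.txt\x00original.txt'          # as stored in the dirstate
    fname, sep, cname = name.partition(b'\x00')
    dmap = {fname: ('n', 0o644, 12, 0)}            # state, mode, size, mtime
    copymap = {fname: cname} if sep else {}
    # dmap    -> {b'renamed.txt': ('n', 420, 12, 0)}
    # copymap -> {b'renamed.txt': b'original.txt'}
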
--- a/mercurial/cext/pathencode.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/cext/pathencode.c	Mon Feb 04 20:35:21 2019 +0300
@@ -126,8 +126,9 @@
 			if (src[i] == 'g') {
 				state = DHGDI;
 				charcopy(dest, &destlen, destsize, src[i++]);
-			} else
+			} else {
 				state = DDEFAULT;
+			}
 			break;
 		case DHGDI:
 			if (src[i] == '/') {
@@ -137,8 +138,9 @@
 			state = DDEFAULT;
 			break;
 		case DDEFAULT:
-			if (src[i] == '.')
+			if (src[i] == '.') {
 				state = DDOT;
+			}
 			charcopy(dest, &destlen, destsize, src[i++]);
 			break;
 		}
@@ -153,8 +155,9 @@
 	PyObject *pathobj, *newobj;
 	char *path;
 
-	if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj))
+	if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj)) {
 		return NULL;
+	}
 
 	if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
 		PyErr_SetString(PyExc_TypeError, "expected a string");
@@ -235,15 +238,17 @@
 			if (src[i] == 'u') {
 				state = AU;
 				charcopy(dest, &destlen, destsize, src[i++]);
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case AU:
 			if (src[i] == 'x') {
 				state = THIRD;
 				i++;
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case THIRD:
 			state = DEFAULT;
@@ -262,8 +267,9 @@
 			if (src[i] == 'o') {
 				state = CO;
 				charcopy(dest, &destlen, destsize, src[i++]);
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case CO:
 			if (src[i] == 'm') {
@@ -272,8 +278,9 @@
 			} else if (src[i] == 'n') {
 				state = THIRD;
 				i++;
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case COMLPT:
 			switch (src[i]) {
@@ -314,43 +321,49 @@
 			if (src[i] == 'p') {
 				state = LP;
 				charcopy(dest, &destlen, destsize, src[i++]);
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case LP:
 			if (src[i] == 't') {
 				state = COMLPT;
 				i++;
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case N:
 			if (src[i] == 'u') {
 				state = NU;
 				charcopy(dest, &destlen, destsize, src[i++]);
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case NU:
 			if (src[i] == 'l') {
 				state = THIRD;
 				i++;
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case P:
 			if (src[i] == 'r') {
 				state = PR;
 				charcopy(dest, &destlen, destsize, src[i++]);
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case PR:
 			if (src[i] == 'n') {
 				state = THIRD;
 				i++;
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case LDOT:
 			switch (src[i]) {
@@ -397,18 +410,21 @@
 			if (src[i] == 'g') {
 				state = HGDI;
 				charcopy(dest, &destlen, destsize, src[i++]);
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case HGDI:
 			if (src[i] == '/') {
 				state = START;
-				if (encodedir)
+				if (encodedir) {
 					memcopy(dest, &destlen, destsize, ".hg",
 					        3);
+				}
 				charcopy(dest, &destlen, destsize, src[i++]);
-			} else
+			} else {
 				state = DEFAULT;
+			}
 			break;
 		case SPACE:
 			switch (src[i]) {
@@ -427,8 +443,9 @@
 		case DEFAULT:
 			while (inset(onebyte, src[i])) {
 				charcopy(dest, &destlen, destsize, src[i++]);
-				if (i == len)
+				if (i == len) {
 					goto done;
+				}
 			}
 			switch (src[i]) {
 			case '.':
@@ -456,9 +473,10 @@
 					charcopy(dest, &destlen, destsize, '_');
 					charcopy(dest, &destlen, destsize,
 					         c == '_' ? '_' : c + 32);
-				} else
+				} else {
 					escape3(dest, &destlen, destsize,
 					        src[i++]);
+				}
 				break;
 			}
 			break;
@@ -498,12 +516,13 @@
 	Py_ssize_t i, destlen = 0;
 
 	for (i = 0; i < len; i++) {
-		if (inset(onebyte, src[i]))
+		if (inset(onebyte, src[i])) {
 			charcopy(dest, &destlen, destsize, src[i]);
-		else if (inset(lower, src[i]))
+		} else if (inset(lower, src[i])) {
 			charcopy(dest, &destlen, destsize, src[i] + 32);
-		else
+		} else {
 			escape3(dest, &destlen, destsize, src[i]);
+		}
 	}
 
 	return destlen;
@@ -516,13 +535,15 @@
 	PyObject *ret;
 
 	if (!PyArg_ParseTuple(args, PY23("s#:lowerencode", "y#:lowerencode"),
-	                      &path, &len))
+	                      &path, &len)) {
 		return NULL;
+	}
 
 	newlen = _lowerencode(NULL, 0, path, len);
 	ret = PyBytes_FromStringAndSize(NULL, newlen);
-	if (ret)
+	if (ret) {
 		_lowerencode(PyBytes_AS_STRING(ret), newlen, path, len);
+	}
 
 	return ret;
 }
@@ -551,8 +572,9 @@
 	Py_ssize_t destsize, destlen = 0, slop, used;
 
 	while (lastslash >= 0 && src[lastslash] != '/') {
-		if (src[lastslash] == '.' && lastdot == -1)
+		if (src[lastslash] == '.' && lastdot == -1) {
 			lastdot = lastslash;
+		}
 		lastslash--;
 	}
 
@@ -570,12 +592,14 @@
 	/* If src contains a suffix, we will append it to the end of
 	   the new string, so make room. */
 	destsize = 120;
-	if (lastdot >= 0)
+	if (lastdot >= 0) {
 		destsize += len - lastdot - 1;
+	}
 
 	ret = PyBytes_FromStringAndSize(NULL, destsize);
-	if (ret == NULL)
+	if (ret == NULL) {
 		return NULL;
+	}
 
 	dest = PyBytes_AS_STRING(ret);
 	memcopy(dest, &destlen, destsize, "dh/", 3);
@@ -587,30 +611,36 @@
 			char d = dest[destlen - 1];
 			/* After truncation, a directory name may end
 			   in a space or dot, which are unportable. */
-			if (d == '.' || d == ' ')
+			if (d == '.' || d == ' ') {
 				dest[destlen - 1] = '_';
-			/* The + 3 is to account for "dh/" in the beginning */
-			if (destlen > maxshortdirslen + 3)
+				/* The + 3 is to account for "dh/" in the
+				 * beginning */
+			}
+			if (destlen > maxshortdirslen + 3) {
 				break;
+			}
 			charcopy(dest, &destlen, destsize, src[i]);
 			p = -1;
-		} else if (p < dirprefixlen)
+		} else if (p < dirprefixlen) {
 			charcopy(dest, &destlen, destsize, src[i]);
+		}
 	}
 
 	/* Rewind to just before the last slash copied. */
-	if (destlen > maxshortdirslen + 3)
+	if (destlen > maxshortdirslen + 3) {
 		do {
 			destlen--;
 		} while (destlen > 0 && dest[destlen] != '/');
+	}
 
 	if (destlen > 3) {
 		if (lastslash > 0) {
 			char d = dest[destlen - 1];
 			/* The last directory component may be
 			   truncated, so make it safe. */
-			if (d == '.' || d == ' ')
+			if (d == '.' || d == ' ') {
 				dest[destlen - 1] = '_';
+			}
 		}
 
 		charcopy(dest, &destlen, destsize, '/');
@@ -620,27 +650,32 @@
 	   depends on the number of bytes left after accounting for
 	   hash and suffix. */
 	used = destlen + 40;
-	if (lastdot >= 0)
+	if (lastdot >= 0) {
 		used += len - lastdot - 1;
+	}
 	slop = maxstorepathlen - used;
 	if (slop > 0) {
 		Py_ssize_t basenamelen =
 		    lastslash >= 0 ? len - lastslash - 2 : len - 1;
 
-		if (basenamelen > slop)
+		if (basenamelen > slop) {
 			basenamelen = slop;
-		if (basenamelen > 0)
+		}
+		if (basenamelen > 0) {
 			memcopy(dest, &destlen, destsize, &src[lastslash + 1],
 			        basenamelen);
+		}
 	}
 
 	/* Add hash and suffix. */
-	for (i = 0; i < 20; i++)
+	for (i = 0; i < 20; i++) {
 		hexencode(dest, &destlen, destsize, sha[i]);
+	}
 
-	if (lastdot >= 0)
+	if (lastdot >= 0) {
 		memcopy(dest, &destlen, destsize, &src[lastdot],
 		        len - lastdot - 1);
+	}
 
 	assert(PyBytes_Check(ret));
 	Py_SIZE(ret) = destlen;
@@ -677,13 +712,15 @@
 
 	shaobj = PyObject_CallFunction(shafunc, PY23("s#", "y#"), str, len);
 
-	if (shaobj == NULL)
+	if (shaobj == NULL) {
 		return -1;
+	}
 
 	hashobj = PyObject_CallMethod(shaobj, "digest", "");
 	Py_DECREF(shaobj);
-	if (hashobj == NULL)
+	if (hashobj == NULL) {
 		return -1;
+	}
 
 	if (!PyBytes_Check(hashobj) || PyBytes_GET_SIZE(hashobj) != 20) {
 		PyErr_SetString(PyExc_TypeError,
@@ -714,8 +751,9 @@
 	}
 
 	dirlen = _encodedir(dired, baselen, src, len);
-	if (sha1hash(sha, dired, dirlen - 1) == -1)
+	if (sha1hash(sha, dired, dirlen - 1) == -1) {
 		return NULL;
+	}
 	lowerlen = _lowerencode(lowered, baselen, dired + 5, dirlen - 5);
 	auxlen = auxencode(auxed, baselen, lowered, lowerlen);
 	return hashmangle(auxed, auxlen, sha);
@@ -727,18 +765,20 @@
 	PyObject *pathobj, *newobj;
 	char *path;
 
-	if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj))
+	if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj)) {
 		return NULL;
+	}
 
 	if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
 		PyErr_SetString(PyExc_TypeError, "expected a string");
 		return NULL;
 	}
 
-	if (len > maxstorepathlen)
+	if (len > maxstorepathlen) {
 		newlen = maxstorepathlen + 2;
-	else
+	} else {
 		newlen = len ? basicencode(NULL, 0, path, len + 1) : 1;
+	}
 
 	if (newlen <= maxstorepathlen + 1) {
 		if (newlen == len + 1) {
@@ -754,8 +794,9 @@
 			basicencode(PyBytes_AS_STRING(newobj), newlen, path,
 			            len + 1);
 		}
-	} else
+	} else {
 		newobj = hashencode(path, len + 1);
+	}
 
 	return newobj;
 }
--- a/mercurial/changegroup.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/changegroup.py	Mon Feb 04 20:35:21 2019 +0300
@@ -275,7 +275,7 @@
             # because we need to use the top level value (if they exist)
             # in this function.
             srctype = tr.hookargs.setdefault('source', srctype)
-            url = tr.hookargs.setdefault('url', url)
+            tr.hookargs.setdefault('url', url)
             repo.hook('prechangegroup',
                       throw=True, **pycompat.strkwargs(tr.hookargs))
 
@@ -817,13 +817,13 @@
         self._verbosenote(_('uncompressed size of bundle content:\n'))
         size = 0
 
-        clstate, deltas = self._generatechangelog(cl, clnodes)
+        clstate, deltas = self._generatechangelog(cl, clnodes,
+                                                  generate=changelog)
         for delta in deltas:
-            if changelog:
-                for chunk in _revisiondeltatochunks(delta,
-                                                    self._builddeltaheader):
-                    size += len(chunk)
-                    yield chunk
+            for chunk in _revisiondeltatochunks(delta,
+                                                self._builddeltaheader):
+                size += len(chunk)
+                yield chunk
 
         close = closechunk()
         size += len(close)
@@ -917,12 +917,15 @@
         if clnodes:
             repo.hook('outgoing', node=hex(clnodes[0]), source=source)
 
-    def _generatechangelog(self, cl, nodes):
+    def _generatechangelog(self, cl, nodes, generate=True):
         """Generate data for changelog chunks.
 
         Returns a 2-tuple of a dict containing state and an iterable of
         byte chunks. The state will not be fully populated until the
         chunk stream has been fully consumed.
+
+        If generate is False, the state will be fully populated and no chunk
+        stream will be yielded.
         """
         clrevorder = {}
         manifests = {}
@@ -930,6 +933,27 @@
         changedfiles = set()
         clrevtomanifestrev = {}
 
+        state = {
+            'clrevorder': clrevorder,
+            'manifests': manifests,
+            'changedfiles': changedfiles,
+            'clrevtomanifestrev': clrevtomanifestrev,
+        }
+
+        if not (generate or self._ellipses):
+            # sort the nodes in storage order
+            nodes = sorted(nodes, key=cl.rev)
+            for node in nodes:
+                c = cl.changelogrevision(node)
+                clrevorder[node] = len(clrevorder)
+                # record the first changeset introducing this manifest version
+                manifests.setdefault(c.manifest, node)
+                # Record a complete list of potentially-changed files in
+                # this manifest.
+                changedfiles.update(c.files)
+
+            return state, ()
+
         # Callback for the changelog, used to collect changed files and
         # manifest nodes.
         # Returns the linkrev node (identity in the changelog case).
@@ -970,13 +994,6 @@
 
             return x
 
-        state = {
-            'clrevorder': clrevorder,
-            'manifests': manifests,
-            'changedfiles': changedfiles,
-            'clrevtomanifestrev': clrevtomanifestrev,
-        }
-
         gen = deltagroup(
             self._repo, cl, nodes, True, lookupcl,
             self._forcedeltaparentprev,
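
The new generate flag above turns _generatechangelog() into a dual-purpose helper: with generate=False it still walks the nodes (in storage order) to fill in clrevorder, manifests and changedfiles, but returns an empty chunk stream. A condensed view of the contract the caller relies on:

    clstate, deltas = self._generatechangelog(cl, clnodes, generate=False)
    # deltas == ()            -> nothing is emitted for the changelog
    # clstate['manifests']    -> still maps manifest nodes to introducing csets
    # clstate['changedfiles'] -> still lists every potentially changed file
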
--- a/mercurial/cmdutil.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/cmdutil.py	Mon Feb 04 20:35:21 2019 +0300
@@ -607,11 +607,9 @@
     return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
 
 def _graftmsg():
-    # tweakdefaults requires `update` to have a rev hence the `.`
     return _helpmessage('hg graft --continue', 'hg graft --abort')
 
 def _mergemsg():
-    # tweakdefaults requires `update` to have a rev hence the `.`
     return _helpmessage('hg commit', 'hg merge --abort')
 
 def _bisectmsg():
@@ -1251,10 +1249,6 @@
                 else:
                     ui.warn(_('%s: cannot copy - %s\n') %
                             (relsrc, encoding.strtolocal(inst.strerror)))
-                    if rename:
-                        hint = _("('hg rename --after' to record the rename)\n")
-                    else:
-                        hint = _("('hg copy --after' to record the copy)\n")
                     return True # report a failure
 
         if ui.verbose or not exact:
@@ -3200,9 +3194,19 @@
     if node == parent and p2 == nullid:
         normal = repo.dirstate.normal
     for f in actions['undelete'][0]:
-        prntstatusmsg('undelete', f)
-        checkout(f)
-        normal(f)
+        if interactive:
+            choice = repo.ui.promptchoice(
+                _("add back removed file %s (Yn)?$$ &Yes $$ &No") % f)
+            if choice == 0:
+                prntstatusmsg('undelete', f)
+                checkout(f)
+                normal(f)
+            else:
+                excluded_files.append(f)
+        else:
+            prntstatusmsg('undelete', f)
+            checkout(f)
+            normal(f)
 
     copied = copies.pathcopies(repo[parent], ctx)
 
--- a/mercurial/color.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/color.py	Mon Feb 04 20:35:21 2019 +0300
@@ -169,7 +169,7 @@
             ui._terminfoparams[key[9:]] = newval
     try:
         curses.setupterm()
-    except curses.error as e:
+    except curses.error:
         ui._terminfoparams.clear()
         return
 
--- a/mercurial/commands.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/commands.py	Mon Feb 04 20:35:21 2019 +0300
@@ -1102,7 +1102,7 @@
 
     with repo.wlock():
         if opts.get('clean'):
-            label = repo[None].p1().branch()
+            label = repo['.'].branch()
             repo.dirstate.setbranch(label)
             ui.status(_('reset working directory to branch %s\n') % label)
         elif label:
@@ -1672,8 +1672,8 @@
         if not bheads:
             raise error.Abort(_('can only close branch heads'))
         elif opts.get('amend'):
-            if repo[None].parents()[0].p1().branch() != branch and \
-                    repo[None].parents()[0].p2().branch() != branch:
+            if repo['.'].p1().branch() != branch and \
+                    repo['.'].p2().branch() != branch:
                 raise error.Abort(_('can only close branch heads'))
 
     if opts.get('amend'):
@@ -2633,7 +2633,6 @@
         raise error.Abort(_("cannot abort using an old graftstate"))
 
     # changeset from which graft operation was started
-    startctx = None
     if len(newnodes) > 0:
         startctx = repo[newnodes[0]].p1()
     else:
@@ -2849,6 +2848,7 @@
                 for i in pycompat.xrange(blo, bhi):
                     yield ('+', b[i])
 
+    uipathfn = scmutil.getuipathfn(repo)
     def display(fm, fn, ctx, pstates, states):
         rev = scmutil.intrev(ctx)
         if fm.isplain():
@@ -2868,7 +2868,7 @@
             except error.WdirUnsupported:
                 return ctx[fn].isbinary()
 
-        fieldnamemap = {'filename': 'path', 'linenumber': 'lineno'}
+        fieldnamemap = {'linenumber': 'lineno'}
         if diff:
             iter = difflinestates(pstates, states)
         else:
@@ -2876,10 +2876,10 @@
         for change, l in iter:
             fm.startitem()
             fm.context(ctx=ctx)
-            fm.data(node=fm.hexfunc(scmutil.binnode(ctx)))
+            fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn)
+            fm.plain(uipathfn(fn), label='grep.filename')
 
             cols = [
-                ('filename', '%s', fn, True),
                 ('rev', '%d', rev, not plaingrep),
                 ('linenumber', '%d', l.linenum, opts.get('line_number')),
             ]
@@ -2890,13 +2890,11 @@
                 ('date', '%s', fm.formatdate(ctx.date(), datefmt),
                  opts.get('date')),
             ])
-            lastcol = next(
-                name for name, fmt, data, cond in reversed(cols) if cond)
             for name, fmt, data, cond in cols:
+                if cond:
+                    fm.plain(sep, label='grep.sep')
                 field = fieldnamemap.get(name, name)
                 fm.condwrite(cond, field, fmt, data, label='grep.%s' % name)
-                if cond and name != lastcol:
-                    fm.plain(sep, label='grep.sep')
             if not opts.get('files_with_matches'):
                 fm.plain(sep, label='grep.sep')
                 if not opts.get('text') and binary():
@@ -2926,7 +2924,7 @@
             fm.data(matched=False)
         fm.end()
 
-    skip = {}
+    skip = set()
     revfiles = {}
     match = scmutil.match(repo[None], pats, opts)
     found = False
@@ -2945,16 +2943,18 @@
                 fnode = ctx.filenode(fn)
             except error.LookupError:
                 continue
-            try:
-                copied = flog.renamed(fnode)
-            except error.WdirUnsupported:
-                copied = ctx[fn].renamed()
-            copy = follow and copied and copied[0]
-            if copy:
-                copies.setdefault(rev, {})[fn] = copy
+            copy = None
+            if follow:
+                try:
+                    copied = flog.renamed(fnode)
+                except error.WdirUnsupported:
+                    copied = ctx[fn].renamed()
+                copy = copied and copied[0]
+                if copy:
+                    copies.setdefault(rev, {})[fn] = copy
+                    if fn in skip:
+                        skip.add(copy)
             if fn in skip:
-                if copy:
-                    skip[copy] = True
                 continue
             files.append(fn)
 
@@ -2983,16 +2983,16 @@
             copy = copies.get(rev, {}).get(fn)
             if fn in skip:
                 if copy:
-                    skip[copy] = True
+                    skip.add(copy)
                 continue
             pstates = matches.get(parent, {}).get(copy or fn, [])
             if pstates or states:
                 r = display(fm, fn, ctx, pstates, states)
                 found = found or r
                 if r and not diff and not all_files:
-                    skip[fn] = True
+                    skip.add(fn)
                     if copy:
-                        skip[copy] = True
+                        skip.add(copy)
         del revfiles[rev]
         # We will keep the matches dict for the duration of the window
         # clear the matches dict once the window is over
@@ -4361,7 +4361,7 @@
             msg = _("not updating: %s") % stringutil.forcebytestr(inst)
             hint = inst.hint
             raise error.UpdateAbort(msg, hint=hint)
-    if modheads > 1:
+    if modheads is not None and modheads > 1:
         currentbranchheads = len(repo.branchheads())
         if currentbranchheads == modheads:
             ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
@@ -4839,6 +4839,8 @@
                                  b'$$ &Yes $$ &No')):
                 raise error.Abort(_('user quit'))
 
+    uipathfn = scmutil.getuipathfn(repo)
+
     if show:
         ui.pager('resolve')
         fm = ui.formatter('resolve', opts)
@@ -4866,7 +4868,8 @@
             fm.startitem()
             fm.context(ctx=wctx)
             fm.condwrite(not nostatus, 'mergestatus', '%s ', key, label=label)
-            fm.write('path', '%s\n', f, label=label)
+            fm.data(path=f)
+            fm.plain('%s\n' % uipathfn(f), label=label)
         fm.end()
         return 0
 
@@ -5413,10 +5416,12 @@
         repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
         ctx1, ctx2 = scmutil.revpair(repo, revs)
 
-    if pats or ui.configbool('commands', 'status.relative'):
-        cwd = repo.getcwd()
-    else:
-        cwd = ''
+    relative = None
+    if pats:
+        relative = True
+    elif ui.hasconfig('commands', 'status.relative'):
+        relative = ui.configbool('commands', 'status.relative')
+    uipathfn = scmutil.getuipathfn(repo, relative)
 
     if opts.get('print0'):
         end = '\0'
@@ -5467,10 +5472,10 @@
                 fm.context(ctx=ctx2)
                 fm.data(path=f)
                 fm.condwrite(showchar, 'status', '%s ', char, label=label)
-                fm.plain(fmt % repo.pathto(f, cwd), label=label)
+                fm.plain(fmt % uipathfn(f), label=label)
                 if f in copy:
                     fm.data(source=copy[f])
-                    fm.plain(('  %s' + end) % repo.pathto(copy[f], cwd),
+                    fm.plain(('  %s' + end) % uipathfn(copy[f]),
                              label='status.copied')
 
     if ((ui.verbose or ui.configbool('commands', 'status.verbose'))
@@ -5503,7 +5508,6 @@
     pnode = parents[0].node()
     marks = []
 
-    ms = None
     try:
         ms = mergemod.mergestate.read(repo)
     except error.UnsupportedMergeRecords as e:
@@ -5830,6 +5834,10 @@
                 expectedtype = 'global'
 
             for n in names:
+                if repo.tagtype(n) == 'global':
+                    alltags = tagsmod.findglobaltags(ui, repo)
+                    if alltags[n][0] == nullid:
+                        raise error.Abort(_("tag '%s' is already removed") % n)
                 if not repo.tagtype(n):
                     raise error.Abort(_("tag '%s' does not exist") % n)
                 if repo.tagtype(n) != expectedtype:
@@ -5908,7 +5916,6 @@
     ui.pager('tags')
     fm = ui.formatter('tags', opts)
     hexfunc = fm.hexfunc
-    tagtype = ""
 
     for t, n in reversed(repo.tagslist()):
         hn = hexfunc(n)
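
Several hunks above (grep, resolve, status) switch human-readable output to scmutil.getuipathfn() while keeping the repo-root-relative path in the formatter's machine-readable 'path' field. A sketch of the pattern using only the calls those hunks introduce (the surrounding loop is illustrative):

    uipathfn = scmutil.getuipathfn(repo)   # root-relative -> display path
    for f in sorted(files):
        fm.startitem()
        fm.data(path=f)                    # templates/JSON: always root-relative
        fm.plain('%s\n' % uipathfn(f))     # terminal: possibly cwd-relative
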
--- a/mercurial/config.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/config.py	Mon Feb 04 20:35:21 2019 +0300
@@ -78,6 +78,10 @@
         return list(self._data.get(section, {}).iteritems())
     def set(self, section, item, value, source=""):
         if pycompat.ispy3:
+            assert not isinstance(section, str), (
+                'config section may not be unicode strings on Python 3')
+            assert not isinstance(item, str), (
+                'config item may not be unicode strings on Python 3')
             assert not isinstance(value, str), (
                 'config values may not be unicode strings on Python 3')
         if section not in self:
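
The extra assertions above make config.set() insist that section, item and value are all byte strings on Python 3, matching the existing check on value. Callers therefore pass bytes throughout, e.g. (cfg is a mercurial.config.config instance, values invented):

    cfg.set(b'ui', b'username', b'Jane Doe <jane@example.org>', source=b'test')
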
--- a/mercurial/configitems.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/configitems.py	Mon Feb 04 20:35:21 2019 +0300
@@ -1233,6 +1233,9 @@
 coreconfigitem('ui', 'quietbookmarkmove',
     default=False,
 )
+coreconfigitem('ui', 'relative-paths',
+    default=False,
+)
 coreconfigitem('ui', 'remotecmd',
     default='hg',
 )
--- a/mercurial/context.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/context.py	Mon Feb 04 20:35:21 2019 +0300
@@ -2178,8 +2178,6 @@
     """
     def getfilectx(repo, memctx, path):
         fctx = ctx[path]
-        # this is weird but apparently we only keep track of one parent
-        # (why not only store that instead of a tuple?)
         copied = fctx.renamed()
         if copied:
             copied = copied[0]
--- a/mercurial/copies.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/copies.py	Mon Feb 04 20:35:21 2019 +0300
@@ -24,14 +24,13 @@
     stringutil,
 )
 
-def _findlimit(repo, a, b):
+def _findlimit(repo, ctxa, ctxb):
     """
     Find the last revision that needs to be checked to ensure that a full
     transitive closure for file copies can be properly calculated.
     Generally, this means finding the earliest revision number that's an
     ancestor of a or b but not both, except when a or b is a direct descendant
     of the other, in which case we can return the minimum revnum of a and b.
-    None if no such revision exists.
     """
 
     # basic idea:
@@ -46,27 +45,32 @@
     #   - quit when interesting revs is zero
 
     cl = repo.changelog
+    wdirparents = None
+    a = ctxa.rev()
+    b = ctxb.rev()
     if a is None:
+        wdirparents = (ctxa.p1(), ctxa.p2())
         a = node.wdirrev
     if b is None:
+        assert not wdirparents
+        wdirparents = (ctxb.p1(), ctxb.p2())
         b = node.wdirrev
 
     side = {a: -1, b: 1}
     visit = [-a, -b]
     heapq.heapify(visit)
     interesting = len(visit)
-    hascommonancestor = False
     limit = node.wdirrev
 
     while interesting:
         r = -heapq.heappop(visit)
         if r == node.wdirrev:
-            parents = [cl.rev(p) for p in repo.dirstate.parents()]
+            parents = [pctx.rev() for pctx in wdirparents]
         else:
             parents = cl.parentrevs(r)
+        if parents[1] == node.nullrev:
+            parents = parents[:1]
         for p in parents:
-            if p < 0:
-                continue
             if p not in side:
                 # first time we see p; add it to visit
                 side[p] = side[r]
@@ -77,14 +81,10 @@
                 # p was interesting but now we know better
                 side[p] = 0
                 interesting -= 1
-                hascommonancestor = True
         if side[r]:
             limit = r # lowest rev visited
             interesting -= 1
 
-    if not hascommonancestor:
-        return None
-
     # Consider the following flow (see test-commit-amend.t under issue4405):
     # 1/ File 'a0' committed
     # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
@@ -168,9 +168,7 @@
     if debug:
         dbg('debug.copies:    looking into rename from %s to %s\n'
             % (a, b))
-    limit = _findlimit(repo, a.rev(), b.rev())
-    if limit is None:
-        limit = node.nullrev
+    limit = _findlimit(repo, a, b)
     if debug:
         dbg('debug.copies:      search limit: %d\n' % limit)
     am = a.manifest()
@@ -464,10 +462,7 @@
     if graft:
         tca = _c1.ancestor(_c2)
 
-    limit = _findlimit(repo, c1.rev(), c2.rev())
-    if limit is None:
-        # no common ancestor, no copies
-        return {}, {}, {}, {}, {}
+    limit = _findlimit(repo, c1, c2)
     repo.ui.debug("  searching for copies back to rev %d\n" % limit)
 
     m1 = c1.manifest()
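
A minimal, self-contained sketch of the side-marking walk that the rewritten
_findlimit() above performs, using a plain {rev: parents} dict instead of a
changelog; the names toyfindlimit and parentmap are illustrative only, not
Mercurial APIs.

    import heapq

    def toyfindlimit(parentmap, a, b):
        side = {a: -1, b: 1}      # which "side" first reached each rev
        visit = [-a, -b]          # max-heap via negation: high revs first
        heapq.heapify(visit)
        interesting = len(visit)
        limit = max(a, b)
        while interesting:
            r = -heapq.heappop(visit)
            for p in parentmap.get(r, ()):
                if p not in side:
                    side[p] = side[r]
                    if side[p]:
                        interesting += 1
                    heapq.heappush(visit, -p)
                elif side[p] and side[p] != side[r]:
                    side[p] = 0   # reachable from both sides
                    interesting -= 1
            if side[r]:
                limit = r         # lowest one-sided rev seen so far
                interesting -= 1
        return limit

    # 0 -- 1 -- 2 (a)
    #        \
    #         3 (b)
    print(toyfindlimit({1: [0], 2: [1], 3: [1]}, 2, 3))  # -> 2
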
--- a/mercurial/crecord.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/crecord.py	Mon Feb 04 20:35:21 2019 +0300
@@ -487,7 +487,7 @@
         return getattr(self._hunk, name)
 
     def __repr__(self):
-        return '<hunk %r@%d>' % (self.filename(), self.fromline)
+        return r'<hunk %r@%d>' % (self.filename(), self.fromline)
 
 def filterpatch(ui, chunks, chunkselector, operation=None):
     """interactively filter patch chunks into applied-only chunks"""
--- a/mercurial/dagop.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/dagop.py	Mon Feb 04 20:35:21 2019 +0300
@@ -28,7 +28,7 @@
 generatorset = smartset.generatorset
 
 # possible maximum depth between null and wdir()
-_maxlogdepth = 0x80000000
+maxlogdepth = 0x80000000
 
 def _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse):
     """Walk DAG using 'pfunc' from the given 'revs' nodes
@@ -42,7 +42,7 @@
     if startdepth is None:
         startdepth = 0
     if stopdepth is None:
-        stopdepth = _maxlogdepth
+        stopdepth = maxlogdepth
     if stopdepth == 0:
         return
     if stopdepth < 0:
@@ -221,7 +221,7 @@
     Scan ends at the stopdepth (exclusive) if specified. Revisions found
     earlier than the startdepth are omitted.
     """
-    if startdepth is None and stopdepth is None:
+    if startdepth is None and (stopdepth is None or stopdepth >= maxlogdepth):
         gen = _genrevdescendants(repo, revs, followfirst)
     else:
         gen = _genrevdescendantsofdepth(repo, revs, followfirst,
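
A standalone sketch of the depth-window semantics that the renamed
maxlogdepth exposes to callers: startdepth is inclusive, stopdepth is
exclusive, and None or anything >= maxlogdepth means "unbounded". The
walkdepths() helper and its children map below are made up for illustration.

    MAXLOGDEPTH = 0x80000000

    def walkdepths(children, roots, startdepth=None, stopdepth=None):
        """Yield (rev, depth) for descendants of roots inside the window."""
        if startdepth is None:
            startdepth = 0
        if stopdepth is None or stopdepth >= MAXLOGDEPTH:
            stopdepth = MAXLOGDEPTH
        seen = set()
        frontier = [(r, 0) for r in roots]
        while frontier:
            rev, depth = frontier.pop()
            if rev in seen or depth >= stopdepth:
                continue
            seen.add(rev)
            if depth >= startdepth:
                yield rev, depth
            frontier.extend((c, depth + 1) for c in children.get(rev, ()))

    # 0 -> 1 -> 2 -> 3; only depths 1 and 2 below rev 0 are reported
    print(sorted(walkdepths({0: [1], 1: [2], 2: [3]}, [0], 1, 3)))
    # [(1, 1), (2, 2)]
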
--- a/mercurial/debugcommands.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/debugcommands.py	Mon Feb 04 20:35:21 2019 +0300
@@ -38,6 +38,7 @@
     cmdutil,
     color,
     context,
+    copies,
     dagparser,
     encoding,
     error,
@@ -745,7 +746,6 @@
         nodates = True
     datesort = opts.get(r'datesort')
 
-    timestr = ""
     if datesort:
         keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
     else:
@@ -1182,13 +1182,6 @@
     '''
     opts = pycompat.byteskwargs(opts)
 
-    def writetemp(contents):
-        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
-        f = os.fdopen(fd, r"wb")
-        f.write(contents)
-        f.close()
-        return name
-
     problems = 0
 
     fm = ui.formatter('debuginstall', opts)
@@ -1812,6 +1805,18 @@
     ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
     ui.write('\n')
 
+@command('debugpathcopies',
+         cmdutil.walkopts,
+         'hg debugpathcopies REV1 REV2 [FILE]',
+         inferrepo=True)
+def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
+    """show copies between two revisions"""
+    ctx1 = scmutil.revsingle(repo, rev1)
+    ctx2 = scmutil.revsingle(repo, rev2)
+    m = scmutil.match(ctx1, pats, opts)
+    for dst, src in copies.pathcopies(ctx1, ctx2, m).items():
+        ui.write('%s -> %s\n' % (src, dst))
+
 @command('debugpeer', [], _('PATH'), norepo=True)
 def debugpeer(ui, path):
     """establish a connection to a peer repository"""
@@ -2569,7 +2574,6 @@
 
     source, branches = hg.parseurl(ui.expandpath(source))
     url = util.url(source)
-    addr = None
 
     defaultport = {'https': 443, 'ssh': 22}
     if url.scheme in defaultport:
--- a/mercurial/exchange.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/exchange.py	Mon Feb 04 20:35:21 2019 +0300
@@ -297,7 +297,6 @@
                                               'client'))
             elif part.type == 'stream2' and version is None:
                 # A stream2 part requires to be part of a v2 bundle
-                version = "v2"
                 requirements = urlreq.unquote(part.params['requirements'])
                 splitted = requirements.split()
                 params = bundle2._formatrequirementsparams(splitted)
--- a/mercurial/filemerge.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/filemerge.py	Mon Feb 04 20:35:21 2019 +0300
@@ -279,6 +279,7 @@
     keep as the merged version."""
     ui = repo.ui
     fd = fcd.path()
+    uipathfn = scmutil.getuipathfn(repo)
 
     # Avoid prompting during an in-memory merge since it doesn't support merge
     # conflicts.
@@ -287,7 +288,7 @@
                                                 'support file conflicts')
 
     prompts = partextras(labels)
-    prompts['fd'] = fd
+    prompts['fd'] = uipathfn(fd)
     try:
         if fco.isabsent():
             index = ui.promptchoice(
@@ -394,13 +395,14 @@
 
 def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
     tool, toolpath, binary, symlink, scriptfn = toolconf
+    uipathfn = scmutil.getuipathfn(repo)
     if symlink:
         repo.ui.warn(_('warning: internal %s cannot merge symlinks '
-                       'for %s\n') % (tool, fcd.path()))
+                       'for %s\n') % (tool, uipathfn(fcd.path())))
         return False
     if fcd.isabsent() or fco.isabsent():
         repo.ui.warn(_('warning: internal %s cannot merge change/delete '
-                       'conflict for %s\n') % (tool, fcd.path()))
+                       'conflict for %s\n') % (tool, uipathfn(fcd.path())))
         return False
     return True
 
@@ -462,7 +464,6 @@
     Generic driver for _imergelocal and _imergeother
     """
     assert localorother is not None
-    tool, toolpath, binary, symlink, scriptfn = toolconf
     r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels,
                                 localorother=localorother)
     return True, r
@@ -581,9 +582,10 @@
 
 def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     tool, toolpath, binary, symlink, scriptfn = toolconf
+    uipathfn = scmutil.getuipathfn(repo)
     if fcd.isabsent() or fco.isabsent():
         repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
-                       'for %s\n') % (tool, fcd.path()))
+                       'for %s\n') % (tool, uipathfn(fcd.path())))
         return False, 1, None
     unused, unused, unused, back = files
     localpath = _workingpath(repo, fcd)
@@ -623,7 +625,7 @@
             lambda s: procutil.shellquote(util.localpath(s)))
         if _toolbool(ui, tool, "gui"):
             repo.ui.status(_('running merge tool %s for file %s\n') %
-                           (tool, fcd.path()))
+                           (tool, uipathfn(fcd.path())))
         if scriptfn is None:
             cmd = toolpath + ' ' + args
             repo.ui.debug('launching merge tool: %s\n' % cmd)
@@ -842,6 +844,8 @@
 
     ui = repo.ui
     fd = fcd.path()
+    uipathfn = scmutil.getuipathfn(repo)
+    fduipath = uipathfn(fd)
     binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
     symlink = 'l' in fcd.flags() + fco.flags()
     changedelete = fcd.isabsent() or fco.isabsent()
@@ -865,8 +869,8 @@
             raise error.Abort(_("invalid 'python:' syntax: %s") % toolpath)
         toolpath = script
     ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
-             % (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink),
-                    pycompat.bytestr(changedelete)))
+             % (tool, fduipath, pycompat.bytestr(binary),
+                pycompat.bytestr(symlink), pycompat.bytestr(changedelete)))
 
     if tool in internals:
         func = internals[tool]
@@ -892,9 +896,10 @@
 
     if premerge:
         if orig != fco.path():
-            ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
+            ui.status(_("merging %s and %s to %s\n") %
+                      (uipathfn(orig), uipathfn(fco.path()), fduipath))
         else:
-            ui.status(_("merging %s\n") % fd)
+            ui.status(_("merging %s\n") % fduipath)
 
     ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
 
@@ -905,7 +910,7 @@
                 raise error.InMemoryMergeConflictsError('in-memory merge does '
                                                         'not support merge '
                                                         'conflicts')
-            ui.warn(onfailure % fd)
+            ui.warn(onfailure % fduipath)
         return True, 1, False
 
     back = _makebackup(repo, ui, wctx, fcd, premerge)
@@ -958,7 +963,7 @@
                     raise error.InMemoryMergeConflictsError('in-memory merge '
                                                             'does not support '
                                                             'merge conflicts')
-                ui.warn(onfailure % fd)
+                ui.warn(onfailure % fduipath)
             _onfilemergefailure(ui)
 
         return True, r, deleted
@@ -986,6 +991,7 @@
 
 def _check(repo, r, ui, tool, fcd, files):
     fd = fcd.path()
+    uipathfn = scmutil.getuipathfn(repo)
     unused, unused, unused, back = files
 
     if not r and (_toolbool(ui, tool, "checkconflicts") or
@@ -997,7 +1003,7 @@
     if 'prompt' in _toollist(ui, tool, "check"):
         checked = True
         if ui.promptchoice(_("was merge of '%s' successful (yn)?"
-                             "$$ &Yes $$ &No") % fd, 1):
+                             "$$ &Yes $$ &No") % uipathfn(fd), 1):
             r = 1
 
     if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
@@ -1006,7 +1012,7 @@
         if back is not None and not fcd.cmp(back):
             if ui.promptchoice(_(" output file %s appears unchanged\n"
                                  "was merge successful (yn)?"
-                                 "$$ &Yes $$ &No") % fd, 1):
+                                 "$$ &Yes $$ &No") % uipathfn(fd), 1):
                 r = 1
 
     if back is not None and _toolbool(ui, tool, "fixeol"):
--- a/mercurial/help/config.txt	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/help/config.txt	Mon Feb 04 20:35:21 2019 +0300
@@ -2341,6 +2341,9 @@
     Reduce the amount of output printed.
     (default: False)
 
+``relative-paths``
+    Prefer relative paths in the UI.
+
 ``remotecmd``
     Remote command to use for clone/push/pull operations.
     (default: ``hg``)
--- a/mercurial/hg.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/hg.py	Mon Feb 04 20:35:21 2019 +0300
@@ -38,6 +38,7 @@
     narrowspec,
     node,
     phases,
+    pycompat,
     repository as repositorymod,
     scmutil,
     sshpeer,
@@ -57,7 +58,15 @@
 
 def _local(path):
     path = util.expandpath(util.urllocalpath(path))
-    return (os.path.isfile(path) and bundlerepo or localrepo)
+
+    try:
+        isfile = os.path.isfile(path)
+    # Python 2 raises TypeError, Python 3 ValueError.
+    except (TypeError, ValueError) as e:
+        raise error.Abort(_('invalid path %s: %s') % (
+            path, pycompat.bytestr(e)))
+
+    return isfile and bundlerepo or localrepo
 
 def addbranchrevs(lrepo, other, branches, revs):
     peer = other.peer() # a courtesy to callers using a localrepo for other
@@ -282,25 +291,20 @@
     called.
     """
 
-    destlock = lock = None
-    lock = repo.lock()
-    try:
+    with repo.lock():
         # we use locks here because if we race with commit, we
         # can end up with extra data in the cloned revlogs that's
         # not pointed to by changesets, thus causing verify to
         # fail
-
         destlock = copystore(ui, repo, repo.path)
-
-        sharefile = repo.vfs.join('sharedpath')
-        util.rename(sharefile, sharefile + '.old')
+        with destlock or util.nullcontextmanager():
 
-        repo.requirements.discard('shared')
-        repo.requirements.discard('relshared')
-        repo._writerequirements()
-    finally:
-        destlock and destlock.release()
-        lock and lock.release()
+            sharefile = repo.vfs.join('sharedpath')
+            util.rename(sharefile, sharefile + '.old')
+
+            repo.requirements.discard('shared')
+            repo.requirements.discard('relshared')
+            repo._writerequirements()
 
     # Removing share changes some fundamental properties of the repo instance.
     # So we instantiate a new repo object and operate on it rather than
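
The unshare() rewrite above relies on "with destlock or
util.nullcontextmanager():" so the same block runs whether or not
copystore() handed back a lock. A tiny sketch of that pattern using the
standard library (contextlib.nullcontext, Python 3.7+) in place of
Mercurial's util helper:

    import contextlib
    import threading

    def run_under_optional_lock(lock=None):
        # nullcontext() plays the role of util.nullcontextmanager() above
        with lock or contextlib.nullcontext():
            return 'did the work'

    print(run_under_optional_lock())                  # no lock at all
    print(run_under_optional_lock(threading.Lock()))  # real lock held briefly
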
--- a/mercurial/hgweb/hgwebdir_mod.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/hgweb/hgwebdir_mod.py	Mon Feb 04 20:35:21 2019 +0300
@@ -143,7 +143,7 @@
                 path = path[:-len(discarded) - 1]
 
                 try:
-                    r = hg.repository(ui, path)
+                    hg.repository(ui, path)
                     directory = False
                 except (IOError, error.RepoError):
                     pass
@@ -510,7 +510,7 @@
         if style == styles[0]:
             vars['style'] = style
 
-        sessionvars = webutil.sessionvars(vars, r'?')
+        sessionvars = webutil.sessionvars(vars, '?')
         logourl = config('web', 'logourl')
         logoimg = config('web', 'logoimg')
         staticurl = (config('web', 'staticurl')
--- a/mercurial/hgweb/server.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/hgweb/server.py	Mon Feb 04 20:35:21 2019 +0300
@@ -54,7 +54,7 @@
         self.writelines(str.split('\n'))
     def writelines(self, seq):
         for msg in seq:
-            self.handler.log_error("HG error:  %s", msg)
+            self.handler.log_error(r"HG error:  %s", encoding.strfromlocal(msg))
 
 class _httprequesthandler(httpservermod.basehttprequesthandler):
 
@@ -100,17 +100,22 @@
     def do_POST(self):
         try:
             self.do_write()
-        except Exception:
+        except Exception as e:
+            # I/O below could raise another exception. So log the original
+            # exception first to ensure it is recorded.
+            if not (isinstance(e, (OSError, socket.error))
+                    and e.errno == errno.ECONNRESET):
+                tb = r"".join(traceback.format_exception(*sys.exc_info()))
+                # We need a native-string newline to poke in the log
+                # message, because we won't get a newline when using an
+                # r-string. This is the easy way out.
+                newline = chr(10)
+                self.log_error(r"Exception happened during processing "
+                               r"request '%s':%s%s", self.path, newline, tb)
+
             self._start_response(r"500 Internal Server Error", [])
             self._write(b"Internal Server Error")
             self._done()
-            tb = r"".join(traceback.format_exception(*sys.exc_info()))
-            # We need a native-string newline to poke in the log
-            # message, because we won't get a newline when using an
-            # r-string. This is the easy way out.
-            newline = chr(10)
-            self.log_error(r"Exception happened during processing "
-                           r"request '%s':%s%s", self.path, newline, tb)
 
     def do_PUT(self):
         self.do_POST()
@@ -165,7 +170,7 @@
         if length:
             env[r'CONTENT_LENGTH'] = length
         for header in [h for h in self.headers.keys()
-                       if h not in (r'content-type', r'content-length')]:
+                      if h.lower() not in (r'content-type', r'content-length')]:
             hkey = r'HTTP_' + header.replace(r'-', r'_').upper()
             hval = self.headers.get(header)
             hval = hval.replace(r'\n', r'').strip()
--- a/mercurial/hgweb/webcommands.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/hgweb/webcommands.py	Mon Feb 04 20:35:21 2019 +0300
@@ -884,7 +884,7 @@
             leftlines = filelines(pfctx)
     else:
         rightlines = ()
-        pfctx = ctx.parents()[0][path]
+        pfctx = ctx.p1()[path]
         leftlines = filelines(pfctx)
 
     comparison = webutil.compare(context, leftlines, rightlines)
--- a/mercurial/hgweb/webutil.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/hgweb/webutil.py	Mon Feb 04 20:35:21 2019 +0300
@@ -456,13 +456,13 @@
     files = listfilediffs(ctx.files(), n, web.maxfiles)
 
     entry = commonentry(repo, ctx)
-    entry.update(
-        allparents=_kwfunc(lambda context, mapping: parents(ctx)),
-        parent=_kwfunc(lambda context, mapping: parents(ctx, rev - 1)),
-        child=_kwfunc(lambda context, mapping: children(ctx, rev + 1)),
-        changelogtag=showtags,
-        files=files,
-    )
+    entry.update({
+        'allparents': _kwfunc(lambda context, mapping: parents(ctx)),
+        'parent': _kwfunc(lambda context, mapping: parents(ctx, rev - 1)),
+        'child': _kwfunc(lambda context, mapping: children(ctx, rev + 1)),
+        'changelogtag': showtags,
+        'files': files,
+    })
     return entry
 
 def changelistentries(web, revs, maxcount, parityfn):
--- a/mercurial/httppeer.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/httppeer.py	Mon Feb 04 20:35:21 2019 +0300
@@ -816,8 +816,8 @@
             return
 
         raise error.CapabilityError(
-            _('cannot %s; client or remote repository does not support the %r '
-              'capability') % (purpose, name))
+            _('cannot %s; client or remote repository does not support the '
+              '\'%s\' capability') % (purpose, name))
 
     # End of ipeercapabilities.
 
--- a/mercurial/keepalive.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/keepalive.py	Mon Feb 04 20:35:21 2019 +0300
@@ -84,6 +84,7 @@
 
 from __future__ import absolute_import, print_function
 
+import collections
 import errno
 import hashlib
 import socket
@@ -114,15 +115,13 @@
       """
     def __init__(self):
         self._lock = threading.Lock()
-        self._hostmap = {} # map hosts to a list of connections
+        self._hostmap = collections.defaultdict(list) # host -> [connection]
         self._connmap = {} # map connections to host
         self._readymap = {} # map connection to ready state
 
     def add(self, host, connection, ready):
         self._lock.acquire()
         try:
-            if host not in self._hostmap:
-                self._hostmap[host] = []
             self._hostmap[host].append(connection)
             self._connmap[connection] = host
             self._readymap[connection] = ready
@@ -155,19 +154,18 @@
         conn = None
         self._lock.acquire()
         try:
-            if host in self._hostmap:
-                for c in self._hostmap[host]:
-                    if self._readymap[c]:
-                        self._readymap[c] = 0
-                        conn = c
-                        break
+            for c in self._hostmap[host]:
+                if self._readymap[c]:
+                    self._readymap[c] = False
+                    conn = c
+                    break
         finally:
             self._lock.release()
         return conn
 
     def get_all(self, host=None):
         if host:
-            return list(self._hostmap.get(host, []))
+            return list(self._hostmap[host])
         else:
             return dict(self._hostmap)
 
@@ -202,7 +200,7 @@
     def _request_closed(self, request, host, connection):
         """tells us that this request is now closed and that the
         connection is ready for another request"""
-        self._cm.set_ready(connection, 1)
+        self._cm.set_ready(connection, True)
 
     def _remove_connection(self, host, connection, close=0):
         if close:
@@ -239,7 +237,7 @@
                 if DEBUG:
                     DEBUG.info("creating new connection to %s (%d)",
                                host, id(h))
-                self._cm.add(host, h, 0)
+                self._cm.add(host, h, False)
                 self._start_transaction(h, req)
                 r = h.getresponse()
         # The string form of BadStatusLine is the status line. Add some context
@@ -405,6 +403,11 @@
     _raw_read = httplib.HTTPResponse.read
     _raw_readinto = getattr(httplib.HTTPResponse, 'readinto', None)
 
+    # Python 2.7 has a single close() which closes the socket handle.
+    # Python 3 effectively renamed that method to _close_conn(), but also
+    # kept a separate close(). _close_conn() is called by methods like
+    # read().
+
     def close(self):
         if self.fp:
             self.fp.close()
@@ -413,6 +416,9 @@
                 self._handler._request_closed(self, self._host,
                                               self._connection)
 
+    def _close_conn(self):
+        self.close()
+
     def close_connection(self):
         self._handler._remove_connection(self._host, self._connection, close=1)
         self.close()
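
The ConnectionManager changes above swap the manual "create the list on
first use" dance for collections.defaultdict(list), which is also why
get_all() can index the map directly. A small illustration:

    import collections

    hostmap = collections.defaultdict(list)     # host -> [connection, ...]
    hostmap['example.com:80'].append('conn1')   # first use creates the list
    hostmap['example.com:80'].append('conn2')
    print(hostmap['example.com:80'])            # ['conn1', 'conn2']
    print(hostmap['unknown.host'])              # [], no KeyError, no setdefault
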
--- a/mercurial/localrepo.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/localrepo.py	Mon Feb 04 20:35:21 2019 +0300
@@ -2011,8 +2011,7 @@
             self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
         self.invalidate()
 
-        parentgone = (parents[0] not in self.changelog.nodemap or
-                      parents[1] not in self.changelog.nodemap)
+        parentgone = any(p not in self.changelog.nodemap for p in parents)
         if parentgone:
             # prevent dirstateguard from overwriting already restored one
             dsguard.close()
@@ -2409,11 +2408,8 @@
             match.explicitdir = vdirs.append
             match.bad = fail
 
-        wlock = lock = tr = None
-        try:
-            wlock = self.wlock()
-            lock = self.lock() # for recent changelog (see issue4368)
-
+        # lock() for recent changelog (see issue4368)
+        with self.wlock(), self.lock():
             wctx = self[None]
             merge = len(wctx.parents()) > 1
 
@@ -2473,21 +2469,17 @@
             try:
                 self.hook("precommit", throw=True, parent1=hookp1,
                           parent2=hookp2)
-                tr = self.transaction('commit')
-                ret = self.commitctx(cctx, True)
+                with self.transaction('commit'):
+                    ret = self.commitctx(cctx, True)
+                    # update bookmarks, dirstate and mergestate
+                    bookmarks.update(self, [p1, p2], ret)
+                    cctx.markcommitted(ret)
+                    ms.reset()
             except: # re-raises
                 if edited:
                     self.ui.write(
                         _('note: commit message saved in %s\n') % msgfn)
                 raise
-            # update bookmarks, dirstate and mergestate
-            bookmarks.update(self, [p1, p2], ret)
-            cctx.markcommitted(ret)
-            ms.reset()
-            tr.close()
-
-        finally:
-            lockmod.release(tr, lock, wlock)
 
         def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
             # hack for command that use a temporary commit (eg: histedit)
@@ -2509,13 +2501,10 @@
         from p1 or p2 are excluded from the committed ctx.files().
         """
 
-        tr = None
         p1, p2 = ctx.p1(), ctx.p2()
         user = ctx.user()
 
-        lock = self.lock()
-        try:
-            tr = self.transaction("commit")
+        with self.lock(), self.transaction("commit") as tr:
             trp = weakref.proxy(tr)
 
             if ctx.manifestnode():
@@ -2549,7 +2538,7 @@
                             m[f] = self._filecommit(fctx, m1, m2, linkrev,
                                                     trp, changed)
                             m.setflag(f, fctx.flags())
-                    except OSError as inst:
+                    except OSError:
                         self.ui.warn(_("trouble committing %s!\n") % f)
                         raise
                     except IOError as inst:
@@ -2612,12 +2601,7 @@
                 #
                 # if minimal phase was 0 we don't need to retract anything
                 phases.registernew(self, tr, targetphase, [n])
-            tr.close()
             return n
-        finally:
-            if tr:
-                tr.release()
-            lock.release()
 
     @unfilteredmethod
     def destroying(self):
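
The commit() and commitctx() hunks above replace hand-rolled try/finally
lock and transaction cleanup with nested "with" blocks. A generic sketch of
the same shape with stand-in context managers (not Mercurial's real lock or
transaction objects):

    import contextlib

    @contextlib.contextmanager
    def lock(name):
        print('lock %s' % name)
        try:
            yield
        finally:
            print('unlock %s' % name)

    @contextlib.contextmanager
    def transaction(name):
        print('open %s' % name)
        try:
            yield name
            print('close %s' % name)    # success path
        except Exception:
            print('abort %s' % name)    # failure path
            raise

    with lock('wlock'), lock('lock'), transaction('commit') as tr:
        print('committing under %s' % tr)
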
--- a/mercurial/logexchange.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/logexchange.py	Mon Feb 04 20:35:21 2019 +0300
@@ -97,7 +97,6 @@
 
 def activepath(repo, remote):
     """returns remote path"""
-    local = None
     # is the remote a local peer
     local = remote.local()
 
--- a/mercurial/mail.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/mail.py	Mon Feb 04 20:35:21 2019 +0300
@@ -243,6 +243,13 @@
             cs.body_encoding = email.charset.QP
             break
 
+    # On Python 2, this simply assigns a value. Python 3 inspects
+    # body and does different things depending on whether it has
+    # encode() or decode() attributes. We can get the old behavior
+    # if we pass a str and charset is None and we call set_charset().
+    # But we may get into trouble later due to Python attempting to
+    # encode/decode using the registered charset (or attempting to
+    # use ascii in the absence of a charset).
     msg.set_payload(body, cs)
 
     return msg
--- a/mercurial/manifest.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/manifest.py	Mon Feb 04 20:35:21 2019 +0300
@@ -283,7 +283,6 @@
         if len(self.extradata) == 0:
             return
         l = []
-        last_cut = 0
         i = 0
         offset = 0
         self.extrainfo = [0] * len(self.positions)
--- a/mercurial/match.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/match.py	Mon Feb 04 20:35:21 2019 +0300
@@ -677,6 +677,9 @@
     def visitdir(self, dir):
         if self._m2.visitdir(dir) == 'all':
             return False
+        elif not self._m2.visitdir(dir):
+            # m2 does not match dir, so m1's answer (possibly 'all') applies
+            return self._m1.visitdir(dir)
         return bool(self._m1.visitdir(dir))
 
     def visitchildrenset(self, dir):
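
A toy version of the differencematcher.visitdir() shortcut added above: when
m2 cannot match anything under a directory, the difference m1 - m2 behaves
exactly like m1 there, so m1's answer (including 'all') can be forwarded.
The lambdas below are stand-ins, not real matcher objects.

    def difference_visitdir(m1_visit, m2_visit, d):
        if m2_visit(d) == 'all':
            return False           # everything below d is excluded
        elif not m2_visit(d):
            return m1_visit(d)     # m2 is inert here; m1 decides, 'all' allowed
        return bool(m1_visit(d))   # m2 may exclude some files: never 'all'

    m1 = lambda d: 'all'                      # include everything
    m2 = lambda d: d.startswith('excluded')   # only matches under excluded/
    print(difference_visitdir(m1, m2, 'src'))       # 'all'
    print(difference_visitdir(m1, m2, 'excluded'))  # True (keep checking files)
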
--- a/mercurial/merge.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/merge.py	Mon Feb 04 20:35:21 2019 +0300
@@ -1186,9 +1186,6 @@
 
     diff = m1.diff(m2, match=matcher)
 
-    if matcher is None:
-        matcher = matchmod.always('', '')
-
     actions = {}
     for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
         if n1 and n2: # file exists on both local and remote side
--- a/mercurial/minirst.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/minirst.py	Mon Feb 04 20:35:21 2019 +0300
@@ -641,7 +641,6 @@
 
 def parse(text, indent=0, keep=None, admonitions=None):
     """Parse text into a list of blocks"""
-    pruned = []
     blocks = findblocks(text)
     for b in blocks:
         b['indent'] += indent
@@ -736,7 +735,6 @@
     '''return a list of (section path, nesting level, blocks) tuples'''
     nest = ""
     names = ()
-    level = 0
     secs = []
 
     def getname(b):
--- a/mercurial/mpatch.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/mpatch.c	Mon Feb 04 20:35:21 2019 +0300
@@ -41,8 +41,9 @@
 {
 	struct mpatch_flist *a = NULL;
 
-	if (size < 1)
+	if (size < 1) {
 		size = 1;
+	}
 
 	a = (struct mpatch_flist *)malloc(sizeof(struct mpatch_flist));
 	if (a) {
@@ -110,10 +111,12 @@
 
 	while (s != src->tail) {
 		int soffset = s->start;
-		if (!safeadd(offset, &soffset))
+		if (!safeadd(offset, &soffset)) {
 			break; /* add would overflow, oh well */
-		if (soffset >= cut)
+		}
+		if (soffset >= cut) {
 			break; /* we've gone far enough */
+		}
 
 		postend = offset;
 		if (!safeadd(s->start, &postend) ||
@@ -139,11 +142,13 @@
 			if (!safesub(offset, &c)) {
 				break;
 			}
-			if (s->end < c)
+			if (s->end < c) {
 				c = s->end;
+			}
 			l = cut - offset - s->start;
-			if (s->len < l)
+			if (s->len < l) {
 				l = s->len;
+			}
 
 			offset += s->start + l - c;
 
@@ -176,8 +181,9 @@
 		if (!safeadd(offset, &cmpcut)) {
 			break;
 		}
-		if (cmpcut >= cut)
+		if (cmpcut >= cut) {
 			break;
+		}
 
 		postend = offset;
 		if (!safeadd(s->start, &postend)) {
@@ -205,11 +211,13 @@
 			if (!safesub(offset, &c)) {
 				break;
 			}
-			if (s->end < c)
+			if (s->end < c) {
 				c = s->end;
+			}
 			l = cut - offset - s->start;
-			if (s->len < l)
+			if (s->len < l) {
 				l = s->len;
+			}
 
 			offset += s->start + l - c;
 			s->start = c;
@@ -233,8 +241,9 @@
 	struct mpatch_frag *bh, *ct;
 	int offset = 0, post;
 
-	if (a && b)
+	if (a && b) {
 		c = lalloc((lsize(a) + lsize(b)) * 2);
+	}
 
 	if (c) {
 
@@ -284,8 +293,9 @@
 
 	/* assume worst case size, we won't have many of these lists */
 	l = lalloc(len / 12 + 1);
-	if (!l)
+	if (!l) {
 		return MPATCH_ERR_NO_MEM;
+	}
 
 	lt = l->tail;
 
@@ -295,8 +305,9 @@
 		lt->start = getbe32(bin + pos);
 		lt->end = getbe32(bin + pos + 4);
 		lt->len = getbe32(bin + pos + 8);
-		if (lt->start < 0 || lt->start > lt->end || lt->len < 0)
+		if (lt->start < 0 || lt->start > lt->end || lt->len < 0) {
 			break; /* sanity check */
+		}
 		if (!safeadd(12, &pos)) {
 			break;
 		}
--- a/mercurial/patch.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/patch.py	Mon Feb 04 20:35:21 2019 +0300
@@ -32,6 +32,7 @@
     encoding,
     error,
     mail,
+    match as matchmod,
     mdiff,
     pathutil,
     pycompat,
@@ -1448,7 +1449,6 @@
             hunk.append(l)
             return l.rstrip('\r\n')
 
-        size = 0
         while True:
             line = getline(lr, self.hunk)
             if not line:
@@ -1610,6 +1610,7 @@
             self.headers = []
 
         def addrange(self, limits):
+            self.addcontext([])
             fromstart, fromend, tostart, toend, proc = limits
             self.fromline = int(fromstart)
             self.toline = int(tostart)
@@ -1630,6 +1631,8 @@
             if self.context:
                 self.before = self.context
                 self.context = []
+            if self.hunk:
+                self.addcontext([])
             self.hunk = hunk
 
         def newfile(self, hdr):
@@ -1903,7 +1906,6 @@
             if not gitpatches:
                 raise PatchError(_('failed to synchronize metadata for "%s"')
                                  % afile[2:])
-            gp = gitpatches[-1]
             newfile = True
         elif x.startswith('---'):
             # check for a unified diff
@@ -2318,12 +2320,9 @@
     ctx1 = repo[node1]
     ctx2 = repo[node2]
 
-    relfiltered = False
-    if relroot != '' and match.always():
-        # as a special case, create a new matcher with just the relroot
-        pats = [relroot]
-        match = scmutil.match(ctx2, pats, default='path')
-        relfiltered = True
+    if relroot:
+        relrootmatch = scmutil.match(ctx2, pats=[relroot], default='path')
+        match = matchmod.intersectmatchers(match, relrootmatch)
 
     if not changes:
         changes = ctx1.status(ctx2, match=match)
@@ -2343,21 +2342,11 @@
         if opts.git or opts.upgrade:
             copy = copies.pathcopies(ctx1, ctx2, match=match)
 
-    if relroot is not None:
-        if not relfiltered:
-            # XXX this would ideally be done in the matcher, but that is
-            # generally meant to 'or' patterns, not 'and' them. In this case we
-            # need to 'and' all the patterns from the matcher with relroot.
-            def filterrel(l):
-                return [f for f in l if f.startswith(relroot)]
-            modified = filterrel(modified)
-            added = filterrel(added)
-            removed = filterrel(removed)
-            relfiltered = True
-        # filter out copies where either side isn't inside the relative root
-        copy = dict(((dst, src) for (dst, src) in copy.iteritems()
-                     if dst.startswith(relroot)
-                     and src.startswith(relroot)))
+    if relroot:
+        # filter out copies where source side isn't inside the relative root
+        # (copies.pathcopies() already filtered out the destination)
+        copy = {dst: src for dst, src in copy.iteritems()
+                if src.startswith(relroot)}
 
     modifiedset = set(modified)
     addedset = set(added)
@@ -2808,6 +2797,10 @@
         elif (line.startswith('GIT binary patch') or
               line.startswith('Binary file')):
             isbinary = True
+        elif line.startswith('rename from'):
+            filename = line[12:]
+        elif line.startswith('rename to'):
+            filename += ' => %s' % line[10:]
     addresult()
     return results
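
The relroot handling above now folds the restriction into the matcher via
matchmod.intersectmatchers() and only filters copy sources afterwards
(destinations are already constrained by the matcher). A self-contained
sketch with plain predicate functions standing in for matcher objects:

    def intersect(m1, m2):
        return lambda f: m1(f) and m2(f)

    usermatch = lambda f: f.endswith('.py')
    relroot = 'sub/'
    relrootmatch = lambda f: f.startswith(relroot)
    match = intersect(usermatch, relrootmatch)

    copy = {'sub/new.py': 'sub/old.py', 'sub/moved.py': 'outside/orig.py'}
    copy = {dst: src for dst, src in copy.items() if src.startswith(relroot)}

    print([f for f in ('sub/a.py', 'top/a.py', 'sub/readme') if match(f)])
    # ['sub/a.py']
    print(copy)   # only the copy whose source lives inside sub/ survives
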
 
--- a/mercurial/posix.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/posix.py	Mon Feb 04 20:35:21 2019 +0300
@@ -583,7 +583,8 @@
     """Return the list of members of the group with the given
     name, KeyError if the group does not exist.
     """
-    return list(grp.getgrnam(name).gr_mem)
+    name = pycompat.fsdecode(name)
+    return pycompat.rapply(pycompat.fsencode, list(grp.getgrnam(name).gr_mem))
 
 def spawndetached(args):
     return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
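
The groupmembers() change above is the usual bytes vs native-string bridge:
grp wants str on Python 3 while Mercurial passes bytes around, so the name
is decoded on the way in and the members are encoded on the way out
(os.fsdecode/os.fsencode are what pycompat.fsdecode/fsencode resolve to on
Python 3). A minimal sketch:

    import grp
    import os

    def groupmembers(name):             # name is bytes, as in Mercurial's API
        members = grp.getgrnam(os.fsdecode(name)).gr_mem
        return [os.fsencode(m) for m in members]

    # groupmembers(b'wheel') -> e.g. [b'root'] where such a group exists;
    # an unknown group still raises KeyError, as the docstring above promises.
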
--- a/mercurial/repository.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/repository.py	Mon Feb 04 20:35:21 2019 +0300
@@ -346,8 +346,8 @@
             return
 
         raise error.CapabilityError(
-            _('cannot %s; remote repository does not support the %r '
-              'capability') % (purpose, name))
+            _('cannot %s; remote repository does not support the '
+              '\'%s\' capability') % (purpose, name))
 
 class iverifyproblem(interfaceutil.Interface):
     """Represents a problem with the integrity of the repository.
--- a/mercurial/revlog.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/revlog.py	Mon Feb 04 20:35:21 2019 +0300
@@ -610,6 +610,9 @@
         self._pcache = {}
 
         try:
+            # If we are using the native C version, we are in a fun case
+            # where self.index, self.nodemap and self._nodecache are the same
+            # object.
             self._nodecache.clearcaches()
         except AttributeError:
             self._nodecache = {nullid: nullrev}
--- a/mercurial/revset.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/revset.py	Mon Feb 04 20:35:21 2019 +0300
@@ -225,24 +225,83 @@
 def relationset(repo, subset, x, y, order):
     raise error.ParseError(_("can't use a relation in this context"))
 
-def generationsrel(repo, subset, x, rel, n, order):
-    # TODO: support range, rewrite tests, and drop startdepth argument
-    # from ancestors() and descendants() predicates
-    if n <= 0:
-        n = -n
-        return _ancestors(repo, subset, x, startdepth=n, stopdepth=n + 1)
-    else:
-        return _descendants(repo, subset, x, startdepth=n, stopdepth=n + 1)
+def _splitrange(a, b):
+    """Split range with bounds a and b into two ranges at 0 and return two
+    tuples of numbers for use as startdepth and stopdepth arguments of
+    revancestors and revdescendants.
+
+    >>> _splitrange(-10, -5)     # [-10:-5]
+    ((5, 11), (None, None))
+    >>> _splitrange(5, 10)       # [5:10]
+    ((None, None), (5, 11))
+    >>> _splitrange(-10, 10)     # [-10:10]
+    ((0, 11), (0, 11))
+    >>> _splitrange(-10, 0)      # [-10:0]
+    ((0, 11), (None, None))
+    >>> _splitrange(0, 10)       # [0:10]
+    ((None, None), (0, 11))
+    >>> _splitrange(0, 0)        # [0:0]
+    ((0, 1), (None, None))
+    >>> _splitrange(1, -1)       # [1:-1]
+    ((None, None), (None, None))
+    """
+    ancdepths = (None, None)
+    descdepths = (None, None)
+    if a == b == 0:
+        ancdepths = (0, 1)
+    if a < 0:
+        ancdepths = (-min(b, 0), -a + 1)
+    if b > 0:
+        descdepths = (max(a, 0), b + 1)
+    return ancdepths, descdepths
+
+def generationsrel(repo, subset, x, rel, a, b, order):
+    # TODO: rewrite tests, and drop startdepth argument from ancestors() and
+    # descendants() predicates
+    if a is None:
+        a = -(dagop.maxlogdepth - 1)
+    if b is None:
+        b = +(dagop.maxlogdepth - 1)
+
+    (ancstart, ancstop), (descstart, descstop) = _splitrange(a, b)
+
+    if ancstart is None and descstart is None:
+        return baseset()
+
+    revs = getset(repo, fullreposet(repo), x)
+    if not revs:
+        return baseset()
+
+    if ancstart is not None and descstart is not None:
+        s = dagop.revancestors(repo, revs, False, ancstart, ancstop)
+        s += dagop.revdescendants(repo, revs, False, descstart, descstop)
+    elif ancstart is not None:
+        s = dagop.revancestors(repo, revs, False, ancstart, ancstop)
+    elif descstart is not None:
+        s = dagop.revdescendants(repo, revs, False, descstart, descstop)
+
+    return subset & s
 
 def relsubscriptset(repo, subset, x, y, z, order):
     # this is pretty basic implementation of 'x#y[z]' operator, still
     # experimental so undocumented. see the wiki for further ideas.
     # https://www.mercurial-scm.org/wiki/RevsetOperatorPlan
     rel = getsymbol(y)
-    n = getinteger(z, _("relation subscript must be an integer"))
+    try:
+        a, b = getrange(z, '')
+    except error.ParseError:
+        a = getinteger(z, _("relation subscript must be an integer"))
+        b = a
+    else:
+        def getbound(i):
+            if i is None:
+                return None
+            msg = _("relation subscript bounds must be integers")
+            return getinteger(i, msg)
+        a, b = [getbound(i) for i in (a, b)]
 
     if rel in subscriptrelations:
-        return subscriptrelations[rel](repo, subset, x, rel, n, order)
+        return subscriptrelations[rel](repo, subset, x, rel, a, b, order)
 
     relnames = [r for r in subscriptrelations.keys() if len(r) > 1]
     raise error.UnknownIdentifier(rel, relnames)
@@ -412,7 +471,7 @@
             try:
                 r = cl.parentrevs(r)[0]
             except error.WdirUnsupported:
-                r = repo[r].parents()[0].rev()
+                r = repo[r].p1().rev()
         ps.add(r)
     return subset & ps
 
@@ -1513,7 +1572,7 @@
         try:
             ps.add(cl.parentrevs(r)[0])
         except error.WdirUnsupported:
-            ps.add(repo[r].parents()[0].rev())
+            ps.add(repo[r].p1().rev())
     ps -= {node.nullrev}
     # XXX we should turn this into a baseset instead of a set, smartset may do
     # some optimizations from the fact this is a baseset.
@@ -1632,7 +1691,7 @@
             try:
                 ps.add(cl.parentrevs(r)[0])
             except error.WdirUnsupported:
-                ps.add(repo[r].parents()[0].rev())
+                ps.add(repo[r].p1().rev())
         else:
             try:
                 parents = cl.parentrevs(r)
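
The doctests above cover the pure cases; a standalone copy of the
_splitrange() logic makes the mixed case concrete: generations[-2:3] becomes
an ancestor walk over depths 0..2 and a descendant walk over depths 0..3
(stopdepth is exclusive, and depth 0 is the revision itself).

    def splitrange(a, b):
        ancdepths = (None, None)
        descdepths = (None, None)
        if a == b == 0:
            ancdepths = (0, 1)
        if a < 0:
            ancdepths = (-min(b, 0), -a + 1)
        if b > 0:
            descdepths = (max(a, 0), b + 1)
        return ancdepths, descdepths

    print(splitrange(-2, 3))   # ((0, 3), (0, 4))
    print(splitrange(1, -1))   # ((None, None), (None, None)): empty range
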
--- a/mercurial/scmutil.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/scmutil.py	Mon Feb 04 20:35:21 2019 +0300
@@ -231,10 +231,10 @@
             ui.error(_("(did you forget to compile extensions?)\n"))
         elif m in "zlib".split():
             ui.error(_("(is your Python install correct?)\n"))
-    except IOError as inst:
-        if util.safehasattr(inst, "code"):
+    except (IOError, OSError) as inst:
+        if util.safehasattr(inst, "code"): # HTTPError
             ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
-        elif util.safehasattr(inst, "reason"):
+        elif util.safehasattr(inst, "reason"): # URLError or SSLError
             try: # usually it is in the form (errno, strerror)
                 reason = inst.reason.args[1]
             except (AttributeError, IndexError):
@@ -247,22 +247,15 @@
         elif (util.safehasattr(inst, "args")
               and inst.args and inst.args[0] == errno.EPIPE):
             pass
-        elif getattr(inst, "strerror", None):
-            if getattr(inst, "filename", None):
-                ui.error(_("abort: %s: %s\n") % (
+        elif getattr(inst, "strerror", None): # common IOError or OSError
+            if getattr(inst, "filename", None) is not None:
+                ui.error(_("abort: %s: '%s'\n") % (
                     encoding.strtolocal(inst.strerror),
                     stringutil.forcebytestr(inst.filename)))
             else:
                 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
-        else:
+        else: # suspicious IOError
             raise
-    except OSError as inst:
-        if getattr(inst, "filename", None) is not None:
-            ui.error(_("abort: %s: '%s'\n") % (
-                encoding.strtolocal(inst.strerror),
-                stringutil.forcebytestr(inst.filename)))
-        else:
-            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
     except MemoryError:
         ui.error(_("abort: out of memory\n"))
     except SystemExit as inst:
@@ -673,19 +666,11 @@
     l = revrange(repo, revs)
 
     if not l:
-        first = second = None
-    elif l.isascending():
-        first = l.min()
-        second = l.max()
-    elif l.isdescending():
-        first = l.max()
-        second = l.min()
-    else:
-        first = l.first()
-        second = l.last()
+        raise error.Abort(_('empty revision range'))
 
-    if first is None:
-        raise error.Abort(_('empty revision range'))
+    first = l.first()
+    second = l.last()
+
     if (first == second and len(revs) >= 2
         and not all(revrange(repo, [r]) for r in revs)):
         raise error.Abort(_('empty revision on one side of range'))
@@ -740,6 +725,16 @@
         return []
     return parents
 
+def getuipathfn(repo, relative=None):
+    if relative is None:
+        relative = repo.ui.configbool('ui', 'relative-paths')
+    if relative:
+        cwd = repo.getcwd()
+        pathto = repo.pathto
+        return lambda f: pathto(f, cwd)
+    else:
+        return lambda f: f
+
 def expandpats(pats):
     '''Expand bare globs when running on windows.
     On posix we assume it has already been done by sh.'''
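
getuipathfn() above is the hook that the new ui.relative-paths option and
the filemerge.py changes feed through. A toy version with os.path standing
in for repo.getcwd()/repo.pathto() shows what flipping the option changes
(POSIX-style paths for illustration):

    import os.path

    def getuipathfn(root, cwd, relative):
        if relative:
            return lambda f: os.path.relpath(os.path.join(root, f), cwd)
        return lambda f: f

    absfn = getuipathfn('/repo', '/repo/sub', relative=False)
    relfn = getuipathfn('/repo', '/repo/sub', relative=True)
    print(absfn('sub/file.txt'))   # sub/file.txt (repo-relative, the default)
    print(relfn('sub/file.txt'))   # file.txt (relative to the cwd)
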
--- a/mercurial/sparse.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/sparse.py	Mon Feb 04 20:35:21 2019 +0300
@@ -336,7 +336,7 @@
     if branchmerge:
         # If we're merging, use the wctx filter, since we're merging into
         # the wctx.
-        sparsematch = matcher(repo, [wctx.parents()[0].rev()])
+        sparsematch = matcher(repo, [wctx.p1().rev()])
     else:
         # If we're updating, use the target context's filter, since we're
         # moving to the target context.
--- a/mercurial/sslutil.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/sslutil.py	Mon Feb 04 20:35:21 2019 +0300
@@ -430,6 +430,7 @@
                           'error)\n'))
         except ssl.SSLError:
             pass
+
         # Try to print more helpful error messages for known failures.
         if util.safehasattr(e, 'reason'):
             # This error occurs when the client and server don't share a
@@ -437,7 +438,7 @@
             # outright. Hopefully the reason for this error is that we require
             # TLS 1.1+ and the server only supports TLS 1.0. Whatever the
             # reason, try to emit an actionable warning.
-            if e.reason == 'UNSUPPORTED_PROTOCOL':
+            if e.reason == r'UNSUPPORTED_PROTOCOL':
                 # We attempted TLS 1.0+.
                 if settings['protocolui'] == 'tls1.0':
                     # We support more than just TLS 1.0+. If this happens,
@@ -453,7 +454,7 @@
                             'server; see '
                             'https://mercurial-scm.org/wiki/SecureConnections '
                             'for more info)\n') % (
-                                serverhostname,
+                                pycompat.bytesurl(serverhostname),
                                 ', '.join(sorted(supportedprotocols))))
                     else:
                         ui.warn(_(
@@ -462,7 +463,8 @@
                             'supports TLS 1.0 because it has known security '
                             'vulnerabilities; see '
                             'https://mercurial-scm.org/wiki/SecureConnections '
-                            'for more info)\n') % serverhostname)
+                            'for more info)\n') %
+                                pycompat.bytesurl(serverhostname))
                 else:
                     # We attempted TLS 1.1+. We can only get here if the client
                     # supports the configured protocol. So the likely reason is
@@ -472,19 +474,20 @@
                         '(could not negotiate a common security protocol (%s+) '
                         'with %s; the likely cause is Mercurial is configured '
                         'to be more secure than the server can support)\n') % (
-                        settings['protocolui'], serverhostname))
+                        settings['protocolui'],
+                        pycompat.bytesurl(serverhostname)))
                     ui.warn(_('(consider contacting the operator of this '
                               'server and ask them to support modern TLS '
                               'protocol versions; or, set '
                               'hostsecurity.%s:minimumprotocol=tls1.0 to allow '
                               'use of legacy, less secure protocols when '
                               'communicating with this server)\n') %
-                            serverhostname)
+                            pycompat.bytesurl(serverhostname))
                     ui.warn(_(
                         '(see https://mercurial-scm.org/wiki/SecureConnections '
                         'for more info)\n'))
 
-            elif (e.reason == 'CERTIFICATE_VERIFY_FAILED' and
+            elif (e.reason == r'CERTIFICATE_VERIFY_FAILED' and
                 pycompat.iswindows):
 
                 ui.warn(_('(the full certificate chain may not be available '
--- a/mercurial/statichttprepo.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/statichttprepo.py	Mon Feb 04 20:35:21 2019 +0300
@@ -19,6 +19,7 @@
     manifest,
     namespaces,
     pathutil,
+    pycompat,
     url,
     util,
     vfs as vfsmod,
@@ -44,12 +45,12 @@
     def seek(self, pos):
         self.pos = pos
     def read(self, bytes=None):
-        req = urlreq.request(self.url)
+        req = urlreq.request(pycompat.strurl(self.url))
         end = ''
         if bytes:
             end = self.pos + bytes - 1
         if self.pos or end:
-            req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
+            req.add_header(r'Range', r'bytes=%d-%s' % (self.pos, end))
 
         try:
             f = self.opener.open(req)
@@ -59,7 +60,7 @@
             num = inst.code == 404 and errno.ENOENT or None
             raise IOError(num, inst)
         except urlerr.urlerror as inst:
-            raise IOError(None, inst.reason[1])
+            raise IOError(None, inst.reason)
 
         if code == 200:
             # HTTPRangeHandler does nothing if remote does not support
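
A plain urllib sketch of the byte-range request that read() above builds
through urlreq, Mercurial's urllib compatibility shim; the URL in the
commented-out call is only an example.

    import urllib.request

    def rangeread(url, pos, nbytes):
        req = urllib.request.Request(url)
        req.add_header('Range', 'bytes=%d-%d' % (pos, pos + nbytes - 1))
        with urllib.request.urlopen(req) as f:
            # a 200 answer means the server ignored Range and sent the whole
            # file, which is why read() above re-checks the status code
            return f.read(), f.status == 206

    # data, partial = rangeread('https://example.org/repo/file', 0, 64)
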
--- a/mercurial/statprof.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/statprof.py	Mon Feb 04 20:35:21 2019 +0300
@@ -816,9 +816,6 @@
             id2stack[-1].update(parent=parent)
         return myid
 
-    def endswith(a, b):
-        return list(a)[-len(b):] == list(b)
-
     # The sampling profiler can sample multiple times without
     # advancing the clock, potentially causing the Chrome trace viewer
     # to render single-pixel columns that we cannot zoom in on.  We
@@ -858,9 +855,6 @@
     # events given only stack snapshots.
 
     for sample in data.samples:
-        tos = sample.stack[0]
-        name = tos.function
-        path = simplifypath(tos.path)
         stack = tuple((('%s:%d' % (simplifypath(frame.path), frame.lineno),
                         frame.function) for frame in sample.stack))
         qstack = collections.deque(stack)
--- a/mercurial/subrepo.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/subrepo.py	Mon Feb 04 20:35:21 2019 +0300
@@ -971,9 +971,8 @@
         p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd),
                              bufsize=-1, close_fds=procutil.closefds,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                             universal_newlines=True,
                              env=procutil.tonativeenv(env), **extrakw)
-        stdout, stderr = p.communicate()
+        stdout, stderr = map(util.fromnativeeol, p.communicate())
         stderr = stderr.strip()
         if not failok:
             if p.returncode:
@@ -1000,13 +999,14 @@
         # both. We used to store the working directory one.
         output, err = self._svncommand(['info', '--xml'])
         doc = xml.dom.minidom.parseString(output)
-        entries = doc.getElementsByTagName('entry')
+        entries = doc.getElementsByTagName(r'entry')
         lastrev, rev = '0', '0'
         if entries:
-            rev = str(entries[0].getAttribute('revision')) or '0'
-            commits = entries[0].getElementsByTagName('commit')
+            rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
+            commits = entries[0].getElementsByTagName(r'commit')
             if commits:
-                lastrev = str(commits[0].getAttribute('revision')) or '0'
+                lastrev = pycompat.bytestr(
+                    commits[0].getAttribute(r'revision')) or '0'
         return (lastrev, rev)
 
     def _wcrev(self):
@@ -1021,19 +1021,19 @@
         output, err = self._svncommand(['status', '--xml'])
         externals, changes, missing = [], [], []
         doc = xml.dom.minidom.parseString(output)
-        for e in doc.getElementsByTagName('entry'):
-            s = e.getElementsByTagName('wc-status')
+        for e in doc.getElementsByTagName(r'entry'):
+            s = e.getElementsByTagName(r'wc-status')
             if not s:
                 continue
-            item = s[0].getAttribute('item')
-            props = s[0].getAttribute('props')
-            path = e.getAttribute('path')
-            if item == 'external':
+            item = s[0].getAttribute(r'item')
+            props = s[0].getAttribute(r'props')
+            path = e.getAttribute(r'path').encode('utf8')
+            if item == r'external':
                 externals.append(path)
-            elif item == 'missing':
+            elif item == r'missing':
                 missing.append(path)
-            if (item not in ('', 'normal', 'unversioned', 'external')
-                or props not in ('', 'none', 'normal')):
+            if (item not in (r'', r'normal', r'unversioned', r'external')
+                or props not in (r'', r'none', r'normal')):
                 changes.append(path)
         for path in changes:
             for ext in externals:
@@ -1154,14 +1154,14 @@
         output = self._svncommand(['list', '--recursive', '--xml'])[0]
         doc = xml.dom.minidom.parseString(output)
         paths = []
-        for e in doc.getElementsByTagName('entry'):
-            kind = pycompat.bytestr(e.getAttribute('kind'))
+        for e in doc.getElementsByTagName(r'entry'):
+            kind = pycompat.bytestr(e.getAttribute(r'kind'))
             if kind != 'file':
                 continue
-            name = ''.join(c.data for c
-                           in e.getElementsByTagName('name')[0].childNodes
-                           if c.nodeType == c.TEXT_NODE)
-            paths.append(name.encode('utf-8'))
+            name = r''.join(c.data for c
+                            in e.getElementsByTagName(r'name')[0].childNodes
+                            if c.nodeType == c.TEXT_NODE)
+            paths.append(name.encode('utf8'))
         return paths
 
     def filedata(self, name, decode):
@@ -1673,13 +1673,14 @@
         for info in tar:
             if info.isdir():
                 continue
-            if match and not match(info.name):
+            bname = pycompat.fsencode(info.name)
+            if match and not match(bname):
                 continue
             if info.issym():
                 data = info.linkname
             else:
                 data = tar.extractfile(info).read()
-            archiver.addfile(prefix + self._path + '/' + info.name,
+            archiver.addfile(prefix + self._path + '/' + bname,
                              info.mode, info.issym(), data)
             total += 1
             progress.increment()
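
Several of the svn subrepo hunks above deal with the same Python 3 issue:
xml.dom.minidom hands back native str tag names and attribute values, so
anything flowing back into Mercurial's bytes-only code paths has to be
encoded explicitly. A tiny standalone illustration:

    import xml.dom.minidom

    output = ('<status><entry path="a/b.txt">'
              '<wc-status item="modified"/></entry></status>')
    doc = xml.dom.minidom.parseString(output)
    for e in doc.getElementsByTagName(r'entry'):
        path = e.getAttribute(r'path').encode('utf8')   # native str -> bytes
        item = e.getElementsByTagName(r'wc-status')[0].getAttribute(r'item')
        print(path, item)   # b'a/b.txt' modified
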
--- a/mercurial/subrepoutil.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/subrepoutil.py	Mon Feb 04 20:35:21 2019 +0300
@@ -145,7 +145,6 @@
 
     promptssrc = filemerge.partextras(labels)
     for s, l in sorted(s1.iteritems()):
-        prompts = None
         a = sa.get(s, nullstate)
         ld = l # local state with possible dirty flag for compares
         if wctx.sub(s).dirty():
@@ -218,7 +217,6 @@
                 wctx.sub(s).remove()
 
     for s, r in sorted(s2.items()):
-        prompts = None
         if s in s1:
             continue
         elif s not in sa:
--- a/mercurial/templatekw.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/templatekw.py	Mon Feb 04 20:35:21 2019 +0300
@@ -796,7 +796,7 @@
     substate = ctx.substate
     if not substate:
         return compatlist(context, mapping, 'subrepo', [])
-    psubstate = ctx.parents()[0].substate or {}
+    psubstate = ctx.p1().substate or {}
     subrepos = []
     for sub in substate:
         if sub not in psubstate or substate[sub] != psubstate[sub]:
--- a/mercurial/transaction.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/transaction.py	Mon Feb 04 20:35:21 2019 +0300
@@ -89,7 +89,7 @@
                 except (IOError, OSError) as inst:
                     if inst.errno != errno.ENOENT:
                         raise
-        except (IOError, OSError, error.Abort) as inst:
+        except (IOError, OSError, error.Abort):
             if not c:
                 raise
 
@@ -101,7 +101,7 @@
         for f in backupfiles:
             if opener.exists(f):
                 opener.unlink(f)
-    except (IOError, OSError, error.Abort) as inst:
+    except (IOError, OSError, error.Abort):
         # only pure backup file remains, it is safe to ignore any error
         pass
 
--- a/mercurial/ui.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/ui.py	Mon Feb 04 20:35:21 2019 +0300
@@ -58,12 +58,12 @@
 statuscopies = yes
 # Prefer curses UIs when available. Revert to plain-text with `text`.
 interface = curses
+# Make compatible commands emit cwd-relative paths by default.
+relative-paths = yes
 
 [commands]
 # Grep working directory by default.
 grep.all-files = True
-# Make `hg status` emit cwd-relative paths by default.
-status.relative = yes
 # Refuse to perform an `hg update` that would cause a file content merge
 update.check = noconflict
 # Show conflicts information in `hg status`
@@ -566,8 +566,6 @@
             candidate = self._data(untrusted).get(s, n, None)
             if candidate is not None:
                 value = candidate
-                section = s
-                name = n
                 break
 
         if self.debugflag and not untrusted and self._reportuntrusted:
@@ -2053,7 +2051,11 @@
         This is its own function so that extensions can change the definition of
         'valid' in this case (like when pulling from a git repo into a hg
         one)."""
-        return os.path.isdir(os.path.join(path, '.hg'))
+        try:
+            return os.path.isdir(os.path.join(path, '.hg'))
+        # Python 2 may return TypeError. Python 3, ValueError.
+        except (TypeError, ValueError):
+            return False
 
     @property
     def suboptions(self):
--- a/mercurial/url.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/url.py	Mon Feb 04 20:35:21 2019 +0300
@@ -58,8 +58,11 @@
         return self.passwddb.add_password(realm, uri, user, passwd)
 
     def find_user_password(self, realm, authuri):
+        assert isinstance(realm, (type(None), str))
+        assert isinstance(authuri, str)
         authinfo = self.passwddb.find_user_password(realm, authuri)
         user, passwd = authinfo
+        user, passwd = pycompat.bytesurl(user), pycompat.bytesurl(passwd)
         if user and passwd:
             self._writedebug(user, passwd)
             return (user, passwd)
--- a/mercurial/util.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/util.py	Mon Feb 04 20:35:21 2019 +0300
@@ -789,6 +789,12 @@
                                                       res))
 
         data = dest[0:res] if res is not None else b''
+
+        # _writedata() uses "in" operator and is confused by memoryview because
+        # characters are ints on Python 3.
+        if isinstance(data, memoryview):
+            data = data.tobytes()
+
         self._writedata(data)
 
     def write(self, res, data):
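
A quick demonstration of the memoryview behaviour the new comment above
describes: iterating or indexing a memoryview of bytes yields ints on
Python 3, so substring-style checks only work once the data is converted
back to bytes.

    data = memoryview(b'abc\n')
    print(next(iter(data)))          # 97: elements are ints, not 1-byte strings
    print(b'\n' in data.tobytes())   # True once converted back to bytes
    print(b'\n' in bytes(data))      # equivalent spelling of the conversion
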
--- a/mercurial/wireprotov1server.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/wireprotov1server.py	Mon Feb 04 20:35:21 2019 +0300
@@ -7,6 +7,7 @@
 
 from __future__ import absolute_import
 
+import binascii
 import os
 
 from .i18n import _
@@ -344,7 +345,7 @@
       one specific branch of many.
     """
     def decodehexstring(s):
-        return set([h.decode('hex') for h in s.split(';')])
+        return set([binascii.unhexlify(h) for h in s.split(';')])
 
     manifest = repo.vfs.tryread('pullbundles.manifest')
     if not manifest:
@@ -424,8 +425,6 @@
             raise error.Abort(bundle2requiredmain,
                               hint=bundle2requiredhint)
 
-    prefercompressed = True
-
     try:
         clheads = set(repo.changelog.heads())
         heads = set(opts.get('heads', set()))
@@ -578,7 +577,6 @@
                     repo.ui.debug('redirecting incoming bundle to %s\n' %
                         tempname)
                     fp = os.fdopen(fd, pycompat.sysstr('wb+'))
-                    r = 0
                     for p in payload:
                         fp.write(p)
                     fp.seek(0)
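
The decodehexstring() change above replaces str.decode('hex'), a Python 2-only
codec spelling, with binascii.unhexlify(), which behaves the same on both major
versions (on Python 3, b'...'.decode('hex') raises LookupError instead). A
quick round-trip sketch with the same shape as the hunk:

    import binascii

    assert binascii.hexlify(b'hg') == b'6867'
    assert binascii.unhexlify(b'6867') == b'hg'

    def decodehexstring(s):
        # ';'-separated list of hex-encoded node ids, as in the hunk above.
        return set(binascii.unhexlify(h) for h in s.split(b';'))

    print(decodehexstring(b'6867;6868'))  # {b'hg', b'hh'}
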
--- a/mercurial/wireprotov2peer.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/wireprotov2peer.py	Mon Feb 04 20:35:21 2019 +0300
@@ -304,7 +304,7 @@
                 # TODO tell reactor?
                 self._frameseof = True
             else:
-                self._ui.note(_('received %r\n') % frame)
+                self._ui.debug('received %r\n' % frame)
                 self._processframe(frame)
 
         # Also try to read the first redirect.
@@ -510,7 +510,7 @@
     # Bytestring where each byte is a 0 or 1.
     raw = next(objs)
 
-    return [True if c == '1' else False for c in raw]
+    return [True if raw[i:i + 1] == b'1' else False for i in range(len(raw))]
 
 def decodelistkeys(objs):
     # Map with bytestring keys and values.
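
The decodebooleans() rewrite above switches from iterating characters to taking
one-byte slices because indexing a bytes object yields an int on Python 3,
whereas a one-byte slice is a bytes object on both versions. A minimal sketch:

    raw = b'101'

    print(raw[0])      # 49 on Python 3 (an int); b'1' on Python 2
    print(raw[0:1])    # b'1' on both

    flags = [raw[i:i + 1] == b'1' for i in range(len(raw))]
    print(flags)       # [True, False, True]
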
--- a/mercurial/wireprotov2server.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/mercurial/wireprotov2server.py	Mon Feb 04 20:35:21 2019 +0300
@@ -23,6 +23,7 @@
     narrowspec,
     pycompat,
     streamclone,
+    templatefilters,
     util,
     wireprotoframing,
     wireprototypes,
@@ -148,8 +149,6 @@
     tracker. We then dump the log of all that activity back out to the
     client.
     """
-    import json
-
     # Reflection APIs have a history of being abused, accidentally disclosing
     # sensitive data, etc. So we have a config knob.
     if not ui.configbool('experimental', 'web.api.debugreflect'):
@@ -175,12 +174,11 @@
                                                   frame.payload))
 
         action, meta = reactor.onframerecv(frame)
-        states.append(json.dumps((action, meta), sort_keys=True,
-                                 separators=(', ', ': ')))
+        states.append(templatefilters.json((action, meta)))
 
     action, meta = reactor.oninputeof()
     meta['action'] = action
-    states.append(json.dumps(meta, sort_keys=True, separators=(', ',': ')))
+    states.append(templatefilters.json(meta))
 
     res.status = b'200 OK'
     res.headers[b'Content-Type'] = b'text/plain'
@@ -390,7 +388,8 @@
         return
 
     with cacher:
-        cachekey = entry.cachekeyfn(repo, proto, cacher, **args)
+        cachekey = entry.cachekeyfn(repo, proto, cacher,
+                                    **pycompat.strkwargs(args))
 
         # No cache key or the cacher doesn't like it. Do default handling.
         if cachekey is None or not cacher.setcachekey(cachekey):
@@ -744,7 +743,7 @@
             # More granular cache key invalidation.
             b'localversion': localversion,
             # Cache keys are segmented by command.
-            b'command': pycompat.sysbytes(command),
+            b'command': command,
             # Throw in the media type and API version strings so changes
             # to exchange semantics invalidate the cache.
             b'mediatype': FRAMINGTYPE,
--- a/rust/chg/src/sighandlers.c	Fri Feb 01 13:44:09 2019 -0500
+++ b/rust/chg/src/sighandlers.c	Mon Feb 04 20:35:21 2019 +0300
@@ -33,28 +33,36 @@
 {
 	sigset_t unblockset, oldset;
 	struct sigaction sa, oldsa;
-	if (sigemptyset(&unblockset) < 0)
+	if (sigemptyset(&unblockset) < 0) {
 		return;
-	if (sigaddset(&unblockset, sig) < 0)
+	}
+	if (sigaddset(&unblockset, sig) < 0) {
 		return;
+	}
 	memset(&sa, 0, sizeof(sa));
 	sa.sa_handler = SIG_DFL;
 	sa.sa_flags = SA_RESTART;
-	if (sigemptyset(&sa.sa_mask) < 0)
+	if (sigemptyset(&sa.sa_mask) < 0) {
 		return;
+	}
 
 	forwardsignal(sig);
-	if (raise(sig) < 0) /* resend to self */
+	if (raise(sig) < 0) { /* resend to self */
 		return;
-	if (sigaction(sig, &sa, &oldsa) < 0)
+	}
+	if (sigaction(sig, &sa, &oldsa) < 0) {
 		return;
-	if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0)
+	}
+	if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) {
 		return;
+	}
 	/* resent signal will be handled before sigprocmask() returns */
-	if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0)
+	if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) {
 		return;
-	if (sigaction(sig, &oldsa, NULL) < 0)
+	}
+	if (sigaction(sig, &oldsa, NULL) < 0) {
 		return;
+	}
 }
 
 /*
@@ -81,37 +89,46 @@
 	 * - SIGINT: usually generated by the terminal */
 	sa.sa_handler = forwardsignaltogroup;
 	sa.sa_flags = SA_RESTART;
-	if (sigemptyset(&sa.sa_mask) < 0)
+	if (sigemptyset(&sa.sa_mask) < 0) {
+		return -1;
+	}
+	if (sigaction(SIGHUP, &sa, NULL) < 0) {
 		return -1;
-	if (sigaction(SIGHUP, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGINT, &sa, NULL) < 0) {
 		return -1;
-	if (sigaction(SIGINT, &sa, NULL) < 0)
-		return -1;
+	}
 
 	/* terminate frontend by double SIGTERM in case of server freeze */
 	sa.sa_handler = forwardsignal;
 	sa.sa_flags |= SA_RESETHAND;
-	if (sigaction(SIGTERM, &sa, NULL) < 0)
+	if (sigaction(SIGTERM, &sa, NULL) < 0) {
 		return -1;
+	}
 
 	/* notify the worker about window resize events */
 	sa.sa_flags = SA_RESTART;
-	if (sigaction(SIGWINCH, &sa, NULL) < 0)
+	if (sigaction(SIGWINCH, &sa, NULL) < 0) {
 		return -1;
+	}
 	/* forward user-defined signals */
-	if (sigaction(SIGUSR1, &sa, NULL) < 0)
+	if (sigaction(SIGUSR1, &sa, NULL) < 0) {
 		return -1;
-	if (sigaction(SIGUSR2, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGUSR2, &sa, NULL) < 0) {
 		return -1;
+	}
 	/* propagate job control requests to worker */
 	sa.sa_handler = forwardsignal;
 	sa.sa_flags = SA_RESTART;
-	if (sigaction(SIGCONT, &sa, NULL) < 0)
+	if (sigaction(SIGCONT, &sa, NULL) < 0) {
 		return -1;
+	}
 	sa.sa_handler = handlestopsignal;
 	sa.sa_flags = SA_RESTART;
-	if (sigaction(SIGTSTP, &sa, NULL) < 0)
+	if (sigaction(SIGTSTP, &sa, NULL) < 0) {
 		return -1;
+	}
 
 	return 0;
 }
@@ -127,24 +144,31 @@
 	memset(&sa, 0, sizeof(sa));
 	sa.sa_handler = SIG_DFL;
 	sa.sa_flags = SA_RESTART;
-	if (sigemptyset(&sa.sa_mask) < 0)
+	if (sigemptyset(&sa.sa_mask) < 0) {
 		return -1;
+	}
 
-	if (sigaction(SIGHUP, &sa, NULL) < 0)
+	if (sigaction(SIGHUP, &sa, NULL) < 0) {
 		return -1;
-	if (sigaction(SIGTERM, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGTERM, &sa, NULL) < 0) {
 		return -1;
-	if (sigaction(SIGWINCH, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGWINCH, &sa, NULL) < 0) {
 		return -1;
-	if (sigaction(SIGCONT, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGCONT, &sa, NULL) < 0) {
 		return -1;
-	if (sigaction(SIGTSTP, &sa, NULL) < 0)
+	}
+	if (sigaction(SIGTSTP, &sa, NULL) < 0) {
 		return -1;
+	}
 
 	/* ignore Ctrl+C while shutting down to make the pager exit cleanly */
 	sa.sa_handler = SIG_IGN;
-	if (sigaction(SIGINT, &sa, NULL) < 0)
+	if (sigaction(SIGINT, &sa, NULL) < 0) {
 		return -1;
+	}
 
 	peerpid = 0;
 	return 0;
--- a/tests/artifacts/scripts/generate-churning-bundle.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/artifacts/scripts/generate-churning-bundle.py	Mon Feb 04 20:35:21 2019 +0300
@@ -42,7 +42,6 @@
 FILENAME='SPARSE-REVLOG-TEST-FILE'
 NB_LINES = 10500
 ALWAYS_CHANGE_LINES = 500
-FILENAME = 'SPARSE-REVLOG-TEST-FILE'
 OTHER_CHANGES = 300
 
 def nextcontent(previous_content):
--- a/tests/badserverext.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/badserverext.py	Mon Feb 04 20:35:21 2019 +0300
@@ -34,6 +34,7 @@
 import socket
 
 from mercurial import(
+    pycompat,
     registrar,
 )
 
@@ -48,10 +49,10 @@
     default=False,
 )
 configitem(b'badserver', b'closeafterrecvbytes',
-    default='0',
+    default=b'0',
 )
 configitem(b'badserver', b'closeaftersendbytes',
-    default='0',
+    default=b'0',
 )
 configitem(b'badserver', b'closebeforeaccept',
     default=False,
@@ -74,7 +75,7 @@
         object.__setattr__(self, '_closeaftersendbytes', closeaftersendbytes)
 
     def __getattribute__(self, name):
-        if name in ('makefile',):
+        if name in ('makefile', 'sendall', '_writelog'):
             return object.__getattribute__(self, name)
 
         return getattr(object.__getattribute__(self, '_orig'), name)
@@ -85,6 +86,13 @@
     def __setattr__(self, name, value):
         setattr(object.__getattribute__(self, '_orig'), name, value)
 
+    def _writelog(self, msg):
+        msg = msg.replace(b'\r', b'\\r').replace(b'\n', b'\\n')
+
+        object.__getattribute__(self, '_logfp').write(msg)
+        object.__getattribute__(self, '_logfp').write(b'\n')
+        object.__getattribute__(self, '_logfp').flush()
+
     def makefile(self, mode, bufsize):
         f = object.__getattribute__(self, '_orig').makefile(mode, bufsize)
 
@@ -98,6 +106,38 @@
                                closeafterrecvbytes=closeafterrecvbytes,
                                closeaftersendbytes=closeaftersendbytes)
 
+    def sendall(self, data, flags=0):
+        remaining = object.__getattribute__(self, '_closeaftersendbytes')
+
+        # No send limit. Call original function.
+        if not remaining:
+            result = object.__getattribute__(self, '_orig').sendall(data, flags)
+            self._writelog(b'sendall(%d) -> %s' % (len(data), data))
+            return result
+
+        if len(data) > remaining:
+            newdata = data[0:remaining]
+        else:
+            newdata = data
+
+        remaining -= len(newdata)
+
+        result = object.__getattribute__(self, '_orig').sendall(newdata, flags)
+
+        self._writelog(b'sendall(%d from %d) -> (%d) %s' % (
+            len(newdata), len(data), remaining, newdata))
+
+        object.__setattr__(self, '_closeaftersendbytes', remaining)
+
+        if remaining <= 0:
+            self._writelog(b'write limit reached; closing socket')
+            object.__getattribute__(self, '_orig').shutdown(socket.SHUT_RDWR)
+
+            raise Exception('connection closed after sending N bytes')
+
+        return result
+
+
 # We can't adjust __class__ on socket._fileobject, so define a proxy.
 class fileobjectproxy(object):
     __slots__ = (
@@ -115,7 +155,7 @@
         object.__setattr__(self, '_closeaftersendbytes', closeaftersendbytes)
 
     def __getattribute__(self, name):
-        if name in ('read', 'readline', 'write', '_writelog'):
+        if name in ('_close', 'read', 'readline', 'write', '_writelog'):
             return object.__getattribute__(self, name)
 
         return getattr(object.__getattribute__(self, '_orig'), name)
@@ -127,21 +167,34 @@
         setattr(object.__getattribute__(self, '_orig'), name, value)
 
     def _writelog(self, msg):
-        msg = msg.replace('\r', '\\r').replace('\n', '\\n')
+        msg = msg.replace(b'\r', b'\\r').replace(b'\n', b'\\n')
 
         object.__getattribute__(self, '_logfp').write(msg)
-        object.__getattribute__(self, '_logfp').write('\n')
+        object.__getattribute__(self, '_logfp').write(b'\n')
         object.__getattribute__(self, '_logfp').flush()
 
+    def _close(self):
+        # Python 3 uses a buffered io object (io.BufferedReader/BufferedWriter).
+        # Python 2 uses some file object wrapper.
+        if pycompat.ispy3:
+            orig = object.__getattribute__(self, '_orig')
+
+            if hasattr(orig, 'raw'):
+                orig.raw._sock.shutdown(socket.SHUT_RDWR)
+            else:
+                self.close()
+        else:
+            self._sock.shutdown(socket.SHUT_RDWR)
+
     def read(self, size=-1):
         remaining = object.__getattribute__(self, '_closeafterrecvbytes')
 
         # No read limit. Call original function.
         if not remaining:
             result = object.__getattribute__(self, '_orig').read(size)
-            self._writelog('read(%d) -> (%d) (%s) %s' % (size,
-                                                           len(result),
-                                                           result))
+            self._writelog(b'read(%d) -> (%d) (%s) %s' % (size,
+                                                          len(result),
+                                                          result))
             return result
 
         origsize = size
@@ -154,14 +207,15 @@
         result = object.__getattribute__(self, '_orig').read(size)
         remaining -= len(result)
 
-        self._writelog('read(%d from %d) -> (%d) %s' % (
+        self._writelog(b'read(%d from %d) -> (%d) %s' % (
             size, origsize, len(result), result))
 
         object.__setattr__(self, '_closeafterrecvbytes', remaining)
 
         if remaining <= 0:
-            self._writelog('read limit reached, closing socket')
-            self._sock.close()
+            self._writelog(b'read limit reached, closing socket')
+            self._close()
+
             # This is the easiest way to abort the current request.
             raise Exception('connection closed after receiving N bytes')
 
@@ -173,7 +227,7 @@
         # No read limit. Call original function.
         if not remaining:
             result = object.__getattribute__(self, '_orig').readline(size)
-            self._writelog('readline(%d) -> (%d) %s' % (
+            self._writelog(b'readline(%d) -> (%d) %s' % (
                 size, len(result), result))
             return result
 
@@ -187,14 +241,15 @@
         result = object.__getattribute__(self, '_orig').readline(size)
         remaining -= len(result)
 
-        self._writelog('readline(%d from %d) -> (%d) %s' % (
+        self._writelog(b'readline(%d from %d) -> (%d) %s' % (
             size, origsize, len(result), result))
 
         object.__setattr__(self, '_closeafterrecvbytes', remaining)
 
         if remaining <= 0:
-            self._writelog('read limit reached; closing socket')
-            self._sock.close()
+            self._writelog(b'read limit reached; closing socket')
+            self._close()
+
             # This is the easiest way to abort the current request.
             raise Exception('connection closed after receiving N bytes')
 
@@ -205,7 +260,7 @@
 
         # No byte limit on this operation. Call original function.
         if not remaining:
-            self._writelog('write(%d) -> %s' % (len(data), data))
+            self._writelog(b'write(%d) -> %s' % (len(data), data))
             result = object.__getattribute__(self, '_orig').write(data)
             return result
 
@@ -216,7 +271,7 @@
 
         remaining -= len(newdata)
 
-        self._writelog('write(%d from %d) -> (%d) %s' % (
+        self._writelog(b'write(%d from %d) -> (%d) %s' % (
             len(newdata), len(data), remaining, newdata))
 
         result = object.__getattribute__(self, '_orig').write(newdata)
@@ -224,8 +279,9 @@
         object.__setattr__(self, '_closeaftersendbytes', remaining)
 
         if remaining <= 0:
-            self._writelog('write limit reached; closing socket')
-            self._sock.close()
+            self._writelog(b'write limit reached; closing socket')
+            self._close()
+
             raise Exception('connection closed after sending N bytes')
 
         return result
@@ -239,10 +295,10 @@
             super(badserver, self).__init__(ui, *args, **kwargs)
 
             recvbytes = self._ui.config(b'badserver', b'closeafterrecvbytes')
-            recvbytes = recvbytes.split(',')
+            recvbytes = recvbytes.split(b',')
             self.closeafterrecvbytes = [int(v) for v in recvbytes if v]
             sendbytes = self._ui.config(b'badserver', b'closeaftersendbytes')
-            sendbytes = sendbytes.split(',')
+            sendbytes = sendbytes.split(b',')
             self.closeaftersendbytes = [int(v) for v in sendbytes if v]
 
             # Need to inherit object so super() works.
@@ -270,7 +326,7 @@
                 # Simulate failure to stop processing this request.
                 raise socket.error('close before accept')
 
-            if self._ui.configbool('badserver', 'closeafteraccept'):
+            if self._ui.configbool(b'badserver', b'closeafteraccept'):
                 request, client_address = super(badserver, self).get_request()
                 request.close()
                 raise socket.error('close after accept')
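
The _close() helper added to badserverext.py above reaches through the buffered
file object returned by socket.makefile() to shut the connection down at the
socket level; on CPython 3 the underlying socket hangs off the SocketIO's
private _sock attribute, an implementation detail rather than a documented API.
A hedged standalone sketch of the same trick:

    import socket

    a, b = socket.socketpair()
    f = a.makefile('rb')  # io.BufferedReader wrapping a socket.SocketIO

    raw = getattr(f, 'raw', None)
    if raw is not None and hasattr(raw, '_sock'):
        # Same CPython detail badserverext._close() relies on: abruptly shut
        # down the connection underneath the buffered reader.
        raw._sock.shutdown(socket.SHUT_RDWR)
    else:
        f.close()

    for obj in (f, a, b):
        obj.close()
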
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/filtertraceback.py	Mon Feb 04 20:35:21 2019 +0300
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Filters traceback lines from stdin.
+
+from __future__ import absolute_import, print_function
+
+import sys
+
+state = 'none'
+
+for line in sys.stdin:
+    if state == 'none':
+        if line.startswith('Traceback '):
+            state = 'tb'
+
+    elif state == 'tb':
+        if line.startswith('  File '):
+            state = 'file'
+            continue
+
+        elif not line.startswith(' '):
+            state = 'none'
+
+    elif state == 'file':
+        # Ignore lines after "  File "
+        state = 'tb'
+        continue
+
+    print(line, end='')
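
filtertraceback.py is a small three-state filter: it keeps the "Traceback"
header and the final exception line, and drops every "  File ..." frame along
with the source line that follows it. A sketch of feeding it a sample traceback
(the script path is whatever your checkout uses; shown here as an assumption):

    import subprocess
    import sys

    tb = (b'Traceback (most recent call last):\n'
          b'  File "foo.py", line 1, in <module>\n'
          b"    raise ValueError('boom')\n"
          b'ValueError: boom\n')

    out = subprocess.run([sys.executable, 'tests/filtertraceback.py'],
                         input=tb, stdout=subprocess.PIPE).stdout
    print(out.decode())
    # Traceback (most recent call last):
    # ValueError: boom
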
--- a/tests/flagprocessorext.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/flagprocessorext.py	Mon Feb 04 20:35:21 2019 +0300
@@ -107,7 +107,7 @@
 
     # Teach exchange to use changegroup 3
     for k in exchange._bundlespeccontentopts.keys():
-        exchange._bundlespeccontentopts[k]["cg.version"] = "03"
+        exchange._bundlespeccontentopts[k][b"cg.version"] = b"03"
 
     # Register flag processors for each extension
     revlog.addflagprocessor(
--- a/tests/hghave.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/hghave.py	Mon Feb 04 20:35:21 2019 +0300
@@ -1,6 +1,5 @@
 from __future__ import absolute_import
 
-import errno
 import os
 import re
 import socket
@@ -118,13 +117,8 @@
     is matched by the supplied regular expression.
     """
     r = re.compile(regexp)
-    try:
-        p = subprocess.Popen(
-            cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    except OSError as e:
-        if e.errno != errno.ENOENT:
-            raise
-        ret = -1
+    p = subprocess.Popen(
+        cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     s = p.communicate()[0]
     ret = p.returncode
     return (ignorestatus or not ret) and r.search(s)
@@ -549,7 +543,7 @@
 @check("tls1.2", "TLS 1.2 protocol support")
 def has_tls1_2():
     from mercurial import sslutil
-    return 'tls1.2' in sslutil.supportedprotocols
+    return b'tls1.2' in sslutil.supportedprotocols
 
 @check("windows", "Windows")
 def has_windows():
@@ -652,6 +646,13 @@
     # chg disables demandimport intentionally for performance wins.
     return ((not has_chg()) and os.environ.get('HGDEMANDIMPORT') != 'disable')
 
+@checkvers("py", "Python >= %s", (2.7, 3.5, 3.6, 3.7, 3.8, 3.9))
+def has_python_range(v):
+    major, minor = v.split('.')[0:2]
+    py_major, py_minor = sys.version_info.major, sys.version_info.minor
+
+    return (py_major, py_minor) >= (int(major), int(minor))
+
 @check("py3", "running with Python 3.x")
 def has_py3():
     return 3 == sys.version_info[0]
--- a/tests/notcapable	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/notcapable	Mon Feb 04 20:35:21 2019 +0300
@@ -11,7 +11,7 @@
     extensions.wrapfunction(repository.peer, 'capable', wrapcapable)
     extensions.wrapfunction(localrepo.localrepository, 'peer', wrappeer)
 def wrapcapable(orig, self, name, *args, **kwargs):
-    if name in '$CAP'.split(' '):
+    if name in b'$CAP'.split(b' '):
         return False
     return orig(self, name, *args, **kwargs)
 def wrappeer(orig, self):
--- a/tests/run-tests.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/run-tests.py	Mon Feb 04 20:35:21 2019 +0300
@@ -1225,7 +1225,6 @@
             killdaemons(env['DAEMON_PIDS'])
             return ret
 
-        output = b''
         proc.tochild.close()
 
         try:
@@ -2259,14 +2258,17 @@
             self.stream.writeln('')
 
             if not self._runner.options.noskips:
-                for test, msg in self._result.skipped:
+                for test, msg in sorted(self._result.skipped,
+                                        key=lambda s: s[0].name):
                     formatted = 'Skipped %s: %s\n' % (test.name, msg)
                     msg = highlightmsg(formatted, self._result.color)
                     self.stream.write(msg)
-            for test, msg in self._result.failures:
+            for test, msg in sorted(self._result.failures,
+                                    key=lambda f: f[0].name):
                 formatted = 'Failed %s: %s\n' % (test.name, msg)
                 self.stream.write(highlightmsg(formatted, self._result.color))
-            for test, msg in self._result.errors:
+            for test, msg in sorted(self._result.errors,
+                                    key=lambda e: e[0].name):
                 self.stream.writeln('Errored %s: %s' % (test.name, msg))
 
             if self._runner.options.xunit:
@@ -2770,8 +2772,8 @@
         """
         if not args:
             if self.options.changed:
-                proc = Popen4('hg st --rev "%s" -man0 .' %
-                              self.options.changed, None, 0)
+                proc = Popen4(b'hg st --rev "%s" -man0 .' %
+                              _bytespath(self.options.changed), None, 0)
                 stdout, stderr = proc.communicate()
                 args = stdout.strip(b'\0').split(b'\0')
             else:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/svnurlof.py	Mon Feb 04 20:35:21 2019 +0300
@@ -0,0 +1,18 @@
+from __future__ import absolute_import, print_function
+import sys
+
+from mercurial import (
+    pycompat,
+    util,
+)
+
+def main(argv):
+    enc = util.urlreq.quote(pycompat.sysbytes(argv[1]))
+    if pycompat.iswindows:
+        fmt = 'file:///%s'
+    else:
+        fmt = 'file://%s'
+    print(fmt % pycompat.sysstr(enc))
+
+if __name__ == '__main__':
+    main(sys.argv)
--- a/tests/svnxml.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/svnxml.py	Mon Feb 04 20:35:21 2019 +0300
@@ -20,10 +20,10 @@
     if paths:
         paths = paths[0]
         for p in paths.getElementsByTagName('path'):
-            action = p.getAttribute('action')
-            path = xmltext(p)
-            frompath = p.getAttribute('copyfrom-path')
-            fromrev = p.getAttribute('copyfrom-rev')
+            action = p.getAttribute('action').encode('utf-8')
+            path = xmltext(p).encode('utf-8')
+            frompath = p.getAttribute('copyfrom-path').encode('utf-8')
+            fromrev = p.getAttribute('copyfrom-rev').encode('utf-8')
             e['paths'].append((path, action, frompath, fromrev))
     return e
 
@@ -43,11 +43,11 @@
         for k in ('revision', 'author', 'msg'):
             fp.write(('%s: %s\n' % (k, e[k])).encode('utf-8'))
         for path, action, fpath, frev in sorted(e['paths']):
-            frominfo = ''
+            frominfo = b''
             if frev:
-                frominfo = ' (from %s@%s)' % (fpath, frev)
-            p = ' %s %s%s\n' % (action, path, frominfo)
-            fp.write(p.encode('utf-8'))
+                frominfo = b' (from %s@%s)' % (fpath, frev)
+            p = b' %s %s%s\n' % (action, path, frominfo)
+            fp.write(p)
 
 if __name__ == '__main__':
     data = sys.stdin.read()
--- a/tests/test-acl.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-acl.t	Mon Feb 04 20:35:21 2019 +0300
@@ -39,7 +39,7 @@
   >     try:
   >         return acl._getusersorig(ui, group)
   >     except:
-  >         return ["fred", "betty"]
+  >         return [b"fred", b"betty"]
   > acl._getusersorig = acl._getusers
   > acl._getusers = fakegetusers
   > EOF
@@ -1125,7 +1125,7 @@
   bundle2-input-bundle: 4 parts total
   transaction abort!
   rollback completed
-  abort: $ENOENT$: ../acl.config
+  abort: $ENOENT$: '../acl.config'
   no rollback information available
   0:6675d58eff77
   
--- a/tests/test-ancestor.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-ancestor.py	Mon Feb 04 20:35:21 2019 +0300
@@ -123,7 +123,6 @@
             # reference slow algorithm
             naiveinc = naiveincrementalmissingancestors(ancs, bases)
             seq = []
-            revs = []
             for _ in xrange(inccount):
                 if rng.random() < 0.2:
                     newbases = samplerevs(graphnodes)
--- a/tests/test-annotate.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-annotate.t	Mon Feb 04 20:35:21 2019 +0300
@@ -589,7 +589,7 @@
 
   $ hg annotate -ncr "wdir()" baz
   abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
-  abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
+  abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
   [255]
 
 annotate removed file
@@ -598,7 +598,7 @@
 
   $ hg annotate -ncr "wdir()" baz
   abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
-  abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
+  abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
   [255]
 
   $ hg revert --all --no-backup --quiet
--- a/tests/test-arbitraryfilectx.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-arbitraryfilectx.t	Mon Feb 04 20:35:21 2019 +0300
@@ -72,30 +72,30 @@
 These files are different and should return True (different):
 (Note that filecmp.cmp's return semantics are inverted from ours, so we invert
 for simplicity):
-  $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['real_A'])"
+  $ hg eval "context.arbitraryfilectx(b'A', repo).cmp(repo[None][b'real_A'])"
   True (no-eol)
-  $ hg eval "not filecmp.cmp('A', 'real_A')"
+  $ hg eval "not filecmp.cmp(b'A', b'real_A')"
   True (no-eol)
 
 These files are identical and should return False (same):
-  $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['A'])"
+  $ hg eval "context.arbitraryfilectx(b'A', repo).cmp(repo[None][b'A'])"
   False (no-eol)
-  $ hg eval "context.arbitraryfilectx('A', repo).cmp(repo[None]['B'])"
+  $ hg eval "context.arbitraryfilectx(b'A', repo).cmp(repo[None][b'B'])"
   False (no-eol)
-  $ hg eval "not filecmp.cmp('A', 'B')"
+  $ hg eval "not filecmp.cmp(b'A', b'B')"
   False (no-eol)
 
 This comparison should also return False, since A and sym_A are substantially
 the same in the eyes of ``filectx.cmp``, which looks at data only.
-  $ hg eval "context.arbitraryfilectx('real_A', repo).cmp(repo[None]['sym_A'])"
+  $ hg eval "context.arbitraryfilectx(b'real_A', repo).cmp(repo[None][b'sym_A'])"
   False (no-eol)
 
 A naive use of filecmp on those two would wrongly return True, since it follows
 the symlink to "A", which has different contents.
 #if symlink
-  $ hg eval "not filecmp.cmp('real_A', 'sym_A')"
+  $ hg eval "not filecmp.cmp(b'real_A', b'sym_A')"
   True (no-eol)
 #else
-  $ hg eval "not filecmp.cmp('real_A', 'sym_A')"
+  $ hg eval "not filecmp.cmp(b'real_A', b'sym_A')"
   False (no-eol)
 #endif
--- a/tests/test-archive.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-archive.t	Mon Feb 04 20:35:21 2019 +0300
@@ -187,7 +187,7 @@
   server: testing stub value
   transfer-encoding: chunked
   
-  body: size=(1377|1461), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537) (re)
+  body: size=(1377|1461|1489), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537|1897e496871aa89ad685a92b936f5fa0d008b9e8) (re)
   % tar.gz and tar.bz2 disallowed should both give 403
   403 Archive type not allowed: gz
   content-type: text/html; charset=ascii
@@ -274,7 +274,7 @@
   server: testing stub value
   transfer-encoding: chunked
   
-  body: size=(1377|1461), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537) (re)
+  body: size=(1377|1461|1489), sha1=(677b14d3d048778d5eb5552c14a67e6192068650|be6d3983aa13dfe930361b2569291cdedd02b537|1897e496871aa89ad685a92b936f5fa0d008b9e8) (re)
   % tar.gz and tar.bz2 disallowed should both give 403
   403 Archive type not allowed: gz
   content-type: text/html; charset=ascii
--- a/tests/test-batching.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-batching.py	Mon Feb 04 20:35:21 2019 +0300
@@ -11,25 +11,28 @@
 
 from mercurial import (
     localrepo,
+    pycompat,
     wireprotov1peer,
+)
 
-)
+def bprint(*bs):
+    print(*[pycompat.sysstr(b) for b in bs])
 
 # equivalent of repo.repository
 class thing(object):
     def hello(self):
-        return "Ready."
+        return b"Ready."
 
 # equivalent of localrepo.localrepository
 class localthing(thing):
     def foo(self, one, two=None):
         if one:
-            return "%s and %s" % (one, two,)
-        return "Nope"
+            return b"%s and %s" % (one, two,)
+        return b"Nope"
     def bar(self, b, a):
-        return "%s und %s" % (b, a,)
+        return b"%s und %s" % (b, a,)
     def greet(self, name=None):
-        return "Hello, %s" % name
+        return b"Hello, %s" % name
 
     @contextlib.contextmanager
     def commandexecutor(self):
@@ -43,27 +46,27 @@
 def use(it):
 
     # Direct call to base method shared between client and server.
-    print(it.hello())
+    bprint(it.hello())
 
     # Direct calls to proxied methods. They cause individual roundtrips.
-    print(it.foo("Un", two="Deux"))
-    print(it.bar("Eins", "Zwei"))
+    bprint(it.foo(b"Un", two=b"Deux"))
+    bprint(it.bar(b"Eins", b"Zwei"))
 
     # Batched call to a couple of proxied methods.
 
     with it.commandexecutor() as e:
-        ffoo = e.callcommand('foo', {'one': 'One', 'two': 'Two'})
-        fbar = e.callcommand('bar', {'b': 'Eins', 'a': 'Zwei'})
-        fbar2 = e.callcommand('bar', {'b': 'Uno', 'a': 'Due'})
+        ffoo = e.callcommand(b'foo', {b'one': b'One', b'two': b'Two'})
+        fbar = e.callcommand(b'bar', {b'b': b'Eins', b'a': b'Zwei'})
+        fbar2 = e.callcommand(b'bar', {b'b': b'Uno', b'a': b'Due'})
 
-    print(ffoo.result())
-    print(fbar.result())
-    print(fbar2.result())
+    bprint(ffoo.result())
+    bprint(fbar.result())
+    bprint(fbar2.result())
 
 # local usage
 mylocal = localthing()
 print()
-print("== Local")
+bprint(b"== Local")
 use(mylocal)
 
 # demo remoting; mimicks what wireproto and HTTP/SSH do
@@ -72,16 +75,16 @@
 
 def escapearg(plain):
     return (plain
-            .replace(':', '::')
-            .replace(',', ':,')
-            .replace(';', ':;')
-            .replace('=', ':='))
+            .replace(b':', b'::')
+            .replace(b',', b':,')
+            .replace(b';', b':;')
+            .replace(b'=', b':='))
 def unescapearg(escaped):
     return (escaped
-            .replace(':=', '=')
-            .replace(':;', ';')
-            .replace(':,', ',')
-            .replace('::', ':'))
+            .replace(b':=', b'=')
+            .replace(b':;', b';')
+            .replace(b':,', b',')
+            .replace(b'::', b':'))
 
 # server side
 
@@ -90,27 +93,28 @@
     def __init__(self, local):
         self.local = local
     def _call(self, name, args):
-        args = dict(arg.split('=', 1) for arg in args)
+        args = dict(arg.split(b'=', 1) for arg in args)
         return getattr(self, name)(**args)
     def perform(self, req):
-        print("REQ:", req)
-        name, args = req.split('?', 1)
-        args = args.split('&')
-        vals = dict(arg.split('=', 1) for arg in args)
-        res = getattr(self, name)(**vals)
-        print("  ->", res)
+        bprint(b"REQ:", req)
+        name, args = req.split(b'?', 1)
+        args = args.split(b'&')
+        vals = dict(arg.split(b'=', 1) for arg in args)
+        res = getattr(self, pycompat.sysstr(name))(**pycompat.strkwargs(vals))
+        bprint(b"  ->", res)
         return res
     def batch(self, cmds):
         res = []
-        for pair in cmds.split(';'):
-            name, args = pair.split(':', 1)
+        for pair in cmds.split(b';'):
+            name, args = pair.split(b':', 1)
             vals = {}
-            for a in args.split(','):
+            for a in args.split(b','):
                 if a:
-                    n, v = a.split('=')
+                    n, v = a.split(b'=')
                     vals[n] = unescapearg(v)
-            res.append(escapearg(getattr(self, name)(**vals)))
-        return ';'.join(res)
+            res.append(escapearg(getattr(self, pycompat.sysstr(name))(
+                **pycompat.strkwargs(vals))))
+        return b';'.join(res)
     def foo(self, one, two):
         return mangle(self.local.foo(unmangle(one), unmangle(two)))
     def bar(self, b, a):
@@ -124,25 +128,25 @@
 # equivalent of wireproto.encode/decodelist, that is, type-specific marshalling
 # here we just transform the strings a bit to check we're properly en-/decoding
 def mangle(s):
-    return ''.join(chr(ord(c) + 1) for c in s)
+    return b''.join(pycompat.bytechr(ord(c) + 1) for c in pycompat.bytestr(s))
 def unmangle(s):
-    return ''.join(chr(ord(c) - 1) for c in s)
+    return b''.join(pycompat.bytechr(ord(c) - 1) for c in pycompat.bytestr(s))
 
 # equivalent of wireproto.wirerepository and something like http's wire format
 class remotething(thing):
     def __init__(self, server):
         self.server = server
     def _submitone(self, name, args):
-        req = name + '?' + '&'.join(['%s=%s' % (n, v) for n, v in args])
+        req = name + b'?' + b'&'.join([b'%s=%s' % (n, v) for n, v in args])
         return self.server.perform(req)
     def _submitbatch(self, cmds):
         req = []
         for name, args in cmds:
-            args = ','.join(n + '=' + escapearg(v) for n, v in args)
-            req.append(name + ':' + args)
-        req = ';'.join(req)
-        res = self._submitone('batch', [('cmds', req,)])
-        for r in res.split(';'):
+            args = b','.join(n + b'=' + escapearg(v) for n, v in args)
+            req.append(name + b':' + args)
+        req = b';'.join(req)
+        res = self._submitone(b'batch', [(b'cmds', req,)])
+        for r in res.split(b';'):
             yield r
 
     @contextlib.contextmanager
@@ -155,7 +159,7 @@
 
     @wireprotov1peer.batchable
     def foo(self, one, two=None):
-        encargs = [('one', mangle(one),), ('two', mangle(two),)]
+        encargs = [(b'one', mangle(one),), (b'two', mangle(two),)]
         encresref = wireprotov1peer.future()
         yield encargs, encresref
         yield unmangle(encresref.value)
@@ -163,18 +167,18 @@
     @wireprotov1peer.batchable
     def bar(self, b, a):
         encresref = wireprotov1peer.future()
-        yield [('b', mangle(b),), ('a', mangle(a),)], encresref
+        yield [(b'b', mangle(b),), (b'a', mangle(a),)], encresref
         yield unmangle(encresref.value)
 
     # greet is coded directly. It therefore does not support batching. If it
     # does appear in a batch, the batch is split around greet, and the call to
     # greet is done in its own roundtrip.
     def greet(self, name=None):
-        return unmangle(self._submitone('greet', [('name', mangle(name),)]))
+        return unmangle(self._submitone(b'greet', [(b'name', mangle(name),)]))
 
 # demo remote usage
 
 myproxy = remotething(myserver)
 print()
-print("== Remote")
+bprint(b"== Remote")
 use(myproxy)
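
The escapearg()/unescapearg() pair exercised by test-batching.py mirrors the
batch-argument quoting used by the wire protocol: ':' acts as the escape
character, and the separators ',', ';' and '=' are protected by prefixing them
with it. A round-trip sketch using the same byte-string rules as the test:

    def escapearg(plain):
        return (plain.replace(b':', b'::')
                     .replace(b',', b':,')
                     .replace(b';', b':;')
                     .replace(b'=', b':='))

    def unescapearg(escaped):
        return (escaped.replace(b':=', b'=')
                       .replace(b':;', b';')
                       .replace(b':,', b',')
                       .replace(b'::', b':'))

    value = b'key=val;1,2:3'
    assert escapearg(value) == b'key:=val:;1:,2::3'
    assert unescapearg(escapearg(value)) == value
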
--- a/tests/test-bugzilla.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-bugzilla.t	Mon Feb 04 20:35:21 2019 +0300
@@ -3,7 +3,9 @@
   $ cat <<EOF > bzmock.py
   > from __future__ import absolute_import
   > from mercurial import extensions
+  > from mercurial import pycompat
   > from mercurial import registrar
+  > from mercurial.utils import stringutil
   > 
   > configtable = {}
   > configitem = registrar.configitem(configtable)
@@ -18,14 +20,17 @@
   >             super(bzmock, self).__init__(ui)
   >             self._logfile = ui.config(b'bugzilla', b'mocklog')
   >         def updatebug(self, bugid, newstate, text, committer):
-  >             with open(self._logfile, 'a') as f:
-  >                 f.write('update bugid=%r, newstate=%r, committer=%r\n'
-  >                         % (bugid, newstate, committer))
-  >                 f.write('----\n' + text + '\n----\n')
+  >             with open(pycompat.fsdecode(self._logfile), 'ab') as f:
+  >                 f.write(b'update bugid=%s, newstate=%s, committer=%s\n'
+  >                         % (stringutil.pprint(bugid),
+  >                            stringutil.pprint(newstate),
+  >                            stringutil.pprint(committer)))
+  >                 f.write(b'----\n' + text + b'\n----\n')
   >         def notify(self, bugs, committer):
-  >             with open(self._logfile, 'a') as f:
-  >                 f.write('notify bugs=%r, committer=%r\n'
-  >                         % (bugs, committer))
+  >             with open(pycompat.fsdecode(self._logfile), 'ab') as f:
+  >                 f.write(b'notify bugs=%s, committer=%s\n'
+  >                         % (stringutil.pprint(bugs),
+  >                            stringutil.pprint(committer)))
   >     bugzilla.bugzilla._versions[b'mock'] = bzmock
   > EOF
 
--- a/tests/test-commit-interactive-curses.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-commit-interactive-curses.t	Mon Feb 04 20:35:21 2019 +0300
@@ -333,9 +333,9 @@
   $ cp $HGRCPATH.pretest $HGRCPATH
   $ chunkselectorinterface() {
   > "$PYTHON" <<EOF
-  > from mercurial import hg, ui;\
-  > repo = hg.repository(ui.ui.load(), ".");\
-  > print(repo.ui.interface("chunkselector"))
+  > from mercurial import hg, pycompat, ui;\
+  > repo = hg.repository(ui.ui.load(), b".");\
+  > print(pycompat.sysstr(repo.ui.interface(b"chunkselector")))
   > EOF
   > }
   $ chunkselectorinterface
--- a/tests/test-commit-interactive.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-commit-interactive.t	Mon Feb 04 20:35:21 2019 +0300
@@ -1807,3 +1807,38 @@
   n   0         -1 unset               subdir/f1
   $ hg status -A subdir/f1
   M subdir/f1
+
+Test diff.unified=0
+
+  $ hg init $TESTTMP/b
+  $ cd $TESTTMP/b
+  $ cat > foo <<EOF
+  > 1
+  > 2
+  > 3
+  > 4
+  > 5
+  > EOF
+  $ hg ci -qAm initial
+  $ cat > foo <<EOF
+  > 1
+  > change1
+  > 2
+  > 3
+  > change2
+  > 4
+  > 5
+  > EOF
+  $ printf 'y\ny\ny\n' | hg ci -im initial --config diff.unified=0
+  diff --git a/foo b/foo
+  2 hunks, 2 lines changed
+  examine changes to 'foo'? [Ynesfdaq?] y
+  
+  @@ -1,0 +2,1 @@ 1
+  +change1
+  record change 1/2 to 'foo'? [Ynesfdaq?] y
+  
+  @@ -3,0 +5,1 @@ 3
+  +change2
+  record change 2/2 to 'foo'? [Ynesfdaq?] y
+  
--- a/tests/test-completion.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-completion.t	Mon Feb 04 20:35:21 2019 +0300
@@ -104,6 +104,7 @@
   debugnamecomplete
   debugobsolete
   debugpathcomplete
+  debugpathcopies
   debugpeer
   debugpickmergetool
   debugpushkey
@@ -280,6 +281,7 @@
   debugnamecomplete: 
   debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
   debugpathcomplete: full, normal, added, removed
+  debugpathcopies: include, exclude
   debugpeer: 
   debugpickmergetool: rev, changedelete, include, exclude, tool
   debugpushkey: 
--- a/tests/test-context.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-context.py	Mon Feb 04 20:35:21 2019 +0300
@@ -63,7 +63,7 @@
 # test performing a status
 
 def getfilectx(repo, memctx, f):
-    fctx = memctx.parents()[0][f]
+    fctx = memctx.p1()[f]
     data, flags = fctx.data(), fctx.flags()
     if f == b'foo':
         data += b'bar\n'
--- a/tests/test-contrib-perf.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-contrib-perf.t	Mon Feb 04 20:35:21 2019 +0300
@@ -88,12 +88,12 @@
                  (no help text available)
    perffncachewrite
                  (no help text available)
-   perfheads     (no help text available)
+   perfheads     benchmark the computation of a changelog heads
    perfhelper-pathcopies
                  find statistic about potential parameters for the
                  'perftracecopies'
    perfignore    benchmark operation related to computing ignore
-   perfindex     (no help text available)
+   perfindex     benchmark index creation time followed by a lookup
    perflinelogedits
                  (no help text available)
    perfloadmarkers
@@ -109,6 +109,8 @@
    perfmoonwalk  benchmark walking the changelog backwards
    perfnodelookup
                  (no help text available)
+   perfnodemap   benchmark the time necessary to look up revision from a cold
+                 nodemap
    perfparents   (no help text available)
    perfpathcopies
                  benchmark the copy tracing logic
--- a/tests/test-contrib-relnotes.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-contrib-relnotes.t	Mon Feb 04 20:35:21 2019 +0300
@@ -266,7 +266,6 @@
    * diff: disable diff.noprefix option for diffstat (Bts:issue5759)
    * evolution: make reporting of new unstable changesets optional
    * extdata: abort if external command exits with non-zero status (BC)
-   * fancyopts: add early-options parser compatible with getopt()
    * graphlog: add another graph node type, unstable, using character "*" (BC)
    * hgdemandimport: use correct hyperlink to python-bug in comments (Bts:issue5765)
    * httppeer: add support for tracing all http request made by the peer
@@ -277,17 +276,18 @@
    * morestatus: don't crash with different drive letters for repo.root and CWD
    * outgoing: respect ":pushurl" paths (Bts:issue5365)
    * remove: print message for each file in verbose mode only while using '-A' (BC)
-   * rewriteutil: use precheck() in uncommit and amend commands
    * scmutil: don't try to delete origbackup symlinks to directories (Bts:issue5731)
    * sshpeer: add support for request tracing
    * subrepo: add config option to reject any subrepo operations (SEC)
    * subrepo: disable git and svn subrepos by default (BC) (SEC)
+   * subrepo: disallow symlink traversal across subrepo mount point (SEC)
    * subrepo: extend config option to disable subrepos by type (SEC)
    * subrepo: handle 'C:' style paths on the command line (Bts:issue5770)
    * subrepo: use per-type config options to enable subrepos
    * svnsubrepo: check if subrepo is missing when checking dirty state (Bts:issue5657)
    * test-bookmarks-pushpull: stabilize for Windows
    * test-run-tests: stabilize the test (Bts:issue5735)
+   * tests: show symlink traversal across subrepo mount point (SEC)
    * tr-summary: keep a weakref to the unfiltered repository
    * unamend: fix command summary line
    * uncommit: unify functions _uncommitdirstate and _unamenddirstate to one
--- a/tests/test-convert-hg-svn.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-convert-hg-svn.t	Mon Feb 04 20:35:21 2019 +0300
@@ -11,11 +11,7 @@
   > EOF
 
   $ SVNREPOPATH=`pwd`/svn-repo
-#if windows
-  $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#else
-  $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#endif
+  $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`"
 
   $ svnadmin create "$SVNREPOPATH"
   $ cat > "$SVNREPOPATH"/hooks/pre-revprop-change <<EOF
--- a/tests/test-convert-svn-move.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-convert-svn-move.t	Mon Feb 04 20:35:21 2019 +0300
@@ -8,11 +8,7 @@
   $ svnadmin create svn-repo
   $ svnadmin load -q svn-repo < "$TESTDIR/svn/move.svndump"
   $ SVNREPOPATH=`pwd`/svn-repo
-#if windows
-  $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#else
-  $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#endif
+  $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`"
 
 Convert trunk and branches
 
--- a/tests/test-convert-svn-source.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-convert-svn-source.t	Mon Feb 04 20:35:21 2019 +0300
@@ -13,11 +13,7 @@
 
   $ svnadmin create svn-repo
   $ SVNREPOPATH=`pwd`/svn-repo
-#if windows
-  $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#else
-  $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#endif
+  $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`"
   $ INVALIDREVISIONID=svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1
   $ VALIDREVISIONID=svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk/mytrunk@1
 
--- a/tests/test-demandimport.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-demandimport.py	Mon Feb 04 20:35:21 2019 +0300
@@ -6,12 +6,30 @@
 import os
 import subprocess
 import sys
+import types
+
+# Don't import pycompat because it has too many side-effects.
+ispy3 = sys.version_info[0] >= 3
 
 # Only run if demandimport is allowed
 if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
                     'demandimport']):
     sys.exit(80)
 
+# We rely on assert, which gets optimized out.
+if sys.flags.optimize:
+    sys.exit(80)
+
+if ispy3:
+    from importlib.util import _LazyModule
+
+    try:
+        from importlib.util import _Module as moduletype
+    except ImportError:
+        moduletype = types.ModuleType
+else:
+    moduletype = types.ModuleType
+
 if os.name != 'nt':
     try:
         import distutils.msvc9compiler
@@ -36,76 +54,173 @@
 # this enable call should not actually enable demandimport!
 demandimport.enable()
 from mercurial import node
-print("node =", f(node))
+
+# We use assert instead of a unittest test case because having imports inside
+# functions changes behavior of the demand importer.
+if ispy3:
+    assert not isinstance(node, _LazyModule)
+else:
+    assert f(node) == "<module 'mercurial.node' from '?'>", f(node)
+
 # now enable it for real
 del os.environ['HGDEMANDIMPORT']
 demandimport.enable()
 
 # Test access to special attributes through demandmod proxy
+assert 'mercurial.error' not in sys.modules
 from mercurial import error as errorproxy
-print("errorproxy =", f(errorproxy))
-print("errorproxy.__doc__ = %r"
-      % (' '.join(errorproxy.__doc__.split()[:3]) + ' ...'))
-print("errorproxy.__name__ = %r" % errorproxy.__name__)
+
+if ispy3:
+    # unsure why this isn't lazy.
+    assert not isinstance(f, _LazyModule)
+    assert f(errorproxy) == "<module 'mercurial.error' from '?'>", f(errorproxy)
+else:
+    assert f(errorproxy) == "<unloaded module 'error'>", f(errorproxy)
+
+doc = ' '.join(errorproxy.__doc__.split()[:3])
+assert doc == 'Mercurial exceptions. This', doc
+assert errorproxy.__name__ == 'mercurial.error', errorproxy.__name__
+
 # __name__ must be accessible via __dict__ so the relative imports can be
 # resolved
-print("errorproxy.__dict__['__name__'] = %r" % errorproxy.__dict__['__name__'])
-print("errorproxy =", f(errorproxy))
+name = errorproxy.__dict__['__name__']
+assert name == 'mercurial.error', name
+
+if ispy3:
+    assert not isinstance(errorproxy, _LazyModule)
+    assert f(errorproxy) == "<module 'mercurial.error' from '?'>", f(errorproxy)
+else:
+    assert f(errorproxy) == "<proxied module 'error'>", f(errorproxy)
 
 import os
 
-print("os =", f(os))
-print("os.system =", f(os.system))
-print("os =", f(os))
+if ispy3:
+    assert not isinstance(os, _LazyModule)
+    assert f(os) == "<module 'os' from '?'>", f(os)
+else:
+    assert f(os) == "<unloaded module 'os'>", f(os)
 
+assert f(os.system) == '<built-in function system>', f(os.system)
+assert f(os) == "<module 'os' from '?'>", f(os)
+
+assert 'mercurial.utils.procutil' not in sys.modules
 from mercurial.utils import procutil
 
-print("procutil =", f(procutil))
-print("procutil.system =", f(procutil.system))
-print("procutil =", f(procutil))
-print("procutil.system =", f(procutil.system))
+if ispy3:
+    assert isinstance(procutil, _LazyModule)
+    assert f(procutil) == "<module 'mercurial.utils.procutil' from '?'>", f(
+        procutil
+    )
+else:
+    assert f(procutil) == "<unloaded module 'procutil'>", f(procutil)
+
+assert f(procutil.system) == '<function system at 0x?>', f(procutil.system)
+assert procutil.__class__ == moduletype, procutil.__class__
+assert f(procutil) == "<module 'mercurial.utils.procutil' from '?'>", f(
+    procutil
+)
+assert f(procutil.system) == '<function system at 0x?>', f(procutil.system)
 
+assert 'mercurial.hgweb' not in sys.modules
 from mercurial import hgweb
-print("hgweb =", f(hgweb))
-print("hgweb_mod =", f(hgweb.hgweb_mod))
-print("hgweb =", f(hgweb))
+
+if ispy3:
+    assert not isinstance(hgweb, _LazyModule)
+    assert f(hgweb) == "<module 'mercurial.hgweb' from '?'>", f(hgweb)
+    assert isinstance(hgweb.hgweb_mod, _LazyModule)
+    assert (
+        f(hgweb.hgweb_mod) == "<module 'mercurial.hgweb.hgweb_mod' from '?'>"
+    ), f(hgweb.hgweb_mod)
+else:
+    assert f(hgweb) == "<unloaded module 'hgweb'>", f(hgweb)
+    assert f(hgweb.hgweb_mod) == "<unloaded module 'hgweb_mod'>", f(
+        hgweb.hgweb_mod
+    )
+
+assert f(hgweb) == "<module 'mercurial.hgweb' from '?'>", f(hgweb)
 
 import re as fred
-print("fred =", f(fred))
+
+if ispy3:
+    assert not isinstance(fred, _LazyModule)
+    assert f(fred) == "<module 're' from '?'>"
+else:
+    assert f(fred) == "<unloaded module 're'>", f(fred)
 
 import re as remod
-print("remod =", f(remod))
+
+if ispy3:
+    assert not isinstance(remod, _LazyModule)
+    assert f(remod) == "<module 're' from '?'>"
+else:
+    assert f(remod) == "<unloaded module 're'>", f(remod)
 
 import sys as re
-print("re =", f(re))
+
+if ispy3:
+    assert not isinstance(re, _LazyModule)
+    assert f(re) == "<module 'sys' (built-in)>"
+else:
+    assert f(re) == "<unloaded module 'sys'>", f(re)
 
-print("fred =", f(fred))
-print("fred.sub =", f(fred.sub))
-print("fred =", f(fred))
+if ispy3:
+    assert not isinstance(fred, _LazyModule)
+    assert f(fred) == "<module 're' from '?'>", f(fred)
+else:
+    assert f(fred) == "<unloaded module 're'>", f(fred)
+
+assert f(fred.sub) == '<function sub at 0x?>', f(fred.sub)
+
+if ispy3:
+    assert not isinstance(fred, _LazyModule)
+    assert f(fred) == "<module 're' from '?'>", f(fred)
+else:
+    assert f(fred) == "<proxied module 're'>", f(fred)
 
 remod.escape  # use remod
-print("remod =", f(remod))
+assert f(remod) == "<module 're' from '?'>", f(remod)
 
-print("re =", f(re))
-print("re.stderr =", f(re.stderr))
-print("re =", f(re))
+if ispy3:
+    assert not isinstance(re, _LazyModule)
+    assert f(re) == "<module 'sys' (built-in)>"
+    assert f(type(re.stderr)) == "<class '_io.TextIOWrapper'>", f(
+        type(re.stderr)
+    )
+    assert f(re) == "<module 'sys' (built-in)>"
+else:
+    assert f(re) == "<unloaded module 'sys'>", f(re)
+    assert f(re.stderr) == "<open file '<whatever>', mode 'w' at 0x?>", f(
+        re.stderr
+    )
+    assert f(re) == "<proxied module 'sys'>", f(re)
 
-import contextlib
-print("contextlib =", f(contextlib))
+assert 'telnetlib' not in sys.modules
+import telnetlib
+
+if ispy3:
+    assert not isinstance(telnetlib, _LazyModule)
+    assert f(telnetlib) == "<module 'telnetlib' from '?'>"
+else:
+    assert f(telnetlib) == "<unloaded module 'telnetlib'>", f(telnetlib)
+
 try:
-    from contextlib import unknownattr
-    print('no demandmod should be created for attribute of non-package '
-          'module:\ncontextlib.unknownattr =', f(unknownattr))
+    from telnetlib import unknownattr
+
+    assert False, (
+        'no demandmod should be created for attribute of non-package '
+        'module:\ntelnetlib.unknownattr = %s' % f(unknownattr)
+    )
 except ImportError as inst:
-    print('contextlib.unknownattr = ImportError: %s'
-          % rsub(r"'", '', str(inst)))
+    assert rsub(r"'", '', str(inst)).startswith(
+        'cannot import name unknownattr'
+    )
 
 from mercurial import util
 
 # Unlike the import statement, __import__() function should not raise
 # ImportError even if fromlist has an unknown item
 # (see Python/import.c:import_module_level() and ensure_fromlist())
-contextlibimp = __import__('contextlib', globals(), locals(), ['unknownattr'])
-print("__import__('contextlib', ..., ['unknownattr']) =", f(contextlibimp))
-print("hasattr(contextlibimp, 'unknownattr') =",
-      util.safehasattr(contextlibimp, 'unknownattr'))
+assert 'zipfile' not in sys.modules
+zipfileimp = __import__('zipfile', globals(), locals(), ['unknownattr'])
+assert f(zipfileimp) == "<module 'zipfile' from '?'>", f(zipfileimp)
+assert not util.safehasattr(zipfileimp, 'unknownattr')
--- a/tests/test-demandimport.py.out	Fri Feb 01 13:44:09 2019 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-node = <module 'mercurial.node' from '?'>
-errorproxy = <unloaded module 'error'>
-errorproxy.__doc__ = 'Mercurial exceptions. This ...'
-errorproxy.__name__ = 'mercurial.error'
-errorproxy.__dict__['__name__'] = 'mercurial.error'
-errorproxy = <proxied module 'error'>
-os = <unloaded module 'os'>
-os.system = <built-in function system>
-os = <module 'os' from '?'>
-procutil = <unloaded module 'procutil'>
-procutil.system = <function system at 0x?>
-procutil = <module 'mercurial.utils.procutil' from '?'>
-procutil.system = <function system at 0x?>
-hgweb = <unloaded module 'hgweb'>
-hgweb_mod = <unloaded module 'hgweb_mod'>
-hgweb = <module 'mercurial.hgweb' from '?'>
-fred = <unloaded module 're'>
-remod = <unloaded module 're'>
-re = <unloaded module 'sys'>
-fred = <unloaded module 're'>
-fred.sub = <function sub at 0x?>
-fred = <proxied module 're'>
-remod = <module 're' from '?'>
-re = <unloaded module 'sys'>
-re.stderr = <open file '<whatever>', mode 'w' at 0x?>
-re = <proxied module 'sys'>
-contextlib = <unloaded module 'contextlib'>
-contextlib.unknownattr = ImportError: cannot import name unknownattr
-__import__('contextlib', ..., ['unknownattr']) = <module 'contextlib' from '?'>
-hasattr(contextlibimp, 'unknownattr') = False
--- a/tests/test-diffstat.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-diffstat.t	Mon Feb 04 20:35:21 2019 +0300
@@ -236,3 +236,38 @@
   $ hg diff --root . --stat
    file |  2 +-
    1 files changed, 1 insertions(+), 1 deletions(-)
+
+When a file is renamed, --git shouldn't lose the info about the old file
+  $ hg init issue6025
+  $ cd issue6025
+  $ echo > a
+  $ hg ci -Am 'add a'
+  adding a
+  $ hg mv a b
+  $ hg diff --git
+  diff --git a/a b/b
+  rename from a
+  rename to b
+  $ hg diff --stat
+   a |  1 -
+   b |  1 +
+   2 files changed, 1 insertions(+), 1 deletions(-)
+  $ hg diff --stat --git
+   a => b |  0 
+   1 files changed, 0 insertions(+), 0 deletions(-)
+-- filename may contain whitespace
+  $ echo > c
+  $ hg ci -Am 'add c'
+  adding c
+  $ hg mv c 'new c'
+  $ hg diff --git
+  diff --git a/c b/new c
+  rename from c
+  rename to new c
+  $ hg diff --stat
+   c     |  1 -
+   new c |  1 +
+   2 files changed, 1 insertions(+), 1 deletions(-)
+  $ hg diff --stat --git
+   c => new c |  0 
+   1 files changed, 0 insertions(+), 0 deletions(-)
--- a/tests/test-dispatch.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-dispatch.t	Mon Feb 04 20:35:21 2019 +0300
@@ -188,7 +188,8 @@
 specified" should include filename even when it is empty
 
   $ hg -R a archive ''
-  abort: *: '' (glob)
+  abort: $ENOENT$: '' (no-windows !)
+  abort: $ENOTDIR$: '' (windows !)
   [255]
 
 #if no-outer-repo
--- a/tests/test-doctest.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-doctest.py	Mon Feb 04 20:35:21 2019 +0300
@@ -62,6 +62,7 @@
 testmod('mercurial.pycompat')
 testmod('mercurial.revlog')
 testmod('mercurial.revlogutils.deltas')
+testmod('mercurial.revset')
 testmod('mercurial.revsetlang')
 testmod('mercurial.smartset')
 testmod('mercurial.store')
--- a/tests/test-encoding-align.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-encoding-align.t	Mon Feb 04 20:35:21 2019 +0300
@@ -5,6 +5,7 @@
   $ hg init t
   $ cd t
   $ "$PYTHON" << EOF
+  > from mercurial import pycompat
   > # (byte, width) = (6, 4)
   > s = b"\xe7\x9f\xad\xe5\x90\x8d"
   > # (byte, width) = (7, 7): odd width is good for alignment test
@@ -21,14 +22,17 @@
   > command = registrar.command(cmdtable)
   > 
   > @command(b'showoptlist',
-  >     [('s', 'opt1', '', 'short width'  + ' %(s)s' * 8, '%(s)s'),
-  >     ('m', 'opt2', '', 'middle width' + ' %(m)s' * 8, '%(m)s'),
-  >     ('l', 'opt3', '', 'long width'   + ' %(l)s' * 8, '%(l)s')],
-  >     '')
+  >     [(b's', b'opt1', b'', b'short width'  + (b' ' + %(s)s) * 8, %(s)s),
+  >     (b'm', b'opt2', b'', b'middle width' + (b' ' + %(m)s) * 8, %(m)s),
+  >     (b'l', b'opt3', b'', b'long width'   + (b' ' + %(l)s) * 8, %(l)s)],
+  >     b'')
   > def showoptlist(ui, repo, *pats, **opts):
   >     '''dummy command to show option descriptions'''
   >     return 0
-  > """ % globals())
+  > """ % {b's': pycompat.byterepr(s),
+  >        b'm': pycompat.byterepr(m),
+  >        b'l': pycompat.byterepr(l),
+  >       })
   > f.close()
   > EOF
   $ S=`cat s`
--- a/tests/test-extdiff.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-extdiff.t	Mon Feb 04 20:35:21 2019 +0300
@@ -48,6 +48,8 @@
    -o --option OPT [+]      pass option to comparison program
    -r --rev REV [+]         revision
    -c --change REV          change made by revision
+      --per-file            compare each file instead of revision snapshots
+      --confirm             prompt user before each external program invocation
       --patch               compare patches for two revisions
    -I --include PATTERN [+] include names matching the given patterns
    -X --exclude PATTERN [+] exclude names matching the given patterns
@@ -128,6 +130,40 @@
   diffing a.398e36faf9c6 a.5ab95fb166c4
   [1]
 
+Test --per-file option:
+
+  $ hg up -q -C 3
+  $ echo a2 > a
+  $ echo b2 > b
+  $ hg ci -d '3 0' -mtestmode1
+  created new head
+  $ hg falabala -c 6 --per-file
+  diffing "*\\extdiff.*\\a.46c0e4daeb72\\a" "a.81906f2b98ac\\a" (glob) (windows !)
+  diffing */extdiff.*/a.46c0e4daeb72/a a.81906f2b98ac/a (glob) (no-windows !)
+  diffing "*\\extdiff.*\\a.46c0e4daeb72\\b" "a.81906f2b98ac\\b" (glob) (windows !)
+  diffing */extdiff.*/a.46c0e4daeb72/b a.81906f2b98ac/b (glob) (no-windows !)
+  [1]
+
+Test --per-file and --confirm options:
+
+  $ hg --config ui.interactive=True falabala -c 6 --per-file --confirm <<EOF
+  > n
+  > y
+  > EOF
+  diff a (1 of 2) [Yns?] n
+  diff b (2 of 2) [Yns?] y
+  diffing "*\\extdiff.*\\a.46c0e4daeb72\\b" "a.81906f2b98ac\\b" (glob) (windows !)
+  diffing */extdiff.*/a.46c0e4daeb72/b a.81906f2b98ac/b (glob) (no-windows !)
+  [1]
+
+Test --per-file and --confirm options with skipping:
+
+  $ hg --config ui.interactive=True falabala -c 6 --per-file --confirm <<EOF
+  > s
+  > EOF
+  diff a (1 of 2) [Yns?] s
+  [1]
+
 issue4463: usage of command line configuration without additional quoting
 
   $ cat <<EOF >> $HGRCPATH
--- a/tests/test-extension.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-extension.t	Mon Feb 04 20:35:21 2019 +0300
@@ -610,7 +610,8 @@
   > cmdtable = {}
   > command = registrar.command(cmdtable)
   > 
-  > # demand import avoids failure of importing notexist here
+  > # demand import avoids failure of importing notexist here, but only on
+  > # Python 2.
   > import extlibroot.lsub1.lsub2.notexist
   > 
   > @command(b'checkrelativity', [], norepo=True)
@@ -622,7 +623,13 @@
   >         pass # intentional failure
   > NO_CHECK_EOF
 
-  $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.checkrelativity=$TESTTMP/checkrelativity.py checkrelativity)
+Python 3's lazy importer verifies modules exist before returning the lazy
+module stub. Our custom lazy importer for Python 2 always returns a stub.
+
+  $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.checkrelativity=$TESTTMP/checkrelativity.py checkrelativity) || true
+  *** failed to import extension checkrelativity from $TESTTMP/checkrelativity.py: No module named 'extlibroot.lsub1.lsub2.notexist' (py3 !)
+  hg: unknown command 'checkrelativity' (py3 !)
+  (use 'hg help' for a list of commands) (py3 !)
 
 #endif
 
@@ -805,15 +812,22 @@
       "-Npru".
   
       To select a different program, use the -p/--program option. The program
-      will be passed the names of two directories to compare. To pass additional
-      options to the program, use -o/--option. These will be passed before the
-      names of the directories to compare.
+      will be passed the names of two directories to compare, unless the --per-
+      file option is specified (see below). To pass additional options to the
+      program, use -o/--option. These will be passed before the names of the
+      directories or files to compare.
   
       When two revision arguments are given, then changes are shown between
       those revisions. If only one revision is specified then that revision is
       compared to the working directory, and, when no revisions are specified,
       the working directory files are compared to its parent.
   
+      The --per-file option runs the external program repeatedly on each file to
+      diff, instead of once on two directories.
+  
+      The --confirm option will prompt the user before each invocation of the
+      external program. It is ignored if --per-file isn't specified.
+  
   (use 'hg help -e extdiff' to show help for the extdiff extension)
   
   options ([+] can be repeated):
@@ -822,6 +836,8 @@
    -o --option OPT [+]      pass option to comparison program
    -r --rev REV [+]         revision
    -c --change REV          change made by revision
+      --per-file            compare each file instead of revision snapshots
+      --confirm             prompt user before each external program invocation
       --patch               compare patches for two revisions
    -I --include PATTERN [+] include names matching the given patterns
    -X --exclude PATTERN [+] exclude names matching the given patterns
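
The help text above describes the new extdiff flags in prose; the sketch below is only a rough model of that behaviour (one external invocation per file, with an optional prompt), not the extdiff extension's actual code. The 'tool' and 'pairs' parameters are hypothetical, and the prompt letters simply mirror the [Yns?] choices seen in the test output.

  # Rough model of the --per-file/--confirm behaviour, not extdiff's code.
  # 'pairs' is assumed to be a list of (old, new) file paths to compare.
  import subprocess

  def perfile_diff(tool, pairs, confirm=False):
      for i, (old, new) in enumerate(pairs, 1):
          if confirm:
              answer = input('diff %s (%d of %d) [Yns?] ' % (new, i, len(pairs)))
              if answer[:1].lower() == 's':   # skip all remaining files
                  break
              if answer[:1].lower() == 'n':   # skip just this file
                  continue
          subprocess.call([tool, old, new])   # one external invocation per file
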
--- a/tests/test-fastannotate-hg.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-fastannotate-hg.t	Mon Feb 04 20:35:21 2019 +0300
@@ -593,7 +593,7 @@
   $ rm baz
   $ hg annotate -ncr "wdir()" baz
   abort: $TESTTMP/repo/baz: $ENOENT$ (windows !)
-  abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
+  abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
   [255]
 
 annotate removed file
@@ -601,7 +601,7 @@
   $ hg rm baz
   $ hg annotate -ncr "wdir()" baz
   abort: $TESTTMP/repo/baz: $ENOENT$ (windows !)
-  abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
+  abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
   [255]
 
 Test annotate with whitespace options
--- a/tests/test-flagprocessor.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-flagprocessor.t	Mon Feb 04 20:35:21 2019 +0300
@@ -209,11 +209,13 @@
       _insertflagprocessor(flag, processor, _flagprocessors)
     File "*/mercurial/revlog.py", line *, in _insertflagprocessor (glob)
       raise error.Abort(msg)
-  Abort: cannot register multiple processors on flag '0x8'.
+  mercurial.error.Abort: b"cannot register multiple processors on flag '0x8'." (py3 !)
+  Abort: cannot register multiple processors on flag '0x8'. (no-py3 !)
   *** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'.
   $ hg st 2>&1 | egrep 'cannot register multiple processors|flagprocessorext'
     File "*/tests/flagprocessorext.py", line *, in extsetup (glob)
-  Abort: cannot register multiple processors on flag '0x8'.
+  mercurial.error.Abort: b"cannot register multiple processors on flag '0x8'." (py3 !)
+  Abort: cannot register multiple processors on flag '0x8'. (no-py3 !)
   *** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'.
     File "*/tests/flagprocessorext.py", line *, in b64decode (glob)
 
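The py3/no-py3 split above exists because a bytes message stored in an exception keeps its b'...' prefix when the traceback is printed on Python 3. A tiny standalone illustration, using a plain Exception rather than mercurial.error.Abort:

  # On Python 3 this prints: b"cannot register multiple processors on flag '0x8'."
  # On Python 2 the bytes literal is a plain str, so no prefix appears.
  try:
      raise Exception(b"cannot register multiple processors on flag '0x8'.")
  except Exception as exc:
      print(exc)
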
--- a/tests/test-grep.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-grep.t	Mon Feb 04 20:35:21 2019 +0300
@@ -32,6 +32,20 @@
   port:4:vaportight
   port:4:import/export
 
+simple from subdirectory
+
+  $ mkdir dir
+  $ cd dir
+  $ hg grep -r tip:0 port
+  port:4:export
+  port:4:vaportight
+  port:4:import/export
+  $ hg grep -r tip:0 port --config ui.relative-paths=yes
+  ../port:4:export
+  ../port:4:vaportight
+  ../port:4:import/export
+  $ cd ..
+
 simple with color
 
   $ hg --config extensions.color= grep --config color.mode=ansi \
--- a/tests/test-help.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-help.t	Mon Feb 04 20:35:21 2019 +0300
@@ -1014,6 +1014,8 @@
    debugoptEXP   (no help text available)
    debugpathcomplete
                  complete part or all of a tracked path
+   debugpathcopies
+                 show copies between two revisions
    debugpeer     establish a connection to a peer repository
    debugpickmergetool
                  examine which merge tool is chosen for specified file
--- a/tests/test-hgweb-auth.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-hgweb-auth.py	Mon Feb 04 20:35:21 2019 +0300
@@ -24,16 +24,26 @@
 def writeauth(items):
     ui = origui.copy()
     for name, value in items.items():
-        ui.setconfig('auth', name, value)
+        ui.setconfig(b'auth', name, value)
     return ui
 
+def _stringifyauthinfo(ai):
+    if ai is None:
+        return ai
+    realm, authuris, user, passwd = ai
+    return (pycompat.strurl(realm),
+            [pycompat.strurl(u) for u in authuris],
+            pycompat.strurl(user),
+            pycompat.strurl(passwd),
+    )
+
 def test(auth, urls=None):
     print('CFG:', pycompat.sysstr(stringutil.pprint(auth, bprefix=True)))
     prefixes = set()
     for k in auth:
-        prefixes.add(k.split('.', 1)[0])
+        prefixes.add(k.split(b'.', 1)[0])
     for p in prefixes:
-        for name in ('.username', '.password'):
+        for name in (b'.username', b'.password'):
             if (p + name) not in auth:
                 auth[p + name] = p
     auth = dict((k, v) for k, v in auth.items() if v is not None)
@@ -41,106 +51,109 @@
     ui = writeauth(auth)
 
     def _test(uri):
-        print('URI:', uri)
+        print('URI:', pycompat.strurl(uri))
         try:
             pm = url.passwordmgr(ui, urlreq.httppasswordmgrwithdefaultrealm())
             u, authinfo = util.url(uri).authinfo()
             if authinfo is not None:
-                pm.add_password(*authinfo)
-            print('    ', pm.find_user_password('test', u))
+                pm.add_password(*_stringifyauthinfo(authinfo))
+            print('    ', tuple(pycompat.strurl(a) for a in
+                                pm.find_user_password('test',
+                                                      pycompat.strurl(u))))
         except error.Abort:
             print('    ','abort')
 
     if not urls:
         urls = [
-            'http://example.org/foo',
-            'http://example.org/foo/bar',
-            'http://example.org/bar',
-            'https://example.org/foo',
-            'https://example.org/foo/bar',
-            'https://example.org/bar',
-            'https://x@example.org/bar',
-            'https://y@example.org/bar',
+            b'http://example.org/foo',
+            b'http://example.org/foo/bar',
+            b'http://example.org/bar',
+            b'https://example.org/foo',
+            b'https://example.org/foo/bar',
+            b'https://example.org/bar',
+            b'https://x@example.org/bar',
+            b'https://y@example.org/bar',
             ]
     for u in urls:
         _test(u)
 
 
 print('\n*** Test in-uri schemes\n')
-test({'x.prefix': 'http://example.org'})
-test({'x.prefix': 'https://example.org'})
-test({'x.prefix': 'http://example.org', 'x.schemes': 'https'})
-test({'x.prefix': 'https://example.org', 'x.schemes': 'http'})
+test({b'x.prefix': b'http://example.org'})
+test({b'x.prefix': b'https://example.org'})
+test({b'x.prefix': b'http://example.org', b'x.schemes': b'https'})
+test({b'x.prefix': b'https://example.org', b'x.schemes': b'http'})
 
 print('\n*** Test separately configured schemes\n')
-test({'x.prefix': 'example.org', 'x.schemes': 'http'})
-test({'x.prefix': 'example.org', 'x.schemes': 'https'})
-test({'x.prefix': 'example.org', 'x.schemes': 'http https'})
+test({b'x.prefix': b'example.org', b'x.schemes': b'http'})
+test({b'x.prefix': b'example.org', b'x.schemes': b'https'})
+test({b'x.prefix': b'example.org', b'x.schemes': b'http https'})
 
 print('\n*** Test prefix matching\n')
-test({'x.prefix': 'http://example.org/foo',
-      'y.prefix': 'http://example.org/bar'})
-test({'x.prefix': 'http://example.org/foo',
-      'y.prefix': 'http://example.org/foo/bar'})
-test({'x.prefix': '*', 'y.prefix': 'https://example.org/bar'})
+test({b'x.prefix': b'http://example.org/foo',
+      b'y.prefix': b'http://example.org/bar'})
+test({b'x.prefix': b'http://example.org/foo',
+      b'y.prefix': b'http://example.org/foo/bar'})
+test({b'x.prefix': b'*', b'y.prefix': b'https://example.org/bar'})
 
 print('\n*** Test user matching\n')
-test({'x.prefix': 'http://example.org/foo',
-      'x.username': None,
-      'x.password': 'xpassword'},
-     urls=['http://y@example.org/foo'])
-test({'x.prefix': 'http://example.org/foo',
-      'x.username': None,
-      'x.password': 'xpassword',
-      'y.prefix': 'http://example.org/foo',
-      'y.username': 'y',
-      'y.password': 'ypassword'},
-     urls=['http://y@example.org/foo'])
-test({'x.prefix': 'http://example.org/foo/bar',
-      'x.username': None,
-      'x.password': 'xpassword',
-      'y.prefix': 'http://example.org/foo',
-      'y.username': 'y',
-      'y.password': 'ypassword'},
-     urls=['http://y@example.org/foo/bar'])
+test({b'x.prefix': b'http://example.org/foo',
+      b'x.username': None,
+      b'x.password': b'xpassword'},
+     urls=[b'http://y@example.org/foo'])
+test({b'x.prefix': b'http://example.org/foo',
+      b'x.username': None,
+      b'x.password': b'xpassword',
+      b'y.prefix': b'http://example.org/foo',
+      b'y.username': b'y',
+      b'y.password': b'ypassword'},
+     urls=[b'http://y@example.org/foo'])
+test({b'x.prefix': b'http://example.org/foo/bar',
+      b'x.username': None,
+      b'x.password': b'xpassword',
+      b'y.prefix': b'http://example.org/foo',
+      b'y.username': b'y',
+      b'y.password': b'ypassword'},
+     urls=[b'http://y@example.org/foo/bar'])
 
 print('\n*** Test user matching with name in prefix\n')
 
 # prefix, username and URL have the same user
-test({'x.prefix': 'https://example.org/foo',
-      'x.username': None,
-      'x.password': 'xpassword',
-      'y.prefix': 'http://y@example.org/foo',
-      'y.username': 'y',
-      'y.password': 'ypassword'},
-     urls=['http://y@example.org/foo'])
+test({b'x.prefix': b'https://example.org/foo',
+      b'x.username': None,
+      b'x.password': b'xpassword',
+      b'y.prefix': b'http://y@example.org/foo',
+      b'y.username': b'y',
+      b'y.password': b'ypassword'},
+     urls=[b'http://y@example.org/foo'])
 # Prefix has a different user from username and URL
-test({'y.prefix': 'http://z@example.org/foo',
-      'y.username': 'y',
-      'y.password': 'ypassword'},
-     urls=['http://y@example.org/foo'])
+test({b'y.prefix': b'http://z@example.org/foo',
+      b'y.username': b'y',
+      b'y.password': b'ypassword'},
+     urls=[b'http://y@example.org/foo'])
 # Prefix has a different user from URL; no username
-test({'y.prefix': 'http://z@example.org/foo',
-      'y.password': 'ypassword'},
-     urls=['http://y@example.org/foo'])
+test({b'y.prefix': b'http://z@example.org/foo',
+      b'y.password': b'ypassword'},
+     urls=[b'http://y@example.org/foo'])
 # Prefix and URL have same user, but doesn't match username
-test({'y.prefix': 'http://y@example.org/foo',
-      'y.username': 'z',
-      'y.password': 'ypassword'},
-     urls=['http://y@example.org/foo'])
+test({b'y.prefix': b'http://y@example.org/foo',
+      b'y.username': b'z',
+      b'y.password': b'ypassword'},
+     urls=[b'http://y@example.org/foo'])
 # Prefix and URL have the same user; no username
-test({'y.prefix': 'http://y@example.org/foo',
-      'y.password': 'ypassword'},
-     urls=['http://y@example.org/foo'])
+test({b'y.prefix': b'http://y@example.org/foo',
+      b'y.password': b'ypassword'},
+     urls=[b'http://y@example.org/foo'])
 # Prefix user, but no URL user or username
-test({'y.prefix': 'http://y@example.org/foo',
-      'y.password': 'ypassword'},
-     urls=['http://example.org/foo'])
+test({b'y.prefix': b'http://y@example.org/foo',
+      b'y.password': b'ypassword'},
+     urls=[b'http://example.org/foo'])
 
 def testauthinfo(fullurl, authurl):
     print('URIs:', fullurl, authurl)
     pm = urlreq.httppasswordmgrwithdefaultrealm()
-    pm.add_password(*util.url(fullurl).authinfo()[1])
+    ai = _stringifyauthinfo(util.url(pycompat.bytesurl(fullurl)).authinfo()[1])
+    pm.add_password(*ai)
     print(pm.find_user_password('test', authurl))
 
 print('\n*** Test urllib2 and util.url\n')
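
The _stringifyauthinfo() helper added above exists because Mercurial's URL handling is bytes-based while urllib's password manager on Python 3 expects str. A minimal standalone sketch of the same conversion, using plain urllib rather than Mercurial's util.url and url.passwordmgr:

  # Standalone sketch of the str conversion done by _stringifyauthinfo() above;
  # realm/uris/user/passwd values here are made up for illustration.
  import urllib.request

  realm, uris, user, passwd = None, [b'http://example.org/foo'], b'user', b'x'
  pm = urllib.request.HTTPPasswordMgrWithDefaultRealm()
  pm.add_password(realm,
                  [u.decode('ascii') for u in uris],   # bytes URIs -> str
                  user.decode('ascii'),
                  passwd.decode('ascii'))
  print(pm.find_user_password('test', 'http://example.org/foo'))  # ('user', 'x')
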
--- a/tests/test-hgweb-json.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-hgweb-json.t	Mon Feb 04 20:35:21 2019 +0300
@@ -2196,7 +2196,8 @@
 Commit message with Japanese Kanji 'Noh', which ends with '\x5c'
 
   $ echo foo >> da/foo
-  $ HGENCODING=cp932 hg ci -m `"$PYTHON" -c 'print("\x94\x5c")'`
+  >>> open('msg', 'wb').write(b'\x94\x5c\x0a') and None
+  $ HGENCODING=cp932 hg ci -l msg
 
 Commit message with null character
 
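The cp932 change above works around the classic 'Noh' problem: the Kanji's second byte in Shift-JIS/CP932 is 0x5c, the ASCII backslash, so passing the message through backtick command substitution is fragile and the test now writes it to a file instead. A quick standalone check of that encoding fact:

  # The 'Noh' Kanji is encoded as 0x94 0x5c in CP932; the second byte collides
  # with the ASCII backslash, which is exactly what the test is exercising.
  msg = b'\x94\x5c'
  print(msg.decode('cp932'))   # the 'Noh' Kanji
  print(msg.endswith(b'\\'))   # True -- the encoded form ends with a backslash
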
--- a/tests/test-hgweb.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-hgweb.t	Mon Feb 04 20:35:21 2019 +0300
@@ -910,7 +910,8 @@
 
 errors
 
-  $ cat errors.log
+  $ cat errors.log | "$PYTHON" $TESTDIR/filtertraceback.py
+  $ rm -f errors.log
 
 Uncaught exceptions result in a logged error and canned HTTP response
 
@@ -925,8 +926,11 @@
   [1]
 
   $ killdaemons.py
-  $ head -1 errors.log
+  $ cat errors.log | "$PYTHON" $TESTDIR/filtertraceback.py
   .* Exception happened during processing request '/raiseerror': (re)
+  Traceback (most recent call last):
+  AttributeError: I am an uncaught error!
+  
 
 Uncaught exception after partial content sent
 
--- a/tests/test-histedit-arguments.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-histedit-arguments.t	Mon Feb 04 20:35:21 2019 +0300
@@ -362,7 +362,7 @@
   $ hg histedit --abort
   warning: encountered an exception during histedit --abort; the repository may not have been completely cleaned up
   abort: $TESTTMP/foo/.hg/strip-backup/*-histedit.hg: $ENOENT$ (glob) (windows !)
-  abort: $ENOENT$: $TESTTMP/foo/.hg/strip-backup/*-histedit.hg (glob) (no-windows !)
+  abort: $ENOENT$: '$TESTTMP/foo/.hg/strip-backup/*-histedit.hg' (glob) (no-windows !)
   [255]
 Histedit state has been exited
   $ hg summary -q
--- a/tests/test-histedit-commute.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-histedit-commute.t	Mon Feb 04 20:35:21 2019 +0300
@@ -52,6 +52,7 @@
      summary:     a
   
 
+
 show the edit commands offered
   $ HGEDITOR=cat hg histedit 177f92b77385
   pick 177f92b77385 2 c
@@ -76,6 +77,33 @@
   #  r, roll = like fold, but discard this commit's description and date
   #
 
+
+test customization of revision summary
+  $ HGEDITOR=cat hg histedit 177f92b77385 \
+  >  --config histedit.summary-template='I am rev {rev} desc {desc} tags {tags}'
+  pick 177f92b77385 I am rev 2 desc c tags 
+  pick 055a42cdd887 I am rev 3 desc d tags 
+  pick e860deea161a I am rev 4 desc e tags 
+  pick 652413bf663e I am rev 5 desc f tags tip
+  
+  # Edit history between 177f92b77385 and 652413bf663e
+  #
+  # Commits are listed from least to most recent
+  #
+  # You can reorder changesets by reordering the lines
+  #
+  # Commands:
+  #
+  #  e, edit = use commit, but stop for amending
+  #  m, mess = edit commit message without changing commit content
+  #  p, pick = use commit
+  #  b, base = checkout changeset and apply further changesets from there
+  #  d, drop = remove commit from history
+  #  f, fold = use commit, but combine it with the one above
+  #  r, roll = like fold, but discard this commit's description and date
+  #
+
+
 edit the history
 (use a hacky editor to check histedit-last-edit.txt backup)
 
@@ -142,6 +170,7 @@
      summary:     a
   
 
+
 put things back
 
   $ hg histedit 177f92b77385 --commands - 2>&1 << EOF | fixbundle
@@ -184,6 +213,7 @@
      summary:     a
   
 
+
 slightly different this time
 
   $ hg histedit 177f92b77385 --commands - << EOF 2>&1 | fixbundle
@@ -225,6 +255,7 @@
      summary:     a
   
 
+
 keep prevents stripping dead revs
   $ hg histedit 799205341b6b --keep --commands - 2>&1 << EOF | fixbundle
   > pick 799205341b6b d
@@ -276,6 +307,7 @@
      summary:     a
   
 
+
 try with --rev
   $ hg histedit --commands - --rev -2 2>&1 <<EOF | fixbundle
   > pick de71b079d9ce e
@@ -326,6 +358,7 @@
      date:        Thu Jan 01 00:00:00 1970 +0000
      summary:     a
   
+
 Verify that revsetalias entries work with histedit:
   $ cat >> $HGRCPATH <<EOF
   > [revsetalias]
@@ -355,6 +388,7 @@
   #  r, roll = like fold, but discard this commit's description and date
   #
 
+
 should also work if a commit message is missing
   $ BUNDLE="$TESTDIR/missing-comment.hg"
   $ hg init missing
@@ -384,6 +418,7 @@
      date:        Mon Nov 28 16:35:28 2011 +0000
      summary:     Checked in text file
   
+
   $ hg histedit 0
   $ cd ..
 
@@ -440,6 +475,7 @@
   @@ -0,0 +1,1 @@
   +changed
 
+
   $ hg --config diff.git=yes export 1
   # HG changeset patch
   # User test
@@ -453,6 +489,7 @@
   rename from another-dir/initial-file
   rename to another-dir/renamed-file
 
+
   $ cd ..
 
 Test that branches are preserved and stays active
--- a/tests/test-histedit-edit.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-histedit-edit.t	Mon Feb 04 20:35:21 2019 +0300
@@ -370,9 +370,9 @@
   HG: branch 'default'
   HG: added f
   ====
-  note: commit message saved in .hg/last-message.txt
   transaction abort!
   rollback completed
+  note: commit message saved in .hg/last-message.txt
   abort: pretxncommit.unexpectedabort hook exited with status 1
   [255]
   $ cat .hg/last-message.txt
@@ -394,9 +394,9 @@
   HG: user: test
   HG: branch 'default'
   HG: added f
-  note: commit message saved in .hg/last-message.txt
   transaction abort!
   rollback completed
+  note: commit message saved in .hg/last-message.txt
   abort: pretxncommit.unexpectedabort hook exited with status 1
   [255]
 
--- a/tests/test-hook.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-hook.t	Mon Feb 04 20:35:21 2019 +0300
@@ -690,7 +690,7 @@
 
   $ hg up null
   loading update.ne hook failed:
-  abort: $ENOENT$: $TESTTMP/d/repo/nonexistent.py
+  abort: $ENOENT$: '$TESTTMP/d/repo/nonexistent.py'
   [255]
 
   $ hg id
--- a/tests/test-http-api-httpv2.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-http-api-httpv2.t	Mon Feb 04 20:35:21 2019 +0300
@@ -18,6 +18,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/exp-http-v2-0003 HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -46,6 +47,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/badcommand HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -67,6 +69,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -88,6 +91,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -110,6 +114,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: invalid\r\n
@@ -134,6 +139,7 @@
   >     content-type: badmedia
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -160,6 +166,7 @@
   >     frame 1 1 stream-begin command-request new cbor:{b'name': b'customreadonly'}
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     *\r\n (glob)
@@ -196,6 +203,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending customreadonly command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -216,23 +224,19 @@
   s>     \t\x00\x00\x01\x00\x02\x01\x92
   s>     Hidentity
   s>     \r\n
-  received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   s>     13\r\n
   s>     \x0b\x00\x00\x01\x00\x02\x041
   s>     \xa1FstatusBok
   s>     \r\n
-  received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     27\r\n
   s>     \x1f\x00\x00\x01\x00\x02\x041
   s>     X\x1dcustomreadonly bytes response
   s>     \r\n
-  received frame(size=31; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     8\r\n
   s>     \x00\x00\x00\x01\x00\x02\x002
   s>     \r\n
   s>     0\r\n
   s>     \r\n
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
   response: gen[
     b'customreadonly bytes response'
   ]
@@ -247,6 +251,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/exp-http-v2-0003/rw/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -268,6 +273,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/exp-http-v2-0003/rw/badcommand HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -289,6 +295,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/rw/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -327,6 +334,7 @@
   >     frame 1 1 stream-begin command-request new cbor:{b'name': b'customreadonly'}
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/rw/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -366,6 +374,7 @@
   >     accept: $MEDIATYPE
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/rw/badcommand HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -388,6 +397,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/debugreflect HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -428,6 +438,7 @@
   >     frame 1 1 stream-begin command-request new cbor:{b'name': b'command1', b'args': {b'foo': b'val1', b'bar1': b'val'}}
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/debugreflect HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -459,6 +470,7 @@
   >     frame 1 1 stream-begin command-request new cbor:{b'name': b'customreadonly'}
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/customreadonly HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -501,6 +513,7 @@
   >     frame 3 1 0 command-request new cbor:{b'name': b'customreadonly'}
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/multirequest HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     *\r\n (glob)
@@ -554,6 +567,7 @@
   >     frame 1 1 0 command-request continuation IbookmarksDnameHlistkeys
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/multirequest HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -619,6 +633,7 @@
   >     frame 1 1 stream-begin command-request new cbor:{b'name': b'pushkey'}
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/multirequest HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -645,6 +660,7 @@
   creating http peer for wire protocol version 2
   sending heads command
   wire protocol version 2 encoder referenced in config (badencoder) is not known; ignoring
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -665,23 +681,19 @@
   s>     \t\x00\x00\x01\x00\x02\x01\x92
   s>     Hidentity
   s>     \r\n
-  received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   s>     13\r\n
   s>     \x0b\x00\x00\x01\x00\x02\x041
   s>     \xa1FstatusBok
   s>     \r\n
-  received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     1e\r\n
   s>     \x16\x00\x00\x01\x00\x02\x041
   s>     \x81T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
   s>     \r\n
-  received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     8\r\n
   s>     \x00\x00\x00\x01\x00\x02\x002
   s>     \r\n
   s>     0\r\n
   s>     \r\n
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
   response: [
     b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
   ]
@@ -694,6 +706,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending heads command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -714,12 +727,10 @@
   s>     \t\x00\x00\x01\x00\x02\x01\x92
   s>     Hzstd-8mb
   s>     \r\n
-  received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   s>     25\r\n
   s>     \x1d\x00\x00\x01\x00\x02\x042
   s>     (\xb5/\xfd\x00P\xa4\x00\x00p\xa1FstatusBok\x81T\x00\x01\x00\tP\x02
   s>     \r\n
-  received frame(size=29; request=1; stream=2; streamflags=encoded; type=command-response; flags=eos)
   s>     0\r\n
   s>     \r\n
   response: [
--- a/tests/test-http-api.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-http-api.t	Mon Feb 04 20:35:21 2019 +0300
@@ -156,6 +156,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -177,6 +178,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/ HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -200,6 +202,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/unknown HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -222,6 +225,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/exp-http-v2-0003 HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -255,6 +259,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -276,6 +281,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/ HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
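
The new "s> setsockopt(6, 1, 1) -> None (?)" lines logged above most likely correspond to disabling Nagle's algorithm on the client socket: on Linux, level 6 is IPPROTO_TCP and option 1 is TCP_NODELAY, and the (?) marker allows for platforms where the call differs. A quick way to check those constants (values shown assume Linux):

  # On Linux these print 6 and 1, matching the "setsockopt(6, 1, 1)" lines.
  import socket

  print(socket.IPPROTO_TCP)   # 6
  print(socket.TCP_NODELAY)   # 1

  s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
  print(s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY))  # non-zero
  s.close()
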
--- a/tests/test-http-bad-server.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-http-bad-server.t	Mon Feb 04 20:35:21 2019 +0300
@@ -94,7 +94,7 @@
 
   $ cat error.log
   readline(40 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(7 from -1) -> (7) Accept-
+  readline(7 from *) -> (7) Accept- (glob)
   read limit reached; closing socket
 
   $ rm -f error.log
@@ -111,28 +111,32 @@
 
   $ cat error.log
   readline(210 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(177 from -1) -> (27) Accept-Encoding: identity\r\n
-  readline(150 from -1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(115 from -1) -> (*) host: localhost:$HGPORT\r\n (glob)
-  readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
-  readline(* from -1) -> (2) \r\n (glob)
-  write(36) -> HTTP/1.1 200 Script output follows\r\n
-  write(23) -> Server: badhttpserver\r\n
-  write(37) -> Date: $HTTP_DATE$\r\n
-  write(41) -> Content-Type: application/mercurial-0.1\r\n
-  write(21) -> Content-Length: 450\r\n
-  write(2) -> \r\n
-  write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+  readline(177 from *) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(150 from *) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(115 from *) -> (*) host: localhost:$HGPORT\r\n (glob)
+  readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(* from *) -> (2) \r\n (glob)
+  sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !)
+  sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !)
+  write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !)
+  write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !)
+  write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23) -> Server: badhttpserver\r\n (no-py3 !)
+  write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(21) -> Content-Length: 450\r\n (no-py3 !)
+  write(2) -> \r\n (no-py3 !)
+  write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !)
   readline(4? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
-  readline(1? from -1) -> (1?) Accept-Encoding* (glob)
+  readline(1? from *) -> (1?) Accept-Encoding* (glob)
   read limit reached; closing socket
   readline(223 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
-  readline(197 from -1) -> (27) Accept-Encoding: identity\r\n
-  readline(170 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(141 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
-  readline(100 from -1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(39 from -1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(4 from -1) -> (4) host
+  readline(197 from *) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(170 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(141 from *) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
+  readline(100 from *) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(39 from *) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(4 from *) -> (4) host (glob)
   read limit reached; closing socket
 
   $ rm -f error.log
@@ -152,46 +156,54 @@
   readline(1 from -1) -> (1) x (?)
   readline(1 from -1) -> (1) x (?)
   readline(308 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(275 from -1) -> (27) Accept-Encoding: identity\r\n
-  readline(248 from -1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(213 from -1) -> (*) host: localhost:$HGPORT\r\n (glob)
-  readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
-  readline(* from -1) -> (2) \r\n (glob)
-  write(36) -> HTTP/1.1 200 Script output follows\r\n
-  write(23) -> Server: badhttpserver\r\n
-  write(37) -> Date: $HTTP_DATE$\r\n
-  write(41) -> Content-Type: application/mercurial-0.1\r\n
-  write(21) -> Content-Length: 450\r\n
-  write(2) -> \r\n
-  write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+  readline(275 from *) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(248 from *) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(213 from *) -> (*) host: localhost:$HGPORT\r\n (glob)
+  readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(* from *) -> (2) \r\n (glob)
+  sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !)
+  sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !)
+  write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !)
+  write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !)
+  write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23) -> Server: badhttpserver\r\n (no-py3 !)
+  write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(21) -> Content-Length: 450\r\n (no-py3 !)
+  write(2) -> \r\n (no-py3 !)
+  write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !)
   readline(13? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
-  readline(1?? from -1) -> (27) Accept-Encoding: identity\r\n (glob)
-  readline(8? from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
-  readline(5? from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
-  readline(1? from -1) -> (1?) x-hgproto-1:* (glob)
+  readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(8? from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(5? from *) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
+  readline(1? from *) -> (1?) x-hgproto-1:* (glob)
   read limit reached; closing socket
   readline(317 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
-  readline(291 from -1) -> (27) Accept-Encoding: identity\r\n
-  readline(264 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(235 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
-  readline(194 from -1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(133 from -1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(98 from -1) -> (*) host: localhost:$HGPORT\r\n (glob)
-  readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
-  readline(* from -1) -> (2) \r\n (glob)
-  write(36) -> HTTP/1.1 200 Script output follows\r\n
-  write(23) -> Server: badhttpserver\r\n
-  write(37) -> Date: $HTTP_DATE$\r\n
-  write(41) -> Content-Type: application/mercurial-0.1\r\n
-  write(20) -> Content-Length: 42\r\n
-  write(2) -> \r\n
-  write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
+  readline(291 from *) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(264 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(235 from *) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
+  readline(194 from *) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(133 from *) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(98 from *) -> (*) host: localhost:$HGPORT\r\n (glob)
+  readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(* from *) -> (2) \r\n (glob)
+  sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
+  sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
+  write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
+  write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !)
+  write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23) -> Server: badhttpserver\r\n (no-py3 !)
+  write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(20) -> Content-Length: 42\r\n (no-py3 !)
+  write(2) -> \r\n (no-py3 !)
+  write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !)
   readline(* from 65537) -> (*) GET /?cmd=getbundle HTTP* (glob)
   read limit reached; closing socket
   readline(304 from 65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
-  readline(274 from -1) -> (27) Accept-Encoding: identity\r\n
-  readline(247 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(218 from -1) -> (218) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtag
+  readline(274 from *) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(247 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(218 from *) -> (218) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtag (glob)
   read limit reached; closing socket
 
   $ rm -f error.log
@@ -207,41 +219,50 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ cat error.log
+  $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py
   readline(329 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(296 from -1) -> (27) Accept-Encoding: identity\r\n
-  readline(269 from -1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(234 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
-  readline(* from -1) -> (2) \r\n (glob)
-  write(36) -> HTTP/1.1 200 Script output follows\r\n
-  write(23) -> Server: badhttpserver\r\n
-  write(37) -> Date: $HTTP_DATE$\r\n
-  write(41) -> Content-Type: application/mercurial-0.1\r\n
-  write(21) -> Content-Length: 463\r\n
-  write(2) -> \r\n
-  write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+  readline(296 from *) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(269 from *) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(234 from *) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(* from *) -> (2) \r\n (glob)
+  sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py36 !)
+  sendall(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !)
+  write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py3 no-py36 !)
+  write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !)
+  write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23) -> Server: badhttpserver\r\n (no-py3 !)
+  write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(21) -> Content-Length: 463\r\n (no-py3 !)
+  write(2) -> \r\n (no-py3 !)
+  write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !)
   readline(1?? from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n (glob)
-  readline(1?? from -1) -> (27) Accept-Encoding: identity\r\n (glob)
-  readline(1?? from -1) -> (41) content-type: application/mercurial-0.1\r\n (glob)
-  readline(6? from -1) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n (glob)
-  readline(3? from -1) -> (19) x-hgargs-post: 28\r\n (glob)
-  readline(1? from -1) -> (1?) x-hgproto-1: * (glob)
+  readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(1?? from *) -> (41) content-type: application/mercurial-0.1\r\n (glob)
+  readline(6? from *) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n (glob)
+  readline(3? from *) -> (19) x-hgargs-post: 28\r\n (glob)
+  readline(1? from *) -> (1?) x-hgproto-1: * (glob)
   read limit reached; closing socket
   readline(344 from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n
-  readline(317 from -1) -> (27) Accept-Encoding: identity\r\n
-  readline(290 from -1) -> (41) content-type: application/mercurial-0.1\r\n
-  readline(249 from -1) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n
-  readline(216 from -1) -> (19) x-hgargs-post: 28\r\n
-  readline(197 from -1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(136 from -1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(101 from -1) -> (20) content-length: 28\r\n
-  readline(81 from -1) -> (*) host: localhost:$HGPORT\r\n (glob)
-  readline(* from -1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
-  readline(* from -1) -> (2) \r\n (glob)
+  readline(317 from *) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(290 from *) -> (41) content-type: application/mercurial-0.1\r\n (glob)
+  readline(249 from *) -> (33) vary: X-HgArgs-Post,X-HgProto-1\r\n (glob)
+  readline(216 from *) -> (19) x-hgargs-post: 28\r\n (glob)
+  readline(197 from *) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(136 from *) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(101 from *) -> (20) content-length: 28\r\n (glob)
+  readline(81 from *) -> (*) host: localhost:$HGPORT\r\n (glob)
+  readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(* from *) -> (2) \r\n (glob)
   read(* from 28) -> (*) cmds=* (glob)
   read limit reached, closing socket
-  write(36) -> HTTP/1.1 500 Internal Server Error\r\n
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after receiving N bytes
+  
+  write(126) -> HTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !)
+  write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !)
 
   $ rm -f error.log
 
@@ -258,16 +279,23 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ cat error.log
+  $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py
   readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(1 from 36) -> (0) H
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(1 from 160) -> (0) H (py36 !)
+  write(1 from 160) -> (0) H (py3 no-py36 !)
+  write(1 from 36) -> (0) H (no-py3 !)
   write limit reached; closing socket
-  write(36) -> HTTP/1.1 500 Internal Server Error\r\n
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+  write(286) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !)
+  write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !)
 
   $ rm -f error.log
 
@@ -283,21 +311,29 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ cat error.log
+  $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py
   readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (144) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (121) Server: badhttpserver\r\n
-  write(37 from 37) -> (84) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (43) Content-Type: application/mercurial-0.1\r\n
-  write(21 from 21) -> (22) Content-Length: 450\r\n
-  write(2 from 2) -> (20) \r\n
-  write(20 from 450) -> (0) batch branchmap bund
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !)
+  sendall(20 from 450) -> (0) batch branchmap bund (py36 !)
+  write(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !)
+  write(20 from 450) -> (0) batch branchmap bund (py3 no-py36 !)
+  write(36 from 36) -> (144) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (121) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (84) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (43) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(21 from 21) -> (22) Content-Length: 450\r\n (no-py3 !)
+  write(2 from 2) -> (20) \r\n (no-py3 !)
+  write(20 from 450) -> (0) batch branchmap bund (no-py3 !)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
 
   $ rm -f error.log
 
@@ -318,35 +354,46 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ cat error.log
+  $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py
   readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (692) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (669) Server: badhttpserver\r\n
-  write(37 from 37) -> (632) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (591) Content-Type: application/mercurial-0.1\r\n
-  write(21 from 21) -> (570) Content-Length: 450\r\n
-  write(2 from 2) -> (568) \r\n
-  write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !)
+  sendall(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !)
+  write(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !)
+  write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !)
+  write(36 from 36) -> (692) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (669) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (632) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (591) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(21 from 21) -> (570) Content-Length: 450\r\n (no-py3 !)
+  write(2 from 2) -> (568) \r\n (no-py3 !)
+  write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !)
   readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
-  readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (82) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (59) Server: badhttpserver\r\n
-  write(37 from 37) -> (22) Date: $HTTP_DATE$\r\n
-  write(22 from 41) -> (0) Content-Type: applicat
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
+  readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (py36 !)
+  write(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (py3 no-py36 !)
+  write(36 from 36) -> (82) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (59) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (22) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(22 from 41) -> (0) Content-Type: applicat (no-py3 !)
   write limit reached; closing socket
-  write(36) -> HTTP/1.1 500 Internal Server Error\r\n
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+  write(285) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !)
+  write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !)
 
   $ rm -f error.log
 
@@ -366,37 +413,49 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ cat error.log
+  $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py
   readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (757) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (734) Server: badhttpserver\r\n
-  write(37 from 37) -> (697) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (656) Content-Type: application/mercurial-0.1\r\n
-  write(21 from 21) -> (635) Content-Length: 450\r\n
-  write(2 from 2) -> (633) \r\n
-  write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !)
+  sendall(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !)
+  write(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !)
+  write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !)
+  write(36 from 36) -> (757) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (734) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (697) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (656) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(21 from 21) -> (635) Content-Length: 450\r\n (no-py3 !)
+  write(2 from 2) -> (633) \r\n (no-py3 !)
+  write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !)
   readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
-  readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (147) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (124) Server: badhttpserver\r\n
-  write(37 from 37) -> (87) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (46) Content-Type: application/mercurial-0.1\r\n
-  write(20 from 20) -> (26) Content-Length: 42\r\n
-  write(2 from 2) -> (24) \r\n
-  write(24 from 42) -> (0) 96ee1d7354c4ad7372047672
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
+  readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(159 from 159) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
+  sendall(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (py36 !)
+  write(159 from 159) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
+  write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (py3 no-py36 !)
+  write(36 from 36) -> (147) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (124) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (87) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (46) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(20 from 20) -> (26) Content-Length: 42\r\n (no-py3 !)
+  write(2 from 2) -> (24) \r\n (no-py3 !)
+  write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (no-py3 !)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
 
   $ rm -f error.log
 
@@ -418,51 +477,66 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ cat error.log
+  $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py
   readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (904) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (881) Server: badhttpserver\r\n
-  write(37 from 37) -> (844) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (803) Content-Type: application/mercurial-0.1\r\n
-  write(21 from 21) -> (782) Content-Length: 450\r\n
-  write(2 from 2) -> (780) \r\n
-  write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !)
+  sendall(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !)
+  write(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !)
+  write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !)
+  write(36 from 36) -> (904) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (881) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (844) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (803) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(21 from 21) -> (782) Content-Length: 450\r\n (no-py3 !)
+  write(2 from 2) -> (780) \r\n (no-py3 !)
+  write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !)
   readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
-  readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (294) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (271) Server: badhttpserver\r\n
-  write(37 from 37) -> (234) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (193) Content-Type: application/mercurial-0.1\r\n
-  write(20 from 20) -> (173) Content-Length: 42\r\n
-  write(2 from 2) -> (171) \r\n
-  write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
+  readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(159 from 159) -> (171) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
+  sendall(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
+  write(159 from 159) -> (171) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
+  write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !)
+  write(36 from 36) -> (294) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (271) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (234) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (193) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(20 from 20) -> (173) Content-Length: 42\r\n (no-py3 !)
+  write(2 from 2) -> (171) \r\n (no-py3 !)
+  write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !)
   readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(-1) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
-  readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (93) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (70) Server: badhttpserver\r\n
-  write(37 from 37) -> (33) Date: $HTTP_DATE$\r\n
-  write(33 from 41) -> (0) Content-Type: application/mercuri
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob)
+  readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (py36 !)
+  write(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (py3 no-py36 !)
+  write(36 from 36) -> (93) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (70) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (33) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(33 from 41) -> (0) Content-Type: application/mercuri (no-py3 !)
   write limit reached; closing socket
-  write(36) -> HTTP/1.1 500 Internal Server Error\r\n
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+  write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !)
+  write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !)
 
   $ rm -f error.log
 
@@ -478,11 +552,20 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -4 error.log
-  write(41 from 41) -> (25) Content-Type: application/mercurial-0.2\r\n
-  write(25 from 28) -> (0) Transfer-Encoding: chunke
-  write limit reached; closing socket
-  write(36) -> HTTP/1.1 500 Internal Server Error\r\n
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -3
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -4
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+  write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
+  write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !)
+#endif
 
   $ rm -f error.log
 
@@ -499,53 +582,68 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ cat error.log
+  $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py
   readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (942) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (919) Server: badhttpserver\r\n
-  write(37 from 37) -> (882) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (841) Content-Type: application/mercurial-0.1\r\n
-  write(21 from 21) -> (820) Content-Length: 450\r\n
-  write(2 from 2) -> (818) \r\n
-  write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !)
+  sendall(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !)
+  write(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !)
+  write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !)
+  write(36 from 36) -> (942) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (919) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (882) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (841) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(21 from 21) -> (820) Content-Length: 450\r\n (no-py3 !)
+  write(2 from 2) -> (818) \r\n (no-py3 !)
+  write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !)
   readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
-  readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (332) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (309) Server: badhttpserver\r\n
-  write(37 from 37) -> (272) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (231) Content-Type: application/mercurial-0.1\r\n
-  write(20 from 20) -> (211) Content-Length: 42\r\n
-  write(2 from 2) -> (209) \r\n
-  write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
+  readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(159 from 159) -> (209) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
+  sendall(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
+  write(159 from 159) -> (209) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
+  write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py3 no-py36 !)
+  write(36 from 36) -> (332) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (309) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (272) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (231) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(20 from 20) -> (211) Content-Length: 42\r\n (no-py3 !)
+  write(2 from 2) -> (209) \r\n (no-py3 !)
+  write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !)
   readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(-1) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
-  readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (131) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (108) Server: badhttpserver\r\n
-  write(37 from 37) -> (71) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (30) Content-Type: application/mercurial-0.2\r\n
-  write(28 from 28) -> (2) Transfer-Encoding: chunked\r\n
-  write(2 from 2) -> (0) \r\n
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob)
+  readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py36 !)
+  write(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !)
+  write(36 from 36) -> (131) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (108) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (71) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (30) Content-Type: application/mercurial-0.2\r\n (no-py3 !)
+  write(28 from 28) -> (2) Transfer-Encoding: chunked\r\n (no-py3 !)
+  write(2 from 2) -> (0) \r\n (no-py3 !)
   write limit reached; closing socket
-  write(36) -> HTTP/1.1 500 Internal Server Error\r\n
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+  write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !)
+  write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !)
 
   $ rm -f error.log
 
@@ -562,56 +660,72 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ cat error.log
+  $ cat error.log | "$PYTHON" $TESTDIR/filtertraceback.py
   readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (966) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (943) Server: badhttpserver\r\n
-  write(37 from 37) -> (906) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (865) Content-Type: application/mercurial-0.1\r\n
-  write(21 from 21) -> (844) Content-Length: 450\r\n
-  write(2 from 2) -> (842) \r\n
-  write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !)
+  sendall(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !)
+  write(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !)
+  write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !)
+  write(36 from 36) -> (966) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (943) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (906) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (865) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(21 from 21) -> (844) Content-Length: 450\r\n (no-py3 !)
+  write(2 from 2) -> (842) \r\n (no-py3 !)
+  write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !)
   readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
-  readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (356) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (333) Server: badhttpserver\r\n
-  write(37 from 37) -> (296) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (255) Content-Type: application/mercurial-0.1\r\n
-  write(20 from 20) -> (235) Content-Length: 42\r\n
-  write(2 from 2) -> (233) \r\n
-  write(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(*) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
+  readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(159 from 159) -> (233) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
+  sendall(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
+  write(159 from 159) -> (233) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py3 no-py36 !)
+  write(36 from 36) -> (356) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (333) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (296) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (255) Content-Type: application/mercurial-0.1\r\n (no-py3 !)
+  write(20 from 20) -> (235) Content-Length: 42\r\n (no-py3 !)
+  write(2 from 2) -> (233) \r\n (no-py3 !)
+  write(42 from 42) -> (191) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py3 !)
   readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
-  readline(-1) -> (27) Accept-Encoding: identity\r\n
-  readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
-  readline(-1) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
-  readline(-1) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n
-  readline(-1) -> (35) accept: application/mercurial-0.1\r\n
-  readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
-  readline(-1) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n
-  readline(-1) -> (2) \r\n
-  write(36 from 36) -> (155) HTTP/1.1 200 Script output follows\r\n
-  write(23 from 23) -> (132) Server: badhttpserver\r\n
-  write(37 from 37) -> (95) Date: $HTTP_DATE$\r\n
-  write(41 from 41) -> (54) Content-Type: application/mercurial-0.2\r\n
-  write(28 from 28) -> (26) Transfer-Encoding: chunked\r\n
-  write(2 from 2) -> (24) \r\n
-  write(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc)
-  write(9 from 9) -> (9) 4\r\nnone\r\n
-  write(9 from 9) -> (0) 4\r\nHG20\r\n
+  readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
+  readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+  readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob)
+  readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
+  readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
+  readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
+  readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
+  readline(*) -> (2) \r\n (glob)
+  sendall(167 from 167) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py36 !)
+  sendall(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc) (py36 !)
+  sendall(9 from 9) -> (9) 4\r\nnone\r\n (py36 !)
+  sendall(9 from 9) -> (0) 4\r\nHG20\r\n (py36 !)
+  write(167 from 167) -> (24) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !)
+  write(36 from 36) -> (155) HTTP/1.1 200 Script output follows\r\n (no-py3 !)
+  write(23 from 23) -> (132) Server: badhttpserver\r\n (no-py3 !)
+  write(37 from 37) -> (95) Date: $HTTP_DATE$\r\n (no-py3 !)
+  write(41 from 41) -> (54) Content-Type: application/mercurial-0.2\r\n (no-py3 !)
+  write(28 from 28) -> (26) Transfer-Encoding: chunked\r\n (no-py3 !)
+  write(2 from 2) -> (24) \r\n (no-py3 !)
+  write(6 from 6) -> (18) 1\\r\\n\x04\\r\\n (esc) (no-py3 !)
+  write(9 from 9) -> (9) 4\r\nnone\r\n (no-py3 !)
+  write(9 from 9) -> (0) 4\r\nHG20\r\n (no-py3 !)
   write limit reached; closing socket
-  write(27) -> 15\r\nInternal Server Error\r\n
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+  write(27) -> 15\r\nInternal Server Error\r\n (no-py3 !)
 
   $ rm -f error.log
 
@@ -622,20 +736,41 @@
 
   $ hg clone http://localhost:$HGPORT/ clone
   requesting all changes
-  abort: HTTP request error (incomplete response; expected 4 bytes got 3)
+  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response; expected 4 bytes got 3) (no-py3 !)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -7 error.log
-  write(28 from 28) -> (23) Transfer-Encoding: chunked\r\n
-  write(2 from 2) -> (21) \r\n
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -9
+  sendall(167 from 167) -> (21) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
+  sendall(6 from 6) -> (15) 1\\r\\n\x04\\r\\n (esc)
+  sendall(9 from 9) -> (6) 4\r\nnone\r\n
+  sendall(6 from 9) -> (0) 4\r\nHG2
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -11
+  readline(65537) -> (2) \r\n (py3 !)
+  write(167 from 167) -> (21) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
+  write(28 from 28) -> (23) Transfer-Encoding: chunked\r\n (no-py3 !)
+  write(2 from 2) -> (21) \r\n (no-py3 !)
   write(6 from 6) -> (15) 1\\r\\n\x04\\r\\n (esc)
   write(9 from 9) -> (6) 4\r\nnone\r\n
   write(6 from 9) -> (0) 4\r\nHG2
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
 
@@ -646,21 +781,43 @@
 
   $ hg clone http://localhost:$HGPORT/ clone
   requesting all changes
-  abort: HTTP request error (incomplete response; expected 4 bytes got 3)
+  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response; expected 4 bytes got 3) (no-py3 !)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -8 error.log
-  write(28 from 28) -> (32) Transfer-Encoding: chunked\r\n
-  write(2 from 2) -> (30) \r\n
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -10
+  sendall(167 from 167) -> (30) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
+  sendall(6 from 6) -> (24) 1\\r\\n\x04\\r\\n (esc)
+  sendall(9 from 9) -> (15) 4\r\nnone\r\n
+  sendall(9 from 9) -> (6) 4\r\nHG20\r\n
+  sendall(6 from 9) -> (0) 4\\r\\n\x00\x00\x00 (esc)
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12
+  readline(65537) -> (2) \r\n (py3 !)
+  write(167 from 167) -> (30) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
+  write(28 from 28) -> (32) Transfer-Encoding: chunked\r\n (no-py3 !)
+  write(2 from 2) -> (30) \r\n (no-py3 !)
   write(6 from 6) -> (24) 1\\r\\n\x04\\r\\n (esc)
   write(9 from 9) -> (15) 4\r\nnone\r\n
   write(9 from 9) -> (6) 4\r\nHG20\r\n
   write(6 from 9) -> (0) 4\\r\\n\x00\x00\x00 (esc)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
 
@@ -677,15 +834,36 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -8 error.log
-  write(28 from 28) -> (35) Transfer-Encoding: chunked\r\n
-  write(2 from 2) -> (33) \r\n
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -10
+  sendall(167 from 167) -> (33) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
+  sendall(6 from 6) -> (27) 1\\r\\n\x04\\r\\n (esc)
+  sendall(9 from 9) -> (18) 4\r\nnone\r\n
+  sendall(9 from 9) -> (9) 4\r\nHG20\r\n
+  sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12
+  readline(65537) -> (2) \r\n (py3 !)
+  write(167 from 167) -> (33) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
+  write(28 from 28) -> (35) Transfer-Encoding: chunked\r\n (no-py3 !)
+  write(2 from 2) -> (33) \r\n (no-py3 !)
   write(6 from 6) -> (27) 1\\r\\n\x04\\r\\n (esc)
   write(9 from 9) -> (18) 4\r\nnone\r\n
   write(9 from 9) -> (9) 4\r\nHG20\r\n
   write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
 
@@ -702,16 +880,39 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -9 error.log
-  write(28 from 28) -> (44) Transfer-Encoding: chunked\r\n
-  write(2 from 2) -> (42) \r\n
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -11
+  sendall(167 from 167) -> (42) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
+  sendall(6 from 6) -> (36) 1\\r\\n\x04\\r\\n (esc)
+  sendall(9 from 9) -> (27) 4\r\nnone\r\n
+  sendall(9 from 9) -> (18) 4\r\nHG20\r\n
+  sendall(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -13
+  readline(65537) -> (2) \r\n (py3 !)
+  write(167 from 167) -> (42) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
+  write(28 from 28) -> (44) Transfer-Encoding: chunked\r\n (no-py3 !)
+  write(2 from 2) -> (42) \r\n (no-py3 !)
   write(6 from 6) -> (36) 1\\r\\n\x04\\r\\n (esc)
   write(9 from 9) -> (27) 4\r\nnone\r\n
   write(9 from 9) -> (18) 4\r\nHG20\r\n
   write(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
   write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
 
@@ -731,9 +932,27 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -10 error.log
-  write(28 from 28) -> (91) Transfer-Encoding: chunked\r\n
-  write(2 from 2) -> (89) \r\n
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12
+  sendall(167 from 167) -> (89) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
+  sendall(6 from 6) -> (83) 1\\r\\n\x04\\r\\n (esc)
+  sendall(9 from 9) -> (74) 4\r\nnone\r\n
+  sendall(9 from 9) -> (65) 4\r\nHG20\r\n
+  sendall(9 from 9) -> (56) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (47) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
+  sendall(47 from 47) -> (0) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02	\x01version02nbchanges1\\r\\n (esc)
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14
+  readline(65537) -> (2) \r\n (py3 !)
+  write(167 from 167) -> (89) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
+  write(28 from 28) -> (91) Transfer-Encoding: chunked\r\n (no-py3 !)
+  write(2 from 2) -> (89) \r\n (no-py3 !)
   write(6 from 6) -> (83) 1\\r\\n\x04\\r\\n (esc)
   write(9 from 9) -> (74) 4\r\nnone\r\n
   write(9 from 9) -> (65) 4\r\nHG20\r\n
@@ -741,7 +960,12 @@
   write(9 from 9) -> (47) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
   write(47 from 47) -> (0) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02	\x01version02nbchanges1\\r\\n (esc)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
 
@@ -755,14 +979,34 @@
   adding changesets
   transaction abort!
   rollback completed
-  abort: HTTP request error (incomplete response; expected 466 bytes got 7)
+  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response; expected 466 bytes got 7) (no-py3 !)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -11 error.log
-  write(2 from 2) -> (110) \r\n
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14
+  sendall(167 from 167) -> (110) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
+  sendall(6 from 6) -> (104) 1\\r\\n\x04\\r\\n (esc)
+  sendall(9 from 9) -> (95) 4\r\nnone\r\n
+  sendall(9 from 9) -> (86) 4\r\nHG20\r\n
+  sendall(9 from 9) -> (77) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (68) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
+  sendall(47 from 47) -> (21) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02	\x01version02nbchanges1\\r\\n (esc)
+  sendall(9 from 9) -> (12) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
+  sendall(12 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1d (esc)
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -15
+  write(167 from 167) -> (110) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
+  write(2 from 2) -> (110) \r\n (no-py3 !)
   write(6 from 6) -> (104) 1\\r\\n\x04\\r\\n (esc)
   write(9 from 9) -> (95) 4\r\nnone\r\n
   write(9 from 9) -> (86) 4\r\nHG20\r\n
@@ -772,7 +1016,12 @@
   write(9 from 9) -> (12) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
   write(12 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1d (esc)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
 
@@ -792,9 +1041,29 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -12 error.log
-  write(28 from 28) -> (573) Transfer-Encoding: chunked\r\n
-  write(2 from 2) -> (571) \r\n
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14
+  sendall(167 from 167) -> (571) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
+  sendall(6 from 6) -> (565) 1\\r\\n\x04\\r\\n (esc)
+  sendall(9 from 9) -> (556) 4\r\nnone\r\n
+  sendall(9 from 9) -> (547) 4\r\nHG20\r\n
+  sendall(9 from 9) -> (538) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (529) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
+  sendall(47 from 47) -> (482) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02	\x01version02nbchanges1\\r\\n (esc)
+  sendall(9 from 9) -> (473) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
+  sendall(473 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -16
+  readline(65537) -> (2) \r\n (py3 !)
+  write(167 from 167) -> (571) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py3 !)
+  write(28 from 28) -> (573) Transfer-Encoding: chunked\r\n (no-py3 !)
+  write(2 from 2) -> (571) \r\n (no-py3 !)
   write(6 from 6) -> (565) 1\\r\\n\x04\\r\\n (esc)
   write(9 from 9) -> (556) 4\r\nnone\r\n
   write(9 from 9) -> (547) 4\r\nHG20\r\n
@@ -804,7 +1073,12 @@
   write(9 from 9) -> (473) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
   write(473 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
 
@@ -821,13 +1095,34 @@
   added 1 changesets with 1 changes to 1 files
   transaction abort!
   rollback completed
-  abort: HTTP request error (incomplete response; expected 32 bytes got 9)
+  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response; expected 32 bytes got 9) (no-py3 !)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -13 error.log
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -16
+  sendall(6 from 6) -> (596) 1\\r\\n\x04\\r\\n (esc)
+  sendall(9 from 9) -> (587) 4\r\nnone\r\n
+  sendall(9 from 9) -> (578) 4\r\nHG20\r\n
+  sendall(9 from 9) -> (569) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (560) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
+  sendall(47 from 47) -> (513) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02	\x01version02nbchanges1\\r\\n (esc)
+  sendall(9 from 9) -> (504) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
+  sendall(473 from 473) -> (31) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (22) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (13) 4\\r\\n\x00\x00\x00 \\r\\n (esc)
+  sendall(13 from 38) -> (0) 20\\r\\n\x08LISTKEYS (esc)
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -17
   write(6 from 6) -> (596) 1\\r\\n\x04\\r\\n (esc)
   write(9 from 9) -> (587) 4\r\nnone\r\n
   write(9 from 9) -> (578) 4\r\nHG20\r\n
@@ -840,7 +1135,12 @@
   write(9 from 9) -> (13) 4\\r\\n\x00\x00\x00 \\r\\n (esc)
   write(13 from 38) -> (0) 20\\r\\n\x08LISTKEYS (esc)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
 
@@ -863,7 +1163,36 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -22 error.log
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -25
+  sendall(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
+  sendall(47 from 47) -> (795) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02	\x01version02nbchanges1\\r\\n (esc)
+  sendall(9 from 9) -> (786) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
+  sendall(473 from 473) -> (313) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (304) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (295) 4\\r\\n\x00\x00\x00 \\r\\n (esc)
+  sendall(38 from 38) -> (257) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00	\x06namespacephases\\r\\n (esc)
+  sendall(9 from 9) -> (248) 4\\r\\n\x00\x00\x00:\\r\\n (esc)
+  sendall(64 from 64) -> (184) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c	1\npublishing	True\r\n
+  sendall(9 from 9) -> (175) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (166) 4\\r\\n\x00\x00\x00#\\r\\n (esc)
+  sendall(41 from 41) -> (125) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00		namespacebookmarks\\r\\n (esc)
+  sendall(9 from 9) -> (116) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (107) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc)
+  sendall(35 from 35) -> (72) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (63) 4\\r\\n\x00\x00\x00'\\r\\n (esc)
+  sendall(45 from 45) -> (18) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc)
+  sendall(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26
   write(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
   write(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
   write(47 from 47) -> (795) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02	\x01version02nbchanges1\\r\\n (esc)
@@ -885,7 +1214,12 @@
   write(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
   write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
   $ rm -rf clone
@@ -907,7 +1241,37 @@
 
   $ killdaemons.py $DAEMON_PIDS
 
-  $ tail -23 error.log
+#if py36
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26
+  sendall(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
+  sendall(47 from 47) -> (798) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02	\x01version02nbchanges1\\r\\n (esc)
+  sendall(9 from 9) -> (789) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
+  sendall(473 from 473) -> (316) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (307) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (298) 4\\r\\n\x00\x00\x00 \\r\\n (esc)
+  sendall(38 from 38) -> (260) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00	\x06namespacephases\\r\\n (esc)
+  sendall(9 from 9) -> (251) 4\\r\\n\x00\x00\x00:\\r\\n (esc)
+  sendall(64 from 64) -> (187) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c	1\npublishing	True\r\n
+  sendall(9 from 9) -> (178) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (169) 4\\r\\n\x00\x00\x00#\\r\\n (esc)
+  sendall(41 from 41) -> (128) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00		namespacebookmarks\\r\\n (esc)
+  sendall(9 from 9) -> (119) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (110) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc)
+  sendall(35 from 35) -> (75) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (66) 4\\r\\n\x00\x00\x00'\\r\\n (esc)
+  sendall(45 from 45) -> (21) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc)
+  sendall(9 from 9) -> (12) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
+  sendall(3 from 5) -> (0) 0\r\n
+  write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
+
+#else
+  $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -27
   write(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
   write(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
   write(47 from 47) -> (798) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02	\x01version02nbchanges1\\r\\n (esc)
@@ -930,7 +1294,12 @@
   write(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
   write(3 from 5) -> (0) 0\r\n
   write limit reached; closing socket
+  $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
+  Traceback (most recent call last):
+  Exception: connection closed after sending N bytes
+  
   write(27) -> 15\r\nInternal Server Error\r\n
+#endif
 
   $ rm -f error.log
   $ rm -rf clone
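
The hunks above stop tail-ing error.log directly and instead pipe it through $TESTDIR/filtertraceback.py, so the expected output can show each server-side traceback collapsed to its header plus the final exception line no matter how many frames the real stack has. The filter script itself is not shown here; purely as an illustration of that kind of filtering (names and details are assumptions, not the actual helper), a minimal version could look like:

    # Minimal sketch of a traceback-collapsing filter (the real
    # filtertraceback.py may behave differently): keep the
    # "Traceback (most recent call last):" header and the final,
    # unindented exception line; drop the indented frame and source
    # lines in between so the expected output stays stable.
    import sys

    def collapse(lines):
        in_traceback = False
        for line in lines:
            if line.startswith('Traceback (most recent call last):'):
                in_traceback = True
                yield line
                continue
            if in_traceback and line.startswith((' ', '\t')):
                continue                  # a stack frame or source line
            in_traceback = False
            yield line

    if __name__ == '__main__':
        sys.stdout.writelines(collapse(sys.stdin))
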
--- a/tests/test-http-protocol.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-http-protocol.t	Mon Feb 04 20:35:21 2019 +0300
@@ -179,6 +179,7 @@
   > command listkeys
   >     namespace namespaces
   > EOF
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-0.1\r\n
@@ -194,6 +195,7 @@
   s>     \r\n
   s>     batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
   sending listkeys command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=listkeys HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgArg-1,X-HgProto-1\r\n
@@ -228,6 +230,7 @@
   >     x-hgarg-1: namespace=namespaces
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=listkeys HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -250,6 +253,7 @@
   $ hg --config experimental.httppeer.advertise-v2=true --verbose debugwireproto http://$LOCALIP:$HGPORT << EOF
   > command heads
   > EOF
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1,X-HgUpgrade-1\r\n
@@ -268,6 +272,7 @@
   s>     \r\n
   s>     batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
   sending heads command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=heads HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1\r\n
@@ -299,6 +304,7 @@
   $ hg --config experimental.httppeer.advertise-v2=true --config experimental.httppeer.v2-encoder-order=identity --verbose debugwireproto http://$LOCALIP:$HGPORT << EOF
   > command heads
   > EOF
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1,X-HgUpgrade-1\r\n
@@ -317,6 +323,7 @@
   s>     \r\n
   s>     \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
   sending heads command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -337,23 +344,19 @@
   s>     \t\x00\x00\x01\x00\x02\x01\x92
   s>     Hidentity
   s>     \r\n
-  received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   s>     13\r\n
   s>     \x0b\x00\x00\x01\x00\x02\x041
   s>     \xa1FstatusBok
   s>     \r\n
-  received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     1e\r\n
   s>     \x16\x00\x00\x01\x00\x02\x041
   s>     \x81T\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
   s>     \r\n
-  received frame(size=22; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     8\r\n
   s>     \x00\x00\x00\x01\x00\x02\x002
   s>     \r\n
   s>     0\r\n
   s>     \r\n
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
   response: [
     b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
   ]
@@ -386,7 +389,7 @@
   >     relpath = path[len(b'/redirector'):]
   >     res.status = b'301 Redirect'
   >     newurl = b'%s/redirected%s' % (req.baseurl, relpath)
-  >     if not repo.ui.configbool('testing', 'redirectqs', True) and b'?' in newurl:
+  >     if not repo.ui.configbool(b'testing', b'redirectqs', True) and b'?' in newurl:
   >         newurl = newurl[0:newurl.index(b'?')]
   >     res.headers[b'Location'] = newurl
   >     res.headers[b'Content-Type'] = b'text/plain'
@@ -408,6 +411,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirector?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -422,6 +426,7 @@
   s>     Content-Length: 10\r\n
   s>     \r\n
   s>     redirected
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirected?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -441,6 +446,7 @@
   $ hg --verbose debugwireproto http://$LOCALIP:$HGPORT/redirector << EOF
   > command heads
   > EOF
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirector?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-0.1\r\n
@@ -456,6 +462,7 @@
   s>     Content-Length: 10\r\n
   s>     \r\n
   s>     redirected
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirected?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-0.1\r\n
@@ -472,6 +479,7 @@
   real URL is http://$LOCALIP:$HGPORT/redirected (glob)
   s>     batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
   sending heads command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirected?cmd=heads HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1\r\n
@@ -509,6 +517,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirector?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -523,6 +532,7 @@
   s>     Content-Length: 10\r\n
   s>     \r\n
   s>     redirected
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirected HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -664,6 +674,7 @@
   $ hg --verbose debugwireproto http://$LOCALIP:$HGPORT/redirector << EOF
   > command heads
   > EOF
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirector?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-0.1\r\n
@@ -679,6 +690,7 @@
   s>     Content-Length: 10\r\n
   s>     \r\n
   s>     redirected
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirected HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-0.1\r\n
@@ -721,6 +733,7 @@
   s>     <li class="active">log</li>\n
   s>     <li><a href="/redirected/graph/tip">graph</a></li>\n
   s>     <li><a href="/redirected/tags">tags</a
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirected?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-0.1\r\n
@@ -737,6 +750,7 @@
   real URL is http://$LOCALIP:$HGPORT/redirected (glob)
   s>     batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
   sending heads command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /redirected?cmd=heads HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1\r\n
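
The new "s> setsockopt(6, 1, 1) -> None (?)" lines record the client tweaking its socket before each request; the trailing "(?)" marks them as optional output, so runs that do not emit them still pass. The raw numbers almost certainly correspond to IPPROTO_TCP (6) and TCP_NODELAY (1) with a value of 1, i.e. disabling Nagle's algorithm. With symbolic constants the same call reads (an illustration, not code from this changeset):

    # Equivalent of the logged setsockopt(6, 1, 1), assuming the usual
    # constant values (IPPROTO_TCP == 6, TCP_NODELAY == 1 on common
    # platforms): turn off Nagle's algorithm for lower request latency.
    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    sock.close()
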
--- a/tests/test-import-git.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-import-git.t	Mon Feb 04 20:35:21 2019 +0300
@@ -826,7 +826,7 @@
 
   $ hg revert -qa
   $ hg --encoding utf-8 import - <<EOF
-  > From: =?UTF-8?q?Rapha=C3=ABl=20Hertzog?= <hertzog@debian.org>
+  > From: =?utf-8?q?Rapha=C3=ABl_Hertzog_=3Chertzog=40debian=2Eorg=3E?=
   > Subject: [PATCH] =?UTF-8?q?=C5=A7=E2=82=AC=C3=9F=E1=B9=AA?=
   > 
   > diff --git a/a b/a
--- a/tests/test-install.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-install.t	Mon Feb 04 20:35:21 2019 +0300
@@ -161,6 +161,7 @@
   > import subprocess
   > import sys
   > import xml.etree.ElementTree as ET
+  > from mercurial import pycompat
   > 
   > # MSYS mangles the path if it expands $TESTDIR
   > testdir = os.environ['TESTDIR']
@@ -177,7 +178,7 @@
   >     files = node.findall('./{%(wix)s}Component/{%(wix)s}File' % ns)
   > 
   >     for f in files:
-  >         yield relpath + f.attrib['Name']
+  >         yield pycompat.sysbytes(relpath + f.attrib['Name'])
   > 
   > def hgdirectory(relpath):
   >     '''generator of tracked files, rooted at relpath'''
@@ -187,10 +188,9 @@
   >                             stderr=subprocess.PIPE)
   >     output = proc.communicate()[0]
   > 
-  >     slash = '/'
   >     for line in output.splitlines():
   >         if os.name == 'nt':
-  >             yield line.replace(os.sep, slash)
+  >             yield line.replace(pycompat.sysbytes(os.sep), b'/')
   >         else:
   >             yield line
   > 
@@ -204,11 +204,11 @@
   > 
   > print('Not installed:')
   > for f in sorted(set(tracked) - set(installed)):
-  >     print('  %s' % f)
+  >     print('  %s' % pycompat.sysstr(f))
   > 
   > print('Not tracked:')
   > for f in sorted(set(installed) - set(tracked)):
-  >     print('  %s' % f)
+  >     print('  %s' % pycompat.sysstr(f))
   > EOF
 
   $ ( testrepohgenv; "$PYTHON" wixxml.py help )
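
wixxml.py now routes everything it compares through pycompat.sysbytes()/sysstr(), so the WiX manifest names (native str) and the output of hg files (bytes) end up in a single type before the set arithmetic and the final printing. Roughly, those helpers normalize as below; this is a simplified stand-in for ASCII-only names, not Mercurial's actual implementation:

    # Simplified stand-ins for pycompat.sysbytes()/sysstr() (the real
    # helpers live in mercurial/pycompat.py and handle more cases):
    # bring native str and bytes to one type before comparing sets.
    def sysbytes(s):
        return s if isinstance(s, bytes) else s.encode('utf-8')

    def sysstr(s):
        return s if isinstance(s, str) else s.decode('utf-8')

    tracked = {sysbytes('mercurial/commands.py')}   # from the WiX XML (str)
    installed = {b'mercurial/commands.py'}          # from `hg files` (bytes)

    assert tracked == installed
    for name in sorted(tracked - installed):
        print('  %s' % sysstr(name))                # nothing to report here
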
--- a/tests/test-journal-exists.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-journal-exists.t	Mon Feb 04 20:35:21 2019 +0300
@@ -29,7 +29,7 @@
 
   $ hg -R foo unbundle repo.hg
   adding changesets
-  abort: Permission denied: $TESTTMP/foo/.hg/store/.00changelog.i-* (glob)
+  abort: Permission denied: '$TESTTMP/foo/.hg/store/.00changelog.i-*' (glob)
   [255]
 
   $ if test -f foo/.hg/store/journal; then echo 'journal exists :-('; fi
--- a/tests/test-lfs-serve-access.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-lfs-serve-access.t	Mon Feb 04 20:35:21 2019 +0300
@@ -226,9 +226,9 @@
   >             # One time simulation of a read error
   >             if _readerr:
   >                 _readerr = False
-  >                 raise IOError(errno.EIO, '%s: I/O error' % oid)
+  >                 raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
   >             # Simulate corrupt content on client download
-  >             blobstore._verify(oid, 'dummy content')
+  >             blobstore._verify(oid, b'dummy content')
   > 
   >         def verify(self, oid):
   >             '''Called in the server to populate the Batch API response,
@@ -239,7 +239,7 @@
   >             global _numverifies
   >             _numverifies += 1
   >             if _numverifies <= 2:
-  >                 raise IOError(errno.EIO, '%s: I/O error' % oid)
+  >                 raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
   >             return super(badstore, self).verify(oid)
   > 
   >     store.__class__ = badstore
@@ -339,14 +339,14 @@
   $LOCALIP - - [$ERRDATE$] HG error:  Exception happened while processing request '/.git/info/lfs/objects/batch': (glob)
   $LOCALIP - - [$ERRDATE$] HG error:  Traceback (most recent call last): (glob)
   $LOCALIP - - [$ERRDATE$] HG error:      verifies = store.verify(oid) (glob)
-  $LOCALIP - - [$ERRDATE$] HG error:      raise IOError(errno.EIO, '%s: I/O error' % oid) (glob)
-  $LOCALIP - - [$ERRDATE$] HG error:  IOError: [Errno 5] f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e: I/O error (glob)
+  $LOCALIP - - [$ERRDATE$] HG error:      raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob)
+  $LOCALIP - - [$ERRDATE$] HG error:  *Error: [Errno 5] f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e: I/O error (glob)
   $LOCALIP - - [$ERRDATE$] HG error:   (glob)
   $LOCALIP - - [$ERRDATE$] HG error:  Exception happened while processing request '/.git/info/lfs/objects/batch': (glob)
   $LOCALIP - - [$ERRDATE$] HG error:  Traceback (most recent call last): (glob)
   $LOCALIP - - [$ERRDATE$] HG error:      verifies = store.verify(oid) (glob)
-  $LOCALIP - - [$ERRDATE$] HG error:      raise IOError(errno.EIO, '%s: I/O error' % oid) (glob)
-  $LOCALIP - - [$ERRDATE$] HG error:  IOError: [Errno 5] b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c: I/O error (glob)
+  $LOCALIP - - [$ERRDATE$] HG error:      raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8")) (glob)
+  $LOCALIP - - [$ERRDATE$] HG error:  *Error: [Errno 5] b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c: I/O error (glob)
   $LOCALIP - - [$ERRDATE$] HG error:   (glob)
   $LOCALIP - - [$ERRDATE$] HG error:  Exception happened while processing request '/.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c': (glob)
   $LOCALIP - - [$ERRDATE$] HG error:  Traceback (most recent call last): (glob)
@@ -362,19 +362,19 @@
       for chunk in self.server.application(env, self._start_response):
       for r in self._runwsgi(req, res, repo):
       rctx, req, res, self.check_perm)
-      return func(*(args + a), **kw)
+      return func(*(args + a), **kw) (no-py3 !)
       lambda perm:
       res.setbodybytes(localstore.read(oid))
       blob = self._read(self.vfs, oid, verify)
-      raise IOError(errno.EIO, '%s: I/O error' % oid)
-  IOError: [Errno 5] 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d: I/O error
+      raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
+  *Error: [Errno 5] 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d: I/O error (glob)
   
   $LOCALIP - - [$ERRDATE$] HG error:  Exception happened while processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob)
   $LOCALIP - - [$ERRDATE$] HG error:  Traceback (most recent call last): (glob)
   $LOCALIP - - [$ERRDATE$] HG error:      res.setbodybytes(localstore.read(oid)) (glob)
   $LOCALIP - - [$ERRDATE$] HG error:      blob = self._read(self.vfs, oid, verify) (glob)
-  $LOCALIP - - [$ERRDATE$] HG error:      blobstore._verify(oid, 'dummy content') (glob)
-  $LOCALIP - - [$ERRDATE$] HG error:      hint=_('run hg verify')) (glob)
+  $LOCALIP - - [$ERRDATE$] HG error:      blobstore._verify(oid, b'dummy content') (glob)
+  $LOCALIP - - [$ERRDATE$] HG error:      hint=_(b'run hg verify')) (glob)
   $LOCALIP - - [$ERRDATE$] HG error:  LfsCorruptionError: detected corrupt lfs object: 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d (glob)
   $LOCALIP - - [$ERRDATE$] HG error:   (glob)
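
The r'%s: I/O error' % oid.decode("utf-8") rewrites above, together with the "*Error:" globs, exist because the oid is bytes and, under Python 3, interpolating bytes into a str picks up the repr, while IOError shows up as OSError in tracebacks. A quick illustration of the formatting point (Python 3 semantics):

    # Python 3: %-formatting bytes into a str embeds repr(), hence the
    # explicit decode() before building the IOError message.
    oid = b'b5bb9d8014a0f9b1'

    wrong = '%s: I/O error' % oid                   # "b'b5bb9d8014a0f9b1': I/O error"
    right = '%s: I/O error' % oid.decode('utf-8')   # "b5bb9d8014a0f9b1: I/O error"

    assert wrong.startswith("b'")
    assert right == 'b5bb9d8014a0f9b1: I/O error'
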
 
--- a/tests/test-lfs-serve.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-lfs-serve.t	Mon Feb 04 20:35:21 2019 +0300
@@ -51,7 +51,7 @@
   >     opts[b'manifest'] = False
   >     opts[b'dir'] = False
   >     rl = cmdutil.openrevlog(repo, b'debugprocessors', file_, opts)
-  >     for flag, proc in rl._flagprocessors.iteritems():
+  >     for flag, proc in rl._flagprocessors.items():
   >         ui.status(b"registered processor '%#x'\n" % (flag))
   > EOF
 
@@ -110,14 +110,14 @@
   ... def diff(server):
   ...     readchannel(server)
   ...     # run an arbitrary command in the repo with the extension loaded
-  ...     runcommand(server, ['id', '-R', '../cmdservelfs'])
+  ...     runcommand(server, [b'id', b'-R', b'../cmdservelfs'])
   ...     # now run a command in a repo without the extension to ensure that
   ...     # files are added safely..
-  ...     runcommand(server, ['ci', '-Aqm', 'non-lfs'])
+  ...     runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
   ...     # .. and that scmutil.prefetchfiles() safely no-ops..
-  ...     runcommand(server, ['diff', '-r', '.~1'])
+  ...     runcommand(server, [b'diff', b'-r', b'.~1'])
   ...     # .. and that debugupgraderepo safely no-ops.
-  ...     runcommand(server, ['debugupgraderepo', '-q', '--run'])
+  ...     runcommand(server, [b'debugupgraderepo', b'-q', b'--run'])
   *** runcommand id -R ../cmdservelfs
   000000000000 tip
   *** runcommand ci -Aqm non-lfs
@@ -257,12 +257,12 @@
   ... def addrequirement(server):
   ...     readchannel(server)
   ...     # change the repo in a way that adds the lfs requirement
-  ...     runcommand(server, ['pull', '-qu'])
+  ...     runcommand(server, [b'pull', b'-qu'])
   ...     # Now cause the requirement adding hook to fire again, without going
   ...     # through reposetup() again.
   ...     with open('file.txt', 'wb') as fp:
-  ...         fp.write('data')
-  ...     runcommand(server, ['ci', '-Aqm', 'non-lfs'])
+  ...         fp.write(b'data')
+  ...     runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
   *** runcommand pull -qu
   *** runcommand ci -Aqm non-lfs
 
@@ -359,22 +359,24 @@
   $ cp $HGRCPATH.orig $HGRCPATH
 
   >>> from __future__ import absolute_import
-  >>> from hgclient import check, readchannel, runcommand
+  >>> from hgclient import bprint, check, readchannel, runcommand, stdout
   >>> @check
   ... def checkflags(server):
   ...     readchannel(server)
-  ...     print('')
-  ...     print('# LFS required- both lfs and non-lfs revlogs have 0x2000 flag')
-  ...     runcommand(server, ['debugprocessors', 'lfs.bin', '-R',
-  ...                '../server'])
-  ...     runcommand(server, ['debugprocessors', 'nonlfs2.txt', '-R',
-  ...                '../server'])
-  ...     runcommand(server, ['config', 'extensions', '--cwd',
-  ...                '../server'])
+  ...     bprint(b'')
+  ...     bprint(b'# LFS required- both lfs and non-lfs revlogs have 0x2000 flag')
+  ...     stdout.flush()
+  ...     runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
+  ...                b'../server'])
+  ...     runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
+  ...                b'../server'])
+  ...     runcommand(server, [b'config', b'extensions', b'--cwd',
+  ...                b'../server'])
   ... 
-  ...     print("\n# LFS not enabled- revlogs don't have 0x2000 flag")
-  ...     runcommand(server, ['debugprocessors', 'nonlfs3.txt'])
-  ...     runcommand(server, ['config', 'extensions'])
+  ...     bprint(b"\n# LFS not enabled- revlogs don't have 0x2000 flag")
+  ...     stdout.flush()
+  ...     runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
+  ...     runcommand(server, [b'config', b'extensions'])
   
   # LFS required- both lfs and non-lfs revlogs have 0x2000 flag
   *** runcommand debugprocessors lfs.bin -R ../server
@@ -403,28 +405,31 @@
   > EOF
 
   >>> from __future__ import absolute_import, print_function
-  >>> from hgclient import check, readchannel, runcommand
+  >>> from hgclient import bprint, check, readchannel, runcommand, stdout
   >>> @check
   ... def checkflags2(server):
   ...     readchannel(server)
-  ...     print('')
-  ...     print('# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag')
-  ...     runcommand(server, ['debugprocessors', 'lfs.bin', '-R',
-  ...                '../server'])
-  ...     runcommand(server, ['debugprocessors', 'nonlfs2.txt', '-R',
-  ...                '../server'])
-  ...     runcommand(server, ['config', 'extensions', '--cwd',
-  ...                '../server'])
+  ...     bprint(b'')
+  ...     bprint(b'# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag')
+  ...     stdout.flush()
+  ...     runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
+  ...                b'../server'])
+  ...     runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
+  ...                b'../server'])
+  ...     runcommand(server, [b'config', b'extensions', b'--cwd',
+  ...                b'../server'])
   ... 
-  ...     print('\n# LFS enabled without requirement- revlogs have 0x2000 flag')
-  ...     runcommand(server, ['debugprocessors', 'nonlfs3.txt'])
-  ...     runcommand(server, ['config', 'extensions'])
+  ...     bprint(b'\n# LFS enabled without requirement- revlogs have 0x2000 flag')
+  ...     stdout.flush()
+  ...     runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
+  ...     runcommand(server, [b'config', b'extensions'])
   ... 
-  ...     print("\n# LFS disabled locally- revlogs don't have 0x2000 flag")
-  ...     runcommand(server, ['debugprocessors', 'nonlfs.txt', '-R',
-  ...                '../nonlfs'])
-  ...     runcommand(server, ['config', 'extensions', '--cwd',
-  ...                '../nonlfs'])
+  ...     bprint(b"\n# LFS disabled locally- revlogs don't have 0x2000 flag")
+  ...     stdout.flush()
+  ...     runcommand(server, [b'debugprocessors', b'nonlfs.txt', b'-R',
+  ...                b'../nonlfs'])
+  ...     runcommand(server, [b'config', b'extensions', b'--cwd',
+  ...                b'../nonlfs'])
   
   # LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag
   *** runcommand debugprocessors lfs.bin -R ../server
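
The switch from print() to the bprint() helper imported from hgclient, plus the explicit stdout.flush() calls, keeps these inline banners byte-exact and correctly ordered relative to the command server's own output on both Python 2 and 3. A minimal bprint() in that spirit (the real helper in tests/hgclient.py may differ):

    # Minimal bprint(): write bytes to the binary stdout so the banner
    # is identical under Python 2 and 3; flush explicitly, as the test
    # does, so it cannot interleave with the command server's output.
    import sys

    stdout = getattr(sys.stdout, 'buffer', sys.stdout)

    def bprint(*args):
        stdout.write(b' '.join(args) + b'\n')

    bprint(b'# LFS required- both lfs and non-lfs revlogs have 0x2000 flag')
    stdout.flush()
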
--- a/tests/test-linelog.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-linelog.py	Mon Feb 04 20:35:21 2019 +0300
@@ -15,7 +15,6 @@
 def _genedits(seed, endrev):
     lines = []
     random.seed(seed)
-    rev = 0
     for rev in range(0, endrev):
         n = len(lines)
         a1 = random.randint(0, n)
--- a/tests/test-lock.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-lock.py	Mon Feb 04 20:35:21 2019 +0300
@@ -141,7 +141,7 @@
         state.assertacquirecalled(True)
 
         # fake a fork
-        forklock = copy.deepcopy(lock)
+        forklock = copy.copy(lock)
         forklock._pidoffset = 1
         forklock.release()
         state.assertreleasecalled(False)
@@ -238,7 +238,7 @@
             childstate.assertacquirecalled(True)
 
             # fork the child lock
-            forkchildlock = copy.deepcopy(childlock)
+            forkchildlock = copy.copy(childlock)
             forkchildlock._pidoffset += 1
             forkchildlock.release()
             childstate.assertreleasecalled(False)
@@ -290,7 +290,7 @@
             self.fail("unexpected lock acquisition")
         except error.LockHeld as why:
             self.assertTrue(why.errno == errno.ETIMEDOUT)
-            self.assertTrue(why.locker == "")
+            self.assertTrue(why.locker == b"")
             state.assertlockexists(False)
 
 if __name__ == '__main__':
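
Faking a fork in the lock tests now uses copy.copy() instead of copy.deepcopy(): a shallow copy shares every referenced object with the original and only needs _pidoffset rebound on the clone, which is all the fork simulation requires, and it avoids deep-copying state that need not (or cannot) be duplicated. The difference in one small example:

    # copy.copy() gives the clone its own attribute dict but shares the
    # referenced objects; copy.deepcopy() would duplicate them as well.
    import copy

    class fakelock(object):
        def __init__(self):
            self.vfs = {'held': 1}     # stands in for shared lock state
            self._pidoffset = 0

    orig = fakelock()
    clone = copy.copy(orig)
    clone._pidoffset = 1               # only the "fork" needs to differ

    assert clone.vfs is orig.vfs                      # shared by the shallow copy
    assert copy.deepcopy(orig).vfs is not orig.vfs    # duplicated by deepcopy
    assert (orig._pidoffset, clone._pidoffset) == (0, 1)
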
--- a/tests/test-match.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-match.py	Mon Feb 04 20:35:21 2019 +0300
@@ -255,20 +255,19 @@
         m1 = matchmod.alwaysmatcher(b'', b'')
         m2 = matchmod.nevermatcher(b'', b'')
         dm = matchmod.differencematcher(m1, m2)
-        # dm should be equivalent to a alwaysmatcher. OPT: if m2 is a
-        # nevermatcher, we could return 'all' for these.
+        # dm should be equivalent to a alwaysmatcher.
         #
         # We're testing Equal-to-True instead of just 'assertTrue' since
         # assertTrue does NOT verify that it's a bool, just that it's truthy.
         # While we may want to eventually make these return 'all', they should
         # not currently do so.
-        self.assertEqual(dm.visitdir(b'.'), True)
-        self.assertEqual(dm.visitdir(b'dir'), True)
-        self.assertEqual(dm.visitdir(b'dir/subdir'), True)
-        self.assertEqual(dm.visitdir(b'dir/subdir/z'), True)
-        self.assertEqual(dm.visitdir(b'dir/foo'), True)
-        self.assertEqual(dm.visitdir(b'dir/subdir/x'), True)
-        self.assertEqual(dm.visitdir(b'folder'), True)
+        self.assertEqual(dm.visitdir(b'.'), 'all')
+        self.assertEqual(dm.visitdir(b'dir'), 'all')
+        self.assertEqual(dm.visitdir(b'dir/subdir'), 'all')
+        self.assertEqual(dm.visitdir(b'dir/subdir/z'), 'all')
+        self.assertEqual(dm.visitdir(b'dir/foo'), 'all')
+        self.assertEqual(dm.visitdir(b'dir/subdir/x'), 'all')
+        self.assertEqual(dm.visitdir(b'folder'), 'all')
 
     def testVisitchildrensetM2never(self):
         m1 = matchmod.alwaysmatcher(b'', b'')
@@ -295,9 +294,8 @@
         # an 'all' pattern, just True.
         self.assertEqual(dm.visitdir(b'dir/subdir/z'), True)
         self.assertEqual(dm.visitdir(b'dir/subdir/x'), True)
-        # OPT: We could return 'all' for these.
-        self.assertEqual(dm.visitdir(b'dir/foo'), True)
-        self.assertEqual(dm.visitdir(b'folder'), True)
+        self.assertEqual(dm.visitdir(b'dir/foo'), 'all')
+        self.assertEqual(dm.visitdir(b'folder'), 'all')
 
     def testVisitchildrensetM2SubdirPrefix(self):
         m1 = matchmod.alwaysmatcher(b'', b'')
@@ -322,7 +320,7 @@
         dm = matchmod.differencematcher(m1, m2)
         self.assertEqual(dm.visitdir(b'.'), True)
         self.assertEqual(dm.visitdir(b'dir'), True)
-        self.assertEqual(dm.visitdir(b'dir/subdir'), True)
+        self.assertEqual(dm.visitdir(b'dir/subdir'), 'all')
         self.assertFalse(dm.visitdir(b'dir/foo'))
         self.assertFalse(dm.visitdir(b'folder'))
         # OPT: We should probably return False for these; we don't because
--- a/tests/test-merge10.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-merge10.t	Mon Feb 04 20:35:21 2019 +0300
@@ -37,8 +37,9 @@
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg up -C 2
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ hg merge
-  merging testdir/subdir/a and testdir/a to testdir/subdir/a
+Abuse this test to also check that merge respects ui.relative-paths
+  $ hg --cwd testdir merge --config ui.relative-paths=yes
+  merging subdir/a and a to subdir/a
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ hg stat
--- a/tests/test-missing-capability.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-missing-capability.t	Mon Feb 04 20:35:21 2019 +0300
@@ -15,7 +15,7 @@
   > from mercurial import extensions, wireprotov1server
   > def wcapabilities(orig, *args, **kwargs):
   >   cap = orig(*args, **kwargs)
-  >   cap.remove('$1')
+  >   cap.remove(b'$1')
   >   return cap
   > extensions.wrapfunction(wireprotov1server, '_capabilities', wcapabilities)
   > EOF
--- a/tests/test-mq-eol.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-mq-eol.t	Mon Feb 04 20:35:21 2019 +0300
@@ -30,10 +30,14 @@
 
   $ cat > cateol.py <<EOF
   > import sys
+  > try:
+  >     stdout = sys.stdout.buffer
+  > except AttributeError:
+  >     stdout = sys.stdout
   > for line in open(sys.argv[1], 'rb'):
   >     line = line.replace(b'\r', b'<CR>')
   >     line = line.replace(b'\n', b'<LF>')
-  >     print(line)
+  >     stdout.write(line + b'\n')
   > EOF
 
   $ hg init repo
--- a/tests/test-mq-missingfiles.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-mq-missingfiles.t	Mon Feb 04 20:35:21 2019 +0300
@@ -5,6 +5,10 @@
 
   $ cat > writelines.py <<EOF
   > import sys
+  > if sys.version_info[0] >= 3:
+  >     encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8')
+  > else:
+  >     encode = lambda x: x.decode('string_escape')
   > path = sys.argv[1]
   > args = sys.argv[2:]
   > assert (len(args) % 2) == 0
@@ -13,7 +17,7 @@
   > for i in range(len(args) // 2):
   >    count, s = args[2*i:2*i+2]
   >    count = int(count)
-  >    s = s.decode('string_escape')
+  >    s = encode(s)
   >    f.write(s*count)
   > f.close()
   > EOF
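
writelines.py has to turn backslash escapes passed on the command line into real bytes; 'string_escape' only exists on Python 2, so the helper now picks an encode() lambda per version. What the two branches compute, for the ASCII payloads these tests use:

    # Both branches turn a literal "\n" in the argument into a real
    # newline byte; the Python 3 path round-trips through
    # unicode_escape, which is fine for ASCII payloads.
    import sys

    if sys.version_info[0] >= 3:
        encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8')
    else:
        encode = lambda x: x.decode('string_escape')

    assert encode(r'b\nc\n') == b'b\nc\n'
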
--- a/tests/test-mq-qimport.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-mq-qimport.t	Mon Feb 04 20:35:21 2019 +0300
@@ -1,5 +1,9 @@
   $ cat > writelines.py <<EOF
   > import sys
+  > if sys.version_info[0] >= 3:
+  >     encode = lambda x: x.encode('utf-8').decode('unicode_escape').encode('utf-8')
+  > else:
+  >     encode = lambda x: x.decode('string_escape')
   > path = sys.argv[1]
   > args = sys.argv[2:]
   > assert (len(args) % 2) == 0
@@ -8,7 +12,7 @@
   > for i in range(len(args)//2):
   >    count, s = args[2*i:2*i+2]
   >    count = int(count)
-  >    s = s.decode('string_escape')
+  >    s = encode(s)
   >    f.write(s*count)
   > f.close()
   > 
--- a/tests/test-mq-qnew.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-mq-qnew.t	Mon Feb 04 20:35:21 2019 +0300
@@ -305,9 +305,9 @@
   HG: branch 'default'
   HG: no files changed
   ====
-  note: commit message saved in .hg/last-message.txt
   transaction abort!
   rollback completed
+  note: commit message saved in .hg/last-message.txt
   abort: pretxncommit.unexpectedabort hook exited with status 1
   [255]
   $ cat .hg/last-message.txt
--- a/tests/test-mq-subrepo-svn.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-mq-subrepo-svn.t	Mon Feb 04 20:35:21 2019 +0300
@@ -23,11 +23,7 @@
   $ svnadmin create svn-repo-2499
 
   $ SVNREPOPATH=`pwd`/svn-repo-2499/project
-#if windows
-  $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#else
-  $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#endif
+  $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`"
 
   $ mkdir -p svn-project-2499/trunk
   $ svn import -qm 'init project' svn-project-2499 "$SVNREPOURL"
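
The platform-specific #if windows/#else dance for building SVNREPOURL is replaced by a single call to $TESTDIR/svnurlof.py, which also sidesteps the urllib.quote import that moved in Python 3. The script itself is not shown in this hunk; as a rough sketch of the logic such a helper has to centralize (an assumption, not the real file):

    # Sketch of a svnurlof.py-style helper: turn a local path into a
    # file:// URL, percent-encoding it and using an extra slash for
    # Windows drive-letter paths (file:///C:/...).
    import sys

    try:
        from urllib.parse import quote      # Python 3
    except ImportError:
        from urllib import quote            # Python 2

    def svnurl(path):
        path = path.replace('\\', '/')
        prefix = 'file://'
        if len(path) > 1 and path[1] == ':':    # e.g. C:/users/...
            prefix = 'file:///'
        return prefix + quote(path, safe='/:')

    if __name__ == '__main__':
        sys.stdout.write(svnurl(sys.argv[1]))
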
--- a/tests/test-narrow-widen-no-ellipsis.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-narrow-widen-no-ellipsis.t	Mon Feb 04 20:35:21 2019 +0300
@@ -406,7 +406,7 @@
    * bookmark                  11:* (glob)
   $ hg unbundle .hg/strip-backup/*-widen.hg
   abort: .hg/strip-backup/*-widen.hg: $ENOTDIR$ (windows !)
-  abort: $ENOENT$: .hg/strip-backup/*-widen.hg (no-windows !)
+  abort: $ENOENT$: '.hg/strip-backup/*-widen.hg' (no-windows !)
   [255]
   $ hg log -T "{if(ellipsis, '...')}{rev}: {desc}\n"
   11: local
--- a/tests/test-newcgi.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-newcgi.t	Mon Feb 04 20:35:21 2019 +0300
@@ -18,7 +18,7 @@
   > from mercurial.hgweb.request import wsgiapplication
   > 
   > def make_web_app():
-  >     return hgweb("test", "Empty test repository")
+  >     return hgweb(b"test", b"Empty test repository")
   > 
   > wsgicgi.launch(wsgiapplication(make_web_app))
   > HGWEB
@@ -44,7 +44,7 @@
   > from mercurial.hgweb.request import wsgiapplication
   > 
   > def make_web_app():
-  >     return hgwebdir("hgweb.config")
+  >     return hgwebdir(b"hgweb.config")
   > 
   > wsgicgi.launch(wsgiapplication(make_web_app))
   > HGWEBDIR
--- a/tests/test-notify.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-notify.t	Mon Feb 04 20:35:21 2019 +0300
@@ -455,7 +455,7 @@
   > test = False
   > mbox = mbox
   > EOF
-  $ "$PYTHON" -c 'open("a/a", "ab").write("no" * 500 + "\xd1\x84" + "\n")'
+  $ "$PYTHON" -c 'open("a/a", "ab").write(b"no" * 500 + b"\xd1\x84" + b"\n")'
   $ hg --cwd a commit -A -m "long line"
   $ hg --traceback --cwd b pull ../a
   pulling from ../a
--- a/tests/test-oldcgi.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-oldcgi.t	Mon Feb 04 20:35:21 2019 +0300
@@ -55,7 +55,7 @@
   > # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples
   > # or use a dictionary with entries like 'virtual/path': '/real/path'
   > 
-  > h = hgweb.hgwebdir("hgweb.config")
+  > h = hgweb.hgwebdir(b"hgweb.config")
   > h.run()
   > HGWEBDIR
 
--- a/tests/test-parseindex.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-parseindex.t	Mon Feb 04 20:35:21 2019 +0300
@@ -27,7 +27,7 @@
   
   $ cat >> test.py << EOF
   > from __future__ import print_function
-  > from mercurial import changelog, node, vfs
+  > from mercurial import changelog, node, pycompat, vfs
   > 
   > class singlebyteread(object):
   >     def __init__(self, real):
@@ -55,10 +55,10 @@
   >         return singlebyteread(f)
   >     return wrapper
   > 
-  > cl = changelog.changelog(opener('.hg/store'))
+  > cl = changelog.changelog(opener(b'.hg/store'))
   > print(len(cl), 'revisions:')
   > for r in cl:
-  >     print(node.short(cl.node(r)))
+  >     print(pycompat.sysstr(node.short(cl.node(r))))
   > EOF
   $ "$PYTHON" test.py
   2 revisions:
@@ -76,7 +76,7 @@
   $ "$PYTHON" <<EOF
   > from __future__ import print_function
   > from mercurial import changelog, vfs
-  > cl = changelog.changelog(vfs.vfs('.hg/store'))
+  > cl = changelog.changelog(vfs.vfs(b'.hg/store'))
   > print('good heads:')
   > for head in [0, len(cl) - 1, -1]:
   >     print('%s: %r' % (head, cl.reachableroots(0, [head], [0])))
@@ -112,7 +112,7 @@
   10000: head out of range
   -2: head out of range
   -10000: head out of range
-  None: an integer is required
+  None: an integer is required( .got type NoneType.)? (re)
   good roots:
   0: [0]
   1: [1]
@@ -123,7 +123,7 @@
   -2: []
   -10000: []
   bad roots:
-  None: an integer is required
+  None: an integer is required( .got type NoneType.)? (re)
 
   $ cd ..
 
@@ -178,8 +178,8 @@
   $ cat <<EOF > test.py
   > from __future__ import print_function
   > import sys
-  > from mercurial import changelog, vfs
-  > cl = changelog.changelog(vfs.vfs(sys.argv[1]))
+  > from mercurial import changelog, pycompat, vfs
+  > cl = changelog.changelog(vfs.vfs(pycompat.fsencode(sys.argv[1])))
   > n0, n1 = cl.node(0), cl.node(1)
   > ops = [
   >     ('reachableroots',
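
The two "None: an integer is required( .got type NoneType.)? (re)" lines rely on the test runner's (re) flag, which makes a single expected line match both the Python 2 TypeError text and the longer Python 3 one. Checking the pattern by hand:

    # The "(re)" flag turns the expected line into a regular expression;
    # this one accepts either phrasing of the TypeError for a None head.
    import re

    pattern = r'an integer is required( .got type NoneType.)?'

    assert re.match(pattern, 'an integer is required')                      # Python 2
    assert re.match(pattern, 'an integer is required (got type NoneType)')  # Python 3
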
--- a/tests/test-permissions.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-permissions.t	Mon Feb 04 20:35:21 2019 +0300
@@ -22,7 +22,7 @@
   checking manifests
   crosschecking files in changesets and manifests
   checking files
-  abort: Permission denied: $TESTTMP/t/.hg/store/data/a.i
+  abort: Permission denied: '$TESTTMP/t/.hg/store/data/a.i'
   [255]
 
   $ chmod +r .hg/store/data/a.i
@@ -39,7 +39,7 @@
   $ echo barber > a
   $ hg commit -m "2"
   trouble committing a!
-  abort: Permission denied: $TESTTMP/t/.hg/store/data/a.i
+  abort: Permission denied: '$TESTTMP/t/.hg/store/data/a.i'
   [255]
 
   $ chmod -w .
--- a/tests/test-remotefilelog-cacheprocess.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-remotefilelog-cacheprocess.t	Mon Feb 04 20:35:21 2019 +0300
@@ -56,11 +56,11 @@
   >                 log('requested %r\n' % key)
   >             sys.stdout.flush()
   >         elif cmd == 'set':
-  >             assert False, 'todo writing'
+  >             raise Exception('todo writing')
   >         else:
-  >             assert False, 'unknown command! %r' % cmd
+  >             raise Exception('unknown command! %r' % cmd)
   > except Exception as e:
-  >     log('Exception! %r\n' % e)
+  >     log('Exception! %s\n' % e)
   >     raise
   > EOF
 
@@ -79,7 +79,7 @@
   requested 'master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
   requested 'master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'
   got command 'set'
-  Exception! AssertionError('todo writing',)
+  Exception! todo writing
 
 Test cache hits.
   $ mv hgcache oldhgcache
@@ -110,7 +110,7 @@
   requested 'y\x00master/95/cb0bfd2977c761298d9624e4b4d4c72a39974a/076f5e2225b3ff0400b98c92aa6cdf403ee24cca'
   requested 'z\x00master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a'
   got command 'set'
-  Exception! AssertionError('todo writing',)
+  Exception! todo writing
 
 Test cache hits with includepath.
   $ mv hgcache oldhgcache
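
Replacing the bare asserts with raise Exception(...) and logging the exception with %s instead of %r makes the cache-process log stable across versions: str() of an exception is just its message everywhere, while repr() varies (the trailing comma in Exception('todo writing',) went away in Python 3.7, and the old assert produced an AssertionError besides). That is why the expected lines above shrink to "Exception! todo writing":

    # str() of an exception is version-independent, repr() is not; the
    # log helper therefore switched from %r to %s.
    e = Exception('todo writing')

    print('Exception! %s' % e)   # Exception! todo writing      (all versions)
    print('Exception! %r' % e)   # Exception('todo writing',) or
                                 # Exception('todo writing'), depending on version
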
--- a/tests/test-remotefilelog-datapack.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-remotefilelog-datapack.py	Mon Feb 04 20:35:21 2019 +0300
@@ -40,7 +40,7 @@
             shutil.rmtree(d)
 
     def makeTempDir(self):
-        tempdir = tempfile.mkdtemp()
+        tempdir = pycompat.bytestr(tempfile.mkdtemp())
         self.tempdirs.append(tempdir)
         return tempdir
 
@@ -48,11 +48,12 @@
         return hashlib.sha1(content).digest()
 
     def getFakeHash(self):
-        return ''.join(chr(random.randint(0, 255)) for _ in range(20))
+        return b''.join(pycompat.bytechr(random.randint(0, 255))
+                        for _ in range(20))
 
     def createPack(self, revisions=None, packdir=None):
         if revisions is None:
-            revisions = [("filename", self.getFakeHash(), nullid, "content")]
+            revisions = [(b"filename", self.getFakeHash(), nullid, b"content")]
 
         if packdir is None:
             packdir = self.makeTempDir()
@@ -73,23 +74,23 @@
     def _testAddSingle(self, content):
         """Test putting a simple blob into a pack and reading it out.
         """
-        filename = "foo"
+        filename = b"foo"
         node = self.getHash(content)
 
         revisions = [(filename, node, nullid, content)]
         pack = self.createPack(revisions)
         if self.paramsavailable:
-            self.assertEquals(pack.params.fanoutprefix,
-                              basepack.SMALLFANOUTPREFIX)
+            self.assertEqual(pack.params.fanoutprefix,
+                             basepack.SMALLFANOUTPREFIX)
 
         chain = pack.getdeltachain(filename, node)
-        self.assertEquals(content, chain[0][4])
+        self.assertEqual(content, chain[0][4])
 
     def testAddSingle(self):
-        self._testAddSingle('')
+        self._testAddSingle(b'')
 
     def testAddSingleEmpty(self):
-        self._testAddSingle('abcdef')
+        self._testAddSingle(b'abcdef')
 
     def testAddMultiple(self):
         """Test putting multiple unrelated blobs into a pack and reading them
@@ -97,8 +98,8 @@
         """
         revisions = []
         for i in range(10):
-            filename = "foo%s" % i
-            content = "abcdef%s" % i
+            filename = b"foo%d" % i
+            content = b"abcdef%d" % i
             node = self.getHash(content)
             revisions.append((filename, node, self.getFakeHash(), content))
 
@@ -106,19 +107,19 @@
 
         for filename, node, base, content in revisions:
             entry = pack.getdelta(filename, node)
-            self.assertEquals((content, filename, base, {}), entry)
+            self.assertEqual((content, filename, base, {}), entry)
 
             chain = pack.getdeltachain(filename, node)
-            self.assertEquals(content, chain[0][4])
+            self.assertEqual(content, chain[0][4])
 
     def testAddDeltas(self):
         """Test putting multiple delta blobs into a pack and read the chain.
         """
         revisions = []
-        filename = "foo"
+        filename = b"foo"
         lastnode = nullid
         for i in range(10):
-            content = "abcdef%s" % i
+            content = b"abcdef%d" % i
             node = self.getHash(content)
             revisions.append((filename, node, lastnode, content))
             lastnode = node
@@ -127,13 +128,13 @@
 
         entry = pack.getdelta(filename, revisions[0][1])
         realvalue = (revisions[0][3], filename, revisions[0][2], {})
-        self.assertEquals(entry, realvalue)
+        self.assertEqual(entry, realvalue)
 
         # Test that the chain for the final entry has all the others
         chain = pack.getdeltachain(filename, node)
         for i in range(10):
-            content = "abcdef%s" % i
-            self.assertEquals(content, chain[-i - 1][4])
+            content = b"abcdef%d" % i
+            self.assertEqual(content, chain[-i - 1][4])
 
     def testPackMany(self):
         """Pack many related and unrelated objects.
@@ -143,10 +144,10 @@
         blobs = {}
         random.seed(0)
         for i in range(100):
-            filename = "filename-%s" % i
+            filename = b"filename-%d" % i
             filerevs = []
             for j in range(random.randint(1, 100)):
-                content = "content-%s" % j
+                content = b"content-%d" % j
                 node = self.getHash(content)
                 lastnode = nullid
                 if len(filerevs) > 0:
@@ -158,22 +159,22 @@
         pack = self.createPack(revisions)
 
         # Verify the pack contents
-        for (filename, node, lastnode), content in sorted(blobs.iteritems()):
+        for (filename, node, lastnode), content in sorted(blobs.items()):
             chain = pack.getdeltachain(filename, node)
             for entry in chain:
                 expectedcontent = blobs[(entry[0], entry[1], entry[3])]
-                self.assertEquals(entry[4], expectedcontent)
+                self.assertEqual(entry[4], expectedcontent)
 
     def testPackMetadata(self):
         revisions = []
         for i in range(100):
-            filename = '%s.txt' % i
-            content = 'put-something-here \n' * i
+            filename = b'%d.txt' % i
+            content = b'put-something-here \n' * i
             node = self.getHash(content)
             meta = {constants.METAKEYFLAG: i ** 4,
                     constants.METAKEYSIZE: len(content),
-                    'Z': 'random_string',
-                    '_': '\0' * i}
+                    b'Z': b'random_string',
+                    b'_': b'\0' * i}
             revisions.append((filename, node, nullid, content, meta))
         pack = self.createPack(revisions)
         for name, node, x, content, origmeta in revisions:
@@ -181,50 +182,51 @@
             # flag == 0 should be optimized out
             if origmeta[constants.METAKEYFLAG] == 0:
                 del origmeta[constants.METAKEYFLAG]
-            self.assertEquals(parsedmeta, origmeta)
+            self.assertEqual(parsedmeta, origmeta)
 
     def testGetMissing(self):
         """Test the getmissing() api.
         """
         revisions = []
-        filename = "foo"
+        filename = b"foo"
         lastnode = nullid
         for i in range(10):
-            content = "abcdef%s" % i
+            content = b"abcdef%d" % i
             node = self.getHash(content)
             revisions.append((filename, node, lastnode, content))
             lastnode = node
 
         pack = self.createPack(revisions)
 
-        missing = pack.getmissing([("foo", revisions[0][1])])
+        missing = pack.getmissing([(b"foo", revisions[0][1])])
         self.assertFalse(missing)
 
-        missing = pack.getmissing([("foo", revisions[0][1]),
-                                   ("foo", revisions[1][1])])
+        missing = pack.getmissing([(b"foo", revisions[0][1]),
+                                   (b"foo", revisions[1][1])])
         self.assertFalse(missing)
 
         fakenode = self.getFakeHash()
-        missing = pack.getmissing([("foo", revisions[0][1]), ("foo", fakenode)])
-        self.assertEquals(missing, [("foo", fakenode)])
+        missing = pack.getmissing([(b"foo", revisions[0][1]),
+                                   (b"foo", fakenode)])
+        self.assertEqual(missing, [(b"foo", fakenode)])
 
     def testAddThrows(self):
         pack = self.createPack()
 
         try:
-            pack.add('filename', nullid, 'contents')
+            pack.add(b'filename', nullid, b'contents')
             self.assertTrue(False, "datapack.add should throw")
         except RuntimeError:
             pass
 
     def testBadVersionThrows(self):
         pack = self.createPack()
-        path = pack.path + '.datapack'
-        with open(path) as f:
+        path = pack.path + b'.datapack'
+        with open(path, 'rb') as f:
             raw = f.read()
         raw = struct.pack('!B', 255) + raw[1:]
         os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
-        with open(path, 'w+') as f:
+        with open(path, 'wb+') as f:
             f.write(raw)
 
         try:
@@ -235,10 +237,10 @@
 
     def testMissingDeltabase(self):
         fakenode = self.getFakeHash()
-        revisions = [("filename", fakenode, self.getFakeHash(), "content")]
+        revisions = [(b"filename", fakenode, self.getFakeHash(), b"content")]
         pack = self.createPack(revisions)
-        chain = pack.getdeltachain("filename", fakenode)
-        self.assertEquals(len(chain), 1)
+        chain = pack.getdeltachain(b"filename", fakenode)
+        self.assertEqual(len(chain), 1)
 
     def testLargePack(self):
         """Test creating and reading from a large pack with over X entries.
@@ -247,7 +249,7 @@
         blobs = {}
         total = basepack.SMALLFANOUTCUTOFF + 1
         for i in pycompat.xrange(total):
-            filename = "filename-%s" % i
+            filename = b"filename-%d" % i
             content = filename
             node = self.getHash(content)
             blobs[(filename, node)] = content
@@ -255,12 +257,12 @@
 
         pack = self.createPack(revisions)
         if self.paramsavailable:
-            self.assertEquals(pack.params.fanoutprefix,
-                              basepack.LARGEFANOUTPREFIX)
+            self.assertEqual(pack.params.fanoutprefix,
+                             basepack.LARGEFANOUTPREFIX)
 
-        for (filename, node), content in blobs.iteritems():
+        for (filename, node), content in blobs.items():
             actualcontent = pack.getdeltachain(filename, node)[0][4]
-            self.assertEquals(actualcontent, content)
+            self.assertEqual(actualcontent, content)
 
     def testPacksCache(self):
         """Test that we remember the most recent packs while fetching the delta
@@ -274,12 +276,12 @@
 
         for i in range(numpacks):
             chain = []
-            revision = (str(i), self.getFakeHash(), nullid, "content")
+            revision = (b'%d' % i, self.getFakeHash(), nullid, b"content")
 
             for _ in range(revisionsperpack):
                 chain.append(revision)
                 revision = (
-                    str(i),
+                    b'%d' % i,
                     self.getFakeHash(),
                     revision[1],
                     self.getFakeHash()
@@ -290,7 +292,7 @@
 
         class testdatapackstore(datapack.datapackstore):
             # Ensures that we are not keeping everything in the cache.
-            DEFAULTCACHESIZE = numpacks / 2
+            DEFAULTCACHESIZE = numpacks // 2
 
         store = testdatapackstore(uimod.ui(), packdir)
 
@@ -300,12 +302,12 @@
             chain = store.getdeltachain(revision[0], revision[1])
 
             mostrecentpack = next(iter(store.packs), None)
-            self.assertEquals(
+            self.assertEqual(
                 mostrecentpack.getdeltachain(revision[0], revision[1]),
                 chain
             )
 
-            self.assertEquals(randomchain.index(revision) + 1, len(chain))
+            self.assertEqual(randomchain.index(revision) + 1, len(chain))
 
     # perf test off by default since it's slow
     def _testIndexPerf(self):
@@ -330,8 +332,8 @@
         for packsize in packsizes:
             revisions = []
             for i in pycompat.xrange(packsize):
-                filename = "filename-%s" % i
-                content = "content-%s" % i
+                filename = b"filename-%d" % i
+                content = b"content-%d" % i
                 node = self.getHash(content)
                 revisions.append((filename, node, nullid, content))
 
@@ -350,9 +352,9 @@
                 start = time.time()
                 pack.getmissing(findnodes[:lookupsize])
                 elapsed = time.time() - start
-                print ("%s pack %s lookups = %0.04f" %
-                       (('%s' % packsize).rjust(7),
-                        ('%s' % lookupsize).rjust(7),
+                print ("%s pack %d lookups = %0.04f" %
+                       (('%d' % packsize).rjust(7),
+                        ('%d' % lookupsize).rjust(7),
                         elapsed))
 
             print("")
--- a/tests/test-remotefilelog-gc.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-remotefilelog-gc.t	Mon Feb 04 20:35:21 2019 +0300
@@ -107,6 +107,7 @@
 # Test that warning is displayed when the repo path is malformed
 
   $ printf "asdas\0das" >> $CACHEDIR/repos
-  $ hg gc 2>&1 | head -n2
-  warning: malformed path: * (glob)
-  Traceback (most recent call last):
+  $ hg gc
+  abort: invalid path asdas\x00da: stat: embedded null character in path (esc) (py3 !)
+  abort: invalid path asdas\x00da: stat() argument 1 must be encoded string without null bytes, not str (esc) (no-py3 !)
+  [255]
--- a/tests/test-remotefilelog-histpack.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-remotefilelog-histpack.py	Mon Feb 04 20:35:21 2019 +0300
@@ -52,7 +52,7 @@
         node, p1node, p2node, and linknode.
         """
         if revisions is None:
-            revisions = [("filename", self.getFakeHash(), nullid, nullid,
+            revisions = [(b"filename", self.getFakeHash(), nullid, nullid,
                           self.getFakeHash(), None)]
 
         packdir = pycompat.fsencode(self.makeTempDir())
@@ -68,7 +68,7 @@
     def testAddSingle(self):
         """Test putting a single entry into a pack and reading it out.
         """
-        filename = "foo"
+        filename = b"foo"
         node = self.getFakeHash()
         p1 = self.getFakeHash()
         p2 = self.getFakeHash()
@@ -78,9 +78,9 @@
         pack = self.createPack(revisions)
 
         actual = pack.getancestors(filename, node)[node]
-        self.assertEquals(p1, actual[0])
-        self.assertEquals(p2, actual[1])
-        self.assertEquals(linknode, actual[2])
+        self.assertEqual(p1, actual[0])
+        self.assertEqual(p2, actual[1])
+        self.assertEqual(linknode, actual[2])
 
     def testAddMultiple(self):
         """Test putting multiple unrelated revisions into a pack and reading
@@ -88,7 +88,7 @@
         """
         revisions = []
         for i in range(10):
-            filename = "foo-%s" % i
+            filename = b"foo-%d" % i
             node = self.getFakeHash()
             p1 = self.getFakeHash()
             p2 = self.getFakeHash()
@@ -99,10 +99,10 @@
 
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             actual = pack.getancestors(filename, node)[node]
-            self.assertEquals(p1, actual[0])
-            self.assertEquals(p2, actual[1])
-            self.assertEquals(linknode, actual[2])
-            self.assertEquals(copyfrom, actual[3])
+            self.assertEqual(p1, actual[0])
+            self.assertEqual(p2, actual[1])
+            self.assertEqual(linknode, actual[2])
+            self.assertEqual(copyfrom, actual[3])
 
     def testAddAncestorChain(self):
         """Test putting multiple revisions in into a pack and read the ancestor
@@ -124,10 +124,10 @@
         ancestors = pack.getancestors(revisions[0][0], revisions[0][1])
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             ap1, ap2, alinknode, acopyfrom = ancestors[node]
-            self.assertEquals(ap1, p1)
-            self.assertEquals(ap2, p2)
-            self.assertEquals(alinknode, linknode)
-            self.assertEquals(acopyfrom, copyfrom)
+            self.assertEqual(ap1, p1)
+            self.assertEqual(ap2, p2)
+            self.assertEqual(alinknode, linknode)
+            self.assertEqual(acopyfrom, copyfrom)
 
     def testPackMany(self):
         """Pack many related and unrelated ancestors.
@@ -161,16 +161,16 @@
         pack = self.createPack(revisions)
 
         # Verify the pack contents
-        for (filename, node), (p1, p2, lastnode) in allentries.items():
+        for (filename, node) in allentries:
             ancestors = pack.getancestors(filename, node)
-            self.assertEquals(ancestorcounts[(filename, node)],
-                              len(ancestors))
+            self.assertEqual(ancestorcounts[(filename, node)],
+                             len(ancestors))
             for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
                 ep1, ep2, elinknode = allentries[(filename, anode)]
-                self.assertEquals(ap1, ep1)
-                self.assertEquals(ap2, ep2)
-                self.assertEquals(alinknode, elinknode)
-                self.assertEquals(copyfrom, None)
+                self.assertEqual(ap1, ep1)
+                self.assertEqual(ap2, ep2)
+                self.assertEqual(alinknode, elinknode)
+                self.assertEqual(copyfrom, None)
 
     def testGetNodeInfo(self):
         revisions = []
@@ -186,10 +186,10 @@
         # Test that getnodeinfo returns the expected results
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
-            self.assertEquals(ap1, p1)
-            self.assertEquals(ap2, p2)
-            self.assertEquals(alinknode, linknode)
-            self.assertEquals(acopyfrom, copyfrom)
+            self.assertEqual(ap1, p1)
+            self.assertEqual(ap2, p2)
+            self.assertEqual(alinknode, linknode)
+            self.assertEqual(acopyfrom, copyfrom)
 
     def testGetMissing(self):
         """Test the getmissing() api.
@@ -215,11 +215,11 @@
         fakenode = self.getFakeHash()
         missing = pack.getmissing([(filename, revisions[0][1]),
                                    (filename, fakenode)])
-        self.assertEquals(missing, [(filename, fakenode)])
+        self.assertEqual(missing, [(filename, fakenode)])
 
         # Test getmissing on a non-existent filename
-        missing = pack.getmissing([("bar", fakenode)])
-        self.assertEquals(missing, [("bar", fakenode)])
+        missing = pack.getmissing([(b"bar", fakenode)])
+        self.assertEqual(missing, [(b"bar", fakenode)])
 
     def testAddThrows(self):
         pack = self.createPack()
@@ -232,12 +232,12 @@
 
     def testBadVersionThrows(self):
         pack = self.createPack()
-        path = pack.path + '.histpack'
-        with open(path) as f:
+        path = pack.path + b'.histpack'
+        with open(path, 'rb') as f:
             raw = f.read()
         raw = struct.pack('!B', 255) + raw[1:]
         os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
-        with open(path, 'w+') as f:
+        with open(path, 'wb+') as f:
             f.write(raw)
 
         try:
@@ -260,14 +260,14 @@
             revisions.append((filename, node, p1, p2, linknode, None))
 
         pack = self.createPack(revisions)
-        self.assertEquals(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX)
+        self.assertEqual(pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX)
 
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             actual = pack.getancestors(filename, node)[node]
-            self.assertEquals(p1, actual[0])
-            self.assertEquals(p2, actual[1])
-            self.assertEquals(linknode, actual[2])
-            self.assertEquals(copyfrom, actual[3])
+            self.assertEqual(p1, actual[0])
+            self.assertEqual(p2, actual[1])
+            self.assertEqual(linknode, actual[2])
+            self.assertEqual(copyfrom, actual[3])
 # TODO:
 # histpack store:
 # - repack two packs into one
--- a/tests/test-repair-strip.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-repair-strip.t	Mon Feb 04 20:35:21 2019 +0300
@@ -53,7 +53,7 @@
   rollback failed - please run hg recover
   (failure reason: [Errno 13] Permission denied .hg/store/data/b.i')
   strip failed, backup bundle
-  abort: Permission denied .hg/store/data/b.i
+  abort: Permission denied .hg/store/data/b.i'
   % after update 0, strip 2
   abandoned transaction found - run hg recover
   checking changesets
@@ -85,7 +85,7 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     a
   
-  abort: Permission denied .hg/store/data/b.i
+  abort: Permission denied .hg/store/data/b.i'
   % after update 0, strip 2
   checking changesets
   checking manifests
@@ -107,7 +107,7 @@
   rollback failed - please run hg recover
   (failure reason: [Errno 13] Permission denied .hg/store/00manifest.i')
   strip failed, backup bundle
-  abort: Permission denied .hg/store/00manifest.i
+  abort: Permission denied .hg/store/00manifest.i'
   % after update 0, strip 2
   abandoned transaction found - run hg recover
   checking changesets
--- a/tests/test-resolve.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-resolve.t	Mon Feb 04 20:35:21 2019 +0300
@@ -67,6 +67,9 @@
   $ hg resolve -l
   R file1
   U file2
+  $ hg resolve -l --config ui.relative-paths=yes
+  R ../file1
+  U ../file2
   $ hg resolve --re-merge filez file2
   arguments do not match paths that need resolving
   (try: hg resolve --re-merge path:filez path:file2)
--- a/tests/test-revert-interactive.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-revert-interactive.t	Mon Feb 04 20:35:21 2019 +0300
@@ -424,3 +424,24 @@
   b: no such file in rev b40d1912accf
 
   $ cd ..
+
+Prompt before undeleting a file (issue6008)
+  $ hg init repo
+  $ cd repo
+  $ echo a > a
+  $ hg ci -qAm a
+  $ hg rm a
+  $ hg revert -i<<EOF
+  > y
+  > EOF
+  add back removed file a (Yn)? y
+  undeleting a
+  $ ls
+  a
+  $ hg rm a
+  $ hg revert -i<<EOF
+  > n
+  > EOF
+  add back removed file a (Yn)? n
+  $ ls
+  $ cd ..
--- a/tests/test-revlog-raw.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-revlog-raw.py	Mon Feb 04 20:35:21 2019 +0300
@@ -417,7 +417,6 @@
         print('  got:      %s' % result15)
 
 def maintest():
-    expected = rl = None
     with newtransaction() as tr:
         rl = newrevlog(recreate=True)
         expected = writecases(rl, tr)
--- a/tests/test-revset.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-revset.t	Mon Feb 04 20:35:21 2019 +0300
@@ -648,6 +648,9 @@
   $ hg debugrevspec '.#generations[1-2]'
   hg: parse error: relation subscript must be an integer
   [255]
+  $ hg debugrevspec '.#generations[foo:bar]'
+  hg: parse error: relation subscript bounds must be integers
+  [255]
 
 suggested relations
 
@@ -1274,6 +1277,31 @@
   $ log '.#g[(-1)]'
   8
 
+  $ log '6#generations[0:1]'
+  6
+  7
+  $ log '6#generations[-1:1]'
+  4
+  5
+  6
+  7
+  $ log '6#generations[0:]'
+  6
+  7
+  $ log '5#generations[:0]'
+  0
+  1
+  3
+  5
+  $ log '3#generations[:]'
+  0
+  1
+  3
+  5
+  6
+  7
+  $ log 'tip#generations[1:-1]'
+
   $ hg debugrevspec -p parsed 'roots(:)#g[2]'
   * parsed:
   (relsubscript
--- a/tests/test-revset2.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-revset2.t	Mon Feb 04 20:35:21 2019 +0300
@@ -1525,8 +1525,8 @@
   $ hg init problematicencoding
   $ cd problematicencoding
 
-  $ "$PYTHON" > setup.sh <<EOF
-  > print(u'''
+  $ "$PYTHON" <<EOF
+  > open('setup.sh', 'wb').write(u'''
   > echo a > text
   > hg add text
   > hg --encoding utf-8 commit -u '\u30A2' -m none
@@ -1541,8 +1541,8 @@
   $ sh < setup.sh
 
 test in problematic encoding
-  $ "$PYTHON" > test.sh <<EOF
-  > print(u'''
+  $ "$PYTHON" <<EOF
+  > open('test.sh', 'wb').write(u'''
   > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30A2)'
   > echo ====
   > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30C2)'
--- a/tests/test-rollback.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-rollback.t	Mon Feb 04 20:35:21 2019 +0300
@@ -113,9 +113,9 @@
   > echo "another precious commit message" > "$1"
   > __EOF__
   $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg --config hooks.pretxncommit=false commit 2>&1
-  note: commit message saved in .hg/last-message.txt
   transaction abort!
   rollback completed
+  note: commit message saved in .hg/last-message.txt
   abort: pretxncommit hook exited with status * (glob)
   [255]
   $ cat .hg/last-message.txt
--- a/tests/test-run-tests.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-run-tests.t	Mon Feb 04 20:35:21 2019 +0300
@@ -324,8 +324,8 @@
   
   ERROR: test-failure-unicode.t output changed
   !
+  Failed test-failure-unicode.t: output changed
   Failed test-failure.t: output changed
-  Failed test-failure-unicode.t: output changed
   # Ran 3 tests, 0 skipped, 2 failed.
   python hash seed: * (glob)
   [1]
@@ -356,8 +356,8 @@
   
   ERROR: test-failure-unicode.t output changed
   !
+  Failed test-failure-unicode.t: output changed
   Failed test-failure.t: output changed
-  Failed test-failure-unicode.t: output changed
   # Ran 3 tests, 0 skipped, 2 failed.
   python hash seed: * (glob)
   [1]
@@ -393,8 +393,8 @@
   
   ERROR: test-failure-unicode.t output changed
   !
+  Failed test-failure-unicode.t: output changed
   Failed test-failure.t: output changed
-  Failed test-failure-unicode.t: output changed
   # Ran 3 tests, 0 skipped, 2 failed.
   python hash seed: * (glob)
   [1]
--- a/tests/test-split.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-split.t	Mon Feb 04 20:35:21 2019 +0300
@@ -103,6 +103,12 @@
   abort: cannot split multiple revisions
   [255]
 
+This function splits in a somewhat odd way, primarily to avoid changing the
+behavior of the test after a bug fix in how split/commit --interactive handled
+`diff.unified=0`: when there were no context lines, only the last diff hunk was
+kept. When running split, that meant runsplit always recorded three commits,
+one for each diff hunk, in reverse order (the base commit was the last diff
+hunk in the file).
   $ runsplit() {
   > cat > $TESTTMP/messages <<EOF
   > split 1
@@ -113,8 +119,11 @@
   > EOF
   > cat <<EOF | hg split "$@"
   > y
+  > n
+  > n
   > y
   > y
+  > n
   > y
   > y
   > y
@@ -123,13 +132,23 @@
 
   $ HGEDITOR=false runsplit
   diff --git a/a b/a
-  1 hunks, 1 lines changed
+  3 hunks, 3 lines changed
   examine changes to 'a'? [Ynesfdaq?] y
   
+  @@ -1,1 +1,1 @@
+  -1
+  +11
+  record change 1/3 to 'a'? [Ynesfdaq?] n
+  
+  @@ -3,1 +3,1 @@ 2
+  -3
+  +33
+  record change 2/3 to 'a'? [Ynesfdaq?] n
+  
   @@ -5,1 +5,1 @@ 4
   -5
   +55
-  record this change to 'a'? [Ynesfdaq?] y
+  record change 3/3 to 'a'? [Ynesfdaq?] y
   
   transaction abort!
   rollback completed
@@ -140,13 +159,23 @@
   $ HGEDITOR="\"$PYTHON\" $TESTTMP/editor.py"
   $ runsplit
   diff --git a/a b/a
-  1 hunks, 1 lines changed
+  3 hunks, 3 lines changed
   examine changes to 'a'? [Ynesfdaq?] y
   
+  @@ -1,1 +1,1 @@
+  -1
+  +11
+  record change 1/3 to 'a'? [Ynesfdaq?] n
+  
+  @@ -3,1 +3,1 @@ 2
+  -3
+  +33
+  record change 2/3 to 'a'? [Ynesfdaq?] n
+  
   @@ -5,1 +5,1 @@ 4
   -5
   +55
-  record this change to 'a'? [Ynesfdaq?] y
+  record change 3/3 to 'a'? [Ynesfdaq?] y
   
   EDITOR: HG: Splitting 1df0d5c5a3ab. Write commit message for the first split changeset.
   EDITOR: a2
@@ -160,13 +189,18 @@
   EDITOR: HG: changed a
   created new head
   diff --git a/a b/a
-  1 hunks, 1 lines changed
+  2 hunks, 2 lines changed
   examine changes to 'a'? [Ynesfdaq?] y
   
+  @@ -1,1 +1,1 @@
+  -1
+  +11
+  record change 1/2 to 'a'? [Ynesfdaq?] n
+  
   @@ -3,1 +3,1 @@ 2
   -3
   +33
-  record this change to 'a'? [Ynesfdaq?] y
+  record change 2/2 to 'a'? [Ynesfdaq?] y
   
   EDITOR: HG: Splitting 1df0d5c5a3ab. So far it has been split into:
   EDITOR: HG: - e704349bd21b: split 1
--- a/tests/test-ssh-repoerror.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-ssh-repoerror.t	Mon Feb 04 20:35:21 2019 +0300
@@ -34,7 +34,7 @@
   > done
 
   $ hg id ssh://user@dummy/other
-  remote: abort: Permission denied: $TESTTMP/other/.hg/requires
+  remote: abort: Permission denied: '$TESTTMP/other/.hg/requires'
   abort: no suitable response from remote hg!
   [255]
 
--- a/tests/test-static-http.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-static-http.t	Mon Feb 04 20:35:21 2019 +0300
@@ -227,9 +227,11 @@
   /.hg/requires
   /.hg/store/00changelog.i
   /.hg/store/00manifest.i
-  /.hg/store/data/%7E2ehgsub.i
-  /.hg/store/data/%7E2ehgsubstate.i
+  /.hg/store/data/%7E2ehgsub.i (no-py37 !)
+  /.hg/store/data/%7E2ehgsubstate.i (no-py37 !)
   /.hg/store/data/a.i
+  /.hg/store/data/~2ehgsub.i (py37 !)
+  /.hg/store/data/~2ehgsubstate.i (py37 !)
   /notarepo/.hg/00changelog.i
   /notarepo/.hg/requires
   /remote-with-names/.hg/bookmarks
@@ -243,8 +245,9 @@
   /remote-with-names/.hg/requires
   /remote-with-names/.hg/store/00changelog.i
   /remote-with-names/.hg/store/00manifest.i
-  /remote-with-names/.hg/store/data/%7E2ehgtags.i
+  /remote-with-names/.hg/store/data/%7E2ehgtags.i (no-py37 !)
   /remote-with-names/.hg/store/data/foo.i
+  /remote-with-names/.hg/store/data/~2ehgtags.i (py37 !)
   /remote/.hg/bookmarks
   /remote/.hg/bookmarks.current
   /remote/.hg/cache/branch2-base
@@ -258,10 +261,12 @@
   /remote/.hg/requires
   /remote/.hg/store/00changelog.i
   /remote/.hg/store/00manifest.i
-  /remote/.hg/store/data/%7E2edotfile%20with%20spaces.i
-  /remote/.hg/store/data/%7E2ehgtags.i
+  /remote/.hg/store/data/%7E2edotfile%20with%20spaces.i (no-py37 !)
+  /remote/.hg/store/data/%7E2ehgtags.i (no-py37 !)
   /remote/.hg/store/data/bar.i
   /remote/.hg/store/data/quux.i
+  /remote/.hg/store/data/~2edotfile%20with%20spaces.i (py37 !)
+  /remote/.hg/store/data/~2ehgtags.i (py37 !)
   /remotempty/.hg/bookmarks
   /remotempty/.hg/bookmarks.current
   /remotempty/.hg/requires
@@ -275,5 +280,6 @@
   /sub/.hg/requires
   /sub/.hg/store/00changelog.i
   /sub/.hg/store/00manifest.i
-  /sub/.hg/store/data/%7E2ehgtags.i
+  /sub/.hg/store/data/%7E2ehgtags.i (no-py37 !)
   /sub/.hg/store/data/test.i
+  /sub/.hg/store/data/~2ehgtags.i (py37 !)
--- a/tests/test-status.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-status.t	Mon Feb 04 20:35:21 2019 +0300
@@ -133,6 +133,22 @@
 relative paths can be requested
 
   $ cat >> $HGRCPATH <<EOF
+  > [ui]
+  > relative-paths = True
+  > EOF
+  $ hg status --cwd a
+  ? 1/in_a_1
+  ? in_a
+  ? ../b/1/in_b_1
+  ? ../b/2/in_b_2
+  ? ../b/in_b
+  ? ../in_root
+
+commands.status.relative overrides ui.relative-paths
+
+  $ cat >> $HGRCPATH <<EOF
+  > [ui]
+  > relative-paths = False
   > [commands]
   > status.relative = True
   > EOF
--- a/tests/test-subrepo-svn.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-subrepo-svn.t	Mon Feb 04 20:35:21 2019 +0300
@@ -1,11 +1,7 @@
 #require svn15
 
   $ SVNREPOPATH=`pwd`/svn-repo
-#if windows
-  $ SVNREPOURL=file:///`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#else
-  $ SVNREPOURL=file://`"$PYTHON" -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
-#endif
+  $ SVNREPOURL="`"$PYTHON" $TESTDIR/svnurlof.py \"$SVNREPOPATH\"`"
 
   $ filter_svn_output () {
   >     egrep -v 'Committing|Transmitting|Updating|(^$)' || true
--- a/tests/test-tag.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-tag.t	Mon Feb 04 20:35:21 2019 +0300
@@ -320,9 +320,9 @@
   HG: branch 'tag-and-branch-same-name'
   HG: changed .hgtags
   ====
-  note: commit message saved in .hg/last-message.txt
   transaction abort!
   rollback completed
+  note: commit message saved in .hg/last-message.txt
   abort: pretxncommit.unexpectedabort hook exited with status 1
   [255]
   $ cat .hg/last-message.txt
--- a/tests/test-tags.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-tags.t	Mon Feb 04 20:35:21 2019 +0300
@@ -759,3 +759,69 @@
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ (cd tags-local-clone/.hg/cache/; ls -1 tag*)
   tags2-visible
+
+Avoid writing new log entries when trying to delete an already deleted tag
+  $ hg init issue5752
+  $ cd issue5752
+  $ echo > a
+  $ hg commit -Am 'add a'
+  adding a
+  $ hg tag a
+  $ hg tags
+  tip                                1:bd7ee4f3939b
+  a                                  0:a8a82d372bb3
+  $ hg log
+  changeset:   1:bd7ee4f3939b
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Added tag a for changeset a8a82d372bb3
+  
+  changeset:   0:a8a82d372bb3
+  tag:         a
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     add a
+  
+  $ hg tag --remove a
+  $ hg log
+  changeset:   2:e7feacc7ec9e
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Removed tag a
+  
+  changeset:   1:bd7ee4f3939b
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Added tag a for changeset a8a82d372bb3
+  
+  changeset:   0:a8a82d372bb3
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     add a
+  
+  $ hg tag --remove a
+  abort: tag 'a' is already removed
+  [255]
+  $ hg log
+  changeset:   2:e7feacc7ec9e
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Removed tag a
+  
+  changeset:   1:bd7ee4f3939b
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Added tag a for changeset a8a82d372bb3
+  
+  changeset:   0:a8a82d372bb3
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     add a
+  
+  $ cat .hgtags
+  a8a82d372bb35b42ff736e74f07c23bcd99c371f a
+  a8a82d372bb35b42ff736e74f07c23bcd99c371f a
+  0000000000000000000000000000000000000000 a
--- a/tests/test-template-map.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-template-map.t	Mon Feb 04 20:35:21 2019 +0300
@@ -1039,7 +1039,7 @@
   $ touch q
   $ chmod 0 q
   $ hg log --style ./q
-  abort: Permission denied: ./q
+  abort: Permission denied: './q'
   [255]
 #endif
 
--- a/tests/test-transplant.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-transplant.t	Mon Feb 04 20:35:21 2019 +0300
@@ -39,12 +39,12 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ hg transplant 1
-  abort: outstanding uncommitted merges
+  abort: outstanding uncommitted merge
   [255]
   $ hg up -qC tip
   $ echo b0 > b1
   $ hg transplant 1
-  abort: outstanding local changes
+  abort: uncommitted changes
   [255]
   $ hg up -qC tip
   $ echo b2 > b2
--- a/tests/test-trusted.py	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-trusted.py	Mon Feb 04 20:35:21 2019 +0300
@@ -5,19 +5,34 @@
 from __future__ import absolute_import, print_function
 
 import os
+import sys
+
 from mercurial import (
     error,
+    pycompat,
     ui as uimod,
     util,
 )
+from mercurial.utils import stringutil
 
 hgrc = os.environ['HGRCPATH']
-f = open(hgrc)
+f = open(hgrc, 'rb')
 basehgrc = f.read()
 f.close()
 
-def testui(user='foo', group='bar', tusers=(), tgroups=(),
-           cuser='foo', cgroup='bar', debug=False, silent=False,
+def _maybesysstr(v):
+    if isinstance(v, bytes):
+        return pycompat.sysstr(v)
+    return pycompat.sysstr(stringutil.pprint(v))
+
+def bprint(*args, **kwargs):
+    print(*[_maybesysstr(a) for a in args],
+          **{k: _maybesysstr(v) for k, v in kwargs.items()})
+    # avoid awkward interleaving with ui object's output
+    sys.stdout.flush()
+
+def testui(user=b'foo', group=b'bar', tusers=(), tgroups=(),
+           cuser=b'foo', cgroup=b'bar', debug=False, silent=False,
            report=True):
     # user, group => owners of the file
     # tusers, tgroups => trusted users/groups
@@ -25,17 +40,17 @@
 
     # write a global hgrc with the list of trusted users/groups and
     # some setting so that we can be sure it was read
-    f = open(hgrc, 'w')
+    f = open(hgrc, 'wb')
     f.write(basehgrc)
-    f.write('\n[paths]\n')
-    f.write('global = /some/path\n\n')
+    f.write(b'\n[paths]\n')
+    f.write(b'global = /some/path\n\n')
 
     if tusers or tgroups:
-        f.write('[trusted]\n')
+        f.write(b'[trusted]\n')
         if tusers:
-            f.write('users = %s\n' % ', '.join(tusers))
+            f.write(b'users = %s\n' % b', '.join(tusers))
         if tgroups:
-            f.write('groups = %s\n' % ', '.join(tgroups))
+            f.write(b'groups = %s\n' % b', '.join(tgroups))
     f.close()
 
     # override the functions that give names to uids and gids
@@ -47,7 +62,7 @@
 
     def groupname(gid=None):
         if gid is None:
-            return 'bar'
+            return b'bar'
         return group
     util.groupname = groupname
 
@@ -58,13 +73,14 @@
     # try to read everything
     #print '# File belongs to user %s, group %s' % (user, group)
     #print '# trusted users = %s; trusted groups = %s' % (tusers, tgroups)
-    kind = ('different', 'same')
-    who = ('', 'user', 'group', 'user and the group')
+    kind = (b'different', b'same')
+    who = (b'', b'user', b'group', b'user and the group')
     trusted = who[(user in tusers) + 2*(group in tgroups)]
     if trusted:
-        trusted = ', but we trust the ' + trusted
-    print('# %s user, %s group%s' % (kind[user == cuser], kind[group == cgroup],
-                                     trusted))
+        trusted = b', but we trust the ' + trusted
+    bprint(b'# %s user, %s group%s' % (kind[user == cuser],
+                                       kind[group == cgroup],
+                                       trusted))
 
     u = uimod.ui.load()
     # disable the configuration registration warning
@@ -72,33 +88,33 @@
     # the purpose of this test is to check the old behavior, not to validate the
     # behavior from registered items, so we silence warnings related to unregistered
     # config.
-    u.setconfig('devel', 'warn-config-unknown', False, 'test')
-    u.setconfig('devel', 'all-warnings', False, 'test')
-    u.setconfig('ui', 'debug', str(bool(debug)))
-    u.setconfig('ui', 'report_untrusted', str(bool(report)))
-    u.readconfig('.hg/hgrc')
+    u.setconfig(b'devel', b'warn-config-unknown', False, b'test')
+    u.setconfig(b'devel', b'all-warnings', False, b'test')
+    u.setconfig(b'ui', b'debug', pycompat.bytestr(bool(debug)))
+    u.setconfig(b'ui', b'report_untrusted', pycompat.bytestr(bool(report)))
+    u.readconfig(b'.hg/hgrc')
     if silent:
         return u
-    print('trusted')
-    for name, path in u.configitems('paths'):
-        print('   ', name, '=', util.pconvert(path))
-    print('untrusted')
-    for name, path in u.configitems('paths', untrusted=True):
-        print('.', end=' ')
-        u.config('paths', name) # warning with debug=True
-        print('.', end=' ')
-        u.config('paths', name, untrusted=True) # no warnings
-        print(name, '=', util.pconvert(path))
+    bprint(b'trusted')
+    for name, path in u.configitems(b'paths'):
+        bprint(b'   ', name, b'=', util.pconvert(path))
+    bprint(b'untrusted')
+    for name, path in u.configitems(b'paths', untrusted=True):
+        bprint(b'.', end=b' ')
+        u.config(b'paths', name) # warning with debug=True
+        bprint(b'.', end=b' ')
+        u.config(b'paths', name, untrusted=True) # no warnings
+        bprint(name, b'=', util.pconvert(path))
     print()
 
     return u
 
-os.mkdir('repo')
-os.chdir('repo')
-os.mkdir('.hg')
-f = open('.hg/hgrc', 'w')
-f.write('[paths]\n')
-f.write('local = /another/path\n\n')
+os.mkdir(b'repo')
+os.chdir(b'repo')
+os.mkdir(b'.hg')
+f = open(b'.hg/hgrc', 'wb')
+f.write(b'[paths]\n')
+f.write(b'local = /another/path\n\n')
 f.close()
 
 #print '# Everything is run by user foo, group bar\n'
@@ -106,120 +122,130 @@
 # same user, same group
 testui()
 # same user, different group
-testui(group='def')
+testui(group=b'def')
 # different user, same group
-testui(user='abc')
+testui(user=b'abc')
 # ... but we trust the group
-testui(user='abc', tgroups=['bar'])
+testui(user=b'abc', tgroups=[b'bar'])
 # different user, different group
-testui(user='abc', group='def')
+testui(user=b'abc', group=b'def')
 # ... but we trust the user
-testui(user='abc', group='def', tusers=['abc'])
+testui(user=b'abc', group=b'def', tusers=[b'abc'])
 # ... but we trust the group
-testui(user='abc', group='def', tgroups=['def'])
+testui(user=b'abc', group=b'def', tgroups=[b'def'])
 # ... but we trust the user and the group
-testui(user='abc', group='def', tusers=['abc'], tgroups=['def'])
+testui(user=b'abc', group=b'def', tusers=[b'abc'], tgroups=[b'def'])
 # ... but we trust all users
-print('# we trust all users')
-testui(user='abc', group='def', tusers=['*'])
+bprint(b'# we trust all users')
+testui(user=b'abc', group=b'def', tusers=[b'*'])
 # ... but we trust all groups
-print('# we trust all groups')
-testui(user='abc', group='def', tgroups=['*'])
+bprint(b'# we trust all groups')
+testui(user=b'abc', group=b'def', tgroups=[b'*'])
 # ... but we trust the whole universe
-print('# we trust all users and groups')
-testui(user='abc', group='def', tusers=['*'], tgroups=['*'])
+bprint(b'# we trust all users and groups')
+testui(user=b'abc', group=b'def', tusers=[b'*'], tgroups=[b'*'])
 # ... check that users and groups are in different namespaces
-print("# we don't get confused by users and groups with the same name")
-testui(user='abc', group='def', tusers=['def'], tgroups=['abc'])
+bprint(b"# we don't get confused by users and groups with the same name")
+testui(user=b'abc', group=b'def', tusers=[b'def'], tgroups=[b'abc'])
 # ... lists of user names work
-print("# list of user names")
-testui(user='abc', group='def', tusers=['foo', 'xyz', 'abc', 'bleh'],
-       tgroups=['bar', 'baz', 'qux'])
+bprint(b"# list of user names")
+testui(user=b'abc', group=b'def', tusers=[b'foo', b'xyz', b'abc', b'bleh'],
+       tgroups=[b'bar', b'baz', b'qux'])
 # ... lists of group names work
-print("# list of group names")
-testui(user='abc', group='def', tusers=['foo', 'xyz', 'bleh'],
-       tgroups=['bar', 'def', 'baz', 'qux'])
+bprint(b"# list of group names")
+testui(user=b'abc', group=b'def', tusers=[b'foo', b'xyz', b'bleh'],
+       tgroups=[b'bar', b'def', b'baz', b'qux'])
 
-print("# Can't figure out the name of the user running this process")
-testui(user='abc', group='def', cuser=None)
+bprint(b"# Can't figure out the name of the user running this process")
+testui(user=b'abc', group=b'def', cuser=None)
 
-print("# prints debug warnings")
-u = testui(user='abc', group='def', cuser='foo', debug=True)
+bprint(b"# prints debug warnings")
+u = testui(user=b'abc', group=b'def', cuser=b'foo', debug=True)
 
-print("# report_untrusted enabled without debug hides warnings")
-u = testui(user='abc', group='def', cuser='foo', report=False)
+bprint(b"# report_untrusted enabled without debug hides warnings")
+u = testui(user=b'abc', group=b'def', cuser=b'foo', report=False)
 
-print("# report_untrusted enabled with debug shows warnings")
-u = testui(user='abc', group='def', cuser='foo', debug=True, report=False)
+bprint(b"# report_untrusted enabled with debug shows warnings")
+u = testui(user=b'abc', group=b'def', cuser=b'foo', debug=True, report=False)
 
-print("# ui.readconfig sections")
-filename = 'foobar'
-f = open(filename, 'w')
-f.write('[foobar]\n')
-f.write('baz = quux\n')
+bprint(b"# ui.readconfig sections")
+filename = b'foobar'
+f = open(filename, 'wb')
+f.write(b'[foobar]\n')
+f.write(b'baz = quux\n')
 f.close()
-u.readconfig(filename, sections=['foobar'])
-print(u.config('foobar', 'baz'))
+u.readconfig(filename, sections=[b'foobar'])
+bprint(u.config(b'foobar', b'baz'))
 
 print()
-print("# read trusted, untrusted, new ui, trusted")
+bprint(b"# read trusted, untrusted, new ui, trusted")
 u = uimod.ui.load()
 # disable the configuration registration warning
 #
 # the purpose of this test is to check the old behavior, not to validate the
 # behavior from registered items, so we silence warnings related to unregistered
 # config.
-u.setconfig('devel', 'warn-config-unknown', False, 'test')
-u.setconfig('devel', 'all-warnings', False, 'test')
-u.setconfig('ui', 'debug', 'on')
+u.setconfig(b'devel', b'warn-config-unknown', False, b'test')
+u.setconfig(b'devel', b'all-warnings', False, b'test')
+u.setconfig(b'ui', b'debug', b'on')
 u.readconfig(filename)
 u2 = u.copy()
 def username(uid=None):
-    return 'foo'
+    return b'foo'
 util.username = username
-u2.readconfig('.hg/hgrc')
-print('trusted:')
-print(u2.config('foobar', 'baz'))
-print('untrusted:')
-print(u2.config('foobar', 'baz', untrusted=True))
+u2.readconfig(b'.hg/hgrc')
+bprint(b'trusted:')
+bprint(u2.config(b'foobar', b'baz'))
+bprint(b'untrusted:')
+bprint(u2.config(b'foobar', b'baz', untrusted=True))
 
 print()
-print("# error handling")
+bprint(b"# error handling")
 
 def assertraises(f, exc=error.Abort):
     try:
         f()
     except exc as inst:
-        print('raised', inst.__class__.__name__)
+        bprint(b'raised', inst.__class__.__name__)
     else:
-        print('no exception?!')
+        bprint(b'no exception?!')
 
-print("# file doesn't exist")
-os.unlink('.hg/hgrc')
-assert not os.path.exists('.hg/hgrc')
+bprint(b"# file doesn't exist")
+os.unlink(b'.hg/hgrc')
+assert not os.path.exists(b'.hg/hgrc')
 testui(debug=True, silent=True)
-testui(user='abc', group='def', debug=True, silent=True)
+testui(user=b'abc', group=b'def', debug=True, silent=True)
 
 print()
-print("# parse error")
-f = open('.hg/hgrc', 'w')
-f.write('foo')
+bprint(b"# parse error")
+f = open(b'.hg/hgrc', 'wb')
+f.write(b'foo')
 f.close()
 
+# This is a hack to remove b'' prefixes from ParseError.__bytes__ on
+# Python 3.
+def normalizeparseerror(e):
+    if pycompat.ispy3:
+        args = [a.decode('utf-8') for a in e.args]
+    else:
+        args = e.args
+
+    return error.ParseError(*args)
+
 try:
-    testui(user='abc', group='def', silent=True)
+    testui(user=b'abc', group=b'def', silent=True)
 except error.ParseError as inst:
-    print(inst)
+    bprint(normalizeparseerror(inst))
 
 try:
     testui(debug=True, silent=True)
 except error.ParseError as inst:
-    print(inst)
+    bprint(normalizeparseerror(inst))
 
 print()
-print('# access typed information')
-with open('.hg/hgrc', 'w') as f:
-    f.write('''\
+bprint(b'# access typed information')
+with open(b'.hg/hgrc', 'wb') as f:
+    f.write(b'''\
 [foo]
 sub=main
 sub:one=one
@@ -230,32 +256,33 @@
 bytes=81mb
 list=spam,ham,eggs
 ''')
-u = testui(user='abc', group='def', cuser='foo', silent=True)
+u = testui(user=b'abc', group=b'def', cuser=b'foo', silent=True)
 def configpath(section, name, default=None, untrusted=False):
     path = u.configpath(section, name, default, untrusted)
     if path is None:
         return None
     return util.pconvert(path)
 
-print('# suboptions, trusted and untrusted')
-trusted = u.configsuboptions('foo', 'sub')
-untrusted = u.configsuboptions('foo', 'sub', untrusted=True)
-print(
+bprint(b'# suboptions, trusted and untrusted')
+trusted = u.configsuboptions(b'foo', b'sub')
+untrusted = u.configsuboptions(b'foo', b'sub', untrusted=True)
+bprint(
     (trusted[0], sorted(trusted[1].items())),
     (untrusted[0], sorted(untrusted[1].items())))
-print('# path, trusted and untrusted')
-print(configpath('foo', 'path'), configpath('foo', 'path', untrusted=True))
-print('# bool, trusted and untrusted')
-print(u.configbool('foo', 'bool'), u.configbool('foo', 'bool', untrusted=True))
-print('# int, trusted and untrusted')
-print(
-    u.configint('foo', 'int', 0),
-    u.configint('foo', 'int', 0, untrusted=True))
-print('# bytes, trusted and untrusted')
-print(
-    u.configbytes('foo', 'bytes', 0),
-    u.configbytes('foo', 'bytes', 0, untrusted=True))
-print('# list, trusted and untrusted')
-print(
-    u.configlist('foo', 'list', []),
-    u.configlist('foo', 'list', [], untrusted=True))
+bprint(b'# path, trusted and untrusted')
+bprint(configpath(b'foo', b'path'), configpath(b'foo', b'path', untrusted=True))
+bprint(b'# bool, trusted and untrusted')
+bprint(u.configbool(b'foo', b'bool'),
+       u.configbool(b'foo', b'bool', untrusted=True))
+bprint(b'# int, trusted and untrusted')
+bprint(
+    u.configint(b'foo', b'int', 0),
+    u.configint(b'foo', b'int', 0, untrusted=True))
+bprint(b'# bytes, trusted and untrusted')
+bprint(
+    u.configbytes(b'foo', b'bytes', 0),
+    u.configbytes(b'foo', b'bytes', 0, untrusted=True))
+bprint(b'# list, trusted and untrusted')
+bprint(
+    u.configlist(b'foo', b'list', []),
+    u.configlist(b'foo', b'list', [], untrusted=True))
--- a/tests/test-trusted.py.out	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-trusted.py.out	Mon Feb 04 20:35:21 2019 +0300
@@ -174,9 +174,9 @@
 # parse error
 # different user, different group
 not trusting file .hg/hgrc from untrusted user abc, group def
-('foo', '.hg/hgrc:1')
+ParseError('foo', '.hg/hgrc:1')
 # same user, same group
-('foo', '.hg/hgrc:1')
+ParseError('foo', '.hg/hgrc:1')
 
 # access typed information
 # different user, different group
--- a/tests/test-unamend.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-unamend.t	Mon Feb 04 20:35:21 2019 +0300
@@ -232,6 +232,7 @@
 
   $ hg revert --all
   forgetting bar
+  $ rm bar
 
 Unamending in the middle of a stack
 
@@ -302,7 +303,6 @@
 Testing whether unamend retains copies or not
 
   $ hg status
-  ? bar
 
   $ hg mv a foo
 
@@ -370,3 +370,42 @@
   diff --git a/c b/wat
   rename from c
   rename to wat
+  $ hg revert -qa
+  $ rm foobar wat
+
+Rename a->b, then amend b->c. After unamend, should look like b->c.
+
+  $ hg co -q 0
+  $ hg mv a b
+  $ hg ci -qm 'move to a b'
+  $ hg mv b c
+  $ hg amend
+  $ hg unamend
+  $ hg st --copies --change .
+  A b
+    a
+  R a
+  $ hg st --copies
+  A c
+    b
+  R b
+  $ hg revert -qa
+  $ rm c
+
+Rename a->b, then amend b->c, and working copy change c->d. After unamend, should look like b->d
+
+  $ hg co -q 0
+  $ hg mv a b
+  $ hg ci -qm 'move to a b'
+  $ hg mv b c
+  $ hg amend
+  $ hg mv c d
+  $ hg unamend
+  $ hg st --copies --change .
+  A b
+    a
+  R a
+  $ hg st --copies
+  A d
+    b
+  R b
--- a/tests/test-uncommit.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-uncommit.t	Mon Feb 04 20:35:21 2019 +0300
@@ -398,3 +398,43 @@
   |/
   o  0:ea4e33293d4d274a2ba73150733c2612231f398c a 1
   
+
+Rename a->b, then remove b in working copy. Result should remove a.
+
+  $ hg co -q 0
+  $ hg mv a b
+  $ hg ci -qm 'move a to b'
+  $ hg rm b
+  $ hg uncommit --config experimental.uncommitondirtywdir=True
+  $ hg st --copies
+  R a
+  $ hg revert a
+
+Rename a->b, then rename b->c in working copy. Result should rename a->c.
+
+  $ hg co -q 0
+  $ hg mv a b
+  $ hg ci -qm 'move a to b'
+  $ hg mv b c
+  $ hg uncommit --config experimental.uncommitondirtywdir=True
+  $ hg st --copies
+  A c
+    a
+  R a
+  $ hg revert a
+  $ hg forget c
+  $ rm c
+
+Copy a->b1 and a->b2, then rename b1->c in working copy. Result should copy a->b2 and a->c.
+
+  $ hg co -q 0
+  $ hg cp a b1
+  $ hg cp a b2
+  $ hg ci -qm 'move a to b1 and b2'
+  $ hg mv b1 c
+  $ hg uncommit --config experimental.uncommitondirtywdir=True
+  $ hg st --copies
+  A b2
+    a
+  A c
+    a
--- a/tests/test-update-atomic.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-update-atomic.t	Mon Feb 04 20:35:21 2019 +0300
@@ -4,13 +4,14 @@
 
   $ cat > $TESTTMP/show_mode.py <<EOF
   > from __future__ import print_function
+  > import os
+  > import stat
   > import sys
-  > import os
-  > from stat import ST_MODE
+  > ST_MODE = stat.ST_MODE
   > 
   > for file_path in sys.argv[1:]:
   >     file_stat = os.stat(file_path)
-  >     octal_mode = oct(file_stat[ST_MODE] & 0o777)
+  >     octal_mode = oct(file_stat[ST_MODE] & 0o777).replace('o', '')
   >     print("%s:%s" % (file_path, octal_mode))
   > 
   > EOF
@@ -19,11 +20,15 @@
   $ cd repo
 
   $ cat > .hg/showwrites.py <<EOF
+  > from __future__ import print_function
+  > from mercurial import pycompat
+  > from mercurial.utils import stringutil
   > def uisetup(ui):
   >   from mercurial import vfs
   >   class newvfs(vfs.vfs):
   >     def __call__(self, *args, **kwargs):
-  >       print('vfs open', args, sorted(list(kwargs.items())))
+  >       print(pycompat.sysstr(stringutil.pprint(
+  >           ('vfs open', args, sorted(list(kwargs.items()))))))
   >       return super(newvfs, self).__call__(*args, **kwargs)
   >   vfs.vfs = newvfs
   > EOF
--- a/tests/test-wireproto-command-capabilities.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-wireproto-command-capabilities.t	Mon Feb 04 20:35:21 2019 +0300
@@ -22,6 +22,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -45,6 +46,7 @@
   >    x-hgproto-1: cbor
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -82,6 +84,7 @@
   >    x-hgupgrade-1: foo bar
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -106,6 +109,7 @@
   >    x-hgproto-1: some value
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -131,6 +135,7 @@
   >    x-hgproto-1: cbor
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -170,6 +175,7 @@
   >    x-hgproto-1: cbor
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -202,6 +208,7 @@
   >    x-hgproto-1: cbor
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -460,6 +467,7 @@
   > command capabilities
   > EOF
   creating http peer for wire protocol version 2
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1,X-HgUpgrade-1\r\n
@@ -478,6 +486,7 @@
   s>     \r\n
   s>     \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
   sending capabilities command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -498,23 +507,19 @@
   s>     \t\x00\x00\x01\x00\x02\x01\x92
   s>     Hidentity
   s>     \r\n
-  received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   s>     13\r\n
   s>     \x0b\x00\x00\x01\x00\x02\x041
   s>     \xa1FstatusBok
   s>     \r\n
-  received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     65e\r\n
   s>     V\x06\x00\x01\x00\x02\x041
   s>     \xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1Lsparserevlog
   s>     \r\n
-  received frame(size=1622; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     8\r\n
   s>     \x00\x00\x00\x01\x00\x02\x002
   s>     \r\n
   s>     0\r\n
   s>     \r\n
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
   response: gen[
     {
       b'commands': {
--- a/tests/test-wireproto-content-redirects.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-wireproto-content-redirects.t	Mon Feb 04 20:35:21 2019 +0300
@@ -51,6 +51,7 @@
   > command capabilities
   > EOF
   creating http peer for wire protocol version 2
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1,X-HgUpgrade-1\r\n
@@ -71,6 +72,7 @@
   (remote redirect target target-a is compatible) (tls1.2 !)
   (remote redirect target target-a requires unsupported TLS versions: 1.2, 1.3) (no-tls1.2 !)
   sending capabilities command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -93,23 +95,19 @@
   s>     \t\x00\x00\x01\x00\x02\x01\x92
   s>     Hidentity
   s>     \r\n
-  received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   s>     13\r\n
   s>     \x0b\x00\x00\x01\x00\x02\x041
   s>     \xa1FstatusBok
   s>     \r\n
-  received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     6de\r\n
   s>     \xd6\x06\x00\x01\x00\x02\x041
   s>     \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/
   s>     \r\n
-  received frame(size=1750; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     8\r\n
   s>     \x00\x00\x00\x01\x00\x02\x002
   s>     \r\n
   s>     0\r\n
   s>     \r\n
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
   response: gen[
     {
       b'commands': {
@@ -383,6 +381,7 @@
   > command capabilities
   > EOF
   creating http peer for wire protocol version 2
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1,X-HgUpgrade-1\r\n
@@ -403,6 +402,7 @@
   (remote redirect target target-a is compatible)
   (remote redirect target target-b uses unsupported protocol: unknown)
   sending capabilities command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -423,23 +423,19 @@
   s>     \t\x00\x00\x01\x00\x02\x01\x92
   s>     Hidentity
   s>     \r\n
-  received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   s>     13\r\n
   s>     \x0b\x00\x00\x01\x00\x02\x041
   s>     \xa1FstatusBok
   s>     \r\n
-  received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     6f9\r\n
   s>     \xf1\x06\x00\x01\x00\x02\x041
   s>     \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/
   s>     \r\n
-  received frame(size=1777; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     8\r\n
   s>     \x00\x00\x00\x01\x00\x02\x002
   s>     \r\n
   s>     0\r\n
   s>     \r\n
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
   response: gen[
     {
       b'commands': {
@@ -720,6 +716,7 @@
   > command capabilities
   > EOF
   creating http peer for wire protocol version 2
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1,X-HgUpgrade-1\r\n
@@ -739,6 +736,7 @@
   s>     \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
   (redirect target target-bad-tls requires SNI, which is unsupported)
   sending capabilities command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -759,23 +757,19 @@
   s>     \t\x00\x00\x01\x00\x02\x01\x92
   s>     Hidentity
   s>     \r\n
-  received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   s>     13\r\n
   s>     \x0b\x00\x00\x01\x00\x02\x041
   s>     \xa1FstatusBok
   s>     \r\n
-  received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     6d1\r\n
   s>     \xc9\x06\x00\x01\x00\x02\x041
   s>     \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/
   s>     \r\n
-  received frame(size=1737; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     8\r\n
   s>     \x00\x00\x00\x01\x00\x02\x002
   s>     \r\n
   s>     0\r\n
   s>     \r\n
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
   response: gen[
     {
       b'commands': {
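The "\xa1FstatusBok" payload that recurs in these framed responses is CBOR for a one-entry map from the byte string "status" to "ok". A minimal hand-decoding sketch covering only the CBOR rules this particular value uses (illustrative; not Mercurial's own cborutil):

  # Hedged sketch: hand-decode the 11-byte CBOR payload b'\xa1FstatusBok'
  # seen in the command-response frames above.
  payload = b'\xa1FstatusBok'

  assert payload[0] == 0xa1            # major type 5 (map), 1 key/value pair
  assert payload[1] == 0x46            # major type 2 (byte string), length 6
  key = payload[2:8]                   # b'status'
  assert payload[8] == 0x42            # major type 2 (byte string), length 2
  value = payload[9:11]                # b'ok'
  print({key: value})                  # {b'status': b'ok'}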
@@ -1046,6 +1040,7 @@
   > command capabilities
   > EOF
   creating http peer for wire protocol version 2
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /?cmd=capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     vary: X-HgProto-1,X-HgUpgrade-1\r\n
@@ -1065,6 +1060,7 @@
   s>     \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe0batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
   (remote redirect target target-bad-tls requires unsupported TLS versions: 39, 42)
   sending capabilities command
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     accept: application/mercurial-exp-framing-0006\r\n
@@ -1085,23 +1081,19 @@
   s>     \t\x00\x00\x01\x00\x02\x01\x92
   s>     Hidentity
   s>     \r\n
-  received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   s>     13\r\n
   s>     \x0b\x00\x00\x01\x00\x02\x041
   s>     \xa1FstatusBok
   s>     \r\n
-  received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     6d7\r\n
   s>     \xcf\x06\x00\x01\x00\x02\x041
   s>     \xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/
   s>     \r\n
-  received frame(size=1743; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   s>     8\r\n
   s>     \x00\x00\x00\x01\x00\x02\x002
   s>     \r\n
   s>     0\r\n
   s>     \r\n
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
   response: gen[
     {
       b'commands': {
@@ -1372,6 +1364,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/simplecache/missingkey HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
@@ -1416,6 +1409,7 @@
   >     user-agent: test
   > EOF
   using raw connection to peer
+  s> setsockopt(6, 1, 1) -> None (?)
   s>     GET /api/simplecache/47abb8efa5f01b8964d74917793ad2464db0fa2c HTTP/1.1\r\n
   s>     Accept-Encoding: identity\r\n
   s>     user-agent: test\r\n
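The removed "received frame(...)" lines were human-readable summaries of the 8-byte frame headers that still appear verbatim in the "s>" hexdumps (for example \t\x00\x00\x01\x00\x02\x01\x92). A minimal decoding sketch, assuming the exp-framing header layout of a 24-bit little-endian payload length, a 16-bit little-endian request ID, a stream ID byte, a stream-flags byte, and a final byte packing the frame type in the high nibble and the frame flags in the low nibble:

  import struct

  # Hedged sketch: decode one 8-byte wire protocol v2 frame header as it
  # appears in the "s>" hexdumps above, under the layout assumed in the
  # lead-in. Field names mirror the removed "received frame(...)" lines.
  def parseframeheader(header):
      if len(header) != 8:
          raise ValueError('frame header must be exactly 8 bytes')
      size = header[0] | (header[1] << 8) | (header[2] << 16)
      requestid, streamid, streamflags, typeflags = struct.unpack_from(
          '<HBBB', header, 3)
      return {
          'size': size,
          'request': requestid,
          'stream': streamid,
          'streamflags': streamflags,
          'type': typeflags >> 4,
          'flags': typeflags & 0x0f,
      }

  # First header of the capabilities response above; the removed test line
  # rendered this as size=9, request=1, stream=2 (stream-begin).
  print(parseframeheader(b'\t\x00\x00\x01\x00\x02\x01\x92'))
  # Final header; type 3 is what the removed lines show as command-response,
  # with flags=2 rendered as eos.
  print(parseframeheader(b'\x00\x00\x00\x01\x00\x02\x002'))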
--- a/tests/test-worker.t	Fri Feb 01 13:44:09 2019 -0500
+++ b/tests/test-worker.t	Mon Feb 04 20:35:21 2019 +0300
@@ -83,8 +83,10 @@
   [255]
 
   $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \
-  > test 100000.0 abort --traceback 2>&1 | egrep '^(SystemExit|Abort)'
-  Abort: known exception
+  > test 100000.0 abort --traceback 2>&1 | egrep '(SystemExit|Abort)'
+      raise error.Abort(b'known exception')
+  mercurial.error.Abort: b'known exception' (py3 !)
+  Abort: known exception (no-py3 !)
   SystemExit: 255
 
 Traceback must be printed for unknown exceptions
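The test-worker.t hunk above drops the "^" anchor from the egrep pattern because under Python 3 the abort is reported in the traceback as mercurial.error.Abort rather than as a line starting with "Abort:", so an anchored match no longer finds it. A small illustration using hypothetical sample lines:

  import re

  # Hedged illustration of the egrep change: the anchored pattern only
  # matches lines that begin with the exception name, which is no longer
  # the case for the py3-style traceback line.
  py3_line = "mercurial.error.Abort: b'known exception'"
  py2_line = 'Abort: known exception'

  anchored = re.compile(r'^(SystemExit|Abort)')
  unanchored = re.compile(r'(SystemExit|Abort)')

  print(bool(anchored.search(py3_line)))    # False: anchored pattern misses it
  print(bool(unanchored.search(py3_line)))  # True: unanchored pattern matches
  print(bool(anchored.search(py2_line)))    # True: the old pattern worked on py2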