phases: large rewrite on retract boundary

The new code is still pure Python, so we still have room to go significantly
faster. However, the complexity of the expensive part is now
`O(|[min_new_draft, tip]|)` instead of `O(|[min_draft, tip]|)`, which should
help tremendously on repositories with old drafts (like mercurial-devel or
mozilla-try).
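To make the complexity difference concrete, here is a minimal, self-contained
sketch; revisions are modelled as plain integers and every name in it is made
up for illustration, none of it is the real phases code:

    # Toy model only: revisions are plain integers, not real changesets.
    def scanned_ranges(min_draft, min_new_draft, tip):
        # Compare the rev span a full recompute walks with the span the
        # incremental retract-boundary code walks.
        full_recompute = range(min_draft, tip + 1)   # O(|[min_draft, tip]|)
        incremental = range(min_new_draft, tip + 1)  # O(|[min_new_draft, tip]|)
        return full_recompute, incremental

    # A repository with a years-old draft root but new work right at tip:
    full, incr = scanned_ranges(min_draft=1000, min_new_draft=999990, tip=1000000)
    print(len(full), len(incr))  # 999001 vs 11

On such a repository the incremental scan touches a handful of revisions
instead of roughly the whole draft history.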
This is especially useful as the most common "retract boundary" operations
happen when we commit/rewrite new drafts or when we push new drafts to a
non-publishing server. In these cases, the smallest of the new revs is very
close to the tip and there is very little work to do.

A few smaller optimisations could be applied to these cases; they will be
introduced in later changesets.
We still have to iterate over large sets of roots, but this is already a great
improvement for a very small amount of work. We gather information on the
affected changesets as we go, since we can put it to use in the next
changesets.
This extra data collection might slow down the `register_new` case a bit, but
there it should not really matter: either the set of new nodes is small, so the
impact is negligible, or the set of new nodes is large, and the work needed to
add them will dominate the overhead of collecting information in
`changed_revs`.
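The "gather information as we go" idea can be illustrated with a small sketch;
the data structures and the function below are hypothetical stand-ins (a plain
rev -> phase dict and a rev -> children dict), not the actual phasecache API:

    # Toy sketch, not the real Mercurial implementation.
    def retract_boundary_sketch(phases, children, new_roots, target_phase):
        # Raise the phase of `new_roots` and their descendants to at least
        # `target_phase`, recording every affected revision on the fly.
        changed_revs = {}  # rev -> old phase, filled while we walk
        stack = list(new_roots)
        while stack:
            rev = stack.pop()
            old = phases.get(rev, 0)  # 0 = public, 1 = draft, 2 = secret
            if old < target_phase:
                changed_revs[rev] = old  # recorded during the walk, no second pass
                phases[rev] = target_phase
                stack.extend(children.get(rev, ()))
            # else: descendants already have a phase >= target_phase
            # (phases never decrease along descendants), so we can prune
        return changed_revs

The point is only that the bookkeeping happens inside the walk that the
retraction has to do anyway, which is why its overhead stays proportional to
the existing work.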
As this new code computes the changes on the fly, it unlocks other interesting
improvements that will be done in later changesets.
#!/usr/bin/env python

__doc__ = """Tiny HTTP Proxy.

This module implements GET, HEAD, POST, PUT and DELETE methods
on BaseHTTPServer, and behaves as an HTTP proxy. The CONNECT
method is also implemented experimentally, but has not been
tested yet.

Any help will be greatly appreciated. SUZUKI Hisao
"""

__version__ = "0.2.1"

import optparse
import os
import select
import socket
import sys

from mercurial import (
    pycompat,
    util,
)

httpserver = util.httpserver
socketserver = util.socketserver
urlreq = util.urlreq

if os.environ.get('HGIPV6', '0') == '1':
    family = socket.AF_INET6
else:
    family = socket.AF_INET
class ProxyHandler(httpserver.basehttprequesthandler):
    __base = httpserver.basehttprequesthandler
    __base_handle = __base.handle

    server_version = "TinyHTTPProxy/" + __version__
    rbufsize = 0  # self.rfile should be unbuffered

    def handle(self):
        (ip, port) = self.client_address
        allowed = getattr(self, 'allowed_clients', None)
        if allowed is not None and ip not in allowed:
            # client is not in the allow list: parse the request only so
            # that a proper 403 error can be sent back
            self.raw_requestline = self.rfile.readline()
            if self.parse_request():
                self.send_error(403)
        else:
            self.__base_handle()

    def log_request(self, code='-', size='-'):
        xheaders = [h for h in self.headers.items() if h[0].startswith('x-')]
        self.log_message(
            '"%s" %s %s%s',
            self.requestline,
            str(code),
            str(size),
            ''.join([' %s:%s' % h for h in sorted(xheaders)]),
        )
        # Flush for Windows, so output isn't lost on TerminateProcess()
        sys.stdout.flush()
        sys.stderr.flush()
    def _connect_to(self, netloc, soc):
        i = netloc.find(':')
        if i >= 0:
            host_port = netloc[:i], int(netloc[i + 1 :])
        else:
            host_port = netloc, 80
        print("\t" "connect to %s:%d" % host_port)
        try:
            soc.connect(host_port)
        except socket.error as e:
            self.send_error(404, e.strerror)
            return 0
        return 1

    def do_CONNECT(self):
        soc = socket.socket(family, socket.SOCK_STREAM)
        try:
            if self._connect_to(self.path, soc):
                self.log_request(200)
                self.wfile.write(
                    pycompat.bytestr(self.protocol_version)
                    + b" 200 Connection established\r\n"
                )
                self.wfile.write(
                    b"Proxy-agent: %s\r\n"
                    % pycompat.bytestr(self.version_string())
                )
                self.wfile.write(b"\r\n")
                self._read_write(soc, 300)
        finally:
            print("\t" "bye")
            soc.close()
            self.connection.close()
    def do_GET(self):
        (scm, netloc, path, params, query, fragment) = urlreq.urlparse(
            self.path, 'http'
        )
        if scm != 'http' or fragment or not netloc:
            self.send_error(400, "bad url %s" % self.path)
            return
        soc = socket.socket(family, socket.SOCK_STREAM)
        try:
            if self._connect_to(netloc, soc):
                self.log_request()
                url = urlreq.urlunparse(('', '', path, params, query, ''))
                soc.send(
                    b"%s %s %s\r\n"
                    % (
                        pycompat.bytestr(self.command),
                        pycompat.bytestr(url),
                        pycompat.bytestr(self.request_version),
                    )
                )
                self.headers['Connection'] = 'close'
                del self.headers['Proxy-Connection']
                for key, val in self.headers.items():
                    soc.send(
                        b"%s: %s\r\n"
                        % (pycompat.bytestr(key), pycompat.bytestr(val))
                    )
                soc.send(b"\r\n")
                self._read_write(soc)
        finally:
            print("\t" "bye")
            soc.close()
            self.connection.close()
    def _read_write(self, soc, max_idling=20):
        iw = [self.connection, soc]
        ow = []
        count = 0
        # relay data between the client connection and the remote socket
        # until a socket reports an exceptional condition or no data has
        # flowed for max_idling consecutive select timeouts
        while True:
            count += 1
            (ins, _, exs) = select.select(iw, ow, iw, 3)
            if exs:
                break
            if ins:
                for i in ins:
                    if i is soc:
                        out = self.connection
                    else:
                        out = soc
                    try:
                        data = i.recv(8192)
                    except socket.error:
                        break
                    if data:
                        out.send(data)
                        count = 0
            else:
                print("\t" "idle", count)
            if count == max_idling:
                break

    do_HEAD = do_GET
    do_POST = do_GET
    do_PUT = do_GET
    do_DELETE = do_GET
class ThreadingHTTPServer(socketserver.ThreadingMixIn, httpserver.httpserver):
    def __init__(self, *args, **kwargs):
        httpserver.httpserver.__init__(self, *args, **kwargs)
        a = open("proxy.pid", "w")
        a.write(str(os.getpid()) + "\n")
        a.close()


def runserver(port=8000, bind=""):
    server_address = (bind, port)
    ProxyHandler.protocol_version = "HTTP/1.0"
    httpd = ThreadingHTTPServer(server_address, ProxyHandler)
    sa = httpd.socket.getsockname()
    print("Serving HTTP on", sa[0], "port", sa[1], "...")
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        print("\nKeyboard interrupt received, exiting.")
        httpd.server_close()
        sys.exit(0)
if __name__ == '__main__':
    argv = sys.argv
    if argv[1:] and argv[1] in ('-h', '--help'):
        print(argv[0], "[port [allowed_client_name ...]]")
    else:
        if argv[2:]:
            allowed = []
            for name in argv[2:]:
                client = socket.gethostbyname(name)
                allowed.append(client)
                print("Accept: %s (%s)" % (client, name))
            ProxyHandler.allowed_clients = allowed
            del argv[2:]
        else:
            print("Any clients will be served...")

        parser = optparse.OptionParser()
        parser.add_option(
            '-b',
            '--bind',
            metavar='ADDRESS',
            help='Specify alternate bind address ' '[default: all interfaces]',
            default='',
        )
        (options, args) = parser.parse_args()
        port = 8000
        if len(args) == 1:
            port = int(args[0])
        runserver(port, options.bind)