view mercurial/httpconnection.py @ 34682:7e3001b74ab3

tersestatus: re-implement the functionality to terse the status. The previous terse status implementation hacked around os.listdir() and was flaky: there have been many instances of Mercurial buildbots and Google's internal builds failing because of it. Even though I wrote the last implementation, it was hard for me to find the reason for the flakiness. The new implementation can be slower than the old one, but it is clean and easy to understand. In it we create a node object for each directory and build a tree-like structure starting from the root of the working copy. While building that structure we store some information on the nodes which helps us decide later whether a directory can be tersed. Once the whole tree is built, we traverse it and build the list of files for each status with the required tersing. There is no behaviour change: the old test, test-status-terse.t, passes with the new implementation. Differential Revision: https://phab.mercurial-scm.org/D985
author Pulkit Goyal <7895pulkit@gmail.com>
date Fri, 06 Oct 2017 20:54:23 +0530
parents 1232f7fa00c3
children 8549ca7fcde1

# httpconnection.py - urllib2 handler for new http support
#
# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
# Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
# Copyright 2011 Google, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import logging
import os
import socket

from .i18n import _
from . import (
    httpclient,
    sslutil,
    urllibcompat,
    util,
)

urlerr = util.urlerr
urlreq = util.urlreq

# moved here from url.py to avoid a cycle
class httpsendfile(object):
    """This is a wrapper around the objects returned by python's "open".

    Its purpose is to send file-like objects via HTTP.
    It does not, however, define a __len__ attribute, because the length
    might be more than Py_ssize_t can handle.
    """

    def __init__(self, ui, *args, **kwargs):
        self.ui = ui
        self._data = open(*args, **kwargs)
        self.seek = self._data.seek
        self.close = self._data.close
        self.write = self._data.write
        self.length = os.fstat(self._data.fileno()).st_size
        self._pos = 0
        self._total = self.length // 1024 * 2

    def read(self, *args, **kwargs):
        ret = self._data.read(*args, **kwargs)
        if not ret:
            self.ui.progress(_('sending'), None)
            return ret
        self._pos += len(ret)
        # We pass double the max for total because we currently have
        # to send the bundle twice in the case of a server that
        # requires authentication. Since we can't know until we try
        # once whether authentication will be required, just lie to
        # the user and maybe the push succeeds suddenly at 50%.
        self.ui.progress(_('sending'), self._pos // 1024,
                         unit=_('kb'), total=self._total)
        return ret

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
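
# Illustrative usage sketch (not part of the original module): httpsendfile is
# typically used to stream a local bundle file as an HTTP request body while
# reporting upload progress through ui.progress; the path and connection used
# below are hypothetical.
#
#     with httpsendfile(ui, '/tmp/bundle.hg', 'rb') as body:
#         conn.request('POST', '/some/path', body=body, headers=headers)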

# moved here from url.py to avoid a cycle
def readauthforuri(ui, uri, user):
    # Read configuration
    groups = {}
    for key, val in ui.configitems('auth'):
        if key in ('cookiefile',):
            continue

        if '.' not in key:
            ui.warn(_("ignoring invalid [auth] key '%s'\n") % key)
            continue
        group, setting = key.rsplit('.', 1)
        gdict = groups.setdefault(group, {})
        if setting in ('username', 'cert', 'key'):
            val = util.expandpath(val)
        gdict[setting] = val

    # Find the best match
    scheme, hostpath = uri.split('://', 1)
    bestuser = None
    bestlen = 0
    bestauth = None
    for group, auth in groups.iteritems():
        if user and user != auth.get('username', user):
            # If a username was set in the URI, the entry username
            # must either match it or be unset
            continue
        prefix = auth.get('prefix')
        if not prefix:
            continue
        p = prefix.split('://', 1)
        if len(p) > 1:
            schemes, prefix = [p[0]], p[1]
        else:
            schemes = (auth.get('schemes') or 'https').split()
        if ((prefix == '*' or hostpath.startswith(prefix))
            and (len(prefix) > bestlen
                 or (len(prefix) == bestlen
                     and not bestuser and 'username' in auth))
            and scheme in schemes):
            bestlen = len(prefix)
            bestauth = group, auth
            bestuser = auth.get('username')
            if user and not bestuser:
                auth['username'] = user
    return bestauth
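
# Illustrative sketch (not part of the original source): readauthforuri
# matches URIs against [auth] groups configured in hgrc, for example:
#
#     [auth]
#     example.prefix = hg.example.com/repo
#     example.schemes = https
#     example.username = alice
#     example.key = ~/.hgcerts/client.key
#     example.cert = ~/.hgcerts/client.crt
#
# The group name and paths above are hypothetical. The longest matching
# prefix wins, and a prefix of '*' matches any host.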

# Mercurial (at least until we can remove the old codepath) requires
# that the http response object be sufficiently file-like, so we
# provide a close() method here.
class HTTPResponse(httpclient.HTTPResponse):
    def close(self):
        pass

class HTTPConnection(httpclient.HTTPConnection):
    response_class = HTTPResponse
    def request(self, method, uri, body=None, headers=None):
        if headers is None:
            headers = {}
        if isinstance(body, httpsendfile):
            body.seek(0)
        httpclient.HTTPConnection.request(self, method, uri, body=body,
                                          headers=headers)


_configuredlogging = False
LOGFMT = '%(levelname)s:%(name)s:%(lineno)d:%(message)s'
# Subclass BOTH of these because otherwise urllib2 "helpfully"
# reinserts them since it notices we don't include any subclasses of
# them.
class http2handler(urlreq.httphandler, urlreq.httpshandler):
    def __init__(self, ui, pwmgr):
        global _configuredlogging
        urlreq.abstracthttphandler.__init__(self)
        self.ui = ui
        self.pwmgr = pwmgr
        self._connections = {}
        # developer config: ui.http2debuglevel
        loglevel = ui.config('ui', 'http2debuglevel')
        if loglevel and not _configuredlogging:
            _configuredlogging = True
            logger = logging.getLogger('mercurial.httpclient')
            logger.setLevel(getattr(logging, loglevel.upper()))
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter(LOGFMT))
            logger.addHandler(handler)
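        # Illustrative sketch (not part of the original source): the developer
        # option read above is set in hgrc, for example:
        #
        #     [ui]
        #     http2debuglevel = debug
        #
        # Any level name known to the logging module (debug, info, warning,
        # error) should work, since it is upper-cased and looked up on the
        # logging module.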

    def close_all(self):
        """Close and remove all connection objects being kept for reuse."""
        for openconns in self._connections.values():
            for conn in openconns:
                conn.close()
        self._connections = {}

    # shamelessly borrowed from urllib2.AbstractHTTPHandler
    def do_open(self, http_class, req, use_ssl):
        """Return an addinfourl object for the request, using http_class.

        http_class must implement the HTTPConnection API from httplib.
        The addinfourl return value is a file-like object.  It also
        has methods and attributes including:
            - info(): return a mimetools.Message object for the headers
            - geturl(): return the original request URL
            - code: HTTP status code
        """
        # If using a proxy, the host returned by get_host() is
        # actually the proxy. On Python 2.6.1, the real destination
        # hostname is encoded in the URI in the urllib2 request
        # object. On Python 2.6.5, it's stored in the _tunnel_host
        # attribute which has no accessor.
        tunhost = getattr(req, '_tunnel_host', None)
        host = urllibcompat.gethost(req)
        if tunhost:
            proxyhost = host
            host = tunhost
        elif req.has_proxy():
            proxyhost = urllibcompat.gethost(req)
            host = urllibcompat.getselector(
                req).split('://', 1)[1].split('/', 1)[0]
        else:
            proxyhost = None

        if proxyhost:
            if ':' in proxyhost:
                # Note: this means we'll explode if we try and use an
                # IPv6 http proxy. This isn't a regression, so we
                # won't worry about it for now.
                proxyhost, proxyport = proxyhost.rsplit(':', 1)
            else:
                proxyport = 3128 # squid default
            proxy = (proxyhost, proxyport)
        else:
            proxy = None

        if not host:
            raise urlerr.urlerror('no host given')

        connkey = use_ssl, host, proxy
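        # Illustrative note (not in the original source): connections are
        # pooled per (use_ssl, host, proxy) key, so e.g. plain-HTTP and
        # HTTPS connections to the same host never reuse each other's
        # sockets, and different proxies get separate pools.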
        allconns = self._connections.get(connkey, [])
        conns = [c for c in allconns if not c.busy()]
        if conns:
            h = conns[0]
        else:
            if allconns:
                self.ui.debug('all connections for %s busy, making a new '
                              'one\n' % host)
            timeout = None
            if req.timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                timeout = req.timeout
            h = http_class(host, timeout=timeout, proxy_hostport=proxy)
            self._connections.setdefault(connkey, []).append(h)

        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        headers = dict(
            (name.title(), val) for name, val in headers.items())
        try:
            path = urllibcompat.getselector(req)
            if '://' in path:
                path = path.split('://', 1)[1].split('/', 1)[1]
            if path[0] != '/':
                path = '/' + path
            h.request(req.get_method(), path, req.data, headers)
            r = h.getresponse()
        except socket.error as err: # XXX what error?
            raise urlerr.urlerror(err)

        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.
        r.recv = r.read

        resp = urlreq.addinfourl(r, r.headers, urllibcompat.getfullurl(req))
        resp.code = r.status
        resp.msg = r.reason
        return resp

    # httplib always uses the given host/port as the socket connect
    # target, and then allows full URIs in the request path, which it
    # then observes and treats as a signal to do proxying instead.
    def http_open(self, req):
        if urllibcompat.getfullurl(req).startswith('https'):
            return self.https_open(req)
        def makehttpcon(*args, **kwargs):
            k2 = dict(kwargs)
            k2['use_ssl'] = False
            return HTTPConnection(*args, **k2)
        return self.do_open(makehttpcon, req, False)

    def https_open(self, req):
        # urllibcompat.getfullurl(req) does not contain credentials and we may
        # need them to match the certificates.
        url = urllibcompat.getfullurl(req)
        user, password = self.pwmgr.find_stored_password(url)
        res = readauthforuri(self.ui, url, user)
        if res:
            group, auth = res
            self.auth = auth
            self.ui.debug("using auth.%s.* for authentication\n" % group)
        else:
            self.auth = None
        return self.do_open(self._makesslconnection, req, True)

    def _makesslconnection(self, host, port=443, *args, **kwargs):
        keyfile = None
        certfile = None

        if args: # key_file
            keyfile = args.pop(0)
        if args: # cert_file
            certfile = args.pop(0)

        # if the user has specified different key/cert files in
        # hgrc, we prefer these
        if self.auth and 'key' in self.auth and 'cert' in self.auth:
            keyfile = self.auth['key']
            certfile = self.auth['cert']

        # let host port take precedence
        if (':' in host and '[' not in host) or ']:' in host:
            host, port = host.rsplit(':', 1)
            port = int(port)
            if '[' in host:
                host = host[1:-1]
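        # Illustrative examples (not in the original source) of the parsing
        # above, assuming the caller passes the authority part verbatim:
        #     'example.com:8443' -> host 'example.com', port 8443
        #     '[::1]:8443'       -> host '::1', port 8443
        #     'example.com'      -> host unchanged, default port 443 kept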

        kwargs['keyfile'] = keyfile
        kwargs['certfile'] = certfile

        con = HTTPConnection(host, port, use_ssl=True,
                             ssl_wrap_socket=sslutil.wrapsocket,
                             ssl_validator=sslutil.validatesocket,
                             ui=self.ui,
                             **kwargs)
        return con