view tests/dumbhttp.py @ 39474:a913d2892e17

lfs: ensure the blob is linked to the remote store on skipped uploads

I noticed a "missing" blob when pushing two repositories with common blobs to
a fresh server, and then running `hg verify` as a user different from the one
running the web server. When pushing the second repo, several of the blobs
already existed in the user cache, so the server indicated to the client that
it didn't need to upload them. That's good enough for the web server process
to serve up in the future. But a different user has a different cache by
default, so verify complains that `lfs.url` needs to be set, because it wants
to fetch the missing blobs.

Aside from that corner case, it's better to keep all of the blobs in the repo
whenever possible, especially since the largefiles wiki says the user cache
can be deleted at any time to reclaim disk space; users switching over may
have the same expectations.
author Matt Harbison <matt_harbison@yahoo.com>
date Thu, 06 Sep 2018 00:51:21 -0400
parents e46c3b6a47b5
children 2372284d9457
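
The scenario described above can be pictured with a short, hypothetical sketch.
The helper name and directory layout here are illustrative only and are not the
lfs extension's actual API; the idea is simply that when the server says an
upload can be skipped, the client still hard-links the blob from the per-user
cache into the repository's own store, so the blob no longer depends on any one
user's cache:

    import errno
    import os

    def ensure_blob_in_repo_store(usercache, repostore, oid):
        # Hypothetical helper, not lfs extension code: if the blob exists
        # only in the per-user cache, hard-link it into the repository
        # store so it survives cache deletion and is readable by other
        # users (e.g. the one running `hg verify`).
        src = os.path.join(usercache, oid)
        dst = os.path.join(repostore, oid)
        if os.path.exists(src) and not os.path.exists(dst):
            try:
                os.makedirs(os.path.dirname(dst))
            except OSError as inst:
                if inst.errno != errno.EEXIST:
                    raise
            os.link(src, dst)  # hard link, so no extra disk space is used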

#!/usr/bin/env python

"""
Small and dumb HTTP server for use in tests.
"""

from __future__ import absolute_import

import optparse
import os
import signal
import socket
import sys

from mercurial import (
    encoding,
    pycompat,
    server,
    util,
)

httpserver = util.httpserver
OptionParser = optparse.OptionParser

# The test runner sets HGIPV6=1 when the host resolves to an IPv6 address;
# bind with AF_INET6 in that case.
if os.environ.get('HGIPV6', '0') == '1':
    class simplehttpserver(httpserver.httpserver):
        address_family = socket.AF_INET6
else:
    simplehttpserver = httpserver.httpserver

class _httprequesthandler(httpserver.simplehttprequesthandler):
    def log_message(self, format, *args):
        httpserver.simplehttprequesthandler.log_message(self, format, *args)
        # Flush after every request so the access log is complete even if
        # the process is killed by the test.
        sys.stderr.flush()

class simplehttpservice(object):
    def __init__(self, host, port):
        self.address = (host, port)
    def init(self):
        self.httpd = simplehttpserver(self.address, _httprequesthandler)
    def run(self):
        self.httpd.serve_forever()

if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option('-p', '--port', dest='port', type='int', default=8000,
        help='TCP port to listen on', metavar='PORT')
    parser.add_option('-H', '--host', dest='host', default='localhost',
        help='hostname or IP to listen on', metavar='HOST')
    parser.add_option('--logfile', help='file name of access/error log')
    parser.add_option('--pid', dest='pid',
        help='file name where the PID of the server is stored')
    parser.add_option('-f', '--foreground', dest='foreground',
        action='store_true',
        help='do not start the HTTP server in the background')
    parser.add_option('--daemon-postexec', action='append')

    (options, args) = parser.parse_args()

    # Exit cleanly on SIGTERM so the test harness can stop the daemonized
    # server.
    signal.signal(signal.SIGTERM, lambda x, y: sys.exit(0))

    if options.foreground and options.logfile:
        parser.error("options --logfile and --foreground are mutually "
                     "exclusive")
    if options.foreground and options.pid:
        parser.error("options --pid and --foreground are mutually exclusive")

    # Mercurial's internal APIs expect bytes, hence the b'' keys and the
    # strtolocal conversion of the postexec commands.
    opts = {b'pid_file': options.pid,
            b'daemon': not options.foreground,
            b'daemon_postexec': pycompat.rapply(encoding.strtolocal,
                                                options.daemon_postexec)}
    service = simplehttpservice(options.host, options.port)
    # runservice re-executes this script (runargs) when daemonizing, and
    # writes the pid file if one was requested.
    runargs = [sys.executable, __file__] + sys.argv[1:]
    runargs = [pycompat.fsencode(a) for a in runargs]
    server.runservice(opts, initfn=service.init, runfn=service.run,
                      logfile=options.logfile,
                      runargs=runargs)
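
For context, tests typically start this helper in the background and record its
pid for cleanup; the exact invocation varies by test, but it is along these
lines (using the test harness's $PYTHON, $TESTDIR, $HGPORT and $DAEMON_PIDS
variables):

    $ "$PYTHON" "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
    $ cat dumb.pid >> $DAEMON_PIDS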