view tests/test-filelog.py @ 35767:5f5fb279fd39

streamclone: also stream caches to the client

When stream clone is used over bundle2, relevant cache files are also
streamed. This is expected to be a massive performance win for clones,
since no important cache has to be recomputed.

Some performance numbers (all times are wall-clock seconds, 2 attempts
per case):

# Mozilla-Central

## Clone over SSH over LAN

V1 streaming: 234.3 239.6
V2 streaming: 248.4 243.7

## Clone over SSH over internet

V1 streaming: 175.5 110.9
V2 streaming: 109.1 111.0

## Clone over HTTP over LAN

V1 streaming: 105.3 105.6
V2 streaming: 112.7 111.4

## Clone over HTTP over internet

V1 streaming: 105.6 114.6
V2 streaming: 226.7 225.9

## hg tags

V1 streaming (no cache): 1.084 1.071
V2 streaming (with cache): 0.312 0.325

## hg branches

V1 streaming (no cache): 14.047 14.148
V2 streaming (with cache): 0.312 0.333

# Pypy

## Clone over SSH over internet

V1 streaming: 29.4 30.1
V2 streaming: 31.2 30.1

## Clone over HTTP over internet

V1 streaming: 29.7 29.7
V2 streaming: 75.2 72.9

(Since SSH and LAN clones are not affected, there seems to be an issue
with how we read/write the HTTP stream on connections with latency,
unrelated to the format itself.)

## hg tags

V1 streaming (no cache): 1.752 1.664
V2 streaming (with cache): 0.274 0.260

## hg branches

V1 streaming (no cache): 4.469 4.728
V2 streaming (with cache): 0.318 0.321

# Private repository

* 500K revisions
* 11K topological heads
* 28K branch heads

## hg tags

no cache: 1543.332
with cache: 4.900

## hg branches

no cache: 91.828
with cache: 2.955
author Boris Feld <boris.feld@octobus.net>
date Thu, 18 Jan 2018 00:50:12 +0100
parents d83ca854fa21
children f71c97d9b97b
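The changeset description above says that a bundle2 stream clone (the "V2
streaming" case) ships the relevant .hg/cache files to the client along with
the store, which is why the cached hg tags/hg branches timings drop so
sharply. The snippet below is only a minimal illustrative sketch of that
idea, assuming every regular file under .hg/cache were a candidate; the name
listcachefiles is hypothetical and this is not the selection logic Mercurial
itself uses.

import os

def listcachefiles(repo_root):
    # Illustrative sketch only: enumerate (name, size) pairs under .hg/cache
    # that a cache-aware stream clone could copy to the client, sparing it an
    # expensive recomputation of e.g. the tags and branches caches.
    cachedir = os.path.join(repo_root, '.hg', 'cache')
    if not os.path.isdir(cachedir):
        return []
    entries = []
    for name in sorted(os.listdir(cachedir)):
        path = os.path.join(cachedir, name)
        if os.path.isfile(path):
            entries.append((name, os.path.getsize(path)))
    return entries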

#!/usr/bin/env python
"""
Tests the behavior of filelog w.r.t. data starting with '\1\n'
"""
from __future__ import absolute_import, print_function

from mercurial.node import (
    hex,
    nullid,
)
from mercurial import (
    hg,
    ui as uimod,
)

myui = uimod.ui.load()
repo = hg.repository(myui, path='.', create=True)

fl = repo.file('foobar')
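# 'fl' is the filelog for 'foobar': the per-file revlog that stores each
# revision of that file's data, plus optional copy metadata.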

def addrev(text, renamed=False):
    if renamed:
        # data doesn't matter. Just make sure filelog.renamed() returns True
        meta = {'copyrev': hex(nullid), 'copy': 'bar'}
    else:
        meta = {}

    lock = t = None
    try:
        lock = repo.lock()
        t = repo.transaction('commit')
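        # filelog.add(text, meta, transaction, linkrev, p1, p2) stores a new
        # file revision.  The linkrev is hardwired to 0 and both parents to
        # nullid; this script never writes changelog entries, so those values
        # are not inspected by the checks below.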
        node = fl.add(text, meta, t, 0, nullid, nullid)
        return node
    finally:
        if t:
            t.close()
        if lock:
            lock.release()

def error(text):
    print('ERROR: ' + text)

textwith = '\1\nfoo'
without = 'foo'
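# 'textwith' starts with the same "\1\n" marker that filelog uses to flag
# metadata; 'without' is the same content minus the marker and serves as the
# "different data" input for the cmp() checks.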

node = addrev(textwith)
if not textwith == fl.read(node):
    error('filelog.read for data starting with \\1\\n')
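# cmp() returns True when the stored revision differs from the supplied text,
# so it must be False for 'textwith' and True for 'without'.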
if fl.cmp(node, textwith) or not fl.cmp(node, without):
    error('filelog.cmp for data starting with \\1\\n')
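# Known limitation: for a revision without rename metadata, filelog.size()
# falls back to the raw revlog length, which still includes the escaping
# "\1\n\1\n" prefix, so it reports len(textwith) + 4 here.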
if fl.size(0) != len(textwith):
    error('FIXME: This is a known failure of filelog.size for data starting '
          'with \\1\\n')
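# Same checks with rename metadata attached: filelog then stores a real
# metadata block, and size() takes the rename path through read(), so the
# size check below is expected to pass.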

node = addrev(textwith, renamed=True)
if not textwith == fl.read(node):
    error('filelog.read for a renaming + data starting with \\1\\n')
if fl.cmp(node, textwith) or not fl.cmp(node, without):
    error('filelog.cmp for a renaming + data starting with \\1\\n')
if fl.size(1) != len(textwith):
    error('filelog.size for a renaming + data starting with \\1\\n')

print('OK.')