view tests/test-filelog.py @ 17126:8fa8717b47b6

obsolete: write obsolete markers inside a transaction

Markers are now written as soon as possible, but within a transaction. Using a transaction ensures proper behavior on error and keeps rollback compatible. The flush logic is no longer necessary and is dropped from lock release.

With this changeset, the obsstore is opened, written and closed for every single added marker. This is expected to be highly inefficient, and batched writes should be implemented "quickly". Another issue is that every flush of the file invalidates the obsstore filecache and triggers a full reinstantiation of the repo.obsstore attribute (including reading and parsing entries). This is also expected to be highly inefficient, and proper filecache handling should be implemented "quickly" too. A side benefit of the filecache issue is that the repo.obsstore object is properly invalidated on transaction abort.
author Pierre-Yves David <pierre-yves.david@ens-lyon.org>
date Wed, 04 Jul 2012 02:21:04 +0200
parents d76ba2090e0c
children 73e3e368bd42
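The changeset description above comes down to a small lock/transaction pattern. A minimal sketch of it follows; the repo.obsstore.create() call and its precursor/successor arguments are assumptions for illustration only, the point is the lock and transaction handling around the write:

    lock = repo.lock()                  # serialize with other writers
    try:
        tr = repo.transaction('add-marker')
        try:
            # hypothetical call: write one marker while the transaction is open
            repo.obsstore.create(tr, precursor, (successor,))
            tr.close()                  # commit: the marker write hits disk here
        finally:
            tr.release()                # aborts the transaction if close() was not reached
    finally:
        lock.release()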

#!/usr/bin/env python
"""
Tests the behaviour of filelog w.r.t. data starting with '\1\n'
"""
from mercurial import ui, hg
from mercurial.node import nullid, hex

myui = ui.ui()
repo = hg.repository(myui, path='.', create=True)

fl = repo.file('foobar')

def addrev(text, renamed=False):
    if renamed:
        # The data doesn't matter; just make sure filelog.renamed() returns True
        meta = dict(copyrev=hex(nullid), copy='bar')
    else:
        meta = {}

    lock = t = None
    try:
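        # filelog.add() requires an open transaction; take the repository
        # lock first so the write is serialized with other writers.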
        lock = repo.lock()
        t = repo.transaction('commit')
        node = fl.add(text, meta, t, 0, nullid, nullid)
        return node
    finally:
        if t:
            t.close()
        if lock:
            lock.release()

def error(text):
    print 'ERROR: ' + text

textwith = '\1\nfoo'
without = 'foo'
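
# textwith starts with the metadata delimiter and must survive a round trip
# unchanged; without is a control value that should always compare as
# different from the stored revision.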

node = addrev(textwith)
if fl.read(node) != textwith:
    error('filelog.read for data starting with \\1\\n')
if fl.cmp(node, textwith) or not fl.cmp(node, without):
    error('filelog.cmp for data starting with \\1\\n')
if fl.size(0) != len(textwith):
    error('FIXME: This is a known failure of filelog.size for data starting '
        'with \\1\\n')

node = addrev(textwith, renamed=True)
if fl.read(node) != textwith:
    error('filelog.read for a renaming + data starting with \\1\\n')
if fl.cmp(node, textwith) or not fl.cmp(node, without):
    error('filelog.cmp for a renaming + data starting with \\1\\n')
if fl.size(1) != len(textwith):
    error('filelog.size for a renaming + data starting with \\1\\n')

print 'OK.'