view tests/test-filecache.py @ 22196:23fe278bde43

largefiles: keep largefiles from colliding with normal one during linear merge

Before this patch, linear merging of a modified or newly added largefile
causes an unexpected result, if (1) the largefile collides with a normal
file of the same name in the target revision and (2) the "local" largefile
is chosen, even though branch merging between such revisions doesn't.

The expected result of such a linear merge is:

  (1) the (not yet recorded) largefile is kept in the working directory
  (2) the largefile is marked as (re-)"added"
  (3) the colliding normal file is marked as "removed"

But the actual result is:

  (1) the largefile in the working directory is unlinked
  (2) the largefile is marked as "normal" (and so treated as "missing")
  (3) the dirstate entry for the colliding normal file is just dropped

(1) is very serious, because there is no way to restore temporarily modified
largefiles. (3) prevents the next commit from adding the manifest with the
correct "removal of (normal) file" information for the newly created
changeset.

The root cause of this problem is putting "lfile" into "actions['r']" in the
linear-merge case. During linear merging, "actions['r']" causes:

  - unlinking the "target file" in the working directory, but in this case
    "lfile" as the "target file" is also the largefile itself
  - dropping the dirstate entry for the target file

"actions['f']" (= "forget") does only the latter, which is why this patch
doesn't put "lfile" into it instead of "actions['r']".

This patch newly introduces the action "lfmr" (LargeFiles: Mark as Removed)
to mark the colliding normal file as "removed" without unlinking it. (A toy
sketch contrasting "r", "f" and "lfmr" follows the changeset metadata below.)

This patch uses "hg debugdirstate" instead of "hg status" in the test,
because:

  - choosing the "local largefile" hides the "removed" status of the
    "remote normal file" in the "hg status" output, and
  - "hg status" for "large2" in this case has another problem, fixed in the
    subsequent patch
author FUJIWARA Katsunori <foozy@lares.dti.ne.jp>
date Fri, 15 Aug 2014 20:28:51 +0900
parents b3684fd2ff1a
children ce26928cbe41
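The difference between the three actions named above can be sketched with a
toy dirstate. This is only a minimal illustration under stated assumptions:
the helper applyaction, the plain dict standing in for the dirstate, and the
hard-coded handling of the action strings are invented for the example and
are not Mercurial's actual merge or largefiles code.

import os

def applyaction(action, fname, dirstate):
    if action == 'r':
        # remove: unlink the target file and drop its dirstate entry
        if os.path.exists(fname):
            os.unlink(fname)
        dirstate.pop(fname, None)
    elif action == 'f':
        # forget: drop the dirstate entry but leave the file on disk
        dirstate.pop(fname, None)
    elif action == 'lfmr':
        # mark as removed: keep the file on disk, but record the entry
        # as "removed" so the next commit records the removal
        dirstate[fname] = 'r'

# the colliding file exists in the working directory and is tracked
f = open('large2', 'w')
f.write('modified largefile content')
f.close()
dirstate = {'large2': 'n'}

applyaction('lfmr', 'large2', dirstate)
assert os.path.exists('large2')    # the working copy is preserved
assert dirstate['large2'] == 'r'   # but the entry is marked "removed"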

import sys, os, subprocess

if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
                    'cacheable']):
    sys.exit(80)

from mercurial import util, scmutil, extensions, hg, ui

filecache = scmutil.filecache

class fakerepo(object):
    def __init__(self):
        self._filecache = {}

    def join(self, p):
        return p

    def sjoin(self, p):
        return p

    @filecache('x', 'y')
    def cached(self):
        print 'creating'
        return 'string from function'

    def invalidate(self):
        for k in self._filecache:
            try:
                delattr(self, k)
            except AttributeError:
                pass

def basic(repo):
    print "* neither file exists"
    # calls function
    repo.cached

    repo.invalidate()
    print "* neither file still exists"
    # uses cache
    repo.cached

    # create empty file
    f = open('x', 'w')
    f.close()
    repo.invalidate()
    print "* empty file x created"
    # should recreate the object
    repo.cached

    f = open('x', 'w')
    f.write('a')
    f.close()
    repo.invalidate()
    print "* file x changed size"
    # should recreate the object
    repo.cached

    repo.invalidate()
    print "* nothing changed with either file"
    # stats file again, reuses object
    repo.cached

    # atomically replace the file; the size doesn't change
    # hopefully st_mtime doesn't change either, so the only thing that should
    # invalidate the cache here is the inode change
    f = scmutil.opener('.')('x', 'w', atomictemp=True)
    f.write('b')
    f.close()

    repo.invalidate()
    print "* file x changed inode"
    repo.cached

    # create empty file y
    f = open('y', 'w')
    f.close()
    repo.invalidate()
    print "* empty file y created"
    # should recreate the object
    repo.cached

    f = open('y', 'w')
    f.write('A')
    f.close()
    repo.invalidate()
    print "* file y changed size"
    # should recreate the object
    repo.cached

    f = scmutil.opener('.')('y', 'w', atomictemp=True)
    f.write('B')
    f.close()

    repo.invalidate()
    print "* file y changed inode"
    repo.cached

    f = scmutil.opener('.')('x', 'w', atomictemp=True)
    f.write('c')
    f.close()
    f = scmutil.opener('.')('y', 'w', atomictemp=True)
    f.write('C')
    f.close()

    repo.invalidate()
    print "* both files changed inode"
    repo.cached

def fakeuncacheable():
    # pretend stat data can never be cached, so every access has to
    # recreate the cached object
    def wrapcacheable(orig, *args, **kwargs):
        return False

    def wrapinit(orig, *args, **kwargs):
        pass

    originit = extensions.wrapfunction(util.cachestat, '__init__', wrapinit)
    origcacheable = extensions.wrapfunction(util.cachestat, 'cacheable',
                                            wrapcacheable)

    for fn in ['x', 'y']:
        try:
            os.remove(fn)
        except OSError:
            pass

    basic(fakerepo())

    util.cachestat.cacheable = origcacheable
    util.cachestat.__init__ = originit

def test_filecache_synced():
    # test old behaviour that caused filecached properties to go out of sync
    os.system('hg init && echo a >> a && hg ci -qAm.')
    repo = hg.repository(ui.ui())
    # first rollback clears the filecache, but the changelog stays in __dict__
    repo.rollback()
    repo.commit('.')
    # second rollback comes along and touches the changelog externally
    # (file is moved)
    repo.rollback()
    # but since changelog isn't under the filecache control anymore, we don't
    # see that it changed, and return the old changelog without reconstructing
    # it
    repo.commit('.')

def setbeforeget(repo):
    # a value assigned directly to the cached property should stick until
    # one of the tracked files changes
    os.remove('x')
    os.remove('y')
    repo.cached = 'string set externally'
    repo.invalidate()
    print "* neither file exists"
    print repo.cached
    repo.invalidate()
    f = open('x', 'w')
    f.write('a')
    f.close()
    print "* file x created"
    print repo.cached

    repo.cached = 'string 2 set externally'
    repo.invalidate()
    print "* string set externally again"
    print repo.cached

    repo.invalidate()
    f = open('y', 'w')
    f.write('b')
    f.close()
    print "* file y created"
    print repo.cached

print 'basic:'
print
basic(fakerepo())
print
print 'fakeuncacheable:'
print
fakeuncacheable()
test_filecache_synced()
print
print 'setbeforeget:'
print
setbeforeget(fakerepo())