view mercurial/lsprofcalltree.py @ 21990:48e32c2c499b stable

context: call normal on the right object

dirstate.normal is the method that marks files as unchanged/normal. Rev
20a30cd41d21 started caching dirstate.normal in order to improve performance.
However, there was an error in the patch: taking the wlock, under some
conditions depending on platform, can cause a new dirstate object to be
created. Caching dirstate.normal before calling wlock would then cause the
fixup calls below to be on the old dirstate object, effectively disappearing
into the ether.

On Unix and Unix-like OSes, the condition under which we create a new dirstate
object is 'the dirstate file has been modified since the last time we opened
it'. This happens pretty rarely, so the object is usually the same -- there's
little impact.

On Windows, the condition is 'always'. This means files in the lookup state
are never marked normal, so the bug has a serious performance impact since all
the files in the lookup state are re-read every time hg status is run.
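A minimal sketch of the pitfall described above (illustrative only; the names
follow the real Mercurial API, but this is not the actual patched code):

    normal = repo.dirstate.normal   # bound method on the *current* dirstate
    wlock = repo.wlock()            # on Windows this re-creates repo.dirstate
    try:
        for f in fixup:
            normal(f)               # marks files on the old, discarded dirstate
    finally:
        wlock.release()

The fix is to acquire the wlock first and only then look up dirstate.normal,
so the fixup calls land on the dirstate object that will actually be written
out.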
author Siddharth Agarwal <sid0@fb.com>
date Fri, 01 Aug 2014 18:30:18 -0700
parents beae42f3d93b
children 071af8d385a9
line source

"""
lsprofcalltree.py - lsprof output which is readable by kcachegrind

Authors:
    * David Allouche <david <at> allouche.net>
    * Jp Calderone & Itamar Shtull-Trauring
    * Johan Dahlin

This software may be used and distributed according to the terms
of the GNU General Public License, incorporated herein by reference.
"""

def label(code):
    if isinstance(code, str):
        return '~' + code    # built-in functions ('~' sorts at the end)
    else:
        return '%s %s:%d' % (code.co_name,
                             code.co_filename,
                             code.co_firstlineno)

class KCacheGrind(object):
    def __init__(self, profiler):
        self.data = profiler.getstats()
        self.out_file = None

    def output(self, out_file):
        self.out_file = out_file
        print >> out_file, 'events: Ticks'
        self._print_summary()
        for entry in self.data:
            self._entry(entry)

    def _print_summary(self):
        max_cost = 0
        for entry in self.data:
            totaltime = int(entry.totaltime * 1000)
            max_cost = max(max_cost, totaltime)
        print >> self.out_file, 'summary: %d' % (max_cost,)

    def _entry(self, entry):
        out_file = self.out_file

        code = entry.code
        #print >> out_file, 'ob=%s' % (code.co_filename,)
        if isinstance(code, str):
            print >> out_file, 'fi=~'
        else:
            print >> out_file, 'fi=%s' % (code.co_filename,)
        print >> out_file, 'fn=%s' % (label(code),)

        inlinetime = int(entry.inlinetime * 1000)
        if isinstance(code, str):
            print >> out_file, '0 ', inlinetime
        else:
            print >> out_file, '%d %d' % (code.co_firstlineno, inlinetime)

        # recursive calls are counted in entry.calls
        if entry.calls:
            calls = entry.calls
        else:
            calls = []

        if isinstance(code, str):
            lineno = 0
        else:
            lineno = code.co_firstlineno

        for subentry in calls:
            self._subentry(lineno, subentry)
        print >> out_file

    def _subentry(self, lineno, subentry):
        out_file = self.out_file
        code = subentry.code
        #print >> out_file, 'cob=%s' % (code.co_filename,)
        print >> out_file, 'cfn=%s' % (label(code),)
        if isinstance(code, str):
            print >> out_file, 'cfi=~'
            print >> out_file, 'calls=%d 0' % (subentry.callcount,)
        else:
            print >> out_file, 'cfi=%s' % (code.co_filename,)
            print >> out_file, 'calls=%d %d' % (
                subentry.callcount, code.co_firstlineno)

        totaltime = int(subentry.totaltime * 1000)
        print >> out_file, '%d %d' % (lineno, totaltime)
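
# For reference, the emitted callgrind data looks roughly like this
# (illustrative file names and tick values, not real measurements):
#
#     events: Ticks
#     summary: 1542
#
#     fi=mercurial/commands.py
#     fn=status mercurial/commands.py:1234
#     1234 12
#     cfn=~<built-in function listdir>
#     cfi=~
#     calls=3 0
#     1234 40
#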