
"""
lsprofcalltree.py - lsprof output in a format readable by KCachegrind

Authors:
    * David Allouche <david <at> allouche.net>
    * Jp Calderone & Itamar Shtull-Trauring
    * Johan Dahlin

This software may be used and distributed according to the terms
of the GNU General Public License, incorporated herein by reference.
"""

def label(code):
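    # lsprof reports built-in functions as a plain string rather than a code
    # object; real functions become a "name file:line" label.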
    if isinstance(code, str):
        return '~' + code    # built-in functions ('~' sorts at the end)
    else:
        return '%s %s:%d' % (code.co_name,
                             code.co_filename,
                             code.co_firstlineno)

class KCacheGrind(object):
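    """Convert lsprof profile statistics into KCachegrind's callgrind format."""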
    def __init__(self, profiler):
        self.data = profiler.getstats()
        self.out_file = None

    def output(self, out_file):
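        """Write the collected profile to out_file in callgrind format."""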
        self.out_file = out_file
        print >> out_file, 'events: Ticks'
        self._print_summary()
        for entry in self.data:
            self._entry(entry)

    def _print_summary(self):
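        # The 'summary:' header advertises the largest total cost seen, in the
        # same integer "Ticks" (milliseconds) used for every cost line below.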
        max_cost = 0
        for entry in self.data:
            totaltime = int(entry.totaltime * 1000)
            max_cost = max(max_cost, totaltime)
        print >> self.out_file, 'summary: %d' % (max_cost,)

    def _entry(self, entry):
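        """Emit one cost block: the entry's file, label, self cost and calls."""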
        out_file = self.out_file

        code = entry.code
        #print >> out_file, 'ob=%s' % (code.co_filename,)
        if isinstance(code, str):
            print >> out_file, 'fi=~'
        else:
            print >> out_file, 'fi=%s' % (code.co_filename,)
        print >> out_file, 'fn=%s' % (label(code),)

        inlinetime = int(entry.inlinetime * 1000)
        if isinstance(code, str):
            print >> out_file, '0 %d' % (inlinetime,)
        else:
            print >> out_file, '%d %d' % (code.co_firstlineno, inlinetime)

        # recursive calls are counted in entry.calls
        if entry.calls:
            calls = entry.calls
        else:
            calls = []

        if isinstance(code, str):
            lineno = 0
        else:
            lineno = code.co_firstlineno

        for subentry in calls:
            self._subentry(lineno, subentry)
        print >> out_file

    def _subentry(self, lineno, subentry):
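        # Emit one outgoing call: callee label and file, call count, and the
        # cumulative cost attributed to the caller's source line.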
        out_file = self.out_file
        code = subentry.code
        #print >> out_file, 'cob=%s' % (code.co_filename,)
        print >> out_file, 'cfn=%s' % (label(code),)
        if isinstance(code, str):
            print >> out_file, 'cfi=~'
            print >> out_file, 'calls=%d 0' % (subentry.callcount,)
        else:
            print >> out_file, 'cfi=%s' % (code.co_filename,)
            print >> out_file, 'calls=%d %d' % (
                subentry.callcount, code.co_firstlineno)

        totaltime = int(subentry.totaltime * 1000)
        print >> out_file, '%d %d' % (lineno, totaltime)
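
# Usage sketch (not part of the original module): a hedged example of feeding
# this class from the standard library profiler.  It assumes a
# cProfile.Profile object, whose getstats() entries KCacheGrind consumes;
# 'workload' and the output file name are placeholders.
if __name__ == '__main__':
    import cProfile

    def workload():
        return sum(i * i for i in xrange(10000))

    profiler = cProfile.Profile()
    profiler.runcall(workload)

    with open('callgrind.out.example', 'w') as fp:
        KCacheGrind(profiler).output(fp)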