tests/basic_test_result.py @ 42194:0da689a60163

fix: allow fixer tools to return metadata in addition to the file content

With this change, fixer tools can be configured to output a JSON object that
will be parsed and passed to hooks that can be used to print summaries of
what code was formatted or perform other post-fixing work.

The motivation for this change is to allow parallel executions of a
"meta-formatter" tool to report back statistics, which are then aggregated
and processed after all formatting has completed. Providing an extensible
mechanism inside fix.py is far simpler, and more portable, than trying to
make a tool like this communicate through some other channel.

Differential Revision: https://phab.mercurial-scm.org/D6167
author Danny Hooper <hooper@google.com>
date Thu, 21 Mar 2019 18:32:45 -0700
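As a rough sketch of the mechanism described above, a fixer tool taking part
in such a scheme might emit a JSON metadata object ahead of the fixed file
content. Everything concrete below (the NUL separator, the metadata keys,
the trivial formatting pass) is an assumption made for illustration rather
than the actual fix.py protocol; see D6167 for the real interface.

    #!/usr/bin/env python
    # Hypothetical fixer tool: reads a file's content on stdin and writes a
    # JSON metadata object, an assumed NUL separator, and the fixed content.
    from __future__ import absolute_import, print_function

    import json
    import sys

    def main():
        source = sys.stdin.read()
        # Trivial "formatting" pass: end the file with exactly one newline.
        fixed = source.rstrip('\n') + '\n'
        metadata = {
            'changed': fixed != source,   # assumed key for summary hooks
            'linecount': fixed.count('\n'),
        }
        sys.stdout.write(json.dumps(metadata))
        sys.stdout.write('\0')            # assumed metadata/content separator
        sys.stdout.write(fixed)

    if __name__ == '__main__':
        main()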

from __future__ import absolute_import, print_function

import unittest

class TestResult(unittest._TextTestResult):

    def __init__(self, options, *args, **kwargs):
        super(TestResult, self).__init__(*args, **kwargs)
        self._options = options

        # unittest.TestResult didn't have skipped until 2.7. We need to
        # polyfill it.
        self.skipped = []

        # We have a custom "ignored" result that isn't present in any Python
        # unittest implementation. It is very similar to skipped. It may make
        # sense to map it into skip some day.
        self.ignored = []

        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}

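    # The callbacks below do not delegate to the superclass; they only print
    # distinctive markers, presumably so that a test driving the harness can
    # assert on which callbacks fired and in what order.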
    def addFailure(self, test, reason):
        print("FAILURE!", test, reason)

    def addSuccess(self, test):
        print("SUCCESS!", test)

    def addError(self, test, err):
        print("ERR!", test, err)

    # Polyfill: addSkip() does not exist on unittest.TestResult before 2.7.
    def addSkip(self, test, reason):
        print("SKIP!", test, reason)

    def addIgnore(self, test, reason):
        print("IGNORE!", test, reason)

    def onStart(self, test):
        print("ON_START!", test)

    def onEnd(self):
        print("ON_END!")

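    # Called when a test's actual output differs from what was expected.
    # Returning False signals that the mismatch was not handled here; the
    # precise contract is defined by the harness's own TestResult.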
    def addOutputMismatch(self, test, ret, got, expected):
        return False

    def stopTest(self, test, interrupted=False):
        # `interrupted` is accepted for compatibility with the harness's
        # callers, but this basic implementation does not use it.
        super(TestResult, self).stopTest(test)
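
# A minimal stand-alone demonstration, not part of the Mercurial test
# harness (which constructs and drives this class itself): run one trivial
# test case through TestResult. unittest._TextTestResult expects (stream,
# descriptions, verbosity) after the extra `options` argument added here.
if __name__ == '__main__':
    import sys

    class _DemoTest(unittest.TestCase):
        def test_pass(self):
            pass

    result = TestResult(None, sys.stdout, True, 0)
    result.onStart(None)  # the harness normally makes these calls itself
    unittest.defaultTestLoader.loadTestsFromTestCase(_DemoTest).run(result)
    result.onEnd()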