view tests/basic_test_result.py @ 41722:37b33c34bf4f

templatekw: add a {negrev} keyword

Revision numbers are getting much maligned for two reasons: they are too long in large repos and users get confused by their local-only nature. It just occurred to me that negative revision numbers avoid both of those problems.

Since negative revision numbers change whenever the repo changes, it's much more obvious that they are a local-only convenience. Additionally, for the recent commits that we usually care about the most, negative revision numbers are always near zero.

This commit adds a negrev templatekw to more easily expose negative revision numbers. It's not easy to reliably produce this output with existing keywords due to hidden commits, while at the same time ensuring good performance.
author Jordi Gutiérrez Hermoso <jordigh@octave.org>
date Fri, 15 Feb 2019 14:43:31 -0500
parents f4a214300957
children 2372284d9457
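
The {negrev} keyword itself lives in Mercurial's templatekw module, not in this test file. As a minimal sketch of the idea described in the changeset message above (an illustration only, not the changeset's actual code), the negative revision number is simply the revision's offset from the end of the repository, so the newest commit is always -1:

def negrev(repo, rev):
    # Sketch only: assumes a Mercurial-style repo object where len(repo)
    # is the total number of revisions. Counting back from the end keeps
    # recent commits near zero, and the value shifts whenever the
    # repository grows, underlining that it is a local-only convenience.
    return rev - len(repo)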

from __future__ import absolute_import, print_function

import unittest

class TestResult(unittest._TextTestResult):

    def __init__(self, options, *args, **kwargs):
        super(TestResult, self).__init__(*args, **kwargs)
        self._options = options

        # unittest.TestResult didn't have skipped until 2.7. We need to
        # polyfill it.
        self.skipped = []

        # We have a custom "ignored" result that isn't present in any Python
        # unittest implementation. It is very similar to skipped. It may make
        # sense to map it into skip some day.
        self.ignored = []

        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}

    def addFailure(self, test, reason):
        print("FAILURE!", test, reason)

    def addSuccess(self, test):
        print("SUCCESS!", test)

    def addError(self, test, err):
        print("ERR!", test, err)

    # Polyfill.
    def addSkip(self, test, reason):
        print("SKIP!", test, reason)

    def addIgnore(self, test, reason):
        print("IGNORE!", test, reason)

    def onStart(self, test):
        # Called by Mercurial's test runner (run-tests.py) before the run
        # begins; not part of the stock unittest result API.
        print("ON_START!", test)

    def onEnd(self):
        # Called by Mercurial's test runner once the whole run has finished.
        print("ON_END!")

    def addOutputMismatch(self, test, ret, got, expected):
        # Called by Mercurial's test runner when a test's output differs from
        # the expected output; returning False means the new output is not
        # accepted.
        return False

    def stopTest(self, test, interrupted=False):
        super(TestResult, self).stopTest(test)
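
For illustration only, the class above can be driven with stock unittest. The harness below is an assumption for demonstration, not how Mercurial's run-tests.py actually wires the result in (and run-tests.py is also what calls the extra hooks such as onStart, onEnd, addIgnore and addOutputMismatch):

# Hypothetical demo harness, not part of basic_test_result.py.
import sys

class _DemoTest(unittest.TestCase):
    def test_pass(self):
        self.assertTrue(True)

if __name__ == '__main__':
    # options=None stands in for the options object the real runner passes;
    # the remaining arguments go to the unittest text result base class.
    result = TestResult(None, sys.stdout, descriptions=True, verbosity=0)
    suite = unittest.TestSuite([_DemoTest('test_pass')])
    suite.run(result)  # prints "SUCCESS! ..." via addSuccess()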