changeset 22044:a06172e85fd4

run-tests: add support for xunit test reports

The Jenkins CI system understands xunit reports natively, so this will be helpful for anyone who wants to use Jenkins for testing hg, or for testing extensions that use run-tests.py for their testing.
author Augie Fackler <raf@durin42.com>
date Tue, 05 Aug 2014 21:17:11 -0400
parents 1274ff3f20a8
children 769198c6a62d
files tests/run-tests.py tests/test-run-tests.t
diffstat 2 files changed, 103 insertions(+), 3 deletions(-)
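For anyone wanting to try the new flag, an invocation looks roughly like this (the output path and test name are illustrative, not from the patch; a Jenkins job would then publish the file via its JUnit report publisher, whose configuration is not shown here):

    $ cd tests
    $ ./run-tests.py --xunit=/tmp/results.xml test-foo.t

The full behaviour is exercised in tests/test-run-tests.t below.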
--- a/tests/run-tests.py	Wed Aug 06 02:45:55 2014 -0500
+++ b/tests/run-tests.py	Tue Aug 05 21:17:11 2014 -0400
@@ -57,6 +57,7 @@
 import threading
 import killdaemons as killmod
 import Queue as queue
+from xml.dom import minidom
 import unittest
 
 processlock = threading.Lock()
@@ -190,6 +191,8 @@
              " (implies --keep-tmpdir)")
     parser.add_option("-v", "--verbose", action="store_true",
         help="output verbose messages")
+    parser.add_option("--xunit", type="string",
+                      help="record xunit results at specified path")
     parser.add_option("--view", type="string",
         help="external diff viewer")
     parser.add_option("--with-hg", type="string",
@@ -304,6 +307,20 @@
 
     return log(*msg)
 
+# Bytes that break XML even in a CDATA block: control characters 0-31
+# sans \t, \n and \r
+CDATA_EVIL = re.compile(r"[\000-\010\013\014\016-\037]")
+
+def cdatasafe(data):
+    """Make a string safe to include in a CDATA block.
+
+    Certain control characters are illegal in a CDATA block, and
+    there's no way to include a ]]> in a CDATA either. This function
+    replaces illegal bytes with ? and adds a space between the ]] so
+    that it won't break the CDATA block.
+    """
+    return CDATA_EVIL.sub('?', data).replace(']]>', '] ]>')
+
 def log(*msg):
     """Log something to stdout.
 
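To make the escaping concrete, here is what cdatasafe does with a couple of hypothetical inputs (a doctest-style illustration, not part of the patch):

    >>> cdatasafe('foo\x00bar')             # NUL is illegal even inside CDATA
    'foo?bar'
    >>> cdatasafe('output has ]]> in it')   # ']]>' would end the CDATA block early
    'output has ] ]> in it'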
@@ -1085,6 +1102,9 @@
         self.times = []
         self._started = {}
         self._stopped = {}
+        # Data stored for the benefit of generating xunit reports.
+        self.successes = []
+        self.faildata = {}
 
     def addFailure(self, test, reason):
         self.failures.append((test, reason))
@@ -1099,9 +1119,12 @@
             self.stream.write('!')
             iolock.release()
 
-    def addError(self, *args, **kwargs):
-        super(TestResult, self).addError(*args, **kwargs)
+    def addSuccess(self, test):
+        super(TestResult, self).addSuccess(test)
+        self.successes.append(test)
 
+    def addError(self, test, err):
+        super(TestResult, self).addError(test, err)
         if self._options.first:
             self.stop()
 
@@ -1141,6 +1164,8 @@
         """Record a mismatch in test output for a particular test."""
 
         accepted = False
+        failed = False
+        lines = []
 
         iolock.acquire()
         if self._options.nodiff:
@@ -1169,7 +1194,8 @@
                     else:
                         rename(test.errpath, '%s.out' % test.path)
                     accepted = True
-
+            if not accepted and not failed:
+                self.faildata[test.name] = ''.join(lines)
         iolock.release()
 
         return accepted
@@ -1344,6 +1370,35 @@
         for test, msg in result.errors:
             self.stream.writeln('Errored %s: %s' % (test.name, msg))
 
+        if self._runner.options.xunit:
+            xuf = open(self._runner.options.xunit, 'wb')
+            try:
+                timesd = dict(
+                    (test, real) for test, cuser, csys, real in result.times)
+                doc = minidom.Document()
+                s = doc.createElement('testsuite')
+                s.setAttribute('name', 'run-tests')
+                s.setAttribute('tests', str(result.testsRun))
+                s.setAttribute('errors', "0") # TODO
+                s.setAttribute('failures', str(failed))
+                s.setAttribute('skipped', str(skipped + ignored))
+                doc.appendChild(s)
+                for tc in result.successes:
+                    t = doc.createElement('testcase')
+                    t.setAttribute('name', tc.name)
+                    t.setAttribute('time', '%.3f' % timesd[tc.name])
+                    s.appendChild(t)
+                for tc, err in sorted(result.faildata.iteritems()):
+                    t = doc.createElement('testcase')
+                    t.setAttribute('name', tc)
+                    t.setAttribute('time', '%.3f' % timesd[tc])
+                    cd = doc.createCDATASection(cdatasafe(err))
+                    t.appendChild(cd)
+                    s.appendChild(t)
+                xuf.write(doc.toprettyxml(indent='  ', encoding='utf-8'))
+            finally:
+                xuf.close()
+
         self._runner._checkhglib('Tested')
 
         self.stream.writeln('# Ran %d tests, %d skipped, %d warned, %d failed.'
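The report-writing block above uses nothing beyond xml.dom.minidom from the standard library. Distilled into a standalone sketch with made-up test data (illustrative only, not the patch's code):

    from xml.dom import minidom

    doc = minidom.Document()
    suite = doc.createElement('testsuite')
    suite.setAttribute('name', 'run-tests')
    suite.setAttribute('tests', '2')
    suite.setAttribute('failures', '1')
    doc.appendChild(suite)

    # A passing test is an empty <testcase/> element.
    ok = doc.createElement('testcase')
    ok.setAttribute('name', 'test-success.t')
    ok.setAttribute('time', '0.123')
    suite.appendChild(ok)

    # A failing test carries its output diff in a CDATA section.
    bad = doc.createElement('testcase')
    bad.setAttribute('name', 'test-failure.t')
    bad.setAttribute('time', '0.456')
    bad.appendChild(doc.createCDATASection('--- expected\n+++ actual\n'))
    suite.appendChild(bad)

    print doc.toprettyxml(indent='  ', encoding='utf-8')

One design note: opening the output file in 'wb' and passing encoding='utf-8' to toprettyxml, as the patch does, keeps the written bytes consistent with the XML declaration.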
--- a/tests/test-run-tests.t	Wed Aug 06 02:45:55 2014 -0500
+++ b/tests/test-run-tests.t	Tue Aug 05 21:17:11 2014 -0400
@@ -48,6 +48,39 @@
   # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
   python hash seed: * (glob)
   [1]
+test --xunit support
+  $ $TESTDIR/run-tests.py --with-hg=`which hg` --xunit=xunit.xml
+  
+  --- $TESTTMP/test-failure.t
+  +++ $TESTTMP/test-failure.t.err
+  @@ -1,4 +1,4 @@
+     $ echo babar
+  -  rataxes
+  +  babar
+   This is a noop statement so that
+   this test is still more bytes than success.
+  
+  ERROR: test-failure.t output changed
+  !.
+  Failed test-failure.t: output changed
+  # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+  python hash seed: * (glob)
+  [1]
+  $ cat xunit.xml
+  <?xml version="1.0" encoding="utf-8"?>
+  <testsuite errors="0" failures="1" name="run-tests" skipped="0" tests="2">
+    <testcase name="test-success.t" time="*"/> (glob)
+    <testcase name="test-failure.t" time="*"> (glob)
+  <![CDATA[--- $TESTTMP/test-failure.t
+  +++ $TESTTMP/test-failure.t.err
+  @@ -1,4 +1,4 @@
+     $ echo babar
+  -  rataxes
+  +  babar
+   This is a noop statement so that
+   this test is still more bytes than success.
+  ]]>  </testcase>
+  </testsuite>
 
 test for --retest
 ====================
@@ -291,6 +324,18 @@
   Skipped test-skip.t: irrelevant
   # Ran 1 tests, 2 skipped, 0 warned, 0 failed.
 
+Skips with xml
+  $ $TESTDIR/run-tests.py --with-hg=`which hg` --keyword xyzzy \
+  >  --xunit=xunit.xml
+  i.s
+  Skipped test-skip.t: irrelevant
+  # Ran 1 tests, 2 skipped, 0 warned, 0 failed.
+  $ cat xunit.xml
+  <?xml version="1.0" encoding="utf-8"?>
+  <testsuite errors="0" failures="0" name="run-tests" skipped="2" tests="1">
+    <testcase name="test-success.t" time="*"/> (glob)
+  </testsuite>
+
 Missing skips or blacklisted skips don't count as executed:
   $ echo test-failure.t > blacklist
   $ $TESTDIR/run-tests.py --with-hg=`which hg` --blacklist=blacklist \