changeset 636:21902b481ee7

0.6dev: Inline display of error and failure details in 'Test Results' summary table. Thanks to Mat Booth for the patch! Closes #205.
author osimons
date Mon, 17 Aug 2009 20:29:54 +0000
parents 5f09d36c24be
children 988f2b3e585c
files bitten/build/javatools.py bitten/htdocs/bitten.css bitten/report/testing.py bitten/report/tests/testing.py bitten/templates/bitten_build.html bitten/templates/bitten_summary_tests.html bitten/web_ui.py
diffstat 7 files changed, 141 insertions(+), 14 deletions(-)
--- a/bitten/build/javatools.py
+++ b/bitten/build/javatools.py
@@ -116,6 +116,7 @@
                 for testcase in xmlio.parse(fileobj).children('testcase'):
                     test = xmlio.Element('test')
                     test.attr['fixture'] = testcase.attr['classname']
+                    test.attr['name'] = testcase.attr['name']
                     if 'time' in testcase.attr:
                         test.attr['duration'] = testcase.attr['time']
                     if srcdir is not None:
@@ -126,9 +127,16 @@
                     result = list(testcase.children())
                     if result:
                         test.attr['status'] = result[0].name
-                        test.append(xmlio.Element('traceback')[
-                            result[0].gettext()
-                        ])
+                        # Sometimes the traceback isn't prefixed with the
+                        # exception type and message, so add it in if needed
+                        tracebackprefix = "%s: %s" % (result[0].attr['type'],
+                                                      result[0].attr['message'])
+                        if result[0].gettext().startswith(tracebackprefix):
+                            test.append(xmlio.Element('traceback')[
+                                        result[0].gettext()])
+                        else:
+                            test.append(xmlio.Element('traceback')[
+                                        "\n".join((tracebackprefix, result[0].gettext()))])
                         failed += 1
                     else:
                         test.attr['status'] = 'success'
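
The javatools change above normalizes JUnit failure text so the recorded traceback always leads with the exception type and message. A minimal standalone sketch of that normalization, using plain strings instead of Bitten's xmlio elements (the function name and arguments here are illustrative, not Bitten's API):

    def normalize_traceback(exc_type, message, text):
        """Prefix the traceback with 'type: message' unless it already starts with it.

        Mirrors the prefixing logic added to bitten/build/javatools.py.
        """
        prefix = "%s: %s" % (exc_type, message)
        if text.startswith(prefix):
            return text
        return "\n".join((prefix, text))

    # Example: Ant/JUnit sometimes emits only the stack frames in the element text.
    print(normalize_traceback("java.lang.AssertionError",
                              "expected:<1> but was:<2>",
                              "    at org.example.FooTest.testBar(FooTest.java:42)"))
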
--- a/bitten/htdocs/bitten.css
+++ b/bitten/htdocs/bitten.css
@@ -156,14 +156,27 @@
   font-weight: bold;
 }
 #content.build table.tests tr.failed { background: #d99; }
+#content.build table.tests tr.failed th,
 #content.build table.tests tr.failed td { font-weight: bold; }
-#content.build table.tests tr.failed:hover th,
-#content.build table.tests tr.failed:hover td,
-#content.build table.tests tr.failed:hover tr { background: #966; }
 #content.build table.tests tr.failed :link,
-#content.build table.tests tr.failed :visited { color: #933 }
-#content.build table.tests tr.failed :link:hover,
-#content.build table.tests tr.failed :visited:hover { color: #fff; }
+#content.build table.tests tr.failed :visited { color: #b00 }
+
+/* collapsible failure details */
+#content.build table.tests tr th p { margin: 0; padding: 0; text-align: left; }
+#content.build table.tests tr th p.details {
+  margin: 0; padding-left: 32px; padding-top: 5px; text-align: left; font-weight: normal;
+}
+#content.build table.tests tr th p.details span {
+  white-space: pre; font-family: monospace; font-weight: normal; color: #666;
+}
+#content.build table.tests .fixture { display: inline; }
+#content.build table.tests tr.failed th .fixture a { 
+  background: url(../common/expanded.png) 0 50% no-repeat; padding-left: 16px;
+}
+#content.build table.tests tr.failed th.collapsed .fixture a {
+  background-image: url(../common/collapsed.png);
+}
+#content.build table.tests tr.failed th.collapsed p.details { display: none; }
 
 #content.build .log { background: #fff; border: 1px inset; font-size: 90%;
   overflow: auto; max-height: 20em; width: 100%; white-space: pre;
--- a/bitten/report/testing.py
+++ b/bitten/report/testing.py
@@ -9,6 +9,7 @@
 # are also available at http://bitten.edgewall.org/wiki/License.
 
 from trac.core import *
+from trac.web.chrome import add_script
 from bitten.api import IReportChartGenerator, IReportSummarizer
 
 __docformat__ = 'restructuredtext en'
@@ -125,6 +126,42 @@
             if file:
                 fixtures[-1]['href'] = req.href.browser(config.path, file)
 
+        # For each fixture, get a list of tests that don't succeed
+        for fixture in fixtures:
+            cursor.execute("""
+SELECT item_status.value AS status, item_name.value AS name,
+       item_traceback.value AS traceback
+FROM bitten_report
+ LEFT OUTER JOIN bitten_report_item AS item_fixture
+  ON (item_fixture.report=bitten_report.id AND
+      item_fixture.name='fixture')
+ LEFT OUTER JOIN bitten_report_item AS item_status
+  ON (item_status.report=bitten_report.id AND
+      item_status.item=item_fixture.item AND
+      item_status.name='status')
+ LEFT OUTER JOIN bitten_report_item AS item_name
+  ON (item_name.report=bitten_report.id AND
+      item_name.item=item_fixture.item AND
+      item_name.name='name')
+ LEFT OUTER JOIN bitten_report_item AS item_traceback
+  ON (item_traceback.report=bitten_report.id AND
+      item_traceback.item=item_fixture.item AND
+      item_traceback.name='traceback')
+WHERE category='test' AND build=%s AND step=%s AND item_status.value<>'success' AND
+      item_fixture.value=%s""", (build.id, step.name, fixture['name']))
+
+            failures = []
+            for status, name, traceback in cursor:
+                # use the fixture name if a name isn't supplied for the
+                # individual test
+                if not name:
+                    name = fixture['name']
+                failures.append({'status': status,
+                                 'name': name,
+                                 'traceback': traceback})
+            if failures:
+                fixture['failures'] = failures
+
         data = {'fixtures': fixtures,
                 'totals': {'success': total_success, 
                            'ignore': total_ignore,
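
With the query above, the summarizer now hands the template one dictionary per fixture, with an optional 'failures' list attached when any test in the fixture did not succeed. A hedged sketch of the shape bitten_summary_tests.html iterates over (the concrete values are invented examples):

    # Illustrative 'fixtures' entries as consumed by the template; values are made up.
    fixtures = [
        {'name': 'test_foo', 'href': '/browser/trunk/foo.c',
         'num_success': 1, 'num_failure': 0, 'num_error': 0, 'num_ignore': 0},
        {'name': 'test_bar', 'href': '/browser/trunk/bar.c',
         'num_success': 0, 'num_failure': 0, 'num_error': 1, 'num_ignore': 0,
         'failures': [{'status': 'error',
                       'name': 'bar',
                       'traceback': 'Error traceback'}]},
    ]
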
--- a/bitten/report/tests/testing.py
+++ b/bitten/report/tests/testing.py
@@ -12,8 +12,10 @@
 
 from trac.db import DatabaseManager
 from trac.test import EnvironmentStub, Mock
+from trac.web.href import Href
 from bitten.model import *
-from bitten.report.testing import TestResultsChartGenerator
+from bitten.report.testing import TestResultsChartGenerator, \
+                    TestResultsSummarizer
 
 
 class TestResultsChartGeneratorTestCase(unittest.TestCase):
@@ -98,9 +100,64 @@
         self.assertEqual(2, data['data'][2][1])
 
 
+class TestResultsSummarizerTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.env = EnvironmentStub()
+        self.env.path = ''
+
+        db = self.env.get_db_cnx()
+        cursor = db.cursor()
+        connector, _ = DatabaseManager(self.env)._get_connector()
+        for table in schema:
+            for stmt in connector.to_sql(table):
+                cursor.execute(stmt)
+
+    def test_testcase_errors_and_failures(self):
+        config = Mock(name='trunk', path='/somewhere')
+        step = Mock(name='foo')
+
+        build = Build(self.env, config=config.name, platform=1, rev=123,
+                      rev_time=42)
+        build.insert()
+        report = Report(self.env, build=build.id, step=step.name,
+                        category='test')
+        report.items += [{'fixture': 'test_foo',
+                          'name': 'foo', 'file': 'foo.c',
+                          'type': 'test', 'status': 'success'},
+                         {'fixture': 'test_bar',
+                          'name': 'bar', 'file': 'bar.c',
+                          'type': 'test', 'status': 'error',
+                          'traceback': 'Error traceback'},
+                         {'fixture': 'test_baz',
+                          'name': 'baz', 'file': 'baz.c',
+                          'type': 'test', 'status': 'failure',
+                          'traceback': 'Failure reason'}]
+        report.insert()
+
+        req = Mock(href=Href('trac'))
+        generator = TestResultsSummarizer(self.env)
+        template, data = generator.render_summary(req,
+                                            config, build, step, 'test')
+        self.assertEquals('bitten_summary_tests.html', template)
+        self.assertEquals(data['totals'],
+                {'ignore': 0, 'failure': 1, 'success': 1, 'error': 1})
+        for fixture in data['fixtures']:
+            if fixture.has_key('failures'):
+                if fixture['failures'][0]['status'] == 'error':
+                    self.assertEquals('test_bar', fixture['name'])
+                    self.assertEquals('Error traceback',
+                                      fixture['failures'][0]['traceback'])
+                if fixture['failures'][0]['status'] == 'failure':
+                    self.assertEquals('test_baz', fixture['name'])
+                    self.assertEquals('Failure reason',
+                                      fixture['failures'][0]['traceback'])
+
+
 def suite():
     suite = unittest.TestSuite()
     suite.addTest(unittest.makeSuite(TestResultsChartGeneratorTestCase))
+    suite.addTest(unittest.makeSuite(TestResultsSummarizerTestCase))
     return suite
 
 if __name__ == '__main__':
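
The new TestResultsSummarizerTestCase is wired into the module's suite(), so it can be exercised on its own as a quick sanity check, assuming Bitten and the Trac test stubs are importable (a hypothetical invocation, not part of the patch):

    import unittest

    from bitten.report.tests import testing

    # Run the report chart and summarizer tests touched by this change.
    unittest.TextTestRunner(verbosity=2).run(testing.suite())
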
--- a/bitten/templates/bitten_build.html
+++ b/bitten/templates/bitten_build.html
@@ -8,6 +8,11 @@
   <xi:include href="macros.html" />
   <head>
     <title>$title</title>
+    <script type="text/javascript">
+      jQuery(document).ready(function($){
+        $("table.tests tr.failed th .fixture").enableFolding(true);
+      });
+    </script>
   </head>
   <body>
     <div id="content" class="build">
--- a/bitten/templates/bitten_summary_tests.html
+++ b/bitten/templates/bitten_summary_tests.html
@@ -11,10 +11,16 @@
     <th>Failures</th><th>Ignores</th><th>Errors</th>
    </tr></thead>
    <tbody><tr py:for="item in data.fixtures"
-              class="${item.num_failure or item.num_error and 'failed' or None}">
-    <th py:choose="">
-     <a py:when="item.href" href="$item.href">$item.name</a>
-     <py:otherwise>$item.name</py:otherwise>
+              class="${item.failures and 'failed' or 'success'}">
+    <th>
+      <span class="fixture" />
+      <py:choose test="">
+        <a py:when="item.href" href="$item.href">$item.name</a>
+        <span py:when="not item.href">$item.name</span>
+      </py:choose>
+      <p class="details" py:for="failure in item.failures">$failure.name ($failure.status):<br />
+        <span>$failure.traceback</span>
+      </p>
     </th>
     <td>${item.num_success + item.num_failure + item.num_error + item.num_ignore}</td>
     <td>$item.num_failure</td>
--- a/bitten/web_ui.py
+++ b/bitten/web_ui.py
@@ -545,6 +545,7 @@
         chgset = repos.get_changeset(build.rev)
         data['build']['chgset_author'] = chgset.author
 
+        add_script(req, 'common/js/folding.js')
         add_script(req, 'bitten/tabset.js')
         add_stylesheet(req, 'bitten/bitten.css')
         return 'bitten_build.html', data, None