[gtk/wip/ebassi/ci-junit-report] ci: Add JUnit report for the test suite



commit 4ee809d68a6b7476fc5dda4b1e1d5bf3770c4721
Author: Emmanuele Bassi <ebassi gnome org>
Date:   Fri Apr 5 18:23:13 2019 +0100

    ci: Add JUnit report for the test suite
    
    We can convert the JSON output for `meson test` into a JUnit XML report,
    which can be consumed by GitLab.

 .gitlab-ci.yml                   |  5 ++-
 .gitlab-ci/meson-junit-report.py | 91 ++++++++++++++++++++++++++++++++++++++++
 2 files changed, 95 insertions(+), 1 deletion(-)
---
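As a rough illustration of the conversion described in the commit message: meson writes one JSON object per line to testlog.json, with the test name in the "project:suite / unit" shape this script assumes. The sketch below parses one such line and builds the corresponding JUnit <testcase> element. The sample values are invented, not real GTK output.

import json
import xml.etree.ElementTree as ET

# Made-up testlog.json line in the "project:suite / unit" shape the script expects.
sample_line = '{"name": "gtk:gtk / objects", "duration": 0.42, "returncode": 0, "stdout": "OK"}'

data = json.loads(sample_line)
full_suite, unit_name = data['name'].split(' / ')
project_name, suite_name = full_suite.split(':')

testcase = ET.Element('testcase')
testcase.set('name', '{}/{}/{}'.format(project_name, suite_name, unit_name))
testcase.set('time', str(data['duration']))

print(ET.tostring(testcase, encoding='unicode'))
# -> <testcase name="gtk/gtk/objects" time="0.42" />
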
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a29a7d52b0..7260219b2e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -17,8 +17,11 @@ fedora-x86_64:
   stage: build
   script:
     - bash -x ./.gitlab-ci/test-docker.sh
+    - python3 ./.gitlab-ci/meson-junit-report.py --job-id="${CI_JOB_NAME}" --output="${CI_PROJECT_DIR}/_build/report.xml" "${CI_PROJECT_DIR}/_build/meson-logs/testlog.json"
   artifacts:
-    when: on_failure
+    reports:
+      junit:
+        - "${CI_PROJECT_DIR}/_build/report.xml" 
     name: "gtk-${CI_COMMIT_REF_NAME}"
     paths:
       - "${CI_PROJECT_DIR}/_build/meson-logs"
diff --git a/.gitlab-ci/meson-junit-report.py b/.gitlab-ci/meson-junit-report.py
new file mode 100755
index 0000000000..9d1b87a33f
--- /dev/null
+++ b/.gitlab-ci/meson-junit-report.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+
+import argparse
+import datetime
+import json
+import os
+import sys
+import xml.etree.ElementTree as ET
+
+aparser = argparse.ArgumentParser(description='Turns a Meson test log into a JUnit report')
+aparser.add_argument('--job-id', metavar='ID',
+                     help='The job ID for the report',
+                     default='Unknown')
+aparser.add_argument('--branch', metavar='NAME',
+                     help='Branch of the project being tested',
+                     default='master')
+aparser.add_argument('--output', metavar='FILE',
+                     help='The output file, stdout by default',
+                     type=argparse.FileType('w'),
+                     default=sys.stdout)
+aparser.add_argument('infile', metavar='FILE',
+                     help='The input testlog.json, stdin by default',
+                     type=argparse.FileType('r'),
+                     default=sys.stdin)
+
+args = aparser.parse_args()
+
+outfile = args.output
+
+testsuites = ET.Element('testsuites')
+testsuites.set('id', args.job_id)
+testsuites.set('timestamp', datetime.datetime.utcnow().isoformat(timespec='minutes'))
+
+suites = {}
+for line in args.infile:
+    data = json.loads(line)
+    (full_suite, unit_name) = data['name'].split(' / ')
+    (project_name, suite_name) = full_suite.split(':')
+
+    duration = data['duration']
+    return_code = data['returncode']
+    log = data['stdout']
+
+    unit = {
+        'project': project_name,
+        'suite': suite_name,
+        'name': unit_name,
+        'duration': duration,
+        'returncode': return_code,
+        'stdout': log,
+    }
+
+    units = suites.setdefault(suite_name, [])
+    units.append(unit)
+
+    print('Added unit {}/{}/{}'.format(project_name, suite_name, unit_name))
+
+for name, units in suites.items():
+    print('Processing suite {} (units: {})'.format(name, len(units)))
+    testsuite = ET.SubElement(testsuites, 'testsuite')
+    testsuite.set('name', name)
+    testsuite.set('tests', str(len(units)))
+
+    def if_failed(unit):
+        if unit['returncode'] != 0:
+            return True
+        return False
+
+    def if_succeeded(unit):
+        if unit['returncode'] == 0:
+            return True
+        return False
+
+    successes = list(filter(if_succeeded, units))
+    failures = list(filter(if_failed, units))
+    testsuite.set('failures', str(len(failures)))
+
+    print('{}: {} pass, {} fail'.format(name, len(successes), len(failures)))
+
+    for unit in successes:
+        testcase = ET.SubElement(testsuite, 'testcase')
+        testcase.set('name', '{}/{}/{}'.format(unit['project'], unit['suite'], unit['name']))
+        testcase.set('time', str(unit['duration']))
+
+    for unit in failures:
+        failure = ET.SubElement(testsuite, 'failure')
+        failure.set('name', '{}/{}/{}'.format(unit['project'], unit['suite'], unit['name']))
+        failure.set('time', str(unit['duration']))
+
+output = ET.tostring(testsuites, encoding='unicode')
+outfile.write(output)
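
A hedged aside, not part of the patch: in the de-facto JUnit XML layout, a failed unit is usually represented as a <testcase> element with a nested <failure> child (with the captured log as element text), rather than as a bare <failure> element directly under <testsuite>, and that is the shape JUnit report consumers generally look for. A minimal sketch of that layout, with invented names and values:

import xml.etree.ElementTree as ET

# Invented sample data; element names follow the common JUnit XML layout.
testsuite = ET.Element('testsuite')
testsuite.set('name', 'gtk')
testsuite.set('tests', '1')
testsuite.set('failures', '1')

testcase = ET.SubElement(testsuite, 'testcase')
testcase.set('name', 'gtk/gtk/objects')
testcase.set('time', '0.42')

# The failure details live inside the testcase, with the log as element text.
failure = ET.SubElement(testcase, 'failure')
failure.set('message', 'exited with code 1')
failure.text = 'captured stdout would go here'

print(ET.tostring(testsuite, encoding='unicode'))
# (output re-indented for readability)
# <testsuite name="gtk" tests="1" failures="1">
#   <testcase name="gtk/gtk/objects" time="0.42">
#     <failure message="exited with code 1">captured stdout would go here</failure>
#   </testcase>
# </testsuite>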

