[glib/wip/ebassi/test-report: 1/2] ci: Generate a cover report for the test suite

commit ed65996300a52007c815d03314ba49f29b15e91b
Author: Emmanuele Bassi <ebassi gnome org>
Date:   Fri Apr 5 20:53:11 2019 +0100

    ci: Generate a cover report for the test suite
    
    GitLab can show the results of a CI pipeline if the pipeline generates a
    report using the JUnit XML format.
    
    Since Meson provides a machine parseable output for `meson test`, we can
    take that and turn it into XML soup.

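As an illustration of the conversion the new script performs: Meson writes one JSON object per test to `_build/meson-logs/testlog.json`. Trimmed to just the fields the script reads, a hypothetical passing test could look like this (test name and timing invented for the example):

    {"name": "glib:glib / array-test", "returncode": 0, "duration": 0.42, "stdout": "..."}

With `--project-name=glib`, that line would come out as a JUnit testcase roughly of the form:

    <testcase classname="glib/glib" name="array-test" time="0.42"/>
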
 .gitlab-ci.yml                   |  15 ++++--
 .gitlab-ci/meson-junit-report.py | 101 +++++++++++++++++++++++++++++++++++++++
 .gitlab-ci/run-tests.sh          |  18 +++++++
 3 files changed, 131 insertions(+), 3 deletions(-)
---
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e2afb12dd..1f5e1ac96 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -34,7 +34,7 @@ fedora-x86_64:
     - ninja -C _build
     - mkdir -p _coverage
    - lcov --config-file .gitlab-ci/lcovrc --directory _build --capture --initial --output-file "_coverage/${CI_JOB_NAME}-baseline.lcov"
-    - meson test -C _build --timeout-multiplier ${MESON_TEST_TIMEOUT_MULTIPLIER} --no-suite flaky
+    - .gitlab-ci/run-tests.sh
    - lcov --config-file .gitlab-ci/lcovrc --directory _build --capture --output-file "_coverage/${CI_JOB_NAME}.lcov"
     # FIXME: We should run all installed tests, but do only this one for now
     # because it cannot run uninstalled. Reconfigure with dtrace disabled
@@ -43,12 +43,15 @@ fedora-x86_64:
     - ninja -C _build install
    - GLIB_TEST_COMPILATION=1 $HOME/glib-installed/libexec/installed-tests/glib/static-link.py $HOME/glib-installed/lib/pkgconfig
   artifacts:
+    reports:
+      junit: "_build/${CI_JOB_NAME}-report.xml"
     name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
     when: always
     paths:
       - "_build/config.h"
       - "_build/glib/glibconfig.h"
       - "_build/meson-logs"
+      - "_build/${CI_JOB_NAME}-report.xml"
       - "_coverage"
 
 G_DISABLE_ASSERT:
@@ -66,14 +69,17 @@ G_DISABLE_ASSERT:
             -Dinstalled_tests=true
             _build
     - ninja -C _build
-    - meson test -C _build --timeout-multiplier ${MESON_TEST_TIMEOUT_MULTIPLIER} --no-suite flaky
+    - bash -x ./.gitlab-ci/run-tests.sh
   artifacts:
+    reports:
+      junit: "_build/${CI_JOB_NAME}-report.xml"
     name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
     when: always
     paths:
       - "_build/config.h"
       - "_build/glib/glibconfig.h"
       - "_build/meson-logs"
+      - "_build/${CI_JOB_NAME}-report.xml"
 
 .cross-template: &cross-template
   stage: build
@@ -166,16 +172,19 @@ freebsd-11-x86_64:
     # FIXME: extattr(2) support: https://gitlab.gnome.org/GNOME/glib/issues/1404
     - meson ${MESON_COMMON_OPTIONS} -Db_lundef=false -Diconv=gnu -Dxattr=false _build
     - ninja -C _build
-    - meson test -C _build --timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" --no-suite flaky
+    - bash -x ./.gitlab-ci/run-tests.sh
   except:
     - tags
   artifacts:
+    reports:
+      junit: "_build/${CI_JOB_NAME}-report.xml"
     name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
     when: always
     paths:
       - "_build/config.h"
       - "_build/glib/glibconfig.h"
       - "_build/meson-logs"
+      - "_build/${CI_JOB_NAME}-report.xml"
 
 coverage:
   stage: coverage
diff --git a/.gitlab-ci/meson-junit-report.py b/.gitlab-ci/meson-junit-report.py
new file mode 100755
index 000000000..ea4928248
--- /dev/null
+++ b/.gitlab-ci/meson-junit-report.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python3
+
+import argparse
+import datetime
+import json
+import os
+import sys
+import xml.etree.ElementTree as ET
+
+aparser = argparse.ArgumentParser(description='Turns a Meson test log into a JUnit report')
+aparser.add_argument('--project-name', metavar='NAME',
+                     help='The project name',
+                     default='unknown')
+aparser.add_argument('--job-id', metavar='ID',
+                     help='The job ID for the report',
+                     default='Unknown')
+aparser.add_argument('--branch', metavar='NAME',
+                     help='Branch of the project being tested',
+                     default='master')
+aparser.add_argument('--output', metavar='FILE',
+                     help='The output file, stdout by default',
+                     type=argparse.FileType('w', encoding='UTF-8'),
+                     default=sys.stdout)
+aparser.add_argument('infile', metavar='FILE',
+                     help='The input testlog.json, stdin by default',
+                     type=argparse.FileType('r', encoding='UTF-8'),
+                     default=sys.stdin)
+
+args = aparser.parse_args()
+
+outfile = args.output
+
+testsuites = ET.Element('testsuites')
+testsuites.set('id', '{}/{}'.format(args.job_id, args.branch))
+testsuites.set('package', args.project_name)
+testsuites.set('timestamp', datetime.datetime.utcnow().isoformat(timespec='minutes'))
+
+suites = {}
+for line in args.infile:
+    data = json.loads(line)
+    (full_suite, unit_name) = data['name'].split(' / ')
+    (project_name, suite_name) = full_suite.split(':')
+
+    duration = data['duration']
+    return_code = data['returncode']
+    log = data['stdout']
+
+    unit = {
+        'suite': suite_name,
+        'name': unit_name,
+        'duration': duration,
+        'returncode': return_code,
+        'stdout': log,
+    }
+
+    units = suites.setdefault(suite_name, [])
+    units.append(unit)
+
+for name, units in suites.items():
+    print('Processing suite {} (units: {})'.format(name, len(units)))
+
+    def if_failed(unit):
+        if unit['returncode'] != 0:
+            return True
+        return False
+
+    def if_succeded(unit):
+        if unit['returncode'] == 0:
+            return True
+        return False
+
+    successes = list(filter(if_succeded, units))
+    failures = list(filter(if_failed, units))
+    print(' - {}: {} pass, {} fail'.format(name, len(successes), len(failures)))
+
+    testsuite = ET.SubElement(testsuites, 'testsuite')
+    testsuite.set('name', '{}/{}'.format(args.project_name, name))
+    testsuite.set('tests', str(len(units)))
+    testsuite.set('errors', str(len(failures)))
+    testsuite.set('failures', str(len(failures)))
+
+    for unit in successes:
+        testcase = ET.SubElement(testsuite, 'testcase')
+        testcase.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
+        testcase.set('name', unit['name'])
+        testcase.set('time', str(unit['duration']))
+
+    for unit in failures:
+        testcase = ET.SubElement(testsuite, 'testcase')
+        testcase.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
+        testcase.set('name', unit['name'])
+        testcase.set('time', str(unit['duration']))
+
+        failure = ET.SubElement(testcase, 'failure')
+        failure.set('classname', '{}/{}'.format(args.project_name, unit['suite']))
+        failure.set('name', unit['name'])
+        failure.set('type', 'error')
+        failure.text = unit['stdout']
+
+output = ET.tostring(testsuites, encoding='unicode')
+outfile.write(output)
diff --git a/.gitlab-ci/run-tests.sh b/.gitlab-ci/run-tests.sh
new file mode 100755
index 000000000..ca02816f9
--- /dev/null
+++ b/.gitlab-ci/run-tests.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set +e
+
+meson test \
+        -C _build \
+        --timeout-multiplier ${MESON_TEST_TIMEOUT_MULTIPLIER} \
+        --no-suite flaky
+
+exit_code=$?
+
+python3 .gitlab-ci/meson-junit-report.py \
+        --project-name=glib \
+        --job-id "${CI_JOB_NAME}" \
+        --output "_build/${CI_JOB_NAME}-report.xml" \
+        _build/meson-logs/testlog.json
+
+exit $exit_code

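For reference, the report generator can also be run by hand against an existing test log, e.g. to inspect the XML locally. Assuming a `_build` tree that already contains `meson-logs/testlog.json`, an invocation along these lines should work (the job id is arbitrary outside CI):

    python3 .gitlab-ci/meson-junit-report.py \
            --project-name=glib \
            --job-id local \
            --output glib-report.xml \
            _build/meson-logs/testlog.json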
