diff options
-rw-r--r--  .gitlab-ci.yml                        |  17
-rwxr-xr-x  .gitlab/scripts/meson-junit-report.py | 114
-rwxr-xr-x  .gitlab/scripts/run-tests.sh          |  24
3 files changed, 147 insertions, 8 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 359c4968a..fbe208a0d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -18,11 +18,16 @@ variables: script: - meson ${COMMON_MESON_FLAGS} ${LOADERS_FLAGS} ${BUILD_OPTS} _build . - ninja -C _build + - .gitlab/scripts/run-tests.sh _build artifacts: - when: on_failure + when: always name: "gdk-pixbuf-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}" paths: - - "${CI_PROJECT_DIR}/build_*/meson-logs" + - "${CI_PROJECT_DIR}/_build/meson-logs" + - "${CI_PROJECT_DIR}/_build/report-${CI_JOB_NAME}.xml" + reports: + junit: + - "${CI_PROJECT_DIR}/_build/report-${CI_JOB_NAME}.xml" cache: key: "$CI_JOB_NAME" paths: @@ -32,8 +37,6 @@ meson-fedora-x86_64: stage: build extends: .build-linux image: ${FEDORA_IMAGE} - after_script: - - meson test -C _build release-build: stage: build @@ -41,16 +44,14 @@ release-build: image: ${FEDORA_IMAGE} variables: BUILD_OPTS: "-Dbuildtype=release" - after_script: - - meson test -C _build reference: stage: docs - extends: .build-linux image: ${FEDORA_IMAGE} variables: BUILD_OPTS: "-Dgtk_doc=true" - after_script: + script: + - meson ${COMMON_MESON_FLAGS} ${LOADERS_FLAGS} ${BUILD_OPTS} _build . 
#!/usr/bin/env python3

# Turns a Meson testlog.json file into a JUnit XML report
#
# Copyright 2019 GNOME Foundation
#
# SPDX-License-Identifier: LGPL-2.1-or-later
#
# Original author: Emmanuele Bassi

import argparse
import datetime
import json
import sys
import xml.etree.ElementTree as ET

# Meson "result" strings that count as a failed unit vs. a passed one.
FAILING_RESULTS = frozenset({'ERROR', 'FAIL', 'UNEXPECTEDPASS', 'TIMEOUT'})
PASSING_RESULTS = frozenset({'OK', 'EXPECTEDFAIL', 'SKIP'})


def parse_log(lines):
    """Parse testlog.json lines into a {suite_name: [unit, ...]} mapping.

    Each line is one JSON object emitted by Meson whose "name" field has
    the form "<project>:<suite> / <unit>".
    """
    suites = {}
    for line in lines:
        data = json.loads(line)
        (full_suite, unit_name) = data['name'].split(' / ')
        (_project_name, suite_name) = full_suite.split(':')

        unit = {
            'suite': suite_name,
            'name': unit_name,
            'duration': data['duration'],
            'returncode': data['returncode'],
            'result': data['result'],
            'stdout': data['stdout'],
        }
        suites.setdefault(suite_name, []).append(unit)
    return suites


def build_report(suites, project_name, backend, job_id, branch):
    """Build and return the root <testsuites> Element for the report.

    :param suites: mapping produced by parse_log()
    :param project_name: value for the JUnit "package"/classname fields
    :param backend: build backend name, prefixed to each testcase name
    :param job_id: CI job identifier, combined with branch as the report id
    :param branch: branch name under test
    """
    testsuites = ET.Element('testsuites')
    testsuites.set('id', '{}/{}'.format(job_id, branch))
    testsuites.set('package', project_name)
    # Timezone-aware timestamp; utcnow() is deprecated and yields a naive
    # datetime with no offset information.
    now = datetime.datetime.now(datetime.timezone.utc)
    testsuites.set('timestamp', now.isoformat(timespec='minutes'))

    for name, units in suites.items():
        print('Processing suite {} (units: {})'.format(name, len(units)))

        successes = [u for u in units if u['result'] in PASSING_RESULTS]
        failures = [u for u in units if u['result'] in FAILING_RESULTS]
        print(' - {}: {} pass, {} fail'.format(name, len(successes), len(failures)))

        testsuite = ET.SubElement(testsuites, 'testsuite')
        testsuite.set('name', '{}/{}'.format(project_name, name))
        testsuite.set('tests', str(len(units)))
        testsuite.set('errors', str(len(failures)))
        testsuite.set('failures', str(len(failures)))

        # Successes first, then failures, preserving the report layout.
        for unit in successes + failures:
            testcase = ET.SubElement(testsuite, 'testcase')
            testcase.set('classname', '{}/{}'.format(project_name, unit['suite']))
            testcase.set('name', '{}/{}'.format(backend, unit['name']))
            testcase.set('time', str(unit['duration']))

            if unit['result'] in FAILING_RESULTS:
                failure = ET.SubElement(testcase, 'failure')
                failure.set('classname', '{}/{}'.format(project_name, unit['suite']))
                failure.set('type', 'error')
                failure.text = unit['stdout']

    return testsuites


def main():
    """Parse the CLI arguments, convert the log, and write the XML report."""
    aparser = argparse.ArgumentParser(description='Turns a Meson test log into a JUnit report')
    aparser.add_argument('--project-name', metavar='NAME',
                         help='The project name',
                         default='unknown')
    aparser.add_argument('--backend', metavar='NAME',
                         help='The used backend',
                         default='unknown')
    aparser.add_argument('--job-id', metavar='ID',
                         help='The job ID for the report',
                         default='Unknown')
    aparser.add_argument('--branch', metavar='NAME',
                         help='Branch of the project being tested',
                         default='master')
    aparser.add_argument('--output', metavar='FILE',
                         help='The output file, stdout by default',
                         type=argparse.FileType('w', encoding='UTF-8'),
                         default=sys.stdout)
    # nargs='?' makes the positional optional so the documented
    # "stdin by default" actually works; without it argparse requires it.
    aparser.add_argument('infile', metavar='FILE', nargs='?',
                         help='The input testlog.json, stdin by default',
                         type=argparse.FileType('r', encoding='UTF-8'),
                         default=sys.stdin)

    args = aparser.parse_args()

    suites = parse_log(args.infile)
    report = build_report(suites, args.project_name, args.backend,
                          args.job_id, args.branch)
    args.output.write(ET.tostring(report, encoding='unicode'))


if __name__ == '__main__':
    main()
#!/bin/bash

# Run the Meson test suite in BUILDDIR and convert meson-logs/testlog.json
# into a JUnit XML report that GitLab CI can display.
#
# Usage: run-tests.sh BUILDDIR [BACKEND]

set +x
# Keep going after a test failure so the report is always generated;
# the test exit code is propagated at the end instead.
set +e

srcdir=$( pwd )
builddir=$1
# Default matches the report script's own --backend default, so calling
# with a single argument (as the CI does) behaves as before.
backend=${2:-unknown}

meson test -C "${builddir}"

# Store the exit code for the CI run, but always
# generate the reports
exit_code=$?

cd "${builddir}" || exit 1

"${srcdir}/.gitlab/scripts/meson-junit-report.py" \
        --project-name=gdk-pixbuf \
        --backend="${backend}" \
        --job-id="${CI_JOB_NAME}" \
        --output="report-${CI_JOB_NAME}.xml" \
        meson-logs/testlog.json

exit $exit_code