Rewrite logic to generate job results summary
This patch implements a report CSV helper that can be shared between
build and test jobs to generate CSV files for the result summary. It
makes the scripts easier to maintain.
This patch also aligns the data structures of build and test results to
simplify the logic for email notification and other report artifacts.
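Both build and test results now share one map shape before any
reporting step runs. The sketch below uses invented config names and
URLs purely for illustration:

    def results = [
        'AN521_GCC_Debug':   ['URL': 'https://jenkins.example/job/1/', 'RESULT': 'SUCCESS'],
        'AN521_GCC_Release': ['URL': 'https://jenkins.example/job/2/', 'RESULT': 'FAILURE'],
    ]

With this shape in place, emailNotification(), writeCsv() and
writeHTML() can consume the results of either stage unchanged.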
Signed-off-by: Xinyu Zhang <xinyu.zhang@arm.com>
Change-Id: I61d3f45933908e31880bde0de3250d3066ca79e8
diff --git a/jenkins/ci.jpl b/jenkins/ci.jpl
index 9d4a434..bdee9e3 100644
--- a/jenkins/ci.jpl
+++ b/jenkins/ci.jpl
@@ -59,8 +59,8 @@
print(links)
}
-def listConfigs(ci_scripts_dir, config_list, filter_group) {
- dir(ci_scripts_dir) {
+def listConfigs(config_list, filter_group) {
+ dir("tf-m-ci-scripts") {
echo "Obtaining list of configs."
echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")}"
def build_config_list_raw = sh(script: """\
@@ -71,23 +71,27 @@
}
}
-def buildConfig(ci_scripts_dir, config, filter_group, results) {
- def params = []
- def params_collection = [:]
- def build_config_params
- dir(ci_scripts_dir) {
+def obtainBuildParams(config) {
+ def build_params = [:]
+ dir("tf-m-ci-scripts") {
echo "Obtaining build configuration for config ${config}"
- echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}"
- build_config_params = sh(script: """\
-python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}
-""", returnStdout: true).trim()
+ echo "Running: python3 ./configs.py ${config}"
+ build_config_params = sh(script: "python3 ./configs.py ${config}", returnStdout: true).trim()
}
def lines = build_config_params.tokenize('\n')
for (String line : lines) {
def key, value
(key, value) = line.tokenize('=')
- params += string(name: key, value: value)
- params_collection[key] = value
+ build_params[key] = value
+ }
+ return build_params
+}
+
+def buildConfig(config, results) {
+ def params = []
+ def params_collection = obtainBuildParams(config)
+ params_collection.each { param ->
+ params += string(name: param.key, value: param.value)
}
params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
@@ -114,7 +118,7 @@
return { -> results
def build_res = build(job: 'tf-m-build-config', parameters: params, propagate: false)
def build_url = build_res.getAbsoluteUrl()
- results['builds'][build_res.number] = [build_res, config, params_collection]
+ results['builds'][config] = build_res
print("${build_res.number}: ${config} ${build_res.result} ${build_url}")
@@ -158,12 +162,15 @@
}
}
-def generateEmailBody(stage, failed_jobs) {
+def generateEmailBody(stage, results) {
+ // Results format: [CONFIG_NAME: [URL: "", RESULT: "", ...]]
body = "Check console output at ${env.BUILD_URL} \n\n"
body += "Failed Jobs:\n"
- failed_jobs.each { job ->
- body += "${job.key} ${job.value}\n"
+ results.each { job ->
+ if (job.value['RESULT'] == 'FAILURE') {
+ body += "${job.key} ${job.value['URL']}\n"
+ }
}
body += "\nFor detailed ${stage} results please refer to \
@@ -171,18 +178,16 @@
return body
}
-def emailNotification(results, stage, failed_jobs) {
+def emailNotification(success, stage, results) {
script {
if (env.EMAIL_NOTIFICATION) {
- def result = "Fail."
- if (results == true) {
- result = "Success."
- print("Skip sending as ${result} for ${stage}")
+ if (success == true) {
+ print("Skip sending as Success for ${stage}")
}
else {
emailext (
- subject: ("Job ${env.JOB_NAME} ${stage} ${env.BUILD_NUMBER} ${result}"),
- body: generateEmailBody(stage, failed_jobs),
+ subject: ("Job ${env.JOB_NAME} ${stage} ${env.BUILD_NUMBER} fail"),
+ body: generateEmailBody(stage, results),
to: "${EMAIL_NOTIFICATION}"
)
}
@@ -190,16 +195,6 @@
} /* script */
}
-def filterFailedBuild(results) {
- def failed_builds = [:]
- results.each { result ->
- if (result.value[0].getResult() == "FAILURE") {
- failed_builds[result.value[1]] = result.value[0].getAbsoluteUrl()
- }
- }
- return failed_builds
-}
-
def parseTestResults(output) {
// Verify test status
g = new Gerrit()
@@ -230,132 +225,56 @@
metadata[record_metadata[0]] = record_metadata[1]
}
}
- test_results[config_name] = metadata
+ test_results[config_name] = ['URL': metadata['LAVA link'],
+ 'RESULT': metadata['Test Result']]
}
+
+ writeCsv(test_results, "test_results.csv")
+
return test_results
}
-def filterFailedTest(string) {
- def failed_tests = [:]
- line = lineInString(string, "FAILURE_TESTS:")
- if (line == null) {
- return ["???"];
- }
- a = line.split(' ')
- if (a.size() > 1) {
- a = line.split(' ')[1..-1]
- a.each { fail_test ->
- config_link = fail_test.split(':')
- failed_tests[config_link[0]] = config_link[1..-1].join(':')
- }
- }
- return failed_tests
-}
-
-@NonCPS
def generateCsvContent(results) {
- def resultsParam = []
+ // Results format: [CONFIG_NAME: [URL: "", RESULT: "", ...]]
+ // CSV format: one row per config, listing its build params followed by RESULT
+ def csv_header = obtainBuildParams(results.keySet()[0]).keySet().toList()
+ csv_header.add('RESULT')
+ def csv_content = [csv_header]
results.each { result ->
- if (result.value[2]['BL2'] == "True") {
- resultsParam.add([result.value[1], \
- result.value[0].getResult(), \
- result.value[2]['TFM_PLATFORM'], \
- result.value[2]['COMPILER'].split('_')[0], \
- result.value[2]['CMAKE_BUILD_TYPE'], \
- result.value[2]['BL2'], \
- result.value[2]['LIB_MODEL'], \
- result.value[2]['ISOLATION_LEVEL'], \
- result.value[2]['TEST_REGRESSION'], \
- result.value[2]['TEST_PSA_API'], \
- result.value[2]['PROFILE']])
+ def build_params = []
+ obtainBuildParams(result.key).each { param ->
+ build_params.add(param.value)
}
+ build_params.add(result.value['RESULT'])
+ csv_content.add(build_params)
}
- resultsParam.each { result ->
- result[3] = result[3].split('_')[0]
- build_params = result[6..10]
- configName = ""
- for (map_cfg in mapConfigs) {
- if (build_params[0..4] == map_cfg[0..4]) {
- configName = map_cfg[5]
- break
- }
- }
- if (configName == "") {
- configName = "Default"
- }
- else if (configName == "RegressionProfileM") {
- if (build_params[5] == "OFF") {
- configName = "RegressionProfileM PSOFF"
- }
- }
- result.add(configName)
- }
- def csvContent = []
- resultsParam.each { result ->
- current_row = result[2..4]
- cfgs.each {cfg ->
- if (cfg == result[11]) {
- current_row.add(cfg)
- current_row.add(result[1])
- }
- }
- csvContent.add(current_row)
- }
- csvContent.sort{a,b -> a[0] <=> b[0] ?: a[1] <=> b[1] ?: a[2] <=> b[2]}
- build_summary = []
- current_platform = ""
- current_compiler = ""
- current_build_type = ""
- csvContent.each { build_cfg ->
- if (current_platform != build_cfg[0] || \
- current_compiler != build_cfg[1] || \
- current_build_type != build_cfg[2]) {
- current_platform = build_cfg[0]
- current_compiler = build_cfg[1]
- current_build_type = build_cfg[2]
- csv_line = [current_platform, current_compiler, current_build_type]
- cfgs.each {
- csv_line.add("N.A.")
- }
- build_summary.add(csv_line)
- }
- i = 0
- cfgs.each { cfg ->
- if (cfg == build_cfg[3]) {
- build_summary[-1][3+i] = build_cfg[4]
- }
- i += 1
- }
- }
- build_summary.add(0, ['Platform', 'Compiler', 'Cmake Build Type'])
- build_summary[0] += cfgs
- return build_summary
+ return csv_content
}
-def generateBuildCsv(results) {
+def generateHtmlContent(results) {
+ // Results format: [CONFIG_NAME: [URL: "", RESULT: "", ...]]
+ // HTML format: CONFIG_NAME: Job/Logs/Artifacts RESULT
+ def htmlContent = []
+ results.each { result ->
+ htmlContent.add("${result.key}: <a href=\"${result.value['URL']}\">Job</a>/<a href=\"${result.value['URL']}/consoleText\">Logs</a>/<a href=\"${result.value['URL']}/artifact/\">Artifacts</a> ${result.value['RESULT']}<br/>")
+ }
+ htmlContent.sort()
+ return htmlContent.join("\n")
+}
+
+def writeCsv(results, file_name) {
def csvContent = generateCsvContent(results)
- node("master") {
- writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
- archiveArtifacts 'build_results.csv'
- }
+ writeCSV file: file_name, records: csvContent, format: CSVFormat.EXCEL
+ sh(script: """./tf-m-ci-scripts/report_parser/report_csv_helper.py \
+ --input-file ${file_name} --output-file ${file_name} \
+ """, returnStdout: true)
+ archiveArtifacts file_name
}
-def buildCsv(results) {
- def summary = new Summary();
- def csvContent = summary.getBuildCsv(results)
- node("master") {
- writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
- archiveArtifacts 'build_results.csv'
- }
-}
-
-def writeSummary(results) {
- def summary = new Summary();
- def buildLinks = summary.getLinks(results)
- node("master") {
- writeFile file: "build_links.html", text: buildLinks
- archiveArtifacts 'build_links.html'
- }
+def writeHTML(results, file_name) {
+ def buildLinks = generateHtmlContent(results)
+ writeFile file: file_name, text: buildLinks
+ archiveArtifacts file_name
}
def lineInString(string, match) {
@@ -405,11 +324,11 @@
stage("Configs") {
// Populate configs
- listConfigs('tf-m-ci-scripts', configs, env.FILTER_GROUP)
+ listConfigs(configs, env.FILTER_GROUP)
results['builds'] = [:]
results['lava_jobs'] = []
for (config in configs) {
- builds[config] = buildConfig("tf-m-ci-scripts", config, env.FILTER_GROUP, results)
+ builds[config] = buildConfig(config, results)
}
if (!env.JOB_NAME.equals("tf-m-extra-build")) {
builds["docs"] = buildDocs(results)
@@ -428,13 +347,16 @@
success = false
} finally {
print("Verifying status")
- def failed_builds = filterFailedBuild(results['builds'])
- emailNotification(success, 'build', failed_builds)
g = new Gerrit()
g.verifyStatus(verify, 'tf-m-build', 'build')
- print("Building CSV")
- generateBuildCsv(results['builds'])
- writeSummary(results['builds'])
+ print("Generating build results summary.")
+ def build_results_for_summary = [:]
+ results['builds'].each { build ->
+ build_results_for_summary[build.key] = ['URL': build.value.getAbsoluteUrl(), 'RESULT': build.value.result]
+ }
+ emailNotification(success, 'build', build_results_for_summary)
+ writeCsv(build_results_for_summary, "build_results.csv")
+ writeHTML(build_results_for_summary, "build_links.html")
}
}
@@ -444,6 +366,7 @@
}
def all_jobs = []
def success = true
+ def test_results = [:]
print("Wait for LAVA results here...")
try {
all_jobs = submitJobsToList(results['lava_jobs'])
@@ -458,10 +381,8 @@
println("--- output from lava_wait_jobs.py ---")
println(output)
println("--- end of output from lava_wait_jobs.py ---")
- parseTestResults(output)
+ test_results = parseTestResults(output)
archiveArtifacts artifacts: 'test_summary.*', allowEmptyArchive: true
- archiveArtifacts artifacts: 'test_results.csv', allowEmptyArchive: true
-
if (env.CODE_COVERAGE_EN == "TRUE") {
println("Producing merged report")
sh(script: """./tf-m-ci-scripts/lava_helper/codecov_merge.sh""")
@@ -480,7 +401,7 @@
} finally {
archiveArtifacts artifacts: 'cfgs/**', allowEmptyArchive: true
if (all_jobs.size() > 0) {
- emailNotification(success, 'test', filterFailedTest(output))
+ emailNotification(success, 'test', test_results)
}
cleanWs()
if (!success) {
diff --git a/lava_helper/lava_wait_jobs.py b/lava_helper/lava_wait_jobs.py
index abd9c8a..51b7e97 100755
--- a/lava_helper/lava_wait_jobs.py
+++ b/lava_helper/lava_wait_jobs.py
@@ -41,9 +41,7 @@
def process_finished_jobs(finished_jobs, user_args):
print_lava_urls(finished_jobs, user_args)
test_report(finished_jobs, user_args)
- failure_report(finished_jobs, user_args)
job_links(finished_jobs, user_args)
- csv_report(finished_jobs)
codecov_helper.coverage_reports(finished_jobs, user_args)
def get_finished_jobs(job_list, user_args, lava):
@@ -104,12 +102,6 @@
def lava_id_to_url(id, user_args):
return "{}/scheduler/job/{}".format(user_args.lava_url, id)
-def generateTestResult(info):
- if info['health'] == "Complete" and info['state'] == "Finished":
- return "PASS"
- else:
- return "FAIL"
-
def job_links(jobs, user_args):
job_links = ""
for job, info in jobs.items():
@@ -122,49 +114,6 @@
job_links += "TFM LOG: {}artifact/{}/target_log.txt\n".format(os.getenv("BUILD_URL"), info['job_dir'])
print(job_links)
-def csv_report(jobs):
- lava_jobs = []
- for job, info in jobs.items():
- exist = False
- for record in lava_jobs:
- if info['metadata']['platform'] == record["Platform"] and \
- info['metadata']['compiler'] == record["Compiler"] and \
- info['metadata']['build_type'] == record["Build Type"]:
- if record[info['metadata']['build_name']] != "FAIL":
- record[info['metadata']['build_name']] = generateTestResult(info)
- exist = True
- break
- if not exist:
- record = {}
- record["Platform"] = info['metadata']['platform']
- record["Compiler"] = info['metadata']['compiler']
- record["Build Type"] = info['metadata']['build_type']
- record["Config Name"] = info['metadata']['build_name']
- for cfg in cfgs:
- record[cfg] = "N.A."
- record[info['metadata']['name']] = generateTestResult(info)
- lava_jobs.append(record)
- lava_jobs.sort(key=lambda x: x["Platform"] + x["Compiler"] + x["Build Type"])
- with open("test_results.csv", "w", newline="") as csvfile:
- fieldnames = ["Platform", "Compiler", "Build Type"] + list(cfgs)
- writer = csv.DictWriter(csvfile, fieldnames=fieldnames, extrasaction='ignore')
-
- writer.writeheader()
- writer.writerows(lava_jobs)
-
-
-def failure_report(jobs, user_args):
- failed_report = "FAILURE_TESTS:"
- for job, info in jobs.items():
- if info['health'] != "Complete" or info['state'] != "Finished":
- failed_report += " {}:{}artifact/{}/target_log.txt\n".format(info['metadata']['build_name'],
- os.getenv("BUILD_URL"),
- info['job_dir'])
- info['result'] = 'FAILURE'
- else:
- info['result'] = 'SUCCESS'
- print(failed_report)
-
def remove_lava_dupes(results):
for result in results:
if result['result'] != 'pass':
@@ -180,11 +129,14 @@
fail_j = []
jinja_data = []
for job, info in jobs.items():
+ info['result'] = 'SUCCESS'
if info['health'] != 'Complete':
+ info['result'] = 'FAILURE'
fail_j.append(job)
continue
results_file = os.path.join(info['job_dir'], 'results.yaml')
if not os.path.exists(results_file) or (os.path.getsize(results_file) == 0):
+ info['result'] = 'FAILURE'
fail_j.append(job)
continue
with open(results_file, "r") as F:
@@ -196,6 +148,7 @@
jinja_data.append({job: [info, non_lava_results]})
for result in non_lava_results:
if result['result'] == 'fail':
+ info['result'] = 'FAILURE'
fail_j.append(job) if job not in fail_j else fail_j
time.sleep(0.5) # be friendly to LAVA
data = {}
diff --git a/report_parser/report_csv_helper.py b/report_parser/report_csv_helper.py
new file mode 100755
index 0000000..52198e2
--- /dev/null
+++ b/report_parser/report_csv_helper.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2022, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+"""
+Script to create report summary CSV file.
+"""
+
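+# Illustrative transform (column names taken from the key list noted in
+# generate_headers() below):
+#   input rows : CONFIG_NAME, TFM_PLATFORM, ..., EXTRA_PARAMS, RESULT
+#   output rows: the common params plus one column per distinct
+#   EXTRA_PARAMS value, holding that config's RESULT; 'N.A' maps to
+#   the ' Default' column, whose leading space sorts it first.
+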
+import csv
+import argparse
+
+
+def get_extra_config_name(extra_param):
+ return extra_param.replace(', ', '_') if extra_param != 'N.A' else ' Default'
+
+
+def generate_headers(config_results):
+
+ # Keys: [CONFIG_NAME, TFM_PLATFORM, COMPILER, LIB_MODEL, ISOLATION_LEVEL, TEST_REGRESSION,
+ # TEST_PSA_API, CMAKE_BUILD_TYPE, BL2, PROFILE, EXTRA_PARAMS, RESULT]
+
+ common_params = list(config_results[0].keys())[1:-2]
+ extra_params = set()
+
+ for config in config_results:
+ extra_params.add(get_extra_config_name(config['EXTRA_PARAMS']))
+
+ csv_headers = common_params + sorted(list(extra_params))
+ return csv_headers
+
+
+def generate_result_rows(config_results):
+ for config in config_results:
+ config[get_extra_config_name(config['EXTRA_PARAMS'])] = config['RESULT']
+ return sorted(config_results, key=lambda x: x['TFM_PLATFORM'])
+
+
+def main(user_args):
+ with open(user_args.input_file, newline='') as csvfile:
+ config_results = csv.DictReader(csvfile)
+ config_results = [dict(config) for config in config_results]
+ csv_headers = generate_headers(config_results)
+ with open(user_args.output_file, 'w', newline='') as csvfile:
+ writer = csv.DictWriter(csvfile, fieldnames=csv_headers, restval='', extrasaction='ignore')
+ writer.writeheader()
+ writer.writerows(generate_result_rows(config_results))
+
+
+def get_cmd_args():
+ """Parse command line arguments"""
+
+ # Parse command line arguments to override config
+ parser = argparse.ArgumentParser(description="Create CSV report file")
+ cmdargs = parser.add_argument_group("Create CSV file")
+
+ # Configuration control
+ cmdargs.add_argument(
+ "--input-file",
+ dest="input_file",
+ action="store",
+ help="Build or test result of the config",
+ )
+ cmdargs.add_argument(
+ "--output-file",
+ dest="output_file",
+ action="store",
+ help="File name of CSV report",
+ )
+
+ return parser.parse_args()
+
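+# Typical invocation (mirroring the writeCsv() step in jenkins/ci.jpl,
+# which rewrites the generated CSV file in place):
+#   ./report_parser/report_csv_helper.py \
+#       --input-file build_results.csv --output-file build_results.csv
+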
+if __name__ == "__main__":
+ main(get_cmd_args())
diff --git a/src/org/trustedfirmware/Summary.groovy b/src/org/trustedfirmware/Summary.groovy
deleted file mode 100644
index 542a41f..0000000
--- a/src/org/trustedfirmware/Summary.groovy
+++ /dev/null
@@ -1,73 +0,0 @@
-//-------------------------------------------------------------------------------
-// Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
-//
-// SPDX-License-Identifier: BSD-3-Clause
-//
-//-------------------------------------------------------------------------------
-
-package org.trustedfirmware;
-
-@NonCPS
-def getBuildCsv(results) {
- def table = [:]
- def projects = []
- results.each { result ->
- res = result.value[0]
- config = result.value[1]
- params = result.value[2]
- if (params['BL2'] == 'True') {
- bl2_string = 'BL2'
- } else {
- bl2_string = 'NOBL2'
- }
- if (params["PSA_API_SUITE"].isEmpty()) {
- psa_string = ""
- } else {
- psa_string = "_${params['PSA_API_SUITE']}"
- }
- row_string = "${params['TARGET_PLATFORM']}_${params['COMPILER']}_${params['CMAKE_BUILD_TYPE']}_${bl2_string}${psa_string}"
- column_string = "${params['PROJ_CONFIG']}"
- row = table[row_string]
- if (row == null) {
- row = [:]
- }
- row[column_string] = res.getResult()
- table[row_string] = row
- if(!projects.contains(params['PROJ_CONFIG'])) {
- projects += params['PROJ_CONFIG']
- }
- }
- header = []
- header += "" // top left
- header.addAll(projects)
- header.sort { it.toLowerCase() }
- csvContent = []
- for (row in table) {
- row_item = []
- row_item += row.key
- for (project in projects) {
- result = table[row.key][project]
- if (result == null) {
- result = "N/A"
- }
- row_item += result
- }
- csvContent.add(row_item)
- }
- csvContent.sort { it[0].toLowerCase() }
- csvContent.add(0, header)
- return csvContent
-}
-
-@NonCPS
-def getLinks(results) {
- linksContent = []
- results.each { result ->
- res = result.value[0]
- config = result.value[1]
- url = res.getAbsoluteUrl()
- linksContent.add("${config}: <a href=\"${url}\">Job</a>/<a href=\"${url}/consoleText\">Logs</a>/<a href=\"${url}/artifact/\">Artifacts</a><br/>")
- }
- linksContent.sort()
- return linksContent.join("\n")
-}