Infra-Health: Decouple from other TF-M jobs

The Infra-Health job needs a stable, known-good version of TF-M to
verify that the CI infrastructure itself is working. The other CI
jobs, by contrast, must build the latest HEAD of TF-M's master
branch to verify incoming patches. The Infra-Health job is therefore
decoupled from the other TF-M jobs.

The decoupled pipelines are forked from commit
709c1546b4ebf1d63cfbd4eaddb5d19443c920b0 of tf-m-ci-scripts.

Change-Id: If005dc3d0b19d9b58ac4e471f687e72419eb3b92
Signed-off-by: Xinyu Zhang <xinyu.zhang@arm.com>
diff --git a/jenkins/build-config-infra-health.jpl b/jenkins/build-config-infra-health.jpl
new file mode 100644
index 0000000..00550a8
--- /dev/null
+++ b/jenkins/build-config-infra-health.jpl
@@ -0,0 +1,131 @@
+#!/usr/bin/env groovy
+//-------------------------------------------------------------------------------
+// Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
+//
+// SPDX-License-Identifier: BSD-3-Clause
+//
+//-------------------------------------------------------------------------------
+
+@Library('trustedfirmware') _
+import org.trustedfirmware.Gerrit
+
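+// Select the build node: ARMCLANG builds are routed to a node whose image,
+// per the label naming, is assumed to include the Arm Compiler toolchain.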
+def nodeLabel = "docker-amd64-bionic"
+if (env.COMPILER == "ARMCLANG") {
+ nodeLabel = "docker-amd64-bionic-armclang"
+}
+
+node(nodeLabel) {
+ stage("Init") {
+ cleanWs()
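+ // Check out TF-M and its dependencies. All refspecs come from job
+ // parameters; for Infra-Health they are expected to point at a stable,
+ // known-good TF-M revision rather than the HEAD of master.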
+ dir("trusted-firmware-m") {
+ checkout(
+ poll: false,
+ scm: [
+ $class: 'GitSCM',
+ branches: [[name: '$GERRIT_BRANCH']],
+ extensions: [[$class: 'BuildChooserSetting', buildChooser: [$class: 'GerritTriggerBuildChooser']]],
+ userRemoteConfigs: [[
+ credentialsId: 'GIT_SSH_KEY',
+ refspec: '$GERRIT_REFSPEC', url: '$CODE_REPO'
+ ]]
+ ])
+ }
+ dir("tf-m-ci-scripts") {
+ checkout(
+ poll: false,
+ scm: [
+ $class: 'GitSCM',
+ branches: [[name: '$CI_SCRIPTS_BRANCH']],
+ extensions: [[$class: 'BuildChooserSetting', buildChooser: [$class: 'GerritTriggerBuildChooser']]],
+ userRemoteConfigs: [[
+ credentialsId: 'GIT_SSH_KEY',
+ refspec: '$CI_SCRIPTS_REFSPEC', url: '$CI_SCRIPTS_REPO'
+ ]]
+ ])
+ }
+ dir("mbedtls") {
+ checkout(
+ changelog: false,
+ poll: false,
+ scm: [
+ $class: 'GitSCM',
+ branches: [[name: 'FETCH_HEAD']],
+ userRemoteConfigs: [[
+ refspec: 'refs/tags/$MBEDTLS_VERSION',
+ url: params.MBEDTLS_URL
+ ]]
+ ]
+ )
+ }
+ dir("mcuboot") {
+ checkout(
+ changelog: false,
+ poll: false,
+ scm: [
+ $class: 'GitSCM',
+ branches: [[name: 'FETCH_HEAD']],
+ userRemoteConfigs: [[
+ refspec: '$MCUBOOT_REFSPEC',
+ url: params.MCUBOOT_URL
+ ]]
+ ]
+ )
+ }
+ dir("tf-m-tests") {
+ checkout(
+ changelog: false,
+ poll: false,
+ scm: [
+ $class: 'GitSCM',
+ branches: [[name: 'FETCH_HEAD']],
+ userRemoteConfigs: [[
+ refspec: '$TFM_TESTS_REFSPEC',
+ url: params.TFM_TESTS_URL
+ ]]
+ ]
+ )
+ }
+ if (env.PSA_API_SUITE != null && env.PSA_API_SUITE != "") {
+ dir("psa-arch-tests") {
+ checkout(
+ changelog: false,
+ poll: false,
+ scm: [
+ $class: 'GitSCM',
+ branches: [[name: 'FETCH_HEAD']],
+ userRemoteConfigs: [[
+ refspec: '$PSA_ARCH_TESTS_VERSION',
+ url: params.PSA_ARCH_TESTS_URL
+ ]]
+ ]
+ )
+ }
+ }
+ }
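+ // Build with the checked-out configuration and archive the install tree.
+ // The Gerrit verify score is reported in the 'finally' block so it is
+ // set on both success and failure.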
+ def verify = 1
+ try {
+ stage("Build") {
+ sh "tf-m-ci-scripts/run-build.sh"
+ }
+ stage("Post") {
+ archiveArtifacts 'trusted-firmware-m/build/install/**'
+ }
+ } catch (Exception e) {
+ manager.buildFailure()
+ verify = -1
+ } finally {
+ g = new Gerrit()
+ g.verifyStatusInWorkspace(verify, env.CONFIG_NAME, 'build')
+ def buildStatus = (verify == 1) ? 'Successful' : 'Failed'
+ //g.commentInWorkspace("Build configuration ${env.CONFIG_NAME} ${buildStatus}: ${env.RUN_DISPLAY_URL}")
+ cleanWs()
+ }
+}
diff --git a/jenkins/ci-infra-health.jpl b/jenkins/ci-infra-health.jpl
new file mode 100644
index 0000000..5d84b94
--- /dev/null
+++ b/jenkins/ci-infra-health.jpl
@@ -0,0 +1,569 @@
+#!/usr/bin/env groovy
+//-------------------------------------------------------------------------------
+// Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
+//
+// SPDX-License-Identifier: BSD-3-Clause
+//
+//-------------------------------------------------------------------------------
+
+@Library('trustedfirmware') _
+import org.trustedfirmware.Gerrit
+import org.trustedfirmware.Summary
+import org.apache.commons.csv.CSVFormat // used by the writeCSV steps below
+
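+// Lookup tables translating values from configs.py output into the
+// parameter values expected by the downstream LAVA jobs.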
+mapPlatform = ["cypress/psoc64": "psoc64",
+ "mps2/an519": "AN519",
+ "mps2/an521": "AN521",
+ "mps2/an539": "AN539",
+ "mps2/sse-200_aws": "SSE-200_AWS",
+ "mps3/an524": "AN524",
+ "musca_a": "MUSCA_A",
+ "musca_b1": "MUSCA_B1",
+ "musca_s1": "MUSCA_S1"]
+
+mapCompiler = ["toolchain_GNUARM.cmake": "GNUARM",
+ "toolchain_ARMCLANG.cmake": "ARMCLANG"]
+
+mapBL2 = ["True": "--bl2",
+ "False": ""]
+
+mapTestPsaApi = ["OFF": "",
+ "INTERNAL_TRUSTED_STORAGE": "ITS",
+ "PROTECTED_STORAGE": "PS",
+ "CRYPTO": "Crypto",
+ "INITIAL_ATTESTATION": "Attest",
+ "IPC": "FF"]
+
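+// Map one build configuration onto the legacy PROJ_CONFIG name used by the
+// LAVA test definitions. Each branch below matches exactly one known
+// combination of parameters; anything unrecognised falls back to
+// ConfigDefault. For example, a build with TFM_PLATFORM=mps2/an521,
+// TOOLCHAIN_FILE=toolchain_GNUARM.cmake and TEST_PSA_API=OFF maps to
+// TARGET_PLATFORM=AN521, COMPILER=GNUARM and an empty PSA_API_SUITE.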
+def generateLavaParam(build_params) {
+ def params = []
+ params += string(name: "TARGET_PLATFORM", \
+ value: mapPlatform[build_params["TFM_PLATFORM"]])
+ params += string(name: "COMPILER", \
+ value: mapCompiler[build_params["TOOLCHAIN_FILE"]])
+ params += string(name: "PSA_API_SUITE", \
+ value: mapTestPsaApi[build_params["TEST_PSA_API"]])
+
+ if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "False" && \
+ build_params["ISOLATION_LEVEL"] == "1" && \
+ build_params["TEST_REGRESSION"] == "False" && \
+ build_params["TEST_PSA_API"] == "OFF" && \
+ build_params["PROFILE"] == "N.A") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigDefault")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "True" && \
+ build_params["ISOLATION_LEVEL"] == "1" && \
+ build_params["TEST_REGRESSION"] == "False" && \
+ build_params["TEST_PSA_API"] == "OFF" && \
+ build_params["PROFILE"] == "N.A") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigCoreIPC")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "True" && \
+ build_params["ISOLATION_LEVEL"] == "2" && \
+ build_params["TEST_REGRESSION"] == "False" && \
+ build_params["TEST_PSA_API"] == "OFF" && \
+ build_params["PROFILE"] == "N.A") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigCoreIPCTfmLevel2")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "False" && \
+ build_params["ISOLATION_LEVEL"] == "1" && \
+ build_params["PROFILE"] == "profile_small" && \
+ build_params["TEST_REGRESSION"] == "False" && \
+ build_params["TEST_PSA_API"] == "OFF") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigDefaultProfileS")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "True" && \
+ build_params["ISOLATION_LEVEL"] == "2" && \
+ build_params["PROFILE"] == "profile_medium"&& \
+ build_params["TEST_REGRESSION"] == "False" && \
+ build_params["TEST_PSA_API"] == "OFF") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigDefaultProfileM")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "False" && \
+ build_params["ISOLATION_LEVEL"] == "1" && \
+ build_params["TEST_REGRESSION"] == "True" && \
+ build_params["TEST_PSA_API"] == "OFF" && \
+ build_params["PROFILE"] == "N.A") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigRegression")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "True" && \
+ build_params["ISOLATION_LEVEL"] == "1" && \
+ build_params["TEST_REGRESSION"] == "True" && \
+ build_params["TEST_PSA_API"] == "OFF" && \
+ build_params["PROFILE"] == "N.A") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigRegressionIPC")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "True" && \
+ build_params["ISOLATION_LEVEL"] == "2" && \
+ build_params["TEST_REGRESSION"] == "True" && \
+ build_params["TEST_PSA_API"] == "OFF" && \
+ build_params["PROFILE"] == "N.A") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigRegressionIPCTfmLevel2")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "False" && \
+ build_params["ISOLATION_LEVEL"] == "1" && \
+ build_params["PROFILE"] == "profile_small" && \
+ build_params["TEST_REGRESSION"] == "True" && \
+ build_params["TEST_PSA_API"] == "OFF") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigRegressionProfileS")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "True" && \
+ build_params["ISOLATION_LEVEL"] == "2" && \
+ build_params["PROFILE"] == "profile_medium"&& \
+ build_params["TEST_REGRESSION"] == "True" && \
+ build_params["TEST_PSA_API"] == "OFF") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigRegressionProfileM")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "False" && \
+ build_params["ISOLATION_LEVEL"] == "1" && \
+ build_params["TEST_REGRESSION"] == "False" && \
+ build_params["TEST_PSA_API"] != "OFF" && \
+ build_params["PROFILE"] == "N.A") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigPsaApiTest")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "True" && \
+ build_params["ISOLATION_LEVEL"] == "1" && \
+ build_params["TEST_REGRESSION"] == "False" && \
+ build_params["TEST_PSA_API"] != "OFF" && \
+ build_params["PROFILE"] == "N.A") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigPsaApiTestIPC")
+ }
+ else if (build_params["BL2"] == "True" && \
+ build_params["NS"] == "True" && \
+ build_params["PSA_API"] == "True" && \
+ build_params["ISOLATION_LEVEL"] == "2" && \
+ build_params["TEST_REGRESSION"] == "False" && \
+ build_params["TEST_PSA_API"] != "OFF" && \
+ build_params["PROFILE"] == "N.A") {
+ params += string(name: "PROJ_CONFIG", value: "ConfigPsaApiTestIPCTfmLevel2")
+ }
+ else {
+ params += string(name: "PROJ_CONFIG", value: "ConfigDefault")
+ }
+ return params
+}
+
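+// Ask configs.py for the list of build configurations in the given filter
+// group(s). A space-separated FILTER_GROUP is expanded into repeated -g
+// arguments, e.g. "groupA groupB" becomes "-g groupA -g groupB".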
+def listConfigs(ci_scripts_dir, config_list, filter_group) {
+ dir(ci_scripts_dir) {
+ echo "Obtaining list of configs."
+ echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")}"
+ def build_config_list_raw = sh(script: """\
+python3 ./configs.py -g ${filter_group.replace(" ", " -g ")}
+""", returnStdout: true).trim()
+ def build_config_list = build_config_list_raw.tokenize('\n')
+ config_list.addAll(build_config_list)
+ }
+}
+
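+// Return a closure (run later via parallel()) that triggers the
+// tf-m-build-config-infra-health job for one configuration and, when the
+// resulting image is testable, submits a follow-up LAVA job.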
+def buildConfig(ci_scripts_dir, config, filter_group, results) {
+ def params = []
+ def params_collection = [:]
+ def build_config_params
+ dir(ci_scripts_dir) {
+ echo "Obtaining build configuration for config ${config}"
+ echo "Running: python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}"
+ build_config_params = sh(script: """\
+python3 ./configs.py -g ${filter_group.replace(" ", " -g ")} ${config}
+""", returnStdout: true).trim()
+ }
+ def lines = build_config_params.tokenize('\n')
+ for (String line : lines) {
+ def key, value
+ (key, value) = line.tokenize('=')
+ params += string(name: key, value: value)
+ params_collection[key] = value
+ }
+ params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
+ params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
+ params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
+ params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
+ params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
+ params += string(name: 'MBEDTLS_VERSION', value: env.MBEDTLS_VERSION)
+ params += string(name: 'CODE_REPO', value: env.CODE_REPO)
+ params += string(name: 'CODE_COVERAGE_EN', value: env.CODE_COVERAGE_EN)
+ params += string(name: 'TFM_TESTS_REFSPEC', value: env.TFM_TESTS_REFSPEC)
+ params += string(name: 'CI_SCRIPTS_REFSPEC', value: env.CI_SCRIPTS_REFSPEC)
+ return { ->
+ def build_res = build(job: 'tf-m-build-config-infra-health', parameters: params, propagate: false)
+ def build_info = [build_res, config, params_collection]
+ results['builds'][build_res.number] = build_info
+ def build_url = build_res.getAbsoluteUrl()
+ print("${build_res.number}: ${config} ${build_res.result} ${build_url}")
+ failure_states = ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]
+ if (build_res.result in failure_states) {
+ error("Build failed at ${build_url}")
+ }
+ else if (params_collection["NS"] == "False" ||
+ params_collection["PARTITION_PS"] == "OFF") {
+ print("LAVA is not needed for ${build_url}")
+ }
+ else {
+ print("Doing LAVA stuff for ${build_url}")
+ params += generateLavaParam(params_collection)
+ params += string(name: 'BUILD_NUMBER', value: "${build_res.number}")
+ params += string(name: 'BUILD_URL', value: build_url)
+ params += string(name: 'LAVA_URL', value: env.LAVA_URL)
+ params += string(name: 'CI_SCRIPTS_BRANCH', value: env.CI_SCRIPTS_BRANCH)
+ params += string(name: 'LAVA_CREDENTIALS', value: env.LAVA_CREDENTIALS)
+ def lava_res = build(job: 'tf-m-lava-submit', parameters: params, propagate: false)
+ if (lava_res.result in failure_states) {
+ error("LAVA Create and Submit failed at ${lava_res.getAbsoluteUrl()}")
+ }
+ else {
+ results['lava_jobs'] += lava_res.getDescription()
+ }
+ }
+ }
+}
+
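+// Kept from the forked pipeline for reference: triggers a documentation
+// build. Infra-Health does not schedule it (see the Configs stage below).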
+def buildDocs(results) {
+ def params = []
+ params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
+ params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
+ params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
+ params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
+ params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
+ params += string(name: 'MBEDTLS_VERSION', value: env.MBEDTLS_VERSION)
+ params += string(name: 'CODE_REPO', value: env.CODE_REPO)
+ return { ->
+ def res = build(job: 'tf-m-build-docs-infra-health', parameters: params, propagate:false)
+ print("${res.number}: Docs ${res.result} ${res.getAbsoluteUrl()}")
+ results['docs'] = [res.number, res.result, params]
+ if (res.result in ["FAILURE", "ABORTED", "UNSTABLE", "NOT_BUILT"]) {
+ error("Build failed at ${res.getAbsoluteUrl()}")
+ }
+ }
+}
+
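+// Send a notification email on failure. The job-name check is inherited
+// from the forked nightly pipeline, so no email is sent from this job
+// unless it runs under the name "tf-m-nightly".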
+def emailNotification(success, stage) {
+ script {
+ if (env.JOB_NAME.equals("tf-m-nightly") && !env.EMAIL_NOTIFICATION.equals('')) {
+ def result = "Fail."
+ if (success == true) {
+ result = "Success."
+ print("Skipping email notification: ${stage} result is ${result}")
+ }
+ else {
+ emailext (
+ subject: ("Job ${env.JOB_NAME} ${stage} ${env.BUILD_NUMBER} ${result}"),
+ body: "Check console output at ${env.BUILD_URL}",
+ to: "${EMAIL_NOTIFICATION}"
+ )
+ }
+ }
+ } /* script */
+}
+
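+// Flatten the per-build results into a CSV table: one row per platform/
+// compiler/build type/BL2 combination, one column per configuration.
+// @NonCPS so it can iterate over the non-serializable build result objects.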
+@NonCPS
+def generateCsvContent(results) {
+ def resultsParam = []
+ results.each { result ->
+ resultsParam.add([result.value[1], \
+ result.value[0].getResult(), \
+ result.value[2]['TARGET_PLATFORM'], \
+ result.value[2]['COMPILER'], \
+ result.value[2]['PROJ_CONFIG'], \
+ result.value[2]['CMAKE_BUILD_TYPE'], \
+ result.value[2]['BL2'], \
+ result.value[2]['PSA_API_SUITE']])
+ }
+ def configs = [] as Set
+ resultsParam.each { result ->
+ if (result[2] == 'MUSCA_B1') {
+ if (result[0].contains('_OTP_')) {
+ result[2] += '_OTP'
+ }
+ }
+ if (result[6] == 'True') {
+ result[6] = 'BL2'
+ }
+ else {
+ result[6] = 'NOBL2'
+ }
+ def config = result[4]
+ if (result[7] != "''") {
+ config += ' (' + result[7] + ') '
+ }
+ configs.add(config)
+ result.add(config)
+ }
+ configs.sort()
+ def csvContent = []
+ resultsParam.each { result ->
+ def configExists = false
+ for (csvLine in csvContent) {
+ if (csvLine[0] == result[2] && \
+ csvLine[1] == result[3] && \
+ csvLine[2] == result[5] && \
+ csvLine[3] == result[6]) {
+ csvLine[4][result[8]] = result[1]
+ configExists = true
+ break
+ }
+ }
+ if (!configExists) {
+ csvContent.add([result[2], result[3], result[5], result[6], [:]])
+ csvContent.last()[4][result[8]] = result[1]
+ }
+ }
+ csvContent.sort{a,b -> a[0] <=> b[0] ?: a[1] <=> b[1] ?: a[2] <=> b[2] ?: a[3] <=> b[3]}
+ def csvTable = [['Platform', 'Compiler', 'Cmake Build Type', 'BL2']]
+ csvTable[0] += configs
+ def currentPlatform = ''
+ def currentCompiler = ''
+ def currentBuild = ''
+ csvContent.each { csvLine ->
+ // Modify CSV output format for a better layout
+ if (currentPlatform == csvLine[0]) {
+ csvTable.add([''])
+ }
+ else {
+ csvTable.add([csvLine[0]])
+ currentPlatform = csvLine[0]
+ currentCompiler = ''
+ currentBuild = ''
+ }
+ if (currentCompiler == csvLine[1]) {
+ csvTable.last().add('')
+ }
+ else {
+ csvTable.last().add(csvLine[1])
+ currentCompiler = csvLine[1]
+ currentBuild = ''
+ }
+ if (currentBuild == csvLine[2]) {
+ csvTable.last().add('')
+ }
+ else {
+ csvTable.last().add(csvLine[2])
+ currentBuild = csvLine[2]
+ }
+ csvTable.last().add(csvLine[3])
+ configs.each { config ->
+ if (csvLine[4].containsKey(config)) {
+ csvTable.last().add(csvLine[4][config])
+ }
+ else {
+ csvTable.last().add('N/A')
+ }
+ }
+ }
+ return csvTable
+}
+
+def generateBuildCsv(results) {
+ def csvContent = generateCsvContent(results)
+ node("master") {
+ writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
+ archiveArtifacts 'build_results.csv'
+ }
+}
+
+def buildCsv(results) {
+ def summary = new Summary();
+ def csvContent = summary.getBuildCsv(results)
+ node("master") {
+ writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
+ archiveArtifacts 'build_results.csv'
+ }
+}
+
+def writeSummary(results) {
+ def summary = new Summary();
+ def buildLinks = summary.getLinks(results)
+ node("master") {
+ writeFile file: "build_links.html", text: buildLinks
+ archiveArtifacts 'build_links.html'
+ }
+}
+
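+// Parse lines of the form "<match><score> [failure text]" from the LAVA
+// wait script output, e.g. "BOOT_RESULT: 1" or "TEST_RESULT: -1 <details>".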
+def lineInString(string, match) {
+ def lines = string.split("\n")
+ def result = lines.findAll { it.contains(match) }
+ return result[0]
+}
+
+def getResult(string, match) {
+ def line = lineInString(string, match)
+ def a = line.split(match)[1].split(' ')
+ def score = a[0]
+ if (a.size() > 1) {
+ def fail_text = a[1..-1].join(" ")
+ return [score, fail_text]
+ }
+ return [score, ""]
+}
+
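+// Collect LAVA job IDs from the submitter job descriptions, which are
+// expected to contain entries of the form "JOBS: <id>".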
+def submitJobsToList(results) {
+ def all_jobs = []
+ for (String result : results) {
+ def jobs_s = result.split('JOBS: ')
+ if (jobs_s.size() > 1) {
+ all_jobs += jobs_s[1]
+ }
+ }
+ return(all_jobs)
+}
+
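+// Main flow: list the configurations, trigger one parallel build per
+// configuration, then wait for the resulting LAVA jobs.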
+def configs = []
+def builds = [:]
+def results = [:]
+
+node("docker-amd64-bionic") {
+ stage("Init") {
+ cleanWs()
+ dir("tf-m-ci-scripts") {
+ git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
+ }
+ }
+ stage("Configs") {
+ // Populate configs
+ listConfigs('tf-m-ci-scripts', configs, env.FILTER_GROUP)
+ results['builds'] = [:]
+ results['lava_jobs'] = []
+ for (config in configs) {
+ builds[config] = buildConfig("tf-m-ci-scripts", config, env.FILTER_GROUP, results)
+ }
+ // builds["docs"] = buildDocs(results) # Build Docs is not necessary in Infra-Health
+ }
+}
+
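+// Run all configuration builds in parallel, then report a combined Gerrit
+// verify score and publish the CSV/HTML build summaries.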
+stage("Builds") {
+ def verify = 1
+ def success = true
+ try {
+ parallel(builds)
+ } catch (Exception e) {
+ print(e)
+ manager.buildFailure()
+ verify = -1
+ success = false
+ } finally {
+ print("Verifying status")
+ emailNotification(success, 'Build')
+ g = new Gerrit()
+ g.verifyStatus(verify, 'tf-m-build', 'build')
+ print("Building CSV")
+ generateBuildCsv(results['builds'])
+ writeSummary(results['builds'])
+ }
+}
+
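+// Final stage: optionally copy documentation artifacts (not used by
+// Infra-Health) and wait for the submitted LAVA jobs, voting lava_boot
+// and lava_test on Gerrit according to the results.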
+node("docker-amd64-bionic") {
+ stage("Copy Docs") {
+ if (env.JOB_NAME.equals("tf-m-build-and-test")) {
+ step([$class: 'CopyArtifact', projectName: 'tf-m-build-docs',
+ selector: specific("${results['docs'][0]}"), target: './docs/',
+ optional: true])
+ archiveArtifacts artifacts: 'docs/**', allowEmptyArchive: true
+ }
+ else {
+ print("No doc copy for job: ${env.JOB_NAME}")
+ }
+ }
+ stage("Tests") {
+ dir("tf-m-ci-scripts") {
+ git url: '$CI_SCRIPTS_REPO', branch: '$CI_SCRIPTS_BRANCH', credentialsId: 'GIT_SSH_KEY'
+ }
+ def all_jobs = []
+ def success = true
+ print("Wait for LAVA results here...")
+ try {
+ all_jobs = submitJobsToList(results['lava_jobs'])
+ if (all_jobs.size() > 0) {
+ dir("tf-m-ci-scripts") {
+ withCredentials([usernamePassword(credentialsId: env.LAVA_CREDENTIALS, passwordVariable: 'LAVA_TOKEN', usernameVariable: 'LAVA_USER')]) {
+ output = sh(script: """./lava_helper/lava_wait_jobs.py --job-ids ${all_jobs.join(",")} \
+ --lava-url ${env.LAVA_URL} --lava-user ${LAVA_USER} --lava-token ${LAVA_TOKEN} \
+ --artifacts-path lava_artifacts --lava-timeout 7200 \
+ """, returnStdout: true).trim()
+ archiveArtifacts artifacts: 'test_summary.*', allowEmptyArchive: true
+ print(output)
+ g = new Gerrit()
+ def (boot_result, boot_output) = getResult(output, 'BOOT_RESULT: ')
+ if (boot_result) {
+ g.verifyStatus(boot_result, "lava_boot", "test")
+ }
+ def (test_result, test_output) = getResult(output, 'TEST_RESULT: ')
+ if (test_result) {
+ g.verifyStatus(test_result, "lava_test", "test")
+ }
+ if (boot_result.toInteger() < 1 || test_result.toInteger() < 1) {
+ error("Marking job as failed due to failed boots: ${boot_output} or tests: ${test_output}")
+ }
+ }
+ }
+ }
+ else {
+ print("There were no LAVA jobs to test.")
+ }
+ }
+ catch (Exception e) {
+ print("ERROR: ${e}")
+ success = false
+ } finally {
+ archiveArtifacts artifacts: 'tf-m-ci-scripts/lava_artifacts/**', allowEmptyArchive: true
+ emailNotification(success, 'Test')
+ cleanWs()
+ if (!success) {
+ error("There was an Error waiting for LAVA jobs")
+ }
+ }
+ }
+}