#!/usr/bin/env python3

from __future__ import print_function

__copyright__ = """
/*
 * Copyright (c) 2020, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */
"""

"""
Script for waiting for LAVA jobs and parsing the results
"""
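
# Example invocation (a sketch: the script path, URL, user and token are
# placeholders; the flags are defined in get_cmd_args() below):
#
#   ./lava_wait_jobs.py --job-ids 123,124 \
#       --lava-url https://lava.example.com \
#       --lava-user ci-user --lava-token "$LAVA_TOKEN" \
#       --artifacts-path lava_artifacts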

import os
import sys
import time
import yaml
import argparse
import csv
from jinja2 import Environment, FileSystemLoader
from lava_helper_configs import *
from lava_helper import test_lava_dispatch_credentials

try:
    from tfm_ci_pylib.utils import save_json, load_json, sort_dict, \
        load_yaml, test, print_test
    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
except ImportError:
    dir_path = os.path.dirname(os.path.realpath(__file__))
    sys.path.append(os.path.join(dir_path, "../"))
    from tfm_ci_pylib.utils import save_json, load_json, sort_dict, \
        load_yaml, test, print_test
    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector

cfgs = ["Default", "CoreIPC", "CoreIPCTfmLevel2", "CoreIPCTfmLevel3",
        "Regression", "RegressionIPC",
        "RegressionIPCTfmLevel2", "RegressionIPCTfmLevel3",
        "DefaultProfileS", "RegressionProfileS",
        "DefaultProfileM", "RegressionProfileM", "RegressionProfileM PSOFF",
        "DefaultProfileL", "RegressionProfileL",
        "PsaApiTest (Attest)", "PsaApiTestIPC (Attest)",
        "PsaApiTestIPCTfmLevel2 (Attest)",
        "PsaApiTest (Crypto)", "PsaApiTestIPC (Crypto)",
        "PsaApiTestIPCTfmLevel2 (Crypto)",
        "PsaApiTest (PS)", "PsaApiTestIPC (PS)",
        "PsaApiTestIPCTfmLevel2 (PS)",
        "PsaApiTest (ITS)", "PsaApiTestIPC (ITS)",
        "PsaApiTestIPCTfmLevel2 (ITS)",
        "PsaApiTestIPC (FF)",
        "PsaApiTestIPCTfmLevel2 (FF)",
        "PsaApiTestIPCTfmLevel3 (ITS)", "PsaApiTestIPCTfmLevel3 (PS)",
        "PsaApiTestIPCTfmLevel3 (Crypto)", "PsaApiTestIPCTfmLevel3 (Attest)",
        "PsaApiTestIPCTfmLevel3 (FF)"]
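
# Note: these config names double as the result columns of test_results.csv
# (see csv_report() below); each cell starts as "N.A." until a job for that
# config reports in.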

def wait_for_jobs(user_args):
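    """Wait for all the LAVA jobs listed in --job-ids to finish (within
    the dispatch timeout), cancel any stragglers, then fetch artifacts
    and emit the boot, test, failure and CSV reports."""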
    job_list = user_args.job_ids.split(",")
    job_list = [int(x) for x in job_list if x != '']
    lava = test_lava_dispatch_credentials(user_args)
    finished_jobs = lava.block_wait_for_jobs(job_list,
                                             user_args.dispatch_timeout, 0.5)
    unfinished_jobs = [item for item in job_list if item not in finished_jobs]
    for job in unfinished_jobs:
        info_print("Cancelling unfinished job: {}".format(job))
        lava.cancel_job(job)
    if user_args.artifacts_path:
        for job, info in finished_jobs.items():
            info['job_dir'] = os.path.join(user_args.artifacts_path,
                                           "{}_{}".format(str(job),
                                                          info['description']))
            finished_jobs[job] = info
        finished_jobs = fetch_artifacts(finished_jobs, user_args, lava)
    print_lava_urls(finished_jobs, user_args)
    job_links(finished_jobs, user_args)
    boot_report(finished_jobs, user_args)
    test_report(finished_jobs, user_args, lava)
    failure_report(finished_jobs, user_args)
    csv_report(finished_jobs)
Matthew Hart | fb6fd36 | 2020-03-04 21:03:59 +0000 | [diff] [blame] | 79 | |
| 80 | def fetch_artifacts(jobs, user_args, lava): |
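    """Download the definition, target log, config and results of each
    job into its job_dir, attaching the job metadata along the way."""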
    if not user_args.artifacts_path:
        return jobs  # nothing to fetch, hand the jobs back unchanged
    for job_id, info in jobs.items():
        job_dir = info['job_dir']
        info_print("Fetching artifacts for JOB: {} to {}".format(job_id,
                                                                 job_dir))
        os.makedirs(job_dir, exist_ok=True)
        def_path = os.path.join(job_dir, 'definition.yaml')
        target_log = os.path.join(job_dir, 'target_log.txt')
        config = os.path.join(job_dir, 'config.tar.bz2')
        results_file = os.path.join(job_dir, 'results.yaml')
        definition, metadata = lava.get_job_definition(job_id, def_path)
        jobs[job_id]['metadata'] = metadata
        time.sleep(0.2)  # be friendly to LAVA
        lava.get_job_log(job_id, target_log)
        time.sleep(0.2)
        lava.get_job_config(job_id, config)
        time.sleep(0.2)
        lava.get_job_results(job_id, results_file)
    return jobs


def lava_id_to_url(job_id, user_args):
    return "{}/scheduler/job/{}".format(user_args.lava_url, job_id)

def generate_test_result(info):
    if info['health'] == "Complete" and info['state'] == "Finished":
        return "PASS"
    else:
        return "FAIL"

def job_links(jobs, user_args):
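    """Print the build config, LAVA link and build link of every job."""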
    links = ""
    for job, info in jobs.items():
        links += "Build Config: {} ".format(info['metadata']['build_name'])
        links += "LAVA link: {} ".format(lava_id_to_url(job, user_args))
        links += "Build link: {}\n".format(info['metadata']['build_job_url'])
    print(links)

def csv_report(jobs):
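    """Write test_results.csv: one row per (platform, compiler, build
    type), one PASS/FAIL/N.A. column per known config."""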
    lava_jobs = []
    for job, info in jobs.items():
        exists = False
        for record in lava_jobs:
            if info['metadata']['platform'] == record["Platform"] and \
               info['metadata']['compiler'] == record["Compiler"] and \
               info['metadata']['build_type'] == record["Build Type"]:
                # Never overwrite a FAIL once recorded for this config.
                if record[info['metadata']['name']] != "FAIL":
                    record[info['metadata']['name']] = generate_test_result(info)
                exists = True
                break
        if not exists:
            record = {}
            record["Platform"] = info['metadata']['platform']
            record["Compiler"] = info['metadata']['compiler']
            record["Build Type"] = info['metadata']['build_type']
            record["Config Name"] = info['metadata']['name']
            for cfg in cfgs:
                record[cfg] = "N.A."
            record[info['metadata']['name']] = generate_test_result(info)
            lava_jobs.append(record)
    lava_jobs.sort(key=lambda x: x["Platform"] + x["Compiler"] + x["Build Type"])
    with open("test_results.csv", "w", newline="") as csvfile:
        fieldnames = ["Platform", "Compiler", "Build Type"] + cfgs
        # extrasaction='ignore' drops record fields (e.g. "Config Name")
        # that are not in the header.
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames,
                                extrasaction='ignore')
        writer.writeheader()
        writer.writerows(lava_jobs)

def boot_report(jobs, user_args):
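    """Print BOOT_RESULT: +1 if every job completed, otherwise -1 plus
    the URLs of the incomplete jobs."""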
    incomplete_jobs = []
    for job, info in jobs.items():
        if info['health'] != 'Complete':
            if info['error_reason'] == 'Infrastructure':
                info_print("Job {} failed with Infrastructure error".format(job))
            incomplete_jobs.append(job)
    incomplete_output = [lava_id_to_url(x, user_args) for x in incomplete_jobs]
    if incomplete_jobs:
        print("BOOT_RESULT: -1 Failed: {}".format(incomplete_output))
    else:
        print("BOOT_RESULT: +1")

def failure_report(jobs, user_args):
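    """Print a FAILURE_TESTS: line naming the build and LAVA URL of every
    job that did not finish cleanly."""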
    failed_report = "FAILURE_TESTS:"
    for job, info in jobs.items():
        if info['health'] != "Complete" or info['state'] != "Finished":
            failed_report += " {}:{}".format(info['metadata']['build_name'],
                                             lava_id_to_url(job, user_args))
    print(failed_report)

def remove_lava_dupes(results):
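    """Drop failed 'lava' suite entries that have a passing duplicate of
    the same name."""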
    # Collect the duplicates first: removing items from a list while
    # iterating over it would skip elements.
    duplicates = []
    for result in results:
        if result['result'] != 'pass' and result['suite'] == "lava":
            for other in results:
                if other is not result and other['name'] == result['name'] \
                        and other['result'] == 'pass':
                    duplicates.append(result)
                    break
    for duplicate in duplicates:
        results.remove(duplicate)
    return results

def test_report(jobs, user_args, lava):
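    """Print TEST_RESULT: +1/-1 from the per-suite results of each job
    and render the HTML/CSV summaries via render_jinja()."""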
    # parsing of test results is WIP
    fail_j = []
    jinja_data = []
    for job, info in jobs.items():
        results_file = os.path.join(info['job_dir'], 'results.yaml')
        if not os.path.exists(results_file) or os.path.getsize(results_file) == 0:
            fail_j.append(job)
            continue
        with open(results_file, "r") as F:
            res_data = F.read()
        # safe_load: the results file is plain data, never executable tags
        results = yaml.safe_load(res_data)
        non_lava_results = [x for x in results if x['suite'] != 'lava']
        info['lava_url'] = lava_id_to_url(job, user_args)
        info['artifacts_dir'] = "tf-m-ci-scripts/{}".format(info['job_dir'])
        jinja_data.append({job: [info, non_lava_results]})
        for result in non_lava_results:
            if result['result'] != 'pass' and job not in fail_j:
                fail_j.append(job)
        time.sleep(0.5)  # be friendly to LAVA
    fail_output = [lava_id_to_url(x, user_args) for x in fail_j]
    if fail_j:
        print("TEST_RESULT: -1 Failed: {}".format(fail_output))
    else:
        print("TEST_RESULT: +1")
    data = {}
    data['jobs'] = jinja_data
    render_jinja(data)

def render_jinja(data):
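    """Render test_summary.html and test_summary.csv from the jinja2
    templates that live alongside this script."""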
    work_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            "jinja2_templates")
    template_loader = FileSystemLoader(searchpath=work_dir)
    template_env = Environment(loader=template_loader)
    html = template_env.get_template("test_summary.jinja2").render(data)
    # 'csv_text' rather than 'csv', to avoid shadowing the csv module
    csv_text = template_env.get_template("test_summary_csv.jinja2").render(data)
    with open('test_summary.html', "w") as F:
        F.write(html)
    with open('test_summary.csv', "w") as F:
        F.write(csv_text)

def print_lava_urls(jobs, user_args):
    output = [lava_id_to_url(x, user_args) for x in jobs]
    print("LAVA jobs triggered for this build: {}".format(output))


def info_print(line):
    print("INFO: {}".format(line))

def main(user_args):
    """ Main logic """
    user_args.lava_rpc = "RPC2"
    wait_for_jobs(user_args)

def get_cmd_args():
    """ Parse command line arguments """

    # Parse command line arguments to override config
    parser = argparse.ArgumentParser(description="Lava Wait Jobs")
    cmdargs = parser.add_argument_group("Lava Wait Jobs")

    # Configuration control
    cmdargs.add_argument(
        "--lava-url", dest="lava_url", action="store",
        help="LAVA lab URL (without RPC2)"
    )
    cmdargs.add_argument(
        "--job-ids", dest="job_ids", action="store", required=True,
        help="Comma-separated list of job IDs"
    )
    cmdargs.add_argument(
        "--lava-token", dest="token_secret", action="store",
        help="LAVA auth token"
    )
    cmdargs.add_argument(
        "--lava-user", dest="token_usr", action="store", help="LAVA username"
    )
    cmdargs.add_argument(
        "--use-env", dest="token_from_env", action="store_true",
        default=False, help="Use LAVA auth info from environment"
    )
    cmdargs.add_argument(
        "--lava-timeout", dest="dispatch_timeout", action="store", type=int,
        default=3600, help="Time in seconds to wait for all jobs"
    )
    cmdargs.add_argument(
        "--artifacts-path", dest="artifacts_path", action="store",
        help="Download LAVA artifacts to this directory"
    )
    return parser.parse_args()


if __name__ == "__main__":
    main(get_cmd_args())