#!/usr/bin/env python3

from __future__ import print_function

__copyright__ = """
/*
 * Copyright (c) 2020-2021, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */
"""

"""
Script for waiting for LAVA jobs and parsing the results
"""
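
# Example invocation (script name and values are illustrative only):
#   python3 lava_wait_jobs.py --lava-url https://lava.example.com \
#       --job-ids 123,124,125 --use-env --artifacts-path lava_artifacts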

import os
import sys
import time
import yaml
import argparse
import csv
import shutil
from jinja2 import Environment, FileSystemLoader
from lava_helper_configs import *
from lava_helper import test_lava_dispatch_credentials
from lava_submit_jobs import *
import codecov_helper


try:
    from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
        load_yaml, test, print_test
    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
except ImportError:
    dir_path = os.path.dirname(os.path.realpath(__file__))
    sys.path.append(os.path.join(dir_path, "../"))
    from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
        load_yaml, test, print_test
    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector

cfgs = ["Default", "CoreIPC", "CoreIPCTfmLevel2", "CoreIPCTfmLevel3",
        "Regression", "RegressionIPC",
        "RegressionIPCTfmLevel2", "RegressionIPCTfmLevel3",
        "DefaultProfileS", "RegressionProfileS",
        "DefaultProfileM", "RegressionProfileM", "RegressionProfileM PSOFF",
        "DefaultProfileL", "RegressionProfileL",
        "PsaApiTest (Attest)", "PsaApiTestIPC (Attest)",
        "PsaApiTestIPCTfmLevel2 (Attest)",
        "PsaApiTest (Crypto)", "PsaApiTestIPC (Crypto)",
        "PsaApiTestIPCTfmLevel2 (Crypto)",
        "PsaApiTest (PS)", "PsaApiTestIPC (PS)",
        "PsaApiTestIPCTfmLevel2 (PS)",
        "PsaApiTest (ITS)", "PsaApiTestIPC (ITS)",
        "PsaApiTestIPCTfmLevel2 (ITS)",
        "PsaApiTestIPC (FF)",
        "PsaApiTestIPCTfmLevel2 (FF)",
        "PsaApiTest (STORAGE)",
        "PsaApiTestIPC (STORAGE)",
        "PsaApiTestIPCTfmLevel2 (STORAGE)",
        "PsaApiTestIPCTfmLevel3 (STORAGE)",
        "PsaApiTestIPCTfmLevel3 (ITS)", "PsaApiTestIPCTfmLevel3 (PS)",
        "PsaApiTestIPCTfmLevel3 (Crypto)", "PsaApiTestIPCTfmLevel3 (Attest)",
        "PsaApiTestIPCTfmLevel3 (FF)"]

# Convert test config identifiers to LAVA naming convention.
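# e.g. "PsaApiTest (Attest)" becomes "PsaApiTest_Attest".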
cfgs = [x.replace(" (", "_").replace(")", "") for x in cfgs]


def wait_for_jobs(user_args):
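    """Wait for the given LAVA jobs to finish, resubmit failed ones once,
    and return a dict of job info for all jobs that completed."""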
    job_list = user_args.job_ids.split(",")
    job_list = [int(x) for x in job_list if x != '']
    lava = test_lava_dispatch_credentials(user_args)
    finished_jobs = get_finished_jobs(job_list, user_args, lava)
    resubmit_jobs = resubmit_failed_jobs(finished_jobs, user_args)
    finished_resubmit_jobs = get_finished_jobs(resubmit_jobs, user_args, lava)
    finished_jobs.update(finished_resubmit_jobs)
    return finished_jobs


def process_finished_jobs(finished_jobs, user_args):
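    """Produce all reports (console summaries, CSV, HTML, coverage) for the
    finished jobs."""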
    print_lava_urls(finished_jobs, user_args)
    job_links(finished_jobs, user_args)
    boot_report(finished_jobs, user_args)
    test_report(finished_jobs, user_args)
    failure_report(finished_jobs, user_args)
    csv_report(finished_jobs)
    codecov_helper.coverage_reports(finished_jobs, user_args)


def get_finished_jobs(job_list, user_args, lava):
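    """Block until the jobs in job_list finish or the dispatch timeout expires,
    cancel any jobs still running, and fetch artifacts for the finished ones."""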
    finished_jobs = lava.block_wait_for_jobs(job_list, user_args.dispatch_timeout, 0.5)
    unfinished_jobs = [item for item in job_list if item not in finished_jobs]
    for job in unfinished_jobs:
        info_print("Cancelling unfinished job: {}".format(job))
        lava.cancel_job(job)
    if user_args.artifacts_path:
        for job, info in finished_jobs.items():
            info['job_dir'] = os.path.join(user_args.artifacts_path, "{}_{}".format(str(job), info['description']))
            finished_jobs[job] = info
        finished_jobs = fetch_artifacts(finished_jobs, user_args, lava)
    return finished_jobs


def resubmit_failed_jobs(jobs, user_args):
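    """Move the definitions of unsuccessful jobs into 'failed_jobs/', drop those
    jobs from the results dict and resubmit them once to LAVA.
    Returns the list of resubmitted job IDs."""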
    if not jobs:
        return []
    time.sleep(2)  # be friendly to LAVA
    failed_job = []
    os.makedirs('failed_jobs', exist_ok=True)
    for job_id, info in jobs.items():
        if not (info['health'] == "Complete" and info['state'] == "Finished"):
            job_dir = info['job_dir']
            def_path = os.path.join(job_dir, 'definition.yaml')
            os.rename(def_path, 'failed_jobs/{}_definition.yaml'.format(job_id))
            shutil.rmtree(job_dir)
            failed_job.append(job_id)
    for failed_job_id in failed_job:
        jobs.pop(failed_job_id)
    resubmitted_jobs = lava_dispatch(user_args, job_dir='failed_jobs')
    resubmitted_jobs = [int(x) for x in resubmitted_jobs if x != '']
    return resubmitted_jobs


def fetch_artifacts(jobs, user_args, lava):
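    """Download the definition, target log, config and results of each job
    into its per-job artifacts directory."""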
    if not user_args.artifacts_path:
        return
    for job_id, info in jobs.items():
        job_dir = info['job_dir']
        info_print("Fetching artifacts for JOB: {} to {}".format(job_id, job_dir))
        os.makedirs(job_dir, exist_ok=True)
        def_path = os.path.join(job_dir, 'definition.yaml')
        target_log = os.path.join(job_dir, 'target_log.txt')
        config = os.path.join(job_dir, 'config.tar.bz2')
        results_file = os.path.join(job_dir, 'results.yaml')
        definition, metadata = lava.get_job_definition(job_id, def_path)
        jobs[job_id]['metadata'] = metadata
        time.sleep(0.2)  # be friendly to LAVA
        lava.get_job_log(job_id, target_log)
        time.sleep(0.2)
        lava.get_job_config(job_id, config)
        time.sleep(0.2)
        lava.get_job_results(job_id, results_file)
    return jobs


def lava_id_to_url(job_id, user_args):
    return "{}/scheduler/job/{}".format(user_args.lava_url, job_id)


def generateTestResult(info):
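    """Map a job's LAVA health/state to a "PASS"/"FAIL" verdict."""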
    if info['health'] == "Complete" and info['state'] == "Finished":
        return "PASS"
    else:
        return "FAIL"


def job_links(jobs, user_args):
    links = ""
    for job, info in jobs.items():
        links += "Build Config: {} ".format(info['metadata']['build_name'])
        links += "LAVA link: {} ".format(lava_id_to_url(job, user_args))
        links += "Build link: {}\n".format(info['metadata']['build_job_url'])
    print(links)


def csv_report(jobs):
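    """Aggregate job results into one row per (platform, compiler, build type)
    and write them to test_results.csv, one column per test config."""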
    lava_jobs = []
    for job, info in jobs.items():
        exist = False
        for record in lava_jobs:
            if info['metadata']['platform'] == record["Platform"] and \
               info['metadata']['compiler'] == record["Compiler"] and \
               info['metadata']['build_type'] == record["Build Type"]:
                if record[info['metadata']['name']] != "FAIL":
                    record[info['metadata']['name']] = generateTestResult(info)
                exist = True
                break
        if not exist:
            record = {}
            record["Platform"] = info['metadata']['platform']
            record["Compiler"] = info['metadata']['compiler']
            record["Build Type"] = info['metadata']['build_type']
            record["Config Name"] = info['metadata']['name']
            for cfg in cfgs:
                record[cfg] = "N.A."
            record[info['metadata']['name']] = generateTestResult(info)
            lava_jobs.append(record)
    lava_jobs.sort(key=lambda x: x["Platform"] + x["Compiler"] + x["Build Type"])
    with open("test_results.csv", "w", newline="") as csvfile:
        fieldnames = ["Platform", "Compiler", "Build Type"] + cfgs
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames, extrasaction='ignore')

        writer.writeheader()
        writer.writerows(lava_jobs)


def boot_report(jobs, user_args):
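    """Print a BOOT_RESULT line: -1 with the URLs of jobs that did not complete
    (flagging infrastructure errors), +1 if all jobs completed."""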
    incomplete_jobs = []
    for job, info in jobs.items():
        if info['health'] != 'Complete':
            if info['error_reason'] == 'Infrastructure':
                info_print("Job {} failed with Infrastructure error".format(job))
            incomplete_jobs.append(job)
    incomplete_output = [lava_id_to_url(x, user_args) for x in incomplete_jobs]
    if len(incomplete_jobs) > 0:
        print("BOOT_RESULT: -1 Failed: {}".format(incomplete_output))
    else:
        print("BOOT_RESULT: +1")


def failure_report(jobs, user_args):
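    """Print a FAILURE_TESTS line listing the build name and LAVA URL of every
    job that did not finish successfully."""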
    failed_report = "FAILURE_TESTS:"
    for job, info in jobs.items():
        if info['health'] != "Complete" or info['state'] != "Finished":
            failed_report += " {}:{}".format(info['metadata']['build_name'],
                                             lava_id_to_url(job, user_args))
    print(failed_report)


def remove_lava_dupes(results):
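    """Drop failed 'lava' suite results that also have a passing entry with
    the same name."""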
    for result in results:
        if result['result'] != 'pass':
            if result['suite'] == "lava":
                for other in [x for x in results if x != result]:
                    if other['name'] == result['name']:
                        if other['result'] == 'pass':
                            results.remove(result)
    return results


def test_report(jobs, user_args):
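    """Print a TEST_RESULT line based on the per-job results.yaml files and
    render the HTML/CSV test summaries."""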
    # parsing of test results is WIP
    fail_j = []
    jinja_data = []
    for job, info in jobs.items():
        results_file = os.path.join(info['job_dir'], 'results.yaml')
        if not os.path.exists(results_file) or (os.path.getsize(results_file) == 0):
            fail_j.append(job)
            continue
        with open(results_file, "r") as F:
            res_data = F.read()
        results = yaml.safe_load(res_data)
        non_lava_results = [x for x in results if x['suite'] != 'lava']
        info['lava_url'] = lava_id_to_url(job, user_args)
        info['artifacts_dir'] = "tf-m-ci-scripts/{}".format(info['job_dir'])
        jinja_data.append({job: [info, non_lava_results]})
        for result in non_lava_results:
            if result['result'] == 'fail':
                if job not in fail_j:
                    fail_j.append(job)
        time.sleep(0.5)  # be friendly to LAVA
    fail_output = [lava_id_to_url(x, user_args) for x in fail_j]
    if len(fail_j) > 0:
        print("TEST_RESULT: -1 Failed: {}".format(fail_output))
    else:
        print("TEST_RESULT: +1")
    data = {}
    data['jobs'] = jinja_data
    render_jinja(data)


def render_jinja(data):
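    """Render the Jinja2 templates to test_summary.html and test_summary.csv."""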
    work_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "jinja2_templates")
    template_loader = FileSystemLoader(searchpath=work_dir)
    template_env = Environment(loader=template_loader)
    html = template_env.get_template("test_summary.jinja2").render(data)
    csv_text = template_env.get_template("test_summary_csv.jinja2").render(data)
    with open('test_summary.html', "w") as F:
        F.write(html)
    with open('test_summary.csv', "w") as F:
        F.write(csv_text)


def print_lava_urls(jobs, user_args):
    output = [lava_id_to_url(x, user_args) for x in jobs]
    print("LAVA jobs triggered for this build: {}".format(output))


def info_print(line):
    print("INFO: {}".format(line))


def main(user_args):
    """ Main logic """
    user_args.lava_rpc = "RPC2"
    for try_time in range(3):
        try:
            finished_jobs = wait_for_jobs(user_args)
            break
        except Exception as e:
            if try_time < 2:
                print("Exception in wait_for_jobs: {!r}".format(e))
                print("Trying to get LAVA jobs again...")
            else:
                raise e
    process_finished_jobs(finished_jobs, user_args)


def get_cmd_args():
    """ Parse command line arguments """

    # Parse command line arguments to override config
    parser = argparse.ArgumentParser(description="Lava Wait Jobs")
    cmdargs = parser.add_argument_group("Lava Wait Jobs")

    # Configuration control
    cmdargs.add_argument(
        "--lava-url", dest="lava_url", action="store", help="LAVA lab URL (without RPC2)"
    )
    cmdargs.add_argument(
        "--job-ids", dest="job_ids", action="store", required=True, help="Comma-separated list of job IDs"
    )
    cmdargs.add_argument(
        "--lava-token", dest="lava_token", action="store", help="LAVA auth token"
    )
    cmdargs.add_argument(
        "--lava-user", dest="lava_user", action="store", help="LAVA username"
    )
    cmdargs.add_argument(
        "--use-env", dest="token_from_env", action="store_true", default=False, help="Use LAVA auth info from environment"
    )
    cmdargs.add_argument(
        "--lava-timeout", dest="dispatch_timeout", action="store", type=int, default=3600, help="Time in seconds to wait for all jobs"
    )
    cmdargs.add_argument(
        "--artifacts-path", dest="artifacts_path", action="store", help="Download LAVA artifacts to this directory"
    )
    return parser.parse_args()


if __name__ == "__main__":
    main(get_cmd_args())