Matthew Hart | fb6fd36 | 2020-03-04 21:03:59 +0000 | [diff] [blame] | 1 | #!/usr/bin/env python3 |
| 2 | |
| 3 | from __future__ import print_function |
| 4 | |
| 5 | __copyright__ = """ |
| 6 | /* |
| 7 | * Copyright (c) 2020, Arm Limited. All rights reserved. |
| 8 | * |
| 9 | * SPDX-License-Identifier: BSD-3-Clause |
| 10 | * |
| 11 | */ |
| 12 | """ |
| 13 | |
| 14 | """ |
| 15 | Script for waiting for LAVA jobs and parsing the results |
| 16 | """ |
| 17 | |
| 18 | import os |
| 19 | import sys |
| 20 | import shutil |
| 21 | import time |
| 22 | import yaml |
| 23 | import argparse |
| 24 | import threading |
Xinyu Zhang | 1b8f515 | 2020-11-13 16:10:58 +0800 | [diff] [blame^] | 25 | import csv |
Matthew Hart | fb6fd36 | 2020-03-04 21:03:59 +0000 | [diff] [blame] | 26 | from copy import deepcopy |
| 27 | from collections import OrderedDict |
| 28 | from jinja2 import Environment, FileSystemLoader |
| 29 | from lava_helper_configs import * |
| 30 | from lava_helper import test_lava_dispatch_credentials |
| 31 | |
| 32 | try: |
| 33 | from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\ |
| 34 | load_yaml, test, print_test |
| 35 | from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector |
| 36 | except ImportError: |
| 37 | dir_path = os.path.dirname(os.path.realpath(__file__)) |
| 38 | sys.path.append(os.path.join(dir_path, "../")) |
| 39 | from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\ |
| 40 | load_yaml, test, print_test |
| 41 | from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector |
| 42 | |
# Every known TF-M build/test configuration name.  Used as the column set of
# the CSV written by csv_report(); configs a platform/compiler/build-type
# combination did not run stay "N.A." in that row.
cfgs = ["Default", "CoreIPC", "CoreIPCTfmLevel2", "CoreIPCTfmLevel3",
        "Regression", "RegressionIPC",
        "RegressionIPCTfmLevel2", "RegressionIPCTfmLevel3",
        "DefaultProfileS", "RegressionProfileS",
        "DefaultProfileM", "RegressionProfileM", "RegressionProfileM PSOFF",
        "PsaApiTest (Attest)", "PsaApiTestIPC (Attest)",
        "PsaApiTestIPCTfmLevel2 (Attest)",
        "PsaApiTest (Crypto)", "PsaApiTestIPC (Crypto)",
        "PsaApiTestIPCTfmLevel2 (Crypto)",
        "PsaApiTest (PS)", "PsaApiTestIPC (PS)",
        "PsaApiTestIPCTfmLevel2 (PS)",
        "PsaApiTest (ITS)", "PsaApiTestIPC (ITS)",
        "PsaApiTestIPCTfmLevel2 (ITS)",
        "PsaApiTestIPC (FF)",
        "PsaApiTestIPCTfmLevel2 (FF)",
        "PsaApiTestIPCTfmLevel3 (ITS)", "PsaApiTestIPCTfmLevel3 (PS)",
        "PsaApiTestIPCTfmLevel3 (Crypto)", "PsaApiTestIPCTfmLevel3 (Attest)",
        "PsaApiTestIPCTfmLevel3 (FF)"]
| 61 | |
def wait_for_jobs(user_args):
    """Block until the listed LAVA jobs finish, then emit every report.

    Jobs still running when the dispatch timeout expires are cancelled.
    If an artifacts path was given, per-job artifacts are downloaded
    before reporting.
    """
    ids = [int(token) for token in user_args.job_ids.split(",") if token != '']
    lava = test_lava_dispatch_credentials(user_args)
    finished_jobs = lava.block_wait_for_jobs(ids, user_args.dispatch_timeout, 0.5)
    # Anything we asked about but did not see finish gets cancelled.
    for job_id in ids:
        if job_id not in finished_jobs:
            info_print("Cancelling unfinished job: {}".format(job_id))
            lava.cancel_job(job_id)
    if user_args.artifacts_path:
        for job_id, info in finished_jobs.items():
            info['job_dir'] = os.path.join(
                user_args.artifacts_path,
                "{}_{}".format(str(job_id), info['description']))
            finished_jobs[job_id] = info
        finished_jobs = fetch_artifacts(finished_jobs, user_args, lava)
    print_lava_urls(finished_jobs, user_args)
    boot_report(finished_jobs, user_args)
    test_report(finished_jobs, user_args, lava)
    csv_report(finished_jobs)
Matthew Hart | fb6fd36 | 2020-03-04 21:03:59 +0000 | [diff] [blame] | 80 | |
def fetch_artifacts(jobs, user_args, lava):
    """Download definition, log, config and results for every finished job.

    Each job's files land in its pre-computed ``info['job_dir']``.
    Returns *jobs* with ``metadata`` attached per job.  (The previous
    early return yielded ``None`` when no artifacts path was set, which
    would clobber the caller's dict; now *jobs* is returned unchanged.)
    """
    if not user_args.artifacts_path:
        return jobs
    for job_id, info in jobs.items():
        job_dir = info['job_dir']
        info_print("Fetching artifacts for JOB: {} to {}".format(job_id, job_dir))
        os.makedirs(job_dir, exist_ok=True)
        def_path = os.path.join(job_dir, 'definition.yaml')
        target_log = os.path.join(job_dir, 'target_log.txt')
        config = os.path.join(job_dir, 'config.tar.bz2')
        results_file = os.path.join(job_dir, 'results.yaml')
        definition, metadata = lava.get_job_definition(job_id, def_path)
        jobs[job_id]['metadata'] = metadata
        time.sleep(0.2)  # be friendly to LAVA
        lava.get_job_log(job_id, target_log)
        time.sleep(0.2)
        lava.get_job_config(job_id, config)
        time.sleep(0.2)
        lava.get_job_results(job_id, results_file)
    return jobs
| 101 | |
| 102 | |
def lava_id_to_url(id, user_args):
    """Return the LAVA scheduler URL for job *id*."""
    return "%s/scheduler/job/%s" % (user_args.lava_url, id)
| 105 | |
def generateTestResult(info):
    """Map a job's LAVA health/state pair to a PASS/FAIL verdict."""
    completed = info['health'] == "Complete" and info['state'] == "Finished"
    return "PASS" if completed else "FAIL"
| 111 | |
def csv_report(jobs):
    """Write test_results.csv: one row per platform/compiler/build type,
    one column per known config, PASS/FAIL/N.A. in each cell.

    A config already marked FAIL for a row is never upgraded by a later
    job for the same config.
    """
    rows = {}
    for job, info in jobs.items():
        meta = info['metadata']
        key = (meta['platform'], meta['compiler'], meta['build_type'])
        row = rows.get(key)
        if row is None:
            # First job seen for this platform/compiler/build-type combo.
            row = {
                "Platform": meta['platform'],
                "Compiler": meta['compiler'],
                "Build Type": meta['build_type'],
                "Config Name": meta['name'],
            }
            for cfg in cfgs:
                row[cfg] = "N.A."
            row[meta['name']] = generateTestResult(info)
            rows[key] = row
        elif row[meta['name']] != "FAIL":
            row[meta['name']] = generateTestResult(info)
    lava_jobs = sorted(
        rows.values(),
        key=lambda r: r["Platform"] + r["Compiler"] + r["Build Type"])
    with open("test_results.csv", "w", newline="") as csvfile:
        fieldnames = ["Platform", "Compiler", "Build Type"] + cfgs
        # "Config Name" is not a CSV column; extrasaction drops it.
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames,
                                extrasaction='ignore')
        writer.writeheader()
        writer.writerows(lava_jobs)
| 141 | |
def boot_report(jobs, user_args):
    """Print a BOOT_RESULT line: +1 when all jobs completed, else -1
    with the URLs of the jobs that did not."""
    incomplete = []
    for job, info in jobs.items():
        if info['health'] == 'Complete':
            continue
        if info['error_reason'] == 'Infrastructure':
            info_print("Job {} failed with Infrastructure error".format(job))
        incomplete.append(job)
    if incomplete:
        urls = [lava_id_to_url(job, user_args) for job in incomplete]
        print("BOOT_RESULT: -1 Failed: {}".format(urls))
    else:
        print("BOOT_RESULT: +1")
| 154 | |
def remove_lava_dupes(results):
    """Drop failed 'lava'-suite entries that have a passing duplicate.

    LAVA can report the same test name more than once; when a failed
    'lava' entry has a same-name passing entry, the failure is noise.
    Mutates *results* in place and returns it.
    """
    # Iterate over a snapshot: the original removed elements from the
    # very list it was iterating, which silently skips the element that
    # follows each removal.
    for result in list(results):
        if result['result'] == 'pass' or result['suite'] != "lava":
            continue
        for other in results:
            if other is not result and other['name'] == result['name'] \
                    and other['result'] == 'pass':
                results.remove(result)
                break
    return results
| 164 | |
def test_report(jobs, user_args, lava):
    """Print a TEST_RESULT line and render the Jinja2 HTML/CSV summaries.

    A job fails the report when its results.yaml is missing/empty or
    contains any non-'lava'-suite result that is not 'pass'.
    (Parsing of test results is WIP.)
    """
    fail_j = []
    jinja_data = []
    for job, info in jobs.items():
        results_file = os.path.join(info['job_dir'], 'results.yaml')
        if not os.path.exists(results_file) or (os.path.getsize(results_file) == 0):
            fail_j.append(job)
            continue
        with open(results_file, "r") as F:
            res_data = F.read()
        # safe_load: results.yaml is plain data, and yaml.load without an
        # explicit Loader is unsafe and deprecated.
        results = yaml.safe_load(res_data)
        non_lava_results = [x for x in results if x['suite'] != 'lava']
        info['lava_url'] = lava_id_to_url(job, user_args)
        info['artifacts_dir'] = "tf-m-ci-scripts/{}".format(info['job_dir'])
        jinja_data.append({job: [info, non_lava_results]})
        for result in non_lava_results:
            if result['result'] != 'pass' and job not in fail_j:
                fail_j.append(job)
        time.sleep(0.5)  # be friendly to LAVA
    fail_output = [lava_id_to_url(x, user_args) for x in fail_j]
    if len(fail_j) > 0:
        print("TEST_RESULT: -1 Failed: {}".format(fail_output))
    else:
        print("TEST_RESULT: +1")
    data = {}
    data['jobs'] = jinja_data
    render_jinja(data)
| 193 | |
def render_jinja(data):
    """Render test_summary.html and test_summary.csv from the Jinja2
    templates next to this script."""
    work_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "jinja2_templates")
    template_loader = FileSystemLoader(searchpath=work_dir)
    template_env = Environment(loader=template_loader)
    html = template_env.get_template("test_summary.jinja2").render(data)
    # Renamed from 'csv': the old local variable shadowed the
    # module-level 'import csv' used by csv_report().
    csv_text = template_env.get_template("test_summary_csv.jinja2").render(data)
    with open('test_summary.html', "w") as F:
        F.write(html)
    with open('test_summary.csv', "w") as F:
        F.write(csv_text)
| 204 | |
def print_lava_urls(jobs, user_args):
    """Print the scheduler URL of every triggered job."""
    urls = [lava_id_to_url(job, user_args) for job in jobs]
    print("LAVA jobs triggered for this build: {}".format(urls))
| 208 | |
| 209 | |
def info_print(line):
    """Print *line* prefixed with 'INFO: '."""
    print("INFO: %s" % (line,))
| 212 | |
def main(user_args):
    """Entry point: fix the XML-RPC endpoint suffix, then wait for and
    report on the requested LAVA jobs."""
    # The LAVA helpers expect the lab's XML-RPC endpoint name here.
    user_args.lava_rpc = "RPC2"
    wait_for_jobs(user_args)
| 217 | |
def get_cmd_args():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(description="Lava Wait Jobs")
    group = parser.add_argument_group("Lava Wait Jobs")
    # (flags, add_argument keyword options) — registered in order below.
    option_table = [
        (("--lava-url",),
         dict(dest="lava_url", action="store",
              help="LAVA lab URL (without RPC2)")),
        (("--job-ids",),
         dict(dest="job_ids", action="store", required=True,
              help="Comma separated list of job IDS")),
        (("--lava-token",),
         dict(dest="token_secret", action="store",
              help="LAVA auth token")),
        (("--lava-user",),
         dict(dest="token_usr", action="store",
              help="LAVA username")),
        (("--use-env",),
         dict(dest="token_from_env", action="store_true", default=False,
              help="Use LAVA auth info from environment")),
        (("--lava-timeout",),
         dict(dest="dispatch_timeout", action="store", type=int, default=3600,
              help="Time in seconds to wait for all jobs")),
        (("--artifacts-path",),
         dict(dest="artifacts_path", action="store",
              help="Download LAVA artifacts to this directory")),
    ]
    for flags, options in option_table:
        group.add_argument(*flags, **options)
    return parser.parse_args()
| 248 | |
| 249 | |
| 250 | if __name__ == "__main__": |
| 251 | main(get_cmd_args()) |