blob: a91fcfb1cdb6eac2837330b136a69bcef2bb0088 [file] [log] [blame]
Matthew Hartfb6fd362020-03-04 21:03:59 +00001#!/usr/bin/env python3
2
3from __future__ import print_function
4
5__copyright__ = """
6/*
Xinyu Zhangf2b7cbf2021-05-18 20:17:34 +08007 * Copyright (c) 2020-2021, Arm Limited. All rights reserved.
Matthew Hartfb6fd362020-03-04 21:03:59 +00008 *
9 * SPDX-License-Identifier: BSD-3-Clause
10 *
11 */
12 """
13
14"""
15Script for waiting for LAVA jobs and parsing the results
16"""
17
18import os
19import sys
Matthew Hartfb6fd362020-03-04 21:03:59 +000020import time
21import yaml
22import argparse
Xinyu Zhang1b8f5152020-11-13 16:10:58 +080023import csv
Matthew Hartfb6fd362020-03-04 21:03:59 +000024from jinja2 import Environment, FileSystemLoader
25from lava_helper_configs import *
26from lava_helper import test_lava_dispatch_credentials
27
28try:
29 from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
30 load_yaml, test, print_test
31 from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
32except ImportError:
33 dir_path = os.path.dirname(os.path.realpath(__file__))
34 sys.path.append(os.path.join(dir_path, "../"))
35 from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
36 load_yaml, test, print_test
37 from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
38
# All known TF-M test configuration names.  These become the per-config
# columns of test_results.csv (see csv_report); configs with no job in the
# current run are reported as "N.A.".
cfgs = ["Default", "CoreIPC", "CoreIPCTfmLevel2", "CoreIPCTfmLevel3",
        "Regression", "RegressionIPC",
        "RegressionIPCTfmLevel2", "RegressionIPCTfmLevel3",
        "DefaultProfileS", "RegressionProfileS",
        "DefaultProfileM", "RegressionProfileM", "RegressionProfileM PSOFF",
        "DefaultProfileL", "RegressionProfileL",
        "PsaApiTest (Attest)", "PsaApiTestIPC (Attest)",
        "PsaApiTestIPCTfmLevel2 (Attest)",
        "PsaApiTest (Crypto)", "PsaApiTestIPC (Crypto)",
        "PsaApiTestIPCTfmLevel2 (Crypto)",
        "PsaApiTest (PS)", "PsaApiTestIPC (PS)",
        "PsaApiTestIPCTfmLevel2 (PS)",
        "PsaApiTest (ITS)", "PsaApiTestIPC (ITS)",
        "PsaApiTestIPCTfmLevel2 (ITS)",
        "PsaApiTestIPC (FF)",
        "PsaApiTestIPCTfmLevel2 (FF)",
        "PsaApiTestIPCTfmLevel3 (ITS)", "PsaApiTestIPCTfmLevel3 (PS)",
        "PsaApiTestIPCTfmLevel3 (Crypto)", "PsaApiTestIPCTfmLevel3 (Attest)",
        "PsaApiTestIPCTfmLevel3 (FF)"]
58
def wait_for_jobs(user_args):
    """Wait for the LAVA jobs named on the command line, then emit reports.

    Parses the comma separated --job-ids value, blocks until every job has
    finished (via get_finished_jobs) and runs each report generator over
    the finished-job dictionary.
    """
    ids = [int(token) for token in user_args.job_ids.split(",") if token != '']
    lava = test_lava_dispatch_credentials(user_args)
    finished = get_finished_jobs(ids, user_args, lava)
    print_lava_urls(finished, user_args)
    job_links(finished, user_args)
    boot_report(finished, user_args)
    test_report(finished, user_args, lava)
    failure_report(finished, user_args)
    csv_report(finished)
70
def get_finished_jobs(job_list, user_args, lava):
    """Block until the jobs in *job_list* finish and return them as a dict.

    Jobs that are still unfinished after the dispatch timeout are cancelled.
    When --artifacts-path is given, a per-job artifact directory is recorded
    in each job's info and the artifacts are downloaded.
    """
    finished_jobs = lava.block_wait_for_jobs(
        job_list, user_args.dispatch_timeout, 0.5)
    for job_id in (j for j in job_list if j not in finished_jobs):
        info_print("Cancelling unfinished job: {}".format(job_id))
        lava.cancel_job(job_id)
    if user_args.artifacts_path:
        for job_id, details in finished_jobs.items():
            details['job_dir'] = os.path.join(
                user_args.artifacts_path,
                "{}_{}".format(str(job_id), details['description']))
            finished_jobs[job_id] = details
        finished_jobs = fetch_artifacts(finished_jobs, user_args, lava)
    return finished_jobs
Matthew Hartfb6fd362020-03-04 21:03:59 +000083
def fetch_artifacts(jobs, user_args, lava):
    """Download per-job artifacts (definition, log, config, results).

    Fills in jobs[job_id]['metadata'] from the job definition as a side
    effect and returns the jobs dict.  Fix: the early-exit path used to
    return None while the normal path returned jobs; both now return jobs
    so callers get a consistent type.
    """
    if not user_args.artifacts_path:
        # Nothing to download, but keep the return type consistent.
        return jobs
    for job_id, info in jobs.items():
        job_dir = info['job_dir']
        info_print("Fetching artifacts for JOB: {} to {}".format(job_id, job_dir))
        os.makedirs(job_dir, exist_ok=True)
        def_path = os.path.join(job_dir, 'definition.yaml')
        target_log = os.path.join(job_dir, 'target_log.txt')
        config = os.path.join(job_dir, 'config.tar.bz2')
        results_file = os.path.join(job_dir, 'results.yaml')
        # Only the metadata half of the definition is used here.
        _, metadata = lava.get_job_definition(job_id, def_path)
        jobs[job_id]['metadata'] = metadata
        time.sleep(0.2)  # be friendly to LAVA
        lava.get_job_log(job_id, target_log)
        time.sleep(0.2)
        lava.get_job_config(job_id, config)
        time.sleep(0.2)
        lava.get_job_results(job_id, results_file)
    return jobs
104
105
def lava_id_to_url(id, user_args):
    """Return the LAVA scheduler URL for job *id* on the configured lab."""
    return f"{user_args.lava_url}/scheduler/job/{id}"
108
def generateTestResult(info):
    """Map a job's health/state pair to a PASS or FAIL verdict."""
    completed = info['health'] == "Complete" and info['state'] == "Finished"
    return "PASS" if completed else "FAIL"
114
def job_links(jobs, user_args):
    """Print, for every job, its build config, LAVA link and build link."""
    pieces = []
    for job_id, details in jobs.items():
        pieces.append("Build Config: {} ".format(details['metadata']['build_name']))
        pieces.append("LAVA link: {} ".format(lava_id_to_url(job_id, user_args)))
        pieces.append("Build link: {}\n".format(details['metadata']['build_job_url']))
    print("".join(pieces))
122
def csv_report(jobs):
    """Write test_results.csv: one row per platform/compiler/build-type,
    one column per known config name, PASS/FAIL/N.A. in each cell.

    A cell already marked FAIL is never upgraded by a later job for the
    same config.
    """
    rows = []
    for job_id, info in jobs.items():
        meta = info['metadata']
        # Find the row for this platform/compiler/build-type combination.
        row = next((r for r in rows
                    if r["Platform"] == meta['platform']
                    and r["Compiler"] == meta['compiler']
                    and r["Build Type"] == meta['build_type']), None)
        if row is None:
            row = {
                "Platform": meta['platform'],
                "Compiler": meta['compiler'],
                "Build Type": meta['build_type'],
                "Config Name": meta['name'],
            }
            row.update({cfg: "N.A." for cfg in cfgs})
            row[meta['name']] = generateTestResult(info)
            rows.append(row)
        elif row[meta['name']] != "FAIL":
            # Keep an existing FAIL sticky; otherwise record this result.
            row[meta['name']] = generateTestResult(info)
    rows.sort(key=lambda r: r["Platform"] + r["Compiler"] + r["Build Type"])
    with open("test_results.csv", "w", newline="") as csvfile:
        writer = csv.DictWriter(
            csvfile,
            fieldnames=["Platform", "Compiler", "Build Type"] + cfgs,
            extrasaction='ignore')
        writer.writeheader()
        writer.writerows(rows)
152
def boot_report(jobs, user_args):
    """Print a BOOT_RESULT line: +1 when every job's health is Complete,
    otherwise -1 with the URLs of the incomplete jobs."""
    incomplete_jobs = []
    for job_id, details in jobs.items():
        if details['health'] == 'Complete':
            continue
        if details['error_reason'] == 'Infrastructure':
            info_print("Job {} failed with Infrastructure error".format(job_id))
        incomplete_jobs.append(job_id)
    incomplete_output = [lava_id_to_url(j, user_args) for j in incomplete_jobs]
    if incomplete_jobs:
        print("BOOT_RESULT: -1 Failed: {}".format(incomplete_output))
    else:
        print("BOOT_RESULT: +1")
165
def failure_report(jobs, user_args):
    """Print a FAILURE_TESTS: line listing build_name:url for every job
    that did not finish with Complete health."""
    entries = ["FAILURE_TESTS:"]
    for job_id, details in jobs.items():
        if details['health'] != "Complete" or details['state'] != "Finished":
            entries.append("{}:{}".format(details['metadata']['build_name'],
                                          lava_id_to_url(job_id, user_args)))
    print(" ".join(entries))
173
def remove_lava_dupes(results):
    """Drop failed 'lava'-suite results that also have a passing result of
    the same name, returning the filtered list.

    Fix: the previous implementation called results.remove() while
    iterating over the same list, which skips the element immediately
    following each removal and can leave duplicates behind.  Build a
    filtered copy instead of mutating in place.
    """
    filtered = []
    for result in results:
        if result['result'] != 'pass' and result['suite'] == "lava":
            # Is there any *other* entry with the same name that passed?
            superseded = any(
                other is not result
                and other['name'] == result['name']
                and other['result'] == 'pass'
                for other in results
            )
            if superseded:
                continue  # a passing duplicate exists; drop this failure
        filtered.append(result)
    return filtered
183
def test_report(jobs, user_args, lava):
    """Print a TEST_RESULT line and render the jinja2 summaries.

    A job fails when its results file is missing/empty or when any
    non-'lava' suite result is not 'pass'.  Fixes: use yaml.safe_load
    (yaml.load without an explicit Loader is deprecated and allows
    arbitrary object construction), and replace the conditional-expression
    -as-statement append idiom with a plain guarded append.
    """
    # parsing of test results is WIP
    fail_j = []
    jinja_data = []
    for job, info in jobs.items():
        results_file = os.path.join(info['job_dir'], 'results.yaml')
        if not os.path.exists(results_file) or (os.path.getsize(results_file) == 0):
            fail_j.append(job)
            continue
        with open(results_file, "r") as F:
            res_data = F.read()
        # Results come from the LAVA server: parse them without enabling
        # arbitrary Python object construction.
        results = yaml.safe_load(res_data)
        non_lava_results = [x for x in results if x['suite'] != 'lava']
        info['lava_url'] = lava_id_to_url(job, user_args)
        info['artifacts_dir'] = "tf-m-ci-scripts/{}".format(info['job_dir'])
        jinja_data.append({job: [info, non_lava_results]})
        if job not in fail_j and any(r['result'] != 'pass' for r in non_lava_results):
            fail_j.append(job)
        time.sleep(0.5)  # be friendly to LAVA
    fail_output = [lava_id_to_url(x, user_args) for x in fail_j]
    if len(fail_j) > 0:
        print("TEST_RESULT: -1 Failed: {}".format(fail_output))
    else:
        print("TEST_RESULT: +1")
    data = {'jobs': jinja_data}
    render_jinja(data)
212
def render_jinja(data):
    """Render *data* through the jinja2 templates next to this script,
    writing test_summary.html and test_summary.csv in the CWD.

    Fix: the local holding the rendered CSV text was named `csv`, which
    shadowed the imported csv module inside this function; renamed to
    avoid confusion.
    """
    work_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            "jinja2_templates")
    template_env = Environment(loader=FileSystemLoader(searchpath=work_dir))
    html = template_env.get_template("test_summary.jinja2").render(data)
    csv_text = template_env.get_template("test_summary_csv.jinja2").render(data)
    with open('test_summary.html', "w") as F:
        F.write(html)
    with open('test_summary.csv', "w") as F:
        F.write(csv_text)
223
def print_lava_urls(jobs, user_args):
    """Print the scheduler URLs of every triggered LAVA job."""
    urls = []
    for job_id in jobs:
        urls.append(lava_id_to_url(job_id, user_args))
    print("LAVA jobs triggered for this build: {}".format(urls))
227
228
def info_print(line):
    """Print *line* prefixed with 'INFO: '."""
    print(f"INFO: {line}")
231
def main(user_args):
    """ Main logic """
    user_args.lava_rpc = "RPC2"
    attempts = 3
    # Retry the whole wait/report cycle a few times before giving up.
    for attempt in range(attempts):
        try:
            wait_for_jobs(user_args)
        except Exception as e:
            print(e)
            if attempt == attempts - 1:
                raise e
            print("Try to get LAVA jobs again...")
        else:
            break
Matthew Hartfb6fd362020-03-04 21:03:59 +0000245
def get_cmd_args():
    """Build the command line parser for the wait-jobs tool and parse argv."""
    parser = argparse.ArgumentParser(description="Lava Wait Jobs")
    group = parser.add_argument_group("Lava Wait Jobs")

    group.add_argument("--lava-url", dest="lava_url", action="store",
                       help="LAVA lab URL (without RPC2)")
    group.add_argument("--job-ids", dest="job_ids", action="store",
                       required=True,
                       help="Comma separated list of job IDS")
    group.add_argument("--lava-token", dest="lava_token", action="store",
                       help="LAVA auth token")
    group.add_argument("--lava-user", dest="lava_user", action="store",
                       help="LAVA username")
    group.add_argument("--use-env", dest="token_from_env",
                       action="store_true", default=False,
                       help="Use LAVA auth info from environment")
    group.add_argument("--lava-timeout", dest="dispatch_timeout",
                       action="store", type=int, default=3600,
                       help="Time in seconds to wait for all jobs")
    group.add_argument("--artifacts-path", dest="artifacts_path",
                       action="store",
                       help="Download LAVA artifacts to this directory")
    return parser.parse_args()
276
277
# Script entry point: parse the CLI arguments, then wait for and report
# on the requested LAVA jobs.
if __name__ == "__main__":
    main(get_cmd_args())