#!/usr/bin/env python3

from __future__ import print_function

__copyright__ = """
/*
 * Copyright (c) 2020, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */
 """

"""
Script for waiting for LAVA jobs and parsing the results
"""

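# Example invocation (a sketch only: the flag values are illustrative and the
# script file name is assumed; the flags themselves are defined in
# get_cmd_args() at the bottom of this file):
#
#   python3 lava_wait_jobs.py --lava-url https://lava.example.org \
#       --job-ids 1234,1235 --use-env --lava-timeout 3600 \
#       --artifacts-path lava_artifacts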
import os
import sys
import time
import yaml
import argparse
import csv
from jinja2 import Environment, FileSystemLoader
from lava_helper_configs import *
from lava_helper import test_lava_dispatch_credentials

try:
    from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
        load_yaml, test, print_test
    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
except ImportError:
    dir_path = os.path.dirname(os.path.realpath(__file__))
    sys.path.append(os.path.join(dir_path, "../"))
    from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
        load_yaml, test, print_test
    from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector

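# Column set used by csv_report() below: one column per TF-M build/test
# configuration, initialised to "N.A." for configurations a platform did not run.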
cfgs = ["Default", "CoreIPC", "CoreIPCTfmLevel2", "CoreIPCTfmLevel3",
        "Regression", "RegressionIPC",
        "RegressionIPCTfmLevel2", "RegressionIPCTfmLevel3",
        "DefaultProfileS", "RegressionProfileS",
        "DefaultProfileM", "RegressionProfileM", "RegressionProfileM PSOFF",
        "PsaApiTest (Attest)", "PsaApiTestIPC (Attest)",
        "PsaApiTestIPCTfmLevel2 (Attest)",
        "PsaApiTest (Crypto)", "PsaApiTestIPC (Crypto)",
        "PsaApiTestIPCTfmLevel2 (Crypto)",
        "PsaApiTest (PS)", "PsaApiTestIPC (PS)",
        "PsaApiTestIPCTfmLevel2 (PS)",
        "PsaApiTest (ITS)", "PsaApiTestIPC (ITS)",
        "PsaApiTestIPCTfmLevel2 (ITS)",
        "PsaApiTestIPC (FF)",
        "PsaApiTestIPCTfmLevel2 (FF)",
        "PsaApiTestIPCTfmLevel3 (ITS)", "PsaApiTestIPCTfmLevel3 (PS)",
        "PsaApiTestIPCTfmLevel3 (Crypto)", "PsaApiTestIPCTfmLevel3 (Attest)",
        "PsaApiTestIPCTfmLevel3 (FF)"]

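# Top-level driver: blocks until the given LAVA job IDs finish (cancelling any
# still running when the timeout expires), then fetches artifacts and emits the
# link, boot, test, failure and CSV reports.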
def wait_for_jobs(user_args):
    job_list = user_args.job_ids.split(",")
    job_list = [int(x) for x in job_list if x != '']
    lava = test_lava_dispatch_credentials(user_args)
    finished_jobs = lava.block_wait_for_jobs(job_list, user_args.dispatch_timeout, 0.5)
    unfinished_jobs = [item for item in job_list if item not in finished_jobs]
    for job in unfinished_jobs:
        info_print("Cancelling unfinished job: {}".format(job))
        lava.cancel_job(job)
    if user_args.artifacts_path:
        for job, info in finished_jobs.items():
            info['job_dir'] = os.path.join(user_args.artifacts_path, "{}_{}".format(str(job), info['description']))
            finished_jobs[job] = info
        finished_jobs = fetch_artifacts(finished_jobs, user_args, lava)
    print_lava_urls(finished_jobs, user_args)
    job_links(finished_jobs, user_args)
    boot_report(finished_jobs, user_args)
    test_report(finished_jobs, user_args, lava)
    failure_report(finished_jobs, user_args)
    csv_report(finished_jobs)

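# Downloads the definition, target log, config tarball and results for each
# finished job into its job_dir, sleeping briefly between requests to avoid
# hammering the LAVA server.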
def fetch_artifacts(jobs, user_args, lava):
    if not user_args.artifacts_path:
        return
    for job_id, info in jobs.items():
        job_dir = info['job_dir']
        info_print("Fetching artifacts for JOB: {} to {}".format(job_id, job_dir))
        os.makedirs(job_dir, exist_ok=True)
        def_path = os.path.join(job_dir, 'definition.yaml')
        target_log = os.path.join(job_dir, 'target_log.txt')
        config = os.path.join(job_dir, 'config.tar.bz2')
        results_file = os.path.join(job_dir, 'results.yaml')
        definition, metadata = lava.get_job_definition(job_id, def_path)
        jobs[job_id]['metadata'] = metadata
        time.sleep(0.2)  # be friendly to LAVA
        lava.get_job_log(job_id, target_log)
        time.sleep(0.2)
        lava.get_job_config(job_id, config)
        time.sleep(0.2)
        lava.get_job_results(job_id, results_file)
    return jobs


def lava_id_to_url(id, user_args):
    return "{}/scheduler/job/{}".format(user_args.lava_url, id)

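# A job only counts as PASS when LAVA reports it both healthy ("Complete")
# and in the "Finished" state; anything else is a FAIL.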
def generateTestResult(info):
    if info['health'] == "Complete" and info['state'] == "Finished":
        return "PASS"
    else:
        return "FAIL"

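# Prints, for every job, the build configuration name together with its LAVA
# job URL and the originating build job URL.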
def job_links(jobs, user_args):
    job_links = ""
    for job, info in jobs.items():
        job_links += "Build Config: {} ".format(info['metadata']['build_name'])
        job_links += "LAVA link: {} ".format(lava_id_to_url(job, user_args))
        job_links += "Build link: {}\n".format(info['metadata']['build_job_url'])
    print(job_links)

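# Aggregates job outcomes into test_results.csv: one row per
# platform/compiler/build-type combination, one column per configuration in
# cfgs. A cell already marked FAIL is never overwritten by a later PASS.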
def csv_report(jobs):
    lava_jobs = []
    for job, info in jobs.items():
        exist = False
        for record in lava_jobs:
            if info['metadata']['platform'] == record["Platform"] and \
               info['metadata']['compiler'] == record["Compiler"] and \
               info['metadata']['build_type'] == record["Build Type"]:
                if record[info['metadata']['name']] != "FAIL":
                    record[info['metadata']['name']] = generateTestResult(info)
                exist = True
                break
        if not exist:
            record = {}
            record["Platform"] = info['metadata']['platform']
            record["Compiler"] = info['metadata']['compiler']
            record["Build Type"] = info['metadata']['build_type']
            record["Config Name"] = info['metadata']['name']
            for cfg in cfgs:
                record[cfg] = "N.A."
            record[info['metadata']['name']] = generateTestResult(info)
            lava_jobs.append(record)
    lava_jobs.sort(key=lambda x: x["Platform"] + x["Compiler"] + x["Build Type"])
    with open("test_results.csv", "w", newline="") as csvfile:
        fieldnames = ["Platform", "Compiler", "Build Type"] + cfgs
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames, extrasaction='ignore')
        writer.writeheader()
        writer.writerows(lava_jobs)

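# Prints a single "BOOT_RESULT: +1" line when every job completed, or
# "BOOT_RESULT: -1" plus the URLs of the incomplete jobs otherwise;
# infrastructure failures are additionally flagged per job.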
def boot_report(jobs, user_args):
    incomplete_jobs = []
    for job, info in jobs.items():
        if info['health'] != 'Complete':
            if info['error_reason'] == 'Infrastructure':
                info_print("Job {} failed with Infrastructure error".format(job))
            incomplete_jobs.append(job)
    incomplete_output = [lava_id_to_url(x, user_args) for x in incomplete_jobs]
    if len(incomplete_jobs) > 0:
        print("BOOT_RESULT: -1 Failed: {}".format(incomplete_output))
    else:
        print("BOOT_RESULT: +1")

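# Prints a "FAILURE_TESTS:" line listing build_name:LAVA-URL pairs for every
# job that did not finish cleanly (health "Complete" and state "Finished").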
def failure_report(jobs, user_args):
    failed_report = "FAILURE_TESTS:"
    for job, info in jobs.items():
        if info['health'] != "Complete" or info['state'] != "Finished":
            failed_report += " {}:{}".format(info['metadata']['build_name'],
                                             lava_id_to_url(job, user_args))
    print(failed_report)

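# Drops failing "lava" suite entries that also have a passing result of the
# same name, keeping only the pass. (Not called anywhere in this script as it
# stands.)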
def remove_lava_dupes(results):
    for result in results:
        if result['result'] != 'pass':
            if result['suite'] == "lava":
                for other in [x for x in results if x != result]:
                    if other['name'] == result['name']:
                        if other['result'] == 'pass':
                            results.remove(result)
    return results

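# Parses each job's results.yaml, skipping the LAVA-internal suite, prints an
# overall "TEST_RESULT: +1/-1" verdict, and feeds the per-job data into the
# jinja2 summary templates via render_jinja().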
def test_report(jobs, user_args, lava):
    # parsing of test results is WIP
    fail_j = []
    jinja_data = []
    for job, info in jobs.items():
        results_file = os.path.join(info['job_dir'], 'results.yaml')
        if not os.path.exists(results_file) or (os.path.getsize(results_file) == 0):
            fail_j.append(job)
            continue
        with open(results_file, "r") as F:
            res_data = F.read()
        # yaml.load() without an explicit Loader is deprecated in PyYAML >= 5.1;
        # safe_load assumes results.yaml only uses standard YAML tags.
        results = yaml.safe_load(res_data)
        non_lava_results = [x for x in results if x['suite'] != 'lava']
        info['lava_url'] = lava_id_to_url(job, user_args)
        info['artifacts_dir'] = "tf-m-ci-scripts/{}".format(info['job_dir'])
        jinja_data.append({job: [info, non_lava_results]})
        for result in non_lava_results:
            if result['result'] != 'pass' and job not in fail_j:
                fail_j.append(job)
        time.sleep(0.5)  # be friendly to LAVA
    fail_output = [lava_id_to_url(x, user_args) for x in fail_j]
    if len(fail_j) > 0:
        print("TEST_RESULT: -1 Failed: {}".format(fail_output))
    else:
        print("TEST_RESULT: +1")
    data = {}
    data['jobs'] = jinja_data
    render_jinja(data)

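# Renders the jinja2 templates from the adjacent "jinja2_templates" directory
# into test_summary.html and test_summary.csv in the current working directory.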
def render_jinja(data):
    work_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "jinja2_templates")
    template_loader = FileSystemLoader(searchpath=work_dir)
    template_env = Environment(loader=template_loader)
    html = template_env.get_template("test_summary.jinja2").render(data)
    csv_text = template_env.get_template("test_summary_csv.jinja2").render(data)
    with open('test_summary.html', "w") as F:
        F.write(html)
    with open('test_summary.csv', "w") as F:
        F.write(csv_text)

def print_lava_urls(jobs, user_args):
    output = [lava_id_to_url(x, user_args) for x in jobs]
    print("LAVA jobs triggered for this build: {}".format(output))


def info_print(line):
    print("INFO: {}".format(line))

def main(user_args):
    """ Main logic """
    user_args.lava_rpc = "RPC2"
    wait_for_jobs(user_args)

def get_cmd_args():
    """ Parse command line arguments """

    # Parse command line arguments to override config
    parser = argparse.ArgumentParser(description="Lava Wait Jobs")
    cmdargs = parser.add_argument_group("Lava Wait Jobs")

    # Configuration control
    cmdargs.add_argument(
        "--lava-url", dest="lava_url", action="store", help="LAVA lab URL (without RPC2)"
    )
    cmdargs.add_argument(
        "--job-ids", dest="job_ids", action="store", required=True, help="Comma separated list of job IDs"
    )
    cmdargs.add_argument(
        "--lava-token", dest="token_secret", action="store", help="LAVA auth token"
    )
    cmdargs.add_argument(
        "--lava-user", dest="token_usr", action="store", help="LAVA username"
    )
    cmdargs.add_argument(
        "--use-env", dest="token_from_env", action="store_true", default=False, help="Use LAVA auth info from environment"
    )
    cmdargs.add_argument(
        "--lava-timeout", dest="dispatch_timeout", action="store", type=int, default=3600, help="Time in seconds to wait for all jobs"
    )
    cmdargs.add_argument(
        "--artifacts-path", dest="artifacts_path", action="store", help="Download LAVA artifacts to this directory"
    )
    return parser.parse_args()


if __name__ == "__main__":
    main(get_cmd_args())