blob: 1cd3345de5d76ba6c4f6365041add4460b45cc05 [file] [log] [blame]
Matthew Hartfb6fd362020-03-04 21:03:59 +00001#!/usr/bin/env python3
2
3from __future__ import print_function
4
5__copyright__ = """
6/*
7 * Copyright (c) 2020, Arm Limited. All rights reserved.
8 *
9 * SPDX-License-Identifier: BSD-3-Clause
10 *
11 */
12 """
13
14"""
15Script for waiting for LAVA jobs and parsing the results
16"""
17
18import os
19import sys
Matthew Hartfb6fd362020-03-04 21:03:59 +000020import time
21import yaml
22import argparse
Xinyu Zhang1b8f5152020-11-13 16:10:58 +080023import csv
Matthew Hartfb6fd362020-03-04 21:03:59 +000024from jinja2 import Environment, FileSystemLoader
25from lava_helper_configs import *
26from lava_helper import test_lava_dispatch_credentials
27
28try:
29 from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
30 load_yaml, test, print_test
31 from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
32except ImportError:
33 dir_path = os.path.dirname(os.path.realpath(__file__))
34 sys.path.append(os.path.join(dir_path, "../"))
35 from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
36 load_yaml, test, print_test
37 from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
38
# Every TF-M test configuration name that may appear as a result column in
# the CSV report. csv_report() initialises each of these to "N.A." for every
# row before filling in the PASS/FAIL results that were actually run.
cfgs = ["Default", "CoreIPC", "CoreIPCTfmLevel2", "CoreIPCTfmLevel3",
        "Regression", "RegressionIPC",
        "RegressionIPCTfmLevel2", "RegressionIPCTfmLevel3",
        "DefaultProfileS", "RegressionProfileS",
        "DefaultProfileM", "RegressionProfileM", "RegressionProfileM PSOFF",
        "PsaApiTest (Attest)", "PsaApiTestIPC (Attest)",
        "PsaApiTestIPCTfmLevel2 (Attest)",
        "PsaApiTest (Crypto)", "PsaApiTestIPC (Crypto)",
        "PsaApiTestIPCTfmLevel2 (Crypto)",
        "PsaApiTest (PS)", "PsaApiTestIPC (PS)",
        "PsaApiTestIPCTfmLevel2 (PS)",
        "PsaApiTest (ITS)", "PsaApiTestIPC (ITS)",
        "PsaApiTestIPCTfmLevel2 (ITS)",
        "PsaApiTestIPC (FF)",
        "PsaApiTestIPCTfmLevel2 (FF)",
        "PsaApiTestIPCTfmLevel3 (ITS)", "PsaApiTestIPCTfmLevel3 (PS)",
        "PsaApiTestIPCTfmLevel3 (Crypto)", "PsaApiTestIPCTfmLevel3 (Attest)",
        "PsaApiTestIPCTfmLevel3 (FF)"]
57
def wait_for_jobs(user_args):
    """Block until the given LAVA jobs finish, then emit all reports.

    Jobs still running after the dispatch timeout are cancelled. When an
    artifacts path was supplied, per-job artifacts are downloaded first so
    the reports can read results.yaml from disk.
    """
    ids = [int(token) for token in user_args.job_ids.split(",") if token != '']
    lava = test_lava_dispatch_credentials(user_args)
    finished_jobs = lava.block_wait_for_jobs(ids, user_args.dispatch_timeout, 0.5)
    # Anything not reported back as finished gets cancelled explicitly.
    for job in ids:
        if job not in finished_jobs:
            info_print("Cancelling unfinished job: {}".format(job))
            lava.cancel_job(job)
    if user_args.artifacts_path:
        for job, info in finished_jobs.items():
            info['job_dir'] = os.path.join(
                user_args.artifacts_path,
                "{}_{}".format(str(job), info['description']))
            finished_jobs[job] = info
        finished_jobs = fetch_artifacts(finished_jobs, user_args, lava)
    print_lava_urls(finished_jobs, user_args)
    boot_report(finished_jobs, user_args)
    test_report(finished_jobs, user_args, lava)
    failure_report(finished_jobs, user_args)
    csv_report(finished_jobs)
Matthew Hartfb6fd362020-03-04 21:03:59 +000077
def fetch_artifacts(jobs, user_args, lava):
    """Download definition, log, config and results for each finished job.

    Args:
        jobs: dict mapping job id -> job info dict; each entry must already
            carry a 'job_dir' path (set by wait_for_jobs).
        user_args: parsed command-line arguments.
        lava: authenticated LAVA_RPC_connector instance.

    Returns:
        The jobs dict, with each job's 'metadata' filled in from its
        downloaded definition. Previously the no-artifacts-path branch
        returned None while the normal path returned the dict; now both
        return the dict so callers get a consistent type.
    """
    if not user_args.artifacts_path:
        return jobs  # nothing to fetch; keep the return type consistent
    for job_id, info in jobs.items():
        job_dir = info['job_dir']
        info_print("Fetching artifacts for JOB: {} to {}".format(job_id, job_dir))
        os.makedirs(job_dir, exist_ok=True)
        def_path = os.path.join(job_dir, 'definition.yaml')
        target_log = os.path.join(job_dir, 'target_log.txt')
        config = os.path.join(job_dir, 'config.tar.bz2')
        results_file = os.path.join(job_dir, 'results.yaml')
        definition, metadata = lava.get_job_definition(job_id, def_path)
        jobs[job_id]['metadata'] = metadata
        time.sleep(0.2)  # be friendly to LAVA
        lava.get_job_log(job_id, target_log)
        time.sleep(0.2)
        lava.get_job_config(job_id, config)
        time.sleep(0.2)
        lava.get_job_results(job_id, results_file)
    return jobs
98
99
def lava_id_to_url(id, user_args):
    """Return the scheduler page URL for LAVA job *id* on the configured lab."""
    base = user_args.lava_url
    return "/".join([base, "scheduler", "job", str(id)])
102
def generateTestResult(info):
    """Map a job's health/state pair onto a PASS/FAIL verdict string."""
    finished_ok = (info['health'] == "Complete"
                   and info['state'] == "Finished")
    return "PASS" if finished_ok else "FAIL"
108
def csv_report(jobs):
    """Aggregate results per (platform, compiler, build type) and write
    them to test_results.csv in the current directory.

    Each row collects the outcomes of every configuration run on one
    platform/compiler/build-type combination; a recorded FAIL is sticky
    and is never overwritten by a later result for the same config.
    """
    rows = []
    for job_id, info in jobs.items():
        meta = info['metadata']
        # Find an existing row for this platform/compiler/build-type combo.
        match = None
        for row in rows:
            if (row["Platform"] == meta['platform']
                    and row["Compiler"] == meta['compiler']
                    and row["Build Type"] == meta['build_type']):
                match = row
                break
        if match is not None:
            # FAIL is sticky; anything else gets this job's fresh verdict.
            if match[meta['name']] != "FAIL":
                match[meta['name']] = generateTestResult(info)
        else:
            row = {
                "Platform": meta['platform'],
                "Compiler": meta['compiler'],
                "Build Type": meta['build_type'],
                "Config Name": meta['name'],
            }
            for cfg in cfgs:
                row[cfg] = "N.A."
            row[meta['name']] = generateTestResult(info)
            rows.append(row)
    rows.sort(key=lambda r: r["Platform"] + r["Compiler"] + r["Build Type"])
    with open("test_results.csv", "w", newline="") as out:
        writer = csv.DictWriter(
            out,
            fieldnames=["Platform", "Compiler", "Build Type"] + cfgs,
            extrasaction='ignore')
        writer.writeheader()
        writer.writerows(rows)
138
def boot_report(jobs, user_args):
    """Print a +1/-1 boot verdict; any job that is not 'Complete' fails it."""
    incomplete = []
    for job, info in jobs.items():
        if info['health'] == 'Complete':
            continue
        if info['error_reason'] == 'Infrastructure':
            info_print("Job {} failed with Infrastructure error".format(job))
        incomplete.append(job)
    urls = [lava_id_to_url(job, user_args) for job in incomplete]
    if incomplete:
        print("BOOT_RESULT: -1 Failed: {}".format(urls))
    else:
        print("BOOT_RESULT: +1")
151
def failure_report(jobs, user_args):
    """Print one FAILURE_TESTS line naming every job that did not pass."""
    pieces = ["FAILURE_TESTS:"]
    for job, info in jobs.items():
        if info['health'] != "Complete" or info['state'] != "Finished":
            pieces.append("{}:{}".format(info['metadata']['build_name'],
                                         lava_id_to_url(job, user_args)))
    print(" ".join(pieces))
159
def remove_lava_dupes(results):
    """Drop failed 'lava'-suite entries that have a passing duplicate.

    LAVA can report the same test name more than once; when a passing entry
    with the same name exists, the failing copy is noise and is removed
    in place.

    Fixes two defects in the previous version: it removed items from
    ``results`` while iterating over it (which silently skips the element
    after each removal), and it could call ``results.remove(result)`` a
    second time for the same entry (raising ValueError) when two passing
    duplicates existed. Iterating over a snapshot and breaking after the
    first removal resolves both.

    Args:
        results: list of result dicts with 'suite', 'name' and 'result' keys.

    Returns:
        The same list object, mutated in place.
    """
    for result in list(results):  # snapshot: safe to mutate `results` below
        if result['result'] != 'pass' and result['suite'] == "lava":
            for other in results:
                if (other is not result
                        and other['name'] == result['name']
                        and other['result'] == 'pass'):
                    results.remove(result)
                    break  # removed once; looking further would re-remove
    return results
169
def test_report(jobs, user_args, lava):
    """Print a +1/-1 TEST_RESULT verdict from each job's results.yaml and
    render the HTML/CSV summaries.

    A job fails the verdict when its results file is missing/empty or any
    non-'lava' suite result is not 'pass'.
    """
    # parsing of test results is WIP
    fail_j = []
    jinja_data = []
    for job, info in jobs.items():
        results_file = os.path.join(info['job_dir'], 'results.yaml')
        if not os.path.exists(results_file) or (os.path.getsize(results_file) == 0):
            fail_j.append(job)
            continue
        with open(results_file, "r") as F:
            res_data = F.read()
        # safe_load: the YAML comes from an external service; never allow
        # arbitrary tags (plain yaml.load without a Loader is also deprecated).
        results = yaml.safe_load(res_data)
        non_lava_results = [x for x in results if x['suite'] != 'lava']
        info['lava_url'] = lava_id_to_url(job, user_args)
        info['artifacts_dir'] = "tf-m-ci-scripts/{}".format(info['job_dir'])
        jinja_data.append({job: [info, non_lava_results]})
        for result in non_lava_results:
            if result['result'] != 'pass' and job not in fail_j:
                fail_j.append(job)
        time.sleep(0.5)  # be friendly to LAVA
    fail_output = [lava_id_to_url(x, user_args) for x in fail_j]
    if len(fail_j) > 0:
        print("TEST_RESULT: -1 Failed: {}".format(fail_output))
    else:
        print("TEST_RESULT: +1")
    data = {}
    data['jobs'] = jinja_data
    render_jinja(data)
198
def render_jinja(data):
    """Render the jinja2 templates for the test summary.

    Writes test_summary.html and test_summary.csv into the current working
    directory from the templates in ./jinja2_templates.

    Args:
        data: template context; expected to carry a 'jobs' entry
            (see test_report).
    """
    work_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "jinja2_templates")
    template_env = Environment(loader=FileSystemLoader(searchpath=work_dir))
    html_text = template_env.get_template("test_summary.jinja2").render(data)
    # Renamed from `csv`: that local shadowed the file-level `csv` module
    # import inside this function.
    csv_text = template_env.get_template("test_summary_csv.jinja2").render(data)
    with open('test_summary.html', "w") as F:
        F.write(html_text)
    with open('test_summary.csv', "w") as F:
        F.write(csv_text)
209
def print_lava_urls(jobs, user_args):
    """Print the dashboard URL of every job triggered for this build."""
    urls = [lava_id_to_url(job, user_args) for job in jobs]
    print("LAVA jobs triggered for this build: {}".format(urls))
213
214
def info_print(line):
    """Emit *line* on stdout with an "INFO: " prefix."""
    print("INFO:", line)
217
def main(user_args):
    """Entry point: pin the XML-RPC endpoint name, then drive the job wait."""
    setattr(user_args, "lava_rpc", "RPC2")
    wait_for_jobs(user_args)
222
def get_cmd_args():
    """Build and parse the command-line interface for the LAVA job waiter.

    Returns:
        argparse.Namespace with the parsed options; only --job-ids is
        mandatory.
    """
    parser = argparse.ArgumentParser(description="Lava Wait Jobs")
    group = parser.add_argument_group("Lava Wait Jobs")

    # Flag spec -> keyword arguments, registered in one pass below.
    options = [
        ("--lava-url", dict(dest="lava_url", action="store",
                            help="LAVA lab URL (without RPC2)")),
        ("--job-ids", dict(dest="job_ids", action="store", required=True,
                           help="Comma separated list of job IDS")),
        ("--lava-token", dict(dest="token_secret", action="store",
                              help="LAVA auth token")),
        ("--lava-user", dict(dest="token_usr", action="store",
                             help="LAVA username")),
        ("--use-env", dict(dest="token_from_env", action="store_true",
                           default=False,
                           help="Use LAVA auth info from environment")),
        ("--lava-timeout", dict(dest="dispatch_timeout", action="store",
                                type=int, default=3600,
                                help="Time in seconds to wait for all jobs")),
        ("--artifacts-path", dict(dest="artifacts_path", action="store",
                                  help="Download LAVA artifacts to this directory")),
    ]
    for flag, kwargs in options:
        group.add_argument(flag, **kwargs)
    return parser.parse_args()
253
254
# Script entry point: parse the command line and hand the options to main().
if __name__ == "__main__":
    main(get_cmd_args())