blob: d0578b2dc52f32cc7500a1c7b826db99eee80101 [file] [log] [blame]
Matthew Hartfb6fd362020-03-04 21:03:59 +00001#!/usr/bin/env python3
2
3from __future__ import print_function
4
5__copyright__ = """
6/*
7 * Copyright (c) 2020, Arm Limited. All rights reserved.
8 *
9 * SPDX-License-Identifier: BSD-3-Clause
10 *
11 */
12 """
13
14"""
15Script for waiting for LAVA jobs and parsing the results
16"""
17
18import os
19import sys
Matthew Hartfb6fd362020-03-04 21:03:59 +000020import time
21import yaml
22import argparse
Xinyu Zhang1b8f5152020-11-13 16:10:58 +080023import csv
Matthew Hartfb6fd362020-03-04 21:03:59 +000024from jinja2 import Environment, FileSystemLoader
25from lava_helper_configs import *
26from lava_helper import test_lava_dispatch_credentials
27
28try:
29 from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
30 load_yaml, test, print_test
31 from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
32except ImportError:
33 dir_path = os.path.dirname(os.path.realpath(__file__))
34 sys.path.append(os.path.join(dir_path, "../"))
35 from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
36 load_yaml, test, print_test
37 from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
38
# All known TF-M test configuration names.  csv_report() uses this list both
# as the set of per-config result columns and (via list order) as the CSV
# column order, so entries must match the 'name' metadata emitted by the
# LAVA job templates exactly — do not rename or reorder casually.
cfgs = ["Default", "CoreIPC", "CoreIPCTfmLevel2", "CoreIPCTfmLevel3",
        "Regression", "RegressionIPC",
        "RegressionIPCTfmLevel2", "RegressionIPCTfmLevel3",
        "DefaultProfileS", "RegressionProfileS",
        "DefaultProfileM", "RegressionProfileM", "RegressionProfileM PSOFF",
        "PsaApiTest (Attest)", "PsaApiTestIPC (Attest)",
        "PsaApiTestIPCTfmLevel2 (Attest)",
        "PsaApiTest (Crypto)", "PsaApiTestIPC (Crypto)",
        "PsaApiTestIPCTfmLevel2 (Crypto)",
        "PsaApiTest (PS)", "PsaApiTestIPC (PS)",
        "PsaApiTestIPCTfmLevel2 (PS)",
        "PsaApiTest (ITS)", "PsaApiTestIPC (ITS)",
        "PsaApiTestIPCTfmLevel2 (ITS)",
        "PsaApiTestIPC (FF)",
        "PsaApiTestIPCTfmLevel2 (FF)",
        "PsaApiTestIPCTfmLevel3 (ITS)", "PsaApiTestIPCTfmLevel3 (PS)",
        "PsaApiTestIPCTfmLevel3 (Crypto)", "PsaApiTestIPCTfmLevel3 (Attest)",
        "PsaApiTestIPCTfmLevel3 (FF)"]
57
def wait_for_jobs(user_args):
    """Block until the listed LAVA jobs finish, then emit all reports.

    Jobs still unfinished after ``--lava-timeout`` seconds are cancelled.
    The boot verdict and job URLs are always printed; the test/CSV reports
    are only generated when ``--artifacts-path`` was given, because they
    read ``info['job_dir']`` and ``info['metadata']`` which are populated
    only by the artifact-fetch step (the original crashed with a KeyError
    here when no artifacts path was supplied).
    """
    job_list = [int(x) for x in user_args.job_ids.split(",") if x != '']
    lava = test_lava_dispatch_credentials(user_args)
    finished_jobs = lava.block_wait_for_jobs(job_list,
                                             user_args.dispatch_timeout, 0.5)
    unfinished_jobs = [item for item in job_list if item not in finished_jobs]
    for job in unfinished_jobs:
        info_print("Cancelling unfinished job: {}".format(job))
        lava.cancel_job(job)
    print_lava_urls(finished_jobs, user_args)
    boot_report(finished_jobs, user_args)
    if user_args.artifacts_path:
        for job, info in finished_jobs.items():
            # Per-job artifact directory: "<id>_<description>"
            info['job_dir'] = os.path.join(
                user_args.artifacts_path,
                "{}_{}".format(str(job), info['description']))
        finished_jobs = fetch_artifacts(finished_jobs, user_args, lava)
        # These two need results.yaml / job metadata fetched above.
        test_report(finished_jobs, user_args, lava)
        csv_report(finished_jobs)
Matthew Hartfb6fd362020-03-04 21:03:59 +000076
def fetch_artifacts(jobs, user_args, lava):
    """Download per-job artifacts (definition, log, config, results).

    For every job, fetches definition.yaml, target_log.txt, config.tar.bz2
    and results.yaml into the job's ``job_dir`` (which the caller must have
    set on each info dict), and stores the job metadata returned by the
    definition fetch under ``info['metadata']``.

    Returns the (mutated) ``jobs`` dict in all cases — the original
    implicitly returned ``None`` when no artifacts path was configured,
    which would have clobbered the caller's dict.
    """
    if not user_args.artifacts_path:
        return jobs
    for job_id, info in jobs.items():
        job_dir = info['job_dir']
        info_print("Fetching artifacts for JOB: {} to {}".format(job_id,
                                                                 job_dir))
        os.makedirs(job_dir, exist_ok=True)
        def_path = os.path.join(job_dir, 'definition.yaml')
        target_log = os.path.join(job_dir, 'target_log.txt')
        config = os.path.join(job_dir, 'config.tar.bz2')
        results_file = os.path.join(job_dir, 'results.yaml')
        definition, metadata = lava.get_job_definition(job_id, def_path)
        jobs[job_id]['metadata'] = metadata
        time.sleep(0.2)  # be friendly to LAVA
        lava.get_job_log(job_id, target_log)
        time.sleep(0.2)
        lava.get_job_config(job_id, config)
        time.sleep(0.2)
        lava.get_job_results(job_id, results_file)
    return jobs
97
98
def lava_id_to_url(id, user_args):
    """Return the LAVA scheduler page URL for job *id*."""
    return "%s/scheduler/job/%s" % (user_args.lava_url, id)
101
def generateTestResult(info):
    """Map a job's health/state pair to a PASS or FAIL verdict string."""
    completed = info['health'] == "Complete" and info['state'] == "Finished"
    return "PASS" if completed else "FAIL"
107
def csv_report(jobs):
    """Aggregate job outcomes into test_results.csv in the current directory.

    One CSV row per (platform, compiler, build type) combination; every name
    in ``cfgs`` is a column holding PASS/FAIL/"N.A.".  A FAIL is sticky: once
    a config has failed for a row, a later job for the same config cannot
    flip it back to PASS.
    """
    lava_jobs = []
    for job, info in jobs.items():
        meta = info['metadata']
        for record in lava_jobs:
            if meta['platform'] == record["Platform"] and \
                    meta['compiler'] == record["Compiler"] and \
                    meta['build_type'] == record["Build Type"]:
                # .get(): a config name missing from cfgs (and hence from the
                # record) used to raise KeyError here.
                if record.get(meta['name']) != "FAIL":
                    record[meta['name']] = generateTestResult(info)
                break
        else:
            # No existing row for this platform/compiler/build-type triple.
            record = {
                "Platform": meta['platform'],
                "Compiler": meta['compiler'],
                "Build Type": meta['build_type'],
                "Config Name": meta['name'],
            }
            for cfg in cfgs:
                record[cfg] = "N.A."
            record[meta['name']] = generateTestResult(info)
            lava_jobs.append(record)
    lava_jobs.sort(key=lambda x: x["Platform"] + x["Compiler"] + x["Build Type"])
    with open("test_results.csv", "w", newline="") as csvfile:
        fieldnames = ["Platform", "Compiler", "Build Type"] + cfgs
        # extrasaction='ignore' drops the "Config Name" helper key.
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames,
                                extrasaction='ignore')
        writer.writeheader()
        writer.writerows(lava_jobs)
137
def boot_report(jobs, user_args):
    """Print a BOOT_RESULT +1/-1 verdict based on job health.

    Any job whose health is not 'Complete' counts as a boot failure; jobs
    that failed for infrastructure reasons are called out individually.
    """
    incomplete_jobs = []
    for job, info in jobs.items():
        if info['health'] != 'Complete':
            # .get(): error_reason may be absent on some failure modes —
            # a KeyError here would mask the real boot failure.
            if info.get('error_reason') == 'Infrastructure':
                info_print("Job {} failed with Infrastructure error".format(job))
            incomplete_jobs.append(job)
    incomplete_output = [lava_id_to_url(x, user_args) for x in incomplete_jobs]
    if len(incomplete_jobs) > 0:
        print("BOOT_RESULT: -1 Failed: {}".format(incomplete_output))
    else:
        print("BOOT_RESULT: +1")
150
def remove_lava_dupes(results):
    """Drop failed 'lava'-suite entries that have a passing duplicate.

    LAVA can report the same test name more than once; when a non-pass
    'lava' suite entry has a same-named entry that passed, the failing
    entry is removed.  The list is filtered in one pass into a new list
    and written back in place — the original removed elements from the
    list *while iterating it*, which skips the element after each removal.

    Returns the (in-place filtered) results list.
    """
    passed_names = {r['name'] for r in results if r['result'] == 'pass'}
    results[:] = [
        r for r in results
        if not (r['result'] != 'pass'
                and r['suite'] == "lava"
                and r['name'] in passed_names)
    ]
    return results
160
def test_report(jobs, user_args, lava):
    """Print a TEST_RESULT +1/-1 verdict and render the jinja2 summaries.

    Reads each job's fetched results.yaml (a missing/empty file marks the
    job failed), filters out the 'lava' suite entries, and records any
    non-pass result as a failure.  Requires ``info['job_dir']`` to be set,
    i.e. an artifacts path must have been configured.
    """
    # parsing of test results is WIP
    fail_j = []
    jinja_data = []
    for job, info in jobs.items():
        results_file = os.path.join(info['job_dir'], 'results.yaml')
        if not os.path.exists(results_file) or \
                os.path.getsize(results_file) == 0:
            fail_j.append(job)
            continue
        with open(results_file, "r") as F:
            res_data = F.read()
        # safe_load: the data comes from the LAVA server; yaml.load without
        # an explicit Loader is deprecated and can construct arbitrary
        # python objects.
        results = yaml.safe_load(res_data)
        non_lava_results = [x for x in results if x['suite'] != 'lava']
        info['lava_url'] = lava_id_to_url(job, user_args)
        info['artifacts_dir'] = "tf-m-ci-scripts/{}".format(info['job_dir'])
        jinja_data.append({job: [info, non_lava_results]})
        for result in non_lava_results:
            if result['result'] != 'pass' and job not in fail_j:
                fail_j.append(job)
        time.sleep(0.5)  # be friendly to LAVA
    fail_output = [lava_id_to_url(x, user_args) for x in fail_j]
    if len(fail_j) > 0:
        print("TEST_RESULT: -1 Failed: {}".format(fail_output))
    else:
        print("TEST_RESULT: +1")
    render_jinja({'jobs': jinja_data})
189
def render_jinja(data):
    """Render test_summary.html and test_summary.csv from jinja2 templates.

    Templates are loaded from the jinja2_templates directory next to this
    script; output files are written to the current working directory.
    """
    work_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            "jinja2_templates")
    template_loader = FileSystemLoader(searchpath=work_dir)
    template_env = Environment(loader=template_loader)
    html_out = template_env.get_template("test_summary.jinja2").render(data)
    # Renamed from 'csv': the original local shadowed the imported csv module.
    csv_out = template_env.get_template("test_summary_csv.jinja2").render(data)
    with open('test_summary.html', "w") as F:
        F.write(html_out)
    with open('test_summary.csv', "w") as F:
        F.write(csv_out)
200
def print_lava_urls(jobs, user_args):
    """Print the scheduler URLs of every triggered job."""
    urls = [lava_id_to_url(job_id, user_args) for job_id in jobs]
    print("LAVA jobs triggered for this build: {}".format(urls))
204
205
def info_print(line):
    """Print *line* with an 'INFO: ' prefix."""
    print("INFO: %s" % line)
208
def main(user_args):
    """Entry point: pin the RPC2 endpoint, then block on the jobs."""
    user_args.lava_rpc = "RPC2"
    wait_for_jobs(user_args)
213
def get_cmd_args():
    """Parse and return the command line arguments for this script."""
    parser = argparse.ArgumentParser(description="Lava Wait Jobs")
    group = parser.add_argument_group("Lava Wait Jobs")

    # Connection / authentication options.
    group.add_argument("--lava-url", dest="lava_url", action="store",
                       help="LAVA lab URL (without RPC2)")
    group.add_argument("--lava-token", dest="token_secret", action="store",
                       help="LAVA auth token")
    group.add_argument("--lava-user", dest="token_usr", action="store",
                       help="LAVA username")
    group.add_argument("--use-env", dest="token_from_env",
                       action="store_true", default=False,
                       help="Use LAVA auth info from environment")

    # Job selection and behaviour.
    group.add_argument("--job-ids", dest="job_ids", action="store",
                       required=True,
                       help="Comma separated list of job IDS")
    group.add_argument("--lava-timeout", dest="dispatch_timeout",
                       action="store", type=int, default=3600,
                       help="Time in seconds to wait for all jobs")
    group.add_argument("--artifacts-path", dest="artifacts_path",
                       action="store",
                       help="Download LAVA artifacts to this directory")
    return parser.parse_args()
244
245
# Script entry point: parse the CLI arguments and wait for the LAVA jobs.
if __name__ == "__main__":
    main(get_cmd_args())