blob: 586311f666f0b0900d300cafef80d5c41a40b625 [file] [log] [blame]
Matthew Hartfb6fd362020-03-04 21:03:59 +00001#!/usr/bin/env python3
2
3from __future__ import print_function
4
5__copyright__ = """
6/*
7 * Copyright (c) 2020, Arm Limited. All rights reserved.
8 *
9 * SPDX-License-Identifier: BSD-3-Clause
10 *
11 */
12 """
13
14"""
15Script for waiting for LAVA jobs and parsing the results
16"""
17
18import os
19import sys
20import shutil
21import time
22import yaml
23import argparse
24import threading
Xinyu Zhang1b8f5152020-11-13 16:10:58 +080025import csv
Matthew Hartfb6fd362020-03-04 21:03:59 +000026from copy import deepcopy
27from collections import OrderedDict
28from jinja2 import Environment, FileSystemLoader
29from lava_helper_configs import *
30from lava_helper import test_lava_dispatch_credentials
31
32try:
33 from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
34 load_yaml, test, print_test
35 from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
36except ImportError:
37 dir_path = os.path.dirname(os.path.realpath(__file__))
38 sys.path.append(os.path.join(dir_path, "../"))
39 from tfm_ci_pylib.utils import save_json, load_json, sort_dict,\
40 load_yaml, test, print_test
41 from tfm_ci_pylib.lava_rpc_connector import LAVA_RPC_connector
42
# Full list of TF-M build/test configuration names. These are used as the
# column headers of the CSV report; any configuration a build did not run
# is reported as "N.A." in csv_report().
cfgs = ["Default", "CoreIPC", "CoreIPCTfmLevel2", "CoreIPCTfmLevel3",
        "Regression", "RegressionIPC",
        "RegressionIPCTfmLevel2", "RegressionIPCTfmLevel3",
        "DefaultProfileS", "RegressionProfileS",
        "DefaultProfileM", "RegressionProfileM", "RegressionProfileM PSOFF",
        "PsaApiTest (Attest)", "PsaApiTestIPC (Attest)",
        "PsaApiTestIPCTfmLevel2 (Attest)",
        "PsaApiTest (Crypto)", "PsaApiTestIPC (Crypto)",
        "PsaApiTestIPCTfmLevel2 (Crypto)",
        "PsaApiTest (PS)", "PsaApiTestIPC (PS)",
        "PsaApiTestIPCTfmLevel2 (PS)",
        "PsaApiTest (ITS)", "PsaApiTestIPC (ITS)",
        "PsaApiTestIPCTfmLevel2 (ITS)",
        "PsaApiTestIPC (FF)",
        "PsaApiTestIPCTfmLevel2 (FF)",
        "PsaApiTestIPCTfmLevel3 (ITS)", "PsaApiTestIPCTfmLevel3 (PS)",
        "PsaApiTestIPCTfmLevel3 (Crypto)", "PsaApiTestIPCTfmLevel3 (Attest)",
        "PsaApiTestIPCTfmLevel3 (FF)"]
61
def wait_for_jobs(user_args):
    """Top-level driver: block until the given LAVA jobs finish, cancel any
    that time out, optionally fetch their artifacts, then emit the boot,
    test and CSV reports.

    :param user_args: parsed command line arguments (see get_cmd_args())
    """
    job_list = user_args.job_ids.split(",")
    # Drop empty fragments left by stray commas before converting to ints.
    job_list = [int(x) for x in job_list if x != '']
    lava = test_lava_dispatch_credentials(user_args)
    finished_jobs = lava.block_wait_for_jobs(job_list, user_args.dispatch_timeout, 0.5)
    # Anything not reported back within the timeout is cancelled so the lab
    # does not keep running jobs nobody will collect.
    unfinished_jobs = [item for item in job_list if item not in finished_jobs]
    for job in unfinished_jobs:
        info_print("Cancelling unfinished job: {}".format(job))
        lava.cancel_job(job)
    if user_args.artifacts_path:
        for job, info in finished_jobs.items():
            # Per-job artifact directory: "<artifacts_path>/<id>_<description>".
            info['job_dir'] = os.path.join(user_args.artifacts_path, "{}_{}".format(str(job), info['description']))
            finished_jobs[job] = info
        finished_jobs = fetch_artifacts(finished_jobs, user_args, lava)
    print_lava_urls(finished_jobs, user_args)
    boot_report(finished_jobs, user_args)
    test_report(finished_jobs, user_args, lava)
    csv_report(finished_jobs)
Matthew Hartfb6fd362020-03-04 21:03:59 +000080
81def fetch_artifacts(jobs, user_args, lava):
82 if not user_args.artifacts_path:
83 return
84 for job_id, info in jobs.items():
85 job_dir = info['job_dir']
86 info_print("Fetching artifacts for JOB: {} to {}".format(job_id, job_dir))
87 os.makedirs(job_dir, exist_ok=True)
88 def_path = os.path.join(job_dir, 'definition.yaml')
89 target_log = os.path.join(job_dir, 'target_log.txt')
Matthew Hart4a4f1202020-06-12 15:52:46 +010090 config = os.path.join(job_dir, 'config.tar.bz2')
91 results_file = os.path.join(job_dir, 'results.yaml')
Matthew Hartfb6fd362020-03-04 21:03:59 +000092 definition, metadata = lava.get_job_definition(job_id, def_path)
93 jobs[job_id]['metadata'] = metadata
94 time.sleep(0.2) # be friendly to LAVA
Matthew Hart4a4f1202020-06-12 15:52:46 +010095 lava.get_job_log(job_id, target_log)
Matthew Hartfb6fd362020-03-04 21:03:59 +000096 time.sleep(0.2)
97 lava.get_job_config(job_id, config)
98 time.sleep(0.2)
Matthew Hart4a4f1202020-06-12 15:52:46 +010099 lava.get_job_results(job_id, results_file)
Matthew Hartfb6fd362020-03-04 21:03:59 +0000100 return(jobs)
101
102
def lava_id_to_url(id, user_args):
    """Return the LAVA scheduler URL for job *id*."""
    return "/".join([user_args.lava_url, "scheduler", "job", str(id)])
105
def generateTestResult(info):
    """Map a LAVA job's health/state pair to a PASS/FAIL verdict."""
    finished_ok = (info['health'] == "Complete") and (info['state'] == "Finished")
    return "PASS" if finished_ok else "FAIL"
111
def csv_report(jobs):
    """Write test_results.csv: one row per (platform, compiler, build type)
    with a PASS/FAIL/"N.A." cell for every configuration in cfgs.

    A FAIL already recorded for a configuration is sticky: a later job for
    the same build cannot overwrite it with PASS.
    """
    rows = {}
    for job_id, info in jobs.items():
        meta = info['metadata']
        build_key = (meta['platform'], meta['compiler'], meta['build_type'])
        row = rows.get(build_key)
        if row is None:
            # First job seen for this build: start a row with every
            # configuration marked as not applicable.
            row = {
                "Platform": meta['platform'],
                "Compiler": meta['compiler'],
                "Build Type": meta['build_type'],
                "Config Name": meta['name'],
            }
            row.update((cfg, "N.A.") for cfg in cfgs)
            rows[build_key] = row
        # Sticky FAIL: never upgrade a failed configuration back to PASS.
        if row[meta['name']] != "FAIL":
            row[meta['name']] = generateTestResult(info)
    lava_jobs = sorted(rows.values(),
                       key=lambda r: r["Platform"] + r["Compiler"] + r["Build Type"])
    with open("test_results.csv", "w", newline="") as csvfile:
        fieldnames = ["Platform", "Compiler", "Build Type"] + cfgs
        # extrasaction='ignore' silently drops keys (e.g. "Config Name")
        # that are not in fieldnames.
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames, extrasaction='ignore')
        writer.writeheader()
        writer.writerows(lava_jobs)
141
def boot_report(jobs, user_args):
    """Print a one-line BOOT_RESULT verdict: +1 when every job completed,
    -1 with the failing jobs' URLs otherwise."""
    failed = []
    for job_id, info in jobs.items():
        if info['health'] == 'Complete':
            continue
        # Call out infrastructure failures explicitly; they usually need
        # lab attention rather than a code fix.
        if info['error_reason'] == 'Infrastructure':
            info_print("Job {} failed with Infrastructure error".format(job_id))
        failed.append(job_id)
    if failed:
        failed_urls = [lava_id_to_url(x, user_args) for x in failed]
        print("BOOT_RESULT: -1 Failed: {}".format(failed_urls))
    else:
        print("BOOT_RESULT: +1")
154
def remove_lava_dupes(results):
    """Remove failed "lava" suite entries that also have a passing entry
    with the same name, keeping only the passing one.

    Fix: the original removed items from *results* while iterating it,
    which makes Python's list iterator skip the element following each
    removal (and could attempt a double remove when several passing
    duplicates exist). Iterate over a shallow copy and stop after the
    first matching pass entry instead.

    :param results: list of LAVA result dicts ('suite', 'name', 'result')
    :return: the (mutated) results list
    """
    for result in list(results):
        if result['result'] != 'pass' and result['suite'] == "lava":
            for other in [x for x in results if x != result]:
                if other['name'] == result['name'] and other['result'] == 'pass':
                    results.remove(result)
                    break  # removed once; do not remove again
    return results
164
def test_report(jobs, user_args, lava):
    """Print a one-line TEST_RESULT verdict and render the jinja2 summaries.

    A job counts as failed when its results file is missing/empty or when
    any non-"lava" suite result is not 'pass'.

    :param jobs: dict of job_id -> info (artifacts already fetched)
    :param user_args: parsed command line arguments
    :param lava: LAVA_RPC_connector instance (kept for interface parity)
    """
    # parsing of test results is WIP
    fail_j = []
    jinja_data = []
    for job, info in jobs.items():
        results_file = os.path.join(info['job_dir'], 'results.yaml')
        if not os.path.exists(results_file) or (os.path.getsize(results_file) == 0):
            fail_j.append(job)
            continue
        with open(results_file, "r") as F:
            res_data = F.read()
        # Fix: yaml.load() without an explicit Loader is deprecated (and an
        # error on PyYAML >= 6) and can construct arbitrary objects;
        # safe_load is sufficient for LAVA result files.
        results = yaml.safe_load(res_data)
        non_lava_results = [x for x in results if x['suite'] != 'lava']
        info['lava_url'] = lava_id_to_url(job, user_args)
        info['artifacts_dir'] = "tf-m-ci-scripts/{}".format(info['job_dir'])
        jinja_data.append({job: [info, non_lava_results]})
        if any(r['result'] != 'pass' for r in non_lava_results) and job not in fail_j:
            fail_j.append(job)
        time.sleep(0.5)  # be friendly to LAVA
    fail_output = [lava_id_to_url(x, user_args) for x in fail_j]
    if len(fail_j) > 0:
        print("TEST_RESULT: -1 Failed: {}".format(fail_output))
    else:
        print("TEST_RESULT: +1")
    data = {}
    data['jobs'] = jinja_data
    render_jinja(data)
193
def render_jinja(data):
    """Render the HTML and CSV test summaries from the jinja2 templates.

    :param data: dict with a 'jobs' key holding per-job [info, results]
                 pairs, as assembled by test_report()
    Writes test_summary.html and test_summary.csv to the current directory.
    """
    work_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "jinja2_templates")
    template_loader = FileSystemLoader(searchpath=work_dir)
    template_env = Environment(loader=template_loader)
    html = template_env.get_template("test_summary.jinja2").render(data)
    # Fix: renamed local from 'csv' to avoid shadowing the module-level
    # csv import inside this function.
    csv_text = template_env.get_template("test_summary_csv.jinja2").render(data)
    with open('test_summary.html', "w") as F:
        F.write(html)
    with open('test_summary.csv', "w") as F:
        F.write(csv_text)
204
def print_lava_urls(jobs, user_args):
    """Print the scheduler URLs of every triggered job on one line."""
    urls = [lava_id_to_url(job_id, user_args) for job_id in jobs]
    print("LAVA jobs triggered for this build: {}".format(urls))
208
209
def info_print(line):
    """Print *line* prefixed with 'INFO: '."""
    print("INFO: %s" % line)
212
def main(user_args):
    """ Main logic """
    # The XML-RPC endpoint name is fixed; the connection helper appends it
    # to --lava-url when building the API URL.
    user_args.lava_rpc = "RPC2"
    wait_for_jobs(user_args)
217
def get_cmd_args():
    """ Parse command line arguments """

    # Table-driven registration: (flag, keyword arguments) pairs, added to
    # the parser in order so the --help output is unchanged.
    option_table = [
        ("--lava-url",
         dict(dest="lava_url", action="store",
              help="LAVA lab URL (without RPC2)")),
        ("--job-ids",
         dict(dest="job_ids", action="store", required=True,
              help="Comma separated list of job IDS")),
        ("--lava-token",
         dict(dest="token_secret", action="store",
              help="LAVA auth token")),
        ("--lava-user",
         dict(dest="token_usr", action="store",
              help="LAVA username")),
        ("--use-env",
         dict(dest="token_from_env", action="store_true", default=False,
              help="Use LAVA auth info from environment")),
        ("--lava-timeout",
         dict(dest="dispatch_timeout", action="store", type=int, default=3600,
              help="Time in seconds to wait for all jobs")),
        ("--artifacts-path",
         dict(dest="artifacts_path", action="store",
              help="Download LAVA artifacts to this directory")),
    ]

    parser = argparse.ArgumentParser(description="Lava Wait Jobs")
    cmdargs = parser.add_argument_group("Lava Wait Jobs")
    for flag, kwargs in option_table:
        cmdargs.add_argument(flag, **kwargs)
    return parser.parse_args()
248
249
# Script entry point: parse CLI arguments and run the wait/report flow.
if __name__ == "__main__":
    main(get_cmd_args())