#!/usr/bin/env python3

""" utils.py:

    various simple and commonly used methods and classes shared by the scripts
    in the CI environment """

from __future__ import print_function

__copyright__ = """
/*
 * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */
 """
__author__ = "Minos Galanakis"
__email__ = "minos.galanakis@linaro.org"
__project__ = "Trusted Firmware-M Open CI"
__status__ = "stable"
__version__ = "1.1"

import os
import re
import sys
import yaml
import requests
import argparse
import json
import itertools
from shutil import move
from collections import OrderedDict, namedtuple
from subprocess import Popen, PIPE, STDOUT, check_output


def detect_python3():
    """ Return true if script is run with Python3 interpreter """

    return sys.version_info > (3, 0)


def find_missing_files(file_list):
    """ Return the files that do not exist in the file_list """

    F = set(file_list)
    T = set(list(filter(os.path.isfile, file_list)))
    return list(F.difference(T))


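# Illustrative usage (not part of the original module; file names below are
# hypothetical):
#
#   missing = find_missing_files(["build/tfm_s.bin", "build/tfm_ns.bin"])
#   if missing:
#       print("Missing artifacts:", ", ".join(missing))
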
def resolve_rel_path(target_path, origin_path=os.getcwd()):
    """ Resolve relative path from origin to target. By default origin
    path is current working directory. """

    common = os.path.commonprefix([origin_path, target_path])
    return os.path.relpath(target_path, common)


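# Illustrative usage (hypothetical paths): with the current working directory
# at /work/ci, resolve_rel_path("/work/ci/builds/cfg1") is expected to return
# "builds/cfg1". Note that the default origin_path is evaluated once, when the
# module is imported.
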
def print_test_dict(data_dict,
                    pad_space=80,
                    identation=5,
                    titl="Summary",
                    pad_char="*"):

    """ Configurable print formatter aimed for dictionaries of the type
    {"TEST NAME": "RESULT"} used in CI systems. It will also return
    the string which is printed """

    # Calculate pad space between variables x, y to achieve alignment on y
    # taking into consideration a maximum alignment boundary p and
    # possible indentation i
    def flex_pad(x, y, p, i):
        return " " * (p - i * 2 - len(x) - len(y)) + "-> "

    # Calculate the padding for the dataset
    tests = [k + flex_pad(k,
                          v,
                          pad_space,
                          identation) + v for k, v in data_dict.items()]

    # Add the indentation
    tests = map(lambda x: " " * identation + x, tests)

    # Convert to string
    tests = "\n".join(tests)

    # Calculate the top header padding, ceiling any rounding errors
    hdr_pad = (pad_space - len(titl) - 3) / 2

    if detect_python3():
        hdr_pad = int(hdr_pad)

    # Generate a print formatting dictionary
    print_dict = {"pad0": pad_char * (hdr_pad),
                  "pad1": pad_char * (hdr_pad + 1 if len(titl) % 2
                                      else hdr_pad),
                  "sumry": tests,
                  "pad2": pad_char * pad_space,
                  "titl": titl}

    # Compose & print the report
    r = "\n%(pad0)s %(titl)s %(pad1)s\n\n%(sumry)s\n\n%(pad2)s\n" % print_dict
    print(r)
    return r


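# Illustrative usage (hypothetical test names/results), a minimal sketch of
# the expected report layout:
#
#   print_test_dict({"Core_Test_1": "PASSED", "Core_Test_2": "FAILED"},
#                   titl="Nightly Summary")
#
# prints a padded header line, one indented "name -> result" row per entry
# and a closing separator, and returns the same text as a string.
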
def print_test(t_name=None, t_list=None, status="failed", tname="Tests"):
    """ Print a list of tests in a structured ascii table format """

    gfx_line1 = "=" * 80
    gfx_line2 = "\t" + "-" * 70
    if t_name:
        print("%(line)s\n%(name)s\n%(line)s" % {"line": gfx_line1,
                                                "name": t_name})
    print("%s %s:" % (tname, status))
    print(gfx_line2 + "\n" +
          "\n".join(["\t| %(key)s%(pad)s|\n%(line)s" % {
              "key": n,
              "pad": (66 - len(n)) * " ",
              "line": gfx_line2} for n in t_list]))


def test(test_list,
         test_dict,
         test_name="TF-M Test",
         pass_text=["PASSED", "PRESENT"],
         error_on_failed=True,
         summary=True):

    """ Using a test_list and a test results dictionary in the format of
    test_name: result key-value pairs, test() verifies that every single
    entry in the test_list has been tested and passed. The texts counted as
    a passing status can be overridden through pass_text, and the
    error_on_failed flag exits the script with failure if a single test
    fails or is not detected. Returns a dictionary containing the overall
    status and passed/failed/missing fields for each test, if
    error_on_failed is not set.
    """

    t_report = {"name": test_name,
                "success": None,
                "passed": [],
                "failed": [],
                "missing": []}
    # Clean-up tests that are not requested by test_list
    test_dict = {k: v for k, v in test_dict.items() if k in test_list}

    # Calculate the difference of the two sets to find missing tests
    t_report["missing"] = list(set(test_list) - set(test_dict.keys()))

    # Sort the items into the appropriate lists (failed or passed)
    # based on their status.
    for k, v in test_dict.items():
        key = "passed" if v in pass_text else "failed"
        t_report[key] += [k]

    # For the test to pass every single test in test_list needs to be present
    # and be in the passed list
    if len(test_list) == len(t_report["passed"]):
        t_report["success"] = True
    else:
        t_report["success"] = False

    # Print a summary
    if summary:
        if t_report["passed"]:
            print_test(test_name, t_report["passed"], status="passed")
        if t_report["missing"]:
            print_test(test_name, t_report["missing"], status="missing")
        if t_report["failed"]:
            print_test(test_name, t_report["failed"], status="failed")

    print("\nTest %s has %s!" % (t_report["name"],
                                 " been successful" if t_report["success"]
                                 else "failed"))
    print("-" * 80)
    if error_on_failed:
        syscode = 0 if t_report["success"] else 1
        sys.exit(syscode)
    return t_report


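# Illustrative usage (hypothetical test names/results). With
# error_on_failed=False the report dictionary is returned instead of exiting:
#
#   report = test(["Test_A", "Test_B", "Test_C"],
#                 {"Test_A": "PASSED", "Test_B": "FAILED"},
#                 test_name="Example",
#                 error_on_failed=False)
#   # report["passed"]  == ["Test_A"]
#   # report["failed"]  == ["Test_B"]
#   # report["missing"] == ["Test_C"]
#   # report["success"] is False
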
def save_json(f_name, data_object):
    """ Save object to json file """

    with open(f_name, "w") as F:
        F.write(json.dumps(data_object, indent=2))


def save_dict_json(f_name, data_dict, sort_list=None):
    """ Save a dictionary object to file with optional sorting """

    if sort_list:
        data_dict = sort_dict(data_dict, sort_list)
    save_json(f_name, data_dict)


def sort_dict(config_dict, sort_order_list=None):
    """ Create a fixed order dictionary out of a config dataset """

    if sort_order_list:
        ret = OrderedDict([(k, config_dict[k]) for k in sort_order_list])
    else:
        ret = OrderedDict([(k, config_dict[k]) for k in sorted(config_dict)])
    return ret


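# Illustrative usage (hypothetical keys): sort_dict({"b": 1, "a": 2}) returns
# OrderedDict([("a", 2), ("b", 1)]), while passing sort_order_list=["b", "a"]
# keeps that explicit order instead.
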
def load_json(f_name):
    """ Load object from json file """

    with open(f_name, "r") as F:
        try:
            return json.loads(F.read())
        except ValueError as exc:
            print("No JSON object could be decoded from file: %s" % f_name)
        except IOError:
            print("Error opening file: %s" % f_name)
    raise Exception("Failed to load file")


def load_yaml(f_name):
    """ Load object from yaml file """

    with open(f_name, "r") as F:
        try:
            # safe_load is sufficient for plain configuration data and avoids
            # constructing arbitrary Python objects from the input
            return yaml.safe_load(F.read())
        except yaml.YAMLError as exc:
            print("Error parsing file: %s" % f_name)
        except IOError:
            print("Error opening file: %s" % f_name)
    raise Exception("Failed to load file")


def subprocess_log(cmd, log_f, prefix=None, append=False, silent=False):
    """ Run a command as subprocess and log the output to stdout and file.
    If prefix is specified it will be added as the first line in file """

    with open(log_f, 'a' if append else "w") as F:
        if prefix:
            F.write(prefix + "\n")
        pcss = Popen(cmd,
                     stdout=PIPE,
                     stderr=STDOUT,
                     shell=True,
                     env=os.environ)
        for line in pcss.stdout:
            if detect_python3():
                line = line.decode("utf-8")
            if not silent:
                sys.stdout.write(line)
            F.write(line)
        pcss.communicate()
        return pcss.returncode


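# Illustrative usage (hypothetical command and log file):
#
#   ret = subprocess_log("cmake --build .",
#                        "build.log",
#                        prefix="Build started",
#                        silent=True)
#   # ret holds the command's exit code; stdout/stderr are written to
#   # build.log only, since silent=True suppresses the console echo.
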
def run_proccess(cmd):
    """ Run a command as subprocess with stdout and stderr captured and
    discarded, and return its exit code """

    pcss = Popen(cmd,
                 stdout=PIPE,
                 stderr=PIPE,
                 shell=True,
                 env=os.environ)
    pcss.communicate()
    return pcss.returncode


def get_pid_status(pid):
    """ Read procfs on Linux machines to determine a process's status.
    Returns the status if the process exists or None if it does not """

    try:
        with open("/proc/%s/status" % pid, "r") as F:
            full_state = F.read()
            return re.findall(r'(?:State:\t[A-Z]{1} \()(\w+)',
                              full_state, re.MULTILINE)[0]
    except Exception as e:
        print("Exception", e)


def check_pid_status(pid, status_list):
    """ Check a process's status against a provided list and return True
    if the process exists and has a status included in the list. (Linux) """

    pid_status = get_pid_status(pid)

    if not pid_status:
        print("PID %s does not exist." % pid)
        return False

    ret = pid_status in status_list
    # TODO Remove debug print
    if not ret:
        print("PID status %s not in %s" % (pid_status, ",".join(status_list)))
    return ret


def list_chunks(l, n):
    """ Yield successive n-sized chunks from l. """

    for i in range(0, len(l), n):
        yield l[i:i + n]


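# Illustrative usage: list(list_chunks([1, 2, 3, 4, 5], 2)) is expected to
# yield [[1, 2], [3, 4], [5]].
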
def export_config_map(config_m, dir=None):
    """ Will export a dictionary of configurations to a group of JSON files """

    _dir = dir if dir else os.getcwd()
    for _cname, _cfg in config_m.items():
        _cname = _cname.lower()
        _fname = os.path.join(_dir, _cname + ".json")
        print("Exporting config %s" % _fname)
        save_json(_fname, _cfg)


def gen_cfg_combinations(name, categories, *args):
    """ Create a list of named tuples of `name`, with elements defined in a
    space separated string `categories` and an equal amount of lists for said
    categories provided as arguments. Order of arguments should match the
    order of the categories lists """

    build_config = namedtuple(name, categories)
    return [build_config(*x) for x in itertools.product(*args)]


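# Illustrative usage (hypothetical categories/values):
#
#   cfgs = gen_cfg_combinations("BuildCfg",
#                               "target compiler",
#                               ["AN521", "MUSCA_B1"],
#                               ["GNUARM", "ARMCLANG"])
#   # -> 4 BuildCfg named tuples, e.g. BuildCfg(target="AN521",
#   #    compiler="GNUARM"), covering the full cartesian product.
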
def show_progress(current_count, total_count):
    """ Display the progress percentage of current_count over total_count
    as a text progress bar """

    progress = int((current_count / total_count) * 100)
    completed_count = int(progress * 0.7)
    remaining_count = 70 - completed_count
    print("[ %s%s | %d%% ]" % ("#" * completed_count,
                               "~" * remaining_count,
                               progress))


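# Illustrative usage: show_progress(3, 12) is expected to print a 70 column
# bar of 17 '#' marks padded with '~', followed by "| 25% ]".
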
def get_cmd_args(descr="", parser=None):
    """ Parse command line arguments """
    # Parse command line arguments to override config

    if not parser:
        parser = argparse.ArgumentParser(description=descr)
    return parser.parse_args()


def arm_non_eabi_size(filename):
    """ Run the arm-none-eabi-size command and parse the output using regex.
    Will return a list with the formatted data as well as the raw output of
    the command """

    size_info_rex = re.compile(r'^\s+(?P<text>[0-9]+)\s+(?P<data>[0-9]+)\s+'
                               r'(?P<bss>[0-9]+)\s+(?P<dec>[0-9]+)\s+'
                               r'(?P<hex>[0-9a-f]+)\s+(?P<file>\S+)',
                               re.MULTILINE)

    eabi_size = check_output(["arm-none-eabi-size",
                              filename],
                             timeout=18).decode('UTF-8').rstrip()

    size_data = re.search(size_info_rex, eabi_size)

    return [{"text": size_data.group("text"),
             "data": size_data.group("data"),
             "bss": size_data.group("bss"),
             "dec": size_data.group("dec"),
             "hex": size_data.group("hex")}, eabi_size]


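# Illustrative usage (hypothetical file name; requires the arm-none-eabi
# toolchain to be installed and on PATH):
#
#   sizes, raw = arm_non_eabi_size("build/tfm_s.axf")
#   print(sizes["text"], sizes["bss"], sizes["dec"])
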
def list_subdirs(directory):
    """ Return the absolute paths of the immediate subdirectories of
    directory """

    directory = os.path.abspath(directory)
    abs_sub_dirs = [os.path.join(directory, n) for n in os.listdir(directory)]
    return [n for n in abs_sub_dirs if os.path.isdir(os.path.realpath(n))]


def get_local_git_info(directory, json_out_f=None):
    """ Extract git related information from a target directory. It allows
    optional export to json file """

    directory = os.path.abspath(directory)
    cur_dir = os.path.abspath(os.getcwd())
    os.chdir(directory)

    # System commands to collect information
    cmd1 = "git log HEAD -n 1 --pretty=format:'%H%x09%an%x09%ae%x09%ai%x09%s'"
    cmd2 = "git log HEAD -n 1 --pretty=format:'%b'"
    cmd3 = "git remote -v | head -n 1 | awk '{ print $2}';"
    cmd4 = ("git ls-remote --heads origin | "
            "grep $(git rev-parse HEAD) | cut -d / -f 3")

    git_info_rex = re.compile(r'(?P<body>^[\s\S]*?)((?:Change-Id:\s)'
                              r'(?P<change_id>.*)\n)((?:Signed-off-by:\s)'
                              r'(?P<sign_off>.*)\n?)', re.MULTILINE)

    proc_res = []
    for cmd in [cmd1, cmd2, cmd3, cmd4]:
        r, e = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
        if e:
            print("Error", e)
            return
        else:
            try:
                txt_body = r.decode('ascii')
            except UnicodeDecodeError as E:
                txt_body = r.decode('utf-8')
            proc_res.append(txt_body.rstrip())

    # Unpack and tag the data
    hash, name, email, date, subject = proc_res[0].split('\t')

    _raw_body = proc_res[1]
    _bd_items = re.findall(r'(Signed-off-by|Change-Id)', _raw_body,
                           re.MULTILINE)

    signed_off = None
    body = None
    change_id = None
    # If both sign-off and gerrit-id exist
    if len(_bd_items) == 2:
        m = git_info_rex.search(_raw_body)
        print(git_info_rex.findall(_raw_body))
        if m is not None:
            match_dict = m.groupdict()
            if "body" in match_dict.keys():
                body = match_dict["body"]
            if "sign_off" in match_dict.keys():
                signed_off = match_dict["sign_off"]
            if "change_id" in match_dict.keys():
                change_id = match_dict["change_id"]
        else:
            print("Error: Could not regex parse message", repr(_raw_body))
            body = _raw_body
    # If only one of sign-off / gerrit-id exist
    elif len(_bd_items) == 1:
        _entry_key = _bd_items[0]
        body, _extra = _raw_body.split(_entry_key)
        if _entry_key == "Change-Id":
            change_id = _extra
        else:
            signed_off = _extra
    # If the message contains commit message body only
    else:
        body = _raw_body

    # Attempt to read the branch from Gerrit Trigger
    try:
        branch = os.environ["GERRIT_BRANCH"]
    # If not, compare the commit hash with the remote branches to determine
    # the branch of origin. Warning: this assumes that only one branch has
    # its head on this commit.
    except KeyError as E:
        branch = proc_res[3]

    remote = proc_res[2]
    # Internal Gerrit specific code
    # Intended for converting the git remote to a more usable url
    known_remotes = ["https://gerrit.oss.arm.com",
                     "http://gerrit.mirror.oss.arm.com"]

    for kr in known_remotes:
        if kr in remote:
            print("Applying Remote specific patch to remote", kr)

            remote = remote.split(kr)[-1][1:]
            print("REMOTE", remote)
            remote = "%s/gitweb?p=%s.git;a=commit;h=%s" % (kr, remote, hash)
            break

    out = {"author": name.strip(),
           "email": email.strip(),
           "dir": directory.strip(),
           "remote": remote.strip(),
           "date": date.strip(),
           "commit": hash.strip(),
           "subject": subject.strip(),
           "message": body.strip(),
           "change_id": change_id.strip() if change_id is not None else "N.A",
           "sign_off": signed_off.strip() if signed_off is not None else "N.A",
           "branch": branch.strip()}

    # Restore the directory path
    os.chdir(cur_dir)
    if json_out_f:
        save_json(json_out_f, out)
    return out


def get_remote_git_info(url):
    """ Collect git information from a Linux Kernel web repository """

    auth_rex = re.compile(r'(?:<th>author</th>.*)(?:span>)(.*)'
                          r'(?:;.*\'right\'>)([0-9\+\-:\s]+)')
    # commiter_rex = re.compile(r'(?:<th>committer</th>.*)(?:</div>)(.*)'
    #                           r'(?:;.*\'right\'>)([0-9\+\-:\s]+)')
    subject_rex = re.compile(r'(?:\'commit-subject\'>)(.*)(?:</div>)')
    body_rex = re.compile(r'(?:\'commit-msg\'>)([\s\S^<]*)(?:</div>'
                          r'<div class=\'diffstat-header\'>)', re.MULTILINE)

    content = requests.get(url).text
    author, date = re.search(auth_rex, content).groups()
    subject = re.search(subject_rex, content).groups()[0]
    body = re.search(body_rex, content).groups()[0]
    remote, hash = url.split("=")

    outdict = {"author": author,
               "remote": remote[:-3],
               "date": date,
               "commit": hash,
               "subject": subject,
               "message": body}
    # Clean up html noise
    return {k: re.sub(r'&[a-z]t;?', "", v) for k, v in outdict.items()}


def convert_git_ref_path(dir_path):
    """ If a git long hash is detected in a path move it to a short hash """

    # Detect a git hash on a directory naming format of name_{hash},
    # {hash}, name-{hash}
    git_hash_rex = re.compile(r'(?:[_|-])*([a-f0-9]{40})')

    # if checkout directory name contains a git reference convert to short
    git_hash = git_hash_rex.findall(dir_path)
    if len(git_hash):
        d = dir_path.replace(git_hash[0], git_hash[0][:7])
        print("Renaming %s -> %s" % (dir_path, d))
        move(dir_path, d)
        dir_path = d
    return dir_path


def list_filtered_tree(directory, rex_filter=None):
    """ Walk a directory tree and return the full paths of all files,
    optionally keeping only those matching a regex filter """

    ret = []
    for path, subdirs, files in os.walk(directory):
        for fname in files:
            ret.append(os.path.join(path, fname))
    if rex_filter:
        rex = re.compile(rex_filter)
        return [n for n in ret if rex.search(n)]
    else:
        return ret


def gerrit_patch_from_changeid(remote, change_id):
    """ Use Gerrit's REST api for a best effort to retrieve the url of the
    patch-set under review """

    try:
        r = requests.get('%s/changes/%s' % (remote, change_id),
                         headers={'Accept': 'application/json'})
        resp_data = r.text[r.text.find("{"):].rstrip()
        change_no = json.loads(resp_data)["_number"]
        return "%s/#/c/%s" % (remote, change_no)
    except Exception as E:
        print("Failed to retrieve change (%s) from URL %s" % (change_id,
                                                              remote))
        print("Exception Thrown:", E)
        raise Exception()
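
# Illustrative usage (hypothetical Gerrit instance and Change-Id):
#
#   url = gerrit_patch_from_changeid(
#       "https://review.example.org",
#       "I0123456789abcdef0123456789abcdef01234567")
#   # -> "https://review.example.org/#/c/<change number>"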