#!/usr/bin/env python3

""" utils.py:

    various simple and commonly used methods and classes shared by the scripts
    in the CI environment """

from __future__ import print_function

__copyright__ = """
/*
 * Copyright (c) 2018-2022, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */
 """

__author__ = "tf-m@lists.trustedfirmware.org"
__project__ = "Trusted Firmware-M Open CI"
__version__ = "1.4.0"

import os
import re
import sys
import yaml
import requests
import argparse
import json
import itertools
from shutil import move
from collections import OrderedDict, namedtuple
from subprocess import Popen, PIPE, STDOUT, check_output


def detect_python3():
    """ Return true if script is run with Python3 interpreter """

    return sys.version_info > (3, 0)


def find_missing_files(file_list):
    """ Return the files in file_list that do not exist """

    F = set(file_list)
    T = set(list(filter(os.path.isfile, file_list)))
    return list(F.difference(T))


def resolve_rel_path(target_path, origin_path=os.getcwd()):
    """ Resolve relative path from origin to target. By default origin
    path is current working directory. """

    common = os.path.commonprefix([origin_path, target_path])
    return os.path.relpath(target_path, common)
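
# Example use of resolve_rel_path() (illustrative paths, not from the CI):
#
#   resolve_rel_path("/home/user/ci/build/artifacts")
#   # -> "ci/build/artifacts" when the current working directory is "/home/user"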


def print_test_dict(data_dict,
                    pad_space=80,
                    identation=5,
                    titl="Summary",
                    pad_char="*"):

    """ Configurable print formatter aimed for dictionaries of the type
    {"TEST NAME": "RESULT"} used in CI systems. It will also return
    the string which is printed """

    # Calculate pad space between variables x, y to achieve alignment on y
    # taking into consideration a maximum alignment boundary p and
    # possible indentation i
    def flex_pad(x, y, p, i):
        return " " * (p - i * 2 - len(x) - len(y)) + "-> "

    # Calculate the padding for the dataset
    tests = [k + flex_pad(k,
                          v,
                          pad_space,
                          identation) + v for k, v in data_dict.items()]

    # Add the indentation
    tests = map(lambda x: " " * identation + x, tests)

    # Convert to string
    tests = "\n".join(tests)

    # Calculate the top header padding, ceiling any rounding errors
    hdr_pad = (pad_space - len(titl) - 3) / 2

    if detect_python3():
        hdr_pad = int(hdr_pad)

    # Generate a print formatting dictionary
    print_dict = {"pad0": pad_char * (hdr_pad),
                  "pad1": pad_char * (hdr_pad + 1 if len(titl) % 2
                                      else hdr_pad),
                  "sumry": tests,
                  "pad2": pad_char * pad_space,
                  "titl": titl}

    # Compose & print the report
    r = "\n%(pad0)s %(titl)s %(pad1)s\n\n%(sumry)s\n\n%(pad2)s\n" % print_dict
    print(r)
    return r
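
# Example use of print_test_dict() (made-up results, output is indicative):
#
#   print_test_dict({"Config_A Regression": "PASSED",
#                    "Config_B Regression": "FAILED"},
#                   titl="Nightly Summary")
#   # Prints a "*" padded header, one "name -> result" line per entry and a
#   # closing "*" ruler, and returns the same text as a string.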


def print_test(t_name=None, t_list=None, status="failed", tname="Tests"):
    """ Print a list of tests in a structured ASCII table format """

    gfx_line1 = "=" * 80
    gfx_line2 = "\t" + "-" * 70
    if t_name:
        print("%(line)s\n%(name)s\n%(line)s" % {"line": gfx_line1,
                                                "name": t_name})
    print("%s %s:" % (tname, status))
    print(gfx_line2 + "\n" +
          "\n".join(["\t| %(key)s%(pad)s|\n%(line)s" % {
              "key": n,
              "pad": (66 - len(n)) * " ",
              "line": gfx_line2} for n in t_list]))


def test(test_list,
         test_dict,
         test_name="TF-M Test",
         pass_text=["PASSED", "PRESENT"],
         error_on_failed=True,
         summary=True):

    """ Using a test_list and a test results dictionary in the format of
    test_name: result key-value pairs, test() will verify that every single
    entry in test_list has been tested and has passed. The values treated as
    a pass can be overridden through pass_text, and the error_on_failed flag
    exits the script with failure if a single test fails or is not detected.
    Returns a report dictionary containing the overall status and the
    passed/failed/missing entries, if error_on_failed is not set.
    """

    t_report = {"name": test_name,
                "success": None,
                "passed": [],
                "failed": [],
                "missing": []}
    # Clean-up tests that are not requested by test_list
    test_dict = {k: v for k, v in test_dict.items() if k in test_list}

    # Calculate the difference of the two sets to find missing tests
    t_report["missing"] = list(set(test_list) - set(test_dict.keys()))

    # Sort the items into the appropriate lists (failed or passed)
    # based on their status.
    for k, v in test_dict.items():
        # print(k, v)
        key = "passed" if v in pass_text else "failed"
        t_report[key] += [k]

    # For the test to pass every single test in test_list needs to be present
    # and be in the passed list
    if len(test_list) == len(t_report["passed"]):
        t_report["success"] = True
    else:
        t_report["success"] = False

    # Print a summary
    if summary:
        if t_report["passed"]:
            print_test(test_name, t_report["passed"], status="passed")
        if t_report["missing"]:
            print_test(test_name, t_report["missing"], status="missing")
        if t_report["failed"]:
            print_test(test_name, t_report["failed"], status="failed")

    print("\nTest %s has %s!" % (t_report["name"],
                                 " been successful" if t_report["success"]
                                 else "failed"))
    print("-" * 80)
    if error_on_failed:
        syscode = 0 if t_report["success"] else 1
        sys.exit(syscode)
    return t_report
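
# Example use of test() (hypothetical results; error_on_failed=False so the
# script is not terminated by a failing entry):
#
#   report = test(["Test_A", "Test_B", "Test_C"],
#                 {"Test_A": "PASSED", "Test_B": "FAILED"},
#                 error_on_failed=False,
#                 summary=False)
#   # report["success"] is False, report["failed"] == ["Test_B"],
#   # report["missing"] == ["Test_C"]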


def save_json(f_name, data_object):
    """ Save object to json file """

    with open(f_name, "w") as F:
        F.write(json.dumps(data_object, indent=2))


def save_dict_json(f_name, data_dict, sort_list=None):
    """ Save a dictionary object to file with optional sorting """

    data_object = sort_dict(data_dict, sort_list) if sort_list else data_dict
    save_json(f_name, data_object)


def sort_dict(config_dict, sort_order_list=None):
    """ Create a fixed order dictionary out of a config dataset """

    if sort_order_list:
        ret = OrderedDict([(k, config_dict[k]) for k in sort_order_list])
    else:
        ret = OrderedDict([(k, config_dict[k]) for k in sorted(config_dict)])
    return ret
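
# Example use of sort_dict() (illustrative data):
#
#   sort_dict({"b": 2, "a": 1, "c": 3}, sort_order_list=["c", "a", "b"])
#   # -> OrderedDict([('c', 3), ('a', 1), ('b', 2)])
#   sort_dict({"b": 2, "a": 1})
#   # -> OrderedDict([('a', 1), ('b', 2)])  (alphabetical when no order given)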


def load_json(f_name):
    """ Load object from json file """

    with open(f_name, "r") as F:
        try:
            return json.loads(F.read())
        except ValueError as exc:
            print("No JSON object could be decoded from file: %s" % f_name)
        except IOError:
            print("Error opening file: %s" % f_name)
    raise Exception("Failed to load file")


def load_yaml(f_name):
    """ Load object from yaml file """

    with open(f_name, "r") as F:
        try:
            return yaml.safe_load(F.read())
        except yaml.YAMLError as exc:
            print("Error parsing file: %s" % f_name)
        except IOError:
            print("Error opening file: %s" % f_name)
    raise Exception("Failed to load file")


def subprocess_log(cmd, log_f, prefix=None, append=False, silent=False):
    """ Run a command as a subprocess and log the output to stdout and to a
    file. If prefix is specified it will be added as the first line of the
    file """

    with open(log_f, 'a' if append else "w") as F:
        if prefix:
            F.write(prefix + "\n")
        pcss = Popen(cmd,
                     stdout=PIPE,
                     stderr=STDOUT,
                     shell=True,
                     env=os.environ)
        for line in pcss.stdout:
            if detect_python3():
                line = line.decode("utf-8")
            if not silent:
                sys.stdout.write(line)
            F.write(line)
        pcss.communicate()
        return pcss.returncode
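
# Example use of subprocess_log() (hypothetical command and log file name):
#
#   ret = subprocess_log("cmake --build build -- -j4",
#                        "build.log",
#                        prefix="Building configuration X",
#                        silent=True)
#   # The command's combined stdout/stderr ends up in build.log and ret holds
#   # the exit code.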


def run_proccess(cmd):
    """ Run a command as a subprocess, consuming its output, and return the
    process's exit code """

    pcss = Popen(cmd,
                 stdout=PIPE,
                 stderr=PIPE,
                 shell=True,
                 env=os.environ)
    pcss.communicate()
    return pcss.returncode


def get_pid_status(pid):
    """ Read procfs on Linux machines to determine a process's status.
    Returns the status if the process exists or None if it does not """

    try:
        with open("/proc/%s/status" % pid, "r") as F:
            full_state = F.read()
            return re.findall(r'(?:State:\t[A-Z]{1} \()(\w+)',
                              full_state, re.MULTILINE)[0]
    except Exception as e:
        print("Exception", e)


def check_pid_status(pid, status_list):
    """ Check a process's status against a provided list and return True
    if the process exists and has a status included in the list. (Linux) """

    pid_status = get_pid_status(pid)

    if not pid_status:
        print("PID %s does not exist." % pid)
        return False

    ret = pid_status in status_list
    # TODO Remove debug print
    if not ret:
        print("PID status %s not in %s" % (pid_status, ",".join(status_list)))
    return ret
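
# Example use of check_pid_status() (Linux only; the PID is illustrative):
#
#   if check_pid_status(1234, ["running", "sleeping"]):
#       print("Process 1234 is still alive")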


def list_chunks(l, n):
    """ Yield successive n-sized chunks from l. """

    for i in range(0, len(l), n):
        yield l[i:i + n]
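
# Example use of list_chunks() (illustrative data):
#
#   list(list_chunks([1, 2, 3, 4, 5], 2))
#   # -> [[1, 2], [3, 4], [5]]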


def export_config_map(config_m, dir=None):
    """ Will export a dictionary of configurations to a group of JSON files """

    _dir = dir if dir else os.getcwd()
    for _cname, _cfg in config_m.items():
        _cname = _cname.lower()
        _fname = os.path.join(_dir, _cname + ".json")
        print("Exporting config %s" % _fname)
        save_json(_fname, _cfg)


def gen_cfg_combinations(name, categories, *args):
    """ Create a list of named tuples of `name`, with elements defined in a
    space separated string `categories` and an equal amount of lists for said
    categories provided as arguments. Order of arguments should match the
    order of the categories lists """

    build_config = namedtuple(name, categories)
    return [build_config(*x) for x in itertools.product(*args)]
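
# Example use of gen_cfg_combinations() (illustrative categories and values):
#
#   gen_cfg_combinations("BuildConfig",
#                        "target compiler",
#                        ["AN521", "AN519"],
#                        ["GCC", "ARMCLANG"])
#   # -> [BuildConfig(target='AN521', compiler='GCC'),
#   #     BuildConfig(target='AN521', compiler='ARMCLANG'),
#   #     BuildConfig(target='AN519', compiler='GCC'),
#   #     BuildConfig(target='AN519', compiler='ARMCLANG')]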


def show_progress(current_count, total_count):
    """ Display the progress of current_count over total_count as a
    percentage, rendered as a 70 character wide bar """

    progress = int((current_count / total_count) * 100)
    completed_count = int(progress * 0.7)
    remaining_count = 70 - completed_count
    print("[ %s%s | %d%% ]" % ("#" * completed_count,
                               "~" * remaining_count,
                               progress))


def get_cmd_args(descr="", parser=None):
    """ Parse command line arguments """

    if not parser:
        parser = argparse.ArgumentParser(description=descr)
    return parser.parse_args()


def arm_non_eabi_size(filename):
    """ Run the arm-none-eabi-size command and parse the output using regex.
    Will return a list with the formatted data as well as the raw output of
    the command """

    size_info_rex = re.compile(r'^\s+(?P<text>[0-9]+)\s+(?P<data>[0-9]+)\s+'
                               r'(?P<bss>[0-9]+)\s+(?P<dec>[0-9]+)\s+'
                               r'(?P<hex>[0-9a-f]+)\s+(?P<file>\S+)',
                               re.MULTILINE)

    eabi_size = check_output(["arm-none-eabi-size",
                              filename],
                             timeout=18).decode('UTF-8').rstrip()

    size_data = re.search(size_info_rex, eabi_size)

    return [{"text": size_data.group("text"),
             "data": size_data.group("data"),
             "bss": size_data.group("bss"),
             "dec": size_data.group("dec"),
             "hex": size_data.group("hex")}, eabi_size]
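
# Example use of arm_non_eabi_size() (requires the GNU Arm toolchain on PATH;
# the file name and numbers are illustrative):
#
#   sizes, raw = arm_non_eabi_size("tfm_s.axf")
#   # sizes -> {"text": "120000", "data": "400", "bss": "2800",
#   #           "dec": "123200", "hex": "1e140"}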


def fromelf(filename):
    """ Run the fromelf command and parse the output using regex. Will
    return a list with the formatted data as well as the raw output of the
    command """

    size_info_rex = re.compile(r'^\s+(?P<Code>[0-9]+)\s+(?P<data>[0-9]+)\s+'
                               r'(?P<RO>[0-9]+)\s+(?P<RW>[0-9]+)\s+'
                               r'(?P<ZI>[0-9]+)\s+(?P<Debug>[0-9a-f]+)\s',
                               re.MULTILINE)

    fromelf_size = check_output(["fromelf", "-z", filename],
                                timeout=18).decode('UTF-8').rstrip()

    size_data = re.search(size_info_rex, fromelf_size)

    return [{"Code": size_data.group("Code"),
             "Inline Data": size_data.group("data"),
             "RO Data": size_data.group("RO"),
             "RW Data": size_data.group("RW"),
             "ZI Data": size_data.group("ZI"),
             "Debug": size_data.group("Debug")}, fromelf_size]


def list_subdirs(directory):
    """ Return the absolute paths of the immediate sub-directories of
    directory """

    directory = os.path.abspath(directory)
    abs_sub_dirs = [os.path.join(directory, n) for n in os.listdir(directory)]
    return [n for n in abs_sub_dirs if os.path.isdir(os.path.realpath(n))]


def get_local_git_info(directory, json_out_f=None):
    """ Extract git related information from a target directory. It allows
    optional export to json file """

    directory = os.path.abspath(directory)
    cur_dir = os.path.abspath(os.getcwd())
    os.chdir(directory)

    # System commands to collect information
    cmd1 = "git log HEAD -n 1 --pretty=format:'%H%x09%an%x09%ae%x09%ai%x09%s'"
    cmd2 = "git log HEAD -n 1 --pretty=format:'%b'"
    cmd3 = "git remote -v | head -n 1 | awk '{ print $2}';"
    cmd4 = ("git ls-remote --heads origin | "
            "grep $(git rev-parse HEAD) | cut -d / -f 3")

    git_info_rex = re.compile(r'(?P<body>^[\s\S]*?)((?:Change-Id:\s)'
                              r'(?P<change_id>.*)\n)((?:Signed-off-by:\s)'
                              r'(?P<sign_off>.*)\n?)', re.MULTILINE)

    proc_res = []
    for cmd in [cmd1, cmd2, cmd3, cmd4]:
        r, e = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
        if e:
            print("Error", e)
            return
        else:
            try:
                txt_body = r.decode('ascii')
            except UnicodeDecodeError as E:
                txt_body = r.decode('utf-8')
            proc_res.append(txt_body.rstrip())

    # Unpack and tag the data
    hash, name, email, date, subject = proc_res[0].split('\t')

    _raw_body = proc_res[1]
    _bd_items = re.findall(r'(Signed-off-by|Change-Id)', _raw_body,
                           re.MULTILINE)

    signed_off = None
    body = None
    change_id = None
    # If both sign-off and gerrit-id exist
    if len(_bd_items) == 2:
        m = git_info_rex.search(_raw_body)
        print(git_info_rex.findall(_raw_body))
        if m is not None:
            match_dict = m.groupdict()
            if "body" in match_dict.keys():
                body = match_dict["body"]
            if "sign_off" in match_dict.keys():
                signed_off = match_dict["sign_off"]
            if "change_id" in match_dict.keys():
                change_id = match_dict["change_id"]
        else:
            print("Error: Could not regex parse message", repr(_raw_body))
            body = _raw_body
    # If only one of sign-off / gerrit-id exists
    elif len(_bd_items) == 1:
        _entry_key = _bd_items[0]
        body, _extra = _raw_body.split(_entry_key)
        if _entry_key == "Change-Id":
            change_id = _extra
        else:
            signed_off = _extra
    # If the message contains the commit message body only
    else:
        body = _raw_body

    # Attempt to read the branch from Gerrit Trigger
    try:
        branch = os.environ["GERRIT_BRANCH"]
    # If not, compare the commit hash with the remote branches to determine
    # the branch of origin. Warning: this assumes that only one branch has
    # its head on this commit.
    except KeyError as E:
        branch = proc_res[3]

    remote = proc_res[2]
    # Internal Gerrit specific code
    # Intended for converting the git remote to a more usable url
    known_remotes = ["https://gerrit.oss.arm.com",
                     "http://gerrit.mirror.oss.arm.com"]

    for kr in known_remotes:
        if kr in remote:
            print("Applying Remote specific patch to remote", kr)

            remote = remote.split(kr)[-1][1:]
            print("REMOTE", remote)
            remote = "%s/gitweb?p=%s.git;a=commit;h=%s" % (kr, remote, hash)
            break

    out = {"author": name.strip(),
           "email": email.strip(),
           "dir": directory.strip(),
           "remote": remote.strip(),
           "date": date.strip(),
           "commit": hash.strip(),
           "subject": subject.strip(),
           "message": body.strip(),
           "change_id": change_id.strip() if change_id is not None else "N.A",
           "sign_off": signed_off.strip() if signed_off is not None else "N.A",
           "branch": branch.strip()}

    # Restore the directory path
    os.chdir(cur_dir)
    if json_out_f:
        save_json(json_out_f, out)
    return out
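
# Example use of get_local_git_info() (hypothetical checkout path; the keys
# shown are among the ones populated by the function):
#
#   info = get_local_git_info("trusted-firmware-m", json_out_f="git_info.json")
#   print(info["commit"], info["branch"], info["subject"])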


def get_remote_git_info(url):
    """ Collect git information from a Linux Kernel web repository """

    auth_rex = re.compile(r'(?:<th>author</th>.*)(?:span>)(.*)'
                          r'(?:;.*\'right\'>)([0-9\+\-:\s]+)')
    # committer_rex = re.compile(r'(?:<th>committer</th>.*)(?:</div>)(.*)'
    #                            r'(?:;.*\'right\'>)([0-9\+\-:\s]+)')
    subject_rex = re.compile(r'(?:\'commit-subject\'>)(.*)(?:</div>)')
    body_rex = re.compile(r'(?:\'commit-msg\'>)([\s\S^<]*)(?:</div>'
                          r'<div class=\'diffstat-header\'>)', re.MULTILINE)

    content = requests.get(url).text
    author, date = re.search(auth_rex, content).groups()
    subject = re.search(subject_rex, content).groups()[0]
    body = re.search(body_rex, content).groups()[0]
    remote, hash = url.split("=")

    outdict = {"author": author,
               "remote": remote[:-3],
               "date": date,
               "commit": hash,
               "subject": subject,
               "message": body}
    # Clean up html noise
    return {k: re.sub(r'&[a-z]t;?', "", v) for k, v in outdict.items()}


def convert_git_ref_path(dir_path):
    """ If a git long hash is detected in a path move it to a short hash """

    # Detect a git hash on a directory naming format of name_{hash},
    # {hash}, name-{hash}
    git_hash_rex = re.compile(r'(?:[_|-])*([a-f0-9]{40})')

    # If the checkout directory name contains a git reference convert to short
    git_hash = git_hash_rex.findall(dir_path)
    if len(git_hash):
        d = dir_path.replace(git_hash[0], git_hash[0][:7])
        print("Renaming %s -> %s" % (dir_path, d))
        move(dir_path, d)
        dir_path = d
    return dir_path


def list_filtered_tree(directory, rex_filter=None):
    """ Walk a directory tree and return all file paths, optionally filtered
    by a regular expression """

    ret = []
    for path, subdirs, files in os.walk(directory):
        for fname in files:
            ret.append(os.path.join(path, fname))
    if rex_filter:
        rex = re.compile(rex_filter)
        return [n for n in ret if rex.search(n)]
    else:
        return ret
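
# Example use of list_filtered_tree() (illustrative directory and pattern):
#
#   list_filtered_tree("build/install", rex_filter=r'\.bin$')
#   # -> every *.bin file found anywhere under build/install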


def gerrit_patch_from_changeid(remote, change_id):
    """ Use Gerrit's REST api for a best effort to retrieve the url of the
    patch-set under review """

    try:
        r = requests.get('%s/changes/%s' % (remote, change_id),
                         headers={'Accept': 'application/json'})
        resp_data = r.text[r.text.find("{"):].rstrip()
        change_no = json.loads(resp_data)["_number"]
        return "%s/#/c/%s" % (remote, change_no)
    except Exception as E:
        print("Failed to retrieve change (%s) from URL %s" % (change_id,
                                                              remote))
        print("Exception Thrown:", E)
        raise Exception()