Sync scripts with Arm internal CI
This patch syncs the utility scripts and the other
scripts in the script directory with the Arm internal CI.
Where a path update is required, the changes have
been commented out.
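
For reference, the core of the new check-banned-api.py script added below is simply the
banned-identifier list joined into one regular expression and searched line by line. A
minimal sketch of that idea follows; the file name, the sample input line and the shortened
API list are illustrative only and not part of the patch.

import re

# Subset of the banned identifiers defined in check-banned-api.py below.
BANNED_APIS = ["strcpy", "strncpy", "sprintf", "strtok", "atoi"]
BANNED_PATTERN = re.compile("|".join(BANNED_APIS))

def report_banned(path, lines):
    """Print a marker under every banned-API hit, mirroring the checker's output."""
    hits = 0
    for line_num, line in enumerate(lines, start=1):
        match = BANNED_PATTERN.search(line)
        if match:
            start, end = match.span()
            print("BANNED API: in line {} of file {}".format(line_num, path))
            print(">>> {}".format(line))
            print("    {}^{}".format(" " * start, "~" * (end - start - 1)))
            hits += 1
    return hits

# Illustrative input only; 'demo.c' is not a file in this tree.
report_banned("demo.c", ["  strcpy(dst, src);"])
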
Signed-off-by: Zelalem <zelalem.aweke@arm.com>
Change-Id: Ifa4bd805e345184d1378e8423e5f878a2fbfbcd4
diff --git a/script/static-checks/check-banned-api.py b/script/static-checks/check-banned-api.py
new file mode 100755
index 0000000..0bfadeb
--- /dev/null
+++ b/script/static-checks/check-banned-api.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import argparse
+import os
+import re
+import sys
+import utils
+
+# File extensions to check
+VALID_FILE_EXTENSIONS = ('.c', '.S', '.h')
+
+# Paths inside the tree to ignore. Hidden folders and files are always ignored.
+# They mustn't end in '/'.
+IGNORED_FOLDERS = (
+ "tools",
+ "docs"
+)
+
+# List of ignored files in folders that aren't ignored
+IGNORED_FILES = ()
+
+# List of banned APIs to search for. The list is taken from the coding
+# guidelines in the TF-A repository.
+BANNED_APIS = ["strcpy", "wcscpy", "strncpy", "strcat", "wcscat", "strncat",
+ "sprintf", "vsprintf", "strtok", "atoi", "atol", "atoll",
+ "itoa", "ltoa", "lltoa"]
+BANNED_PATTERN = re.compile('|'.join(BANNED_APIS))
+
+COMMENTS_PATTERN = re.compile(r"//|/\*|\*/")
+
+
+def filter_comments(f):
+ '''
+ filter_comments(f) -> iterator for line number, filtered line
+
+ Given an iterable of lines (such as a file), return another iterable of
+    lines, with the comments removed.
+ '''
+
+ in_comment = False
+ for line_num, line in enumerate(f):
+ line = line.rstrip('\n')
+
+ temp = ""
+ breaker = len(line) if in_comment else 0
+ for match in COMMENTS_PATTERN.finditer(line):
+ content = match.group(0)
+ start, end = match.span()
+
+ if in_comment:
+ if content == "*/":
+ in_comment = False
+ breaker = end
+ else:
+ if content == "/*":
+ in_comment = True
+ temp += line[breaker:start]
+ breaker = len(line)
+ elif content == "//":
+ temp += line[breaker:start]
+ breaker = len(line)
+ break
+
+ temp += line[breaker:]
+ if temp:
+ yield line_num + 1, temp
+
+
+def file_check_banned_api(path, encoding='utf-8'):
+ '''
+ Reads all lines from a file in path and checks for any banned APIs.
+ The combined number of errors and uses of banned APIs is returned. If the
+ result is equal to 0, the file is clean and contains no banned APIs.
+ '''
+
+ count = 0
+
+ try:
+ f = open(path, encoding=encoding)
+ except FileNotFoundError:
+ print("ERROR: could not open " + path)
+ utils.print_exception_info()
+        return 1
+
+ try:
+ for line_num, line in filter_comments(f):
+ match = BANNED_PATTERN.search(line)
+ if match:
+ location = "line {} of file {}".format(line_num, path)
+ print("BANNED API: in " + location)
+
+                # NOTE: this preview of the error is not perfect if comments
+                # have been removed; however, it is good enough most of the
+                # time.
+ start, end = match.span()
+ print(">>> {}".format(line))
+ print(" {}^{}".format(start * " ", (end - start - 1) * "~"))
+
+ count += 1
+    except Exception:
+ print("ERROR: unexpected exception while parsing " + path)
+ utils.print_exception_info()
+ count += 1
+
+ f.close()
+
+ return count
+
+
+def get_tree_files():
+ '''
+ Get all files in the git repository
+ '''
+
+    # Get the list of files tracked by git.
+ (rc, stdout, stderr) = utils.shell_command(['git', 'ls-files'])
+ if rc != 0:
+ return False
+
+ lines = stdout.splitlines()
+ return lines
+
+
+def get_patch_files(base_commit, end_commit):
+ '''
+ Get all files that have changed in a given patch
+ '''
+
+    # Get the paths of files added, copied, modified, renamed or type-changed.
+ (rc, stdout, stderr) = utils.shell_command([
+ 'git', 'diff-tree', '--diff-filter=ACMRT', '-r', '--name-only',
+ base_commit, end_commit])
+
+ if rc != 0:
+ return False
+
+ paths = stdout.splitlines()
+ return paths
+
+
+def parse_cmd_line():
+ parser = argparse.ArgumentParser(
+ description="Check Banned APIs",
+ epilog="""
+        For each source file in the tree, check whether any of the banned
+        APIs listed in this script are used.
+ """
+ )
+
+ parser.add_argument("--tree", "-t",
+ help="""
+ Path to the source tree to check (default: %(default)s)
+ """,
+ default=os.curdir)
+ parser.add_argument("--patch", "-p",
+ help="""
+ Patch mode. Instead of checking all files in
+ the source tree, the script will consider only files
+ that are modified by the latest patch(es).
+ """,
+ action="store_true")
+ parser.add_argument("--from-ref",
+ help="""
+ Base commit in patch mode (default: %(default)s)
+ """,
+ default="master")
+ parser.add_argument("--to-ref",
+ help="""
+ Final commit in patch mode (default: %(default)s)
+ """,
+ default="HEAD")
+ parser.add_argument("--verbose", "-v",
+ help="Print verbose output",
+ action="store_true")
+ args = parser.parse_args()
+ return args
+
+
+if __name__ == "__main__":
+ args = parse_cmd_line()
+
+ os.chdir(args.tree)
+
+ if args.patch:
+ print("Checking files modified between patches " + args.from_ref +
+ " and " + args.to_ref + "...\n")
+ files = get_patch_files(args.from_ref, args.to_ref)
+ else:
+        print("Checking all files in the git repo " + os.path.abspath(args.tree) +
+ "...\n")
+ files = get_tree_files()
+
+ total_errors = 0
+ for filename in files:
+ ignored = utils.file_is_ignored(filename, VALID_FILE_EXTENSIONS,
+ IGNORED_FILES, IGNORED_FOLDERS)
+ if ignored:
+ if args.verbose:
+ print("INFO: Skipping ignored file " + filename)
+ continue
+
+ if args.verbose:
+ print("INFO: Checking " + filename)
+
+ total_errors += file_check_banned_api(filename)
+
+ print(str(total_errors) + " errors found")
+
+ if total_errors == 0:
+ sys.exit(0)
+ else:
+ sys.exit(1)
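
As a quick way to exercise the new filter_comments() generator above, the snippet below
loads the script as a module and feeds it a few made-up source lines. It assumes it is run
from the repository root, with script/static-checks/ added to sys.path so the module's own
'import utils' resolves; the sample input is illustrative only.

import importlib.util
import sys

# Make 'import utils' inside check-banned-api.py resolvable.
sys.path.insert(0, "script/static-checks")

spec = importlib.util.spec_from_file_location(
    "check_banned_api", "script/static-checks/check-banned-api.py")
check_banned_api = importlib.util.module_from_spec(spec)
spec.loader.exec_module(check_banned_api)

source = [
    "strcpy(a, b); /* block comment mentioning strcat() */\n",
    "// strncpy() only appears in a comment\n",
    "sprintf(buf, fmt);\n",
]

# Comment-only lines are dropped, so only lines 1 and 3 are yielded,
# with the comment text stripped from line 1.
for line_num, text in check_banned_api.filter_comments(source):
    print(line_num, repr(text))
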
diff --git a/script/static-checks/check-copyright.py b/script/static-checks/check-copyright.py
index 350381b..39863c7 100755
--- a/script/static-checks/check-copyright.py
+++ b/script/static-checks/check-copyright.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -8,8 +8,10 @@
"""
Check if a given file includes the copyright boiler plate.
This checker supports the following comment styles:
- * Used by .c, .h, .S, .dts and .dtsi files
- # Used by Makefile (including .mk)
+ /*
+ *
+ //
+ #
"""
import argparse
@@ -24,7 +26,9 @@
from itertools import islice
# File extensions to check
-VALID_FILE_EXTENSIONS = ('.c', '.S', '.h', 'Makefile', '.mk', '.dts', '.dtsi', '.ld')
+VALID_FILE_EXTENSIONS = ('.c', '.conf', '.dts', '.dtsi', '.editorconfig',
+ '.h', '.i', '.ld', 'Makefile', '.mk', '.msvc',
+ '.py', '.S', '.scat', '.sh')
# Paths inside the tree to ignore. Hidden folders and files are always ignored.
# They mustn't end in '/'.
@@ -41,47 +45,64 @@
)
# Supported comment styles (Python regex)
-COMMENT_PATTERN = '^(( \* ?)|(\# ?))'
+COMMENT_PATTERN = '(\*|/\*|\#|//)'
-# License pattern to match
-LICENSE_PATTERN = '''(?P<copyright_prologue>
-{0}Copyright \(c\) (?P<years>[0-9]{{4}}(-[0-9]{{4}})?), (Arm Limited|ARM Limited and Contributors)\. All rights reserved\.$
-{0}$
-{0}SPDX-License-Identifier: BSD-3-Clause$
-)'''.format(
- COMMENT_PATTERN
-)
+# Any combination of spaces and/or tabs
+SPACING = '[ \t]*'
-# Compiled license pattern
-RE_PATTERN = re.compile(LICENSE_PATTERN, re.MULTILINE)
+# Line must start with a comment and optional spacing
+LINE_START = '^' + SPACING + COMMENT_PATTERN + SPACING
+
+# Line must end with optional spacing
+EOL = SPACING + '$'
+
+# Year or period as YYYY or YYYY-YYYY
+TIME_PERIOD = '[0-9]{4}(-[0-9]{4})?'
+
+# Any string containing a valid license ID; text appended directly to the ID is not allowed
+LICENSE_ID = '.*(BSD-3-Clause|BSD-2-Clause-FreeBSD)([ ,.\);].*)?'
+
+# File must contain both lines to pass the check
+COPYRIGHT_LINE = LINE_START + 'Copyright' + '.*' + TIME_PERIOD + '.*' + EOL
+LICENSE_ID_LINE = LINE_START + 'SPDX-License-Identifier:' + LICENSE_ID + EOL
+
+# Compiled license patterns
+COPYRIGHT_PATTERN = re.compile(COPYRIGHT_LINE, re.MULTILINE)
+LICENSE_ID_PATTERN = re.compile(LICENSE_ID_LINE, re.MULTILINE)
+
+CURRENT_YEAR = str(datetime.datetime.now().year)
COPYRIGHT_OK = 0
COPYRIGHT_ERROR = 1
-COPYRIGHT_WARNING = 2
-def check_copyright(path):
+def check_copyright(path, args, encoding='utf-8'):
'''Checks a file for a correct copyright header.'''
- with open(path) as file_:
+ result = COPYRIGHT_OK
+
+ with open(path, encoding=encoding) as file_:
file_content = file_.read()
- if RE_PATTERN.search(file_content):
- return COPYRIGHT_OK
+ copyright_line = COPYRIGHT_PATTERN.search(file_content)
+ if not copyright_line:
+ print("ERROR: Missing copyright in " + file_.name)
+ result = COPYRIGHT_ERROR
+ elif CURRENT_YEAR not in copyright_line.group():
+ print("WARNING: Copyright is out of date in " + file_.name + ": '" +
+ copyright_line.group() + "'")
- for line in file_content.split('\n'):
- if 'SPDX-License-Identifier' in line:
- if ('BSD-3-Clause' in line or
- 'BSD-2-Clause-FreeBSD' in line):
- return COPYRIGHT_WARNING
- break
+ if not LICENSE_ID_PATTERN.search(file_content):
+ print("ERROR: License ID error in " + file_.name)
+ result = COPYRIGHT_ERROR
- return COPYRIGHT_ERROR
-
+ return result
def main(args):
print("Checking the copyrights in the code...")
- all_files_correct = True
+ if args.verbose:
+        print("Copyright regexp: " + COPYRIGHT_LINE)
+        print("License regexp: " + LICENSE_ID_LINE)
if args.patch:
print("Checking files modified between patches " + args.from_ref
@@ -90,7 +111,7 @@
(rc, stdout, stderr) = utils.shell_command(['git', 'diff',
'--diff-filter=ACMRT', '--name-only', args.from_ref, args.to_ref ])
if rc:
- return 1
+ return COPYRIGHT_ERROR
files = stdout.splitlines()
@@ -99,7 +120,7 @@
(rc, stdout, stderr) = utils.shell_command([ 'git', 'ls-files' ])
if rc:
- return 1
+ return COPYRIGHT_ERROR
files = stdout.splitlines()
@@ -117,30 +138,22 @@
if args.verbose:
print("Checking file " + f)
- rc = check_copyright(f)
+ rc = check_copyright(f, args)
if rc == COPYRIGHT_OK:
count_ok += 1
- elif rc == COPYRIGHT_WARNING:
- count_warning += 1
- print("WARNING: " + f)
elif rc == COPYRIGHT_ERROR:
count_error += 1
- print("ERROR: " + f)
print("\nSummary:")
- print("\t{} files analyzed".format(count_ok + count_warning + count_error))
+ print("\t{} files analyzed".format(count_ok + count_error))
- if count_warning == 0 and count_error == 0:
+ if count_error == 0:
print("\tNo errors found")
- return 0
-
- if count_error > 0:
+ return COPYRIGHT_OK
+ else:
print("\t{} errors found".format(count_error))
-
- if count_warning > 0:
- print("\t{} warnings found".format(count_warning))
-
+ return COPYRIGHT_ERROR
def parse_cmd_line(argv, prog_name):
parser = argparse.ArgumentParser(
@@ -166,9 +179,20 @@
Instead of checking all files in the source tree, the script will consider
only files that are modified by the latest patch(es).""",
action="store_true")
+
+ (rc, stdout, stderr) = utils.shell_command(['git', 'merge-base', 'HEAD', 'master'])
+ if rc:
+ print("Git merge-base command failed. Cannot determine base commit.")
+ sys.exit(rc)
+ merge_bases = stdout.splitlines()
+
+ # This should not happen, but it's better to be safe.
+ if len(merge_bases) > 1:
+ print("WARNING: Multiple merge bases found. Using the first one as base commit.")
+
parser.add_argument("--from-ref",
help="Base commit in patch mode (default: %(default)s)",
- default="master")
+ default=merge_bases[0])
parser.add_argument("--to-ref",
help="Final commit in patch mode (default: %(default)s)",
default="HEAD")
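
The reworked check above no longer matches one fixed license block; it looks for two
independent lines, a copyright line and an SPDX line, each of which may start with any
supported comment style. Below is a minimal reconstruction of those two patterns applied to
a made-up header, useful for trying the regexes in isolation (the real script additionally
warns when the current year is missing from the copyright line).

import re

COMMENT_PATTERN = r'(\*|/\*|\#|//)'
SPACING = r'[ \t]*'
LINE_START = '^' + SPACING + COMMENT_PATTERN + SPACING
EOL = SPACING + '$'
TIME_PERIOD = r'[0-9]{4}(-[0-9]{4})?'
LICENSE_ID = r'.*(BSD-3-Clause|BSD-2-Clause-FreeBSD)([ ,.\);].*)?'

COPYRIGHT_PATTERN = re.compile(
    LINE_START + 'Copyright' + '.*' + TIME_PERIOD + '.*' + EOL, re.MULTILINE)
LICENSE_ID_PATTERN = re.compile(
    LINE_START + 'SPDX-License-Identifier:' + LICENSE_ID + EOL, re.MULTILINE)

# Illustrative header only.
header = (
    "/*\n"
    " * Copyright (c) 2019-2020, Arm Limited. All rights reserved.\n"
    " *\n"
    " * SPDX-License-Identifier: BSD-3-Clause\n"
    " */\n"
)

print(bool(COPYRIGHT_PATTERN.search(header)))   # True
print(bool(LICENSE_ID_PATTERN.search(header)))  # True
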
diff --git a/script/static-checks/check-include-order.py b/script/static-checks/check-include-order.py
index 481ca42..4f605f3 100755
--- a/script/static-checks/check-include-order.py
+++ b/script/static-checks/check-include-order.py
@@ -1,304 +1,197 @@
#!/usr/bin/env python3
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
import argparse
import codecs
+import collections
import os
import re
+import subprocess
import sys
import utils
+import yaml
+import logging
# File extensions to check
-VALID_FILE_EXTENSIONS = ('.c', '.S', '.h')
+VALID_FILE_EXTENSIONS = (".c", ".S", ".h")
# Paths inside the tree to ignore. Hidden folders and files are always ignored.
# They mustn't end in '/'.
-IGNORED_FOLDERS = ("include/lib/stdlib",
- "include/lib/libc",
- "include/lib/libfdt",
- "lib/libfdt",
- "lib/libc",
- "lib/stdlib")
-
-# List of ignored files in folders that aren't ignored
-IGNORED_FILES = (
+IGNORED_FOLDERS = (
+ "include/lib/stdlib",
+ "include/lib/libc",
+ "include/lib/libfdt",
+ "lib/libfdt",
+ "lib/libc",
+ "lib/stdlib",
)
-def line_remove_comments(line):
- '''Remove C comments within a line. This code doesn't know if the line is
- commented in a multi line comment that involves more lines than itself.'''
+# List of ignored files in folders that aren't ignored
+IGNORED_FILES = ()
- # Multi line comments
- while line.find("/*") != -1:
- start_comment = line.find("/*")
- end_comment = line.find("*/")
- if end_comment != -1:
- end_comment = end_comment + 2 # Skip the "*/"
- line = line[ : start_comment ] + line[ end_comment : ]
- else: # The comment doesn't end this line.
- line = line[ : start_comment ]
-
- # Single line comments
- comment = line.find("//")
- if comment != -1:
- line = line[ : comment ]
-
- return line
+INCLUDE_RE = re.compile(r"^\s*#\s*include\s\s*(?P<path>[\"<].+[\">])")
+INCLUDE_RE_DIFF = re.compile(r"^\+?\s*#\s*include\s\s*(?P<path>[\"<].+[\">])")
-def line_get_include_path(line):
- '''It takes a line of code with an include directive and returns the file
- path with < or the first " included to tell them apart.'''
- if line.find('<') != -1:
- if line.find('.h>') == -1:
- return None
- inc = line[ line.find('<') : line.find('.h>') ]
- elif line.find('"') != -1:
- if line.find('.h"') == -1:
- return None
- inc = line[ line.find('"') : line.find('.h"') ]
- else:
- inc = None
-
- return inc
+def include_paths(lines, diff_mode=False):
+ """List all include paths in a file. Ignore starting `+` in diff mode."""
+ pattern = INCLUDE_RE_DIFF if diff_mode else INCLUDE_RE
+ matches = (pattern.match(line) for line in lines)
+ return [m.group("path") for m in matches if m]
-def file_get_include_list(path, _encoding='ascii'):
- '''Reads all lines from a file and returns a list of include paths. It
- tries to read the file in ASCII mode and UTF-8 if it fails. If it succeeds
- it will return a list of include paths. If it fails it will return None.'''
-
- inc_list = []
-
+def file_include_list(path):
+ """Return a list of all include paths in a file or None on failure."""
try:
- f = codecs.open(path, encoding=_encoding)
- except:
- print("ERROR:" + path + ":open() error!")
- utils.print_exception_info()
+ with codecs.open(path, encoding="utf-8") as f:
+ return include_paths(f)
+ except Exception:
+ logging.exception(path + ":error while parsing.")
return None
- # Allow spaces in between, but not comments.
- pattern = re.compile(r"^\s*#\s*include\s\s*[\"<]")
-
- fatal_error = False
-
- try:
- for line in f:
- if pattern.match(line):
- line_remove_comments(line)
- inc = line_get_include_path(line)
- if inc != None:
- inc_list.append(inc)
-
- except UnicodeDecodeError:
- # Capture exceptions caused by non-ASCII encoded files.
- if _encoding == 'ascii':
- # Reopen the file in UTF-8 mode. Python allows a file to be opened
- # more than once at a time. Exceptions for the recursively called
- # function will be handled inside it.
- # Output a warning.
- print("ERROR:" + path + ":Non-ASCII encoded file!")
- inc_list = file_get_include_list(path,'utf-8')
- else:
- # Already tried to decode in UTF-8 mode. Don't try again.
- print("ERROR:" + path + ":Failed to decode UTF-8!")
- fatal_error = True # Can't return while file is still open.
- utils.print_exception_info()
- except:
- print("ERROR:" + path + ":error while parsing!")
- utils.print_exception_info()
-
- f.close()
-
- if fatal_error:
- return None
-
- return inc_list
-
def inc_order_is_correct(inc_list, path, commit_hash=""):
- '''Returns true if the provided list is in order. If not, output error
- messages to stdout.'''
+ """Returns true if the provided list is in order. If not, output error
+ messages to stdout."""
# If there are less than 2 includes there's no need to check.
if len(inc_list) < 2:
return True
if commit_hash != "":
- commit_hash = commit_hash + ":" # For formatting
+ commit_hash = commit_hash + ":"
- sys_after_user = False
- sys_order_wrong = False
- user_order_wrong = False
+ # Get list of system includes from libc include directory.
+ libc_incs = [f for f in os.listdir("include/lib/libc") if f.endswith(".h")]
- # First, check if all system includes are before the user includes.
- previous_delimiter = '<' # Begin with system includes.
+ # First, check if all includes are in the appropriate group.
+ inc_group = "System"
+ incs = collections.defaultdict(list)
+ error_msgs = []
for inc in inc_list:
- delimiter = inc[0]
- if previous_delimiter == '<' and delimiter == '"':
- previous_delimiter = '"' # Started user includes.
- elif previous_delimiter == '"' and delimiter == '<':
- sys_after_user = True
+ if inc[1:-1] in libc_incs:
+ if inc_group != "System":
+ error_msgs.append(inc[1:-1] + " should be in system group, at the top")
+ elif (
+ "plat/" in inc
+ or "platform" in inc
+ or (inc.startswith('"') and "plat" in path)
+ ):
+ inc_group = "Platform"
+ elif inc_group in ("Project", "System"):
+ inc_group = "Project"
+ else:
+ error_msgs.append(
+ inc[1:-1] + " should be in project group, after system group"
+ )
+ incs[inc_group].append(inc[1:-1])
- # Then, check alphabetic order (system and user separately).
- usr_incs = []
- sys_incs = []
-
- for inc in inc_list:
- if inc.startswith('<'):
- sys_incs.append(inc)
- elif inc.startswith('"'):
- usr_incs.append(inc)
-
- if sorted(sys_incs) != sys_incs:
- sys_order_wrong = True
- if sorted(usr_incs) != usr_incs:
- user_order_wrong = True
+    # Then, check alphabetical order (system, project and platform separately).
+ if not error_msgs:
+ for name, inc_list in incs.items():
+ if sorted(inc_list) != inc_list:
+ error_msgs.append("{} includes not in order.".format(name))
# Output error messages.
- if sys_after_user:
- print("ERROR:" + commit_hash + path +
- ":System include after user include.")
- if sys_order_wrong:
- print("ERROR:" + commit_hash + path +
- ":System includes not in order.")
- if user_order_wrong:
- print("ERROR:" + commit_hash + path +
- ":User includes not in order.")
-
- return not ( sys_after_user or sys_order_wrong or user_order_wrong )
+ if error_msgs:
+ print(yaml.dump({commit_hash + path: error_msgs}))
+ return False
+ else:
+ return True
def file_is_correct(path):
- '''Checks whether the order of includes in the file specified in the path
- is correct or not.'''
-
- inc_list = file_get_include_list(path)
-
- if inc_list == None: # Failed to decode - Flag as incorrect.
- return False
-
- return inc_order_is_correct(inc_list, path)
+ """Checks whether the order of includes in the file specified in the path
+ is correct or not."""
+ inc_list = file_include_list(path)
+ return inc_list is not None and inc_order_is_correct(inc_list, path)
def directory_tree_is_correct():
- '''Checks all tracked files in the current git repository, except the ones
+ """Checks all tracked files in the current git repository, except the ones
explicitly ignored by this script.
- Returns True if all files are correct.'''
-
- # Get list of files tracked by git
- (rc, stdout, stderr) = utils.shell_command([ 'git', 'ls-files' ])
+ Returns True if all files are correct."""
+ (rc, stdout, stderr) = utils.shell_command(["git", "ls-files"])
if rc != 0:
return False
-
all_files_correct = True
-
- files = stdout.splitlines()
-
- for f in files:
- if not utils.file_is_ignored(f, VALID_FILE_EXTENSIONS, IGNORED_FILES, IGNORED_FOLDERS):
- if not file_is_correct(f):
- # Make the script end with an error code, but continue
- # checking files even if one of them is incorrect.
- all_files_correct = False
-
+ for f in stdout.splitlines():
+ if not utils.file_is_ignored(
+ f, VALID_FILE_EXTENSIONS, IGNORED_FILES, IGNORED_FOLDERS
+ ):
+ all_files_correct &= file_is_correct(f)
return all_files_correct
+def group_lines(patchlines, starting_with):
+ """Generator of (name, lines) almost the same as itertools.groupby
+
+ This function's control flow is non-trivial. In particular, the clearing
+ of the lines variable, marked with [1], is intentional and must come
+ after the yield. That's because we must yield the (name, lines) tuple
+ after we have found the name of the next section but before we assign the
+ name and start collecting lines. Further, [2] is required to yeild the
+    name and start collecting lines. Further, [2] is required to yield the
+    last block as there will not be a block start delimiter at the end of
+ """
+ lines = []
+ name = None
+ for line in patchlines:
+ if line.startswith(starting_with):
+ if name:
+ yield name, lines
+ name = line[len(starting_with) :]
+ lines = [] # [1]
+ else:
+ lines.append(line)
+ yield name, lines # [2]
+
+
+def group_files(commitlines):
+    """Generator of (file name, lines) almost the same as itertools.groupby"""
+ return group_lines(commitlines, "+++ b/")
+
+
+def group_commits(commitlines):
+    """Generator of (commit hash, lines) almost the same as itertools.groupby"""
+ return group_lines(commitlines, "commit ")
+
+
def patch_is_correct(base_commit, end_commit):
- '''Get the output of a git diff and analyse each modified file.'''
+ """Get the output of a git diff and analyse each modified file."""
# Get patches of the affected commits with one line of context.
- (rc, stdout, stderr) = utils.shell_command([ 'git', 'log', '--unified=1',
- '--pretty="commit %h"',
- base_commit + '..' + end_commit ])
+ gitlog = subprocess.run(
+ [
+ "git",
+ "log",
+ "--unified=1",
+ "--pretty=commit %h",
+ base_commit + ".." + end_commit,
+ ],
+ stdout=subprocess.PIPE,
+ )
- if rc != 0:
+ if gitlog.returncode != 0:
return False
- # Parse stdout to get all renamed, modified and added file paths.
- # Then, check order of new includes. The log output begins with each commit
- # comment and then a list of files and differences.
- lines = stdout.splitlines()
-
+ gitlines = gitlog.stdout.decode("utf-8").splitlines()
all_files_correct = True
-
- # All files without a valid extension are ignored. /dev/null is also used by
- # git patch to tell that a file has been deleted, and it doesn't have a
- # valid extension, so it will be used as a reset value.
- path = "/dev/null"
- commit_hash = "0"
- # There are only 2 states: commit msg or file. Start inside commit message
- # because the include list is not checked when changing from this state.
- inside_commit_message = True
- inc_list = []
-
- # Allow spaces in between, but not comments.
- # Check for lines with "+" or " " at the beginning (added or not modified)
- pattern = re.compile(r"^[+ ]\s*#\s*include\s\s*[\"<]")
-
- total_line_num = len(lines)
- # By iterating this way the loop can detect if it's the last iteration and
- # check the last file (the log doesn't have any indicator of the end)
- for i, line in enumerate(lines): # Save line number in i
-
- new_commit = False
- new_file = False
- log_last_line = i == total_line_num-1
-
- # 1. Check which kind of line this is. If this line means that the file
- # being analysed is finished, don't update the path or hash until after
- # checking the order of includes, they are used in error messages. Check
- # for any includes in case this is the last line of the log.
-
- # Line format: <"commit 0000000"> (quotes present in stdout)
- if line.startswith('"commit '): # New commit
- new_commit = True
- # Line format: <+++ b/path>
- elif line.startswith("+++ b/"): # New file.
- new_file = True
- # Any other line
- else: # Check for includes inside files, not in the commit message.
- if not inside_commit_message:
- if pattern.match(line):
- line_remove_comments(line)
- inc = line_get_include_path(line)
- if inc != None:
- inc_list.append(inc)
-
- # 2. Check order of includes if the file that was being analysed has
- # finished. Print hash and path of the analised file in the error
- # messages.
-
- if new_commit or new_file or log_last_line:
- if not inside_commit_message: # If a file is being analysed
- if not utils.file_is_ignored(path, VALID_FILE_EXTENSIONS,
- IGNORED_FILES, IGNORED_FOLDERS):
- if not inc_order_is_correct(inc_list, path, commit_hash):
- all_files_correct = False
- inc_list = [] # Reset the include list for the next file (if any)
-
- # 3. Update path or hash for the new file or commit. Update state.
-
- if new_commit: # New commit, save hash
- inside_commit_message = True # Enter commit message state
- commit_hash = line[ 8 : -1 ] # Discard last "
- elif new_file: # New file, save path.
- inside_commit_message = False # Save path, exit commit message state
- # A deleted file will appear as /dev/null so it will be ignored.
- path = line[ 6 : ]
-
+ for commit, comlines in group_commits(gitlines):
+ for path, lines in group_files(comlines):
+ all_files_correct &= inc_order_is_correct(
+ include_paths(lines, diff_mode=True), path, commit
+ )
return all_files_correct
-
def parse_cmd_line(argv, prog_name):
parser = argparse.ArgumentParser(
prog=prog_name,
@@ -309,23 +202,34 @@
directives are ordered alphabetically (as mandated by the Trusted
Firmware coding style). System header includes must come before user
header includes.
-""")
+""",
+ )
- parser.add_argument("--tree", "-t",
- help="Path to the source tree to check (default: %(default)s)",
- default=os.curdir)
- parser.add_argument("--patch", "-p",
- help="""
+ parser.add_argument(
+ "--tree",
+ "-t",
+ help="Path to the source tree to check (default: %(default)s)",
+ default=os.curdir,
+ )
+ parser.add_argument(
+ "--patch",
+ "-p",
+ help="""
Patch mode.
Instead of checking all files in the source tree, the script will consider
only files that are modified by the latest patch(es).""",
- action="store_true")
- parser.add_argument("--from-ref",
- help="Base commit in patch mode (default: %(default)s)",
- default="master")
- parser.add_argument("--to-ref",
- help="Final commit in patch mode (default: %(default)s)",
- default="HEAD")
+ action="store_true",
+ )
+ parser.add_argument(
+ "--from-ref",
+ help="Base commit in patch mode (default: %(default)s)",
+ default="master",
+ )
+ parser.add_argument(
+ "--to-ref",
+ help="Final commit in patch mode (default: %(default)s)",
+ default="HEAD",
+ )
args = parser.parse_args(argv)
return args
@@ -336,8 +240,13 @@
os.chdir(args.tree)
if args.patch:
- print("Checking files modified between patches " + args.from_ref
- + " and " + args.to_ref + "...")
+ print(
+ "Checking files modified between patches "
+ + args.from_ref
+ + " and "
+ + args.to_ref
+ + "..."
+ )
if not patch_is_correct(args.from_ref, args.to_ref):
sys.exit(1)
else:
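
The rewritten include-order check above pulls include directives straight out of
'git log --unified=1' output, accepting an optional leading '+' on added lines. A small
self-contained illustration with made-up diff lines follows; added and context lines are
picked up, removed lines are ignored.

import re

# Same pattern as INCLUDE_RE_DIFF above.
INCLUDE_RE_DIFF = re.compile(r"^\+?\s*#\s*include\s\s*(?P<path>[\"<].+[\">])")

# Made-up diff lines for illustration.
diff_lines = [
    '+#include <assert.h>',
    ' #include <stdint.h>',           # context line: still counted
    '+#include "platform_def.h"',
    '-#include <string.h>',           # removed line: ignored
]

paths = [m.group("path") for m in map(INCLUDE_RE_DIFF.match, diff_lines) if m]
print(paths)
# ['<assert.h>', '<stdint.h>', '"platform_def.h"']
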
diff --git a/script/static-checks/static-checks-banned-apis.sh b/script/static-checks/static-checks-banned-apis.sh
new file mode 100755
index 0000000..c4ed874
--- /dev/null
+++ b/script/static-checks/static-checks-banned-apis.sh
@@ -0,0 +1,43 @@
+#! /bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# static-checks-banned-apis.sh <path-to-root-folder> [patch]
+
+LOG_FILE=$(mktemp -t banned-api-check.XXXX)
+
+if [[ "$2" == "patch" ]]; then
+ echo "# Check for banned APIs in the patch"
+ TEST_CASE="Banned API check on patch(es)"
+ "$CI_ROOT/script/static-checks/check-banned-api.py" --tree "$1" \
+ --patch --from-ref origin/master \
+ &> "$LOG_FILE"
+else
+ echo "# Check for banned APIs in entire source tree"
+ TEST_CASE="Banned API check of the entire source tree"
+ "$CI_ROOT/script/static-checks/check-banned-api.py" --tree "$1" \
+ &> "$LOG_FILE"
+fi
+
+EXIT_VALUE=$?
+
+echo >> "$LOG_TEST_FILENAME"
+echo "****** $TEST_CASE ******" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+if [[ "$EXIT_VALUE" == 0 ]]; then
+ echo "Result : SUCCESS" >> "$LOG_TEST_FILENAME"
+else
+ echo "Result : FAILURE" >> "$LOG_TEST_FILENAME"
+ echo >> "$LOG_TEST_FILENAME"
+ cat "$LOG_FILE" >> "$LOG_TEST_FILENAME"
+fi
+echo >> "$LOG_TEST_FILENAME"
+
+rm -f "$LOG_FILE"
+
+exit "$EXIT_VALUE"
+
+
diff --git a/script/static-checks/static-checks-coding-style-line-endings.sh b/script/static-checks/static-checks-coding-style-line-endings.sh
index 87e149c..ae7a6db 100755
--- a/script/static-checks/static-checks-coding-style-line-endings.sh
+++ b/script/static-checks/static-checks-coding-style-line-endings.sh
@@ -11,13 +11,16 @@
LOG_FILE=`mktemp -t common.XXXX`
-# For all the source and doc files (*.h,*.c,*.S,*.mk,*.md)
+# For all the source and doc files
# We only return the files that contain CRLF
find "." -\( \
-name '*.S' -or \
-name '*.c' -or \
-name '*.h' -or \
- -name '*.md' -or \
+ -name '*.i' -or \
+ -name '*.dts' -or \
+ -name '*.dtsi' -or \
+ -name '*.rst' -or \
-name 'Makefile' -or \
-name '*.mk' \
-\) -exec grep --files-with-matches $'\r$' {} \; &> "$LOG_FILE"
diff --git a/script/static-checks/static-checks.sh b/script/static-checks/static-checks.sh
index c9b980c..6bae729 100755
--- a/script/static-checks/static-checks.sh
+++ b/script/static-checks/static-checks.sh
@@ -87,6 +87,24 @@
fi
echo
+# Check for any Banned API usage
+
+echo 'Checking Banned API usage...'
+echo
+if [ "$IS_CONTINUOUS_INTEGRATION" == 1 ]; then
+ "$CI_ROOT"/script/static-checks/static-checks-banned-apis.sh . patch
+else
+    "$CI_ROOT"/script/static-checks/static-checks-banned-apis.sh .
+fi
+if [ "$?" != 0 ]; then
+ echo "Banned API check: FAILURE"
+ ((ERROR_COUNT++))
+else
+ echo "Banned API check: PASS"
+fi
+echo
+
+
# Check error count
if [ "$ERROR_COUNT" != 0 ] || [ "$WARNING_COUNT" != 0 ]; then
diff --git a/script/static-checks/utils.py b/script/static-checks/utils.py
index c6a7fdd..548b64e 100644
--- a/script/static-checks/utils.py
+++ b/script/static-checks/utils.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -41,24 +41,24 @@
print(textwrap.indent(str(sys.exc_info()[1])," "))
-def decode_string(string):
- '''Tries to decode a binary string into ASCII. It gives an error if it
- finds non-ASCII characters, but it will return the string converted
- anyway, ignoring these characters.'''
+def decode_string(string, encoding='utf-8'):
+ '''Tries to decode a binary string. It gives an error if it finds
+ invalid characters, but it will return the string converted anyway,
+ ignoring these characters.'''
try:
- string = string.decode("ascii")
+ string = string.decode(encoding)
except UnicodeDecodeError:
- # Capture exceptions caused by non-ASCII characters.
- print("ERROR:Non-ASCII characters detected.")
+ # Capture exceptions caused by invalid characters.
+ print("ERROR:Non-{} characters detected.".format(encoding.upper()))
print_exception_info()
- string = string.decode("ascii", "ignore")
+ string = string.decode(encoding, "ignore")
return string
def shell_command(cmd_line):
'''Executes a shell command. Returns (returncode, stdout, stderr), where
- stdout and stderr are ASCII-encoded strings.'''
+ stdout and stderr are strings.'''
try:
p = subprocess.Popen(cmd_line, stdout=subprocess.PIPE,