Sync scripts with Arm internal CI
This patch syncs the utility scripts in the script
directory with the Arm internal CI.
Where a path update is required, the changes have
been commented out.
Signed-off-by: Zelalem <zelalem.aweke@arm.com>
Change-Id: Ifa4bd805e345184d1378e8423e5f878a2fbfbcd4
diff --git a/script/build_package.sh b/script/build_package.sh
index 69eedfd..33d8258 100755
--- a/script/build_package.sh
+++ b/script/build_package.sh
@@ -18,19 +18,25 @@
fi
# Directory to where the source code e.g. for Trusted Firmware is checked out.
-tf_root="${tf_root:-$workspace/trusted_firmware}"
-tftf_root="${tftf_root:-$workspace/trusted_firmware_tf}"
-scp_root="${scp_root:-$workspace/scp}"
+export tf_root="${tf_root:-$workspace/trusted_firmware}"
+export tftf_root="${tftf_root:-$workspace/trusted_firmware_tf}"
+export scp_root="${scp_root:-$workspace/scp}"
+scp_tools_root="${scp_tools_root:-$workspace/scp_tools}"
+cc_root="${cc_root:-$ccpathspec}"
+
+scp_tf_tools_root="$scp_tools_root/scp_tf_tools"
# Refspecs
tf_refspec="$TF_REFSPEC"
tftf_refspec="$TFTF_REFSPEC"
scp_refspec="$SCP_REFSPEC"
+scp_tools_commit="${SCP_TOOLS_COMMIT:-master}"
test_config="${TEST_CONFIG:?}"
test_group="${TEST_GROUP:?}"
build_configs="${BUILD_CONFIG:?}"
run_config="${RUN_CONFIG:?}"
+cc_config="${CC_ENABLE:-}"
archive="$artefacts"
build_log="$artefacts/build.log"
@@ -179,6 +185,9 @@
*/mcp_romfw/*)
cp $file $to/mcp_rom.$ext
;;
+ */scp_romfw_bypass/*)
+ cp $file $to/scp_rom_bypass.$ext
+ ;;
*)
echo "Unknown SCP binary: $file" >&2
;;
@@ -533,6 +542,179 @@
)
}
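+# Clone the SCP-tools repository at $scp_tools_commit, unless a checkout
+# already exists, and initialise its submodules (including scmi).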
+clone_scp_tools() {
+
+ if [ ! -d "$scp_tools_root" ]; then
+ echo "Cloning SCP-tools ... $scp_tools_commit" |& log_separator
+
+ clone_url="${SCP_TOOLS_CHECKOUT_LOC:-$scp_tools_src_repo_url}" \
+ where="$scp_tools_root" \
+ refspec="${scp_tools_commit}"
+ clone_repo &>>"$build_log"
+ else
+ echo "Already cloned SCP-tools ..." |& log_separator
+ fi
+
+ show_head "$scp_tools_root"
+
+ cd "$scp_tools_root"
+
+ echo "Updating submodules"
+
+ git submodule init
+
+ git submodule update
+
+ cd "scmi"
+
+ git show --quiet --no-color | sed 's/^/ > /g'
+}
+
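+# Clone the TF-A tree used by the SCP-tools tests and check out its
+# dev/pedro/juno branch.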
+clone_tf_for_scp_tools() {
+ scp_tools_arm_tf="$scp_tools_root/arm-tf"
+
+ if [ ! -d "$scp_tools_arm_tf" ]; then
+ echo "Cloning TF-4-SCP-tools ..." |& log_separator
+
+ clone_url="$tf_for_scp_tools_src_repo_url"
+ where="$scp_tools_arm_tf"
+
+ git clone "$clone_url" "$where"
+
+ cd "$scp_tools_arm_tf"
+
+ git checkout --track origin/dev/pedro/juno
+
+ git show --quiet --no-color | sed 's/^/ > /g'
+
+ else
+ echo "Already cloned TF-4-SCP-tools ..." |& log_separator
+ fi
+}
+
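+# Build the SCMI test library against the TF-4-SCP-tools tree, using its
+# TF-A headers and freestanding AArch64 compiler flags.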
+build_scmi_lib_scp_tools() {
+ (
+ cd "$scp_tools_root"
+
+ cd "scmi"
+
+ scp_tools_arm_tf="$scp_tools_root/arm-tf"
+
+ cross_compile="/arm/pdsw/tools/gcc-linaro-6.2.1-2016.11-x86_64_aarch64-linux-gnu/bin/aarch64-linux-gnu-"
+
+ std_libs="-I$scp_tools_arm_tf/include/common"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/common/tbbr"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/drivers/arm"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/lib"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/lib/aarch64"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/lib/stdlib"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/lib/stdlib/sys"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/lib/xlat_tables"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/plat/common"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/plat/arm/common"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/plat/arm/css/common"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/plat/arm/board/common"
+ std_libs="$std_libs -I$scp_tools_arm_tf/include/plat/arm/soc/common"
+ std_libs="$std_libs -I$scp_tools_arm_tf/plat/arm/board/juno/include"
+
+ cflags="-Og -g"
+ cflags="$cflags -mgeneral-regs-only"
+ cflags="$cflags -mstrict-align"
+ cflags="$cflags -nostdinc"
+ cflags="$cflags -fno-inline"
+ cflags="$cflags -ffreestanding"
+ cflags="$cflags -ffunction-sections"
+ cflags="$cflags -fdata-sections"
+ cflags="$cflags -DAARCH64"
+ cflags="$cflags -DPRId32=\"ld\""
+
+ cflags="$cflags $std_libs"
+
+ protocols="power,system_power,performance,sensor"
+
+ echo "Building SCMI library (SCP-tools) ..."
+
+ make "CROSS_COMPILE=$cross_compile" \
+ "CFLAGS=$cflags" \
+ "PROTOCOLS=$protocols" \
+ "clean" \
+ "all"
+ )
+}
+
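+# Build TF-A with the bare-metal SCMI tests for Juno; only the debug build
+# is performed and its BL1/BL2/BL31 images archived.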
+build_tf_for_scp_tools() {
+
+ cd "$scp_tools_root/arm-tf"
+
+ cross_compile="/arm/pdsw/tools/gcc-linaro-6.2.1-2016.11-x86_64_aarch64-linux-gnu/bin/aarch64-linux-gnu-"
+
+ if [ "$1" = "release" ]; then
+ echo "Build TF-4-SCP-Tools rls..."
+ else
+ echo "Build TF-4-SCP-Tools dbg..."
+
+ make realclean
+
+ make "BM_TEST=scmi" \
+ "ARM_BOARD_OPTIMISE_MEM=1" \
+ "BM_CSS=juno" \
+ "CSS_USE_SCMI_SDS_DRIVER=1" \
+ "PLAT=juno" \
+ "DEBUG=1" \
+ "PLATFORM=juno" \
+ "CROSS_COMPILE=$cross_compile" \
+ "BM_WORKSPACE=$scp_tools_root/baremetal"
+
+ archive_file "build/juno/debug/bl1.bin"
+
+ archive_file "build/juno/debug/bl2.bin"
+
+ archive_file "build/juno/debug/bl31.bin"
+ fi
+}
+
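+# Package BL31, a dummy BL33 and the SCP RAM firmware into a FIP, provided
+# the SCP debug firmware has been built.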
+build_fip_for_scp_tools() {
+
+ cd "$scp_tools_root/arm-tf"
+
+ cross_compile="/arm/pdsw/tools/gcc-linaro-6.2.1-2016.11-x86_64_aarch64-linux-gnu/bin/aarch64-linux-gnu-"
+
+ if [ ! -d "$scp_root/build/product/juno/scp_ramfw/debug" ]; then
+ make fiptool
+ echo "Make FIP 4 SCP-Tools rls..."
+
+ else
+ make fiptool
+ echo "Make FIP 4 SCP-Tools dbg..."
+
+ make "PLAT=juno" \
+ "all" \
+ "fip" \
+ "DEBUG=1" \
+ "CROSS_COMPILE=$cross_compile" \
+ "BL31=$scp_tools_root/arm-tf/build/juno/debug/bl31.bin" \
+ "BL33=$scp_tools_root/baremetal/dummy_bl33" \
+ "SCP_BL2=$scp_root/build/product/juno/scp_ramfw/debug/bin/firmware.bin"
+
+ archive_file "$scp_tools_root/arm-tf/build/juno/debug/fip.bin"
+ fi
+}
+
+build_cc() {
+# Building code coverage plugin
+ ARM_DIR=/arm
+ pvlibversion=$(/arm/devsys-tools/abs/detag "SysGen:PVModelLib:$model_version::trunk")
+ PVLIB_HOME=$warehouse/SysGen/PVModelLib/$model_version/${pvlibversion}/external
+ if [ -n "$(find "$ARM_DIR" -maxdepth 0 -type d -empty 2>/dev/null)" ]; then
+ echo "Error: Arm warehouse not mounted. Please mount the Arm warehouse to your /arm local folder"
+		exit 1
+ fi # Error if arm warehouse not found
+ cd "$ccpathspec/scripts/tools/code_coverage/fastmodel_baremetal/bmcov"
+
+ make -C model-plugin PVLIB_HOME=$PVLIB_HOME &>>"$build_log"
+}
+
+
# Set metadata for the whole package so that it can be used by both Jenkins and
# shell
set_package_var() {
@@ -772,12 +954,14 @@
tf_config="$(echo "$build_configs" | awk -F, '{print $1}')"
tftf_config="$(echo "$build_configs" | awk -F, '{print $2}')"
scp_config="$(echo "$build_configs" | awk -F, '{print $3}')"
+scp_tools_config="$(echo "$build_configs" | awk -F, '{print $4}')"
test_config_file="$ci_root/group/$test_group/$test_config"
tf_config_file="$ci_root/tf_config/$tf_config"
tftf_config_file="$ci_root/tftf_config/$tftf_config"
scp_config_file="$ci_root/scp_config/$scp_config"
+scp_tools_config_file="$ci_root/scp_tools_config/$scp_tools_config"
# File that keeps track of applied patches
tf_patch_record="$workspace/tf_patches"
@@ -812,6 +996,15 @@
echo
fi
+if ! config_valid "$scp_tools_config"; then
+ scp_tools_config=
+else
+ echo "SCP Tools config:"
+ echo
+ sort "$scp_tools_config_file" | sed '/^\s*$/d;s/^/\t/'
+ echo
+fi
+
if ! config_valid "$run_config"; then
run_config=
fi
@@ -864,6 +1057,15 @@
show_head "$scp_root"
fi
+if [ -n "$cc_config" ] ; then
+ if [ "$cc_config" -eq 1 ] && assert_can_git_clone "cc_root"; then
+ # Copy code coverage repository
+ echo "Cloning Code Coverage..."
+ git clone -q $cc_src_repo_url cc_plugin --depth 1 -b $cc_src_repo_tag > /dev/null
+ show_head "$cc_root"
+ fi
+fi
+
if [ "$run_config" ]; then
# Get candidates for run config
run_config_candiates="$("$ci_root/script/gen_run_config_candidates.py" \
@@ -902,14 +1104,21 @@
mkdir "$build_archive"
if [ "$mode" = "debug" ]; then
+ export bin_mode="debug"
DEBUG=1
else
+ export bin_mode="release"
DEBUG=0
fi
# Perform builds in a subshell so as not to pollute the current and
# subsequent builds' environment
+ if config_valid "$cc_config"; then
+ # Build code coverage plugin
+ build_cc
+ fi
+
# SCP build
if config_valid "$scp_config"; then
(
@@ -928,7 +1137,6 @@
echo "Building SCP Firmware ($mode) ..." |& log_separator
build_scp
-
to="$archive" collect_scp_artefacts
echo "##########"
@@ -936,6 +1144,26 @@
)
fi
+ # SCP-tools build
+ if config_valid "$scp_tools_config"; then
+ (
+ echo "##########"
+
+ archive="$build_archive"
+ scp_tools_build_root="$scp_tools_root/build"
+
+ clone_scp_tools
+
+ echo "##########"
+ echo
+
+ echo "##########"
+ clone_tf_for_scp_tools
+ echo "##########"
+ echo
+ )
+ fi
+
# TFTF build
if config_valid "$tftf_config"; then
(
@@ -1020,14 +1248,7 @@
call_hook post_package
if upon "$jenkins_run" && upon "$artefacts_receiver" && [ -d "artefacts" ]; then
- tar -cJf "artefacts.tar.xz" "artefacts"
- where="$artefacts_receiver/$test_group/$test_config/artefacts.tar.xz"
- where+="?j=$JOB_NAME&b=$BUILD_NUMBER"
- if wget -q --method=PUT --body-file="artefacts.tar.xz" "$where"; then
- echo "Artefacts submitted to $where."
- else
- echo "Error submitting artefacts to $where."
- fi
+ source "$CI_ROOT/script/send_artefacts.sh" "artefacts"
fi
echo
diff --git a/script/clone_repos.sh b/script/clone_repos.sh
index 53b94bb..f552aa9 100755
--- a/script/clone_repos.sh
+++ b/script/clone_repos.sh
@@ -36,6 +36,11 @@
echo "$1=$2" >> "$param_file"
}
+# Emit a parameter for code coverage metadata
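+# e.g. code_cov_emit_param "trusted-firmware" "URL" "$url" appends
+# "CC_TRUSTED_FIRMWARE_URL=$url" to the parameter file.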
+code_cov_emit_param() {
+ emit_param "CC_$(echo ${1^^} | tr '-' _)_$2" "$3"
+}
+
meta_data() {
echo "$1" >> "$clone_data"
}
@@ -84,12 +89,12 @@
# account credentials for the latter.
if [ "$gerrit_url" == "review.trustedfirmware.org" ]; then
ssh -p 29418 -i "$tforg_key" "$tforg_user@$gerrit_url" gerrit \
- review "$GERRIT_CHANGE_NUMBER,$GERRIT_PATCHSET_NUMBER" \
- --message "'$(cat ${msg_file:?})'"
+ review "$GERRIT_CHANGE_NUMBER,$GERRIT_PATCHSET_NUMBER" \
+ --message "'$(cat ${msg_file:?})'"
else
ssh -p 29418 "$gerrit_url" gerrit review \
- "$GERRIT_CHANGE_NUMBER,$GERRIT_PATCHSET_NUMBER" \
- --message "'$(cat ${msg_file:?})'"
+ "$GERRIT_CHANGE_NUMBER,$GERRIT_PATCHSET_NUMBER" \
+ --message "'$(cat ${msg_file:?})'"
fi
}
@@ -157,7 +162,9 @@
if [ -d "$reference_dir" ]; then
ref_repo="--reference $reference_dir"
fi
+ echo "$ref_repo $url $name $branch"
git clone -q $ref_repo "$url" "$name" &>"$clone_log"
+ code_cov_emit_param "${name}" "URL" "${url}"
stat="on branch master"
pushd "$name"
@@ -195,7 +202,7 @@
refspec="$("$ci_root/script/translate_refspec.py" -p "$name" \
-u "$gerrit_user" -k "$gerrit_keyfile" \
-s "$gerrit_server" "topic:$topic" 2>/dev/null)" \
- || ret="$?"
+ || ret="$?"
if [ "$ret" = 0 ]; then
{
git fetch -q origin "$refspec"
@@ -217,12 +224,14 @@
fi
fi
+ code_cov_emit_param "${name}" "REFSPEC" "${refspec}"
# Generate meta data. Eliminate any quoting in commit subject as it
# might cause problems when reporting back to Gerrit.
meta_data "$name: $stat"
meta_data " $(git show --quiet --format=%H): $(git show --quiet --format=%s | sed "s/[\"']/ /g")"
meta_data " Commit date: $(git show --quiet --format=%cd)"
meta_data
+ code_cov_emit_param "${name}" "COMMIT" "$(git show --quiet --format=%H)"
# Calculate elapsed seconds
s_after="$(date +%s)"
@@ -396,10 +405,17 @@
# Clone Trusted Firmware TF repository
url="$tftf_src_repo_url" name="trusted-firmware-tf" ref="TFTF_REFSPEC" \
loc="TFTF_CHECKOUT_LOC" \
- gerrit_test_groups="tftf-master-build tftf-master-fwu tftf-l1" \
+ gerrit_test_groups="tftf-l1-build tftf-l1-fvp tftf-l1-spm" \
clone_and_sync
fi
+# Clone code coverage repository if code coverage is enabled
+if not_upon "$no_cc"; then
+ pushd "$ci_scratch"
+ git clone -q $cc_src_repo_url cc_plugin -b $cc_src_repo_tag 2> /dev/null
+ popd
+fi
+
SCP_REFSPEC="${scp_refspec:-$SCP_REFSPEC}"
if upon "$clone_scp"; then
# Clone SCP Firmware repository
@@ -463,6 +479,8 @@
fi
fi
+echo "SCP_TOOLS_COMMIT=$SCP_TOOLS_COMMIT" >> "$param_file"
+
# Copy environment file to ci_scratch for sub-jobs' access
cp "$env_file" "$ci_scratch"
cp "$param_file" "$ci_scratch"
diff --git a/script/coverity_parser.py b/script/coverity_parser.py
index 5cb31aa..5048348 100644
--- a/script/coverity_parser.py
+++ b/script/coverity_parser.py
@@ -13,8 +13,25 @@
_rule_exclusions = [
+ "MISRA C-2012 Rule 2.4",
+ "MISRA C-2012 Rule 2.5",
+ "MISRA C-2012 Rule 2.7",
+ "MISRA C-2012 Rule 5.1",
+ "MISRA C-2012 Rule 5.8",
"MISRA C-2012 Rule 8.6",
- "MISRA C-2012 Rule 5.1"
+ "MISRA C-2012 Rule 8.7",
+ "MISRA C-2012 Rule 11.4",
+ "MISRA C-2012 Rule 11.5",
+ "MISRA C-2012 Rule 15.1",
+ "MISRA C-2012 Rule 15.5",
+ "MISRA C-2012 Rule 15.6",
+ "MISRA C-2012 Rule 16.1",
+ "MISRA C-2012 Rule 16.3",
+ "MISRA C-2012 Rule 17.1",
+ "MISRA C-2012 Rule 21.6",
+ "MISRA C-2012 Directive 4.6",
+ "MISRA C-2012 Directive 4.8",
+ "MISRA C-2012 Directive 4.9"
]
# The following classification of rules and directives include 'MISRA C:2012
diff --git a/script/coverity_wrapper.sh b/script/coverity_wrapper.sh
index fed41e4..97b5345 100644
--- a/script/coverity_wrapper.sh
+++ b/script/coverity_wrapper.sh
@@ -111,6 +111,7 @@
if [ -z "$cov_force_commit" ]; then
"$ci_root/script/get_latest_snapshot.py" \\
--host "$coverity_host" \\
+ --https-port "$coverity_port" \\
--file "$golden_snapshot" \\
--description "*$cov_checker*" \\
--version "\$(git show -q --format=%H)" \\
@@ -188,10 +189,10 @@
cat <<EOF | name="stream-setup" create_snippet
if cov-manage-im --mode streams --add --set "name:$stream_name" \\
--auth-key-file "$auth_file" \\
- --host "$coverity_host"; then
+ --host "$coverity_host" --ssl --port "$coverity_port"; then
cov-manage-im --mode projects --name "Arm Trusted Firmware" --update \\
--insert "stream:$stream_name" --auth-key-file "$auth_file" \\
- --host "$coverity_host"
+ --host "$coverity_host" --ssl --port "$coverity_port"
fi
EOF
@@ -241,6 +242,7 @@
if [ ! -f "$golden_snapshot" -a -z "$cov_force_commit" ]; then
"$ci_root/script/get_latest_snapshot.py" \\
--host "$coverity_host" \\
+ --https-port "$coverity_port" \\
--file "$golden_snapshot" \\
--description "*$cov_checker*" \\
--version "\$(git show -q --format=%H)" \\
@@ -251,6 +253,7 @@
if [ ! -f "$golden_snapshot" -o -n "$cov_force_commit" ]; then
cd -P "$golden_repo"
cov-commit-defects --dir "$cov_dir/golden" --host "$coverity_host" \\
+ --https-port "$coverity_port" \\
--stream "$stream_name" --auth-key-file "$auth_file" \\
--version "\$(git show -q --format=%H)" \\
--description "$description" \\
@@ -291,6 +294,7 @@
if [ "$cov_force_commit" ]; then
cd -P "$branch_repo"
cov-commit-defects --dir "$cov_dir/branch" --host "$coverity_host" \\
+ --https-port "$coverity_port" \\
--stream "$stream_name" --description "$description" \\
--version "\$(git show -q --format=%H%)" \\
--auth-key-file "$auth_file" \\
@@ -306,6 +310,7 @@
cat <<EOF | name="branch-report-compare" \
deps="golden-cov-commit-defects branch-cov-analyze" create_snippet
cov-commit-defects --dir "$cov_dir/branch" --host "$coverity_host" \\
+ --https-port "$coverity_port" \\
--stream "$stream_name" --auth-key-file "$auth_file" \\
--preview-report-v2 "$cov_dir/report.json" \\
--comparison-snapshot-id "\$(cat $golden_snapshot)"
@@ -317,6 +322,7 @@
deps="branch-cov-commit-defects stream-setup branch-cov-analyze" \
create_snippet
cov-commit-defects --dir "$cov_dir/branch" --host "$coverity_host" \\
+ --https-port "$coverity_port" \\
--stream "$stream_name" --auth-key-file "$auth_file" \\
--preview-report-v2 "$cov_dir/report.json"
EOF
diff --git a/script/gen_juno_tftf_yaml.sh b/script/gen_juno_tftf_yaml.sh
index 4229a47..2b0b7b6 100755
--- a/script/gen_juno_tftf_yaml.sh
+++ b/script/gen_juno_tftf_yaml.sh
@@ -75,7 +75,7 @@
# For each test case, LAVA looks for a string which includes the testcase
# name and result.
- pattern: "(?s)> Executing '(?P<test_case_id>.+)'(.*) TEST COMPLETE\\\s+(?P<result>(Skipped|Passed|Failed|Crashed))"
+ pattern: "(?s)> Executing '(?P<test_case_id>.+?(?='))'(.*) TEST COMPLETE\\\s+(?P<result>(Skipped|Passed|Failed|Crashed))"
# Teach to LAVA how to interpret the TFTF Tests results.
fixupdict:
diff --git a/script/gen_run_config_candidates.py b/script/gen_run_config_candidates.py
index d9e3f9b..407742a 100755
--- a/script/gen_run_config_candidates.py
+++ b/script/gen_run_config_candidates.py
@@ -35,6 +35,8 @@
sys.exit(0)
fragments = run_config.split("-")
+if 'bmcov' in fragments:
+ fragments.remove('bmcov')
exit_code = 0
# Stems are fragments, except with everything after dot removed.
@@ -63,5 +65,4 @@
print("warning: {}: no matches for fragment '{}'".format(
arg, fragment), file=sys.stderr)
exit_code = 1
-
sys.exit(exit_code)
diff --git a/script/gen_test_desc.py b/script/gen_test_desc.py
index 5913c32..d729a9f 100755
--- a/script/gen_test_desc.py
+++ b/script/gen_test_desc.py
@@ -35,9 +35,9 @@
# config_list contains build configs as read from the test config
if group.startswith("scp-"):
# SCP configs would be specified in the following format:
- # scp_config, tf_config, tftf_config
+ # scp_config, tf_config, tftf_config, scp_tools
# Reshuffle them into the canonical format
- config_list = [config_list[1], config_list[2], config_list[0]]
+ config_list = [config_list[1], config_list[2], config_list[0], config_list[3]]
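+        # e.g. [scp, tf, tftf, scp_tools] -> [tf, tftf, scp, scp_tools]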
return config_list
@@ -48,9 +48,9 @@
build_config, run_config = test.split(":")
# Test descriptors are always generated in the following order:
- # tf_config, tftf_config, scp_config
+ # tf_config, tftf_config, scp_config, scp_tools
# Fill missing configs to the right with "nil".
- config_list = (build_config.split(",") + ["nil"] * 3)[:3]
+ config_list = (build_config.split(",") + ["nil"] * 4)[:4]
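+    # e.g. "tf,tftf" -> ["tf", "tftf", "nil", "nil"]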
# Perform any group-specific translation on the config
config_list = translate_build_config(group, config_list)
diff --git a/script/get_latest_snapshot.py b/script/get_latest_snapshot.py
index 3ddd377..a806730 100755
--- a/script/get_latest_snapshot.py
+++ b/script/get_latest_snapshot.py
@@ -22,6 +22,7 @@
# Get coverity host from environment, or fall back to the default one.
coverity_host = os.environ.get("coverity_host", "coverity.cambridge.arm.com")
+coverity_port = os.environ.get("coverity_port", "8443")
parser = argparse.ArgumentParser()
@@ -29,6 +30,7 @@
parser.add_argument("--file", dest="output_file", help="Output file. Mandatory")
parser.add_argument("--old", default=10, help="Max snapshot age in days")
parser.add_argument("--host", default=coverity_host, help="Coverity server")
+parser.add_argument("--https-port", default=coverity_port, help="Coverity Secure port")
parser.add_argument("--version", help="Snapshot version filter")
parser.add_argument("stream_name")
@@ -49,7 +51,7 @@
sys.exit(0)
# SOAP magic stuff
-client = suds.client.Client("http://{}/ws/v9/configurationservice?wsdl".format(opts.host))
+client = suds.client.Client("https://{}/ws/v9/configurationservice?wsdl".format(opts.host))
security = suds.wsse.Security()
token = suds.wsse.UsernameToken(user, password)
security.tokens.append(token)
diff --git a/script/parse_lava_job.py b/script/parse_lava_job.py
index 9e331e3..aa85ca3 100755
--- a/script/parse_lava_job.py
+++ b/script/parse_lava_job.py
@@ -32,6 +32,53 @@
print()
sys.exit(0)
+def scmi_parse_phase(results, case, special_case):
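+    """Count pass/fail results for the LAVA phase named by 'case'. Failures
+    of 'special_case' are counted separately as known false failures and do
+    not fail the job."""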
+ pass_count = 0
+ fail_count = 0
+ false_fail_count = 0
+
+ for phase in results:
+ if phase["metadata"]["definition"] == case:
+ if phase["metadata"]["result"] == "pass":
+ pass_count += 1
+ else:
+ if special_case != "" and phase["metadata"]["case"] == special_case:
+ false_fail_count += 1
+ else:
+ fail_count += 1
+
+ print(case)
+ print("pass_count " + str(pass_count))
+ print("fail_count " + str(fail_count))
+ if special_case != "":
+ print("false_fail_count " + str(false_fail_count))
+ if fail_count > 0:
+ report_job_failure()
+
+def parse_scp_scmi_results():
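+    """Check each SCMI test phase, discarding known false failures."""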
+ #
+ # All protocols but sensor
+ #
+ scmi_parse_phase(results, "scp-scmi-non-sensor-protocol", "")
+
+ #
+ # Protocol sensor, not reading_get
+ #
+ scmi_parse_phase(results, "scp-scmi-sensor-protocol", "")
+
+ #
+ # Protocol sensor, only reading_get
+ # In this case, we know that the reading from the sensor VBIG will fail
+    # because the big cluster is OFF. Thus we simply discard that false failure.
+ #
+ JUNO_PVT_SENSOR_VOLT_BIG = "1"
+ scmi_parse_phase(results, "scp-scmi-sensor-protocol-get", JUNO_PVT_SENSOR_VOLT_BIG)
+
+ #
+ # Parse the final overall results
+ # We already know the false failures, discard them
+ #
+ scmi_parse_phase(results, "scp-scmi", "sensor_reading_get_sync_allsensorid_")
def parse_cmd_line():
parser = argparse.ArgumentParser(description="Parse results from LAVA. "
@@ -48,7 +95,7 @@
args = parse_cmd_line()
with open(args.file) as fd:
- results = yaml.load(fd)
+ results = yaml.safe_load(fd)
# Iterate through results. Find the element whose name is "job" in the
# "lava" suite. It contains the result of the overall LAVA run.
@@ -66,11 +113,24 @@
if args.payload_type == "linux":
report_job_success()
- # If we've run TFTF tests instead, then do some further parsing.
+ # If we've run TFTF or SCMI tests instead, then do some further parsing.
+ elif args.payload_type == "tftf":
+ session = "TFTF"
+ suite = "tftf"
+ elif args.payload_type == "scp_tests_scmi":
+ session = "SCMI"
+ suite = "scp-scmi"
+ parse_scp_scmi_results()
+
+ print("All tests passed.")
+ report_job_success()
+ else:
+ raise Exception("Payload not defined")
+
# First make sure the test session finished.
for phase in filter(lambda p: p["name"] == "lava-test-monitor", results):
if phase["result"] != "pass":
- print("TFTF test session failed. Did it time out?")
+ print(session + " test session failed. Did it time out?")
report_job_failure()
break
else:
@@ -79,7 +139,7 @@
# Then count the number of tests that failed/skipped.
test_failures = 0
test_skips = 0
- for phase in filter(lambda p: p["suite"] == "tftf", results):
+ for phase in filter(lambda p: p["suite"] == suite, results):
metadata = phase["metadata"]
testcase_name = metadata["case"]
testcase_result = metadata["result"]
diff --git a/script/parse_test.sh b/script/parse_test.sh
index 4086ace..03e828b 100755
--- a/script/parse_test.sh
+++ b/script/parse_test.sh
@@ -36,6 +36,7 @@
emit_env "RUN_CONFIG" "$run_config"
emit_env "TEST_CONFIG" "$test_config"
emit_env "TEST_GROUP" "$test_group"
+emit_env "CC_ENABLE" "$cc_enable"
# Default binary mode. This would usually come from the build package for FVP
# runs, but is provided for LAVA jobs.
diff --git a/script/run_local_ci.sh b/script/run_local_ci.sh
index 9376305..56d531c 100755
--- a/script/run_local_ci.sh
+++ b/script/run_local_ci.sh
@@ -1,6 +1,6 @@
#!/bin/bash
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -85,7 +85,7 @@
export TEST_DESC="$(basename "$test_file")"
cp "$test_file" "$id/TEST_DESC"
- workspace="$id" test_desc="$test_file" "$ci_root/script/parse_test.sh"
+ workspace="$id" test_desc="$test_file" cc_enable="$cc_enable" "$ci_root/script/parse_test.sh"
set -a
source "$id/env"
@@ -109,10 +109,15 @@
# Unset make flags for build script
MAKEFLAGS=
+ if [ $import_cc -eq 1 ]; then
+ # Path to plugin if there is no local reference
+ cc_path_spec=$workspace/cc_plugin
+ fi
+
case "$action" in
"build")
echo "building: $config_string" >&5
- if ! bash $minus_x "$ci_root/script/build_package.sh" \
+ if ! ccpathspec="$cc_path_spec" bash $minus_x "$ci_root/script/build_package.sh" \
>&6 2>&1; then
{
print_failure "$config_string (build)"
@@ -129,23 +134,60 @@
if echo "$RUN_CONFIG" | grep -q "^fvp" && \
not_upon "$skip_runs"; then
echo "running: $config_string" >&5
- if bash $minus_x "$ci_root/script/run_package.sh" \
+ if [ -n "$cc_enable" ]; then
+			# Enable code coverage during the run
+ if cc_enable="$cc_enable" trace_file_prefix=tr \
+ coverage_trace_plugin=$cc_path_spec/scripts/tools/code_coverage/fastmodel_baremetal/bmcov/model-plugin/CoverageTrace.so \
+ bash $minus_x "$ci_root/script/run_package.sh" \
>&6 2>&1; then
- if grep -q -e "--BUILD UNSTABLE--" \
+ if grep -q -e "--BUILD UNSTABLE--" \
"$log_file"; then
- print_unstable "$config_string" >&5
+ print_unstable "$config_string" >&5
+ else
+ print_success "$config_string" >&5
+ if [ -d "$workspace/artefacts/release" ] && \
+ [ -f "$workspace/artefacts/release/tr-FVP_Base_RevC_2xAEMv8A.cluster0.cpu0.log" ]; then
+ cp $workspace/artefacts/release/*.log $workspace/artefacts/debug
+ fi
+					# Set environment variables for the code coverage run
+ OBJDUMP=$TOOLCHAIN/bin/aarch64-none-elf-objdump \
+ READELF=$TOOLCHAIN/bin/aarch64-none-elf-readelf \
+ ELF_FOLDER=$workspace/artefacts/debug \
+ TRACE_FOLDER=$workspace/artefacts/debug \
+ workspace=$workspace \
+ TRACE_PREFIX=tr python \
+ $cc_path_spec/scripts/tools/code_coverage/fastmodel_baremetal/bmcov/report/gen-coverage-report.py --config \
+ $cc_path_spec/scripts/tools/code_coverage/fastmodel_baremetal/bmcov/report/config_atf.py
+ fi
+ exit 0
else
- print_success "$config_string" >&5
+ {
+ print_failure "$config_string (run)"
+ if [ "$console_file" ]; then
+ echo " see $console_file"
+ fi
+ } >&5
+ exit 1
fi
- exit 0
else
- {
- print_failure "$config_string (run)"
- if [ "$console_file" ]; then
- echo " see $console_file"
+ if bash $minus_x "$ci_root/script/run_package.sh" \
+ >&6 2>&1; then
+ if grep -q -e "--BUILD UNSTABLE--" \
+ "$log_file"; then
+ print_unstable "$config_string" >&5
+ else
+ print_success "$config_string" >&5
+ fi
+ exit 0
+ else
+ {
+ print_failure "$config_string (run)"
+ if [ "$console_file" ]; then
+ echo " see $console_file"
+ fi
+ } >&5
+ exit 1
fi
- } >&5
- exit 1
fi
else
if grep -q -e "--BUILD UNSTABLE--" \
@@ -214,7 +256,7 @@
tforg_user="${tforg_user:?}"
else
tftf_root="$(readlink -f $tftf_root)"
- tf_refspec=
+ tftf_refspec=
in_green "Using local work tree for TFTF"
let "++local_count"
fi
@@ -228,6 +270,23 @@
let "++local_count"
fi
+if [ -n "$cc_enable" ]; then
+ in_green "Code Coverage enabled"
+ if [ -z "$TOOLCHAIN" ]; then
+ in_red "TOOLCHAIN not set for code coverage: ex: export TOOLCHAIN=<path to toolchain>/gcc-arm-<gcc version>-x86_64-aarch64-none-elf"
+ exit 1
+ fi
+ if [ -n "$cc_path" ]; then
+ in_green "Code coverage plugin path specified"
+ cc_path_spec=$cc_path
+ import_cc=0
+ else
+ in_red "Code coverage plugin path not specified"
+ cc_path_spec="$workspace/cc_plugin"
+ import_cc=1
+ fi
+fi
+
# User preferences
user_test_run="$test_run"
user_dont_clean="$dont_clean"
@@ -240,6 +299,8 @@
export parallel
export test_run=0
export primary_live=0
+export cc_path_spec
+export import_cc
rm -rf "$workspace"
mkdir -p "$workspace"
@@ -255,8 +316,15 @@
clone_scp=0
fi
-# Use clone_repos.sh to clone and share repositores that aren't local.
-no_tf="$tf_root" no_tftf="$tftf_root" no_ci="$ci_root" \
+# Enable code coverage and determine whether a local plugin is available
+if upon "$cc_enable" && not_upon "$cc_path"; then
+ no_cc_t=1
+else
+ no_cc_t=0
+fi
+
+# Use clone_repos.sh to clone and share repositories that aren't local.
+no_tf="$tf_root" no_tftf="$tftf_root" no_ci="$ci_root" no_cc="$import_cc" \
bash $minus_x "$ci_root/script/clone_repos.sh"
set -a
diff --git a/script/run_package.sh b/script/run_package.sh
index 382c336..a9d91d2 100755
--- a/script/run_package.sh
+++ b/script/run_package.sh
@@ -1,6 +1,6 @@
#!/bin/bash
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -24,14 +24,22 @@
kill_and_reap() {
local gid
-
# Kill an active process. Ignore errors
[ "$1" ] || return 0
kill -0 "$1" &>/dev/null || return 0
+	# Kill $1's process group (its children); assumes $1 is a group leader
+ kill -- "-$1" &>/dev/null || true
# Kill the group
- gid="$(awk '{print $5}' < /proc/$1/stat)"
- kill -SIGKILL -- "-$gid" &>/dev/null || true
+ { gid="$(awk '{print $5}' < /proc/$1/stat)";} 2>/dev/null || return
+	# The code coverage plugin needs the kill signal to be propagated
+	# to it so that it can save its trace statistics.
+ if [ "${COVERAGE_ON}" == "1" ] || [ -n "$cc_enable" ]; then
+ kill -SIGTERM -- "-$gid" &>/dev/null || true
+ else
+ kill -SIGKILL -- "-$gid" &>/dev/null || true
+ fi
wait "$gid" &>/dev/null || true
}
@@ -44,7 +52,19 @@
set +e
while read pid; do
pid="$(cat $pid)"
- kill_and_reap "$pid"
+		# Forcefully killing the model process prevents it from printing
+		# its statistics (host CPU time spent in user and system). Kill
+		# the model with SIGINT (^C) instead so that they are printed.
+ if [ "$pid" == "$model_pid" ]; then
+ model_cid=$(pgrep -P "$model_pid" | xargs)
+ # ignore errors
+ kill -SIGINT "$model_cid" &>/dev/null || true
+ # Allow some time to print data
+ sleep 2
+ else
+ kill_and_reap "$pid"
+ fi
done < <(find -name '*.pid')
popd
}
@@ -114,11 +134,54 @@
model_out="$run_root/model_log.txt"
run_sh="$run_root/run.sh"
+
# Generate run.sh
echo "$model_path \\" > "$run_sh"
sed '/^\s*$/d' < model_params | sort | sed 's/^/\t/;s/$/ \\/' >> "$run_sh"
+
+if [ "${COVERAGE_ON}" == "1" ]; then
+ # Adding code coverage plugin
+ echo -e "\t-C TRACE.CoverageTrace.trace-file-prefix=$trace_file_prefix \\" >> "$run_sh"
+ echo -e "\t--plugin $coverage_trace_plugin \\" >> "$run_sh"
+fi
echo -e "\t\"\$@\"" >> "$run_sh"
+# Running reboot/shutdown tests requires storing state in non-volatile
+# memory (NVM) across reboots. On FVP, NVM is not persistent across reboot,
+# so NVM is saved to a file ($NVM_file) when running the model via the
+# run.sh shell script.
+# If TFTF reboot/shutdown tests are enabled, run the FVP model 10 times,
+# feeding it the file containing the NVM state generated by the previous
+# run. Note that this file also includes the FIP image.
+if upon "$run_tftf_reboot_tests" = "1"; then
+ tftf_reboot_tests="$run_root/tftf_reboot_tests.sh"
+
+	# Generate the tftf_reboot_tests script; it is similar to run.sh.
+	# The model runs the reboot and shutdown tests 10 times. The UART
+	# log files generated by the FVP model are overwritten across
+	# reboots, so copy their contents at the end of each run.
+ echo "cat $uart0_file >> UART0.log" >>"$tftf_reboot_tests"
+ echo "cat $uart1_file >> UART1.log" >>"$tftf_reboot_tests"
+ cat <<EOF >>"$tftf_reboot_tests"
+
+for i in {1..10}
+do
+EOF
+ cat "$run_sh" >> "$tftf_reboot_tests"
+ echo "cat $uart0_file >> UART0.log" >>"$tftf_reboot_tests"
+ echo "cat $uart1_file >> UART1.log" >>"$tftf_reboot_tests"
+ cat <<EOF >>"$tftf_reboot_tests"
+done
+EOF
+ #Replace fip.bin with file $NVM_file
+ sed -i 's/fip.bin/'"$NVM_file"'/' "$tftf_reboot_tests"
+
+ echo "TFTF Reboot/Shutdown Tests Enabled"
+ cat "$tftf_reboot_tests" >> "$run_sh"
+ rm "$tftf_reboot_tests"
+fi
+
echo "Model command line:"
echo
cat "$run_sh"
@@ -151,8 +214,8 @@
die "Failed to launch model!"
fi
done
-model_pid="$(cat "$pid_dir/model.pid")"
+model_pid="$(cat $pid_dir/model.pid)"
ports_output="$(mktempfile)"
if not_upon "$ports_script"; then
# Default AWK script to parse model ports
@@ -217,6 +280,11 @@
exit 1
fi
+if ! [ -x "$(command -v expect)" ]; then
+ echo "Error: Expect is not installed."
+ exit 1
+fi
+
# The wait loop above exited after model port numbers have been parsed. The
# script's output is ready to be sourced now.
declare -a ports
@@ -230,7 +298,7 @@
# Launch expect scripts for all UARTs
uarts=0
-for u in $(seq 0 $num_uarts | tac); do
+for u in $(seq 0 $(( $num_uarts - 1 )) | tac); do
script="run/uart$u/expect"
if [ -f "$script" ]; then
script="$(cat "$script")"
@@ -243,6 +311,7 @@
if [ "$u" = "$primary_uart" ]; then
die "No primary UART script!"
else
+ echo "Ignoring UART$u (no expect script provided)."
continue
fi
fi
@@ -289,7 +358,6 @@
let "uarts += 1"
echo "Tracking UART$u$star with $script; timeout $timeout."
done
-
# Wait here long 'enough' for expect scripts to connect to ports; then
# let the model proceed
sleep 2
@@ -331,19 +399,9 @@
fi
if upon "$jenkins_run" && upon "$artefacts_receiver" && [ -d "$workspace/run" ]; then
- pushd "$workspace"
- run_archive="run.tar.xz"
- tar -cJf "$run_archive" "run"
- where="$artefacts_receiver/${TEST_GROUP:?}/${TEST_CONFIG:?}/$run_archive"
- where+="?j=$JOB_NAME&b=$BUILD_NUMBER"
- if wget -q --method=PUT --body-file="$run_archive" "$where"; then
- echo "Run logs submitted to $where."
- else
- echo "Error submitting run logs to $where."
- fi
- popd
+ source "$CI_ROOT/script/send_artefacts.sh" "run"
fi
-exit "$result"
+exit "$result"
# vim: set tw=80 sw=8 noet:
diff --git a/script/run_tools_through_valgrind.sh b/script/run_tools_through_valgrind.sh
index c437868..47f8b11 100755
--- a/script/run_tools_through_valgrind.sh
+++ b/script/run_tools_through_valgrind.sh
@@ -1,6 +1,6 @@
#!/bin/bash
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -15,7 +15,7 @@
# Build TF-A to get blx.bin images and the tools (fiptool and cert_create)
# Debug build enabled so that valgrind has access to source file line numbers
-if ! make CROSS_COMPILE="aarch64-linux-gnu-" all fiptool certtool DEBUG=1 V=1 \
+if ! make CROSS_COMPILE="aarch64-none-elf-" all fiptool certtool DEBUG=1 V=1 \
&>"$workspace/build.log"; then
echo "Error building tools; see archived build.log"
exit 1
diff --git a/script/static-checks/check-banned-api.py b/script/static-checks/check-banned-api.py
new file mode 100755
index 0000000..0bfadeb
--- /dev/null
+++ b/script/static-checks/check-banned-api.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import argparse
+import os
+import re
+import sys
+import utils
+
+# File extensions to check
+VALID_FILE_EXTENSIONS = ('.c', '.S', '.h')
+
+# Paths inside the tree to ignore. Hidden folders and files are always ignored.
+# They mustn't end in '/'.
+IGNORED_FOLDERS = (
+ "tools",
+ "docs"
+)
+
+# List of ignored files in folders that aren't ignored
+IGNORED_FILES = ()
+
+# Regular expression for searching for the banned APIs. The list is taken
+# from the coding guidelines in the TF-A repo.
+BANNED_APIS = ["strcpy", "wcscpy", "strncpy", "strcat", "wcscat", "strncat",
+ "sprintf", "vsprintf", "strtok", "atoi", "atol", "atoll",
+ "itoa", "ltoa", "lltoa"]
+BANNED_PATTERN = re.compile('|'.join(BANNED_APIS))
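+# Note that the pattern matches these names anywhere in a line, including
+# as substrings of longer identifiers.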
+
+COMMENTS_PATTERN = re.compile(r"//|/\*|\*/")
+
+
+def filter_comments(f):
+ '''
+ filter_comments(f) -> iterator for line number, filtered line
+
+ Given an iterable of lines (such as a file), return another iterable of
+ lines, with the comments filtered out and removed.
+ '''
+
+ in_comment = False
+ for line_num, line in enumerate(f):
+ line = line.rstrip('\n')
+
+ temp = ""
+ breaker = len(line) if in_comment else 0
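+        # 'breaker' is the index of the first character of the line that
+        # has not yet been copied into 'temp'.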
+ for match in COMMENTS_PATTERN.finditer(line):
+ content = match.group(0)
+ start, end = match.span()
+
+ if in_comment:
+ if content == "*/":
+ in_comment = False
+ breaker = end
+ else:
+ if content == "/*":
+ in_comment = True
+ temp += line[breaker:start]
+ breaker = len(line)
+ elif content == "//":
+ temp += line[breaker:start]
+ breaker = len(line)
+ break
+
+ temp += line[breaker:]
+ if temp:
+ yield line_num + 1, temp
+
+
+def file_check_banned_api(path, encoding='utf-8'):
+ '''
+ Reads all lines from a file in path and checks for any banned APIs.
+ The combined number of errors and uses of banned APIs is returned. If the
+ result is equal to 0, the file is clean and contains no banned APIs.
+ '''
+
+ count = 0
+
+ try:
+ f = open(path, encoding=encoding)
+ except FileNotFoundError:
+ print("ERROR: could not open " + path)
+ utils.print_exception_info()
+        return 1  # count an unreadable file as one error
+
+ try:
+ for line_num, line in filter_comments(f):
+ match = BANNED_PATTERN.search(line)
+ if match:
+ location = "line {} of file {}".format(line_num, path)
+ print("BANNED API: in " + location)
+
+ # NOTE: this preview of the error is not perfect if comments
+                # have been removed - however, it is good enough most of the
+ # time.
+ start, end = match.span()
+ print(">>> {}".format(line))
+ print(" {}^{}".format(start * " ", (end - start - 1) * "~"))
+
+ count += 1
+ except:
+ print("ERROR: unexpected exception while parsing " + path)
+ utils.print_exception_info()
+ count += 1
+
+ f.close()
+
+ return count
+
+
+def get_tree_files():
+ '''
+ Get all files in the git repository
+ '''
+
+    # List all files tracked by git in the current repository.
+ (rc, stdout, stderr) = utils.shell_command(['git', 'ls-files'])
+ if rc != 0:
+ return False
+
+ lines = stdout.splitlines()
+ return lines
+
+
+def get_patch_files(base_commit, end_commit):
+ '''
+ Get all files that have changed in a given patch
+ '''
+
+    # List the files added, copied, modified or renamed between the commits.
+ (rc, stdout, stderr) = utils.shell_command([
+ 'git', 'diff-tree', '--diff-filter=ACMRT', '-r', '--name-only',
+ base_commit, end_commit])
+
+ if rc != 0:
+ return False
+
+ paths = stdout.splitlines()
+ return paths
+
+
+def parse_cmd_line():
+ parser = argparse.ArgumentParser(
+ description="Check Banned APIs",
+ epilog="""
+ For each source file in the tree, checks whether Banned APIs as
+ described in the list are used or not.
+ """
+ )
+
+ parser.add_argument("--tree", "-t",
+ help="""
+ Path to the source tree to check (default: %(default)s)
+ """,
+ default=os.curdir)
+ parser.add_argument("--patch", "-p",
+ help="""
+ Patch mode. Instead of checking all files in
+ the source tree, the script will consider only files
+ that are modified by the latest patch(es).
+ """,
+ action="store_true")
+ parser.add_argument("--from-ref",
+ help="""
+ Base commit in patch mode (default: %(default)s)
+ """,
+ default="master")
+ parser.add_argument("--to-ref",
+ help="""
+ Final commit in patch mode (default: %(default)s)
+ """,
+ default="HEAD")
+ parser.add_argument("--verbose", "-v",
+ help="Print verbose output",
+ action="store_true")
+ args = parser.parse_args()
+ return args
+
+
+if __name__ == "__main__":
+ args = parse_cmd_line()
+
+ os.chdir(args.tree)
+
+ if args.patch:
+ print("Checking files modified between patches " + args.from_ref +
+ " and " + args.to_ref + "...\n")
+ files = get_patch_files(args.from_ref, args.to_ref)
+ else:
+ print("Checking all files git repo " + os.path.abspath(args.tree) +
+ "...\n")
+ files = get_tree_files()
+
+ total_errors = 0
+ for filename in files:
+ ignored = utils.file_is_ignored(filename, VALID_FILE_EXTENSIONS,
+ IGNORED_FILES, IGNORED_FOLDERS)
+ if ignored:
+ if args.verbose:
+ print("INFO: Skipping ignored file " + filename)
+ continue
+
+ if args.verbose:
+ print("INFO: Checking " + filename)
+
+ total_errors += file_check_banned_api(filename)
+
+ print(str(total_errors) + " errors found")
+
+ if total_errors == 0:
+ sys.exit(0)
+ else:
+ sys.exit(1)
diff --git a/script/static-checks/check-copyright.py b/script/static-checks/check-copyright.py
index 350381b..39863c7 100755
--- a/script/static-checks/check-copyright.py
+++ b/script/static-checks/check-copyright.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -8,8 +8,10 @@
"""
Check if a given file includes the copyright boiler plate.
This checker supports the following comment styles:
- * Used by .c, .h, .S, .dts and .dtsi files
- # Used by Makefile (including .mk)
+ /*
+ *
+ //
+ #
"""
import argparse
@@ -24,7 +26,9 @@
from itertools import islice
# File extensions to check
-VALID_FILE_EXTENSIONS = ('.c', '.S', '.h', 'Makefile', '.mk', '.dts', '.dtsi', '.ld')
+VALID_FILE_EXTENSIONS = ('.c', '.conf', '.dts', '.dtsi', '.editorconfig',
+ '.h', '.i', '.ld', 'Makefile', '.mk', '.msvc',
+ '.py', '.S', '.scat', '.sh')
# Paths inside the tree to ignore. Hidden folders and files are always ignored.
# They mustn't end in '/'.
@@ -41,47 +45,64 @@
)
# Supported comment styles (Python regex)
-COMMENT_PATTERN = '^(( \* ?)|(\# ?))'
+COMMENT_PATTERN = '(\*|/\*|\#|//)'
-# License pattern to match
-LICENSE_PATTERN = '''(?P<copyright_prologue>
-{0}Copyright \(c\) (?P<years>[0-9]{{4}}(-[0-9]{{4}})?), (Arm Limited|ARM Limited and Contributors)\. All rights reserved\.$
-{0}$
-{0}SPDX-License-Identifier: BSD-3-Clause$
-)'''.format(
- COMMENT_PATTERN
-)
+# Any combination of spaces and/or tabs
+SPACING = '[ \t]*'
-# Compiled license pattern
-RE_PATTERN = re.compile(LICENSE_PATTERN, re.MULTILINE)
+# Line must start with a comment and optional spacing
+LINE_START = '^' + SPACING + COMMENT_PATTERN + SPACING
+
+# Line end with optional spacing
+EOL = SPACING + '$'
+
+# Year or period as YYYY or YYYY-YYYY
+TIME_PERIOD = '[0-9]{4}(-[0-9]{4})?'
+
+# Any string with valid license ID, don't allow adding postfix
+LICENSE_ID = '.*(BSD-3-Clause|BSD-2-Clause-FreeBSD)([ ,.\);].*)?'
+
+# File must contain both lines to pass the check
+COPYRIGHT_LINE = LINE_START + 'Copyright' + '.*' + TIME_PERIOD + '.*' + EOL
+LICENSE_ID_LINE = LINE_START + 'SPDX-License-Identifier:' + LICENSE_ID + EOL
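+# e.g. " * Copyright (c) 2019-2020, Arm Limited. All rights reserved." and
+# " * SPDX-License-Identifier: BSD-3-Clause" match the two patterns.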
+
+# Compiled license patterns
+COPYRIGHT_PATTERN = re.compile(COPYRIGHT_LINE, re.MULTILINE)
+LICENSE_ID_PATTERN = re.compile(LICENSE_ID_LINE, re.MULTILINE)
+
+CURRENT_YEAR = str(datetime.datetime.now().year)
COPYRIGHT_OK = 0
COPYRIGHT_ERROR = 1
-COPYRIGHT_WARNING = 2
-def check_copyright(path):
+def check_copyright(path, args, encoding='utf-8'):
'''Checks a file for a correct copyright header.'''
- with open(path) as file_:
+ result = COPYRIGHT_OK
+
+ with open(path, encoding=encoding) as file_:
file_content = file_.read()
- if RE_PATTERN.search(file_content):
- return COPYRIGHT_OK
+ copyright_line = COPYRIGHT_PATTERN.search(file_content)
+ if not copyright_line:
+ print("ERROR: Missing copyright in " + file_.name)
+ result = COPYRIGHT_ERROR
+ elif CURRENT_YEAR not in copyright_line.group():
+ print("WARNING: Copyright is out of date in " + file_.name + ": '" +
+ copyright_line.group() + "'")
- for line in file_content.split('\n'):
- if 'SPDX-License-Identifier' in line:
- if ('BSD-3-Clause' in line or
- 'BSD-2-Clause-FreeBSD' in line):
- return COPYRIGHT_WARNING
- break
+ if not LICENSE_ID_PATTERN.search(file_content):
+ print("ERROR: License ID error in " + file_.name)
+ result = COPYRIGHT_ERROR
- return COPYRIGHT_ERROR
-
+ return result
def main(args):
print("Checking the copyrights in the code...")
- all_files_correct = True
+ if args.verbose:
+ print ("Copyright regexp: " + COPYRIGHT_LINE)
+ print ("License regexp: " + LICENSE_ID_LINE)
if args.patch:
print("Checking files modified between patches " + args.from_ref
@@ -90,7 +111,7 @@
(rc, stdout, stderr) = utils.shell_command(['git', 'diff',
'--diff-filter=ACMRT', '--name-only', args.from_ref, args.to_ref ])
if rc:
- return 1
+ return COPYRIGHT_ERROR
files = stdout.splitlines()
@@ -99,7 +120,7 @@
(rc, stdout, stderr) = utils.shell_command([ 'git', 'ls-files' ])
if rc:
- return 1
+ return COPYRIGHT_ERROR
files = stdout.splitlines()
@@ -117,30 +138,22 @@
if args.verbose:
print("Checking file " + f)
- rc = check_copyright(f)
+ rc = check_copyright(f, args)
if rc == COPYRIGHT_OK:
count_ok += 1
- elif rc == COPYRIGHT_WARNING:
- count_warning += 1
- print("WARNING: " + f)
elif rc == COPYRIGHT_ERROR:
count_error += 1
- print("ERROR: " + f)
print("\nSummary:")
- print("\t{} files analyzed".format(count_ok + count_warning + count_error))
+ print("\t{} files analyzed".format(count_ok + count_error))
- if count_warning == 0 and count_error == 0:
+ if count_error == 0:
print("\tNo errors found")
- return 0
-
- if count_error > 0:
+ return COPYRIGHT_OK
+ else:
print("\t{} errors found".format(count_error))
-
- if count_warning > 0:
- print("\t{} warnings found".format(count_warning))
-
+ return COPYRIGHT_ERROR
def parse_cmd_line(argv, prog_name):
parser = argparse.ArgumentParser(
@@ -166,9 +179,20 @@
Instead of checking all files in the source tree, the script will consider
only files that are modified by the latest patch(es).""",
action="store_true")
+
+ (rc, stdout, stderr) = utils.shell_command(['git', 'merge-base', 'HEAD', 'master'])
+ if rc:
+ print("Git merge-base command failed. Cannot determine base commit.")
+ sys.exit(rc)
+ merge_bases = stdout.splitlines()
+
+ # This should not happen, but it's better to be safe.
+ if len(merge_bases) > 1:
+ print("WARNING: Multiple merge bases found. Using the first one as base commit.")
+
parser.add_argument("--from-ref",
help="Base commit in patch mode (default: %(default)s)",
- default="master")
+ default=merge_bases[0])
parser.add_argument("--to-ref",
help="Final commit in patch mode (default: %(default)s)",
default="HEAD")
diff --git a/script/static-checks/check-include-order.py b/script/static-checks/check-include-order.py
index 481ca42..4f605f3 100755
--- a/script/static-checks/check-include-order.py
+++ b/script/static-checks/check-include-order.py
@@ -1,304 +1,197 @@
#!/usr/bin/env python3
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
import argparse
import codecs
+import collections
import os
import re
+import subprocess
import sys
import utils
+import yaml
+import logging
# File extensions to check
-VALID_FILE_EXTENSIONS = ('.c', '.S', '.h')
+VALID_FILE_EXTENSIONS = (".c", ".S", ".h")
# Paths inside the tree to ignore. Hidden folders and files are always ignored.
# They mustn't end in '/'.
-IGNORED_FOLDERS = ("include/lib/stdlib",
- "include/lib/libc",
- "include/lib/libfdt",
- "lib/libfdt",
- "lib/libc",
- "lib/stdlib")
-
-# List of ignored files in folders that aren't ignored
-IGNORED_FILES = (
+IGNORED_FOLDERS = (
+ "include/lib/stdlib",
+ "include/lib/libc",
+ "include/lib/libfdt",
+ "lib/libfdt",
+ "lib/libc",
+ "lib/stdlib",
)
-def line_remove_comments(line):
- '''Remove C comments within a line. This code doesn't know if the line is
- commented in a multi line comment that involves more lines than itself.'''
+# List of ignored files in folders that aren't ignored
+IGNORED_FILES = ()
- # Multi line comments
- while line.find("/*") != -1:
- start_comment = line.find("/*")
- end_comment = line.find("*/")
- if end_comment != -1:
- end_comment = end_comment + 2 # Skip the "*/"
- line = line[ : start_comment ] + line[ end_comment : ]
- else: # The comment doesn't end this line.
- line = line[ : start_comment ]
-
- # Single line comments
- comment = line.find("//")
- if comment != -1:
- line = line[ : comment ]
-
- return line
+INCLUDE_RE = re.compile(r"^\s*#\s*include\s\s*(?P<path>[\"<].+[\">])")
+INCLUDE_RE_DIFF = re.compile(r"^\+?\s*#\s*include\s\s*(?P<path>[\"<].+[\">])")
-def line_get_include_path(line):
- '''It takes a line of code with an include directive and returns the file
- path with < or the first " included to tell them apart.'''
- if line.find('<') != -1:
- if line.find('.h>') == -1:
- return None
- inc = line[ line.find('<') : line.find('.h>') ]
- elif line.find('"') != -1:
- if line.find('.h"') == -1:
- return None
- inc = line[ line.find('"') : line.find('.h"') ]
- else:
- inc = None
-
- return inc
+def include_paths(lines, diff_mode=False):
+ """List all include paths in a file. Ignore starting `+` in diff mode."""
+ pattern = INCLUDE_RE_DIFF if diff_mode else INCLUDE_RE
+ matches = (pattern.match(line) for line in lines)
+ return [m.group("path") for m in matches if m]
-def file_get_include_list(path, _encoding='ascii'):
- '''Reads all lines from a file and returns a list of include paths. It
- tries to read the file in ASCII mode and UTF-8 if it fails. If it succeeds
- it will return a list of include paths. If it fails it will return None.'''
-
- inc_list = []
-
+def file_include_list(path):
+ """Return a list of all include paths in a file or None on failure."""
try:
- f = codecs.open(path, encoding=_encoding)
- except:
- print("ERROR:" + path + ":open() error!")
- utils.print_exception_info()
+ with codecs.open(path, encoding="utf-8") as f:
+ return include_paths(f)
+ except Exception:
+ logging.exception(path + ":error while parsing.")
return None
- # Allow spaces in between, but not comments.
- pattern = re.compile(r"^\s*#\s*include\s\s*[\"<]")
-
- fatal_error = False
-
- try:
- for line in f:
- if pattern.match(line):
- line_remove_comments(line)
- inc = line_get_include_path(line)
- if inc != None:
- inc_list.append(inc)
-
- except UnicodeDecodeError:
- # Capture exceptions caused by non-ASCII encoded files.
- if _encoding == 'ascii':
- # Reopen the file in UTF-8 mode. Python allows a file to be opened
- # more than once at a time. Exceptions for the recursively called
- # function will be handled inside it.
- # Output a warning.
- print("ERROR:" + path + ":Non-ASCII encoded file!")
- inc_list = file_get_include_list(path,'utf-8')
- else:
- # Already tried to decode in UTF-8 mode. Don't try again.
- print("ERROR:" + path + ":Failed to decode UTF-8!")
- fatal_error = True # Can't return while file is still open.
- utils.print_exception_info()
- except:
- print("ERROR:" + path + ":error while parsing!")
- utils.print_exception_info()
-
- f.close()
-
- if fatal_error:
- return None
-
- return inc_list
-
def inc_order_is_correct(inc_list, path, commit_hash=""):
- '''Returns true if the provided list is in order. If not, output error
- messages to stdout.'''
+ """Returns true if the provided list is in order. If not, output error
+ messages to stdout."""
# If there are less than 2 includes there's no need to check.
if len(inc_list) < 2:
return True
if commit_hash != "":
- commit_hash = commit_hash + ":" # For formatting
+ commit_hash = commit_hash + ":"
- sys_after_user = False
- sys_order_wrong = False
- user_order_wrong = False
+ # Get list of system includes from libc include directory.
+ libc_incs = [f for f in os.listdir("include/lib/libc") if f.endswith(".h")]
- # First, check if all system includes are before the user includes.
- previous_delimiter = '<' # Begin with system includes.
+ # First, check if all includes are in the appropriate group.
+ inc_group = "System"
+ incs = collections.defaultdict(list)
+ error_msgs = []
for inc in inc_list:
- delimiter = inc[0]
- if previous_delimiter == '<' and delimiter == '"':
- previous_delimiter = '"' # Started user includes.
- elif previous_delimiter == '"' and delimiter == '<':
- sys_after_user = True
+ if inc[1:-1] in libc_incs:
+ if inc_group != "System":
+ error_msgs.append(inc[1:-1] + " should be in system group, at the top")
+ elif (
+ "plat/" in inc
+ or "platform" in inc
+ or (inc.startswith('"') and "plat" in path)
+ ):
+ inc_group = "Platform"
+ elif inc_group in ("Project", "System"):
+ inc_group = "Project"
+ else:
+ error_msgs.append(
+ inc[1:-1] + " should be in project group, after system group"
+ )
+ incs[inc_group].append(inc[1:-1])
- # Then, check alphabetic order (system and user separately).
- usr_incs = []
- sys_incs = []
-
- for inc in inc_list:
- if inc.startswith('<'):
- sys_incs.append(inc)
- elif inc.startswith('"'):
- usr_incs.append(inc)
-
- if sorted(sys_incs) != sys_incs:
- sys_order_wrong = True
- if sorted(usr_incs) != usr_incs:
- user_order_wrong = True
+ # Then, check alphabetic order (system, project and user separately).
+ if not error_msgs:
+ for name, inc_list in incs.items():
+ if sorted(inc_list) != inc_list:
+ error_msgs.append("{} includes not in order.".format(name))
# Output error messages.
- if sys_after_user:
- print("ERROR:" + commit_hash + path +
- ":System include after user include.")
- if sys_order_wrong:
- print("ERROR:" + commit_hash + path +
- ":System includes not in order.")
- if user_order_wrong:
- print("ERROR:" + commit_hash + path +
- ":User includes not in order.")
-
- return not ( sys_after_user or sys_order_wrong or user_order_wrong )
+ if error_msgs:
+ print(yaml.dump({commit_hash + path: error_msgs}))
+ return False
+ else:
+ return True
def file_is_correct(path):
- '''Checks whether the order of includes in the file specified in the path
- is correct or not.'''
-
- inc_list = file_get_include_list(path)
-
- if inc_list == None: # Failed to decode - Flag as incorrect.
- return False
-
- return inc_order_is_correct(inc_list, path)
+ """Checks whether the order of includes in the file specified in the path
+ is correct or not."""
+ inc_list = file_include_list(path)
+ return inc_list is not None and inc_order_is_correct(inc_list, path)
def directory_tree_is_correct():
- '''Checks all tracked files in the current git repository, except the ones
+ """Checks all tracked files in the current git repository, except the ones
explicitly ignored by this script.
- Returns True if all files are correct.'''
-
- # Get list of files tracked by git
- (rc, stdout, stderr) = utils.shell_command([ 'git', 'ls-files' ])
+ Returns True if all files are correct."""
+ (rc, stdout, stderr) = utils.shell_command(["git", "ls-files"])
if rc != 0:
return False
-
all_files_correct = True
-
- files = stdout.splitlines()
-
- for f in files:
- if not utils.file_is_ignored(f, VALID_FILE_EXTENSIONS, IGNORED_FILES, IGNORED_FOLDERS):
- if not file_is_correct(f):
- # Make the script end with an error code, but continue
- # checking files even if one of them is incorrect.
- all_files_correct = False
-
+ for f in stdout.splitlines():
+ if not utils.file_is_ignored(
+ f, VALID_FILE_EXTENSIONS, IGNORED_FILES, IGNORED_FOLDERS
+ ):
+ all_files_correct &= file_is_correct(f)
return all_files_correct
+def group_lines(patchlines, starting_with):
+ """Generator of (name, lines) almost the same as itertools.groupby
+
+ This function's control flow is non-trivial. In particular, the clearing
+ of the lines variable, marked with [1], is intentional and must come
+ after the yield. That's because we must yield the (name, lines) tuple
+ after we have found the name of the next section but before we assign the
+    name and start collecting lines. Further, [2] is required to yield the
+    last block as there will not be a block start delimiter at the end of
+ the stream.
+ """
+ lines = []
+ name = None
+ for line in patchlines:
+ if line.startswith(starting_with):
+ if name:
+ yield name, lines
+ name = line[len(starting_with) :]
+ lines = [] # [1]
+ else:
+ lines.append(line)
+ yield name, lines # [2]
+
+
+def group_files(commitlines):
+ """Generator of (commit hash, lines) almost the same as itertools.groupby"""
+ return group_lines(commitlines, "+++ b/")
+
+
+def group_commits(commitlines):
+ """Generator of (file name, lines) almost the same as itertools.groupby"""
+ return group_lines(commitlines, "commit ")
+
+
def patch_is_correct(base_commit, end_commit):
- '''Get the output of a git diff and analyse each modified file.'''
+ """Get the output of a git diff and analyse each modified file."""
# Get patches of the affected commits with one line of context.
- (rc, stdout, stderr) = utils.shell_command([ 'git', 'log', '--unified=1',
- '--pretty="commit %h"',
- base_commit + '..' + end_commit ])
+ gitlog = subprocess.run(
+ [
+ "git",
+ "log",
+ "--unified=1",
+ "--pretty=commit %h",
+ base_commit + ".." + end_commit,
+ ],
+ stdout=subprocess.PIPE,
+ )
- if rc != 0:
+ if gitlog.returncode != 0:
return False
- # Parse stdout to get all renamed, modified and added file paths.
- # Then, check order of new includes. The log output begins with each commit
- # comment and then a list of files and differences.
- lines = stdout.splitlines()
-
+ gitlines = gitlog.stdout.decode("utf-8").splitlines()
all_files_correct = True
-
- # All files without a valid extension are ignored. /dev/null is also used by
- # git patch to tell that a file has been deleted, and it doesn't have a
- # valid extension, so it will be used as a reset value.
- path = "/dev/null"
- commit_hash = "0"
- # There are only 2 states: commit msg or file. Start inside commit message
- # because the include list is not checked when changing from this state.
- inside_commit_message = True
- inc_list = []
-
- # Allow spaces in between, but not comments.
- # Check for lines with "+" or " " at the beginning (added or not modified)
- pattern = re.compile(r"^[+ ]\s*#\s*include\s\s*[\"<]")
-
- total_line_num = len(lines)
- # By iterating this way the loop can detect if it's the last iteration and
- # check the last file (the log doesn't have any indicator of the end)
- for i, line in enumerate(lines): # Save line number in i
-
- new_commit = False
- new_file = False
- log_last_line = i == total_line_num-1
-
- # 1. Check which kind of line this is. If this line means that the file
- # being analysed is finished, don't update the path or hash until after
- # checking the order of includes, they are used in error messages. Check
- # for any includes in case this is the last line of the log.
-
- # Line format: <"commit 0000000"> (quotes present in stdout)
- if line.startswith('"commit '): # New commit
- new_commit = True
- # Line format: <+++ b/path>
- elif line.startswith("+++ b/"): # New file.
- new_file = True
- # Any other line
- else: # Check for includes inside files, not in the commit message.
- if not inside_commit_message:
- if pattern.match(line):
- line_remove_comments(line)
- inc = line_get_include_path(line)
- if inc != None:
- inc_list.append(inc)
-
- # 2. Check order of includes if the file that was being analysed has
- # finished. Print hash and path of the analised file in the error
- # messages.
-
- if new_commit or new_file or log_last_line:
- if not inside_commit_message: # If a file is being analysed
- if not utils.file_is_ignored(path, VALID_FILE_EXTENSIONS,
- IGNORED_FILES, IGNORED_FOLDERS):
- if not inc_order_is_correct(inc_list, path, commit_hash):
- all_files_correct = False
- inc_list = [] # Reset the include list for the next file (if any)
-
- # 3. Update path or hash for the new file or commit. Update state.
-
- if new_commit: # New commit, save hash
- inside_commit_message = True # Enter commit message state
- commit_hash = line[ 8 : -1 ] # Discard last "
- elif new_file: # New file, save path.
- inside_commit_message = False # Save path, exit commit message state
- # A deleted file will appear as /dev/null so it will be ignored.
- path = line[ 6 : ]
-
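+    # Shape of the git-log stream consumed below (illustrative; the commit
+    # hash and paths are hypothetical):
+    #
+    #   commit 1a2b3c4
+    #   <commit message and diff header lines>
+    #   +++ b/drivers/foo.c
+    #   +#include <bar.h>
+    #
+    # group_commits() splits this on "commit ", then group_files() splits
+    # each commit's lines on "+++ b/".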
+    for commit, comlines in group_commits(gitlines):
+        for path, lines in group_files(comlines):
+            all_files_correct &= inc_order_is_correct(
+                include_paths(lines, diff_mode=True), path, commit
+            )
return all_files_correct
-
def parse_cmd_line(argv, prog_name):
parser = argparse.ArgumentParser(
prog=prog_name,
@@ -309,23 +202,34 @@
directives are ordered alphabetically (as mandated by the Trusted
Firmware coding style). System header includes must come before user
header includes.
-""")
+""",
+    )
- parser.add_argument("--tree", "-t",
- help="Path to the source tree to check (default: %(default)s)",
- default=os.curdir)
- parser.add_argument("--patch", "-p",
- help="""
+    parser.add_argument(
+        "--tree",
+        "-t",
+        help="Path to the source tree to check (default: %(default)s)",
+        default=os.curdir,
+    )
+    parser.add_argument(
+        "--patch",
+        "-p",
+        help="""
Patch mode.
Instead of checking all files in the source tree, the script will consider
only files that are modified by the latest patch(es).""",
- action="store_true")
- parser.add_argument("--from-ref",
- help="Base commit in patch mode (default: %(default)s)",
- default="master")
- parser.add_argument("--to-ref",
- help="Final commit in patch mode (default: %(default)s)",
- default="HEAD")
+ action="store_true",
+ )
+    parser.add_argument(
+        "--from-ref",
+        help="Base commit in patch mode (default: %(default)s)",
+        default="master",
+    )
+    parser.add_argument(
+        "--to-ref",
+        help="Final commit in patch mode (default: %(default)s)",
+        default="HEAD",
+    )
args = parser.parse_args(argv)
return args
@@ -336,8 +240,13 @@
os.chdir(args.tree)
if args.patch:
- print("Checking files modified between patches " + args.from_ref
- + " and " + args.to_ref + "...")
+        print(
+            "Checking files modified between patches "
+            + args.from_ref
+            + " and "
+            + args.to_ref
+            + "..."
+        )
if not patch_is_correct(args.from_ref, args.to_ref):
sys.exit(1)
else:
diff --git a/script/static-checks/static-checks-banned-apis.sh b/script/static-checks/static-checks-banned-apis.sh
new file mode 100755
index 0000000..c4ed874
--- /dev/null
+++ b/script/static-checks/static-checks-banned-apis.sh
@@ -0,0 +1,43 @@
+#! /bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# static-checks-banned-apis.sh <path-to-root-folder> [patch]
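+#
+# Usage examples (illustrative paths):
+#   static-checks-banned-apis.sh /path/to/tf-a          # scan the whole tree
+#   static-checks-banned-apis.sh /path/to/tf-a patch    # scan the latest patch only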
+
+LOG_FILE=$(mktemp -t banned-api-check.XXXX)
+
+if [[ "$2" == "patch" ]]; then
+ echo "# Check for banned APIs in the patch"
+ TEST_CASE="Banned API check on patch(es)"
+ "$CI_ROOT/script/static-checks/check-banned-api.py" --tree "$1" \
+ --patch --from-ref origin/master \
+ &> "$LOG_FILE"
+else
+ echo "# Check for banned APIs in entire source tree"
+ TEST_CASE="Banned API check of the entire source tree"
+ "$CI_ROOT/script/static-checks/check-banned-api.py" --tree "$1" \
+ &> "$LOG_FILE"
+fi
+
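+# "$?" here is the exit status of the if/else compound above, i.e. of the
+# last command it ran: the banned-API checker itself.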
+EXIT_VALUE=$?
+
+echo >> "$LOG_TEST_FILENAME"
+echo "****** $TEST_CASE ******" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+if [[ "$EXIT_VALUE" == 0 ]]; then
+ echo "Result : SUCCESS" >> "$LOG_TEST_FILENAME"
+else
+ echo "Result : FAILURE" >> "$LOG_TEST_FILENAME"
+ echo >> "$LOG_TEST_FILENAME"
+ cat "$LOG_FILE" >> "$LOG_TEST_FILENAME"
+fi
+echo >> "$LOG_TEST_FILENAME"
+
+rm -f "$LOG_FILE"
+
+exit "$EXIT_VALUE"
diff --git a/script/static-checks/static-checks-coding-style-line-endings.sh b/script/static-checks/static-checks-coding-style-line-endings.sh
index 87e149c..ae7a6db 100755
--- a/script/static-checks/static-checks-coding-style-line-endings.sh
+++ b/script/static-checks/static-checks-coding-style-line-endings.sh
@@ -11,13 +11,16 @@
LOG_FILE=`mktemp -t common.XXXX`
-# For all the source and doc files (*.h,*.c,*.S,*.mk,*.md)
+# For all the source and doc files
# We only return the files that contain CRLF
find "." -\( \
-name '*.S' -or \
-name '*.c' -or \
-name '*.h' -or \
- -name '*.md' -or \
+ -name '*.i' -or \
+ -name '*.dts' -or \
+ -name '*.dtsi' -or \
+ -name '*.rst' -or \
-name 'Makefile' -or \
-name '*.mk' \
-\) -exec grep --files-with-matches $'\r$' {} \; &> "$LOG_FILE"
diff --git a/script/static-checks/static-checks.sh b/script/static-checks/static-checks.sh
index c9b980c..6bae729 100755
--- a/script/static-checks/static-checks.sh
+++ b/script/static-checks/static-checks.sh
@@ -87,6 +87,24 @@
fi
echo
+# Check for any Banned API usage
+
+echo 'Checking Banned API usage...'
+echo
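+# In CI only the incoming patch is scanned; local runs check the whole tree.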
+if [ "$IS_CONTINUOUS_INTEGRATION" == 1 ]; then
+ "$CI_ROOT"/script/static-checks/static-checks-banned-apis.sh . patch
+else
+ "$CI_ROOT"/script/static-checks/static-checks-banned-apis.sh
+fi
+if [ "$?" != 0 ]; then
+ echo "Banned API check: FAILURE"
+ ((ERROR_COUNT++))
+else
+ echo "Banned API check: PASS"
+fi
+echo
+
# Check error count
if [ "$ERROR_COUNT" != 0 ] || [ "$WARNING_COUNT" != 0 ]; then
diff --git a/script/static-checks/utils.py b/script/static-checks/utils.py
index c6a7fdd..548b64e 100644
--- a/script/static-checks/utils.py
+++ b/script/static-checks/utils.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -41,24 +41,24 @@
print(textwrap.indent(str(sys.exc_info()[1])," "))
-def decode_string(string):
- '''Tries to decode a binary string into ASCII. It gives an error if it
- finds non-ASCII characters, but it will return the string converted
- anyway, ignoring these characters.'''
+def decode_string(string, encoding='utf-8'):
+    '''Tries to decode a binary string. It prints an error if it finds
+    invalid characters, but it will return the string converted anyway,
+    ignoring these characters.'''
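+    # e.g. decode_string(b"caf\xc3\xa9") -> "café", while an invalid byte
+    # such as b"\xff" is reported and then dropped (illustrative).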
try:
- string = string.decode("ascii")
+        string = string.decode(encoding)
except UnicodeDecodeError:
- # Capture exceptions caused by non-ASCII characters.
- print("ERROR:Non-ASCII characters detected.")
+        # Capture exceptions caused by invalid characters.
+        print("ERROR: Non-{} characters detected.".format(encoding.upper()))
print_exception_info()
- string = string.decode("ascii", "ignore")
+ string = string.decode(encoding, "ignore")
return string
def shell_command(cmd_line):
'''Executes a shell command. Returns (returncode, stdout, stderr), where
- stdout and stderr are ASCII-encoded strings.'''
+    stdout and stderr are strings.'''
try:
p = subprocess.Popen(cmd_line, stdout=subprocess.PIPE,
diff --git a/script/translate_refspec.py b/script/translate_refspec.py
index 9e6b370..567f706 100755
--- a/script/translate_refspec.py
+++ b/script/translate_refspec.py
@@ -27,6 +27,7 @@
"trusted-firmware": gerrit.GerritProject("pdcs-platforms/ap/tf-topics", gerrit_arm),
"trusted-firmware-tf": gerrit.GerritProject("trusted-firmware/tf-a-tests", gerrit_arm),
"trusted-firmware-ci": gerrit.GerritProject("pdswinf/ci/pdcs-platforms/platform-ci", gerrit_arm),
+ "cc_plugin": gerrit.GerritProject("tests/lava/test-definitions.git", gerrit_arm),
"scp": gerrit.GerritProject("scp/firmware", gerrit_arm),
},
diff --git a/script/trusted-firmware.nomination.py b/script/trusted-firmware.nomination.py
index f9df9e7..ac81475 100644
--- a/script/trusted-firmware.nomination.py
+++ b/script/trusted-firmware.nomination.py
@@ -26,6 +26,6 @@
# Run SDEI boot test for SDEI, EHF, or RAS changes or mention
("pathre:sdei", "pathre:ehf", "pathre:ras", "has:SDEI_SUPPORT",
"has:EL3_EXCEPTION_HANDLING"):
- ["tftf-l2-tests/fvp-aarch64-sdei,fvp-default:fvp-tftf-fip.tftf-aemv8a-debug"],
+ ["tftf-l2-fvp/fvp-aarch64-sdei,fvp-default:fvp-tftf-fip.tftf-aemv8a-debug"],
}