Initial commit for TF-A CI scripts

Signed-off-by: Fathi Boudra <fathi.boudra@linaro.org>
diff --git a/script/tf-coverity/common-def.sh b/script/tf-coverity/common-def.sh
new file mode 100644
index 0000000..71640f2
--- /dev/null
+++ b/script/tf-coverity/common-def.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+JENKINS_URL=http://ssg-sw.cambridge.arm.com/jenkins
+
+# mbed TLS source tag to check out when building Trusted Firmware with Trusted
+# Board Boot support.
+MBED_TLS_SOURCES_TAG="mbedtls-2.16.0"
+
+ARMCLANG_PATH=
+CRYPTOCELL_LIB_PATH=/arm/projectscratch/ssg/trusted-fw/dummy-crypto-lib
diff --git a/script/tf-coverity/coverity_tf_conf.py b/script/tf-coverity/coverity_tf_conf.py
new file mode 100644
index 0000000..00fb945
--- /dev/null
+++ b/script/tf-coverity/coverity_tf_conf.py
@@ -0,0 +1,44 @@
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+#
+# This file lists the source files that are expected to be excluded from
+# Coverity's analysis, and the reason why.
+#
+
+# The expected format is a list of tuples (filename_pattern, description).
+# - filename_pattern is a Python regular expression (as in the 're' module)
+#   describing the file(s) to exclude.
+# - description gives the reason why those files are excluded.
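+#
+# For example, an (illustrative) entry such as:
+#   ("drivers/foo/.*", "deprecated driver")
+# marks every C file under drivers/foo/ as intentionally not analyzed.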
+exclude_paths = [
+    ("drivers/arm/cci400/cci400.c", "deprecated driver"),
+    ("drivers/arm/gic/v3/arm_gicv3_common.c", "platform to exercise GIC-500/600 powerdown not available yet"),
+    ("drivers/arm/tzc400/tzc400.c", "deprecated driver"),
+    ("drivers/arm/tzc/tzc_common_private.c",
+     "file included, actually indirectly analyzed"),
+    ("drivers/arm/tzc/tzc_dmc500.c", "not used by any upstream platform"),
+
+    ("drivers/io/io_dummy.c", "not used by any upstream platform"),
+    ("drivers/partition/gpt.c", "not used by any upstream platform"),
+    ("drivers/partition/partition.c", "not used by any upstream platform"),
+
+    ("lib/aarch64/xlat_tables.c", "deprecated library code"),
+
+    ("plat/arm/common/arm_tzc_dmc500.c", "not used by any upstream platform"),
+
+    ("plat/mediatek/mt8173/plat_mt_gic.c", "deprecated code"),
+
+    ("lib/aarch32/arm32_aeabi_divmod.c", "not used by any upstream platform"),
+
+    # Waiting for the following patch to be available:
+    # http://ssg-sw.cambridge.arm.com/gerrit/#/c/49862/
+    ("plat/rockchip/rk3399/drivers/m0/.*",
+     "work around the lack of support for the M0 compiler in the scripts"),
+
+    ("tools/.*", "Host tools"),
+    ("plat/qemu/sp_min/sp_min_setup.c", "not used in any upstream platform - see GENFW-2164")
+]
diff --git a/script/tf-coverity/run_coverity_on_tf.py b/script/tf-coverity/run_coverity_on_tf.py
new file mode 100755
index 0000000..a29b5f3
--- /dev/null
+++ b/script/tf-coverity/run_coverity_on_tf.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+#
+# Run the Coverity tool on Trusted Firmware and produce a tarball ready to be
+# submitted to Coverity Scan Online.
+#
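+# Typical invocation (paths are illustrative):
+#   ./run_coverity_on_tf.py --tf /path/to/trusted-firmware-a --mode online
+#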
+
+import sys
+import argparse
+import urllib.request
+import tarfile
+import os
+import subprocess
+import re
+import utils
+import coverity_tf_conf
+
+
+def tarball_name(filename):
+    "Isolate the tarball name without the filename's extension."
+    # Handle a selection of "composite" extensions
+    for ext in [".tar.gz", ".tar.bz2"]:
+        if filename.endswith(ext):
+            return filename[:-len(ext)]
+    # For all other extensions, let the vanilla splitext() function handle it
+    return os.path.splitext(filename)[0]
+
+assert tarball_name("foo.gz") == "foo"
+assert tarball_name("bar.tar.gz") == "bar"
+assert tarball_name("baz.tar.bz2") == "baz"
+
+
+def get_coverity_tool():
+    coverity_tarball = "cov-analysis-linux64-2017.07.tar.gz"
+    url = "http://files.oss.arm.com/downloads/tf-a/" + coverity_tarball
+    print("Downloading Coverity Build tool from %s..." % url)
+    file_handle = urllib.request.urlopen(url)
+    output = open(coverity_tarball, "wb")
+    output.write(file_handle.read())
+    output.close()
+    print("Download complete.")
+
+    print("\nUnpacking tarball %s..." % coverity_tarball)
+    tarfile.open(coverity_tarball).extractall()
+    print("Tarball unpacked.")
+
+    print("\nNow please add the Coverity tools to your PATH...")
+    print("E.g.:")
+    cov_dir_name = tarball_name(coverity_tarball)
+    cov_dir_path = os.path.abspath(os.path.join(cov_dir_name, "bin"))
+    print("  export PATH=%s$PATH" % (cov_dir_path + os.pathsep))
+
+
+def print_coverage(coverity_dir, tf_dir, exclude_paths=[], log_filename=None):
+    analyzed = []
+    not_analyzed = []
+    excluded = []
+
+    # Print the coverage report to a file (or stdout if no file is specified)
+    if log_filename is not None:
+        log_file = open(log_filename, "w")
+    else:
+        log_file = sys.stdout
+
+    # Get the list of files analyzed by Coverity.
+    #
+    # To do that, we examine the build log file Coverity generated and look
+    # for compilation lines, i.e. the lines starting with "COMPILING:". We
+    # only consider the lines that actually compile C files, i.e. lines of
+    # the form:
+    #   gcc -c file.c -o file.o
+    # This filters out other compilation lines, such as those generating
+    # dependency files (*.d).
+    # We then extract the C filename.
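+    # For example, a build-log line of the (illustrative) form:
+    #   COMPILING: aarch64-linux-gnu-gcc -c lib/foo/bar.c -o lib/foo/bar.o
+    # yields the filename "lib/foo/bar.c".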
+    coverity_build_log = os.path.join(coverity_dir, "build-log.txt")
+    with open(coverity_build_log, encoding="utf-8") as build_log:
+        for line in build_log:
+            results = re.search(r"COMPILING:.*-c *(.*\.c).*-o.*\.o", line)
+            if results is not None:
+                filename = results.group(1)
+                if filename not in analyzed:
+                    analyzed.append(filename)
+
+    # Now get the list of C files in the Trusted Firmware source tree.
+    # Header files and assembly files are ignored, as well as anything that
+    # matches one of the patterns in the exclude_paths[] list.
+    # Build a list of files that are in this source tree but were not analyzed
+    # by comparing the 2 sets of files.
+    all_files_count = 0
+    old_cwd = os.path.abspath(os.curdir)
+    os.chdir(tf_dir)
+    git_process = utils.exec_prog("git", ["ls-files", "*.c"],
+                                  out=subprocess.PIPE, out_text_mode=True)
+    for filename in git_process.stdout:
+        # Remove final \n in filename
+        filename = filename.strip()
+
+        def is_excluded(filename, excludes):
+            for pattern in excludes:
+                if re.match(pattern[0], filename):
+                    excluded.append((filename, pattern[1]))
+                    return True
+            return False
+
+        if is_excluded(filename, exclude_paths):
+            continue
+
+        # Keep track of the number of C files in the source tree. Used to
+        # compute the coverage percentage at the end.
+        all_files_count += 1
+        if filename not in analyzed:
+            not_analyzed.append(filename)
+    os.chdir(old_cwd)
+
+    # Compute the coverage percentage
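+    # For instance (illustrative numbers), 25 files not analyzed out of 500 C
+    # files gives a coverage of 95%.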
+    percentage = (1 - (len(not_analyzed) / all_files_count)) * 100
+
+    #
+    # Print a report
+    #
+    log_file.write("Files coverage: %d%%\n\n" % percentage)
+    log_file.write("Analyzed %d files\n" % len(analyzed))
+
+    if len(excluded) > 0:
+        log_file.write("\n%d files were ignored on purpose:\n" % len(excluded))
+        for exc in excluded:
+            log_file.write(" - {0:50}   (Reason: {1})\n".format(exc[0], exc[1]))
+
+    if len(not_analyzed) > 0:
+        log_file.write("\n%d files were not analyzed:\n" % len(not_analyzed))
+        for f in not_analyzed:
+            log_file.write(" - %s\n" % f)
+        log_file.write("""
+===============================================================================
+Please investigate why the above files are not run through Coverity.
+
+There are 2 possible reasons:
+
+1) The build coverage is insufficient. Please review the tf-cov-make script to
+   add the missing build config(s) that will involve the file in the build.
+
+2) The file is expected to be ignored, for example because it is deprecated
+   code. Please update the TF Coverity configuration to list the file and
+   indicate the reason why it is safe to ignore it.
+===============================================================================
+""")
+    if log_file is not sys.stdout:
+        log_file.close()
+
+
+def parse_cmd_line(argv, prog_name):
+    parser = argparse.ArgumentParser(
+        prog=prog_name,
+        description="Run Coverity on Trusted Firmware",
+        epilog="""
+        Please ensure the AArch64 & AArch32 cross-toolchains are loaded in your
+        PATH. Ditto for the Coverity tools. If you don't have the latter then
+        you can use the --get-coverity-tool option to download them for you.
+        """)
+    parser.add_argument("--tf", default=None,
+                        metavar="<Trusted Firmware source dir>",
+                        help="Specify the location of ARM Trusted Firmware sources to analyze")
+    parser.add_argument("--get-coverity-tool", default=False,
+                        help="Download the Coverity build tool and exit",
+                        action="store_true")
+    parser.add_argument("--mode", choices=["offline", "online"], default="online",
+                        help="Choose between online or offline mode for the analysis")
+    parser.add_argument("--output", "-o",
+                        help="Name of the output file containing the results of the analysis")
+    parser.add_argument("--build-cmd", "-b",
+                        help="Command used to build TF through Coverity")
+    parser.add_argument("--analysis-profile", "-p",
+                        action="append", nargs=1,
+                        help="Analysis profile for a local analysis")
+    args = parser.parse_args(argv)
+
+    # Set a default name for the output file if none is provided.
+    # If running in offline mode, this will be a text file;
+    # if running in online mode, this will be a tarball name.
+    if not args.output:
+        if args.mode == "offline":
+            args.output = "arm-tf-coverity-report.txt"
+        else:
+            args.output = "arm-tf-coverity-results.tgz"
+
+    return args
+
+
+if __name__ == "__main__":
+    prog_name = sys.argv[0]
+    args = parse_cmd_line(sys.argv[1:], prog_name)
+
+    # If the user asked to download the Coverity build tool then just do that
+    # and exit.
+    if args.get_coverity_tool:
+        # If running locally, use the commercial version of Coverity from the
+        # EUHPC cluster.
+        if args.mode == "offline":
+            print("To load the Coverity tools, use the following command:")
+            print("export PATH=/arm/tools/coverity/static-analysis/8.7.1/bin/:$PATH")
+        else:
+            get_coverity_tool()
+        sys.exit(0)
+
+    if args.tf is None:
+        print("ERROR: Please specify the Trusted Firmware sources using the --tf option.",
+              file=sys.stderr)
+        sys.exit(1)
+
+    # Get some important paths in the platform-ci scripts
+    tf_scripts_dir = os.path.abspath(os.path.dirname(prog_name))
+    tf_coverity_dir = os.path.join(os.path.normpath(
+        os.path.join(tf_scripts_dir, os.pardir, os.pardir)), "coverity")
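+    # e.g. if this script lives in <ci-root>/script/tf-coverity/, then
+    # tf_coverity_dir resolves to <ci-root>/coverity.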
+
+    if not args.build_cmd:
+        tf_build_script = os.path.join(tf_scripts_dir, "tf-cov-make")
+        args.build_cmd = tf_build_script + " " + args.tf
+
+    run_coverity_script = os.path.join(tf_coverity_dir, "run_coverity.sh")
+
+    ret = subprocess.call([run_coverity_script, "check_tools", args.mode])
+    if ret != 0:
+        sys.exit(1)
+
+    ret = subprocess.call([run_coverity_script, "configure"])
+    if ret != 0:
+        sys.exit(1)
+
+    ret = subprocess.call([run_coverity_script, "build", args.build_cmd])
+    if ret != 0:
+        sys.exit(1)
+
+    if args.mode == "online":
+        ret = subprocess.call([run_coverity_script, "package", args.output])
+    else:
+        for profile in args.analysis_profile:
+            ret = subprocess.call([run_coverity_script, "analyze",
+                                   args.output,
+                                   args.tf,
+                                   profile[0]])
+            if ret != 0:
+                break
+    if ret != 0:
+        print("An error occurred (%d)." % ret, file=sys.stderr)
+        sys.exit(ret)
+
+    print("-----------------------------------------------------------------")
+    print("Results can be found in file '%s'" % args.output)
+    if args.mode == "online":
+        print("This tarball can be uploaded to Coverity Scan Online:")
+        print("https://scan.coverity.com/projects/arm-software-arm-trusted-firmware/builds/new?tab=upload")
+    print("-----------------------------------------------------------------")
+
+    print_coverage("cov-int", args.tf, coverity_tf_conf.exclude_paths, "tf_coverage.log")
+    with open("tf_coverage.log") as log_file:
+        for line in log_file:
+            print(line, end="")
diff --git a/script/tf-coverity/tf-cov-make b/script/tf-coverity/tf-cov-make
new file mode 100755
index 0000000..36cc290
--- /dev/null
+++ b/script/tf-coverity/tf-cov-make
@@ -0,0 +1,204 @@
+#! /bin/sh
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+#
+# This script builds TF in a number of different configurations.
+# Rather than telling cov-build to build TF using a simple 'make all' command,
+# the goal here is to combine several build flags to analyse more of our source
+# code in a single 'build'. The Coverity Scan service does not have the notion
+# of separate types of build - there is just one linear sequence of builds in
+# the project history.
+#
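+# Usage:
+#   tf-cov-make <trusted-firmware-directory>
+#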
+
+# Bail out as soon as an error is encountered.
+set -e
+
+TF_SOURCES=$1
+if [ ! -d "$TF_SOURCES" ]; then
+    echo "ERROR: '$TF_SOURCES' does not exist or is not a directory"
+    echo "Usage: $(basename "$0") <trusted-firmware-directory>"
+    exit 1
+fi
+
+export CROSS_COMPILE=aarch64-linux-gnu-
+
+# Get mbed TLS library code to build Trusted Firmware with Trusted Board Boot
+# support. The version of mbed TLS to use here must be the same as when
+# building TF in the usual context.
+if [ ! -d mbedtls ]; then
+    git clone https://github.com/ARMmbed/mbedtls.git
+fi
+cd mbedtls
+containing_dir="$(readlink -f "$(dirname "$0")/")"
+. "$containing_dir/common-def.sh"
+git checkout "$MBED_TLS_SOURCES_TAG"
+cd -
+TBB_OPTIONS="TRUSTED_BOARD_BOOT=1 GENERATE_COT=1 MBEDTLS_DIR=$(pwd)/mbedtls"
+ARM_TBB_OPTIONS="$TBB_OPTIONS ARM_ROTPK_LOCATION=devel_rsa"
+
+cd "$TF_SOURCES"
+
+# Clean TF source dir to make sure we don't analyse temporary files.
+make distclean
+
+#
+# Build TF in different configurations to get as much coverage as possible
+#
+
+# We need to clean the platform build between configurations because Trusted
+# Firmware's build system doesn't track build option dependencies and won't
+# rebuild the files affected by a change of build options.
+clean_build()
+{
+    local flags="$*"
+    echo "Building TF with the following build flags:"
+    echo "  $flags"
+    make $flags clean
+    make $flags all
+    echo "Build config complete."
+    echo
+}
+
+#
+# FVP platform
+# We'll use the following flags for all FVP builds.
+#
+fvp_common_flags="-j PLAT=fvp DEBUG=1"
+
+# Try all possible SPDs.
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} ARM_TSP_RAM_LOCATION=dram SPD=tspd
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} ARM_TSP_RAM_LOCATION=dram SPD=tspd TSP_INIT_ASYNC=1 \
+    TSP_NS_INTR_ASYNC_PREEMPT=1
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} SPD=opteed
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} SPD=tlkd
+
+clean_build -j PLAT=fvp DEBUG=1 SPD=trusty
+clean_build -j PLAT=fvp DEBUG=1 SPD=trusty TRUSTY_SPD_WITH_GENERIC_SERVICES=1
+
+# SDEI
+clean_build PLAT=fvp DEBUG=1 SDEI_SUPPORT=1 EL3_EXCEPTION_HANDLING=1
+
+# Without coherent memory
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} ARM_TSP_RAM_LOCATION=dram SPD=tspd USE_COHERENT_MEM=0
+
+# Using PSCI extended State ID format rather than the original format
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} ARM_TSP_RAM_LOCATION=dram SPD=tspd PSCI_EXTENDED_STATE_ID=1 \
+    ARM_RECOM_STATE_ID_ENC=1
+
+# Alternative boot flows (This changes some of the platform initialisation code)
+clean_build $fvp_common_flags EL3_PAYLOAD=0x80000000
+clean_build $fvp_common_flags PRELOADED_BL33_BASE=0x80000000
+
+# Using the SP804 timer instead of the Generic Timer
+clean_build $fvp_common_flags FVP_USE_SP804_TIMER=1
+
+# Using the CCN driver and multi cluster topology
+clean_build $fvp_common_flags FVP_CLUSTER_COUNT=4
+
+# PMF
+clean_build $fvp_common_flags ENABLE_PMF=1
+
+# stack protector
+clean_build $fvp_common_flags ENABLE_STACK_PROTECTOR=strong
+
+# AArch32 build
+clean_build $fvp_common_flags CROSS_COMPILE=arm-linux-gnueabihf- \
+    ARCH=aarch32 AARCH32_SP=sp_min \
+    RESET_TO_SP_MIN=1 PRELOADED_BL33_BASE=0x80000000
+clean_build $fvp_common_flags CROSS_COMPILE=arm-linux-gnueabihf- \
+    ARCH=aarch32 AARCH32_SP=sp_min
+
+# Xlat tables lib version 1 (AArch64 and AArch32)
+clean_build $fvp_common_flags ARM_XLAT_TABLES_LIB_V1=1 RECLAIM_INIT_CODE=0
+clean_build $fvp_common_flags CROSS_COMPILE=arm-linux-gnueabihf- \
+    ARCH=aarch32 AARCH32_SP=sp_min ARM_XLAT_TABLES_LIB_V1=1 RECLAIM_INIT_CODE=0
+
+# Using GIC600 driver
+clean_build $fvp_common_flags FVP_USE_GIC_DRIVER=FVP_GIC600
+
+# SPM support
+clean_build $fvp_common_flags ENABLE_SPM=1 EL3_EXCEPTION_HANDLING=1
+
+# BL2 at EL3 support
+clean_build $fvp_common_flags BL2_AT_EL3=1
+clean_build $fvp_common_flags CROSS_COMPILE=arm-linux-gnueabihf- \
+    ARCH=aarch32 AARCH32_SP=sp_min BL2_AT_EL3=1
+
+#
+# Juno platform
+# We'll use the following flags for all Juno builds.
+#
+juno_common_flags="-j PLAT=juno DEBUG=1"
+clean_build $juno_common_flags SPD=tspd ${ARM_TBB_OPTIONS}
+clean_build $juno_common_flags EL3_PAYLOAD=0x80000000
+clean_build $juno_common_flags ENABLE_STACK_PROTECTOR=strong
+clean_build $juno_common_flags CSS_USE_SCMI_SDS_DRIVER=0
+clean_build $juno_common_flags SPD=tspd ${ARM_TBB_OPTIONS} ARM_CRYPTOCELL_INTEG=1 CCSBROM_LIB_PATH=${CRYPTOCELL_LIB_PATH}
+
+#
+# System Guidance for Infrastructure platform SGI575
+#
+make -j DEBUG=1 PLAT=sgi575 all
+
+#
+# System Guidance for Infrastructure platform RD-N1Edge
+#
+make -j DEBUG=1 PLAT=rdn1edge all
+
+#
+# System Guidance for Infrastructure platform RD-E1Edge
+#
+make -j DEBUG=1 PLAT=rde1edge all
+
+# Partners' platforms.
+# Enable as many features as possible.
+# We don't need to clean between each build here because we only do one build
+# per platform so we don't hit the build flags dependency problem.
+external_plat_common_flags="-j DEBUG=1"
+
+make PLAT=mt8173 $external_plat_common_flags all
+
+make PLAT=rk3368 $external_plat_common_flags COREBOOT=1 all
+make PLAT=rk3399 $external_plat_common_flags COREBOOT=1 all
+make PLAT=rk3328 $external_plat_common_flags COREBOOT=1 all
+
+# Although we do several consecutive builds for the Tegra platform below, we
+# don't need to clean between each one because the Tegra makefiles specify
+# a different build directory per SoC.
+make PLAT=tegra TARGET_SOC=t210 $external_plat_common_flags all
+make PLAT=tegra TARGET_SOC=t132 $external_plat_common_flags all
+make PLAT=tegra TARGET_SOC=t186 $external_plat_common_flags all
+
+# For the Xilinx platform, artificially increase the extents of BL31 memory
+# (using the platform-specific build options ZYNQMP_ATF_MEM_{BASE,SIZE}).
+# If we keep the default values, BL31 doesn't fit when it is built with all
+# these build flags.
+make PLAT=zynqmp $external_plat_common_flags \
+    RESET_TO_BL31=1 SPD=tspd \
+    ZYNQMP_ATF_MEM_BASE=0xFFFC0000 ZYNQMP_ATF_MEM_SIZE=0x00040000 \
+    all
+
+clean_build PLAT=qemu $external_plat_common_flags ${TBB_OPTIONS}
+clean_build PLAT=qemu $external_plat_common_flags ENABLE_STACK_PROTECTOR=strong
+
+# For hikey, enable PMF to include all the files in the platform port
+make PLAT=hikey $external_plat_common_flags ENABLE_PMF=1 all
+make PLAT=hikey960 $external_plat_common_flags all
+
+clean_build PLAT=uniphier $external_plat_common_flags ${TBB_OPTIONS} SPD=tspd
+clean_build PLAT=uniphier $external_plat_common_flags FIP_GZIP=1
+
+make PLAT=poplar $external_plat_common_flags all
+
+make PLAT=rpi3 $external_plat_common_flags PRELOADED_BL33_BASE=0xDEADBEEF all
+
+# Cannot use $external_plat_common_flags for the LS1043 platform, as the
+# binaries would then not fit in memory.
+clean_build PLAT=ls1043 SPD=opteed ENABLE_STACK_PROTECTOR=strong
+clean_build PLAT=ls1043 SPD=tspd
+
+cd ..
diff --git a/script/tf-coverity/utils.py b/script/tf-coverity/utils.py
new file mode 100644
index 0000000..f86667f
--- /dev/null
+++ b/script/tf-coverity/utils.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import subprocess
+
+def exec_prog(prog, args=[], out=None, out_text_mode=False):
+    # Build the command line to execute
+    cmd = [ prog ] + args
+
+    # Spawn process.
+    # Note: The standard error output is captured into the same file handle as
+    # for stdout.
+    process = subprocess.Popen(cmd, stdout=out, stderr=subprocess.STDOUT,
+                               universal_newlines=out_text_mode, bufsize=0)
+    print("Spawned process with PID %u" % process.pid)
+    return process
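+
+# Example usage (mirrors the call in run_coverity_on_tf.py):
+#   git_process = exec_prog("git", ["ls-files", "*.c"],
+#                           out=subprocess.PIPE, out_text_mode=True)
+#   for filename in git_process.stdout:
+#       ...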