Sync job files with internal CI
Sync job files with platform-ci commit:
539c151d0cd99a5e6ca6c0e6966f6d8579fe864e
Signed-off-by: Zelalem <zelalem.aweke@arm.com>
Change-Id: Ida470e00da76188ce3987d1fa93ec758b5e0f23a
diff --git a/job/tf-a-tests-master-sync/sync.sh b/job/tf-a-tests-master-sync/sync.sh
deleted file mode 100755
index 59ac052..0000000
--- a/job/tf-a-tests-master-sync/sync.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-
-set -e
-
-source "$CI_ROOT/utils.sh"
-
-# Clone TF-A Tests repo from tf.org.
-if [ ! -d "tf-a-tests" ]; then
- git clone --origin tforg $tftf_src_repo_url
- cd tf-a-tests
- git remote add arm $tftf_arm_gerrit_repo
-else
- cd tf-a-tests
-fi
-
-# Get the latest updates from the master branch on tf.org.
-git remote update --prune
-git checkout master
-git merge --ff-only tforg/master
-
-# Push updates to Arm internal Gerrit.
-git push arm master
diff --git a/job/tf-ci-gateway/generate_report.sh b/job/tf-ci-gateway/generate_report.sh
index c0f9a7a..c48e38b 100755
--- a/job/tf-ci-gateway/generate_report.sh
+++ b/job/tf-ci-gateway/generate_report.sh
@@ -24,4 +24,6 @@
--meta-data inject.data \
--meta-data html:coverity.data \
|| true
+ source $CI_ROOT/script/gen_merge_report.sh "${WORKSPACE}/report.json" \
+ "${WORKSPACE}/report.html"
fi
diff --git a/job/tf-coverity/should_run_tf_coverity.sh b/job/tf-coverity/should_run_tf_coverity.sh
new file mode 100755
index 0000000..f7ad382
--- /dev/null
+++ b/job/tf-coverity/should_run_tf_coverity.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script checks if the current patch modifies scripts which run
+# Coverity Online Scan in tf-coverity jenkins job.
+
+set -e
+
+cd $CI_ROOT
+current_commit=$(git rev-parse --short HEAD)
+modified_files=$(git diff-tree --no-commit-id --name-only -r $current_commit)
+
+hit=$(echo $modified_files|grep "script/tf-coverity/"|wc -l)
+cd -
+
+if [ $hit -gt 0 ]; then
+ echo "Coverity scripts modified in this patch. tf-coverity will be triggered"
+ exit 0
+fi
+
+echo "No coverity scripts modified"
+exit 1
diff --git a/job/tf-gerrit-bot/gerrit_bot.py b/job/tf-gerrit-bot/gerrit_bot.py
new file mode 100644
index 0000000..1bad974
--- /dev/null
+++ b/job/tf-gerrit-bot/gerrit_bot.py
@@ -0,0 +1,232 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+#
+# Assigns reviewers according to maintainers file.
+
+import argparse
+import os
+from pygerrit2 import GerritRestAPI, HTTPBasicAuth
+import re
+
+DEFAULT_GERRIT_URL = 'https://review.trustedfirmware.org'
+DEFAULT_GERRIT_PROJECT_NAME = 'TF-A/trusted-firmware-a'
+DEFAULT_MAINTAINERS_FILE_NAME = 'maintainers.rst'
+
+# Commit message is returned in a file list, ignore it
+COMMIT_MSG_FILE = '/COMMIT_MSG'
+
+def connect_to_gerrit(gerrit_url, gerrit_user, gerrit_password):
+ '''
+ Connect to Gerrit server.
+ The password is not a plaintext password,
+ it can be obtained from Profile/Settings/HTTP Password page.
+ Returns GerritRestAPI class.
+ '''
+
+ auth = HTTPBasicAuth(gerrit_user, gerrit_password)
+ return GerritRestAPI(url=gerrit_url, auth=auth)
+
+
+def get_open_changes(rest_api, project_name):
+ '''
+ Get list of open reviews for the project.
+ '''
+
+ # Pass DETAILED_ACCOUNTS to get owner username
+ return rest_api.get("/changes/?q=status:open%20project:" + project_name + "&o=DETAILED_ACCOUNTS")
+
+
+def get_files(rest_api, change_id):
+ '''
+ Get list of changed files for the review.
+ Commit message is removed from the list.
+ '''
+
+ files_list = rest_api.get("/changes/" + change_id + "/revisions/current/files/")
+ del files_list[COMMIT_MSG_FILE]
+
+ return files_list
+
+
+def add_reviewer(rest_api, change_id, username, dry_run):
+ '''
+ Add reviewer to the review.
+ '''
+
+ endpoint = "/changes/" + change_id + "/reviewers"
+ kwargs = {"data": {"reviewer": username}}
+
+ # Exception is thrown if username is wrong, so just print it
+ try:
+ if not dry_run:
+ rest_api.post(endpoint, **kwargs)
+ except Exception as e:
+ print(" Add reviewer failed, username: " + str(username))
+ print(" " + str(e))
+ else:
+ print(" Reviewer added, username: " + str(username))
+
+
+def parse_maintainers_file(file_path):
+ '''
+ Parse maintainers file.
+ Returns a dictionary {file_path:set{user1, user2, ...}}
+ '''
+
+ f = open(file_path, encoding='utf8')
+ file_text = f.read()
+ f.close()
+
+ FILE_PREFIX = "\n:F: "
+
+ regex = r"^:G: `(?P<user>.*)`_$(?P<paths>(" + FILE_PREFIX + r".*$)+)"
+ matches = re.finditer(regex, file_text, re.MULTILINE)
+
+ # Create a dictionary {file_path:set{user1, user2, ...}} for faster search
+ result_dict = {}
+
+ for match in matches:
+ user_name = match.group("user")
+
+ paths = match.group("paths").split(FILE_PREFIX)
+ paths.remove("")
+
+ # Fill the dictionary
+ for path in paths:
+ if path not in result_dict:
+ result_dict[path] = set()
+
+ result_dict[path].add(user_name)
+
+ return result_dict
+
+
+def get_file_maintainers(file_path, maintainers_dictionary):
+ '''
+    Returns a set of usernames (maintainers) for the file.
+ '''
+
+ maintainers = set()
+
+ file = file_path
+
+ # Get maintainers of the file
+ maintainers_set = maintainers_dictionary.get(file)
+ if maintainers_set:
+ maintainers.update(maintainers_set)
+
+ # Get maintainers of the directories
+ while (file > "/"):
+ # Get upper directory on each step.
+ file = os.path.dirname(file)
+ path_to_check = file + "/"
+
+ maintainers_set = maintainers_dictionary.get(path_to_check)
+ if maintainers_set:
+ maintainers.update(maintainers_set)
+
+ return maintainers
+
+
+def assign_reviewers(rest_api, maintainers_dictionary, change, dry_run):
+ '''
+ Assign maintainers to the review.
+ '''
+
+ # It looks like some accounts may not have username
+ owner_username = None
+ if ('username' in change['owner']):
+ owner_username = change['owner']['username']
+
+ print("\nChange: " + str(change['id']))
+ print(" Topic: " + str(change.get('topic')))
+ print(" Owner: " + str(owner_username))
+
+ change_maintainers = set()
+
+ # Get list of all files in the change
+ files = get_files(rest_api, change['id'])
+
+ for file in files:
+ # Get all maintainers of the file
+ file_maintainers = get_file_maintainers(file, maintainers_dictionary)
+
+ if (len(file_maintainers) > 0):
+ print(" File: " + file + " maintainers: " + str(file_maintainers))
+
+ change_maintainers.update(file_maintainers)
+
+ # Don't add owner even if he is a maintainer
+ change_maintainers.discard(owner_username)
+
+ for maintainer in change_maintainers:
+ add_reviewer(rest_api, change['id'], maintainer, dry_run)
+
+
+def parse_cmd_line():
+
+ parser = argparse.ArgumentParser(
+ description="Gerrit bot",
+ epilog="""
+ Assigns reviewers according to maintainers file.
+ """
+ )
+
+ required_group = parser.add_argument_group('required arguments')
+
+ parser.add_argument("--url", "-u",
+ help = """
+ Gerrit URL (default: %(default)s)
+ """,
+ default = DEFAULT_GERRIT_URL)
+
+ parser.add_argument("--project", "-p",
+ help = """
+ Project name (default: %(default)s).
+ """,
+ default = DEFAULT_GERRIT_PROJECT_NAME)
+
+ parser.add_argument("--maintainers", "-m",
+ help = """
+ Path to maintainers file (default: %(default)s).
+ """,
+ default = DEFAULT_MAINTAINERS_FILE_NAME)
+
+ parser.add_argument("--dry-run",
+ help = """
+ Check maintainers, but don't add them (default: %(default)s).
+ """,
+ action='store_true',
+ default = False)
+
+ required_group.add_argument("--user",
+ help = """
+ Gerrit user.
+ """,
+ required = True)
+
+ required_group.add_argument("--password",
+ help="""
+ Gerrit HTTP password.
+ This is NOT a plaintext password.
+ But the value from Profile/Settings/HTTP Password
+ """,
+ required = True)
+
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+
+ args = parse_cmd_line()
+
+ maintainers_dict = parse_maintainers_file(args.maintainers)
+ rest = connect_to_gerrit(args.url, args.user, args.password)
+ changes = get_open_changes(rest, args.project)
+
+ for change in changes:
+ assign_reviewers(rest, maintainers_dict, change, args.dry_run)
diff --git a/job/tf-gerrit-bot/requirements.txt b/job/tf-gerrit-bot/requirements.txt
new file mode 100644
index 0000000..f693ee9
--- /dev/null
+++ b/job/tf-gerrit-bot/requirements.txt
@@ -0,0 +1,9 @@
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Gerrit bot requires:
+
+pygerrit2>=2.0.9
diff --git a/job/tf-gerrit-bot/run_gerrit_bot.sh b/job/tf-gerrit-bot/run_gerrit_bot.sh
new file mode 100755
index 0000000..796e29a
--- /dev/null
+++ b/job/tf-gerrit-bot/run_gerrit_bot.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Install pygerrit2 if needed
+python3 -c "from pygerrit2 import GerritRestAPI, HTTPBasicAuth"
+if [ $? != 0 ]
+then
+ yes | pip3 install pygerrit2
+fi
+
+# Run bot
+cd $(dirname "$0")
+python3 gerrit_bot.py --user $1 --password $2 --maintainers $3
diff --git a/job/tf-github-autoreply/github_pr_bot.py b/job/tf-github-autoreply/github_pr_bot.py
new file mode 100755
index 0000000..b21da50
--- /dev/null
+++ b/job/tf-github-autoreply/github_pr_bot.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import argparse
+import datetime
+import sys
+import os.path
+import logging
+
+try:
+ from github import Github
+except ImportError:
+ print(
+ "Can not import from github. PyGitHub may be missing. Check requirements.txt."
+ )
+ sys.exit(1)
+
+SCRIPT_DIR = os.path.dirname(__file__)
+logger = logging.getLogger()
+
+
+def commented_already(comments, bots):
+ """Check if our bots have left a comment."""
+ return any(comment.user.login in bots for comment in comments)
+
+
+def readfile(path):
+ """Read a file into a python string"""
+ with open(os.path.join(SCRIPT_DIR, path), "r") as textfile:
+ return textfile.read()
+
+
+def reply_to_issues(repo, bots, dry_run):
+ """Reply to all new issues without a bot reply"""
+ body = readfile("issue_comment.md")
+ logging.info("Replying to new issues on {}/{}".format(repo.owner.login, repo.name))
+ for issue in repo.get_issues(since=datetime.datetime(2019, 10, 17, 12)):
+ if not commented_already(issue.get_comments(), bots):
+            logging.info("Replying to issue #{}: {}".format(issue.number, issue.title))
+ if not dry_run:
+ issue.create_comment(body.format(user_name=issue.user.login))
+
+
+def reply_to_pull_requests(repo, bots, dry_run):
+ """Reply to all new Pull Requests without a bot reply"""
+ body = readfile("pull_comment.md")
+ logging.info("Replying to PRs on {}/{}".format(repo.owner.login, repo.name))
+ for pr in repo.get_pulls("status=open"):
+ # get_issue_comments() returns top-level PR comments.
+ # While get_comments() or get_review_comments()
+ # return comments against diff in the PR.
+ if not commented_already(pr.get_issue_comments(), bots):
+            logging.info("Replying to pull request #{}: {}".format(pr.number, pr.title))
+ if not dry_run:
+ pr.create_issue_comment(body.format(user_name=pr.user.login))
+
+
+def to_repo(gh, owner, name):
+    """Construct a Repo from a logged-in Github object, an owner, and a repo name"""
+ return gh.get_user(owner).get_repo(name)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument("user", help="Username to login to GitHub")
+ parser.add_argument("pass", help="Password of the GitHub user")
+ parser.add_argument(
+ "--dry-run",
+ help="Just print what would be done",
+ default=False,
+ action="store_true",
+ )
+ parser.add_argument('--verbose', '-v', action='count', default=0, help="Increase verbosity of the printing")
+ args = parser.parse_args()
+
+ if args.verbose <= 0:
+ logger.setLevel(logging.ERROR)
+ elif args.verbose <= 1:
+ logger.setLevel(logging.INFO)
+ else:
+ logger.setLevel(logging.DEBUG)
+
+ repository_owner = "ARM-software"
+ repository_name = "arm-trusted-firmware"
+ issues_name = "tf-issues"
+ bots = {"arm-tf-bot", "ssg-bot", args.user}
+
+ gh = Github(args.user, getattr(args, "pass"))
+ pr_repo = to_repo(gh, repository_owner, repository_name)
+ reply_to_pull_requests(pr_repo, bots, args.dry_run)
+ issue_repo = to_repo(gh, repository_owner, issues_name)
+ reply_to_pull_requests(issue_repo, bots, args.dry_run)
+ reply_to_issues(issue_repo, bots, args.dry_run)
diff --git a/job/tf-github-autoreply/issue_comment.md b/job/tf-github-autoreply/issue_comment.md
new file mode 100644
index 0000000..fb4d721
--- /dev/null
+++ b/job/tf-github-autoreply/issue_comment.md
@@ -0,0 +1,15 @@
+Hello @{user_name}!
+
+
+Thank you for raising an issue for **Trusted Firmware-A**.
+
+The TF-A project has now migrated to www.trustedfirmware.org. This issue tracker will still remain accessible for some time, but only for historical reasons. From now on you should raise new issues on trustedfirmware.org.
+
+If it is a query or a design discussion it is better discussed via the [mailing list](https://lists.trustedfirmware.org/mailman/listinfo/tf-a). If it is issue/bug which need to be tracked, raise an issue in the [issue tracking board](https://developer.trustedfirmware.org/maniphest/query/open/) and also send an email to the [mailing list](https://lists.trustedfirmware.org/mailman/listinfo/tf-a) to notify the TF-A community.
+
+## How do I raise issues for TF-A?
+Please use our new [issue tracking board](https://developer.trustedfirmware.org/maniphest/query/open/). For this you just need to login with your existing GitHub account. We also have a [guide](https://developer.trustedfirmware.org/dashboard/view/6/) to help you raise the issue with the appropriate labels and tags. This way it will be easier for both you and us to track and address the issue most effectively.
+
+We are looking forward to seeing you in trustedfirmware.org!
+
+The **Trusted Firmware-A** team
\ No newline at end of file
diff --git a/job/tf-github-autoreply/pull_comment.md b/job/tf-github-autoreply/pull_comment.md
new file mode 100644
index 0000000..0418224
--- /dev/null
+++ b/job/tf-github-autoreply/pull_comment.md
@@ -0,0 +1,24 @@
+Hello @{user_name}!
+
+
+Thank you for your contribution to **Trusted Firmware-A**!
+
+The TF-A project has now migrated to www.trustedfirmware.org. Our GitHub repo will remain accessible as a Read-Only mirror but we have changed the way we accept contributions for the project.
+
+## How do I contribute patches to TF-A?
+We have a [Getting started](https://developer.trustedfirmware.org/w/tf_a/gerrit-getting-started/) article which we hope will make everything very straightforward! And if you would like more details you can always refer to the [contributing guidelines](https://trustedfirmware-a.readthedocs.io/en/latest/process/contributing.html).
+
+A quick overview:
+1. Go to review.trustedfirmware.org
+2. Register with your existing GitHub account
+3. Submit your patches!
+
+## What if I face any problems?
+We have many channels through which you can contact us:
+
+ * **Our mailing lists**
+ You can send us an email in the [public TF-A mailing list](https://lists.trustedfirmware.org/mailman/listinfo/tf-a). [Here](https://lists.trustedfirmware.org/mailman/listinfo) you can also find all the mailing lists for all the projects hosted under trustedfirmware.org.
+
+We are looking forward to seeing your patch submitted to trustedfirmware.org!
+
+The **Trusted Firmware-A** team
diff --git a/job/tf-github-autoreply/requirements.txt b/job/tf-github-autoreply/requirements.txt
new file mode 100644
index 0000000..3aa2f44
--- /dev/null
+++ b/job/tf-github-autoreply/requirements.txt
@@ -0,0 +1,9 @@
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+#github_pr_bot.py requires:
+
+pygithub>=1.44
diff --git a/job/tf-github-autoreply/run_github_autoreply.sh b/job/tf-github-autoreply/run_github_autoreply.sh
new file mode 100755
index 0000000..63c4466
--- /dev/null
+++ b/job/tf-github-autoreply/run_github_autoreply.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Install PyGitHub if needed
+python3 -c "import github"
+if [ $? != 0 ]
+then
+ yes | pip3 install pygithub
+fi
+
+# Run bot
+python3 $(dirname "${BASH_SOURCE[0]}")/github_pr_bot.py $@
diff --git a/job/tf-main/run_fast_forward_master.sh b/job/tf-main/run_fast_forward_master.sh
new file mode 100755
index 0000000..33e2486
--- /dev/null
+++ b/job/tf-main/run_fast_forward_master.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+#
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+set -e
+
+if [[ $MULTIJOB_FAILED -eq 0 ]]; then
+ echo "Proceed with integration->master fast-forward merge"
+ bash /arm/projectscratch/ssg/trusted-fw/ci-scripts/fast-forward-master.sh
+ exit 0
+else
+ echo "Do not proceed with integration->master merge as sub-jobs failed"
+ exit 1
+fi
+
diff --git a/job/tf-optee-build/build_optee.sh b/job/tf-optee-build/build_optee.sh
index 4bf24c1..bbb032d 100755
--- a/job/tf-optee-build/build_optee.sh
+++ b/job/tf-optee-build/build_optee.sh
@@ -1,6 +1,6 @@
#!/bin/bash
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -9,10 +9,26 @@
source "$ci_root/utils.sh"
cd optee
-make PLATFORM=vexpress PLATFORM_FLAVOR="${PLATFORM_FLAVOR:?}" CFG_ARM64_core=y
+
+# Setting up Python virtual environment with pyelftools and pycrypto
+python3 -m venv python_virtualenv
+source python_virtualenv/bin/activate
+
+# wheel is not specified as pycrypto dependency but it is necessary for
+# installing it.
+pip install wheel
+pip install pyelftools pycrypto
+
+make PLATFORM=vexpress \
+ PLATFORM_FLAVOR="${PLATFORM_FLAVOR:?}" \
+ CFG_ARM64_core=y \
+ CROSS_COMPILE32=arm-none-eabi-
+
+# Deactivating Python virtual environment
+deactivate
# Remove header from tee.bin
-aarch64-linux-gnu-objcopy -O binary \
+aarch64-none-elf-objcopy -O binary \
out/arm-plat-vexpress/core/tee.elf out/arm-plat-vexpress/core/tee.bin
# Gather files to export in a single directory
diff --git a/job/tf-static-checks/run_static_checks.sh b/job/tf-static-checks/run_static_checks.sh
new file mode 100644
index 0000000..b2d3a48
--- /dev/null
+++ b/job/tf-static-checks/run_static_checks.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script runs the static checks in tf-static-checks
+# jenkins job.
+
+if [ "$REPO_UNDER_TEST" = "trusted-firmware" ]; then
+ cd "$TF_CHECKOUT_LOC"
+else
+ cd "$TFTF_CHECKOUT_LOC"
+fi
+
+export IS_CONTINUOUS_INTEGRATION=1
+static_fail=0
+
+if ! "$CI_ROOT/script/static-checks/static-checks.sh"; then
+ static_fail=1
+fi
+
+if [ -f "static-checks.log" ]; then
+ mv "static-checks.log" "$WORKSPACE"
+fi
+
+exit "$static_fail"
diff --git a/job/tf-sync-repos/sync.sh b/job/tf-sync-repos/sync.sh
new file mode 100755
index 0000000..8269768
--- /dev/null
+++ b/job/tf-sync-repos/sync.sh
@@ -0,0 +1,107 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Push the updated master from local to the selected remote
+#
+# $1 = git remote human readable name
+# $2 = git remote URL
+sync_repo()
+{
+ local result
+
+ echo Pushing to "$1"...
+ git push --tags $2 master
+ result=$?
+ if [ $result != 0 ]
+ then
+ echo Pushing to $1 FAILED!
+ else
+ echo Pushing to $1 SUCCEEDED!
+ fi
+ return $result
+}
+
+# Clone the selected repo from tf.org
+#
+# Some variables utilised inside this function come from utils.sh
+#
+# $1 = repo to clone
+clone_repo()
+{
+ local repo_url
+ local repo_name
+
+ case $1 in
+ trusted-firmware-a)
+ repo_url=$tf_src_repo_url
+ repo_name="TF-A"
+ ;;
+ tf-a-tests)
+ repo_url=$tftf_src_repo_url
+ repo_name="TF-A-Tests"
+ ;;
+ *)
+ echo "ERROR: Unknown repo to be cloned. sync.sh failed!"
+ exit 1
+ ;;
+ esac
+
+ # Check if the repo clone exists in the job's workspace
+ if [ ! -d $1 ]
+ then
+ # Fresh clone
+ echo Cloning $repo_name from trustedfirmware.org...
+ git clone $repo_url
+ else
+ echo Will use existing repo for "$repo_name"...
+ fi
+}
+
+# Pull changes from tf.org to the local repo
+#
+# $1 = repo to update. It must be the same with the directory name
+pull_changes()
+{
+ cd $1
+ echo Pulling $1 from trustedfirmware.org...
+ git remote update --prune
+ git checkout master
+ git merge --ff-only origin/master
+ cd - > /dev/null
+}
+
+# exit if anything fails
+set -e
+
+# Source this file to get TF-A and TF-A-Tests repo URLs
+source "$CI_ROOT/utils.sh"
+
+clone_repo trusted-firmware-a
+clone_repo tf-a-tests
+
+pull_changes trusted-firmware-a
+pull_changes tf-a-tests
+
+# stop exiting automatically
+set +e
+
+# Update TF-A remotes
+cd trusted-firmware-a
+sync_repo GitHub https://$GH_USER:$GH_PASSWORD@github.com/ARM-software/arm-trusted-firmware.git
+github=$?
+sync_repo "internal TF-A Gerrit" $tf_arm_gerrit_repo
+tfa_gerrit=$?
+
+# Update TF-A-Tests
+cd ../tf-a-tests
+sync_repo "internal TF-A-Tests Gerrit" $tftf_arm_gerrit_repo
+tftf_gerrit=$?
+
+if [ $github != 0 -o $tfa_gerrit != 0 -o $tftf_gerrit != 0 ]
+then
+ exit 1
+fi
diff --git a/job/tf-topics-master-sync/sync.sh b/job/tf-topics-master-sync/sync.sh
deleted file mode 100755
index d2c741b..0000000
--- a/job/tf-topics-master-sync/sync.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/bash
-#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-
-# $1 = git remote human readable name
-# $2 = git remote URL
-sync_repo()
-{
- local result
- echo Pushing to $1...
- git push $2 master
- result=$?
- if [ $result != 0 ]
- then
- echo Pushing to $1 FAILED!
- else
- echo Pushing to $1 SUCCEEDED!
- fi
- return $result
-}
-
-# exit if anything fails
-set -e
-
-source "$CI_ROOT/utils.sh"
-
-if [ ! -d "trusted-firmware-a" ]
-then
- # Fresh clone
- echo Cloning from trustedfirmware.org...
- git clone $tf_src_repo_url
-else
- echo Using existing repo...
-fi
-
-echo Pulling from trustedfirmware.org...
-cd trusted-firmware-a
-git remote update --prune
-git checkout master
-git merge --ff-only origin/master
-
-# stop exiting automatically
-set +e
-
-sync_repo GitHub https://$GH_USER:$GH_PASSWORD@github.com/ARM-software/arm-trusted-firmware.git
-github=$?
-sync_repo "internal Arm Gerrit" $tf_arm_gerrit_repo
-gerrit=$?
-
-if [ $github != 0 -o $gerrit != 0 ]
-then
- exit 1
-fi
diff --git a/job/tf-worker/generate_yaml.sh b/job/tf-worker/generate_yaml.sh
index 37a0ae0..60ae846 100755
--- a/job/tf-worker/generate_yaml.sh
+++ b/job/tf-worker/generate_yaml.sh
@@ -9,6 +9,8 @@
if echo "$RUN_CONFIG" | grep -iq 'tftf'; then
payload_type="tftf"
+elif echo "$RUN_CONFIG" | grep -iq 'scmi'; then
+ payload_type="scp_tests_scmi"
else
payload_type="linux"
fi
diff --git a/job/tf-worker/manage_artefacts.sh b/job/tf-worker/manage_artefacts.sh
index 865afd9..dbfc4eb 100755
--- a/job/tf-worker/manage_artefacts.sh
+++ b/job/tf-worker/manage_artefacts.sh
@@ -8,6 +8,8 @@
set -e
if [ -d artefacts ]; then
- # Remove everything except logs
- find artefacts -type f -not \( -name "*.log" \) -exec rm -f {} +
+ # Remove everything except logs and scan-build artefacts such as
+ # .html, .js and .css files useful for offline debug of static
+ # analysis defects
+ find artefacts -type f -not \( -name "*.log" -o -name "*.html" -o -name "*.js" -o -name "*.css" \) -exec rm -f {} +
fi
diff --git a/job/tf-worker/run_fvp_test.sh b/job/tf-worker/run_fvp_test.sh
index 2a62eab..4ec3912 100755
--- a/job/tf-worker/run_fvp_test.sh
+++ b/job/tf-worker/run_fvp_test.sh
@@ -1,6 +1,6 @@
#!/bin/bash
#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
+# Copyright (c) 2019-2020, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -8,8 +8,14 @@
set -e
# Build
+export COVERAGE_ON=$(echo "$RUN_CONFIG" | grep -v 'aarch32' | grep -qE 'bmcov' && echo 1 || echo 0)
+if [ $COVERAGE_ON -eq 1 ]; then
+ source "$CI_ROOT/script/build_bmcov.sh"
+fi
+
"$CI_ROOT/script/build_package.sh"
+
if [ "$skip_runs" ]; then
exit 0
fi
@@ -17,4 +23,76 @@
# Execute test locally for FVP configs
if [ "$RUN_CONFIG" != "nil" ] && echo "$RUN_CONFIG" | grep -iq '^fvp'; then
"$CI_ROOT/script/run_package.sh"
+ if [ $COVERAGE_ON -eq 1 ]; then
+ ELF_FOLDER=""
+ DEBUG_FOLDER=${artefacts}/debug
+ RELEASE_FOLDER=${artefacts}/release
+ if ls "${DEBUG_FOLDER}/"*.elf &> /dev/null;then
+ export ELF_FOLDER=$DEBUG_FOLDER
+ elif ls "${RELEASE_FOLDER}/"*.elf &> /dev/null;then
+ export ELF_FOLDER=$RELEASE_FOLDER
+ else
+ # If elf files are not present, report can't be produced
+ echo "ELF files not present, aborting reports..."
+ exit 0
+ fi
+ export OUTDIR=${WORKSPACE}/html
+ test_config=${TEST_CONFIG}
+ if [ -n "$CC_SCP_REFSPEC" ]; then #SCP
+ export JENKINS_SOURCES_WORKSPACE="${scp_root:-$workspace}"
+ if grep -q "fvp-linux.sgi" <<< "$test_config"; then
+ export LIST_OF_BINARIES=${LIST_OF_BINARIES:-"scp_ram scp_rom mcp_rom"}
+ elif grep -q "fvp-sgm775" <<< "$test_config"; then
+ export LIST_OF_BINARIES=${LIST_OF_BINARIES:-"scp_ram scp_rom"}
+ fi
+ export OBJDUMP="$(which 'arm-none-eabi-objdump')"
+ export READELF="$(which 'arm-none-eabi-readelf')"
+ export REPO=SCP
+ else # TF-A
+ export JENKINS_SOURCES_WORKSPACE="${tf_root:-$workspace}"
+ export LIST_OF_BINARIES=${LIST_OF_BINARIES:-"bl1 bl2 bl31"}
+ export OBJDUMP="$(which 'aarch64-none-elf-objdump')"
+ export READELF="$(which 'aarch64-none-elf-readelf')"
+ export REPO=TRUSTED_FIRMWARE
+ fi
+ echo "Toolchain:$OBJDUMP"
+
+ mkdir -p ${OUTDIR}
+ sync
+ sleep 5 #wait for trace files to be written
+ if [ $(ls -1 ${DEBUG_FOLDER}/${trace_file_prefix}-* 2>/dev/null | wc -l) != 0 ]; then
+ export TRACE_FOLDER=${DEBUG_FOLDER}
+ elif [ $(ls -1 ${RELEASE_FOLDER}/${trace_file_prefix}-* 2>/dev/null | wc -l) != 0 ]; then
+ export TRACE_FOLDER=${RELEASE_FOLDER}
+ else
+ echo "Trace files not present, aborting reports..."
+ exit 0
+ fi
+ export REPORT_TITLE="Coverage Summary Report [Build:${BUILD_NUMBER}]"
+ # launch intermediate layer script
+ export CONFIG_JSON=${OUTDIR}/config_file.json
+ export OUTPUT_JSON=${OUTDIR}/output_file.json
+ export CSOURCE_FOLDER=source
+ export DEBUG_ELFS=${DEBUG_ELFS:-True}
+ prepare_json_configuration "${LIST_OF_BINARIES}" "${JENKINS_SOURCES_WORKSPACE}"
+ echo "Executing intermediate_layer.py ..."
+ python ${BMCOV_REPORT_FOLDER}/intermediate_layer.py --config-json "${CONFIG_JSON}"
+ ver_py=$(python -V 2>&1 | sed 's/.* \([0-9]\).\([0-9]\).*/\1\2/')
+ if [ "$ver_py" = "27" ]; then
+ python ${BMCOV_REPORT_FOLDER}/gen-coverage-report.py --config ${BMCOV_REPORT_FOLDER}/config_atf.py \
+ --prefix_workspace "$JENKINS_SOURCES_WORKSPACE"
+ else
+ echo "Python 2.7 is required for producing Bmcov reports"
+ fi
+ chmod 775 ${BMCOV_REPORT_FOLDER}/branch_coverage/branch_coverage.sh
+ echo "Running branch coverage..."
+ branch_folder=${OUTDIR}/lcov_report
+ mkdir -p ${branch_folder}
+ pushd ${BMCOV_REPORT_FOLDER}/branch_coverage
+ . branch_coverage.sh --workspace ${JENKINS_SOURCES_WORKSPACE} --json-path ${OUTPUT_JSON} --outdir ${branch_folder}
+ popd
+ export OUTDIR=${WORKSPACE}/html
+ # prepare static (Jenkins) and dynamic (python server) pages
+ prepare_html_pages
+ fi
fi
diff --git a/job/tf-worker/run_lava.py b/job/tf-worker/run_lava.py
new file mode 100644
index 0000000..93d522c
--- /dev/null
+++ b/job/tf-worker/run_lava.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import argparse
+import os
+import subprocess
+import sys
+import logging
+import tempfile
+import yaml
+
+
+def case_infra_error(case):
+ try:
+ if case["metadata"]["error_type"] == "Infrastructure":
+ logging.error("case %s: infra error is type Infrastructure", case["id"])
+ return False
+ elif "timed out" in case["metadata"]["error_msg"]:
+ logging.error(
+ "case %s: infra error: %s", case["id"], case["metadata"]["error_msg"]
+ )
+ return False
+ else:
+ return True
+ except KeyError:
+ return True
+
+
+def not_infra_error(path):
+ """Returns a boolean indicating if there was not an infra error"""
+ try:
+ with open(path) as file:
+ results = yaml.safe_load(file)
+ return all(case_infra_error(tc) for tc in results)
+ except FileNotFoundError:
+ logging.warning("Could not open results file %s", path)
+ return True
+
+
+def run_one_job(cmd):
+ """Run a job and return a boolean indicating if there was not an infra error.
+ Raises a `subprocess.CalledProcessError` when the called script fails.
+ """
+ subprocess.run(cmd, check=True)
+ return not_infra_error("job_results.yaml")
+
+
+def retry_job(cmd, retries):
+ """Run a job until there was not an infra error or retries are exhausted.
+ Raises a `subprocess.CalledProcessError` when the called script fails.
+ """
+ logging.debug("trying job %s up to %d times", str(cmd), retries)
+ return any(run_one_job(cmd) for _ in range(retries))
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+        description="Lava job runner with infrastructure error detection and retry."
+ )
+ parser.add_argument(
+ "script",
+ nargs="?",
+ default=os.path.join(os.path.dirname(__file__), "run_lava_job.sh"),
+ help="bash job script to run a lava job",
+ )
+ parser.add_argument(
+ "job",
+ nargs="?",
+ default=os.path.join("artefacts", os.environ["BIN_MODE"], "juno.yaml"),
+ help="the Lava job description file",
+ )
+ parser.add_argument(
+ "retries",
+ type=int,
+ nargs="?",
+ default=3,
+        help="Number of retries. Defaults to 3",
+ )
+ parser.add_argument(
+ "--save",
+ default=tempfile.mkdtemp(prefix="job-output"),
+ help="directory to store the job_output.log",
+ )
+ parser.add_argument(
+        "-v", action="count", default=0, help="Increase printing of debug output"
+ )
+ args = parser.parse_args()
+ if args.v >= 2:
+ logging.getLogger().setLevel(logging.DEBUG)
+ elif args.v >= 1:
+ logging.getLogger().setLevel(logging.INFO)
+ logging.debug(args)
+ try:
+ if not retry_job([args.script, args.job, args.save], args.retries):
+ logging.critical("All jobs failed with infra errors; retries exhausted")
+ sys.exit(-1)
+ else:
+ sys.exit(0)
+ except subprocess.CalledProcessError as e:
+ logging.critical("Job script returned error code %d", e.returncode)
+ sys.exit(e.returncode)
diff --git a/job/tf-worker/should_build_local.sh b/job/tf-worker/should_build_local.sh
index 5b47866..b3fde29 100755
--- a/job/tf-worker/should_build_local.sh
+++ b/job/tf-worker/should_build_local.sh
@@ -6,7 +6,6 @@
#
set -e
-
# If it's a Juno build-only config, or an FVP config, we do everything locally
if [ "$RUN_CONFIG" = "nil" ]; then
exit 0
@@ -17,6 +16,10 @@
exit 0;;
coverity-*)
exit 0;;
+ scan_build-*)
+ exit 0;;
+ norun-*)
+ exit 0;;
esac
# If we're not going to run Juno, then no need to spawn tf-build-for lava;
diff --git a/job/tf-worker/submit_lava_job.sh b/job/tf-worker/submit_lava_job.sh
deleted file mode 100755
index 7b47e97..0000000
--- a/job/tf-worker/submit_lava_job.sh
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/bin/bash
-#
-# Copyright (c) 2019, Arm Limited. All rights reserved.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-#
-
-# Submit jobs to LAVA and wait until the job is complete. This script replace
-# the "managed script" previously used and provide the same behavior.
-#
-# Required arguments:
-# 1: yaml job file
-# 2: flag whether to save output, true/false, defaults to false
-#
-# output:
-# job_results.yaml
-# job_output.log if save output = true
-
-set -e
-
-JOB_FILE="$1"
-SAVE_OUTPUT="$2"
-
-LAVA_HOST=
-LAVA_USER=
-LAVA_TOKEN=
-LAVA_URL=
-
-if [ ! -f "${JOB_FILE}" ]; then
- echo "error: LAVA job file does not exist: ${JOB_FILE}"
- exit 1
-fi
-
-# Install lavacli with fixes
-virtualenv -p $(which python3) venv
-source venv/bin/activate
-pip install -q lavacli
-
-# Configure lavacli
-lavacli identities add \
---username $LAVA_USER \
---token $LAVA_TOKEN \
---uri ${LAVA_URL}/RPC2 \
-default
-
-# Submit a job using lavacli
-JOB_ID=$(lavacli jobs submit ${JOB_FILE})
-if [ -z "$JOB_ID" ] ; then
- echo "Couldn't submit. Stopping."
- exit 1
-fi
-
-echo "Job url: https://lava.oss.arm.com/scheduler/job/$JOB_ID"
-
-# Wait for the job to finish
-lavacli jobs wait $JOB_ID
-
-if [ "${SAVE_OUTPUT}" = "true" ] ; then
- lavacli jobs logs $JOB_ID > job_output.log
-fi
-
-# Get results
-lavacli results $JOB_ID --yaml > job_results.yaml
-
-# Exit virtualenv
-deactivate