Various fixes

* Retrieve build commands from build_manager
* Fix PSA build dir
* Use different node labels for different builds
* Add script to download Jenkins artifacts
* Verify status per stage
* Move code to library
* Add ability to comment on a Gerrit change

Change-Id: I390674b7ed6cfd20e4746a2d32e708fd6855857b
Signed-off-by: Dean Birch <dean.birch@arm.com>
diff --git a/configs.py b/configs.py
index 32283d9..8b3c077 100755
--- a/configs.py
+++ b/configs.py
@@ -50,6 +50,12 @@
     build_manager.print_config_environment(config, silence_stderr=silence_stderr)
 
 
+def print_build_commands(config, group=None):
+    """Prints the raw build commands for the given configuration"""
+    build_manager = get_build_manager(group)
+    build_manager.print_build_commands(config, silence_stderr=True)
+
+
 if __name__ == "__main__":
     PARSER = argparse.ArgumentParser(description="Extract build configurations.")
     PARSER.add_argument(
@@ -61,14 +67,28 @@
         "If not specified, the available configurations are printed",
     )
     PARSER.add_argument(
+        "-b",
+        "--build_commands",
+        default=None,
+        action='store_true',
+        help="Instead of printing environment variables, print raw "
+        "build commands to be run."
+    )
+    PARSER.add_argument(
         "-g",
         "--group",
         default=[],
         action="append",
-        help="Only list configurations under a certain group. ",
-        choices=list(_builtin_configs.keys()),
+        help="Only list configurations under a certain group. "
+        "'all' will look through all configurations. "
+        "Leaving blank will just look at config 'full'.",
+        choices=list(_builtin_configs.keys())+['all'],
     )
     ARGS = PARSER.parse_args()
+    if not ARGS.group:
+        ARGS.group = ['full']
+    if ARGS.group == ['all']:
+        ARGS.group = list(_builtin_configs.keys())
 
     all_configs = set()
     for group in ARGS.group:
@@ -77,10 +97,13 @@
             all_configs.update(list_configs(group))
         else:
             try:
-                print_config_environment(ARGS.config, group=group, silence_stderr=True)
+                if not ARGS.build_commands:
+                    print_config_environment(ARGS.config, group=group, silence_stderr=True)
+                else:
+                    print_build_commands(ARGS.config, group=group)
                 break
-            except SystemExit:
-                if group == ARGS.group[-1]:
+            except (SystemExit, KeyError):
+                if group == ARGS.group[-1] or ARGS.group == []:
                     print(
                         "Could not find configuration {} in groups {}".format(
                             ARGS.config, ARGS.group
diff --git a/jenkins/build-config.jpl b/jenkins/build-config.jpl
index aea0266..b4263cd 100644
--- a/jenkins/build-config.jpl
+++ b/jenkins/build-config.jpl
@@ -6,7 +6,15 @@
 //
 //-------------------------------------------------------------------------------
 
-node("docker-amd64-xenial") {
+@Library('trustedfirmware') _
+import org.trustedfirmware.Gerrit
+
+def nodeLabel = "docker-amd64-xenial"
+if (env.COMPILER == "ARMCLANG") {
+  nodeLabel = "docker-amd64-xenial-armclang"
+}
+
+node(nodeLabel) {
   stage("Init") {
     cleanWs()
     dir("trusted-firmware-m") {
@@ -45,12 +53,39 @@
 wget -O cmsis.pack -q \${JENKINS_URL}/userContent/ARM.CMSIS.${CMSIS_VERSION}.pack
 unzip -o -d CMSIS_5 cmsis.pack
 """
+    if (env.PSA_API_SUITE != "") {
+      dir("psa-arch-tests") {
+        checkout(
+          changelog: false,
+          poll: false,
+          scm: [
+            $class: 'GitSCM',
+            branches: [[name: 'FETCH_HEAD']],
+            userRemoteConfigs: [[
+              refspec: 'refs/tags/v20.03_API1.0',
+              url: 'https://github.com/ARM-software/psa-arch-tests'
+            ]]
+          ]
+        )
+      }
+    }
   }
-  stage("Build") {
-    sh "tf-m-ci-scripts/run-build.sh 2>&1 | tee build.log"
-  }
-  stage("Post") {
-    archiveArtifacts 'trusted-firmware-m/build/install/**,build.log'
+  try {
+    verify = 1
+    stage("Build") {
+      tee("build.log") {
+        sh "tf-m-ci-scripts/run-build.sh"
+      }
+    }
+    stage("Post") {
+      archiveArtifacts 'trusted-firmware-m/build/install/**,build.log'
+    }
+  } catch (Exception e) {
+    manager.buildFailure()
+    verify = -1
+  } finally {
+    g = new Gerrit()
+    g.verifyStatusInWorkspace(verify, env.CONFIG_NAME, 'build')
     cleanWs()
   }
 }
diff --git a/jenkins/build-docs.jpl b/jenkins/build-docs.jpl
index ba5e1e1..71993b7 100644
--- a/jenkins/build-docs.jpl
+++ b/jenkins/build-docs.jpl
@@ -6,6 +6,9 @@
 //
 //-------------------------------------------------------------------------------
 
+@Library('trustedfirmware') _
+import org.trustedfirmware.Gerrit
+
 node("docker-amd64-xenial") {
   stage("Init") {
     cleanWs()
@@ -46,11 +49,20 @@
 unzip -o -d CMSIS_5 cmsis.pack
 """
   }
-  stage("Build") {
-    sh "tf-m-ci-scripts/build-docs.sh"
-  }
-  stage("Post") {
-    archiveArtifacts 'trusted-firmware-m/build/install/**'
+  try {
+    verify = 1
+    stage("Build") {
+      sh "tf-m-ci-scripts/build-docs.sh"
+    }
+    stage("Post") {
+      archiveArtifacts 'trusted-firmware-m/build/install/**'
+    }
+  } catch (Exception e) {
+    manager.buildFailure()
+    verify = -1
+  } finally {
+    g = new Gerrit()
+    g.verifyStatusInWorkspace(verify, 'tf-m-build-docs', 'build')
     cleanWs()
   }
 }
diff --git a/jenkins/checkpatch.jpl b/jenkins/checkpatch.jpl
index 5a447b5..7855cb0 100644
--- a/jenkins/checkpatch.jpl
+++ b/jenkins/checkpatch.jpl
@@ -6,22 +6,8 @@
 //
 //-------------------------------------------------------------------------------
 
-def verifyStatus(value, stage_name) {
-  withCredentials([usernamePassword(credentialsId: 'VERIFY_STATUS', passwordVariable: 'VERIFY_PASSWORD', usernameVariable: 'VERIFY_USER')]) {
-    sh """
-if [ -z "\$GERRIT_HOST" ] ; then
-  echo Not running for a Gerrit change, skipping vote.
-  exit 0
-fi
-if [ ! -d venv ] ; then
-  virtualenv -p \$(which python3) venv
-fi
-. venv/bin/activate
-pip -q install requests
-./tf-m-ci-scripts/jenkins/verify.py --value ${value} --verify-name tf-m-${stage_name} --user \$VERIFY_USER
-"""
-  }
-}
+@Library('trustedfirmware') _
+import org.trustedfirmware.Gerrit
 
 node("docker-amd64-xenial") {
   stage("Init") {
@@ -75,7 +61,8 @@
       manager.buildFailure()
       verify = -1
     } finally {
-      verifyStatus(verify, 'checkpatch')
+      g = new Gerrit()
+      g.verifyStatusInWorkspace(verify, 'checkpatch', 'static')
       cleanWs()
     }
   }
diff --git a/jenkins/ci.jpl b/jenkins/ci.jpl
index 743d6c5..e20b9a3 100644
--- a/jenkins/ci.jpl
+++ b/jenkins/ci.jpl
@@ -6,7 +6,9 @@
 //
 //-------------------------------------------------------------------------------
 
-library identifier: 'local-lib@master', retriever: legacySCM(scm)
+@Library('trustedfirmware') _
+import org.trustedfirmware.Gerrit
+import org.trustedfirmware.Summary
 
 def listConfigs(ci_scripts_dir, config_list, filter_group) {
   dir(ci_scripts_dir) {
@@ -37,6 +39,9 @@
     params += string(name: key, value: value)
   }
   params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
+  params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
+  params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
+  params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
   params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
   params += string(name: 'CMSIS_VERSION', value: env.CMSIS_VERSION)
   params += string(name: 'MBEDCRYPTO_VERSION', value: env.MBEDCRYPTO_VERSION)
@@ -64,6 +69,9 @@
 def buildDocs() {
   def params = []
   params += string(name: 'GERRIT_BRANCH', value: env.GERRIT_BRANCH)
+  params += string(name: 'GERRIT_HOST', value: env.GERRIT_HOST)
+  params += string(name: 'GERRIT_CHANGE_NUMBER', value: env.GERRIT_CHANGE_NUMBER)
+  params += string(name: 'GERRIT_PATCHSET_REVISION', value: env.GERRIT_PATCHSET_REVISION)
   params += string(name: 'GERRIT_REFSPEC', value: env.GERRIT_REFSPEC)
   params += string(name: 'CMSIS_VERSION', value: env.CMSIS_VERSION)
   params += string(name: 'MBEDCRYPTO_VERSION', value: env.MBEDCRYPTO_VERSION)
@@ -79,6 +87,7 @@
 
 
 def buildCsv(results) {
+  def summary = new Summary();
   def csvContent = summary.getBuildCsv(results)
   node("master") {
     writeCSV file: 'build_results.csv', records: csvContent, format: CSVFormat.EXCEL
@@ -87,6 +96,7 @@
 }
 
 def writeSummary(results) {
+  def summary = new Summary();
   def buildLinks = summary.getLinks(results)
   node("master") {
     writeFile file: "build_links.html", text: buildLinks
@@ -94,29 +104,6 @@
   }
 }
 
-def verifyStatus(value, stage_name) {
-  node("docker-amd64-xenial") {
-    cleanWs()
-    dir("tf-m-ci-scripts") {
-      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
-    }
-    withCredentials([usernamePassword(credentialsId: 'VERIFY_STATUS', passwordVariable: 'VERIFY_PASSWORD', usernameVariable: 'VERIFY_USER')]) {
-      sh("""
-    if [ -z "\$GERRIT_HOST" ] ; then
-      echo Not running for a Gerrit change, skipping vote.
-      exit 0
-    fi
-    if [ ! -d venv ] ; then
-      virtualenv -p \$(which python3) venv
-    fi
-    . venv/bin/activate
-    pip -q install requests
-    ./tf-m-ci-scripts/jenkins/verify.py --value ${value} --verify-name tf-m-${stage_name} --user \$VERIFY_USER
-    """)
-    }
-  }
-}
-
 def configs = []
 def builds = [:]
 
@@ -148,7 +135,8 @@
     verify = -1
   } finally {
     print("Verifying status")
-    verifyStatus(verify, 'build')
+    g = new Gerrit()
+    g.verifyStatus(verify, 'tf-m-build', 'build')
     print("Building CSV")
     buildCsv(results['builds'])
     writeSummary(results['builds'])
diff --git a/jenkins/comment.py b/jenkins/comment.py
new file mode 100755
index 0000000..edd1aeb
--- /dev/null
+++ b/jenkins/comment.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python3
+"""
+Posts a comment to Gerrit.
+"""
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+import argparse
+import json
+import os
+import sys
+import requests
+
+
+def submit_comment(base_url, auth, changeset, patchset_revision, comment):
+    post_data = {"message": comment}
+    comment_url = "{}/a/changes/{}/revisions/{}/review".format(
+        base_url, changeset, patchset_revision
+    )
+    headers = {"Content-Type": "application/json; charset=UTF-8"}
+    post = None
+    try:
+        post = requests.post(
+            comment_url, data=json.dumps(post_data), auth=auth, headers=headers,
+        )
+    except requests.exceptions.RequestException as exception:
+        print("Error posting comment to Gerrit.")
+        sys.exit(0)
+    if post.status_code == 200:
+        print("Posted comment to Gerrit successfully.")
+    else:
+        print(
+            "Could not post comment to Gerrit. Error: {} {}".format(
+                post.status_code, post.text
+            )
+        )
+
+
+if __name__ == "__main__":
+    PARSER = argparse.ArgumentParser(description="Submits a comment to a Gerrit change")
+    PARSER.add_argument("--host", help="Gerrit Host", default=os.getenv("GERRIT_HOST"))
+    PARSER.add_argument(
+        "--changeset",
+        help="Changeset in Gerrit to comment on.",
+        default=os.getenv("GERRIT_CHANGE_NUMBER"),
+    )
+    PARSER.add_argument(
+        "--patchset-revision",
+        help="Commit SHA of revision in Gerrit to comment on.",
+        default=os.getenv("GERRIT_PATCHSET_REVISION"),
+    )
+    PARSER.add_argument(
+        "--user", help="Username to authenticate as.", default=os.getenv("GERRIT_USER")
+    )
+    PARSER.add_argument(
+        "--password",
+        help="Password or token to authenticate as. "
+        "Defaults to GERRIT_PASSWORD environment variable.",
+        default=os.getenv("GERRIT_PASSWORD"),
+    )
+    PARSER.add_argument("--protocol", help="Protocol to use.", default="https")
+    PARSER.add_argument("--port", help="Port to use.", default=None)
+    PARSER.add_argument("--comment", help="Comment to send.")
+    ARGS = PARSER.parse_args()
+    submit_comment(
+        "{}://{}{}".format(
+            ARGS.protocol, ARGS.host, ":{}".format(ARGS.port) if ARGS.port else ""
+        ),
+        (ARGS.user, ARGS.password),
+        ARGS.changeset,
+        ARGS.patchset_revision,
+        ARGS.comment,
+    )
diff --git a/jenkins/cppcheck.jpl b/jenkins/cppcheck.jpl
index 6ce22b4..fb40a9f 100644
--- a/jenkins/cppcheck.jpl
+++ b/jenkins/cppcheck.jpl
@@ -6,22 +6,8 @@
 //
 //-------------------------------------------------------------------------------
 
-def verifyStatus(value, stage_name) {
-  withCredentials([usernamePassword(credentialsId: 'VERIFY_STATUS', passwordVariable: 'VERIFY_PASSWORD', usernameVariable: 'VERIFY_USER')]) {
-    sh """
-if [ -z "\$GERRIT_HOST" ] ; then
-  echo Not running for a Gerrit change, skipping vote.
-  exit 0
-fi
-if [ ! -d venv ] ; then
-  virtualenv -p \$(which python3) venv
-fi
-. venv/bin/activate
-pip -q install requests
-./tf-m-ci-scripts/jenkins/verify.py --value ${value} --verify-name tf-m-${stage_name} --user \$VERIFY_USER
-"""
-  }
-}
+@Library('trustedfirmware') _
+import org.trustedfirmware.Gerrit
 
 node("docker-amd64-xenial") {
   stage("Init") {
@@ -78,7 +64,8 @@
       manager.buildFailure()
       verify = -1
     } finally {
-      verifyStatus(verify, 'cppcheck')
+      g = new Gerrit()
+      g.verifyStatusInWorkspace(verify, 'cppcheck', 'static')
       cleanWs()
     }
   }
diff --git a/jenkins/download_artifacts.py b/jenkins/download_artifacts.py
new file mode 100755
index 0000000..5e9111b
--- /dev/null
+++ b/jenkins/download_artifacts.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python3
+#
+# Downloads artifacts from a build of tf-m-build-and-test
+#
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+import requests
+import argparse
+import os
+from urllib.parse import urljoin
+from html.parser import HTMLParser
+
+
+class UrlExtracter(HTMLParser):
+    def __init__(self):
+        super().__init__()
+        self.last_tag = None
+        self.last_link = None
+        self.last_config = None
+        self.build_artifacts = {}
+        self.build_logs = {}
+
+    def handle_starttag(self, tag, attrs):
+        for key, value in attrs:
+            if key == "href":
+                self.last_link = value
+        self.last_tag = tag
+
+    def handle_endtag(self, tag):
+        if tag == "br":
+            self.last_tag = None
+
+    def handle_data(self, data):
+        if not self.last_tag:
+            self.last_config = data.replace(": ", "").replace("\n", "")
+            return
+
+        if self.last_tag == "a":
+            if data == "Artifacts":
+                self.build_artifacts[self.last_config] = self.last_link
+            elif data == "Logs":
+                self.build_logs[self.last_config] = self.last_link
+
+
+def download_artifacts(url, save_dir):
+    if not url.endswith("/"):
+        url += "/"
+    job_page_req = requests.get(url)
+    if job_page_req.status_code != requests.codes.ok:
+        print("Issue contacting given URL")
+        return
+    print("Found build")
+    build_links_req = requests.get(urljoin(url, "artifact/build_links.html"))
+    if build_links_req.status_code != requests.codes.ok:
+        print("Given build did not have an artifact called `build_links.html`")
+        return
+    parser = UrlExtracter()
+    print("Extracting links from build_links.html")
+    parser.feed(build_links_req.text)
+    print("Links found")
+    if not os.path.exists(save_dir):
+        print("Creating directory at {}".format(save_dir))
+        os.makedirs(save_dir)
+    else:
+        print("Reusing directory at {}.".format(save_dir))
+    for config, log_url in parser.build_logs.items():
+        print("Downloading {}".format(log_url))
+        log_req = requests.get(log_url)
+        log_file_path = os.path.join(save_dir, "{}.log".format(config))
+        with open(log_file_path, "w") as log_file:
+            log_file.write(log_req.text)
+        print("Saved log to {}".format(log_file_path))
+    for config, artifacts_url in parser.build_artifacts.items():
+        zip_url = urljoin(artifacts_url, "*zip*/archive.zip")
+        print("Downloading {}".format(zip_url))
+        artifact_zip_req = requests.get(zip_url, stream=True)
+        zip_file = os.path.join(save_dir, "{}.zip".format(config))
+        with open(zip_file, "wb") as artifact_zip:
+            for chunk in artifact_zip_req.iter_content(chunk_size=8192):
+                artifact_zip.write(chunk)
+        print("Saved artifacts zip to {}".format(zip_file))
+    print("Finished")
+
+
+def main():
+    argparser = argparse.ArgumentParser()
+    argparser.add_argument(
+        "job_url", help="Url to a completed build of tf-m-build-and-test"
+    )
+    argparser.add_argument(
+        "-o", "--output_dir", default="artifacts", help="Location to save artifacts to."
+    )
+    args = argparser.parse_args()
+    download_artifacts(args.job_url, args.output_dir)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/run-build.sh b/run-build.sh
index 9c49bce..c239bbb 100755
--- a/run-build.sh
+++ b/run-build.sh
@@ -14,18 +14,20 @@
 #
 
 set -ex
+
+if [ -z "$CONFIG_NAME" ] ; then
+	echo "Set CONFIG_NAME to run a build."
+	exit 1
+fi
+
+build_commands=$(python3 tf-m-ci-scripts/configs.py -b -g all $CONFIG_NAME)
+
+if [ -z "$build_commands" ] ; then
+	echo "No build commands found."
+	exit 1
+fi
+
 mkdir trusted-firmware-m/build
 cd trusted-firmware-m/build
-cmake -G "Unix Makefiles" -DPROJ_CONFIG=`readlink -f ../configs/$PROJ_CONFIG.cmake` -DTARGET_PLATFORM=$TARGET_PLATFORM -DCOMPILER=$COMPILER -DCMAKE_BUILD_TYPE=$CMAKE_BUILD_TYPE -DBL2=$BL2 ..
-cmake --build ./ -- -j 2 install
-if [ "$TARGET_PLATFORM" == "MUSCA_A" ] ; then
-  export OFFSET1=0x200000
-  export OFFSET2=0x220000
-elif [ "$TARGET_PLATFORM" == "MUSCA_B1" ] ; then
-  export OFFSET1=0xA000000
-  export OFFSET2=0xA020000
-fi
-if [ ! -z "$OFFSET1" ] && [ ! -z "$OFFSET2" ] ; then
-  # Cleanup offset(s)?
-  srec_cat install/outputs/$TARGET_PLATFORM/mcuboot.bin -Binary -offset $OFFSET1 install/outputs/$TARGET_PLATFORM/tfm_sign.bin -Binary -offset $OFFSET2 -o install/outputs/$TARGET_PLATFORM/tfm.hex -Intel
-fi
+
+eval "set -ex ; $build_commands"
diff --git a/src/org/trustedfirmware/Gerrit.groovy b/src/org/trustedfirmware/Gerrit.groovy
new file mode 100644
index 0000000..7addf0d
--- /dev/null
+++ b/src/org/trustedfirmware/Gerrit.groovy
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+
+package org.trustedfirmware
+
+def verifyStatus(value, verify_name, category) {
+  node("docker-amd64-xenial") {
+    cleanWs()
+    dir("tf-m-ci-scripts") {
+      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+    }
+    verifyStatusInWorkspace(value, verify_name, category)
+  }
+}
+
+def verifyStatusInWorkspace(value, verify_name, category) {
+  withCredentials([usernamePassword(credentialsId: 'VERIFY_STATUS', passwordVariable: 'VERIFY_PASSWORD', usernameVariable: 'VERIFY_USER')]) {
+    sh("""
+  if [ -z "\$GERRIT_HOST" ] ; then
+    echo Not running for a Gerrit change, skipping vote.
+    exit 0
+  fi
+  if [ ! -d venv ] ; then
+    virtualenv -p \$(which python3) venv
+  fi
+  . venv/bin/activate
+  pip -q install requests
+  ./tf-m-ci-scripts/jenkins/verify.py --category ${category} --value ${value} --verify-name ${verify_name} --user \$VERIFY_USER
+  """)
+  }
+}
+
+def comment(comment) {
+  node("docker-amd64-xenial") {
+    cleanWs()
+    dir("tf-m-ci-scripts") {
+      git url: '$CI_SCRIPTS_REPO', branch: 'master', credentialsId: 'GIT_SSH_KEY'
+    }
+    commentInWorkspace(comment)
+  }
+}
+
+def commentInWorkspace(comment) {
+  withCredentials([usernamePassword(credentialsId: 'VERIFY_STATUS', passwordVariable: 'GERRIT_PASSWORD', usernameVariable: 'GERRIT_USER')]) {
+    sh("""
+  if [ -z "\$GERRIT_HOST" ] ; then
+    echo Not running for a Gerrit change, skipping.
+    exit 0
+  fi
+  if [ ! -d venv ] ; then
+    virtualenv -p \$(which python3) venv
+  fi
+  . venv/bin/activate
+  pip -q install requests
+  ./tf-m-ci-scripts/jenkins/comment.py --comment "${comment}" --user \$GERRIT_USER
+  """)
+  }
+}
diff --git a/vars/summary.groovy b/src/org/trustedfirmware/Summary.groovy
similarity index 97%
rename from vars/summary.groovy
rename to src/org/trustedfirmware/Summary.groovy
index 31a23fe..bb5b16d 100644
--- a/vars/summary.groovy
+++ b/src/org/trustedfirmware/Summary.groovy
@@ -1,4 +1,3 @@
-#!/usr/bin/env groovy
 //-------------------------------------------------------------------------------
 // Copyright (c) 2020, Arm Limited and Contributors. All rights reserved.
 //
@@ -6,6 +5,8 @@
 //
 //-------------------------------------------------------------------------------
 
+package org.trustedfirmware;
+
 @NonCPS
 def getBuildCsv(results) {
   def table = [:]
diff --git a/tfm_ci_pylib/tfm_build_manager.py b/tfm_ci_pylib/tfm_build_manager.py
index d4b4545..58a59a1 100644
--- a/tfm_ci_pylib/tfm_build_manager.py
+++ b/tfm_ci_pylib/tfm_build_manager.py
@@ -89,24 +89,57 @@
             sys.exit(1)
         config_details = self._tbm_build_cfg[config]
         argument_list = [
+            "CONFIG_NAME={}",
             "TARGET_PLATFORM={}",
             "COMPILER={}",
             "PROJ_CONFIG={}",
             "CMAKE_BUILD_TYPE={}",
             "BL2={}",
+            "PSA_API_SUITE={}"
         ]
         print(
             "\n".join(argument_list)
             .format(
+                config,
                 config_details.target_platform,
                 config_details.compiler,
                 config_details.proj_config,
                 config_details.cmake_build_type,
                 config_details.with_mcuboot,
+                getattr(config_details, 'psa_api_suit', "''")
             )
             .strip()
         )
 
+    def print_build_commands(self, config, silence_stderr=False):
+        config_details = self._tbm_build_cfg[config]
+        codebase_dir = os.path.join(os.getcwd(),"trusted-firmware-m")
+        build_dir=os.path.join(os.getcwd(),'trusted-firmware-m/build')
+        build_config = self.get_build_config(config_details, config, silence=silence_stderr, build_dir=build_dir, codebase_dir=codebase_dir)
+        build_commands = build_config['build_cmds']
+        psa_commands = build_config.get('build_psa_api', None)
+        if psa_commands:
+            manifest_command_list = []
+            # Also need manifest commands
+            if 'build_ff_ipc' in build_config:
+                manifest_command_list += [
+                    "pushd ../../psa-arch-tests/api-tests",
+                    "python3 tools/scripts/manifest_update.py",
+                    "popd",
+                    "pushd ../",
+                    "python3 tools/tfm_parse_manifest_list.py -m tools/tfm_psa_ff_test_manifest_list.yaml append",
+                    "popd",
+                ]
+            else:
+                manifest_command_list += [
+                    "pushd ..",
+                    "python3 tools/tfm_parse_manifest_list.py",
+                    "popd"
+                ]
+            psa_command_list = psa_commands.split(" ; ")
+            build_commands = manifest_command_list + ["mkdir ../../psa-arch-tests/api-tests/build","pushd ../../psa-arch-tests/api-tests/build"] + psa_command_list + ["popd"] + build_commands
+        print(" ;\n".join(build_commands))
+
     def pre_eval(self):
         """ Tests that need to be run in set-up state """
         return True
@@ -238,96 +271,7 @@
              % (self._tbm_build_cfg, self.tbm_common_cfg))
             for name, i in self._tbm_build_cfg.items():
                 # Do not modify the original config
-                build_cfg = deepcopy(self.tbm_common_cfg)
-
-                # Extract the common for all elements of config
-                for key in ["build_cmds", "required_artefacts"]:
-                    try:
-                        build_cfg[key] = deepcopy(self.tbm_common_cfg[key]
-                                                  ["all"])
-                    except KeyError as E:
-                        build_cfg[key] = []
-
-                # Extract the platform specific elements of config
-                for key in ["build_cmds", "required_artefacts"]:
-                    try:
-                        if i.target_platform in self.tbm_common_cfg[key].keys():
-                            build_cfg[key] += deepcopy(self.tbm_common_cfg[key]
-                                                       [i.target_platform])
-                    except Exception as E:
-                        pass
-
-                # Merge the two dictionaries since the template may contain
-                # fixed and combinations seed parameters
-                if i.proj_config.startswith("ConfigPsaApiTest"):
-                    #PSA API tests only
-                    #TODO i._asdict()["tfm_build_dir"] = self._tbm_work_dir
-                    cmd0 = build_cfg["config_template_psa_api"] % \
-                        {**dict(i._asdict()), **build_cfg}
-                    cmd0 += " -DPSA_API_TEST_BUILD_PATH=" + self._tbm_work_dir + \
-                            "/" + name + "/BUILD"
-
-                    if i.psa_api_suit == "FF":
-                        cmd0 += " -DPSA_API_TEST_IPC=ON"
-                        cmd2 = "cmake " + build_cfg["codebase_root_dir"] + "/../psa-arch-tests/api-tests/ " + \
-                            "-G\"Unix Makefiles\" -DTARGET=tgt_ff_tfm_" + \
-                            i.target_platform.lower() +" -DCPU_ARCH=armv8m_ml -DTOOLCHAIN=" + \
-                            i.compiler + " -DSUITE=IPC -DPSA_INCLUDE_PATHS=\"" + \
-                            build_cfg["codebase_root_dir"] + "/interface/include/"
-
-                        cmd2 += ";" + build_cfg["codebase_root_dir"] + \
-                            "/../psa-arch-tests/api-tests/platform/manifests\"" + \
-                            " -DINCLUDE_PANIC_TESTS=1 -DPLATFORM_PSA_ISOLATION_LEVEL=" + \
-                            (("2") if i.proj_config.find("TfmLevel2") > 0 else "1") + \
-                            " -DSP_HEAP_MEM_SUPP=0"
-                        if i.target_platform == "MUSCA_B1":
-                            cmd0 += " -DSST_RAM_FS=ON"
-                        build_cfg["build_ff_ipc"] = "IPC"
-                    else:
-                        cmd0 += " -DPSA_API_TEST_" + i.psa_api_suit + "=ON"
-                        cmd2 = "cmake " + build_cfg["codebase_root_dir"] + "/../psa-arch-tests/api-tests/ " + \
-                            "-G\"Unix Makefiles\" -DTARGET=tgt_dev_apis_tfm_" + \
-                            i.target_platform.lower() +" -DCPU_ARCH=armv8m_ml -DTOOLCHAIN=" + \
-                            i.compiler + " -DSUITE=" + i.psa_api_suit +" -DPSA_INCLUDE_PATHS=\"" + \
-                            build_cfg["codebase_root_dir"] + "/interface/include/\""
-
-                    cmd2 += " -DCMAKE_BUILD_TYPE=" + i.cmake_build_type
-
-                    cmd3 = "cmake --build ."
-                    build_cfg["build_psa_api"] = cmd2 + " ; " + cmd3
-
-                else:
-                    cmd0 = build_cfg["config_template"] % \
-                        {**dict(i._asdict()), **build_cfg}
-
-                try:
-                    if i.__str__().find("with_OTP") > 0:
-                        cmd0 += " -DCRYPTO_HW_ACCELERATOR_OTP_STATE=ENABLED"
-                    else:
-                        build_cfg["build_cmds"][0] += " -j 2"
-                    if cmd0.find("SST_RAM_FS=ON") < 0 and i.target_platform == "MUSCA_B1":
-                        cmd0 += " -DSST_RAM_FS=OFF -DITS_RAM_FS=OFF"
-                except Exception as E:
-                    pass
-
-                # Prepend configuration commoand as the first cmd [cmd1] + [cmd2] + [cmd3] +
-                build_cfg["build_cmds"] = [cmd0] + build_cfg["build_cmds"]
-                print("cmd0 %s\r\n" % (build_cfg["build_cmds"]))
-                if "build_psa_api" in build_cfg:
-                    print("cmd build_psa_api %s\r\n" % build_cfg["build_psa_api"])
-
-                # Set the overrid params
-                over_dict = {"_tbm_build_dir_": os.path.join(
-                    self._tbm_work_dir, name),
-                    "_tbm_code_dir_": build_cfg["codebase_root_dir"],
-                    "_tbm_target_platform_": i.target_platform}
-
-                over_params = ["build_cmds",
-                               "required_artefacts",
-                               "artifact_capture_rex"]
-                build_cfg = self.override_tbm_cfg_params(build_cfg,
-                                                         over_params,
-                                                         **over_dict)
+                build_cfg = self.get_build_config(i, name)
                 self.pre_build(build_cfg)
                 # Overrides path in expected artefacts
                 print("Loading config %s" % name)
@@ -391,6 +335,103 @@
             print("Exported build report to file:", self._tbm_report)
             save_json(self._tbm_report, full_rep)
 
+    def get_build_config(self, i, name, silence=False, build_dir=None, codebase_dir=None):
+        psa_build_dir = self._tbm_work_dir + "/" + name + "/BUILD"
+        if not build_dir:
+            build_dir = os.path.join(self._tbm_work_dir, name)
+        else:
+            psa_build_dir = os.path.join(build_dir, "../../psa-arch-tests/api-tests/build")
+        build_cfg = deepcopy(self.tbm_common_cfg)
+        if not codebase_dir:
+            codebase_dir = build_cfg["codebase_root_dir"]
+        else:
+            # Would prefer to do all with the new variable
+            # However, many things use this from build_cfg elsewhere
+            build_cfg["codebase_root_dir"] = codebase_dir
+        # Extract the common for all elements of config
+        for key in ["build_cmds", "required_artefacts"]:
+            try:
+                build_cfg[key] = deepcopy(self.tbm_common_cfg[key]
+                                          ["all"])
+            except KeyError as E:
+                build_cfg[key] = []
+        # Extract the platform specific elements of config
+        for key in ["build_cmds", "required_artefacts"]:
+            try:
+                if i.target_platform in self.tbm_common_cfg[key].keys():
+                    build_cfg[key] += deepcopy(self.tbm_common_cfg[key]
+                                               [i.target_platform])
+            except Exception as E:
+                pass
+        # Merge the two dictionaries since the template may contain
+        # fixed and combinations seed parameters
+        if i.proj_config.startswith("ConfigPsaApiTest"):
+            # PSA API tests only
+            # TODO i._asdict()["tfm_build_dir"] = self._tbm_work_dir
+            cmd0 = build_cfg["config_template_psa_api"] % \
+                   dict(dict(i._asdict()), **build_cfg)
+            cmd0 += " -DPSA_API_TEST_BUILD_PATH=" + psa_build_dir
+
+            if i.psa_api_suit == "FF":
+                cmd0 += " -DPSA_API_TEST_IPC=ON"
+                cmd2 = "cmake " + codebase_dir + "/../psa-arch-tests/api-tests/ " + \
+                       "-G\"Unix Makefiles\" -DTARGET=tgt_ff_tfm_" + \
+                       i.target_platform.lower() + " -DCPU_ARCH=armv8m_ml -DTOOLCHAIN=" + \
+                       i.compiler + " -DSUITE=IPC -DPSA_INCLUDE_PATHS=\"" + \
+                       codebase_dir + "/interface/include/"
+
+                cmd2 += ";" + codebase_dir + \
+                        "/../psa-arch-tests/api-tests/platform/manifests\"" + \
+                        " -DINCLUDE_PANIC_TESTS=1 -DPLATFORM_PSA_ISOLATION_LEVEL=" + \
+                        (("2") if i.proj_config.find("TfmLevel2") > 0 else "1") + \
+                        " -DSP_HEAP_MEM_SUPP=0"
+                if i.target_platform == "MUSCA_B1":
+                    cmd0 += " -DSST_RAM_FS=ON"
+                build_cfg["build_ff_ipc"] = "IPC"
+            else:
+                cmd0 += " -DPSA_API_TEST_" + i.psa_api_suit + "=ON"
+                cmd2 = "cmake " + codebase_dir + "/../psa-arch-tests/api-tests/ " + \
+                       "-G\"Unix Makefiles\" -DTARGET=tgt_dev_apis_tfm_" + \
+                       i.target_platform.lower() + " -DCPU_ARCH=armv8m_ml -DTOOLCHAIN=" + \
+                       i.compiler + " -DSUITE=" + i.psa_api_suit + " -DPSA_INCLUDE_PATHS=\"" + \
+                       codebase_dir + "/interface/include/\""
+
+            cmd2 += " -DCMAKE_BUILD_TYPE=" + i.cmake_build_type
+
+            cmd3 = "cmake --build ."
+            build_cfg["build_psa_api"] = cmd2 + " ; " + cmd3
+
+        else:
+            cmd0 = build_cfg["config_template"] % \
+                   dict(dict(i._asdict()), **build_cfg)
+        try:
+            if i.__str__().find("with_OTP") > 0:
+                cmd0 += " -DCRYPTO_HW_ACCELERATOR_OTP_STATE=ENABLED"
+            else:
+                build_cfg["build_cmds"][0] += " -j 2"
+            if cmd0.find("SST_RAM_FS=ON") < 0 and i.target_platform == "MUSCA_B1":
+                cmd0 += " -DSST_RAM_FS=OFF -DITS_RAM_FS=OFF"
+        except Exception as E:
+            pass
+        # Prepend configuration command as the first cmd [cmd1] + [cmd2] + [cmd3] +
+        build_cfg["build_cmds"] = [cmd0] + build_cfg["build_cmds"]
+        if not silence:
+            print("cmd0 %s\r\n" % (build_cfg["build_cmds"]))
+        if "build_psa_api" in build_cfg:
+            if not silence:
+                print("cmd build_psa_api %s\r\n" % build_cfg["build_psa_api"])
+        # Set the override params
+        over_dict = {"_tbm_build_dir_": build_dir,
+            "_tbm_code_dir_": codebase_dir,
+            "_tbm_target_platform_": i.target_platform}
+        over_params = ["build_cmds",
+                       "required_artefacts",
+                       "artifact_capture_rex"]
+        build_cfg = self.override_tbm_cfg_params(build_cfg,
+                                                 over_params,
+                                                 **over_dict)
+        return build_cfg
+
     def post_eval(self):
         """ If a single build failed fail the test """
         try: