Performance: Pack performance data to SQUAD

Pack memory footprint and profiling data together to performance
metrics JSON file. Send it to current QA-Report SQUAD dashboard.

Now the performance script generates memory footprint data during
the build process and saves it as JSON files in the SHARE_FOLDER.
After LAVA finishes testing the artifacts, the script collects the
profiling data from the target log and sends all performance data
to SQUAD.

Signed-off-by: Jianliang Shen <jianliang.shen@arm.com>
Change-Id: I29ffef8fa53def896df67d1e2b1818caf435506b
diff --git a/build_helper/build_helper_configs.py b/build_helper/build_helper_configs.py
index 0ce6c5e..46755ae 100755
--- a/build_helper/build_helper_configs.py
+++ b/build_helper/build_helper_configs.py
@@ -980,6 +980,32 @@
                 "invalid": _common_tfm_invalid_configs + []
                 }
 
+config_mem_footprint = {"seed_params": {
+               "tfm_platform":      ["arm/mps2/an521"],
+                "compiler":         ["ARMCLANG_6_13"],
+                "isolation_level":  ["1"],
+                "test_regression":  ["OFF"],
+                "test_psa_api":     ["OFF"],
+                "cmake_build_type": ["Minsizerel"],
+                "with_bl2":         [True],
+                "profile":          [""],
+                "extra_params":     [""]
+                },
+                "common_params": _common_tfm_builder_cfg,
+                "valid": [
+                    # AN521_ARMCLANG_1_Minsizerel_BL2_SMALL_PSOFF
+                    ("arm/mps2/an521", "ARMCLANG_6_13", "1",
+                     "OFF", "OFF", "Minsizerel", True, "profile_small", "PSOFF"),
+                    # AN521_ARMCLANG_2_Minsizerel_BL2_MEDIUM_PSOFF
+                    ("arm/mps2/an521", "ARMCLANG_6_13", "2",
+                     "OFF", "OFF", "Minsizerel", True, "profile_medium", "PSOFF"),
+                    # AN521_ARMCLANG_3_Minsizerel_BL2_LARGE_PSOFF
+                    ("arm/mps2/an521", "ARMCLANG_6_13", "3",
+                     "OFF", "OFF", "Minsizerel", True, "profile_large", "PSOFF"),
+                ],
+                "invalid": _common_tfm_invalid_configs + []
+                }
+
 config_prof = {"seed_params": {
                "tfm_platform":      ["arm/mps2/an521"],
                 "compiler":         ["GCC_10_3"],
@@ -1136,7 +1162,8 @@
                     "example_dma350_s": config_example_dma350_s,
                     "example_dma350_ns": config_example_dma350_ns,
 
-                    # profiling
+                    # config groups for tf-m performance monitor
+                    "mem_footprint": config_mem_footprint,
                     "profiling": config_prof,
 
                     # config groups for debug
diff --git a/jenkins/build-config.jpl b/jenkins/build-config.jpl
index d714897..802f0e8 100644
--- a/jenkins/build-config.jpl
+++ b/jenkins/build-config.jpl
@@ -40,18 +40,11 @@
         if (upstreamProject == "tf-m-build-and-test") {
           archiveArtifacts 'trusted-firmware-m/build/generated/**'
         }
-        if (env.SQUAD_CONFIGURATIONS == "enabled"){
+        if (upstreamProject == "tf-m-nightly-performance"){
           //Creating a folder to store memory footprint artifacts and launching the memory footprint script.
-          sh "mkdir tf-m-ci-scripts/Memory_footprint/"
-          withCredentials([string(credentialsId: 'QA_REPORTS_TOKEN', variable: 'TOKEN')]) {
-            output = sh(script: """python3 tf-m-ci-scripts/memory_footprint.py ${TOKEN}""", returnStdout: true).trim()
-          }
-          if (fileExists('tf-m-ci-scripts/Memory_footprint/filesize.json')) {
-            println("--- output from memory_footprint.py ---")
-            println(output)
-            println("--- end of output from memory_footprint.py ---")
-            archiveArtifacts 'tf-m-ci-scripts/Memory_footprint/filesize.json'
-          }
+          sh "mkdir -p ${SHARE_FOLDER}/Memory_footprint/"
+          output = sh(script: """python3 tf-m-ci-scripts/performance.py --generate-memory""", returnStdout: true).trim()
+          println(output)
         }
       }
     } catch (Exception e) {
diff --git a/jenkins/ci.jpl b/jenkins/ci.jpl
index 36b90ee..743c056 100644
--- a/jenkins/ci.jpl
+++ b/jenkins/ci.jpl
@@ -117,8 +117,6 @@
   params += string(name: 'QCBOR_VERSION', value: env.QCBOR_VERSION)
   params += string(name: 'QCBOR_URL', value: env.QCBOR_URL)
   params += string(name: 'SHARE_FOLDER', value: env.SHARE_FOLDER)
-  params += string(name: 'SQUAD_CONFIGURATIONS', value: env.SQUAD_CONFIGURATIONS)
-  params += string(name: 'SQUAD_PROFILING', value: env.SQUAD_PROFILING)
   return { -> results
     def build_res = build(job: 'tf-m-build-config', parameters: params, propagate: false)
     def build_url = build_res.getAbsoluteUrl()
@@ -407,6 +405,9 @@
                 archiveArtifacts artifacts: 'merged_report/**', allowEmptyArchive: true
               }
             }
+            withCredentials([string(credentialsId: 'QA_REPORTS_TOKEN', variable: 'TOKEN')]) {
+              sh(script: """./tf-m-ci-scripts/performance.py --send-squad --squad-token ${TOKEN} > SQUAD.log""")
+            }
           }
         }
         else {
@@ -422,10 +423,14 @@
       } finally {
         if (all_jobs.size() > 0) {
           output = readFile("output.log")
+          performance_output = readFile("SQUAD.log")
           println("--- output from lava_wait_jobs.py ---")
           println(output)
           println("--- end of output from lava_wait_jobs.py ---")
-          test_results = parseTestResults(output)
+          println("--- output from performance.py ---")
+          println(performance_output)
+          println("--- end of output from performance.py ---")
+          test_results = parseTestResults(output + performance_output)
           archiveArtifacts artifacts: 'test_summary.*', allowEmptyArchive: true
           archiveArtifacts artifacts: 'cfgs/**', allowEmptyArchive: true
           if (all_jobs.size() > 0) {
diff --git a/lava_helper/lava_wait_jobs.py b/lava_helper/lava_wait_jobs.py
index 34089e0..37ea4d2 100755
--- a/lava_helper/lava_wait_jobs.py
+++ b/lava_helper/lava_wait_jobs.py
@@ -22,7 +22,6 @@
 import shutil
 import logging
 import json
-import re
 from xmlrpc.client import ProtocolError
 from jinja2 import Environment, FileSystemLoader
 from lava_helper import test_lava_dispatch_credentials
@@ -141,12 +140,12 @@
         job_links += "Build link: {}\n".format(info['metadata']['build_job_url'])
         job_links += "LAVA link: {}\n".format(lava_id_to_url(job, user_args))
         job_links += "TFM LOG: {}artifact/{}/target_log.txt\n".format(os.getenv("BUILD_URL"), info['job_dir'])
-        if os.getenv('SQUAD_PROFILING') == 'enabled':
-            prof_json = os.path.join(info['job_dir'], 'prof.json')
-            target_log = os.path.join(info['job_dir'], 'target_log.txt')
-            prof_report = get_prof_psa_client_api_data(target_log, prof_json)
-            job_links += "Profiling Data: {}artifact/{}/prof.json\n".format(os.getenv("BUILD_URL"), info['job_dir'])
-            job_links += "Profiling Report:\n{}".format(prof_report)
+
+        # Save job information to share folder.
+        if os.getenv('JOB_NAME') == 'tf-m-nightly-performance':
+            with open(os.path.join(os.getenv('SHARE_FOLDER'), 'performance_config.txt'), 'a') as f:
+                f.write(info['metadata']['build_name'] + ' ' + info['job_dir'] + '\n')
+
     print(job_links)
 
 def remove_lava_dupes(results):
@@ -210,35 +209,6 @@
     if not silent:
         print("INFO: {}".format(line))
 
-def get_prof_psa_client_api_data(f_log_path, f_json_path):
-    """
-    Get PSA Client API profiling data report from target log, print and write it
-    to prof.json under job directory.
-    """
-
-    prof_report = '---------- TF-M PSA Client API Profiling Data ----------\n'
-    prof_data = {}
-
-    with open(f_log_path,'r') as f_log:
-        tfm_log = f_log.read()
-
-        # Extract TF-M PSA Client API profiling data
-        pattern = r'(secure|non-secure) ([^\n]+) average is (\d+) CPU cycles'
-        matches = re.findall(pattern, tfm_log)
-        for match in matches:
-            type, api, cpu_cycles = match
-            prof_report += '{:<15}{:<25}{:5} CPU cycles\n'.format(type, api, cpu_cycles)
-            prof_data[('s_' if type == 'secure' else 'ns_') + api.replace(' ', '_')] = cpu_cycles
-
-        try:
-            # Write result to JSON file
-            metrics = json.dumps(prof_data)
-            with open(f_json_path, 'w') as f_json:
-                f_json.write(metrics)
-        except:
-            return -1
-    return prof_report
-
 # WARNING: Setting this to >1 is a last resort, temporary stop-gap measure,
 # which will overload LAVA and jeopardize stability of the entire TF CI.
 INEFFICIENT_RETRIES = 1
diff --git a/memory_footprint.py b/memory_footprint.py
deleted file mode 100644
index f0b5be0..0000000
--- a/memory_footprint.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python
-
-#memory_footprint.py : Script for sending memory footprint data from the TFM CI
-#to a SQUAD web interface
-#
-#Copyright (c) 2020-2022, Arm Limited. All rights reserved.
-#
-#SPDX-License-Identifier: BSD-3-Clause
-
-
-import os
-import re
-import sys
-import json
-import requests
-import subprocess
-from tfm_ci_pylib import utils
-
-# SQAUD constant
-SQUAD_TOKEN            = sys.argv[1]
-SQUAD_BASE_PROJECT_URL = ("https://qa-reports.linaro.org/api/submit/tf/tf-m/")
-
-reference_configs = [
-    'AN521_ARMCLANG_1_Minsizerel_BL2',
-    'AN521_ARMCLANG_1_Minsizerel_BL2_SMALL_PSOFF',
-    'AN521_ARMCLANG_2_Minsizerel_BL2_MEDIUM_PSOFF',
-    'AN521_ARMCLANG_3_Minsizerel_BL2_LARGE_PSOFF'
-]
-
-# This function uses arm_non_eabi_size to get the sizes of a file
-#  in the build directory of tfm
-def get_file_size(filename):
-    f_path = os.path.join(os.getenv('WORKSPACE'), "trusted-firmware-m", "build", "bin", filename)
-    if os.path.exists(f_path) :
-        data_fromelf = utils.fromelf(f_path)
-        print(data_fromelf[1])  # Output of fromelf
-        return data_fromelf[0]  # Data parsed from output of fromelf
-    else :
-        print(f_path + "Not found")
-        return -1
-
-# This function creates a json file containing all the data about
-#  memory footprint and sends this data to SQUAD
-def send_file_size(git_commit, config_name, bl2_sizes, tfms_sizes):
-    url = SQUAD_BASE_PROJECT_URL + git_commit + '/' + config_name
-
-    try:
-        metrics = json.dumps({ "bl2_code_size"    : bl2_sizes["Code"],
-                               "bl2_inline_data"  : bl2_sizes["Inline Data"],
-                               "bl2_ro_data"      : bl2_sizes["RO Data"],
-                               "bl2_rw_data"      : bl2_sizes["RW Data"],
-                               "bl2_zi_data"      : bl2_sizes["ZI Data"],
-                               "spe_code_size"    : tfms_sizes["Code"],
-                               "spe_inline_data"  : tfms_sizes["Inline Data"],
-                               "spe_ro_data"      : tfms_sizes["RO Data"],
-                               "spe_rw_data"      : tfms_sizes["RW Data"],
-                               "spe_zi_data"      : tfms_sizes["ZI Data"]})
-    except:
-        return -1
-
-    headers = {"Auth-Token": SQUAD_TOKEN}
-    data= {"metrics": metrics}
-
-    try:
-        #Sending the data to SQUAD, 40s timeout
-        result = requests.post(url, headers=headers, data=data, timeout=40)
-    except:
-        return -1
-
-    with open(os.path.join(os.getenv('WORKSPACE'),
-                           "tf-m-ci-scripts",
-                           "Memory_footprint",
-                           "filesize.json"), "w") as F:
-        #Storing the json file
-        F.write(metrics)
-
-    if not result.ok:
-        print(f"Error submitting to qa-reports: {result.reason}: {result.text}")
-        return -1
-    else :
-        print ("POST request sent to project " + config_name )
-        return 0
-
-def get_git_commit_hash(repo='trusted-firmware-m'):
-    cur_dir = os.path.abspath(os.getcwd())
-
-    os.chdir(os.path.join(os.getenv('WORKSPACE'), repo)) # Going to the repo's directory
-    git_commit = os.popen('git rev-parse --short HEAD').read()[:-1]
-    os.chdir(cur_dir) # Going back to the initial directory
-
-    return git_commit
-
-def print_image_sizes(image_sizes):
-    for sec, size in image_sizes.items():
-        print("{:4}: {}".format(sec, size))
-
-if __name__ == "__main__":
-    # Export ARMClang v6.13 to ENV PATH
-    os.environ["PATH"] += os.pathsep + os.getenv('ARMCLANG_6_13_PATH')
-    if os.getenv('CONFIG_NAME') in reference_configs:
-        print("Configuration " + os.getenv('CONFIG_NAME') + " is a reference")
-        try :
-            git_commit = get_git_commit_hash("trusted-firmware-m")
-        except :
-            git_commit = -1
-
-        print("------ BL2 Memory Footprint ------")
-        bl2_sizes = get_file_size("bl2.axf")
-        print("\n\n------ TFM Secure Memory Footprint ------")
-        tfms_sizes = get_file_size("tfm_s.axf")
-
-        if (bl2_sizes != -1 and git_commit != -1) :
-            squad_config_name = os.getenv('CONFIG_NAME')
-            send_file_size(git_commit, squad_config_name, bl2_sizes, tfms_sizes)
-        else :
-            #Directory or file weren't found
-            if git_commit == -1 :
-                print("Error : trusted-firmware-m repo not found")
-            else :
-                print("Error : file not found")
diff --git a/performance.py b/performance.py
new file mode 100755
index 0000000..d201cd6
--- /dev/null
+++ b/performance.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env python3
+
+__copyright__ = '''
+/*
+ * Copyright (c) 2020-2023, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ '''
+
+import argparse
+import os
+import json
+import logging
+import re
+import requests
+from tfm_ci_pylib import utils
+
+mem_configs = {
+    'AN521_ARMCLANG_1_Minsizerel_BL2':              'AN521-ARMCC-Default-Minsizerel-BL2',
+    'AN521_ARMCLANG_1_Minsizerel_BL2_SMALL_PSOFF':  'AN521-ARMCC-Small-Minsizerel-BL2',
+    'AN521_ARMCLANG_2_Minsizerel_BL2_MEDIUM_PSOFF': 'AN521-ARMCC-Medium-Minsizerel-BL2',
+    'AN521_ARMCLANG_3_Minsizerel_BL2_LARGE_PSOFF':  'AN521-ARMCC-Large-Minsizerel-BL2'
+}
+
+profiling_configs = {
+    'AN521_GCC_1_Release_BL2_PROF':     'AN521-GCC-Level1-SFN-Release-BL2',
+    'AN521_GCC_1_Release_BL2_IPC_PROF': 'AN521-GCC-Level1-IPC-Release-BL2',
+    'AN521_GCC_2_Release_BL2_PROF':     'AN521-GCC-Level2-IPC-Release-BL2',
+    'AN521_GCC_3_Release_BL2_PROF':     'AN521-GCC-Level3-IPC-Release-BL2'
+}
+
+def get_git_commit_hash(repo='trusted-firmware-m'):
+    cur_dir = os.path.abspath(os.getcwd())
+
+    os.chdir(os.path.join(os.getenv('WORKSPACE'), repo)) # Going to the repo's directory
+    git_commit = os.popen('git rev-parse --short HEAD').read()[:-1]
+    os.chdir(cur_dir) # Going back to the initial directory
+
+    return git_commit
+
+def get_file_size(filename):
+    '''
+    This function uses fromelf of ARMCLANG to get the sizes of a file in the build binary directory of TF-M
+    '''
+    f_path = os.path.join(os.getenv('WORKSPACE'), 'trusted-firmware-m', 'build', 'bin', filename)
+    if os.path.exists(f_path) :
+        data_fromelf = utils.fromelf(f_path)
+        print(data_fromelf[1])  # Output of fromelf
+        return data_fromelf[0]  # Data parsed from output of fromelf
+    else :
+        print(f_path + 'Not found')
+        return -1
+
+def save_mem_to_json(config_name, bl2_sizes, tfm_s_sizes):
+    '''
+    This function creates a json file containing all the data about memory footprint in share folder.
+    '''
+    try:
+        metrics = json.dumps({ 'bl2_code_size'    : bl2_sizes['Code'],
+                               'bl2_inline_data'  : bl2_sizes['Inline Data'],
+                               'bl2_ro_data'      : bl2_sizes['RO Data'],
+                               'bl2_rw_data'      : bl2_sizes['RW Data'],
+                               'bl2_zi_data'      : bl2_sizes['ZI Data'],
+                               'spe_code_size'    : tfm_s_sizes['Code'],
+                               'spe_inline_data'  : tfm_s_sizes['Inline Data'],
+                               'spe_ro_data'      : tfm_s_sizes['RO Data'],
+                               'spe_rw_data'      : tfm_s_sizes['RW Data'],
+                               'spe_zi_data'      : tfm_s_sizes['ZI Data']})
+    except:
+        return -1
+
+    with open(os.path.join(os.getenv('SHARE_FOLDER'),
+                           'Memory_footprint',
+                           '{}_filesize.json'.format(config_name)), 'w') as F:
+        # Store the JSON file
+        F.write(metrics)
+    return 0
+
+def get_prof_psa_client_api_data(f_log_path):
+    '''
+    Get PSA Client API profiling data report from target log.
+    '''
+
+    prof_data = {}
+    with open(f_log_path,'r') as f_log:
+        tfm_log = f_log.read()
+
+        # Extract TF-M PSA Client API profiling data
+        pattern = r'(secure|non-secure) ([^\n]+) average is (\d+) CPU cycles'
+        matches = re.findall(pattern, tfm_log)
+        for match in matches:
+            type, api, cpu_cycles = match
+            prof_data[('s_' if type == 'secure' else 'ns_') + api.replace(' ', '_')] = cpu_cycles
+
+    return prof_data
+
+
+def send_squad(user_args, job_dir, config_name):
+    '''
+    Send performance data to SQUAD dashboard.
+    '''
+    prof_data, mem_data = {}, {}
+
+    # Generate Profiling data from target log
+    if config_name in profiling_configs.keys():
+        target_log = os.path.join(job_dir, 'target_log.txt')
+        prof_data = get_prof_psa_client_api_data(target_log)
+        config_name = profiling_configs[config_name]
+
+    # Load Memory Footprint data from share folder json files.
+    if config_name in mem_configs.keys():
+        mem_json_path = os.path.join(os.getenv('SHARE_FOLDER'), 'Memory_footprint', '{}_filesize.json'.format(config_name))
+        with open(mem_json_path, 'r') as f:
+            mem_data = json.load(f)
+        config_name = mem_configs[config_name]
+
+    # Write result to JSON file
+    metrics = json.dumps({**prof_data, **mem_data})
+    with open(os.path.join(job_dir, 'performance.json'), 'w') as f_json:
+        f_json.write(metrics)
+
+    # SQUAD constants
+    SQUAD_TOKEN = user_args.squad_token
+    SQUAD_BASE_PROJECT_URL = ('https://qa-reports.linaro.org/api/submit/tf/tf-m/')
+    url = SQUAD_BASE_PROJECT_URL + get_git_commit_hash('trusted-firmware-m') + '/' + config_name
+    headers = {'Auth-Token': SQUAD_TOKEN}
+    data= {'metrics': metrics}
+
+    # Sending the data to SQUAD, 40s timeout
+    try:
+        result = requests.post(url, headers=headers, data=data, timeout=40)
+    except:
+        return -1
+
+    if not result.ok:
+        print(f'Error submitting to qa-reports: {result.reason}: {result.text}')
+        return -1
+    else :
+        print ('POST request sent to project ' + config_name)
+        return 0
+
+def main(user_args):
+    if user_args.generate_memory:
+        # Export ARMClang v6.13 to ENV PATH
+        os.environ['PATH'] += os.pathsep + os.getenv('ARMCLANG_6_13_PATH')
+        if os.getenv('CONFIG_NAME') in mem_configs.keys():
+            print('Configuration ' + os.getenv('CONFIG_NAME') + ' is a reference')
+
+            print('---------- BL2 Memory Footprint ----------')
+            bl2_sizes = get_file_size('bl2.axf')
+
+            print('------ TF-M Secure Memory Footprint ------')
+            tfm_s_sizes = get_file_size('tfm_s.axf')
+
+            if save_mem_to_json(os.getenv('CONFIG_NAME'), bl2_sizes, tfm_s_sizes) == -1:
+                print('Memory footprint generate failed.')
+
+    if user_args.send_squad:
+        with open(os.path.join(os.getenv('SHARE_FOLDER'), 'performance_config.txt'), 'r') as f:
+            for line in f:
+                config_name, job_dir = line.split()[0], line.split()[1]
+                send_squad(user_args, job_dir, config_name)
+
+def get_cmd_args():
+    parser = argparse.ArgumentParser(description='Performance')
+    cmdargs = parser.add_argument_group('Performance')
+
+    # Configuration control
+    cmdargs.add_argument(
+        '--generate-memory', dest='generate_memory', action='store_true', default=False, help='Generate memory footprint data'
+    )
+    cmdargs.add_argument(
+        '--send-squad', dest='send_squad', action='store_true', default=False, help='Send data to SQUAD'
+    )
+    cmdargs.add_argument(
+        '--squad-token', dest='squad_token', action='store', help='SQUAD BOARD TOKEN'
+    )
+
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    logging.basicConfig(level=logging.INFO)
+    main(get_cmd_args())