Performance: Pack performance data and send it to SQUAD

Pack the memory footprint and profiling data together into a performance
metrics JSON file and send it to the current QA-Report SQUAD dashboard.

The performance script now generates the memory footprint data during the
build process and saves it as a JSON file in the SHARE_FOLDER. After the
LAVA test artifacts are collected, the script gathers the profiling data
from the target log and sends all performance data to SQUAD.
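
A minimal sketch of how such a performance.py command line could be laid
out, assuming only the flags visible in the pipeline changes below
(--generate-memory, --send-squad, --squad-token); the helper names, file
layout and output file name are illustrative, not the actual
implementation:

    #!/usr/bin/env python3
    # Hypothetical sketch only: flag names match the Jenkins pipeline
    # below, everything else is an assumption for illustration.
    import argparse
    import json
    import os

    def generate_memory_data(share_folder):
        # Illustrative: collect image sizes produced by the build and
        # store them as JSON under SHARE_FOLDER/Memory_footprint/.
        out_dir = os.path.join(share_folder, "Memory_footprint")
        os.makedirs(out_dir, exist_ok=True)
        data = {"memory_footprint": {}}  # filled from build artifacts
        with open(os.path.join(out_dir, "performance.json"), "w") as f:
            json.dump(data, f, indent=2)

    def send_to_squad(share_folder, token):
        # Illustrative: merge the stored memory data with profiling
        # numbers parsed from the LAVA target log and post them to the
        # SQUAD qa-reports dashboard using the supplied token.
        path = os.path.join(share_folder, "Memory_footprint",
                            "performance.json")
        with open(path) as f:
            metrics = json.load(f)
        print("Would send %d metric groups to SQUAD" % len(metrics))

    def main():
        parser = argparse.ArgumentParser(
            description="Pack and send performance data")
        parser.add_argument("--generate-memory", action="store_true",
                            help="Generate memory footprint JSON during "
                                 "the build")
        parser.add_argument("--send-squad", action="store_true",
                            help="Send collected performance data to SQUAD")
        parser.add_argument("--squad-token",
                            help="QA-Reports authentication token")
        args = parser.parse_args()

        share_folder = os.environ.get("SHARE_FOLDER", ".")
        if args.generate_memory:
            generate_memory_data(share_folder)
        if args.send_squad:
            send_to_squad(share_folder, args.squad_token)

    if __name__ == "__main__":
        main()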

Signed-off-by: Jianliang Shen <jianliang.shen@arm.com>
Change-Id: I29ffef8fa53def896df67d1e2b1818caf435506b
diff --git a/jenkins/build-config.jpl b/jenkins/build-config.jpl
index d714897..802f0e8 100644
--- a/jenkins/build-config.jpl
+++ b/jenkins/build-config.jpl
@@ -40,18 +40,11 @@
         if (upstreamProject == "tf-m-build-and-test") {
           archiveArtifacts 'trusted-firmware-m/build/generated/**'
         }
-        if (env.SQUAD_CONFIGURATIONS == "enabled"){
+        if (upstreamProject == "tf-m-nightly-performance"){
           //Creating a folder to store memory footprint artifacts and launching the memory footprint script.
-          sh "mkdir tf-m-ci-scripts/Memory_footprint/"
-          withCredentials([string(credentialsId: 'QA_REPORTS_TOKEN', variable: 'TOKEN')]) {
-            output = sh(script: """python3 tf-m-ci-scripts/memory_footprint.py ${TOKEN}""", returnStdout: true).trim()
-          }
-          if (fileExists('tf-m-ci-scripts/Memory_footprint/filesize.json')) {
-            println("--- output from memory_footprint.py ---")
-            println(output)
-            println("--- end of output from memory_footprint.py ---")
-            archiveArtifacts 'tf-m-ci-scripts/Memory_footprint/filesize.json'
-          }
+          sh "mkdir -p ${SHARE_FOLDER}/Memory_footprint/"
+          output = sh(script: """python3 tf-m-ci-scripts/performance.py --generate-memory""", returnStdout: true).trim()
+          println(output)
         }
       }
     } catch (Exception e) {
diff --git a/jenkins/ci.jpl b/jenkins/ci.jpl
index 36b90ee..743c056 100644
--- a/jenkins/ci.jpl
+++ b/jenkins/ci.jpl
@@ -117,8 +117,6 @@
   params += string(name: 'QCBOR_VERSION', value: env.QCBOR_VERSION)
   params += string(name: 'QCBOR_URL', value: env.QCBOR_URL)
   params += string(name: 'SHARE_FOLDER', value: env.SHARE_FOLDER)
-  params += string(name: 'SQUAD_CONFIGURATIONS', value: env.SQUAD_CONFIGURATIONS)
-  params += string(name: 'SQUAD_PROFILING', value: env.SQUAD_PROFILING)
   return { -> results
     def build_res = build(job: 'tf-m-build-config', parameters: params, propagate: false)
     def build_url = build_res.getAbsoluteUrl()
@@ -407,6 +405,9 @@
                 archiveArtifacts artifacts: 'merged_report/**', allowEmptyArchive: true
               }
             }
+            withCredentials([string(credentialsId: 'QA_REPORTS_TOKEN', variable: 'TOKEN')]) {
+              sh(script: """./tf-m-ci-scripts/performance.py --send-squad --squad-token ${TOKEN} > SQUAD.log""")
+            }
           }
         }
         else {
@@ -422,10 +423,14 @@
       } finally {
         if (all_jobs.size() > 0) {
           output = readFile("output.log")
+          performance_output = readFile("SQUAD.log")
           println("--- output from lava_wait_jobs.py ---")
           println(output)
           println("--- end of output from lava_wait_jobs.py ---")
-          test_results = parseTestResults(output)
+          println("--- output from performance.py ---")
+          println(performance_output)
+          println("--- end of output from performance.py ---")
+          test_results = parseTestResults(output + performance_output)
           archiveArtifacts artifacts: 'test_summary.*', allowEmptyArchive: true
           archiveArtifacts artifacts: 'cfgs/**', allowEmptyArchive: true
           if (all_jobs.size() > 0) {