Initial commit.
- qa-tools public release which includes:
- trace-based coverage tool
- quality metrics measurement and tracking setup
- associated in-source documentation.
Signed-off-by: Basil Eljuse <basil.eljuse@arm.com>
diff --git a/quality-metrics/broker-component/Dockerfile b/quality-metrics/broker-component/Dockerfile
new file mode 100644
index 0000000..2f960ec
--- /dev/null
+++ b/quality-metrics/broker-component/Dockerfile
@@ -0,0 +1,28 @@
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#======================================================================
+# FILE: Dockerfile
+#
+# DESCRIPTION: This is used for containerised deployment of the quality
+# metrics broker component.
+#======================================================================
+
+FROM alpine:3.11
+
+RUN apk update && \
+ apk upgrade && \
+ apk add --no-cache python3-dev && \
+ pip3 install --upgrade pip
+
+WORKDIR /metrics_server
+COPY . /metrics_server
+
+RUN pip3 --no-cache-dir install -r requirements.txt
+
+EXPOSE 5000
+
+CMD python3 quality_metrics_server.py
diff --git a/quality-metrics/broker-component/constants.py b/quality-metrics/broker-component/constants.py
new file mode 100644
index 0000000..32ce30f
--- /dev/null
+++ b/quality-metrics/broker-component/constants.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" constants.py:
+
+ This file contains the constants required by metrics server.
+
+"""
+
+JWT_EXPIRATION_DAYS = 365
+
+HOST = "<Host Public IP Address>"
+PORT = "8086"
+BUFF_SIZE = 10
+POLL_DELAY = 0.1
+
+LISTEN_ALL_IPS = "0.0.0.0"
+
+VALID_METRICS = [
+ 'tfm_image_size',
+ 'tfa_code_churn',
+ 'tfa_defects_stats',
+ 'tfa_defects_tracking',
+ 'tfa_complexity_stats',
+ 'tfa_complexity_tracking',
+ 'tfa_rtinstr',
+ 'tfa_image_size',
+ 'tfa_misra_defects']
+
+DATABASE_DICT = {
+ "TFM_IMAGE_SIZE": "TFM_ImageSize",
+ "TFA_CODE_CHURN": "TFA_CodeChurn",
+ "TFA_DEFECTS": "TFA_Defects",
+ "TFA_COMPLEXITY": "TFA_Complexity",
+ "TFA_RTINSTR": "TFA_RTINSTR",
+ "TFA_IMAGE_SIZE": "TFA_ImageSize",
+ "TFA_MISRA_DEFECTS": "TFA_MisraDefects"
+}
+
+SUPPORTED_API_VERSIONS = ["1.0"]
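+
+# Illustrative note: db_manager.dbManager creates one InfluxDB client attribute
+# per DATABASE_DICT entry (e.g. "TFA_COMPLEXITY" -> self.tfa_complexity_client,
+# connected to the "TFA_Complexity" database). dbManager.get_db_client() strips
+# a trailing "_stats" or "_tracking" from the metrics name, so the metrics
+# "tfa_complexity_stats" and "tfa_complexity_tracking" both resolve to
+# tfa_complexity_client.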
diff --git a/quality-metrics/broker-component/credentials.py b/quality-metrics/broker-component/credentials.py
new file mode 100644
index 0000000..7e9793a
--- /dev/null
+++ b/quality-metrics/broker-component/credentials.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" credentials.py:
+
+ Credentials class. This is for reference only.
+
+"""
+
+# SECRET_KEY is set for reference purpose only
+# It is recommended to change its value before deployment
+SECRET_KEY = 'SECRET_KEY'
+
+
+class User(object):
+ def __init__(self, id, username, password):
+ self.id = id
+ self.username = username
+ self.password = password
+
+ def __str__(self):
+ return "User(id='%s')" % self.id
+
+
+# User credentials are set for reference purpose only
+# It is recommended to change these values before deployment
+users = [
+ User(1, 'metrics_1', 'metrics_pass_1'),
+ User(2, 'metrics_2', 'metrics_pass_2'),
+ User(3, 'tfa_metrics', 'tfa_metrics_pass'),
+]
diff --git a/quality-metrics/broker-component/data_converter.py b/quality-metrics/broker-component/data_converter.py
new file mode 100644
index 0000000..5b3c74b
--- /dev/null
+++ b/quality-metrics/broker-component/data_converter.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" data_converter.py:
+
+    Data converter class. This class converts the received data into the
+    format which InfluxDB understands.
+
+"""
+
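+# Illustrative example (not taken from a real run): for the "tfm_imagesize"
+# metrics, each entry in data['data'] such as
+#     {"file": "bl2.axf", "bss": 4096, "data": 512, "text": 16384}
+# contributes the fields "bl2_b", "bl2_d" and "bl2_t" to a single point:
+#
+#     [{
+#         "measurement": "TFM_ImageSize_Statistics",
+#         "fields": {"bl2_b": 4096, "bl2_d": 512, "bl2_t": 16384},
+#         "tags": {"DataProducer": "...", "CommitID": "...", "BuildType": "..."},
+#         "time": "<metadata.git_info.commit_time>"
+#     }]
+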
+import json
+import constants
+
+
+class DataConverter:
+
+ @staticmethod
+ def convert_tfm_imagesize_data(data):
+        # Transform key names
+ data['metadata']['DataProducer'] = data['metadata'].pop(
+ 'data_producer')
+
+ data['metadata']['git_info']['CommitTitle'] = data['metadata']['git_info'].pop(
+ 'commit_title')
+ data['metadata']['git_info']['CommitID'] = data['metadata']['git_info'].pop(
+ 'commit_id')
+ data['metadata']['git_info']['GerritID'] = data['metadata']['git_info'].pop(
+ 'gerrit_id')
+ data['metadata']['git_info']['CommitURL'] = data['metadata']['git_info'].pop(
+ 'commit_url')
+ data['metadata']['git_info']['Branch'] = data['metadata']['git_info'].pop(
+ 'branch')
+
+ data['metadata']['build_info']['BuildType'] = data['metadata']['build_info'].pop(
+ 'build_type')
+ data['metadata']['build_info']['CmakeConfig'] = data['metadata']['build_info'].pop(
+ 'cmake_config')
+ data['metadata']['build_info']['Compiler'] = data['metadata']['build_info'].pop(
+ 'compiler')
+ data['metadata']['build_info']['Target'] = data['metadata']['build_info'].pop(
+ 'target')
+
+ ret = {}
+ ret['tags'] = {}
+ ret['fields'] = {}
+
+ ret['measurement'] = 'TFM_ImageSize_Statistics'
+
+ for file_info in data['data']:
+ ret['fields'][file_info['file'].rsplit(
+ '.', 1)[0] + '_b'] = file_info['bss']
+ ret['fields'][file_info['file'].rsplit(
+ '.', 1)[0] + '_d'] = file_info['data']
+ ret['fields'][file_info['file'].rsplit(
+ '.', 1)[0] + '_t'] = file_info['text']
+
+ ret['tags']['DataProducer'] = str(data['metadata']['DataProducer'])
+
+ ret['time'] = str(data['metadata']['git_info']['commit_time'])
+
+ for key in data['metadata']['git_info']:
+ if key == 'commit_time':
+ continue
+ ret['tags'][key] = str(data['metadata']['git_info'][key])
+
+ for key in data['metadata']['build_info']:
+ ret['tags'][key] = str(data['metadata']['build_info'][key])
+
+ print(ret)
+
+ return [ret]
+
+ @staticmethod
+ def convert_data(data):
+ """
+ Convert data to a dictionary containing measurement
+ name, fields and tags. It is required by InfluxDB.
+
+ :param data: data to be converted to InfluxDB format
+ """
+
+ if data['metadata']['metrics'] == 'tfm_imagesize':
+ ret = DataConverter.convert_tfm_imagesize_data(data)
+ else:
+ ret = data['data']
+
+ return ret
diff --git a/quality-metrics/broker-component/data_validator.py b/quality-metrics/broker-component/data_validator.py
new file mode 100644
index 0000000..a67b86b
--- /dev/null
+++ b/quality-metrics/broker-component/data_validator.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" data_validator.py:
+
+    JSON data validator class. This class validates the JSON data sent in
+    the request (e.g. via curl) against the JSON schema, so that the data is
+    known to be in the agreed-upon format before it is pushed to the
+    database.
+
+"""
+
+import sys
+import json
+import os.path
+import constants
+import jsonschema
+from jsonschema import validate
+
+
+class DataValidator:
+ @staticmethod
+ def validate_request_sanity(data_dict):
+ """
+ Input sanitisation/authentication in the application flow
+
+ :param: data_dict: Data to be validated
+ :return: Validation info and error code
+ """
+ if 'metrics' in data_dict['metadata'] and 'api_version' in data_dict and \
+ data_dict['metadata']['metrics'] in constants.VALID_METRICS:
+ if data_dict['api_version'] not in constants.SUPPORTED_API_VERSIONS:
+ return 'Incorrect API version', 401
+
+ filename = 'metrics-schemas/' + data_dict['metadata']['metrics'] + '_schema_' + \
+ data_dict['api_version'].replace(".", "_") + '.json'
+ if not os.path.exists(filename):
+ return filename + ' does not exist', 501
+
+ try:
+ with open(filename, 'r') as handle:
+ parsed = json.load(handle)
+ validate(data_dict, parsed)
+ sys.stdout.write('Record OK\n')
+ return 'OK', 204
+ except jsonschema.exceptions.ValidationError as ve:
+ sys.stdout.write('Record ERROR\n')
+ sys.stderr.write(str(ve) + "\n")
+ return 'Incorrect JSON Schema: ' + \
+ str(ve).split('\n', 1)[0], 400
+ else:
+ return 'Invalid schema - metrics or api version missing\n', 401
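+
+# Illustrative usage: for a payload whose metadata.metrics is "tfa_code_churn"
+# and whose api_version is "1.0", the schema file resolved above is
+# "metrics-schemas/tfa_code_churn_schema_1_0.json", and a well-formed record
+# returns ('OK', 204):
+#
+#     info, code = DataValidator.validate_request_sanity(payload)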
diff --git a/quality-metrics/broker-component/db_manager.py b/quality-metrics/broker-component/db_manager.py
new file mode 100644
index 0000000..8f2d77c
--- /dev/null
+++ b/quality-metrics/broker-component/db_manager.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+
+from __future__ import print_function
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" db_manager.py:
+
+    Database interface abstraction class. This class provides an asynchronous
+    interface between a blocking IO resource (the database) and a public
+    interface designed for high concurrency.
+
+"""
+
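+# Typical usage (illustrative), mirroring quality_metrics_server.py:
+#
+#     dbm = dbManager(app=app).start_daemon()    # spawn the writer thread
+#     validation, err_code = dbm.store(payload)  # enqueue data for writing
+#     ...
+#     dbm.stop()                                 # ask the writer thread to exit
+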
+import time
+import threading
+from queue import Queue
+from pprint import pformat
+from influxdb import InfluxDBClient
+
+import constants
+from data_converter import DataConverter
+
+
+class dbManager(object):
+ def __init__(self,
+ host=constants.HOST,
+ port=constants.PORT,
+ user=None,
+ password=None,
+ buff_size=constants.BUFF_SIZE,
+ poll_delay=constants.POLL_DELAY,
+ app=None):
+ self.queue_buff_sz = buff_size
+ self.poll_delay = poll_delay
+
+ self.db_host = host
+ self.db_port = port
+ self.db_user = user
+ self.db_pass = password
+ self.write_queue = Queue(maxsize=self.queue_buff_sz)
+ self.stop_threads = False
+ self.app = app
+
+ for key in constants.DATABASE_DICT:
+ client = InfluxDBClient(host=self.db_host,
+ port=self.db_port,
+ username=self.db_user,
+ password=self.db_pass,
+ database=constants.DATABASE_DICT[key])
+ setattr(self, key.lower() + '_client', client)
+
+ def store(self, data):
+ """
+ Places data in the FIFO to be broadcast when
+ the database is not busy
+
+ :param: data: Data to be placed in FIFO
+ """
+ validation = 'OK'
+ err_code = 204
+ try:
+ self.write_queue.put(data)
+ except Exception as e:
+ validation = "** Write to Queue Failed. ** "
+ err_code = 402
+ print(validation, e)
+ self.app.logger.error(pformat({"error_code": err_code,
+ "info": validation, "exception": e}))
+ return validation, err_code
+
+ def get_db_client(self, metrics):
+ if "stats" in metrics:
+ client = metrics.replace("_stats", "") + "_client"
+ elif "tracking" in metrics:
+ client = metrics.replace("_tracking", "") + "_client"
+ else:
+ client = metrics + "_client"
+ if hasattr(self, client):
+ return getattr(self, client)
+ else:
+            self.app.logger.error("Invalid metrics %s" % (metrics))
+
+ def write_db_direct(self, data):
+ """
+        Write data to database (will block if database is busy)
+
+ :param: data: data to be written to database
+ """
+ db_client = self.get_db_client(data['metadata']['metrics'])
+
+ converted_data = DataConverter.convert_data(data)
+
+ if db_client:
+ if db_client.write_points(converted_data):
+ self.app.logger.info(
+ "Writing to InfluxDB hosted at %s "
+ "has been successful for %s!" %
+ (self.db_host, data['metadata']['metrics']))
+ else:
+ self.app.logger.error(
+ "Writing to InfluxDB hosted at %s "
+ "has FAILED for %s!" %
+ (self.db_host, data['metadata']['metrics']))
+ else:
+ self.app.logger.error(
+ "%s database not connected.." %
+ data['metadata']['metrics'])
+
+ def start_daemon(self):
+ """
+ Spawn a new thread that will consume data in the write FIFO
+        and place it into the database
+ """
+
+ def write_db_loop():
+
+ while True:
+ try:
+ time.sleep(self.poll_delay)
+ if self.stop_threads:
+ self.app.logger.info(
+ "\n ** Shutting Down Database Writer **")
+ return
+ elif self.write_queue.qsize() > 0:
+ dt = self.write_queue.get()
+                        # Write the data to the database
+ self.write_db_direct(dt)
+ self.write_queue.task_done()
+ except Exception as e:
+ self.app.logger.error(
+ "** DB Writer Thread Failed. ** \n%s" % e)
+
+ self.db_write_thread = threading.Thread(target=write_db_loop)
+ self.db_write_thread.daemon = True
+ self.db_write_thread.start()
+ return self
+
+ def stop(self):
+ """
+ Flag which terminates db_write_threads loop
+ """
+ self.app.logger.info("** Setting stop_threads to True **")
+ self.stop_threads = True
diff --git a/quality-metrics/broker-component/docker-compose.yml b/quality-metrics/broker-component/docker-compose.yml
new file mode 100644
index 0000000..bb91628
--- /dev/null
+++ b/quality-metrics/broker-component/docker-compose.yml
@@ -0,0 +1,58 @@
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#======================================================================
+# FILE: docker-compose.yml
+#
+# DESCRIPTION: This is a docker compose file for the deployment of all
+# the components involved in the quality metrics collection setup. It
+# brings up Grafana, InfluxDB and the broker component as a multi-
+# container deployment.
+#======================================================================
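+
+# NOTE (illustrative): "grafana-volume" and "influxdb-volume" are declared as
+# external volumes, so they are expected to exist before the stack is brought
+# up, for example:
+#   docker volume create grafana-volume
+#   docker volume create influxdb-volume
+#   docker-compose up -d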
+
+version: "2"
+services:
+ grafana:
+ image: grafana/grafana
+ container_name: grafana_container
+ restart: always
+ ports:
+ - 3000:3000
+ networks:
+ - metrics_network
+ volumes:
+ - grafana-volume:/var/lib/grafana
+ - ./grafana-provisioning/:/etc/grafana/provisioning
+ environment:
+ - GF_INSTALL_PLUGINS=grafana-piechart-panel
+ influxdb:
+ image: influxdb
+ container_name: influxdb_container
+ restart: always
+ ports:
+ - 8086:8086
+ networks:
+ - metrics_network
+ volumes:
+ - influxdb-volume:/var/lib/influxdb
+ web:
+ build: .
+ ports:
+ - "5000:5000"
+ networks:
+ - metrics_network
+ depends_on:
+ - influxdb
+ links:
+ - influxdb:influx
+networks:
+ metrics_network:
+volumes:
+ grafana-volume:
+ external: true
+ influxdb-volume:
+ external: true
+
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_code_churn_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_code_churn_schema_1_0.json
new file mode 100644
index 0000000..8935e76
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_code_churn_schema_1_0.json
@@ -0,0 +1,58 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Lines_of_Change" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Lines_of_Change"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "number"
+ },
+ "Base_Tag" : {
+ "type" : "string"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Base_Tag", "Target_Tag"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_complexity_stats_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_complexity_stats_schema_1_0.json
new file mode 100644
index 0000000..0c22160
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_complexity_stats_schema_1_0.json
@@ -0,0 +1,70 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Function_ID" : {
+ "type" : "string"
+ },
+ "Score" : {
+ "type" : "number"
+ },
+ "Whitelisted" : {
+ "type" : "string"
+ },
+ "Threshold" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Function_ID", "Score", "Whitelisted", "Threshold"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "number"
+ },
+ "Base_Tag" : {
+ "type" : "string"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ },
+ "Location" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Base_Tag", "Target_Tag", "Location"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
\ No newline at end of file
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_complexity_tracking_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_complexity_tracking_schema_1_0.json
new file mode 100644
index 0000000..509a966
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_complexity_tracking_schema_1_0.json
@@ -0,0 +1,61 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Functions_Exceeding_Threshold_Not_Whitelisted" : {
+ "type" : "number"
+ },
+ "Whitelisted" : {
+ "type" : "string"
+ },
+ "Threshold" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Functions_Exceeding_Threshold_Not_Whitelisted", "Whitelisted", "Threshold"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "number"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Target_Tag"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_defects_stats_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_defects_stats_schema_1_0.json
new file mode 100644
index 0000000..04dcdf8
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_defects_stats_schema_1_0.json
@@ -0,0 +1,58 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Title" : {
+ "type" : "string"
+ },
+ "Issue_Status" : {
+ "type" : "string"
+ },
+ "URL" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Title", "Issue_Status", "URL"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Defect_ID" : {
+ "type" : "string"
+ },
+ "Measured_Date" : {
+ }
+ },
+ "required" : ["Defect_ID", "Measured_Date"],
+ "additionalProperties": false
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_defects_tracking_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_defects_tracking_schema_1_0.json
new file mode 100644
index 0000000..7a0d855
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_defects_tracking_schema_1_0.json
@@ -0,0 +1,52 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Issue_Status" : {
+ "type" : "string"
+ },
+ "Number_of_Defects" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Issue_Status", "Number_of_Defects"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Measured_Date" : {
+ }
+ },
+ "required" : ["Measured_Date"],
+ "additionalProperties": false
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_image_size_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_image_size_schema_1_0.json
new file mode 100644
index 0000000..e64e1f8
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_image_size_schema_1_0.json
@@ -0,0 +1,58 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "minProperties": 1,
+ "patternProperties" : {
+ "^BL*$" : {
+ "type" : "number"
+ }
+ }
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "BinMode" : {
+ "type" : "string"
+ },
+ "CommitID" : {
+ "type" : "string"
+ },
+ "CommitTitle" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["BinMode", "CommitID", "CommitTitle"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_misra_defects_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_misra_defects_schema_1_0.json
new file mode 100644
index 0000000..9109bd9
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_misra_defects_schema_1_0.json
@@ -0,0 +1,67 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "TotalDefects" : {
+ "type" : "number"
+ },
+ "MandatoryDefects" : {
+ "type" : "number"
+ },
+ "RequiredDefects" : {
+ "type" : "number"
+ },
+ "AdvisoryDefects" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["TotalDefects", "MandatoryDefects", "RequiredDefects", "AdvisoryDefects"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "BinMode" : {
+ "type" : "string"
+ },
+ "CommitID" : {
+ "type" : "string"
+ },
+ "CommitTitle" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["BinMode", "CommitID", "CommitTitle"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfa_rtinstr_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfa_rtinstr_schema_1_0.json
new file mode 100644
index 0000000..1801814
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfa_rtinstr_schema_1_0.json
@@ -0,0 +1,82 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Latency_EL3Entry_EL3Exit" : {
+ "type" : "number"
+ },
+ "Latency_EL3Entry_CPUPowerDown" : {
+ "type" : "number"
+ },
+ "Latency_CPUWakeup_EL3Exit" : {
+ "type" : "number"
+ },
+ "CacheFlush" : {
+ "type" : "number"
+ }
+ },
+ "oneOf": [
+ { "required": [
+ "Latency_EL3Entry_EL3Exit"
+ ]},
+ { "required": [
+ "Latency_EL3Entry_CPUPowerDown",
+ "Latency_CPUWakeup_EL3Exit",
+ "CacheFlush"
+ ]}
+ ]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "CommitID" : {
+ "type" : "string"
+ },
+ "CommitTitle" : {
+ "type" : "string"
+ },
+ "TC_Name" : {
+ "type" : "string"
+ },
+ "Cluster_ID" : {
+ "type" : "number"
+ },
+ "CPU_Core" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["CommitID", "CommitTitle", "TC_Name", "Cluster_ID", "CPU_Core"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfm_code_churn_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfm_code_churn_schema_1_0.json
new file mode 100644
index 0000000..a181c31
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfm_code_churn_schema_1_0.json
@@ -0,0 +1,58 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Lines_of_Change" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Lines_of_Change"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "string"
+ },
+ "Base_Tag" : {
+ "type" : "string"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Base_Tag", "Target_Tag"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfm_complexity_stats_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfm_complexity_stats_schema_1_0.json
new file mode 100644
index 0000000..8390692
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfm_complexity_stats_schema_1_0.json
@@ -0,0 +1,73 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Function" : {
+ "type" : "string"
+ },
+ "Score" : {
+ "type" : "number"
+ },
+ "Whitelisted" : {
+ "type" : "string"
+ },
+ "Threshold" : {
+ "type" : "number"
+ },
+ "Location" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Function", "Whitelisted", "Threshold", "Score", "Location"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Git_Tag_Date" : {
+ "type" : "string"
+ },
+ "Target_Tag" : {
+ "type" : "string"
+ },
+ "Base_Tag" : {
+ "type" : "string"
+ },
+ "Location_Tag" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Git_Tag_Date", "Target_Tag", "Base_Tag", "Location_Tag"],
+ "additionalProperties": false
+ },
+ "time" : {
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags", "time"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfm_defects_stats_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfm_defects_stats_schema_1_0.json
new file mode 100644
index 0000000..b5f819d
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfm_defects_stats_schema_1_0.json
@@ -0,0 +1,65 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ }
+ }
+ },
+ "data" : {
+ "type" : "array",
+ "minItems": 1,
+ "items" : {
+ "type" : "object",
+ "properties" :{
+ "measurement" : {
+ "type" : "string"
+ },
+ "fields" : {
+ "type" : "object",
+ "properties" : {
+ "Status" : {
+ "type" : "string"
+ },
+ "Priority" : {
+ "type" : "string"
+ },
+ "Summary" : {
+ "type" : "string"
+ },
+ "URL" : {
+ "type" : "string"
+ },
+ "Existing_Defect" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["Status", "Priority", "Summary", "URL", "Existing_Defect"]
+ },
+ "tags" : {
+ "type" : "object",
+ "properties" : {
+ "Defect_ID" : {
+ "type" : "string"
+ },
+ "Measured_Date" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["Defect_ID", "Measured_Date"],
+ "additionalProperties": false
+ }
+ }
+ },
+ "required" : ["measurement", "fields", "tags"],
+ "additionalProperties": false
+ }
+ },
+ "required" : ["api_version", "metadata", "data"]
+}
diff --git a/quality-metrics/broker-component/metrics-schemas/tfm_image_size_schema_1_0.json b/quality-metrics/broker-component/metrics-schemas/tfm_image_size_schema_1_0.json
new file mode 100644
index 0000000..30a2ffc
--- /dev/null
+++ b/quality-metrics/broker-component/metrics-schemas/tfm_image_size_schema_1_0.json
@@ -0,0 +1,88 @@
+{
+ "type" : "object",
+ "properties" : {
+ "api_version" : {
+ "type" : "string"
+ },
+ "metadata" : {
+ "type" : "object",
+ "properties" : {
+ "metrics" : {
+ "type" : "string"
+ },
+ "data_producer" : {
+ "type" : "string"
+ },
+ "git_info" : {
+ "type" : "object",
+ "properties" : {
+ "commit_time" : {
+ "type" : "string"
+ },
+ "commit_title" : {
+ "type" : "string"
+ },
+ "commit_id" : {
+ "type" : "string"
+ },
+ "commit_url" : {
+ "type" : "string"
+ },
+ "branch" : {
+ "type" : "string"
+ },
+ "gerrit_id" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["commit_time", "commit_title", "commit_id", "commit_url", "branch"],
+ "additionalProperties": false
+ },
+ "build_info" : {
+ "type" : "object",
+ "properties" : {
+ "build_type" : {
+ "type" : "string"
+ },
+ "cmake_config" : {
+ "type" : "string"
+ },
+ "compiler" : {
+ "type" : "string"
+ },
+ "target" : {
+ "type" : "string"
+ }
+ },
+ "required" : ["build_type", "cmake_config", "compiler", "target"]
+ }
+ },
+ "required" : ["metrics", "build_info", "git_info", "data_producer"],
+ "additionalProperties": false
+ },
+ "data" : {
+ "type" : "array",
+ "minItems" : 1,
+ "items" : {
+ "type" : "object",
+ "properties" : {
+ "file" : {
+ "type" : "string"
+ },
+ "bss" : {
+ "type" : "number"
+ },
+ "data" : {
+ "type" : "number"
+ },
+ "text" : {
+ "type" : "number"
+ }
+ },
+ "required" : ["file", "bss", "data", "text"],
+ "additionalProperties": false
+ }
+ }
+ },
+ "required" : ["metadata", "data", "api_version"]
+}
diff --git a/quality-metrics/broker-component/quality_metrics_server.py b/quality-metrics/broker-component/quality_metrics_server.py
new file mode 100644
index 0000000..f68f4b2
--- /dev/null
+++ b/quality-metrics/broker-component/quality_metrics_server.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python3
+
+from __future__ import print_function
+from data_validator import DataValidator
+import credentials
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" quality_metrics_server.py:
+
+    This is the broker component which accepts data from the data
+    generator scripts, performs a basic sanity check and pushes the
+    data to InfluxDB for visualisation with the Grafana component.
+    It is not mandatory to push data via the data generator scripts.
+    The request to push data to the database, in this case InfluxDB,
+    is expected to be a POST request with the right credentials and
+    should be in the agreed-upon format.
+
+"""
+
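+# Example client (illustrative only, assuming Flask-JWT's default '/auth'
+# endpoint and 'JWT' Authorization header prefix, and the reference
+# credentials from credentials.py). A data generator could push a
+# tfa_code_churn payload roughly as follows:
+#
+#     import requests
+#
+#     token = requests.post("http://localhost:5000/auth",
+#                           json={"username": "metrics_1",
+#                                 "password": "metrics_pass_1"}).json()["access_token"]
+#     payload = {
+#         "api_version": "1.0",
+#         "metadata": {"metrics": "tfa_code_churn"},
+#         "data": [{"measurement": "TFA_CodeChurn_Tracking",
+#                   "fields": {"Lines_of_Change": 1234},
+#                   "tags": {"Git_Tag_Date": 20200401,
+#                            "Base_Tag": "v2.2", "Target_Tag": "v2.3"},
+#                   "time": "2020-04-01 10:00:00 +0000"}]
+#     }
+#     requests.post("http://localhost:5000/", json=payload,
+#                   headers={"Authorization": "JWT " + token})
+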
+from pprint import pprint
+from pprint import pformat
+from db_manager import dbManager
+from flask_jwt import JWT, jwt_required
+from flask import Flask, jsonify, request
+from werkzeug.security import safe_str_cmp
+from logging.handlers import RotatingFileHandler
+
+import sys
+import json
+import random
+import logging
+import argparse
+import datetime
+
+import constants
+""" It is suggested to keep credentials.py is kept locally in the
+ system where server is running. This file has been provided
+ for reference.
+"""
+
+username_table = {u.username: u for u in credentials.users}
+userid_table = {u.id: u for u in credentials.users}
+
+
+def authenticate(username, password):
+ user = username_table.get(username, None)
+ if user and safe_str_cmp(
+ user.password.encode('utf-8'),
+ password.encode('utf-8')):
+ return user
+
+
+def identity(payload):
+ user_id = payload['identity']
+ return userid_table.get(user_id, None)
+
+
+def setup_logging(app):
+ # maxBytes and backupCount values to allow the file to rollover at a predetermined size.
+ # When the size is about to be exceeded, the file is closed and a new file is silently
+ # opened for output. Rollover occurs whenever the current log file is nearly maxBytes in length.
+ # When backupCount is non-zero, the system will save old log files by appending the extensions
+ # ‘.1’, ‘.2’ etc., to the filename.
+ file_handler = RotatingFileHandler(
+ "./flask.log",
+ maxBytes=1024 * 1024 * 1024 * 5,
+ backupCount=5)
+ file_handler.setFormatter(
+ logging.Formatter(
+ '[%(asctime)s][PID:%(process)d][%(levelname)s]'
+ '[%(lineno)s][%(name)s.%(funcName)s()] %(message)s'))
+ file_handler.setLevel(logging.INFO)
+ loggers = [app.logger]
+ for logger in loggers:
+ logger.addHandler(file_handler)
+ app.logger.setLevel(logging.INFO)
+
+
+app = Flask(__name__)
+
+setup_logging(app)
+
+logger = logging.getLogger(__name__)
+
+app.debug = True
+app.config['SECRET_KEY'] = credentials.SECRET_KEY
+app.config['JWT_EXPIRATION_DELTA'] = datetime.timedelta(
+ days=constants.JWT_EXPIRATION_DAYS)
+
+dbm = dbManager(app=app).start_daemon()
+
+jwt = JWT(app, authenticate, identity)
+
+# ----------------------- Database Methods ----------------------------------#
+
+
+def store_to_db(data_dict):
+ """
+ Use the database manager to asynchronously update the database
+
+ :param: data_dict: Dictionary containing data to be stored
+ """
+ validation, err_code = dbm.store(data_dict)
+ return validation, err_code
+
+# ----------------------- FLASK API Methods ---------------------------------- #
+
+
+@app.route('/', methods=['POST'])
+@jwt_required()
+def add_db_entry():
+ """
+ Store received data to database if validation is okay
+
+ :return: validation information and error code
+ """
+
+ data = request.get_json()
+ app.logger.debug("Received Data (POST)")
+ app.logger.debug(pformat(data))
+ # Make sure the data is valid
+ validation, err_code = DataValidator.validate_request_sanity(data)
+ if validation == "OK":
+ app.logger.info("<<<<VALIDATION OK>>>>")
+ validation, err_code = store_to_db(data)
+ else:
+ app.logger.error("<<<<VALIDATION NOT OK>>>>")
+ app.logger.error(pformat({"data": validation, "error_code": err_code}))
+ info_json = jsonify({"data": validation, "error_code": err_code})
+ return info_json, err_code
+
+
+@app.route("/")
+def home():
+ info_json = jsonify({"type": "INFO", "data": "Quality Metrics"})
+ return info_json, 200
+
+
+if __name__ == '__main__':
+ try:
+ app.run(host=constants.LISTEN_ALL_IPS, port=5000)
+ except Exception as ex:
+ template = "An exception of type {0} occurred. Arguments:\n{1!r}"
+ message = template.format(type(ex).__name__, ex.args)
+        app.logger.error(message)
+ dbm.stop()
diff --git a/quality-metrics/broker-component/requirements.txt b/quality-metrics/broker-component/requirements.txt
new file mode 100644
index 0000000..115caa2
--- /dev/null
+++ b/quality-metrics/broker-component/requirements.txt
@@ -0,0 +1,35 @@
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#===============================================================================
+# FILE: requirements.txt
+#
+# DESCRIPTION: Requirements for metrics server
+#===============================================================================
+
+attrs==19.3.0
+certifi==2020.4.5.2
+chardet==3.0.4
+click==7.1.2
+Flask==1.1.2
+Flask-JWT==0.3.2
+idna==2.9
+importlib-metadata==1.6.1
+influxdb==5.3.0
+itsdangerous==1.1.0
+Jinja2==2.11.2
+jsonschema==3.2.0
+MarkupSafe==1.1.1
+msgpack==0.6.1
+PyJWT==1.4.2
+pyrsistent==0.16.0
+python-dateutil==2.8.1
+pytz==2020.1
+requests==2.23.0
+six==1.15.0
+urllib3==1.25.9
+Werkzeug==1.0.1
+zipp==3.1.0
diff --git a/quality-metrics/data-generator/common_metrics/__init__.py b/quality-metrics/data-generator/common_metrics/__init__.py
new file mode 100644
index 0000000..7337f68
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/__init__.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" __init__.py:
+
+    __init__.py for common metrics (re-exports the complexity parser)
+
+"""
+from complexity_parser import *
diff --git a/quality-metrics/data-generator/common_metrics/common_utilities/common_utilities.sh b/quality-metrics/data-generator/common_metrics/common_utilities/common_utilities.sh
new file mode 100755
index 0000000..decb753
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/common_utilities/common_utilities.sh
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#===============================================================================
+# FILE: common_utilities.sh
+#
+# DESCRIPTION: Contains common utilities required by all the metrics
+#===============================================================================
+
+# === Function ========================================================
+# NAME: include_variables_file
+# DESCRIPTION: Includes the variables file, specific to the repository for
+#              which metrics are being computed. For example, include and
+# exclude folders are different for different repositories
+# PARAMETERS:
+# $1: File containing variables specific to the repository for which
+# metrics are computed.
+# =====================================================================
+include_variables_file()
+{
+ . ./"${1}"
+}
+
+
+# === Function ========================================================
+# NAME: cleanup_and_exit
+# DESCRIPTION: Deletes a repository, if it exists, and exits
+# =====================================================================
+cleanup_and_exit()
+{
+ # Delete the cloned repository
+ if [ -d "$REPOSITORY" ]; then
+ printf "Deleting $REPOSITORY...\n"
+ rm -rf $REPOSITORY
+ fi
+
+ printf "Exiting...\n"
+ exit
+}
+
+# === Function ========================================================
+# NAME: generate_code_churn_summary
+# DESCRIPTION: Generates the code churn summary from stats
+# PARAMETER:
+# $1: STATS
+# =====================================================================
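+# Example (illustrative): for STATS of the form
+#     "3 files changed, 10 insertions(+), 4 deletions(-)"
+# INS=10 and DEL=4 are extracted, giving a code churn of 14.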
+generate_code_churn_summary()
+{
+ INS_DEL_LOC_EXTRACT="[0-9]+ file(s)? changed, ([0-9]+) insertion(s)?\(\+\), ([0-9]+) deletion(s)?\(\-\)"
+ INS_LOC_EXTRACT="[0-9]+ file(s)? changed, ([0-9]+) insertion(s)?\(\+\)"
+ DEL_LOC_EXTRACT="[0-9]+ file(s)? changed, ([0-9]+) deletion(s)?\(\-\)"
+ if [[ $1 =~ ${INS_DEL_LOC_EXTRACT} ]]; then
+ INS=${BASH_REMATCH[2]}
+ DEL=${BASH_REMATCH[4]}
+ elif [[ $1 =~ ${INS_LOC_EXTRACT} ]]; then
+ INS=${BASH_REMATCH[2]}
+ DEL=0
+ elif [[ $1 =~ ${DEL_LOC_EXTRACT} ]]; then
+ INS=0
+ DEL=${BASH_REMATCH[2]}
+ else
+        INS=0
+        DEL=0
+ fi
+
+ CODE_CHURN=$((INS+DEL))
+ echo "$CODE_CHURN"
+}
+
+# === Function ========================================================
+# NAME: get_git_tag_date
+# DESCRIPTION: Returns the git tag date, as follows:
+# 1. tagger date is returned for annotated tag
+# 2. creator date is returned for non-annotated tag
+# =====================================================================
+get_git_tag_date()
+{
+ GIT_TAG_DATE_TIME=''
+ GIT_TAG_DATE=''
+
+ if [ -n "$1" ]; then
+ tag=$1
+ else
+ tag=$TARGET_TAG
+ fi
+ # Get tagger date for git tag in YYYY-MM-DD format
+ GIT_TAG_DATE_TIME=$(git rev-parse $tag | xargs git cat-file -p | \
+ awk '/^tagger/ { print strftime("%F",$(NF-1))}')
+ # If tagger date is not returned (in case of non-annotated tag), then get created date
+    if [ -z "${GIT_TAG_DATE_TIME}" ]; then
+ printf "\nGit tag date is \"created date\" because $tag is non-annotated...\n"
+ GIT_TAG_DATE_TIME=$(git log -1 --format=%ai $tag)
+ else
+ printf "\nGit tag date is \"tagger date\" because $tag is annotated...\n"
+ fi
+ export GIT_TAG_DATE_TIME
+ arr=($GIT_TAG_DATE_TIME)
+ export GIT_TAG_DATE=${arr[0]}
+}
+
+# === Function =================================================================
+# NAME: get_base_tag
+# DESCRIPTION: Checks if the target tag exists. If it exists, get the base tag
+# ==============================================================================
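+# Example (illustrative): for a release tag v2.3 and a TAG_PATTERN matching
+# release tags (e.g. '^v[0-9]+\.[0-9]+$'), BASE_TAG would be exported as v2.2,
+# the most recent earlier tag matching the pattern with a different tag date.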
+get_base_tag()
+{
+ # list git tag by commit date and extract the tag string
+ tagList=$(git tag | xargs -I@ git log --format=format:"%ai @%n" -1 @ | sort | awk '{print $4}')
+
+ tagArray=($tagList)
+ matched=0
+
+ prevTag=""
+ currentTag=""
+ counter=0
+ TAG_PATTERN=$1
+
+ # Check if target tag exists
+ for i in "${tagArray[@]}"; do
+ if [ "$i" == "$tag" ]; then
+ matched=1
+ currentTag=$i
+ break
+ else
+ # If not in form of vXXX.YYY, continue
+ counter=$((counter+1))
+ continue
+ fi
+ done
+
+ if [ $matched -eq 0 ]; then
+ printf "@@ Tag $tag does not exist. Please specify an existing one.\n"
+ echo "Existing Tags:"
+ git tag | xargs -I@ git log --format=format:"%ai @%n" -1 @ | sort | awk '{print $4}'
+ exit
+ fi
+
+ get_git_tag_date "$tag"
+ tag_date_1=$GIT_TAG_DATE
+
+ # Search for previous tag in the form of vXXX.YYY before the current tag
+ # Skip the current tag itself and find the first match
+ START=$((counter-1))
+ for ((i=${START};i>=0;i--)); do
+ temp_tag="${tagArray[$i]}"
+ get_git_tag_date "$temp_tag"
+ tag_date_2=$GIT_TAG_DATE
+ echo "$temp_tag $GIT_TAG_DATE $tag_date_2"
+ if [[ $temp_tag =~ $TAG_PATTERN ]] && [[ "$tag_date_1" != "$tag_date_2" ]]; then
+ prevTag=$temp_tag
+ break
+ fi
+ done
+
+ printf "@@ Tag $tag is valid\n"
+ export TARGET_TAG=$currentTag
+ export BASE_TAG=$prevTag
+ echo "@@ Target tag is $TARGET_TAG ($tag_date_1)"
+ echo "@@ Base tag is $BASE_TAG ($tag_date_2)"
+}
diff --git a/quality-metrics/data-generator/common_metrics/complexity_parser/__init__.py b/quality-metrics/data-generator/common_metrics/complexity_parser/__init__.py
new file mode 100644
index 0000000..01b3878
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/complexity_parser/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" __init__.py:
+
+ __init__.py for complexity parser
+
+"""
+
+__all__ = ["complexity_parser"]
+
+from complexity_parser import ComplexityParser
diff --git a/quality-metrics/data-generator/common_metrics/complexity_parser/complexity_parser.py b/quality-metrics/data-generator/common_metrics/complexity_parser/complexity_parser.py
new file mode 100644
index 0000000..afb3d76
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/complexity_parser/complexity_parser.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" complexity_parser.py:
+
+    Complexity parser class. This class parses the complexity log and
+    extracts the functions whose cyclomatic complexity score meets or
+    exceeds a given threshold.
+
+"""
+
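+# Illustrative input line (pmccabe-style: five numeric columns, the first one
+# being the complexity score, followed by the "location: function" identifier).
+# With a threshold of 11, this entry would be kept with a score of 12:
+#
+#     12      8       42      1052    57      lib/foo/bar.c(1052): baz_handler
+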
+import collections
+import re
+import sys
+
+
+class ComplexityParser(object):
+ """
+ Extract the following data from the complexity logs:
+ - complexity table: {filename: <complexity score>}
+ """
+
+ def __init__(self, complexityLog, threshold):
+ """ class constructor function """
+ self.complexityLog = complexityLog
+ self.complexityDict = collections.OrderedDict()
+ self.threshold = threshold
+
+ self.process_complexity_log()
+ self.process_complexity_data()
+
+ def process_complexity_log(self):
+ """ function to process complexity log and populate the complexity dictionary """
+ with open(self.complexityLog) as fp:
+ for line in fp:
+ scoreRegex = r"([0-9]+)\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+(.*)"
+ m = re.match(scoreRegex, line)
+
+ if m:
+ score = m.group(1)
+
+ self.complexityDict[m.group(2).strip()] = score
+
+ def process_complexity_data(self):
+ """ function to extract the function IDs above the complexity threshold """
+ self.complexityDict = collections.OrderedDict(
+ (k, v) for k, v in self.complexityDict.items() if int(v) >= self.threshold)
+
+ def apply_whitelist(self):
+ """ Add an additional field to indicate whitelist YES/NO """
+ tmpDict = collections.OrderedDict()
+ exclusionDict = collections.OrderedDict()
+
+ # read in the whitelist
+ with open('./whitelist.dat') as f:
+ lines = f.read().splitlines()
+
+ # construct a dictionary for the white list to deal with:
+ # FULL_DIR_FOR_EXCLUSION, FULL_FILE_FOR_EXCLUSION and function
+ for i in lines:
+ tmpK = i.split(':')[0]
+ tmpV = i.split(':')[1]
+ exclusionDict[tmpK] = tmpV
+
+ whitelist_match = 0
+
+ for k, v in self.complexityDict.items():
+ # dealing with whitelist
+ for wlK, wlV in exclusionDict.items():
+
+ if (wlV == "FULL_DIR_FOR_EXCLUSION") or (
+ wlV == "FULL_FILE_FOR_EXCLUSION"):
+ # dealing with FULL_DIR_EXCLUSION and FULL_FILE_FOR_EXCLUSION, here we compare the directory path name or
+ # file name before the ':'
+ if wlK in k.split(':')[0]:
+ whitelist_match = 1
+ else:
+ # dealing with function exclusion
+ if wlV in k.split(':')[1]:
+ whitelist_match = 1
+
+ if whitelist_match != 1:
+ newValue = v + ",NO"
+ else:
+ newValue = v + ",YES"
+
+ # add into the dictionary
+ tmpDict[k] = newValue
+
+ whitelist_match = 0
+
+ return tmpDict
+
+
+class ComplexityHTMLCreator(object):
+ """
+ Create HTML using the defect statistics
+ """
+
+ def __init__(self, complexityDict, fileName):
+ """ Class constructor function """
+ self.complexityDict = complexityDict
+ # output file name
+ self.fileName = fileName
+
+ self.create_template_head()
+ self.add_table_content()
+ self.create_template_tail()
+
+ def create_template_head(self):
+ """ Function to make the HTML template """
+ with open(self.fileName, 'w') as f:
+ f.write("<!DOCTYPE html>\n")
+ f.write("<html>\n")
+ f.write("<head>\n")
+ f.write("<style>\n")
+ f.write("table, th, td{\n")
+ f.write(" border: 1px solid black;\n")
+ f.write(" border-collapse: collapse;\n")
+ f.write("}\n")
+ f.write("</style>\n")
+ f.write("</head>\n")
+ f.write("<body>\n")
+ f.write("<table>\n")
+ f.write(" <tr>\n")
+ f.write(" <th>Function ID</th>\n")
+ f.write(" <th>In-file location</th>\n")
+ f.write(" <th>Complexity Score</th>\n")
+ f.write(" </tr>\n")
+
+ def add_table_content(self):
+ """ function to add rows for test case result summary """
+ with open(self.fileName, "a") as f:
+
+ for functionInfo, score in self.complexityDict.items():
+ if int(score) >= 10:
+ f.write(" <tr bgcolor=\"#E67E62\">\n")
+ else:
+ f.write(" <tr>\n")
+
+ # add function information
+ location = functionInfo.split(':')[0].strip()
+ functionName = functionInfo.split(':', 1)[1].strip()
+
+ # add complexity score
+ f.write(" <td>{0}</td>\n".format(functionName))
+ f.write(" <td>{0}</td>\n".format(location))
+ f.write(" <td>{0}</td>\n".format(score))
+ f.write(" </tr>\n")
+
+ def create_template_tail(self):
+ """ function to add the closing part of html """
+
+ with open(self.fileName, "a") as f:
+ f.write("</table>\n")
+ f.write("</body>\n")
+ f.write("</html>\n")
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_defects.py b/quality-metrics/data-generator/tfa_metrics/tfa_defects.py
new file mode 100755
index 0000000..8725909
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_defects.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_defects.py:
+
+ Retrieves TF-A defects from GitHub
+
+"""
+
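+# The lines printed below form the defect log format that DefectParser in
+# tfa_generate_influxdb_files.py expects when this script's output is captured,
+# e.g. (illustrative):
+#
+#     Found open bug with id: 123: Example defect title, open
+#          url for this issue is: https://github.com/ARM-software/tf-issues/issues/123
+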
+from github import GitHub, ApiError, ApiNotFoundError
+
+try:
+ token = "<GitHub Access Token>"
+ gh = GitHub(access_token=token)
+
+ # Please note that currently 'open' defects are reported
+ # In future, labels='bug' would be used for defect density
+ open_bug_issues = gh.repos(
+ 'ARM-software')('tf-issues').issues.get(state='open', labels='bug')
+
+ bugCounter = 0
+
+ TFA_URL = "https://github.com/ARM-software/tf-issues/issues/"
+
+ for issue in open_bug_issues:
+ print("Found open bug with id: %s: %s, %s" %
+ (issue.number, issue.title, issue.state))
+ bugCounter += 1
+
+ print("\t url for this issue is: %s" % (TFA_URL + str(issue.number)))
+
+ print("@@ Total number of open bugs: %d" % (bugCounter))
+
+except ApiNotFoundError as e:
+ print(e, e.request, e.response)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_generate_influxdb_files.py b/quality-metrics/data-generator/tfa_metrics/tfa_generate_influxdb_files.py
new file mode 100755
index 0000000..825c1c9
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_generate_influxdb_files.py
@@ -0,0 +1,344 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_generate_influxdb_files.py:
+
+ Parses the TF-A metrics summary files and generates JSON files
+ containing data to be written to InfluxDB.
+    Usage: python3 tfa_generate_influxdb_files.py --defectLog <defect log> \
+        --complexityLog <complexity log> --loc <code churn loc> \
+        --baseTag <base release tag> --targetTag <target release tag> \
+        --gitTagDate <tag date> --influxTime <git tag date & time>
+
+"""
+
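+# Example invocation (illustrative values):
+#     python3 tfa_generate_influxdb_files.py --defectLog defects.log \
+#         --complexityLog complexity.log --loc 1234 \
+#         --baseTag v2.2 --targetTag v2.3 \
+#         --gitTagDate 2020-04-01 --influxTime "2020-04-01 10:00:00 +0000"
+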
+import argparse
+import os
+import re
+import collections
+import string
+import time
+import json
+
+
+def load_module(name, fpath):
+ """
+ Function to return access to the module
+
+ :param: name: Module name to be loaded
+ :param: fpath: Relative path to complexity_parser.py
+ :return: Module object
+ """
+ import os
+ import imp
+ return imp.load_source(
+ name, os.path.join(
+ os.path.dirname(__file__), fpath))
+
+
+load_module(
+ "complexity_parser",
+ "../common_metrics/complexity_parser/complexity_parser.py")
+
+from complexity_parser import ComplexityParser
+
+def args_parse():
+
+ global DEFECT_LOG
+ global COMPLEXITY_LOG
+ global CODE_CHURN
+ global BASE_RELEASE_TAG
+ global TARGET_RELEASE_TAG
+ global GIT_TAG_DATE
+ global GIT_TAG_DATE_TIME
+
+ # Create parser instance and add arguments
+ parser = argparse.ArgumentParser(
+ description="TF-A quality metrics InfluxDB JSON files generator")
+ parser.add_argument("--defectLog", help="name of the defect log")
+ parser.add_argument("--complexityLog", help="name of the complexity log")
+ parser.add_argument("--loc", help="code churn statistics", required=True)
+ parser.add_argument(
+ "--baseTag",
+ help="name of the base release tag",
+ required=True)
+ parser.add_argument(
+ "--targetTag",
+ help="name of the target release tag",
+ required=True)
+ parser.add_argument("--gitTagDate", help="Git Tag Date", required=True)
+ parser.add_argument(
+ "--influxTime",
+ help="InfluxDB time, which is Git Tag Date and Time",
+ required=True)
+
+ # Parse the arguments
+ args = parser.parse_args()
+
+ if args.defectLog:
+ DEFECT_LOG = args.defectLog
+
+ if args.complexityLog:
+ COMPLEXITY_LOG = args.complexityLog
+
+ if args.loc:
+ CODE_CHURN = args.loc
+
+ if args.baseTag:
+ BASE_RELEASE_TAG = args.baseTag
+
+ if args.targetTag:
+ TARGET_RELEASE_TAG = args.targetTag
+
+ if args.gitTagDate:
+ GIT_TAG_DATE = re.sub('[-]', '', args.gitTagDate)
+
+ if args.influxTime:
+ GIT_TAG_DATE_TIME = args.influxTime
+
+
+def tfa_generate_defect_data(data):
+ """
+    Function to generate the InfluxDB JSON files for the defect data """
+
+ dict_list = []
+ runDate = time.strftime('%H:%M-%x')
+
+ # "Issue_Status" acts as an indicative field to help the viewer figure out
+ # the current status of the bug
+ defects_tracking = {
+ "metadata": {
+ "metrics": "tfa_defects_tracking"
+ },
+ "api_version": "1.0",
+ "data": [{
+ "measurement": "TFA_Defects_Tracking",
+ "fields": {
+ "Issue_Status": "{}".format("Open"),
+ "Number_of_Defects": int(len(data))
+ },
+ "tags": {
+ "Measured_Date": "{}".format(runDate)
+ },
+ }]
+ }
+
+ with open('defects_tracking.json', 'w') as fp:
+ json.dump(defects_tracking, fp)
+
+ # Write details of each defects into the other measurement called
+ # "TFA_Defects_Statistics"
+ defect_stats = {}
+ defect_stats["data"] = []
+ defect_stats["metadata"] = {}
+ defect_stats["metadata"]["metrics"] = "tfa_defects_stats"
+ defect_stats["api_version"] = "1.0"
+ for ID, description in data.items():
+ json_body = {
+ "measurement": "TFA_Defects_Statistics",
+ "fields": {
+ "Title": "{}".format(description['title']),
+ "Issue_Status": "{}".format(description['state']),
+ "URL": "{}".format(description['url'])
+ },
+ "tags": {
+ "Defect_ID": "{}".format(ID),
+ "Measured_Date": "{}".format(runDate)
+ }
+ }
+
+ defect_stats["data"].append(json_body)
+
+ with open('defects_statistics.json', 'w') as fp:
+ json.dump(defect_stats, fp)
+
+
+def tfa_generate_codechurn_data(data, base_tag, target_tag):
+ """
+ Generates InfluxDB data for TF-A code churn and
+ writes that to code_churn.json file.
+
+ :param: data: Lines of change
+ :param: base_tag: Release tag prior to target_tag
+ :param: target_tag: Tag being tested
+ """
+
+ json_body = {
+ "metadata": {
+ "metrics": "tfa_code_churn"
+ },
+ "api_version": "1.0",
+ "data": [{
+ "measurement": "TFA_CodeChurn_Tracking",
+ "fields": {
+ "Lines_of_Change": int(data)
+ },
+ "tags": {
+ "Git_Tag_Date": int(GIT_TAG_DATE),
+ "Base_Tag": "{}".format(base_tag),
+ "Target_Tag": "{}".format(target_tag)
+ },
+ "time": GIT_TAG_DATE_TIME
+ }]
+ }
+
+ with open('code_churn.json', 'w') as fp:
+ json.dump(json_body, fp)
+
+
+def tfa_generate_complexity_data(data, base_tag, target_tag, threshold):
+ """
+ Generates InfluxDB data for TF-A complexity scores and
+ writes that to complexity stats and tracking json files.
+
+ :param: data: Complexity data
+ :param: base_tag: Release tag prior to target_tag
+ :param: target_tag: Tag being tested
+ :param: threshold: Complexity threshold
+ """
+
+ complexity_stats = {}
+ complexity_stats["data"] = []
+ complexity_stats["metadata"] = {}
+ complexity_stats["metadata"]["metrics"] = "tfa_complexity_stats"
+ complexity_stats["api_version"] = "1.0"
+
+ totalComplexity = 0
+
+ print(
+ "@@ Number of functions with complexity score > %d: %d" %
+ (threshold, len(data)))
+
+ for k, v in data.items():
+ # Extract the location and function name
+ location = k.split(':', 1)[0].strip()
+ functionID = k.split(':', 1)[1].strip()
+ json_body = {
+ "measurement": "TFA_Complexity_Statistics",
+ "fields": {
+ "Function_ID": "{}".format(functionID),
+ "Score": int(v),
+ "Whitelisted": "{}".format("no"),
+ "Threshold": int(threshold)
+ },
+ "tags": {
+ "Location": "{}".format(location),
+ "Git_Tag_Date": int(GIT_TAG_DATE),
+ "Base_Tag": "{}".format(base_tag),
+ "Target_Tag": "{}".format(target_tag)
+ },
+ "time": GIT_TAG_DATE_TIME
+ }
+
+ complexity_stats["data"].append(json_body)
+ totalComplexity += int(v)
+
+ with open('complexity_stats.json', 'w') as fp:
+ json.dump(complexity_stats, fp)
+
+ totalExceedThreshold = len(data)
+ complexity_tracking = {
+ "metadata": {
+ "metrics": "tfa_complexity_tracking"
+ },
+ "api_version": "1.0",
+ "data": [{
+ "measurement": "TFA_Complexity_Tracking",
+ "fields": {
+ "Threshold": int(threshold),
+ "Whitelisted": "{}".format("no"),
+ "Functions_Exceeding_Threshold_Not_Whitelisted": int(totalExceedThreshold)
+ },
+ "tags": {
+ "Git_Tag_Date": int(GIT_TAG_DATE),
+ "Target_Tag": "{}".format(target_tag)
+ },
+ "time": GIT_TAG_DATE_TIME
+ }]
+ }
+
+ with open('complexity_tracking.json', 'w') as fp:
+ json.dump(complexity_tracking, fp)
+
+
+class DefectParser(object):
+ """
+ Extract the following data from the defect/complexity logs:
+ - defect list: {test class ID:{title: <title>, link: <URL>}}
+ - int variable: total number of defects
+ """
+
+ def __init__(self, defectLog):
+ self.defectLog = defectLog
+ self.defectDict = collections.OrderedDict()
+
+ self.process_defect_log()
+
+ def process_defect_log(self):
+ """
+ Function to process defect log and populate the defect dictionary
+ """
+ with open(self.defectLog) as fp:
+ content = fp.readlines()
+
+ baseURL = "https://github.com/ARM-software/tf-issues/issues/"
+
+ # Get defect id, title and URL link to populate the defect dictionary
+ for i in content:
+ i_strip = i.strip()
+
+ titleIDRegex = "^Found open bug with id: ([0-9]+): (.*)"
+            mIDTitle = re.match(titleIDRegex, i_strip)
+
+ if mIDTitle:
+ defectID = mIDTitle.group(1)
+ defectTitle = mIDTitle.group(2)
+ defectURL = baseURL + mIDTitle.group(1)
+
+ self.defectDict[defectID] = {}
+ self.defectDict[defectID]['title'] = defectTitle.split(',')[0]
+ self.defectDict[defectID]['url'] = defectURL
+ self.defectDict[defectID]['state'] = defectTitle.split(',')[1]
+
+
+if __name__ == "__main__":
+
+ # Initialise global variables
+ DEFECT_LOG = ""
+ COMPLEXITY_LOG = ""
+ CODE_CHURN = 0
+ BASE_RELEASE_TAG = 0
+ TARGET_RELEASE_TAG = 0
+    # Functions having pmccabe cyclomatic complexity >= TFA_THRESHOLD
+ # are reported
+ TFA_THRESHOLD = 11
+ GIT_TAG_DATE = ""
+
+ # parse arguments
+ args_parse()
+
+ # Generate defect data
+ defectData = DefectParser(DEFECT_LOG)
+
+ # Generate complexity data
+ complexityData = ComplexityParser(COMPLEXITY_LOG, TFA_THRESHOLD)
+
+ tfa_generate_defect_data(defectData.defectDict)
+
+ tfa_generate_codechurn_data(
+ CODE_CHURN,
+ BASE_RELEASE_TAG,
+ TARGET_RELEASE_TAG)
+
+ tfa_generate_complexity_data(
+ complexityData.complexityDict,
+ BASE_RELEASE_TAG,
+ TARGET_RELEASE_TAG,
+ TFA_THRESHOLD)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_quality_metrics.sh b/quality-metrics/data-generator/tfa_metrics/tfa_quality_metrics.sh
new file mode 100755
index 0000000..cc920d9
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_quality_metrics.sh
@@ -0,0 +1,353 @@
+#!/usr/bin/env bash
+
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#======================================================================
+# FILE: tfa_quality_metrics.sh
+#
+# DESCRIPTION: script to track defects and calculate complexity score for arm-trusted-firmware
+#
+# USAGE: ./tfa_quality_metrics.sh --tag <release tag>
+#
+#======================================================================
+. ../common_metrics/common_utilities/common_utilities.sh
+. ./tfa_variables.sh
+
+# === Function ========================================================
+# NAME: clone_git_repo
+# DESCRIPTION: Clones the repository via "git clone" command
+# =====================================================================
+clone_git_repo()
+{
+ REPO_URL=$1
+ REPOSITORY=$(basename $REPO_URL .git)
+ # If repository already exists, then return from this function
+ if [ -d $REPOSITORY ]; then
+ printf "\nRepository \"$REPOSITORY\" already exists."
+ return
+ fi
+
+ # Clone repo. If it doesn't exist, then exit.
+ printf "\nCloning $REPOSITORY...\n"
+ printf "git clone $REPO_URL\n"
+ clone_err=$(git clone "$REPO_URL" 2>&1 | grep "fatal")
+
+ if [[ ! -z $clone_err ]]; then
+ printf "Repository \"$REPOSITORY\" not found. Exiting...\n"
+ exit
+ fi
+}
+
+# === Function ========================================================
+# NAME: tag_validation
+# DESCRIPTION: Invokes get_base_tag which retrieves the base tag if the target
+# tag is valid
+# PARAMETER:
+# $1: tag id
+# =====================================================================
+tag_validation()
+{
+ tag=$1
+
+ # check that tag actually exists
+ pushd arm-trusted-firmware
+ get_base_tag "^v[0-9]+\.[0-9]+$"
+ popd
+}
+
+# === Function ========================================================
+# NAME: generate_defect_summary
+# DESCRIPTION: Calculates the number of the total defects
+# PARAMETER:
+# $1: output defect log
+# =====================================================================
+generate_defect_summary()
+{
+ # copy the github module to this level
+ cp $DIR/./githubpy/github.py .
+ cp $DIR/./githubpy/setup.py .
+
+ python3 $DIR/tfa_defects.py > $DEFECT_LOG
+}
+
+# === Function ========================================================
+# NAME: get_complexity_score
+# DESCRIPTION: Finds cyclomatic complexity of all the C/C++ files.
+# =====================================================================
+get_complexity_score()
+{
+ complexity_dir="$(basename $TFA_REPO .git)"
+
+ # check the availability of pmccabe
+ validation=$(which pmccabe)
+ if [ -z "$validation" ]; then
+        printf "pmccabe not found. Aborting test...\n"
+ exit
+ fi
+
+ # find out complexity on computed folder
+ pmccabe -vt `find $complexity_dir -name "*.c"` `find $complexity_dir -name "*.cpp"` > $COMPLEXITY_LOG
+}
+
+# === Function ========================================================
+# NAME: complexity_score
+# DESCRIPTION: Calculates the McCabe complexity score
+# =====================================================================
+complexity_score()
+{
+    # checkout the tag before running pmccabe
+ pushd $DIR/arm-trusted-firmware
+
+ echo "git checkout ${TARGET_TAG}"
+ git checkout ${TARGET_TAG}
+ git status
+
+ # exclude subfolders under plat except for 'arm' and 'common'
+ mv plat tmp_plat
+ mkdir plat
+ cp -rp tmp_plat/arm tmp_plat/common tmp_plat/compat plat 2>/dev/null
+ rm -rf tmp_plat
+
+ # exclude subfolders under lib
+ rm -rf lib/stdlib
+ rm -rf lib/libfdt
+ rm -rf lib/compiler-rt
+
+ # exclude tools
+ rm -rf tools
+
+ # exclude services/spd except for 'tspd'
+ mv services/spd services/safe_spd
+ mkdir services/spd
+ cp -rp services/safe_spd/tspd services/spd 2>/dev/null
+ rm -rf services/safe_spd
+
+ popd
+
+ get_complexity_score
+}
+
+# === Function ========================================================
+# NAME: code_churn_summary
+# DESCRIPTION: Function to get code churn summary
+# PARAMETER:
+# $1: code churn log
+# =====================================================================
+code_churn_summary()
+{
+ pushd $DIR/arm-trusted-firmware
+
+ echo "@@ Calculating code churn excluding plat folder..."
+
+ # Calculate code churn
+ stats1=$(git diff --stat $BASE_TAG $TARGET_TAG -- . ':!plat' | grep -E "[0-9]+ file(s)? changed,")
+ CODE_CHURN1=$(generate_code_churn_summary "$stats1")
+
+ echo "@@ Calculating code churn plat/arm and plat/common folder..."
+ stats2=$(git diff --stat $BASE_TAG $TARGET_TAG -- 'plat/arm' 'plat/common' | grep -E "[0-9]+ file(s)? changed,")
+ CODE_CHURN2=$(generate_code_churn_summary "$stats2")
+
+ CODE_CHURN=$((CODE_CHURN1+CODE_CHURN2))
+ echo "Code churn: $CODE_CHURN LOC" | tee $DIR/$CODE_CHURN_LOG
+
+ # get tagger date for git tag in YYYY-MM-DD format
+ get_git_tag_date
+
+ popd
+
+ echo $CODE_CHURN
+}
+
+# === Function ========================================================
+# NAME: write_influxdb_data
+# DESCRIPTION: Function to generate JSON files containing DB data
+# =====================================================================
+write_influxdb_data()
+{
+ # Create a result folder using the current time stamp and
+ # copy InfluxDB json files to it
+ local resultDir=$(date +%Y-%m-%d_%H_%M_%S)
+ local_result=$DIR/$resultDir
+
+ mkdir -p $local_result
+ mv *.json *.txt $local_result
+
+ pushd $local_result
+
+ for json_file in *.json; do
+ curl -X POST -H "Content-Type: application/json" -d "$(cat ${json_file})" \
+ "http://${INFLUX_HOST}:5000" -H "${TFA_METRICS_AUTH_TOKEN}"
+ done
+
+ popd
+}
+
+# === Function ========================================================
+# NAME: generate_defect_codechurn_complexity_data
+# DESCRIPTION: Function to generate defects, code churn and complexity
+# quality metrics data for given tag.
+# =====================================================================
+generate_defect_codechurn_complexity_data()
+{
+ # Remove files from previous run, if any
+ rm -rf arm-trusted-firmware/ github* setup.py
+
+ clone_git_repo $TFA_REPO
+ clone_git_repo $GITHUBPY_REPO
+
+ # validate TARGET_TAG and get base tag
+ tag_validation $TARGET_TAG
+
+ # do defect statistics
+ generate_defect_summary
+
+ # cyclomatic complexity check
+ complexity_score
+
+ # code churn
+ code_churn_summary
+
+ # Create InfluxDB json files to be written to InfluxDB
+ python3 $DIR/tfa_generate_influxdb_files.py --defectLog $DEFECT_LOG \
+ --complexityLog $COMPLEXITY_LOG --loc $CODE_CHURN --baseTag $BASE_TAG \
+ --targetTag $TARGET_TAG --gitTagDate $GIT_TAG_DATE --influxTime "$GIT_TAG_DATE_TIME"
+}
+
+# === Function ========================================================
+# NAME: usage
+# DESCRIPTION: Function to print script usage
+# =====================================================================
+usage()
+{
+ # print usage common to all files
+ printf "USAGE: $(basename $0) [options]\n"
+ printf "\t params: \n"
+ printf "\t -h|--help print help information\n"
+ printf "\t --tag user specified release tag\n"
+ printf "\t --metric_type [ runtime_instrumentation | image_size | coverity_misra ]*\n"
+ printf "\t --rt_instr Path to file containing instrumentation data\n"
+ printf "\t Required when metric_type is runtime_instrumentation\n"
+ printf "\t --image_size_file Path to file containing image size data\n"
+ printf "\t Required when metric_type is image_size\n"
+ printf "\t --misra_defects_file Path to file containing MISRA defects information\n"
+ printf "\t Required when metric_type is coverity_misra\n"
+ printf "* By default, code coverage, defects and complexity metrics are generated for given tag\n"
+ printf "When metric_type is specified, corresponding data file to be parsed is also required\n"
+ exit
+}
+
+# === Function ========================================================
+# NAME: generate_tfa_metrics_data
+# DESCRIPTION: Function to generate InfluxDB JSON file for specified
+# TF-A metrics - run time instrumentation/image size/MISRA defects
+# =====================================================================
+generate_tfa_metrics_data()
+{
+ case $METRIC_TYPE in
+ runtime_instrumentation)
+ if [[ ! -f $RTINSTR_FILE ]]; then
+ echo "$RTINSTR_FILE doesn't exist.. Exiting.."
+ exit 1
+ else
+ python3 tfa_rt_instr.py --rt_instr $RTINSTR_FILE
+ fi
+ ;;
+ image_size)
+ if [[ ! -f $IMAGE_SIZE_FILE ]]; then
+ echo "$IMAGE_SIZE_FILE doesn't exist.. Exiting.."
+ exit 1
+ else
+ python3 tfa_track_image_size.py --image_size_file $IMAGE_SIZE_FILE
+ fi
+ ;;
+ coverity_misra)
+ if [[ ! -f $MISRA_DEFECTS_FILE ]]; then
+ echo "$MISRA_DEFECTS_FILE doesn't exist.. Exiting.."
+ exit 1
+ else
+ python3 tfa_track_misra_defects.py --misra_defects_file $MISRA_DEFECTS_FILE
+ fi
+ ;;
+ esac
+ write_influxdb_data
+ exit
+}
+
+# === Function ========================================================
+# NAME: parse_args
+# DESCRIPTION: Arguments parser function
+# =====================================================================
+parse_args()
+{
+ # parse the arguments
+ while [[ $# -gt 0 ]]
+ do
+ key="$1"
+ case $key in
+ -h|--help)
+ usage
+ ;;
+ --tag)
+ export TARGET_TAG="$2"
+ shift
+ shift
+ ;;
+ --metric_type)
+ export METRIC_TYPE="$2"
+ shift
+ shift
+ ;;
+ --rt_instr_file)
+ export RTINSTR_FILE="$2"
+ shift
+ shift
+ ;;
+ --image_size_file)
+ export IMAGE_SIZE_FILE="$2"
+ shift
+ shift
+ ;;
+ --misra_defects_file)
+ export MISRA_DEFECTS_FILE="$2"
+ shift
+ shift
+ ;;
+ *)
+ echo "Unknown argument $key in arguments $@"
+ usage
+ ;;
+ esac
+ done
+
+}
+
+# === Function ========================================================
+# NAME: main
+# DESCRIPTION: main function
+# PARAMETER: Command-line arguments
+# =====================================================================
+main()
+{
+ parse_args $@
+
+ # If metrics type is specified, then generate influxdb JSON files
+ # from given text files
+ if [[ ! -z $METRIC_TYPE ]]; then
+ generate_tfa_metrics_data
+ # Otherwise generate code churn, complexity and defects data for given tag
+ elif [[ ! -z $TARGET_TAG ]]; then
+ generate_defect_codechurn_complexity_data
+ else
+ echo "Please specify either metric_type or tag.."
+ usage
+ fi
+
+ # write generated data (JSON files) to InfluxDB
+ write_influxdb_data
+}
+
+main $@
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_rt_instr.py b/quality-metrics/data-generator/tfa_metrics/tfa_rt_instr.py
new file mode 100644
index 0000000..bc40a7f
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_rt_instr.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_rt_instr.py:
+
+ Parses the job output log file, stores the data in a list of dictionaries
+ and creates JSON file to be written to influxDB.
+
+ USAGE: python3 tfa_rt_instr.py --rt_instr <job_output.log>
+
+ """
+
+import argparse
+import os
+import os.path
+import re
+import json
+
+
+class TFAInstrFileParser:
+ dict_list = []
+ file_name = None
+ rtinstr_data = {}
+ rtinstr_data["data"] = []
+ rtinstr_data["metadata"] = {}
+ rtinstr_data["metadata"]["metrics"] = "tfa_rtinstr"
+ rtinstr_data["api_version"] = "1.0"
+
+ def __init__(self, input_file):
+ self.file_name = input_file
+ self.parse_instr_file()
+ print(json.dumps(self.dict_list, indent=4, sort_keys=True))
+
+ def write_database_instr_tfa(self, file_dict):
+ self.rtinstr_data["data"].append(file_dict)
+
+ def parse_instr_file(self):
+ with open(self.file_name) as fp:
+ # Store instrumentation target as measurement name
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'InstrumentationTarget':
+                print("Invalid file format.. Instrumentation not found..")
+ print("Exiting..")
+ exit()
+ measurement = val[1].strip()
+
+ # Store commit ID
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'CommitID':
+ print("Invalid file format.. Commit ID not found..")
+ print("Exiting..")
+ exit()
+ commit_id = val[1].strip()[0:10]
+
+ # Store commit title
+ line = fp.readline()
+ val = line.split(':', 1)
+ if val[0].strip() != 'CommitTitle':
+ print("Invalid file format.. CommitTitle not found..")
+ print("Exiting..")
+ exit()
+ commit_title = val[1].strip()
+
+ # Store time as commit date
+ line = fp.readline()
+ if line.split()[0] != 'CommitDate:':
+ print("Invalid file format.. Commit Date not found..")
+ print("Exiting..")
+ exit()
+ commit_time = line.split()[1]
+
+ # Store latency data per test case
+ for line in iter(fp.readline, ''):
+ file_dict = {}
+ file_dict['tags'] = {}
+ file_dict['fields'] = {}
+ file_dict['measurement'] = measurement
+ file_dict['tags']['CommitID'] = commit_id
+ file_dict['tags']['CommitTitle'] = commit_title
+ file_dict['time'] = commit_time
+ tc_arr = line.split()
+ file_dict['tags']['TC_Name'] = tc_arr[0]
+ file_dict['tags']['Cluster_ID'] = int(tc_arr[1])
+ file_dict['tags']['CPU_Core'] = int(tc_arr[2])
+ if file_dict['tags']['TC_Name'] == 'testrtinstrpsciversionparallel':
+ file_dict['fields']['Latency_EL3Entry_EL3Exit'] = int(
+ tc_arr[3])
+ else:
+ file_dict['fields']['Latency_EL3Entry_CPUPowerDown'] = int(
+ tc_arr[3])
+ file_dict['fields']['Latency_CPUWakeup_EL3Exit'] = int(
+ tc_arr[4])
+ file_dict['fields']['CacheFlush'] = int(tc_arr[5])
+ self.write_database_instr_tfa(file_dict)
+
+ with open('tfa_rtinstr.json', 'w') as fp:
+ json.dump(self.rtinstr_data, fp)
+
+
+def get_tfa_instr_file():
+ # Create parser instance and add argument
+ parser = argparse.ArgumentParser(
+ description="TFA quality metrics: Runtime Instrumentation tracking")
+ parser.add_argument(
+ "--rt_instr",
+ help="file containing TF-A runtime instrumentation info")
+
+ # Parse the args
+ args = parser.parse_args()
+
+ # Check if file exists
+ if os.path.isfile(str(args.rt_instr)):
+ return args.rt_instr
+ else:
+ print("Runtime Instrumentation file not found.. Exiting..")
+ exit()
+
+
+if __name__ == '__main__':
+ tfa_instr_file_data = TFAInstrFileParser(str(get_tfa_instr_file()))
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_track_image_size.py b/quality-metrics/data-generator/tfa_metrics/tfa_track_image_size.py
new file mode 100755
index 0000000..44dba10
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_track_image_size.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_track_image_size.py:
+
+ Parses TFA firmware image size file, stores the data in a list of
+ dictionaries and creates JSON file to be written to influxDB.
+
+    USAGE: python3 tfa_track_image_size.py --image_size_file <ImageSizeFile.txt>
+
+ """
+
+import argparse
+import os.path
+import re
+import json
+
+# Validation Variables
+MEM_SECTION_VALIDATION_TABLE = ['B', 'D', 'R', 'T', 'V', 'W']
+ELF_FILES_LOOKUP_TABLE = [
+ 'bl1.elf',
+ 'bl1u.elf',
+ 'bl2.elf',
+ 'bl2u.elf',
+ 'bl31.elf',
+ 'bl32.elf']
+
+
+class TFASizeFileParser:
+ """
+    Parse the file containing sizes of various TFA build configs.
+ Store the size data in a list of dictionaries in the following format:
+ [
+ {
+ "measurement": <build_config>,
+ "fields" : {
+ "BlX_B": Size of uninitialized data section
+ "BlX_D": Size of initialized data section
+ "BlX_R": Size of read only data section
+ "BlX_T": Size of text (code) section
+ "BlX_V": Size of weak object
+ "BlX_W": Size of weak symbol
+ },
+ "tags" : {
+ "BinMode" : Type of build (Release|Debug)
+ "CommitID" : Commit ID
+ "CommitTitle" : Commit title
+ }
+ "time" : Commit Time
+ }
+ ]
+ """
+
+ file_dict = {}
+ file_name = None
+
+ def __init__(self, input_file):
+ self.file_name = input_file
+ self.parse_image_size_file()
+ print(json.dumps(self.file_dict, indent=4, sort_keys=True))
+
+ def parse_image_size_file(self):
+ self.file_dict['tags'] = {}
+ self.file_dict['fields'] = {}
+
+ with open(self.file_name) as fp:
+ # Store measurement name as build config
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'BuildConfig':
+ print("Invalid file format.. BuildConfig not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['measurement'] = val[1].strip()
+
+ # Store bin_mode
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'BinMode':
+ print("Invalid file format.. BinMode not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags'][val[0].strip()] = val[1].strip().title()
+
+ # Store Commit ID
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'CommitID':
+ print("Invalid file format.. Commit ID not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags'][val[0].strip()] = val[1].strip()[0:10]
+
+ # Store Commit Title
+ line = fp.readline()
+ val = line.split(':', 1)
+ if val[0].strip() != 'CommitTitle':
+ print("Invalid file format.. CommitTitle not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags']['CommitTitle'] = val[1].strip()
+
+ # Store time as commit date
+ line = fp.readline()
+ if line.split()[0] != 'CommitDate:':
+ print("Invalid file format.. Commit Date not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['time'] = line.split()[1]
+
+ # Store Image Size memory related data component-wise
+ for line in iter(fp.readline, ''):
+ if ".elf" in line:
+ searched_build = line.split('/')[-1].split(':')[0]
+ build = searched_build.upper().rsplit('.', 1)[0]
+ if searched_build not in ELF_FILES_LOOKUP_TABLE:
+ print(
+ "WARNING: " +
+ searched_build +
+ " not present in ELF_FILES_LOOKUP_TABLE..")
+ print(
+ "Skipping publishing data for " +
+ searched_build +
+ " to InfluxDB")
+ build = None
+ continue
+ elif build is not None:
+ val = line.split(' ')
+ if len(val) > 1:
+ if not val[0].strip() in MEM_SECTION_VALIDATION_TABLE:
+ print(
+ "Invalid memory section \"%s\".. Exiting.." %
+ val[0].strip())
+ exit()
+ mem_comp = build + "_" + val[0].strip()
+ self.file_dict['fields'][mem_comp] = int(
+ val[1].strip())
+
+ json_body = json.dumps(str(self.file_dict))
+ if not self.file_dict['fields']:
+ failed_configs = 'failed_configs.txt'
+
+ if os.path.exists(failed_configs):
+ append_write = 'a' # append if already exists
+ else:
+ append_write = 'w' # make a new file if not
+
+ failed_configs_file = open(failed_configs, append_write)
+ failed_configs_file.write(
+ self.file_dict['measurement'] +
+ ', ' +
+ self.file_dict['tags']['BinMode'] +
+ ': bl1/bl1u/bl2/bl2u/bl31/bl32 not found\n')
+ failed_configs_file.close()
+ print("No memory section found.. Exiting")
+ exit()
+
+
+def generate_influxdb_json_file(file_dict):
+ image_size_data = {}
+ image_size_data["data"] = []
+ image_size_data["metadata"] = {}
+ image_size_data["metadata"]["metrics"] = "tfa_image_size"
+ image_size_data["api_version"] = "1.0"
+ image_size_data["data"].append(file_dict)
+ with open('tfa_image_size.json', 'w') as fp:
+ json.dump(image_size_data, fp)
+
+
+def get_tfa_size_file():
+ # Create parser instance and add argument
+ parser = argparse.ArgumentParser(
+ description="TFA quality metrics: firmware image size tracking")
+ parser.add_argument(
+ "--image_size_file",
+ help="file containing TFA image size info")
+
+ # Parse the args
+ args = parser.parse_args()
+
+ # Check if file exists
+ if os.path.isfile(str(args.image_size_file)):
+ return args.image_size_file
+ else:
+ print("Image size file not found.. Exiting..")
+ exit()
+
+
+if __name__ == '__main__':
+ tfa_size_file_data = TFASizeFileParser(str(get_tfa_size_file()))
+ generate_influxdb_json_file(tfa_size_file_data.file_dict)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_track_misra_defects.py b/quality-metrics/data-generator/tfa_metrics/tfa_track_misra_defects.py
new file mode 100755
index 0000000..6962ca5
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_track_misra_defects.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_track_misra_defects.py:
+
+ Parses TFA MISRA defects file, stores the data in a list of
+ dictionaries and creates JSON file to be written to influxDB.
+
+ USAGE: python3 tfa_track_misra_defects.py --misra_defects_file <DefectsFile.txt>
+
+ """
+
+import argparse
+import os.path
+import re
+import json
+
+
+class TFACoverityFileParser:
+ """
+ Store the Misra C defects data in a list of dictionaries in the following
+ format:
+ [
+ {
+ "measurement": <build_config>,
+ "fields" : {
+ "TotalDefects" : Total coverity defects
+ "MandatoryDefects": Mandatory defects
+ "RequiredDefects" : Required defects
+ "AdvisoryDefects" : Advisory defects
+ },
+ "tags" : {
+ "BinMode" : Type of build (Release|Debug)
+ "CommitID" : Commit ID
+ "CommitTitle" : Commit Title
+ }
+ "time" : PR Merge Commit Time
+ }
+ ]
+ """
+
+ file_dict = {}
+ file_name = None
+
+ def __init__(self, input_file):
+ self.file_name = input_file
+ self.parse_misra_defects_file()
+ print(json.dumps(self.file_dict, indent=4, sort_keys=True))
+
+ def parse_misra_defects_file(self):
+ self.file_dict = {}
+ self.file_dict['tags'] = {}
+ self.file_dict['fields'] = {}
+
+ with open(self.file_name) as fp:
+ # Store measurement name as build config
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'BuildConfig':
+ print("Invalid file format.. BuildConfig not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['measurement'] = val[1].strip()
+
+ # Store bin_mode
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'BinMode':
+ print("Invalid file format.. BinMode not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags'][val[0].strip()] = val[1].strip().title()
+
+ # Store Commit ID
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'CommitID':
+ print("Invalid file format.. Commit ID not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags'][val[0].strip()] = val[1].strip()[0:10]
+
+ # Store Commit Title
+ line = fp.readline()
+ val = line.split(':', 1)
+ if val[0].strip() != 'CommitTitle':
+ print("Invalid file format.. CommitTitle not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['tags']['CommitTitle'] = val[1].strip()
+
+ # Store time as commit date
+ line = fp.readline()
+ if line.split()[0] != 'CommitDate:':
+ print("Invalid file format.. Commit Date not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['time'] = line.split()[1]
+
+ # Store Total Defects
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'TotalDefects':
+ print("Invalid file format.. TotalDefects not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['fields']['TotalDefects'] = int(val[1].strip())
+
+ # Store Mandatory Defects
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'MandatoryDefects':
+ print("Invalid file format.. MandatoryDefects not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['fields']['MandatoryDefects'] = int(val[1].strip())
+
+ # Store Required Defects
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'RequiredDefects':
+ print("Invalid file format.. RequiredDefects not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['fields']['RequiredDefects'] = int(val[1].strip())
+
+ # Store Advisory Defects
+ line = fp.readline()
+ val = line.split(':')
+ if val[0].strip() != 'AdvisoryDefects':
+ print("Invalid file format.. AdvisoryDefects not found..")
+ print("Exiting..")
+ exit()
+ self.file_dict['fields']['AdvisoryDefects'] = int(val[1].strip())
+
+
+def write_database(file_dict):
+ misra_defects_data = {}
+ misra_defects_data["data"] = []
+ misra_defects_data["metadata"] = {}
+ misra_defects_data["metadata"]["metrics"] = "tfa_misra_defects"
+ misra_defects_data["api_version"] = "1.0"
+ misra_defects_data["data"].append(file_dict)
+ with open('tfa_misra_defects.json', 'w') as fp:
+ json.dump(misra_defects_data, fp)
+
+
+def get_tfa_coverity_file():
+ # Create parser instance and add argument
+ parser = argparse.ArgumentParser(
+ description="TF-A quality metrics: Misra C defects tracking")
+ parser.add_argument("--misra_defects_file",
+ help="file containing Misra defects information")
+
+ # Parse the args
+ args = parser.parse_args()
+
+ # Check if file exists
+ if os.path.isfile(str(args.misra_defects_file)):
+ return args.misra_defects_file
+ else:
+ print("Coverity file not found.. Exiting..")
+ exit()
+
+
+if __name__ == '__main__':
+ tfa_misra_defects_data = TFACoverityFileParser(
+ str(get_tfa_coverity_file()))
+ write_database(tfa_misra_defects_data.file_dict)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_variables.sh b/quality-metrics/data-generator/tfa_metrics/tfa_variables.sh
new file mode 100644
index 0000000..56a2715
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_variables.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+export TFA_REPO='https://github.com/ARM-software/arm-trusted-firmware.git'
+export GITHUBPY_REPO='https://github.com/michaelliao/githubpy.git'
+
+export DEFECT_LOG=tfa_defects_summary.txt
+export COMPLEXITY_LOG=tfa_complexity_summary.txt
+export CODE_CHURN_LOG=tfa_code_churn.txt
+
+# Authentication token needs to be generated using following command:
+# curl -H "Content-Type: application/json" -X POST -d \
+# "$(cat <CREDENTIALS_JSON_FILE>)" http://<IP_ADDR>:5000/auth
+# where "IP_ADDR" is the IP address of host where metrics server is running, and
+# CREDENTIALS_JSON file should contain credentials which should match with
+# the credentials in ../../broker-component/credentials.py
+# Response would contain a JWT token, which needs to be added here
+# during deployment
+export TFA_METRICS_AUTH_TOKEN="<TFA Authorization Token>"
+
+# INFLUX_HOST is the IP address of host where InfluxDB service is running
+# It needs to be updated during deployment
+export INFLUX_HOST="<Influx Public Host IP>"
+
+# Use relative path to the current script
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
diff --git a/quality-metrics/docs/broker_component_user_guide.md b/quality-metrics/docs/broker_component_user_guide.md
new file mode 100644
index 0000000..58ddbdf
--- /dev/null
+++ b/quality-metrics/docs/broker_component_user_guide.md
@@ -0,0 +1,84 @@
+# Broker Component User Guide
+The broker component is a Python Flask app which handles the data pushed by the data generator components of the quality metrics project. It implements APIs that allow the data generator scripts to POST metrics data in an agreed JSON format, which is then pushed to the InfluxDB backend database. For each received request with a valid authorization token, it performs a basic sanity check and pushes the data to InfluxDB. For details on how to visualise InfluxDB data using Grafana, please refer to the [visualisation user guide](./visualisation_user_guide.md).
+
+## Deploying broker component
+The broker component can be deployed in the infrastructure as a Docker container using the Docker files provided in the project.
+
+### Dependencies
+- docker
+- docker-compose
+- python3-dev
+- python3-pip
+- Ensure that ports 3000, 5000 and 8086 are enabled for incoming traffic.
+
+### Bringing-up Broker Component - Setup Instructions
+In order to bring up the broker component (for a first-time setup), docker-compose.yml is used, which brings up 3 containers - Grafana, InfluxDB and the Flask app. It is up to the user to adapt the docker-compose.yml file if only some of the components need to be deployed.
+The JWT authorization token is generated using the following steps and needs to be added in [tfa_variables.sh](../data-generator/tfa_metrics/tfa_variables.sh):
+1. Set the value of HOST to the host public IP address in [constants.py](../broker-component/constants.py).
+1. [credentials.py](../broker-component/credentials.py) is provided for reference purposes only. Please change the value of SECRET_KEY and the password for the _tfa_metrics_ user.
+1. Ensure the current working directory is broker-component. Create [teamname]_credentials.json (as an example, let us use 'tfa' as the team name) and run the following commands to generate the authorization token:
+```bash
+$ cat tfa_credentials.json
+{
+ "username": "tfa_metrics",
+ "password": "[tfa_metrics password matching with password in credentials.py]"
+}
+
+$ docker network create metrics_network
+$ docker volume create influxdb-volume
+$ docker volume create grafana-volume
+$ docker-compose build
+$ docker-compose up -d
+$ curl -H "Content-Type: application/json" -X POST -d "$(cat tfa_credentials.json)" http://[Host Public IP]:5000/auth
+```
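+
+The response to the `/auth` request contains a JWT token, which needs to be added to [tfa_variables.sh](../data-generator/tfa_metrics/tfa_variables.sh). As a hedged sketch (the exact header name and prefix depend on the Flask JWT configuration and are an assumption here), the value is the full header string that the data generator script later passes as a curl `-H` argument:
+```bash
+# Illustrative only: the header name/prefix may differ for your deployment
+export TFA_METRICS_AUTH_TOKEN="Authorization: JWT <token returned by the /auth request>"
+```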
+
+## Testing the setup after deployment
+The following steps can help confirm whether the deployment steps detailed in the previous section were successful.
+
+### Verify that Grafana is up
+If URL *http://`[HOST Public IP]`:3000* is accessible, it confirms that Grafana is up.
+
+### Verify that InfluxDB is up
+If URL *http://`[HOST Public IP]`:8086/query* is accessible, it confirms that InfluxDB is up.
+
+### Create InfluxDB Database
+Databases can be created by accessing the InfluxDB container or by using the InfluxDB API.
+
+#### Create database by accessing InfluxDB container
+```bash
+$ docker exec -it influxdb_container sh
+# influx
+> create database TFA_CodeChurn
+> create database TFA_Complexity
+> create database TFA_Defects
+> create database TFA_MisraDefects
+> create database TFA_ImageSize
+> create database TFA_RTINSTR
+> exit
+# exit
+```
+#### Create database using the InfluxDB API
+```bash
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_CodeChurn"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_Complexity"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_Defects"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_MisraDefects"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_ImageSize"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=CREATE DATABASE TFA_RTINSTR"
+$ curl -i -XPOST http://[HOST Public IP]:8086/query --data-urlencode "q=SHOW DATABASES"
+```
+
+### Pushing Data to InfluxDB
+Data can be pushed to InfluxDB by sending a cURL POST request in the agreed-upon format with the correct authorization token.
+* The steps above describe how the authorization token can be generated.
+* Requests are validated using [JSON schemas](../broker-component/metrics-schemas).
+In order to push data, run the following commands:
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --tag [Release Tag]
+```
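+
+Under the hood, the script POSTs each generated JSON file to the broker component. A hedged sketch of the equivalent raw request, mirroring the `write_influxdb_data` function in `tfa_quality_metrics.sh` (the host and token values are placeholders):
+```bash
+$ curl -X POST -H "Content-Type: application/json" \
+    -d "$(cat code_churn.json)" \
+    -H "${TFA_METRICS_AUTH_TOKEN}" \
+    "http://[Host Public IP]:5000"
+```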
+
+For details, please refer to the [data generator user guide](./data_generator_user_guide.md).
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/quality-metrics/docs/data_generator_user_guide.md b/quality-metrics/docs/data_generator_user_guide.md
new file mode 100644
index 0000000..fc81b54
--- /dev/null
+++ b/quality-metrics/docs/data_generator_user_guide.md
@@ -0,0 +1,194 @@
+# Data Generator User Guide
+The data generator component contains scripts that push JSON files of metrics data to be stored in the InfluxDB database. These mainly push data for the following TFA (trusted-firmware-a) metrics:
+1. Code churn, Code complexity and GitHub defects
+1. Image Size
+1. MISRA defects
+1. Runtime instrumentation
+
+For the code churn, code complexity and defects metrics, data is generated by the data generator scripts themselves. For the other metrics, the data generator scripts take a text file as input and convert the information in that file into a JSON file, which is posted to the broker component. The text file is expected to be generated by the TFA CI setup.
+
+## Dependencies
+- python3-dev
+- python3-pip
+
+## Generating Data
+Please refer to the [broker component user guide](./broker_component_user_guide.md) for changes essential for the setup. Once the broker component is up and running, please make the following changes to run the data generator scripts.
+1. Please refer to the [broker component user guide](./broker_component_user_guide.md) for details on how to generate the token, and add it in [tfa_variables.sh](../data-generator/tfa_metrics/tfa_variables.sh). "Bringing-up Broker Component - Setup Instructions" contains details on how to generate the authentication token.
+1. Set the value of INFLUX_HOST to the Influx public host IP in [tfa_variables.sh](../data-generator/tfa_metrics/tfa_variables.sh).
+1. Set the value of the GitHub access token in [tfa_defects.py](../data-generator/tfa_metrics/tfa_defects.py).
+
+### Code churn, Code complexity and GitHub defects
+As mentioned in [broker component user guide](./broker_component_user_guide.md), code churn, complexity and defects data can be generated as follows for a given release tag:
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --tag [Release Tag]
+```
+
+An example of "Release Tag" can be any of the [trusted-firmware-a repo's tags](https://git.trustedfirmware.org/TF-A/trusted-firmware-a.git/refs/tags)
+
+The JSON files sent to the broker component can be found in a timestamped folder within the tfa_metrics folder. Data written to InfluxDB can be queried using the following commands:
+```bash
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_CodeChurn' --data-urlencode 'q=show measurements'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_CodeChurn' --data-urlencode 'q=select * from TFA_CodeChurn_Tracking'
+
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Complexity' --data-urlencode 'q=show measurements'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Complexity' --data-urlencode 'q=select * from TFA_Complexity_Tracking'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Complexity' --data-urlencode 'q=select * from TFA_Complexity_Statistics'
+
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Defects' --data-urlencode 'q=show measurements'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Defects' --data-urlencode 'q=select * from TFA_Defects_Tracking'
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_Defects' --data-urlencode 'q=select * from TFA_Defects_Statistics'
+```
+
+The query "show measurements" lists all the measurements for the corresponding DB. After this, one can issue the command "select * from <measurement>", where <measurement> can be any measurement from the "show measurements" output.
+
+### Image Size
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --metric_type image_size --image_size_file [Image Size Text File]
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_ImageSize' --data-urlencode 'q=show measurements'
+```
+
+"show measurements" would list the measurements available for TFA_ImageSize DB. To view the data image size measurements, issue command "select * from measurement", where measurement is the name of measurement for which data needs to be view data.
+
+#### Image Size Sample File
+```
+BuildConfig: zynqmp-tspd-tdram
+BinMode: debug
+CommitID: ed39d5e3c0709bab22821a1da3a62737c5d531de
+CommitTitle: Merge "Enabling DPU in dts file for TC0" into integration
+CommitDate: 2020-09-08T14:22:45+00:00
+/work/workspace/workspace/swqt-atf-image-size-metrics/scripts/metrics/atf_metrics/tf-topics/tools/renesas/rcar_layout_create/cert_header_sa6.elf:
+R 288
+
+/work/workspace/workspace/swqt-atf-image-size-metrics/scripts/metrics/atf_metrics/tf-topics/tools/renesas/rcar_layout_create/bootparam_sa0.elf:
+R 20
+
+/work/workspace/workspace/swqt-atf-image-size-metrics/scripts/metrics/atf_metrics/tf-topics/build/zynqmp/debug/bl31/bl31.elf:
+B 26676
+D 14193
+R 3121
+T 40916
+W 516
+
+/work/workspace/workspace/swqt-atf-image-size-metrics/scripts/metrics/atf_metrics/tf-topics/build/zynqmp/debug/bl32/bl32.elf:
+B 21512
+D 5
+R 1218
+T 11496
+W 196
+```
+
+### MISRA Defects
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --metric_type coverity_misra --misra_defects_file [MISRA Defects Text File]
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_MisraDefects' --data-urlencode 'q=show measurements'
+```
+
+"show measurements" would list the measurements available for TFA_MisraDefects DB. To view the data MISRA defects measurements, issue command "select * from measurement", where measurement is the name of measurement for which data needs to be view data.
+
+#### MISRA Defects Sample File
+```
+BuildConfig: fvp-rst-bl31
+BinMode: debug
+CommitID: ed39d5e3c0709bab22821a1da3a62737c5d531de
+CommitTitle: Merge "Enabling DPU in dts file for TC0" into integration
+CommitDate: 2020-09-08T14:22:45+00:00
+TotalDefects: 667
+MandatoryDefects: 0
+RequiredDefects: 534
+AdvisoryDefects: 119
+```
+
+### Runtime Instrumentation
+```bash
+$ cd qa-tools/quality-metrics/data-generator/tfa_metrics
+$ ./tfa_quality_metrics.sh --metric_type runtime_instrumentation --rt_instr_file [RTINSTR Text File]
+$ curl -G 'http://[HOST Public IP]:8086/query?db=TFA_RTINSTR' --data-urlencode 'q=show measurements'
+```
+
+"show measurements" would list the measurements available for TFA_RTINSTR DB. To view the data runtime instrumentation measurements, issue command "select * from measurement", where measurement is the name of measurement for which data needs to be view data.
+
+#### Runtime Instrumentation Sample File
+```
+InstrumentationTarget: juno-tftf+aarch32-rt32.instr-r2
+CommitID: ed39d5e3c0709bab22821a1da3a62737c5d531de
+CommitTitle: Merge "Enabling DPU in dts file for TC0" into integration
+CommitDate: 2020-09-08T14:22:45+00:00
+testrtinstrsuspdeepparallel 1 0 128700 31280 9200
+testrtinstrsuspdeepparallel 1 0 128700 31280 9200
+testrtinstrsuspdeepparallel 1 1 283520 27260 9120
+testrtinstrsuspdeepparallel 1 1 283520 27260 9120
+testrtinstrsuspdeepparallel 1 2 541120 26860 214200
+testrtinstrsuspdeepparallel 1 2 541120 26860 214200
+testrtinstrsuspdeepparallel 1 3 206300 25180 9100
+testrtinstrsuspdeepparallel 1 3 206300 25180 9100
+testrtinstrsuspdeepparallel 0 0 51060 76960 6760
+testrtinstrsuspdeepparallel 0 0 51060 76960 6760
+testrtinstrsuspdeepparallel 0 1 361040 25560 203720
+testrtinstrsuspdeepparallel 0 1 361040 25560 203720
+testrtinstrsuspdeepserial 1 0 259740 28140 214060
+testrtinstrsuspdeepserial 1 0 259740 28140 214060
+testrtinstrsuspdeepserial 1 1 54480 23060 8920
+testrtinstrsuspdeepserial 1 1 54480 23060 8920
+testrtinstrsuspdeepserial 1 2 53860 23200 8920
+testrtinstrsuspdeepserial 1 2 53860 23200 8920
+testrtinstrsuspdeepserial 1 3 54280 23340 9120
+testrtinstrsuspdeepserial 1 3 54280 23340 9120
+testrtinstrsuspdeepserial 0 0 249020 25600 204180
+testrtinstrsuspdeepserial 0 0 249020 25600 204180
+testrtinstrsuspdeepserial 0 1 248800 25660 203940
+testrtinstrsuspdeepserial 0 1 248800 25660 203940
+testrtinstrcpususpparallel 1 0 209820 23500 10700
+testrtinstrcpususpparallel 1 0 209820 23500 10700
+testrtinstrcpususpparallel 1 1 287420 21920 11600
+testrtinstrcpususpparallel 1 1 287420 21920 11600
+testrtinstrcpususpparallel 1 2 364080 20980 11160
+testrtinstrcpususpparallel 1 2 364080 20980 11160
+testrtinstrcpususpparallel 1 3 441280 20720 10980
+testrtinstrcpususpparallel 1 3 441280 20720 10980
+testrtinstrcpususpparallel 0 0 52960 18920 9780
+testrtinstrcpususpparallel 0 0 52960 18920 9780
+testrtinstrcpususpparallel 0 1 130940 19020 9800
+testrtinstrcpususpparallel 0 1 130940 19020 9800
+testrtinstrcpususpserial 1 0 82460 20500 8960
+testrtinstrcpususpserial 1 0 82460 20500 8960
+testrtinstrcpususpserial 1 1 51320 20400 9040
+testrtinstrcpususpserial 1 1 51320 20400 9040
+testrtinstrcpususpserial 1 2 51180 19860 8980
+testrtinstrcpususpserial 1 2 51180 19860 8980
+testrtinstrcpususpserial 1 3 51220 20160 8940
+testrtinstrcpususpserial 1 3 51220 20160 8940
+testrtinstrcpususpserial 0 0 48560 19260 6840
+testrtinstrcpususpserial 0 0 48560 19260 6840
+testrtinstrcpususpserial 0 1 48340 19160 6760
+testrtinstrcpususpserial 0 1 48340 19160 6760
+testrtinstrcpuoffserial 1 0 260060 28060 214080
+testrtinstrcpuoffserial 1 0 260060 28060 214080
+testrtinstrcpuoffserial 1 1 55780 28100 9140
+testrtinstrcpuoffserial 1 1 55780 28100 9140
+testrtinstrcpuoffserial 1 2 55620 26860 9180
+testrtinstrcpuoffserial 1 2 55620 26860 9180
+testrtinstrcpuoffserial 1 3 55760 26440 9260
+testrtinstrcpuoffserial 1 3 55760 26440 9260
+testrtinstrcpuoffserial 0 0 251900 28880 204240
+testrtinstrcpuoffserial 0 0 251900 28880 204240
+testrtinstrcpuoffserial 0 1 252440 29560 204460
+testrtinstrcpuoffserial 0 1 252440 29560 204460
+testrtinstrpsciversionparallel 1 0 880
+testrtinstrpsciversionparallel 1 0 880
+testrtinstrpsciversionparallel 1 1 960
+testrtinstrpsciversionparallel 1 1 960
+testrtinstrpsciversionparallel 1 2 980
+testrtinstrpsciversionparallel 1 2 980
+testrtinstrpsciversionparallel 1 3 980
+testrtinstrpsciversionparallel 1 3 980
+testrtinstrpsciversionparallel 0 0 1040
+testrtinstrpsciversionparallel 0 0 1040
+testrtinstrpsciversionparallel 0 1 1180
+testrtinstrpsciversionparallel 0 1 1180
+```
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/quality-metrics/docs/design_overview.md b/quality-metrics/docs/design_overview.md
new file mode 100644
index 0000000..77523a6
--- /dev/null
+++ b/quality-metrics/docs/design_overview.md
@@ -0,0 +1,73 @@
+# Multi-tier architecture
+The quality-metrics setup is based on a multi-tier architecture with different components at each tier. The current design has components at 3 tiers:
+1. front-end components, which include a number of data generator scripts that compute the metrics to be tracked and visualised, and a Grafana visualisation component to visualise the quality metrics data.
+1. middleware components, which include a broker component that acts as an abstraction layer on top of the back-end components. The broker component performs basic data sanity checks and any data translation required.
+1. back-end components, which include the InfluxDB time-series database as the data sink.
+
+## Front-end: Data Generator scripts and Grafana visualisation tool
+Data generator scripts are metric-specific scripts that are expected to be integrated as part of the regular CI setup. Metrics are generated at a pre-determined frequency, determined by how often the scripts are executed in the team's CI setup. Each data generator script is expected to compute the agreed metric and push the metric data, along with the associated metadata, to the broker component as a JSON data fragment, which then gets pushed to the back-end for visualisation.
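+
+As a hedged illustration, the JSON fragment produced by the TF-A code churn generator has the following shape (the values shown are illustrative only):
+```bash
+$ cat code_churn.json
+{
+  "metadata": {"metrics": "tfa_code_churn"},
+  "api_version": "1.0",
+  "data": [{
+    "measurement": "TFA_CodeChurn_Tracking",
+    "fields": {"Lines_of_Change": 12345},
+    "tags": {"Git_Tag_Date": 20200420, "Base_Tag": "v2.2", "Target_Tag": "v2.3"},
+    "time": "2020-04-20T00:00:00+00:00"
+  }]
+}
+```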
+
+The data generator scripts are organised on a per-team basis.
+
+The Grafana tool is used to visualise the quality metrics data on a per-team basis.
+
+### Defect Metric
+This metric tracks the open defects against a given project.
+
+### Code Churn Metric
+This metric tracks the code churn, that is, the number of lines added, modified and deleted, against a given tag for a project.
+
+### Code Complexity Metric
+This metric tracks the code complexity against a given tag for a project. It reports the modified McCabe score for the code complexity, where a switch-statement is treated as a single decision point, thus reporting a value of 2. It uses the pmccabe utility to calculate the complexity score, and all functions with a "Modified McCabe Cyclomatic Complexity" above the agreed-upon threshold are pushed to the database.
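+
+A minimal sketch of how the score is obtained, mirroring the pmccabe invocation used by the data generator script (the repository path is illustrative):
+```bash
+# -v prints column headings and -t appends totals; the first output column is
+# the Modified McCabe Cyclomatic Complexity used by this metric
+pmccabe -vt $(find arm-trusted-firmware -name "*.c")
+```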
+
+### Other Metrics
+While the above-mentioned metrics are computed by data generator scripts, some other metrics, such as image size, MISRA defects and runtime instrumentation, are not. For these metrics, the role of the scripts in the data generator folder is to convert the input text file containing the data (to be written to the DB) into a JSON file (which is sent to the broker component). These input text files are expected to be generated as part of the CI setup.
+
+## Middleware: Broker component
+The broker component provides a level of abstraction and decouples the data generator components from the back-end components. This decoupling eases future changes, such as adding a new database or visualisation component, without requiring major changes to the front-end scripts.
+
+The broker component provides a simple token-based authentication scheme to validate the data sources that push the metrics data to the quality metrics setup. A token is issued on a per-team basis. The broker component implements a service queue for the data requests received from clients and always performs a sanity check on the data pushed by a client; only data that is well formed against the agreed data template is processed. The broker component can also perform agreed data transformations on some of the data pushed to it before committing it to the back-end database.
+
+## Back-end: InfluxDB database
+The back-end consists of the data sink component which holds the quality metrics data. An individual data model is defined for each metric. The sections below capture the details of the individual measurements and the InfluxDB line protocol outlining each data model. A separate database is created for each metric for a team, and all measurements associated with a metric are held in this database.
+
+### TF-A Defect Data Model
+This database holds the measurements which contain data for the open defects against TFA components raised in GitHub.
+
+#### Measurement: TFA_Defects_Tracking
+This measurement captures the open defect count against TFA in GitHub and allows the trend to be visualised over time.
+
+Line Protocol Description: TFA_Defects_Tracking,Measured_Date=[timestamp in date format] Issue_Status=[Open from GitHub Issue states],Number_of_Defects=[integer value for the defect count]
+
+#### Measurement: TFA_Defects_Statistics
+This measurement holds the raw data that feeds into TFA_Defects_Tracking.
+
+Line Protocol Description: TFA_Defects_Statistics,Defect_ID=[defect identifier],Measured_Date=[timestamp in date format] Title=[Defect title],Issue_Status=[Open|Closed|... from GitHub Issue states],URL=[URL to the issue]
+
+### TF-A Code Churn Data Model
+This database holds the measurements that are used to visualise the trend of LoC changes over time against the TFA (trusted-firmware-a) code base.
+
+#### Measurement: TFA_CodeChurn_Tracking
+This measurement captures the LoC added/deleted/modified count across TFA versions and allows the trend to be visualised over time.
+
+Line Protocol Description: TFA_CodeChurn_Tracking,Git_Tag_Date=[Git Tag Date],Target_Tag=[TFA version tag],Base_Tag=[base tag] Lines_of_Change=[LoC changed]
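+
+As a hedged example, a single point following this description could be written with the InfluxDB 1.x HTTP API as shown below (values are illustrative; in this setup the data normally flows through the broker component rather than being written directly):
+```bash
+curl -i -XPOST 'http://[HOST Public IP]:8086/write?db=TFA_CodeChurn' --data-binary \
+  'TFA_CodeChurn_Tracking,Git_Tag_Date=20200420,Base_Tag=v2.2,Target_Tag=v2.3 Lines_of_Change=12345'
+```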
+
+
+### TF-A Complexity Data Model
+This database holds the measurements that are used to visualise the trend of complex functions over time against the TFA code.
+
+#### Measurement: TFA_Complexity_Tracking
+This measurement captures the count of functions above a given threshold in the TFA code base and allows the trend to be visualised over time.
+
+Line Protocol Description: TFA_Complexity_Tracking,Git_Tag_Date=[Git Tag Date],Target_Tag=[TFA version tag] Threshold=[threshold value],Whitelisted=[no],Functions_Exceeding_Threshold_Not_Whitelisted=[number of functions exceeding complexity threshold which are not whitelisted]
+
+#### Measurement: TFA_Complexity_Statistics
+This measurement holds the raw data that feeds into TFA_Complexity_Tracking.
+
+Line Protocol Description: TFA_Complexity_Statistics,Git_Tag_Date=[Git Tag Date],Base_Tag=[base tag],Target_Tag=[TFA version tag],Location=[path in the code base for the function] Function_ID=[function identifier],Score=[mccabe score],Threshold=[threshold value],Whitelisted=[yes|no]
+
+Most data models can also be interpreted from the [JSON schemas](../broker-component/metrics-schemas); the "data" section contains the details of the fields and tags.
+
+## License
+[BSD-3-Clause](../../license.md)
+
diff --git a/quality-metrics/docs/sample-dashboards/tfa_codechurn.json b/quality-metrics/docs/sample-dashboards/tfa_codechurn.json
new file mode 100644
index 0000000..ac9e0b7
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_codechurn.json
@@ -0,0 +1,288 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 3,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_CodeChurn",
+ "decimals": 2,
+ "description": "Tracking the lines of changes in TF-A source code per release tag",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 7,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 5,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": false,
+ "total": false,
+ "values": false
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$tag_Target_Tag",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT max(Lines_of_Change) FROM \"TFA_CodeChurn_Tracking\" where $timeFilter group by Target_Tag, Git_Tag_Date order by time",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TFA CodeChurn Tracking",
+ "tooltip": {
+ "shared": false,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "series",
+ "name": null,
+ "show": true,
+ "values": [
+ "max"
+ ]
+ },
+ "yaxes": [
+ {
+ "decimals": 0,
+ "format": "short",
+ "label": "Lines of Change",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_CodeChurn",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 10,
+ "w": 24,
+ "x": 0,
+ "y": 7
+ },
+ "id": 7,
+ "links": [],
+ "pageSize": 9,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "hidden"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "TFA_CodeChurn_Tracking",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Base_Tag as \"Base Tag\", max(Lines_of_Change) as \"Lines of Change\" FROM \"TFA_CodeChurn_Tracking\" GROUP BY Target_Tag ",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TF-A Code Churn Statistics",
+ "transform": "table",
+ "type": "table-old"
+ }
+ ],
+ "refresh": "1d",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-5y",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Churn",
+ "uid": "tfa-churn",
+ "version": 4
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_complexity.json b/quality-metrics/docs/sample-dashboards/tfa_complexity.json
new file mode 100644
index 0000000..87ee822
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_complexity.json
@@ -0,0 +1,487 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 5,
+ "iteration": 1600199355075,
+ "links": [],
+ "panels": [
+ {
+ "content": "<b><center>First table shows the details for the latest tag and the second table shows the details for the selected \"Target Tag\". </b>",
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 2,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 6,
+ "links": [],
+ "mode": "html",
+ "options": {
+ "content": "<b><center>First table shows the details for the latest tag and the second table shows the details for the selected \"Target Tag\". </b>",
+ "mode": "html"
+ },
+ "pluginVersion": "7.1.0",
+ "title": "Please note that when \"yes\" is selected from the above drop-down, data for both whitelisted and non-whitelisted functions is shown. Currently there are 0 whitelisted functions for TF-A.",
+ "type": "text"
+ },
+ {
+ "aliasColors": {},
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_Complexity",
+ "description": "Tracking the number of functions exceeding threshold",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 2
+ },
+ "hiddenSeries": false,
+ "id": 3,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": false,
+ "total": false,
+ "values": false
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$tag_Target_Tag",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT max(Functions_Exceeding_Threshold_Not_Whitelisted) FROM \"TFA_Complexity_Tracking\" where ((Whitelisted =~ /^$Whitelisted$/ OR Whitelisted =~ /^no$/) AND $timeFilter) group by Target_Tag, Git_Tag_Date order by time",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TFA Complexity Tracking",
+ "tooltip": {
+ "shared": false,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "series",
+ "name": null,
+ "show": true,
+ "values": [
+ "max"
+ ]
+ },
+ "yaxes": [
+ {
+ "decimals": 0,
+ "format": "short",
+ "label": "Functions Exceeding Threshold",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_Complexity",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 11
+ },
+ "id": 4,
+ "links": [],
+ "pageSize": 100,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Base_Tag, Function_ID, Location, Score, Threshold, Whitelisted FROM \"TFA_Complexity_Statistics\" where ((Whitelisted =~ /^$Whitelisted$/ OR Whitelisted =~ /^no$/) AND $timeFilter AND Target_Tag =~ /^$TargetTag1$/) GROUP BY Target_Tag",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TFA Complexity Statistics for Target Tag $TargetTag1",
+ "transform": "table",
+ "type": "table-old"
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_Complexity",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 20
+ },
+ "id": 7,
+ "links": [],
+ "pageSize": 100,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Base_Tag, Function_ID, Location, Score, Threshold, Whitelisted FROM \"TFA_Complexity_Statistics\" where ((Whitelisted =~ /^$Whitelisted$/ OR Whitelisted =~ /^no$/) AND Target_Tag =~ /^$TargetTag2$/) GROUP BY Target_Tag",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TFA Complexity Statistics for Target Tag $TargetTag2",
+ "transform": "table",
+ "type": "table-old"
+ }
+ ],
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": {
+ "tags": [],
+ "text": "no",
+ "value": "no"
+ },
+ "hide": 0,
+ "includeAll": false,
+ "label": "Show data including \"whitelisted\" functions?",
+ "multi": false,
+ "name": "Whitelisted",
+ "options": [
+ {
+ "selected": false,
+ "text": "yes",
+ "value": "yes"
+ },
+ {
+ "selected": true,
+ "text": "no",
+ "value": "no"
+ }
+ ],
+ "query": "yes,no",
+ "skipUrlSync": false,
+ "type": "custom"
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "v2.3",
+ "value": "v2.3"
+ },
+ "datasource": "TFA_Complexity",
+ "definition": "SELECT LAST(Target_Tag) FROM (SELECT Target_Tag, Function_ID FROM TFA_Complexity_Statistics)",
+ "hide": 2,
+ "includeAll": false,
+ "label": "Target Tag 1",
+ "multi": false,
+ "name": "TargetTag1",
+ "options": [],
+ "query": "SELECT LAST(Target_Tag) FROM (SELECT Target_Tag, Function_ID FROM TFA_Complexity_Statistics)",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "v2.0",
+ "value": "v2.0"
+ },
+ "datasource": "TFA_Complexity",
+ "definition": "SHOW TAG VALUES WITH KEY=\"Target_Tag\"",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Target Tag",
+ "multi": false,
+ "name": "TargetTag2",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY=\"Target_Tag\"",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-2y",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Complexity",
+ "uid": "tfa-complexity",
+ "version": 6
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_defects.json b/quality-metrics/docs/sample-dashboards/tfa_defects.json
new file mode 100644
index 0000000..17b7f1e
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_defects.json
@@ -0,0 +1,275 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 6,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_Defects",
+ "decimals": 0,
+ "description": "Tracks the number of defects of TF-A per release tag",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 7,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 1,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": false,
+ "total": false,
+ "values": false
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "Total Defects",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT last(Number_of_Defects) as Defects FROM \"TFA_Defects_Tracking\" where $timeFilter GROUP BY time(1w) fill(none)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TFA Defects Tracking",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "decimals": 0,
+ "format": "short",
+ "label": "Number of Defects",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "datasource": "TFA_Defects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {
+ "align": null
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 24,
+ "x": 0,
+ "y": 7
+ },
+ "id": 2,
+ "links": [],
+ "options": {
+ "showHeader": true
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "alias": "",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT * FROM \"TFA_Defects_Statistics\" where $timeFilter",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TF-A Defects Statistics",
+ "type": "table"
+ }
+ ],
+ "refresh": "1d",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-90d",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Defects",
+ "uid": "tfa-defects",
+ "version": 6
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_image_size.json b/quality-metrics/docs/sample-dashboards/tfa_image_size.json
new file mode 100644
index 0000000..8325e46
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_image_size.json
@@ -0,0 +1,642 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "TFA_ImageSize",
+ "enable": false,
+ "hide": false,
+ "iconColor": "#e0f9d7",
+ "limit": 100,
+ "name": "View PR Details",
+ "query": "select PullRequestURL, Pull_Request_Title, CommitID, BL1_B, BL2_B, BL1U_B, BL2U_B, BL31_B, BL32_B from \"[[BuildConfig]]\"",
+ "showIn": 0,
+ "tagsColumn": "CommitID",
+ "textColumn": "Pull_Request_Title",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 9,
+ "iteration": 1600199357868,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_ImageSize",
+ "decimals": 3,
+ "description": "This shows the trend in image size of .ELF files for selected build config in stacked manner. The values are individually stacked, not cumulative.\n\nBuild Config and Bin Mode can be changed from the drop down at the top of dashboard.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 2,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 10,
+ "legend": {
+ "avg": false,
+ "current": true,
+ "hideEmpty": true,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT (\"BL1_B\" + \"BL1_D\" + \"BL1_R\" + \"BL1_T\" + \"BL1_W\") as bl1, (\"BL1U_B\" + \"BL1U_D\" + \"BL1U_R\" + \"BL1U_T\" + \"BL1U_W\") as bl1u, (\"BL2_B\" + \"BL2_D\" + \"BL2_R\" + \"BL2_T\" + \"BL2_W\") as bl2, (\"BL2U_B\" + \"BL2U_D\" + \"BL2U_R\" + \"BL2U_T\" + \"BL2U_W\") as bl2u, (\"BL31_B\" + \"BL31_D\" + \"BL31_R\" + \"BL31_T\" + \"BL31_W\") as bl31, (\"BL32_B\" + \"BL32_D\" + \"BL32_R\" + \"BL32_T\" + \"BL32_W\") as bl32 from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^$BinMode$/ ) fill(0)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TF-A Image Size Time Series Graph for Selected Build Config ($BuildConfig) and Bin Mode ($BinMode)",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "decimals": 3,
+ "format": "decbytes",
+ "label": "Image Size",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_ImageSize",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 9
+ },
+ "id": 4,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 3,
+ "pattern": "/^bl*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "decbytes"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "link": true,
+ "linkTargetBlank": true,
+ "linkTooltip": "",
+ "linkUrl": "${__cell:raw}",
+ "pattern": "Pull Request URL",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT CommitTitle as \"Commit Title\", \"BinMode\", \"CommitID\" as \"Commit ID\", (\"BL1_B\" + \"BL1_D\" + \"BL1_R\" + \"BL1_T\" + \"BL1_W\") as bl1, (\"BL1U_B\" + \"BL1U_D\" + \"BL1U_R\" + \"BL1U_T\" + \"BL1U_W\") as bl1u, (\"BL2_B\" + \"BL2_D\" + \"BL2_R\" + \"BL2_T\" + \"BL2_W\") as bl2, (\"BL2U_B\" + \"BL2U_D\" + \"BL2U_R\" + \"BL2U_T\" + \"BL2U_W\") as bl2u, (\"BL31_B\" + \"BL31_D\" + \"BL31_R\" + \"BL31_T\" + \"BL31_W\") as bl31, (\"BL32_B\" + \"BL32_D\" + \"BL32_R\" + \"BL32_T\" + \"BL32_W\") as bl32 from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^$BinMode$/ ) fill(0)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TF-A Image Size Details Table for $BuildConfig",
+ "transform": "table",
+ "type": "table-old"
+ },
+ {
+ "aliasColors": {},
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_ImageSize",
+ "decimals": 3,
+ "description": "",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 18
+ },
+ "hiddenSeries": false,
+ "id": 8,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": true,
+ "hideZero": true,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "sideWidth": null,
+ "total": false,
+ "values": true
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT *\nFROM \"$BuildConfig\"\nWHERE (\"CommitID\" =~ /^$CommitID$/ AND \"BinMode\" =~ /^$BinMode$/ )\nGROUP BY \"BinMode\", \"PullRequestURL\", \"CommitID\"",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Image size memory details for selected Commit ID ($CommitID) and Bin Mode ($BinMode) for $BuildConfig",
+ "tooltip": {
+ "shared": false,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "series",
+ "name": null,
+ "show": true,
+ "values": [
+ "total"
+ ]
+ },
+ "yaxes": [
+ {
+ "decimals": 3,
+ "format": "decbytes",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "breakPoint": "50%",
+ "cacheTimeout": null,
+ "combine": {
+ "label": "Others",
+ "threshold": 0
+ },
+ "datasource": "TFA_ImageSize",
+ "decimals": 3,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "80%",
+ "format": "decbytes",
+ "gridPos": {
+ "h": 9,
+ "w": 19,
+ "x": 0,
+ "y": 27
+ },
+ "id": 6,
+ "interval": null,
+ "legend": {
+ "header": "Image Size",
+ "show": true,
+ "sideWidth": 300,
+ "values": true
+ },
+ "legendType": "Right side",
+ "links": [],
+ "maxDataPoints": 3,
+ "nullPointMode": "connected",
+ "pieType": "pie",
+ "strokeWidth": "2",
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT (\"BL1_B\" + \"BL1_D\" + \"BL1_R\" + \"BL1_T\" + \"BL1_W\") as bl1, (\"BL1U_B\" + \"BL1U_D\" + \"BL1U_R\" + \"BL1U_T\" + \"BL1U_W\") as bl1u, (\"BL2_B\" + \"BL2_D\" + \"BL2_R\" + \"BL2_T\" + \"BL2_W\") as bl2, (\"BL2U_B\" + \"BL2U_D\" + \"BL2U_R\" + \"BL2U_T\" + \"BL2U_W\") as bl2u, (\"BL31_B\" + \"BL31_D\" + \"BL31_R\" + \"BL31_T\" + \"BL31_W\") as bl31, (\"BL32_B\" + \"BL32_D\" + \"BL32_R\" + \"BL32_T\" + \"BL32_W\") as bl32 \nFROM \"$BuildConfig\"\nWHERE (\"CommitID\" =~ /^$CommitID$/ AND (\"BinMode\" =~ /^$BinMode$/ ))\nGROUP BY \"CommitID\", \"PullRequestURL\", \"BinMode\" fill(0)\n",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Image size pie chart for selected Commit ID ($CommitID) and Bin Mode ($BinMode) for $BuildConfig",
+ "type": "grafana-piechart-panel",
+ "valueName": "current"
+ },
+ {
+ "content": "<li>\"BlX_B\": Size of uninitialized data section</li>\n<li>\"BlX_D\": Size of initialized data section</li>\n<li>\"BlX_R\": Size of read only data section</li>\n<li>\"BlX_T\": Size of text (code) section</li>\n<li>\"BlX_V\": Size of weak object</li>\n<li>\"BlX_W\": Size of weak symbol</li>\n\n<br>Build Config, Commit ID and Bin Mode can<br>\nbe changed from the drop down menu at <br>the top of\npage.",
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 5,
+ "x": 19,
+ "y": 27
+ },
+ "id": 12,
+ "links": [],
+ "mode": "html",
+ "options": {
+ "content": "<li>\"BlX_B\": Size of uninitialized data section</li>\n<li>\"BlX_D\": Size of initialized data section</li>\n<li>\"BlX_R\": Size of read only data section</li>\n<li>\"BlX_T\": Size of text (code) section</li>\n<li>\"BlX_V\": Size of weak object</li>\n<li>\"BlX_W\": Size of weak symbol</li>\n\n<br>Build Config, Commit ID and Bin Mode can<br>\nbe changed from the drop down menu at <br>the top of\npage.",
+ "mode": "html"
+ },
+ "pluginVersion": "7.1.0",
+ "title": "Memory Section Details:",
+ "transparent": true,
+ "type": "text"
+ }
+ ],
+ "refresh": "1d",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "poplar-default",
+ "value": "poplar-default"
+ },
+ "datasource": "TFA_ImageSize",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Build Config",
+ "multi": false,
+ "name": "BuildConfig",
+ "options": [],
+ "query": "show measurements",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "9b2bf15016",
+ "value": "9b2bf15016"
+ },
+ "datasource": "TFA_ImageSize",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID",
+ "multi": false,
+ "name": "CommitID",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY = \"CommitID\"",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "Debug",
+ "value": "Debug"
+ },
+ "datasource": "TFA_ImageSize",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Bin Mode",
+ "multi": false,
+ "name": "BinMode",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY = \"BinMode\"",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-30d",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Image_Size",
+ "uid": "GkzYOKFiz",
+ "version": 6
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_misra_defects.json b/quality-metrics/docs/sample-dashboards/tfa_misra_defects.json
new file mode 100644
index 0000000..303e802
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_misra_defects.json
@@ -0,0 +1,897 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "TFA_MisraDefects",
+ "enable": false,
+ "hide": false,
+ "iconColor": "#e0f9d7",
+ "limit": 100,
+ "name": "View PR Details",
+ "query": "select PullRequestURL, Pull_Request_Title, CommitID, TotalDefects from \"[[BuildConfig]]\"",
+ "showIn": 0,
+ "tagsColumn": "CommitID",
+ "textColumn": "Pull_Request_Title",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 8,
+ "iteration": 1600199358960,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {
+ "MandatoryDefects": "#bf1b00",
+ "RequiredDefects": "#eab839"
+ },
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_MisraDefects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 2,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": true,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT TotalDefects FROM \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT MandatoryDefects FROM \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT RequiredDefects FROM \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT AdvisoryDefects FROM \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "D",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "TF-A Misra Defects Time Series Graph for Selected Build Config ($BuildConfig) and Bin Mode Debug",
+ "tooltip": {
+ "shared": true,
+ "sort": 1,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_MisraDefects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 9
+ },
+ "id": 4,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "link": true,
+ "linkTargetBlank": true,
+ "linkTooltip": "",
+ "linkUrl": "${__cell:raw} ",
+ "pattern": "Pull Request URL",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 3,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT CommitTitle as \"Commit Title\", \"BinMode\", \"CommitID\" as \"Commit ID\", \"MandatoryDefects\" as \"Mandatory Defects\", \"RequiredDefects\" as \"Required Defects\", \"AdvisoryDefects\" as \"Advisory Defects\", \"TotalDefects\" as \"Total Defects\" from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ )",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "TF-A Misra C Defects Details Table for $BuildConfig",
+ "transform": "table",
+ "type": "table-old"
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_MisraDefects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 5,
+ "w": 24,
+ "x": 0,
+ "y": 18
+ },
+ "id": 6,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "date"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "link": true,
+ "linkTargetBlank": true,
+ "linkTooltip": "",
+ "linkUrl": "$__cell",
+ "pattern": "Pull Request URL",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 3,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "alias": "$col",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"CommitID\" as \"Commit ID\", CommitTitle as \"Commit Title\", \"BinMode\", \"MandatoryDefects\" as \"Mandatory Defects\", \"RequiredDefects\" as \"Required Defects\", \"AdvisoryDefects\" as \"Advisory Defects\", \"TotalDefects\" as \"Total Defects\" from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ AND \"CommitID\" =~ /^$CommitID1$/ )",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"CommitID\" as \"Commit ID\", CommitTitle as \"Commit Title\", \"BinMode\", \"MandatoryDefects\" as \"Mandatory Defects\", \"RequiredDefects\" as \"Required Defects\", \"AdvisoryDefects\" as \"Advisory Defects\", \"TotalDefects\" as \"Total Defects\" from \"$BuildConfig\" WHERE (\"BinMode\" =~ /^Debug$/ AND \"CommitID\" =~ /^$CommitID2$/ )",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Comparison table between $CommitID1 and $CommitID2 for BinMode \"Debug\"",
+ "transform": "table",
+ "transparent": true,
+ "type": "table-old"
+ },
+ {
+ "aliasColors": {
+ "Advisory Defects": "#ef843c",
+ "Mandatory Defects": "#bf1b00",
+ "Total Defects": "#629e51"
+ },
+ "bars": true,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_MisraDefects",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 23
+ },
+ "hiddenSeries": false,
+ "id": 8,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "Mandatory Defects",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "hide": false,
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT DIFFERENCE(\"MandatoryDefects\") FROM \"$BuildConfig\" WHERE ((\"CommitID\" =~ /^$CommitID1$/ AND (\"BinMode\" =~ /^Debug$/ )) OR (\"CommitID\" =~ /^$CommitID2$/ AND (\"BinMode\" =~ /^Debug$/ )))",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Required Defects",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "hide": false,
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT DIFFERENCE(\"RequiredDefects\") FROM \"$BuildConfig\" WHERE ((\"CommitID\" =~ /^$CommitID1$/ AND (\"BinMode\" =~ /^Debug$/ )) OR (\"CommitID\" =~ /^$CommitID2$/ AND (\"BinMode\" =~ /^Debug$/ )))",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Advisory Defects",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "hide": false,
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT DIFFERENCE(\"AdvisoryDefects\") FROM \"$BuildConfig\" WHERE ((\"CommitID\" =~ /^$CommitID1$/ AND (\"BinMode\" =~ /^Debug$/ )) OR (\"CommitID\" =~ /^$CommitID2$/ AND (\"BinMode\" =~ /^Debug$/ )))",
+ "rawQuery": true,
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Total Defects",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "hide": false,
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT DIFFERENCE(\"TotalDefects\") FROM \"$BuildConfig\" WHERE ((\"CommitID\" =~ /^$CommitID1$/ AND (\"BinMode\" =~ /^Debug$/ )) OR (\"CommitID\" =~ /^$CommitID2$/ AND (\"BinMode\" =~ /^Debug$/ )))",
+ "rawQuery": true,
+ "refId": "D",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Comparison Chart between [$CommitID1,Debug] and [$CommitID2,Debug] Misra C Defects for \"$BuildConfig\"",
+ "tooltip": {
+ "shared": false,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "transparent": true,
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "series",
+ "name": null,
+ "show": true,
+ "values": [
+ "total"
+ ]
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Difference in Misra C Defects",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ }
+ ],
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "fvp-rst-bl31",
+ "value": "fvp-rst-bl31"
+ },
+ "datasource": "TFA_MisraDefects",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Build Config",
+ "multi": false,
+ "name": "BuildConfig",
+ "options": [],
+ "query": "show measurements",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "9b2bf15016",
+ "value": "9b2bf15016"
+ },
+ "datasource": "TFA_MisraDefects",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID 1",
+ "multi": false,
+ "name": "CommitID1",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY = \"CommitID\" ",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "a41ca4c344",
+ "value": "a41ca4c344"
+ },
+ "datasource": "TFA_MisraDefects",
+ "definition": "",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID 2",
+ "multi": false,
+ "name": "CommitID2",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY = \"CommitID\" ",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-30d",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_Misra_Defects",
+ "uid": "41hRgW-mz",
+ "version": 10
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/sample-dashboards/tfa_runtime_perf_details.json b/quality-metrics/docs/sample-dashboards/tfa_runtime_perf_details.json
new file mode 100644
index 0000000..bbe140b
--- /dev/null
+++ b/quality-metrics/docs/sample-dashboards/tfa_runtime_perf_details.json
@@ -0,0 +1,1089 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "TFA_RunTime_Perf",
+ "enable": false,
+ "hide": false,
+ "iconColor": "#fce2de",
+ "limit": 100,
+ "name": "View Commit Details",
+ "query": "select CommitTitle, CommitID, TC_Name, CPU_Core, CacheFlush, Latency_EL3Entry_EL3Exit from \"[[Instrumentation]]\" WHERE ($timeFilter AND \"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^0/ AND \"Cluster_ID\" =~ /^0/) LIMIT 1000",
+ "showIn": 0,
+ "tagsColumn": "CommitID",
+ "textColumn": "CommitTitle",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 10,
+ "iteration": 1600199360656,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {
+ "Cluster ID: 0, CPU Core: 0": "#eab839",
+ "Cluster ID: 0, CPU Core: 1": "#eab839",
+ "Cluster ID: 1, CPU Core: 0": "#70dbed",
+ "Cluster ID: 1, CPU Core: 1": "#70dbed",
+ "Cluster ID: 1, CPU Core: 2": "#70dbed",
+ "Cluster ID: 1, CPU Core: 3": "#70dbed",
+ "Latency_CPUWakeup_EL3Exit for CPU Core 0": "#e24d42"
+ },
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "TFA_RunTime_Perf",
+ "decimals": 3,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "id": 2,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": true,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": false,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 3,
+ "points": true,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "Cluster ID: 0, CPU Core: 0",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^0/ AND \"Cluster_ID\" =~ /^0/)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 0, CPU Core: 1",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^1/ AND \"Cluster_ID\" =~ /^0/)",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 0, CPU Core: 2",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^2/ AND \"Cluster_ID\" =~ /^0/)",
+ "rawQuery": true,
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 0, CPU Core: 3",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^3/ AND \"Cluster_ID\" =~ /^0/)",
+ "rawQuery": true,
+ "refId": "D",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 1, CPU Core: 0",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^0/ AND \"Cluster_ID\" =~ /^1/)",
+ "rawQuery": true,
+ "refId": "E",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 1, CPU Core: 1",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^1/ AND \"Cluster_ID\" =~ /^1/)",
+ "rawQuery": true,
+ "refId": "F",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 1, CPU Core: 2",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^2/ AND \"Cluster_ID\" =~ /^1/)",
+ "rawQuery": true,
+ "refId": "G",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Cluster ID: 1, CPU Core: 3",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT \"$Latency\" FROM \"$Instrumentation\" WHERE (\"TC_Name\" =~ /^$TestCase$/ AND \"CPU_Core\" =~ /^3/ AND \"Cluster_ID\" =~ /^1/)",
+ "rawQuery": true,
+ "refId": "H",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "[TC: $TestCase] $Latency Graph for $Instrumentation",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "transparent": true,
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "decimals": 3,
+ "format": "µs",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_RunTime_Perf",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 9,
+ "w": 24,
+ "x": 0,
+ "y": 9
+ },
+ "id": 7,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 0,
+ "desc": true
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "hidden"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 3,
+ "pattern": "/^Max*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "ns"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "link": true,
+ "linkTargetBlank": true,
+ "linkTooltip": "",
+ "linkUrl": "${__cell:raw}",
+ "pattern": "Pull_Request_URL",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT CommitID, CommitTitle as \"Commit Title\", Cluster_ID as \"Cluster ID\", MAX($Latency) AS \"Max $Latency\"\nFROM \"[[Instrumentation]]\" WHERE (TC_Name =~ /^$TestCase$/ AND Cluster_ID =~ /^0/) GROUP BY CommitID",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT CommitID, CommitTitle as \"Commit Title\", Cluster_ID as \"Cluster ID\", MAX($Latency) AS \"Max $Latency\"\nFROM \"[[Instrumentation]]\" WHERE (TC_Name =~ /^$TestCase$/ AND Cluster_ID =~ /^1/) GROUP BY CommitID",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Details for selected instrumentation and test case ([[Instrumentation]], [[TestCase]])",
+ "transform": "table",
+ "type": "table-old"
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_RunTime_Perf",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "110%",
+ "gridPos": {
+ "h": 3.7,
+ "w": 12,
+ "x": 0,
+ "y": 18
+ },
+ "id": 4,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": null,
+ "desc": false
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "hidden"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 3,
+ "pattern": "/^MAX*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "ns"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Cluster_ID, MAX(\"$Latency\") AS \"MAX($Latency)\" FROM \"[[Instrumentation]]\" WHERE (CommitID =~ /^$CommitID1$/ AND Cluster_ID =~ /^0/ AND \"TC_Name\" =~ /^$TestCase$/)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Cluster_ID, MAX(\"$Latency\") AS \"MAX($Latency)\" FROM \"[[Instrumentation]]\" WHERE (CommitID =~ /^$CommitID1$/ AND Cluster_ID =~ /^1/ AND \"TC_Name\" =~ /^$TestCase$/)",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Max $Latency for Cluster IDs 0 and 1 for ($TestCase, $CommitID1)",
+ "transform": "table",
+ "transparent": true,
+ "type": "table-old"
+ },
+ {
+ "columns": [],
+ "datasource": "TFA_RunTime_Perf",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "110%",
+ "gridPos": {
+ "h": 3.7,
+ "w": 12,
+ "x": 12,
+ "y": 18
+ },
+ "id": 5,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": null,
+ "desc": false
+ },
+ "styles": [
+ {
+ "alias": "Time",
+ "align": "auto",
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "pattern": "Time",
+ "type": "hidden"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 3,
+ "pattern": "/^MAX*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "ns"
+ },
+ {
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "decimals": 2,
+ "pattern": "/.*/",
+ "thresholds": [],
+ "type": "number",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Cluster_ID, MAX(\"$Latency\") AS \"MAX($Latency)\" FROM \"[[Instrumentation]]\" WHERE (CommitID =~ /^$CommitID2$/ AND Cluster_ID =~ /^0/ AND \"TC_Name\" =~ /^$TestCase$/)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT Cluster_ID, MAX(\"$Latency\") AS \"MAX($Latency)\" FROM \"[[Instrumentation]]\" WHERE (CommitID =~ /^$CommitID2$/ AND Cluster_ID =~ /^1/ AND \"TC_Name\" =~ /^$TestCase$/)",
+ "rawQuery": true,
+ "refId": "B",
+ "resultFormat": "table",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "title": "Max $Latency for Cluster IDs 0 and 1 for ($TestCase, $CommitID2)",
+ "transform": "table",
+ "transparent": true,
+ "type": "table-old"
+ }
+ ],
+ "refresh": false,
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [
+ "TFA_QUALITY_METRICS"
+ ],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "juno-tftf+aarch32-rt32.instr-r2",
+ "value": "juno-tftf+aarch32-rt32.instr-r2"
+ },
+ "datasource": "TFA_RunTime_Perf",
+ "definition": "show measurements",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Instrumentation",
+ "multi": false,
+ "name": "Instrumentation",
+ "options": [],
+ "query": "show measurements",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "tags": [],
+ "text": "testrtinstrpsciversionparallel",
+ "value": "testrtinstrpsciversionparallel"
+ },
+ "hide": 0,
+ "includeAll": false,
+ "label": "Test Case",
+ "multi": false,
+ "name": "TestCase",
+ "options": [
+ {
+ "selected": false,
+ "text": "testrtinstrcpuoffserial",
+ "value": "testrtinstrcpuoffserial"
+ },
+ {
+ "selected": false,
+ "text": "testrtinstrcpususpparallel",
+ "value": "testrtinstrcpususpparallel"
+ },
+ {
+ "selected": false,
+ "text": "testrtinstrcpususpserial",
+ "value": "testrtinstrcpususpserial"
+ },
+ {
+ "selected": true,
+ "text": "testrtinstrpsciversionparallel",
+ "value": "testrtinstrpsciversionparallel"
+ },
+ {
+ "selected": false,
+ "text": "testrtinstrsuspdeepparallel",
+ "value": "testrtinstrsuspdeepparallel"
+ },
+ {
+ "selected": false,
+ "text": "testrtinstrsuspdeepserial",
+ "value": "testrtinstrsuspdeepserial"
+ }
+ ],
+ "query": "testrtinstrcpuoffserial,testrtinstrcpususpparallel,testrtinstrcpususpserial,testrtinstrpsciversionparallel,testrtinstrsuspdeepparallel,testrtinstrsuspdeepserial",
+ "skipUrlSync": false,
+ "type": "custom"
+ },
+ {
+ "allValue": null,
+ "current": {
+ "tags": [],
+ "text": "Latency_EL3Entry_EL3Exit",
+ "value": "Latency_EL3Entry_EL3Exit"
+ },
+ "hide": 0,
+ "includeAll": false,
+ "label": "Latency",
+ "multi": false,
+ "name": "Latency",
+ "options": [
+ {
+ "selected": false,
+ "text": "CacheFlush",
+ "value": "CacheFlush"
+ },
+ {
+ "selected": false,
+ "text": "Latency_CPUWakeup_EL3Exit",
+ "value": "Latency_CPUWakeup_EL3Exit"
+ },
+ {
+ "selected": false,
+ "text": "Latency_EL3Entry_CPUPowerDown",
+ "value": "Latency_EL3Entry_CPUPowerDown"
+ },
+ {
+ "selected": true,
+ "text": "Latency_EL3Entry_EL3Exit",
+ "value": "Latency_EL3Entry_EL3Exit"
+ }
+ ],
+ "query": "CacheFlush,Latency_CPUWakeup_EL3Exit,Latency_EL3Entry_CPUPowerDown,Latency_EL3Entry_EL3Exit",
+ "skipUrlSync": false,
+ "type": "custom"
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "e98d934aee",
+ "value": "e98d934aee"
+ },
+ "datasource": "TFA_RunTime_Perf",
+ "definition": "SHOW TAG VALUES WITH KEY=CommitID",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID 1",
+ "multi": false,
+ "name": "CommitID1",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY=CommitID",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "e98d934aee",
+ "value": "e98d934aee"
+ },
+ "datasource": "TFA_RunTime_Perf",
+ "definition": "SHOW TAG VALUES WITH KEY=CommitID",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Commit ID 2",
+ "multi": false,
+ "name": "CommitID2",
+ "options": [],
+ "query": "SHOW TAG VALUES WITH KEY=CommitID",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-30d",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "TFA_RunTime_Perf_Details",
+ "uid": "qqVv390mz",
+ "version": 10
+}
\ No newline at end of file
diff --git a/quality-metrics/docs/visualisation_user_guide.md b/quality-metrics/docs/visualisation_user_guide.md
new file mode 100644
index 0000000..6233e53
--- /dev/null
+++ b/quality-metrics/docs/visualisation_user_guide.md
@@ -0,0 +1,19 @@
+# Visualisation User Guide
+Once the broker component is up and running and it has been verified that data is being pushed to InfluxDB, the data can be visualised using Grafana (a quick way to verify this is sketched after the steps below). The steps to create a Grafana dashboard are as follows:
+
+1. Go to `http://[Host public IP address]:3000` and sign in using the appropriate credentials (Grafana's default credentials are admin/admin).
+1. Create a data source for each database. Set the data source name and database as listed in the table below, and set the URL to `http://[Host public IP address]:8086`. Click on "Save & Test".
+1. To create a dashboard, click on the '+' sign on the left side bar. Select "Import" and paste the content of the JSON file from the [sample dashboards](./sample-dashboards) folder for the dashboard that needs to be created. TFA dashboard JSON files are provided in [sample dashboards](./sample-dashboards) for reference. Users can also create custom dashboards or modify the sample ones. For details on creating dashboards, please refer to [Grafana Labs](https://grafana.com/docs/grafana/latest/getting-started/getting-started/).
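+
+To confirm that metrics data has actually reached InfluxDB before wiring up Grafana, the database can be queried directly. The snippet below is a minimal sketch only: it assumes the `influxdb` Python client package is installed and that InfluxDB does not require authentication; adjust the host, port and database name to match your deployment.
+
+```python
+from influxdb import InfluxDBClient
+
+# Placeholder host; use the public IP address of the host running InfluxDB
+client = InfluxDBClient(host="<Host public IP address>", port=8086,
+                        database="TFA_CodeChurn")
+
+# A non-empty measurement list indicates that the broker has committed data
+print(client.get_list_database())
+print(client.query("SHOW MEASUREMENTS"))
+```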
+
+The following table captures the data source details for the dashboards provided in the [sample dashboards](./sample-dashboards) folder:
+
+| S. No. | Data Source Name | Database |
+| ------------- | ------------- | ------------- |
+| 1 | TFA_CodeChurn | TFA_CodeChurn |
+| 2 | TFA_Complexity | TFA_Complexity |
+| 3 | TFA_Defects | TFA_Defects |
+| 4 | TFA_ImageSize | TFA_ImageSize |
+| 5 | TFA_MisraDefects | TFA_MisraDefects |
+| 6 | TFA_RunTime_Perf | TFA_RTINSTR |
+
+Please note that the URL remains the same for all data sources, i.e., `http://[Host public IP address]:8086`.
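+
+As an alternative to creating the data sources manually in the Grafana UI, they can be scripted against Grafana's HTTP API. The snippet below is a minimal sketch only: it assumes Grafana is reachable on port 3000 with the default admin/admin credentials and that the databases listed above already exist in InfluxDB; adjust the URLs and credentials for your deployment.
+
+```python
+import requests
+
+GRAFANA_URL = "http://<Host public IP address>:3000"
+INFLUX_URL = "http://<Host public IP address>:8086"
+AUTH = ("admin", "admin")  # default Grafana credentials; change before deployment
+
+# Data source name -> InfluxDB database, as per the table above
+DATA_SOURCES = {
+    "TFA_CodeChurn": "TFA_CodeChurn",
+    "TFA_Complexity": "TFA_Complexity",
+    "TFA_Defects": "TFA_Defects",
+    "TFA_ImageSize": "TFA_ImageSize",
+    "TFA_MisraDefects": "TFA_MisraDefects",
+    "TFA_RunTime_Perf": "TFA_RTINSTR",
+}
+
+for name, database in DATA_SOURCES.items():
+    payload = {
+        "name": name,
+        "type": "influxdb",
+        "access": "proxy",
+        "url": INFLUX_URL,
+        "database": database,
+    }
+    resp = requests.post(f"{GRAFANA_URL}/api/datasources", json=payload, auth=AUTH)
+    print(name, resp.status_code)
+```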
diff --git a/quality-metrics/readme.md b/quality-metrics/readme.md
new file mode 100644
index 0000000..c427834
--- /dev/null
+++ b/quality-metrics/readme.md
@@ -0,0 +1,25 @@
+# quality-metrics
+
+The *quality-metrics* project implements a set of components that enable project teams to generate and track code quality metrics for data-driven quality improvement. It comprises:
+- a set of data generator scripts that produce quality metrics data (such as code churn, open defects and code complexity) for a given project.
+- a data broker middleware component to manage the capturing of the data generated by multiple projects.
+- a database backend to store and track the generated quality metrics; the current implementation uses the [InfluxDB](https://github.com/influxdata/influxdb) time-series database.
+- a visualisation front-end to view the trend of these metrics over time using the [Grafana](https://github.com/grafana/grafana) visualisation tool.
+- a set of docker files for easy deployment of containerised components.
+
+Additional documentation is also provided that outlines how a user can visualise InfluxDB data using Grafana.
+
+## Design Overview
+Please refer to the [design overview](./docs/design_overview.md) for design details of the broker component and the data generator scripts.
+
+## Broker Component User Guide
+The [broker component user guide](./docs/broker_component_user_guide.md) contains details of how to bring up the broker component, which implements APIs (Application Programming Interfaces) for data generator clients. The data generator clients POST metrics data, which in turn gets committed to the backend database. The broker performs a set of basic sanity checks before committing any metrics data and provides simple token-based authentication for clients (a sketch of a typical client interaction is shown below).
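+
+For illustration, a client interaction might look like the sketch below. The endpoint paths, header format and payload layout shown here are placeholders modelled on a typical Flask-JWT setup rather than taken from the actual server; refer to the broker component user guide for the real API, and to `credentials.py` for the reference credentials.
+
+```python
+import requests
+
+BROKER_URL = "http://<Host public IP address>:5000"
+
+# Obtain a JWT token (the /auth path and the credentials are illustrative only)
+auth_resp = requests.post(f"{BROKER_URL}/auth",
+                          json={"username": "metrics_1",
+                                "password": "metrics_pass_1"})
+token = auth_resp.json()["access_token"]
+
+# POST a metrics record; the metric name must be one of the supported metrics
+# (e.g. "tfa_code_churn") and api_version must be a supported version
+payload = {
+    "api_version": "1.0",
+    "metrics": "tfa_code_churn",
+    "data": {"commit_id": "e98d934aee", "lines_of_change": 123},  # placeholder fields
+}
+resp = requests.post(f"{BROKER_URL}/",  # placeholder endpoint path
+                     json=payload,
+                     headers={"Authorization": f"JWT {token}"})
+print(resp.status_code, resp.text)
+```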
+
+## Data Generator User Guide
+Please refer to the [data generator user guide](./docs/data_generator_user_guide.md) for details on how metrics data is generated. These data generator scripts are typically integrated with individual projects' CI (Continuous Integration) setups.
+
+## Visualisation User Guide
+The [visualisation user guide](./docs/visualisation_user_guide.md) contains details on visualising the InfluxDB data using Grafana.
+
+## License
+[BSD-3-Clause](../license.md)