Initial commit.

 - qa-tools public release, which includes:
    - trace-based coverage tool
    - quality metrics measurement and tracking setup
    - associated in-source documentation

Signed-off-by: Basil Eljuse <basil.eljuse@arm.com>
diff --git a/quality-metrics/data-generator/common_metrics/__init__.py b/quality-metrics/data-generator/common_metrics/__init__.py
new file mode 100644
index 0000000..7337f68
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/__init__.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" __init__.py:
+
+    __init__.py for complexity parser
+
+"""
+from complexity_parser import *
diff --git a/quality-metrics/data-generator/common_metrics/common_utilities/common_utilities.sh b/quality-metrics/data-generator/common_metrics/common_utilities/common_utilities.sh
new file mode 100755
index 0000000..decb753
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/common_utilities/common_utilities.sh
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#===============================================================================
+# FILE: common_utilities.sh
+#
+# DESCRIPTION: Contains common utilities required by all the metrics
+#===============================================================================
+
+# === Function ========================================================
+# NAME: include_variables_file
+# DESCRIPTION: Includes the variables file specific to the repository for
+#              which metrics are being computed. For example, include and
+#              exclude folders differ between repositories.
+# PARAMETERS:
+#    $1: File containing variables specific to the repository for which
+#        metrics are computed.
+# =====================================================================
+include_variables_file()
+{
+  . ./"${1}"
+}
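+
+# Illustrative usage (the function sources "./$1", so the variables file
+# must be in the current working directory):
+#   include_variables_file "tfa_variables.sh"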
+
+
+# === Function ========================================================
+# NAME: cleanup_and_exit
+# DESCRIPTION: Deletes a repository, if it exists, and exits
+# =====================================================================
+cleanup_and_exit()
+{
+  # Delete the cloned repository
+  if [ -d "$REPOSITORY" ]; then
+    printf "Deleting $REPOSITORY...\n"
+    rm -rf $REPOSITORY
+  fi
+
+  printf "Exiting...\n"
+  exit
+}
+
+# === Function ========================================================
+# NAME: generate_code_churn_summary
+# DESCRIPTION: Generates the code churn summary from stats
+# PARAMETER:
+#   $1: STATS
+# =====================================================================
+generate_code_churn_summary()
+{
+  INS_DEL_LOC_EXTRACT="[0-9]+ file(s)? changed, ([0-9]+) insertion(s)?\(\+\), ([0-9]+) deletion(s)?\(\-\)"
+  INS_LOC_EXTRACT="[0-9]+ file(s)? changed, ([0-9]+) insertion(s)?\(\+\)"
+  DEL_LOC_EXTRACT="[0-9]+ file(s)? changed, ([0-9]+) deletion(s)?\(\-\)"
+  if [[ $1 =~ ${INS_DEL_LOC_EXTRACT} ]]; then
+    INS=${BASH_REMATCH[2]}
+    DEL=${BASH_REMATCH[4]}
+  elif [[ $1 =~ ${INS_LOC_EXTRACT} ]]; then
+    INS=${BASH_REMATCH[2]}
+    DEL=0
+  elif [[ $1 =~ ${DEL_LOC_EXTRACT} ]]; then
+    INS=0
+    DEL=${BASH_REMATCH[2]}
+  else
+    INS=0; DEL=0
+  fi
+
+  CODE_CHURN=$((INS+DEL))
+  echo "$CODE_CHURN"
+}
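+
+# Illustrative example (hypothetical "git diff --stat" summary line):
+#   stats="4 files changed, 120 insertions(+), 45 deletions(-)"
+#   generate_code_churn_summary "$stats"   # prints 165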
+
+# === Function ========================================================
+# NAME: get_git_tag_date
+# DESCRIPTION: Returns the git tag date, as follows:
+#              1. tagger date is returned for an annotated tag
+#              2. creation date is returned for a non-annotated tag
+# PARAMETER:
+#   $1: tag (optional; defaults to TARGET_TAG)
+# =====================================================================
+get_git_tag_date()
+{
+  GIT_TAG_DATE_TIME=''
+  GIT_TAG_DATE=''
+
+  if [ -n "$1" ]; then
+    tag=$1
+  else
+    tag=$TARGET_TAG
+  fi
+  # Get tagger date for git tag in YYYY-MM-DD format
+  GIT_TAG_DATE_TIME=$(git rev-parse $tag | xargs git cat-file -p | \
+                      awk '/^tagger/ { print strftime("%F",$(NF-1))}')
+  # If the tagger date is not returned (non-annotated tag), then get the creation date
+  if [ -z "${GIT_TAG_DATE_TIME}" ]; then
+    printf "\nGit tag date is \"created date\" because $tag is non-annotated...\n"
+    GIT_TAG_DATE_TIME=$(git log -1 --format=%ai $tag)
+  else
+    printf "\nGit tag date is \"tagger date\" because $tag is annotated...\n"
+  fi
+  export GIT_TAG_DATE_TIME
+  arr=($GIT_TAG_DATE_TIME)
+  export GIT_TAG_DATE=${arr[0]}
+}
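+
+# Illustrative usage (assuming an annotated tag v2.3 exists in the repository):
+#   get_git_tag_date "v2.3"
+#   echo "$GIT_TAG_DATE"   # e.g. 2020-04-20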
+
+# === Function =================================================================
+# NAME: get_base_tag
+# DESCRIPTION: Checks if the target tag exists. If it exists, gets the base tag
+# ==============================================================================
+get_base_tag()
+{
+  # list git tag by commit date and extract the tag string
+  tagList=$(git tag | xargs -I@ git log --format=format:"%ai @%n" -1 @ | sort | awk '{print $4}')
+
+  tagArray=($tagList)
+  matched=0
+
+  prevTag=""
+  currentTag=""
+  counter=0
+  TAG_PATTERN=$1
+
+  # Check if target tag exists
+  for i in "${tagArray[@]}"; do
+    if [ "$i" == "$tag" ]; then
+      matched=1
+      currentTag=$i
+      break
+    else
+      # Keep counting until the target tag is found
+      counter=$((counter+1))
+      continue
+    fi
+  done
+
+  if [ $matched -eq 0 ]; then
+    printf "@@ Tag $tag does not exist. Please specify an existing one.\n"
+    echo "Existing Tags:"
+    git tag | xargs -I@ git log --format=format:"%ai @%n" -1 @ | sort | awk '{print $4}'
+    exit
+  fi
+
+  get_git_tag_date "$tag"
+  tag_date_1=$GIT_TAG_DATE
+
+  # Search for previous tag in the form of vXXX.YYY before the current tag
+  # Skip the current tag itself and find the first match
+  START=$((counter-1))
+  for ((i=${START};i>=0;i--)); do
+    temp_tag="${tagArray[$i]}"
+    get_git_tag_date "$temp_tag"
+    tag_date_2=$GIT_TAG_DATE
+    echo "$temp_tag $GIT_TAG_DATE $tag_date_2"
+      if [[ $temp_tag =~ $TAG_PATTERN ]] && [[ "$tag_date_1" != "$tag_date_2" ]]; then
+        prevTag=$temp_tag
+        break
+      fi
+  done
+
+  printf "@@ Tag $tag is valid\n"
+  export TARGET_TAG=$currentTag
+  export BASE_TAG=$prevTag
+  echo "@@ Target tag is $TARGET_TAG ($tag_date_1)"
+  echo "@@ Base tag is $BASE_TAG ($tag_date_2)"
+}
diff --git a/quality-metrics/data-generator/common_metrics/complexity_parser/__init__.py b/quality-metrics/data-generator/common_metrics/complexity_parser/__init__.py
new file mode 100644
index 0000000..01b3878
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/complexity_parser/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" __init__.py:
+
+    __init__.py for complexity parser
+
+"""
+
+__all__ = ["complexity_parser"]
+
+from .complexity_parser import ComplexityParser
diff --git a/quality-metrics/data-generator/common_metrics/complexity_parser/complexity_parser.py b/quality-metrics/data-generator/common_metrics/complexity_parser/complexity_parser.py
new file mode 100644
index 0000000..afb3d76
--- /dev/null
+++ b/quality-metrics/data-generator/common_metrics/complexity_parser/complexity_parser.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" complexity_parser.py:
+
+    Data converter class. This class is aimed at converting the received
+    data in the format which InfluxDB understands.
+
+"""
+
+import collections
+import re
+import sys
+
+
+class ComplexityParser(object):
+    """
+        Extract the following data from the complexity logs:
+            - complexity table: {filename: <complexity score>}
+    """
+
+    def __init__(self, complexityLog, threshold):
+        """ class constructor function """
+        self.complexityLog = complexityLog
+        self.complexityDict = collections.OrderedDict()
+        self.threshold = threshold
+
+        self.process_complexity_log()
+        self.process_complexity_data()
+
+    def process_complexity_log(self):
+        """ function to process complexity log and populate the complexity dictionary """
+        with open(self.complexityLog) as fp:
+            for line in fp:
+                scoreRegex = r"([0-9]+)\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+(.*)"
+                m = re.match(scoreRegex, line)
+
+                if m:
+                    score = m.group(1)
+
+                    self.complexityDict[m.group(2).strip()] = score
+
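+
+    # Illustrative "pmccabe -vt" line matched by scoreRegex above
+    # (hypothetical values):
+    #   12      14      47      120     60      bl31/bl31_main.c(120): bl31_main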
+    def process_complexity_data(self):
+        """ function to extract the function IDs above the complexity threshold """
+        self.complexityDict = collections.OrderedDict(
+            (k, v) for k, v in self.complexityDict.items() if int(v) >= self.threshold)
+
+    def apply_whitelist(self):
+        """ Add an additional field to indicate whitelist YES/NO """
+        tmpDict = collections.OrderedDict()
+        exclusionDict = collections.OrderedDict()
+
+        # read in the whitelist
+        with open('./whitelist.dat') as f:
+            lines = f.read().splitlines()
+
+        # construct a dictionary for the white list to deal with:
+        # FULL_DIR_FOR_EXCLUSION, FULL_FILE_FOR_EXCLUSION and function
+        for i in lines:
+            tmpK = i.split(':')[0]
+            tmpV = i.split(':')[1]
+            exclusionDict[tmpK] = tmpV
+
+        whitelist_match = 0
+
+        for k, v in self.complexityDict.items():
+            # dealing with whitelist
+            for wlK, wlV in exclusionDict.items():
+
+                if (wlV == "FULL_DIR_FOR_EXCLUSION") or (
+                        wlV == "FULL_FILE_FOR_EXCLUSION"):
+                    # dealing with FULL_DIR_FOR_EXCLUSION and FULL_FILE_FOR_EXCLUSION;
+                    # here we compare the directory path or file name before the ':'
+                    if wlK in k.split(':')[0]:
+                        whitelist_match = 1
+                else:
+                    # dealing with function exclusion
+                    if wlV in k.split(':')[1]:
+                        whitelist_match = 1
+
+            if whitelist_match != 1:
+                newValue = v + ",NO"
+            else:
+                newValue = v + ",YES"
+
+            # add into the dictionary
+            tmpDict[k] = newValue
+
+            whitelist_match = 0
+
+        return tmpDict
+
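+# Illustrative usage (hypothetical log file name; apply_whitelist() expects
+# a whitelist.dat file in the current directory):
+#   parser = ComplexityParser("complexity.log", 11)
+#   annotated = parser.apply_whitelist()   # values become "<score>,YES" or "<score>,NO"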
+
+class ComplexityHTMLCreator(object):
+    """
+        Create HTML using the complexity statistics
+    """
+
+    def __init__(self, complexityDict, fileName):
+        """ Class constructor function """
+        self.complexityDict = complexityDict
+        # output file name
+        self.fileName = fileName
+
+        self.create_template_head()
+        self.add_table_content()
+        self.create_template_tail()
+
+    def create_template_head(self):
+        """ Function to make the HTML template """
+        with open(self.fileName, 'w') as f:
+            f.write("<!DOCTYPE html>\n")
+            f.write("<html>\n")
+            f.write("<head>\n")
+            f.write("<style>\n")
+            f.write("table, th, td{\n")
+            f.write("    border: 1px solid black;\n")
+            f.write("    border-collapse: collapse;\n")
+            f.write("}\n")
+            f.write("</style>\n")
+            f.write("</head>\n")
+            f.write("<body>\n")
+            f.write("<table>\n")
+            f.write("  <tr>\n")
+            f.write("    <th>Function ID</th>\n")
+            f.write("    <th>In-file location</th>\n")
+            f.write("    <th>Complexity Score</th>\n")
+            f.write("  </tr>\n")
+
+    def add_table_content(self):
+        """ function to add rows for test case result summary """
+        with open(self.fileName, "a") as f:
+
+            for functionInfo, score in self.complexityDict.items():
+                if int(score) >= 10:
+                    f.write("  <tr bgcolor=\"#E67E62\">\n")
+                else:
+                    f.write("  <tr>\n")
+
+                # add function information
+                location = functionInfo.split(':')[0].strip()
+                functionName = functionInfo.split(':', 1)[1].strip()
+
+                # add function name, location and complexity score
+                f.write("    <td>{0}</td>\n".format(functionName))
+                f.write("    <td>{0}</td>\n".format(location))
+                f.write("    <td>{0}</td>\n".format(score))
+                f.write("  </tr>\n")
+
+    def create_template_tail(self):
+        """ function to add the closing part of html """
+
+        with open(self.fileName, "a") as f:
+            f.write("</table>\n")
+            f.write("</body>\n")
+            f.write("</html>\n")
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_defects.py b/quality-metrics/data-generator/tfa_metrics/tfa_defects.py
new file mode 100755
index 0000000..8725909
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_defects.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_defects.py:
+
+    Retrieves TF-A defects from GitHub
+
+"""
+
+from github import GitHub, ApiError, ApiNotFoundError
+
+try:
+    token = "<GitHub Access Token>"
+    gh = GitHub(access_token=token)
+
+    # Please note that currently 'open' defects are reported
+    # In future, labels='bug' would be used for defect density
+    open_bug_issues = gh.repos(
+        'ARM-software')('tf-issues').issues.get(state='open', labels='bug')
+
+    bugCounter = 0
+
+    TFA_URL = "https://github.com/ARM-software/tf-issues/issues/"
+
+    for issue in open_bug_issues:
+        print("Found open bug with id: %s: %s, %s" %
+              (issue.number, issue.title, issue.state))
+        bugCounter += 1
+
+        print("\t url for this issue is: %s" % (TFA_URL + str(issue.number)))
+
+    print("@@ Total number of open bugs: %d" % (bugCounter))
+
+except ApiNotFoundError as e:
+    print(e, e.request, e.response)
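+
+# Illustrative output (hypothetical issue; DefectParser in
+# tfa_generate_influxdb_files.py parses these lines):
+#   Found open bug with id: 123: Example defect title, open
+#        url for this issue is: https://github.com/ARM-software/tf-issues/issues/123
+#   @@ Total number of open bugs: 1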
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_generate_influxdb_files.py b/quality-metrics/data-generator/tfa_metrics/tfa_generate_influxdb_files.py
new file mode 100755
index 0000000..825c1c9
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_generate_influxdb_files.py
@@ -0,0 +1,344 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_generate_influxdb_files.py:
+
+    Parses the TF-A metrics summary files and generates JSON files
+    containing data to be written to InfluxDB.
+    Usage: python3 tfa_generate_influxdb_files.py --defectLog <defect log> \
+                --complexityLog <complexity log> --loc <code churn loc> \
+                --gitTagDate <tag date> --influxTime <git tag date & time>
+
+"""
+
+import argparse
+import os
+import re
+import collections
+import string
+import time
+import json
+
+
+def load_module(name, fpath):
+    """
+    Function to return access to the module
+
+    :param: name: Module name to be loaded
+    :param: fpath: Relative path to complexity_parser.py
+    :return: Module object
+    """
+    import os
+    import imp
+    return imp.load_source(
+        name, os.path.join(
+            os.path.dirname(__file__), fpath))
+
+
+load_module(
+    "complexity_parser",
+    "../common_metrics/complexity_parser/complexity_parser.py")
+
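+# imp.load_source() registers the loaded module in sys.modules, so the
+# regular import below resolves even though complexity_parser.py lives
+# outside this script's directory.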
+from complexity_parser import ComplexityParser
+
+def args_parse():
+
+    global DEFECT_LOG
+    global COMPLEXITY_LOG
+    global CODE_CHURN
+    global BASE_RELEASE_TAG
+    global TARGET_RELEASE_TAG
+    global GIT_TAG_DATE
+    global GIT_TAG_DATE_TIME
+
+    # Create parser instance and add arguments
+    parser = argparse.ArgumentParser(
+        description="TF-A quality metrics InfluxDB JSON files generator")
+    parser.add_argument("--defectLog", help="name of the defect log")
+    parser.add_argument("--complexityLog", help="name of the complexity log")
+    parser.add_argument("--loc", help="code churn statistics", required=True)
+    parser.add_argument(
+        "--baseTag",
+        help="name of the base release tag",
+        required=True)
+    parser.add_argument(
+        "--targetTag",
+        help="name of the target release tag",
+        required=True)
+    parser.add_argument("--gitTagDate", help="Git Tag Date", required=True)
+    parser.add_argument(
+        "--influxTime",
+        help="InfluxDB time, which is Git Tag Date and Time",
+        required=True)
+
+    # Parse the arguments
+    args = parser.parse_args()
+
+    if args.defectLog:
+        DEFECT_LOG = args.defectLog
+
+    if args.complexityLog:
+        COMPLEXITY_LOG = args.complexityLog
+
+    if args.loc:
+        CODE_CHURN = args.loc
+
+    if args.baseTag:
+        BASE_RELEASE_TAG = args.baseTag
+
+    if args.targetTag:
+        TARGET_RELEASE_TAG = args.targetTag
+
+    if args.gitTagDate:
+        GIT_TAG_DATE = re.sub('[-]', '', args.gitTagDate)
+
+    if args.influxTime:
+        GIT_TAG_DATE_TIME = args.influxTime
+
+
+def tfa_generate_defect_data(data):
+    """
+        Generates InfluxDB data for TF-A defects and writes that to
+        defects_tracking.json and defects_statistics.json files.
+
+        :param: data: Defect dictionary {defect ID: {title, state, url}}
+    """
+
+    dict_list = []
+    runDate = time.strftime('%H:%M-%x')
+
+    # "Issue_Status" acts as an indicative field to help the viewer figure out
+    # the current status of the bug
+    defects_tracking = {
+        "metadata": {
+            "metrics": "tfa_defects_tracking"
+        },
+        "api_version": "1.0",
+        "data": [{
+            "measurement": "TFA_Defects_Tracking",
+            "fields": {
+                "Issue_Status": "{}".format("Open"),
+                "Number_of_Defects": int(len(data))
+            },
+            "tags": {
+                "Measured_Date": "{}".format(runDate)
+            },
+        }]
+    }
+
+    with open('defects_tracking.json', 'w') as fp:
+        json.dump(defects_tracking, fp)
+
+    # Write details of each defect into the other measurement called
+    # "TFA_Defects_Statistics"
+    defect_stats = {}
+    defect_stats["data"] = []
+    defect_stats["metadata"] = {}
+    defect_stats["metadata"]["metrics"] = "tfa_defects_stats"
+    defect_stats["api_version"] = "1.0"
+    for ID, description in data.items():
+        json_body = {
+            "measurement": "TFA_Defects_Statistics",
+            "fields": {
+                "Title": "{}".format(description['title']),
+                "Issue_Status": "{}".format(description['state']),
+                "URL": "{}".format(description['url'])
+            },
+            "tags": {
+                "Defect_ID": "{}".format(ID),
+                "Measured_Date": "{}".format(runDate)
+            }
+        }
+
+        defect_stats["data"].append(json_body)
+
+    with open('defects_statistics.json', 'w') as fp:
+        json.dump(defect_stats, fp)
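+
+# Illustrative defects_tracking.json content (hypothetical values):
+#   {"metadata": {"metrics": "tfa_defects_tracking"}, "api_version": "1.0",
+#    "data": [{"measurement": "TFA_Defects_Tracking",
+#              "fields": {"Issue_Status": "Open", "Number_of_Defects": 42},
+#              "tags": {"Measured_Date": "18:30-04/20/20"}}]}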
+
+
+def tfa_generate_codechurn_data(data, base_tag, target_tag):
+    """
+        Generates InfluxDB data for TF-A code churn and
+        writes that to code_churn.json file.
+
+        :param: data: Lines of change
+        :param: base_tag: Release tag prior to target_tag
+        :param: target_tag: Tag being tested
+    """
+
+    json_body = {
+        "metadata": {
+            "metrics": "tfa_code_churn"
+        },
+        "api_version": "1.0",
+        "data": [{
+            "measurement": "TFA_CodeChurn_Tracking",
+            "fields": {
+                "Lines_of_Change": int(data)
+            },
+            "tags": {
+                "Git_Tag_Date": int(GIT_TAG_DATE),
+                "Base_Tag": "{}".format(base_tag),
+                "Target_Tag": "{}".format(target_tag)
+            },
+            "time": GIT_TAG_DATE_TIME
+        }]
+    }
+
+    with open('code_churn.json', 'w') as fp:
+        json.dump(json_body, fp)
+
+
+def tfa_generate_complexity_data(data, base_tag, target_tag, threshold):
+    """
+        Generates InfluxDB data for TF-A complexity scores and
+        writes that to complexity stats and tracking json files.
+
+        :param: data: Complexity data
+        :param: base_tag: Release tag prior to target_tag
+        :param: target_tag: Tag being tested
+        :param: threshold: Complexity threshold
+    """
+
+    complexity_stats = {}
+    complexity_stats["data"] = []
+    complexity_stats["metadata"] = {}
+    complexity_stats["metadata"]["metrics"] = "tfa_complexity_stats"
+    complexity_stats["api_version"] = "1.0"
+
+    totalComplexity = 0
+
+    print(
+        "@@ Number of functions with complexity score > %d: %d" %
+        (threshold, len(data)))
+
+    for k, v in data.items():
+        # Extract the location and function name
+        location = k.split(':', 1)[0].strip()
+        functionID = k.split(':', 1)[1].strip()
+        json_body = {
+            "measurement": "TFA_Complexity_Statistics",
+            "fields": {
+                "Function_ID": "{}".format(functionID),
+                "Score": int(v),
+                "Whitelisted": "{}".format("no"),
+                "Threshold": int(threshold)
+            },
+            "tags": {
+                "Location": "{}".format(location),
+                "Git_Tag_Date": int(GIT_TAG_DATE),
+                "Base_Tag": "{}".format(base_tag),
+                "Target_Tag": "{}".format(target_tag)
+            },
+            "time": GIT_TAG_DATE_TIME
+        }
+
+        complexity_stats["data"].append(json_body)
+        totalComplexity += int(v)
+
+    with open('complexity_stats.json', 'w') as fp:
+        json.dump(complexity_stats, fp)
+
+    totalExceedThreshold = len(data)
+    complexity_tracking = {
+        "metadata": {
+            "metrics": "tfa_complexity_tracking"
+        },
+        "api_version": "1.0",
+        "data": [{
+            "measurement": "TFA_Complexity_Tracking",
+            "fields": {
+                "Threshold": int(threshold),
+                "Whitelisted": "{}".format("no"),
+                "Functions_Exceeding_Threshold_Not_Whitelisted": int(totalExceedThreshold)
+            },
+            "tags": {
+                "Git_Tag_Date": int(GIT_TAG_DATE),
+                "Target_Tag": "{}".format(target_tag)
+            },
+            "time": GIT_TAG_DATE_TIME
+        }]
+    }
+
+    with open('complexity_tracking.json', 'w') as fp:
+        json.dump(complexity_tracking, fp)
+
+
+class DefectParser(object):
+    """
+        Extract the following data from the defect/complexity logs:
+            - defect list: {test class ID:{title: <title>, link: <URL>}}
+            - int variable: total number of defects
+    """
+
+    def __init__(self, defectLog):
+        self.defectLog = defectLog
+        self.defectDict = collections.OrderedDict()
+
+        self.process_defect_log()
+
+    def process_defect_log(self):
+        """
+            Function to process defect log and populate the defect dictionary
+        """
+        with open(self.defectLog) as fp:
+            content = fp.readlines()
+
+        baseURL = "https://github.com/ARM-software/tf-issues/issues/"
+
+        # Get defect id, title and URL link to populate the defect dictionary
+        for i in content:
+            i_strip = i.strip()
+
+            titleIDRegex = "^Found open bug with id: ([0-9]+): (.*)"
+            mIDTitle = re.match(titleIDRegex, i)
+
+            if mIDTitle:
+                defectID = mIDTitle.group(1)
+                defectTitle = mIDTitle.group(2)
+                defectURL = baseURL + mIDTitle.group(1)
+
+                self.defectDict[defectID] = {}
+                self.defectDict[defectID]['title'] = defectTitle.rsplit(',', 1)[0]
+                self.defectDict[defectID]['url'] = defectURL
+                self.defectDict[defectID]['state'] = defectTitle.rsplit(',', 1)[1].strip()
+
+
+if __name__ == "__main__":
+
+    # Initialise global variables
+    DEFECT_LOG = ""
+    COMPLEXITY_LOG = ""
+    CODE_CHURN = 0
+    BASE_RELEASE_TAG = 0
+    TARGET_RELEASE_TAG = 0
+    # Functions having pmccabe cyclomatic complexity >= TFA_THRESHOLD
+    # are reported
+    TFA_THRESHOLD = 11
+    GIT_TAG_DATE = ""
+
+    # parse arguments
+    args_parse()
+
+    # Generate defect data
+    defectData = DefectParser(DEFECT_LOG)
+
+    # Generate complexity data
+    complexityData = ComplexityParser(COMPLEXITY_LOG, TFA_THRESHOLD)
+
+    tfa_generate_defect_data(defectData.defectDict)
+
+    tfa_generate_codechurn_data(
+        CODE_CHURN,
+        BASE_RELEASE_TAG,
+        TARGET_RELEASE_TAG)
+
+    tfa_generate_complexity_data(
+        complexityData.complexityDict,
+        BASE_RELEASE_TAG,
+        TARGET_RELEASE_TAG,
+        TFA_THRESHOLD)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_quality_metrics.sh b/quality-metrics/data-generator/tfa_metrics/tfa_quality_metrics.sh
new file mode 100755
index 0000000..cc920d9
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_quality_metrics.sh
@@ -0,0 +1,353 @@
+#!/usr/bin/env bash
+
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+#======================================================================
+# FILE: tfa_quality_metrics.sh
+#
+# DESCRIPTION: script to report defects and calculate complexity scores for arm-trusted-firmware
+#
+# USAGE: ./tfa_quality_metrics.sh --tag <release tag>
+#        ./tfa_quality_metrics.sh --metric_type <metric type> <data file option> <file>
+#
+#======================================================================
+. ../common_metrics/common_utilities/common_utilities.sh
+. ./tfa_variables.sh
+
+# === Function ========================================================
+# NAME: clone_git_repo
+# DESCRIPTION: Clones the repository via "git clone" command
+# =====================================================================
+clone_git_repo()
+{
+  REPO_URL=$1
+  REPOSITORY=$(basename $REPO_URL .git)
+  # If repository already exists, then return from this function
+  if [ -d $REPOSITORY ]; then
+    printf "\nRepository \"$REPOSITORY\" already exists."
+    return
+  fi
+
+  # Clone repo. If it doesn't exist, then exit.
+  printf "\nCloning $REPOSITORY...\n"
+  printf "git clone $REPO_URL\n"
+  clone_err=$(git clone "$REPO_URL" 2>&1 | grep "fatal")
+
+  if [[ ! -z $clone_err ]]; then
+    printf "Repository \"$REPOSITORY\" not found. Exiting...\n"
+    exit
+  fi
+}
+
+# === Function ========================================================
+# NAME: tag_validation
+# DESCRIPTION: Invokes get_base_tag, which retrieves the base tag if the
+#              target tag is valid
+# PARAMETER:
+#   $1: tag id
+# =====================================================================
+tag_validation()
+{
+  tag=$1
+
+  # check that tag actually exists
+  pushd arm-trusted-firmware
+  get_base_tag "^v[0-9]+\.[0-9]+$"
+  popd
+}
+
+# === Function ========================================================
+# NAME: generate_defect_summary
+# DESCRIPTION: Calculates the total number of defects and writes them to
+#              DEFECT_LOG
+# =====================================================================
+generate_defect_summary()
+{
+  # copy the github module to this level
+  cp $DIR/githubpy/github.py .
+  cp $DIR/githubpy/setup.py .
+
+  python3 $DIR/tfa_defects.py > $DEFECT_LOG
+}
+
+# === Function ========================================================
+# NAME: get_complexity_score
+# DESCRIPTION: Finds cyclomatic complexity of all the C/C++ files.
+# =====================================================================
+get_complexity_score()
+{
+  complexity_dir="$(basename $TFA_REPO .git)"
+
+  # check the availability of pmccabe
+  validation=$(which pmccabe)
+  if [ -z "$validation" ]; then
+    echo "pmccabe not found. Aborting test...\n"
+    exit
+  fi
+
+  # find out complexity on computed folder
+  pmccabe -vt `find $complexity_dir -name "*.c"` `find $complexity_dir -name "*.cpp"` > $COMPLEXITY_LOG
+}
+
+# === Function ========================================================
+# NAME: complexity_score
+# DESCRIPTION: Calculates the McCabe complexity score
+# =====================================================================
+complexity_score()
+{
+  # checkout the tag before running pmccabe
+  pushd $DIR/arm-trusted-firmware
+
+  echo "git checkout ${TARGET_TAG}"
+  git checkout ${TARGET_TAG}
+  git status
+
+  # exclude subfolders under plat except for 'arm' and 'common'
+  mv plat tmp_plat
+  mkdir plat
+  cp -rp tmp_plat/arm tmp_plat/common tmp_plat/compat plat 2>/dev/null
+  rm -rf tmp_plat
+
+  # exclude subfolders under lib
+  rm -rf lib/stdlib
+  rm -rf lib/libfdt
+  rm -rf lib/compiler-rt
+
+  # exclude tools
+  rm -rf tools
+
+  # exclude services/spd except for 'tspd'
+  mv services/spd services/safe_spd
+  mkdir services/spd
+  cp -rp services/safe_spd/tspd services/spd 2>/dev/null
+  rm -rf services/safe_spd
+
+  popd
+
+  get_complexity_score
+}
+
+# === Function ========================================================
+# NAME: code_churn_summary
+# DESCRIPTION: Function to get the code churn summary between BASE_TAG
+#              and TARGET_TAG; the result is written to CODE_CHURN_LOG
+# =====================================================================
+code_churn_summary()
+{
+  pushd $DIR/arm-trusted-firmware
+
+  echo "@@ Calculating code churn excluding plat folder..."
+
+  # Calculate code churn
+  stats1=$(git diff --stat $BASE_TAG $TARGET_TAG  -- . ':!plat' | grep -E "[0-9]+ file(s)? changed,")
+  CODE_CHURN1=$(generate_code_churn_summary "$stats1")
+
+  echo "@@ Calculating code churn plat/arm and plat/common folder..."
+  stats2=$(git diff --stat $BASE_TAG $TARGET_TAG  -- 'plat/arm' 'plat/common' | grep -E "[0-9]+ file(s)? changed,")
+  CODE_CHURN2=$(generate_code_churn_summary "$stats2")
+
+  CODE_CHURN=$((CODE_CHURN1+CODE_CHURN2))
+  echo "Code churn: $CODE_CHURN  LOC" | tee $DIR/$CODE_CHURN_LOG
+
+  # get tagger date for git tag in YYYY-MM-DD format
+  get_git_tag_date
+
+  popd
+
+  echo $CODE_CHURN
+}
+
+# === Function ========================================================
+# NAME: write_influxdb_data
+# DESCRIPTION: Function to generate JSON files containing DB data
+# =====================================================================
+write_influxdb_data()
+{
+  # Create a result folder using the current time stamp and
+  # copy InfluxDB json files to it
+  local resultDir=$(date +%Y-%m-%d_%H_%M_%S)
+  local_result=$DIR/$resultDir
+
+  mkdir -p $local_result
+  mv *.json *.txt $local_result
+
+  pushd $local_result
+
+  for json_file in *.json; do
+    curl -X POST -H "Content-Type: application/json" -d "$(cat ${json_file})" \
+      "http://${INFLUX_HOST}:5000" -H "${TFA_METRICS_AUTH_TOKEN}"
+  done
+
+  popd
+}
+
+# === Function ========================================================
+# NAME: generate_defect_codechurn_complexity_data
+# DESCRIPTION: Function to generate defects, code churn and complexity
+#   quality metrics data for given tag.
+# =====================================================================
+generate_defect_codechurn_complexity_data()
+{
+  # Remove files from previous run, if any
+  rm -rf arm-trusted-firmware/ github* setup.py
+
+  clone_git_repo $TFA_REPO
+  clone_git_repo $GITHUBPY_REPO
+
+  # validate TARGET_TAG and get base tag
+  tag_validation $TARGET_TAG
+
+  # do defect statistics
+  generate_defect_summary
+
+  # cyclomatic complexity check
+  complexity_score
+
+  # code churn
+  code_churn_summary
+
+  # Create InfluxDB json files to be written to InfluxDB
+  python3 $DIR/tfa_generate_influxdb_files.py --defectLog $DEFECT_LOG \
+    --complexityLog $COMPLEXITY_LOG --loc $CODE_CHURN --baseTag $BASE_TAG \
+    --targetTag $TARGET_TAG --gitTagDate $GIT_TAG_DATE --influxTime "$GIT_TAG_DATE_TIME"
+}
+
+# === Function ========================================================
+# NAME: usage
+# DESCRIPTION: Function to print script usage
+# =====================================================================
+usage()
+{
+  # print usage common to all files
+  printf "USAGE: $(basename $0) [options]\n"
+  printf "\t params: \n"
+  printf "\t -h|--help            print help information\n"
+  printf "\t --tag                user specified release tag\n"
+  printf "\t --metric_type        [ runtime_instrumentation | image_size | coverity_misra ]*\n"
+  printf "\t --rt_instr           Path to file containing instrumentation data\n"
+  printf "\t                      Required when metric_type is runtime_instrumentation\n"
+  printf "\t --image_size_file    Path to file containing image size data\n"
+  printf "\t                      Required when metric_type is image_size\n"
+  printf "\t --misra_defects_file Path to file containing MISRA defects information\n"
+  printf "\t                      Required when metric_type is coverity_misra\n"
+  printf "* By default, code coverage, defects and complexity metrics are generated for given tag\n"
+  printf "When metric_type is specified, corresponding data file to be parsed is also required\n"
+  exit
+}
+
+# === Function ========================================================
+# NAME: generate_tfa_metrics_data
+# DESCRIPTION: Function to generate InfluxDB JSON file for specified
+#   TF-A metrics - run time instrumentation/image size/MISRA defects
+# =====================================================================
+generate_tfa_metrics_data()
+{
+  case $METRIC_TYPE in
+    runtime_instrumentation)
+      if [[ ! -f $RTINSTR_FILE ]]; then
+        echo "$RTINSTR_FILE doesn't exist.. Exiting.."
+        exit 1
+      else
+        python3 tfa_rt_instr.py --rt_instr $RTINSTR_FILE
+      fi
+    ;;
+    image_size)
+      if [[ ! -f $IMAGE_SIZE_FILE ]]; then
+        echo "$IMAGE_SIZE_FILE doesn't exist.. Exiting.."
+        exit 1
+      else
+        python3 tfa_track_image_size.py --image_size_file $IMAGE_SIZE_FILE
+      fi
+    ;;
+    coverity_misra)
+      if [[ ! -f $MISRA_DEFECTS_FILE ]]; then
+        echo "$MISRA_DEFECTS_FILE doesn't exist.. Exiting.."
+        exit 1
+      else
+        python3 tfa_track_misra_defects.py --misra_defects_file $MISRA_DEFECTS_FILE
+      fi
+    ;;
+  esac
+  write_influxdb_data
+  exit
+}
+
+# === Function ========================================================
+# NAME: parse_args
+# DESCRIPTION: Arguments parser function
+# =====================================================================
+parse_args()
+{
+  # parse the arguments
+  while [[ $# -gt 0 ]]
+  do
+    key="$1"
+    case $key in
+      -h|--help)
+        usage
+      ;;
+      --tag)
+        export TARGET_TAG="$2"
+        shift
+        shift
+      ;;
+      --metric_type)
+        export METRIC_TYPE="$2"
+        shift
+        shift
+      ;;
+      --rt_instr_file)
+        export RTINSTR_FILE="$2"
+        shift
+        shift
+      ;;
+      --image_size_file)
+        export IMAGE_SIZE_FILE="$2"
+        shift
+        shift
+      ;;
+      --misra_defects_file)
+        export MISRA_DEFECTS_FILE="$2"
+        shift
+        shift
+      ;;
+      *)
+        echo "Unknown argument $key in arguments $@"
+        usage
+      ;;
+    esac
+  done
+
+}
+
+# === Function ========================================================
+# NAME: main
+# DESCRIPTION: main function
+# PARAMETER: Command-line arguments
+# =====================================================================
+main()
+{
+  parse_args "$@"
+
+  # If metrics type is specified, then generate influxdb JSON files
+  # from given text files
+  if [[ ! -z $METRIC_TYPE ]]; then
+    generate_tfa_metrics_data
+  # Otherwise generate code churn, complexity and defects data for given tag
+  elif [[ ! -z $TARGET_TAG ]]; then
+    generate_defect_codechurn_complexity_data
+  else
+    echo "Please specify either metric_type or tag.."
+    usage
+  fi
+
+  # write generated data (JSON files) to InfluxDB
+  write_influxdb_data
+}
+
+main "$@"
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_rt_instr.py b/quality-metrics/data-generator/tfa_metrics/tfa_rt_instr.py
new file mode 100644
index 0000000..bc40a7f
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_rt_instr.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_rt_instr.py:
+
+       Parses the job output log file, stores the data in a list of dictionaries
+       and creates JSON file to be written to influxDB.
+
+       USAGE: python3 tfa_rt_instr.py --rt_instr <job_output.log>
+
+   """
+
+import argparse
+import os
+import os.path
+import re
+import json
+
+
+class TFAInstrFileParser:
+
+    def __init__(self, input_file):
+        self.file_name = input_file
+        self.rtinstr_data = {}
+        self.rtinstr_data["data"] = []
+        self.rtinstr_data["metadata"] = {}
+        self.rtinstr_data["metadata"]["metrics"] = "tfa_rtinstr"
+        self.rtinstr_data["api_version"] = "1.0"
+        self.parse_instr_file()
+        print(json.dumps(self.rtinstr_data, indent=4, sort_keys=True))
+
+    def write_database_instr_tfa(self, file_dict):
+        self.rtinstr_data["data"].append(file_dict)
+
+    def parse_instr_file(self):
+        with open(self.file_name) as fp:
+            # Store instrumentation target as measurement name
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'InstrumentationTarget':
+                print("Invalid file format.. Intrumentation not found..")
+                print("Exiting..")
+                exit()
+            measurement = val[1].strip()
+
+            # Store commit ID
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'CommitID':
+                print("Invalid file format.. Commit ID not found..")
+                print("Exiting..")
+                exit()
+            commit_id = val[1].strip()[0:10]
+
+            # Store commit title
+            line = fp.readline()
+            val = line.split(':', 1)
+            if val[0].strip() != 'CommitTitle':
+                print("Invalid file format.. CommitTitle not found..")
+                print("Exiting..")
+                exit()
+            commit_title = val[1].strip()
+
+            # Store time as commit date
+            line = fp.readline()
+            if line.split()[0] != 'CommitDate:':
+                print("Invalid file format.. Commit Date not found..")
+                print("Exiting..")
+                exit()
+            commit_time = line.split()[1]
+
+            # Store latency data per test case
+            for line in iter(fp.readline, ''):
+                file_dict = {}
+                file_dict['tags'] = {}
+                file_dict['fields'] = {}
+                file_dict['measurement'] = measurement
+                file_dict['tags']['CommitID'] = commit_id
+                file_dict['tags']['CommitTitle'] = commit_title
+                file_dict['time'] = commit_time
+                tc_arr = line.split()
+                file_dict['tags']['TC_Name'] = tc_arr[0]
+                file_dict['tags']['Cluster_ID'] = int(tc_arr[1])
+                file_dict['tags']['CPU_Core'] = int(tc_arr[2])
+                if file_dict['tags']['TC_Name'] == 'testrtinstrpsciversionparallel':
+                    file_dict['fields']['Latency_EL3Entry_EL3Exit'] = int(
+                        tc_arr[3])
+                else:
+                    file_dict['fields']['Latency_EL3Entry_CPUPowerDown'] = int(
+                        tc_arr[3])
+                    file_dict['fields']['Latency_CPUWakeup_EL3Exit'] = int(
+                        tc_arr[4])
+                    file_dict['fields']['CacheFlush'] = int(tc_arr[5])
+                self.write_database_instr_tfa(file_dict)
+
+            with open('tfa_rtinstr.json', 'w') as fp:
+                json.dump(self.rtinstr_data, fp)
+
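+# Illustrative input layout expected by parse_instr_file() (all values
+# hypothetical):
+#   InstrumentationTarget: juno-aarch64
+#   CommitID: 1234567890abcdef
+#   CommitTitle: Example commit title
+#   CommitDate: 2020-04-20
+#   testrtinstrsuspdeepparallel 0 0 100000 200000 3000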
+
+def get_tfa_instr_file():
+    # Create parser instance and add argument
+    parser = argparse.ArgumentParser(
+        description="TFA quality metrics: Runtime Instrumentation tracking")
+    parser.add_argument(
+        "--rt_instr",
+        help="file containing TF-A runtime instrumentation info")
+
+    # Parse the args
+    args = parser.parse_args()
+
+    # Check if file exists
+    if os.path.isfile(str(args.rt_instr)):
+        return args.rt_instr
+    else:
+        print("Runtime Instrumentation file not found.. Exiting..")
+        exit()
+
+
+if __name__ == '__main__':
+    tfa_instr_file_data = TFAInstrFileParser(str(get_tfa_instr_file()))
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_track_image_size.py b/quality-metrics/data-generator/tfa_metrics/tfa_track_image_size.py
new file mode 100755
index 0000000..44dba10
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_track_image_size.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_track_image_size.py:
+
+       Parses TFA firmware image size file, stores the data in a list of
+       dictionaries and creates JSON file to be written to influxDB.
+
+       USAGE: python3 tfa_track_image_size.py --image_size_file <ImageSizeFile.txt>
+
+   """
+
+import argparse
+import os.path
+import re
+import json
+
+# Validation Variables
+MEM_SECTION_VALIDATION_TABLE = ['B', 'D', 'R', 'T', 'V', 'W']
+ELF_FILES_LOOKUP_TABLE = [
+    'bl1.elf',
+    'bl1u.elf',
+    'bl2.elf',
+    'bl2u.elf',
+    'bl31.elf',
+    'bl32.elf']
+
+
+class TFASizeFileParser:
+    """
+        Download the file containing sizes of various TFA build configs
+        Store the size data in a list of dictionaries in the following format:
+            [
+                {
+                    "measurement": <build_config>,
+                    "fields" : {
+                        "BlX_B": Size of uninitialized data section
+                        "BlX_D": Size of initialized data section
+                        "BlX_R": Size of read only data section
+                        "BlX_T": Size of text (code) section
+                        "BlX_V": Size of weak object
+                        "BlX_W": Size of weak symbol
+                    },
+                    "tags" : {
+                        "BinMode"         : Type of build (Release|Debug)
+                        "CommitID"        : Commit ID
+                        "CommitTitle"     : Commit title
+                    }
+                    "time" : Commit Time
+                }
+            ]
+    """
+
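+    # Illustrative input accepted by parse_image_size_file() (all values
+    # hypothetical):
+    #   BuildConfig: juno-default
+    #   BinMode: release
+    #   CommitID: 1234567890abcdef
+    #   CommitTitle: Example commit title
+    #   CommitDate: 2020-04-20
+    #   build/juno/release/bl1.elf:
+    #   T 12345
+    #   D 2048
+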
+    def __init__(self, input_file):
+        self.file_name = input_file
+        self.file_dict = {}
+        self.parse_image_size_file()
+        print(json.dumps(self.file_dict, indent=4, sort_keys=True))
+
+    def parse_image_size_file(self):
+        self.file_dict['tags'] = {}
+        self.file_dict['fields'] = {}
+
+        with open(self.file_name) as fp:
+            # Store measurement name as build config
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'BuildConfig':
+                print("Invalid file format.. BuildConfig not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['measurement'] = val[1].strip()
+
+            # Store bin_mode
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'BinMode':
+                print("Invalid file format.. BinMode not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['tags'][val[0].strip()] = val[1].strip().title()
+
+            # Store Commit ID
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'CommitID':
+                print("Invalid file format.. Commit ID not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['tags'][val[0].strip()] = val[1].strip()[0:10]
+
+            # Store Commit Title
+            line = fp.readline()
+            val = line.split(':', 1)
+            if val[0].strip() != 'CommitTitle':
+                print("Invalid file format.. CommitTitle not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['tags']['CommitTitle'] = val[1].strip()
+
+            # Store time as commit date
+            line = fp.readline()
+            if line.split()[0] != 'CommitDate:':
+                print("Invalid file format.. Commit Date not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['time'] = line.split()[1]
+
+            # Store Image Size memory related data component-wise
+            build = None
+            for line in iter(fp.readline, ''):
+                if ".elf" in line:
+                    searched_build = line.split('/')[-1].split(':')[0]
+                    build = searched_build.upper().rsplit('.', 1)[0]
+                    if searched_build not in ELF_FILES_LOOKUP_TABLE:
+                        print(
+                            "WARNING: " +
+                            searched_build +
+                            " not present in ELF_FILES_LOOKUP_TABLE..")
+                        print(
+                            "Skipping publishing data for " +
+                            searched_build +
+                            " to InfluxDB")
+                        build = None
+                        continue
+                elif build is not None:
+                    val = line.split(' ')
+                    if len(val) > 1:
+                        if not val[0].strip() in MEM_SECTION_VALIDATION_TABLE:
+                            print(
+                                "Invalid memory section \"%s\".. Exiting.." %
+                                val[0].strip())
+                            exit()
+                        mem_comp = build + "_" + val[0].strip()
+                        self.file_dict['fields'][mem_comp] = int(
+                            val[1].strip())
+
+            if not self.file_dict['fields']:
+                failed_configs = 'failed_configs.txt'
+
+                if os.path.exists(failed_configs):
+                    append_write = 'a'  # append if already exists
+                else:
+                    append_write = 'w'  # make a new file if not
+
+                failed_configs_file = open(failed_configs, append_write)
+                failed_configs_file.write(
+                    self.file_dict['measurement'] +
+                    ', ' +
+                    self.file_dict['tags']['BinMode'] +
+                    ': bl1/bl1u/bl2/bl2u/bl31/bl32 not found\n')
+                failed_configs_file.close()
+                print("No memory section found.. Exiting")
+                exit()
+
+
+def generate_influxdb_json_file(file_dict):
+    image_size_data = {}
+    image_size_data["data"] = []
+    image_size_data["metadata"] = {}
+    image_size_data["metadata"]["metrics"] = "tfa_image_size"
+    image_size_data["api_version"] = "1.0"
+    image_size_data["data"].append(file_dict)
+    with open('tfa_image_size.json', 'w') as fp:
+        json.dump(image_size_data, fp)
+
+
+def get_tfa_size_file():
+    # Create parser instance and add argument
+    parser = argparse.ArgumentParser(
+        description="TFA quality metrics: firmware image size tracking")
+    parser.add_argument(
+        "--image_size_file",
+        help="file containing TFA image size info")
+
+    # Parse the args
+    args = parser.parse_args()
+
+    # Check if file exists
+    if os.path.isfile(str(args.image_size_file)):
+        return args.image_size_file
+    else:
+        print("Image size file not found.. Exiting..")
+        exit()
+
+
+if __name__ == '__main__':
+    tfa_size_file_data = TFASizeFileParser(str(get_tfa_size_file()))
+    generate_influxdb_json_file(tfa_size_file_data.file_dict)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_track_misra_defects.py b/quality-metrics/data-generator/tfa_metrics/tfa_track_misra_defects.py
new file mode 100755
index 0000000..6962ca5
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_track_misra_defects.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python3
+
+__copyright__ = """
+/*
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *
+ */
+ """
+
+""" tfa_track_misra_defects.py:
+
+       Parses TFA MISRA defects file, stores the data in a list of
+       dictionaries and creates JSON file to be written to influxDB.
+
+       USAGE: python3 tfa_track_misra_defects.py --misra_defects_file <DefectsFile.txt>
+
+   """
+
+import argparse
+import os.path
+import re
+import json
+
+
+class TFACoverityFileParser:
+    """
+        Store the Misra C defects data in a list of dictionaries in the following
+        format:
+            [
+                {
+                    "measurement": <build_config>,
+                    "fields" : {
+                        "TotalDefects"    : Total coverity defects
+                        "MandatoryDefects": Mandatory defects
+                        "RequiredDefects" : Required defects
+                        "AdvisoryDefects" : Advisory defects
+                    },
+                    "tags" : {
+                        "BinMode"         : Type of build (Release|Debug)
+                        "CommitID"        : Commit ID
+                        "CommitTitle"     : Commit Title
+                    }
+                    "time" : PR Merge Commit Time
+                }
+            ]
+    """
+
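+    # Illustrative input accepted by parse_misra_defects_file() (all values
+    # hypothetical):
+    #   BuildConfig: juno-default
+    #   BinMode: debug
+    #   CommitID: 1234567890abcdef
+    #   CommitTitle: Example commit title
+    #   CommitDate: 2020-04-20
+    #   TotalDefects: 10
+    #   MandatoryDefects: 0
+    #   RequiredDefects: 6
+    #   AdvisoryDefects: 4
+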
+    def __init__(self, input_file):
+        self.file_name = input_file
+        self.parse_misra_defects_file()
+        print(json.dumps(self.file_dict, indent=4, sort_keys=True))
+
+    def parse_misra_defects_file(self):
+        self.file_dict = {}
+        self.file_dict['tags'] = {}
+        self.file_dict['fields'] = {}
+
+        with open(self.file_name) as fp:
+            # Store measurement name as build config
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'BuildConfig':
+                print("Invalid file format.. BuildConfig not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['measurement'] = val[1].strip()
+
+            # Store bin_mode
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'BinMode':
+                print("Invalid file format.. BinMode not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['tags'][val[0].strip()] = val[1].strip().title()
+
+            # Store Commit ID
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'CommitID':
+                print("Invalid file format.. Commit ID not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['tags'][val[0].strip()] = val[1].strip()[0:10]
+
+            # Store Commit Title
+            line = fp.readline()
+            val = line.split(':', 1)
+            if val[0].strip() != 'CommitTitle':
+                print("Invalid file format.. CommitTitle not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['tags']['CommitTitle'] = val[1].strip()
+
+            # Store time as commit date
+            line = fp.readline()
+            if line.split()[0] != 'CommitDate:':
+                print("Invalid file format.. Commit Date not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['time'] = line.split()[1]
+
+            # Store Total Defects
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'TotalDefects':
+                print("Invalid file format.. TotalDefects not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['fields']['TotalDefects'] = int(val[1].strip())
+
+            # Store Mandatory Defects
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'MandatoryDefects':
+                print("Invalid file format.. MandatoryDefects not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['fields']['MandatoryDefects'] = int(val[1].strip())
+
+            # Store Required Defects
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'RequiredDefects':
+                print("Invalid file format.. RequiredDefects not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['fields']['RequiredDefects'] = int(val[1].strip())
+
+            # Store Advisory Defects
+            line = fp.readline()
+            val = line.split(':')
+            if val[0].strip() != 'AdvisoryDefects':
+                print("Invalid file format.. AdvisoryDefects not found..")
+                print("Exiting..")
+                exit()
+            self.file_dict['fields']['AdvisoryDefects'] = int(val[1].strip())
+
+
+def write_database(file_dict):
+    misra_defects_data = {}
+    misra_defects_data["data"] = []
+    misra_defects_data["metadata"] = {}
+    misra_defects_data["metadata"]["metrics"] = "tfa_misra_defects"
+    misra_defects_data["api_version"] = "1.0"
+    misra_defects_data["data"].append(file_dict)
+    with open('tfa_misra_defects.json', 'w') as fp:
+        json.dump(misra_defects_data, fp)
+
+
+def get_tfa_coverity_file():
+    # Create parser instance and add argument
+    parser = argparse.ArgumentParser(
+        description="TF-A quality metrics: Misra C defects tracking")
+    parser.add_argument("--misra_defects_file",
+                        help="file containing Misra defects information")
+
+    # Parse the args
+    args = parser.parse_args()
+
+    # Check if file exists
+    if os.path.isfile(str(args.misra_defects_file)):
+        return args.misra_defects_file
+    else:
+        print("Coverity file not found.. Exiting..")
+        exit()
+
+
+if __name__ == '__main__':
+    tfa_misra_defects_data = TFACoverityFileParser(
+        str(get_tfa_coverity_file()))
+    write_database(tfa_misra_defects_data.file_dict)
diff --git a/quality-metrics/data-generator/tfa_metrics/tfa_variables.sh b/quality-metrics/data-generator/tfa_metrics/tfa_variables.sh
new file mode 100644
index 0000000..56a2715
--- /dev/null
+++ b/quality-metrics/data-generator/tfa_metrics/tfa_variables.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+
+#======================================================================
+# Copyright (c) 2020, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#======================================================================
+
+export TFA_REPO='https://github.com/ARM-software/arm-trusted-firmware.git'
+export GITHUBPY_REPO='https://github.com/michaelliao/githubpy.git'
+
+export DEFECT_LOG=tfa_defects_summary.txt
+export COMPLEXITY_LOG=tfa_complexity_summary.txt
+export CODE_CHURN_LOG=tfa_code_churn.txt
+
+# Authentication token needs to be generated using following command:
+# curl -H "Content-Type: application/json" -X POST -d \
+# "$(cat <CREDENTIALS_JSON_FILE>)" http://<IP_ADDR>:5000/auth
+# where "IP_ADDR" is the IP address of host where metrics server is running, and
+# CREDENTIALS_JSON file should contain credentials which should match with
+# the credentials in ../../broker-component/credentials.py
+# Response would contain a JWT token, which needs to be added here
+# during deployment
+export TFA_METRICS_AUTH_TOKEN="<TFA Authorization Token>"
+
+# INFLUX_HOST is the IP address of host where InfluxDB service is running
+# It needs to be updated during deployment
+export INFLUX_HOST="<Influx Public Host IP>"
+
+# Absolute path to the directory containing this script
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"