Initial commit.

 - qa-tools public release which includes:
    - trace-based coverage tool
    - quality metrics measurement and tracking setup
    - associated in-source documentation.

Signed-off-by: Basil Eljuse <basil.eljuse@arm.com>
diff --git a/coverage-tool/coverage-plugin/Makefile b/coverage-tool/coverage-plugin/Makefile
new file mode 100644
index 0000000..3a2a18f
--- /dev/null
+++ b/coverage-tool/coverage-plugin/Makefile
@@ -0,0 +1,30 @@
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+
+CPPFLAGS = -I${PVLIB_HOME}/include/fmruntime
+CXXFLAGS = -fpic -Wall -Werror -g
+LDFLAGS  =
+CMAKE_CXX_FLAGS = -std=c++11 -O3
+
+ifeq (${CROSS_COMPILE_32BIT},1)
+CXXFLAGS += -m32
+LDFLAGS  += -m32
+endif
+
+PLUGIN_NAME = coverage_trace
+
+PLUGIN_LIB     = ${PLUGIN_NAME}.so
+PLUGIN_OBJECTS = ${PLUGIN_NAME}.o plugin_utils.o
+
+${PLUGIN_LIB}: ${PLUGIN_OBJECTS}
+	${CXX} -shared -o $@ ${LDFLAGS} $^
+
+.cc.o:
+	${CXX} -c -o $@ ${CXXFLAGS} ${CMAKE_CXX_FLAGS} ${CPPFLAGS} $^
+
+.PHONY: clean
+clean:
+	rm -f ${PLUGIN_OBJECTS} ${PLUGIN_LIB}
diff --git a/coverage-tool/coverage-plugin/coverage_trace.cc b/coverage-tool/coverage-plugin/coverage_trace.cc
new file mode 100644
index 0000000..4dc72ee
--- /dev/null
+++ b/coverage-tool/coverage-plugin/coverage_trace.cc
@@ -0,0 +1,349 @@
+/*!
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+*/
+// Implements the MTI trace plugin interface used to trace instruction
+// data from Arm FVP trace sources.
+
+#include "MTI/PluginInterface.h"
+#include "MTI/PluginFactory.h"
+#include "MTI/PluginInstance.h"
+#include "MTI/ModelTraceInterface.h"
+
+#include "plugin_utils.h"
+#include "trace_sources.h"
+
+#include <cinttypes>
+#include <cstring>
+#include <errno.h>
+#include <string>
+#include <algorithm>
+#include <cstdio>
+#include <sstream>
+#include <vector>
+#include <map>
+#include <typeinfo>
+#include <typeindex>
+#include <utility>
+
+#ifdef SG_MODEL_BUILD
+    #include "builddata.h"
+    #define PLUGIN_VERSION FULL_VERSION_STRING
+#else
+    #define PLUGIN_VERSION "unreleased"
+#endif
+
+using namespace eslapi;
+using namespace MTI;
+using namespace std;
+
+// Implements the plugin interface for trace coverage
+class CoverageTrace : public PluginInstance
+{
+public:
+    virtual CAInterface * ObtainInterface(if_name_t    ifName,
+                                          if_rev_t     minRev,
+                                          if_rev_t *   actualRev);
+
+    CoverageTrace(const char *instance_name, const char *trace_file_prefix);
+    ~CoverageTrace();
+
+    /** Associates the plugin with a simulation instance. Exactly one
+     * simulation must be registered.
+     */
+    virtual eslapi::CADIReturn_t RegisterSimulation(eslapi::CAInterface
+                                                    *simulation);
+
+    // This is called before the plugin .dll/.so is unloaded and should allow
+    // the plugin to do its cleanup.
+    virtual void Release();
+
+    virtual const char *GetName() const;
+
+private:
+    std::string instance_name;
+
+    bool Error(const char *);
+
+    vector<TraceComponentContext*> trace_components;
+    std::string trace_file_prefix;
+};
+
+CAInterface *CoverageTrace::ObtainInterface(if_name_t ifName,
+                            if_rev_t     minRev,
+                            if_rev_t *   actualRev)
+{
+  printf("CoverageTrace::ObtainInterface\n");
+    // If someone is asking for the matching interface
+    if((strcmp(ifName,IFNAME()) == 0) &&
+    // and the revision of this interface implementation is
+       (minRev <= IFREVISION()))
+        // at least what is being asked for
+    {
+        if (actualRev) // Make sure this is not a NULL pointer
+            *actualRev = IFREVISION();
+        return this;
+    }
+
+    if((strcmp(ifName, CAInterface::IFNAME()) == 0) &&
+       minRev <= CAInterface::IFREVISION())
+    {
+        if (actualRev != NULL)
+            *actualRev = CAInterface::IFREVISION();
+        return this;
+    }
+    return NULL;
+}
+
+
+CoverageTrace::CoverageTrace(const char *instance_name_,
+                             const char *trace_file_prefix_) :
+    instance_name(instance_name_),
+    trace_file_prefix(trace_file_prefix_)
+{
+  printf("CoverageTrace::CoverageTrace\n");
+}
+
+CoverageTrace::~CoverageTrace()
+{
+  printf("CoverageTrace::~CoverageTrace\n");
+}
+
+bool
+CoverageTrace::Error(const char *msg)
+{
+    fprintf(stderr, "%s\n", msg);
+    return false;
+}
+
+// Method that registers the simulation traces events. In this case registers
+// for trace sources with the 'INST' name.
+CADIReturn_t
+CoverageTrace::RegisterSimulation(CAInterface *ca_interface)
+{
+  printf("CoverageTrace::RegisterSimulation\n");
+    if (!ca_interface) {
+        Error("Received CAInterface NULL pointer.");
+        return CADI_STATUS_IllegalArgument;
+    }
+    std::stringstream ss;
+
+    SystemTraceInterface *sys_if =
+                          ca_interface->ObtainPointer<SystemTraceInterface>();
+    if (sys_if == 0) {
+        Error("Got a NULL SystemTraceInterface.");
+        return CADI_STATUS_GeneralError;
+    }
+
+    for(SystemTraceInterface::TraceComponentIndex tci=0;
+        tci < sys_if->GetNumOfTraceComponents(); ++tci) {
+        const char* tpath = sys_if->GetComponentTracePath(tci);
+        CAInterface *caif = sys_if->GetComponentTrace(tci);
+        ComponentTraceInterface *cti =
+                                 caif->ObtainPointer<ComponentTraceInterface>();
+        if (cti == 0) {
+            Error("Could not get TraceInterface for component.");
+            continue;
+        }
+
+        if (cti->GetTraceSource("INST") != 0) {
+            TraceComponentContext *trace_component = new
+                                                TraceComponentContext(tpath);
+
+            // To register a new trace source the arguments are the
+            // name of the trace source followed by a vector of
+            // pairs of (field name,field type).
+            InstructionTraceContext *inst_cont = new InstructionTraceContext(
+                                            "INST",
+                                            { {"PC", u32},
+                                            {"SIZE", u32}}
+                                        );
+            inst_cont->nb_insts = 0;
+            inst_cont->CreateEvent(&cti, inst_cont->Callback);
+            trace_component->AddTraceSource(inst_cont);
+            trace_components.push_back(trace_component);
+        }
+    }
+
+    return CADI_STATUS_OK;
+}
+
+// This is called before the plugin .dll/.so is unloaded and should allow the
+// plugin to do its cleanup.
+void
+CoverageTrace::Release()
+{
+  printf("CoverageTrace::Release\n");
+    // We can dump our data now
+    int error = 0;
+    char* fname;
+    int ret;
+    std::vector<TraceComponentContext*>::iterator tcc;
+    for (tcc = trace_components.begin(); tcc < trace_components.end(); ++tcc) {
+        TraceComponentContext *tcont = *tcc;
+        // Print some overall stats
+        InstructionTraceContext* rtc = (InstructionTraceContext*)
+                                    tcont->trace_sources["INST"];
+        printf("Trace path: %s\n", tcont->trace_path.c_str());
+
+        // Construct a trace file name
+        // asprintf() returns the number of bytes printed, or -1 on error.
+        int status = asprintf(&fname, "%s-%s.log",
+                              this->trace_file_prefix.c_str(),
+                              tcont->trace_path.c_str());
+        if (status == -1) {
+            printf("Error in asprintf: %d\n", status);
+            printf("Error description is: %s\n", strerror(errno));
+            error = 1;
+            break;
+        }
+
+        // Open it
+        FILE* fp = fopen(fname, "w");
+        if (fp == NULL) {
+            fprintf(stderr, "Can't open file %s for writing.\n", fname);
+            error = 1;
+            break;
+        }
+
+        InstStatMap::iterator map_it;
+        // Dump the detailed stats
+        for (map_it = rtc->stats.begin(); map_it != rtc->stats.end();
+            ++map_it) {
+            fprintf(fp, "%08x %lu %lu\n", map_it->first, map_it->second.cnt,
+                    map_it->second.size);
+        }
+
+        // Close the file
+        ret = fclose(fp);
+        if (ret != 0) {
+            fprintf(stderr, "Failed to close %s: %s.", fname, strerror(errno));
+            error = 1;
+            break;
+        }
+
+        free(fname);
+    }
+    if (error != 0)
+        delete this;
+}
+
+const char *
+CoverageTrace::GetName() const
+{
+  printf("CoverageTrace::GetName\n");
+    return instance_name.c_str();
+}
+
+// Class used to return a static object CAInterface. CAInterface provides a
+// basis for a software model built around 'components' and 'interfaces'.
+// A component provides concrete implementations of one or more interfaces.
+// Interfaces are identified by a string name (of type if_name_t), and an
+// integer revision (type if_rev_t). A higher revision number indicates a newer
+// revision of the same interface.
+class ThePluginFactory : public PluginFactory
+{
+public:
+    virtual CAInterface *ObtainInterface(if_name_t    ifName,
+                                          if_rev_t     minRev,
+                                          if_rev_t *   actualRev);
+
+    virtual uint32_t GetNumberOfParameters();
+
+    virtual eslapi::CADIReturn_t
+        GetParameterInfos(eslapi::CADIParameterInfo_t *parameter_info_list);
+
+    virtual CAInterface *Instantiate(const char *instance_name,
+                                     uint32_t number_of_parameters,
+                                     eslapi::CADIParameterValue_t *parameter_values);
+
+    virtual void Release();
+
+    virtual const char *GetType() const { return "CoverageTrace"; }
+    virtual const char *GetVersion() const { return PLUGIN_VERSION; }
+};
+
+// Allows a client to obtain a reference to any of the interfaces that the
+// component implements. The client specifies the id and revision of the
+// interface that it wants to request. The component can return NULL if it
+// doesn't implement that interface, or only implements a lower revision.
+// The client in this case is the Arm FVP model.
+CAInterface *ThePluginFactory::ObtainInterface(if_name_t ifName,
+                                  if_rev_t     minRev,
+                                  if_rev_t *   actualRev)
+{
+  printf("ThePluginFactory::ObtainInterface\n");
+    // If someone is asking for the matching interface
+    if((strcmp(ifName,IFNAME()) == 0) &&
+        // and the revision of this interface implementation is
+       (minRev <= IFREVISION()))
+        // at least what is being asked for
+    {
+        if (actualRev) // Make sure this is not a NULL pointer
+            *actualRev = IFREVISION();
+        return static_cast<ThePluginFactory *>(this);
+    }
+
+    if((strcmp(ifName, CAInterface::IFNAME()) == 0) &&
+       minRev <= CAInterface::IFREVISION())
+    {
+        if (actualRev) // Make sure this is not a NULL pointer
+            *actualRev = CAInterface::IFREVISION();
+        return static_cast<CAInterface *>(this);
+    }
+    return NULL;
+}
+
+uint32_t ThePluginFactory::GetNumberOfParameters()
+{
+  printf("ThePluginFactory::GetNumberOfParameters\n");
+  return 1;
+}
+
+eslapi::CADIReturn_t
+ThePluginFactory::GetParameterInfos(
+eslapi::CADIParameterInfo_t *parameter_info_list)
+{
+    printf("ThePluginFactory::GetParameterInfos\n");
+    *parameter_info_list = CADIParameterInfo_t(
+        0, "trace-file-prefix", CADI_PARAM_STRING,
+        "Prefix of the trace files.", 0, 0, 0, 0, "covtrace"
+    );
+    return CADI_STATUS_OK;
+}
+
+// Method that creates a new instance of the trace plugin
+CAInterface *ThePluginFactory::Instantiate(const char *instance_name,
+                              uint32_t param_nb,
+                              eslapi::CADIParameterValue_t *values)
+{
+    printf("ThePluginFactory::Instantiate\n");
+    const char *trace_file_prefix = 0;
+    printf("CoverageTrace: number of params: %d\n", param_nb);
+    for (uint32_t i = 0; i < param_nb; ++i) {
+        if (values[i].parameterID == 0) {
+            trace_file_prefix = values[i].stringValue;
+        } else {
+            printf("\tCoverageTrace: got unexpected param %d\n",
+                   values[i].parameterID);
+        }
+    }
+    return new CoverageTrace(instance_name, trace_file_prefix);
+}
+
+void ThePluginFactory::Release()
+{
+  printf("ThePluginFactory::Release\n");
+}
+
+static ThePluginFactory factory_instance;
+
+// Entry point for the instantiation of the plugin.
+// Returns a pointer to an static object to create the interface for the
+// plugin.
+CAInterface *GetCAInterface()
+{
+    printf("********->GetCAInterface\n");
+    return &factory_instance;
+}
+
+// End of file coverage_trace.cc
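CoverageTrace::Release() above serialises each component's InstStatMap as plain text, one line per program counter: the PC in hex followed by the execution count and the instruction size. A minimal sketch of a consumer for that format in Python (the log file name is a hypothetical instance of the "<prefix>-<trace path>.log" pattern):

    # Sketch: parse a covtrace log written by CoverageTrace::Release().
    # Each line is "<pc-hex> <exec-count> <inst-size>"; the file name is
    # a hypothetical example.
    def parse_covtrace(path):
        """Return a {pc: (count, size)} dictionary from a covtrace log."""
        stats = {}
        with open(path) as f:
            for line in f:
                pc_hex, count, size = line.split()
                stats[int(pc_hex, 16)] = (int(count), int(size))
        return stats

    if __name__ == "__main__":
        stats = parse_covtrace("covtrace-component.log")
        print("{} unique PCs traced".format(len(stats)))

This is the same format that load_stats_from_traces() in intermediate_layer.py below consumes.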
diff --git a/coverage-tool/coverage-plugin/plugin_utils.cc b/coverage-tool/coverage-plugin/plugin_utils.cc
new file mode 100644
index 0000000..8eb3024
--- /dev/null
+++ b/coverage-tool/coverage-plugin/plugin_utils.cc
@@ -0,0 +1,65 @@
+/*!
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+*/
+
+#include "plugin_utils.h"
+
+// Get a named trace source, create an event class from the named subset of
+// event fields, register the event class and look up the field indexes, and
+// register a user-provided MTI callback with the trace source.
+// Writes to error_ss and returns false if anything fails.
+bool RegisterCallbackForComponent(const MTI::ComponentTraceInterface *mti,
+                         const char *trace_source,
+                         ValueBind_t *value_bind, void *this_ptr,
+                         MTI::CallbackT callback,
+                         MTI::EventClass **ptr_event_class,
+                         std::stringstream &error_ss)
+{
+    const MTI::TraceSource *source = mti->GetTraceSource(trace_source);
+    if (!source) {
+        error_ss << "Could not find " << trace_source << " source";
+        return false;
+    }
+
+    MTI::FieldMask mask = 0;
+    const MTI::EventFieldType *eft;
+
+    for(unsigned i=0; value_bind[i].name != 0; i++) {
+        if ((eft = source->GetField( value_bind[i].name )) != 0) {
+            mask |= 1 << eft->GetIndex();
+        } else {
+            error_ss << "No field " << value_bind[i].name <<
+                    " found in " << trace_source << " trace source";
+            return false;
+        }
+    }
+
+    MTI::EventClass *event_class = source->CreateEventClass(mask);
+    if (!event_class) {
+        error_ss << "Unable to register event class for " <<
+                trace_source << " trace source.";
+        return false;
+    }
+    for(unsigned i=0; value_bind[i].name != 0; i++)
+    {
+        MTI::ValueIndex idx = event_class->GetValueIndex(value_bind[i].name);
+        if (idx != -1) {
+            *(value_bind[i].index) = idx;
+        } else {
+            error_ss << "Unable to GetValueIndex for " << trace_source
+                     << "." << value_bind[i].name << ".";
+            return false;
+        }
+    }
+    if (callback &&
+        event_class->RegisterCallback(callback, this_ptr) != MTI::MTI_OK) {
+        error_ss << "RegisterCallback failed for " << trace_source;
+        return false;
+    }
+    *ptr_event_class = event_class;
+    return true;
+}
diff --git a/coverage-tool/coverage-plugin/plugin_utils.h b/coverage-tool/coverage-plugin/plugin_utils.h
new file mode 100644
index 0000000..546e5fc
--- /dev/null
+++ b/coverage-tool/coverage-plugin/plugin_utils.h
@@ -0,0 +1,46 @@
+/*!
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+*/
+#ifndef _COVERAGE_TOOL_COVERAGE_PLUGIN_PLUGIN_UTILS_H_
+#define _COVERAGE_TOOL_COVERAGE_PLUGIN_PLUGIN_UTILS_H_
+
+#include <sstream>
+#include <map>
+#include <vector>
+#include <string>
+#include "MTI/ModelTraceInterface.h"
+using namespace eslapi;
+using namespace MTI;
+using namespace std;
+
+typedef struct {
+    const char *name;
+    MTI::ValueIndex *index;
+} ValueBind_t;
+
+// Declare an MTI callback method and define a static thunk method to call
+// into this from C code.
+#define CALLBACK_DECL_AND_THUNK(class_name, name) \
+    static void name##Thunk(void * user_data, const MTI::EventClass *event_class, const MTI::EventRecord *record) \
+    {                                                                                                   \
+        reinterpret_cast<class_name *>(user_data)->name(event_class, record);                           \
+    }                                                                                                   \
+    void name(const MTI::EventClass *event_class, const MTI::EventRecord *record)
+
+
+// Get a named trace source, create an event class from the named subset of
+// event fields, register the event class and look up the field indexes, and
+// register a user-provided MTI callback with the trace source.
+// Writes to error_ss and returns false if anything fails.
+bool RegisterCallbackForComponent(const MTI::ComponentTraceInterface *mti,
+                         const char *trace_source,
+                         ValueBind_t *value_bind, void *this_ptr,
+                         MTI::CallbackT callback,
+                         MTI::EventClass **ptr_event_class,
+                         std::stringstream &error_ss);
+
+#endif // _COVERAGE_TOOL_COVERAGE_PLUGIN_PLUGIN_UTILS_H_
diff --git a/coverage-tool/coverage-plugin/trace_sources.h b/coverage-tool/coverage-plugin/trace_sources.h
new file mode 100644
index 0000000..57f6462
--- /dev/null
+++ b/coverage-tool/coverage-plugin/trace_sources.h
@@ -0,0 +1,229 @@
+/*!
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+*/
+
+#ifndef _COVERAGE_TOOL_COVERAGE_PLUGIN_TRACE_SOURCES_H_
+#define _COVERAGE_TOOL_COVERAGE_PLUGIN_TRACE_SOURCES_H_
+
+#include <map>
+#include <vector>
+#include <string>
+#include <algorithm>
+#include "MTI/ModelTraceInterface.h"
+
+using namespace MTI;
+using namespace std;
+
+struct InstStat {
+    uint64_t cnt;
+    uint64_t size;
+};
+
+typedef std::map<uint32_t, InstStat> InstStatMap;
+
+//Defining types for fields
+enum enum_types {u32, boolT};
+typedef enum_types ValueTypes;
+
+/*
+ * Structure used to save field data
+ */
+struct TFields{
+    ValueTypes t;
+    MTI::ValueIndex index;
+    void *value;
+};
+// Map of fields => Key -> Field name
+typedef map<string, TFields> TraceFieldsMap;
+
+/*
+ * Structure used to pass field data between trace contexts
+ */
+struct TParams {
+    void *value;
+    ValueTypes t;
+};
+// Map of fields => Key -> Field name
+typedef map<string, TParams> ParamsMap;
+
+/*
+ * Generic function to output errors. Defined static because this header
+ * may be included in more than one translation unit.
+ */
+static bool Error(const char *msg)
+{
+    fprintf(stderr, "%s\n", msg);
+    return false;
+}
+
+/*
+ * Base class for Trace Source contexts
+ *
+ */
+class TraceSourceContext {
+    public:
+        string name; //Trace source name
+        TraceFieldsMap fields; //Fields to be used for the event
+        MTI::EventClass *event_class; //Event object to register callback
+        ParamsMap params; //List of parameters from another trace source
+
+/*
+ * Constructor that converts/stores the pairs of <field name, field type>
+ * in the 'fields' member.
+*/
+TraceSourceContext(const char* tname,
+                    vector<pair<string, ValueTypes>> fields_def) {
+    name = tname;
+    string key;
+    // Referenced by field name => field type
+    for (size_t i=0; i < fields_def.size(); ++ i) {
+        key = fields_def[i].first;
+        fields[key].t = fields_def[i].second;
+    }
+}
+
+/*
+ * Generic Callback that can be used by derived objects. It fills the
+ * 'value' member in the 'fields' structure with a void* to the value
+ * retrieved from the component.
+*/
+template <class T>
+static T *TraceCallback(void* user_data,
+                         const MTI::EventClass *event_class,
+                         const MTI::EventRecord *record) {
+    T *tc = static_cast<T*>(user_data);
+    // Filled by Component
+    TraceFieldsMap::iterator it;
+    for (it = tc->fields.begin(); it != tc->fields.end(); ++it) {
+        // Based on the type, creates an object with the initial
+        // value retrieved from the component using the index
+        // for that field.
+        switch (it->second.t) {
+            case u32: it->second.value = new uint32_t(
+                record->Get<uint32_t>(event_class, it->second.index));
+                break;
+            case boolT: it->second.value = new bool(
+                record->GetBool(event_class, it->second.index));
+                break;
+        }
+    }
+    return tc;
+}
+
+/*
+ * Generic method to copy the fields from this trace source to the params
+ * member of another trace source. Optionally, a list of field names can be
+ * passed to filter which fields are copied.
+ * The params member is a map with the field id (name) as the key.
+*/
+void PassFieldstoParams(TraceSourceContext *target,
+                            vector<string> field_names={}) {
+        TraceFieldsMap::iterator it;
+        for (it = fields.begin(); it != fields.end(); ++it) {
+            bool found = std::find(field_names.begin(), field_names.end(),
+                it->first) != field_names.end();
+            if ((!field_names.empty()) && (!found))
+                continue;
+            target->params[it->first].t = it->second.t;
+            switch (it->second.t) {
+                case u32:
+                    target->params[it->first].value =
+                        new uint32_t(*((uint32_t*)it->second.value));
+                    break;
+                case boolT:
+                    target->params[it->first].value =
+                        new bool(*((bool*)it->second.value));
+                    break;
+            }
+        }
+}
+/*
+ * Method that creates an event object in the trace source based on the
+ * fields given in the constructor. It then registers the given callback
+ * for this event.
+*/
+MTI::EventClass *CreateEvent(ComponentTraceInterface **ptr_cti,
+                MTI::CallbackT callback) {
+
+    ComponentTraceInterface *cti = *ptr_cti;
+    std::stringstream ss;
+    ComponentTraceInterface *mti = 0;
+
+    if (cti->GetTraceSource(name.c_str()) != 0) {
+        TraceSource* ts = cti->GetTraceSource(name.c_str());
+        printf("Trace source attached: %s\n", ts->GetName());
+
+        size_t map_size = fields.size();
+        ValueBind_t *values_array = new ValueBind_t[map_size + 1];
+        TraceFieldsMap::iterator it;
+        int i = 0;
+        for (it = fields.begin(); it != fields.end(); ++it) {
+            values_array[i]= ((ValueBind_t) { it->first.c_str(),
+                &it->second.index });
+            ++i;
+        }
+        values_array[map_size] = {0, 0}; //sentinel
+
+        mti = static_cast<ModelTraceInterface *>(cti);
+        if (!RegisterCallbackForComponent(mti, name.c_str(), values_array,
+            this, callback, &event_class, ss)) {
+            Error(ss.str().c_str());
+            return 0;
+        }
+        return event_class;
+    }
+    return 0;
+}
+};
+
+/*
+ * Class and types used to handle trace sources belonging to a
+ * component.
+*/
+typedef map<string, TraceSourceContext*> MapTraceSourcesType;
+class TraceComponentContext {
+    public:
+        string trace_path;
+        MapTraceSourcesType trace_sources;
+
+TraceComponentContext(string tpath) {
+        trace_path = tpath;
+}
+
+void AddTraceSource(TraceSourceContext *ts) {
+        trace_sources[ts->name] = ts;
+}
+};
+
+/*
+ * Class used to instantiate an Instruction trace source
+*/
+class InstructionTraceContext: public TraceSourceContext {
+    public:
+        using TraceSourceContext::TraceSourceContext;
+        InstStatMap stats;
+        uint64_t nb_insts;
+
+    static void Callback(void* user_data,
+                         const MTI::EventClass *event_class,
+                         const MTI::EventRecord *record) {
+        InstructionTraceContext* itc = static_cast<InstructionTraceContext*>
+                                       (user_data);
+        itc->nb_insts++; // Number of instructions
+        // Filled by Component
+        uint32_t pc = record->GetAs<uint32_t>(event_class,
+                                                      itc->fields["PC"].index);
+        uint32_t size = record->Get<uint32_t>(event_class,
+                                                    itc->fields["SIZE"].index);
+        // Save PC stats. If not already present in the map, a counter with
+        // value 0 will be created before incrementing.
+        InstStat& is = itc->stats[pc];
+        is.cnt++;
+        is.size = size;
+    };
+};
+
+#endif // _COVERAGE_TOOL_COVERAGE_PLUGIN_TRACE_SOURCES_H_
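The INST callback above keeps one InstStat entry per program counter, incrementing the execution count on every event and recording the instruction size. The same aggregation, sketched in Python over a hypothetical iterable of (pc, size) tuples, one per INST trace event:

    # Sketch of the per-PC aggregation done by
    # InstructionTraceContext::Callback; 'events' is hypothetical.
    from collections import defaultdict

    def aggregate(events):
        """Return {pc: {"cnt": n, "size": s}}, mirroring InstStatMap."""
        stats = defaultdict(lambda: {"cnt": 0, "size": 0})
        for pc, size in events:
            stats[pc]["cnt"] += 1     # times this PC was executed
            stats[pc]["size"] = size  # size of the instruction at this PC
        return stats

    print(aggregate([(0x1000, 4), (0x1000, 4), (0x1004, 4)]))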
diff --git a/coverage-tool/coverage-reporting/branch_coverage.sh b/coverage-tool/coverage-reporting/branch_coverage.sh
new file mode 100755
index 0000000..3dc88f3
--- /dev/null
+++ b/coverage-tool/coverage-reporting/branch_coverage.sh
@@ -0,0 +1,140 @@
+#!/usr/bin/env bash
+
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+
+#==============================================================================
+# FILE: branch_coverage.sh
+#
+# DESCRIPTION: Generates the intermediate layer json file and then the
+# code coverage HTML reports using the open-source LCOV tool
+#==============================================================================
+
+set +x
+set -e
+
+ERROR_FILE=coverage_error.log
+
+###############################################################################
+# Prints error message to STDERR and log file.
+# Globals:
+# ERROR_FILE
+# Arguments:
+#   None
+# Outputs:
+#   Writes error to STDERR and log file with a timestamp
+###############################################################################
+err() {
+  echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')]: $*" | tee -a ${ERROR_FILE} 1>&2
+}
+
+touch ${ERROR_FILE}
+if ! [ -x "$(command -v lcov)" ]; then
+  err 'Error: lcov is not installed. Install it with: sudo apt install lcov'
+  exit 1
+fi
+
+###############################################################################
+# Prints script usage.
+# Arguments:
+#   None
+# Outputs:
+#   Writes usage to stdout
+###############################################################################
+usage()
+{
+    # print the usage information
+    printf "Usage: $(basename $0) [options]\n"
+    printf "\t params:\n"
+    printf "\t --config Configuration json file. Required.\n"
+    printf "\t --workspace Local workspace folder where source codes reside. \
+            Required.\n"
+    printf "\t --json-path Intermediate json file name. Optional defaults to \
+            'output_file.json'\n"
+    printf "\t --outdir Report folder. Optional defaults to 'out'\n"
+    printf "\t -h|--help Display usage\n"
+    printf "Example of usage:\n"
+    printf "./branch_coverage.sh --config config_file.json \
+            --workspace /server_side/source/ --outdir html_report\n"
+    exit 1
+}
+
+# default values
+JSON_PATH=output_file.json
+OUTDIR=out
+
+###############################################################################
+# Parse arguments.
+# Globals:
+# CONFIG_JSON
+# LOCAL_WORKSPACE
+# JSON_PATH
+# OUTDIR
+# Arguments:
+#   Command line arguments
+# Outputs:
+#   Writes usage to stdout
+###############################################################################
+parse_arguments()
+{
+  while [ $# -gt 1 ]
+  do
+    key="$1"
+    case $key in
+      --config)
+        CONFIG_JSON="$2"
+        shift
+      ;;
+      --workspace)
+        LOCAL_WORKSPACE="$2"
+        shift
+      ;;
+      --json-path)
+        JSON_PATH="$2"
+        shift
+      ;;
+      --outdir)
+        OUTDIR="$2"
+        shift
+      ;;
+      -h|--help)
+        usage
+      ;;
+      *)
+        printf "Unknown argument $key\n"
+        usage
+      ;;
+    esac
+    shift
+  done
+}
+
+
+parse_arguments "$@"
+
+if [ -z "$LOCAL_WORKSPACE" ] || [ -z "$CONFIG_JSON" ]; then
+    usage
+fi
+
+if [ ! -d "$LOCAL_WORKSPACE" ]; then
+    err "$LOCAL_WORKSPACE doesn't exist\n"
+    exit 1
+fi
+
+if [ ! -f "$CONFIG_JSON" ]; then
+    err "$CONFIG_JSON doesn't exist\n"
+    exit 1
+fi
+
+clear
+echo "Generating intermediate layer file '$JSON_PATH'..."
+python3 intermediate_layer.py --config-json "$CONFIG_JSON" --local-workspace "$LOCAL_WORKSPACE"
+echo "Converting intermediate layer file to info file..."
+python3 generate_info_file.py --workspace "$LOCAL_WORKSPACE" --json "$JSON_PATH"
+echo "Generating LCOV report at '$OUTDIR'..."
+genhtml --branch-coverage coverage.info --output-directory "$OUTDIR"
+mv coverage.info "$OUTDIR"/coverage.info
+mv error_log.txt "$OUTDIR"/error_log.txt
diff --git a/coverage-tool/coverage-reporting/clone_sources.py b/coverage-tool/coverage-reporting/clone_sources.py
new file mode 100644
index 0000000..fb1807d
--- /dev/null
+++ b/coverage-tool/coverage-reporting/clone_sources.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+###############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+###############################################################################
+
+###############################################################################
+# FILE: clone_sources.py
+#
+# DESCRIPTION: Clone the source files for code coverage
+###############################################################################
+
+import os
+import subprocess
+import json
+import time
+from random import random
+
+
+def call_cmd(cmd, print_cmd=False):
+    """
+    Function that executes an OS command and returns its output
+
+    :param cmd: OS command as string
+    :param print_cmd: Optional argument to print the command in stdout
+    :return: The string output of the os command
+    """
+    if print_cmd:
+        print("+" + cmd)
+    out = subprocess.check_output(cmd, shell=True)
+    return out.decode("utf-8")
+
+
+def skip_source(output_dir, source, handler=None):
+    """
+    Function that handles overwriting source files
+
+    :param output_dir: Folder where to put the source files and folders
+    :param source: Dictionary with the information about the source
+    :param handler: Optional function called to decide how to handle
+                    an existing source directory
+    :return: True if cloning of the given source must be skipped,
+             False otherwise
+    """
+    location = os.path.join(output_dir, source['LOCATION'])
+    # Check if exists and have files
+    if os.path.isdir(location):
+        if not os.listdir(location):
+            if handler is not None:
+                return handler(source, "Directory exists and is empty")
+            else:
+                # By default send a warning and overwrite it
+                print(("WARNING!: Directory {} already exists and is "
+                       "empty. Overwriting it...'").format(location))
+                os.rmdir(location)
+                return False
+        commit_id = call_cmd(("cd {} && git log -1 2>/dev/null | "
+                              "grep commit | awk '{{print $2}}'").format(
+                              location), print_cmd=True).strip()
+        if source['type'] == "git":
+            if commit_id == "":
+                # is not a git
+                if handler is not None:
+                    return handler(source, "Directory exists and is not git")
+                else:
+                    print(("WARNING!: Directory {} already exists and is not a"
+                           " git repo: '{}'").format(location, source['URL']))
+            elif commit_id != source["COMMIT"].strip():
+                # there are mismatching commit id's
+                if handler is not None:
+                    return handler(source, "Mismatch in gits")
+                else:
+                    print(("WARNING!: Mismatch in git repo {}\nExpected {}, "
+                           "Cloned {}").format(source['URL'], source['COMMIT'],
+                                               commit_id))
+        elif source['type'] == "http":
+            if handler is not None:
+                return handler(source,
+                               "WARNING!: Directory already exists")
+            else:
+                print("WARNING!: Directory {} already exists".format(
+                    location))
+        return True
+    return False
+
+
+class CloneSources(object):
+    """Class used to clone the source code needed to produce code coverage
+    reports.
+    """
+    def __init__(self, json_file):
+        self.json_file = json_file
+        self.json_data = None
+        self.load_json()
+
+    def load_json(self):
+        with open(self.json_file, "r") as json_file:
+            self.json_data = json.load(json_file)
+
+    def clone_repo(self, output_dir, overwrite_handler=None):
+        """
+        Clones or reproduces a folder with source code based on the
+        configuration in the json file
+
+        :param output_dir: Where to put the source files
+        :param overwrite_handler: Optional function to handle overwrites
+        """
+        if self.json_data is None:
+            self.load_json()
+        sources = []
+        if 'parameters' in self.json_data:
+            sources = self.json_data['parameters']['sources']
+        elif 'configuration' in self.json_data:
+            sources = self.json_data['configuration']['sources']
+        else:
+            raise Exception("Unrecognised format for json sources!")
+
+        for source in sources:
+            if skip_source(output_dir, source, overwrite_handler):
+                continue
+            if source['type'] == "git":
+                git = source
+                url = git["URL"]
+                commit_id = git["COMMIT"]
+                output_loc = os.path.join(output_dir, git["LOCATION"])
+                cmd = "git clone {} {}".format(url, output_loc)
+                output = call_cmd(cmd)
+                if git['REFSPEC']:
+                    call_cmd("cd {};git fetch -q origin {}".format(
+                        output_loc, git['REFSPEC']))
+                if commit_id:
+                    call_cmd("cd {};git checkout -q {}".format(
+                        output_loc, commit_id))
+                else:
+                    call_cmd("cd {};git checkout -q FETCH_HEAD".format(
+                        output_loc))
+            elif source['type'] == 'http':
+                site = source
+                output_loc = os.path.join(output_dir, site["LOCATION"])
+                tmp_folder = os.path.join(output_dir,
+                                          "_tmp_{}_{}".format(time.time(),
+                                                              random()))
+                call_cmd("mkdir -p {}".format(tmp_folder))
+                call_cmd("wget -q {} -P {}".format(
+                    site['URL'], tmp_folder))
+                call_cmd("mkdir -p {}".format(output_loc))
+                if site['COMPRESSION'] == "xz":
+                    # -J selects xz decompression (-z would assume gzip)
+                    call_cmd("cd {};tar -xJf $(basename {}) -C {}".format(
+                        tmp_folder, site['URL'], output_loc))
+                call_cmd("rm -rf {}".format(tmp_folder))
diff --git a/coverage-tool/coverage-reporting/generate_info_file.py b/coverage-tool/coverage-reporting/generate_info_file.py
new file mode 100755
index 0000000..0c0f39a
--- /dev/null
+++ b/coverage-tool/coverage-reporting/generate_info_file.py
@@ -0,0 +1,410 @@
+#!/usr/bin/env python
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+##############################################################################
+
+import os
+import sys
+import json
+import re
+import argparse
+
+
+def function_coverage(function_tuples, info_file):
+    """
+    Parses function coverage information from the intermediate json data
+    and writes it to the info file
+
+    :param function_tuples: List of tuples with function name
+                            and its data as pairs.
+    :param info_file: Handle to the info file for writing coverage
+    """
+    total_func = 0
+    covered_func = 0
+    function_names = []
+    function_cov = []
+    for func_name, func_data in function_tuples:
+        function_names.append(
+            'FN:{},{}\n'.format(
+                func_data["line_number"],
+                func_name))
+        total_func += 1
+        if func_data["covered"]:
+            covered_func += 1
+            function_cov.append('FNDA:1,{}\n'.format(func_name))
+        else:
+            function_cov.append('FNDA:0,{}\n'.format(func_name))
+    info_file.write("\n".join(function_names))
+    info_file.write("\n".join(function_cov))
+    info_file.write('FNF:{}\n'.format(total_func))
+    info_file.write('FNH:{}\n'.format(covered_func))
+
+
+def line_coverage(lines_dict, info_file):
+    """
+    Parses line coverage information from the intermediate json data
+    and writes it to the info file
+
+    :param lines_dict: Dictionary of lines with line number as key
+                       and its data as value
+    :param info_file: Handle to the info file for writing coverage
+    """
+    total_lines = 0
+    covered_lines = 0
+    for line in lines_dict:
+        total_lines += 1
+        if lines_dict[line]['covered']:
+            covered_lines += 1
+            info_file.write('DA:' + line + ',1\n')
+        else:
+            info_file.write('DA:' + line + ',0\n')
+    info_file.write('LF:' + str(total_lines) + '\n')
+    info_file.write('LH:' + str(covered_lines) + '\n')
+
+
+def sanity_check(branch_line, lines_dict, abs_path_file):
+    """
+    Check if the 'branch_line' line of the C source corresponds to actual
+    branching instructions in the assembly code. Also, check if that
+    line is covered. If it's not covered, this branching statement can
+    be omitted from the report.
+    Returns False if the line is not present in the coverage data; logs a
+    warning if no matching branch instruction is found, and returns True
+    otherwise
+
+    :param branch_line: Source code line with the branch instruction
+    :param lines_dict: Dictionary of lines with line number as key
+                        and its data as value
+    :param abs_path_file: File name of the source file
+    """
+    if str(branch_line) not in lines_dict:
+        return False
+    found_branching = False
+    for i in lines_dict[str(branch_line)]['elf_index']:
+        for j in lines_dict[str(branch_line)]['elf_index'][i]:
+            string = lines_dict[str(branch_line)]['elf_index'][i][j][0]
+            # these cover all the possible branching instructions
+            if ('\tb' in string or
+                '\tcbnz' in string or
+                '\tcbz' in string or
+                '\ttbnz' in string or
+                    '\ttbz' in string):
+                # '\tbl' in string or  # already covered by '\tb'
+                # '\tblr' in string or  # already covered by '\tb'
+                # '\tbr' in string or  # already covered by '\tb'
+                found_branching = True
+    if not found_branching:
+        error_log.write(
+            '\nSomething possibly wrong:\n\tFile ' +
+            abs_path_file +
+            ', line ' +
+            str(branch_line) +
+            '\n\tshould be a branching statement but couldn\'t ' +
+            'find correspondence in assembly code')
+    return True
+
+
+def manage_if_branching(branch_line, lines_dict, info_file, abs_path_file):
+    """
+    Takes care of branch coverage. 'branch_line' is the source code line
+    in which the 'if' statement is located; the function produces branch
+    coverage info based on the C source code and the json file content
+
+    :param branch_line: Source code line with the 'if' instruction
+    :param lines_dict: Dictionary of lines with line number as key
+                        and its data as value
+    :param info_file: Handle to the info file for writing coverage
+    :param abs_path_file: File name of the source file
+    """
+    total_branch_local = 0
+    covered_branch_local = 0
+
+    if not sanity_check(branch_line, lines_dict, abs_path_file):
+        return(total_branch_local, covered_branch_local)
+    total_branch_local += 2
+    current_line = branch_line  # used to read lines one by one
+    # check for multiline if-condition and update current_line accordingly
+    parenthesis_count = 0
+    while True:
+        end_of_condition = False
+        for char in lines[current_line]:
+            if char == ')':
+                parenthesis_count -= 1
+                if parenthesis_count == 0:
+                    end_of_condition = True
+            elif char == '(':
+                parenthesis_count += 1
+        if end_of_condition:
+            break
+        current_line += 1
+    # first branch
+    # simple case: 'if' statements with no braces
+    if '{' not in lines[current_line] and '{' not in lines[current_line + 1]:
+
+        if (str(current_line + 1) in lines_dict and
+                lines_dict[str(current_line + 1)]['covered']):
+            info_file.write('BRDA:' + str(branch_line) + ',0,' + '0,' + '1\n')
+            covered_branch_local += 1
+        else:
+            info_file.write('BRDA:' + str(branch_line) + ',0,' + '0,' + '0\n')
+        current_line += 1
+
+    # more complex case: '{' after the 'if' statement
+    else:
+        if '{' in lines[current_line]:
+            current_line += 1
+        else:
+            current_line += 2
+
+        # we need to check whether at least one line in the block is covered
+        found_covered_line = False
+
+        # this is a simpler version of a stack, used to check when a code
+        # block ends; at the moment it just checks for '{' and '}' and does
+        # not take into account the presence of commented braces
+        brace_counter = 1
+        while True:
+            end_of_block = False
+            for char in lines[current_line]:
+                if char == '}':
+                    brace_counter -= 1
+                    if brace_counter == 0:
+                        end_of_block = True
+                elif char == '{':
+                    brace_counter += 1
+            if end_of_block:
+                break
+            if (str(current_line) in lines_dict and
+                    lines_dict[str(current_line)]['covered']):
+                found_covered_line = True
+
+            current_line += 1
+
+        if found_covered_line:
+            info_file.write('BRDA:' + str(branch_line) + ',0,' + '0,' + '1\n')
+            covered_branch_local += 1
+        else:
+            info_file.write('BRDA:' + str(branch_line) + ',0,' + '0,' + '0\n')
+
+    # second branch (if present). If not present, second branch is covered by
+    # default
+    current_line -= 1
+    candidate_else_line = current_line
+    while 'else' not in lines[current_line] and candidate_else_line + \
+            2 >= current_line:
+        current_line += 1
+        if current_line == len(lines):
+            break
+
+    # no 'else': branch covered by default
+    if current_line == candidate_else_line + 3:
+        info_file.write('BRDA:' + str(branch_line) + ',0,' + '1,' + '1\n')
+        covered_branch_local += 1
+        return(total_branch_local, covered_branch_local)
+
+    # 'else' found: check if opening braces are present
+    if '{' not in lines[current_line - 1] and '{' not in lines[current_line]:
+        if str(current_line + 1) in lines_dict:
+            if lines_dict[str(current_line + 1)]['covered']:
+                info_file.write(
+                    'BRDA:' +
+                    str(branch_line) +
+                    ',0,' +
+                    '1,' +
+                    '1\n')
+                covered_branch_local += 1
+            else:
+                info_file.write(
+                    'BRDA:' +
+                    str(branch_line) +
+                    ',0,' +
+                    '1,' +
+                    '0\n')
+        else:
+            info_file.write('BRDA:' + str(branch_line) + ',0,' + '1,' + '0\n')
+
+    else:
+        if '{' in lines[current_line]:
+            current_line += 1
+        else:
+            current_line += 2
+        found_covered_line = False
+        while '}' not in lines[current_line]:
+            if (str(current_line) in lines_dict and
+                    lines_dict[str(current_line)]['covered']):
+                found_covered_line = True
+                break
+            current_line += 1
+        if found_covered_line:
+            info_file.write('BRDA:' + str(branch_line) + ',0,' + '1,' + '1\n')
+            covered_branch_local += 1
+        else:
+            info_file.write('BRDA:' + str(branch_line) + ',0,' + '1,' + '0\n')
+
+    return(total_branch_local, covered_branch_local)
+
+
+def manage_switch_branching(switch_line, lines_dict, info_file, abs_path_file):
+    """
+    Takes care of branch coverage. 'switch_line' is the source code line
+    in which the 'switch' statement is located; the function produces
+    branch coverage info based on the C source code and the json file
+    content
+
+    :param switch_line: Source code line with the 'switch' instruction
+    :param lines_dict: Dictionary of lines with line number as key
+                        and its data as value
+    :param info_file: Handle to the info file for writing coverage
+    :param abs_path_file: File name of the source file
+    """
+
+    total_branch_local = 0
+    covered_branch_local = 0
+
+    if not sanity_check(switch_line, lines_dict, abs_path_file):
+        return(total_branch_local, covered_branch_local)
+
+    current_line = switch_line  # used to read lines one by one
+    branch_counter = 0          # used to count the number of switch branches
+    brace_counter = 0
+
+    # parse the switch-case line by line, checking if every 'case' is covered
+    # the switch-case ends with a '}'
+    while True:
+        if '{' in lines[current_line]:
+            brace_counter += 1
+        if '}' in lines[current_line]:
+            brace_counter -= 1
+        if brace_counter == 0:
+            return(total_branch_local, covered_branch_local)
+        if 'case' in lines[current_line] or 'default' in lines[current_line]:
+            covered = False
+            total_branch_local += 1
+            inner_brace = 0
+            current_line += 1
+            while (('case' not in lines[current_line]
+                   and 'default' not in lines[current_line]) or
+                   inner_brace > 0):
+                if (str(current_line) in lines_dict and
+                        lines_dict[str(current_line)]['covered']):
+                    covered = True
+                if '{' in lines[current_line]:
+                    inner_brace += 1
+                    brace_counter += 1
+                if '}' in lines[current_line]:
+                    inner_brace -= 1
+                    brace_counter -= 1
+                if brace_counter == 0:
+                    break
+                current_line += 1
+            if covered:
+                info_file.write(
+                    'BRDA:' +
+                    str(switch_line) +
+                    ',0,' +
+                    str(branch_counter) +
+                    ',1\n')
+                covered_branch_local += 1
+            else:
+                info_file.write(
+                    'BRDA:' +
+                    str(switch_line) +
+                    ',0,' +
+                    str(branch_counter) +
+                    ',0\n')
+            if brace_counter == 0:
+                return(total_branch_local, covered_branch_local)
+            branch_counter += 1
+        else:
+            current_line += 1
+
+    return(total_branch_local, covered_branch_local)
+
+
+def branch_coverage(abs_path_file, info_file, lines_dict):
+    """
+    Produces branch coverage information, using the functions
+    'manage_if_branching' and 'manage_switch_branching'
+
+    :param abs_path_file: File name of the source file
+    :param info_file: Handle to the info file for writing coverage
+    :param lines_dict: Dictionary of lines with line number as key
+                       and its data as value
+    """
+    total_branch = 0
+    covered_branch = 0
+
+    # branch coverage: if statements
+    branching_lines = []
+
+    # regex: find all the lines starting with 'if' or 'else if'
+    # (possibly preceded by whitespaces/tabs)
+    pattern = re.compile(r"^\s+if|^\s+} else if|^\s+else if")
+    for i, line in enumerate(open(abs_path_file)):
+        for match in re.finditer(pattern, line):
+            branching_lines.append(i + 1)
+    while branching_lines:
+        t = manage_if_branching(branching_lines.pop(0), lines_dict,
+                                info_file, abs_path_file)
+        total_branch += t[0]
+        covered_branch += t[1]
+
+    # branch coverage: switch statements
+    switch_lines = []
+
+    # regex: find all the lines starting with 'switch'
+    # (possibly preceded by whitespaces/tabs)
+    pattern = re.compile(r"^\s+switch")
+    for i, line in enumerate(open(abs_path_file)):
+        for match in re.finditer(pattern, line):
+            switch_lines.append(i + 1)
+    while switch_lines:
+        t = manage_switch_branching(switch_lines.pop(0), lines_dict,
+                                    info_file, abs_path_file)
+        total_branch += t[0]
+        covered_branch += t[1]
+
+    info_file.write('BRF:' + str(total_branch) + '\n')
+    info_file.write('BRH:' + str(covered_branch) + '\n')
+
+
+parser = argparse.ArgumentParser(
+    description="Script to convert intermediate json file to LCOV info file")
+parser.add_argument('--workspace', metavar='PATH',
+                    help='Folder with source files structure',
+                    required=True)
+parser.add_argument('--json', metavar='PATH',
+                    help='Intermediate json file name',
+                    required=True)
+parser.add_argument('--info', metavar='PATH',
+                    help='Output info file name',
+                    default="coverage.info")
+args = parser.parse_args()
+with open(args.json) as json_file:
+    json_data = json.load(json_file)
+info_file = open(args.info, "w+")
+error_log = open("error_log.txt", "w+")
+file_list = json_data['source_files'].keys()
+
+for relative_path in file_list:
+    abs_path_file = os.path.join(args.workspace, relative_path)
+    if not os.path.exists(abs_path_file):
+        continue
+    source = open(abs_path_file)
+    lines = source.readlines()
+    info_file.write('TN:\n')
+    info_file.write('SF:' + os.path.abspath(abs_path_file) + '\n')
+    lines = [-1] + lines  # shift indexes so that lines[n] is source line n
+    function_coverage(
+        json_data['source_files'][relative_path]['functions'].items(),
+        info_file)
+    branch_coverage(abs_path_file, info_file,
+                    json_data['source_files'][relative_path]['lines'])
+    line_coverage(json_data['source_files'][relative_path]['lines'],
+                  info_file)
+    info_file.write('end_of_record\n\n')
+    source.close()
+
+json_file.close()
+info_file.close()
+error_log.close()
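The script walks json_data['source_files'], reading for every source file its 'functions' entries (line_number, covered) and its 'lines' entries (covered, elf_index). A sketch of the intermediate json shape it consumes, inferred from the fields read above; the path, function name and values are hypothetical:

    # Hypothetical intermediate json fragment, as a Python literal.
    intermediate = {
        "source_files": {
            "lib/example/example.c": {
                "functions": {
                    # function name -> defining line and coverage flag
                    "example_init": {"line_number": 42, "covered": True},
                },
                "lines": {
                    # line number (string) -> coverage flag plus, per elf
                    # and per address, the assembly line and times executed
                    "42": {
                        "covered": True,
                        "elf_index": {
                            "2": {"0x4001000": ["\tbl <example_helper>", 1]},
                        },
                    },
                },
            },
        },
    }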
diff --git a/coverage-tool/coverage-reporting/intermediate_layer.py b/coverage-tool/coverage-reporting/intermediate_layer.py
new file mode 100644
index 0000000..794c7a4
--- /dev/null
+++ b/coverage-tool/coverage-reporting/intermediate_layer.py
@@ -0,0 +1,647 @@
+#!/usr/bin/env python
+###############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+###############################################################################
+
+###############################################################################
+# FILE: intermediate_layer.py
+#
+# DESCRIPTION: Creates an intermediate json file with information provided
+#              by the configuration json file, dwarf signatures and trace
+#              files.
+#
+###############################################################################
+
+import os
+import re
+import glob
+import argparse
+import subprocess
+import json
+from argparse import RawTextHelpFormatter
+import logging
+import time
+
+__version__ = "6.0"
+
+# Static map that defines the elf file source type in the intermediate json
+ELF_MAP = {
+    "bl1": 0,
+    "bl2": 1,
+    "bl31": 2,
+    "bl32": 3,
+    "scp_ram": 10,
+    "scp_rom": 11,
+    "mcp_rom": 12,
+    "mcp_ram": 13,
+    "custom_offset": 100
+}
+
+
+def os_command(command, show_command=False):
+    """
+    Function that executes an OS command; raises an exception on failure
+
+    :param command: OS command as string
+    :param show_command: Optional argument to print the command in stdout
+    :return: The string output of the os command
+    """
+    out = ""
+    try:
+        if show_command:
+            print("OS command: {}".format(command))
+        out = subprocess.check_output(
+            command, stderr=subprocess.STDOUT, shell=True)
+    except subprocess.CalledProcessError as ex:
+        raise Exception(
+            "Exception running command '{}': {}({})".format(
+                command, ex.output, ex.returncode))
+    return out.decode("utf8")
+
+
+def load_stats_from_traces(trace_globs):
+    """
+    Function to process and consolidate statistics from trace files
+
+    :param trace_globs: List of trace file patterns
+    :return: Dictionary with stats from trace files i.e.
+        {mem address in decimal}=(times executed, inst size)
+    """
+    stats = {}
+    stat_size = {}
+
+    # Make a list of unique trace files
+    trace_files = []
+    for tg in trace_globs:
+        trace_files.extend(glob.glob(tg))
+    trace_files = set(trace_files)
+
+    if not trace_files:
+        raise Exception("No trace files found for '{}'".format(trace_globs))
+    # Load stats from the trace files
+    for trace_file in trace_files:
+        try:
+            with open(trace_file, 'r') as f:
+                for line in f:
+                    data = line.split()
+                    address = int(data[0], 16)
+                    stat = int(data[1])
+                    size = int(data[2])
+                    stat_size[address] = size
+                    if address in stats:
+                        stats[address] += stat
+                    else:
+                        stats[address] = stat
+        except Exception as ex:
+            logger.error("@Loading stats from trace files:{}".format(ex))
+    # Merge the two dicts
+    for address in stats:
+        stats[address] = (stats[address], stat_size[address])
+    return stats
+
+
+def get_code_sections_for_binary(elf_name):
+    """
+    Function to return the ranges of memory address for sections of code
+    in the elf file
+
+    :param elf_name: Elf binary file name
+    :return: List of code sections tuples, i.e. (section type, initial
+            address, end address)
+    """
+    command = """%s -h %s | grep -B 1 CODE | grep -v CODE \
+                | awk '{print $2" "$4" "$3}'""" % (OBJDUMP, elf_name)
+    text_out = os_command(command)
+    sections = text_out.split('\n')
+    sections.pop()
+    secs = []
+    for sec in sections:
+        try:
+            d = sec.split()
+            secs.append((d[0], int(d[1], 16), int(d[2], 16)))
+        except Exception as ex:
+            logger.error(
+                "@Returning memory address code sections:".format(ex))
+    return secs
+
+
+def get_executable_ranges_for_binary(elf_name):
+    """
+    Get function ranges from an elf file
+
+    :param elf_name: Elf binary file name
+    :return: List of tuples for ranges i.e. (range start, range end)
+    """
+    # Parse all $x / $d symbols
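+    # ($x/$a/$t are Arm ELF mapping symbols marking A64/A32/T32 code;
+    # $d marks literal data pools embedded in code sections)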
+    symbol_table = []
+    command = r"""%s -s %s | awk '/\$[xatd]/ {print $2" "$8}'""" % (
+        READELF, elf_name)
+    text_out = os_command(command)
+    lines = text_out.split('\n')
+    lines.pop()
+    for line in lines:
+        try:
+            data = line.split()
+            address = int(data[0], 16)
+            _type = 'X' if data[1] in ['$x', '$t', '$a'] else 'D'
+        except Exception as ex:
+            logger.error("@Getting executable ranges: {}".format(ex))
+            continue
+        symbol_table.append((address, _type))
+
+    # Add markers for end of code sections
+    sections = get_code_sections_for_binary(elf_name)
+    for sec in sections:
+        symbol_table.append((sec[1] + sec[2], 'S'))
+
+    # Sort by address
+    symbol_table = sorted(symbol_table, key=lambda tup: tup[0])
+
+    # Create ranges (list of START/END tuples)
+    ranges = []
+    range_start = symbol_table[0][0]
+    rtype = symbol_table[0][1]
+    for sym in symbol_table:
+        if sym[1] != rtype:
+            if rtype == 'X':
+                # Subtract one because the first address of the
+                # next range belongs to the next range.
+                ranges.append((range_start, sym[0] - 1))
+            range_start = sym[0]
+            rtype = sym[1]
+    return ranges
+
+
+def list_of_functions_for_binary(elf_name):
+    """
+    Get an array of the functions in the elf file
+
+    :param elf_name: Elf binary file name
+    :return: Dictionary keyed by function name with the function's start
+            and end addresses and a 'sources' dwarf-signature flag
+    """
+    _functions = {}
+    command = "%s -t %s | awk 'NR>4' | sed /^$/d" % (OBJDUMP, elf_name)
+    symbols_output = os_command(command)
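+    # "objdump -t" symbol table lines have the form:
+    # "<address> <7 flag chars> <section>\t<size> <name>",
+    # which is what the regular expression below captures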
+    rex = r'([0-9a-fA-F]+) (.{7}) ([^ ]+)[ \t]([0-9a-fA-F]+) (.*)'
+    symbols = symbols_output.split('\n')[:-1]
+    for sym in symbols:
+        try:
+            symbol_details = re.findall(rex, sym)
+            symbol_details = symbol_details[0]
+            if 'F' not in symbol_details[1]:
+                continue
+            function_name = symbol_details[4]
+            # We don't want the .hidden for hidden functions
+            if function_name.startswith('.hidden '):
+                function_name = function_name[len('.hidden '):]
+            if function_name not in _functions:
+                _functions[function_name] = {'start': symbol_details[0],
+                                             'end': symbol_details[3],
+                                             'sources': False}
+            else:
+                logger.warning("'{}' duplicated in '{}'".format(
+                    function_name,
+                    elf_name))
+        except Exception as ex:
+            logger.error("@Listing functions at file {}: {}".format(
+                elf_name,
+                ex))
+    return _functions
+
+
+def apply_functions_exclude(elf_config, functions):
+    """
+    Remove excluded functions from the list of functions
+
+    :param elf_config: Config for elf binary file
+    :param functions: Array of functions in the binary elf file
+    :return: Tuple with included and excluded functions
+    """
+    if 'exclude_functions' not in elf_config:
+        return functions, []
+    incl = {}
+    excl = {}
+    for fname in functions:
+        exclude = False
+        for rex in elf_config['exclude_functions']:
+            if re.match(rex, fname):
+                exclude = True
+                excl[fname] = functions[fname]
+                break
+        if not exclude:
+            incl[fname] = functions[fname]
+    return incl, excl
+
+
+def remove_workspace(path, workspace):
+    """
+    Make a path relative to a given workspace
+
+    :param path: Path to be made relative to the workspace
+    :param workspace: Workspace path; if None, the path is returned unchanged
+    """
+    ret = path if workspace is None else os.path.relpath(path, workspace)
+    # print("{} => {}".format(path, ret))
+    return ret
+
+
+def get_function_line_numbers(source_file):
+    """
+    Using ctags get all the function names with their line numbers
+    within the source_file
+
+    :return: Dictionary with function name as key and line number as value
+    """
+    function_lines = os_command(
+        "ctags -x --c-kinds=f {}".format(source_file)).split("\n")
+    fln = {}
+    try:
+        for line in function_lines:
+            cols = line.split()
+            if len(cols) < 3:
+                continue
+            if cols[1] == "function":
+                fln[cols[0]] = int(cols[2])
+            elif cols[1] == "label" and cols[0] == "func":
+                fln[cols[-1]] = int(cols[2])
+    except BaseException:
+        logger.warning("Warning: Can't get all function line numbers from %s" %
+                       source_file)
+    return fln
+
+
+class FunctionLineNumbers(object):
+
+    def __init__(self, workspace):
+        self.filenames = {}
+        self.workspace = workspace
+
+    def get_line_number(self, filename, function_name):
+        if not FUNCTION_LINES_ENABLED:
+            return 0
+        if filename not in self.filenames:
+            newp = os.path.join(self.workspace, filename)
+            self.filenames[filename] = get_function_line_numbers(newp)
+        return 0 if function_name not in self.filenames[filename] else \
+            self.filenames[filename][function_name]
+
+
+class PostProcessCC(object):
+    """Class used to process the trace data along with the dwarf
+    signature files to produce an intermediate layer in json with
+    code coverage in assembly and c source code.
+    """
+
+    def __init__(self, _config, local_workspace):
+        self._data = {}
+        self.config = _config
+        self.local_workspace = local_workspace
+        self.elfs = self.config['elfs']
+        # Dictionary with stats from trace files {address}=(times executed,
+        # inst size)
+        self.traces_stats = {}
+        # Dictionary of unique assembly line memory address against source
+        # file location
+        # {assembly address} = (opcode, source file location, line number in
+        # the source file, times executed)
+        self.asm_lines = {}
+        # Dictionary of {source file location} => {'lines': {'covered': bool,
+        # 'elf_index': {elf index} => {assembly address} => (opcode,
+        # times executed)},
+        # 'functions': {function name} => is covered (bool)}
+        self.source_files_coverage = {}
+        self.functions = []
+        # Unique set of elf list of files
+        self.elf_map = {}
+        # For elf custom mappings
+        self.elf_custom = None
+
+    def process(self):
+        """
+        Public method to process the trace files and dwarf signatures
+        using the information contained in the json configuration file.
+        This method writes the intermediate json file output linking
+        the trace data and c source and assembly code.
+        """
+        self.source_files_coverage = {}
+        self.asm_lines = {}
+        # Initialize for unknown elf files
+        self.elf_custom = ELF_MAP["custom_offset"]
+        sources_config = {}
+        print("Generating intermediate json layer '{}'...".format(
+            self.config['parameters']['output_file']))
+        for elf in self.elfs:
+            # Gather information
+            elf_name = elf['name']
+            os_command("ls {}".format(elf_name))
+            # Trace data
+            self.traces_stats = load_stats_from_traces(elf['traces'])
+            prefix = self.config['parameters']['workspace'] \
+                if self.config['configuration']['remove_workspace'] else \
+                None
+            functions_list = list_of_functions_for_binary(elf_name)
+            (functions_list, excluded_functions) = apply_functions_exclude(
+                elf, functions_list)
+            # Produce code coverage
+            self.dump_sources(elf_name, functions_list, prefix)
+            sources_config = self.config['parameters']['sources']
+            # Now check code coverage in the functions with no dwarf signature
+            # (sources)
+            nf = {f: functions_list[f] for f in
+                  functions_list if not
+                  functions_list[f]["sources"]}
+            self.process_fn_no_sources(nf)
+        # Write to the intermediate json file
+        data = {"source_files": self.source_files_coverage,
+                "configuration": {
+                    "sources": sources_config,
+                    "metadata": "" if 'metadata' not in
+                                      self.config['parameters'] else
+                    self.config['parameters']['metadata'],
+                    "elf_map": self.elf_map
+                }
+                }
+        json_data = json.dumps(data, indent=4, sort_keys=True)
+        with open(self.config['parameters']['output_file'], "w") as f:
+            f.write(json_data)
+
+    def dump_sources(self, elf_filename, function_list, prefix=None):
+        """
+        Process an elf file i.e. match the source and asm lines against trace
+            files (coverage).
+
+        :param elf_filename: Elf binary file name
+        :param function_list: Dictionary of functions in the elf file, as
+                                returned by list_of_functions_for_binary
+        :param prefix: Optional path name to be removed at the start of source
+                        file locations
+        """
+        command = "%s -Sl %s" % (OBJDUMP, elf_filename)
+        dump = os_command(command)
+        dump += "\n"  # For pattern matching the last \n
+        elf_name = os.path.splitext(os.path.basename(elf_filename))[0]
+        # Object that handles the function line numbers in
+        # their filename
+        function_line_numbers = FunctionLineNumbers(self.local_workspace)
+        # To map the elf filename against an index
+        if elf_name not in self.elf_map:
+            if elf_name in ELF_MAP:
+                self.elf_map[elf_name] = ELF_MAP[elf_name]
+            else:
+                self.elf_map[elf_name] = self.elf_custom
+                self.elf_custom += 1
+        elf_index = self.elf_map[elf_name]
+        # The function groups have 2 elements:
+        # Function's block name, Function's block code
+        function_groups = re.findall(
+            r"(?s)[0-9a-fA-F]+ <([a-zA-Z0-9_]+)>:\n(.+?)(?:\r*\n\n|\n$)",
+            dump, re.DOTALL | re.MULTILINE)
+        # Pointer to files dictionary
+        source_files = self.source_files_coverage
+        for function_group in function_groups:
+            if len(function_group) != 2:
+                continue
+            block_function_name, block_code = function_group
+            block_code += "\n"
+            # Find if the function has C source code filename
+            function_signature_group = re.findall(
+                r"(?s){}\(\):\n(/.+?):[0-9]+.*(?:\r*\n\n|\n$)".format(
+                    block_function_name), block_code, re.DOTALL | re.MULTILINE)
+            if not function_signature_group:
+                continue  # Function does not have dwarf signature (sources)
+            function_list[block_function_name]["sources"] = True
+            block_function_source_file = remove_workspace(
+                function_signature_group[0], prefix)
+            fn_line_number = function_line_numbers.get_line_number(
+                block_function_source_file, block_function_name)
+            if block_function_source_file not in source_files:
+                source_files[block_function_source_file] = {"functions": {},
+                                                            "lines": {}}
+            source_files[block_function_source_file]["functions"][
+                block_function_name] = {"covered": False,
+                                        "line_number": fn_line_number}
+            # Now let's check the block code
+            # The source code groups have 5 elements:
+            # Function for the statements (optional), Source file for the asm
+            # statements,
+            # line number for the asm statements, asm statements, lookahead
+            # (ignored)
+            source_code_groups = re.findall(SOURCE_PATTERN, block_code,
+                                            re.DOTALL | re.MULTILINE)
+            is_function_block_covered = False
+            # When not present the last function name applies
+            statements_function_name = block_function_name
+            for source_code_group in source_code_groups:
+                if len(source_code_group) != 5:
+                    continue
+                fn_name, source_file, ln, asm_code, _ = source_code_group
+                if not fn_name:
+                    # The statement belongs to the most recent function
+                    fn_name = statements_function_name
+                else:
+                    # Usually in the first iteration fn_name is not empty and
+                    # is the block's function name
+                    statements_function_name = fn_name
+                if statements_function_name in function_list:
+                    # Some of the functions within a block are not defined in
+                    # the function list dump
+                    function_list[statements_function_name]["sources"] = True
+                statements_source_file = remove_workspace(source_file, prefix)
+                if statements_source_file not in source_files:
+                    source_files[statements_source_file] = {"functions": {},
+                                                            "lines": {}}
+                if statements_function_name not in \
+                        source_files[statements_source_file]["functions"]:
+                    fn_line_number = function_line_numbers.get_line_number(
+                        statements_source_file,
+                        statements_function_name)
+                    source_files[statements_source_file]["functions"][
+                        statements_function_name] = \
+                        {"covered": False, "line_number": fn_line_number}
+                if ln not in source_files[statements_source_file]["lines"]:
+                    source_files[statements_source_file]["lines"][ln] = \
+                        {"covered": False, "elf_index": {}}
+                source_file_ln = source_files[statements_source_file]["lines"][
+                    ln]
+                asm_line_groups = re.findall(
+                    r"(?s)([a-fA-F0-9]+):\t(.+?)(?:\n|$)",
+                    asm_code, re.DOTALL | re.MULTILINE)
+                for asm_line in asm_line_groups:
+                    if len(asm_line) != 2:
+                        continue
+                    hex_line_number, opcode = asm_line
+                    dec_address = int(hex_line_number, 16)
+                    times_executed = 0 if dec_address not in self.traces_stats \
+                        else self.traces_stats[dec_address][0]
+                    if times_executed > 0:
+                        is_function_block_covered = True
+                        source_file_ln["covered"] = True
+                        source_files[statements_source_file]["functions"][
+                            statements_function_name]["covered"] = True
+                    if elf_index not in source_file_ln["elf_index"]:
+                        source_file_ln["elf_index"][elf_index] = {}
+                    if dec_address not in \
+                            source_file_ln["elf_index"][elf_index]:
+                        source_file_ln["elf_index"][elf_index][dec_address] = (
+                            opcode, times_executed)
+            source_files[block_function_source_file]["functions"][
+                block_function_name]["covered"] |= is_function_block_covered
+
+    def process_fn_no_sources(self, function_list):
+        """
+        Check function coverage for functions with no dwarf signature, i.e.
+        no sources.
+
+        :param function_list: Dictionary of functions to be checked
+        """
+        if not FUNCTION_LINES_ENABLED:
+            return  # No source code at the workspace
+        address_seq = sorted(self.traces_stats.keys())
+        for function_name in function_list:
+            # Just check if the start address is in the trace logs
+            covered = function_list[function_name]["start"] in address_seq
+            # Find the source file
+            files = os_command(("grep --include *.c --include *.s -nrw '{}' {}"
+                                "| cut -d: -f1").format(function_name,
+                                                        self.local_workspace))
+            unique_files = set(files.split())
+            sources = []
+            line_number = 0
+            for source_file in unique_files:
+                d = get_function_line_numbers(source_file)
+                if function_name in d:
+                    line_number = d[function_name]
+                    sources.append(source_file)
+            if len(sources) > 1:
+                logger.warning("'{}' declared in {} files:{}".format(
+                    function_name, len(sources),
+                    ", ".join(sources)))
+            elif len(sources) == 1:
+                source_file = remove_workspace(sources[0],
+                                               self.local_workspace)
+                if source_file not in self.source_files_coverage:
+                    self.source_files_coverage[source_file] = {"functions": {},
+                                                               "lines": {}}
+                if function_name not in \
+                        self.source_files_coverage[source_file]["functions"] or \
+                        covered:
+                    self.source_files_coverage[source_file]["functions"][
+                        function_name] = {"covered": covered,
+                                          "line_number": line_number}
+            else:
+                logger.warning("Function '{}' not found in sources.".format(
+                    function_name))
+
+
+json_conf_help = """
+Produces an intermediate json layer for code coverage reporting
+using an input json configuration file.
+
+Input json configuration file format:
+{
+    "configuration":
+        {
+        "remove_workspace": <true if 'workspace' must be from removed from the
+                                path of the source files>,
+        "include_assembly": <true to include assembly source code in the
+                            intermediate layer>
+        },
+    "parameters":
+        {
+        "objdump": "<Path to the objdump binary to handle dwarf signatures>",
+        "readelf: "<Path to the readelf binary to handle dwarf signatures>",
+        "sources": [ <List of source code origins, one or more of the next
+                        options>
+                    {
+                    "type": "git",
+                    "URL":  "<URL git repo>",
+                    "COMMIT": "<Commit id>",
+                    "REFSPEC": "<Refspec>",
+                    "LOCATION": "<Folder within 'workspace' where this source
+                                is located>"
+                    },
+                    {
+                    "type": "http",
+                    "URL":  <URL link to file>",
+                    "COMPRESSION": "xz",
+                    "LOCATION": "<Folder within 'workspace' where this source
+                                is located>"
+                    }
+                ],
+        "workspace": "<Workspace folder where the source code was located to
+                        produce the elf/axf files>",
+        "output_file": "<Intermediate layer output file name and location>",
+        "metadata": {<Metadata objects to be passed to the intermediate json
+                    files>}
+        },
+    "elfs": [ <List of elf files to be traced/parsed>
+            {
+                    "name": "<Full path name to elf/axf file>",
+                    "traces": [ <List of trace files to be parsed for this
+                                elf/axf file>
+                                "Full path name to the trace file,"
+                              ]
+                }
+        ]
+}
+"""
+OBJDUMP = None
+READELF = None
+FUNCTION_LINES_ENABLED = None
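+# SOURCE_PATTERN captures, for each source-line block in the objdump output:
+# an optional function name, the source file path, the line number and the
+# assembly statements for that line (the last group is lookahead only)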
+SOURCE_PATTERN = (r'(?s)([a-zA-Z0-9_]+)?(?:\(\):\n)?(^/.+?):([0-9]+)'
+                  r'(?: \(.+?\))?\n(.+?)(?=\n/|\n$|([a-zA-Z0-9_]+\(\):))')
+
+
+def main():
+    global OBJDUMP
+    global READELF
+    global FUNCTION_LINES_ENABLED
+
+    parser = argparse.ArgumentParser(epilog=json_conf_help,
+                                     formatter_class=RawTextHelpFormatter)
+    parser.add_argument('--config-json', metavar='PATH',
+                        dest="config_json", default='config_file.json',
+                        help='JSON configuration file', required=True)
+    parser.add_argument('--local-workspace', default="",
+                        help=('Local workspace folder where source code files'
+                              ' and folders reside'))
+    args = parser.parse_args()
+    try:
+        with open(args.config_json, 'r') as f:
+            config = json.load(f)
+    except Exception as ex:
+        print("Error at opening and processing JSON: {}".format(ex))
+        return
+    # Setting toolchain binary tools variables
+    OBJDUMP = config['parameters']['objdump']
+    READELF = config['parameters']['readelf']
+    # Check that the binutils tools are installed
+    os_command("{} --version".format(OBJDUMP))
+    os_command("{} --version".format(READELF))
+
+    if args.local_workspace != "":
+        # Checking ctags installed
+        try:
+            os_command("ctags --version")
+        except BaseException:
+            print("Warning!: ctags not installed/working function line numbers\
+                    will be set to 0. [{}]".format(
+                "sudo apt install exuberant-ctags"))
+        else:
+            FUNCTION_LINES_ENABLED = True
+
+    pp = PostProcessCC(config, args.local_workspace)
+    pp.process()
+
+
+if __name__ == '__main__':
+    logging.basicConfig(filename='intermediate_layer.log', level=logging.DEBUG,
+                        format=('%(asctime)s %(levelname)s %(name)s '
+                                '%(message)s'))
+    logger = logging.getLogger(__name__)
+    start_time = time.time()
+    main()
+    elapsed_time = time.time() - start_time
+    print("Elapsed time: {}s".format(elapsed_time))
diff --git a/coverage-tool/coverage-reporting/merge.py b/coverage-tool/coverage-reporting/merge.py
new file mode 100755
index 0000000..e3d9d65
--- /dev/null
+++ b/coverage-tool/coverage-reporting/merge.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python3
+###############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+###############################################################################
+
+###############################################################################
+# FILE: merge.py
+#
+# DESCRIPTION: Merge two or more .info and json files, sanitizing source file
+#              paths.
+#              If different .info files contain the same source code duplicated
+#              in different directories, we use the absolute paths of the
+#              first .info file.
+#
+###############################################################################
+
+
+import os
+import sys
+import argparse
+from argparse import RawTextHelpFormatter
+import subprocess
+import json
+
+
+# Define an argument parser using the argparse library
+parser = argparse.ArgumentParser(epilog="""Example of usage:
+python3 merge.py -a coverage_1.info -a coverage_2.info -o coverage_merge.info \
+-j input_file1.json -j input_file2.json -m merge_file.json
+
+It is possible to merge any number of files at once.
+If metadata json files are defined then they must pair with their
+corresponding info file, i.e. have the same name.
+If a local workspace is defined then the paths in the info files will
+be translated from the original test workspace to the local workspace
+to enable the usage of LCOV, but the original files will be kept intact.
+By default, the output file must be a new file.
+To overwrite an existing file, use the "--force" option.
+
+Note: the user is expected to merge .info files referring to the same project.
+If merging .info files from different projects, LCOV can be exploited directly
+using a command such as "lcov -rc lcov_branch_coverage=1 -a coverage_1.info \
+-a coverage_2.info -o coverage_merge.info."
+""", formatter_class=RawTextHelpFormatter)
+requiredNamed = parser.add_argument_group('required named arguments')
+requiredNamed.add_argument("-a", "--add-file",
+                           help="Input info file to be merged.",
+                           action='append', required=True)
+requiredNamed.add_argument("-o", "--output",
+                           help="Name of the output info (merged) file.",
+                           required=True)
+parser.add_argument("-j", "--json-file", action='append',
+                    help="Input json file to be merged.")
+parser.add_argument("-m", "--output-json",
+                    help="Name of the output json (merged) file.")
+parser.add_argument("--force", dest='force', action='store_true',
+                    help="force overwriting of output file.")
+parser.add_argument("--local-workspace", dest='local_workspace',
+                    help='Local workspace where source files reside.')
+
+options = parser.parse_args(sys.argv[1:])
+# At least two .info files are expected
+if len(options.add_file) < 2:
+    print('Error: too few input files.\n')
+    sys.exit(1)
+# The same number of info and json files expected
+if options.json_file:
+    if len(options.json_file) != len(options.add_file):
+        print('Error: unmatched number of info and json files.\n')
+        sys.exit(1)
+
+file_groups = []
+info_files_to_merge = []
+# Check if files exist
+for file_name in options.add_file:
+    print("Merging '{}'".format(file_name))
+    if not os.path.isfile(file_name):
+        print('Error: file "' + file_name + '" not found.\n')
+        sys.exit(1)
+    if not file_name[-5:] == '.info':
+        print('Error: file "' + file_name +
+              '" has wrong extension. Expected .info file.\n')
+        sys.exit(1)
+    if file_name in info_files_to_merge:
+        print("Error: Duplicated info file '{}'".format(file_name))
+        sys.exit(1)
+    info_files_to_merge.append(file_name)
+    file_group = {"info": file_name, "locations": [], "json": ""}
+    info_name = os.path.basename(file_name).split(".")[0]
+    if options.json_file:
+        json_name = [i for i in options.json_file
+                     if os.path.basename(i).split(".")[0] == info_name]
+        if not json_name:
+            print("Umatched json file name for '{}'".format(file_name))
+            sys.exit(1)
+        json_name = json_name.pop()
+        if not json_name[-5:] == '.json':
+            print('Error: file "' + json_name +
+                  '" has wrong extension. Expected .json file.\n')
+            sys.exit(1)
+        if not os.path.isfile(json_name):
+            print('Error: file "' + json_name + '" not found.\n')
+            sys.exit(1)
+        # Now we have to extract the location folders for each info
+        # this is needed if we want translation to local workspace
+        file_group["json"] = json_name
+        with open(json_name) as json_file:
+            json_data = json.load(json_file)
+        locations = []
+        for source in json_data["configuration"]["sources"]:
+            locations.append(source["LOCATION"])
+        file_group["locations"] = locations
+    file_groups.append(file_group)
+
+# Check the extension of the output file
+if not options.output[-5:] == '.info':
+    print('Error: file "' + options.output +
+          '" has wrong extension. Expected .info file.\n')
+    sys.exit(1)
+
+if options.local_workspace is not None:
+    # Translation from test to local workspace
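+    # LCOV info files record each source file in "SF:<absolute path>" lines;
+    # the common prefix of those paths is swapped below for the local
+    # workspace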
+    i = 0
+    while i < len(info_files_to_merge):
+        info_file = open(info_files_to_merge[i], "r")
+        print("Translating workspace for '{}'...".format(
+              info_files_to_merge[i]))
+        info_lines = info_file.readlines()
+        info_file.close()
+        common_prefix = os.path.normpath(
+            os.path.commonprefix([line[3:] for line in info_lines
+                                  if 'SF:' in line]))
+        temp_file = 'temporary_' + str(i) + '.info'
+        with open(temp_file, "w+") as f:
+            for line in info_lines:
+                cf = common_prefix
+                if os.path.basename(common_prefix) in file_groups[i]["locations"]:
+                    cf = os.path.dirname(common_prefix)
+                f.write(line.replace(cf, options.local_workspace))
+        info_files_to_merge[i] = temp_file  # Replace info file to be merged
+        i += 1
+
+# Merge json files
+if options.json_file:
+    json_merged_list = []
+    json_merged = {}
+    j = 0
+    while j < len(options.json_file):
+        json_file = options.json_file[j]
+        with open(json_file) as f:
+            data = json.load(f)
+        for source in data['configuration']['sources']:
+            if source not in json_merged_list:
+                json_merged_list.append(source)
+        j += 1
+    json_merged = {'configuration': {'sources': json_merged_list}}
+    with open(options.output_json, 'w') as f:
+        json.dump(json_merged, f)
+
+
+# Exploit LCOV merging capabilities
+# Example of LCOV usage: lcov -rc lcov_branch_coverage=1 -a coverage_1.info \
+# -a coverage_2.info -o coverage_merge.info
+command = ['lcov', '-rc', 'lcov_branch_coverage=1']
+
+for file_name in info_files_to_merge:
+    command.append('-a')
+    command.append(file_name)
+command.append('-o')
+command.append(options.output)
+
+subprocess.call(command)
+
+# Delete the temporary files
+if options.local_workspace is not None:
+    for f in info_files_to_merge:
+        os.remove(f)
diff --git a/coverage-tool/coverage-reporting/merge.sh b/coverage-tool/coverage-reporting/merge.sh
new file mode 100755
index 0000000..354dbc8
--- /dev/null
+++ b/coverage-tool/coverage-reporting/merge.sh
@@ -0,0 +1,417 @@
+#!/usr/bin/env bash
+
+##############################################################################
+# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+##############################################################################
+
+#==============================================================================
+# FILE: merge.sh
+#
+# DESCRIPTION: Wrapper to merge intermediate json files and LCOV trace .info
+# files.
+#==============================================================================
+
+set -e
+#################################################################
+# Function to manipulate json objects.
+# The json object properties can be accessed through "." separated
+# property names. There are special characters that define a function
+# over a given property value:
+# If the qualifier list starts with '-' then is asking for the len of the
+# json array defined by the qualifiers.
+# If the qualifier list starts with '*' then the resulting json value
+# is returned without double quotes at the end and the beginning.
+# If some property name starts with "?" then is requesting if that
+# property exists within the json object.
+# Globals:
+#   None
+# Arguments:
+#   1-Json string that describes the json object
+#   2-String of '.' separated qualifiers to access properties
+#       within the json object
+#   3- Optional default value for a sought property value
+# Outputs:
+#   None
+################################################################
+get_json_object() {
+  export _json_string="$1"
+  export _qualifiers="$2"
+  export _default="$3"
+  python3 - << EOT
+import os
+import json
+import sys
+
+_json_string = os.getenv("_json_string", "")
+_qualifiers = os.getenv("_qualifiers", "")
+_default = os.getenv("_default", "")
+try:
+    data = json.loads(_json_string)
+except Exception as ex:
+    print("Error decoding json string:{}".format(ex))
+    sys.exit(-1)
+ptr = data
+if _qualifiers[0] in ['-', '*']:
+    cmd = _qualifiers[0]
+    _qualifiers = _qualifiers[1:]
+else:
+    cmd = ""
+for _name in _qualifiers.split("."):
+    if _name in ptr:
+        ptr = ptr[_name]
+    elif _name.isdigit() and int(_name) < len(ptr):
+        ptr = ptr[int(_name)]
+    elif _name.startswith("?"):
+        print(_name[1:] in ptr)
+        sys.exit(0)
+    elif _default:
+        print(_default)
+        sys.exit(0)
+    else:
+        print("'{}' is not in the json object".format(_name))
+        sys.exit(-1)
+if cmd == "-":
+    # return len of the json array
+    print(len(ptr))
+elif cmd == "*":
+    #remove quotes
+    string = json.dumps(ptr)
+    if string.startswith('"') and string.endswith('"'):
+        string = string[1:-1]
+    print(string)
+else:
+    print(json.dumps(ptr))
+EOT
+}
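+# Illustrative examples (made-up json string):
+#   get_json_object '{"files":[{"id":"p1"}]}' "-files"      -> 1
+#   get_json_object '{"files":[{"id":"p1"}]}' "*files.0.id" -> p1
+#   get_json_object '{"files":[{"id":"p1"}]}' "files.0.?id" -> True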
+
+#################################################################
+# Convert a relative path to absolute path
+# Globals:
+#   None
+# Arguments:
+#   1-Path to be converted
+# Outputs:
+#   Absolute path
+################################################################
+get_abs_path() {
+  path="$1"
+  echo "$(cd $(dirname $path) && echo "$(pwd -P)"/$(basename $path))"
+}
+
+#################################################################
+# Clone the source files
+# Globals:
+#   None
+# Arguments:
+#   1-Json file with the sources to be cloned
+#   2-Folder where to clone the sources
+# Outputs:
+#   None
+################################################################
+clone_repos() {
+  export OUTPUT_JSON="$1"
+  export CSOURCE_FOLDER="${2:-$LOCAL_WORKSPACE}"
+
+  cd $DIR # To be run at the same level of this script
+python3 - << EOT
+import os
+import clone_sources
+
+output_file = os.getenv('OUTPUT_JSON', 'output_file.json')
+source_folder = os.getenv('CSOURCE_FOLDER', 'source')
+try:
+    r = clone_sources.CloneSources(output_file)
+    r.clone_repo(source_folder)
+except Exception as ex:
+    print(ex)
+EOT
+	cd -
+}
+
+#################################################################
+# Get a file defined in the json object
+# Globals:
+#   None
+# Arguments:
+#   1-Json object that defines the locations of the info and json
+#       files
+#   2-Folder to save the info and json files
+#   3-Name of the variable that will receive the path of the
+#       downloaded/copied file (reference argument)
+# Outputs:
+#   None
+################################################################
+get_file() {
+  json_object="$1"
+  where="$2"
+  var_name="${3:-param_cloned}" # Defaults to globar var
+
+  local _type=$(get_json_object "$json_object" "type")
+  local _origin=$(get_json_object "$json_object" "*origin")
+  local _compression=$(get_json_object "$json_object" "*compression" None)
+  local fname=""
+  local cloned_file=""
+  local full_filename=$(basename -- "$_origin")
+  local extension="${full_filename##*.}"
+  local filename="${full_filename%.*}"
+
+  if [ "$_type" = '"http"' ];then
+    fname="$where.$extension" # Same filename as folder
+    rm $where/$fname &>/dev/null || true
+    wget -o error.log $_origin -O $where/$fname || (
+			cat error.log && exit -1)
+    cloned_file="$(get_abs_path $where/$fname)"
+  elif [ "$_type" = '"bundle"' ];then
+    # Check file exists at origin, i.e. was unbundled before
+    fname="$_origin"
+    if [ -f "$where/$fname" ];then
+        cloned_file="$(get_abs_path $where/$fname)"
+    fi
+  elif [ "$_type" = '"file"' ];then
+	if [[ "$_origin" = http* ]]; then
+		echo "$_origin looks like 'http' rather than 'file' please check..."
+		exit -1
+	fi
+    fname="$where.$extension" # Same filename as folder
+    cp -f $_origin $where/$fname
+    cloned_file="$(get_abs_path $where/$fname)"
+  else
+    echo "Error unsupported file type:$_type.... Aborting."
+    exit -1
+  fi
+  if [ "$_compression" = "tar.xz" ];then
+    cd $where
+    tar -xJf $fname
+    rm -f $fname
+    cd -
+  fi
+  eval "${var_name}=${cloned_file}"
+}
+
+#####################################################################
+# Get (download/copy) info and json files from the input json file
+# Globals:
+#   merge_input_json_file: Input json file with locations of info
+#                          and intermediate json files to be merged.
+#   input_folder: Folder to put info and json files to be merged
+# Arguments:
+#   None
+# Outputs:
+#   None
+###################################################################
+get_info_json_files() {
+  json_string="$(cat $merge_input_json_file)"
+  nf=$(get_json_object "$json_string" "-files")
+  rm -rf $input_folder > /dev/null || true
+  mkdir -p $input_folder
+  for f in $(seq 0 $(($nf - 1)));
+  do
+    pushd $input_folder > /dev/null
+    _file=$(get_json_object "$json_string" "files.$f")
+    folder=$(get_json_object "$_file" "*id")
+    echo "Geting files from project '$folder' into '$input_folder'..."
+    mkdir -p $folder
+    bundles=$(get_json_object "$_file" "bundles" None)
+    if [ "$bundles" != "None" ];then
+      nb=$(get_json_object "$_file" "-bundles")
+      for n in $(seq 0 $(($nb - 1)));
+      do
+        get_file "$(get_json_object "$bundles" "$n")" $folder
+      done
+    fi
+    get_file "$(get_json_object "$_file" "config")" $folder config_json_file
+    get_file "$(get_json_object "$_file" "info")" $folder info_file
+    popd > /dev/null
+  done
+}
+
+#################################################################
+# Merge json and info files and generate branch coverage report
+# Globals:
+#   output_coverage_file: Location and name for merge coverage info
+#   output_json_file: Location and name for merge json output
+#   input_folder: Location where reside json and info files
+#   LOCAL_WORKSPACE: Local workspace folder with the source files
+# Arguments:
+#   None
+# Outputs:
+#   Output merge coverage file
+#   Output merge json file
+################################################################
+merge_files() {
+# Merge info and json files
+  local lc=" "
+  if [ -n "$LOCAL_WORKSPACE" ];then
+    # Translation to be done in the info files to local workspace
+    lc=" --local-workspace $LOCAL_WORKSPACE"
+  fi
+  # merge.py must reside at the same path as merge.sh
+  python3 ${DIR}/merge.py \
+      $(find $input_folder -name "*.info" -exec echo "-a {}" \;) \
+      $(find $input_folder -name "*.json" -exec echo "-j {}" \;) \
+      -o $output_coverage_file \
+      -m $output_json_file \
+      $lc
+
+}
+
+
+#################################################################
+# Print scripts usage
+# Arguments:
+#   None
+# Outputs:
+#   Prints to stdout script usage
+################################################################
+usage() {
+  clear
+  echo "Usage:"
+  echo "merge -h              Display this help message."
+  echo "-j <input json file>  Input json file(info and intermediate json files to be merged)."
+  echo "-l <report folder>    Folder for branch coverage report. Defaults to ./lcov_folder."
+  echo "-i <Path>             Folder to copy/download info and json files. Defaults to ./input."
+  echo "-w <Folder>           Local workspace folder for source files."
+  echo "-o <name>             Name of the merged info file. Defaults to ./coverage_merge.info"
+  echo "-m <name>             Name of the merged metadata json file. Defaults to ./merge_output.json"
+  echo "-c                    If it is set, sources from merged json files will be cloned/copied to local workspace folder."
+  echo "$help_message"
+}
+
+help_message=$(cat <<EOF
+
+# The script that merges the info data (code coverage) and json metadata
+# (intermediate layer) needs as an input a json file with the following
+# properties:
+# files: array of objects that describe the type of file/project to be
+# merged.
+#   id: Unique identifier (project) associated to the info and
+#       intermediate json files
+#   config: Intermediate json file
+#       type: Type of storage for the file. (http or file)
+#       origin: Location (url or folder) of the file
+#   info:  Info file
+#       type: Type of storage for the file. (http or file)
+#       origin: Location (url or folder) of the file
+# Example:
+{ "files" : [
+                {
+                    "id": "<project 1>",
+                    "config":
+                        {
+                            "type": "http",
+                            "origin": "<URL of json file for project 1>"
+                        },
+                    "info":
+                        {
+                            "type": "http",
+                            "origin": "<URL of info file for project 1>"
+                        }
+                },
+                {
+                    "id": "<project 2>",
+                    "config":
+                        {
+                            "type": "http",
+                            "origin": "<URL of json file for project 2>"
+                        },
+                    "info":
+                        {
+                            "type": "http",
+                            "origin": "<URL of info file for project 2>"
+                        }
+                },
+                .
+                .
+                .
+        ]
+}
+EOF
+)
+
+clear
+# Local workspace folder to contain source files
+LOCAL_WORKSPACE=""
+# If this is true then will clone/copy sources from merged json
+# file into local workspace
+CLONE_SOURCES=false
+# Location of the input json file that contains information about
+# the info and json files to be merged and produced a report
+merge_input_json_file=""
+# Folder to download json and info files
+input_folder="./input_folder"
+# Folder to put the reports
+LCOV_FOLDER="./lcov_folder"
+# File name for merge coverage info
+output_coverage_file="./coverage_merge.info"
+# File name for merge json output
+output_json_file="./merge_output.json"
+while getopts ":hj:o:l:w:i:cm:" opt; do
+  case ${opt} in
+    h )
+      usage
+      exit 0
+      ;;
+    w )
+      LOCAL_WORKSPACE=$(cd $OPTARG; pwd)
+      ;;
+    i )
+      input_folder=$OPTARG
+      ;;
+    c )
+      CLONE_SOURCES=true
+      ;;
+    j )
+      merge_input_json_file=$OPTARG
+      ;;
+    l )
+      LCOV_FOLDER=$OPTARG
+      ;;
+    o )
+      output_coverage_file=$OPTARG
+      ;;
+    m )
+      output_json_file=$OPTARG
+      ;;
+    \? )
+      echo "Invalid option: $OPTARG" 1>&2
+      usage
+      exit -1
+      ;;
+    : )
+      echo "Invalid option: $OPTARG requires an argument" 1>&2
+      usage
+      exit -1
+      ;;
+  esac
+done
+shift $((OPTIND -1))
+if [ -z "$merge_input_json_file" ]; then
+  echo "Input json file required"
+  usage
+  exit -1
+fi
+if [ -z "$LOCAL_WORKSPACE" ] && [ $CLONE_SOURCES = true ]; then
+	echo "Need to define a local workspace folder to clone/copy sources!"
+	exit -1
+fi
+# Getting the script folder where other script files must reside, i.e.
+# merge.py, clone_sources.py
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+input_folder="$(get_abs_path $input_folder)"
+LCOV_FOLDER="$(get_abs_path  $LCOV_FOLDER)"
+output_coverage_file="$(get_abs_path $output_coverage_file)"
+output_json_file="$(get_abs_path $output_json_file)"
+param_cloned=""
+get_info_json_files
+merge_files
+if [ $CLONE_SOURCES = true ];then
+	clone_repos $output_json_file
+fi
+# Generate branch coverage report
+genhtml --branch-coverage $output_coverage_file \
+    --output-directory $LCOV_FOLDER
diff --git a/coverage-tool/docs/code_cov_diag.jpg b/coverage-tool/docs/code_cov_diag.jpg
new file mode 100644
index 0000000..d2f3f63
--- /dev/null
+++ b/coverage-tool/docs/code_cov_diag.jpg
Binary files differ
diff --git a/coverage-tool/docs/design_overview.md b/coverage-tool/docs/design_overview.md
new file mode 100644
index 0000000..89c4b66
--- /dev/null
+++ b/coverage-tool/docs/design_overview.md
@@ -0,0 +1,78 @@
+# Design overview
+
+This document explains the overall design approach to the trace-based code coverage tool.
+
+## Motivation
+
+The primary motivation for this code coverage tool is that there are no commercial off-the-shelf (COTS) tools that can readily measure code coverage for firmware components - especially those meant for memory-constrained platforms. Most tools rely on the traditional approach where the code is instrumented to enable the coverage measurement. For firmware designed for memory-constrained platforms, code size is a key consideration, and the need to change memory maps to accommodate the instrumented code is seen as a pain point. A possible alternative is to perform the coverage measurement on emulation platforms, which would free up the memory constraints. However, this adds the need to support more platform-specific code in the firmware for the emulation platform.
+
+The above factors led to a design approach to measure the code coverage based on execution trace, without the need for any code instrumentation. This approach provides the following benefits:
+- allows the user to test the real software stack without worrying about memory constraints - no code is instrumented, meaning the real software is used during the coverage run.
+- allows the user to test on real platforms rather than partial system emulations - coverage information can be obtained without expensive modelling or porting effort.
+
+
+## Known Limitations
+
+The following limitations are understood to exist with the trace-based coverage tool:
+
+- This works only with non-relocatable code: here the execution address of an instruction can be mapped directly to the addresses determined from the generated binaries. Even if some position-independent code is involved, the tool can still be used when the location binding happens at build time, since the post-processing stage can be made to do the mapping.
+- Accuracy of the code coverage information mapped to the source code is limited by the completeness of the embedded DWARF signatures: with higher levels of code optimisation the embedded DWARF signatures become `sparse`, especially when the generated code is optimised for size. This solution works best when no compiler optimisation is turned ON.
+- This is currently proven to work on FVPs (Fixed Virtual Platforms): early prototyping shows this approach can work with silicon platforms, however it needs further development.
+
+
+## Design Details
+The following diagram outlines the individual components involved in the trace-based coverage tool.
+
+![](code_cov_diag.jpg)
+
+The following changes are needed at each of the stages to enable this code coverage measurement tool to work.
+
+### Compilation stage
+
+The coverage tool relies on the DWARF signatures embedded within the binaries generated for the firmware that runs as part of the coverage run. In the case of the GCC toolchain this is enabled by adding the -g flag during compilation.
+
+The -g flag generates DWARF signatures embedded within the binaries, as seen in the example below:
+```
+100005b0 <tfm_plat_get_rotpk_hash>:
+tfm_plat_get_rotpk_hash():
+/workspace/workspace/tf-m-build-config/trusted-firmware-m/platform/ext/common/template/crypto_keys.c:173
+100005b0:	b510	push	{r4, lr}
+/workspace/workspace/tf-m-build-config/trusted-firmware-m/platform/ext/common/template/crypto_keys.c:174
+100005b2:	6814	ldr	r4, [r2, #0]
+```
+
+### Trace generation stage
+
+The coverage tool relies on the generation of the execution trace from the target platform (in our case an FVP). It relies on the coverage trace plugin, an MTI-based custom plugin that registers for the trace source type `INST` and dumps a filtered set of the instruction data executed during the coverage run. For silicon platforms it expects trace capture with tools like DSTREAM-ST.
+
+See [Coverage Plugin](./plugin_design.md) documentation to know more about this custom plugin.
+
+The following diagram shows an example trace capture output from the coverage trace plugin:
+```
+[PC address, times executed, opcode size]
+0010065c 1 4
+00100660 1 4
+00100664 1 2
+00100666 1 2
+...
+```
+
+### Post-processing stage
+
+In this stage coverage information is generated by:
+1. Determining the instructions executed from the trace output captured.
+2. Mapping those instructions to source code by utilising the DWARF signatures embedded within the binaries.
+3. Generating the LCOV .info files, allowing us to report the coverage information with the LCOV tool and merge reports from multiple runs (see the sketch below).
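+
+As a rough illustration of steps 1-3 (not the actual tool code), the sketch below folds coverage-trace lines into per-line hit counts through a hypothetical DWARF-derived address-to-line map, then prints minimal LCOV records:
+
+```python
+from collections import defaultdict
+
+# Trace lines as emitted by the coverage plugin: "address count size"
+trace_lines = ["0010065c 1 4", "00100660 1 4"]
+
+# Hypothetical mapping of instruction address -> (source file, line),
+# recovered in practice from the DWARF signatures via objdump
+addr_to_line = {0x10065c: ("crypto_keys.c", 173),
+                0x100660: ("crypto_keys.c", 174)}
+
+per_file = defaultdict(dict)
+for entry in trace_lines:
+    addr, count, _size = entry.split()
+    mapped = addr_to_line.get(int(addr, 16))
+    if mapped:
+        src, line = mapped
+        per_file[src][line] = per_file[src].get(line, 0) + int(count)
+
+# Minimal LCOV records: SF (source file), DA (line, hit count)
+for src, lines in per_file.items():
+    print("SF:{}".format(src))
+    for line, count in sorted(lines.items()):
+        print("DA:{},{}".format(line, count))
+    print("end_of_record")
+```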
+
+### Typical steps to integrate trace-based coverage tool to CI setup
+
+- Generate the DWARF binary (elf or axf) files at build stage using the -g flag or equivalent compiler switches.
+- Build the coverage plugin using the corresponding PVLIB_HOME library for the 64-bit compiler and deploy in your CI to be used during execution.
+- Use the coverage plugin during FVP execution by providing the additional parameters. See [here](./plugin_user_guide.md#capturing-a-trace)
+- Clone the sources in your local workspace if not already there.
+- The generated trace logs, together with the DWARF binary files, the binutils (objdump and readelf from the same toolchain that built the DWARF binary files) and the source code, are used as input to *intermediate_layer.py* to generate the intermediate json layer.
+- *generate_info_file.py* parses the intermediate json layer file into an info file that can be read by the genhtml binary from LCOV.
+- Optionally use *merge.py* to merge multiple coverage info files into a combined report; a minimal CI glue sketch follows this list.
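+
+A minimal sketch of such CI glue is shown below; the file names and the workspace path are hypothetical, and the *generate_info_file.py* step is elided since its options are documented with that script:
+
+```python
+import subprocess
+
+# Produce the intermediate json layer from traces and DWARF binaries
+subprocess.check_call(
+    ["python3", "intermediate_layer.py",
+     "--config-json", "config_file.json",
+     "--local-workspace", "/home/user/src"])
+
+# Merge info files from two (hypothetical) runs
+subprocess.check_call(
+    ["python3", "merge.py",
+     "-a", "coverage_1.info", "-a", "coverage_2.info",
+     "-o", "coverage_merge.info"])
+
+# Render the HTML report with LCOV's genhtml
+subprocess.check_call(
+    ["genhtml", "--branch-coverage", "coverage_merge.info",
+     "--output-directory", "lcov_folder"])
+```
+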
+## License
+[BSD-3-Clause](../../license.md)
+
diff --git a/coverage-tool/docs/plugin_user_guide.md b/coverage-tool/docs/plugin_user_guide.md
new file mode 100644
index 0000000..5c600dd
--- /dev/null
+++ b/coverage-tool/docs/plugin_user_guide.md
@@ -0,0 +1,30 @@
+# coverage-plugin User Guide
+
+The *coverage-plugin* is a C++ project using the Model Trace Interface Plugin Development Kit (MTIPDK) in order to create a trace plugin, which is a special shared library. The trace plugins can be loaded into Arm Fast Models to produce execution trace data for doing code coverage measurement.
+
+## Dependencies
+- GCC 7.5.0 or later
+
+## Building the coverage-plugin
+```bash
+$ cd coverage-plugin
+$ make PVLIB_HOME=</path/to/model_library>
+```
+
+## Capturing a trace
+
+You need to add two options to your model command-line:
+
+```bash
+   --plugin /path/to/coverage_trace.so
+   -C TRACE.coverage_trace.trace-file-prefix="/path/to/TRACE-PREFIX"
+```
+
+You can then run your FVP model. The traces will be created at the end of the simulation*.
+
+BEWARE: Traces aren't numbered and will be overwritten if you do two successive runs. Aggregating results will require moving traces to a separate place or changing the prefix between runs. This is the responsibility of the plugin user.
+
+*NOTE: The plugin captures the traces in memory and writes the data to a file when the simulation terminates. If the user terminates the simulation forcefully with Ctrl+C, the trace files are not generated.
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/coverage-tool/docs/reporting_user_guide.md b/coverage-tool/docs/reporting_user_guide.md
new file mode 100644
index 0000000..7c0deea
--- /dev/null
+++ b/coverage-tool/docs/reporting_user_guide.md
@@ -0,0 +1,357 @@
+# coverage-reporting User Guide
+
+The *coverage-reporting* is a collection of Python and bash scripts that generate LCOV HTML-based reports for code coverage against C source code. There are two stages in this process:
+
+1. Converting the information from the execution traces (using coverage-plugin) of the FVP and the DWARF signatures from the elf/axf files to an intermediate JSON file.
+
+2. Converting the intermediate JSON file into an info file that can be read by LCOV utilities to produce a code coverage HTML report. Merge utility scripts are provided to combine multiple info files into a single report spanning multiple runs.
+
+## Intermediate JSON file
+This is a JSON file that pairs the source code line numbers embedded in the elf files (by virtue of DWARF signatures) against the execution trace log files from the coverage-plugin. Only lines that are compiled and linked into the final binaries are referenced by the DWARF signatures, so the coverage information is always against the compiled code that made it into the binary. The tool needs a configuration json file as input with the metadata required to perform the coverage computation. This configuration file has the following format:
+```json
+{
+    "configuration":
+        {
+        "remove_workspace": "<true> if workspace must be from removed from the path of the source files",
+        "include_assembly": "<true> to include assembly source code in the intermediate layer"
+        },
+    "parameters":
+        {
+        "objdump": "<Path> to the objdump binary to handle DWARF signatures",
+        "readelf": "<Path> to the readelf binary to handle DWARF signatures",
+        "sources": [
+                    {
+                    "type": "git",
+                    "URL":  "<URL> git repo",
+                    "COMMIT": "<Commit id>",
+                    "REFSPEC": "<Refspec>",
+                    "LOCATION": "<Folder> within 'workspace' where this source is located"
+                    },
+                    {
+                    "type": "http",
+                    "URL":  "<URL> link to file",
+                    "COMPRESSION": "xz",
+                    "LOCATION": "<Folder within 'workspace' where this source is located>"
+                    }
+                ],
+        "workspace": "<Workspace folder> where the source code was located to produce(build) the elf/axf files",
+        "output_file": "<Intermediate json layer output file name and location>",
+        "metadata": {"metadata_1": "metadata value"}
+        },
+    "elfs": [
+            {
+                    "name": "<Full path name to elf/axf file>",
+                    "traces": [
+                                "Full path name to the trace file,"
+                              ]
+                }
+        ]
+}
+```
+
+Here is an example of an actual configuration JSON file:
+
+```json
+{
+    "configuration":
+        {
+        "remove_workspace": true,
+        "include_assembly": true
+        },
+    "parameters":
+        {
+        "objdump": "gcc-arm-none-eabi-7-2018-q2-update/bin/arm-none-eabi-objdump",
+        "readelf": "gcc-arm-none-eabi-7-2018-q2-update/bin/arm-none-eabi-readelf",
+        "sources": [
+                    {
+                    "type": "git",
+                    "URL":  "https://git.trustedfirmware.org/TF-M/trusted-firmware-m.git/",
+                    "COMMIT": "2ffadc12fb34baf0717908336698f8f612904",
+                    "REFSPEC": "",
+                    "LOCATION": "trusted-firmware-m"
+                    },
+                    {
+                    "type": "git",
+                    "URL":  "https://mucboot.com/mcuboot.git",
+                    "COMMIT": "507689a57516f558dac72bef634723b60c5cfb46b",
+                    "REFSPEC": "",
+                    "LOCATION": "mcuboot"
+                    },
+                    {
+                    "type": "git",
+                    "URL":  "https://tf.org/mbed/mbed-crypto.git",
+                    "COMMIT": "1146b4589011b69a6437e6b728f2af043a06ec19",
+                    "REFSPEC": "",
+                    "LOCATION": "mbed-crypto"
+                    }
+                ],
+        "workspace": "/workspace/workspace/tf-m",
+        "output_file": "output_file.json"
+        },
+    "elfs": [
+            {
+                    "name": "mcuboot.axf",
+                    "traces": [
+                                "reg-covtrace*.log"
+                              ]
+                },
+            {
+                    "name": "tfms.axf",
+                    "traces": [
+                                "reg-covtrace*.log"
+                              ]
+                },
+            {
+                    "name": "tfmns.axf",
+                    "traces": [
+                                "reg-covtrace*.log"
+                              ]
+                }
+        ]
+}
+```
+
+
+The script depends on the objdump and readelf binaries from the *same* toolchain that was used to build the elf binaries under test.
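+
+For instance, the DWARF line information these binaries expose can be inspected manually. This is only an illustrative sketch; *mcuboot.axf* is a hypothetical elf name taken from the example above:
+
+```bash
+# Dump the decoded DWARF line table that maps addresses back to source lines
+$ arm-none-eabi-objdump --dwarf=decodedline mcuboot.axf | head
+$ arm-none-eabi-readelf --debug-dump=decodedline mcuboot.axf | head
+```
+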
+The script can then be invoked as:
+
+```bash
+$ python3 intermediate_layer.py --config-json <config json file> [--local-workspace <path to local folder/workspace where the source files are located>]
+```
+The *local-workspace* option must be given if the current path to the source files differs from the workspace where the build (compiling and linking) happened. The latter is what appears in the DWARF signatures, while the former is used to produce the coverage report. Recreating the local workspace is not a strict requirement, but without it the program cannot find the line numbers of the functions within the source files. Note that **ctags** must be installed for this lookup, i.e. **sudo apt install exuberant-ctags**.
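+
+For example, a hypothetical invocation where the build workspace recorded in the DWARF signatures differs from the local checkout:
+
+```bash
+# /home/user/tf-m-sources is a hypothetical path to a local checkout of the
+# sources; the DWARF signatures still reference the original build workspace
+$ python3 intermediate_layer.py --config-json config_file.json \
+    --local-workspace /home/user/tf-m-sources
+```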
+
+The output is an intermediate json file with the following format:
+
+```json
+{
+    "configuration": {
+        "elf_map": {
+            "binary name 1": 0,
+            "binary name 2": 1
+        },
+        "metadata": {
+            "property 1": "metadata value 1",
+            "property 2": "metadata value 2"
+        },
+        "sources": [{
+            "type": "<git or http>",
+            "URL": "<url for the source>",
+            "COMMIT": "<commit id for git source>",
+            "REFSPEC": "<refspec for the git source>",
+            "LOCATION": "<folder to put the source>"
+        }]
+    },
+    "source_files": {
+        "<Source file name>": {
+            "functions": {
+                "<Function name>": {
+                    "line": "<Function line number>",
+                    "covered": "<true or false>"
+                }
+            },
+            "lines": {
+                "<line number>": {
+                    "covered": "<true or false>",
+                    "elf_index": {
+                        "<Index from elf map>": {
+                            "<Address in decimal>": [
+                                "<Assembly opcode>",
+                                "<Number of times executed>"
+                            ]
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
+```
+
+Here is an example snippet of an intermediate JSON file:
+
+```json
+{
+    "configuration": {
+        "elf_map": {
+            "bl1": 0,
+            "bl2": 1,
+            "bl31": 2
+        },
+        "metadata": {
+            "BUILD_CONFIG": "tf1",
+            "RUN_CONFIG": "tf2"
+        },
+        "sources": [
+                    {
+                    "type": "git",
+                    "URL":  "https://git.trustedfirmware.org/TF-M/trusted-firmware-m.git/",
+                    "COMMIT": "2ffadc12fb34baf0717908336698f8f612904",
+                    "REFSPEC": "",
+                    "LOCATION": "trusted-firmware-m"
+                    },
+                    {
+                    "type": "git",
+                    "URL":  "https://mucboot.com/mcuboot.git",
+                    "COMMIT": "507689a57516f558dac72bef634723b60c5cfb46b",
+                    "REFSPEC": "",
+                    "LOCATION": "mcuboot"
+                    },
+                    {
+                    "type": "git",
+                    "URL":  "https://tf.org/mbed/mbed-crypto.git",
+                    "COMMIT": "1146b4589011b69a6437e6b728f2af043a06ec19",
+                    "REFSPEC": "",
+                    "LOCATION": "mbed-crypto"
+                    }
+        ]
+    },
+    "source_files": {
+        "mcuboot/boot1.c": {
+            "functions": {
+                "arch_setup": true
+            },
+            "lines": {
+                "12": {
+                    "covered": true,
+                    "elf_index": {
+                        "0": {
+                            "6948": [
+                                "b2760000 \torr\tx0, x0, #0x400",
+                                1
+                            ]
+                        }
+                    }
+                },
+                "19": {
+                    "covered": true,
+                    "elf_index": {
+                        "0": {
+                            "6956": [
+                                "d65f03c0 \tret",
+                                1
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+... more lines
+```
+
+
+
+## Report
+LCOV uses **info** files to produce an HTML report; hence the intermediate json file must first be converted to an **info** file:
+```bash
+$ python3 generate_info_file.py --workspace <Workspace where the C source folder structure resides> --json <Intermediate json file> [--info <path and filename for the info file>]
+```
+As was mentioned, the *workspace* option tells the program where to look for the source files, hence it is a requirement that the local workspace is populated.
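+
+For example, a hypothetical invocation matching the earlier examples, with the sources restored under */home/user/tf-m-sources*:
+
+```bash
+# Convert the intermediate json layer into an LCOV info file
+$ python3 generate_info_file.py --workspace /home/user/tf-m-sources \
+    --json output_file.json --info coverage.info
+```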
+
+This will generate an info file, *coverage.info*, that can be fed into LCOV to produce the final coverage report as below:
+
+```bash
+$ genhtml --branch-coverage coverage.info --output-directory <HTML report folder>
+```
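+
+Assuming a hypothetical output folder named *html_report*, the resulting report can then be opened locally:
+
+```bash
+# Open the generated LCOV HTML report in the default browser
+$ xdg-open html_report/index.html
+```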
+
+Here is an example snippet of an info file:
+
+```
+TN:
+SF:/home/projects/initial_attestation/attestation_key.c
+FN:213,attest_get_instance_id
+FN:171,attest_calc_instance_id
+FN:61,attest_register_initial_attestation_key
+FN:137,attest_get_signing_key_handle
+FN:149,attest_get_initial_attestation_public_key
+FN:118,attest_unregister_initial_attestation_key
+FNDA:1,attest_get_instance_id
+FNDA:1,attest_calc_instance_id
+FNDA:1,attest_register_initial_attestation_key
+FNDA:1,attest_get_signing_key_handle
+FNDA:1,attest_get_initial_attestation_public_key
+FNDA:1,attest_unregister_initial_attestation_key
+FNF:6
+FNH:6
+BRDA:71,0,0,0
+BRDA:71,0,1,1
+...<more lines>
+```
+
+Refer to the [geninfo manual page](http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php) for the full meaning of these records. In brief: *TN* is the test name, *SF* the source file, *FN* a function with its starting line, *FNDA* the number of times that function was executed, *FNF*/*FNH* the number of functions found/hit, and *BRDA* branch data given as line, block, branch and taken count (0 when the branch was not taken).
+
+## Wrapper
+There is a wrapper bash script that can generate the intermediate json file, create the info file, and produce the LCOV report in one step:
+```bash
+$ ./branch_coverage.sh --config config_file.json --workspace <local workspace> --outdir <HTML report folder>
+```
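+
+For example, a hypothetical end-to-end run reusing the paths from the previous sections:
+
+```bash
+# Generates the intermediate json file, the info file and the HTML
+# report in one step
+$ ./branch_coverage.sh --config config_file.json \
+    --workspace /home/user/tf-m-sources --outdir html_report
+```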
+
+## Merge files
+There is a utility wrapper that can merge json and info files to produce a merged code coverage report:
+```bash
+$ ./merge.sh -j <input json file> [-l <filename for the merged report>] [-w <local workspace>] [-c to recreate the workspace from the sources]
+```
+This utility needs an input json file with the list of json/info files to be merged:
+```json
+{ "files" : [
+                {
+                    "id": "<unique project id (string) to which the json and info files belong>",
+                    "config":
+                        {
+                            "type": "<'http' or 'file'>",
+                            "origin": "<URL or folder where the json files reside>"
+                        },
+                    "info":
+                        {
+                            "type": "<'http' or 'file'>",
+                            "origin": "<URL or folder where the info files reside>"
+                        }
+                },
+... more of these json objects
+        ]
+}
+```
+This utility merges the files, recreates the C source folder structure and produces the LCOV reports for the merged files. It can also translate the workspace paths recorded in each info file to the local workspace, in case the info files come from different workspaces. The only requirement is that all the info files come from the **same** sources, i.e. repositories.
+
+Here is an example snippet of an input json file:
+
+```json
+{ "files" : [
+                {
+                    "id": "Tests_Release_BL2",
+                    "config":
+                        {
+                            "type": "file",
+                            "origin": "/home/workspace/150133/output_file.json"
+                        },
+                    "info":
+                        {
+                            "type": "file",
+                            "origin": "/home/workspace/150133/coverage.info"
+                        }
+                },
+                {
+                    "id": "Tests_Regression_BL2",
+                    "config":
+                        {
+                            "type": "file",
+                            "origin": "/home/workspace/150143/output_file.json"
+                        },
+                    "info":
+                        {
+                            "type": "file",
+                            "origin": "/home/workspace/150143/coverage.info"
+                        }
+                }
+        ]
+}
+```
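+
+A hypothetical invocation with this input file saved as *merge.json*:
+
+```bash
+# Merge the two coverage sets listed in merge.json, recreating the workspace
+# from the sources (-c) before producing the combined LCOV report
+$ ./merge.sh -j merge.json -l merged_coverage.info -w /home/user/merged-workspace -c
+```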
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/coverage-tool/docs/user_guide.md b/coverage-tool/docs/user_guide.md
new file mode 100644
index 0000000..70505a1
--- /dev/null
+++ b/coverage-tool/docs/user_guide.md
@@ -0,0 +1,15 @@
+# Trace-based Coverage Tool User Guide
+
+The *coverage-tool* provides code coverage measurement based on execution traces, without the need for code instrumentation. The tool is specifically meant for firmware components that run on memory-constrained platforms. Avoiding code instrumentation circumvents the frequent issue of the instrumented code affecting the target memory model in which the firmware is expected to run; the firmware is thus tested in the actual memory model in which it is eventually intended to be released. The coverage tool comprises two main components: a *trace plugin component* and a set of *post-processing scripts* to generate the coverage report.
+
+## Design Overview
+Refer to [design overview](./design_overview.md) for an outline of the design of this trace-based coverage tool.
+
+## Plugin user guide
+Refer to [plugin user guide](./plugin_user_guide.md) to learn more on how the plugin component is to be used as part of trace-based coverage tool.
+
+## Reporting user guide
+Refer to [reporting user guide](./reporting_user_guide.md) to learn more on how to use the post-processing scripts, that are part of the trace-based coverage tool, in order to generate the coverage report for analysis.
+
+## License
+[BSD-3-Clause](../../license.md)
diff --git a/coverage-tool/readme.md b/coverage-tool/readme.md
new file mode 100644
index 0000000..e956f2c
--- /dev/null
+++ b/coverage-tool/readme.md
@@ -0,0 +1,35 @@
+# coverage-tool
+
+The *coverage-tool* is a coverage measurement tool based on a custom plugin (implementing the Model Trace Interface (MTI)) to generate execution traces, paired with a report generator based on LCOV. The current implementation reports statement/line coverage, function coverage and branch coverage.
+
+## Installation
+
+Please clone the repository.
+
+```bash
+git clone https://gitlab.arm.com/qa-tools.git
+```
+
+## Dependencies
+For the plugin:
+- Python 3
+
+For the report generator:
+- LCOV (https://github.com/linux-test-project/lcov)
+```bash
+sudo apt-get update -y
+sudo apt-get install -y lcov
+sudo apt-get install exuberant-ctags
+```
+
+## Usage
+
+Please see the [user guide](./docs/user_guide.md) for more details.
+
+## Contributing
+Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
+
+Please follow the recommended [coding style](../readme.md#style-guide) specified for each component. Also make sure to update the in-source documentation as appropriate.
+
+
+## License
+[BSD-3-Clause](../../license.md)