Bugfix for intermediate layer creation in coverage-tool

- Fixed handling of the case where 'remove_workspace' is false in the
  configuration json file used as input for the intermediate layer python
  script.
- Changed merge.py so that both config and output (intermediate) json
  files can be used as input for merging.

Change-Id: I2d7ebc8fdff172ab7be14e30de928967a0f453a8
diff --git a/coverage-tool/coverage-reporting/intermediate_layer.py b/coverage-tool/coverage-reporting/intermediate_layer.py
index 8713149..9ef6f81 100644
--- a/coverage-tool/coverage-reporting/intermediate_layer.py
+++ b/coverage-tool/coverage-reporting/intermediate_layer.py
@@ -311,7 +311,8 @@
     traces and produce a code coverage report"""
 
     def __init__(self, dump: str, function_list: Dict[str, Dict[str, any]],
-                 prefix: str, function_line_numbers: FunctionLineNumbers):
+                 _workspace: str, _remove_workspace: bool,
+                 function_line_numbers: FunctionLineNumbers):
         """
         Initialisation of the instance to parse binary files.
 
@@ -319,15 +320,16 @@
         code metadata, i.e. source code location and line number.
         :param function_list: Dictionary of functions defined in the binary
         dump.
-        :param prefix: Prefix for every source code file contained in the
-        binary dump file, usually the workspace (folders) where the source code
-        files where built.
+        :param _workspace: Workspace (folder) where the source files were built.
+        :param _remove_workspace: Boolean to indicate if the building of
+        source files is local (false) or in a CI (true).
         :param function_line_numbers: Object instance to get a function line
         number within a source code file.
         """
         self.dump = dump
         self.function_list = function_list
-        self.prefix = prefix
+        self.workspace = _workspace
+        self.remove_workspace = _remove_workspace
         self.function_definition = None
         self.function_line_numbers = function_line_numbers
 
@@ -493,11 +495,11 @@
             function_block.name)
         pattern = r'(?s)(^[a-zA-Z0-9_]+)?(?:\(\):\n)?(^{0}.+?):([0-9]+)[' \
                   r'^\n]*\n(.+?)(?={0}.+?:[0-9]+.+\n|^[a-zA-Z0-9_]+\(' \
-                  r'\):\n)'.format(self.prefix)
+                  r'\):\n)'.format(self.workspace)
         source_code_blocks = re.findall(pattern,
                                         "{}\n{}/:000".format(
                                             function_block.code,
-                                            self.prefix),
+                                            self.workspace),
                                         re.DOTALL |
                                         re.MULTILINE)
         for block in source_code_blocks:
@@ -509,8 +511,10 @@
                 # and is the function's name block
                 self.function_definition.function_name = \
                     source_code_block.function_name
-            self.function_definition.source_file = remove_workspace(
-                source_code_block.source_file, self.prefix)
+            self.function_definition.source_file = source_code_block.source_file
+            if self.remove_workspace:
+                self.function_definition.source_file = remove_workspace(
+                    source_code_block.source_file, self.workspace)
             yield source_code_block
 
     def get_function_block(self):
@@ -531,8 +535,10 @@
                       format(function_block.name))
                 continue  # Function not found in function list
             source_code_file = signature_group[0]
-            function_block.source_file = remove_workspace(
-                source_code_file, self.prefix)
+            function_block.source_file = source_code_file
+            if self.remove_workspace:
+                function_block.source_file = remove_workspace(
+                    source_code_file, self.workspace)
             function_block.function_line_number = \
                 self.function_line_numbers.get_line_number(
                     function_block.source_file, function_block.name)
@@ -548,6 +554,8 @@
     def __init__(self, _config, local_workspace):
         self._data = {}
         self.config = _config
+        self.workspace = self.config['parameters']['workspace']
+        self.remove_workspace = self.config['configuration']['remove_workspace']
         self.local_workspace = local_workspace
         self.elfs = self.config['elfs']
         # Dictionary with stats from trace files {address}=(times executed,
@@ -588,14 +596,11 @@
             elf_name = elf['name']
             # Trace data
             self.traces_stats = load_stats_from_traces(elf['traces'])
-            prefix = self.config['parameters']['workspace'] \
-                if self.config['configuration']['remove_workspace'] else \
-                None
             functions_list = list_of_functions_for_binary(elf_name)
             (functions_list, excluded_functions) = apply_functions_exclude(
                 elf, functions_list)
             # Produce code coverage
-            self.process_binary(elf_name, functions_list, prefix)
+            self.process_binary(elf_name, functions_list)
             sources_config = self.config['parameters']['sources']
             # Now check code coverage in the functions with no dwarf signature
             # (sources)
@@ -626,7 +631,7 @@
                 ELF_MAP["custom_offset"] += 1
         return self.elf_map[elf_name]
 
-    def process_binary(self, elf_filename: str, function_list, prefix=None):
+    def process_binary(self, elf_filename: str, function_list):
         """
         Process an elf file i.e. match the source code and asm lines against
         trace files (coverage).
@@ -634,8 +639,6 @@
         :param elf_filename: Elf binary file name
         :param function_list: List of functions in the elf file i.e.
                                 [(address start, address end, function name)]
-        :param prefix: Optional path name to be removed at the start of source
-                        file locations
         """
         command = "%s -Sl %s | tee %s" % (OBJDUMP, elf_filename,
                                           elf_filename.replace(".elf", ".dump"))
@@ -646,8 +649,8 @@
         elf_index = self.get_elf_index(elf_name)
         # Pointer to files dictionary
         source_files = self.source_files_coverage
-        parser = BinaryParser(dump, function_list, prefix,
-                              function_line_numbers)
+        parser = BinaryParser(dump, function_list, self.workspace,
+                              self.remove_workspace, function_line_numbers)
         for function_block in parser.get_function_block():
             function_list[function_block.name]["sources"] = True
             source_files.setdefault(function_block.source_file,
diff --git a/coverage-tool/coverage-reporting/merge.py b/coverage-tool/coverage-reporting/merge.py
index c4a80ad..bb07b10 100755
--- a/coverage-tool/coverage-reporting/merge.py
+++ b/coverage-tool/coverage-reporting/merge.py
@@ -109,6 +109,9 @@
     if file_name in info_files_to_merge:
         print("Error: Duplicated info file '{}'".format(file_name))
         sys.exit(1)
+    if os.stat(file_name).st_size == 0:
+        print("Warning: Empty info file '{}', skipping it".format(file_name))
+        continue
     info_files_to_merge.append(file_name)
     file_group = {"info": file_name, "locations": [], "json": ""}
     info_name = os.path.basename(file_name).split(".")[0]
@@ -132,7 +135,8 @@
         with open(json_name) as json_file:
             json_data = json.load(json_file)
         locations = []
-        for source in json_data["configuration"]["sources"]:
+        parent = json_data.get("parameters", json_data.get("configuration"))
+        for source in parent["sources"]:
             location = source["LOCATION"]
             locations.append((location, source.get("LOCAL", location)))
         file_group["locations"] = locations
@@ -180,7 +184,8 @@
         json_file = options.json_file[j]
         with open(json_file) as f:
             data = json.load(f)
-        for source in data['configuration']['sources']:
+        parent = data.get("parameters", data.get("configuration"))
+        for source in parent['sources']:
             if source not in json_merged_list:
                 json_merged_list.append(source)
         j += 1