Add options and refactor coverage reporting scripts
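
This change:
- clone_sources.py: initialise git submodules after checkout and pass an
  optional EXTRA_PARAMS value through to tar when extracting xz archives.
- generate_info_file.py: open source and json files with an explicit UTF-8
  encoding.
- intermediate_layer.py: refactor PostProcessCC into IntermediateCodeCoverage
  backed by a new BinaryParser helper, add hafnium ELF map entries and bump
  the version to 7.0.
- merge.py / merge.sh: add -k/-g options to keep the translated info files
  and generate a per-project LCOV report, support an optional LOCAL folder
  in the metadata json, and rename several options and variables for clarity.
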
diff --git a/coverage-tool/coverage-reporting/clone_sources.py b/coverage-tool/coverage-reporting/clone_sources.py
index fb1807d..ec38acd 100644
--- a/coverage-tool/coverage-reporting/clone_sources.py
+++ b/coverage-tool/coverage-reporting/clone_sources.py
@@ -18,7 +18,7 @@
from random import random
-def call_cmd(cmd, print_cmd=False):
+def call_cmd(cmd, print_cmd=True):
"""
Function that executes an os command and returns its output
@@ -135,6 +135,7 @@
else:
call_cmd("cd {};git checkout -q FETCH_HEAD".format(
output_loc))
+ call_cmd("cd {};git submodule update --init --recursive || true".format(output_loc))
elif source['type'] == 'http':
site = source
output_loc = os.path.join(output_dir, site["LOCATION"])
@@ -146,6 +147,7 @@
site['URL'], tmp_folder))
call_cmd("mkdir -p {}".format(output_loc))
if site['COMPRESSION'] == "xz":
- call_cmd("cd {};tar -xzf $(basename {}) -C {}".format(
- tmp_folder, site['URL'], output_loc))
+ call_cmd("cd {};tar -xzf $(basename {}) -C {} {}".format(
+ tmp_folder, site['URL'], output_loc,
+ source.get("EXTRA_PARAMS", "")))
call_cmd("rm -rf {}".format(tmp_folder))
diff --git a/coverage-tool/coverage-reporting/generate_info_file.py b/coverage-tool/coverage-reporting/generate_info_file.py
index 0c0f39a..a606682 100755
--- a/coverage-tool/coverage-reporting/generate_info_file.py
+++ b/coverage-tool/coverage-reporting/generate_info_file.py
@@ -340,7 +340,7 @@
# regex: find all the lines starting with 'if' or 'else if'
# (possibly preceded by whitespaces/tabs)
pattern = re.compile(r"^\s+if|^\s+} else if|^\s+else if")
- for i, line in enumerate(open(abs_path_file)):
+ for i, line in enumerate(open(abs_path_file, encoding='utf-8')):
for match in re.finditer(pattern, line):
branching_lines.append(i + 1)
while branching_lines:
@@ -355,7 +355,7 @@
# regex: find all the lines starting with 'switch'
# (possibly preceded by whitespaces/tabs)
pattern = re.compile(r"^\s+switch")
- for i, line in enumerate(open(abs_path_file)):
+ for i, line in enumerate(open(abs_path_file, encoding='utf-8')):
for match in re.finditer(pattern, line):
switch_lines.append(i + 1)
while switch_lines:
@@ -380,7 +380,7 @@
help='Output info file name',
default="coverage.info")
args = parser.parse_args()
-with open(args.json) as json_file:
+with open(args.json, encoding='utf-8') as json_file:
json_data = json.load(json_file)
info_file = open(args.info, "w+")
error_log = open("error_log.txt", "w+")
@@ -390,7 +390,7 @@
abs_path_file = os.path.join(args.workspace, relative_path)
if not os.path.exists(abs_path_file):
continue
- source = open(abs_path_file)
+ source = open(abs_path_file, encoding='utf-8')
lines = source.readlines()
info_file.write('TN:\n')
info_file.write('SF:' + os.path.abspath(abs_path_file) + '\n')
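
The explicit UTF-8 encoding matters because open() otherwise falls back to
the locale's preferred encoding, so the same source tree can parse cleanly
on one machine and raise UnicodeDecodeError on another (e.g. under LANG=C).
A quick way to see the fallback in effect:

    import locale
    # What open() assumes when no encoding is given: 'ANSI_X3.4-1968'
    # (ASCII) under LANG=C, typically 'UTF-8' on modern distributions
    print(locale.getpreferredencoding(False))
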
diff --git a/coverage-tool/coverage-reporting/intermediate_layer.py b/coverage-tool/coverage-reporting/intermediate_layer.py
index 701aeac..8713149 100644
--- a/coverage-tool/coverage-reporting/intermediate_layer.py
+++ b/coverage-tool/coverage-reporting/intermediate_layer.py
@@ -23,8 +23,10 @@
from argparse import RawTextHelpFormatter
import logging
import time
+from typing import Any
+from typing import Dict
+from typing import List
-__version__ = "6.0"
+__version__ = "7.0"
# Static map that defines the elf file source type in the intermediate json
ELF_MAP = {
@@ -36,6 +38,8 @@
"scp_rom": 11,
"mcp_rom": 12,
"mcp_ram": 13,
+ "secure_hafnium": 14,
+ "hafium": 15,
"custom_offset": 100
}
@@ -48,7 +52,6 @@
:param show_command: Optional argument to print the command in stdout
:return: The string output of the os command
"""
- out = ""
try:
if show_command:
print("OS command: {}".format(command))
@@ -136,6 +139,8 @@
"""
# Parse all $x / $a / $t / $d mapping symbols
symbol_table = []
+ address = None
+ _type = None
command = r"""%s -s %s | awk '/\$[xatd]/ {print $2" "$8}'""" % (
READELF, elf_name)
text_out = os_command(command)
@@ -165,7 +170,7 @@
for sym in symbol_table:
if sym[1] != rtype:
if rtype == 'X':
- # Substract one because the first address of the
+ # Subtract one because the first address of the
# next range belongs to the next range.
ranges.append((range_start, sym[0] - 1))
range_start = sym[0]
@@ -173,13 +178,13 @@
return ranges
-def list_of_functions_for_binary(elf_name):
+def list_of_functions_for_binary(elf_name: str) -> Dict[str, Dict[str, Any]]:
"""
Get an array of the functions in the elf file
:param elf_name: Elf binary file name
:return: An array of function address start, function address end,
- function dwarf signature (sources) addressed by function name
+ function dwarf signature (sources) indexed by function name
"""
_functions = {}
command = "%s -t %s | awk 'NR>4' | sed /^$/d" % (OBJDUMP, elf_name)
@@ -243,11 +248,10 @@
:param workspace: Path.
"""
ret = path if workspace is None else os.path.relpath(path, workspace)
- # print("{} => {}".format(path, ret))
return ret
-def get_function_line_numbers(source_file):
+def get_function_line_numbers(source_file: str) -> Dict[str, int]:
"""
Using ctags get all the function names with their line numbers
within the source_file
@@ -270,19 +274,28 @@
logger.warning("Warning: Can't get all function line numbers from %s" %
source_file)
except Exception as ex:
- logger.warning(f"Warning: Unknown error '{ex}' when executing command '{command}'")
+ logger.warning(f"Warning: Unknown error '{ex}' when executing command "
+ f"'{command}'")
return {}
return fln
class FunctionLineNumbers(object):
+ """Helper class used to get a function start line number within
+ a source code file"""
- def __init__(self, workspace):
+ def __init__(self, workspace: str):
+ """
+ Initialise the dictionary that associates source code files with
+ their corresponding function start line numbers.
+
+ :param workspace: The folder where the source files are deployed
+ """
self.filenames = {}
self.workspace = workspace
- def get_line_number(self, filename, function_name):
+ def get_line_number(self, filename: str, function_name: str) -> int:
if not FUNCTION_LINES_ENABLED:
return 0
if filename not in self.filenames:
@@ -292,7 +305,241 @@
self.filenames[filename][function_name]
-class PostProcessCC(object):
+class BinaryParser(object):
+ """Class used to create an instance to parse the binary files with a
+ dwarf signature in order to produce logical information to be matched with
+ traces and produce a code coverage report"""
+
+ def __init__(self, dump: str, function_list: Dict[str, Dict[str, Any]],
+ prefix: str, function_line_numbers: FunctionLineNumbers):
+ """
+ Initialisation of the instance to parse binary files.
+
+ :param dump: Binary dump (string) containing assembly code and source
+ code metadata, i.e. source code location and line number.
+ :param function_list: Dictionary of functions defined in the binary
+ dump.
+ :param prefix: Prefix for every source code file contained in the
+ binary dump file, usually the workspace (folder) where the source code
+ files were built.
+ :param function_line_numbers: Object instance to get a function line
+ number within a source code file.
+ """
+ self.dump = dump
+ self.function_list = function_list
+ self.prefix = prefix
+ self.function_definition = None
+ self.function_line_numbers = function_line_numbers
+
+ class FunctionBlock(object):
+ """Class used to parse and obtain a function block from the
+ binary dump file that corresponds to a function declaration within
+ the binary assembly code.
+ The function block has the following components:
+ - Function start address in memory (hexadecimal).
+ - Function name.
+ - Function code.
+ """
+
+ def __init__(self, function_group: List[str]):
+ """
+ Create an instance of a function block within a binary dump.
+
+ :param function_group: List containing the function start
+ address, name and code in the function block.
+ """
+ self.start, self.name, self.code = function_group
+ self.source_file = None
+ self.function_line_number = None
+
+ @staticmethod
+ def get(dump: str):
+ """
+ Static method generator to extract a function block from the binary
+ dump.
+
+ :param dump: Binary dump (string) that contains the binary file
+ information.
+ :return: A FunctionBlock object that is a logical representation
+ of a function declaration within the binary dump.
+ """
+ function_groups = re.findall(
+ r"(?s)([0-9a-fA-F]+) <([a-zA-Z0-9_]+)>:\n(.+?)(?=[A-Fa-f0-9]* "
+ r"<[a-zA-Z0-9_]+>:)", dump, re.DOTALL | re.MULTILINE)
+ for group in function_groups:
+ if len(group) != 3:
+ continue
+ function_group = list(group)
+ function_group[-1] += "\n"
+ yield BinaryParser.FunctionBlock(function_group)
+
+ class SourceCodeBlock(object):
+ """Class used to represent a source code block of information within
+ a function block in a binary dump file.
+ The source code block contains the following components:
+ - Optional function name where the source code/assembly code is defined.
+ - Source code file that contains the source code corresponding
+ to the assembly code.
+ - Line number within the source code file corresponding to the source
+ code.
+ - Assembly code block.
+ """
+
+ def __init__(self, source_code_block):
+ """
+ Create an instance of a source code block within a function block.
+
+ :param source_code_block: Tuple of 4 elements that contains the
+ components of a source code block.
+ """
+ self.function_name, self.source_file, self.line, self.asm_code \
+ = source_code_block
+
+ def get_assembly_line(self):
+ """Getter to return and AssemblyLine instance that corresponds to
+ a logical representation of an assembly code line contained
+ within a source code block (assembly code block)"""
+ return BinaryParser.AssemblyLine.get(self)
+
+ class AssemblyLine(object):
+ """Class used to represent an assembly code line within an
+ assembly code block.
+ The assembly line instruction is formed by the following components:
+ - Hexadecimal address of the assembly instruction.
+ - Assembly instruction.
+ """
+
+ def __init__(self, line):
+ """
+ Create an instance representing an assembly code line within an
+ assembly code block.
+
+ :param line: Tuple of 2 elements [hexadecimal address,
+ assembly instruction]
+ """
+ self.hex_line_number, self.opcode = line
+ self.dec_address = int(self.hex_line_number, 16)
+
+ @staticmethod
+ def get(source_code_block):
+ """
+ Static method generator to extract an assembly code line from an
+ assembly code block.
+
+ :param source_code_block: Object that contains the assembly code
+ within the source code block.
+ :return: AssemblyLine object.
+ """
+ lines = re.findall(
+ r"^[\s]+([a-fA-F0-9]+):\t(.+?)\n",
+ source_code_block.asm_code, re.DOTALL | re.MULTILINE)
+ for line in lines:
+ if len(line) != 2:
+ continue
+ yield BinaryParser.AssemblyLine(line)
+
+ class FunctionDefinition(object):
+ """
+ Class used to handle a function definition, i.e. function name, source
+ code filename and line number where it is declared.
+ """
+
+ def __init__(self, function_name):
+ """
+ Create an instance representing a function definition within a
+ function code block.
+
+ :param function_name: Initial function name
+ """
+ self.function_line_number = None
+ self.function_name = function_name
+ self.source_file: str = None
+
+ def update_sources(self, source_files, function_line_numbers):
+ """
+ Method to update source files dictionary
+
+ :param source_files: Dictionary that contains the representation
+ of the intermediate layer.
+
+ :param function_line_numbers: Object that obtains the start line
+ number for a function definition inside its source file.
+ :return: Nothing
+ """
+ source_files.setdefault(self.source_file, {"functions": {},
+ "lines": {}})
+ if self.function_name not in \
+ source_files[self.source_file]["functions"]:
+ self.function_line_number = \
+ function_line_numbers.get_line_number(
+ self.source_file,
+ self.function_name)
+ source_files[self.source_file]["functions"][
+ self.function_name] = {"covered": False,
+ "line_number":
+ self.function_line_number}
+
+ def get_source_code_block(self, function_block: FunctionBlock):
+ """
+ Generator method to obtain all the source code blocks within a
+ function block.
+
+ :param function_block: FunctionBlock object that contains the
+ source code blocks.
+ :return: A SourceCodeBlock object.
+ """
+ # When not present, the function block's name applies
+ self.function_definition = BinaryParser.FunctionDefinition(
+ function_block.name)
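+ # Groups: optional function name, source file path (starting
+ # with the prefix), line number and the assembly code that
+ # follows; a "{prefix}/:000" sentinel is appended so the last
+ # block also matches the lookahead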
+ pattern = r'(?s)(^[a-zA-Z0-9_]+)?(?:\(\):\n)?(^{0}.+?):([0-9]+)[' \
+ r'^\n]*\n(.+?)(?={0}.+?:[0-9]+.+\n|^[a-zA-Z0-9_]+\(' \
+ r'\):\n)'.format(self.prefix)
+ source_code_blocks = re.findall(pattern,
+ "{}\n{}/:000".format(
+ function_block.code,
+ self.prefix),
+ re.DOTALL |
+ re.MULTILINE)
+ for block in source_code_blocks:
+ if len(block) != 4:
+ continue
+ source_code_block = BinaryParser.SourceCodeBlock(block)
+ if source_code_block.function_name:
+ # Usually in the first iteration the function name is not
+ # empty and is the function block's name
+ self.function_definition.function_name = \
+ source_code_block.function_name
+ self.function_definition.source_file = remove_workspace(
+ source_code_block.source_file, self.prefix)
+ yield source_code_block
+
+ def get_function_block(self):
+ """Generator method to obtain all the function blocks contained in
+ the binary dump file.
+ """
+ for function_block in BinaryParser.FunctionBlock.get(self.dump):
+ # Find out if the function block has a C source code filename
+ # in the function block code
+ signature_group = re.findall(
+ r"(?s){}\(\):\n(/.+?):[0-9]+.*(?:\r*\n\n|\n$)".format(
+ function_block.name), function_block.code,
+ re.DOTALL | re.MULTILINE)
+ if not signature_group:
+ continue # Function does not have dwarf signature (sources)
+ if function_block.name not in self.function_list:
+ print("Warning:Function '{}' not found in function list!!!".
+ format(function_block.name))
+ continue # Function not found in function list
+ source_code_file = signature_group[0]
+ function_block.source_file = remove_workspace(
+ source_code_file, self.prefix)
+ function_block.function_line_number = \
+ self.function_line_numbers.get_line_number(
+ function_block.source_file, function_block.name)
+ yield function_block
+
+
+class IntermediateCodeCoverage(object):
"""Class used to process the trace data along with the dwarf
signature files to produce an intermediate layer in json with
code coverage in assembly and c source code.
@@ -339,7 +586,6 @@
for elf in self.elfs:
# Gather information
elf_name = elf['name']
- os_command("ls {}".format(elf_name))
# Trace data
self.traces_stats = load_stats_from_traces(elf['traces'])
prefix = self.config['parameters']['workspace'] \
@@ -349,7 +595,7 @@
(functions_list, excluded_functions) = apply_functions_exclude(
elf, functions_list)
# Produce code coverage
- self.dump_sources(elf_name, functions_list, prefix)
+ self.process_binary(elf_name, functions_list, prefix)
sources_config = self.config['parameters']['sources']
# Now check code coverage in the functions with no dwarf signature
# (sources)
@@ -364,17 +610,26 @@
"metadata": "" if 'metadata' not in
self.config['parameters'] else
self.config['parameters']['metadata'],
- "elf_map": self.elf_map
- }
+ "elf_map": self.elf_map}
}
json_data = json.dumps(data, indent=4, sort_keys=True)
with open(self.config['parameters']['output_file'], "w") as f:
f.write(json_data)
- def dump_sources(self, elf_filename, function_list, prefix=None):
+ def get_elf_index(self, elf_name: str) -> int:
+ """Obtains the elf index and fills the elf_map instance variable"""
+ if elf_name not in self.elf_map:
+ if elf_name in ELF_MAP:
+ self.elf_map[elf_name] = ELF_MAP[elf_name]
+ else:
+ self.elf_map[elf_name] = ELF_MAP["custom_offset"]
+ ELF_MAP["custom_offset"] += 1
+ return self.elf_map[elf_name]
+
+ def process_binary(self, elf_filename: str, function_list, prefix=None):
"""
- Process an elf file i.e. match the source and asm lines against trace
- files (coverage).
+ Process an elf file, i.e. match the source code and asm lines against
+ trace files (coverage).
:param elf_filename: Elf binary file name
:param function_list: List of functions in the elf file i.e.
@@ -382,119 +637,60 @@
:param prefix: Optional path name to be removed at the start of source
file locations
"""
- command = "%s -Sl %s" % (OBJDUMP, elf_filename)
- dump = os_command(command)
+ command = "%s -Sl %s | tee %s" % (OBJDUMP, elf_filename,
+ elf_filename.replace(".elf", ".dump"))
+ dump = os_command(command, show_command=True)
dump += "\n0 <null>:" # For pattern matching the last function
elf_name = os.path.splitext(os.path.basename(elf_filename))[0]
- # Object that handles the function line numbers in
- # their filename
function_line_numbers = FunctionLineNumbers(self.local_workspace)
- # To map the elf filename against an index
- if elf_name not in self.elf_map:
- if elf_name in ELF_MAP:
- self.elf_map[elf_name] = ELF_MAP[elf_name]
- else:
- self.elf_map[elf_name] = self.elf_custom
- self.elf_custom += 1
- elf_index = self.elf_map[elf_name]
- # The function groups have 2 elements:
- # Function's block name, Function's block code
- function_groups = re.findall(
- r"(?s)[0-9a-fA-F]+ <([a-zA-Z0-9_]+)>:\n(.+?)(?=[A-Fa-f0-9]* <[a-zA-Z0-9_]+>:)",
- dump, re.DOTALL | re.MULTILINE)
+ elf_index = self.get_elf_index(elf_name)
# Pointer to files dictionary
source_files = self.source_files_coverage
- for function_group in function_groups:
- if len(function_group) != 2:
- continue
- block_function_name, block_code = function_group
- block_code += "\n"
- # Find if the function has C source code filename
- function_signature_group = re.findall(
- r"(?s){}\(\):\n(/.+?):[0-9]+.*(?:\r*\n\n|\n$)".format(
- block_function_name), block_code, re.DOTALL | re.MULTILINE)
- if not function_signature_group:
- continue # Function does not have dwarf signature (sources)
- if not block_function_name in function_list:
- print("Warning:Function '{}' not found in function list!!!".format(block_function_name))
- continue # Function not found in function list
- function_list[block_function_name]["sources"] = True
- block_function_source_file = remove_workspace(
- function_signature_group[0], prefix)
- fn_line_number = function_line_numbers.get_line_number(
- block_function_source_file, block_function_name)
- if block_function_source_file not in source_files:
- source_files[block_function_source_file] = {"functions": {},
- "lines": {}}
- source_files[block_function_source_file]["functions"][
- block_function_name] = {"covered": False,
- "line_number": fn_line_number}
- # Now lets check the block code
- # The source code groups have 5 elements:
- # Function for the statements (optional), Source file for the asm
- # statements,
- # line number for the asm statements, asm statements, lookahead
- # (ignored)
- source_code_groups = re.findall(SOURCE_PATTERN, block_code,
- re.DOTALL | re.MULTILINE)
+ parser = BinaryParser(dump, function_list, prefix,
+ function_line_numbers)
+ for function_block in parser.get_function_block():
+ function_list[function_block.name]["sources"] = True
+ source_files.setdefault(function_block.source_file,
+ {"functions": {},
+ "lines": {}})
+ source_files[function_block.source_file]["functions"][
+ function_block.name] = {"covered": False,
+ "line_number":
+ function_block.function_line_number}
is_function_block_covered = False
- # When not present the last function name applies
- statements_function_name = block_function_name
- for source_code_group in source_code_groups:
- if len(source_code_group) != 5:
- continue
- fn_name, source_file, ln, asm_code, _ = source_code_group
- if not fn_name:
- # The statement belongs to the most recent function
- fn_name = statements_function_name
- else:
- # Usually in the first iteration fn_name is not empty and
- # is the function's name block
- statements_function_name = fn_name
- if statements_function_name in function_list:
- # Some of the functions within a block are not defined in
- # the function list dump
- function_list[statements_function_name]["sources"] = True
- statements_source_file = remove_workspace(source_file, prefix)
- if statements_source_file not in source_files:
- source_files[statements_source_file] = {"functions": {},
- "lines": {}}
- if statements_function_name not in \
- source_files[statements_source_file]["functions"]:
- fn_line_number = function_line_numbers.get_line_number(
- statements_source_file,
- statements_function_name)
- source_files[statements_source_file]["functions"][
- statements_function_name] = \
- {"covered": False, "line_number": fn_line_number}
- if ln not in source_files[statements_source_file]["lines"]:
- source_files[statements_source_file]["lines"][ln] = \
- {"covered": False, "elf_index": {}}
- source_file_ln = source_files[statements_source_file]["lines"][
- ln]
- asm_line_groups = re.findall(
- r"(?s)([a-fA-F0-9]+):\t(.+?)(?:\n|$)",
- asm_code, re.DOTALL | re.MULTILINE)
- for asm_line in asm_line_groups:
- if len(asm_line) != 2:
- continue
- hex_line_number, opcode = asm_line
- dec_address = int(hex_line_number, 16)
- times_executed = 0 if dec_address not in self.traces_stats \
- else self.traces_stats[dec_address][0]
+ source_code_block: BinaryParser.SourceCodeBlock
+ for source_code_block in parser.get_source_code_block(
+ function_block):
+ if parser.function_definition.function_name in function_list:
+ function_list[parser.function_definition.function_name][
+ "sources"] = True
+ parser.function_definition.update_sources(source_files,
+ function_line_numbers)
+ source_file_ln = \
+ source_files[parser.function_definition.source_file][
+ "lines"].setdefault(source_code_block.line,
+ {"covered": False, "elf_index": {}})
+ for asm_block in source_code_block.get_assembly_line():
+ times_executed = 0 if \
+ asm_block.dec_address not in self.traces_stats else \
+ self.traces_stats[asm_block.dec_address][0]
if times_executed > 0:
is_function_block_covered = True
source_file_ln["covered"] = True
- source_files[statements_source_file]["functions"][
- statements_function_name]["covered"] = True
+ source_files[parser.function_definition.source_file][
+ "functions"][
+ parser.function_definition.function_name][
+ "covered"] = True
+ source_file_ln.setdefault("elf_index", {})
if elf_index not in source_file_ln["elf_index"]:
source_file_ln["elf_index"][elf_index] = {}
- if dec_address not in \
+ if asm_block.dec_address not in \
source_file_ln["elf_index"][elf_index]:
- source_file_ln["elf_index"][elf_index][dec_address] = (
- opcode, times_executed)
- source_files[block_function_source_file]["functions"][
- block_function_name]["covered"] |= is_function_block_covered
+ source_file_ln["elf_index"][elf_index][
+ asm_block.dec_address] = (
+ asm_block.opcode, times_executed)
+ source_files[function_block.source_file]["functions"][
+ function_block.name]["covered"] |= is_function_block_covered
def process_fn_no_sources(self, function_list):
"""
@@ -532,8 +728,8 @@
self.source_files_coverage[source_file] = {"functions": {},
"lines": {}}
if function_name not in \
- self.source_files_coverage[source_file]["functions"] or \
- covered:
+ self.source_files_coverage[source_file]["functions"] \
+ or covered:
self.source_files_coverage[source_file]["functions"][
function_name] = {"covered": covered,
"line_number": line_number}
@@ -597,8 +793,6 @@
OBJDUMP = None
READELF = None
FUNCTION_LINES_ENABLED = None
-SOURCE_PATTERN = (r'(?s)([a-zA-Z0-9_]+)?(?:\(\):\n)?(^/.+?):([0-9]+)'
- r'(?: \(.+?\))?\n(.+?)(?=\n/|([a-zA-Z0-9_]+\(\):))')
def main():
@@ -639,8 +833,8 @@
else:
FUNCTION_LINES_ENABLED = True
- pp = PostProcessCC(config, args.local_workspace)
- pp.process()
+ intermediate_layer = IntermediateCodeCoverage(config, args.local_workspace)
+ intermediate_layer.process()
if __name__ == '__main__':
diff --git a/coverage-tool/coverage-reporting/merge.py b/coverage-tool/coverage-reporting/merge.py
index 3ab46f7..c4a80ad 100755
--- a/coverage-tool/coverage-reporting/merge.py
+++ b/coverage-tool/coverage-reporting/merge.py
@@ -30,6 +30,25 @@
python3 merge.py -a coverage_1.info -a coverage_2.info -o coverage_merge.info \
-j input_file1.json -j input_file2.json -m merge_file.json
+The metadata json file must contain the information for every repo that is
+used to build the binaries that were tested (and where coverage is desired).
+As a minimum this file must look like this:
+{
+ "configuration" : {
+ "sources": [
+ {
+ "COMMIT": "XXXXXXX", # [optional]
+ "LOCATION": "YYY", # Folder where the 'URL' repo is cloned in the
+ test workspace.
+ "LOCAL": "ZZZZ", # Local folder for the repo cloned at
+ the local workspace (optional, if not defined 'LOCATION' is assumed).
+ "REFSPEC": "XXXXXX", # [optional]
+ "URL": "XXXXXXXX",
+ "type": "git"
+ }
+ ]
+ }
+}
It is possible to merge any number of files at once.
If metadata json files are defined then they must pair with their
corresponding info file, i.e. have the same name.
@@ -39,9 +58,10 @@
By default, the output file must be a new file.
To overwrite an existing file, use the "--force" option.
-Note: the user is expected to merge .info files referring to the same project.
-If merging .info files from different projects, LCOV can be exploited directly
-using a command such as "lcov -rc lcov_branch_coverage=1 -a coverage_1.info \
+Note: the user is expected to merge .info files referring to the same
+project, i.e. same sources. If merging .info files from different projects,
+LCOV can be exploited directly using a command such as "lcov -rc
+lcov_branch_coverage=1 -a coverage_1.info \
-a coverage_2.info -o coverage_merge.info."
""", formatter_class=RawTextHelpFormatter)
requiredNamed = parser.add_argument_group('required named arguments')
@@ -59,6 +79,9 @@
help="force overwriting of output file.")
parser.add_argument("--local-workspace", dest='local_workspace',
help='Local workspace where source files reside.')
+parser.add_argument("-k", action='store_true', dest='keep_trans',
+ help='Keeps translated info files')
+
options = parser.parse_args(sys.argv[1:])
# At least two .info files are expected
@@ -68,7 +91,7 @@
# The same number of info and json files expected
if options.json_file:
if len(options.json_file) != len(options.add_file):
- print('Umatched number of info and json files.\n')
+ print('Unmatched number of info and json files.\n')
sys.exit(1)
file_groups = []
@@ -93,7 +116,7 @@
json_name = [i for i in options.json_file
if os.path.basename(i).split(".")[0] == info_name]
if not json_name:
- print("Umatched json file name for '{}'".format(file_name))
+ print("Unmatched json file name for '{}'".format(file_name))
sys.exit(1)
json_name = json_name.pop()
if not json_name[-5:] == '.json':
@@ -110,7 +133,8 @@
json_data = json.load(json_file)
locations = []
for source in json_data["configuration"]["sources"]:
- locations.append(source["LOCATION"])
+ location = source["LOCATION"]
+ locations.append((location, source.get("LOCAL", location)))
file_group["locations"] = locations
file_groups.append(file_group)
@@ -129,22 +153,26 @@
info_files_to_merge[i]))
info_lines = info_file.readlines()
info_file.close()
- temp_file = 'temporary_' + str(i) + '.info'
+ temp_file = info_file.name.replace('.info', '_local.info')
+ if options.keep_trans:
+ print("Keeping translated info file {}...".format(temp_file))
parts = None
with open(temp_file, "w+") as f:
for line in info_lines:
if "SF" in line:
for location in file_groups[i]["locations"]:
- if location in line:
- parts = line[3:].partition(location)
- line = line.replace(parts[0], options.local_workspace + "/")
+ if location[0] in line:
+ parts = line[3:].partition(location[0] + "/")
+ line = line[:3] + os.path.join(
+ options.local_workspace, location[1], parts[2])
break
f.write(line)
info_files_to_merge[i] = temp_file # Replace info file to be merged
i += 1
# Merge json files
-if len(options.json_file):
+if options.json_file:
json_merged_list = []
json_merged = {}
j = 0
@@ -158,7 +186,7 @@
j += 1
json_merged = {'configuration': {'sources': json_merged_list}}
with open(options.output_json, 'w') as f:
- json.dump(json_merged, f)
+ json.dump(json_merged, f, indent=4)
# Exploit LCOV merging capabilities
@@ -175,6 +203,6 @@
subprocess.call(command)
# Delete the temporary files
-if options.local_workspace is not None:
+if options.local_workspace is not None and not options.keep_trans:
for f in info_files_to_merge:
os.remove(f)
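
To make the translation concrete, this is roughly what the "SF:" rewrite
above does, sketched with made-up paths and LOCATION/LOCAL both set to
"trusted-firmware":

    import os

    line = "SF:/jenkins/ws/trusted-firmware/lib/a.c\n"
    location = ("trusted-firmware", "trusted-firmware")  # (LOCATION, LOCAL)
    parts = line[3:].partition(location[0] + "/")
    line = line[:3] + os.path.join("/home/user/ws", location[1], parts[2])
    # line == "SF:/home/user/ws/trusted-firmware/lib/a.c\n"
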
diff --git a/coverage-tool/coverage-reporting/merge.sh b/coverage-tool/coverage-reporting/merge.sh
index 8304487..e0ec69c 100755
--- a/coverage-tool/coverage-reporting/merge.sh
+++ b/coverage-tool/coverage-reporting/merge.sh
@@ -138,8 +138,7 @@
# 1-Json object that defines the locations of the info and json
# files
# 2-Folder to save the info and json files
-# 3-Variable that holds the name of the variable that will hold
-# the name of the file to be downloaded (reference argument)
+# 3-Reference argument to hold the copied file name location
# Outputs:
# None
################################################################
@@ -192,27 +191,35 @@
}
#####################################################################
-# Get (download/copy) info and json files from the input json file
+# Get (download/copy) info and json files from the configuration json
+# file
# Globals:
-# merge_input_json_file: Input json file with locations of info
-# and intermediate json files to be merged.
-# input_folder: Folder to put info and json files to be merged
+# merge_configuration_file: Input json file with locations of info
+# and json scm files to be merged.
+# info_files: Array of info file locations.
# Arguments:
-# None
+# 1: Target folder to download info and json files to be merged.
# Outputs:
# None
###################################################################
get_info_json_files() {
- json_string="$(cat $merge_input_json_file)"
- nf=$(get_json_object "$json_string" "-files")
+ local input_folder="${1:-$LCOV_FOLDER}"
+ local json_string="$(cat $merge_configuration_file)"
+ local config_json_file=""
+ local info_file=""
+ # Get files array
+ local nf=$(get_json_object "$json_string" "-files")
+ # Init target folder
rm -rf $input_folder > /dev/null || true
mkdir -p $input_folder
+ # Iterate through each file element and get the files
for f in $(seq 0 $(($nf - 1)));
do
pushd $input_folder > /dev/null
_file=$(get_json_object "$json_string" "files.$f")
+ # The name of the folder is the 'id' value
folder=$(get_json_object "$_file" "*id")
- echo "Geting files from project '$folder' into '$input_folder'..."
+ echo "Getting files from project '$folder' into '$input_folder'..."
mkdir -p $folder
bundles=$(get_json_object "$_file" "bundles" None)
if [ "$bundles" != "None" ];then
@@ -222,8 +229,10 @@
get_file "$(get_json_object "$bundles" "$n")" $folder
done
fi
+ # Download/copy files and save their locations
get_file "$(get_json_object "$_file" "config")" $folder config_json_file
get_file "$(get_json_object "$_file" "info")" $folder info_file
+ info_files+=($info_file)
popd > /dev/null
done
}
@@ -231,36 +240,62 @@
#################################################################
# Merge json and info files and generate branch coverage report
# Globals:
-# output_coverage_file: Location and name for merge coverage info
-# output_json_file: Location and name for merge json output
-# input_folder: Location where reside json and info files
-# LOCAL_WORKSPACE: Local workspace folder with the source files
+# merged_coverage_file: Location and name for merged coverage info
+# merged_json_file: Location and name for merged json scm sources
+# LOCAL_WORKSPACE: Local workspace folder with the source code files
+# generate_local: Flag to generate local lcov reports
# Arguments:
-# None
+# 1: Location where reside json and info files
# Outputs:
-# Output merge coverage file
-# Output merge json file
+# Merged coverage file
+# Merged json file
################################################################
merge_files() {
+ local input_folder="${1:-$LCOV_FOLDER}"
# Merge info and json files
local lc=" "
if [ -n "$LOCAL_WORKSPACE" ];then
- # Translation to be done in the info files to local workspace
+ # Translation from info workspaces into local workspace
lc=" --local-workspace $LOCAL_WORKSPACE"
fi
+ if [ "$generate_local" = true ];then
+ # Generate local reports
+ lc="${lc} -k"
+ fi
# Getting the path of merge.py, which must reside at the
# same path as merge.sh
python3 ${DIR}/merge.py \
$(find $input_folder -name "*.info" -exec echo "-a {}" \;) \
$(find $input_folder -name "*.json" -exec echo "-j {}" \;) \
- -o $output_coverage_file \
- -m $output_json_file \
+ -o $merged_coverage_file \
+ -m $merged_json_file \
$lc
}
#################################################################
+# Generate local lcov reports
+# Globals:
+# info_files: Array of locations and names of info files
+# Arguments:
+# None
+# Outputs:
+# Lcov report files for each info file
+################################################################
+generate_local_reports() {
+ for i in ${!info_files[@]};
+ do
+ local info_file=${info_files[$i]}
+ local parentdir=$(dirname "$info_file")
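+ # merge.py (invoked with -k) leaves a translated "*_local.info"
+ # copy next to each input info file; generate the report from it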
+ local t_info_file="${info_file/.info/_local.info}"
+ genhtml --branch-coverage $t_info_file \
+ --output-directory $parentdir
+ done
+}
+
+
+#################################################################
# Print scripts usage
# Arguments:
# None
@@ -271,20 +306,20 @@
clear
echo "Usage:"
echo "merge -h Display this help message."
- echo "-j <input json file> Input json file(info and intermediate json files to be merged)."
- echo "-l <report folder> Folder for branch coverage report. Defaults to ./lcov_folder."
- echo "-i <Path> Folder to copy/download info and json files. Defaults to ./input."
- echo "-w <Folder> Local workspace folder for source files."
- echo "-o <name> Name of the merged info file. Defaults to ./coverage_merge.info"
- echo "-m <name> Name of the merged metadata json file. Defaults to ./merge_output.json"
- echo "-c If it is set, sources from merged json files will be cloned/copied to local workspace folder."
+ echo "-j <JSON filename> JSON configuration file (info and intermediate json filenames to be merged)."
+ echo "[-l <Report path>] Coverage reports directory. Defaults to ./Coverage"
+ echo "[-w <Workspace path>] Workspace directory for source code files."
+ echo "[-o <Info filename>] Merged info file. Defaults to ./merged_coverage.info"
+ echo "[-m <JSON filename>] JSON merged SCM sources. Defaults to ./merged_scm.json"
+ echo "[-c] Flag to download/copy the source files from the JSON merged SCM into the workspace directory."
+ echo "[-g] Flag to generate local reports for each info/json instance."
echo "$help_message"
}
help_message=$(cat <<EOF
-# The script that merges the info data (code coverage) and json metadata
-# (intermediate layer) needs as an input a json file with the following
+# The script merging the info files (code coverage) and json SCM sources
+# (intermediate layer) needs a JSON configuration file with the following
# properties:
# files: array of objects that describe the type of file/project to be
# merged.
@@ -333,23 +368,17 @@
)
clear
-# Local workspace folder to contain source files
LOCAL_WORKSPACE=""
-# If this is true then will clone/copy sources from merged json
-# file into local workspace
CLONE_SOURCES=false
-# Location of the input json file that contains information about
-# the info and json files to be merged and produced a report
-merge_input_json_file=""
-# Folder to download json and info files
-input_folder="./input_folder"
+merge_configuration_file=""
+generate_local=false
# Folder to put the reports
-LCOV_FOLDER="./lcov_folder"
+LCOV_FOLDER="./Coverage"
# File name for merge coverage info
-output_coverage_file="./coverage_merge.info"
-# File name for merge json output
-output_json_file="./merge_output.json"
-while getopts ":hj:o:l:w:i:cm:" opt; do
+merged_coverage_file="./merged_coverage.info"
+merged_json_file="./merged_scm.json"
+info_files=() # Array of info files
+while getopts ":hj:o:l:w:i:cm:g" opt; do
case ${opt} in
h )
usage
@@ -358,23 +387,23 @@
w )
LOCAL_WORKSPACE=$(cd $OPTARG; pwd)
;;
- i )
- input_folder=$OPTARG
- ;;
c )
CLONE_SOURCES=true
;;
j )
- merge_input_json_file=$OPTARG
+ merge_configuration_file=$OPTARG
;;
l )
LCOV_FOLDER=$OPTARG
;;
o )
- output_coverage_file=$OPTARG
+ merged_coverage_file=$OPTARG
;;
m )
- output_json_file=$OPTARG
+ merged_json_file=$OPTARG
+ ;;
+ g )
+ generate_local=true
;;
\? )
echo "Invalid option: $OPTARG" 1>&2
@@ -389,29 +418,32 @@
esac
done
shift $((OPTIND -1))
-if [ -z "$merge_input_json_file" ]; then
- echo "Input json file required"
+if [ -z "$merge_configuration_file" ]; then
+ echo "Merged configuration file required."
usage
exit -1
fi
if [ -z "$LOCAL_WORKSPACE" ] && [ $CLONE_SOURCES = true ]; then
- echo "Need to define a local workspace folder to clone/copy sources!"
+ echo "A local workspace directory is required to clone/copy sources!"
exit -1
fi
-# Getting the script folder where other script files must reside, i.e
+# Getting the script folder where other qa-tools script files must reside, i.e.
# merge.py, clone_sources.py
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
-input_folder="$(get_abs_path $input_folder)"
-LCOV_FOLDER="$(get_abs_path $LCOV_FOLDER)"
-output_coverage_file="$(get_abs_path $output_coverage_file)"
-output_json_file="$(get_abs_path $output_json_file)"
+LCOV_FOLDER="$(get_abs_path $LCOV_FOLDER)"
+merged_coverage_file="$(get_abs_path $merged_coverage_file)"
+merged_json_file="$(get_abs_path $merged_json_file)"
param_cloned=""
get_info_json_files
merge_files
if [ $CLONE_SOURCES = true ];then
- clone_repos $output_json_file
+ clone_repos $merged_json_file
fi
+
# Generate branch coverage report
-genhtml --branch-coverage $output_coverage_file \
+genhtml --branch-coverage $merged_coverage_file \
--output-directory $LCOV_FOLDER
-cd -
+
+if [ "$generate_local" = true ];then
+ generate_local_reports
+fi
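
For context, a skeleton of the JSON configuration consumed through -j,
using only the keys read by get_info_json_files above ("files", "id",
"config", "info" and optional "bundles"); the location objects themselves
follow the format described in the help message:

    {
        "files": [
            {
                "id": "project-a",
                "config": { ... },
                "info": { ... }
            }
        ]
    }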