Basil Eljuse | 4b14afb | 2020-09-30 13:07:23 +0100 | [diff] [blame] | 1 | # !/usr/bin/env python |
| 2 | ############################################################################### |
| 3 | # Copyright (c) 2020, ARM Limited and Contributors. All rights reserved. |
| 4 | # |
| 5 | # SPDX-License-Identifier: BSD-3-Clause |
| 6 | ############################################################################### |
| 7 | |
| 8 | ############################################################################### |
| 9 | # FILE: merge.py |
| 10 | # |
| 11 | # DESCRIPTION: Merge two or more .info and json files, sanitizing source file |
| 12 | # paths. |
| 13 | # If different .info files contain the same source code duplicated |
| 14 | # in different directories, we use the absolute paths of the |
| 15 | # first .info file. |
| 16 | # |
| 17 | ############################################################################### |
| 18 | |
| 19 | |
| 20 | import os |
| 21 | import sys |
| 22 | import argparse |
| 23 | from argparse import RawTextHelpFormatter |
| 24 | import subprocess |
| 25 | import json |
| 26 | |
| 27 | |
# Define an argument parser using the argparse library
parser = argparse.ArgumentParser(epilog="""Example of usage:
python3 merge.py -a coverage_1.info -a coverage_2.info -o coverage_merge.info \
-j input_file1.json -j input_file2.json -m merge_file.json

The metadata json file must contain the information for every repo that is
used to build the binaries that were tested (and where coverage is desired).
As a minimum this file must look like this:
{
    "configuration" : {
        "sources": [
            {
                "COMMIT": "XXXXXXX", # [optional]
                "LOCATION": "YYY", # Folder where the 'URL' repo is cloned in the
test workspace.
                "LOCAL": "ZZZZ", # Local folder for the repo cloned at
the local workspace (optional, if not defined 'LOCATION' is assumed).
                "REFSPEC": "XXXXXX", # [optional]
                "URL": "XXXXXXXX",
                "type": "git"
            }
        ]
    }
}
It is possible to merge any number of files at once.
If metadata json files are defined then they must pair with their
corresponding info file, i.e. have the same name.
If a local workspace is defined then the paths in the info files will
be translated from the original test workspace to the local workspace
to enable the usage of LCOV, but the original files will be kept intact.
By default, the output file must be a new file.
To overwrite an existing file, use the "--force" option.

Note: the user is expected to merge .info files referring to the same
project, i.e. same sources. If merging .info files from different projects,
LCOV can be exploited directly using a command such as "lcov -rc
lcov_branch_coverage=1 -a coverage_1.info \
-a coverage_2.info -o coverage_merge.info."
""", formatter_class=RawTextHelpFormatter)
requiredNamed = parser.add_argument_group('required named arguments')
requiredNamed.add_argument("-a", "--add-file",
                           help="Input info file to be merged.",
                           action='append', required=True)
# NOTE: options.output is dereferenced unconditionally further down the
# script; declaring it with required=False used to let argparse accept an
# invocation without -o that then crashed with a TypeError. Enforce it here,
# matching the "required named arguments" group it is registered in.
requiredNamed.add_argument("-o", "--output",
                           help="Name of the output info (merged) file.",
                           required=True)
parser.add_argument("-j", "--json-file", action='append',
                    help="Input json file to be merged.")
parser.add_argument("-m", "--output-json",
                    help="Name of the output json (merged) file.")
parser.add_argument("--force", dest='force', action='store_true',
                    help="force overwriting of output file.")
parser.add_argument("--local-workspace", dest='local_workspace',
                    help='Local workspace where source files reside.')
parser.add_argument("-k", action='store_true', dest='keep_trans',
                    help='Keeps translated info files')


options = parser.parse_args(sys.argv[1:])
# Sanity-check the parsed arguments before doing any real work.
# Merging only makes sense with two or more input .info files.
if len(options.add_file) < 2:
    print('Error: too few input files.\n')
    sys.exit(1)
# When metadata json files are supplied there must be exactly one per
# info file, so that they can be paired by base name later on.
if options.json_file and len(options.json_file) != len(options.add_file):
    print('Unmatched number of info and json files.\n')
    sys.exit(1)
| 96 | |
file_groups = []
info_files_to_merge = []
# Validate every input info file and build one "file group" per file:
#   {"info": <info path>, "json": <metadata path or "">,
#    "locations": [(LOCATION, LOCAL), ...]}
# The locations are needed later if translation to a local workspace
# is requested.
for file_name in options.add_file:
    print("Merging '{}'".format(file_name))
    if not os.path.isfile(file_name):
        print('Error: file "' + file_name + '" not found.\n')
        sys.exit(1)
    if not file_name.endswith('.info'):
        print('Error: file "' + file_name +
              '" has wrong extension. Expected .info file.\n')
        sys.exit(1)
    if file_name in info_files_to_merge:
        print("Error: Duplicated info file '{}'".format(file_name))
        sys.exit(1)
    # An empty tracefile carries no coverage; skip it with a warning
    # rather than feeding it to lcov.
    if os.stat(file_name).st_size == 0:
        print("Warning: Empty info file '{}', skipping it".format(file_name))
        continue
    info_files_to_merge.append(file_name)
    file_group = {"info": file_name, "locations": [], "json": ""}
    info_name = os.path.basename(file_name).split(".")[0]
    if options.json_file:
        # Pair the metadata json with its info file by base name.
        json_name = [i for i in options.json_file
                     if os.path.basename(i).split(".")[0] == info_name]
        if not json_name:
            print("Unmatched json file name for '{}'".format(file_name))
            sys.exit(1)
        json_name = json_name.pop()
        if not json_name.endswith('.json'):
            print('Error: file "' + json_name +
                  '" has wrong extension. Expected .json file.\n')
            sys.exit(1)
        if not os.path.isfile(json_name):
            print('Error: file "' + json_name + '" not found.\n')
            sys.exit(1)
        # Now we have to extract the location folders for each info
        # this is needed if we want translation to local workspace
        file_group["json"] = json_name
        with open(json_name) as json_file:
            json_data = json.load(json_file)
        # The sources may live under either "parameters" or
        # "configuration" (both layouts appear in metadata files).
        parent = json_data.get("parameters", json_data.get("configuration"))
        if parent is None:
            # A missing key used to surface as an opaque TypeError on the
            # subscription below; fail with a clear message instead.
            print('Error: file "' + json_name +
                  '" has no "parameters" or "configuration" object.\n')
            sys.exit(1)
        locations = []
        for source in parent["sources"]:
            location = source["LOCATION"]
            locations.append((location, source.get("LOCAL", location)))
        file_group["locations"] = locations
    file_groups.append(file_group)
| 144 | |
# Check the output file name. -o is registered with required=False, so
# guard against a missing value: slicing None used to raise a TypeError
# here instead of producing a usable error message.
if options.output is None:
    print('Error: no output file name given. Use -o/--output.\n')
    sys.exit(1)
# Check the extension of the output file
if not options.output.endswith('.info'):
    print('Error: file "' + options.output +
          '" has wrong extension. Expected .info file.\n')
    sys.exit(1)
| 150 | |
if options.local_workspace is not None:
    # Translation from test to local workspace: rewrite the "SF:" (source
    # file) records of every info file so they point into the local
    # workspace, writing the result to a sibling *_local.info file.
    # The original info files are kept intact.
    for i, info_name in enumerate(info_files_to_merge):
        print("Translating workspace for '{}'...".format(info_name))
        with open(info_name, "r") as info_file:
            info_lines = info_file.readlines()
        temp_file = info_name.replace('.info', '_local.info')
        if options.keep_trans:
            print("Keeping translated info file {}...".format(temp_file))
        with open(temp_file, "w+") as f:
            for line in info_lines:
                if "SF" in line:
                    for location in file_groups[i]["locations"]:
                        if location[0] in line:
                            # Keep the "SF:" prefix (line[:3]) and swap
                            # the test-workspace path after LOCATION/ for
                            # the local workspace + LOCAL folder.
                            parts = line[3:].partition(location[0] + "/")
                            line = line[:3] + os.path.join(
                                options.local_workspace, location[1],
                                parts[2])
                            break
                f.write(line)
        info_files_to_merge[i] = temp_file  # Replace info file to be merged
| 177 | |
# Merge json files: take the union of every "sources" entry across the
# metadata files, dropping duplicates and preserving first-seen order.
if options.json_file:
    if options.output_json is None:
        # open(None) used to raise a TypeError when -j was given without
        # -m; fail with a clear message instead.
        print('Error: no output json file name given. Use -m/--output-json.\n')
        sys.exit(1)
    json_merged_list = []
    for json_file in options.json_file:
        with open(json_file) as f:
            data = json.load(f)
        # Sources may live under either "parameters" or "configuration".
        parent = data.get("parameters", data.get("configuration"))
        for source in parent['sources']:
            if source not in json_merged_list:
                json_merged_list.append(source)
    json_merged = {'configuration': {'sources': json_merged_list}}
    with open(options.output_json, 'w') as f:
        json.dump(json_merged, f, indent=4)
Basil Eljuse | 4b14afb | 2020-09-30 13:07:23 +0100 | [diff] [blame] | 195 | |
| 196 | |
# Exploit LCOV merging capabilities
# Example of LCOV usage: lcov -rc lcov_branch_coverage=1 -a coverage_1.info \
# -a coverage_2.info -o coverage_merge.info
command = ['lcov', '--rc', 'lcov_branch_coverage=1']

for file_name in info_files_to_merge:
    command += ['-a', file_name]
command += ['-o', options.output]

# Keep lcov's exit status: subprocess.call's return value used to be
# discarded, so the script exited 0 even when the merge failed.
lcov_status = subprocess.call(command)

# Delete the temporary (translated) files unless asked to keep them
if options.local_workspace is not None and not options.keep_trans:
    for f in info_files_to_merge:
        os.remove(f)

# Propagate the merge result to the caller (e.g. CI).
sys.exit(lcov_status)