#!/usr/bin/env bash

##############################################################################
# Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
#
# SPDX-License-Identifier: GPL-2.0-only
##############################################################################

#==============================================================================
# FILE: merge.sh
#
# DESCRIPTION: Wrapper to merge intermediate json files and LCOV trace .info
#              files.
#==============================================================================

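# Typical invocation (illustrative; the input json file name is an example):
#   ./merge.sh -j merge_input.json -w /path/to/workspace -c -l ./lcov_folder
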
set -x
#################################################################
# Function to manipulate json objects.
# The json object properties can be accessed through "." separated
# property names. Special characters define a function over a given
# property value:
# If the qualifier list starts with '-' then the length of the json
# array defined by the qualifiers is returned.
# If the qualifier list starts with '*' then the resulting json value
# is returned without the enclosing double quotes.
# If a property name starts with "?" then the function returns whether
# that property exists within the json object.
# Globals:
#   None
# Arguments:
#   1-Json string that describes the json object
#   2-String of '.' separated qualifiers to access properties
#     within the json object
#   3-Optional default value for a sought property value
# Outputs:
#   None
################################################################
get_json_object() {
    export _json_string="$1"
    export _qualifiers="$2"
    export _default="$3"
    python3 - << EOT
import os
import json
import sys

_json_string = os.getenv("_json_string", "")
_qualifiers = os.getenv("_qualifiers", "")
_default = os.getenv("_default", "")
try:
    data = json.loads(_json_string)
except Exception as ex:
    print("Error decoding json string:{}".format(ex))
    sys.exit(-1)
ptr = data
if _qualifiers[0] in ['-', '*']:
    cmd = _qualifiers[0]
    _qualifiers = _qualifiers[1:]
else:
    cmd = ""
for _name in _qualifiers.split("."):
    if _name in ptr:
        ptr = ptr[_name]
    elif _name.isdigit() and int(_name) < len(ptr):
        ptr = ptr[int(_name)]
    elif _name.startswith("?"):
        print(_name[1:] in ptr)
        sys.exit(0)
    elif _default:
        print(_default)
        sys.exit(0)
    else:
        print("'{}' is not in the json object".format(_name))
        sys.exit(-1)
if cmd == "-":
    # return len of the json array
    print(len(ptr))
elif cmd == "*":
    # remove quotes
    string = json.dumps(ptr)
    if string.startswith('"') and string.endswith('"'):
        string = string[1:-1]
    print(string)
else:
    print(json.dumps(ptr))
EOT
}

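# Illustrative (not executed) examples of the qualifier syntax above,
# using a hypothetical json string:
#   json='{"files": [{"id": "proj1"}]}'
#   get_json_object "$json" "-files"       # -> 1        (length of the "files" array)
#   get_json_object "$json" "files.0.id"   # -> "proj1"  (json value, quoted)
#   get_json_object "$json" "*files.0.id"  # -> proj1    (quotes stripped)
#   get_json_object "$json" "files.0.?id"  # -> True     (property existence check)
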
#################################################################
# Convert a relative path to an absolute path
# Globals:
#   None
# Arguments:
#   1-Path to be converted
# Outputs:
#   Absolute path
################################################################
get_abs_path() {
    path="$1"
    echo "$(cd "$(dirname "$path")" && echo "$(pwd -P)/$(basename "$path")")"
}

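# Illustrative example: "get_abs_path ./input_folder" prints the folder's
# absolute path, e.g. "<current working dir>/input_folder".
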
#################################################################
# Clone the source files
# Globals:
#   None
# Arguments:
#   1-Json file with the sources to be cloned
#   2-Folder where the sources are to be cloned
# Outputs:
#   None
################################################################
clone_repos() {
    export OUTPUT_JSON="$1"
    export CSOURCE_FOLDER="${2:-$LOCAL_WORKSPACE}"

    cd $DIR # Run from this script's folder, where clone_sources.py resides
    python3 - << EOT
import os
import clone_sources

output_file = os.getenv('OUTPUT_JSON', 'output_file.json')
source_folder = os.getenv('CSOURCE_FOLDER', 'source')
try:
    r = clone_sources.CloneSources(output_file)
    r.clone_repo(source_folder)
except Exception as ex:
    print(ex)
EOT
    cd -
}

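# Illustrative example (assuming clone_sources.py sits next to this script,
# as required above):
#   clone_repos "$output_json_file" "$LOCAL_WORKSPACE"
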
#################################################################
# Get a file defined in the json object
# Globals:
#   None
# Arguments:
#   1-Json object that defines the locations of the info and json
#     files
#   2-Folder to save the info and json files
#   3-Name of the variable that will hold the path of the obtained
#     file (reference argument)
# Outputs:
#   None
################################################################
get_file() {
    json_object="$1"
    where="$2"
    var_name="${3:-param_cloned}" # Defaults to global var

    local _type=$(get_json_object "$json_object" "type")
    local _origin=$(get_json_object "$json_object" "*origin")
    local _compression=$(get_json_object "$json_object" "*compression" None)
    local fname=""
    local cloned_file=""
    local full_filename=$(basename -- "$_origin")
    local extension="${full_filename##*.}"
    local filename="${full_filename%.*}"

    if [ "$_type" = '"http"' ];then
        fname="$where.$extension" # Same filename as folder
        rm $where/$fname &>/dev/null || true
        wget -o error.log $_origin -O $where/$fname || (
            cat error.log && exit -1)
        cloned_file="$(get_abs_path $where/$fname)"
    elif [ "$_type" = '"bundle"' ];then
        # Check file exists at origin, i.e. was unbundled before
        fname="$_origin"
        if [ -f "$where/$fname" ];then
            cloned_file="$(get_abs_path $where/$fname)"
        fi
    elif [ "$_type" = '"file"' ];then
        if [[ "$_origin" = http* ]]; then
            echo "$_origin looks like 'http' rather than 'file', please check..."
            exit -1
        fi
        fname="$where.$extension" # Same filename as folder
        cp -f $_origin $where/$fname
        cloned_file="$(get_abs_path $where/$fname)"
    else
        echo "Error: unsupported file type: $_type... Aborting."
        exit -1
    fi
    if [ "$_compression" = "tar.xz" ];then
        cd $where
        pwd
        tar -xf $fname # let tar auto-detect the (xz) compression
        rm -f $fname
        cd -
    fi
    eval "${var_name}=${cloned_file}"
}

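# Illustrative example: fetch the "info" file described by a project json
# object into the folder "proj1" and capture its absolute path in the
# variable "info_file" (names are hypothetical):
#   get_file "$(get_json_object "$_file" "info")" proj1 info_file
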
#####################################################################
# Get (download/copy) the info and json files from the input json file
# Globals:
#   merge_input_json_file: Input json file with locations of the info
#                          and intermediate json files to be merged.
#   input_folder: Folder to put the info and json files to be merged
# Arguments:
#   None
# Outputs:
#   None
###################################################################
get_info_json_files() {
    json_string="$(cat $merge_input_json_file)"
    nf=$(get_json_object "$json_string" "-files")
    rm -rf $input_folder > /dev/null || true
    mkdir -p $input_folder
    for f in $(seq 0 $(($nf - 1)));
    do
        pushd $input_folder > /dev/null
        _file=$(get_json_object "$json_string" "files.$f")
        folder=$(get_json_object "$_file" "*id")
        echo "Getting files from project '$folder' into '$input_folder'..."
        mkdir -p $folder
        bundles=$(get_json_object "$_file" "bundles" None)
        if [ "$bundles" != "None" ];then
            nb=$(get_json_object "$_file" "-bundles")
            for n in $(seq 0 $(($nb - 1)));
            do
                get_file "$(get_json_object "$bundles" "$n")" $folder
            done
        fi
        get_file "$(get_json_object "$_file" "config")" $folder config_json_file
        get_file "$(get_json_object "$_file" "info")" $folder info_file
        popd > /dev/null
    done
}

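# With an input json like the one shown in the help message below, the layout
# after this step is expected to be roughly (illustrative):
#   <input_folder>/<project id>/<project id>.info
#   <input_folder>/<project id>/<project id>.json
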
#################################################################
# Merge the json and info files and generate the branch coverage report
# Globals:
#   output_coverage_file: Location and name for the merged coverage info
#   output_json_file: Location and name for the merged json output
#   input_folder: Location where the json and info files reside
#   LOCAL_WORKSPACE: Local workspace folder with the source files
# Arguments:
#   None
# Outputs:
#   Merged coverage file
#   Merged json file
################################################################
merge_files() {
    # Merge info and json files
    local lc=" "
    if [ -n "$LOCAL_WORKSPACE" ];then
        # Translation to be done in the info files to local workspace
        lc=" --local-workspace $LOCAL_WORKSPACE"
    fi
    # merge.py must reside in the same folder as merge.sh
    python3 ${DIR}/merge.py \
        $(find $input_folder -name "*.info" -exec echo "-a {}" \;) \
        $(find $input_folder -name "*.json" -exec echo "-j {}" \;) \
        -o $output_coverage_file \
        -m $output_json_file \
        $lc
}
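# With two hypothetical projects "proj1" and "proj2" under $input_folder, the
# expanded command is expected to look roughly like (illustrative):
#   python3 merge.py -a .../proj1/proj1.info -a .../proj2/proj2.info \
#       -j .../proj1/proj1.json -j .../proj2/proj2.json \
#       -o coverage_merge.info -m merge_output.json --local-workspace <workspace>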


#################################################################
# Print script usage
# Arguments:
#   None
# Outputs:
#   Prints script usage to stdout
################################################################
usage() {
    clear
    echo "Usage:"
    echo "merge -h              Display this help message."
    echo "-j <input json file>  Input json file (info and intermediate json files to be merged)."
    echo "-l <report folder>    Folder for the branch coverage report. Defaults to ./lcov_folder."
    echo "-i <Path>             Folder to copy/download info and json files. Defaults to ./input_folder."
    echo "-w <Folder>           Local workspace folder for source files."
    echo "-o <name>             Name of the merged info file. Defaults to ./coverage_merge.info"
    echo "-m <name>             Name of the merged metadata json file. Defaults to ./merge_output.json"
    echo "-c                    If set, sources from the merged json files will be cloned/copied to the local workspace folder."
    echo "$help_message"
}

help_message=$(cat <<EOF

# The script that merges the info data (code coverage) and json metadata
# (intermediate layer) needs as an input a json file with the following
# properties:
#   files: array of objects that describe the type of file/project to be
#          merged.
#     id: Unique identifier (project) associated to the info and
#         intermediate json files
#     config: Intermediate json file
#       type: Type of storage for the file. (http or file)
#       origin: Location (url or folder) of the file
#     info: Info file
#       type: Type of storage for the file. (http or file)
#       origin: Location (url or folder) of the file
# Example:
{ "files" : [
    {
        "id": "<project 1>",
        "config":
        {
            "type": "http",
            "origin": "<URL of json file for project 1>"
        },
        "info":
        {
            "type": "http",
            "origin": "<URL of info file for project 1>"
        }
    },
    {
        "id": "<project 2>",
        "config":
        {
            "type": "http",
            "origin": "<URL of json file for project 2>"
        },
        "info":
        {
            "type": "http",
            "origin": "<URL of info file for project 2>"
        }
    },
    .
    .
    .
    ]
}
EOF
)

clear
# Local workspace folder to contain the source files
LOCAL_WORKSPACE=""
# If true, sources from the merged json file will be cloned/copied
# into the local workspace
CLONE_SOURCES=false
# Location of the input json file that contains information about
# the info and json files to be merged to produce a report
merge_input_json_file=""
# Folder to download the json and info files into
input_folder="./input_folder"
# Folder to put the reports in
LCOV_FOLDER="./lcov_folder"
# File name for the merged coverage info
output_coverage_file="./coverage_merge.info"
# File name for the merged json output
output_json_file="./merge_output.json"
while getopts ":hj:o:l:w:i:cm:" opt; do
    case ${opt} in
        h )
            usage
            exit 0
            ;;
        w )
            LOCAL_WORKSPACE=$(cd $OPTARG; pwd)
            ;;
        i )
            input_folder=$OPTARG
            ;;
        c )
            CLONE_SOURCES=true
            ;;
        j )
            merge_input_json_file=$OPTARG
            ;;
        l )
            LCOV_FOLDER=$OPTARG
            ;;
        o )
            output_coverage_file=$OPTARG
            ;;
        m )
            output_json_file=$OPTARG
            ;;
        \? )
            echo "Invalid option: $OPTARG" 1>&2
            usage
            exit -1
            ;;
        : )
            echo "Invalid option: $OPTARG requires an argument" 1>&2
            usage
            exit -1
            ;;
    esac
done
shift $((OPTIND -1))
if [ -z "$merge_input_json_file" ]; then
    echo "Input json file required"
    usage
    exit -1
fi
if [ -z "$LOCAL_WORKSPACE" ] && [ $CLONE_SOURCES = true ]; then
    echo "Need to define a local workspace folder to clone/copy sources!"
    exit -1
fi
# Get the script folder, where the other script files (merge.py,
# clone_sources.py) must reside
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
input_folder="$(get_abs_path $input_folder)"
LCOV_FOLDER="$(get_abs_path $LCOV_FOLDER)"
output_coverage_file="$(get_abs_path $output_coverage_file)"
output_json_file="$(get_abs_path $output_json_file)"
param_cloned=""
get_info_json_files
merge_files
if [ $CLONE_SOURCES = true ];then
    cat $output_json_file; echo
    clone_repos $output_json_file
fi
if [ -n "$LOCAL_WORKSPACE" ]; then
    # Perform the path translation specific to OpenCI tf-a-ci-gateway/tf-a-builder,
    # e.g. "SF:/home/buildslave/workspace/<job name>/..." -> "SF:$LOCAL_WORKSPACE/..."
    sed -i "s|SF:/home/buildslave/workspace/[^/]*|SF:$LOCAL_WORKSPACE|" $output_coverage_file
    # Filter out 3rd-party component files which shouldn't be in the report
    lcov -rc lcov_branch_coverage=1 -r $output_coverage_file '*/workspace/mbedtls/*' -o $output_coverage_file.tmp
    mv $output_coverage_file.tmp $output_coverage_file
fi
cat $output_coverage_file
# Generate the branch coverage report
genhtml --branch-coverage $output_coverage_file \
    --output-directory $LCOV_FOLDER
cd -