#!/usr/bin/env bash

##############################################################################
# Copyright (c) 2020-2025, ARM Limited and Contributors. All rights reserved.
#
# SPDX-License-Identifier: GPL-2.0-only
##############################################################################

#==============================================================================
# FILE: merge.sh
#
# DESCRIPTION: Wrapper to merge intermediate json files and LCOV trace .info
# files.
#==============================================================================
set -x
#################################################################
# Function to manipulate json objects.
# The json object properties can be accessed through "." separated
# property names. Special characters at the start of the qualifier
# list modify the lookup:
# If the qualifier list starts with '-' then the length of the json
# array defined by the qualifiers is returned.
# If the qualifier list starts with '*' then the resulting json value
# is returned without the enclosing double quotes.
# If a property name starts with "?" then the function reports
# whether that property exists within the json object.
# Globals:
#   None
# Arguments:
#   1-Json string that describes the json object
#   2-String of '.' separated qualifiers to access properties
#     within the json object
#   3-Optional default value for a sought property value
# Outputs:
#   The sought value (or array length/existence result) printed to stdout
################################################################
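# Example calls (illustrative only; the JSON below is a made-up sketch):
#   get_json_object '{"files":[{"id":"build1"}]}' "-files"        # -> 1 (array length)
#   get_json_object '{"files":[{"id":"build1"}]}' "*files.0.id"   # -> build1 (no quotes)
#   get_json_object '{"files":[{"id":"build1"}]}' "files.0.?id"   # -> True (existence check)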
get_json_object() {
    export _json_string="$1"
    export _qualifiers="$2"
    export _default="$3"
    python3 - << EOT
import os
import json
import sys

_json_string = os.getenv("_json_string", "")
_qualifiers = os.getenv("_qualifiers", "")
_default = os.getenv("_default", "")
try:
    data = json.loads(_json_string)
except Exception as ex:
    print("Error decoding json string:{}".format(ex))
    sys.exit(-1)
ptr = data
if _qualifiers and _qualifiers[0] in ['-', '*']:
    cmd = _qualifiers[0]
    _qualifiers = _qualifiers[1:]
else:
    cmd = ""
for _name in _qualifiers.split("."):
    if _name in ptr:
        ptr = ptr[_name]
    elif _name.isdigit() and int(_name) < len(ptr):
        ptr = ptr[int(_name)]
    elif _name.startswith("?"):
        print(_name[1:] in ptr)
        sys.exit(0)
    elif _default:
        print(_default)
        sys.exit(0)
    else:
        print("'{}' is not in the json object".format(_name))
        sys.exit(-1)
if cmd == "-":
    # return len of the json array
    print(len(ptr))
elif cmd == "*":
    # remove quotes
    string = json.dumps(ptr)
    if string.startswith('"') and string.endswith('"'):
        string = string[1:-1]
    print(string)
else:
    print(json.dumps(ptr))
EOT
}

#################################################################
# Convert a relative path to an absolute path
# Globals:
#   None
# Arguments:
#   1-Path to be converted
# Outputs:
#   Absolute path
################################################################
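# Example (illustrative): "get_abs_path ./Coverage" prints something like
# "/home/user/qa-tools/Coverage", depending on the current directory.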
get_abs_path() {
    path="$1"
    echo "$(cd "$(dirname "$path")" && echo "$(pwd -P)/$(basename "$path")")"
}

#################################################################
# Clone the source files
# Globals:
#   LOCAL_WORKSPACE: Default folder where the sources are cloned
#   DIR: Folder containing this script and clone_sources.py
# Arguments:
#   1-Json file with the sources to be cloned
#   2-Folder where the sources are cloned to
# Outputs:
#   None
################################################################
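# Example (illustrative): clone the sources listed in the merged SCM file
# into the local workspace:
#   clone_repos ./merged_scm.json "$LOCAL_WORKSPACE"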
clone_repos() {
    export OUTPUT_JSON="$1"
    export CSOURCE_FOLDER="${2:-$LOCAL_WORKSPACE}"

    cd $DIR # Must be run from the folder containing this script
    python3 - << EOT
import os
import clone_sources

output_file = os.getenv('OUTPUT_JSON', 'output_file.json')
source_folder = os.getenv('CSOURCE_FOLDER', 'source')
try:
    r = clone_sources.CloneSources(output_file)
    r.clone_repo(source_folder)
except Exception as ex:
    print(ex)
EOT
    cd -
}

#################################################################
# Get a file defined in the json object
# Globals:
#   None
# Arguments:
#   1-Json object that defines the location of the info or json file
#   2-Folder to save the info and json files
#   3-Name of the variable that receives the location of the copied
#     file
# Outputs:
#   None
################################################################
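# Example (illustrative; the URL and folder names are placeholders):
#   get_file '{"type": "http", "origin": "https://example.com/build1.info"}' \
#       build1 info_file
# would download the file into the "build1" folder and set "info_file" to
# its absolute path.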
get_file() {
    json_object="$1"
    where="$2"
    var_name="${3:-param_cloned}" # Defaults to global var

    local _type=$(get_json_object "$json_object" "type")
    local _origin=$(get_json_object "$json_object" "*origin")
    local _compression=$(get_json_object "$json_object" "*compression" None)
    local fname=""
    local cloned_file=""
    local full_filename=$(basename -- "$_origin")
    local extension="${full_filename##*.}"
    local filename="${full_filename%.*}"

    if [ "$_type" = '"http"' ];then
        fname="$where.$extension" # Same filename as folder
        rm $where/$fname &>/dev/null || true
        wget -nv $_origin -O $where/$fname || return 1
        cloned_file="$(get_abs_path $where/$fname)"
    elif [ "$_type" = '"bundle"' ];then
        # Check the file exists at origin, i.e. was unbundled before
        fname="$_origin"
        if [ -f "$where/$fname" ];then
            cloned_file="$(get_abs_path $where/$fname)"
        fi
    elif [ "$_type" = '"file"' ];then
        if [[ "$_origin" = http* ]]; then
            echo "$_origin looks like 'http' rather than 'file', please check..."
            return 1
        fi
        fname="$where.$extension" # Same filename as folder
        cp -f $_origin $where/$fname
        cloned_file="$(get_abs_path $where/$fname)"
    else
        echo "Error: unsupported file type '$_type'... Aborting."
        return 1
    fi
    if [ "$_compression" = "tar.xz" ];then
        cd $where
        pwd
        tar -xJf $fname
        rm -f $fname
        cd -
    fi
    eval "${var_name}=${cloned_file}"
    return 0
}

#####################################################################
# Get (download/copy) info and json files from the configuration json
# file
# Globals:
#   merge_configuration_file: Input json file with locations of the
#     info and json scm files to be merged.
#   info_files: Array of info file locations.
#   json_files: Array of json configuration file locations.
#   list_of_merged_builds: Array of merged build identifiers.
# Arguments:
#   1: Target folder to download info and json files to be merged.
# Outputs:
#   None
###################################################################
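# Example (illustrative): fetch every info/json pair listed in the
# configuration file into per-build subfolders under ./Coverage:
#   get_info_json_files ./Coverage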
get_info_json_files() {
    local input_folder="${1:-$LCOV_FOLDER}"
    local json_string="$(cat $merge_configuration_file)"
    local config_json_file=""
    local info_file=""

    #printf "\tReading from JSON data:\n\t\t%s" $json_string
    # Get files array
    local nf=$(get_json_object "$json_string" "-files")
    # Init target folder
    rm -rf $input_folder > /dev/null || true
    mkdir -p $input_folder
    # Iterate through each file element and get the files
    for f in $(seq 0 $(($nf - 1)));
    do
        pushd $input_folder > /dev/null
        _file=$(get_json_object "$json_string" "files.$f")
        # The name of the folder is the 'id' value
        id=$(get_json_object "$_file" "*id")
        tf_config=$(get_json_object "$_file" "*tf-configuration" "N/A")
        folder=$id
        printf "Getting files from configuration '$tf_config', build '$folder' into '$input_folder'...\n"
        mkdir -p $folder
        bundles=$(get_json_object "$_file" "bundles" None)
        if [ "$bundles" != "None" ];then
            nb=$(get_json_object "$_file" "-bundles")
            for n in $(seq 0 $(($nb - 1)));
            do
                get_file "$(get_json_object "$bundles" "$n")" $folder
            done
        fi
        # Download/copy files and save their locations only if all are found
        get_file "$(get_json_object "$_file" "config")" $folder config_json_file && \
        get_file "$(get_json_object "$_file" "info")" $folder info_file && \
        info_files+=($info_file) && json_files+=($config_json_file) && \
        list_of_merged_builds+=($id)
        popd > /dev/null
    done
}

#################################################################
# Merge the json and info files and generate the branch coverage
# report
# Globals:
#   merged_coverage_file: Location and name for merged coverage info
#   merged_json_file: Location and name for merged json scm sources
#   LOCAL_WORKSPACE: Local workspace folder with the source code files
#   generate_local: Flag to generate local lcov reports
#   info_files: Array of locations and names of info files
#   json_files: Array of locations and names of json files
# Arguments:
#   None
# Outputs:
#   Merged coverage file
#   Merged json configuration file
################################################################
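# For reference, with two builds the resulting call looks roughly like
# (paths are illustrative):
#   python3 merge.py -a build1/build1.info -a build2/build2.info \
#       -j build1/build1.json -j build2/build2.json \
#       -o merged_coverage.info -m merged_scm.json \
#       --local-workspace /path/to/workspace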
merge_files() {
    # Merge the info and json configuration files
    printf "\tFound report files from %d code coverage folders to be merged...\n" ${#info_files[@]}
    local lc=" "
    if [ -n "$LOCAL_WORKSPACE" ];then
        # Translation from the info workspaces into the local workspace
        lc=" --local-workspace $LOCAL_WORKSPACE"
    fi
    if [ "$generate_local" = true ];then
        # Generate local reports
        lc="${lc} -k"
    fi
    # merge.py must reside in the same folder (DIR) as this script
    python3 ${DIR}/merge.py \
        ${info_files[@]/#/-a } \
        ${json_files[@]/#/-j } \
        -o $merged_coverage_file \
        -m $merged_json_file \
        $lc

}


#################################################################
# Generate local lcov reports
# Globals:
#   info_files: Array of locations and names of info files
# Arguments:
#   None
# Outputs:
#   Lcov report files for each info file
################################################################
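# For each downloaded <id>/<id>.info this runs genhtml on the matching
# <id>/<id>_local.info (expected to be produced by merge.py when '-k' is
# passed), e.g. (illustrative names):
#   genhtml --branch-coverage build1/build1_local.info --output-directory build1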
generate_local_reports() {
    printf "\tCreating local code coverage reports...\n"
    for i in ${!info_files[@]};
    do
        local info_file=${info_files[$i]}
        local parentdir=$(dirname "$info_file")
        local t_info_file="${info_file/.info/_local.info}"
        genhtml --branch-coverage $t_info_file \
            --output-directory $parentdir
    done
}


#################################################################
# Print script usage
# Arguments:
#   None
# Outputs:
#   Prints the script usage to stdout
################################################################
usage() {
    help_message=$(cat <<EOF

# The script that merges the info files (code coverage) and json SCM sources
# (intermediate layer) needs a minimal JSON configuration file with the
# following properties:
# files: array of objects that describe the type of file/project to be
#        merged.
#   id: Unique identifier (project) associated to the info and
#       intermediate json files
#   config: Intermediate json file
#       type: Type of storage for the file. (http or file)
#       origin: Location (url or folder) of the file
#   info: Info file
#       type: Type of storage for the file. (http or file)
#       origin: Location (url or folder) of the file
#   <metadata>: Metadata that can be used to print more information related
#               to each project [Optional]
# Example:
{ "files" : [
        {
            "id": "<project 1>",
            "config":
                {
                    "type": "http",
                    "origin": "<URL of json file for project 1>"
                },
            "info":
                {
                    "type": "http",
                    "origin": "<URL of info file for project 1>"
                },
            "metadata": ....
        },
        {
            "id": "<project 2>",
            "config":
                {
                    "type": "http",
                    "origin": "<URL of json file for project 2>"
                },
            "info":
                {
                    "type": "http",
                    "origin": "<URL of info file for project 2>"
                },
            "metadata": ....
        },
        .
        .
        .
    ]
}
EOF
)
    clear || true
    echo "Usage:"
    echo "merge.sh -h Display this help message."
    echo "-j <JSON filename> JSON configuration file (info and intermediate json filenames to be merged)."
    echo "[-l <Report path>] Coverage reports directory. Defaults to ./Coverage"
    echo "[-w <Workspace path>] Workspace directory for source code files."
    echo "[-o <Info filename>] Merged info file. Defaults to ./merged_coverage.info"
    echo "[-m <JSON filename>] JSON merged SCM sources. Defaults to ./merged_scm.json"
    echo "[-x <Variables filename>] Output file with variables for the calling script. Defaults to ./variables.sh"
    echo "[-c] Flag to download/copy the source files from the JSON merged SCM into the workspace directory."
    echo "[-g] Flag to generate local reports for each info/json instance."
    echo "[-i] Ignore genhtml errors."
    echo "[-d] Enable debug mode for the script."
    echo "$help_message"
}


[ ${-/x} != ${-} ] && TRACING=true || TRACING=false
LOCAL_WORKSPACE=""
CLONE_SOURCES=false
merge_configuration_file=""
generate_local=false
# Folder to put the reports in
LCOV_FOLDER="./Coverage"
# File name for the merged coverage info
merged_coverage_file="./merged_coverage.info"
merged_json_file="./merged_scm.json"
# File name to pass variables to the calling script
variables_file="./variables.sh"
info_files=() # Array of info files
json_files=() # Array of configuration json files
list_of_merged_builds=()
GENHTML_ARGS=""
DEBUG_MODE=false
genhtml_version=$(genhtml --version | rev | cut -d ' ' -f1 | rev | xargs)
gen_major=$(echo "$genhtml_version" | cut -d '.' -f1)
gen_minor=$(echo "$genhtml_version" | rev | cut -d '.' -f1 | rev)
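# Example (illustrative): a "genhtml: LCOV version 1.14" banner yields
# genhtml_version=1.14, gen_major=1 and gen_minor=14; only gen_major is
# used below to pick the --ignore-errors category.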
while getopts ":hj:o:l:w:x:idcm:g" opt; do
    case ${opt} in
        h )
            usage
            exit 0
            ;;
        w )
            LOCAL_WORKSPACE=$(cd $OPTARG &>/dev/null || true; pwd)
            ;;
        c )
            CLONE_SOURCES=true
            ;;
        d )
            DEBUG_MODE=true
            ;;
        i )
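            # genhtml 2.x reports source mismatches under the 'inconsistent'
            # category while 1.x uses 'source'; pick the one matching the
            # installed major version (assumption based on lcov behaviour).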
            GENHTML_ARGS="${GENHTML_ARGS} --ignore-errors $([ $gen_major = '2' ] && echo inconsistent || echo source)"
            ;;
        j )
            merge_configuration_file=$OPTARG
            ;;
        l )
            LCOV_FOLDER=$OPTARG
            ;;
        o )
            merged_coverage_file=$OPTARG
            ;;
        m )
            merged_json_file=$OPTARG
            ;;
        g )
            generate_local=true
            ;;
        x )
            variables_file=$OPTARG
            ;;
        \? )
            echo "Invalid option: $OPTARG" 1>&2
            usage
            exit 1
            ;;
        : )
            echo "Invalid option: $OPTARG requires an argument" 1>&2
            usage
            exit 1
            ;;
    esac
done
[ $DEBUG_MODE = true ] && set -x || set +x
shift $((OPTIND -1))
if [ -z "$merge_configuration_file" ]; then
    echo "Merge configuration file required."
    usage
    exit 1
fi
if [ -z "$LOCAL_WORKSPACE" ] && [ $CLONE_SOURCES = true ]; then
    echo "A local workspace directory is required to clone/copy sources!"
    exit 1
fi
# Get the folder of this script, where the other qa-tools scripts
# (merge.py, clone_sources.py) must reside
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
LCOV_FOLDER="$(get_abs_path $LCOV_FOLDER)"
merged_coverage_file="$(get_abs_path $merged_coverage_file)"
merged_json_file="$(get_abs_path $merged_json_file)"
param_cloned=""
set +x
get_info_json_files # Tracing is always disabled for this step
[ $DEBUG_MODE = true ] && set -x || set +x
merge_files
if [ $CLONE_SOURCES = true ];then
    clone_repos $merged_json_file
fi

# Generate the merged coverage report
merged_status=true
genhtml $GENHTML_ARGS --branch-coverage $merged_coverage_file \
    --output-directory $LCOV_FOLDER
if [ $? -ne 0 ];then
    merged_status=false
    echo "ERROR: Could not generate the merged coverage report"
fi

if [ "$generate_local" = true ];then
    generate_local_reports
fi