Add merge options (keep translated info files, LOCAL path mapping) and refactor merge.py
diff --git a/coverage-tool/coverage-reporting/merge.py b/coverage-tool/coverage-reporting/merge.py
index 3ab46f7..c4a80ad 100755
--- a/coverage-tool/coverage-reporting/merge.py
+++ b/coverage-tool/coverage-reporting/merge.py
@@ -30,6 +30,25 @@
 python3 merge.py -a coverage_1.info -a coverage_2.info -o coverage_merge.info \
 -j input_file1.json -j input_file2.json -m merge_file.json
 
+The metadata json file must contain the information for every repo that is
+used to build the binaries that were tested (and where coverage is desired).
+At a minimum, this file must look like this:
+{
+  "configuration" : {
+        "sources": [
+            {
+                "COMMIT": "XXXXXXX", # [optional]
+                "LOCATION": "YYY", # Folder where the 'URL' repo is cloned in the
+                test workspace.
+                "LOCAL": "ZZZZ", # Local folder for the repo cloned in
+                the local workspace (optional, if not defined 'LOCATION' is assumed).
+                "REFSPEC": "XXXXXX", # [optional]
+                "URL": "XXXXXXXX",
+                "type": "git"
+            }
+        ]
+      }
+}
 It is possible to merge any number of files at once.
 If metadata json files are defined then they must pair with their
 corresponding info file, i.e. have the same name.
@@ -39,9 +58,10 @@
 By default, the output file must be a new file.
 To overwrite an existing file, use the "--force" option.
 
-Note: the user is expected to merge .info files referring to the same project.
-If merging .info files from different projects, LCOV can be exploited directly
-using a command such as "lcov -rc lcov_branch_coverage=1 -a coverage_1.info \
+Note: the user is expected to merge .info files referring to the same
+project, i.e. same sources. If merging .info files from different projects,
+LCOV can be exploited directly using a command such as "lcov -rc
+lcov_branch_coverage=1 -a coverage_1.info \
 -a coverage_2.info -o coverage_merge.info."
 """, formatter_class=RawTextHelpFormatter)
 requiredNamed = parser.add_argument_group('required named arguments')
@@ -59,6 +79,9 @@
                     help="force overwriting of output file.")
 parser.add_argument("--local-workspace", dest='local_workspace',
                     help='Local workspace where source files reside.')
+parser.add_argument("-k", action='store_true', dest='keep_trans',
+                    help='Keep translated info files.')
+
 
 options = parser.parse_args(sys.argv[1:])
 # At least two .info files are expected
@@ -68,7 +91,7 @@
 # The same number of info and json files expected
 if options.json_file:
     if len(options.json_file) != len(options.add_file):
-        print('Umatched number of info and json files.\n')
+        print('Unmatched number of info and json files.\n')
         sys.exit(1)
 
 file_groups = []
@@ -93,7 +116,7 @@
         json_name = [i for i in options.json_file
                      if os.path.basename(i).split(".")[0] == info_name]
         if not json_name:
-            print("Umatched json file name for '{}'".format(file_name))
+            print("Unmatched json file name for '{}'".format(file_name))
             sys.exit(1)
         json_name = json_name.pop()
         if not json_name[-5:] == '.json':
@@ -110,7 +133,8 @@
             json_data = json.load(json_file)
         locations = []
         for source in json_data["configuration"]["sources"]:
-            locations.append(source["LOCATION"])
+            location = source["LOCATION"]
+            locations.append((location, source.get("LOCAL", location)))
         file_group["locations"] = locations
     file_groups.append(file_group)
 
@@ -129,22 +153,26 @@
               info_files_to_merge[i]))
         info_lines = info_file.readlines()
         info_file.close()
-        temp_file = 'temporary_' + str(i) + '.info'
+        temp_file = info_file.name.replace('.info', '_local.info')
+        if options.keep_trans:
+            print("Keeping translated info file {}...".format(temp_file))
         parts = None
         with open(temp_file, "w+") as f:
             for line in info_lines:
                 if "SF" in line:
                     for location in file_groups[i]["locations"]:
-                        if location in line:
-                            parts = line[3:].partition(location)
-                            line = line.replace(parts[0], options.local_workspace + "/")
+                        if location[0] in line:
+                            parts = line[3:].partition(location[0] + "/")
+                            local_name = location[1]
+                            line = line[:3] + os.path.join(
+                                options.local_workspace, location[1], parts[2])
                             break
                 f.write(line)
         info_files_to_merge[i] = temp_file  # Replace info file to be merged
         i += 1
 
 # Merge json files
-if len(options.json_file):
+if options.json_file and len(options.json_file):
     json_merged_list = []
     json_merged = {}
     j = 0
@@ -158,7 +186,7 @@
         j += 1
     json_merged = {'configuration': {'sources': json_merged_list}}
     with open(options.output_json, 'w') as f:
-        json.dump(json_merged, f)
+        json.dump(json_merged, f, indent=4)
 
 
 # Exploit LCOV merging capabilities
@@ -175,6 +203,6 @@
 subprocess.call(command)
 
 # Delete the temporary files
-if options.local_workspace is not None:
+if options.local_workspace is not None and not options.keep_trans:
     for f in info_files_to_merge:
         os.remove(f)