Infineon: Add PSoC63 and PSoC61 platforms, update HW crypto acceleration, allow MCUBootApp build for the CM4 core

Release notes:
1. Added PSoC63 and PSoC61 platforms
2. Added the possibility to build MCUBootApp for the CM4 core and BlinkyApp for the CM0p core (core selection via the flash map is sketched after this list)
3. Updated the cy-mbedtls-acceleration package to support mbedtls-3.0
4. Changed CY_SMIF_SYSCLK_HFCLK_DIVIDER to increase the SMIF clock source frequency
5. Improved memory map configuration in JSON files
6. Added optional performance measurement macros
7. Improved usage of FIH types in security-critical code branches
8. Updated documentation
9. Improved MISRA and CERT-C compliance
10. Switched to the latest mtb-pdl-cat1 3.0.0
11. Fixed minor bugs
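
Core selection sketch (items 2 and 5): the flashmap.py changes in the diff below add per-core metadata to the platform dictionaries and accept an optional "core" attribute for the first application in the flash map JSON, which is normalized and reported back to make as APP_CORE. The following is a minimal Python sketch of that lookup, not the script itself; the dictionaries mirror cm0pCore/cm4Core/allCores_PSOC_06x from the diff, and the application fragment is a hypothetical example that follows the script's get_str convention of a nested "value" member.

# Sketch of the core-name resolution added to flashmap.py (see the diff below).
cm0p_core = {'cortex-m0plus': 'CM0P', 'cm0p': 'CM0P', 'm0p': 'CM0P'}
cm4_core = {'cortex-m4': 'CM4', 'cm4': 'CM4', 'm4': 'CM4'}
all_cores_psoc_06x = {**cm0p_core, **cm4_core}

# First application entry as it might appear in the flash map JSON;
# string attributes carry their data in a nested "value" member.
application_1 = {'core': {'value': 'Cortex-M4'}}

requested = application_1['core']['value']
canonical = all_cores_psoc_06x.get(requested.lower())  # case-insensitive lookup
if canonical is None:
    raise SystemExit('Unknown "core"')
print('APP_CORE :=', canonical)  # -> APP_CORE := CM4

In addition, per the diff, the new -m/--core option makes the script print CORE := for the platform's boot core, and -c/--policy accepts a policy file in JSON format.
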
diff --git a/boot/cypress/scripts/cppcheck-htmlreport.py b/boot/cypress/scripts/cppcheck-htmlreport.py
deleted file mode 100644
index afc7380..0000000
--- a/boot/cypress/scripts/cppcheck-htmlreport.py
+++ /dev/null
@@ -1,696 +0,0 @@
-#! /usr/bin/python3
-
-from __future__ import unicode_literals
-
-import io
-import sys
-import optparse
-import os
-import operator
-
-from collections import Counter
-from pygments import highlight
-from pygments.lexers import guess_lexer_for_filename
-from pygments.formatters import HtmlFormatter
-from xml.sax import parse as xml_parse
-from xml.sax import SAXParseException as XmlParseException
-from xml.sax.handler import ContentHandler as XmlContentHandler
-from xml.sax.saxutils import escape
-"""
-Turns a cppcheck xml file into a browsable html report along
-with syntax highlighted source code.
-"""
-
-STYLE_FILE = """
-body {
-    font: 13px Arial, Verdana, Sans-Serif;
-    margin: 0;
-    width: auto;
-}
-
-h1 {
-    margin: 10px;
-}
-
-#footer > p {
-    margin: 4px;
-}
-
-.error {
-    background-color: #ffb7b7;
-}
-
-.error2 {
-    background-color: #faa;
-    border: 1px dotted black;
-    display: inline-block;
-    margin-left: 4px;
-}
-
-.inconclusive {
-    background-color: #B6B6B4;
-}
-
-.inconclusive2 {
-    background-color: #B6B6B4;
-    border: 1px dotted black;
-    display: inline-block;
-    margin-left: 4px;
-}
-
-div.verbose {
-    display: inline-block;
-    vertical-align: top;
-    cursor: help;
-}
-
-div.verbose div.content {
-    display: none;
-    position: absolute;
-    padding: 10px;
-    margin: 4px;
-    max-width: 40%;
-    white-space: pre-wrap;
-    border: 1px solid black;
-    background-color: #FFFFCC;
-    cursor: auto;
-}
-
-.highlight .hll {
-    padding: 1px;
-}
-
-#header {
-    border-bottom: thin solid #aaa;
-}
-
-#menu {
-    float: left;
-    margin-top: 5px;
-    text-align: left;
-    width: 150px;
-    height: 75%;
-    position: fixed;
-    overflow: auto;
-    z-index: 1;
-}
-
-#menu_index {
-    float: left;
-    margin-top: 5px;
-    padding-left: 5px;
-    text-align: left;
-    width: 200px;
-    height: 75%;
-    position: fixed;
-    overflow: auto;
-    z-index: 1;
-}
-
-#menu > a {
-    display: block;
-    margin-left: 10px;
-    font: 12px;
-    z-index: 1;
-}
-
-#filename  {
-    margin-left: 10px;
-    font: 12px;
-    z-index: 1;
-}
-
-.highlighttable {
-    background-color:white;
-    z-index: 10;
-    position: relative;
-    margin: -10 px;
-}
-
-#content {
-    background-color: white;
-    -webkit-box-sizing: content-box;
-    -moz-box-sizing: content-box;
-    box-sizing: content-box;
-    float: left;
-    margin: 5px;
-    margin-left: 10px;
-    padding: 0 10px 10px 10px;
-    width: 80%;
-    padding-left: 150px;
-}
-
-#content_index {
-    background-color: white;
-    -webkit-box-sizing: content-box;
-    -moz-box-sizing: content-box;
-    box-sizing: content-box;
-    float: left;
-    margin: 5px;
-    margin-left: 10px;
-    padding: 0 10px 10px 10px;
-    width: 80%;
-    padding-left: 200px;
-}
-
-.linenos {
-    border-right: thin solid #aaa;
-    color: lightgray;
-    padding-right: 6px;
-}
-
-#footer {
-    border-top: thin solid #aaa;
-    clear: both;
-    font-size: 90%;
-    margin-top: 5px;
-}
-
-#footer ul {
-    list-style-type: none;
-    padding-left: 0;
-}
-"""
-
-HTML_HEAD = """
-<!DOCTYPE html>
-<html lang="en">
-  <head>
-    <meta charset="utf-8">
-    <title>Cppcheck - HTML report - %s</title>
-    <link rel="stylesheet" href="style.css">
-    <style>
-%s
-    </style>
-    <script language="javascript">
-      function getStyle(el,styleProp) {
-        if (el.currentStyle)
-          var y = el.currentStyle[styleProp];
-        else if (window.getComputedStyle)
-          var y = document.defaultView.getComputedStyle(el,null).getPropertyValue(styleProp);
-        return y;
-      }
-      function toggle() {
-        var el = this.expandable_content;
-        var mark = this.expandable_marker;
-        if (el.style.display == "block") {
-          el.style.display = "none";
-          mark.innerHTML = "[+]";
-        } else {
-          el.style.display = "block";
-          mark.innerHTML = "[-]";
-        }
-      }
-      function init_expandables() {
-        var elts = document.getElementsByClassName("expandable");
-        for (var i = 0; i < elts.length; i++) {
-          var el = elts[i];
-          var clickable = el.getElementsByTagName("span")[0];
-          var marker = clickable.getElementsByClassName("marker")[0];
-          var content = el.getElementsByClassName("content")[0];
-          var width = clickable.clientWidth - parseInt(getStyle(content, "padding-left")) - parseInt(getStyle(content, "padding-right"));
-          content.style.width = width + "px";
-          clickable.expandable_content = content;
-          clickable.expandable_marker = marker;
-          clickable.onclick = toggle;
-        }
-      }
-      function set_class_display(c, st) {
-        var elements = document.querySelectorAll('.' + c),
-            len = elements.length;
-        for (i = 0; i < len; i++) {
-            elements[i].style.display = st;
-        }
-      }
-      function toggle_class_visibility(id) {
-        var box = document.getElementById(id);
-        set_class_display(id, box.checked ? '' : 'none');
-      }
-    </script>
-  </head>
-  <body onload="init_expandables()">
-      <div id="header">
-        <h1>Cppcheck report - %s: %s </h1>
-      </div>
-      <div id="menu" dir="rtl">
-       <p id="filename"><a href="index.html">Defects:</a> %s</p>
-"""
-
-HTML_HEAD_END = """
-      </div>
-      <div id="content">
-"""
-
-HTML_FOOTER = """
-      </div>
-      <div id="footer">
-        <p>
-         Cppcheck %s - a tool for static C/C++ code analysis</br>
-         </br>
-         Internet: <a href="http://cppcheck.net">http://cppcheck.net</a></br>
-         IRC: <a href="irc://irc.freenode.net/cppcheck">irc://irc.freenode.net/cppcheck</a></br>
-        <p>
-      </div>
-  </body>
-</html>
-"""
-
-HTML_ERROR = "<span class='error2'>&lt;--- %s</span>\n"
-HTML_INCONCLUSIVE = "<span class='inconclusive2'>&lt;--- %s</span>\n"
-
-HTML_EXPANDABLE_ERROR = "<div class='verbose expandable'><span class='error2'>&lt;--- %s <span class='marker'>[+]</span></span><div class='content'>%s</div></div>\n"""
-HTML_EXPANDABLE_INCONCLUSIVE = "<div class='verbose expandable'><span class='inconclusive2'>&lt;--- %s <span class='marker'>[+]</span></span><div class='content'>%s</div></div>\n"""
-
-# escape() and unescape() takes care of &, < and >.
-html_escape_table = {
-    '"': "&quot;",
-    "'": "&apos;"
-}
-html_unescape_table = {v: k for k, v in html_escape_table.items()}
-
-
-def html_escape(text):
-    return escape(text, html_escape_table)
-
-
-class AnnotateCodeFormatter(HtmlFormatter):
-    errors = []
-
-    def wrap(self, source, outfile):
-        line_no = 1
-        for i, t in HtmlFormatter.wrap(self, source, outfile):
-            # If this is a source code line we want to add a span tag at the
-            # end.
-            if i == 1:
-                for error in self.errors:
-                    if error['line'] == line_no:
-                        try:
-                            if error['inconclusive'] == 'true':
-                                # only print verbose msg if it really differs
-                                # from actual message
-                                if error.get('verbose') and (error['verbose'] != error['msg']):
-                                    index = t.rfind('\n')
-                                    t = t[:index] + HTML_EXPANDABLE_INCONCLUSIVE % (error['msg'], html_escape(error['verbose'].replace("\\012", '\n'))) + t[index + 1:]
-                                else:
-                                    t = t.replace('\n', HTML_INCONCLUSIVE % error['msg'])
-                        except KeyError:
-                            if error.get('verbose') and (error['verbose'] != error['msg']):
-                                index = t.rfind('\n')
-                                t = t[:index] + HTML_EXPANDABLE_ERROR % (error['msg'], html_escape(error['verbose'].replace("\\012", '\n'))) + t[index + 1:]
-                            else:
-                                t = t.replace('\n', HTML_ERROR % error['msg'])
-
-                line_no = line_no + 1
-            yield i, t
-
-
-class CppCheckHandler(XmlContentHandler):
-
-    """Parses the cppcheck xml file and produces a list of all its errors."""
-
-    def __init__(self):
-        XmlContentHandler.__init__(self)
-        self.errors = []
-        self.version = '1'
-        self.versionCppcheck = ''
-
-    def startElement(self, name, attributes):
-        if name == 'results':
-            self.version = attributes.get('version', self.version)
-
-        if self.version == '1':
-            self.handleVersion1(name, attributes)
-        else:
-            self.handleVersion2(name, attributes)
-
-    def handleVersion1(self, name, attributes):
-        if name != 'error':
-            return
-
-        self.errors.append({
-            'file': attributes.get('file', ''),
-            'line': int(attributes.get('line', 0)),
-            'locations': [{
-                'file': attributes.get('file', ''),
-                'line': int(attributes.get('line', 0)),
-            }],
-            'id': attributes['id'],
-            'severity': attributes['severity'],
-            'msg': attributes['msg']
-        })
-
-    def handleVersion2(self, name, attributes):
-        if name == 'cppcheck':
-            self.versionCppcheck = attributes['version']
-        if name == 'error':
-            error = {
-                'locations': [],
-                'file': '',
-                'line': 0,
-                'id': attributes['id'],
-                'severity': attributes['severity'],
-                'msg': attributes['msg'],
-                'verbose': attributes.get('verbose')
-            }
-
-            if 'inconclusive' in attributes:
-                error['inconclusive'] = attributes['inconclusive']
-            if 'cwe' in attributes:
-                error['cwe'] = attributes['cwe']
-
-            self.errors.append(error)
-        elif name == 'location':
-            assert self.errors
-            error = self.errors[-1]
-            locations = error['locations']
-            file = attributes['file']
-            line = int(attributes['line'])
-            if not locations:
-                error['file'] = file
-                error['line'] = line
-            locations.append({
-                'file': file,
-                'line': line,
-                'info': attributes.get('info')
-            })
-
-if __name__ == '__main__':
-    # Configure all the options this little utility is using.
-    parser = optparse.OptionParser()
-    parser.add_option('--title', dest='title',
-                      help='The title of the project.',
-                      default='[project name]')
-    parser.add_option('--file', dest='file',
-                      help='The cppcheck xml output file to read defects '
-                           'from. Default is reading from stdin.')
-    parser.add_option('--report-dir', dest='report_dir',
-                      help='The directory where the HTML report content is '
-                           'written.')
-    parser.add_option('--source-dir', dest='source_dir',
-                      help='Base directory where source code files can be '
-                           'found.')
-    parser.add_option('--source-encoding', dest='source_encoding',
-                      help='Encoding of source code.', default='utf-8')
-
-    # Parse options and make sure that we have an output directory set.
-    options, args = parser.parse_args()
-
-    try:
-        sys.argv[1]
-    except IndexError:  # no arguments give, print --help
-        parser.print_help()
-        quit()
-
-    if not options.report_dir:
-        parser.error('No report directory set.')
-
-    # Get the directory where source code files are located.
-    source_dir = os.getcwd()
-    if options.source_dir:
-        source_dir = options.source_dir
-
-    # Get the stream that we read cppcheck errors from.
-    input_file = sys.stdin
-    if options.file:
-        if not os.path.exists(options.file):
-            parser.error('cppcheck xml file: %s not found.' % options.file)
-        input_file = io.open(options.file, 'r')
-    else:
-        parser.error('No cppcheck xml file specified. (--file=)')
-
-    # Parse the xml file and produce a simple list of errors.
-    print('Parsing xml report.')
-    try:
-        contentHandler = CppCheckHandler()
-        xml_parse(input_file, contentHandler)
-    except XmlParseException as msg:
-        print('Failed to parse cppcheck xml file: %s' % msg)
-        sys.exit(1)
-
-    # We have a list of errors. But now we want to group them on
-    # each source code file. Lets create a files dictionary that
-    # will contain a list of all the errors in that file. For each
-    # file we will also generate a HTML filename to use.
-    files = {}
-    file_no = 0
-    for error in contentHandler.errors:
-        filename = error['file']
-        if filename not in files.keys():
-            files[filename] = {
-                'errors': [], 'htmlfile': str(file_no) + '.html'}
-            file_no = file_no + 1
-        files[filename]['errors'].append(error)
-
-    # Make sure that the report directory is created if it doesn't exist.
-    print('Creating %s directory' % options.report_dir)
-    if not os.path.exists(options.report_dir):
-        os.mkdir(options.report_dir)
-
-    # Generate a HTML file with syntax highlighted source code for each
-    # file that contains one or more errors.
-    print('Processing errors')
-
-    decode_errors = []
-    for filename, data in sorted(files.items()):
-        htmlfile = data['htmlfile']
-        errors = []
-
-        for error in data['errors']:
-            for location in error['locations']:
-                if filename == location['file']:
-                    newError = dict(error)
-
-                    del newError['locations']
-                    newError['line'] = location['line']
-                    if location.get('info'):
-                        newError['msg'] = location['info']
-                        newError['severity'] = 'information'
-                        del newError['verbose']
-
-                    errors.append(newError)
-
-        lines = []
-        for error in errors:
-            lines.append(error['line'])
-
-        if filename == '':
-            continue
-
-        source_filename = os.path.join(source_dir, filename)
-        try:
-            with io.open(source_filename, 'r', encoding=options.source_encoding) as input_file:
-                content = input_file.read()
-        except IOError:
-            if (error['id'] == 'unmatchedSuppression'):
-                continue  # file not found, bail out
-            else:
-                sys.stderr.write("ERROR: Source file '%s' not found.\n" %
-                                 source_filename)
-            continue
-        except UnicodeDecodeError:
-            sys.stderr.write("WARNING: Unicode decode error in '%s'.\n" %
-                             source_filename)
-            decode_errors.append(source_filename[2:])  # "[2:]" gets rid of "./" at beginning
-            continue
-
-        htmlFormatter = AnnotateCodeFormatter(linenos=True,
-                                              style='colorful',
-                                              hl_lines=lines,
-                                              lineanchors='line',
-                                              encoding=options.source_encoding)
-        htmlFormatter.errors = errors
-
-        with io.open(os.path.join(options.report_dir, htmlfile), 'w', encoding='utf-8') as output_file:
-            output_file.write(HTML_HEAD %
-                              (options.title,
-                               htmlFormatter.get_style_defs('.highlight'),
-                               options.title,
-                               filename,
-                               filename.split('/')[-1]))
-
-            for error in sorted(errors, key=lambda k: k['line']):
-                output_file.write("<a href='%s#line-%d'> %s %s</a>" % (data['htmlfile'], error['line'], error['id'],   error['line']))
-
-            output_file.write(HTML_HEAD_END)
-            try:
-                lexer = guess_lexer_for_filename(source_filename, '')
-            except:
-                sys.stderr.write("ERROR: Couldn't determine lexer for the file' " + source_filename + " '. Won't be able to syntax highlight this file.")
-                output_file.write("\n <tr><td colspan='4'> Could not generated content because pygments failed to retrieve the determine code type.</td></tr>")
-                output_file.write("\n <tr><td colspan='4'> Sorry about this.</td></tr>")
-                continue
-
-            if options.source_encoding:
-                lexer.encoding = options.source_encoding
-
-            output_file.write(
-                highlight(content, lexer, htmlFormatter).decode(
-                    options.source_encoding))
-
-            output_file.write(HTML_FOOTER % contentHandler.versionCppcheck)
-
-        print('  ' + filename)
-
-    # Generate a master index.html file that will contain a list of
-    # all the errors created.
-    print('Creating index.html')
-
-    with io.open(os.path.join(options.report_dir, 'index.html'),
-                 'w') as output_file:
-
-        stats_count = 0
-        stats = []
-        for filename, data in sorted(files.items()):
-            for error in data['errors']:
-                stats.append(error['id'])  # get the stats
-                stats_count += 1
-
-        counter = Counter(stats)
-
-        stat_html = []
-        # the following lines sort the stat primary by value (occurrences),
-        # but if two IDs occur equally often, then we sort them alphabetically by warning ID
-        try:
-            cnt_max = counter.most_common()[0][1]
-        except IndexError:
-            cnt_max = 0
-
-        try:
-            cnt_min = counter.most_common()[-1][1]
-        except IndexError:
-            cnt_min = 0
-
-        stat_fmt = "            <tr><td><input type='checkbox' onclick='toggle_class_visibility(this.id)' id='{}' name='{}' checked></td><td>{}</td><td>{}</td></tr>"
-        for occurrences in reversed(range(cnt_min, cnt_max + 1)):
-            for _id in [k for k, v in sorted(counter.items()) if v == occurrences]:
-                stat_html.append(stat_fmt.format(_id, _id, dict(counter.most_common())[_id], _id))
-
-        output_file.write(HTML_HEAD.replace('id="menu" dir="rtl"', 'id="menu_index"', 1).replace("Defects:", "Defect summary;", 1) % (options.title, '', options.title, '', ''))
-        output_file.write('       <table>')
-        output_file.write('           <tr><th>Show</th><th>#</th><th>Defect ID</th></tr>')
-        output_file.write(''.join(stat_html))
-        output_file.write('           <tr><td></td><td>' + str(stats_count) + '</td><td>total</td></tr>')
-        output_file.write('       </table>')
-        output_file.write('       <a href="stats.html">Statistics</a></p>')
-        output_file.write(HTML_HEAD_END.replace("content", "content_index", 1))
-        output_file.write('       <table>\n')
-
-        output_file.write(
-            '       <tr><th>Line</th><th>Id</th><th>CWE</th><th>Severity</th><th>Message</th></tr>')
-        for filename, data in sorted(files.items()):
-            if filename in decode_errors:  # don't print a link but a note
-                output_file.write("\n       <tr><td colspan='4'>%s</td></tr>" % (filename))
-                output_file.write("\n       <tr><td colspan='4'> Could not generated due to UnicodeDecodeError</td></tr>")
-            else:
-                if filename.endswith('*'):  # assume unmatched suppression
-                    output_file.write(
-                        "\n       <tr><td colspan='4'>%s</td></tr>" %
-                        (filename))
-                else:
-                    output_file.write(
-                        "\n       <tr><td colspan='4'><a href='%s'>%s</a></td></tr>" %
-                        (data['htmlfile'], filename))
-
-                for error in sorted(data['errors'], key=lambda k: k['line']):
-                    error_class = ''
-                    try:
-                        if error['inconclusive'] == 'true':
-                            error_class = 'class="inconclusive"'
-                            error['severity'] += ", inconcl."
-                    except KeyError:
-                        pass
-
-                    try:
-                        if error['cwe']:
-                            cwe_url = "<a href='https://cwe.mitre.org/data/definitions/" + error['cwe'] + ".html'>" + error['cwe'] + "</a>"
-                    except KeyError:
-                        cwe_url = ""
-
-                    if error['severity'] == 'error':
-                        error_class = 'class="error"'
-                    if error['id'] == 'missingInclude':
-                        output_file.write(
-                            '\n         <tr class="%s"><td></td><td>%s</td><td></td><td>%s</td><td>%s</td></tr>' %
-                            (error['id'], error['id'], error['severity'], error['msg']))
-                    elif (error['id'] == 'unmatchedSuppression') and filename.endswith('*'):
-                        output_file.write(
-                            '\n         <tr class="%s"><td></td><td>%s</td><td></td><td>%s</td><td %s>%s</td></tr>' %
-                            (error['id'], error['id'], error['severity'], error_class,
-                             error['msg']))
-                    else:
-                        output_file.write(
-                            '\n       <tr class="%s"><td><a href="%s#line-%d">%d</a></td><td>%s</td><td>%s</td><td>%s</td><td %s>%s</td></tr>' %
-                            (error['id'], data['htmlfile'], error['line'], error['line'],
-                             error['id'], cwe_url, error['severity'], error_class,
-                             error['msg']))
-
-        output_file.write('\n       </table>')
-        output_file.write(HTML_FOOTER % contentHandler.versionCppcheck)
-
-    if (decode_errors):
-        sys.stderr.write("\nGenerating html failed for the following files: " + ' '.join(decode_errors))
-        sys.stderr.write("\nConsider changing source-encoding (for example: \"htmlreport ... --source-encoding=\"iso8859-1\"\"\n")
-
-    print('Creating style.css file')
-    with io.open(os.path.join(options.report_dir, 'style.css'),
-                 'w') as css_file:
-        css_file.write(STYLE_FILE)
-
-    print("Creating stats.html (statistics)\n")
-    stats_countlist = {}
-
-    for filename, data in sorted(files.items()):
-        if (filename == ''):
-            continue
-        stats_tmplist = []
-        for error in sorted(data['errors'], key=lambda k: k['line']):
-            stats_tmplist.append(error['severity'])
-
-        stats_countlist[filename] = dict(Counter(stats_tmplist))
-
-    # get top ten for each severity
-    SEVERITIES = "error", "warning", "portability", "performance", "style", "unusedFunction", "information", "missingInclude", "internal"
-
-    with io.open(os.path.join(options.report_dir, 'stats.html'), 'w') as stats_file:
-
-        stats_file.write(HTML_HEAD.replace('id="menu" dir="rtl"', 'id="menu_index"', 1).replace("Defects:", "Back to summary", 1) % (options.title, '', options.title, 'Statistics', ''))
-        stats_file.write(HTML_HEAD_END.replace("content", "content_index", 1))
-
-        for sev in SEVERITIES:
-            _sum = 0
-            stats_templist = {}
-
-            # if the we have an style warning but we are checking for
-            # portability, we have to skip it to prevent KeyError
-            try:
-                for filename in stats_countlist:
-                    try:  # also bail out if we have a file with no sev-results
-                        _sum += stats_countlist[filename][sev]
-                        stats_templist[filename] = (int)(stats_countlist[filename][sev])  # file : amount,
-                    except KeyError:
-                        continue
-                # don't print "0 style" etc, if no style warnings were found
-                if (_sum == 0):
-                    break
-            except KeyError:
-                continue
-            stats_file.write("<p>Top 10 files for " + sev + " severity, total findings: " + str(_sum) + "</br>\n")
-
-            # sort, so that the file with the most severities per type is first
-            stats_list_sorted = sorted(stats_templist.items(), key=operator.itemgetter(1, 0), reverse=True)
-            it = 0
-            LENGTH = 0
-
-            for i in stats_list_sorted:  # printing loop
-                # for aesthetics: if it's the first iteration of the loop, get
-                # the max length of the number string
-                if (it == 0):
-                    LENGTH = len(str(i[1]))  # <- length of longest number, now get the difference and try to  make other numbers align to it
-
-                stats_file.write("&#160;" * 3 + str(i[1]) + "&#160;" * (1 + LENGTH - len(str(i[1]))) + "<a href=\"" + files[i[0]]['htmlfile'] + "\">  " + i[0] + "</a></br>\n")
-                it += 1
-                if (it == 10):  # print only the top 10
-                    break
-            stats_file.write("</p>\n")
-
-    print("\nOpen '" + options.report_dir + "/index.html' to see the results.")
diff --git a/boot/cypress/scripts/cppcheck.sh b/boot/cypress/scripts/cppcheck.sh
deleted file mode 100644
index 862b926..0000000
--- a/boot/cypress/scripts/cppcheck.sh
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/bin/bash
-#
-# this must be the first non-commented line in this script. It ensures
-# bash doesn't choke on \r on Windows
-(set -o igncr) 2>/dev/null && set -o igncr; # this comment is needed
-
-#
-# This script does static code analysis using Cppcheck tool
-# Copyright (c) 2019 Cypress Semiconductor.
-#
-
-# It performs Cppcheck code analysis with following inputs
-# 1. CypressBootloader/sources - Code analysis is done on all the sources of CypressBootloader.
-# 2. Additional source files to be analyzed are grabbed from config file that is provided as a first argument to the script.
-# 3. Files to be ignored are grabbed from config file that is provided as a first argument to the script.
-# 4. To ignore a file its name need to be added to the config file with word "ignore" as perfix
-# 5. To add any additional files, apart the files from CypressBootloader/sources, those names need
-#    to be added in a config file.
-#    Example
-#    A). add below entries in cpp_check.dat file
-#        ignore cy_bootloader_hw.c
-#        file1.c
-#        file2.c
-#        ignore cy_bootloader_services.c
-#    B). invoke cpp_check shell script
-#        cpp_check.sh cpp_check.dat
-#
-#    Above example performs Cppcheck analysis on CypressBootloader/sources, ignore cy_bootloader_hw.c, file1.c, file2.c and ignores cy_bootloader_services.c
-
-
-app_name="$1"
-platform="$2"
-app_defines="$3"
-app_includes="$4"
-CPP_CHECK_FILES="$5"
-scope="$6"
-buildcfg="$7"
-
-if [[ ${scope} != "" ]]; then
-	SCOPE="--enable=${scope}"
-else
-	SCOPE=""
-fi
-
-#Retrieve list of files need to be ignored
-while IFS= read -r line
-do
-	CPP_CHECK_IGNORE_FILES="$CPP_CHECK_IGNORE_FILES -i $line"
-done < "${app_name}/cppcheck/ignore_files.list"
-
-#Retrieve list of cppcheck directives
-while IFS= read -r line
-do
-	CPP_CHECK_SUPPRESS="$CPP_CHECK_SUPPRESS --suppress=$line"
-done < "${app_name}/cppcheck/suppress_types.list"
-
-echo "-------------------------------------------"
-echo "Suppress options:" "$CPP_CHECK_SUPPRESS"
-echo "-------------------------------------------"
-echo "Additional files:" "$CPP_CHECK_FILES"
-echo "-------------------------------------------"
-echo "Ignoring files:" "$CPP_CHECK_IGNORE_FILES"
-echo "-------------------------------------------"
-echo "CppCheck scope of messages defined with option " ${SCOPE}
-echo "-------------------------------------------"
-echo "Run CppCheck for platform" ${platform}
-echo "-------------------------------------------"
-echo "Defines passed to CppCheck:"
-echo ${app_defines}
-echo "-------------------------------------------"
-echo "Include dirs passed to CppCheck:"
-echo ${app_includes}
-echo "-------------------------------------------"
-
-mkdir -p ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_html
-
-dos2unix ${app_name}/cppcheck/suppress_messages.list
-
-#Generate xml file
-cppcheck ${SCOPE} ${CPP_CHECK_SUPPRESS} -D__GNUC__ -D${platform} ${app_defines} ${app_includes} ${CPP_CHECK_FILES} ${CPP_CHECK_IGNORE_FILES} \
-	--xml 2> ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.xml
-
-#Generate html file
-python scripts/cppcheck-htmlreport.py --file=${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.xml --report-dir=${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_html --title=${app_name}
-
-cppcheck ${SCOPE} ${CPP_CHECK_SUPPRESS} -D__GNUC__ -D${platform} ${app_defines} ${app_includes} ${CPP_CHECK_FILES} ${CPP_CHECK_IGNORE_FILES} \
-	--template="{severity}\n{id}\n{message}\n{file}\n{line}:{column}\n{code}\n" 2> ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.full
-
-#Generate csv file
-echo "severity@id@message@file@line" > ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.csv
-while IFS= read -r line
-do
-	read -r line2
-	read -r line3
-	read -r line4
-	read -r line5
-	line4=$(echo $line4 | sed 's/.*\\cy_mcuboot\\//' | tr '\\' '/')
-	if grep -xq "${line}@${line2}@${line3}@${line4}@${line5}" ${app_name}/cppcheck/suppress_messages.list
-	then
-		:;#suppress current warning
-	else
-		echo ${line}@${line2}@${line3}@${line4}@${line5}
-	fi
-	read -r line
-	read -r line
-	read -r line
-done \
-< ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.full \
->>${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.csv
-
-#Generate log file
-while IFS= read -r line
-do
-	read -r line2
-	read -r line3
-	read -r line4
-	read -r line5
-	line4=$(echo $line4 | sed 's/.*\\cy_mcuboot\\//' | tr '\\' '/')
-	if grep -xq "${line}@${line2}@${line3}@${line4}@${line5}" ${app_name}/cppcheck/suppress_messages.list
-	then
-		read -r line
-		read -r line
-		read -r line
-	else
-		echo ${line} : ${line2}
-		echo ${line3}
-		echo "${line4} (${line5})"
-		read -r line
-		echo ${line}
-		read -r line
-		echo ${line}
-		read -r line
-		echo "-------------------------------------------"
-	fi
-done \
-< ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.full \
-> ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.log
-
-rm ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.full
-cat ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.log
-
-RC=$(( $(wc -l ${app_name}/cppcheck/results/${platform}/${buildcfg}/cppcheck_report_${scope}.csv | cut -d' ' -f1) -1 ))
-echo "${app_name} CPPCHECK FOR ${platform} KIT FOUND $RC ERRORS"
-
-exit $RC
\ No newline at end of file
diff --git a/boot/cypress/scripts/flashmap.py b/boot/cypress/scripts/flashmap.py
index 339e4c5..0cb0805 100644
--- a/boot/cypress/scripts/flashmap.py
+++ b/boot/cypress/scripts/flashmap.py
@@ -7,33 +7,98 @@
 import json
 
 # Supported Platforms
+cm0pCore = {
+    'cortex-m0+': 'CM0P',
+    'cm0+': 'CM0P',
+    'm0+': 'CM0P',
+    'cortex-m0p': 'CM0P',
+    'cm0p': 'CM0P',
+    'm0p': 'CM0P',
+    'cortex-m0plus': 'CM0P',
+    'cm0plus': 'CM0P',
+    'm0plus': 'CM0P'
+}
+
+cm4Core = {
+    'cortex-m4': 'CM4',
+    'cm4': 'CM4',
+    'm4': 'CM4'
+}
+
+cm33Core = {
+    'cortex-m33': 'CM33',
+    'cm33': 'CM33',
+    'm33': 'CM33'
+}
+
+allCores_PSOC_06x = {**cm0pCore, **cm4Core}
+
+common_PSOC_061 = {
+    'flashAddr': 0x10000000,
+    'eraseSize': 0x200,  # 512 bytes
+    'smifAddr': 0x18000000,
+    'smifSize': 0x8000000,  # i.e., window size
+    'VTAlign': 0x400,  # Vector Table alignment
+    'allCores': cm4Core,
+    'bootCore': 'Cortex-M4',
+    'appCore': 'Cortex-M4'
+}
+
+common_PSOC_06x = {
+    'flashAddr': 0x10000000,
+    'eraseSize': 0x200,  # 512 bytes
+    'smifAddr': 0x18000000,
+    'smifSize': 0x8000000,  # i.e., window size
+    'VTAlign': 0x400,  # Vector Table alignment
+    'allCores': allCores_PSOC_06x,
+    'bootCore': 'Cortex-M0+',
+    'appCore': 'Cortex-M4'
+}
+
 platDict = {
-    'PSOC_062_2M': {
-        'flashAddr': 0x10000000,
+    'PSOC_061_2M': {
         'flashSize': 0x200000,  # 2 MBytes
-        'eraseSize': 0x200,  # 512 bytes
-        'smifAddr': 0x18000000,
-        'smifSize': 0x8000000  # i.e., window size
+        **common_PSOC_061
+    },
+    'PSOC_061_1M': {
+        'flashSize': 0x100000,  # 1 MByte
+        **common_PSOC_061
+    },
+    'PSOC_061_512K': {
+        'flashSize': 0x80000,  # 512 KBytes
+        **common_PSOC_061
+    },
+
+    'PSOC_062_2M': {
+        'flashSize': 0x200000,  # 2 MBytes
+        **common_PSOC_06x
     },
     'PSOC_062_1M': {
-        'flashAddr': 0x10000000,
         'flashSize': 0x100000,  # 1 MByte
-        'eraseSize': 0x200,  # 512 bytes
-        'smifAddr': 0x18000000,
-        'smifSize': 0x8000000  # i.e., window size
+        **common_PSOC_06x
     },
     'PSOC_062_512K': {
-        'flashAddr': 0x10000000,
         'flashSize': 0x80000,  # 512 KBytes
-        'eraseSize': 0x200,  # 512 bytes
-        'smifAddr': 0x18000000,
-        'smifSize': 0x8000000  # i.e., window size
+        **common_PSOC_06x
     },
+
+    'PSOC_063_1M': {
+        'flashSize': 0x100000,  # 1 MByte
+        **common_PSOC_06x
+    },
+
     'CYW20829': {
         'flashSize': 0,  # n/a
         'smifAddr': 0x60000000,
-        'smifSize': 0x8000000  # i.e., window size
-    }
+        'smifSize': 0x8000000,  # i.e., window size
+        'VTAlign': 0x200,  # Vector Table alignment
+        'allCores': cm33Core,
+        'bootCore': 'Cortex-M33',
+        'appCore': 'Cortex-M33',
+        'bitsPerCnt': False
+    },
+
+
 }
 
 # Supported SPI Flash ICs
@@ -115,6 +180,8 @@
         self.in_file = ''
         self.out_file = ''
         self.img_id = None
+        self.policy = None
+        self.set_core = False
 
         usage = 'USAGE:\n' + sys.argv[0] + \
                 ''' -p <platform> -i <flash_map.json> -o <flash_map.h> -d <img_id>
@@ -124,12 +191,15 @@
 -p  --platform=  Target (e.g., PSOC_062_512K)
 -i  --ifile=     JSON flash map file
 -o  --ofile=     C header file to be generated
--d  --img_id     ID of application to build'''
+-d  --img_id     ID of application to build
+-c  --policy     Policy file in JSON format
+-m  --core       Detect and set Cortex-M CORE
+'''
 
         try:
             opts, unused = getopt.getopt(
-                sys.argv[1:], 'hi:o:p:d:',
-                ['help', 'platform=', 'ifile=', 'ofile=', 'img_id='])
+                sys.argv[1:], 'hi:o:p:d:c:m',
+                ['help', 'platform=', 'ifile=', 'ofile=', 'img_id=', 'policy=', 'core'])
             if len(unused) > 0:
                 print(usage, file=sys.stderr)
                 sys.exit(1)
@@ -149,6 +219,10 @@
                 self.out_file = arg
             elif opt in ('-d', '--img_id'):
                 self.img_id = arg
+            elif opt in ('-c', '--policy'):
+                self.policy = arg
+            elif opt in ('-m', '--core'):
+                self.set_core = True
 
         if len(self.in_file) == 0 or len(self.out_file) == 0:
             print(usage, file=sys.stderr)
@@ -361,8 +435,9 @@
         try:
             with open(params.out_file, "w", encoding='UTF-8') as out_f:
                 out_f.write('/* AUTO-GENERATED FILE, DO NOT EDIT.'
-                            ' ALL CHANGES WILL BE LOST! */\n')
-                out_f.write(f'/* Platform: {params.plat_id} */\n')
+                            ' ALL CHANGES WILL BE LOST! */\n\n'
+                            '#ifndef CY_FLASH_MAP_H\n#define CY_FLASH_MAP_H\n')
+                out_f.write(f'\n/* Platform: {params.plat_id} */\n')
                 out_f.write(f'\nstatic struct flash_area {c_array}[] = {{\n')
                 comma = len(self.areas)
                 area_count = 0
@@ -383,7 +458,7 @@
                             'struct flash_area *boot_area_descs[] = {\n')
                 for area_index in range(area_count):
                     out_f.write(f'    &{c_array}[{area_index}U],\n')
-                out_f.write('    NULL\n};\n')
+                out_f.write('    NULL\n};\n\n#endif /* CY_FLASH_MAP_H */\n')
         except (FileNotFoundError, OSError):
             print('Cannot create', params.out_file, file=sys.stderr)
             sys.exit(4)
@@ -411,7 +486,7 @@
 
 
 def get_bool(obj, attr, def_val=False):
-    """Get JSON boolean value (returns def_val if it missing)"""
+    """Get JSON boolean value (returns def_val if it is missing)"""
     ret_val = def_val
     obj = obj.get(attr)
     if obj is not None:
@@ -433,6 +508,21 @@
     return ret_val
 
 
+def get_str(obj, attr, def_val=None):
+    """Get JSON string value (returns def_val if it is missing)"""
+    ret_val = def_val
+    obj = obj.get(attr)
+    if obj is not None:
+        try:
+            ret_val = str(obj['value'])
+        except KeyError as key:
+            print('Malformed JSON:', key,
+                  'is missing in', "'" + attr + "'",
+                  file=sys.stderr)
+            sys.exit(5)
+    return ret_val
+
+
 class AddrSize:
     """Bootloader area"""
 
@@ -514,7 +604,7 @@
     slot_sectors_max = 0
     all_shared = get_bool(boot_and_upgrade['bootloader'], 'shared_slot')
     any_shared = all_shared
-
+    app_core = None
     apps_flash_map = [None, ]
 
     for stage in range(2):
@@ -543,6 +633,21 @@
                     area_list.chk_area(primary_addr, primary_size)
                     area_list.chk_area(secondary_addr, secondary_size,
                                        primary_addr)
+                    if application.get('core') is None:
+                        if app_index == 1:
+                            app_core = area_list.plat['appCore']
+                    elif app_index > 1:
+                        print('"core" makes sense only for the 1st app',
+                              file=sys.stderr)
+                        sys.exit(6)
+                    else:
+                        app_core = get_str(application, 'core',
+                                           area_list.plat['appCore'])
+                    if app_index == 1:
+                        app_core = area_list.plat['allCores'].get(app_core.lower())
+                        if app_core is None:
+                            print('Unknown "core"', file=sys.stderr)
+                            sys.exit(6)
                 else:
                     slot_sectors_max = max(
                         slot_sectors_max,
@@ -577,7 +682,7 @@
                   file=sys.stderr)
             sys.exit(5)
 
-    return app_count, slot_sectors_max, apps_flash_map, any_shared
+    return app_core, app_count, slot_sectors_max, apps_flash_map, any_shared
 
 
 def main():
@@ -642,9 +747,19 @@
             sys.exit(5)
 
     # Fill flash areas
-    app_count, slot_sectors_max, apps_flash_map, shared_slot = \
+    app_core, app_count, slot_sectors_max, apps_flash_map, shared_slot = \
         process_images(area_list, boot_and_upgrade)
 
+    cy_img_hdr_size = 0x400
+    app_start = int(apps_flash_map[1].get("primary").get("address"), 0) + cy_img_hdr_size
+
+    if app_start % plat['VTAlign'] != 0:
+        print('Starting address', apps_flash_map[1].get("primary").get("address"),
+              '+', hex(cy_img_hdr_size),
+              'must be aligned to', hex(plat['VTAlign']),
+              file=sys.stderr)
+        sys.exit(7)
+
     slot_sectors_max = max(slot_sectors_max, 32)
 
     if swap_status is not None:
@@ -673,17 +788,19 @@
 
     # Report necessary values back to make
     print('# AUTO-GENERATED FILE, DO NOT EDIT. ALL CHANGES WILL BE LOST!')
+    print('BOOTLOADER_SIZE :=', hex(boot.fa_size))
+    if params.set_core:
+        print('CORE :=', plat['allCores'][plat['bootCore'].lower()])
+    print('APP_CORE :=', app_core)
 
     if params.img_id is not None:
-        primary_img_start = (apps_flash_map[int(params.img_id)].get("primary")).get("address")
-        secondary_img_start = (apps_flash_map[int(params.img_id)].get("secondary")).get("address")
-        bootloader_size = (bootloader.get("size")).get("value")
-        slot_size = (apps_flash_map[int(params.img_id)].get("primary")).get("size")
+        primary_img_start = apps_flash_map[int(params.img_id)].get("primary").get("address")
+        secondary_img_start = apps_flash_map[int(params.img_id)].get("secondary").get("address")
+        slot_size = apps_flash_map[int(params.img_id)].get("primary").get("size")
 
         print('PRIMARY_IMG_START := ' + primary_img_start)
         print('SECONDARY_IMG_START := ' + secondary_img_start)
         print('SLOT_SIZE := ' + slot_size)
-        print('BOOTLOADER_SIZE := ' + bootloader_size)
     else:
         print('MCUBOOT_IMAGE_NUMBER :=', app_count)
         print('MAX_IMG_SECTORS :=', slot_sectors_max)
diff --git a/boot/cypress/scripts/github_pr_cleaner.py b/boot/cypress/scripts/github_pr_cleaner.py
index be26c07..df221f0 100644
--- a/boot/cypress/scripts/github_pr_cleaner.py
+++ b/boot/cypress/scripts/github_pr_cleaner.py
@@ -6,17 +6,23 @@
 
 shutil.rmtree('../cppcheck')
 shutil.rmtree('../coverity')
+shutil.rmtree('../manifests')
 remove('../../../.gitlab-ci.yml')
 remove('../BlinkyApp/BlinkyApp_CM4_Debug.launch')
 remove('../MCUBootApp/MCUBootApp_CM0P_Debug.launch')
 remove('../MCUBootApp/MCUBootApp_CYW20829_Debug.launch')
-remove('../cy_flash_pal/flash_cyw208xx/flashmap/cyw20829_xip_swap_single_psvp.json')
-remove('../cy_flash_pal/flash_cyw208xx/flashmap/cyw20829_xip_swap_multi2_psvp.json')
-remove('../cy_flash_pal/flash_cyw208xx/flashmap/cyw20829_xip_overwrite_single_psvp.json')
-remove('../cy_flash_pal/flash_cyw208xx/flashmap/cyw20829_xip_overwrite_multi2_psvp.json')
+remove('./cppcheck.sh')
 remove('./cppcheck-htmlreport.py')
 remove('./rbc_policy_and_cert_revision_modify.py')
-remove('../platforms/CYW20829/cyw20829_psvp.h')
+remove('../platforms/BSP/CYW20829/cyw20829_psvp.h')
+remove('../platforms/cy_flash_pal/flash_cyw20829/flashmap/cyw20829_xip_swap_single_psvp.json')
+remove('../platforms/cy_flash_pal/flash_cyw20829/flashmap/cyw20829_xip_swap_multi2_psvp.json')
+remove('../platforms/cy_flash_pal/flash_cyw20829/flashmap/cyw20829_xip_overwrite_single_psvp.json')
+remove('../platforms/cy_flash_pal/flash_cyw20829/flashmap/cyw20829_xip_overwrite_multi2_psvp.json')
+remove('../platforms/cy_flash_pal/flash_cyw20829/flashmap/hw_rollback_prot/cyw20829_xip_swap_single_psvp.json')
+remove('../platforms/cy_flash_pal/flash_cyw20829/flashmap/hw_rollback_prot/cyw20829_xip_swap_multi2_psvp.json')
+remove('../platforms/cy_flash_pal/flash_cyw20829/flashmap/hw_rollback_prot/cyw20829_xip_overwrite_single_psvp.json')
+remove('../platforms/cy_flash_pal/flash_cyw20829/flashmap/hw_rollback_prot/cyw20829_xip_overwrite_multi2_psvp.json')
 remove(argv[0])
 
 print('Cleanup complete')
diff --git a/boot/cypress/scripts/rbc_policy_and_cert_revision_modify.py b/boot/cypress/scripts/rbc_policy_and_cert_revision_modify.py
deleted file mode 100644
index 7b47634..0000000
--- a/boot/cypress/scripts/rbc_policy_and_cert_revision_modify.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-
-import json
-
-with open("./policy/policy_reprovisioning_secure.json", "r+") as f:
-    data = json.load(f)
-    data["device_policy"]["flow_control"]["sys_reset_req"]["value"] = True
-    f.seek(0)
-    json.dump(data, f)
-    f.truncate()
-    f.close()
-
-with open("./packets/debug_cert.json", "r+") as f:
-    data = json.load(f)
-    data["device_id"]["revision_id"] = "0x00"
-    f.seek(0)
-    json.dump(data, f)
-    f.truncate()
-    f.close()