#!/usr/bin/env python3
#
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This script confirms that the naming of all symbols and identifiers in Mbed TLS
is consistent with the house style and is also self-consistent.
"""

import argparse
import textwrap
import os
import sys
import traceback
import re
import shutil
import subprocess
import logging

# Naming patterns to check against
MACRO_PATTERN = r"^(MBEDTLS|PSA)_[0-9A-Z_]*[0-9A-Z]$"
IDENTIFIER_PATTERN = r"^(mbedtls|psa)_[0-9a-z_]*[0-9a-z]$"
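# For example, MBEDTLS_AES_C satisfies MACRO_PATTERN and mbedtls_aes_init
# satisfies IDENTIFIER_PATTERN.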

class Match(object):
    def __init__(self, filename, line, pos, name):
        self.filename = filename
        self.line = line
        self.pos = pos
        self.name = name

    def __str__(self):
        return self.name

class Problem(object):
    def __init__(self):
        self.textwrapper = textwrap.TextWrapper()
        self.textwrapper.initial_indent = "    * "
        self.textwrapper.subsequent_indent = "      "

class SymbolNotInHeader(Problem):
    def __init__(self, symbol_name):
        self.symbol_name = symbol_name
        Problem.__init__(self)

    def __str__(self):
        return self.textwrapper.fill(
            "'{0}' was found as an available symbol in the output of nm, "
            "however it was not declared in any header files."
            .format(self.symbol_name))

class PatternMismatch(Problem):
    def __init__(self, pattern, match):
        self.pattern = pattern
        self.match = match
        Problem.__init__(self)

    def __str__(self):
        return self.textwrapper.fill(
            "{0}: '{1}' does not match the required pattern '{2}'."
            .format(self.match.filename, self.match.name, self.pattern))

class Typo(Problem):
    def __init__(self, match):
        self.match = match
        Problem.__init__(self)

    def __str__(self):
        return self.textwrapper.fill(
            "{0}: '{1}' looks like a typo. It was not found in any macros or "
            "any enums. If this is not a typo, put //no-check-names after it."
            .format(self.match.filename, self.match.name))

class NameCheck(object):
    def __init__(self):
        self.log = None
        self.check_repo_path()
        self.return_code = 0
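        # bn_mul.h is excluded because its inline-assembly helper macros
        # (the MULADDC_* family) do not follow the standard naming patterns.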
        self.excluded_files = ["bn_mul"]

    def set_return_code(self, return_code):
        if return_code > self.return_code:
            self.return_code = return_code

    def setup_logger(self, verbose=False):
        """
        Set up a logger and change the default logging level from WARNING to
        INFO. Loggers are better than print statements since their verbosity
        can be controlled.
        """
        self.log = logging.getLogger()
        if verbose:
            self.log.setLevel(logging.DEBUG)
        else:
            self.log.setLevel(logging.INFO)
        self.log.addHandler(logging.StreamHandler())

    def check_repo_path(self):
        """
        Check that the current working directory is the project root, and throw
        an exception if not.
        """
        current_dir = os.path.realpath('.')
        root_dir = os.path.dirname(os.path.dirname(
            os.path.dirname(os.path.realpath(__file__))))
        if current_dir != root_dir:
            raise Exception("Must be run from Mbed TLS root")

    def get_files(self, extension, directory):
        """
        Get all files with the given extension in the given directory,
        recursively, skipping any excluded files.
        """
        filenames = []
        for root, dirs, files in sorted(os.walk(directory)):
            for filename in sorted(files):
                # Compare without the extension so that entries such as
                # "bn_mul" in self.excluded_files match "bn_mul.h".
                if (os.path.splitext(filename)[0] not in self.excluded_files
                        and filename.endswith("." + extension)):
                    filenames.append(os.path.join(root, filename))
        return filenames

    def parse_macros(self, header_files):
        """
        Parse all macros defined by #define preprocessor directives.

        Args:
            header_files: A list of filepaths to look through.

        Returns:
            A list of Match objects for the macros.
        """
        MACRO_REGEX = r"#define (?P<macro>\w+)"
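        # A #define whose name starts with any of these strings is not part
        # of the Mbed TLS API (e.g. keyword redefinitions and inline-assembly
        # helpers), so it is skipped.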
        NON_MACROS = (
            "asm", "inline", "EMIT", "_CRT_SECURE_NO_DEPRECATE", "MULADDC_"
        )

        macros = []

        for header_file in header_files:
            with open(header_file, "r") as header:
                for line in header:
                    macro = re.search(MACRO_REGEX, line)
                    if (macro and
                            not macro.group("macro").startswith(NON_MACROS)):
                        macros.append(Match(
                            header_file,
                            line,
                            (macro.start(), macro.end()),
                            macro.group("macro")))

        return macros

    def parse_MBED_names(self, files):
        """
        Parse all words in the files that begin with MBED, including macros.

        Args:
            files: A list of filepaths to look through.

        Returns:
            A list of Match objects for words beginning with MBED.
        """
        MBED_names = []

        for filename in files:
            with open(filename, "r") as fp:
                for line in fp:
                    # Ignore any names that are deliberately opted out of
                    # checking with a // no-check-names comment.
                    if re.search(r"// *no-check-names", line):
                        continue

                    for name in re.finditer(r"\bMBED.+?_[A-Z0-9_]*", line):
                        MBED_names.append(Match(
                            filename,
                            line,
                            (name.start(), name.end()),
                            name.group(0)
                        ))

        return MBED_names

    def parse_enum_consts(self, header_files):
        """
        Parse all enum value constants that are declared.

        Args:
            header_files: A list of filepaths to look through.

        Returns:
            A list of Match objects for the enum constants.
        """

        enum_consts = []

        for header_file in header_files:
            # Emulate a finite state machine to parse enum declarations.
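            # States: 0 = outside an enum, 1 = inside an enum body (each line
            # may declare a constant), 2 = after the enum keyword but before
            # the opening brace.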
            state = 0
            with open(header_file, "r") as header:
                for line in header:
                    if state == 0 and re.match(r"^(typedef )?enum {", line):
                        state = 1
                    elif state == 0 and re.match(r"^(typedef )?enum", line):
                        state = 2
                    elif state == 2 and re.match(r"^{", line):
                        state = 1
                    elif state == 1 and re.match(r"^}", line):
                        state = 0
                    elif state == 1:
                        enum_const = re.match(r"^\s*(?P<enum_const>\w+)", line)
                        if enum_const:
                            enum_consts.append(Match(
                                header_file,
                                line,
                                (enum_const.start(), enum_const.end()),
                                enum_const.group("enum_const")))

        return enum_consts

    def parse_identifiers(self, header_files):
        """
        Parse all lines of a header where a function identifier is declared,
        based on some heuristics. Assumes every line that is not a comment or a
        preprocessor directive contains some identifier.

        Args:
            header_files: A list of filepaths to look through.

        Returns:
            A list of Match objects for the identifiers.
        """
        EXCLUDED_DECLARATIONS = (
            r"^(extern \"C\"|(typedef )?(struct|enum)( {)?$|};?$|$)"
        )

        identifiers = []

        for header_file in header_files:
            with open(header_file, "r") as header:
                in_block_comment = False

                for line in header:
                    # Skip parsing this line if it begins or ends a block
                    # comment, and set the state machine's state.
                    if re.search(r"/\*", line):
                        in_block_comment = True
                        continue
                    elif re.search(r"\*/", line) and in_block_comment:
                        in_block_comment = False
                        continue

                    # Skip parsing this line if it's a line comment, or if it
                    # begins with a preprocessor directive
                    if in_block_comment or re.match(r"(//|#)", line):
                        continue

                    if re.match(EXCLUDED_DECLARATIONS, line):
                        continue

                    identifier = re.search(
                        # Matches: "mbedtls_aes_init("
                        r"([a-zA-Z_][a-zA-Z0-9_]*)\(|"
                        # Matches: "(*f_rng)("
                        r"\(\*(.+)\)\(|"
                        # TODO: unknown purpose
                        r"(\w+)\W*$",
                        line
                    )

                    if identifier:
                        for group in identifier.groups():
                            if group:
                                identifiers.append(Match(
                                    header_file,
                                    line,
                                    (identifier.start(), identifier.end()),
                                    identifier.group(0)))

        return identifiers

    def parse_symbols(self):
        """
        Compile the Mbed TLS libraries, and parse the TLS, Crypto, and x509
        object files using nm to retrieve the list of referenced symbols.

        Returns:
            A list of unique symbols defined and used in the libraries.
        """

        symbols = []

        # Back up the config and atomically compile with the full configuration.
        shutil.copy("include/mbedtls/mbedtls_config.h",
                    "include/mbedtls/mbedtls_config.h.bak")
        try:
            subprocess.run(
                ["perl", "scripts/config.pl", "full"],
                encoding=sys.stdout.encoding,
                check=True
            )
            my_environment = os.environ.copy()
            my_environment["CFLAGS"] = "-fno-asynchronous-unwind-tables"
            subprocess.run(
                ["make", "clean", "lib"],
                env=my_environment,
                encoding=sys.stdout.encoding,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                check=True
            )

            # Perform object file analysis using nm
            symbols = self.parse_symbols_from_nm(
                ["library/libmbedcrypto.a",
                 "library/libmbedtls.a",
                 "library/libmbedx509.a"])

            symbols.sort()

            subprocess.run(
                ["make", "clean"],
                encoding=sys.stdout.encoding,
                check=True
            )
        except subprocess.CalledProcessError as error:
            self.log.error(error)
            self.set_return_code(2)
        finally:
            shutil.move("include/mbedtls/mbedtls_config.h.bak",
                        "include/mbedtls/mbedtls_config.h")

        return symbols

    def parse_symbols_from_nm(self, object_files):
        """
        Run nm to retrieve the list of referenced symbols in each object file.
        Does not return the position data since it is of no use.

        Returns:
            A list of unique symbols defined and used in any of the object files.
        """
        UNDEFINED_SYMBOL = r"^\S+: +U |^$|^\S+:$"
        VALID_SYMBOL = r"^\S+( [0-9A-Fa-f]+)* . _*(?P<symbol>\w+)"
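        # UNDEFINED_SYMBOL matches undefined ('U') references, blank lines and
        # object-file header lines, all of which are ignored; VALID_SYMBOL
        # captures the symbol name from the remaining defined-symbol lines.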

        symbols = []

        nm_output = ""
        for lib in object_files:
            nm_output += subprocess.run(
                ["nm", "-og", lib],
                encoding=sys.stdout.encoding,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                check=True
            ).stdout
        for line in nm_output.splitlines():
            if not re.match(UNDEFINED_SYMBOL, line):
                symbol = re.match(VALID_SYMBOL, line)
                if symbol:
                    symbols.append(symbol.group('symbol'))
                else:
                    self.log.error(line)

        return symbols

    def parse_names_in_source(self):
        """
        Calls each parsing function to retrieve various elements of the code,
        together with their source location. Puts the parsed values in the
        internal variable self.parse_result.
        """
        self.log.info("Parsing source code...")

        m_headers = self.get_files("h", os.path.join("include", "mbedtls"))
        p_headers = self.get_files("h", os.path.join("include", "psa"))
        t_headers = ["3rdparty/everest/include/everest/everest.h",
                     "3rdparty/everest/include/everest/x25519.h"]
        l_headers = self.get_files("h", "library")
        libraries = self.get_files("c", "library") + [
            "3rdparty/everest/library/everest.c",
            "3rdparty/everest/library/x25519.c"]

        all_macros = self.parse_macros(
            m_headers + p_headers + t_headers + l_headers)
        enum_consts = self.parse_enum_consts(m_headers + t_headers)
        identifiers = self.parse_identifiers(m_headers + p_headers + t_headers)
        symbols = self.parse_symbols()
        mbed_names = self.parse_MBED_names(
            m_headers + p_headers + t_headers + l_headers + libraries)

        # Remove identifier macros like mbedtls_printf or mbedtls_calloc.
        # Compare by name, since Match objects do not define equality.
        identifier_names = set(identifier.name for identifier in identifiers)
        macros = [macro for macro in all_macros
                  if macro.name not in identifier_names]

        self.log.info("Found:")
        self.log.info(" {} Macros".format(len(all_macros)))
        self.log.info(" {} Enum Constants".format(len(enum_consts)))
        self.log.info(" {} Identifiers".format(len(identifiers)))
        self.log.info(" {} Exported Symbols".format(len(symbols)))
        self.log.info("Analysing...")

        self.parse_result = {
            "macros": macros,
            "enum_consts": enum_consts,
            "identifiers": identifiers,
            "symbols": symbols,
            "mbed_names": mbed_names
        }

    def perform_checks(self):
        """
        Perform each check in order, output its PASS/FAIL status. Maintain an
        overall test status, and output that at the end.
        """
        problems = 0

        problems += self.check_symbols_declared_in_header()

        pattern_checks = [
            ("macros", MACRO_PATTERN),
            ("enum_consts", MACRO_PATTERN),
            ("identifiers", IDENTIFIER_PATTERN)]
        for group, check_pattern in pattern_checks:
            problems += self.check_match_pattern(group, check_pattern)

        problems += self.check_for_typos()

        self.log.info("=============")
        if problems > 0:
            self.log.info("FAIL: {0} problem(s) to fix".format(str(problems)))
        else:
            self.log.info("PASS")

    def check_symbols_declared_in_header(self):
        """
        Perform a check that all detected symbols in the library object files
        are properly declared in headers.

        Outputs to the logger the PASS/FAIL status, followed by the location of
        problems.

        Returns the number of problems that need fixing.
        """
        problems = []
        for symbol in self.parse_result["symbols"]:
            found_symbol_declared = False
            for identifier_match in self.parse_result["identifiers"]:
                if symbol == identifier_match.name:
                    found_symbol_declared = True
                    break

            if not found_symbol_declared:
                problems.append(SymbolNotInHeader(symbol))

        if problems:
            self.set_return_code(1)
            self.log.info("All symbols in header: FAIL")
            for problem in problems:
                self.log.info(str(problem) + "\n")
        else:
            self.log.info("All symbols in header: PASS")

        return len(problems)

    def check_match_pattern(self, group_to_check, check_pattern):
        """
        Check each name in the given group against the required naming
        pattern, and also flag names that contain a double underscore.

        Returns the number of problems that need fixing.
        """
        problems = []
        for item_match in self.parse_result[group_to_check]:
            if not re.match(check_pattern, item_match.name):
                problems.append(PatternMismatch(check_pattern, item_match))
            if re.match(r".*__.*", item_match.name):
                problems.append(PatternMismatch("double underscore", item_match))

        if problems:
            self.set_return_code(1)
            self.log.info("Naming patterns of {}: FAIL".format(group_to_check))
            for problem in problems:
                self.log.info(str(problem) + "\n")
        else:
            self.log.info("Naming patterns of {}: PASS".format(group_to_check))

        return len(problems)

    def check_for_typos(self):
        """
        Check each word that begins with MBED against the list of all detected
        macro and enum constant names, and flag anything not found there as a
        likely typo.

        Returns the number of problems that need fixing.
        """
        problems = []
        all_caps_names = list(set([
            match.name for match
            in self.parse_result["macros"] + self.parse_result["enum_consts"]]
        ))

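        # Names matching this pattern (e.g. placeholder names containing XXX,
        # names with double or trailing underscores, and the
        # MBEDTLS_*CONFIG_FILE macros) are never reported as typos.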
        TYPO_EXCLUSION = r"XXX|__|_$|^MBEDTLS_.*CONFIG_FILE$"

        for name_match in self.parse_result["mbed_names"]:
            if name_match.name not in all_caps_names:
                if not re.search(TYPO_EXCLUSION, name_match.name):
                    problems.append(Typo(name_match))

        if problems:
            self.set_return_code(1)
            self.log.info("Likely typos: FAIL")
            for problem in problems:
                self.log.info(str(problem) + "\n")
        else:
            self.log.info("Likely typos: PASS")

        return len(problems)

def main():
    """
    Main function, parses command-line arguments.
    """

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=(
            "This script confirms that the naming of all symbols and identifiers "
            "in Mbed TLS is consistent with the house style and is also "
            "self-consistent.\n\n"
            "Expected to be run from the Mbed TLS root directory."))

    parser.add_argument("-v", "--verbose",
                        action="store_true",
                        help="enable script debug outputs")

    args = parser.parse_args()

    try:
        name_check = NameCheck()
        name_check.setup_logger(verbose=args.verbose)
        name_check.parse_names_in_source()
        name_check.perform_checks()
        sys.exit(name_check.return_code)
    except Exception:
        traceback.print_exc()
        sys.exit(2)


if __name__ == "__main__":
    main()