Merge remote-tracking branch 'mbedtls-3.6' into psa-storage-test-cases-never-supported-positive-3.6

Update framework submodule to the tip of main.
Gilles Peskine 2024-12-20 20:30:02 +01:00
commit e7d92315ab
13 changed files with 26 additions and 846 deletions


@@ -354,18 +354,18 @@ if(ENABLE_TESTING OR ENABLE_PROGRAMS)
if(GEN_FILES)
add_custom_command(
OUTPUT
${CMAKE_CURRENT_SOURCE_DIR}/framework/tests/src/test_keys.h
${CMAKE_CURRENT_SOURCE_DIR}/framework/tests/include/test/test_keys.h
WORKING_DIRECTORY
${CMAKE_CURRENT_SOURCE_DIR}/tests
COMMAND
"${MBEDTLS_PYTHON_EXECUTABLE}"
"${CMAKE_CURRENT_SOURCE_DIR}/framework/scripts/generate_test_keys.py"
"--output"
"${CMAKE_CURRENT_SOURCE_DIR}/framework/tests/src/test_keys.h"
"${CMAKE_CURRENT_SOURCE_DIR}/framework/tests/include/test/test_keys.h"
DEPENDS
${CMAKE_CURRENT_SOURCE_DIR}/framework/scripts/generate_test_keys.py
)
add_custom_target(test_keys_header DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/framework/tests/src/test_keys.h)
add_custom_target(test_keys_header DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/framework/tests/include/test/test_keys.h)
add_custom_command(
OUTPUT
${CMAKE_CURRENT_SOURCE_DIR}/tests/src/test_certs.h


@@ -0,0 +1,5 @@
Bugfix
* Fix missing constraints on the AES-NI inline assembly which is used on
GCC-like compilers when building AES for generic x86_64 targets. This
may have resulted in incorrect code with some compilers, depending on
optimizations. Fixes #9819.
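
For context: in GCC-style extended inline assembly, every register or memory
location that the assembly template modifies must be declared, either as an
output operand or in the clobber list; otherwise the optimizer may assume
those registers still hold their previous values across the statement. The
following minimal x86-64 sketch (an editorial illustration for GCC/Clang,
not the actual Mbed TLS code) shows the shape of a correctly declared
statement; the diff later in this commit applies the same principle by
adding the XMM registers that the AES-NI helpers overwrite to their
clobber lists.

#include <stdio.h>

static void zero_block(unsigned char *out)
{
    /* The template overwrites xmm0 and stores through `out`, so it
     * declares both: "xmm0" as a register clobber and "memory" for the
     * store. If "xmm0" were omitted, the compiler could keep a live
     * value in that register across the statement and, depending on
     * optimizations, generate incorrect code around it. */
    asm ("pxor   %%xmm0, %%xmm0 \n\t"   /* xmm0 := 0                  */
         "movdqu %%xmm0, (%0)   \n\t"   /* store 16 zero bytes        */
         :                              /* no outputs                 */
         : "r" (out)                    /* input: destination pointer */
         : "memory", "xmm0");           /* declared side effects      */
}

int main(void)
{
    unsigned char buf[16] = { 0xAA };
    zero_block(buf);
    printf("buf[0] = %d\n", buf[0]);    /* prints buf[0] = 0 */
    return 0;
}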


@@ -94,6 +94,8 @@ visualc_files: $(VISUALC_FILES)
# present before it runs. It doesn't matter if the files aren't up-to-date,
# they just need to be present.
$(VISUALC_FILES): | library/generated_files
$(VISUALC_FILES): | programs/generated_files
$(VISUALC_FILES): | tests/generated_files
$(VISUALC_FILES): $(gen_file_dep) scripts/generate_visualc_files.pl
$(VISUALC_FILES): $(gen_file_dep) scripts/data_files/vs2017-app-template.vcxproj
$(VISUALC_FILES): $(gen_file_dep) scripts/data_files/vs2017-main-template.vcxproj

@@ -1 +1 @@
Subproject commit e1f38eb599fffb6b3ac14b087a5f38306d89279f
Subproject commit 2db68049e1ba586407a1db6a37e94a1f9836142f


@@ -489,7 +489,7 @@ int mbedtls_aesni_crypt_ecb(mbedtls_aes_context *ctx,
"movdqu %%xmm0, (%4) \n\t" // export output
:
: "r" (ctx->nr), "r" (ctx->buf + ctx->rk_offset), "r" (mode), "r" (input), "r" (output)
: "memory", "cc", "xmm0", "xmm1");
: "memory", "cc", "xmm0", "xmm1", "0", "1");
return 0;
@@ -679,7 +679,7 @@ static void aesni_setkey_enc_128(unsigned char *rk,
AESKEYGENA(xmm0_xmm1, "0x36") "call 1b \n\t"
:
: "r" (rk), "r" (key)
: "memory", "cc", "0");
: "memory", "cc", "xmm0", "xmm1", "0");
}
/*
@@ -737,7 +737,7 @@ static void aesni_setkey_enc_192(unsigned char *rk,
:
: "r" (rk), "r" (key)
: "memory", "cc", "0");
: "memory", "cc", "xmm0", "xmm1", "xmm2", "0");
}
#endif /* !MBEDTLS_AES_ONLY_128_BIT_KEY_LENGTH */
@@ -805,7 +805,7 @@ static void aesni_setkey_enc_256(unsigned char *rk,
AESKEYGENA(xmm1_xmm2, "0x40") "call 1b \n\t"
:
: "r" (rk), "r" (key)
: "memory", "cc", "0");
: "memory", "cc", "xmm0", "xmm1", "xmm2", "0");
}
#endif /* !MBEDTLS_AES_ONLY_128_BIT_KEY_LENGTH */


@@ -1,277 +0,0 @@
#!/usr/bin/env python3
"""Check or fix the code style by running Uncrustify.
This script must be run from the root of a Git work tree containing Mbed TLS.
"""
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
import argparse
import os
import re
import subprocess
import sys
from typing import FrozenSet, List, Optional
UNCRUSTIFY_SUPPORTED_VERSION = "0.75.1"
CONFIG_FILE = ".uncrustify.cfg"
UNCRUSTIFY_EXE = "uncrustify"
UNCRUSTIFY_ARGS = ["-c", CONFIG_FILE]
CHECK_GENERATED_FILES = "tests/scripts/check-generated-files.sh"
def print_err(*args):
print("Error: ", *args, file=sys.stderr)
# Print the file names that will be skipped and the help message
def print_skip(files_to_skip):
print()
print(*files_to_skip, sep=", SKIP\n", end=", SKIP\n")
print("Warning: The listed files will be skipped because\n"
"they are not known to git.")
print()
# Match FILENAME(s) in "check SCRIPT (FILENAME...)"
CHECK_CALL_RE = re.compile(r"\n\s*check\s+[^\s#$&*?;|]+([^\n#$&*?;|]+)",
re.ASCII)
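# Illustrative example (editorial addition, not part of the original script):
# given a line of check-generated-files.sh such as
#     check framework/scripts/generate_test_keys.py framework/tests/include/test/test_keys.h
# CHECK_CALL_RE captures " framework/tests/include/test/test_keys.h", and
# list_generated_files() below splits such captures into file names.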
def list_generated_files() -> FrozenSet[str]:
"""Return the names of generated files.
We don't reformat generated files, since the result might be different
from the output of the generator. Ideally the result of the generator
would conform to the code style, but this would be difficult, especially
with respect to the placement of line breaks in long logical lines.
"""
# Parse check-generated-files.sh to get an up-to-date list of
# generated files. Read the file rather than calling it so that
# this script only depends on Git, Python and uncrustify, and not other
# tools such as sh or grep which might not be available on Windows.
# This introduces a limitation: check-generated-files.sh must have
# the expected format and must list the files explicitly, not through
# wildcards or command substitution.
content = open(CHECK_GENERATED_FILES, encoding="utf-8").read()
checks = re.findall(CHECK_CALL_RE, content)
return frozenset(word for s in checks for word in s.split())
# Check for comment string indicating an auto-generated file
AUTOGEN_RE = re.compile(r"Warning[ :-]+This file is (now )?auto[ -]?generated",
re.ASCII | re.IGNORECASE)
def is_file_autogenerated(filename):
content = open(filename, encoding="utf-8").read()
return AUTOGEN_RE.search(content) is not None
def get_src_files(since: Optional[str]) -> List[str]:
"""
Use git to get a list of the source files.
The optional argument since is a commit, indicating to only list files
that have changed since that commit. Without this argument, list all
files known to git.
Only C files are included, and certain files (generated, or 3rdparty)
are excluded.
"""
file_patterns = ["*.[hc]",
"tests/suites/*.function",
"scripts/data_files/*.fmt"]
output = subprocess.check_output(["git", "ls-files"] + file_patterns,
universal_newlines=True)
src_files = output.split()
# When this script is called from a git hook, some environment variables
# are set by default which force all git commands to use the main repository
# (i.e. prevent us from performing commands on the framework repo).
# Create an environment without these variables for running commands on the
# framework repo.
framework_env = os.environ.copy()
# Get a list of environment vars that git sets
git_env_vars = subprocess.check_output(["git", "rev-parse", "--local-env-vars"],
universal_newlines=True)
# Remove the vars from the environment
for var in git_env_vars.split():
framework_env.pop(var, None)
output = subprocess.check_output(["git", "-C", "framework", "ls-files"]
+ file_patterns,
universal_newlines=True,
env=framework_env)
framework_src_files = output.split()
if since:
# get all files changed in commits since the starting point in ...
# ... the main repository
cmd = ["git", "log", since + "..HEAD", "--ignore-submodules",
"--name-only", "--pretty=", "--"] + src_files
output = subprocess.check_output(cmd, universal_newlines=True)
committed_changed_files = output.split()
# ... the framework submodule
framework_since = get_submodule_hash(since, "framework")
cmd = ["git", "-C", "framework", "log", framework_since + "..HEAD",
"--name-only", "--pretty=", "--"] + framework_src_files
output = subprocess.check_output(cmd, universal_newlines=True,
env=framework_env)
committed_changed_files += ["framework/" + s for s in output.split()]
# and also get all files with uncommitted changes in ...
# ... the main repository
cmd = ["git", "diff", "--name-only", "--"] + src_files
output = subprocess.check_output(cmd, universal_newlines=True)
uncommitted_changed_files = output.split()
# ... the framework submodule
cmd = ["git", "-C", "framework", "diff", "--name-only", "--"] + \
framework_src_files
output = subprocess.check_output(cmd, universal_newlines=True,
env=framework_env)
uncommitted_changed_files += ["framework/" + s for s in output.split()]
src_files = committed_changed_files + uncommitted_changed_files
else:
src_files += ["framework/" + s for s in framework_src_files]
generated_files = list_generated_files()
# Don't correct style for third-party files (and, for simplicity,
# companion files in the same subtree), or for automatically
# generated files (we're correcting the templates instead).
src_files = [filename for filename in src_files
if not (filename.startswith("3rdparty/") or
filename in generated_files or
is_file_autogenerated(filename))]
return src_files
def get_submodule_hash(commit: str, submodule: str) -> str:
"""Get the commit hash of a submodule at a given commit in the Git repository."""
cmd = ["git", "ls-tree", commit, submodule]
output = subprocess.check_output(cmd, universal_newlines=True)
return output.split()[2]
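# Editorial note, not part of the original script: `git ls-tree <commit>
# <submodule>` prints a single line of the form
#     160000 commit 2db68049e1ba586407a1db6a37e94a1f9836142f  framework
# so output.split()[2] above is the submodule's commit hash.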
def get_uncrustify_version() -> str:
"""
Get the version string from Uncrustify
"""
result = subprocess.run([UNCRUSTIFY_EXE, "--version"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
check=False)
if result.returncode != 0:
print_err("Could not get Uncrustify version:", str(result.stderr, "utf-8"))
return ""
else:
return str(result.stdout, "utf-8")
def check_style_is_correct(src_file_list: List[str]) -> bool:
"""
Check the code style and output a diff for each file whose style is
incorrect.
"""
style_correct = True
for src_file in src_file_list:
uncrustify_cmd = [UNCRUSTIFY_EXE] + UNCRUSTIFY_ARGS + [src_file]
result = subprocess.run(uncrustify_cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, check=False)
if result.returncode != 0:
print_err("Uncrustify returned " + str(result.returncode) +
" correcting file " + src_file)
return False
# Uncrustify makes changes to the code and places the result in a new
# file with the extension ".uncrustify". To get the changes (if any)
# simply diff the 2 files.
diff_cmd = ["diff", "-u", src_file, src_file + ".uncrustify"]
cp = subprocess.run(diff_cmd, check=False)
if cp.returncode == 1:
print(src_file + " changed - code style is incorrect.")
style_correct = False
elif cp.returncode != 0:
raise subprocess.CalledProcessError(cp.returncode, cp.args,
cp.stdout, cp.stderr)
# Tidy up artifact
os.remove(src_file + ".uncrustify")
return style_correct
def fix_style_single_pass(src_file_list: List[str]) -> bool:
"""
Run Uncrustify once over the source files.
"""
code_change_args = UNCRUSTIFY_ARGS + ["--no-backup"]
for src_file in src_file_list:
uncrustify_cmd = [UNCRUSTIFY_EXE] + code_change_args + [src_file]
result = subprocess.run(uncrustify_cmd, check=False)
if result.returncode != 0:
print_err("Uncrustify with file returned: " +
str(result.returncode) + " correcting file " +
src_file)
return False
return True
def fix_style(src_file_list: List[str]) -> int:
"""
Fix the code style. This takes 2 passes of Uncrustify.
"""
if not fix_style_single_pass(src_file_list):
return 1
if not fix_style_single_pass(src_file_list):
return 1
# Guard against future changes that cause the codebase to require
# more passes.
if not check_style_is_correct(src_file_list):
print_err("Code style still incorrect after second run of Uncrustify.")
return 1
else:
return 0
def main() -> int:
"""
Main with command line arguments.
"""
uncrustify_version = get_uncrustify_version().strip()
if UNCRUSTIFY_SUPPORTED_VERSION not in uncrustify_version:
print("Warning: Using unsupported Uncrustify version '" +
uncrustify_version + "'")
print("Note: The only supported version is " +
UNCRUSTIFY_SUPPORTED_VERSION)
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--fix', action='store_true',
help=('modify source files to fix the code style '
'(default: print diff, do not modify files)'))
parser.add_argument('-s', '--since', metavar='COMMIT', const='development', nargs='?',
help=('only check files modified since the specified commit'
' (e.g. --since=HEAD~3 or --since=development). If no'
' commit is specified, default to development.'))
# --subset is almost useless: it only matters if there are no files
# ('code_style.py' without arguments checks all files known to Git,
# 'code_style.py --subset' does nothing). In particular,
# 'code_style.py --fix --subset ...' is intended as a stable ("porcelain")
# way to restyle a possibly empty set of files.
parser.add_argument('--subset', action='store_true',
help='only check the specified files (default with non-option arguments)')
parser.add_argument('operands', nargs='*', metavar='FILE',
help='files to check (files MUST be known to git, if none: check all)')
args = parser.parse_args()
covered = frozenset(get_src_files(args.since))
# We only check files that are known to git
if args.subset or args.operands:
src_files = [f for f in args.operands if f in covered]
skip_src_files = [f for f in args.operands if f not in covered]
if skip_src_files:
print_skip(skip_src_files)
else:
src_files = list(covered)
if args.fix:
# Fix mode
return fix_style(src_files)
else:
# Check mode
if check_style_is_correct(src_files):
print("Checked {} files, style ok.".format(len(src_files)))
return 0
else:
return 1
if __name__ == '__main__':
sys.exit(main())


@@ -26,6 +26,6 @@ python framework\scripts\generate_bignum_tests.py || exit /b 1
python framework\scripts\generate_config_tests.py || exit /b 1
python framework\scripts\generate_ecp_tests.py || exit /b 1
python framework\scripts\generate_psa_tests.py || exit /b 1
python framework\scripts\generate_test_keys.py --output framework\tests\src\test_keys.h || exit /b 1
python framework\scripts\generate_test_keys.py --output framework\tests\include\test\test_keys.h || exit /b 1
python framework\scripts\generate_test_cert_macros.py --output tests\src\test_certs.h || exit /b 1
python framework\scripts\generate_tls13_compat_tests.py || exit /b 1


@@ -53,7 +53,7 @@ endif
GENERATED_DATA_FILES += $(GENERATED_PSA_DATA_FILES)
GENERATED_FILES = $(GENERATED_DATA_FILES)
GENERATED_FILES += ../framework/tests/src/test_keys.h src/test_certs.h
GENERATED_FILES += ../framework/tests/include/test/test_keys.h src/test_certs.h
# Generated files needed to (fully) run ssl-opt.sh
.PHONY: ssl-opt
@@ -162,7 +162,7 @@ src/test_certs.h: ../framework/scripts/generate_test_cert_macros.py \
echo " Gen $@"
$(PYTHON) ../framework/scripts/generate_test_cert_macros.py --output $@
../framework/tests/src/test_keys.h: ../framework/scripts/generate_test_keys.py
../framework/tests/include/test/test_keys.h: ../framework/scripts/generate_test_keys.py
echo " Gen $@"
$(PYTHON) ../framework/scripts/generate_test_keys.py --output $@
@@ -173,7 +173,7 @@ ifdef RECORD_PSA_STATUS_COVERAGE_LOG
# therefore the wildcard enumeration above doesn't include it.
TEST_OBJS_DEPS += ../framework/tests/include/test/instrument_record_status.h
endif
TEST_OBJS_DEPS += src/test_certs.h ../framework/tests/src/test_keys.h
TEST_OBJS_DEPS += src/test_certs.h ../framework/tests/include/test/test_keys.h
# Rule to compile common test C files in framework
../framework/tests/src/%.o : ../framework/tests/src/%.c $(TEST_OBJS_DEPS)


@@ -111,7 +111,7 @@ check()
}
# Note: if the format of calls to the "check" function changes, update
# scripts/code_style.py accordingly. For generated C source files (*.h or *.c),
# framework/scripts/code_style.py accordingly. For generated C source files (*.h or *.c),
# the format must be "check SCRIPT FILENAME...". For other source files,
# any shell syntax is permitted (including e.g. command substitution).
@@ -126,7 +126,7 @@ check framework/scripts/generate_bignum_tests.py $(framework/scripts/generate_bi
check framework/scripts/generate_config_tests.py $(framework/scripts/generate_config_tests.py --list)
check framework/scripts/generate_ecp_tests.py $(framework/scripts/generate_ecp_tests.py --list)
check framework/scripts/generate_psa_tests.py $(framework/scripts/generate_psa_tests.py --list)
check framework/scripts/generate_test_keys.py framework/tests/src/test_keys.h
check framework/scripts/generate_test_keys.py framework/tests/include/test/test_keys.h
check scripts/generate_driver_wrappers.py $library_dir/psa_crypto_driver_wrappers.h $library_dir/psa_crypto_driver_wrappers_no_static.c
# Additional checks for Mbed TLS only


@@ -1,546 +0,0 @@
#!/usr/bin/env python3
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
"""
This script checks the current state of the source code for minor issues,
including incorrect file permissions, presence of tabs, non-Unix line endings,
trailing whitespace, and presence of UTF-8 BOM.
Note: requires Python 3; must be run from the Mbed TLS root.
"""
import argparse
import codecs
import inspect
import logging
import os
import re
import subprocess
import sys
try:
from typing import FrozenSet, Optional, Pattern # pylint: disable=unused-import
except ImportError:
pass
import scripts_path # pylint: disable=unused-import
from mbedtls_framework import build_tree
class FileIssueTracker:
"""Base class for file-wide issue tracking.
To implement a checker that processes a file as a whole, inherit from
this class and implement `check_file_for_issue` and define ``heading``.
``suffix_exemptions``: files whose name ends with a string in this set
will not be checked.
``path_exemptions``: files whose path (relative to the root of the source
tree) matches this regular expression will not be checked. This can be
``None`` to match no path. Paths are normalized and converted to ``/``
separators before matching.
``heading``: human-readable description of the issue
"""
suffix_exemptions = frozenset() #type: FrozenSet[str]
path_exemptions = None #type: Optional[Pattern[str]]
# heading must be defined in derived classes.
# pylint: disable=no-member
def __init__(self):
self.files_with_issues = {}
@staticmethod
def normalize_path(filepath):
"""Normalize ``filepath`` with / as the directory separator."""
filepath = os.path.normpath(filepath)
# On Windows, we may have backslashes to separate directories.
# We need slashes to match exemption lists.
seps = os.path.sep
if os.path.altsep is not None:
seps += os.path.altsep
return '/'.join(filepath.split(seps))
def should_check_file(self, filepath):
"""Whether the given file name should be checked.
Files whose name ends with a string listed in ``self.suffix_exemptions``
or whose path matches ``self.path_exemptions`` will not be checked.
"""
for files_exemption in self.suffix_exemptions:
if filepath.endswith(files_exemption):
return False
if self.path_exemptions and \
re.match(self.path_exemptions, self.normalize_path(filepath)):
return False
return True
def check_file_for_issue(self, filepath):
"""Check the specified file for the issue that this class is for.
Subclasses must implement this method.
"""
raise NotImplementedError
def record_issue(self, filepath, line_number):
"""Record that an issue was found at the specified location."""
if filepath not in self.files_with_issues.keys():
self.files_with_issues[filepath] = []
self.files_with_issues[filepath].append(line_number)
def output_file_issues(self, logger):
"""Log all the locations where the issue was found."""
if self.files_with_issues.values():
logger.info(self.heading)
for filename, lines in sorted(self.files_with_issues.items()):
if lines:
logger.info("{}: {}".format(
filename, ", ".join(str(x) for x in lines)
))
else:
logger.info(filename)
logger.info("")
BINARY_FILE_PATH_RE_LIST = [
r'docs/.*\.pdf\Z',
r'docs/.*\.png\Z',
r'programs/fuzz/corpuses/[^.]+\Z',
r'framework/data_files/[^.]+\Z',
r'framework/data_files/.*\.(crt|csr|db|der|key|pubkey)\Z',
r'framework/data_files/.*\.req\.[^/]+\Z',
r'framework/data_files/.*malformed[^/]+\Z',
r'framework/data_files/format_pkcs12\.fmt\Z',
r'framework/data_files/.*\.bin\Z',
]
BINARY_FILE_PATH_RE = re.compile('|'.join(BINARY_FILE_PATH_RE_LIST))
class LineIssueTracker(FileIssueTracker):
"""Base class for line-by-line issue tracking.
To implement a checker that processes files line by line, inherit from
this class and implement `line_with_issue`.
"""
# Exclude binary files.
path_exemptions = BINARY_FILE_PATH_RE
def issue_with_line(self, line, filepath, line_number):
"""Check the specified line for the issue that this class is for.
Subclasses must implement this method.
"""
raise NotImplementedError
def check_file_line(self, filepath, line, line_number):
if self.issue_with_line(line, filepath, line_number):
self.record_issue(filepath, line_number)
def check_file_for_issue(self, filepath):
"""Check the lines of the specified file.
Subclasses must implement the ``issue_with_line`` method.
"""
with open(filepath, "rb") as f:
for i, line in enumerate(iter(f.readline, b"")):
self.check_file_line(filepath, line, i + 1)
def is_windows_file(filepath):
_root, ext = os.path.splitext(filepath)
return ext in ('.bat', '.dsp', '.dsw', '.sln', '.vcxproj')
class ShebangIssueTracker(FileIssueTracker):
"""Track files with a bad, missing or extraneous shebang line.
Executable scripts must start with a valid shebang (#!) line.
"""
heading = "Invalid shebang line:"
# Allow either /bin/sh, /bin/bash, or /usr/bin/env.
# Allow at most one argument (this is a Linux limitation).
# For sh and bash, the argument if present must be options.
# For env, the argument must be the base name of the interpreter.
_shebang_re = re.compile(rb'^#! ?(?:/bin/(bash|sh)(?: -[^\n ]*)?'
rb'|/usr/bin/env ([^\n /]+))$')
_extensions = {
b'bash': 'sh',
b'perl': 'pl',
b'python3': 'py',
b'sh': 'sh',
}
path_exemptions = re.compile(r'framework/scripts/quiet/.*')
def is_valid_shebang(self, first_line, filepath):
m = re.match(self._shebang_re, first_line)
if not m:
return False
interpreter = m.group(1) or m.group(2)
if interpreter not in self._extensions:
return False
if not filepath.endswith('.' + self._extensions[interpreter]):
return False
return True
def check_file_for_issue(self, filepath):
is_executable = os.access(filepath, os.X_OK)
with open(filepath, "rb") as f:
first_line = f.readline()
if first_line.startswith(b'#!'):
if not is_executable:
# Shebang on a non-executable file
self.files_with_issues[filepath] = None
elif not self.is_valid_shebang(first_line, filepath):
self.files_with_issues[filepath] = [1]
elif is_executable:
# Executable without a shebang
self.files_with_issues[filepath] = None
class EndOfFileNewlineIssueTracker(FileIssueTracker):
"""Track files that end with an incomplete line
(no newline character at the end of the last line)."""
heading = "Missing newline at end of file:"
path_exemptions = BINARY_FILE_PATH_RE
def check_file_for_issue(self, filepath):
with open(filepath, "rb") as f:
try:
f.seek(-1, 2)
except OSError:
# This script only works on regular files. If we can't seek
# 1 before the end, it means that this position is before
# the beginning of the file, i.e. that the file is empty.
return
if f.read(1) != b"\n":
self.files_with_issues[filepath] = None
class Utf8BomIssueTracker(FileIssueTracker):
"""Track files that start with a UTF-8 BOM.
Files should be ASCII or UTF-8. Valid UTF-8 does not start with a BOM."""
heading = "UTF-8 BOM present:"
suffix_exemptions = frozenset([".vcxproj", ".sln"])
path_exemptions = BINARY_FILE_PATH_RE
def check_file_for_issue(self, filepath):
with open(filepath, "rb") as f:
if f.read().startswith(codecs.BOM_UTF8):
self.files_with_issues[filepath] = None
class UnicodeIssueTracker(LineIssueTracker):
"""Track lines with invalid characters or invalid text encoding."""
heading = "Invalid UTF-8 or forbidden character:"
# Only allow valid UTF-8, and only other explicitly allowed characters.
# We deliberately exclude all characters that aren't a simple non-blank,
# non-zero-width glyph, apart from a very small set (tab, ordinary space,
# line breaks, "basic" no-break space and soft hyphen). In particular,
# non-ASCII control characters, combining characters, and Unicode state
# changes (e.g. right-to-left text) are forbidden.
# Note that we do allow some characters with a risk of visual confusion,
# for example '-' (U+002D HYPHEN-MINUS) vs '­' (U+00AD SOFT HYPHEN) vs
# '‐' (U+2010 HYPHEN), or 'A' (U+0041 LATIN CAPITAL LETTER A) vs
# 'Α' (U+0391 GREEK CAPITAL LETTER ALPHA).
GOOD_CHARACTERS = ''.join([
'\t\n\r -~', # ASCII (tabs and line endings are checked separately)
'\u00A0-\u00FF', # Latin-1 Supplement (for NO-BREAK SPACE and punctuation)
'\u2010-\u2027\u2030-\u205E', # General Punctuation (printable)
'\u2070\u2071\u2074-\u208E\u2090-\u209C', # Superscripts and Subscripts
'\u2190-\u21FF', # Arrows
'\u2200-\u22FF', # Mathematical Symbols
'\u2500-\u257F' # Box Drawings characters used in markdown trees
])
# Allow any of the characters and ranges above, and anything classified
# as a word constituent.
GOOD_CHARACTERS_RE = re.compile(r'[\w{}]+\Z'.format(GOOD_CHARACTERS))
def issue_with_line(self, line, _filepath, line_number):
try:
text = line.decode('utf-8')
except UnicodeDecodeError:
return True
if line_number == 1 and text.startswith('\uFEFF'):
# Strip BOM (U+FEFF ZERO WIDTH NO-BREAK SPACE) at the beginning.
# Which files are allowed to have a BOM is handled in
# Utf8BomIssueTracker.
text = text[1:]
return not self.GOOD_CHARACTERS_RE.match(text)
class UnixLineEndingIssueTracker(LineIssueTracker):
"""Track files with non-Unix line endings (i.e. files with CR)."""
heading = "Non-Unix line endings:"
def should_check_file(self, filepath):
if not super().should_check_file(filepath):
return False
return not is_windows_file(filepath)
def issue_with_line(self, line, _filepath, _line_number):
return b"\r" in line
class WindowsLineEndingIssueTracker(LineIssueTracker):
"""Track files with non-Windows line endings (i.e. CR or LF not in CRLF)."""
heading = "Non-Windows line endings:"
def should_check_file(self, filepath):
if not super().should_check_file(filepath):
return False
return is_windows_file(filepath)
def issue_with_line(self, line, _filepath, _line_number):
return not line.endswith(b"\r\n") or b"\r" in line[:-2]
class TrailingWhitespaceIssueTracker(LineIssueTracker):
"""Track lines with trailing whitespace."""
heading = "Trailing whitespace:"
suffix_exemptions = frozenset([".dsp", ".md"])
def issue_with_line(self, line, _filepath, _line_number):
return line.rstrip(b"\r\n") != line.rstrip()
class TabIssueTracker(LineIssueTracker):
"""Track lines with tabs."""
heading = "Tabs present:"
suffix_exemptions = frozenset([
".make",
".pem", # some openssl dumps have tabs
".sln",
"/.gitmodules",
"/Makefile",
"/Makefile.inc",
"/generate_visualc_files.pl",
])
def issue_with_line(self, line, _filepath, _line_number):
return b"\t" in line
class MergeArtifactIssueTracker(LineIssueTracker):
"""Track lines with merge artifacts.
These are leftovers from a ``git merge`` that wasn't fully edited."""
heading = "Merge artifact:"
def issue_with_line(self, line, _filepath, _line_number):
# Detect leftover git conflict markers.
if line.startswith(b'<<<<<<< ') or line.startswith(b'>>>>>>> '):
return True
if line.startswith(b'||||||| '): # from merge.conflictStyle=diff3
return True
if line.rstrip(b'\r\n') == b'=======' and \
not _filepath.endswith('.md'):
return True
return False
def this_location():
frame = inspect.currentframe()
assert frame is not None
info = inspect.getframeinfo(frame)
return os.path.basename(info.filename), info.lineno
THIS_FILE_BASE_NAME, LINE_NUMBER_BEFORE_LICENSE_ISSUE_TRACKER = this_location()
class LicenseIssueTracker(LineIssueTracker):
"""Check copyright statements and license indications.
This class only checks that statements are correct if present. It does
not enforce the presence of statements in each file.
"""
heading = "License issue:"
LICENSE_EXEMPTION_RE_LIST = [
# Third-party code, other than whitelisted third-party modules,
# may be under a different license.
r'3rdparty/(?!(p256-m)/.*)',
# Documentation explaining the license may have accidental
# false positives.
r'(ChangeLog|LICENSE|framework\/LICENSE|[-0-9A-Z_a-z]+\.md)\Z',
# Files imported from TF-M, and not used except in test builds,
# may be under a different license.
r'configs/ext/crypto_config_profile_medium\.h\Z',
r'configs/ext/tfm_mbedcrypto_config_profile_medium\.h\Z',
r'configs/ext/README\.md\Z',
# Third-party file.
r'dco\.txt\Z',
r'framework\/dco\.txt\Z',
]
path_exemptions = re.compile('|'.join(BINARY_FILE_PATH_RE_LIST +
LICENSE_EXEMPTION_RE_LIST))
COPYRIGHT_HOLDER = rb'The Mbed TLS Contributors'
# Catch "Copyright foo", "Copyright (C) foo", "Copyright © foo", etc.
COPYRIGHT_RE = re.compile(rb'.*\bcopyright\s+((?:\w|\s|[()]|[^ -~])*\w)', re.I)
SPDX_HEADER_KEY = b'SPDX-License-Identifier'
LICENSE_IDENTIFIER = b'Apache-2.0 OR GPL-2.0-or-later'
SPDX_RE = re.compile(br'.*?(' +
re.escape(SPDX_HEADER_KEY) +
br')(:\s*(.*?)\W*\Z|.*)', re.I)
LICENSE_MENTION_RE = re.compile(rb'.*(?:' + rb'|'.join([
rb'Apache License',
rb'General Public License',
]) + rb')', re.I)
def __init__(self):
super().__init__()
# Record what problem was caused. We can't easily report it due to
# the structure of the script. To be fixed after
# https://github.com/Mbed-TLS/mbedtls/pull/2506
self.problem = None
def issue_with_line(self, line, filepath, line_number):
#pylint: disable=too-many-return-statements
# Use endswith() rather than the more correct os.path.basename()
# because experimentally, it makes a significant difference to
# the running time.
if filepath.endswith(THIS_FILE_BASE_NAME) and \
line_number > LINE_NUMBER_BEFORE_LICENSE_ISSUE_TRACKER:
# Avoid false positives from the code in this class.
# Also skip the rest of this file, which is highly unlikely to
# contain any problematic statements since we put those near the
# top of files.
return False
m = self.COPYRIGHT_RE.match(line)
if m and m.group(1) != self.COPYRIGHT_HOLDER:
self.problem = 'Invalid copyright line'
return True
m = self.SPDX_RE.match(line)
if m:
if m.group(1) != self.SPDX_HEADER_KEY:
self.problem = 'Misspelled ' + self.SPDX_HEADER_KEY.decode()
return True
if not m.group(3):
self.problem = 'Improperly formatted SPDX license identifier'
return True
if m.group(3) != self.LICENSE_IDENTIFIER:
self.problem = 'Wrong SPDX license identifier'
return True
m = self.LICENSE_MENTION_RE.match(line)
if m:
self.problem = 'Suspicious license mention'
return True
return False
class IntegrityChecker:
"""Sanity-check files under the current directory."""
def __init__(self, log_file):
"""Instantiate the sanity checker.
Check files under the current directory.
Write a report of issues to log_file."""
build_tree.check_repo_path()
self.logger = None
self.setup_logger(log_file)
self.issues_to_check = [
ShebangIssueTracker(),
EndOfFileNewlineIssueTracker(),
Utf8BomIssueTracker(),
UnicodeIssueTracker(),
UnixLineEndingIssueTracker(),
WindowsLineEndingIssueTracker(),
TrailingWhitespaceIssueTracker(),
TabIssueTracker(),
MergeArtifactIssueTracker(),
LicenseIssueTracker(),
]
def setup_logger(self, log_file, level=logging.INFO):
"""Log to log_file if provided, or to stderr if None."""
self.logger = logging.getLogger()
self.logger.setLevel(level)
if log_file:
handler = logging.FileHandler(log_file)
self.logger.addHandler(handler)
else:
console = logging.StreamHandler()
self.logger.addHandler(console)
@staticmethod
def collect_files():
"""Return the list of files to check.
These are the regular files committed into Git.
"""
bytes_output = subprocess.check_output(['git', '-C', 'framework',
'ls-files', '-z'])
bytes_framework_filepaths = bytes_output.split(b'\0')[:-1]
bytes_framework_filepaths = ["framework/".encode() + filepath
for filepath in bytes_framework_filepaths]
bytes_output = subprocess.check_output(['git', 'ls-files', '-z'])
bytes_filepaths = bytes_output.split(b'\0')[:-1] + \
bytes_framework_filepaths
ascii_filepaths = map(lambda fp: fp.decode('ascii'), bytes_filepaths)
# Filter out directories. Normally Git doesn't list directories
# (it only knows about the files inside them), but there is
# at least one case where 'git ls-files' includes a directory:
# submodules. Just skip submodules (and any other directories).
ascii_filepaths = [fp for fp in ascii_filepaths
if os.path.isfile(fp)]
# Prepend './' to files in the top-level directory so that
# something like `'/Makefile' in fp` matches in the top-level
# directory as well as in subdirectories.
return [fp if os.path.dirname(fp) else os.path.join(os.curdir, fp)
for fp in ascii_filepaths]
def check_files(self):
"""Check all files for all issues."""
for issue_to_check in self.issues_to_check:
for filepath in self.collect_files():
if issue_to_check.should_check_file(filepath):
issue_to_check.check_file_for_issue(filepath)
def output_issues(self):
"""Log the issues found and their locations.
Return 1 if there were issues, 0 otherwise.
"""
integrity_return_code = 0
for issue_to_check in self.issues_to_check:
if issue_to_check.files_with_issues:
integrity_return_code = 1
issue_to_check.output_file_issues(self.logger)
return integrity_return_code
def run_main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"-l", "--log_file", type=str, help="path to optional output log",
)
check_args = parser.parse_args()
integrity_check = IntegrityChecker(check_args.log_file)
integrity_check.check_files()
return_code = integrity_check.output_issues()
sys.exit(return_code)
if __name__ == "__main__":
run_main()


@@ -41,7 +41,7 @@ component_check_doxy_blocks () {
component_check_files () {
msg "Check: file sanity checks (permissions, encodings)" # < 1s
tests/scripts/check_files.py
framework/scripts/check_files.py
}
component_check_changelog () {
@@ -137,7 +137,7 @@ component_check_doxygen_warnings () {
component_check_code_style () {
msg "Check C code style"
./scripts/code_style.py
./framework/scripts/code_style.py
}
support_check_code_style () {


@@ -181,7 +181,7 @@
#define MBEDTLS_MD_ALG_FOR_TEST MBEDTLS_MD_SHA512
#endif
#include <../src/test_keys.h>
#include <test/test_keys.h>
/* Define an RSA key size that we know is present in the predefined_key[] array. */
#define RSA_KEY_SIZE 1024
@@ -243,7 +243,7 @@ static psa_status_t pk_psa_import_key(const unsigned char *key_data, size_t key_
/** Setup the provided PK context.
*
* Predefined keys used for the setup are taken from "test/src/test_keys.h"
* Predefined keys used for the setup are taken from <test/test_keys.h>
* which is automatically generated using "framework/scripts/generate_test_keys.py".
*
* \param pk The PK object to fill. It must have been initialized


@@ -156,10 +156,6 @@ PSA import/export RSA keypair: export buffer too small, opaque
depends_on:PSA_WANT_ALG_RSA_PKCS1V15_SIGN:PSA_WANT_KEY_TYPE_RSA_KEY_PAIR_BASIC:PSA_WANT_KEY_TYPE_RSA_KEY_PAIR_IMPORT:PSA_WANT_KEY_TYPE_RSA_KEY_PAIR_EXPORT:PSA_CRYPTO_DRIVER_TEST
import_export:"3082025e02010002818100af057d396ee84fb75fdbb5c2b13c7fe5a654aa8aa2470b541ee1feb0b12d25c79711531249e1129628042dbbb6c120d1443524ef4c0e6e1d8956eeb2077af12349ddeee54483bc06c2c61948cd02b202e796aebd94d3a7cbf859c2c1819c324cb82b9cd34ede263a2abffe4733f077869e8660f7d6834da53d690ef7985f6bc3020301000102818100874bf0ffc2f2a71d14671ddd0171c954d7fdbf50281e4f6d99ea0e1ebcf82faa58e7b595ffb293d1abe17f110b37c48cc0f36c37e84d876621d327f64bbe08457d3ec4098ba2fa0a319fba411c2841ed7be83196a8cdf9daa5d00694bc335fc4c32217fe0488bce9cb7202e59468b1ead119000477db2ca797fac19eda3f58c1024100e2ab760841bb9d30a81d222de1eb7381d82214407f1b975cbbfe4e1a9467fd98adbd78f607836ca5be1928b9d160d97fd45c12d6b52e2c9871a174c66b488113024100c5ab27602159ae7d6f20c3c2ee851e46dc112e689e28d5fcbbf990a99ef8a90b8bb44fd36467e7fc1789ceb663abda338652c3c73f111774902e840565927091024100b6cdbd354f7df579a63b48b3643e353b84898777b48b15f94e0bfc0567a6ae5911d57ad6409cf7647bf96264e9bd87eb95e263b7110b9a1f9f94acced0fafa4d024071195eec37e8d257decfc672b07ae639f10cbb9b0c739d0c809968d644a94e3fd6ed9287077a14583f379058f76a8aecd43c62dc8c0f41766650d725275ac4a1024100bb32d133edc2e048d463388b7be9cb4be29f4b6250be603e70e3647501c97ddde20a4e71be95fd5e71784e25aca4baf25be5738aae59bbfe1c997781447a2b24":PSA_KEY_TYPE_RSA_KEY_PAIR:PSA_KEY_USAGE_EXPORT:PSA_ALG_RSA_PKCS1V15_SIGN_RAW:PSA_KEY_LIFETIME_FROM_PERSISTENCE_AND_LOCATION( PSA_KEY_PERSISTENCE_VOLATILE, TEST_DRIVER_LOCATION ):1024:-1:PSA_ERROR_BUFFER_TOO_SMALL:1
PSA import/export RSA keypair: trailing garbage rejected, opaque
depends_on:PSA_WANT_ALG_RSA_PKCS1V15_SIGN:PSA_WANT_KEY_TYPE_RSA_KEY_PAIR_BASIC:PSA_WANT_KEY_TYPE_RSA_KEY_PAIR_IMPORT:PSA_WANT_KEY_TYPE_RSA_KEY_PAIR_EXPORT:PSA_CRYPTO_DRIVER_TEST
import_with_data:"3082025e02010002818100af057d396ee84fb75fdbb5c2b13c7fe5a654aa8aa2470b541ee1feb0b12d25c79711531249e1129628042dbbb6c120d1443524ef4c0e6e1d8956eeb2077af12349ddeee54483bc06c2c61948cd02b202e796aebd94d3a7cbf859c2c1819c324cb82b9cd34ede263a2abffe4733f077869e8660f7d6834da53d690ef7985f6bc3020301000102818100874bf0ffc2f2a71d14671ddd0171c954d7fdbf50281e4f6d99ea0e1ebcf82faa58e7b595ffb293d1abe17f110b37c48cc0f36c37e84d876621d327f64bbe08457d3ec4098ba2fa0a319fba411c2841ed7be83196a8cdf9daa5d00694bc335fc4c32217fe0488bce9cb7202e59468b1ead119000477db2ca797fac19eda3f58c1024100e2ab760841bb9d30a81d222de1eb7381d82214407f1b975cbbfe4e1a9467fd98adbd78f607836ca5be1928b9d160d97fd45c12d6b52e2c9871a174c66b488113024100c5ab27602159ae7d6f20c3c2ee851e46dc112e689e28d5fcbbf990a99ef8a90b8bb44fd36467e7fc1789ceb663abda338652c3c73f111774902e840565927091024100b6cdbd354f7df579a63b48b3643e353b84898777b48b15f94e0bfc0567a6ae5911d57ad6409cf7647bf96264e9bd87eb95e263b7110b9a1f9f94acced0fafa4d024071195eec37e8d257decfc672b07ae639f10cbb9b0c739d0c809968d644a94e3fd6ed9287077a14583f379058f76a8aecd43c62dc8c0f41766650d725275ac4a1024100bb32d133edc2e048d463388b7be9cb4be29f4b6250be603e70e3647501c97ddde20a4e71be95fd5e71784e25aca4baf25be5738aae59bbfe1c997781447a2b2400":PSA_KEY_TYPE_RSA_KEY_PAIR:1024:PSA_ERROR_INVALID_ARGUMENT
PSA import RSA keypair: truncated
depends_on:PSA_WANT_KEY_TYPE_RSA_KEY_PAIR_BASIC:PSA_WANT_KEY_TYPE_RSA_KEY_PAIR_IMPORT
import_with_data:"3082025e02010002818100af057d396ee84fb75fdbb5c2b13c7fe5a654aa8aa2470b541ee1feb0b12d25c79711531249e1129628042dbbb6c120d1443524ef4c0e6e1d8956eeb2077af12349ddeee54483bc06c2c61948cd02b202e796aebd94d3a7cbf859c2c1819c324cb82b9cd34ede263a2abffe4733f077869e8660f7d6834da53d690ef7985f6bc3020301000102818100874bf0ffc2f2a71d14671ddd0171c954d7fdbf50281e4f6d99ea0e1ebcf82faa58e7b595ffb293d1abe17f110b37c48cc0f36c37e84d876621d327f64bbe08457d3ec4098ba2fa0a319fba411c2841ed7be83196a8cdf9daa5d00694bc335fc4c32217fe0488bce9cb7202e59468b1ead119000477db2ca797fac19eda3f58c1024100e2ab760841bb9d30a81d222de1eb7381d82214407f1b975cbbfe4e1a9467fd98adbd78f607836ca5be1928b9d160d97fd45c12d6b52e2c9871a174c66b488113024100c5ab27602159ae7d6f20c3c2ee851e46dc112e689e28d5fcbbf990a99ef8a90b8bb44fd36467e7fc1789ceb663abda338652c3c73f111774902e840565927091024100b6cdbd354f7df579a63b48b3643e353b84898777b48b15f94e0bfc0567a6ae5911d57ad6409cf7647bf96264e9bd87eb95e263b7110b9a1f9f94acced0fafa4d024071195eec37e8d257decfc672b07ae639f10cbb9b0c739d0c809968d644a94e3fd6ed9287077a14583f379058f76a8aecd43c62dc8c0f41766650d725275ac4a1024100bb32d133edc2e048d463388b7be9cb4be29f4b6250be603e70e3647501c97ddde20a4e71be95fd5e71784e25aca4baf25be5738aae59bbfe1c997781447a2b":PSA_KEY_TYPE_RSA_KEY_PAIR:0:PSA_ERROR_INVALID_ARGUMENT
@@ -422,7 +418,7 @@ import_export:"372c9778f69f726cbca3f4a268f16b4d617d10280d79a6a029cd51879fe101293
PSA import/export-public EC brainpool512r1: good, opaque
depends_on:PSA_WANT_ALG_ECDSA:PSA_WANT_KEY_TYPE_ECC_KEY_PAIR_BASIC:PSA_WANT_KEY_TYPE_ECC_KEY_PAIR_IMPORT:PSA_WANT_KEY_TYPE_ECC_KEY_PAIR_EXPORT:PSA_WANT_ECC_BRAINPOOL_P_R1_512:PSA_CRYPTO_DRIVER_TEST
import_export_public_key:"372c9778f69f726cbca3f4a268f16b4d617d10280d79a6a029cd51879fe1012934dfe5395455337df6906dc7d6d2eea4dbb2065c0228f73b3ed716480e7d71d2":PSA_KEY_TYPE_ECC_KEY_PAIR(PSA_ECC_FAMILY_BRAINPOOL_P_R1):PSA_ALG_ECDSA_ANY:0:0:PSA_SUCCESS:"0438b7ec92b61c5c6c7fbc28a4ec759d48fcd4e2e374defd5c4968a54dbef7510e517886fbfc38ea39aa529359d70a7156c35d3cbac7ce776bdb251dd64bce71234424ee7049eed072f0dbc4d79996e175d557e263763ae97095c081e73e7db2e38adc3d4c9a0487b1ede876dc1fca61c902e9a1d8722b8612928f18a24845591a"
import_export_public_key:"372c9778f69f726cbca3f4a268f16b4d617d10280d79a6a029cd51879fe1012934dfe5395455337df6906dc7d6d2eea4dbb2065c0228f73b3ed716480e7d71d2":PSA_KEY_TYPE_ECC_KEY_PAIR(PSA_ECC_FAMILY_BRAINPOOL_P_R1):PSA_ALG_ECDSA_ANY:PSA_KEY_LIFETIME_FROM_PERSISTENCE_AND_LOCATION( PSA_KEY_PERSISTENCE_VOLATILE, TEST_DRIVER_LOCATION ):0:PSA_SUCCESS:"0438b7ec92b61c5c6c7fbc28a4ec759d48fcd4e2e374defd5c4968a54dbef7510e517886fbfc38ea39aa529359d70a7156c35d3cbac7ce776bdb251dd64bce71234424ee7049eed072f0dbc4d79996e175d557e263763ae97095c081e73e7db2e38adc3d4c9a0487b1ede876dc1fca61c902e9a1d8722b8612928f18a24845591a"
PSA import/export EC curve25519 key pair: good (already properly masked), opaque
depends_on:PSA_WANT_ALG_ECDH:PSA_WANT_KEY_TYPE_ECC_KEY_PAIR_BASIC:PSA_WANT_KEY_TYPE_ECC_KEY_PAIR_IMPORT:PSA_WANT_KEY_TYPE_ECC_KEY_PAIR_EXPORT:PSA_WANT_ECC_MONTGOMERY_255:PSA_CRYPTO_DRIVER_TEST