
Merge remote-tracking branch 'upstream/development' into fix_vcxproj

Signed-off-by: irwir <irwir@users.noreply.github.com>
Author: irwir <irwir@users.noreply.github.com>
Date:   2020-04-19 16:29:20 +03:00
537 changed files with 181513 additions and 8539 deletions

View File

@@ -19,7 +19,7 @@ fi
CONFIG_BAK=${CONFIG_H}.bak
cp -p $CONFIG_H $CONFIG_BAK
scripts/config.pl realfull
scripts/config.py realfull
make apidoc
mv $CONFIG_BAK $CONFIG_H

scripts/assemble_changelog.py (new executable file, 505 lines added)
View File

@@ -0,0 +1,505 @@
#!/usr/bin/env python3
"""Assemble Mbed TLS change log entries into the change log file.
Add changelog entries to the first level-2 section.
Create a new level-2 section for unreleased changes if needed.
Remove the input files unless --keep-entries is specified.
In each level-3 section, entries are sorted in chronological order
(oldest first). From oldest to newest:
* Merged entry files are sorted according to their merge date (date of
the merge commit that brought the commit that created the file into
the target branch).
* Committed but unmerged entry files are sorted according to the date
of the commit that adds them.
* Uncommitted entry files are sorted according to their modification time.
You must run this program from within a git working directory.
"""
# Copyright (C) 2019, Arm Limited, All Rights Reserved
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file is part of Mbed TLS (https://tls.mbed.org)
import argparse
from collections import OrderedDict, namedtuple
import datetime
import functools
import glob
import os
import re
import subprocess
import sys
class InputFormatError(Exception):
def __init__(self, filename, line_number, message, *args, **kwargs):
message = '{}:{}: {}'.format(filename, line_number,
message.format(*args, **kwargs))
super().__init__(message)
class CategoryParseError(Exception):
def __init__(self, line_offset, error_message):
self.line_offset = line_offset
self.error_message = error_message
super().__init__('{}: {}'.format(line_offset, error_message))
class LostContent(Exception):
def __init__(self, filename, line):
message = ('Lost content from {}: "{}"'.format(filename, line))
super().__init__(message)
# The category names we use in the changelog.
# If you edit this, update ChangeLog.d/README.md.
STANDARD_CATEGORIES = (
b'API changes',
b'Default behavior changes',
b'Requirement changes',
b'New deprecations',
b'Removals',
b'Features',
b'Security',
b'Bugfix',
b'Changes',
)
CategoryContent = namedtuple('CategoryContent', [
'name', 'title_line', # Title text and line number of the title
'body', 'body_line', # Body text and starting line number of the body
])
class ChangelogFormat:
"""Virtual class documenting how to write a changelog format class."""
@classmethod
def extract_top_version(cls, changelog_file_content):
"""Split out the top version section.
If the top version is already released, create a new top
version section for an unreleased version.
Return ``(header, top_version_title, top_version_body, trailer)``
where the "top version" is the existing top version section if it's
for unreleased changes, and a newly created section otherwise.
To assemble the changelog after modifying top_version_body,
concatenate the four pieces.
"""
raise NotImplementedError
@classmethod
def version_title_text(cls, version_title):
"""Return the text of a formatted version section title."""
raise NotImplementedError
@classmethod
def split_categories(cls, version_body):
"""Split a changelog version section body into categories.
Return a list of `CategoryContent`, where the name is the category
title without any formatting.
"""
raise NotImplementedError
@classmethod
def format_category(cls, title, body):
"""Construct the text of a category section from its title and body."""
raise NotImplementedError
class TextChangelogFormat(ChangelogFormat):
"""The traditional Mbed TLS changelog format."""
_unreleased_version_text = b'= mbed TLS x.x.x branch released xxxx-xx-xx'
@classmethod
def is_released_version(cls, title):
# Look for an incomplete release date
return not re.search(br'[0-9x]{4}-[0-9x]{2}-[0-9x]?x', title)
_top_version_re = re.compile(br'(?:\A|\n)(=[^\n]*\n+)(.*?\n)(?:=|$)',
re.DOTALL)
@classmethod
def extract_top_version(cls, changelog_file_content):
"""A version section starts with a line starting with '='."""
m = re.search(cls._top_version_re, changelog_file_content)
top_version_start = m.start(1)
top_version_end = m.end(2)
top_version_title = m.group(1)
top_version_body = m.group(2)
if cls.is_released_version(top_version_title):
top_version_end = top_version_start
top_version_title = cls._unreleased_version_text + b'\n\n'
top_version_body = b''
return (changelog_file_content[:top_version_start],
top_version_title, top_version_body,
changelog_file_content[top_version_end:])
@classmethod
def version_title_text(cls, version_title):
return re.sub(br'\n.*', b'', version_title, flags=re.DOTALL)
_category_title_re = re.compile(br'(^\w.*)\n+', re.MULTILINE)
@classmethod
def split_categories(cls, version_body):
"""A category title is a line with the title in column 0."""
if not version_body:
return []
title_matches = list(re.finditer(cls._category_title_re, version_body))
if not title_matches or title_matches[0].start() != 0:
# There is junk before the first category.
raise CategoryParseError(0, 'Junk found where category expected')
title_starts = [m.start(1) for m in title_matches]
body_starts = [m.end(0) for m in title_matches]
body_ends = title_starts[1:] + [len(version_body)]
bodies = [version_body[body_start:body_end].rstrip(b'\n') + b'\n'
for (body_start, body_end) in zip(body_starts, body_ends)]
title_lines = [version_body[:pos].count(b'\n') for pos in title_starts]
body_lines = [version_body[:pos].count(b'\n') for pos in body_starts]
return [CategoryContent(title_match.group(1), title_line,
body, body_line)
for title_match, title_line, body, body_line
in zip(title_matches, title_lines, bodies, body_lines)]
@classmethod
def format_category(cls, title, body):
# `split_categories` ensures that each body ends with a newline.
# Make sure that there is additionally a blank line between categories.
if not body.endswith(b'\n\n'):
body += b'\n'
return title + b'\n' + body
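# Illustrative sketch of the layout this format describes (hypothetical
# content, not taken from the real ChangeLog): one '=' title line per version,
# category titles in column 0, and indented entries underneath.
#
#     = mbed TLS x.x.x branch released xxxx-xx-xx
#
#     Bugfix
#        * Fix a hypothetical bug. Fixes #9999.
#
#     = mbed TLS 1.2.3 branch released 2019-01-01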
class ChangeLog:
"""An Mbed TLS changelog.
A changelog file consists of some header text followed by one or
more version sections. The version sections are in reverse
chronological order. Each version section consists of a title and a body.
The body of a version section consists of zero or more category
subsections. Each category subsection consists of a title and a body.
A changelog entry file has the same format as the body of a version section.
A `ChangelogFormat` object defines the concrete syntax of the changelog.
Entry files must have the same format as the changelog file.
"""
# Only accept dotted version numbers (e.g. "3.1", not "3").
# Refuse ".x" in a version number where x is a letter: this indicates
# a version that is not yet released. Something like "3.1a" is accepted.
_version_number_re = re.compile(br'[0-9]+\.[0-9A-Za-z.]+')
_incomplete_version_number_re = re.compile(br'.*\.[A-Za-z]')
def add_categories_from_text(self, filename, line_offset,
text, allow_unknown_category):
"""Parse a version section or entry file."""
try:
categories = self.format.split_categories(text)
except CategoryParseError as e:
raise InputFormatError(filename, line_offset + e.line_offset,
e.error_message)
for category in categories:
if not allow_unknown_category and \
category.name not in self.categories:
raise InputFormatError(filename,
line_offset + category.title_line,
'Unknown category: "{}"',
category.name.decode('utf8'))
self.categories[category.name] += category.body
def __init__(self, input_stream, changelog_format):
"""Create a changelog object.
Populate the changelog object from the content of the file
input_stream.
"""
self.format = changelog_format
whole_file = input_stream.read()
(self.header,
self.top_version_title, top_version_body,
self.trailer) = self.format.extract_top_version(whole_file)
# Split the top version section into categories.
self.categories = OrderedDict()
for category in STANDARD_CATEGORIES:
self.categories[category] = b''
offset = (self.header + self.top_version_title).count(b'\n') + 1
self.add_categories_from_text(input_stream.name, offset,
top_version_body, True)
def add_file(self, input_stream):
"""Add changelog entries from a file.
"""
self.add_categories_from_text(input_stream.name, 1,
input_stream.read(), False)
def write(self, filename):
"""Write the changelog to the specified file.
"""
with open(filename, 'wb') as out:
out.write(self.header)
out.write(self.top_version_title)
for title, body in self.categories.items():
if not body:
continue
out.write(self.format.format_category(title, body))
out.write(self.trailer)
@functools.total_ordering
class EntryFileSortKey:
"""This classes defines an ordering on changelog entry files: older < newer.
* Merged entry files are sorted according to their merge date (date of
the merge commit that brought the commit that created the file into
the target branch).
* Committed but unmerged entry files are sorted according to the date
of the commit that adds them.
* Uncommitted entry files are sorted according to their modification time.
This class assumes that the file is in a git working directory with
the target branch checked out.
"""
# Categories of files. A lower number is considered older.
MERGED = 0
COMMITTED = 1
LOCAL = 2
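# Illustration (hypothetical file names and dates): the category is compared
# before the timestamp, so a merged entry sorts before a committed-but-unmerged
# one, and both sort before an uncommitted one, regardless of their dates:
#     (MERGED,    2019-06-01, 'ChangeLog.d/old-fix.txt')
#   < (COMMITTED, 2019-01-01, 'ChangeLog.d/newer-fix.txt')
#   < (LOCAL,     2019-03-01, 'ChangeLog.d/local-draft.txt')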
@staticmethod
def creation_hash(filename):
"""Return the git commit id at which the given file was created.
Return None if the file was never checked into git.
"""
hashes = subprocess.check_output(['git', 'log', '--format=%H',
'--follow',
'--', filename])
m = re.search(b'(.+)$', hashes)
if not m:
# The git output is empty. This means that the file was
# never checked in.
return None
# The last commit in the log is the oldest one, which is when the
# file was created.
return m.group(0)
@staticmethod
def list_merges(some_hash, target, *options):
"""List merge commits from some_hash to target.
Pass options to git to select which commits are included.
"""
text = subprocess.check_output(['git', 'rev-list',
'--merges', *options,
b'..'.join([some_hash, target])])
return text.rstrip(b'\n').split(b'\n')
@classmethod
def merge_hash(cls, some_hash):
"""Return the git commit id at which the given commit was merged.
Return None if the given commit was never merged.
"""
target = b'HEAD'
# List the merges from some_hash to the target in two ways.
# The ancestry list is the ones that are both descendants of
# some_hash and ancestors of the target.
ancestry = frozenset(cls.list_merges(some_hash, target,
'--ancestry-path'))
# The first_parents list only contains merges that are directly
# on the target branch. We want it in reverse order (oldest first).
first_parents = cls.list_merges(some_hash, target,
'--first-parent', '--reverse')
# Look for the oldest merge commit that's both on the direct path
# and directly on the target branch. That's the place where some_hash
# was merged on the target branch. See
# https://stackoverflow.com/questions/8475448/find-merge-commit-which-include-a-specific-commit
for commit in first_parents:
if commit in ancestry:
return commit
return None
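# Illustration (hypothetical history): if commit C created the entry file,
# was first merged into a side branch by merge M1, and that branch was merged
# into the target's first-parent chain by merge M2, then both M1 and M2 are on
# the ancestry path but only M2 is a first-parent merge, so M2 is returned.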
@staticmethod
def commit_timestamp(commit_id):
"""Return the timestamp of the given commit."""
text = subprocess.check_output(['git', 'show', '-s',
'--format=%ct',
commit_id])
return datetime.datetime.utcfromtimestamp(int(text))
@staticmethod
def file_timestamp(filename):
"""Return the modification timestamp of the given file."""
mtime = os.stat(filename).st_mtime
return datetime.datetime.fromtimestamp(mtime)
def __init__(self, filename):
"""Determine position of the file in the changelog entry order.
This constructor returns an object that can be used with comparison
operators, with `sort` and `sorted`, etc. Older entries are sorted
before newer entries.
"""
self.filename = filename
creation_hash = self.creation_hash(filename)
if not creation_hash:
self.category = self.LOCAL
self.datetime = self.file_timestamp(filename)
return
merge_hash = self.merge_hash(creation_hash)
if not merge_hash:
self.category = self.COMMITTED
self.datetime = self.commit_timestamp(creation_hash)
return
self.category = self.MERGED
self.datetime = self.commit_timestamp(merge_hash)
def sort_key(self):
""""Return a concrete sort key for this entry file sort key object.
``ts1 < ts2`` is implemented as ``ts1.sort_key() < ts2.sort_key()``.
"""
return (self.category, self.datetime, self.filename)
def __eq__(self, other):
return self.sort_key() == other.sort_key()
def __lt__(self, other):
return self.sort_key() < other.sort_key()
def check_output(generated_output_file, main_input_file, merged_files):
"""Make sanity checks on the generated output.
The intent of these sanity checks is to have reasonable confidence
that no content has been lost.
The sanity check is that every line that is present in an input file
is also present in an output file. This is not perfect but good enough
for now.
"""
generated_output = set(open(generated_output_file, 'rb'))
for line in open(main_input_file, 'rb'):
if line not in generated_output:
raise LostContent('original file', line)
for merged_file in merged_files:
for line in open(merged_file, 'rb'):
if line not in generated_output:
raise LostContent(merged_file, line)
def finish_output(changelog, output_file, input_file, merged_files):
"""Write the changelog to the output file.
The input file and the list of merged files are used only for sanity
checks on the output.
"""
if os.path.exists(output_file) and not os.path.isfile(output_file):
# The output is a non-regular file (e.g. pipe). Write to it directly.
output_temp = output_file
else:
# The output is a regular file. Write to a temporary file,
# then move it into place atomically.
output_temp = output_file + '.tmp'
changelog.write(output_temp)
check_output(output_temp, input_file, merged_files)
if output_temp != output_file:
os.rename(output_temp, output_file)
def remove_merged_entries(files_to_remove):
for filename in files_to_remove:
os.remove(filename)
def list_files_to_merge(options):
"""List the entry files to merge, oldest first.
"Oldest" is defined by `EntryFileSortKey`.
"""
files_to_merge = glob.glob(os.path.join(options.dir, '*.txt'))
files_to_merge.sort(key=EntryFileSortKey)
return files_to_merge
def merge_entries(options):
"""Merge changelog entries into the changelog file.
Read the changelog file from options.input.
Read entries to merge from the directory options.dir.
Write the new changelog to options.output.
Remove the merged entries if options.keep_entries is false.
"""
with open(options.input, 'rb') as input_file:
changelog = ChangeLog(input_file, TextChangelogFormat)
files_to_merge = list_files_to_merge(options)
if not files_to_merge:
sys.stderr.write('There are no pending changelog entries.\n')
return
for filename in files_to_merge:
with open(filename, 'rb') as input_file:
changelog.add_file(input_file)
finish_output(changelog, options.output, options.input, files_to_merge)
if not options.keep_entries:
remove_merged_entries(files_to_merge)
def show_file_timestamps(options):
"""List the files to merge and their timestamp.
This is only intended for debugging purposes.
"""
files = list_files_to_merge(options)
for filename in files:
ts = EntryFileSortKey(filename)
print(ts.category, ts.datetime, filename)
def set_defaults(options):
"""Add default values for missing options."""
output_file = getattr(options, 'output', None)
if output_file is None:
options.output = options.input
if getattr(options, 'keep_entries', None) is None:
options.keep_entries = (output_file is not None)
def main():
"""Command line entry point."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--dir', '-d', metavar='DIR',
default='ChangeLog.d',
help='Directory to read entries from'
' (default: ChangeLog.d)')
parser.add_argument('--input', '-i', metavar='FILE',
default='ChangeLog',
help='Existing changelog file to read from and augment'
' (default: ChangeLog)')
parser.add_argument('--keep-entries',
action='store_true', dest='keep_entries', default=None,
help='Keep the files containing entries'
' (default: remove them if --output/-o is not specified)')
parser.add_argument('--no-keep-entries',
action='store_false', dest='keep_entries',
help='Remove the files containing entries after they are merged'
' (default: remove them if --output/-o is not specified)')
parser.add_argument('--output', '-o', metavar='FILE',
help='Output changelog file'
' (default: overwrite the input)')
parser.add_argument('--list-files-only',
action='store_true',
help=('Only list the files that would be processed '
'(with some debugging information)'))
options = parser.parse_args()
set_defaults(options)
if options.list_files_only:
show_file_timestamps(options)
return
merge_entries(options)
if __name__ == '__main__':
main()
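# Illustrative invocations (hypothetical, run from a git working directory
# at the root of the source tree; all options shown are defined above):
#     python3 scripts/assemble_changelog.py                    # merge ChangeLog.d/*.txt into ChangeLog
#     python3 scripts/assemble_changelog.py --list-files-only  # show what would be merged, oldest first
#     python3 scripts/assemble_changelog.py --keep-entries -o ChangeLog.preview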

View File

@@ -132,7 +132,7 @@ done
[ $VERBOSE ] && echo "Re-generating library/error.c"
scripts/generate_errors.pl
[ $VERBOSE ] && echo "Re-generating programs/ssl/query_config.c"
[ $VERBOSE ] && echo "Re-generating programs/test/query_config.c"
scripts/generate_query_config.pl
[ $VERBOSE ] && echo "Re-generating library/version_features.c"

View File

@@ -1,315 +1,27 @@
#!/usr/bin/env perl
#
# This file is part of mbed TLS (https://tls.mbed.org)
#
# Copyright (c) 2014-2016, ARM Limited, All Rights Reserved
#
# Purpose
#
# Comments and uncomments #define lines in the given header file and optionally
# sets their value or can get the value. This is to provide scripting control of
# what preprocessor symbols, and therefore what build time configuration flags
# are set in the 'config.h' file.
#
# Usage: config.pl [-f <file> | --file <file>] [-o | --force]
# [set <symbol> <value> | unset <symbol> | get <symbol> |
# full | realfull]
#
# Full usage description provided below.
#
# The following options are disabled instead of enabled with "full".
#
# MBEDTLS_TEST_NULL_ENTROPY
# MBEDTLS_DEPRECATED_REMOVED
# MBEDTLS_HAVE_SSE2
# MBEDTLS_PLATFORM_NO_STD_FUNCTIONS
# MBEDTLS_ECP_DP_M221_ENABLED
# MBEDTLS_ECP_DP_M383_ENABLED
# MBEDTLS_ECP_DP_M511_ENABLED
# MBEDTLS_MEMORY_BACKTRACE
# MBEDTLS_MEMORY_BUFFER_ALLOC_C
# MBEDTLS_NO_DEFAULT_ENTROPY_SOURCES
# MBEDTLS_NO_PLATFORM_ENTROPY
# MBEDTLS_REMOVE_ARC4_CIPHERSUITES
# MBEDTLS_REMOVE_3DES_CIPHERSUITES
# MBEDTLS_SSL_HW_RECORD_ACCEL
# MBEDTLS_RSA_NO_CRT
# MBEDTLS_X509_ALLOW_EXTENSIONS_NON_V3
# MBEDTLS_X509_ALLOW_UNSUPPORTED_CRITICAL_EXTENSION
# - this could be enabled if the respective tests were adapted
# MBEDTLS_ZLIB_SUPPORT
# MBEDTLS_PKCS11_C
# MBEDTLS_NO_UDBL_DIVISION
# MBEDTLS_NO_64BIT_MULTIPLICATION
# MBEDTLS_PSA_CRYPTO_SPM
# MBEDTLS_PSA_INJECT_ENTROPY
# MBEDTLS_ECP_RESTARTABLE
# and any symbol beginning _ALT
#
# Backward compatibility redirection
use warnings;
use strict;
## Copyright (C) 2019, ARM Limited, All Rights Reserved
## SPDX-License-Identifier: Apache-2.0
##
## Licensed under the Apache License, Version 2.0 (the "License"); you may
## not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
## This file is part of Mbed TLS (https://tls.mbed.org)
my $config_file = "include/mbedtls/config.h";
my $usage = <<EOU;
$0 [-f <file> | --file <file>] [-o | --force]
[set <symbol> <value> | unset <symbol> | get <symbol> |
full | realfull | baremetal]
Commands
set <symbol> [<value>] - Uncomments or adds a #define for the <symbol> to
the configuration file, and optionally making it
of <value>.
If the symbol isn't present in the file an error
is returned.
unset <symbol> - Comments out the #define for the given symbol if
present in the configuration file.
get <symbol> - Finds the #define for the given symbol, returning
an exitcode of 0 if the symbol is found, and 1 if
not. The value of the symbol is output if one is
specified in the configuration file.
full - Uncomments all #define's in the configuration file
excluding some reserved symbols, until the
'Module configuration options' section
realfull - Uncomments all #define's with no exclusions
baremetal - Sets full configuration suitable for baremetal build.
Options
-f | --file <filename> - The file or file path for the configuration file
to edit. When omitted, the following default is
used:
$config_file
-o | --force - If the symbol isn't present in the configuration
file when setting its value, a #define is
appended to the end of the file.
EOU
my @excluded = qw(
MBEDTLS_TEST_NULL_ENTROPY
MBEDTLS_DEPRECATED_REMOVED
MBEDTLS_HAVE_SSE2
MBEDTLS_PLATFORM_NO_STD_FUNCTIONS
MBEDTLS_ECP_DP_M221_ENABLED
MBEDTLS_ECP_DP_M383_ENABLED
MBEDTLS_ECP_DP_M511_ENABLED
MBEDTLS_MEMORY_DEBUG
MBEDTLS_MEMORY_BACKTRACE
MBEDTLS_MEMORY_BUFFER_ALLOC_C
MBEDTLS_NO_DEFAULT_ENTROPY_SOURCES
MBEDTLS_NO_PLATFORM_ENTROPY
MBEDTLS_RSA_NO_CRT
MBEDTLS_REMOVE_ARC4_CIPHERSUITES
MBEDTLS_REMOVE_3DES_CIPHERSUITES
MBEDTLS_SSL_HW_RECORD_ACCEL
MBEDTLS_X509_ALLOW_EXTENSIONS_NON_V3
MBEDTLS_X509_ALLOW_UNSUPPORTED_CRITICAL_EXTENSION
MBEDTLS_ZLIB_SUPPORT
MBEDTLS_PKCS11_C
MBEDTLS_NO_UDBL_DIVISION
MBEDTLS_NO_64BIT_MULTIPLICATION
MBEDTLS_PSA_CRYPTO_SPM
MBEDTLS_PSA_INJECT_ENTROPY
MBEDTLS_ECP_RESTARTABLE
MBEDTLS_ECDH_VARIANT_EVEREST_ENABLED
_ALT\s*$
);
# Things that should be disabled in "baremetal"
my @excluded_baremetal = qw(
MBEDTLS_NET_C
MBEDTLS_TIMING_C
MBEDTLS_FS_IO
MBEDTLS_ENTROPY_NV_SEED
MBEDTLS_HAVE_TIME
MBEDTLS_HAVE_TIME_DATE
MBEDTLS_DEPRECATED_WARNING
MBEDTLS_HAVEGE_C
MBEDTLS_THREADING_C
MBEDTLS_THREADING_PTHREAD
MBEDTLS_MEMORY_BACKTRACE
MBEDTLS_MEMORY_BUFFER_ALLOC_C
MBEDTLS_PLATFORM_TIME_ALT
MBEDTLS_PLATFORM_FPRINTF_ALT
MBEDTLS_PSA_ITS_FILE_C
MBEDTLS_PSA_CRYPTO_STORAGE_C
);
# Things that should be enabled in "full" even if they match @excluded
my @non_excluded = qw(
PLATFORM_[A-Z0-9]+_ALT
);
# Things that should be enabled in "baremetal"
my @non_excluded_baremetal = qw(
MBEDTLS_NO_PLATFORM_ENTROPY
);
# Process the command line arguments
my $force_option = 0;
my ($arg, $name, $value, $action);
while ($arg = shift) {
# Check if the argument is an option
if ($arg eq "-f" || $arg eq "--file") {
$config_file = shift;
-f $config_file or die "No such file: $config_file\n";
}
elsif ($arg eq "-o" || $arg eq "--force") {
$force_option = 1;
}
else
{
# ...else assume it's a command
$action = $arg;
if ($action eq "full" || $action eq "realfull" || $action eq "baremetal" ) {
# No additional parameters
die $usage if @ARGV;
}
elsif ($action eq "unset" || $action eq "get") {
die $usage unless @ARGV;
$name = shift;
}
elsif ($action eq "set") {
die $usage unless @ARGV;
$name = shift;
$value = shift if @ARGV;
}
else {
die "Command '$action' not recognised.\n\n".$usage;
}
}
}
# If no command was specified, exit...
if ( not defined($action) ){ die $usage; }
# Check the config file is present
if (! -f $config_file) {
chdir '..' or die;
# Confirm this is the project root directory and try again
if ( !(-d 'scripts' && -d 'include' && -d 'library' && -f $config_file) ) {
die "If no file specified, must be run from the project root or scripts directory.\n";
}
}
# Now read the file and process the contents
open my $config_read, '<', $config_file or die "read $config_file: $!\n";
my @config_lines = <$config_read>;
close $config_read;
# Add required baremetal symbols to the list that is included.
if ( $action eq "baremetal" ) {
@non_excluded = ( @non_excluded, @non_excluded_baremetal );
}
my ($exclude_re, $no_exclude_re, $exclude_baremetal_re);
if ($action eq "realfull") {
$exclude_re = qr/^$/;
$no_exclude_re = qr/./;
} else {
$exclude_re = join '|', @excluded;
$no_exclude_re = join '|', @non_excluded;
}
if ( $action eq "baremetal" ) {
$exclude_baremetal_re = join '|', @excluded_baremetal;
}
my $config_write = undef;
if ($action ne "get") {
open $config_write, '>', $config_file or die "write $config_file: $!\n";
}
my $done;
for my $line (@config_lines) {
if ($action eq "full" || $action eq "realfull" || $action eq "baremetal" ) {
if ($line =~ /name SECTION: Module configuration options/) {
$done = 1;
}
if (!$done && $line =~ m!^//\s?#define! &&
( $line !~ /$exclude_re/ || $line =~ /$no_exclude_re/ ) &&
( $action ne "baremetal" || ( $line !~ /$exclude_baremetal_re/ ) ) ) {
$line =~ s!^//\s?!!;
}
if (!$done && $line =~ m!^\s?#define! &&
! ( ( $line !~ /$exclude_re/ || $line =~ /$no_exclude_re/ ) &&
( $action ne "baremetal" || ( $line !~ /$exclude_baremetal_re/ ) ) ) ) {
$line =~ s!^!//!;
}
} elsif ($action eq "unset") {
if (!$done && $line =~ /^\s*#define\s*$name\b/) {
$line = '//' . $line;
$done = 1;
}
} elsif (!$done && $action eq "set") {
if ($line =~ m!^(?://)?\s*#define\s*$name\b!) {
$line = "#define $name";
$line .= " $value" if defined $value && $value ne "";
$line .= "\n";
$done = 1;
}
} elsif (!$done && $action eq "get") {
if ($line =~ /^\s*#define\s*$name(?:\s+(.*?))\s*(?:$|\/\*|\/\/)/) {
$value = $1;
$done = 1;
}
}
if (defined $config_write) {
print $config_write $line or die "write $config_file: $!\n";
}
}
# Did the set command work?
if ($action eq "set" && $force_option && !$done) {
# If the force option was set, append the symbol to the end of the file
my $line = "#define $name";
$line .= " $value" if defined $value && $value ne "";
$line .= "\n";
$done = 1;
print $config_write $line or die "write $config_file: $!\n";
}
if (defined $config_write) {
close $config_write or die "close $config_file: $!\n";
}
if ($action eq "get") {
if ($done) {
if ($value ne '') {
print "$value\n";
}
exit 0;
} else {
# If the symbol was not found, return an error
exit 1;
}
}
if ($action eq "full" && !$done) {
die "Configuration section was not found in $config_file\n";
}
if ($action ne "full" && $action ne "unset" && !$done) {
die "A #define for the symbol $name was not found in $config_file\n";
}
__END__
my $py = $0;
$py =~ s/\.pl$/.py/ or die "Unable to determine the name of the Python script";
exec 'python3', $py, @ARGV;
print STDERR "$0: python3: $!. Trying python instead.\n";
exec 'python', $py, @ARGV;
print STDERR "$0: python: $!\n";
exit 127;

scripts/config.py (new executable file, 469 lines added)
View File

@@ -0,0 +1,469 @@
#!/usr/bin/env python3
"""Mbed TLS configuration file manipulation library and tool
Basic usage, to read the Mbed TLS or Mbed Crypto configuration:
config = ConfigFile()
if 'MBEDTLS_RSA_C' in config: print('RSA is enabled')
"""
## Copyright (C) 2019, ARM Limited, All Rights Reserved
## SPDX-License-Identifier: Apache-2.0
##
## Licensed under the Apache License, Version 2.0 (the "License"); you may
## not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
## This file is part of Mbed TLS (https://tls.mbed.org)
import os
import re
class Setting:
"""Representation of one Mbed TLS config.h setting.
Fields:
* name: the symbol name ('MBEDTLS_xxx').
* value: the value of the macro. The empty string for a plain #define
with no value.
* active: True if name is defined, False if a #define for name is
present in config.h but commented out.
* section: the name of the section that contains this symbol.
"""
# pylint: disable=too-few-public-methods
def __init__(self, active, name, value='', section=None):
self.active = active
self.name = name
self.value = value
self.section = section
class Config:
"""Representation of the Mbed TLS configuration.
In the documentation of this class, a symbol is said to be *active*
if there is a #define for it that is not commented out, and *known*
if there is a #define for it whether commented out or not.
This class supports the following protocols:
* `name in config` is `True` if the symbol `name` is active, `False`
otherwise (whether `name` is inactive or not known).
* `config[name]` is the value of the macro `name`. If `name` is not
known, raise `KeyError` (`name` does not need to be active).
* `config[name] = value` sets the value associated with `name`. `name`
must be known, but does not need to be set. This does not cause
name to become set.
"""
def __init__(self):
self.settings = {}
def __contains__(self, name):
"""True if the given symbol is active (i.e. set).
False if the given symbol is not set, even if a definition
is present but commented out.
"""
return name in self.settings and self.settings[name].active
def all(self, *names):
"""True if all the elements of names are active (i.e. set)."""
return all(self.__contains__(name) for name in names)
def any(self, *names):
"""True if at least one symbol in names are active (i.e. set)."""
return any(self.__contains__(name) for name in names)
def known(self, name):
"""True if a #define for name is present, whether it's commented out or not."""
return name in self.settings
def __getitem__(self, name):
"""Get the value of name, i.e. what the preprocessor symbol expands to.
If name is not known, raise KeyError. name does not need to be active.
"""
return self.settings[name].value
def get(self, name, default=None):
"""Get the value of name. If name is inactive (not set), return default.
If a #define for name is present and not commented out, return
its expansion, even if this is the empty string.
If a #define for name is present but commented out, return default.
"""
if name in self.settings:
return self.settings[name].value
else:
return default
def __setitem__(self, name, value):
"""If name is known, set its value.
If name is not known, raise KeyError.
"""
self.settings[name].value = value
def set(self, name, value=None):
"""Set name to the given value and make it active.
If value is None and name is already known, don't change its value.
If value is None and name is not known, set its value to the empty
string.
"""
if name in self.settings:
if value is not None:
self.settings[name].value = value
self.settings[name].active = True
else:
self.settings[name] = Setting(True, name, value=value)
def unset(self, name):
"""Make name unset (inactive).
name remains known if it was known before.
"""
if name not in self.settings:
return
self.settings[name].active = False
def adapt(self, adapter):
"""Run adapter on each known symbol and (de)activate it accordingly.
`adapter` must be a function that returns a boolean. It is called as
`adapter(name, active, section)` for each setting, where `active` is
`True` if `name` is set and `False` if `name` is known but unset,
and `section` is the name of the section containing `name`. If
`adapter` returns `True`, then set `name` (i.e. make it active),
otherwise unset `name` (i.e. make it known but inactive).
"""
for setting in self.settings.values():
setting.active = adapter(setting.name, setting.active,
setting.section)
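# Illustrative custom adapter (hypothetical): leave non-full sections alone
# and turn off every *_ALT symbol in the full sections.
#     config.adapt(lambda name, active, section:
#                  active if not is_full_section(section)
#                  else active and not name.endswith('_ALT'))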
def is_full_section(section):
"""Is this section affected by "config.py full" and friends?"""
return section.endswith('support') or section.endswith('modules')
def realfull_adapter(_name, active, section):
"""Activate all symbols found in the system and feature sections."""
if not is_full_section(section):
return active
return True
def include_in_full(name):
"""Rules for symbols in the "full" configuration."""
if re.search(r'PLATFORM_[A-Z0-9]+_ALT', name):
return True
if name in [
'MBEDTLS_CTR_DRBG_USE_128_BIT_KEY',
'MBEDTLS_DEPRECATED_REMOVED',
'MBEDTLS_ECDH_VARIANT_EVEREST_ENABLED',
'MBEDTLS_ECP_RESTARTABLE',
'MBEDTLS_ENTROPY_FORCE_SHA256', # Variant toggle, tested separately
'MBEDTLS_HAVE_SSE2',
'MBEDTLS_MEMORY_BACKTRACE',
'MBEDTLS_MEMORY_BUFFER_ALLOC_C',
'MBEDTLS_MEMORY_DEBUG',
'MBEDTLS_NO_64BIT_MULTIPLICATION',
'MBEDTLS_NO_DEFAULT_ENTROPY_SOURCES',
'MBEDTLS_NO_PLATFORM_ENTROPY',
'MBEDTLS_NO_UDBL_DIVISION',
'MBEDTLS_PKCS11_C',
'MBEDTLS_PLATFORM_NO_STD_FUNCTIONS',
'MBEDTLS_PSA_CRYPTO_KEY_FILE_ID_ENCODES_OWNER',
'MBEDTLS_PSA_CRYPTO_SE_C',
'MBEDTLS_PSA_CRYPTO_SPM',
'MBEDTLS_PSA_INJECT_ENTROPY',
'MBEDTLS_REMOVE_3DES_CIPHERSUITES',
'MBEDTLS_REMOVE_ARC4_CIPHERSUITES',
'MBEDTLS_RSA_NO_CRT',
'MBEDTLS_SHA512_NO_SHA384',
'MBEDTLS_SSL_HW_RECORD_ACCEL',
'MBEDTLS_SSL_PROTO_SSL3',
'MBEDTLS_SSL_SRV_SUPPORT_SSLV2_CLIENT_HELLO',
'MBEDTLS_TEST_NULL_ENTROPY',
'MBEDTLS_X509_ALLOW_EXTENSIONS_NON_V3',
'MBEDTLS_X509_ALLOW_UNSUPPORTED_CRITICAL_EXTENSION',
'MBEDTLS_ZLIB_SUPPORT',
]:
return False
if name.endswith('_ALT'):
return False
return True
def full_adapter(name, active, section):
"""Config adapter for "full"."""
if not is_full_section(section):
return active
return include_in_full(name)
def keep_in_baremetal(name):
"""Rules for symbols in the "baremetal" configuration."""
if name in [
'MBEDTLS_DEPRECATED_WARNING',
'MBEDTLS_ENTROPY_NV_SEED',
'MBEDTLS_FS_IO',
'MBEDTLS_HAVEGE_C',
'MBEDTLS_HAVE_TIME',
'MBEDTLS_HAVE_TIME_DATE',
'MBEDTLS_NET_C',
'MBEDTLS_PLATFORM_FPRINTF_ALT',
'MBEDTLS_PLATFORM_TIME_ALT',
'MBEDTLS_PSA_CRYPTO_SE_C',
'MBEDTLS_PSA_CRYPTO_STORAGE_C',
'MBEDTLS_PSA_ITS_FILE_C',
'MBEDTLS_THREADING_C',
'MBEDTLS_THREADING_PTHREAD',
'MBEDTLS_TIMING_C',
]:
return False
return True
def baremetal_adapter(name, active, section):
"""Config adapter for "baremetal"."""
if not is_full_section(section):
return active
if name == 'MBEDTLS_NO_PLATFORM_ENTROPY':
return True
return include_in_full(name) and keep_in_baremetal(name)
def include_in_crypto(name):
"""Rules for symbols in a crypto configuration."""
if name.startswith('MBEDTLS_X509_') or \
name.startswith('MBEDTLS_SSL_') or \
name.startswith('MBEDTLS_KEY_EXCHANGE_'):
return False
if name in [
'MBEDTLS_CERTS_C',
'MBEDTLS_DEBUG_C',
'MBEDTLS_NET_C',
'MBEDTLS_PKCS11_C',
]:
return False
return True
def crypto_adapter(adapter):
"""Modify an adapter to disable non-crypto symbols.
``crypto_adapter(adapter)(name, active, section)`` is like
``adapter(name, active, section)``, but unsets all X.509 and TLS symbols.
"""
def continuation(name, active, section):
if not include_in_crypto(name):
return False
if adapter is None:
return active
return adapter(name, active, section)
return continuation
class ConfigFile(Config):
"""Representation of the Mbed TLS configuration read for a file.
See the documentation of the `Config` class for methods to query
and modify the configuration.
"""
_path_in_tree = 'include/mbedtls/config.h'
default_path = [_path_in_tree,
os.path.join(os.path.dirname(__file__),
os.pardir,
_path_in_tree),
os.path.join(os.path.dirname(os.path.abspath(os.path.dirname(__file__))),
_path_in_tree)]
def __init__(self, filename=None):
"""Read the Mbed TLS configuration file."""
if filename is None:
for filename in self.default_path:
if os.path.lexists(filename):
break
super().__init__()
self.filename = filename
self.current_section = 'header'
with open(filename, 'r', encoding='utf-8') as file:
self.templates = [self._parse_line(line) for line in file]
self.current_section = None
def set(self, name, value=None):
if name not in self.settings:
self.templates.append((name, '', '#define ' + name + ' '))
super().set(name, value)
_define_line_regexp = (r'(?P<indentation>\s*)' +
r'(?P<commented_out>(//\s*)?)' +
r'(?P<define>#\s*define\s+)' +
r'(?P<name>\w+)' +
r'(?P<arguments>(?:\((?:\w|\s|,)*\))?)' +
r'(?P<separator>\s*)' +
r'(?P<value>.*)')
_section_line_regexp = (r'\s*/?\*+\s*[\\@]name\s+SECTION:\s*' +
r'(?P<section>.*)[ */]*')
_config_line_regexp = re.compile(r'|'.join([_define_line_regexp,
_section_line_regexp]))
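# Illustrative match (hypothetical input line): parsing '//#define MBEDTLS_SHA512_ALT'
# yields commented_out='//', name='MBEDTLS_SHA512_ALT' and an empty value, so
# the setting is recorded as known but inactive.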
def _parse_line(self, line):
"""Parse a line in config.h and return the corresponding template."""
line = line.rstrip('\r\n')
m = re.match(self._config_line_regexp, line)
if m is None:
return line
elif m.group('section'):
self.current_section = m.group('section')
return line
else:
active = not m.group('commented_out')
name = m.group('name')
value = m.group('value')
template = (name,
m.group('indentation'),
m.group('define') + name +
m.group('arguments') + m.group('separator'))
self.settings[name] = Setting(active, name, value,
self.current_section)
return template
def _format_template(self, name, indent, middle):
"""Build a line for config.h for the given setting.
The line has the form "<indent>#define <name> <value>"
where <middle> is "#define <name> ".
"""
setting = self.settings[name]
value = setting.value
if value is None:
value = ''
# Normally the whitespace to separate the symbol name from the
# value is part of middle, and there's no whitespace for a symbol
# with no value. But if a symbol has been changed from having a
# value to not having one, the whitespace is wrong, so fix it.
if value:
if middle[-1] not in '\t ':
middle += ' '
else:
middle = middle.rstrip()
return ''.join([indent,
'' if setting.active else '//',
middle,
value]).rstrip()
def write_to_stream(self, output):
"""Write the whole configuration to output."""
for template in self.templates:
if isinstance(template, str):
line = template
else:
line = self._format_template(*template)
output.write(line + '\n')
def write(self, filename=None):
"""Write the whole configuration to the file it was read from.
If filename is specified, write to this file instead.
"""
if filename is None:
filename = self.filename
with open(filename, 'w', encoding='utf-8') as output:
self.write_to_stream(output)
if __name__ == '__main__':
def main():
"""Command line config.h manipulation tool."""
parser = argparse.ArgumentParser(description="""
Mbed TLS and Mbed Crypto configuration file manipulation tool.
""")
parser.add_argument('--file', '-f',
help="""File to read (and modify if requested).
Default: {}.
""".format(ConfigFile.default_path))
parser.add_argument('--force', '-o',
action='store_true',
help="""For the set command, if SYMBOL is not
present, add a definition for it.""")
parser.add_argument('--write', '-w', metavar='FILE',
help="""File to write to instead of the input file.""")
subparsers = parser.add_subparsers(dest='command',
title='Commands')
parser_get = subparsers.add_parser('get',
help="""Find the value of SYMBOL
and print it. Exit with
status 0 if a #define for SYMBOL is
found, 1 otherwise.
""")
parser_get.add_argument('symbol', metavar='SYMBOL')
parser_set = subparsers.add_parser('set',
help="""Set SYMBOL to VALUE.
If VALUE is omitted, just uncomment
the #define for SYMBOL.
Error out if a line defining
SYMBOL (commented or not) is not
found, unless --force is passed.
""")
parser_set.add_argument('symbol', metavar='SYMBOL')
parser_set.add_argument('value', metavar='VALUE', nargs='?',
default='')
parser_unset = subparsers.add_parser('unset',
help="""Comment out the #define
for SYMBOL. Do nothing if none
is present.""")
parser_unset.add_argument('symbol', metavar='SYMBOL')
def add_adapter(name, function, description):
subparser = subparsers.add_parser(name, help=description)
subparser.set_defaults(adapter=function)
add_adapter('baremetal', baremetal_adapter,
"""Like full, but exclude features that require platform
features such as file input-output.""")
add_adapter('full', full_adapter,
"""Uncomment most features.
Exclude alternative implementations and platform support
options, as well as some options that are awkward to test.
""")
add_adapter('realfull', realfull_adapter,
"""Uncomment all boolean #defines.
Suitable for generating documentation, but not for building.""")
add_adapter('crypto', crypto_adapter(None),
"""Only include crypto features. Exclude X.509 and TLS.""")
add_adapter('crypto_baremetal', crypto_adapter(baremetal_adapter),
"""Like baremetal, but with only crypto features,
excluding X.509 and TLS.""")
add_adapter('crypto_full', crypto_adapter(full_adapter),
"""Like full, but with only crypto features,
excluding X.509 and TLS.""")
args = parser.parse_args()
config = ConfigFile(args.file)
if args.command is None:
parser.print_help()
return 1
elif args.command == 'get':
if args.symbol in config:
value = config[args.symbol]
if value:
sys.stdout.write(value + '\n')
return args.symbol not in config
elif args.command == 'set':
if not args.force and args.symbol not in config.settings:
sys.stderr.write("A #define for the symbol {} "
"was not found in {}\n"
.format(args.symbol, config.filename))
return 1
config.set(args.symbol, value=args.value)
elif args.command == 'unset':
config.unset(args.symbol)
else:
config.adapt(args.adapter)
config.write(args.write)
# Import modules that only main needs, and only when main is going to run.
# pylint: disable=wrong-import-position
import argparse
import sys
sys.exit(main())
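# Illustrative command lines (hypothetical, run from the project root):
#     scripts/config.py get MBEDTLS_RSA_C              # exit status 0 if the symbol is set
#     scripts/config.py set MBEDTLS_ECP_WINDOW_SIZE 4  # uncomment and give it a value
#     scripts/config.py unset MBEDTLS_NET_C            # comment out its #define
#     scripts/config.py baremetal                      # apply the baremetal adapter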

View File

@@ -25,8 +25,7 @@
#include MBEDTLS_CONFIG_FILE
#endif
#if defined(MBEDTLS_ERROR_C) || defined(MBEDTLS_ERROR_STRERROR_DUMMY)
#include "mbedtls/error.h"
#if defined(MBEDTLS_ERROR_STRERROR_DUMMY)
#include <string.h>
#endif

View File

@@ -706,9 +706,9 @@ POLARSSL_KEY_EXCHANGE_RSA MBEDTLS_KEY_EXCHANGE_RSA
POLARSSL_KEY_EXCHANGE_RSA_ENABLED MBEDTLS_KEY_EXCHANGE_RSA_ENABLED
POLARSSL_KEY_EXCHANGE_RSA_PSK MBEDTLS_KEY_EXCHANGE_RSA_PSK
POLARSSL_KEY_EXCHANGE_RSA_PSK_ENABLED MBEDTLS_KEY_EXCHANGE_RSA_PSK_ENABLED
POLARSSL_KEY_EXCHANGE__SOME__ECDHE_ENABLED MBEDTLS_KEY_EXCHANGE__SOME__ECDHE_ENABLED
POLARSSL_KEY_EXCHANGE__SOME__PSK_ENABLED MBEDTLS_KEY_EXCHANGE__SOME__PSK_ENABLED
POLARSSL_KEY_EXCHANGE__WITH_CERT__ENABLED MBEDTLS_KEY_EXCHANGE__WITH_CERT__ENABLED
POLARSSL_KEY_EXCHANGE__SOME__ECDHE_ENABLED MBEDTLS_KEY_EXCHANGE_SOME_ECDHE_ENABLED
POLARSSL_KEY_EXCHANGE__SOME__PSK_ENABLED MBEDTLS_KEY_EXCHANGE_SOME_PSK_ENABLED
POLARSSL_KEY_EXCHANGE__WITH_CERT__ENABLED MBEDTLS_KEY_EXCHANGE_WITH_CERT_ENABLED
POLARSSL_KEY_LENGTH_DES MBEDTLS_KEY_LENGTH_DES
POLARSSL_KEY_LENGTH_DES_EDE MBEDTLS_KEY_LENGTH_DES_EDE
POLARSSL_KEY_LENGTH_DES_EDE3 MBEDTLS_KEY_LENGTH_DES_EDE3

View File

@@ -84,7 +84,7 @@
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;_USRDLL;MBEDTLS_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;_USRDLL;MBEDTLS_EXPORTS;KRML_VERIFIED_UINT128;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>
INCLUDE_DIRECTORIES
</AdditionalIncludeDirectories>
@@ -101,7 +101,7 @@ INCLUDE_DIRECTORIES
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;_USRDLL;MBEDTLS_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;_USRDLL;MBEDTLS_EXPORTS;KRML_VERIFIED_UINT128;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>
INCLUDE_DIRECTORIES
</AdditionalIncludeDirectories>
@@ -120,7 +120,7 @@ INCLUDE_DIRECTORIES
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;_USRDLL;MBEDTLS_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;_USRDLL;MBEDTLS_EXPORTS;KRML_VERIFIED_UINT128;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>
INCLUDE_DIRECTORIES
</AdditionalIncludeDirectories>
@@ -140,7 +140,7 @@ INCLUDE_DIRECTORIES
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN64;NDEBUG;_WINDOWS;_USRDLL;MBEDTLS_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>WIN64;NDEBUG;_WINDOWS;_USRDLL;MBEDTLS_EXPORTS;KRML_VERIFIED_UINT128;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>
INCLUDE_DIRECTORIES
</AdditionalIncludeDirectories>

View File

@@ -59,8 +59,8 @@ EOF
for F in 0 1; do
for W in 2 3 4 5 6; do
scripts/config.pl set MBEDTLS_ECP_WINDOW_SIZE $W
scripts/config.pl set MBEDTLS_ECP_FIXED_POINT_OPTIM $F
scripts/config.py set MBEDTLS_ECP_WINDOW_SIZE $W
scripts/config.py set MBEDTLS_ECP_FIXED_POINT_OPTIM $F
make benchmark >/dev/null 2>&1
echo "fixed point optim = $F, max window size = $W"
echo "--------------------------------------------"

View File

@@ -62,10 +62,10 @@ doit()
fi
{
scripts/config.pl unset MBEDTLS_NET_C || true
scripts/config.pl unset MBEDTLS_TIMING_C || true
scripts/config.pl unset MBEDTLS_FS_IO || true
scripts/config.pl --force set MBEDTLS_NO_PLATFORM_ENTROPY || true
scripts/config.py unset MBEDTLS_NET_C || true
scripts/config.py unset MBEDTLS_TIMING_C || true
scripts/config.py unset MBEDTLS_FS_IO || true
scripts/config.py --force set MBEDTLS_NO_PLATFORM_ENTROPY || true
} >/dev/null 2>&1
make clean >/dev/null

View File

@@ -3,17 +3,15 @@
# Generate error.c
#
# Usage: ./generate_errors.pl or scripts/generate_errors.pl without arguments,
# or generate_errors.pl include_dir data_dir error_file include_crypto
# include_crypto can be either 0 (don't include) or 1 (include). On by default.
# or generate_errors.pl include_dir data_dir error_file
use strict;
my ($include_dir, $data_dir, $error_file, $include_crypto);
my $crypto_dir = "crypto";
my ($include_dir, $data_dir, $error_file);
if( @ARGV ) {
die "Invalid number of arguments" if scalar @ARGV != 4;
($include_dir, $data_dir, $error_file, $include_crypto) = @ARGV;
die "Invalid number of arguments" if scalar @ARGV != 3;
($include_dir, $data_dir, $error_file) = @ARGV;
-d $include_dir or die "No such directory: $include_dir\n";
-d $data_dir or die "No such directory: $data_dir\n";
@@ -21,7 +19,6 @@ if( @ARGV ) {
$include_dir = 'include/mbedtls';
$data_dir = 'scripts/data_files';
$error_file = 'library/error.c';
$include_crypto = 1;
unless( -d $include_dir && -d $data_dir ) {
chdir '..' or die;
@@ -30,15 +27,11 @@ if( @ARGV ) {
}
}
if( $include_crypto ) {
-d $crypto_dir or die "Crypto submodule not present\n";
}
my $error_format_file = $data_dir.'/error.fmt';
my @low_level_modules = qw( AES ARC4 ARIA ASN1 BASE64 BIGNUM BLOWFISH
CAMELLIA CCM CHACHA20 CHACHAPOLY CMAC CTR_DRBG DES
ENTROPY GCM HKDF HMAC_DRBG MD2 MD4 MD5
ENTROPY ERROR GCM HKDF HMAC_DRBG MD2 MD4 MD5
NET OID PADLOCK PBKDF2 PLATFORM POLY1305 RIPEMD160
SHA1 SHA256 SHA512 THREADING XTEA );
my @high_level_modules = qw( CIPHER DHM ECP MD
@@ -54,19 +47,9 @@ close(FORMAT_FILE);
$/ = $line_separator;
my @headers = ();
if ($include_crypto) {
@headers = <$crypto_dir/$include_dir/*.h>;
foreach my $header (<$include_dir/*.h>) {
my $basename = $header; $basename =~ s!.*/!!;
push @headers, $header unless -e "$crypto_dir/$include_dir/$basename";
}
} else {
@headers = <$include_dir/*.h>;
}
my @files = <$include_dir/*.h>;
my @matches;
foreach my $file (@headers) {
foreach my $file (@files) {
open(FILE, "$file");
my @grep_res = grep(/^\s*#define\s+MBEDTLS_ERR_\w+\s+\-0x[0-9A-Fa-f]+/, <FILE>);
push(@matches, @grep_res);
@@ -90,9 +73,8 @@ foreach my $line (@matches)
my ($error_name, $error_code) = $line =~ /(MBEDTLS_ERR_\w+)\s+\-(0x\w+)/;
my ($description) = $line =~ /\/\*\*< (.*?)\.? \*\//;
if( $error_codes_seen{$error_code}++ ) {
die "Duplicated error code: $error_code ($error_name)\n";
}
die "Duplicated error code: $error_code ($error_name)\n"
if( $error_codes_seen{$error_code}++ );
$description =~ s/\\/\\\\/g;
if ($description eq "") {

scripts/generate_psa_constants.py (new executable file, 400 lines added)
View File

@@ -0,0 +1,400 @@
#!/usr/bin/env python3
"""Generate programs/psa/psa_constant_names_generated.c
which is included by programs/psa/psa_constant_names.c.
The code generated by this module is only meant to be used in the context
of that program.
"""
import os
import re
OUTPUT_TEMPLATE = '''\
/* Automatically generated by generate_psa_constant.py. DO NOT EDIT. */
static const char *psa_strerror(psa_status_t status)
{
switch (status) {
%(status_cases)s
default: return NULL;
}
}
static const char *psa_ecc_curve_name(psa_ecc_curve_t curve)
{
switch (curve) {
%(ecc_curve_cases)s
default: return NULL;
}
}
static const char *psa_dh_group_name(psa_dh_group_t group)
{
switch (group) {
%(dh_group_cases)s
default: return NULL;
}
}
static const char *psa_hash_algorithm_name(psa_algorithm_t hash_alg)
{
switch (hash_alg) {
%(hash_algorithm_cases)s
default: return NULL;
}
}
static const char *psa_ka_algorithm_name(psa_algorithm_t ka_alg)
{
switch (ka_alg) {
%(ka_algorithm_cases)s
default: return NULL;
}
}
static int psa_snprint_key_type(char *buffer, size_t buffer_size,
psa_key_type_t type)
{
size_t required_size = 0;
switch (type) {
%(key_type_cases)s
default:
%(key_type_code)s{
return snprintf(buffer, buffer_size,
"0x%%04x", (unsigned) type);
}
break;
}
buffer[0] = 0;
return (int) required_size;
}
#define NO_LENGTH_MODIFIER 0xfffffffflu
static int psa_snprint_algorithm(char *buffer, size_t buffer_size,
psa_algorithm_t alg)
{
size_t required_size = 0;
psa_algorithm_t core_alg = alg;
unsigned long length_modifier = NO_LENGTH_MODIFIER;
if (PSA_ALG_IS_MAC(alg)) {
core_alg = PSA_ALG_TRUNCATED_MAC(alg, 0);
if (core_alg != alg) {
append(&buffer, buffer_size, &required_size,
"PSA_ALG_TRUNCATED_MAC(", 22);
length_modifier = PSA_MAC_TRUNCATED_LENGTH(alg);
}
} else if (PSA_ALG_IS_AEAD(alg)) {
core_alg = PSA_ALG_AEAD_WITH_DEFAULT_TAG_LENGTH(alg);
if (core_alg == 0) {
/* For unknown AEAD algorithms, there is no "default tag length". */
core_alg = alg;
} else if (core_alg != alg) {
append(&buffer, buffer_size, &required_size,
"PSA_ALG_AEAD_WITH_TAG_LENGTH(", 29);
length_modifier = PSA_AEAD_TAG_LENGTH(alg);
}
} else if (PSA_ALG_IS_KEY_AGREEMENT(alg) &&
!PSA_ALG_IS_RAW_KEY_AGREEMENT(alg)) {
core_alg = PSA_ALG_KEY_AGREEMENT_GET_KDF(alg);
append(&buffer, buffer_size, &required_size,
"PSA_ALG_KEY_AGREEMENT(", 22);
append_with_alg(&buffer, buffer_size, &required_size,
psa_ka_algorithm_name,
PSA_ALG_KEY_AGREEMENT_GET_BASE(alg));
append(&buffer, buffer_size, &required_size, ", ", 2);
}
switch (core_alg) {
%(algorithm_cases)s
default:
%(algorithm_code)s{
append_integer(&buffer, buffer_size, &required_size,
"0x%%08lx", (unsigned long) core_alg);
}
break;
}
if (core_alg != alg) {
if (length_modifier != NO_LENGTH_MODIFIER) {
append(&buffer, buffer_size, &required_size, ", ", 2);
append_integer(&buffer, buffer_size, &required_size,
"%%lu", length_modifier);
}
append(&buffer, buffer_size, &required_size, ")", 1);
}
buffer[0] = 0;
return (int) required_size;
}
static int psa_snprint_key_usage(char *buffer, size_t buffer_size,
psa_key_usage_t usage)
{
size_t required_size = 0;
if (usage == 0) {
if (buffer_size > 1) {
buffer[0] = '0';
buffer[1] = 0;
} else if (buffer_size == 1) {
buffer[0] = 0;
}
return 1;
}
%(key_usage_code)s
if (usage != 0) {
if (required_size != 0) {
append(&buffer, buffer_size, &required_size, " | ", 3);
}
append_integer(&buffer, buffer_size, &required_size,
"0x%%08lx", (unsigned long) usage);
} else {
buffer[0] = 0;
}
return (int) required_size;
}
/* End of automatically generated file. */
'''
KEY_TYPE_FROM_CURVE_TEMPLATE = '''if (%(tester)s(type)) {
append_with_curve(&buffer, buffer_size, &required_size,
"%(builder)s", %(builder_length)s,
PSA_KEY_TYPE_GET_CURVE(type));
} else '''
KEY_TYPE_FROM_GROUP_TEMPLATE = '''if (%(tester)s(type)) {
append_with_group(&buffer, buffer_size, &required_size,
"%(builder)s", %(builder_length)s,
PSA_KEY_TYPE_GET_GROUP(type));
} else '''
ALGORITHM_FROM_HASH_TEMPLATE = '''if (%(tester)s(core_alg)) {
append(&buffer, buffer_size, &required_size,
"%(builder)s(", %(builder_length)s + 1);
append_with_alg(&buffer, buffer_size, &required_size,
psa_hash_algorithm_name,
PSA_ALG_GET_HASH(core_alg));
append(&buffer, buffer_size, &required_size, ")", 1);
} else '''
BIT_TEST_TEMPLATE = '''\
if (%(var)s & %(flag)s) {
if (required_size != 0) {
append(&buffer, buffer_size, &required_size, " | ", 3);
}
append(&buffer, buffer_size, &required_size, "%(flag)s", %(length)d);
%(var)s ^= %(flag)s;
}\
'''
class MacroCollector:
"""Collect PSA crypto macro definitions from C header files.
1. Call `read_file` on the input header file(s).
2. Call `write_file` to write ``psa_constant_names_generated.c``.
"""
def __init__(self):
self.statuses = set()
self.key_types = set()
self.key_types_from_curve = {}
self.key_types_from_group = {}
self.ecc_curves = set()
self.dh_groups = set()
self.algorithms = set()
self.hash_algorithms = set()
self.ka_algorithms = set()
self.algorithms_from_hash = {}
self.key_usages = set()
# "#define" followed by a macro name with either no parameters
# or a single parameter and a non-empty expansion.
# Grab the macro name in group 1, the parameter name if any in group 2
# and the expansion in group 3.
_define_directive_re = re.compile(r'\s*#\s*define\s+(\w+)' +
r'(?:\s+|\((\w+)\)\s*)' +
r'(.+)')
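# Illustrative match (hypothetical line): '#define PSA_ALG_SHA_256 ((psa_algorithm_t)0x01000009)'
# captures name='PSA_ALG_SHA_256', no parameter, and the parenthesized expansion;
# read_line then classifies it as a hash algorithm from its 0x010000xx pattern.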
_deprecated_definition_re = re.compile(r'\s*MBEDTLS_DEPRECATED')
def read_line(self, line):
"""Parse a C header line and record the PSA identifier it defines if any.
This function analyzes lines that start with "#define PSA_"
(up to non-significant whitespace) and skips all non-matching lines.
"""
# pylint: disable=too-many-branches
m = re.match(self._define_directive_re, line)
if not m:
return
name, parameter, expansion = m.groups()
expansion = re.sub(r'/\*.*?\*/|//.*', r' ', expansion)
if re.match(self._deprecated_definition_re, expansion):
# Skip deprecated values, which are assumed to be
# backward compatibility aliases that share
# numerical values with non-deprecated values.
return
if name.endswith('_FLAG') or name.endswith('MASK'):
# Macro only to build actual values
return
elif (name.startswith('PSA_ERROR_') or name == 'PSA_SUCCESS') \
and not parameter:
self.statuses.add(name)
elif name.startswith('PSA_KEY_TYPE_') and not parameter:
self.key_types.add(name)
elif name.startswith('PSA_KEY_TYPE_') and parameter == 'curve':
self.key_types_from_curve[name] = name[:13] + 'IS_' + name[13:]
elif name.startswith('PSA_KEY_TYPE_') and parameter == 'group':
self.key_types_from_group[name] = name[:13] + 'IS_' + name[13:]
elif name.startswith('PSA_ECC_CURVE_') and not parameter:
self.ecc_curves.add(name)
elif name.startswith('PSA_DH_GROUP_') and not parameter:
self.dh_groups.add(name)
elif name.startswith('PSA_ALG_') and not parameter:
if name in ['PSA_ALG_ECDSA_BASE',
'PSA_ALG_RSA_PKCS1V15_SIGN_BASE']:
# Ad hoc skipping of duplicate names for some numerical values
return
self.algorithms.add(name)
# Ad hoc detection of hash algorithms
if re.search(r'0x010000[0-9A-Fa-f]{2}', expansion):
self.hash_algorithms.add(name)
# Ad hoc detection of key agreement algorithms
if re.search(r'0x30[0-9A-Fa-f]{2}0000', expansion):
self.ka_algorithms.add(name)
elif name.startswith('PSA_ALG_') and parameter == 'hash_alg':
if name in ['PSA_ALG_DSA', 'PSA_ALG_ECDSA']:
# A naming irregularity
tester = name[:8] + 'IS_RANDOMIZED_' + name[8:]
else:
tester = name[:8] + 'IS_' + name[8:]
self.algorithms_from_hash[name] = tester
elif name.startswith('PSA_KEY_USAGE_') and not parameter:
self.key_usages.add(name)
else:
# Other macro without parameter
return
_nonascii_re = re.compile(rb'[^\x00-\x7f]+')
_continued_line_re = re.compile(rb'\\\r?\n\Z')
def read_file(self, header_file):
for line in header_file:
m = re.search(self._continued_line_re, line)
while m:
cont = next(header_file)
line = line[:m.start(0)] + cont
m = re.search(self._continued_line_re, line)
line = re.sub(self._nonascii_re, rb'', line).decode('ascii')
self.read_line(line)
@staticmethod
def _make_return_case(name):
return 'case %(name)s: return "%(name)s";' % {'name': name}
@staticmethod
def _make_append_case(name):
template = ('case %(name)s: '
'append(&buffer, buffer_size, &required_size, "%(name)s", %(length)d); '
'break;')
return template % {'name': name, 'length': len(name)}
@staticmethod
def _make_bit_test(var, flag):
return BIT_TEST_TEMPLATE % {'var': var,
'flag': flag,
'length': len(flag)}
def _make_status_cases(self):
return '\n '.join(map(self._make_return_case,
sorted(self.statuses)))
def _make_ecc_curve_cases(self):
return '\n '.join(map(self._make_return_case,
sorted(self.ecc_curves)))
def _make_dh_group_cases(self):
return '\n '.join(map(self._make_return_case,
sorted(self.dh_groups)))
def _make_key_type_cases(self):
return '\n '.join(map(self._make_append_case,
sorted(self.key_types)))
@staticmethod
def _make_key_type_from_curve_code(builder, tester):
return KEY_TYPE_FROM_CURVE_TEMPLATE % {'builder': builder,
'builder_length': len(builder),
'tester': tester}
@staticmethod
def _make_key_type_from_group_code(builder, tester):
return KEY_TYPE_FROM_GROUP_TEMPLATE % {'builder': builder,
'builder_length': len(builder),
'tester': tester}
def _make_ecc_key_type_code(self):
d = self.key_types_from_curve
make = self._make_key_type_from_curve_code
return ''.join([make(k, d[k]) for k in sorted(d.keys())])
def _make_dh_key_type_code(self):
d = self.key_types_from_group
make = self._make_key_type_from_group_code
return ''.join([make(k, d[k]) for k in sorted(d.keys())])
def _make_hash_algorithm_cases(self):
return '\n '.join(map(self._make_return_case,
sorted(self.hash_algorithms)))
def _make_ka_algorithm_cases(self):
return '\n '.join(map(self._make_return_case,
sorted(self.ka_algorithms)))
def _make_algorithm_cases(self):
return '\n '.join(map(self._make_append_case,
sorted(self.algorithms)))
@staticmethod
def _make_algorithm_from_hash_code(builder, tester):
return ALGORITHM_FROM_HASH_TEMPLATE % {'builder': builder,
'builder_length': len(builder),
'tester': tester}
def _make_algorithm_code(self):
d = self.algorithms_from_hash
make = self._make_algorithm_from_hash_code
return ''.join([make(k, d[k]) for k in sorted(d.keys())])
def _make_key_usage_code(self):
return '\n'.join([self._make_bit_test('usage', bit)
for bit in sorted(self.key_usages)])
def write_file(self, output_file):
"""Generate the pretty-printer function code from the gathered
constant definitions.
"""
data = {}
data['status_cases'] = self._make_status_cases()
data['ecc_curve_cases'] = self._make_ecc_curve_cases()
data['dh_group_cases'] = self._make_dh_group_cases()
data['key_type_cases'] = self._make_key_type_cases()
data['key_type_code'] = (self._make_ecc_key_type_code() +
self._make_dh_key_type_code())
data['hash_algorithm_cases'] = self._make_hash_algorithm_cases()
data['ka_algorithm_cases'] = self._make_ka_algorithm_cases()
data['algorithm_cases'] = self._make_algorithm_cases()
data['algorithm_code'] = self._make_algorithm_code()
data['key_usage_code'] = self._make_key_usage_code()
output_file.write(OUTPUT_TEMPLATE % data)
def generate_psa_constants(header_file_names, output_file_name):
collector = MacroCollector()
for header_file_name in header_file_names:
with open(header_file_name, 'rb') as header_file:
collector.read_file(header_file)
temp_file_name = output_file_name + '.tmp'
with open(temp_file_name, 'w') as output_file:
collector.write_file(output_file)
os.rename(temp_file_name, output_file_name)
if __name__ == '__main__':
if not os.path.isdir('programs') and os.path.isdir('../programs'):
os.chdir('..')
generate_psa_constants(['include/psa/crypto_values.h',
'include/psa/crypto_extra.h'],
'programs/psa/psa_constant_names_generated.c')
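For reference, here is a minimal sketch of driving the collector by hand, for
instance to list the gathered status constants without generating any C code.
The import path is an assumption (this script's module name is not shown
above); the rest uses only the class and attributes defined in it.
# Minimal usage sketch; the module name 'generate_psa_constants' is assumed.
from generate_psa_constants import MacroCollector
collector = MacroCollector()
for header_file_name in ('include/psa/crypto_values.h',
                         'include/psa/crypto_extra.h'):
    with open(header_file_name, 'rb') as header_file:
        collector.read_file(header_file)
# The sets filled in by read_line() can be inspected directly, e.g. the
# status codes that the generated switch statement will cover.
print('\n'.join(sorted(collector.statuses)))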

View File

@ -21,7 +21,7 @@ use strict;
my $config_file = "./include/mbedtls/config.h";
my $query_config_format_file = "./scripts/data_files/query_config.fmt";
my $query_config_file = "./programs/ssl/query_config.c";
my $query_config_file = "./programs/test/query_config.c";
# Macros excluded from the generated query_config.c. For example, macros that
# contain commas, or function-like macros, cannot easily be turned into strings.

View File

@ -4,8 +4,7 @@
# 2010
#
# Must be run from mbedTLS root or scripts directory.
# Takes "include_crypto" as an argument that can be either 0 (don't include) or
# 1 (include). On by default.
# Takes no argument.
use warnings;
use strict;
@ -19,29 +18,39 @@ my $vsx_main_file = "$vsx_dir/mbedTLS.$vsx_ext";
my $vsx_sln_tpl_file = "scripts/data_files/vs2010-sln-template.sln";
my $vsx_sln_file = "$vsx_dir/mbedTLS.sln";
my $include_crypto = 1;
if( @ARGV ) {
die "Invalid number of arguments" if scalar @ARGV != 1;
($include_crypto) = @ARGV;
}
my $programs_dir = 'programs';
my $header_dir = 'include/mbedtls';
my $crypto_headers_dir = 'include/psa';
my $mbedtls_header_dir = 'include/mbedtls';
my $psa_header_dir = 'include/psa';
my $source_dir = 'library';
my $crypto_dir = 'crypto';
my @thirdparty_header_dirs = qw(
3rdparty/everest/include/everest
);
my @thirdparty_source_dirs = qw(
3rdparty/everest/library
3rdparty/everest/library/kremlib
3rdparty/everest/library/legacy
);
# Directories to add to the include path.
# Order matters in case there are files with the same name in more than
# one directory: the compiler will use the first match.
my @include_directories = qw(
include
3rdparty/everest/include/
3rdparty/everest/include/everest
3rdparty/everest/include/everest/vs2010
3rdparty/everest/include/everest/kremlib
);
my $include_directories = join(';', map {"../../$_"} @include_directories);
my @excluded_files = qw(
3rdparty/everest/library/Hacl_Curve25519.c
);
my %excluded_files = ();
foreach (@excluded_files) { $excluded_files{$_} = 1 }
# Need windows line endings!
my $include_directories = <<EOT;
../../include\r
EOT
if ($include_crypto) {
$include_directories = <<EOT;
../../include;../../crypto/include;../../crypto/3rdparty/everest/include/;../../crypto/3rdparty/everest/include/everest;../../crypto/3rdparty/everest/include/everest/vs2010;../../crypto/3rdparty/everest/include/everest/kremlib\r
EOT
}
my $vsx_hdr_tpl = <<EOT;
<ClInclude Include="..\\..\\{NAME}" />\r
EOT
@ -71,12 +80,14 @@ EOT
exit( main() );
sub check_dirs {
foreach my $d (@thirdparty_header_dirs, @thirdparty_source_dirs) {
if (not (-d $d)) { return 0; }
}
return -d $vsx_dir
&& -d $header_dir
&& -d $mbedtls_header_dir
&& -d $psa_header_dir
&& -d $source_dir
&& -d $programs_dir
&& -d $crypto_dir
&& -d "$crypto_dir/$crypto_headers_dir";
&& -d $programs_dir;
}
sub slurp_file {
@ -117,7 +128,7 @@ sub gen_app {
my $srcs = "\n <ClCompile Include=\"..\\..\\programs\\$path.c\" \/>\r";
if( $appname eq "ssl_client2" or $appname eq "ssl_server2" or
$appname eq "query_compile_time_config" ) {
$srcs .= "\n <ClCompile Include=\"..\\..\\programs\\ssl\\query_config.c\" \/>\r";
$srcs .= "\n <ClCompile Include=\"..\\..\\programs\\test\\query_config.c\" \/>\r";
}
my $content = $template;
@ -159,7 +170,9 @@ sub gen_entry_list {
}
sub gen_main_file {
my ($headers, $sources, $hdr_tpl, $src_tpl, $main_tpl, $main_out) = @_;
my ($headers, $sources,
$hdr_tpl, $src_tpl,
$main_tpl, $main_out) = @_;
my $header_entries = gen_entry_list( $hdr_tpl, @$headers );
my $source_entries = gen_entry_list( $src_tpl, @$sources );
@ -216,20 +229,21 @@ sub main {
del_vsx_files();
my @app_list = get_app_list();
my @headers = <$header_dir/*.h>;
my @sources = ();
if ($include_crypto) {
@sources = <$crypto_dir/$source_dir/*.c>;
foreach my $file (<$source_dir/*.c>) {
my $basename = $file; $basename =~ s!.*/!!;
push @sources, $file unless -e "$crypto_dir/$source_dir/$basename";
}
push @headers, <$crypto_dir/$crypto_headers_dir/*.h>;
} else {
@sources = <$source_dir/*.c>;
}
my @header_dirs = (
$mbedtls_header_dir,
$psa_header_dir,
$source_dir,
@thirdparty_header_dirs,
);
my @headers = (map { <$_/*.h> } @header_dirs);
my @source_dirs = (
$source_dir,
@thirdparty_source_dirs,
);
my @sources = (map { <$_/*.c> } @source_dirs);
@headers = grep { ! $excluded_files{$_} } @headers;
@sources = grep { ! $excluded_files{$_} } @sources;
map { s!/!\\!g } @headers;
map { s!/!\\!g } @sources;

View File

@ -46,10 +46,10 @@ do_config()
echo ""
echo "config-$NAME:"
cp configs/config-$NAME.h $CONFIG_H
scripts/config.pl unset MBEDTLS_SSL_SRV_C
scripts/config.py unset MBEDTLS_SSL_SRV_C
for FLAG in $UNSET_LIST; do
scripts/config.pl unset $FLAG
scripts/config.py unset $FLAG
done
grep -F SSL_MAX_CONTENT_LEN $CONFIG_H || echo 'SSL_MAX_CONTENT_LEN=16384'

View File

@ -23,11 +23,15 @@ print_version()
shift
ARGS="$1"
shift
FAIL_MSG="$1"
VARIANT="$1"
shift
if ! `type "$BIN" > /dev/null 2>&1`; then
echo "* $FAIL_MSG"
if [ -n "$VARIANT" ]; then
VARIANT=" ($VARIANT)"
fi
if ! type "$BIN" > /dev/null 2>&1; then
echo " * ${BIN##*/}$VARIANT: Not found."
return 0
fi
@ -41,81 +45,127 @@ print_version()
VERSION_STR=`echo "$VERSION_STR" | $FILTER`
done
echo "* ${BIN##*/}: $BIN: $VERSION_STR"
if [ -z "$VERSION_STR" ]; then
VERSION_STR="Version could not be determined."
fi
echo " * ${BIN##*/}$VARIANT: ${BIN} : ${VERSION_STR} "
}
echo "** Platform:"
echo
if [ `uname -s` = "Linux" ]; then
echo "Linux variant"
lsb_release -d -c
else
echo "Unknown Unix variant"
fi
echo
print_version "uname" "-a" ""
echo
echo
echo "** Tool Versions:"
echo
if [ "${RUN_ARMCC:-1}" -ne 0 ]; then
: "${ARMC5_CC:=armcc}"
print_version "$ARMC5_CC" "--vsn" "armcc not found!" "head -n 2"
print_version "$ARMC5_CC" "--vsn" "" "head -n 2"
echo
: "${ARMC6_CC:=armclang}"
print_version "$ARMC6_CC" "--vsn" "armclang not found!" "head -n 2"
print_version "$ARMC6_CC" "--vsn" "" "head -n 2"
echo
fi
print_version "arm-none-eabi-gcc" "--version" "gcc-arm not found!" "head -n 1"
print_version "arm-none-eabi-gcc" "--version" "" "head -n 1"
echo
print_version "gcc" "--version" "gcc not found!" "head -n 1"
print_version "gcc" "--version" "" "head -n 1"
echo
print_version "clang" "--version" "clang not found" "head -n 2"
print_version "clang" "--version" "" "head -n 2"
echo
print_version "ldd" "--version" \
"No ldd present: can't determine libc version!" \
"head -n 1"
print_version "ldd" "--version" "" "head -n 1"
echo
print_version "valgrind" "--version" "valgrind not found!"
print_version "valgrind" "--version" ""
echo
print_version "gdb" "--version" "" "head -n 1"
echo
print_version "perl" "--version" "" "head -n 2" "grep ."
echo
print_version "python" "--version" "" "head -n 1"
echo
# Find the installed version of Pylint. Installed as a distro package this can
# be pylint3; installed as a Python egg (e.g. via pip), it is pylint. In test
# scripts we prefer pylint over pylint3.
if type pylint >/dev/null 2>/dev/null; then
print_version "pylint" "--version" "" "sed /^.*config/d" "grep pylint"
elif type pylint3 >/dev/null 2>/dev/null; then
print_version "pylint3" "--version" "" "sed /^.*config/d" "grep pylint"
else
echo " * pylint or pylint3: Not found."
fi
echo
: ${OPENSSL:=openssl}
print_version "$OPENSSL" "version" "openssl not found!"
print_version "$OPENSSL" "version" "default"
echo
if [ -n "${OPENSSL_LEGACY+set}" ]; then
print_version "$OPENSSL_LEGACY" "version" "openssl legacy version not found!"
echo
print_version "$OPENSSL_LEGACY" "version" "legacy"
else
echo " * openssl (legacy): Not configured."
fi
echo
if [ -n "${OPENSSL_NEXT+set}" ]; then
print_version "$OPENSSL_NEXT" "version" "openssl next version not found!"
echo
print_version "$OPENSSL_NEXT" "version" "next"
else
echo " * openssl (next): Not configured."
fi
echo
: ${GNUTLS_CLI:=gnutls-cli}
print_version "$GNUTLS_CLI" "--version" "gnuTLS client not found!" "head -n 1"
print_version "$GNUTLS_CLI" "--version" "default" "head -n 1"
echo
: ${GNUTLS_SERV:=gnutls-serv}
print_version "$GNUTLS_SERV" "--version" "gnuTLS server not found!" "head -n 1"
print_version "$GNUTLS_SERV" "--version" "default" "head -n 1"
echo
if [ -n "${GNUTLS_LEGACY_CLI+set}" ]; then
print_version "$GNUTLS_LEGACY_CLI" "--version" \
"gnuTLS client legacy version not found!" \
"head -n 1"
echo
fi
if [ -n "${GNUTLS_LEGACY_SERV+set}" ]; then
print_version "$GNUTLS_LEGACY_SERV" "--version" \
"gnuTLS server legacy version not found!" \
"head -n 1"
echo
fi
if `hash dpkg > /dev/null 2>&1`; then
echo "* asan:"
dpkg -s libasan2 2> /dev/null | grep -i version
dpkg -s libasan1 2> /dev/null | grep -i version
dpkg -s libasan0 2> /dev/null | grep -i version
print_version "$GNUTLS_LEGACY_CLI" "--version" "legacy" "head -n 1"
else
echo "* No dpkg present: can't determine asan version!"
echo " * gnutls-cli (legacy): Not configured."
fi
echo
if [ -n "${GNUTLS_LEGACY_SERV+set}" ]; then
print_version "$GNUTLS_LEGACY_SERV" "--version" "legacy" "head -n 1"
else
echo " * gnutls-serv (legacy): Not configured."
fi
echo
echo " * Installed asan versions:"
if type dpkg-query >/dev/null 2>/dev/null; then
if ! dpkg-query -f '${Status} ${Package}: ${Version}\n' -W 'libasan*' |
awk '$3 == "installed" && $4 !~ /-/ {print $4, $5}' |
grep .
then
echo " No asan versions installed."
fi
else
echo " Unable to determine the asan version without dpkg."
fi
echo