psa: Remove PSA secure binary building tools

The PSA-implementing secure binary is not built using Mbed OS build
tools anymore. Instead, the TrustedFirmware-M (TF-M) build system is
used to produce the secure binary. As such, we remove PSA related hooks
from the build system, remove PSA related scripts from tools/test
folder, and also remove the psa-autogen job from travis which was
running the now unnecessary and removed generate_partition_code.py.

Remove the ability to generate new PSA binaries in the old manner, where
Mbed OS implements PSA. We don't yet remove any PSA binaries or break
the currently checked-in Mbed-implemented PSA support. PSA targets
integrated in the old manner will continue working at this point.

Signed-off-by: Devaraj Ranganna <devaraj.ranganna@arm.com>
Signed-off-by: Jaeden Amero <jaeden.amero@arm.com>
pull/12737/head
Devaraj Ranganna 2020-02-12 14:46:40 +00:00 committed by Jaeden Amero
parent fcf427ac11
commit fd74d678a7
35 changed files with 14 additions and 3907 deletions

3
.gitignore vendored
View File

@ -97,6 +97,3 @@ test_suite.json
# default delivery dir
DELIVERY/
# Directory hosting PSA autogenerated source files
PSA_AUTOGEN/

View File

@ -226,8 +226,8 @@ matrix:
### Extended Tests ###
- &extended-vm
stage: "Extended"
name: "psa autogen"
env: NAME=psa-autogen
name: "events"
env: NAME=events EVENTS=events
language: python
python: 3.7
install:
@ -239,13 +239,6 @@ matrix:
- python -m pip install --upgrade setuptools==40.4.3
- pip install -r requirements.txt
- pip list --verbose
script:
- python tools/psa/generate_partition_code.py
- git diff --exit-code
- <<: *extended-vm
name: "events"
env: NAME=events EVENTS=events
script:
# Check that example compiles
- sed -n '/``` cpp/,/```/{/```$/Q;/```/d;p;}' ${EVENTS}/README.md > main.cpp

View File

@ -44,8 +44,6 @@ from tools.utils import argparse_filestring_type, args_error, argparse_many
from tools.utils import argparse_dir_not_parent
from tools.utils import NoValidToolchainException
from tools.utils import print_end_warnings
from tools.psa import generate_psa_sources
from tools.resources import OsAndSpeResourceFilter
def main():
start = time()
@ -189,12 +187,6 @@ def main():
if options.source_dir:
resource_filter = None
if target.is_PSA_secure_target:
generate_psa_sources(
source_dirs=options.source_dir,
ignore_paths=[options.build_dir]
)
resource_filter = OsAndSpeResourceFilter()
lib_build_res = build_library(
options.source_dir, options.build_dir, target, toolchain_name,

View File

@ -42,7 +42,7 @@ from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
BUILD_DIR)
from .resources import Resources, FileType, FileRef, PsaManifestResourceFilter
from .resources import Resources, FileType, FileRef
from .notifier.mock import MockNotifier
from .targets import TARGET_NAMES, TARGET_MAP, CORE_ARCH, Target
from .libraries import Library
@ -422,7 +422,6 @@ def get_mbed_official_release(version):
) for target in TARGET_NAMES \
if (hasattr(TARGET_MAP[target], 'release_versions')
and version in TARGET_MAP[target].release_versions)
and not Target.get_target(target).is_PSA_secure_target
)
)
@ -624,11 +623,7 @@ def build_project(src_paths, build_path, target, toolchain_name,
into_dir, extra_artifacts = toolchain.config.deliver_into()
if into_dir:
copy_when_different(res[0], into_dir)
if not extra_artifacts:
if toolchain.target.is_TrustZone_secure_target:
cmse_lib = join(dirname(res[0]), "cmse_lib.o")
copy_when_different(cmse_lib, into_dir)
else:
if extra_artifacts:
for tc, art in extra_artifacts:
if toolchain_name == tc:
copy_when_different(join(build_path, art), into_dir)
@ -774,7 +769,6 @@ def build_library(src_paths, build_path, target, toolchain_name,
res = Resources(notify).scan_with_toolchain(
src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs)
res.filter(resource_filter)
res.filter(PsaManifestResourceFilter())
# Copy headers, objects and static libraries - all files needed for
# static lib

View File

@ -706,25 +706,14 @@ class Config(object):
)
if hasattr(self.target, "mbed_{}_size".format(memory_type)):
mem_size = getattr(self.target, "mbed_{}_size".format(memory_type))
if (
self.target.is_PSA_non_secure_target or
self.target.is_PSA_secure_target
):
if self.target.is_PSA_non_secure_target:
config, _ = self.get_config_data()
if self.target.is_PSA_secure_target:
mem_start = config.get(
"target.secure-{}-start".format(memory_type), mem_start
).value
mem_size = config.get(
"target.secure-{}-size".format(memory_type), mem_size
).value
elif self.target.is_PSA_non_secure_target:
mem_start = config.get(
"target.non-secure-{}-start".format(memory_type), mem_start
).value
mem_size = config.get(
"target.non-secure-{}-size".format(memory_type), mem_size
).value
mem_start = config.get(
"target.non-secure-{}-start".format(memory_type), mem_start
).value
mem_size = config.get(
"target.non-secure-{}-size".format(memory_type), mem_size
).value
if mem_start and not isinstance(mem_start, int):
mem_start = int(mem_start, 0)
if mem_size and not isinstance(mem_size, int):

View File

@ -1,100 +0,0 @@
{
"files": [
{
"src_file": "interface/src/tfm_ns_lock_rtx.c",
"dest_file": "components/TARGET_PSA/TARGET_TFM/COMPONENT_NSPE/interface/src/tfm_ns_lock_rtx.c"
},
{
"src_file": "interface/src/tfm_psa_ns_api.c",
"dest_file": "components/TARGET_PSA/TARGET_TFM/COMPONENT_NSPE/interface/src/tfm_psa_ns_api.c"
},
{
"src_file": "interface/include/psa_client.h",
"dest_file": "components/TARGET_PSA/TARGET_TFM/interface/include/psa_client.h"
},
{
"src_file": "interface/include/psa_service.h",
"dest_file": "components/TARGET_PSA/TARGET_TFM/interface/include/psa_service.h"
},
{
"src_file": "interface/include/tfm_api.h",
"dest_file": "components/TARGET_PSA/TARGET_TFM/interface/include/tfm_api.h"
},
{
"src_file": "interface/include/tfm_ns_lock.h",
"dest_file": "components/TARGET_PSA/TARGET_TFM/interface/include/tfm_ns_lock.h"
},
{
"src_file": "interface/include/tfm_ns_svc.h",
"dest_file": "components/TARGET_PSA/TARGET_TFM/interface/include/tfm_ns_svc.h"
},
{
"src_file": "interface/include/tfm_nspm_svc_handler.h",
"dest_file": "components/TARGET_PSA/TARGET_TFM/interface/include/tfm_nspm_svc_handler.h"
},
{
"src_file": "platform/include/tfm_spm_hal.h",
"dest_file": "components/TARGET_PSA/TARGET_TFM/COMPONENT_SPE/platform/include/tfm_spm_hal.h"
},
{
"src_file": "secure_fw/ns_callable/tfm_psa_api_veneers.c",
"dest_file": "components/TARGET_PSA/TARGET_TFM/COMPONENT_SPE/secure_fw/ns_callable/tfm_psa_api_veneers.c"
}
],
"folders": [
{
"src_folder": "secure_fw/core",
"dest_folder": "components/TARGET_PSA/TARGET_TFM/COMPONENT_SPE/secure_fw/core"
},
{
"src_folder": "secure_fw/core/ipc",
"dest_folder": "components/TARGET_PSA/TARGET_TFM/COMPONENT_SPE/secure_fw/core/ipc"
},
{
"src_folder": "secure_fw/core/ipc/include",
"dest_folder": "components/TARGET_PSA/TARGET_TFM/COMPONENT_SPE/secure_fw/core/ipc/include"
},
{
"src_folder": "secure_fw/include",
"dest_folder": "components/TARGET_PSA/TARGET_TFM/COMPONENT_SPE/secure_fw/include"
},
{
"src_folder": "secure_fw/spm",
"dest_folder": "components/TARGET_PSA/TARGET_TFM/COMPONENT_SPE/secure_fw/spm"
},
{
"src_folder": "bl2/include",
"dest_folder": "components/TARGET_PSA/TARGET_TFM/COMPONENT_SPE/bl2/include"
}
],
"commit_sha": [
{
"sha": "11bff3f3cbfbd3e2c284e884d0066531e6b47d7e",
"msg": "TF-M patch: General modifications, Remove un-needed files, Disable printf and uart, Modify include paths, Guard macros from mbed_lib with ifndef"
},
{
"sha": "795e6418d0e73841868b351b605659a05c04e1f6",
"msg": "TF-M patch: Fix tfm_ns_lock_init issue (TF-M issue #239), Link to bug tracking: https://developer.trustedfirmware.org/T239"
},
{
"sha": "35938a407133fe0c20c25b6fae2836148d1adfca",
"msg": "TF-M patch: Fix service handles not cleared issue (TF-M issue #230), Link to bug tracking: https://developer.trustedfirmware.org/T230"
},
{
"sha": "910a402ce6c96b654cb6ae1a5b679e4f856c5419",
"msg": "TF-M patch: Fix tfm_psa_call_venner wrong argument type (TF-M issue #241), Link to bug tracking: https://developer.trustedfirmware.org/T241"
},
{
"sha": "cb748c5608cd68a1dbecde5b3b2c1488c3d0d17b",
"msg": "TF-M patch: Change #if TFM_PSA_API to #ifdef TFM_PSA_API to avoid compiler errors as mbed-cli only generates \"-D\" macros only for \"macros\" defined in targets.json, TF-M task link: https://developer.trustedfirmware.org/T396"
},
{
"sha": "9a5110561a60ec9f663079a25ec54f7ad0832743",
"msg": "TF-M patch: Remove secure_fw/core/tfm_func_api.c which is required only when TFM_PSA_API is not set"
},
{
"sha": "6e899b3cc98c3e1811a160df09abbccddb2fa014",
"msg": "TF-M patch/workaround related to (TF-M issue #T240), Link to bug tracking: https://developer.trustedfirmware.org/T240, The issue is fixed by TF-M team. However they autogenerate region details (code, ro, rw, zi and stack ) using linker scripts and in mbed-os we also autogenerate region details but using mix of service definition in json file and other template files."
}
]
}

View File

@ -55,8 +55,6 @@ from tools.utils import print_end_warnings
from tools.utils import print_large_string
from tools.settings import ROOT
from tools.targets import Target
from tools.psa import generate_psa_sources
from tools.resources import OsAndSpeResourceFilter
def default_args_dict(options):
return dict(
@ -336,13 +334,6 @@ def main():
if options.source_dir is not None:
resource_filter = None
if target.is_PSA_secure_target:
generate_psa_sources(
source_dirs=options.source_dir,
ignore_paths=[options.build_dir]
)
resource_filter = OsAndSpeResourceFilter()
wrapped_build_project(
options.source_dir,
options.build_dir,

View File

@ -53,8 +53,6 @@ from tools.utils import print_large_string
from tools.utils import NotSupportedException
from tools.options import extract_profile, list_profiles, extract_mcus
from tools.notifier.term import TerminalNotifier
from tools.psa import generate_psa_sources
from tools.resources import OsAndSpeResourceFilter
""" The CLI entry point for exporting projects from the mbed tools to any of the
supported IDEs or project structures.
@ -389,14 +387,7 @@ def main():
args_error(parser, "%s not supported by %s" % (mcu, ide))
try:
target = Target.get_target(mcu)
resource_filter = None
if target.is_PSA_secure_target:
generate_psa_sources(source_dirs=options.source_dir,
ignore_paths=[]
)
resource_filter = OsAndSpeResourceFilter()
export(
mcu,
ide,

View File

@ -1,69 +0,0 @@
# PSA tools
## Code generation script
Mbed-OS contains two implementations of PSA Firmware Framework:
* Mbed-SPM - Implementation for dual-core v7 targets.
* TF-M - Implementation for v8 targets.
Both PSA Firmware Framework implementations impose the following requirements:
* PSA manifests must be valid according to the JSON schema file provided by PSA FF spec.
* There are no conflicts between various PSA manifests (duplicate SIDs and PIDs, dependencies, etc.)
* Secure partition initialization code to be present at mbed-os core compile time.
To satisfy the requirement listed above, Mbed-OS build system invokes `generate_partition_code.py` script
during the build process for PSA targets.
PSA code generation step has the following effects:
* Scan the whole source tree for PSA manifest files, including application (in case invoked from application directory) and all the `TESTS` directories.
* All found PSA manifest files get parsed and validated.
* Source and header files for initializing SPM are generated. Test related partitions and SIDs are disabled by default by `#ifndef` guards.
To enable them following defines must be passed to build command (typically done automatically via [release.py](#secure-image-generation)):
* `-DUSE_PSA_TEST_PARTITIONS`
* `-DUSE_<name>` where `<name>` corresponds to the name in PSA manifest file (`"name"` property).
## Secure image generation
`release.py` is the script assigned with compiling the default secure images.
For an application with custom secure portions, the secure image should be generated by invoking `mbed-cli` directly.
> **Note**: when building targets utilizing TF-M PSA implementations, add the following arguments to a build command for the secure image:
`--app-config <mbed-os-root>/tools/psa/tfm/mbed_app.json`
### Usage
```text
usage: release.py [-h] [-m MCU] [-t TC] [-d] [-q] [-l] [--commit]
[--skip-tests] [-x ...]
optional arguments:
-h, --help show this help message and exit
-m MCU, --mcu MCU build for the given MCU
-t TC, --tc TC build for the given tool chain (default is
default_toolchain)
-d, --debug set build profile to debug
-q, --quiet No Build log will be printed
-l, --list Print supported PSA secure targets
--commit create a git commit for each platform
--skip-tests skip the test build phase
-x ..., --extra ... additional build parameters
```
* The script must be run from the mbed-os folder via `tools/psa/release.py`,
otherwise the list of available tests will not be accurate and the test
partitions will not be properly generated.
* When `MCU` is not specified, the script compiles all the images for all the targets.
* When `-t/--tc` is not specified, the script compiles with the default_toolchain specified in targets.json.
* When `-d/--debug` is not specified, the script compiles the images using the release profile.
* When `--commit` is not specified, the script will not commit the images to git and
any auto-generated PSA related components and services.
* A user can specify additional commands that will be passed on to the build commands (Ex. -D for compilation defines).
This script should be run in following scenarios:
* Release.
* Update to files originating in the secure side.
* Drivers update.
* PSA updates.

View File

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env python
# Copyright (c) 2019 ARM Limited
#
# SPDX-License-Identifier: Apache-2.0
@ -16,13 +16,7 @@
# limitations under the License.
import os
import shutil
from tools.resources import FileType
from tools.settings import ROOT
from .generate_partition_code import manifests_discovery, generate_spm_code
def find_secure_image(notify, resources, ns_image_path,
configured_s_image_filename, image_type):
@ -56,12 +50,3 @@ def find_secure_image(notify, resources, ns_image_path,
raise Exception("Required secure image not found.")
return secure_image
def generate_psa_sources(source_dirs, ignore_paths):
services, apps = manifests_discovery(root_dirs=source_dirs,
ignore_paths=ignore_paths + ['.git'])
assert len(services + apps), 'PSA manifest discovery failed'
psa_out_dir = os.path.join(ROOT, 'components', 'TARGET_PSA')
generate_spm_code(services, apps, psa_out_dir)
return psa_out_dir

View File

@ -1,129 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2017-2019 ARM Limited
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import itertools
import json
import os
import sys
from os.path import join as path_join
# Be sure that the tools directory is in the search path
ROOT = os.path.abspath(path_join(os.path.dirname(__file__), os.pardir, os.pardir))
sys.path.insert(0, ROOT)
from tools.psa.mbed_spm_tfm_common import validate_partition_manifests, \
manifests_discovery, parse_manifests, generate_source_files, \
MBED_OS_ROOT
__version__ = '1.1'
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
MANIFEST_FILE_PATTERN = '*_psa.json'
PSA_CORE_ROOT = path_join(MBED_OS_ROOT, 'components', 'TARGET_PSA')
TEMPLATES_DESC = path_join(SCRIPT_DIR, 'spm_template_file_list.json')
def _get_timestamp(f):
return os.path.getmtime(f) if os.path.isfile(f) else 0
def is_up_to_date(manifest_files, out_files):
    """Return True when every output file is at least as new as every manifest.

    Missing files are treated as having timestamp 0, so absent outputs make
    the result stale (False) whenever any manifest actually exists.
    """
    def mtime(path):
        # Inlined timestamp helper: 0 for anything that is not a regular file.
        return os.path.getmtime(path) if os.path.isfile(path) else 0

    newest_manifest = max(mtime(f) for f in manifest_files)
    oldest_output = min(mtime(f) for f in out_files)
    return newest_manifest <= oldest_output
def generate_spm_code(service_files, app_files, output_dir):
    """Render the SPM initialization sources from the discovered manifests.

    :param service_files: list of service PSA manifest file paths
    :param app_files: list of application/test PSA manifest file paths
    :param output_dir: root directory for the generated sources
    """
    # The template -> output mapping is described by a JSON side file
    # (TEMPLATES_DESC); paths in it are relative to the Mbed OS root.
    with open(TEMPLATES_DESC, 'r') as fh:
        templates_data = json.load(fh)
    templates_dict = {
        path_join(MBED_OS_ROOT, t['template']):
        path_join(output_dir, t['output']) for t in templates_data
    }
    # Skip regeneration entirely when every output is newer than every
    # manifest file.
    if is_up_to_date(service_files + app_files, list(templates_dict.values())):
        return
    # Construct lists of all the manifests and mmio_regions.
    service_manifests, service_region_list = parse_manifests(service_files)
    test_manifests, test_region_list = parse_manifests(app_files)
    # Validate the correctness of the manifest collection.
    validate_partition_manifests(service_manifests + test_manifests)
    region_list = service_region_list + test_region_list
    render_args = {
        'service_partitions': service_manifests,
        'test_partitions': test_manifests,
        'script_ver': __version__,
        'regions': region_list,
        # All unordered pairs of MMIO regions.
        'region_pair_list': list(itertools.combinations(region_list, 2)),
    }
    generate_source_files(templates_dict, render_args)
class AppendReadableDir(argparse.Action):
    """argparse action: validate that the value names an existing, readable
    directory and append its absolute path to the destination list.

    Raises argparse.ArgumentTypeError when the directory is missing or not
    readable.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        prosp_dir = os.path.abspath(values)
        if not os.path.isdir(prosp_dir):
            raise argparse.ArgumentTypeError("{} is missing".format(prosp_dir))
        if not os.access(prosp_dir, os.R_OK):
            # Fixed grammar of the original message
            # ("is not a accessible for read").
            raise argparse.ArgumentTypeError(
                "{} is not accessible for read".format(prosp_dir))
        # Lazily create the destination list so the class-level default
        # (e.g. [ROOT]) is replaced rather than mutated.
        if not getattr(namespace, self.dest):
            setattr(namespace, self.dest, [])
        getattr(namespace, self.dest).append(prosp_dir)
def get_parser():
    """Create and return the command-line parser for the SPM code generator."""
    parser = argparse.ArgumentParser(
        description='PSA SPM code generator',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument(
        '-u', '--user-app',
        action=AppendReadableDir,
        default=[ROOT],
        metavar='DIR',
        help='Root directory for recursive PSA manifest scan. Use for adding '
             'application specific secure partitions. Can be supplied more '
             'than once')

    parser.add_argument(
        '-o', '--output-dir',
        default=ROOT,
        metavar='DIR',
        help='Root directory for generating the sources')

    return parser
def main():
    """Discover PSA manifests under the requested roots and generate SPM code."""
    args = get_parser().parse_args()
    services, apps = manifests_discovery(root_dirs=args.user_app,
                                         ignore_paths=['BUILD', '.git'])
    generate_spm_code(services, apps, args.output_dir)


if __name__ == '__main__':
    main()

View File

@ -1,678 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2017-2018 ARM Limited
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from os.path import join as path_join
import json
from jsonschema import validate
import fnmatch
from six import integer_types, string_types
from jinja2 import Environment, FileSystemLoader, StrictUndefined
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
MBED_OS_ROOT = os.path.abspath(path_join(SCRIPT_DIR, os.pardir, os.pardir))
SERVICES_DIR = path_join(MBED_OS_ROOT, "components", "TARGET_PSA", "services")
TESTS_DIR = path_join(MBED_OS_ROOT, "TESTS", "psa")
MANIFEST_FILE_PATTERN = '*_psa.json'
def _assert_int(num):
"""
Tries to parse an integer num from a given string
:param num: Number in int/string type
:return: Numeric value
"""
if isinstance(num, int):
return num
num_str = str(num)
radix = 16 if num_str.lower().startswith('0x') else 10
res = int(num_str, radix)
# Python converts str to int as a signed integer
if res > 0x7FFFFFFF:
res -= 0x100000000
return res
class RotService(object):
    """A single Root of Trust Service entry from a partition manifest."""

    MINOR_POLICIES = ['STRICT', 'RELAXED']

    def __init__(
            self,
            name,
            identifier,
            signal,
            non_secure_clients,
            minor_version=1,
            minor_policy='STRICT'
    ):
        """
        :param name: Root of Trust Service identifier (available to user)
        :param identifier: Root of Trust Service numeric enumeration
        :param signal: Root of Trust Service identifier inside the partition
        :param non_secure_clients: True to allow connections from non-secure
               partitions
        :param minor_version: Root of Trust Service version (> 0)
        :param minor_policy: Enforcement level of minor version
        """
        self.name = name
        self.id = identifier
        self.signal = signal
        # Rejects non-numeric ids (and, as a side effect, an id of zero).
        assert _assert_int(identifier)
        assert isinstance(non_secure_clients, bool), \
            'non_secure_clients parameter must be of boolean type'
        self.nspe_callable = non_secure_clients
        self.minor_version = _assert_int(minor_version)
        assert self.minor_version > 0, 'minor_version parameter is invalid'
        assert minor_policy in self.MINOR_POLICIES, \
            'minor_policy parameter is invalid'
        self.minor_policy = minor_policy

    @property
    def numeric_id(self):
        """The service id coerced to an integer."""
        return _assert_int(self.id)

    def __eq__(self, other):
        lhs = (self.name, self.id, self.signal, self.nspe_callable,
               self.minor_version, self.minor_policy)
        rhs = (other.name, other.id, other.signal, other.nspe_callable,
               other.minor_version, other.minor_policy)
        return lhs == rhs
class MmioRegion(object):
    """A memory-mapped I/O region granted to a secure partition.

    Supports both *named* regions (base/size derived from a C symbol) and
    *numeric* regions (explicit base address and size).
    """

    MMIO_PERMISSIONS = {
        'READ-ONLY': 'PSA_MMIO_PERM_READ_ONLY',
        'READ-WRITE': 'PSA_MMIO_PERM_READ_WRITE'
    }

    def __init__(self, **kwargs):
        """Build a region from manifest keyword arguments.

        Named form: ``name`` + ``permission``.
        Numeric form: ``base`` + ``size`` + ``permission``.
        ``partition_id`` is always required.
        """
        assert 'permission' in kwargs
        self.permission = self.MMIO_PERMISSIONS[kwargs['permission']]
        if 'name' in kwargs:
            # Named region: the size is resolved at C compile time.
            symbol = kwargs['name']
            self.base = symbol
            self.size = '(sizeof(*({})))'.format(symbol)
        if 'base' in kwargs:
            # Numeric region (takes precedence if both forms appear).
            self.base = kwargs['base']
            self.size = _assert_int(kwargs['size'])
        assert 'partition_id' in kwargs
        self.partition_id = _assert_int(kwargs['partition_id'])
        # One of the two forms must have populated every attribute.
        for attr in ('base', 'size', 'permission', 'partition_id'):
            assert hasattr(self, attr)

    def __eq__(self, other):
        # Note: partition_id does not participate in equality.
        return ((self.base, self.size, self.permission) ==
                (other.base, other.size, other.permission))
class Irq(object):
    """An interrupt line owned by a secure partition."""

    def __init__(self, line_num, signal):
        """
        :param line_num: number of interrupt used by the partition
        :param signal: IRQ line identifier inside the partition
        """
        self.line_num = _assert_int(line_num)
        assert isinstance(signal, string_types)
        self.signal = signal

    def __eq__(self, other):
        return ((self.line_num, self.signal) ==
                (other.line_num, other.signal))
class Manifest(object):
    """A parsed PSA partition manifest (one *_psa.json file)."""

    # Manifest priority strings mapped to RTOS thread priority identifiers.
    PRIORITY = {
        'LOW': 'osPriorityLow',
        'NORMAL': 'osPriorityNormal',
        'HIGH': 'osPriorityHigh'
    }
    PARTITION_TYPES = ['APPLICATION-ROT', 'PSA-ROT']
    # The following signal bits cannot be used:
    # bit[0-2] | Reserved
    # bit[3] | PSA Doorbell
    # bit[31] | RTX error bit
    RESERVED_SIGNALS = 5

    def __init__(
            self,
            manifest_file,
            name,
            partition_id,
            partition_type,
            priority,
            entry_point,
            heap_size,
            stack_size,
            source_files,
            mmio_regions=None,
            rot_services=None,
            extern_sids=None,
            irqs=None
    ):
        """
        Manifest C'tor (Aligned with json schema)

        :param manifest_file: Path to json manifest
        :param name: Partition unique name
        :param partition_id: Partition identifier
        :param partition_type: Whether the partition is unprivileged or part
               of the trusted computing base
        :param priority: Priority of the partition's thread
        :param entry_point: C symbol name of the partition's main function
        :param heap_size: Size of heap required for the partition
        :param stack_size: Size of stack required for the partition
        :param source_files: List of files assembling the partition
               (relative paths)
        :param mmio_regions: List of MMIO regions used by the partition
        :param rot_services: List of Root of Trust Services declared by the
               partition
        :param extern_sids: List of Root of Trust Services the partition can
               call
        :param irqs: List of interrupts the partition can handle
        """
        assert manifest_file is not None
        assert name is not None
        assert partition_id is not None
        assert partition_type is not None
        assert entry_point is not None
        assert priority is not None
        assert heap_size is not None
        assert stack_size is not None
        assert source_files is not None
        # Replace None defaults with fresh lists (avoids the shared
        # mutable-default-argument pitfall).
        mmio_regions = [] if mmio_regions is None else mmio_regions
        rot_services = [] if rot_services is None else rot_services
        extern_sids = [] if extern_sids is None else extern_sids
        irqs = [] if irqs is None else irqs
        assert os.path.isfile(manifest_file)
        assert isinstance(partition_id, integer_types)
        assert isinstance(heap_size, int)
        assert isinstance(stack_size, int)
        assert isinstance(entry_point, string_types)
        assert partition_type in self.PARTITION_TYPES
        assert partition_id > 0
        self.file = manifest_file
        self.name = name
        self.id = partition_id
        self.type = partition_type
        # Priority is kept in both the TF-M (string) and Mbed (RTOS enum
        # name) representations.
        self.priority_tfm = priority
        self.priority_mbed = self.PRIORITY[priority]
        self.heap_size = heap_size
        self.stack_size = stack_size
        self.entry_point = entry_point
        # A single source file may be given as a bare string.
        if isinstance(source_files, list):
            self.source_files = source_files
        else:
            self.source_files = [source_files]
        self.mmio_regions = mmio_regions
        self.rot_services = rot_services
        self.extern_sids = extern_sids
        self.irqs = irqs
        for src_file in self.source_files:
            assert os.path.isfile(src_file), \
                "The source file {} mentioned in {} doesn't exist.".format(
                    src_file, self.file
                )
        for rot_srv in self.rot_services:
            assert isinstance(rot_srv, RotService)
        for extern_sid in self.extern_sids:
            assert isinstance(extern_sid, string_types)
        assert len(self.extern_sids) == len(set(self.extern_sids)), \
            'Detected duplicates external SIDs in {}'.format(self.file)
        for irq in self.irqs:
            assert isinstance(irq, Irq)
        # Each RoT service and each IRQ consumes one bit of the 32-bit
        # signal mask; RESERVED_SIGNALS bits are unavailable (see above).
        total_signals = len(self.rot_services) + len(self.irqs)
        assert total_signals <= 32 - self.RESERVED_SIGNALS, \
            'Manifest {} - {} exceeds limit of RoT services and IRQs allowed ' \
            '({}).'.format(
                self.name, self.file, 32 - self.RESERVED_SIGNALS
            )

    def __eq__(self, other):
        # Field-by-field equality over every attribute parsed from the
        # manifest file.
        return (
            (self.file == other.file) and
            (self.name == other.name) and
            (self.id == other.id) and
            (self.type == other.type) and
            (self.priority_mbed == other.priority_mbed) and
            (self.priority_tfm == other.priority_tfm) and
            (self.heap_size == other.heap_size) and
            (self.stack_size == other.stack_size) and
            (self.entry_point == other.entry_point) and
            (self.source_files == other.source_files) and
            (self.mmio_regions == other.mmio_regions) and
            (self.rot_services == other.rot_services) and
            (self.extern_sids == other.extern_sids) and
            (self.irqs == other.irqs)
        )

    @classmethod
    def from_json(cls, manifest_file, skip_src=False):
        """
        Load a partition manifest file

        :param manifest_file: Manifest file path
        :param skip_src: Ignore the `source_files` entry
        :return: Manifest object
        """
        partition_schema_path = path_join(
            SCRIPT_DIR,
            'partition_description_schema.json'
        )
        with open(partition_schema_path) as schema_fh:
            partition_schema = json.load(schema_fh)
        # Load partition manifest file.
        with open(manifest_file) as fh:
            manifest = json.load(fh)
        # jsonschema raises on any manifest that violates the schema.
        validate(manifest, partition_schema)
        manifest_dir = os.path.dirname(manifest_file)
        source_files = []
        if not skip_src:
            # Source paths in the manifest are relative to the manifest
            # file itself.
            for src_file in manifest['source_files']:
                source_files.append(
                    os.path.normpath(path_join(manifest_dir, src_file)))
        mmio_regions = []
        for mmio_region in manifest.get('mmio_regions', []):
            mmio_regions.append(
                MmioRegion(partition_id=manifest['id'], **mmio_region))
        rot_services = []
        for rot_srv in manifest.get('services', []):
            rot_services.append(RotService(**rot_srv))
        irqs = []
        for irq in manifest.get('irqs', []):
            irqs.append(Irq(**irq))
        return Manifest(
            manifest_file=manifest_file,
            name=manifest['name'],
            partition_id=_assert_int(manifest['id']),
            partition_type=manifest['type'],
            priority=manifest['priority'],
            heap_size=_assert_int(manifest['heap_size']),
            stack_size=_assert_int(manifest['stack_size']),
            entry_point=manifest['entry_point'],
            source_files=source_files,
            mmio_regions=mmio_regions,
            rot_services=rot_services,
            extern_sids=manifest.get('extern_sids', []),
            irqs=irqs
        )

    @property
    def sids(self):
        # Names of every RoT service declared by this partition.
        return [rot_srv.name for rot_srv in self.rot_services]

    @property
    def autogen_folder(self):
        # Absolute path of the directory containing the manifest file.
        return os.path.abspath(os.path.dirname(self.file))

    def find_dependencies(self, manifests):
        """
        Find other manifests which holds Root of Trust Services that
        are declared as extern in this manifest

        :param manifests: list of manifests to filter
        :return: list of manifest's names that holds current
                 extern Root of Trust Services
        """
        manifests = [man for man in manifests if man != self]
        extern_sids_set = set(self.extern_sids)
        return [manifest.name for manifest in manifests
                if extern_sids_set.intersection(set(manifest.sids))]

    def templates_to_files(self, templates, templates_base, output_dir):
        """
        Translates a list of partition templates to file names

        :param templates: List of partition templates
        :param templates_base: Base directory of the templates
        :param output_dir: Output directory (Default is autogen folder
               property)
        :return: Dictionary of template to output file translation
        """
        generated_files = {}
        for t in templates:
            fname = os.path.relpath(t, templates_base)
            # 'NAME' in the template filename is a placeholder for the
            # (lower-cased) partition name; the template extension is
            # stripped via splitext.
            _tpl = fname.replace('NAME', self.name.lower())
            full_path = path_join(
                output_dir,
                os.path.splitext(_tpl)[0]
            )
            generated_files[t] = full_path
        return generated_files
def check_circular_call_dependencies(manifests):
    """
    Check if there is a circular dependency between the partitions
    described by the manifests.

    A cycle exists when partition A (transitively) calls a Root of Trust
    Service in partition B while B also (transitively) calls back into A.

    :param manifests: List of the partition manifests.
    :return: True if a circular dependency exists, false otherwise.
    """
    # Forward edges: partition name -> names of partitions it calls.
    calls = {m.name: m.find_dependencies(manifests) for m in manifests}
    # Reverse edges: partition name -> set of partitions calling it.
    called_by = {name: set() for name in calls}
    for caller, callees in calls.items():
        for callee in callees:
            called_by[callee].add(caller)

    # Kahn-style elimination: repeatedly strip nodes nobody calls.
    remaining = set(calls)
    while remaining:
        removable = [n for n in remaining if not called_by[n]]
        if not removable:
            # Every remaining node is still called by someone: a cycle.
            return True
        for node in removable:
            for callee in calls[node]:
                called_by[callee].discard(node)
            remaining.discard(node)
    return False
def validate_partition_manifests(manifests):
    """
    Check the correctness of the manifests list
    (no conflicts, no missing elements, etc.)

    :param manifests: List of the partition manifests
    :raises ValueError: on any duplicate declaration, unresolvable external
            SID, circular call dependency, or unreachable partition
    """
    for manifest in manifests:
        assert isinstance(manifest, Manifest)

    # Each map records the manifest file that first declared a given
    # property, so duplicate declarations can name both offending files.
    partitions_names = {}
    partitions_ids = {}
    rot_service_ids = {}
    rot_service_names = {}
    rot_service_signals = {}
    irq_signals = {}
    irq_numbers = {}
    all_extern_sids = set()
    # Partitions with no NSPE-callable RoT Service; they must be reachable
    # some other way (extern SID reference or an IRQ) -- checked at the end.
    spe_contained_manifests = []

    for manifest in manifests:
        # Make sure the partition names are unique.
        if manifest.name in partitions_names:
            raise ValueError(
                'Partition name {} is not unique, '
                'found in both {} and {}.'.format(
                    manifest.name,
                    partitions_names[manifest.name],
                    manifest.file
                )
            )
        partitions_names[manifest.name] = manifest.file

        # Make sure the partition ID's are unique.
        if manifest.id in partitions_ids:
            raise ValueError(
                'Partition id {} is not unique, '
                'found in both {} and {}.'.format(
                    manifest.id,
                    partitions_ids[manifest.id],
                    manifest.file
                )
            )
        partitions_ids[manifest.id] = manifest.file

        # Fixed spelling of the accumulator name (was "is_nspe_callabale").
        is_nspe_callable = False

        # Make sure all the Root of Trust Service IDs and signals are unique.
        for rot_service in manifest.rot_services:
            if rot_service.name in rot_service_names:
                raise ValueError(
                    'Root of Trust Service name {} is found '
                    'in both {} and {}.'.format(
                        rot_service.name,
                        rot_service_names[rot_service.name],
                        manifest.file
                    )
                )
            rot_service_names[rot_service.name] = manifest.file

            if rot_service.signal in rot_service_signals:
                raise ValueError(
                    'Root of Trust Service signal {} is found '
                    'in both {} and {}.'.format(
                        rot_service.signal,
                        rot_service_signals[rot_service.signal],
                        manifest.file
                    )
                )
            rot_service_signals[rot_service.signal] = manifest.file

            if rot_service.numeric_id in rot_service_ids:
                raise ValueError(
                    'Root of Trust Service identifier {} is found '
                    'in both {} and {}.'.format(
                        rot_service.numeric_id,
                        rot_service_ids[rot_service.numeric_id],
                        manifest.file
                    )
                )
            rot_service_ids[rot_service.numeric_id] = manifest.file
            is_nspe_callable |= rot_service.nspe_callable

        if not is_nspe_callable:
            spe_contained_manifests.append(manifest)

        # Make sure all the IRQ signals and line-numbers are unique.
        for irq in manifest.irqs:
            if irq.signal in irq_signals:
                raise ValueError(
                    'IRQ signal {} is found in both {} and {}.'.format(
                        irq.signal,
                        irq_signals[irq.signal],
                        manifest.file
                    )
                )
            irq_signals[irq.signal] = manifest.file

            if irq.line_num in irq_numbers:
                raise ValueError(
                    'IRQ line number {} is found in both {} and {}.'.format(
                        irq.line_num,
                        irq_numbers[irq.line_num],
                        manifest.file
                    )
                )
            irq_numbers[irq.line_num] = manifest.file

        all_extern_sids.update(manifest.extern_sids)

    # Check that all the external SIDs can be found.
    declared_sids = set(rot_service_names.keys())
    for manifest in manifests:
        extern_sids = set(manifest.extern_sids)
        if not extern_sids.issubset(declared_sids):
            missing_sids = extern_sids.difference(declared_sids)
            raise ValueError(
                "External SID(s) {} required by {} can't be found in "
                "any partition manifest.".format(
                    ', '.join(missing_sids), manifest.file)
            )

    if check_circular_call_dependencies(manifests):
        raise ValueError(
            "Detected a circular call dependency between the partitions.")

    # A partition with no NSPE-callable service must still be reachable:
    # either another partition references one of its SIDs, or it owns IRQs.
    for manifest in spe_contained_manifests:
        rot_services = set([service.name for service in manifest.rot_services])
        if not rot_services.intersection(all_extern_sids) and len(
                manifest.irqs) == 0:
            raise ValueError(
                'Partition {} (defined by {}) is not accessible from NSPE '
                'and not referenced by any other partition.'.format(
                    manifest.name,
                    manifest.file
                )
            )
def is_test_manifest(manifest):
    """Return True when the manifest path belongs to a test partition."""
    return manifest.find('TESTS') != -1
def is_service_manifest(manifest):
    """Return True when the manifest path does not belong to a test partition."""
    if is_test_manifest(manifest):
        return False
    return True
def manifests_discovery(root_dirs, ignore_paths):
    """
    Recursively collect manifest files under the given roots.

    :param root_dirs: Directories to scan (symlinks are followed)
    :param ignore_paths: Path fragments; directories containing any of
           them are skipped
    :return: (sorted service manifest list, sorted test manifest list)
    """
    service_manifest_files = set()
    test_manifest_files = set()

    for root_dir in root_dirs:
        for dirpath, _dirs, filenames in os.walk(root_dir, followlinks=True):
            # Filters paths if they are inside one of the ignore paths
            if any(ignored in dirpath for ignored in ignore_paths):
                continue
            for name in fnmatch.filter(filenames, MANIFEST_FILE_PATTERN):
                candidate = path_join(dirpath, name)
                if is_test_manifest(candidate):
                    test_manifest_files.add(candidate)
                else:
                    service_manifest_files.add(candidate)

    return sorted(service_manifest_files), sorted(test_manifest_files)
def parse_manifests(manifests_files):
    """
    Parse manifest JSON files into Manifest objects.

    :param manifests_files: Iterable of manifest file paths
    :return: (list of Manifest objects, flat list of their MMIO regions)
    """
    manifests = [Manifest.from_json(path) for path in manifests_files]
    region_list = []
    for manifest_obj in manifests:
        region_list.extend(manifest_obj.mmio_regions)
    return manifests, region_list
def generate_source_files(
        templates,
        render_args,
        extra_filters=None
):
    """
    Generate SPM common C code from manifests using given templates

    :param templates: Dictionary of template and their auto-generated products
    :param render_args: Dictionary of arguments that should be passed to render
    :param extra_filters: Dictionary of extra filters to use in the rendering
           process
    :return: None -- the rendered files are written to the paths given as
             values of ``templates``
    """
    templates_dirs = list(
        set([os.path.dirname(path) for path in templates])
    )
    template_files = {os.path.basename(t): t for t in templates}

    # Load templates for the code generation.
    env = Environment(
        loader=FileSystemLoader(templates_dirs),
        lstrip_blocks=True,
        trim_blocks=True,
        undefined=StrictUndefined
    )
    if extra_filters:
        env.filters.update(extra_filters)

    # Render everything first; output directories are created once, just
    # before writing (the old per-template makedirs here was redundant).
    rendered_files = [
        (templates[template_files[tf]],
         env.get_template(tf).render(**render_args))
        for tf in template_files
    ]

    for fname, data in rendered_files:
        output_folder = os.path.dirname(fname)
        if not os.path.isdir(output_folder):
            os.makedirs(output_folder)
        with open(fname, 'wt') as fh:
            fh.write(data)

View File

@ -1,196 +0,0 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "schema for a partition description.",
"type": "object",
"required": ["name", "type", "priority", "id", "entry_point", "stack_size", "heap_size", "source_files"],
"anyOf": [
{"required" : ["services"]},
{"required" : ["irqs"]}
],
"properties": {
"name": {
"description": "Alphanumeric C macro for referring to a partition. (all capital)",
"$ref": "#/definitions/c_macro"
},
"type": {
"description": "Whether the partition is unprivileged or part of the trusted computing base.",
"enum": ["APPLICATION-ROT", "PSA-ROT"]
},
"priority": {
"description": "Partition task priority.",
"enum": ["LOW", "NORMAL", "HIGH"]
},
"id": {
"description": "Partition numeric unique positive identifier. (must be a positive 8 bytes hex string)",
"type": "string",
"pattern": "^0x[0-7][0-9a-fA-F]{7}$"
},
"entry_point": {
"description": "C symbol name of the partition's entry point. (unmangled, use extern C if needed)",
"$ref": "#/definitions/c_symbol"
},
"stack_size": {
"description": "Partition's task stack size in bytes.",
"$ref": "#/definitions/positive_integer_or_hex_string"
},
"heap_size": {
"description": "Partition's task heap size in bytes.",
"$ref": "#/definitions/positive_integer_or_hex_string"
},
"mmio_regions": {
"description": "List of Memory-Mapped IO region objects which the partition has access to.",
"type": "array",
"items": {
"anyOf": [{
"$ref": "#/definitions/named_region"
},
{
"$ref": "#/definitions/numbered_region"
}
]
},
"uniqueItems": true
},
"services": {
"description": "List of RoT Service objects which the partition implements.",
"type": "array",
"items": {
"$ref": "#/definitions/service"
},
"uniqueItems": true
},
"extern_sids": {
"description": "List of SID which the partition code depends on and allowed to access.",
"type": "array",
"items": {
"$ref": "#/definitions/c_macro"
},
"uniqueItems": true
},
"source_files": {
"description": "List of source files relative to PSA Manifest file. A Secure Partition is built from explicit file list.",
"type": "array",
"items": {
"type": "string",
"pattern": "^[a-zA-Z0-9-_./]+$"
},
"minItems": 1,
"uniqueItems": true
},
"irqs": {
"description": "List of IRQ objects which the partition implements.",
"type": "array",
"items": {
"$ref": "#/definitions/irq"
},
"uniqueItems": true
}
},
"definitions": {
"c_macro": {
"type": "string",
"pattern": "^[A-Z_][A-Z0-9_]*$"
},
"c_symbol": {
"type": "string",
"pattern": "^[a-zA-Z_][a-zA-Z0-9_]*$"
},
"hex_string": {
"type": "string",
"pattern": "^0x(0*[1-9a-fA-F][0-9a-fA-F]*)$",
"minLength": 3,
"maxLength": 10
},
"positive_integer": {
"type": "integer",
"exclusiveMinimum": true,
"minimum": 0
},
"positive_integer_or_hex_string": {
"oneOf": [{
"$ref": "#/definitions/positive_integer"
},
{
"$ref": "#/definitions/hex_string"
}
]
},
"named_region": {
"description": "MMIO region which is described by it's C macro name and access permissions.",
"required": ["name", "permission"],
"properties": {
"name": {
"description": "Alphanumeric C macro for referring to the region.",
"$ref": "#/definitions/c_macro"
},
"permission": {
"description": "Access permissions for the region.",
"enum": ["READ-ONLY", "READ-WRITE"]
}
}
},
"numbered_region": {
"description": "MMIO region which is described by it's base address, size and access permissions.",
"required": ["base", "size", "permission"],
"properties": {
"base": {
"description": "The base address of the region.",
"$ref": "#/definitions/hex_string"
},
"size": {
"description": "Size in bytes of the region.",
"$ref": "#/definitions/positive_integer_or_hex_string"
},
"permission": {
"description": "Access permissions for the region.",
"enum": ["READ-ONLY", "READ-WRITE"]
}
}
},
"service": {
"required": ["name", "identifier", "non_secure_clients", "signal"],
"properties": {
"name": {
"description": "Alphanumeric C macro for referring to a RoT Service from source code (all capital)",
"$ref": "#/definitions/c_macro"
},
"identifier": {
"description": "The integer value of the NAME field",
"$ref": "#/definitions/positive_integer_or_hex_string"
},
"non_secure_clients": {
"description": "Denote whether the RoT Service is exposed to non-secure clients.",
"type": "boolean"
},
"signal": {
"description": "Alphanumeric C macro for referring to the RoT Service's signal value. (all capital)",
"$ref": "#/definitions/c_macro"
},
"minor_version": {
"description": "Optional: Minor version number of the RoT Service's interface.",
"$ref": "#/definitions/positive_integer",
"default": 1
},
"minor_policy": {
"description": "Optional: Minor version policy to apply on connections to the RoT Service.",
"enum": ["STRICT", "RELAXED"],
"default": "STRICT"
}
}
},
"irq": {
"required": ["line_num", "signal"],
"properties": {
"line_num": {
"description": "Interrupt line number for registering to ISR table entry and enable/disable the specific IRQ once received.",
"type": "integer",
"minimum": 0
},
"signal": {
"description": "Alphanumeric C macro for referring to the IRQ's signal value. (all capital)",
"$ref": "#/definitions/c_macro"
}
}
}
}
}

View File

@ -1,389 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2017-2018 ARM Limited
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
import shutil
import logging
import argparse
# Sink for subprocess output when the tool runs in --quiet mode.
FNULL = open(os.devnull, 'w')
# Repository root: two directory levels up from this script.
ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                    os.pardir, os.pardir))
sys.path.insert(0, ROOT)
from tools.toolchains import TOOLCHAIN_CLASSES
from tools.targets import Target, TARGET_MAP, TARGET_NAMES
logging.basicConfig(level=logging.DEBUG,
                    format='[%(name)s] %(asctime)s: %(message)s.',
                    datefmt='%H:%M:%S')
logger = logging.getLogger('PSA release tool')
# Rebound to FNULL / subprocess.STDOUT by main() when --quiet is given.
subprocess_output = None
subprocess_err = None
# NOTE(review): "LOCATTION" is misspelled; renaming would touch all users,
# so the names are left as-is.
MAKE_PY_LOCATTION = os.path.join(ROOT, 'tools', 'make.py')
TEST_PY_LOCATTION = os.path.join(ROOT, 'tools', 'test.py')
TFM_MBED_APP = os.path.join(ROOT, 'tools', 'psa', 'tfm', 'mbed_app.json')
# Greentea test name -> preprocessor defines enabling the matching
# secure test partitions.
PSA_TESTS = {
    '*psa-spm_smoke': ['USE_PSA_TEST_PARTITIONS', 'USE_SMOKE_TESTS_PART1'],
    '*psa-spm_client': ['USE_PSA_TEST_PARTITIONS', 'USE_CLIENT_TESTS_PART1'],
    '*psa-spm_server': ['USE_PSA_TEST_PARTITIONS', 'USE_SERVER_TESTS_PART1',
                        'USE_SERVER_TESTS_PART2'],
    '*psa-crypto_access_control': ['USE_PSA_TEST_PARTITIONS',
                                   'USE_CRYPTO_ACL_TEST']
}
# Auto-generated PSA sources committed by commit_psa_autogen().
PSA_AUTOGEN_LOCATION = os.path.join(ROOT, 'components', 'TARGET_PSA')
def _psa_backend(target):
    """
    Returns a target PSA backend.

    :param target: Target name as in targets.json
    :return: PSA backend as string (TFM/MBED_SPM)
    """
    labels = Target.get_target(target).labels
    if 'TFM' in labels:
        return 'TFM'
    return 'MBED_SPM'
def _get_target_info(target, toolchain):
    """
    Creates a PSA target tuple with default toolchain and
    artifact delivery directory.

    :param target: Target name.
    :param toolchain: Toolchain override; None selects the target default.
    :return: tuple (target, toolchain, delivery directory).
    """
    target_obj = TARGET_MAP[target]
    delivery_dir = os.path.join(ROOT, 'targets', target_obj.delivery_dir)
    if not os.path.exists(delivery_dir):
        raise Exception("{} does not have delivery_dir".format(target))

    if toolchain:
        if toolchain not in target_obj.supported_toolchains:
            raise Exception("Toolchain {} is not supported by {}".format(
                toolchain, target_obj.name))
        chosen_toolchain = toolchain
    else:
        chosen_toolchain = target_obj.default_toolchain

    return (target_obj.name, chosen_toolchain, delivery_dir)
def _get_psa_secure_targets_list():
    """
    Creates a list of PSA secure targets.

    :return: List of PSA secure targets.
    """
    secure_targets = []
    for name in TARGET_NAMES:
        if Target.get_target(name).is_PSA_secure_target:
            secure_targets.append(str(name))
    return secure_targets
def verbose_check_call(cmd, check_call=True):
    """
    Calls a shell command and logs the call.

    :param cmd: command to run as a list
    :param check_call: choose subprocess method (call/check_call)
    :return: return code of the executed command
    """
    logger.info('Running: {}'.format(' '.join(cmd)))
    runner = subprocess.check_call if check_call else subprocess.call
    return runner(cmd, stdout=subprocess_output, stderr=subprocess_err)
def get_mbed_official_psa_release(target=None, toolchain=None):
    """
    Creates a list of PSA targets with default toolchain and
    artifact delivery directory.

    :param target: Ask for specific target, None for all targets.
    :param toolchain: Toolchain override forwarded to _get_target_info.
    :return: List of tuples (target, toolchain, delivery directory).
    """
    psa_secure_targets = _get_psa_secure_targets_list()
    logger.debug("Found the following PSA targets: {}".format(
        ', '.join(psa_secure_targets)))
    if target is None:
        return [_get_target_info(t, toolchain) for t in psa_secure_targets]
    return [_get_target_info(target, toolchain)]
def create_mbed_ignore(build_dir):
    """
    Creates a .mbedignore file in a given directory.

    :param build_dir: Directory to create .mbedignore file.
    """
    with open(os.path.join(build_dir, '.mbedignore'), 'w') as f:
        f.write('*\n')
    # Log after the write so the log cannot claim the file was created
    # when open()/write() actually raised.
    logger.debug('Created .mbedignore in {}'.format(build_dir))
def build_tests(target, toolchain, profile, args):
    """
    Builds secure images for tests.

    :param target: target to be built.
    :param toolchain: toolchain to be used.
    :param profile: build profile.
    :param args: list of extra arguments.
    """
    build_dir = os.path.join(ROOT, 'BUILD', 'tests', target)
    if os.path.exists(build_dir):
        logger.info("BUILD directory deleted: {}".format(build_dir))
        shutil.rmtree(build_dir)

    for test, defines in PSA_TESTS.items():
        logger.info(
            "Building tests image({}) for {} using {} with {} profile".format(
                test, target, toolchain, profile))

        test_defines = ['-D{}'.format(define) for define in defines]
        cmd = [
            sys.executable, TEST_PY_LOCATTION,
            '--greentea',
            '--profile', profile,
            '-t', toolchain,
            '-m', target,
            '--source', ROOT,
            '--build', build_dir,
            '--test-spec', os.path.join(build_dir, 'test_spec.json'),
            '--build-data', os.path.join(build_dir, 'build_data.json'),
            '-n', test] + test_defines + args

        # Fixed: was "is 'TFM'" -- identity comparison with a string literal
        # only worked via CPython interning; use equality.
        if _psa_backend(target) == 'TFM':
            cmd += ['--app-config', TFM_MBED_APP]

        verbose_check_call(cmd)
        logger.info(
            "Finished Building tests image({}) for {}".format(test, target))
def build_default_image(target, toolchain, profile, args):
    """
    Builds the default secure image.

    :param target: target to be built.
    :param toolchain: toolchain to be used.
    :param profile: build profile.
    :param args: list of extra arguments.
    """
    logger.info("Building default image for {} using {} with {} profile".format(
        target, toolchain, profile))

    build_dir = os.path.join(ROOT, 'BUILD', target)
    if os.path.exists(build_dir):
        logger.info("BUILD directory deleted: {}".format(build_dir))
        shutil.rmtree(build_dir)

    cmd = [
        sys.executable, MAKE_PY_LOCATTION,
        '-t', toolchain,
        '-m', target,
        '--profile', profile,
        '--source', ROOT,
        '--build', build_dir] + args

    # Fixed: was "is 'TFM'" -- identity comparison with a string literal
    # only worked via CPython interning; use equality.
    if _psa_backend(target) == 'TFM':
        cmd += ['--app-config', TFM_MBED_APP]
    else:
        cmd += ['--artifact-name', 'psa_release_1.0']

    verbose_check_call(cmd)
    logger.info(
        "Finished building default image for {} successfully".format(target))
def commit_binaries(target, delivery_dir, toolchain):
    """
    Commits changes in secure binaries.

    :param target: Target name.
    :param delivery_dir: Secure images should be moved to this folder
           by the build system.
    :param toolchain: Toolchain used for the build (recorded in the
           commit message).
    """
    changes_made = verbose_check_call([
        'git',
        '-C', ROOT,
        'diff', '--exit-code', '--quiet',
        delivery_dir], check_call=False)

    if changes_made:
        logger.info("Change in images for {} has been detected".format(target))
        verbose_check_call([
            'git',
            '-C', ROOT,
            'add', os.path.relpath(delivery_dir, ROOT)])

        logger.info("Committing images for {}".format(target))
        # argv is passed to git without a shell, so the message must not
        # carry its own quoting: the previous embedded '"' characters ended
        # up verbatim in the commit message (commit_psa_autogen already
        # builds its message without quotes).
        commit_message = '--message=Update secure binaries for %s (%s)' % (
            target, toolchain)
        verbose_check_call([
            'git',
            '-C', ROOT,
            'commit',
            commit_message])
    else:
        logger.info("No changes detected in {}, Skipping commit".format(target))
def commit_psa_autogen():
    """
    Commit changes related to auto-generated PSA components and services
    """
    # Non-zero exit status from `git diff --exit-code` means changes exist.
    diff_cmd = ['git',
                '-C', ROOT,
                'diff', '--exit-code', '--quiet',
                PSA_AUTOGEN_LOCATION]
    if not verbose_check_call(diff_cmd, check_call=False):
        logger.info("No changes has been detected for PSA autogen, "
                    "Skipping commit")
        return

    logger.info("Change in PSA auto-generated files has been detected")
    verbose_check_call(['git',
                        '-C', ROOT,
                        'add', PSA_AUTOGEN_LOCATION])

    logger.info("Committing changes...")
    commit_message = ('--message=Update PSA auto-generated components and '
                      'services')
    verbose_check_call(['git',
                        '-C', ROOT,
                        'commit',
                        commit_message])
def build_psa_platform(target, toolchain, delivery_dir, debug, git_commit,
                       skip_tests, args):
    """
    Calls the correct build function and commits if requested.

    :param target: Target name.
    :param toolchain: Toolchain to be used.
    :param delivery_dir: Artifact directory, where images should be placed.
    :param debug: Build with debug profile.
    :param git_commit: Commit the changes.
    :param skip_tests: skip the test images build phase.
    :param args: list of extra arguments.
    """
    if debug:
        profile = 'debug'
    else:
        profile = 'release'

    if not skip_tests:
        build_tests(target, toolchain, profile, args)
    build_default_image(target, toolchain, profile, args)

    if git_commit:
        commit_binaries(target, delivery_dir, toolchain)
        commit_psa_autogen()
def get_parser():
    """Build the command-line argument parser for the PSA release tool."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-m", "--mcu",
                        help="build for the given MCU",
                        default=None,
                        choices=_get_psa_secure_targets_list(),
                        metavar="MCU")

    parser.add_argument("-t", "--tc",
                        help="build for the given tool chain (default is default_toolchain)",
                        default=None)

    parser.add_argument("-d", "--debug",
                        help="set build profile to debug",
                        action="store_true",
                        default=False)

    parser.add_argument('-q', '--quiet',
                        action="store_true",
                        default=False,
                        help="No Build log will be printed")

    parser.add_argument('-l', '--list',
                        action="store_true",
                        default=False,
                        help="Print supported PSA secure targets")

    parser.add_argument("--commit",
                        help="create a git commit for each platform",
                        action="store_true",
                        default=False)

    parser.add_argument('--skip-tests',
                        action="store_true",
                        default=False,
                        help="skip the test build phase")

    # REMAINDER captures every following token verbatim so they can be
    # forwarded unchanged to the underlying make.py/test.py invocations.
    parser.add_argument('-x', '--extra',
                        dest='extra_args',
                        default=[],
                        nargs=argparse.REMAINDER,
                        help="additional build parameters")

    return parser
def prep_build_dir():
    """Ensure ROOT/BUILD exists and holds a catch-all .mbedignore file."""
    build_dir = os.path.join(ROOT, 'BUILD')
    needs_creation = not os.path.exists(build_dir)
    if needs_creation:
        logger.info("BUILD directory created in {}".format(build_dir))
        os.makedirs(build_dir)
    # (Re)write the ignore file on every run; the write is idempotent.
    create_mbed_ignore(build_dir)
def main():
    """Entry point: build (and optionally commit) the requested PSA images."""
    options = get_parser().parse_args()

    if options.quiet:
        # Quiet mode: raise the log threshold and silence subprocess output.
        logger.setLevel(logging.INFO)
        global subprocess_output, subprocess_err
        subprocess_output = FNULL
        subprocess_err = subprocess.STDOUT

    if options.list:
        logger.info("Available platforms are: {}".format(
            ', '.join(_get_psa_secure_targets_list())))
        return

    prep_build_dir()
    psa_platforms_list = get_mbed_official_psa_release(options.mcu, options.tc)
    logger.info("Building the following platforms: {}".format(
        ', '.join(entry[0] for entry in psa_platforms_list)))

    # Fail fast if any required toolchain is missing from PATH.
    for tc in {entry[1] for entry in psa_platforms_list}:
        if not TOOLCHAIN_CLASSES[tc].check_executable():
            raise Exception("Toolchain {} was not found in PATH".format(tc))

    for target, tc, directory in psa_platforms_list:
        build_psa_platform(target, tc, directory, options.debug,
                           options.commit, options.skip_tests,
                           options.extra_args)

    logger.info("Finished Updating PSA images")
# Allow the module to be executed directly as a script.
if __name__ == '__main__':
    main()

View File

@ -1,37 +0,0 @@
[
{
"name": "Secure Partition ID definitions",
"template": "tools/psa/templates/tfm_partition_defs.inc.tpl",
"output": "TARGET_TFM/COMPONENT_SPE/inc/tfm_partition_defs.inc"
},
{
"name": "Secure Partition declarations",
"template": "tools/psa/templates/tfm_partition_list.inc.tpl",
"output": "TARGET_TFM/COMPONENT_SPE/inc/tfm_partition_list.inc"
},
{
"name": "Secure Service list",
"template": "tools/psa/templates/tfm_service_list.inc.tpl",
"output": "TARGET_TFM/COMPONENT_SPE/inc/tfm_service_list.inc"
},
{
"name": "Secure Service signals list",
"template": "tools/psa/templates/tfm_spm_signal_defs.h.tpl",
"output": "TARGET_TFM/COMPONENT_SPE/inc/tfm_spm_signal_defs.h"
},
{
"name": "mbed-SPM database",
"template": "tools/psa/templates/psa_setup.c.tpl",
"output": "TARGET_MBED_SPM/COMPONENT_SPE/psa_setup.c"
},
{
"name": "Mappings from RoT Service names to SIDs",
"template": "tools/psa/templates/sid.h.tpl",
"output": "services/inc/autogen_sid.h"
},
{
"name": "Details partition defines and structures",
"template": "tools/psa/templates/mbed_spm_partitions.h.tpl",
"output": "services/inc/mbed_spm_partitions.h"
}
]

View File

@ -1,104 +0,0 @@
/* Copyright (c) 2017-2019 ARM Limited
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*******************************************************************************
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* THIS FILE IS AN AUTO-GENERATED FILE - DO NOT MODIFY IT.
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* Template Version 1.0
* Generated by tools/psa/generate_partition_code.py Version {{script_ver}}
******************************************************************************/
#ifndef __MBED_SPM_PARTITIONS_H___
#define __MBED_SPM_PARTITIONS_H___
{% macro do_parition(partition) -%}
/* -----------------------------------------------------------------------------
* {{partition.name|upper}} defines
* -------------------------------------------------------------------------- */
#define {{partition.name|upper}}_ID {{partition.id}}
{% if partition.rot_services|count > 0 %}
#define {{partition.name|upper}}_ROT_SRV_COUNT ({{partition.rot_services|count}}UL)
{% endif %}
#define {{partition.name|upper}}_EXT_ROT_SRV_COUNT ({{partition.extern_sids|count}}UL)
{% for irq in partition.irqs %}
#define {{irq.signal|upper}}_POS ({{loop.index + 3 }}UL)
#define {{irq.signal|upper}} (1UL << {{irq.signal|upper}}_POS)
{% endfor %}
{% if partition.irqs|count > 0 %}
#define {{partition.name|upper}}_WAIT_ANY_IRQ_MSK (\
{% for irq in partition.irqs %}
{{irq.signal|upper}}{{")" if loop.last else " | \\"}}
{% endfor %}
{% else %}
#define {{partition.name|upper}}_WAIT_ANY_IRQ_MSK (0)
{% endif %}
{% for rot_srv in partition.rot_services %}
#define {{rot_srv.signal|upper}}_POS ({{loop.index + 3 + partition.irqs|count}}UL)
#define {{rot_srv.signal|upper}} (1UL << {{rot_srv.signal|upper}}_POS)
{% endfor %}
{% if partition.rot_services|count > 0 %}
#define {{partition.name|upper}}_WAIT_ANY_SID_MSK (\
{% for rot_srv in partition.rot_services %}
{{rot_srv.signal|upper}}{{")" if loop.last else " | \\"}}
{% endfor %}
{% else %}
#define {{partition.name|upper}}_WAIT_ANY_SID_MSK (0)
{% endif %}
{% if partition.irqs|count > 0 %}
uint32_t spm_{{partition.name|lower}}_signal_to_irq_mapper(uint32_t signal);
{% endif %}
{%- endmacro %}
{# ------------------ macro do_parition(partition) -------------------------- #}
/****************** Common definitions ****************************************/
/* PSA reserved event flags */
#define PSA_RESERVED1_POS (1UL)
#define PSA_RESERVED1_MSK (1UL << PSA_RESERVED1_POS)
#define PSA_RESERVED2_POS (2UL)
#define PSA_RESERVED2_MSK (1UL << PSA_RESERVED2_POS)
/****************** Service Partitions ****************************************/
{% for partition in service_partitions %}
{{ do_parition(partition) }}
{% endfor %}
/****************** Test Partitions *******************************************/
#ifdef USE_PSA_TEST_PARTITIONS
{% for test_partition in test_partitions %}
#ifdef USE_{{test_partition.name|upper}}
{{ do_parition(test_partition) }}
#endif // USE_{{test_partition.name|upper}}
{% endfor %}
#endif // USE_PSA_TEST_PARTITIONS
#endif // __MBED_SPM_PARTITIONS_H___
{# End of file #}

View File

@ -1,281 +0,0 @@
/* Copyright (c) 2017-2019 ARM Limited
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*******************************************************************************
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* THIS FILE IS AN AUTO-GENERATED FILE - DO NOT MODIFY IT.
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* Template Version 1.0
* Generated by tools/psa/generate_partition_code.py Version {{script_ver}}
******************************************************************************/
#include "cmsis.h"
#include "rtx_os.h"
#include "mbed_toolchain.h" /* For using MBED_ALIGN macro */
#include "spm_panic.h"
#include "spm_internal.h"
#include "handles_manager.h"
#include "mbed_spm_partitions.h"
#include "psa_manifest/sid.h"
extern spm_db_t g_spm;
{% macro do_parition(partition) -%}
/* -----------------------------------------------------------------------------
* {{partition.name|upper}} declarations
* -------------------------------------------------------------------------- */
MBED_ALIGN(8) static uint8_t {{partition.name|lower}}_thread_stack[{{partition.stack_size}}] = {0};
static osRtxThread_t {{partition.name|lower}}_thread_cb = {0};
static const osThreadAttr_t {{partition.name|lower}}_thread_attr = {
.name = "{{partition.name|lower}}",
.attr_bits = 0,
.cb_mem = &{{partition.name|lower}}_thread_cb,
.cb_size = sizeof({{partition.name|lower}}_thread_cb),
.stack_mem = {{partition.name|lower}}_thread_stack,
.stack_size = {{partition.stack_size}},
.priority = {{partition.priority_mbed}},
.tz_module = 0,
.reserved = 0
};
static osRtxMutex_t {{partition.name|lower}}_mutex = {0};
static const osMutexAttr_t {{partition.name|lower}}_mutex_attr = {
.name = "{{partition.name|lower}}_mutex",
.attr_bits = osMutexRecursive | osMutexPrioInherit | osMutexRobust,
.cb_mem = &{{partition.name|lower}}_mutex,
.cb_size = sizeof({{partition.name|lower}}_mutex),
};
{% if partition.rot_services|count > 0 %}
spm_rot_service_t {{partition.name|lower}}_rot_services[] = {
{% for rot_srv in partition.rot_services %}
{
.sid = {{rot_srv.name|upper}},
.mask = {{rot_srv.signal|upper}},
.partition = NULL,
.min_version = {{rot_srv.minor_version}},
.min_version_policy = PSA_MINOR_VERSION_POLICY_{{rot_srv.minor_policy|upper}},
{% if rot_srv.nspe_callable %}
.allow_nspe = true,
{% else %}
.allow_nspe = false,
{% endif %}
.queue = {
.head = NULL,
.tail = NULL
}
},
{% endfor %}
};
{% endif %}
{% if partition.extern_sids|count > 0 %}
/* External SIDs used by {{partition.name}} */
const uint32_t {{partition.name|lower}}_external_sids[{{partition.extern_sids|count}}] = {
{% for sid in partition.extern_sids %}
{{sid|upper}},
{% endfor %}
};
{% endif %}
{% if partition.irqs|count > 0 %}
// Mapper function from irq signal to interrupt number
IRQn_Type spm_{{partition.name|lower}}_signal_to_irq_mapper(uint32_t signal)
{
SPM_ASSERT({{partition.name|upper}}_WAIT_ANY_IRQ_MSK & signal);
switch(signal){
{% for irq in partition.irqs %}
case {{ irq.signal }}:
return (IRQn_Type){{irq.line_num}};
break;
{% endfor %}
default:
break;
}
SPM_PANIC("Unknown signal number %lu", signal);
return 0;
}
{% for irq in partition.irqs %}
// ISR handler for interrupt {{irq.line_num}}
void spm_irq_{{irq.signal}}_{{partition.name|lower}}(void)
{
spm_partition_t *partition = NULL;
for (uint32_t i = 0; i < g_spm.partition_count; ++i) {
if (g_spm.partitions[i].partition_id == {{partition.name|upper}}_ID) {
partition = &(g_spm.partitions[i]);
}
}
SPM_ASSERT(partition);
NVIC_DisableIRQ((IRQn_Type){{irq.line_num}}); // will be enabled by psa_eoi()
osThreadFlagsSet(partition->thread_id, {{irq.signal|upper}}); // notify partition
}
{% endfor %}
{% endif %}
extern void {{partition.entry_point}}(void *ptr);
void {{partition.name|lower}}_init(spm_partition_t *partition)
{
if (NULL == partition) {
SPM_PANIC("partition is NULL!\n");
}
partition->mutex = osMutexNew(&{{partition.name|lower}}_mutex_attr);
if (NULL == partition->mutex) {
SPM_PANIC("Failed to create mutex for secure partition {{partition.name|lower}}!\n");
}
{% if partition.rot_services|count > 0 %}
for (uint32_t i = 0; i < {{partition.name|upper}}_ROT_SRV_COUNT; ++i) {
{{partition.name|lower}}_rot_services[i].partition = partition;
}
partition->rot_services = {{partition.name|lower}}_rot_services;
{% else %}
partition->rot_services = NULL;
{% endif %}
partition->thread_id = osThreadNew({{partition.entry_point}}, NULL, &{{partition.name|lower}}_thread_attr);
if (NULL == partition->thread_id) {
SPM_PANIC("Failed to create start main thread of partition {{partition.name|lower}}!\n");
}
}
{%- endmacro %}
{# -------------- macro do_parition(partition) ----------------------------- #}
/****************** Service Partitions ****************************************/
{% for partition in service_partitions %}
{{do_parition(partition)}}
{% endfor %}
/****************** Test Partitions *******************************************/
#ifdef USE_PSA_TEST_PARTITIONS
{% for test_partition in test_partitions %}
#ifdef USE_{{test_partition.name|upper}}
{{ do_parition(test_partition) }}
#endif // USE_{{test_partition.name|upper}}
{% endfor %}
#endif // USE_PSA_TEST_PARTITIONS
{# -------------- spm_db_entry(partition) ----------------------------------- #}
{% macro spm_db_entry(partition) -%}
{# Render one spm_partition_t initializer for the g_partitions[] database. #}
{# .thread_id is left 0 here and filled in later by <name>_init().         #}
{# Optional fields (rot_services, extern_sids, irq_mapper) fall back to    #}
{# NULL/0 when the partition does not declare the corresponding resources. #}
/* {{partition.name|upper}} */
{
.partition_id = {{partition.name|upper}}_ID,
.thread_id = 0,
.flags = {{partition.name|upper}}_WAIT_ANY_SID_MSK | {{partition.name|upper}}_WAIT_ANY_IRQ_MSK,
.rot_services = NULL,
{% if partition.rot_services|count > 0 %}
.rot_services_count = {{partition.name|upper}}_ROT_SRV_COUNT,
{% else %}
.rot_services_count = 0,
{% endif %}
{% if partition.extern_sids|count > 0 %}
.extern_sids = {{partition.name|lower}}_external_sids,
{% else %}
.extern_sids = NULL,
{% endif %}
.extern_sids_count = {{partition.name|upper}}_EXT_ROT_SRV_COUNT,
{% if partition.irqs|count > 0 %}
.irq_mapper = spm_{{partition.name|lower}}_signal_to_irq_mapper,
{% else %}
.irq_mapper = NULL,
{% endif %}
},
{%- endmacro %}
{# -------------- spm_db_entry(partition) ----------------------------------- #}
/****************** SPM DB initialization *************************************/
spm_partition_t g_partitions[] = {
{% for partition in service_partitions %}
{{spm_db_entry(partition)}}
{% endfor %}
#ifdef USE_PSA_TEST_PARTITIONS
{% for test_partition in test_partitions %}
{# Fix: the macro call must be on its own line. Emitting it on the #ifdef  #}
{# line would place the struct initializer inside the preprocessor         #}
{# directive, producing invalid C (compare the test-partition sections     #}
{# around <name>_init and parition definitions, which split the lines).    #}
#ifdef USE_{{test_partition.name|upper}}
{{ spm_db_entry(test_partition) }}
#endif // USE_{{test_partition.name|upper}}
{% endfor %}
#endif // USE_PSA_TEST_PARTITIONS
};
/****************** MMIO regions **********************************************/
{% if regions|count > 0 %}
/****************** Sanity checks *********************************************/
/* Check all the defined memory regions for overlapping. */
{% for region_pair in region_pair_list %}
MBED_STATIC_ASSERT(
((uintptr_t)({{region_pair[0].base}}) + {{region_pair[0].size}} - 1 < (uintptr_t)({{region_pair[1].base}})) ||
((uintptr_t)({{region_pair[1].base}}) + {{region_pair[1].size}} - 1 < (uintptr_t)({{region_pair[0].base}})),
"The region with base {{region_pair[0].base}} and size {{region_pair[0].size}} overlaps with the region with base {{region_pair[1].base}} and size {{region_pair[1].size}}!");
{% endfor %}
/****************** MMIO regions definition ***********************************/
/* A list of all the memory regions. */
const mem_region_t mem_regions[] = {
{% for region in regions %}
{ (uint32_t)({{region.base}}), {{region.size}}, {{region.permission}}, {{region.partition_id}} },
{% endfor %}
};
{% else %}
const mem_region_t *mem_regions = NULL;
{% endif %}
const uint32_t mem_region_count = {{regions|count}};
/****************** Partitions init function *********************************/
{# Initialize every service partition, then any enabled test partitions,   #}
{# in the same order their entries were emitted into g_partitions[] above, #}
{# and hand the caller the database pointer plus the number of entries.    #}
uint32_t init_partitions(spm_partition_t **partitions)
{
uint32_t partition_idx = 0;
if (NULL == partitions) {
SPM_PANIC("partitions is NULL!\n");
}
{% for partition in service_partitions %}
{{partition.name|lower}}_init(&(g_partitions[partition_idx++]));
{% endfor %}
#ifdef USE_PSA_TEST_PARTITIONS
{% for test_partition in test_partitions %}
#ifdef USE_{{test_partition.name|upper}}
{{test_partition.name|lower}}_init(&(g_partitions[partition_idx++]));
#endif // USE_{{test_partition.name|upper}}
{% endfor %}
#endif // USE_PSA_TEST_PARTITIONS
*partitions = g_partitions;
return partition_idx;
}
{# End of file #}

View File

@ -1,47 +0,0 @@
/* Copyright (c) 2019 ARM Limited
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*******************************************************************************
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* THIS FILE IS AN AUTO-GENERATED FILE - DO NOT MODIFY IT.
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* Template Version 1.0
* Generated by tools/psa/generate_partition_code.py Version {{script_ver}}
******************************************************************************/
{# Fix: macro renamed from the misspelled "parition_sid" to               #}
{# "partition_sid"; all call sites are local to this template and renamed  #}
{# together, so the generated output is unchanged.                         #}
{% macro partition_sid(partition) -%}
/* -----------------------------------------------------------------------------
 * {{partition.name|upper}} Service IDs
 * -------------------------------------------------------------------------- */
{% for rot_srv in partition.rot_services %}
#define {{rot_srv.name|upper}} {{rot_srv.id}}
{% endfor %}
{%- endmacro %}
{# -------------- macro partition_sid(partition) ---------------------------- #}
/****************** Service Partitions ****************************************/
{% for partition in service_partitions %}
{{partition_sid(partition)}}
{% endfor %}
/****************** Test Partitions *******************************************/
{% for partition in test_partitions %}
{{partition_sid(partition)}}
{% endfor %}
{# End of file #}

View File

@ -1,56 +0,0 @@
/* Copyright (c) 2017-2019 ARM Limited
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*******************************************************************************
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* THIS FILE IS AN AUTO-GENERATED FILE - DO NOT MODIFY IT.
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* Template Version 1.0
* Generated by tools/psa/generate_partition_code.py Version {{script_ver}}
******************************************************************************/
#ifndef __TFM_PARTITION_DEFS_INC__
#define __TFM_PARTITION_DEFS_INC__
{# Partition IDs are allocated sequentially from TFM_SP_BASE: service      #}
{# partitions first, then test partitions offset past the service count,   #}
{# so the two ID ranges can never overlap.                                 #}
/*************************** Service Partitions *******************************/
{% for partition in service_partitions %}
{% set partition_loop = loop %}
#define {{partition.name|upper}}_ID (TFM_SP_BASE + {{ partition_loop.index0 }})
{% endfor %}
/*************************** Test Partitions **********************************/
#ifdef USE_PSA_TEST_PARTITIONS
{% for partition in test_partitions %}
{% set partition_loop = loop %}
#ifdef USE_{{partition.name|upper}}
#define {{partition.name|upper}}_ID (TFM_SP_BASE + {{service_partitions|count}} + {{ partition_loop.index0 }})
#endif
{% endfor %}
#endif // USE_PSA_TEST_PARTITIONS
{# TFM_MAX_USER_PARTITIONS always counts every test partition when test    #}
{# partitions are compiled in, even if individual USE_<NAME> flags are off.#}
#ifdef USE_PSA_TEST_PARTITIONS
#define TFM_MAX_USER_PARTITIONS ({{service_partitions|count}} + {{test_partitions|count}})
#else
#define TFM_MAX_USER_PARTITIONS ({{service_partitions|count}})
#endif
#endif // __TFM_PARTITION_DEFS_INC__
{# End of file #}

View File

@ -1,48 +0,0 @@
/*
* Copyright (c) 2018-2019, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
/*******************************************************************************
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* THIS FILE IS AN AUTO-GENERATED FILE - DO NOT MODIFY IT.
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* Template Version 1.0
* Generated by tools/psa/generate_partition_code.py Version {{script_ver}}
******************************************************************************/
#ifndef __TFM_PARTITION_LIST_INC__
#define __TFM_PARTITION_LIST_INC__
{# For each partition: declare it with the IPC flag and its type, ID,      #}
{# priority and stack size, then register its entry point as the init      #}
{# function. PARTITION_DECLARE/PARTITION_ADD_INIT_FUNC are expanded by the #}
{# including TF-M source -- presumably into its partition database; confirm #}
{# against the TF-M tree.                                                  #}
/*************************** Service Partitions *******************************/
{% for partition in service_partitions %}
/* -----------------------------------------------------------------------------
 * {{partition.name|upper}}
 * -------------------------------------------------------------------------- */
PARTITION_DECLARE({{partition.name|upper}}, 0
| SPM_PART_FLAG_IPC
, "{{partition.type}}", {{partition.id}}, {{partition.priority_tfm}}, {{partition.stack_size}});
PARTITION_ADD_INIT_FUNC({{partition.name|upper}}, {{partition.entry_point}});
{% endfor %}
/*************************** Test Partitions **********************************/
#ifdef USE_PSA_TEST_PARTITIONS
{% for partition in test_partitions %}
#ifdef USE_{{partition.name|upper}}
/* -----------------------------------------------------------------------------
 * {{partition.name|upper}}
 * -------------------------------------------------------------------------- */
PARTITION_DECLARE({{partition.name|upper}}, 0
| SPM_PART_FLAG_IPC
, "{{partition.type}}", {{partition.id}}, {{partition.priority_tfm}}, {{partition.stack_size}});
PARTITION_ADD_INIT_FUNC({{partition.name|upper}}, {{partition.entry_point}});
#endif // USE_{{partition.name|upper}}
{% endfor %}
#endif // USE_PSA_TEST_PARTITIONS
#endif // __TFM_PARTITION_LIST_INC__
{# End of file #}

View File

@ -1,46 +0,0 @@
/*
* Copyright (c) 2018-2019, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
/*******************************************************************************
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* THIS FILE IS AN AUTO-GENERATED FILE - DO NOT MODIFY IT.
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* Template Version 1.0
* Generated by tools/psa/generate_partition_code.py Version {{script_ver}}
******************************************************************************/
#ifndef __TFM_SERVICE_LIST_INC__
#define __TFM_SERVICE_LIST_INC__
{# Each emitted row lists, in order: service name, owning partition ID,    #}
{# signal, SID, whether non-secure clients may call it, minor version, and #}
{# the minor-version compatibility policy.                                 #}
/*************************** Service Partitions *******************************/
{% for partition in service_partitions %}
/* -----------------------------------------------------------------------------
 * {{partition.name|upper}} Services
 * -------------------------------------------------------------------------- */
{% for rot_srv in partition.rot_services %}
{"{{rot_srv.name|upper}}", {{partition.name|upper}}_ID, {{rot_srv.signal|upper}}, {{rot_srv.id}}, {% if rot_srv.nspe_callable %}true{% else %}false{% endif %}, {{rot_srv.minor_version}}, TFM_VERSION_POLICY_{{rot_srv.minor_policy|upper}}},
{% endfor %}
{% endfor %}
/*************************** Test Partitions **********************************/
#ifdef USE_PSA_TEST_PARTITIONS
{% for partition in test_partitions %}
#ifdef USE_{{partition.name|upper}}
/* -----------------------------------------------------------------------------
 * {{partition.name|upper}} Services
 * -------------------------------------------------------------------------- */
{% for rot_srv in partition.rot_services %}
{"{{rot_srv.name|upper}}", {{partition.name|upper}}_ID, {{rot_srv.signal|upper}}, {{rot_srv.id}}, {% if rot_srv.nspe_callable %}true{% else %}false{% endif %}, {{rot_srv.minor_version}}, TFM_VERSION_POLICY_{{rot_srv.minor_policy|upper}}},
{% endfor %}
#endif // USE_{{partition.name|upper}}
{% endfor %}
#endif // USE_PSA_TEST_PARTITIONS
#endif // __TFM_SERVICE_LIST_INC__
{# End of file #}

View File

@ -1,48 +0,0 @@
/*
* Copyright (c) 2018-2019, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
/*******************************************************************************
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* THIS FILE IS AN AUTO-GENERATED FILE - DO NOT MODIFY IT.
* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
* Template Version 1.0
* Generated by tools/psa/generate_partition_code.py Version {{script_ver}}
******************************************************************************/
#ifndef __TFM_SPM_SIGNAL_DEFS_H__
#define __TFM_SPM_SIGNAL_DEFS_H__
{# Signal bit positions start at loop.index + 3, i.e. bit 4 upward --      #}
{# presumably the low bits are reserved for SPM control signals; confirm   #}
{# against the TF-M signal definitions. Positions restart per partition    #}
{# because each partition has its own signal mask.                         #}
/*************************** Service Partitions *******************************/
{% for partition in service_partitions %}
/* -----------------------------------------------------------------------------
 * {{partition.name|upper}} Signals
 * -------------------------------------------------------------------------- */
{% for rot_srv in partition.rot_services %}
#define {{rot_srv.signal|upper}}_POS ({{loop.index + 3}}UL)
#define {{rot_srv.signal|upper}} (1UL << {{rot_srv.signal|upper}}_POS)
{% endfor %}
{% endfor %}
/*************************** Test Partitions **********************************/
#ifdef USE_PSA_TEST_PARTITIONS
{% for partition in test_partitions %}
#ifdef USE_{{partition.name|upper}}
/* -----------------------------------------------------------------------------
 * {{partition.name|upper}} Signals
 * -------------------------------------------------------------------------- */
{% for rot_srv in partition.rot_services %}
#define {{rot_srv.signal|upper}}_POS ({{loop.index + 3}}UL)
#define {{rot_srv.signal|upper}} (1UL << {{rot_srv.signal|upper}}_POS)
{% endfor %}
#endif // USE_{{partition.name|upper}}
{% endfor %}
#endif // USE_PSA_TEST_PARTITIONS
#endif // __TFM_SPM_SIGNAL_DEFS_H__
{# End of file #}

View File

@ -1,6 +0,0 @@
{
"name": "tfm_build",
"requires" : ["psa-services", "tfm", "tfm-s", "psa"],
"macros": ["MBEDTLS_CIPHER_MODE_CTR", "MBEDTLS_CMAC_C"],
"artifact_name": "tfm"
}

View File

@ -604,38 +604,3 @@ class Resources(object):
for t in res_filter.file_types:
self._file_refs[t] = set(filter(
res_filter.predicate, self._file_refs[t]))
class ResourceFilter(object):
    """Base class for filters applied to a Resources collection.

    Subclasses state which FileType categories they examine and implement
    predicate() to decide which file references to keep.
    """

    def __init__(self, file_types):
        # FileType values whose file references this filter examines.
        self.file_types = file_types

    def predicate(self, ref):
        """Return True to keep the file reference ``ref``.

        Must be overridden by subclasses.
        """
        # Bug fix: the original did `raise NotImplemented`, which is a
        # TypeError at runtime -- NotImplemented is a sentinel value, not an
        # exception class. NotImplementedError is the correct exception.
        raise NotImplementedError
class SpeOnlyResourceFilter(ResourceFilter):
    """Keep only secure-side (SPE) sources.

    Filters assembly, C and C++ source references, retaining those whose
    path contains a ``COMPONENT_SPE`` directory component.
    """

    def __init__(self):
        source_types = [FileType.ASM_SRC, FileType.C_SRC, FileType.CPP_SRC]
        ResourceFilter.__init__(self, source_types)

    def predicate(self, ref):
        # Secure-side files are identified by their directory name.
        return 'COMPONENT_SPE' in ref.name
class OsAndSpeResourceFilter(ResourceFilter):
    """Keep Mbed OS tree sources plus secure-side (SPE) sources.

    A source reference passes when its absolute path lies inside the Mbed
    OS root (ROOT) or when its path contains a ``COMPONENT_SPE`` directory.
    """

    def __init__(self):
        source_types = [FileType.ASM_SRC, FileType.C_SRC, FileType.CPP_SRC]
        ResourceFilter.__init__(self, source_types)

    def predicate(self, ref):
        inside_os_tree = ROOT in abspath(ref.name)
        return inside_os_tree or 'COMPONENT_SPE' in ref.name
class PsaManifestResourceFilter(ResourceFilter):
    """Drop PSA partition manifests from the JSON file references."""

    def __init__(self):
        ResourceFilter.__init__(self, [FileType.JSON])

    def predicate(self, ref):
        # PSA partition manifests follow the "*_psa.json" naming scheme;
        # everything else is kept.
        is_psa_manifest = ref.name.endswith('_psa.json')
        return not is_psa_manifest

View File

@ -387,28 +387,13 @@ class Target(namedtuple(
else:
return self.core
# Mechanism for specifying TrustZone is subject to change - see
# discussion on https://github.com/ARMmbed/mbed-os/issues/9460
# In the interim, we follow heuristics that support existing
# documentation for ARMv8-M TF-M integration (check the "TFM" label),
# plus an extra "trustzone" flag set by M2351, and looking at the "-NS"
# suffix. This now permits non-TrustZone ARMv8 builds if
# having trustzone = false (default), no TFM flag, and no -NS suffix.
@property
def is_TrustZone_secure_target(self):
    # Secure side: flagged either by an explicit "trustzone" attribute or a
    # "TFM" label, and not itself a non-secure ("-NS") core variant.
    # (Indentation reconstructed -- these are methods of the Target class.)
    return (getattr(self, 'trustzone', False) or 'TFM' in self.labels) and not self.core.endswith('-NS')
@property
def is_TrustZone_non_secure_target(self):
    # The non-secure side is identified purely by the "-NS" core-name suffix.
    return self.core.endswith('-NS')
@property
def is_TrustZone_target(self):
    # A TrustZone target is either half of a secure/non-secure pair.
    return self.is_TrustZone_secure_target or self.is_TrustZone_non_secure_target
@property
def is_PSA_secure_target(self):
return 'SPE_Target' in self.labels
return self.is_TrustZone_non_secure_target
@property
def is_PSA_non_secure_target(self):

View File

@ -43,8 +43,6 @@ from tools.utils import argparse_dir_not_parent
from tools.utils import print_end_warnings
from tools.settings import ROOT
from tools.targets import Target
from tools.psa import generate_psa_sources
from tools.resources import OsAndSpeResourceFilter, SpeOnlyResourceFilter
def main():
error = False
@ -247,12 +245,6 @@ def main():
profile = extract_profile(parser, options, internal_tc_name)
try:
resource_filter = None
if target.is_PSA_secure_target:
resource_filter = OsAndSpeResourceFilter()
generate_psa_sources(
source_dirs=base_source_paths,
ignore_paths=[options.build_dir]
)
# Build sources
notify = TerminalNotifier(options.verbose, options.silent)
@ -288,10 +280,7 @@ def main():
if not library_build_success:
print("Failed to build library")
else:
if target.is_PSA_secure_target:
resource_filter = SpeOnlyResourceFilter()
else:
resource_filter = None
resource_filter = None
# Build all the tests
notify = TerminalNotifier(options.verbose, options.silent)

View File

@ -1,17 +0,0 @@
"""
Copyright (c) 2019 ARM Limited. All rights reserved.
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations
"""

View File

@ -1,732 +0,0 @@
# Copyright (c) 2017-2018 ARM Limited
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
manifests = [
{
'name': 'TEST_PARTITION',
'id': "0x7FFFFFFF",
"type": "APPLICATION-ROT",
'priority': 'NORMAL',
'entry_point': 'test_main',
'stack_size': 512, # 512 == 0x200
'heap_size': 2048,
'mmio_regions': [
{
'name': 'PERIPH1',
'permission': 'READ-ONLY'
},
{
'name': 'PERIPH2',
'permission': 'READ-ONLY'
},
{
'base': '0xCCCCCCCC',
'size': 4096, 'permission': 'READ-ONLY'
},
{
'base': '0xDDDDDDDD',
'size': 33554432, 'permission': 'READ-WRITE'
}
],
'services': [
{
'name': 'SID1',
'identifier': '0x00000001',
'signal': 'SID1',
'minor_version': 1,
'minor_policy': 'RELAXED',
'non_secure_clients': True
},
{
'name': 'SID2',
'identifier': '0x00000002',
'signal': 'SID2',
'minor_version': 2,
'minor_policy': 'STRICT',
'non_secure_clients': False
},
],
'source_files': ['src1.cpp', 'src2.cpp'],
'irqs': [
{"line_num": 20, "signal": "ISR20"},
{"line_num": 21, "signal": "ISR21"}
],
'extern_sids': ['SID3', 'SID4']
},
{
'name': 'TEST_PARTITION2',
'id': "0x7FFFFFFE",
"type": "APPLICATION-ROT",
'priority': 'NORMAL',
'entry_point': 'test2_main',
'stack_size': 512, # 512 == 0x200
'heap_size': 2048,
'mmio_regions': [
{
'name': 'PERIPH1',
'permission': 'READ-ONLY'
},
{
'name': 'PERIPH3',
'permission': 'READ-ONLY'
},
{
'base': '0xAAAAAAAA',
'size': 4096, 'permission': 'READ-ONLY'
},
{
'base': '0xBBBBBBBB',
'size': 33554432, 'permission': 'READ-WRITE'
}
],
'services': [
{
'name': 'SID3',
'identifier': '0x00000003',
'signal': 'SID3',
'minor_version': 5,
'minor_policy': 'RELAXED',
'non_secure_clients': True
},
{
'name': 'SID4',
'identifier': '0x00000004',
'signal': 'SID4',
'minor_version': 12,
'minor_policy': 'STRICT',
'non_secure_clients': False
},
],
'source_files': ['src3.cpp', 'src4.cpp'],
'irqs': [
{"line_num": 22, "signal": "ISR22"},
{"line_num": 23, "signal": "ISR23"}
]
}
]
manifests_for_circular_call_dependency_checks = [
{
'name': 'PARTITION1',
'id': '0x7FFFFFFF',
'type': 'APPLICATION-ROT',
'priority': 'NORMAL',
'entry_point': 'test_main',
'stack_size': 512,
'heap_size': 2048,
'source_files': ['src1.cpp'],
'services': [
{
'name': 'SID1',
'identifier': '0x00000001',
'signal': 'SID1',
'non_secure_clients': False
},
{
'name': 'SID2',
'identifier': '0x00000002',
'signal': 'SID2',
'non_secure_clients': False
}
],
'extern_sids': ['SID3', 'SID4']
},
{
'name': 'PARTITION2',
'id': '0x7FFFFFFE',
'type': 'APPLICATION-ROT',
'priority': 'NORMAL',
'entry_point': 'test_main',
'stack_size': 512,
'heap_size': 2048,
'source_files': ['src2.cpp'],
'services': [
{
'name': 'SID3',
'identifier': '0x00000003',
'signal': 'SID3',
'non_secure_clients': False
},
{
'name': 'SID4',
'identifier': '0x00000004',
'signal': 'SID4',
'non_secure_clients': False
}
],
'extern_sids': ['SID1', 'SID2']
},
{
'name': 'PARTITION3',
'id': '0x7FFFFFFD',
'type': 'APPLICATION-ROT',
'priority': 'NORMAL',
'entry_point': 'test_main',
'stack_size': 512,
'heap_size': 2048,
'source_files': ['src3.cpp'],
'services': [
{
'name': 'SID5',
'identifier': '0x00000005',
'signal': 'SID5',
'non_secure_clients': False
}
],
'extern_sids': ['SID7']
},
{
'name': 'PARTITION4',
'id': '0x7FFFFFFC',
'type': 'APPLICATION-ROT',
'priority': 'NORMAL',
'entry_point': 'test_main',
'stack_size': 512,
'heap_size': 2048,
'source_files': ['src4.cpp'],
'services': [
{
'name': 'SID6',
'identifier': '0x00000006',
'signal': 'SID6',
'non_secure_clients': False
},
{
'name': 'SID7',
'identifier': '0x00000007',
'signal': 'SID7',
'non_secure_clients': False
},
],
'extern_sids': ['SID9']
},
{
'name': 'PARTITION5',
'id': '0x7FFFFFFB',
'type': 'APPLICATION-ROT',
'priority': 'NORMAL',
'entry_point': 'test_main',
'stack_size': 512,
'heap_size': 2048,
'source_files': ['src5.cpp'],
'services': [
{
'name': 'SID8',
'identifier': '0x00000008',
'signal': 'SID8',
'non_secure_clients': False
},
{
'name': 'SID9',
'identifier': '0x00000009',
'signal': 'SID9',
'non_secure_clients': False
}
],
'extern_sids': ['SID5']
},
{
'name': 'PARTITION6',
'id': '0x7FFFFFFA',
'type': 'APPLICATION-ROT',
'priority': 'NORMAL',
'entry_point': 'test_main',
'stack_size': 512,
'heap_size': 2048,
'source_files': ['src6.cpp'],
'services': [
{
'name': 'SID10',
'identifier': '0x0000000A',
'signal': 'SID10',
'non_secure_clients': False
},
{
'name': 'SID11',
'identifier': '0x0000000B',
'signal': 'SID11',
'non_secure_clients': False
}
],
'extern_sids': ['SID7', 'SID5']
},
{
'name': 'PARTITION7',
'id': '0x7FFFFFF9',
'type': 'APPLICATION-ROT',
'priority': 'NORMAL',
'entry_point': 'test_main',
'stack_size': 512,
'heap_size': 2048,
'source_files': ['src6.cpp'],
'services': [
{
'name': 'SID12',
'identifier': '0x0000000C',
'signal': 'SID12',
'non_secure_clients': False
}
]
}
]
# -----------------------------------------------------------------------------
# Negative-test fixtures: each structure below is malformed in exactly one
# way so the manifest-validation code can be exercised on the corresponding
# error path. (Indentation reconstructed; values are unchanged.)
# -----------------------------------------------------------------------------

# 'minor_policy' is not one of the accepted policy names.
invalid_minor_version_policy_rot_srv = [
    {
        'name': 'SID1',
        'identifier': '0x00000001',
        'signal': 'SID1',
        'minor_version': 1,
        'minor_policy': 'invalid_policy',
        'non_secure_clients': True
    }
]

# 'non_secure_clients' must be a boolean, not a string.
invalid_nspe_callable_rot_srv = [
    {
        'name': 'SID1',
        'identifier': '0x00000001',
        'signal': 'SID1',
        'minor_version': 1,
        'minor_policy': 'STRICT',
        'non_secure_clients': 'invalid_value'
    }
]

# Mandatory 'non_secure_clients' field is absent.
missing_nspe_callable_rot_srv = [
    {
        'name': 'SID1',
        'identifier': '0x00000001',
        'signal': 'SID1',
        'minor_version': 1,
        'minor_policy': 'STRICT'
    }
]

# Signals SID1/SID2 collide with services declared elsewhere in the test
# manifests.
duplicate_signal_rot_services = [
    {
        'name': 'SID3',
        'identifier': '0x00000001',
        'signal': 'SID1',
        'minor_version': 5,
        'minor_policy': 'RELAXED',
        'non_secure_clients': True
    },
    {
        'name': 'SID4',
        'identifier': '0x00000002',
        'signal': 'SID2',
        'minor_version': 12,
        'minor_policy': 'STRICT',
        'non_secure_clients': True
    },
]

# Identifiers collide with services declared elsewhere in the test manifests.
duplicate_identifier_rot_services = [
    {
        'name': 'SID3',
        'identifier': '0x00000003',
        'signal': 'SID3',
        'minor_version': 5,
        'minor_policy': 'RELAXED',
        'non_secure_clients': True
    },
    {
        'name': 'SID4',
        'identifier': '0x00000002',
        'signal': 'SID4',
        'minor_version': 12,
        'minor_policy': 'STRICT',
        'non_secure_clients': True
    },
]

# Well-formed services that are callable from the secure side only.
spe_contained_rot_services = [
    {
        'name': 'SID5',
        'identifier': '0x00000005',
        'signal': 'SID5',
        'minor_version': 5,
        'minor_policy': 'RELAXED',
        'non_secure_clients': False
    },
    {
        'name': 'SID6',
        'identifier': '0x00000006',
        'signal': 'SID6',
        'minor_version': 12,
        'minor_policy': 'STRICT',
        'non_secure_clients': False
    }
]

# 'minor_version' field is absent.
missing_minor_version_rot_srv = [
    {
        'name': 'SID1',
        'identifier': '0x00000001',
        'signal': 'SID1',
        'minor_policy': 'RELAXED',
        'non_secure_clients': True
    }
]

# 'minor_policy' field is absent.
missing_minor_version_policy_rot_srv = [
    {
        'name': 'SID2',
        'identifier': '0x00000002',
        'signal': 'SID2',
        'minor_version': 1,
        'non_secure_clients': True
    }
]

# Both minor-version fields are absent. NOTE(review): "completley" is a typo
# for "completely" in the fixture name, kept because tests reference it by
# this spelling.
missing_minor_completley_rot_srv = [
    {'name': 'SID2', 'identifier': '0x00000002', 'signal': 'SID2',
     'non_secure_clients': True}
]

# IRQ whose signal name duplicates one already declared (ISR20).
duplicate_signal_irqs = [
    {"line_num": 22, "signal": "ISR20"}
]

# IRQ whose interrupt line number duplicates one already declared (21).
duplicate_line_num_irqs = [
    {"line_num": 21, "signal": "ISR22"}
]

# MMIO region whose base address is not numeric.
invalid_mmioregion_base = {
    'base': 'str',
    'size': 4096,
    'permission': 'READ-ONLY'
}

# MMIO region whose size is not numeric.
invalid_mmioregion_size = {
    'base': '0xEEEEEEEE',
    'size': 'str',
    'permission': 'READ-ONLY'
}

# Maps mock file names to the distinct integer handles used when patching
# file I/O in the generator unit tests.
test_mock_files = {
    'manifest1': 1,
    'manifest2': 2,
    'template_common1': 3,
    'template_common2': 4,
    'template_NAME_3': 5,
    'template_NAME_4': 6,
    'gen1': 7,
    'gen2': 8,
    'gen3': 9,
    'gen4': 10,
    'gen5': 11,
    'gen6': 12
}
test_common_template = '''{
"num_of_partitions": {{partitions|count}},
"partition_names": [
{% for partition in partitions %}
"{{partition.name}}"{{"" if loop.last else ","}}
{% endfor %}
],
"num_of_region_pairs": {{region_pair_list|count}}
}
'''
test_common_expected = '''{
"num_of_partitions": 2,
"partition_names": [
"TEST_PARTITION",
"TEST_PARTITION2"
],
"num_of_region_pairs": 28
}
'''
test_partition_template = '''{
"name": "{{partition.name}}",
"id": "0x{{"%0x"|format(partition.id|int)|upper}}",
"type": "{{partition.type}}",
"priority": "{{partition.priority_mbed|find_priority_key}}",
"entry_point": "{{partition.entry_point}}",
"stack_size": {{partition.stack_size}},
"heap_size": {{partition.heap_size}},
"mmio_regions": [
{% for mmio in partition.mmio_regions %}
{
{% if mmio.size|int %}
"base": "{{mmio.base}}",
"size": {{mmio.size}},
{% else %}
"name": "{{mmio.base}}",
{% endif %}
"permission": "{{mmio.permission|find_permission_key}}"
{{"}" if loop.last else "},"}}
{% endfor %}
],
"services": [
{% for rot_srv in partition.rot_services %}
{
"name": "{{rot_srv.name}}",
"identifier": "{{rot_srv.id}}",
"signal": "{{rot_srv.signal}}",
"minor_version": {{rot_srv.minor_version}},
"minor_policy": "{{rot_srv.minor_policy}}",
"non_secure_clients": {{rot_srv.nspe_callable|lower}}
{{"}" if loop.last else "},"}}
{% endfor %}
],
{% if partition.extern_sids %}
"extern_sids": [
{% for ext_sid in partition.extern_sids %}
"{{ext_sid}}"{{"" if loop.last else ","}}
{% endfor %}
],
{% endif %}
"source_files": [
{% for src in partition.source_files %}
"{{src|basename}}"{{"" if loop.last else ","}}
{% endfor %}
],
"irqs": [
{% for irq in partition.irqs %}
{
"line_num": {{irq.line_num}},
"signal": "{{irq.signal}}"
{{"}" if loop.last else "},"}}
{% endfor %}
]
}
'''
exceeding_services = [
{
"name": "XSID1",
"signal": "XSID1",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000009"
},
{
"name": "XSID2",
"signal": "XSID2",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000000a"
}, {
"name": "XSID3",
"signal": "XSID3",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000000b"
}, {
"name": "XSID4",
"signal": "XSID4",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000000c"
}, {
"name": "XSID5",
"signal": "XSID5",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000000d"
}, {
"name": "XSID6",
"signal": "XSID6",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000000e"
}, {
"name": "XSID7",
"signal": "XSID7",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000000f"
}, {
"name": "XSID8",
"signal": "XSID8",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000010"
}, {
"name": "XSID9",
"signal": "XSID9",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000011"
}, {
"name": "XSID10",
"signal": "XSID10",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000012"
}, {
"name": "XSID11",
"signal": "XSID11",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000013"
}, {
"name": "XSID12",
"signal": "XSID12",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000014"
}, {
"name": "XSID13",
"signal": "XSID13",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000015"
}, {
"name": "XSID14",
"signal": "XSID14",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000016"
}, {
"name": "XSID15",
"signal": "XSID15",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000017"
}, {
"name": "XSID16",
"signal": "XSID16",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000018"
}, {
"name": "XSID17",
"signal": "XSID17",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000019"
}, {
"name": "XSID18",
"signal": "XSID18",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000001a"
}, {
"name": "XSID19",
"signal": "XSID19",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000001b"
}, {
"name": "XSID20",
"signal": "XSID20",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000001c"
}, {
"name": "XSID21",
"signal": "XSID21",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000001d"
}, {
"name": "XSID22",
"signal": "XSID22",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000001e"
}, {
"name": "XSID23",
"signal": "XSID23",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x0000001f"
}, {
"name": "XSID24",
"signal": "XSID24",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000020"
}, {
"name": "XSID25",
"signal": "XSID25",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000021"
}, {
"name": "XSID26",
"signal": "XSID26",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000022"
}, {
"name": "XSID27",
"signal": "XSID27",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000023"
}, {
"name": "XSID28",
"signal": "XSID28",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000024"
}, {
"name": "XSID29",
"signal": "XSID29",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000025"
}, {
"name": "XSID30",
"signal": "XSID30",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000026"
}, {
"name": "XSID31",
"signal": "XSID31",
"non_secure_clients": True,
"minor_version": 5,
"minor_policy": "RELAXED",
"identifier": "0x00000027"
}
]

View File

@ -1,57 +0,0 @@
# Copyright (c) 2019 ARM Limited
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import os
from tools.notifier.mock import MockNotifier
from tools.resources import Resources, FileType
from tools.psa import find_secure_image
def test_find_secure_image():
    """Verify find_secure_image() argument validation and image lookup.

    Covers: missing mandatory paths, invalid image types, no candidate
    images, a candidate set without the required image, and finally the
    default and test-image selection paths.
    """
    notifier = MockNotifier()
    resources = Resources(notifier)
    ns_image_path = os.path.join('BUILD', 'TARGET_NS', 'app.bin')
    ns_test_path = os.path.join('BUILD', 'TARGET_NS', 'test.bin')
    config_s_image_name = 'target_config.bin'
    default_bin = os.path.join('prebuilt', config_s_image_name)
    test_bin = os.path.join('prebuilt', 'test.bin')

    # Each invalid call gets its own raises-block: inside a single
    # pytest.raises context only the first raising statement ever runs,
    # so grouping several calls silently skips all but the first.
    with pytest.raises(Exception, match='ns_image_path and configured_s_image_path are mandatory'):
        find_secure_image(notifier, resources, None, None, FileType.BIN)
    with pytest.raises(Exception, match='ns_image_path and configured_s_image_path are mandatory'):
        find_secure_image(notifier, resources, ns_image_path, None, FileType.BIN)
    with pytest.raises(Exception, match='ns_image_path and configured_s_image_path are mandatory'):
        find_secure_image(notifier, resources, None, config_s_image_name, FileType.BIN)

    with pytest.raises(Exception, match='image_type must be of type BIN or HEX'):
        find_secure_image(notifier, resources, ns_image_path, config_s_image_name, None)
    with pytest.raises(Exception, match='image_type must be of type BIN or HEX'):
        find_secure_image(notifier, resources, ns_image_path, config_s_image_name, FileType.C_SRC)

    # No BIN files registered at all.
    with pytest.raises(Exception, match='No image files found for this target'):
        find_secure_image(notifier, resources, ns_image_path, config_s_image_name, FileType.BIN)

    # A BIN file exists, but not the configured secure image.
    dummy_bin = os.path.join('path', 'to', 'dummy.bin')
    resources.add_file_ref(FileType.BIN, dummy_bin, dummy_bin)
    with pytest.raises(Exception, match='Required secure image not found'):
        find_secure_image(notifier, resources, ns_image_path, config_s_image_name, FileType.BIN)

    # With both candidates present, the configured default image is
    # selected for an application build and the test image for a test
    # build (ns image named 'test.bin').
    resources.add_file_ref(FileType.BIN, default_bin, default_bin)
    resources.add_file_ref(FileType.BIN, test_bin, test_bin)
    secure_image = find_secure_image(notifier, resources, ns_image_path,
                                     config_s_image_name, FileType.BIN)
    assert secure_image == default_bin
    secure_image = find_secure_image(notifier, resources, ns_test_path,
                                     config_s_image_name, FileType.BIN)
    assert secure_image == test_bin

View File

@ -1,683 +0,0 @@
# Copyright (c) 2017-2018 ARM Limited
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import filecmp
import re
import shutil
import tempfile
import pytest
import jsonschema.exceptions as jexcep
from jinja2.defaults import DEFAULT_FILTERS
from tools.psa.mbed_spm_tfm_common import *
from tools.psa.generate_partition_code import *
from .test_data import *
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
def extract_test_name(line):
    """Return the pytest parameter id embedded in square brackets.

    E.g. 'test_invalid_json[missing_name]' -> 'missing_name'.
    Raises AttributeError if *line* contains no bracketed section.
    """
    match = re.search(r'.*\[(.*)\]', line)
    return match.group(1)
def dump_manifest_to_json(manifest, test_name, test_dir, create_files=True):
    """Serialize a manifest dictionary to a JSON file.

    :param manifest: The manifest dictionary.
    :param test_name: Name of the test; used as the JSON file's basename.
    :param test_dir: Directory (py.path-like) to contain the JSON file.
    :param create_files: Whether to also create the source files listed in
        the manifest's 'source_files' entry (each file's content is its
        own name).
    :return: Path of the created JSON file.
    """
    json_file = test_dir.join('{}.json'.format(test_name))
    with open(json_file.strpath, 'wt') as out:
        json.dump(manifest, out, indent=2)
    if create_files:
        # Create every partition source file listed in the manifest.
        for src_name in manifest.get('source_files', []):
            test_dir.join(src_name).write(src_name)
    return json_file.strpath
def find_priority_key(value):
    """Reverse lookup in 'Manifest.PRIORITY'.

    :param value: The value to search for.
    :return: The key mapping to *value*, or None if absent.
    """
    for key, val in Manifest.PRIORITY.items():
        if val == value:
            return key
    return None
def find_permission_key(value):
    """Reverse lookup in 'MmioRegion.MMIO_PERMISSIONS'.

    :param value: The value to search for.
    :return: The key mapping to *value*, or None if absent.
    """
    for key, val in MmioRegion.MMIO_PERMISSIONS.items():
        if val == value:
            return key
    return None
@pytest.fixture(scope="session")
def temp_test_data(tmpdir_factory):
    """Create a valid JSON manifest file in a session-scoped temp directory.

    See https://docs.pytest.org/en/latest/fixture.html for fixtures.

    :param tmpdir_factory: pytest factory for temporary directories.
    :return: Dict with keys:
        'dir': the temporary directory object created by this fixture,
        'json': path of the generated valid manifest JSON file,
        'manifest': the Manifest object parsed back from that file.
    """
    data_dir = tmpdir_factory.mktemp('test_data')
    json_path = dump_manifest_to_json(manifests[0], 'valid_partition',
                                      data_dir)
    return {'dir': data_dir,
            'json': json_path,
            'manifest': Manifest.from_json(json_path)}
# NOTE: the bare triple-quoted string below is a module-level expression
# used purely as documentation for the 'modified_json_params' table.
"""
'modified_json_params' contain the parameters to be used in the
'modified_json' fixture.
Each key in the dictionary represents a different parameter to be used by
'modified_json', so for each test which uses
the 'modified_json' fixture, the test will run len(modified_json_params) times,
each time with different parameters.
Each parameter is a dictionary which contains these keys:
'partition': A modified partition dictionary.
'assert': The expected assertion which must occur when running with this
parameter.
"""
modified_json_params = {
    # Cases that drop a mandatory manifest field.
    'missing_partition_name': {
        'partition': {k: manifests[0][k] for k in manifests[0] if k != 'name'},
        'assert': jexcep.ValidationError
    },
    'missing_partition_id': {
        'partition': {k: manifests[0][k] for k in manifests[0] if k != 'id'},
        'assert': jexcep.ValidationError
    },
    'missing_partition_priority': {
        'partition': {k: manifests[0][k] for k in manifests[0] if
                      k != 'priority'},
        'assert': jexcep.ValidationError
    },
    'missing_entry_point': {
        'partition': {k: manifests[0][k] for k in manifests[0] if
                      k != 'entry_point'},
        'assert': jexcep.ValidationError
    },
    'missing_stack_size': {
        'partition': {k: manifests[0][k] for k in manifests[0] if
                      k != 'stack_size'},
        'assert': jexcep.ValidationError
    },
    'missing_heap_size': {
        'partition': {k: manifests[0][k] for k in manifests[0] if
                      k != 'heap_size'},
        'assert': jexcep.ValidationError
    },
    'missing_source_files': {
        'partition': {k: manifests[0][k] for k in manifests[0] if
                      k != 'source_files'},
        'assert': jexcep.ValidationError
    },
    'missing_irqs_and_sids': {
        'partition': {k: manifests[0][k] for k in manifests[0] if
                      k not in ['services', 'irqs']},
        'assert': jexcep.ValidationError
    },
    # Cases that keep all fields but give one an invalid value.
    'empty_source_files': {
        'partition': dict(manifests[0], source_files=[]),
        'assert': jexcep.ValidationError
    },
    'invalid_minor_policy': {
        'partition': dict(manifests[0],
                          services=invalid_minor_version_policy_rot_srv),
        'assert': jexcep.ValidationError
    },
    'invalid_nspe_callable': {
        'partition': dict(manifests[0],
                          services=invalid_nspe_callable_rot_srv),
        'assert': jexcep.ValidationError
    },
    'missing_nspe_callable': {
        'partition': dict(manifests[0],
                          services=missing_nspe_callable_rot_srv),
        'assert': jexcep.ValidationError
    },
    'invalid_stack_size': {
        'partition': dict(manifests[0], stack_size='str'),
        'assert': jexcep.ValidationError
    },
    'invalid_heap_size': {
        'partition': dict(manifests[0], heap_size='str'),
        'assert': jexcep.ValidationError
    },
    'invalid_priority': {
        'partition': dict(manifests[0], priority='invalid_priority'),
        'assert': jexcep.ValidationError
    },
    'invalid_mmioregion_base': {
        'partition': dict(manifests[0],
                          mmio_regions=[invalid_mmioregion_base]),
        'assert': jexcep.ValidationError
    },
    'invalid_mmioregion_size': {
        'partition': dict(manifests[0],
                          mmio_regions=[invalid_mmioregion_size]),
        'assert': jexcep.ValidationError
    },
    'invalid_irq_num': {
        'partition': dict(manifests[0],
                          irqs=[{"line_num": "str", "signal": "ISR22"}]),
        'assert': jexcep.ValidationError
    },
    'not_exist_src_filename': {
        'partition': dict(manifests[0], source_files=['missing.cpp']),
        'assert': AssertionError
    },
    'invalid_partition_id_decimal': {
        'partition': dict(manifests[0], id=-1),
        'assert': jexcep.ValidationError
    },
    'invalid_partition_id_hex': {
        'partition': dict(manifests[0], id='0xFFFFFFFF'),
        'assert': jexcep.ValidationError
    },
    'duplicates_extern_sids': {
        'partition': dict(manifests[0], extern_sids=['SID66', 'SID66']),
        'assert': jexcep.ValidationError
    },
    'exceeding_services': {
        'partition': dict(manifests[1], services=exceeding_services),
        'assert': AssertionError
    }
}
@pytest.fixture(params=modified_json_params.values(),
                ids=modified_json_params.keys())
def modified_json(request, temp_test_data):
    """Write the current parameter's partition dict to a JSON manifest file.

    Parametrized over 'modified_json_params' (one run per entry); uses the
    'temp_test_data' fixture's directory. Source files listed in the
    manifest are deliberately NOT created.

    :param request: Fixture request holding the current parameter.
    :param temp_test_data: The 'temp_test_data' fixture.
    :return: Tuple of (path of the created JSON file, expected exception
        type for this parameter).
    """
    current_test = extract_test_name(request.node.name)
    json_path = dump_manifest_to_json(request.param['partition'],
                                      current_test,
                                      temp_test_data['dir'], False)
    return json_path, request.param['assert']
def test_invalid_json(modified_json):
    """Creating a Manifest from an invalid JSON manifest file must raise.

    :param modified_json: (json_path, expected_exception) pair from the
        'modified_json' fixture.
    """
    json_path, expected_error = modified_json
    with pytest.raises(expected_error):
        Manifest.from_json(json_path)
def test_valid_json(temp_test_data):
    """A Manifest parsed back from a valid JSON file equals the fixture's.

    :param temp_test_data: The 'temp_test_data' fixture.
    """
    parsed = Manifest.from_json(temp_test_data['json'])
    assert parsed == temp_test_data['manifest']
# Test parametrization decorator
# See https://docs.pytest.org/en/latest/parametrize.html#pytest-mark-parametrize-parametrizing-test-functions
# Contain the parameters to be used in the 'test_validate_partition_manifest'
# test. It defines a list of (manifest, assertion) tuples which each entry
# will be the input of the 'test_validate_partition_manifest' test, the test
# will run len(LIST_OF_TUPPLES) times, each time with different (manifest,
# assertion) tuple.
# The tuple fields are:
# 'manifest': A modified partition dictionary.
# 'assertion': A tuple containing the expected assertion and assertion
# string which must occur when running with this parameter.
# Each (manifests, assertion) tuple below is one invalid-manifest scenario:
#   'manifests': list of partition dictionaries,
#   'assertion': (expected exception type, expected message regex).
@pytest.mark.parametrize(
    'manifests, assertion',
    [
        pytest.param(
            [manifests[0], dict(manifests[1], name=manifests[0]['name'])],
            (ValueError, r'Partition name .* is not unique, .*'),
            id='duplicate_partition_name'
        ),
        pytest.param(
            [manifests[0], dict(manifests[1], id=manifests[0]['id'])],
            (ValueError, r'Partition id .* is not unique, .*'),
            id='duplicate_partition_id'
        ),
        pytest.param(
            [manifests[0], dict(manifests[1], services=manifests[0]['services'])],
            (ValueError, r'Root of Trust Service name .* is found in both .*'),
            id='duplicate_rot_srv_name'
        ),
        pytest.param(
            [manifests[0], dict(manifests[1], services=duplicate_signal_rot_services)],
            (ValueError, r'Root of Trust Service signal .* is found in both .*'),
            id='duplicate_rot_srv_signal'
        ),
        pytest.param(
            [manifests[0], dict(manifests[1], services=duplicate_identifier_rot_services)],
            (ValueError, r'Root of Trust Service identifier .* is found in both .*'),
            id='duplicate_rot_srv_identifier'
        ),
        pytest.param(
            [manifests[0], dict(manifests[1], irqs=duplicate_signal_irqs)],
            (ValueError, r'IRQ signal .* is found in both .*'),
            id='duplicate_irq_signal'
        ),
        pytest.param(
            [manifests[0], dict(manifests[1], irqs=duplicate_line_num_irqs)],
            (ValueError, r'IRQ line number .* is found in both .*'),
            id='duplicate_irq_line_num'
        ),
        pytest.param(
            [manifests[0], dict(manifests[1], extern_sids=['SID66', 'SID999'])],
            (ValueError, r'External SID\(s\) .* can\'t be found in any partition manifest.'),
            id='orphan_extern_ids'
        ),
        pytest.param(
            [manifests[0], dict(manifests[1], extern_sids=[manifests[0]['services'][0]['name']])],
            (ValueError, r'Detected a circular call dependency between the partitions.'),
            id='circular_call_dependency'
        ),
        pytest.param(
            [{k: manifests[0][k] for k in manifests[0] if k != 'extern_sids'},
             dict({k: manifests[1][k] for k in manifests[1] if k != 'services'
                   and k != 'irqs'}, services=spe_contained_rot_services)],
            (ValueError, r'Partition .* is not accessible from NSPE '
                         'and not referenced by any other partition.'),
            id='dead_partition'
        )
    ]
)
def test_validate_partition_manifest(request, temp_test_data, manifests,
                                     assertion):
    """validate_partition_manifests() must reject inconsistent manifests.

    Note: the 'manifests' parameter deliberately shadows the module-level
    'manifests' list inside this test.

    :param request: Fixture request object.
    :param temp_test_data: The 'temp_test_data' fixture.
    :param manifests: Partition dictionaries for the current scenario.
    :param assertion: (exception type, message regex) expected from
        validation.
    """
    current_test = extract_test_name(request.node.name)
    json_files = [
        dump_manifest_to_json(m, '%s_%d' % (current_test, i),
                              temp_test_data['dir'])
        for i, m in enumerate(manifests)
    ]
    parsed_manifests, _ = parse_manifests(json_files)
    with pytest.raises(assertion[0], match=assertion[1]):
        validate_partition_manifests(parsed_manifests)
# NOTE: the bare triple-quoted string below is a module-level expression
# used purely as documentation for the 'verify_json_params' table.
"""
'verify_json_params' contain the parameters to be used in the 'verify_json'
fixture. Each key in the dictionary represents a different parameter to be used
by 'verify_json', so for each test which uses the 'verify_json' fixture, the
test will run len(verify_json_params) times, each time with different
parameters.
Each parameter is a dictionary which contains these keys:
'partition': A modified partition dictionary.
'field': The modified field name.
'expected': The expected field object.
"""
verify_json_params = {
    # Each case drops an optional minor-version attribute from a RoT
    # service and states the defaulted RotService object expected after
    # parsing and validation.
    'missing_minor_version_rot_services': {
        'partition': dict(manifests[0],
                          services=missing_minor_version_rot_srv),
        'field': 'rot_services',
        'expected': [
            RotService(
                name='SID1', identifier='0x00000001', signal='SID1',
                minor_policy='RELAXED', non_secure_clients=True, minor_version=1
            )
        ]
    },
    'missing_minor_version_policy_rot_services': {
        'partition': dict(manifests[0],
                          services=missing_minor_version_policy_rot_srv),
        'field': 'rot_services',
        'expected': [
            RotService(
                name='SID2', identifier='0x00000002', signal='SID2',
                minor_policy='STRICT', non_secure_clients=True, minor_version=1
            )
        ]
    },
    'missing_minor_completley_rot_services': {
        'partition': dict(manifests[0],
                          services=missing_minor_completley_rot_srv),
        'field': 'rot_services',
        'expected': [
            RotService(
                name='SID2', identifier='0x00000002', signal='SID2',
                minor_policy='STRICT', non_secure_clients=True, minor_version=1
            )
        ]
    }
}
@pytest.fixture(params=verify_json_params.values(),
                ids=verify_json_params.keys())
def verify_json(request, tmpdir_factory):
    """Create two JSON manifest files for a field-verification test.

    The first file comes from the current 'verify_json_params' entry, the
    second from manifests[1] with its external SIDs cleared.
    Parametrized over 'verify_json_params'.

    :param request: Fixture request holding the current parameter.
    :param tmpdir_factory: pytest factory for temporary directories.
    :return: Dict with keys:
        'files_list': the created manifest JSON files,
        'field': the changed field in the first manifest,
        'expected': the expected value of that field after parsing.
    """
    out_dir = tmpdir_factory.mktemp('test_data')
    current_test = extract_test_name(request.node.name)
    files_list = [
        dump_manifest_to_json(request.param['partition'],
                              '%s1' % current_test, out_dir),
        dump_manifest_to_json(dict(manifests[1], extern_sids=[]),
                              '%s2' % current_test, out_dir),
    ]
    return {
        'files_list': files_list,
        'field': request.param['field'],
        'expected': request.param['expected'],
    }
def test_verify_json(verify_json):
    """Parse and validate both manifests, then check the modified field.

    :param verify_json: The 'verify_json' fixture.
    """
    parsed, _ = parse_manifests(verify_json['files_list'])
    validate_partition_manifests(parsed)
    assert getattr(parsed[0], verify_json['field']) == verify_json['expected']
@pytest.fixture(scope="function")
def test_template_setup(tmpdir_factory):
    """Create manifest JSON files, Manifest objects and template files.

    :param tmpdir_factory: pytest factory for temporary directories.
    :return: Dict with keys:
        'dir': path of the temporary directory,
        'template_files': the created template file paths,
        'manifest_files': the created manifest JSON file paths,
        'common_files': expected rendered common-template outputs,
        'manifests': the parsed Manifest objects,
        'region_list': regions returned by parse_manifests(),
        'filters': extra Jinja2 filters for generate_source_files().
    """
    def find_priority_key(value):
        """Reverse lookup in 'Manifest.PRIORITY'; None if absent."""
        for key, val in Manifest.PRIORITY.items():
            if val == value:
                return key
        return None

    def find_permission_key(value):
        """Reverse lookup in 'MmioRegion.MMIO_PERMISSIONS'; None if absent."""
        for key, val in MmioRegion.MMIO_PERMISSIONS.items():
            if val == value:
                return key
        return None

    work_dir = tmpdir_factory.mktemp('test_data')
    manifest_files = [
        dump_manifest_to_json(m, m['name'], work_dir) for m in manifests
    ]
    manifest_objects, regions = parse_manifests(manifest_files)
    filters = {
        'basename': os.path.basename,
        'find_priority_key': find_priority_key,
        'find_permission_key': find_permission_key
    }

    # Write the per-partition and common templates into the work dir.
    template_paths = [work_dir.join('_NAME_data.json.tpl'),
                      work_dir.join('common.json.tpl')]
    for content, path in zip([test_partition_template, test_common_template],
                             template_paths):
        path.write(content)
    template_files = [p.strpath for p in template_paths]

    # Write the expected output of the common template.
    expected_common_paths = [work_dir.join('common.json')]
    expected_common_paths[0].write(test_common_expected)
    expected_common_files = [p.strpath for p in expected_common_paths]

    return {
        'dir': work_dir.strpath,
        'template_files': template_files,
        'manifest_files': manifest_files,
        'common_files': expected_common_files,
        'manifests': manifest_objects,
        'region_list': regions,
        'filters': filters
    }
def test_generate_source_files(test_template_setup):
    """Render the templates and compare the generated JSON with expectations.

    :param test_template_setup: The 'test_template_setup' fixture.
    """
    out_dir = test_template_setup['dir']
    files_before = set(os.listdir(out_dir))

    partition_templates = filter(lambda filename: '_NAME_' in filename,
                                 test_template_setup['template_files'])
    common_templates = filter(lambda filename: '_NAME_' not in filename,
                              test_template_setup['template_files'])
    # Map each common template to its output path (template name minus the
    # trailing '.tpl' extension) inside the work directory.
    common_templates = {
        t: path_join(out_dir, os.path.basename(os.path.splitext(t)[0]))
        for t in common_templates
    }
    region_pair_list = list(
        itertools.combinations(test_template_setup['region_list'], 2))

    # Render the per-partition templates for every manifest, then the
    # common templates once.
    for manifest in test_template_setup['manifests']:
        generate_source_files(
            templates=manifest.templates_to_files(partition_templates,
                                                  out_dir, out_dir),
            render_args={
                'partition': manifest,
                'dependent_partitions': manifest.find_dependencies(
                    test_template_setup['manifests'])
            },
            extra_filters=test_template_setup['filters']
        )
    generate_source_files(
        common_templates,
        render_args={
            'partitions': test_template_setup['manifests'],
            'region_pair_list': region_pair_list
        },
        extra_filters=test_template_setup['filters']
    )

    files_after = set(os.listdir(out_dir))
    generated_files = list(files_after.difference(files_before))
    for gen_file in [os.path.join(out_dir, f) for f in generated_files]:
        # For each generated JSON file:
        # 1. Load it into 'generated'.
        # 2. If it came from a partition template ('generated' has a
        #    'name' key), the expectation is the original manifest JSON
        #    from the temp dir; otherwise (common template) build the
        #    expectation from the parsed manifest objects.
        # 3. Compare 'generated' with 'expected'.
        with open(gen_file) as fh:
            generated = json.load(fh)
        if 'name' in generated:
            input_file = os.path.join(out_dir, generated['name'] + '.json')
            assert os.path.isfile(input_file)
            assert input_file in test_template_setup['manifest_files']
            with open(input_file) as fh:
                expected = json.load(fh)
        else:
            expected = {
                'num_of_partitions': len(test_template_setup['manifests']),
                'partition_names': [m.name for m in
                                    test_template_setup['manifests']],
                'num_of_region_pairs': len(region_pair_list)
            }
        assert generated == expected
# Parameters for the 'circular_dependencies' fixture: each entry names the
# partitions (taken from 'manifests_for_circular_call_dependency_checks')
# to combine, plus the expected check_circular_call_dependencies() result
# for that topology.
circular_call_dependency_params = {
    'no manifests': {
        'manifests': [],
        'result': False
    },
    'one manifest': {
        'manifests': ['PARTITION1'],
        'result': False
    },
    '2 manifests with dependency': {
        'manifests': ['PARTITION1', 'PARTITION2'],
        'result': True
    },
    '2 manifests without dependency': {
        'manifests': ['PARTITION1', 'PARTITION3'],
        'result': False
    },
    '5 manifests with dependency': {
        'manifests': ['PARTITION1', 'PARTITION3', 'PARTITION4', 'PARTITION5', 'PARTITION6'],
        'result': True
    },
    '5 manifests without dependency': {
        'manifests': ['PARTITION1', 'PARTITION3', 'PARTITION4', 'PARTITION6', 'PARTITION7'],
        'result': False
    }
}
@pytest.fixture(params=circular_call_dependency_params.values(),
                ids=circular_call_dependency_params.keys())
def circular_dependencies(request, tmpdir_factory):
    """Dump the selected partition manifests to JSON files.

    Parametrized over 'circular_call_dependency_params'; the partitions are
    picked by name from 'manifests_for_circular_call_dependency_checks'.

    :param request: Fixture request holding the current parameter.
    :param tmpdir_factory: pytest factory for temporary directories.
    :return: Dict with keys:
        'files': list of generated manifest files,
        'result': expected result of check_circular_call_dependencies().
    """
    out_dir = tmpdir_factory.mktemp('test_data')
    selected = filter(lambda m: m['name'] in request.param['manifests'],
                      manifests_for_circular_call_dependency_checks)
    manifest_files = [
        dump_manifest_to_json(m, m['name'], out_dir) for m in selected
    ]
    return {'files': manifest_files, 'result': request.param['result']}
def test_check_circular_call_dependencies(circular_dependencies):
    """Detect circular call dependencies in predefined partition topologies.

    :param circular_dependencies: The 'circular_dependencies' fixture.
    """
    parsed, _ = parse_manifests(circular_dependencies['files'])
    detected = check_circular_call_dependencies(parsed)
    assert detected == circular_dependencies['result']

View File

@ -130,7 +130,6 @@ class TestGccToolchain(TestCase):
mock_target.c_lib = "std"
del mock_target.default_lib
mock_target.supported_c_libs = {"gcc_arm": ["std"]}
mock_target.is_TrustZone_secure_target = False
gcc_obj = GCC_ARM(mock_target)
@ -158,7 +157,6 @@ class TestGccToolchain(TestCase):
mock_target.c_lib = "sMALL"
del mock_target.default_lib
mock_target.supported_toolchains = ["GCC_ARM"]
mock_target.is_TrustZone_secure_target = False
gcc_arm_obj = GCC_ARM(mock_target)
self.assertIn("-DMBED_RTOS_SINGLE_THREAD", gcc_arm_obj.flags["common"])
self.assertIn("-D__NEWLIB_NANO", gcc_arm_obj.flags["common"])
@ -213,7 +211,6 @@ class TestIarToolchain(TestCase):
del mock_target.default_lib
mock_target.c_lib = "std"
mock_target.supported_c_libs = {"iar": ["std"]}
mock_target.is_TrustZone_secure_target = False
iar_obj = IAR(mock_target)
var = "-DMBED_MINIMAL_PRINTF"
@ -227,7 +224,6 @@ class TestIarToolchain(TestCase):
mock_target.c_lib = "sTD"
del mock_target.default_lib
mock_target.supported_toolchains = ["IAR"]
mock_target.is_TrustZone_secure_target = False
try:
IAR(mock_target)
except NotSupportedException:

View File

@ -591,17 +591,6 @@ class ARMC6(ARM_STD):
self.check_and_add_minimal_printf(target)
if target.is_TrustZone_secure_target:
if kwargs.get('build_dir', False):
# Output secure import library
build_dir = kwargs['build_dir']
secure_file = join(build_dir, "cmse_lib.o")
self.flags["ld"] += ["--import_cmse_lib_out=%s" % secure_file]
# Enable compiler security extensions
self.flags['cxx'].append("-mcmse")
self.flags['c'].append("-mcmse")
if target.is_TrustZone_non_secure_target:
# Add linking time preprocessor macro DOMAIN_NS
# (DOMAIN_NS is passed to compiler and assembler via CORTEX_SYMBOLS

View File

@ -92,15 +92,6 @@ class GCC(mbedToolchain):
self.flags["ld"].append(minimal_printf_wrap)
self.cpu = []
if target.is_TrustZone_secure_target:
# Enable compiler security extensions
self.cpu.append("-mcmse")
# Output secure import library
self.flags["ld"].extend([
"-Wl,--cmse-implib",
"-Wl,--out-implib=%s" % join(build_dir, "cmse_lib.o")
])
if target.is_TrustZone_non_secure_target:
# Add linking time preprocessor macro DOMAIN_NS
# (DOMAIN_NS is passed to compiler and assembler via CORTEX_SYMBOLS

View File

@ -63,14 +63,6 @@ class IAR(mbedToolchain):
self.check_c_lib_supported(target, "iar")
if target.is_TrustZone_secure_target:
# Enable compiler security extensions
self.flags["asm"] += ["--cmse"]
self.flags["common"] += ["--cmse"]
# Output secure import library
secure_file = join(build_dir, "cmse_lib.o")
self.flags["ld"] += ["--import_cmse_lib_out=%s" % secure_file]
if target.is_TrustZone_non_secure_target:
# Add linking time preprocessor macro DOMAIN_NS
# (DOMAIN_NS is passed to compiler and assembler via CORTEX_SYMBOLS

View File

@ -990,15 +990,6 @@ class mbedToolchain(with_metaclass(ABCMeta, object)):
self.ld.append(define_string)
self.flags["ld"].append(define_string)
if self.target.is_PSA_secure_target:
for flag, param in [
("MBED_PUBLIC_RAM_START", "target.public-ram-start"),
("MBED_PUBLIC_RAM_SIZE", "target.public-ram-size")
]:
define_string = self.make_ld_define(flag, params[param].value)
self.ld.append(define_string)
self.flags["ld"].append(define_string)
if hasattr(self.target, 'post_binary_hook'):
if self.target.post_binary_hook is None:
define_string = self.make_ld_define(